Dataset schema (column name: type, with the observed length range, value range, or number of distinct values):

blob_id: string (length 40)
directory_id: string (length 40)
path: string (length 3 to 616)
content_id: string (length 40)
detected_licenses: list (length 0 to 112)
license_type: string (2 classes)
repo_name: string (length 5 to 115)
snapshot_id: string (length 40)
revision_id: string (length 40)
branch_name: string (777 classes)
visit_date: timestamp[us] (2015-08-06 10:31:46 to 2023-09-06 10:44:38)
revision_date: timestamp[us] (1970-01-01 02:38:32 to 2037-05-03 13:00:00)
committer_date: timestamp[us] (1970-01-01 02:38:32 to 2023-09-06 01:08:06)
github_id: int64 (4.92k to 681M, nullable)
star_events_count: int64 (0 to 209k)
fork_events_count: int64 (0 to 110k)
gha_license_id: string (22 classes)
gha_event_created_at: timestamp[us] (2012-06-04 01:52:49 to 2023-09-14 21:59:50, nullable)
gha_created_at: timestamp[us] (2008-05-22 07:58:19 to 2023-08-21 12:35:19, nullable)
gha_language: string (149 classes)
src_encoding: string (26 classes)
language: string (1 class)
is_vendor: bool (2 classes)
is_generated: bool (2 classes)
length_bytes: int64 (3 to 10.2M)
extension: string (188 classes)
content: string (length 3 to 10.2M)
authors: list (length 1)
author_id: string (length 1 to 132)
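The sample rows reproduced below follow this schema; each row's content field carries the full text of one source file. As a minimal sketch of how a shard with these columns could be inspected (assuming it has been downloaded as a local Parquet file; the file name used here is a placeholder, not taken from this page), pandas is enough:

```python
import pandas as pd

# Placeholder shard name; substitute the file actually downloaded from the dataset repository.
df = pd.read_parquet("train-00000-of-00064.parquet")

# The columns listed above should appear with the stated types.
print(df.dtypes)

# Example filter: permissive, non-vendored, non-generated files, smallest first.
mask = (df["license_type"] == "permissive") & ~df["is_vendor"] & ~df["is_generated"]
sample = df.loc[mask, ["repo_name", "path", "length_bytes"]].sort_values("length_bytes")
print(sample.head())
```

The is_vendor and is_generated flags make it straightforward to drop vendored or auto-generated files before any further processing.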
5a2a55a993186bacf8fed7bada23f6ab0264b6b5 | 8e55b122aa7b18b1734aafc3699b477c1e85cb91 | /pippy/fx/experimental/refinement_types.py | 665c9d0d651b4ed929e974d822a447809f3d26dd | [
"BSD-3-Clause"
] | permissive | yunxing/tau | 0a7a641db49da795de70e8db9748d93205be31ac | c679248814a76c915e651806f1b4f30a3de9fe92 | refs/heads/main | 2023-03-17T01:02:23.222622 | 2022-09-30T23:18:25 | 2022-09-30T23:18:25 | 543,822,164 | 0 | 0 | BSD-3-Clause | 2022-09-30T23:13:57 | 2022-09-30T23:13:56 | null | UTF-8 | Python | false | false | 457 | py | # Copyright (c) Meta Platforms, Inc. and affiliates
class Equality:
def __init__(self, lhs, rhs):
self.lhs = lhs
self.rhs = rhs
def __str__(self):
return f'{self.lhs} = {self.rhs}'
def __repr__(self):
return f'{self.lhs} = {self.rhs}'
def __eq__(self, other):
if isinstance(other, Equality):
return self.lhs == other.lhs and self.rhs == other.rhs
else:
return False
| [
"[email protected]"
] | |
fd99bf3d3c0e15b0191184e3ac916eff8f0f70d2 | a57e66be33512a7e2e99adb6f597151b56c4c373 | /psgreet.py | 23cec5ae7e9d198ceaf39575804a96c85a4837e3 | [] | no_license | ravijaya/sep28 | 17025ea0997a371f54a6374f90d4bf56e0206840 | 8907f4b03ac2c4b2f1806d0d7cf3fd6aa680680c | refs/heads/master | 2022-12-18T23:58:12.989144 | 2020-09-30T12:13:15 | 2020-09-30T12:13:15 | 299,901,698 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 198 | py | name = input('enter the name :')
city = input('enter the city :')
zip_code = int(input('enter the postal code :'))
print('name :', name)
print('city :', city)
print(zip_code)
print(type(zip_code))
| [
"[email protected]"
] | |
e095bb08830df4cb39df6e6f8c0f7a370cf6d071 | 524baf7de05bd3fc5b9d08083cbb0b7b47a67979 | /40.py | d5ffe3d133240d1686dcb26040a6492346e80813 | [] | no_license | gk90731/100-questions-practice | 1356dd577516567a5c51a4257f59fe01b123e7ff | f855549e037b9924dd6f0370dc2f2a53765d9227 | refs/heads/master | 2020-05-25T14:04:59.642819 | 2019-05-21T12:49:04 | 2019-05-21T12:49:04 | 187,835,741 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 195 | py | '''Question: Please try to guess what is missing in the following code and add the missing part so that the code works fine.
import math
print(math.pow(2))'''
import math
print(math.pow(2,3))
| [
"[email protected]"
] | |
d8c1268253a55b552a1766dca9855a16bb18ab78 | df489fddec1ffa936f0223efca3a35a22df36dc0 | /99_backup/25_各国历年二氧化碳CO2排放量统计分析/challenge7_1.py | 7c9a6e07c6db0eee678564cc8f73ddede36755f8 | [] | no_license | echo-xr/pc | 3af106530815956feb458889408085285b3fd8f5 | 03160675e912b4e4ad4642e14b5ab0230b973f6c | refs/heads/master | 2020-04-26T16:54:03.408688 | 2019-03-03T11:27:34 | 2019-03-03T11:27:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,730 | py | def co2():
import pandas as pd
    # Read the Data sheet
    data = pd.read_excel('ClimateChange.xlsx')
    # Keep the rows whose 'Series code' is 'EN.ATM.CO2E.KT' and set the index
    data = data[data['Series code']=='EN.ATM.CO2E.KT'].set_index('Country code')
    # Drop the redundant first five columns, keeping only the yearly emission data
    data.drop(data.columns[:5], axis=1, inplace=True)
    # Replace the '..' placeholders in the data with NaN
    data.replace({'..': pd.np.nan}, inplace=True)
    # Forward-fill and then back-fill the NaN values
    data = data.fillna(method='ffill', axis=1).fillna(method='bfill', axis=1)
    # Read the Country sheet
    country = pd.read_excel('ClimateChange.xlsx', 'Country')
    # Use the country code as the index to make merging easier
    country.index = country['Country code']
    # Concatenate the two Series: each country's total emissions and its income group
    df = pd.concat([data.sum(axis=1), country['Income group']], axis=1)
    # Sum emissions
    a = df.groupby('Income group').sum()
    # Set the column name
    a.columns = ['Sum emissions']
    # Add a column with the country names to df
    df[2] = country['Country name']
    # Highest-emitting country and its emissions within each income group
    h = df.sort_values(0, ascending=False).groupby('Income group').head(1).set_index('Income group')
    # Set the column names
    h.columns = ['Highest emissions', 'Highest emission country']
    # Lowest-emitting country and its emissions within each income group
    l = df[df[0]>0].sort_values(0).groupby('Income group').head(1).set_index('Income group')
    # Set the column names
    l.columns = ['Lowest emissions', 'Lowest emission country']
    # Return everything; concat sorts the index automatically
return pd.concat([a, h, l], axis=1)
| [
"[email protected]"
] | |
cd7f37a9b936647ac196bbd22cf1e6cb455cb8de | acec0a3d2220bbd0531ab44159e20bf28a9dd71b | /src/moviesapp/migrations/0001_initial.py | fcd20ded5a642116b0df6b114cd73f280a471420 | [] | no_license | desecho/movies | ad48c262f836a65902e7135fe7d3e3dde9e5c69c | 04141e4cfc885ba6c53328e1222980b85d9828ef | refs/heads/master | 2023-06-22T23:06:53.114491 | 2023-06-10T14:20:55 | 2023-06-10T14:21:35 | 8,838,721 | 14 | 4 | null | 2022-10-01T15:00:05 | 2013-03-17T18:00:49 | Python | UTF-8 | Python | false | false | 8,907 | py | import annoying.fields
import django.core.validators
import django.utils.timezone
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("auth", "0001_initial"),
]
operations = [
migrations.CreateModel(
name="User",
fields=[
("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)),
("password", models.CharField(max_length=128, verbose_name="password")),
("last_login", models.DateTimeField(default=django.utils.timezone.now, verbose_name="last login")),
(
"is_superuser",
models.BooleanField(
default=False,
help_text="Designates that this user has all permissions without explicitly assigning them.",
verbose_name="superuser status",
),
),
(
"username",
models.CharField(
help_text="Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.",
unique=True,
max_length=30,
verbose_name="username",
validators=[
django.core.validators.RegexValidator("^[\\w.@+-]+$", "Enter a valid username.", "invalid")
],
),
),
("first_name", models.CharField(max_length=30, verbose_name="first name", blank=True)),
("last_name", models.CharField(max_length=30, verbose_name="last name", blank=True)),
("email", models.EmailField(max_length=75, verbose_name="email address", blank=True)),
(
"is_staff",
models.BooleanField(
default=False,
help_text="Designates whether the user can log into this admin site.",
verbose_name="staff status",
),
),
(
"is_active",
models.BooleanField(
default=True,
help_text="Designates whether this user should be treated as active. Unselect this instead of deleting accounts.",
verbose_name="active",
),
),
("date_joined", models.DateTimeField(default=django.utils.timezone.now, verbose_name="date joined")),
(
"groups",
models.ManyToManyField(
related_query_name="user",
related_name="user_set",
to="auth.Group",
blank=True,
help_text="The groups this user belongs to. A user will get all permissions granted to each of his/her group.",
verbose_name="groups",
),
),
(
"user_permissions",
models.ManyToManyField(
related_query_name="user",
related_name="user_set",
to="auth.Permission",
blank=True,
help_text="Specific permissions for this user.",
verbose_name="user permissions",
),
),
],
options={
"abstract": False,
"verbose_name": "user",
"verbose_name_plural": "users",
},
bases=(models.Model,),
),
migrations.CreateModel(
name="Action",
fields=[
("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)),
(
"name",
models.CharField(
max_length=255,
),
),
],
bases=(models.Model,),
),
migrations.CreateModel(
name="ActionRecord",
fields=[
("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)),
("comment", models.CharField(max_length=255, null=True, blank=True)),
("rating", models.IntegerField(null=True, blank=True)),
("date", models.DateTimeField(auto_now_add=True)),
("action", models.ForeignKey(to="moviesapp.Action", on_delete=models.CASCADE)),
],
bases=(models.Model,),
),
migrations.CreateModel(
name="List",
fields=[
("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)),
("title", models.CharField(max_length=255)),
("key_name", models.CharField(max_length=255)),
],
bases=(models.Model,),
),
migrations.CreateModel(
name="Movie",
fields=[
("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)),
(
"title",
models.CharField(
max_length=255,
),
),
("title_ru", models.CharField(max_length=255)),
("overview", models.TextField(null=True, blank=True)),
("plot", models.TextField(null=True, blank=True)),
("director", models.CharField(max_length=255, null=True, blank=True)),
("writer", models.CharField(max_length=255, null=True, blank=True)),
("genre", models.CharField(max_length=255, null=True, blank=True)),
("actors", models.CharField(max_length=255, null=True, blank=True)),
("imdb_id", models.CharField(unique=True, max_length=15, verbose_name="IMDB id")),
("tmdb_id", models.IntegerField(unique=True, verbose_name="TMDB id")),
("imdb_rating", models.DecimalField(null=True, max_digits=2, decimal_places=1)),
(
"poster",
models.CharField(
max_length=255,
null=True,
),
),
(
"release_date",
models.DateField(
null=True,
),
),
("runtime", models.TimeField(null=True, blank=True)),
("homepage", models.URLField(null=True, verbose_name="\xd1\x81\xd0\xb0\xd0\xb9\xd1\x82", blank=True)),
("trailers", annoying.fields.JSONField(null=True, blank=True)),
],
options={
"ordering": ["pk"],
},
bases=(models.Model,),
),
migrations.CreateModel(
name="Record",
fields=[
("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)),
("rating", models.IntegerField(default=0)),
(
"comment",
models.CharField(
default="",
max_length=255,
),
),
(
"date",
models.DateTimeField(
auto_now_add=True,
),
),
("list", models.ForeignKey(to="moviesapp.List", on_delete=models.CASCADE)),
("movie", models.ForeignKey(related_name="records", to="moviesapp.Movie", on_delete=models.CASCADE)),
("user", models.ForeignKey(to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),
],
bases=(models.Model,),
),
migrations.AddField(
model_name="actionrecord",
name="list",
field=models.ForeignKey(blank=True, to="moviesapp.List", null=True, on_delete=models.CASCADE),
preserve_default=True,
),
migrations.AddField(
model_name="actionrecord",
name="movie",
field=models.ForeignKey(to="moviesapp.Movie", on_delete=models.CASCADE),
preserve_default=True,
),
migrations.AddField(
model_name="actionrecord",
name="user",
field=models.ForeignKey(to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE),
preserve_default=True,
),
]
| [
"[email protected]"
] | |
ca60ffe0443bb07e494632fa5f39f6372218f4ec | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/125/usersdata/172/29042/submittedfiles/ap1.py | 683a565ff52ea8deb8c87ed40b155d4040448259 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 274 | py | # -*- coding: utf-8 -*-
a=float(input('digite o 1° número:'))
b=float(input('digite o 2° número:'))
c=float(input('digite o 3° número:'))
if a<=b and a<=c:
print(a)
if b<=c:
print(b)
print(c)
else :
print(c)
print(b)
| [
"[email protected]"
] | |
9b875b0f2099e0bfd6817014a1288c30d4e9b0be | c4ce3e9115cd393eca4152dadf0178ab813c7851 | /13.자연어 처리 기술-16T/course/nlp/applications/named_entity_recognition/tf_train.py | 70ef05e7ed576081df65a147faad777a7a78158d | [] | no_license | ckiekim/TIL | 2314cbfd304d60240ece879a0e405ec4dd73e386 | bc77e2c9e8d10757ee07c6c6c4d3c08f8539d376 | refs/heads/master | 2023-02-20T16:59:54.805208 | 2023-02-08T09:00:49 | 2023-02-08T09:00:49 | 188,774,191 | 0 | 2 | null | 2019-05-27T05:28:57 | 2019-05-27T05:05:51 | null | UTF-8 | Python | false | false | 12,606 | py | """
Named Entity Recognition
Author : Sangkeun Jung (2017)
- using Tensorflow
"""
import sys, os, inspect
# add common to path
from pathlib import Path
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
common_path = str(Path(currentdir).parent.parent)
sys.path.append( common_path )
from common.nlp.vocab import Vocab
from common.nlp.data_loader import N2NTextData
from common.nlp.converter import N2NConverter
from dataset import NERDataset
from dataset import load_data
from common.ml.hparams import HParams
import numpy as np
import copy
import time
import tensorflow as tf
from tensorflow.python.ops import rnn
from tensorflow.contrib.layers.python.layers import linear
from tensorflow.python.ops import variable_scope
from tensorflow.contrib.seq2seq import sequence_loss
from common.ml.tf.deploy import freeze_graph
print( "Tensorflow Version : ", tf.__version__)
class NER():
def __init__(self, hps, mode="train"):
self.hps = hps
self.x = tf.placeholder(tf.int32, [None, hps.num_steps], name="pl_tokens")
self.y = tf.placeholder(tf.int32, [None, hps.num_steps], name="pl_target")
self.w = tf.placeholder(tf.float32, [None, hps.num_steps], name="pl_weight")
self.keep_prob = tf.placeholder(tf.float32, [], name="pl_keep_prob")
### 4 blocks ###
# 1) embedding
# 2) dropout on input embedding
# 3) sentence encoding using rnn
# 4) bidirectional rnn's output to target classes
# 5) loss calcaulation
def _embedding(x):
# character embedding
shape = [hps.vocab_size, hps.emb_size]
initializer = tf.initializers.variance_scaling(distribution="uniform", dtype=tf.float32)
emb_mat = tf.get_variable("emb", shape, initializer=initializer, dtype=tf.float32)
input_emb = tf.nn.embedding_lookup(emb_mat, x) # [batch_size, sent_len, emb_dim]
# split input_emb -> num_steps
step_inputs = tf.unstack(input_emb, axis=1)
return step_inputs
def _sequence_dropout(step_inputs, keep_prob):
# apply dropout to each input
# input : a list of input tensor which shape is [None, input_dim]
with tf.name_scope('sequence_dropout') as scope:
step_outputs = []
for t, input in enumerate(step_inputs):
step_outputs.append( tf.nn.dropout(input, keep_prob) )
return step_outputs
def sequence_encoding_n2n(step_inputs, seq_length, cell_size):
# birnn based N2N encoding and output
f_rnn_cell = tf.contrib.rnn.GRUCell(cell_size, reuse=False)
b_rnn_cell = tf.contrib.rnn.GRUCell(cell_size, reuse=False)
_inputs = tf.stack(step_inputs, axis=1)
# step_inputs = a list of [batch_size, emb_dim]
# input = [batch_size, num_step, emb_dim]
# np.stack( [a,b,c,] )
outputs, states, = tf.nn.bidirectional_dynamic_rnn( f_rnn_cell,
b_rnn_cell,
_inputs,
sequence_length=tf.cast(seq_length, tf.int64),
time_major=False,
dtype=tf.float32,
scope='birnn',
)
output_fw, output_bw = outputs
states_fw, states_bw = states
output = tf.concat([output_fw, output_bw], 2)
step_outputs = tf.unstack(output, axis=1)
final_state = tf.concat([states_fw, states_bw], 1)
return step_outputs # a list of [batch_size, enc_dim]
def _to_class_n2n(step_inputs, num_class):
T = len(step_inputs)
step_output_logits = []
for t in range(T):
# encoder to linear(map)
out = step_inputs[t]
if t==0: out = linear(out, num_class, scope="Rnn2Target")
else: out = linear(out, num_class, scope="Rnn2Target", reuse=True)
step_output_logits.append(out)
return step_output_logits
def _loss(step_outputs, step_refs, weights):
# step_outputs : a list of [batch_size, num_class] float32 - unscaled logits
# step_refs : [batch_size, num_steps] int32
# weights : [batch_size, num_steps] float32
# calculate sequence wise loss function using cross-entropy
_batch_output_logits = tf.stack(step_outputs, axis=1)
loss = sequence_loss(
logits=_batch_output_logits,
targets=step_refs,
weights=weights
)
return loss
seq_length = tf.reduce_sum(self.w, 1) # [batch_size]
step_inputs = _embedding(self.x)
step_inputs = _sequence_dropout(step_inputs, self.keep_prob)
step_enc_outputs = sequence_encoding_n2n(step_inputs, seq_length, hps.enc_dim)
step_outputs = _to_class_n2n(step_enc_outputs, hps.num_target_class)
self.loss = _loss(step_outputs, self.y, self.w)
# step_preds and step_out_probs
step_out_probs = []
step_out_preds = []
for _output in step_outputs:
_out_probs = tf.nn.softmax(_output)
_out_pred = tf.argmax(_out_probs, 1)
step_out_probs.append(_out_probs)
step_out_preds.append(_out_pred)
# stack for interface
self.step_out_probs = tf.stack(step_out_probs, axis=1, name="step_out_probs")
self.step_out_preds = tf.stack(step_out_preds, axis=1, name="step_out_preds")
self.global_step = tf.get_variable("global_step", [], tf.int32, initializer=tf.zeros_initializer, trainable=False)
if mode == "train":
optimizer = tf.train.AdamOptimizer(hps.learning_rate)
self.train_op = optimizer.minimize(self.loss, global_step=self.global_step)
else:
self.train_op = tf.no_op()
for v in tf.trainable_variables(): print(v.name)
@staticmethod
def get_default_hparams():
return HParams(
learning_rate = 0.01,
keep_prob = 0.5,
)
def train(train_id_data, num_vocabs, num_taget_class):
#
# train sentiment analysis using given train_id_data
#
max_epoch = 300
model_dir = "./trained_models"
hps = NER.get_default_hparams()
hps.update(
batch_size= 100,
num_steps = 128,
emb_size = 50,
enc_dim = 100,
vocab_size=num_vocabs,
num_target_class=num_taget_class
)
with tf.variable_scope("model"):
model = NER(hps, "train")
sv = tf.train.Supervisor(is_chief=True,
logdir=model_dir,
summary_op=None,
global_step=model.global_step)
# tf assign compatible operators for gpu and cpu
tf_config = tf.ConfigProto(allow_soft_placement=True)
with sv.managed_session(config=tf_config) as sess:
local_step = 0
prev_global_step = sess.run(model.global_step)
train_data_set = NERDataset(train_id_data, hps.batch_size, hps.num_steps)
losses = []
while not sv.should_stop():
fetches = [model.global_step, model.loss, model.train_op]
a_batch_data = next( train_data_set.iterator )
y, x, w = a_batch_data
fetched = sess.run(fetches, {
model.x: x,
model.y: y,
model.w: w,
model.keep_prob: hps.keep_prob,
}
)
local_step += 1
_global_step = fetched[0]
_loss = fetched[1]
losses.append( _loss )
if local_step < 10 or local_step % 10 == 0:
epoch = train_data_set.get_epoch_num()
print("Epoch = {:3d} Step = {:7d} loss = {:5.3f}".format(epoch, _global_step, np.mean(losses)) )
_loss = []
if epoch >= max_epoch : break
print("Training is done.")
sv.stop()
# model.out_pred, model.out_probs
freeze_graph(model_dir, "model/step_out_preds,model/step_out_probs", "frozen_graph.tf.pb") ## freeze graph with params to probobuf format
from tensorflow.core.framework import graph_pb2
def predict(token_vocab, target_vocab, sent):
os.environ['CUDA_VISIBLE_DEVICES'] = '-1' # force to use cpu only (prediction)
model_dir = "./trained_models"
# prepare sentence converting
# to make raw sentence to id data easily
pred_data = N2NTextData(sent, mode='sentence')
pred_id_data = N2NConverter.convert(pred_data, target_vocab, token_vocab)
pred_data_set = NERDataset(pred_id_data, 1, 128)
#
a_batch_data = next(pred_data_set.predict_iterator) # a result
b_nes_id, b_token_ids, b_weight = a_batch_data
# Restore graph
# note that frozen_graph.tf.pb contains graph definition with parameter values in binary format
_graph_fn = os.path.join(model_dir, 'frozen_graph.tf.pb')
with tf.gfile.GFile(_graph_fn, "rb") as f:
graph_def = tf.GraphDef()
graph_def.ParseFromString(f.read())
with tf.Graph().as_default() as graph:
tf.import_graph_def(graph_def)
with tf.Session(graph=graph) as sess:
# to check load graph
#for n in tf.get_default_graph().as_graph_def().node: print(n.name)
# make interface for input
pl_token = graph.get_tensor_by_name('import/model/pl_tokens:0')
pl_weight = graph.get_tensor_by_name('import/model/pl_weight:0')
pl_keep_prob = graph.get_tensor_by_name('import/model/pl_keep_prob:0')
# make interface for output
step_out_preds = graph.get_tensor_by_name('import/model/step_out_preds:0')
step_out_probs = graph.get_tensor_by_name('import/model/step_out_probs:0')
# predict sentence
b_best_step_pred_indexs, b_step_pred_probs = sess.run([step_out_preds, step_out_probs],
feed_dict={
pl_token : b_token_ids,
pl_weight : b_weight,
pl_keep_prob : 1.0,
}
)
best_step_pred_indexs = b_best_step_pred_indexs[0]
step_pred_probs = b_step_pred_probs[0]
step_best_targets = []
step_best_target_probs = []
for time_step, best_pred_index in enumerate(best_step_pred_indexs):
_target_class = target_vocab.get_symbol(best_pred_index)
step_best_targets.append( _target_class )
_prob = step_pred_probs[time_step][best_pred_index]
step_best_target_probs.append( _prob )
for idx, char in enumerate(list(sent)):
print('{}\t{}\t{}'.format(char, step_best_targets[idx], step_best_target_probs[idx]) )
if __name__ == '__main__':
train_id_data, token_vocab, target_vocab = load_data()
num_vocabs = token_vocab.get_num_tokens()
num_target_class = target_vocab.get_num_targets()
train_data_set = NERDataset(train_id_data, 5, 128)
train(train_id_data, num_vocabs, num_target_class)
predict(token_vocab, target_vocab, '의정지기단은 첫 사업으로 45 명 시의원들의 선거 공약을 수집해 개인별로 카드를 만들었다.')
predict(token_vocab, target_vocab, '한국소비자보호원은 19일 시판중인 선물세트의 상당수가 과대 포장된 것으로 드러났다고 밝혔다.')
| [
"[email protected]"
] | |
7ccbfdbf9eb671918f8f2d2beca9b69a75d10e8d | a3e1e797acb16bf4d1e298271545927df8eaeae7 | /tests/unit/services/storage/test_base.py | 81a39a0af33497a1293be5621915105e9b1b977e | [
"MIT"
] | permissive | totem/cluster-deployer | 481b11b2953a3e195c46762659c97fceca75945a | 75160f051250fd782af42af472f965d50f721ff5 | refs/heads/develop | 2022-10-12T09:09:30.417412 | 2017-06-30T22:32:56 | 2017-06-30T22:32:56 | 23,778,754 | 0 | 2 | MIT | 2022-09-13T21:48:32 | 2014-09-08T04:03:06 | Python | UTF-8 | Python | false | false | 2,592 | py | import datetime
from freezegun import freeze_time
from mock import MagicMock
from nose.tools import raises
import pytz
from deployer.services.storage.base import AbstractStore
from tests.helper import dict_compare
NOW = datetime.datetime(2022, 01, 01, tzinfo=pytz.UTC)
NOW_NOTZ = datetime.datetime(2022, 01, 01)
class TestAbstractStore:
def setup(self):
self.store = AbstractStore()
@raises(NotImplementedError)
def test_get(self):
self.store.create_deployment(MagicMock())
@raises(NotImplementedError)
def test_get_deployment(self):
self.store.get_deployment('fake_id')
@raises(NotImplementedError)
def test_update_state(self):
self.store.update_state('fake_id', 'PROMOTED')
@raises(NotImplementedError)
def test_update_runtime_upstreams(self):
self.store.update_runtime_upstreams('fake_id', {})
@raises(NotImplementedError)
def test_update_runtime_units(self):
self.store.update_runtime_units('fake_id', [])
@raises(NotImplementedError)
def test_update_state_bulk(self):
self.store.update_state_bulk('myapp', 'DECOMMISSIONED')
@raises(NotImplementedError)
def test_get_health(self):
self.store.health()
@freeze_time(NOW_NOTZ)
def test_add_event(self):
# Given: Mock implementation for adding raw event
self.store._add_raw_event = MagicMock()
# When: I add event to the store
self.store.add_event('MOCK_EVENT')
# Then: Event gets added to the store
self.store._add_raw_event.assert_called_once_with({
'type': 'MOCK_EVENT',
'component': 'deployer',
'details': None,
'date': NOW_NOTZ
})
@raises(NotImplementedError)
def test_add_raw_event(self):
self.store.add_event({})
def test_setup(self):
self.store.setup()
# NOOP
@raises(NotImplementedError)
def test_find_apps(self):
self.store.find_apps()
@raises(NotImplementedError)
def test_filter_deployments(self):
self.store.filter_deployments('myapp')
@freeze_time(NOW)
def test_apply_modified_ts(self):
# When: I apply modified timestamp for given deployemnt
deployement = self.store.apply_modified_ts({
'deployement': {
'id': 'test'
}
})
# Then: Modified timestamp is applied as expected
dict_compare(deployement, {
'deployement': {
'id': 'test'
},
'modified': NOW
})
| [
"[email protected]"
] | |
d4e55b4a3c1058d466250a81fc43250a26f6e81e | 79f541042e4b4d6bb443e7a758ca918817ea0f33 | /PythonGTK/Examples/19_cellRendererPixbuf.py | 075ed4f57dca6d57224051e634c8867f76537d47 | [] | no_license | ashutoshm1771/Source-Code-from-Tutorials | d5f950db8f5f648e87303835e9558eeba404939a | f5552d4bd0f4bebcf5c674ff730fcb61f2d7a1ce | refs/heads/master | 2020-09-15T06:08:31.777622 | 2019-11-22T09:08:31 | 2019-11-22T09:08:31 | 223,364,275 | 4 | 0 | null | 2019-11-22T09:01:51 | 2019-11-22T09:01:48 | null | UTF-8 | Python | false | false | 962 | py | from gi.repository import Gtk
class CellRendererPixbufWindow(Gtk.Window):
def __init__(self):
Gtk.Window.__init__(self, title="CellRendererPixbuf Example")
self.set_default_size(200, 200)
self.liststore = Gtk.ListStore(str, str)
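        # Column 0 is the row label, column 1 the name of a themed icon.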
self.liststore.append(["New", "document-new"])
self.liststore.append(["Open", "document-open"])
self.liststore.append(["Save", "document-save"])
treeview = Gtk.TreeView(model=self.liststore)
renderer_text = Gtk.CellRendererText()
column_text = Gtk.TreeViewColumn("Text", renderer_text, text=0)
treeview.append_column(column_text)
renderer_pixbuf = Gtk.CellRendererPixbuf()
column_pixbuf = Gtk.TreeViewColumn("Image", renderer_pixbuf, icon_name=1)
treeview.append_column(column_pixbuf)
self.add(treeview)
win = CellRendererPixbufWindow()
win.connect("delete-event", Gtk.main_quit)
win.show_all()
Gtk.main()
| [
"[email protected]"
] | |
f7fb4d9a7d5027b2266d958ce939e45c3c255fd8 | 571a89f94f3ebd9ec8e6b618cddb7d05811e0d62 | /past202010/h/main.py | 3b515938a65636dfb734674b57c922c04b987390 | [] | no_license | ryu19-1/atcoder_python | 57de9e1db8ff13a107b5861f8f6a231e40366313 | cc24b3c2895aad71d40cefbb8e2893dc397b8f4f | refs/heads/master | 2023-05-10T05:32:16.507207 | 2021-05-19T17:48:10 | 2021-05-19T17:48:10 | 368,954,430 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,409 | py | #!/usr/bin/env python3
import sys
from collections import deque, Counter
from heapq import heappop, heappush
from bisect import bisect_right
from itertools import accumulate
sys.setrecursionlimit(10**6)
INF = 10**12
m = 10**9 + 7
def main():
N, M, K = map(int, input().split())
S = [list(input()) for _ in range(N)]
    # Keep count as 2-D prefix sums (one cumulative table per digit)
count = [[[0] * (M+1) for _ in range(N+1)] for _ in range(10)]
for i in range(N):
for j in range(M):
count[int(S[i][j])][i+1][j+1] += 1
for k in range(10):
for i in range(N):
for j in range(M):
count[k][i + 1][j + 1] += count[k][i + 1][j]
for j in range(M+1):
count[k][i + 1][j] += count[k][i][j]
# print(count[1])
for n in range(min(N, M), 0, -1):
# print(n)
for i in range(N - n + 1):
for j in range(M - n + 1):
                # OK if the number of digit k inside (i,j)..(i+n,j+n) plus K reaches n**2
for k in range(10):
# print(n, i, j, k)
cnt = count[k][i + n][j + n] - count[k][i +
n][j] - count[k][i][j + n] + count[k][i][j]
if cnt + K >= n * n:
print(n)
exit()
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
555768f7f3294b960854d8e29e515027c5aefc09 | c1e87e9a7f0f2e81e3113821c21378f7b6436b6f | /Щелчок/15_поляков/22.py | 23cd2689e5ef0c8d8a5c67a9dc002aa76f830d5a | [] | no_license | Pochemu/Activity | 8e2a7ec4f6b7fd233c0ee48e893733b077aac7a4 | 1b21e674635ff95104e18e93241c30020032e26a | refs/heads/main | 2023-07-09T04:04:06.337321 | 2021-07-06T21:38:26 | 2021-07-06T21:38:26 | 337,492,398 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 193 | py | for i in range(101, 1000):
x = i
L = x - 21
M = x + 12
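    # Subtraction-based Euclid: the loop below leaves L == M == gcd(x - 21, x + 12).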
while L != M:
if L > M:
L = L - M
else:
M = M - L
if M == 11:
print(i) | [
"[email protected]"
] | |
0d2cb05f03c5148d545df139b233bb65df55f7c4 | 21d1d0cade05ae0ab3dff1598d64f80cef9a411d | /Python/Programmers/Level3/리틀프렌즈사천성.py | 7aabad1adc691ca74efc1da5c8df28984e00bf84 | [] | no_license | louisuss/Algorithms-Code-Upload | 56d1a140a1674a53c13bcec5be96ea6da7c35219 | 6c97c713858b075162d5d86f124c0555f383c5b0 | refs/heads/master | 2023-01-08T21:37:19.702276 | 2020-11-09T04:03:50 | 2020-11-09T04:03:50 | 258,399,889 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,352 | py | def find_route(m1, n1, m2, n2, c):
up, down, left, right = check_edges(m1, n1, m2, n2, c)
case = (m1-m2)*(n1-n2)
if case == 0:
return (m1 == m2 and up) or (n1 == n2 and left)
elif case > 0:
return (up and right) or (down and left)
else:
return (up and left) or (down and right)
def check_edges(m1, n1, m2, n2, c):
up, down, left, right = [True]*4
for i in range(min(n1, n2), max(n1, n2)+1):
if matrix[min(m1, m2)][i] not in ('.', c):
up = False
break
for i in range(min(n1, n2), max(n1, n2)+1):
if matrix[max(m1, m2)][i] not in ('.', c):
down = False
break
for i in range(min(m1, m2), max(m1, m2)+1):
if matrix[i][min(n1, n2)] not in ('.', c):
left = False
break
for i in range(min(m1, m2), max(m1, m2)+1):
if matrix[i][max(n1, n2)] not in ('.', c):
right = False
break
return up, down, left, right
m, n = map(int, input().split())
matrix = []
coordinates = {}
for i in range(m):
row = list(input())
matrix.append(row)
for j in range(n):
c = row[j]
if c.isupper():
coordinates.setdefault(c, []).append((i, j))
result = []
friends = sorted(coordinates)
i = 0
while i < len(friends):
c = friends[i]
if c in result or c == '.':
i += 1
continue
(m1, n1), (m2, n2) = coordinates[c]
if find_route(m1, n1, m2, n2, c):
result.append(c)
friends[i] = '.'
matrix[m1][n1] = '.'
matrix[m2][n2] = '.'
i = 0
continue
i += 1
if len(result) == len(friends):
print(''.join(result))
else:
print('IMPOSSIBLE')
# # Original Friends Sichuan puzzle: a path of at most three segments
# # Little Friends: a path of at most two horizontal/vertical segments
# # . empty cell / * blocked cell
# from collections import defaultdict
# from copy import deepcopy
# def delete_line(a, b, board, key):
# # 선분1
# x1, y1 = a
# x2, y2 = b
# check = True
# # 같은 행 위치
# if x1 == x2:
# for i in range(y1+1, y2):
# if board[x1][i] != '.':
# check = False
# break
# if check:
# board[x1][y1], board[x2][y2] = '.', '.'
# print(board)
# return key
# # 같은 열 위치
# elif y1 == y2:
# for i in range(x1+1, x2):
# if board[i][y1] != '.':
# check = False
# break
# if check:
# board[x1][y1], board[x2][y2] = '.', '.'
# return key
# # 선분2
# check1, check2 = True, True
# # 왼쪽에 있는 경우
# if x1 < x2 and y1 > y2:
# # 두 방향이 있음
# for i in range(y2, y1):
# if board[x1][i] != '.':
# check1 = False
# break
# if check1:
# for i in range(x1, x2):
# if board[i][y2] != '.':
# check1 = False
# break
# for i in range(x1+1, x2+1):
# if board[i][y1] != '.':
# check2 = False
# break
# if check2:
# for i in range(y2+1, y1+1):
# if board[x2][i] != '.':
# check2 = False
# break
# elif x1 < x2 and y1 < y2:
# for i in range(y1+1, y2+1):
# if board[x1][i] != '.':
# check1 = False
# break
# if check1:
# for i in range(x1, x2):
# if board[i][y2] != '.':
# check1 = False
# break
# for i in range(x1+1, x2+1):
# if board[i][y1] != '.':
# check2 = False
# break
# if check2:
# for i in range(y1+1, y2):
# if board[x2][i] != '.':
# check2 = False
# break
# if check1 and check2:
# board[x1][y1], board[x2][y2] = '.', '.'
# return key
# def solution(m, n, board):
# answer = []
# board = list(map(list, board))
# positions = defaultdict(list)
# for i in range(m):
# for j in range(n):
# if ord(board[i][j]) in list(range(ord('A'), ord('Z')+1)):
# positions[board[i][j]].append((i, j))
# while True:
# before_answer = deepcopy(answer)
# temp = []
# for a, b in positions.values():
# t = delete_line(a, b, board, board[a[0]][a[1]])
# if t != None:
# temp.append(t)
# if temp:
# answer.extend(sorted(temp))
# if len(answer) == len(positions):
# return ''.join(answer)
# else:
# if before_answer == answer:
# return "IMPOSSIBLE"
# m = 3
# n = 3
# board = ['DBA', 'C*A', 'CDB']
# print(solution(m, n, board))
# a=[[1,2],[3,4]]
# a[0][1] = 3
# print(a)
# # Finding characters that meet a condition
# # print(list(range(ord('A'), ord('Z')+1)))
# # Does iterating over a string yield one character at a time?
# # print(board[0][0])
# # for v in 'abc':
# # print(v)
# # Converting strings into lists of characters
# # board = list(map(list, board))
# # board = [list(b) for b in board]
# # print(board)
| [
"[email protected]"
] | |
3997d4051e2df12bf21d542ce8da6bd90be8e2b3 | 1c25798a9ae17ca228383fcd04a1e801415a78e7 | /Chapter 3 Math, Function, Strings and Objects/commonPythonFunction.py | 25618b0b6296236322877889a7f2286192eceb02 | [] | no_license | padamcs36/Introduction_to_Python_By_Daniel | 688c56fff598617e979a5f71e9a48e50844ad7ea | 8b8f00c9d93428c58df9c90e5edd8f75a1662647 | refs/heads/main | 2023-02-22T08:11:01.450054 | 2021-01-28T09:45:01 | 2021-01-28T09:45:01 | 333,700,411 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 303 | py | '''
abs(), max(), min(), pow(), round() these are python built in functions
there is no need to import separate module for them
'''
print(abs(-3)) #return the value in positive always
print(max(4,5,-8,0,7))
print(min(0,4,6,3,-4))
print(pow(4,3))
print(round(4.535))
print(round(5.554667, 2)) | [
"[email protected]"
] | |
785a0e838d2086e476faf98a5ab30824e7268acd | b08d42933ac06045905d7c005ca9c114ed3aecc0 | /src/learningCurve/tenPercent/lrClassifierN.py | 5b2e407f3df56718ee1c0db2c70f5c4f024f8ad3 | [] | no_license | TanemuraKiyoto/PPI-native-detection-via-LR | d148d53f5eb60a4dda5318b371a3048e3f662725 | 897e7188b0da94e87126a4acc0c9a6ff44a64574 | refs/heads/master | 2022-12-05T11:59:01.014309 | 2020-08-10T00:41:17 | 2020-08-10T00:41:17 | 225,272,083 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,133 | py | # 9 September 2019
# Kiyoto Aramis Tanemura
# I modified the rfClassifier.py script to implement a logistic regression classifier. This classifier runs faster than the random forest classifier and Jun previously observed comparable results between logistic regression and random forest classifiers for the protein folding system. Due to the lesser time cost, I may sample a greater hyperparameter space using the logistic regression classifier. If the sampling yields a region in which overfitting is not observed, then I can refine the search. If the results are similar to that of the random forest classifier, then I may have exhausted the dataset for generalizability.
# Modified 26 October 2019 by Kiyoto Aramis Tanemura. Apply logistic regression classifier to CASF-PPI dataset.
# Modified 2020-02-09 by KAT. Code generalized for public use on GitHub.
import pandas as pd
import numpy as np
import os
import json
import pickle
#from multiprocessing import Pool
from time import time
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import RandomizedSearchCV
from sklearn.preprocessing import StandardScaler
from random import shuffle, random
#os.chdir('/mnt/scratch/tanemur1/')
toc = time()
# Randomize input file orders
pathToInput = 'data/comparison_descriptors/'
pathToOutput = 'results/learningCurve/'
fileNames = [x for x in os.listdir(pathToInput) if '.csv' in x]
shuffle(fileNames) # note: shuffle is in-place. Do not assign to variable
# Specify training set fraction
train_fraction = 0.1
if len(fileNames) * train_fraction == int(len(fileNames) * train_fraction):
train_file_number = int(len(fileNames) * train_fraction)
else:
train_file_number = int(len(fileNames) * train_fraction + 1)
x_train = pd.DataFrame()
y_train = pd.DataFrame()
# Read individual csv for comparison descriptors, append to train_data, and partition to x_train, y_train
fileNamesWithPath = [pathToInput + fileName for fileName in fileNames]
def read_csv(filePath):
return pd.read_csv(filePath, index_col = 0)
print('begin read training set')
#with Pool(np.min([train_file_number, 28])) as p:
# train_dataList = list(p.map(read_csv, fileNamesWithPath[:train_file_number]))
train_dataList = list(map(read_csv, fileNamesWithPath[:train_file_number]))
print('begin append DF | ', (time() - toc) / 60, ' min')
# Append DataFrames into one. While loop used to reduce append operations. Iteratively, DFs in a list are appended
# to the following DF.
while len(train_dataList) != 1:
number = int(len(train_dataList) / 2)
for i in range(number):
train_dataList[2 * i] = train_dataList[2 * i].append(train_dataList[2 * i + 1], sort = True)
for j in range(number):
del train_dataList[j + 1]
x_train = train_dataList[0]
del train_dataList
print('train_data dimensions', x_train.shape, ' | ', (time() - toc) / 60, ' min')
y_train = x_train['class']
x_train = x_train.drop('class', axis = 1) # x_train contains only nonbonding descriptors
feature_names = x_train.columns
scaler = StandardScaler()
scaler.fit(x_train)
x_train = scaler.transform(x_train)
y_train = y_train.values
print('Dimensions x_train ', x_train.shape, ' | y_train', y_train.shape)
# Define a logistic regression classifier along with pertinent hyperparameters. Here, default values are used.
clf = LogisticRegression(penalty='l2', verbose = 1)
def sampleRationalVals(minVal, maxVal):
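    # Draw a value log-uniformly between minVal and maxVal.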
return 2 ** (random() * (np.log2(maxVal) - np.log2(minVal)) + np.log2(minVal))
def sampleRationalList(minVal, maxVal):
theList = []
for i in range(int(2 * np.log2(maxVal - minVal) + 1)):
theVal = sampleRationalVals(minVal, maxVal)
theList.append(theVal)
return theList
parameters = {
# include any hyperparameters to sample. Otherwise, leave empty to perform five fold cross validation with default values. For example:
# 'C': sampleRationalList(0.001, 1000),
# 'solver': ['newton-cg', 'lbfgs', 'sag','saga']
}
print('begin RandomizedSearchCV | ' + str((time() - toc)/60) + ' mins')
randomized_search = RandomizedSearchCV(estimator = clf, param_distributions = parameters, n_iter = 1, scoring = 'accuracy', refit = True, cv = 5, verbose = 1, n_jobs = 1, pre_dispatch = 'n_jobs', return_train_score=True)
randomized_search.fit(x_train, y_train)
print('begin output | ', (time() - toc) / 60 / 60, ' hours')
tic = time()
with open(pathToOutput + 'bestParamN.json', 'w') as g:
json.dump(randomized_search.best_estimator_.get_params(), g)
with open(pathToOutput + 'modelN.pkl', 'wb') as h:
pickle.dump(randomized_search, h)
with open(pathToOutput + 'trainingSetN.txt', 'w') as i:
i.write('Training set:\n')
for pdbID in fileNames[:train_file_number]:
i.write(pdbID + '\n')
i.write('\nJob time: ' + str((tic - toc) / 60 / 60) + ' hours')
with open(pathToOutput + 'standardScalerN.pkl', 'wb') as j:
pickle.dump(scaler, j)
bestCoefficient = randomized_search.best_estimator_.coef_
coefDf = pd.DataFrame(bestCoefficient, columns = feature_names)
with open(pathToOutput + 'coefficientsN.csv', 'w') as f:
coefDf.to_csv(f)
| [
"[email protected]"
] | |
bd9ed5751eee1c211f209fe86a475e63d9c97c2d | 93684882400d0249ad733249f5b2c8dbd230110f | /ClassExercise & studio/chapter 8/Ex.02 pass 7-04-2016.py | b2e97a34f99d78980e9dea2a41e658df9a7c41ee | [] | no_license | SmileShmily/LaunchCode-summerofcode-Unit1 | c492bbed966547cc8c1be7f15d7a23cb989d407b | 03474cf77b0dae2bcfaf8513711d3fec72bd4166 | refs/heads/master | 2021-01-16T23:19:23.413010 | 2017-06-29T02:49:19 | 2017-06-29T02:49:19 | 95,730,117 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,628 | py | ''' ’
(GRADED) Write a function analyze_text that receives a string as input.
Your function should count the number of alphabetic characters (a through z, or A through Z)
in the text and also keep track of how many are the letter 'e' (upper or lowercase).
Your function should return an analysis of the text, something like this:
The text contains 243 alphabetic characters, of which 109 (44.8%) are ‘e’.
'''
#analyze_text=input("Please enter your str:")
def analyze_text(str):
# your code here
# lows="abcdefghijklmnopqrstuvwxyz"
# ups="ABCDEFGHIJKLMNOPQRSTUVWXYZ"
    str = str.lower()
numberOfe = 0
totalChars = 0
for ch in str:
if ((ch <= 'Z' and ch >= 'A') or (ch <= 'z' and ch >= 'a')):
totalChars = totalChars + 1
if ch == 'e':
numberOfe = numberOfe + 1
percent_with_e = (numberOfe/totalChars) * 100
    return "The text contains %d alphabetic characters, of which %d (%.1f%%) are 'e'." % (totalChars, numberOfe, percent_with_e)
# Don't copy these tests into Vocareum
from test import testEqual
str = "Eeeee"
expected = "The text contains 5 alphabetic characters, of which 5 (100.0%) are 'e'."
testEqual(analyze_text(str), expected)
str = "Blueberries are tastee!"
expected = "The text contains 20 alphabetic characters, of which 6 (30.0%) are 'e'."
testEqual(analyze_text(str), expected)
str = "Wright's book, Gadsby, contains a total of 0 of that most common symbol ;)"
expected = "The text contains 55 alphabetic characters, of which 0 (0.0%) are 'e'."
testEqual(analyze_text(str), expected)
'''Output
Pass
Pass
Pass''' | [
"[email protected]"
] | |
59d4c01d5cfcf4ed342790fd1274876e93dd8832 | 26b0a513571a68576687b98de61d24be609b7595 | /problem_set_7/NewsStory.py | 350dd691a2f84602fbf6138df49edbbbe8c487c1 | [] | no_license | sirajmuneer123/MITx-6.00.1x-Introduction-to-Computer-Science-and-Programming-Using-Python | ea487447e119d9f1a23d49b9d6ddbe3a27b6f972 | 757c8087005bebb0fb250526d0caf7b79e3d1973 | refs/heads/master | 2021-01-10T03:21:53.565688 | 2015-11-07T17:57:26 | 2015-11-07T17:57:26 | 45,476,334 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 579 | py | # Enter your code for NewsStory in this box
#Part 1: Data Structure Design
class NewsStory(object):
def __init__(self, guid, title, subject, summary, link):
self.guid = guid
self.title = title
self.subject = subject
self.summary = summary
self.link = link
def getGuid(self):
return self.guid
def getTitle(self):
return self.title
def getSubject(self):
return self.subject
def getSummary(self):
return self.summary
def getLink(self):
return self.link
| [
"[email protected]"
] | |
8c7528abac71136cad54131e9a41c7e5a0de6109 | 573d470c9fcb3799e8822e6953e1259b74e0672c | /Course/syntax/example_37.py | 4dec6e18ca446e0776f554d3f82b7f877eb2a471 | [
"Apache-2.0"
] | permissive | zevgenia/Python_shultais | e6f35773e54a72477ea5ee83520dbecfbee7ff48 | e51c31de221c5e7f36ede857a960138009ec8a05 | refs/heads/master | 2020-03-31T21:46:25.061571 | 2018-10-11T13:43:47 | 2018-10-11T13:43:47 | 152,593,211 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 330 | py | """
Module for demonstrating documentation strings.
See the file example_38.py for a usage example.
"""
def square(x):
"""
    The function takes a number and returns the square of that number.
"""
return x ** 2
| [
"[email protected]"
] | |
e8b3a1c00a0adcbb6c45e0613c18c21c3c1a2f8b | 1bc7456240639a4fac54c411fbcb562cdbcc420c | /20.valid-parenthesis.py | 505f4e6def7acd2d3df87521451cf3e681bda834 | [] | no_license | Manash-git/CP-LeetCode-Solve | bdbb9f13946faee5da24e191a3d593b99da61ed2 | 45052c7613345c76f8a12bac780ffb899062dea9 | refs/heads/master | 2022-11-29T13:16:03.474242 | 2020-08-11T19:06:07 | 2020-08-11T19:06:07 | 275,853,956 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 620 | py | def isValid(s):
stack=[]
pairs={
'(':')',
'[':']',
'{':'}'
}
for char in s:
if char in pairs:
# storing corresponding closing parenthesis
stack.append(pairs[char])
# print(stack)
else:
if not stack or stack.pop() != char:
return False
# print("Final=>",stack)
return not stack
print(isValid("()[{}]"))
print(isValid("()[]{}"))
# test=[]
test=[1]
# print(not test) # Return True if list is empty. Bcoz: empty = False. so not False= True
# print(test) | [
"[email protected]"
] | |
99712e5994e4f62c535cc123f5dee3ae5e41dc7c | b3a2e683ca82de3758fd62a8dea16bf2be7ebb78 | /sequences/sorting.py | eb8a7a548068d777b5de382221deaa3f25e1f0d2 | [] | no_license | jschnab/data-structures-algos-python | dc60f4e26d4d86470039bab1f4422e86a6da736b | 7b7821d5fc5378c8c7a4be088aadd6c95b154312 | refs/heads/master | 2022-12-27T17:56:34.622558 | 2020-10-24T19:47:37 | 2020-10-24T19:47:37 | 265,121,711 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,719 | py | import random
def select(seq, start):
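    # Return the index of the smallest element in seq[start:].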
min_idx = start
for j in range(start + 1, len(seq)):
if seq[j] < seq[min_idx]:
min_idx = j
return min_idx
def selection_sort(seq):
for i in range(len(seq) - 1):
min_idx = select(seq, i)
tmp = seq[i]
seq[i] = seq[min_idx]
seq[min_idx] = tmp
def merge(seq, start, mid, stop):
lst = []
i = start
j = mid
# merge the two lists while each has more elements
while i < mid and j < stop:
if seq[i] < seq[j]:
lst.append(seq[i])
i += 1
else:
lst.append(seq[j])
j += 1
# copy in the rest of the start-to-mid sequence
while i < mid:
lst.append(seq[i])
i += 1
# no need to copy the rest of the sequence from j to stop
# the next part of the code does this for us
# so no need to do:
# while j < stop:
# lst.append(seq[j])
# j += 1
# copy elements back to the original sequence
for i in range(len(lst)):
seq[start + i] = lst[i]
def merge_sort_recursive(seq, start, stop):
# >= is necessary if sequence is empty
# otherwise start == stop - 1 does the job
if start >= stop - 1:
return
mid = (start + stop) // 2
merge_sort_recursive(seq, start, mid)
merge_sort_recursive(seq, mid, stop)
merge(seq, start, mid, stop)
def merge_sort(seq):
merge_sort_recursive(seq, 0, len(seq))
def partition(seq, start, stop):
# pivot_index comes from the start location in the list
pivot_index = start
pivot = seq[pivot_index]
i = start + 1
j = stop - 1
while i <= j:
while i <= j and seq[i] <= pivot:
i += 1
while i <= j and seq[j] > pivot:
j -= 1
if i < j:
tmp = seq[i]
seq[i] = seq[j]
seq[j] = tmp
i += 1
j -= 1
seq[pivot_index] = seq[j]
seq[j] = pivot
return j
def quicksort_recursive(seq, start, stop):
if start >= stop - 1:
return
# pivot index ends up between the two halves
# where the pivot value is in its final location
pivot_index = partition(seq, start, stop)
quicksort_recursive(seq, start, pivot_index)
quicksort_recursive(seq, pivot_index + 1, stop)
def quicksort(seq):
# randomize sequence to find a good pivot
for i in range(len(seq)):
j = random.randint(0, len(seq) - 1)
seq[i], seq[j] = seq[j], seq[i]
quicksort_recursive(seq, 0, len(seq))
if __name__ == "__main__":
seq = [random.randint(0, 100) for _ in range(10)]
print("Before sort:", seq)
quicksort(seq)
print("After sort:", seq)
| [
"[email protected]"
] | |
be19e49ff8ca50f044aa74a299385f507daf7c95 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_34/785.py | 9c28392c2800225546e52dd464a7bb364a89ea2b | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 378 | py | import sys
import re
firstline = sys.stdin.readline()
(L, D, N) = map(int, firstline.split())
lex = []
for x in range(D):
lex.append(sys.stdin.readline())
lex = ''.join(lex)
for (i, pattern) in enumerate(sys.stdin.readlines()):
pattern = pattern.replace('(','[').replace(')',']')
matches = re.findall(pattern, lex)
print 'Case #%d: %d' % (i+1, len(matches))
| [
"[email protected]"
] | |
c886d026554eb02a740ba2ff1fe41d5cda414774 | b385f39c5b701fb6f22796ab951872257ae8398a | /exercicios-secao08/exercicio29.py | ebaee4355b5c1a1ef226ff1e088f5a6abccb4523 | [
"MIT"
] | permissive | EhODavi/curso-python | 5c97a6913bad198ae590519287ed441c95399d80 | cf07e308be9d7516f2cfe7f21c539d214c836979 | refs/heads/main | 2023-08-07T13:44:46.608118 | 2021-06-14T21:40:50 | 2021-06-14T21:40:50 | 356,542,988 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 222 | py | from math import factorial, radians
def sinh(graus: float) -> float:
soma: float = 0.0
x: float = radians(graus)
for n in range(6):
soma += (x ** (2 * n + 1)) / factorial(2 * n + 1)
return soma
| [
"[email protected]"
] | |
6b49473abce39f0169294f832573d958ed264e27 | c7a404e6fe1861c90ff8dc9cbc69462ebcbb744c | /app.py | 7c2a34914a0512a254d56cb7547873a693cb3975 | [
"MIT"
] | permissive | bossnode/rufo-mp3-fetcher | b40ced257b7127456b5fbdd27b19cb5ce9f6289e | ed14c44c27761e4d2b2625da17a62c036f38dafd | refs/heads/master | 2020-06-10T04:11:09.257754 | 2019-06-26T20:25:12 | 2019-06-26T20:25:12 | 193,578,331 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,680 | py | import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
import os, sys
from soundspider import SoundSpider
from time import sleep
import threading
class Handler:
def onDestroy(self, *args):
try:
download_thread._stop()
except:
pass
Gtk.main_quit()
def onToggleDownload(self, button):
status = "Downloading..."
builder.get_object('label4').set_text(status)
button.set_sensitive(False)
builder.get_object("folder_label").set_sensitive(False)
builder.get_object("url_label").set_sensitive(False)
## verbose?
# verbose = True
verbose = False
params = (builder.get_object("url_label").get_text(),builder.get_object("folder_label").get_text(),verbose, builder.get_object('label4'), button,builder.get_object("url_label"),builder.get_object("folder_label"))
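        # Run the conversion on a worker thread so the GTK main loop stays responsive.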
download_thread = threading.Thread(target=SoundSpider.convert, args=params)
download_thread.start()
return
def resource_path(relative_path):
""" Get absolute path to resource, works for dev and for PyInstaller """
try:
# PyInstaller creates a temp folder and stores path in _MEIPASS
base_path = sys._MEIPASS
except Exception:
base_path = os.path.abspath(".")
return os.path.join(base_path, relative_path)
# """ Get absolute path to resource, works for dev and for PyInstaller """
# base_path = getattr(sys, '_MEIPASS', os.path.dirname(os.path.abspath(__file__)))
# return os.path.join(base_path, relative_path)
download_thread = threading.Thread()
builder = Gtk.Builder()
builder.add_from_file(resource_path("ui.glade"))
builder.connect_signals(Handler())
window = builder.get_object("window1")
window.show_all()
Gtk.main() | [
"="
] | = |
d08d566fec8e6362ded5eced9937d5449872b30a | 613d0766a36a0354ed8b7be4c8a552068afe5e71 | /08 Linked List/15 Reverse Linked List/02.py | 45530e293097a676b12012d31b88a5d9f2c7ec75 | [] | no_license | HYLee1008/Python | e9fb2109ed29d11faa37b0a823e603134d85dc28 | 04900b6de1d56c11fbb98d518c9b4cdd37772490 | refs/heads/master | 2022-12-12T12:07:49.198551 | 2020-09-05T12:41:55 | 2020-09-05T12:41:55 | 280,879,627 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 553 | py | ### Reverse linked list by iterative
### Iterative consumes 70% memory of the recursive method. And slightly faster.
from datastructure import *
def reverse_list(head):
node, prev = head, None
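    # Walk the list once, re-pointing each node at its predecessor.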
while node:
next, node.next = node.next, prev
node, prev = next, node
return prev
input = ListNode(1)
input.next = ListNode(2)
input.next.next = ListNode(3)
input.next.next.next = ListNode(4)
input.next.next.next.next = ListNode(5)
reverse = reverse_list(input)
while reverse:
print(reverse.val)
reverse = reverse.next | [
"[email protected]"
] | |
d32bdac865b53dfa2b0b6159cd89f731b1be54c7 | 1565d4f012622c98dd519d5f6feedf7d5a67a70b | /etc/openshift_clean.py | a05d4c5f52a0c98d4598cac6d152eb7475f82f94 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | doaa-altarawy/quokka | 6862da8e3f0f3583350ca4faf2bc7133a7aa9d6b | 3730008100266569d1fab47c7dfa765650e2a346 | refs/heads/development | 2021-01-16T17:46:22.983206 | 2015-12-21T22:15:15 | 2015-12-21T22:15:15 | 48,460,112 | 1 | 0 | null | 2015-12-23T00:14:31 | 2015-12-23T00:14:31 | null | UTF-8 | Python | false | false | 543 | py | #!/usr/bin/python
"""
THIS SCRIPT CLEANS ALL DATA IN YOUR QUOKKA DB
RUN ONLY IN OPENSHIFT DEMO DEPLOY
OR AT YOUR OWN RISK!!!!
"""
from quokka import create_app
from quokka.core.models.content import Content
from quokka.core.models.config import Config
from quokka.core.models.channel import Channel
from quokka.modules.accounts.models import User
app = create_app()
Content.objects.delete()
User.objects.delete()
Config.objects.delete()
for channel in Channel.objects.filter(parent__ne=None):
channel.delete()
Channel.objects.delete()
| [
"[email protected]"
] | |
bc38759d351f6ebdfac3b69fb0061f1b382afdaf | 37a0cd9ed68558c7302f6effc44d5e970416441d | /ivona_speak/__init__.py | d813a2f48657a2b5555092851ff556be9f9e1007 | [
"Python-2.0",
"MIT"
] | permissive | MagnetonBora/ivona-speak | 00eced9ee2231069c9b0fb8e818ba6191209e6fa | d0ced823d28c0026bb685d5a321ed11fdbab9ad8 | refs/heads/master | 2021-01-20T14:36:15.293070 | 2017-01-21T12:39:10 | 2017-01-21T12:39:10 | 82,699,158 | 0 | 0 | null | 2017-02-21T16:01:37 | 2017-02-21T16:01:37 | null | UTF-8 | Python | false | false | 105 | py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
__version__ = '0.2.0'
| [
"[email protected]"
] | |
8aeea72b7d8a05fcd367ad2245993782dde02fba | 3330944ef9af811ed59e3f40721e6b7da754e1e7 | /setup.py | 1d70fdd49af304dc6bdae85abbb3f2671b9bf919 | [] | no_license | loum/baip-loader | 8ee6ad184f4cb557437ed92590b591eaa0032956 | 68a4b1556b8d745e51a1502092cd1d54bfd96c76 | refs/heads/master | 2021-01-25T07:39:34.190887 | 2015-05-05T00:15:21 | 2015-05-05T00:15:21 | 30,627,252 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,249 | py | """:mod:docutils` setup.py file to generate Python compatible sources in
build/ directory
"""
import os
import glob
import fnmatch
import shutil
from setuptools import setup
VERSION = '0.0.0'
def opj(*args):
path = os.path.join(*args)
return os.path.normpath(path)
def find_data_files(srcdir, *wildcards, **kw):
"""Get a list of all files under the *srcdir* matching *wildcards*,
returned in a format to be used for install_data.
"""
def walk_helper(arg, dirname, files):
names = []
lst, wildcards = arg
for wildcard in wildcards:
wc_name = opj(dirname, wildcard)
for current_file in files:
filename = opj(dirname, current_file)
if (fnmatch.fnmatch(filename, wc_name) and
not os.path.isdir(filename)):
if kw.get('version') is None:
names.append(filename)
else:
versioned_file = '%s.%s' % (filename,
kw.get('version'))
shutil.copyfile(filename, versioned_file)
names.append('%s.%s' % (filename,
kw.get('version')))
if names:
if kw.get('target_dir') is None:
lst.append(('', names))
else:
lst.append((kw.get('target_dir'), names))
file_list = []
recursive = kw.get('recursive', True)
if recursive:
os.path.walk(srcdir, walk_helper, (file_list, wildcards))
else:
walk_helper((file_list, wildcards),
srcdir,
[os.path.basename(current_file) for current_file in glob.glob(opj(srcdir, '*'))])
return file_list
find_data_files('baip_loader/conf/', '*.conf', version=VERSION)
setup(name='python-baip-loader',
version=VERSION,
description='BAIP-Loader',
author='Lou Markovski',
author_email='[email protected]',
url='',
scripts=['baip_loader/bin/baip-loader'],
packages=['baip_loader',
'baip_loader.config'],
package_data={'baip_loader': ['conf/*.conf.[0-9]*.[0-9]*.[0-9]*']})
| [
"[email protected]"
] | |
02d2bcf9f1aaaee3aff4b006298d5417e5d9fecb | 386d5d4f8f102e701d02b326cd066f520e3dff9f | /ProjectApplication/grant_management/migrations/0030_renames_media_to_medium.py | f3c44d48ea456f3b94289c02f7b305bf53d2dd0b | [
"CC-BY-NC-SA-4.0",
"CC-BY-SA-4.0",
"CC-BY-4.0",
"MIT"
] | permissive | Swiss-Polar-Institute/project-application | ae2561c3ae2c1d5412d165d959ce2e5886135e0a | 7dc4a9f7e0f8d28c89977b85f99bc5e35ea77d43 | refs/heads/master | 2023-08-31T04:01:23.492272 | 2023-08-25T14:33:02 | 2023-08-25T14:33:02 | 206,330,401 | 7 | 5 | MIT | 2023-09-13T08:03:53 | 2019-09-04T13:49:39 | Python | UTF-8 | Python | false | false | 685 | py | # Generated by Django 3.0.5 on 2020-05-12 10:39
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('project_core', '0118_calls_need_to_be_part_of_a_funding_instrument'),
('grant_management', '0029_signed_by_multiple_people'),
]
operations = [
migrations.RenameModel(
old_name='Media',
new_name='Medium',
),
migrations.AlterField(
model_name='grantagreement',
name='signed_by',
field=models.ManyToManyField(blank=True, help_text='People who signed the grant agreement', to='project_core.PhysicalPerson'),
),
]
| [
"[email protected]"
] | |
7bcce88caf756bb58b5c86d24a41dfc8e635fa25 | 32eeb97dff5b1bf18cf5be2926b70bb322e5c1bd | /benchmark/omninote/testcase/firstcases/testcase10_018.py | bb43949dc1a984d35d229ba18c38794740f1bd50 | [] | no_license | Prefest2018/Prefest | c374d0441d714fb90fca40226fe2875b41cf37fc | ac236987512889e822ea6686c5d2e5b66b295648 | refs/heads/master | 2021-12-09T19:36:24.554864 | 2021-12-06T12:46:14 | 2021-12-06T12:46:14 | 173,225,161 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,301 | py | #coding=utf-8
import os
import subprocess
import time
import traceback
from appium import webdriver
from appium.webdriver.common.touch_action import TouchAction
from selenium.common.exceptions import NoSuchElementException, WebDriverException
desired_caps = {
'platformName' : 'Android',
'deviceName' : 'Android Emulator',
'platformVersion' : '4.4',
'appPackage' : 'it.feio.android.omninotes',
'appActivity' : 'it.feio.android.omninotes.MainActivity',
'resetKeyboard' : True,
'androidCoverage' : 'it.feio.android.omninotes/it.feio.android.omninotes.JacocoInstrumentation',
'noReset' : True
}
def command(cmd, timeout=5):
p = subprocess.Popen(cmd, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, shell=True)
time.sleep(timeout)
p.terminate()
return
def getElememt(driver, str) :
for i in range(0, 5, 1):
try:
element = driver.find_element_by_android_uiautomator(str)
except NoSuchElementException:
time.sleep(1)
else:
return element
os.popen("adb shell input tap 50 50")
element = driver.find_element_by_android_uiautomator(str)
return element
def getElememtBack(driver, str1, str2) :
for i in range(0, 2, 1):
try:
element = driver.find_element_by_android_uiautomator(str1)
except NoSuchElementException:
time.sleep(1)
else:
return element
for i in range(0, 5, 1):
try:
element = driver.find_element_by_android_uiautomator(str2)
except NoSuchElementException:
time.sleep(1)
else:
return element
os.popen("adb shell input tap 50 50")
element = driver.find_element_by_android_uiautomator(str2)
return element
def swipe(driver, startxper, startyper, endxper, endyper) :
size = driver.get_window_size()
width = size["width"]
height = size["height"]
try:
driver.swipe(start_x=int(width * startxper), start_y=int(height * startyper), end_x=int(width * endxper),
end_y=int(height * endyper), duration=2000)
except WebDriverException:
time.sleep(1)
driver.swipe(start_x=int(width * startxper), start_y=int(height * startyper), end_x=int(width * endxper),
end_y=int(height * endyper), duration=2000)
return
# testcase018
try :
starttime = time.time()
driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)
element = getElememtBack(driver, "new UiSelector().text(\"Nothing here!\")", "new UiSelector().className(\"android.widget.TextView\").instance(3)")
TouchAction(driver).tap(element).perform()
element = getElememt(driver, "new UiSelector().resourceId(\"it.feio.android.omninotes:id/menu_sort\").className(\"android.widget.TextView\")")
TouchAction(driver).long_press(element).release().perform()
element = getElememt(driver, "new UiSelector().resourceId(\"it.feio.android.omninotes:id/menu_sort\").className(\"android.widget.TextView\")")
TouchAction(driver).long_press(element).release().perform()
element = getElememt(driver, "new UiSelector().resourceId(\"it.feio.android.omninotes:id/menu_sort\").className(\"android.widget.TextView\")")
TouchAction(driver).long_press(element).release().perform()
element = getElememtBack(driver, "new UiSelector().text(\"Nothing here!\")", "new UiSelector().className(\"android.widget.TextView\").instance(3)")
TouchAction(driver).tap(element).perform()
element = getElememt(driver, "new UiSelector().className(\"android.widget.ImageView\").description(\"More options\")")
TouchAction(driver).tap(element).perform()
element = getElememtBack(driver, "new UiSelector().text(\"Reduced view\")", "new UiSelector().className(\"android.widget.TextView\")")
TouchAction(driver).tap(element).perform()
element = getElememt(driver, "new UiSelector().resourceId(\"it.feio.android.omninotes:id/menu_search\").className(\"android.widget.TextView\")")
TouchAction(driver).tap(element).perform()
except Exception, e:
print 'FAIL'
print 'str(e):\t\t', str(e)
print 'repr(e):\t', repr(e)
print traceback.format_exc()
else:
print 'OK'
finally:
cpackage = driver.current_package
endtime = time.time()
print 'consumed time:', str(endtime - starttime), 's'
command("adb shell am broadcast -a com.example.pkg.END_EMMA --es name \"10_018\"")
jacocotime = time.time()
print 'jacoco time:', str(jacocotime - endtime), 's'
driver.quit()
if (cpackage != 'it.feio.android.omninotes'):
cpackage = "adb shell am force-stop " + cpackage
os.popen(cpackage) | [
"[email protected]"
] | |
d8a67dd3fc7ad483dcc1c079ddf71657300fe471 | 4c3e992678341ccaa1d4d14e97dac2e0682026d1 | /addons/account/tests/test_tax.py | 62e63e8bb07a770a7428d5bb811ee52b406f5201 | [] | no_license | gahan-corporation/wyatt | 3a6add8f8f815bd26643e1e7c81aea024945130d | 77e56da362bec56f13bf0abc9f8cf13e98461111 | refs/heads/master | 2021-09-03T18:56:15.726392 | 2018-01-08T02:54:47 | 2018-01-08T02:54:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,285 | py | from gerp.addons.account.tests.account_test_users import AccountTestUsers
import time
class TestTax(AccountTestUsers):
def setUp(self):
super(TestTax, self).setUp()
self.fixed_tax = self.tax_model.create({
'name': "Fixed tax",
'amount_type': 'fixed',
'amount': 10,
'sequence': 1,
})
self.fixed_tax_bis = self.tax_model.create({
'name': "Fixed tax bis",
'amount_type': 'fixed',
'amount': 15,
'sequence': 2,
})
self.percent_tax = self.tax_model.create({
'name': "Percent tax",
'amount_type': 'percent',
'amount': 10,
'sequence': 3,
})
self.division_tax = self.tax_model.create({
'name': "Division tax",
'amount_type': 'division',
'amount': 10,
'sequence': 4,
})
self.group_tax = self.tax_model.create({
'name': "Group tax",
'amount_type': 'group',
'amount': 0,
'sequence': 5,
'children_tax_ids': [
(4, self.fixed_tax.id, 0),
(4, self.percent_tax.id, 0)
]
})
self.group_tax_bis = self.tax_model.create({
'name': "Group tax bis",
'amount_type': 'group',
'amount': 0,
'sequence': 6,
'children_tax_ids': [
(4, self.fixed_tax.id, 0),
(4, self.percent_tax.id, 0)
]
})
self.group_of_group_tax = self.tax_model.create({
'name': "Group of group tax",
'amount_type': 'group',
'amount': 0,
'sequence': 7,
'children_tax_ids': [
(4, self.group_tax.id, 0),
(4, self.group_tax_bis.id, 0)
]
})
self.bank_journal = self.env['account.journal'].search([('type', '=', 'bank'), ('company_id', '=', self.account_manager.company_id.id)])[0]
self.bank_account = self.bank_journal.default_debit_account_id
self.expense_account = self.env['account.account'].search([('user_type_id.type', '=', 'payable')], limit=1) #Should be done by onchange later
def test_tax_group_of_group_tax(self):
self.fixed_tax.include_base_amount = True
self.group_tax.include_base_amount = True
self.group_of_group_tax.include_base_amount = True
res = self.group_of_group_tax.compute_all(200.0)
self.assertEquals(res['total_excluded'], 200.0)
        # After the first group is computed:
        #   base = 210
        #   total_included = 231
        # The base of the first group is passed along; the base after the second
        # group (220) is dropped. The base of the group of groups is passed out,
        # so we end up with the base as it was after the first group.
self.assertEquals(res['base'], 210.0)
self.assertEquals(res['total_included'], 263.0)
def test_tax_group(self):
res = self.group_tax.compute_all(200.0)
self.assertEquals(res['total_excluded'], 200.0)
self.assertEquals(res['total_included'], 230.0)
self.assertEquals(len(res['taxes']), 2)
self.assertEquals(res['taxes'][0]['amount'], 10.0)
self.assertEquals(res['taxes'][1]['amount'], 20.0)
def test_tax_percent_division(self):
self.division_tax.price_include = True
self.division_tax.include_base_amount = True
self.percent_tax.price_include = False
self.percent_tax.include_base_amount = False
res_division = self.division_tax.compute_all(200.0)
res_percent = self.percent_tax.compute_all(200.0)
self.assertEquals(res_division['taxes'][0]['amount'], 20.0)
self.assertEquals(res_percent['taxes'][0]['amount'], 20.0)
self.division_tax.price_include = False
self.division_tax.include_base_amount = False
self.percent_tax.price_include = True
self.percent_tax.include_base_amount = True
res_division = self.division_tax.compute_all(200.0)
res_percent = self.percent_tax.compute_all(200.0)
self.assertEquals(res_division['taxes'][0]['amount'], 22.22)
self.assertEquals(res_percent['taxes'][0]['amount'], 18.18)
def test_tax_sequence_normalized_set(self):
self.division_tax.sequence = 1
self.fixed_tax.sequence = 2
self.percent_tax.sequence = 3
taxes_set = (self.group_tax | self.division_tax)
res = taxes_set.compute_all(200.0)
self.assertEquals(res['taxes'][0]['amount'], 22.22)
self.assertEquals(res['taxes'][1]['amount'], 10.0)
self.assertEquals(res['taxes'][2]['amount'], 20.0)
def test_tax_include_base_amount(self):
self.fixed_tax.include_base_amount = True
res = self.group_tax.compute_all(200.0)
self.assertEquals(res['total_included'], 231.0)
def test_tax_currency(self):
self.division_tax.amount = 15.0
res = self.division_tax.compute_all(200.0, currency=self.env.ref('base.VEF'))
self.assertAlmostEqual(res['total_included'], 235.2941)
def test_tax_move_lines_creation(self):
""" Test that creating a move.line with tax_ids generates the tax move lines and adjust line amount when a tax is price_include """
self.fixed_tax.price_include = True
self.fixed_tax.include_base_amount = True
company_id = self.env['res.users'].browse(self.env.uid).company_id.id
vals = {
'date': time.strftime('%Y-01-01'),
'journal_id': self.bank_journal.id,
'name': 'Test move',
'line_ids': [(0, 0, {
'account_id': self.bank_account.id,
'debit': 235,
'credit': 0,
'name': 'Bank Fees',
'partner_id': False,
}), (0, 0, {
'account_id': self.expense_account.id,
'debit': 0,
'credit': 200,
'date': time.strftime('%Y-01-01'),
'name': 'Bank Fees',
'partner_id': False,
'tax_ids': [(4, self.group_tax.id), (4, self.fixed_tax_bis.id)]
})],
'company_id': company_id,
}
move = self.env['account.move'].with_context(apply_taxes=True).create(vals)
aml_fixed_tax = move.line_ids.filtered(lambda l: l.tax_line_id.id == self.fixed_tax.id)
aml_percent_tax = move.line_ids.filtered(lambda l: l.tax_line_id.id == self.percent_tax.id)
aml_fixed_tax_bis = move.line_ids.filtered(lambda l: l.tax_line_id.id == self.fixed_tax_bis.id)
self.assertEquals(len(aml_fixed_tax), 1)
self.assertEquals(aml_fixed_tax.credit, 10)
self.assertEquals(len(aml_percent_tax), 1)
self.assertEquals(aml_percent_tax.credit, 20)
self.assertEquals(len(aml_fixed_tax_bis), 1)
self.assertEquals(aml_fixed_tax_bis.credit, 15)
aml_with_taxes = move.line_ids.filtered(lambda l: set(l.tax_ids.ids) == set([self.group_tax.id, self.fixed_tax_bis.id]))
self.assertEquals(len(aml_with_taxes), 1)
self.assertEquals(aml_with_taxes.credit, 190)
| [
"[email protected]"
] | |
1ff24ffb45f8546c93fbec4517976b96ca01b0bc | b8ed71f3d1a36c119d846e97f1aa7d8ba6774f52 | /680_Valid_Palindrome_II.py | 5dd32bf280843101a247fdd621ab833fcc2b7d1b | [] | no_license | imjaya/Leetcode_solved | 0831c4114dd919864452430c4e46d3f69b4bd0cd | 374eb0f23ae14d9638d20bbfe622209f71397ae0 | refs/heads/master | 2023-05-24T17:57:56.633611 | 2023-05-16T06:31:42 | 2023-05-16T06:31:42 | 284,203,426 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 491 | py | def helper(s,l,r):
while l<r:
if(s[l]!=s[r]):
return False
l+=1
r-=1
return True
class Solution:
def validPalindrome(self, s: str) -> bool:
left=0
right=len(s)-1
while(left<right):
if(s[left]!=s[right]):
return helper(s,left+1,right) or helper(s,left,right-1)
else:
left+=1
right-=1
return True
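# Usage sketch (assumed driver code, not part of the original LeetCode submission):
#   Solution().validPalindrome("abca")  # -> True  (delete 'b' or 'c')
#   Solution().validPalindrome("abc")   # -> False (would need more than one deletion)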
| [
"[email protected]"
] | |
c335ae67050596c16c4e388d411d18e7b88ca797 | 863a7b075963c2882d9fe5df66af16c6e52576a9 | /deepiu/textsim/evaluate/evaluate-sim.py | 2079a1cc158d696d4cf173579d1afde74c720102 | [] | no_license | buptpriswang/hasky | 3c3510d43821bbdfdfa216a337cde6e0747b3423 | 93afdc32956643fe191fcf1a5aa635570e219ab0 | refs/heads/master | 2021-06-30T19:45:03.329238 | 2017-09-22T15:41:58 | 2017-09-22T15:41:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,532 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# ==============================================================================
# \file evaluate-sim-score.py
# \author chenghuige
# \date 2016-09-25 00:46:53.890615
# \Description
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
flags = tf.app.flags
FLAGS = flags.FLAGS
flags.DEFINE_string('model_dir', '/home/gezi/new/temp/makeup/title2name/model/bow/', '')
#flags.DEFINE_string('model_dir', '/home/gezi/new/temp/makeup/title2name/model/cnn.hic/', '')
flags.DEFINE_string('exact_model_dir', '/home/gezi/new/temp/makeup/title2name/model/bow.elementwise/', '')
flags.DEFINE_string('vocab', '/home/gezi/new/temp/makeup/title2name/tfrecord/seq-basic/vocab.txt', '')
flags.DEFINE_bool('use_exact_predictor', False, '')
flags.DEFINE_string('key', 'score', '')
flags.DEFINE_string('lkey', 'dual_bow/main/ltext:0', '')
flags.DEFINE_string('rkey', 'dual_bow/main/rtext:0', '')
#flags.DEFINE_string('lkey', 'dual_cnn/main/ltext:0', '')
#flags.DEFINE_string('rkey', 'dual_cnn/main/rtext:0', '')
flags.DEFINE_string('exact_key', 'score', '')
flags.DEFINE_string('exact_lkey', 'dual_bow2/main/ltext:0', '')
flags.DEFINE_string('exact_rkey', 'dual_bow2/main/rtext:0', '')
flags.DEFINE_float('exact_ratio', 1., '')
flags.DEFINE_integer('np_seed', 1024, '0 random otherwise fixed random')
import sys
import numpy as np
import melt
logging = melt.logging
import gezi
from deepiu.util import evaluator
from deepiu.util import algos_factory
class Predictor(melt.PredictorBase):
def __init__(self, model_dir, key, lkey, rkey, index=0):
self._predictor = melt.Predictor(model_dir)
self._key = key
self._lkey = lkey
self._rkey = rkey
self._index = index
def predict(self, ltext, rtext):
score = self._predictor.inference(self._key,
feed_dict= {
self._lkey: ltext,
self._rkey: rtext
},
index=self._index
)
return score
def evaluate_score():
evaluator.init()
text_max_words = evaluator.all_distinct_texts.shape[1]
print('text_max_words:', text_max_words)
predictor = Predictor(FLAGS.model_dir, FLAGS.key, FLAGS.lkey, FLAGS.rkey, index=0)
exact_predictor=None
if FLAGS.use_exact_predictor:
exact_predictor = Predictor(FLAGS.exact_model_dir, FLAGS.exact_key, FLAGS.exact_lkey, FLAGS.exact_rkey, index=-1)
print(tf.get_collection(FLAGS.key))
seed = FLAGS.np_seed if FLAGS.np_seed else None
index = evaluator.random_predict_index(seed=seed)
evaluator.evaluate_scores(predictor, random=True, index=index)
if exact_predictor is not None:
    ## well, for seq2seq we did the experiment, and for makeup title2name score (average time per step) is much better than ori_score
    ## so just using score will be fine
#exact_predictor._key = 'ori_score'
#evaluator.evaluate_scores(predictor, random=True, exact_predictor=exact_predictor, index=index)
#exact_predictor._key = 'score'
evaluator.evaluate_scores(predictor, random=True, exact_predictor=exact_predictor, exact_ratio=FLAGS.exact_ratio, index=index)
def main(_):
logging.init(logtostderr=True, logtofile=False)
evaluate_score()
if __name__ == '__main__':
tf.app.run()
| [
"[email protected]"
] | |
8b7320fbb782d353ece0bf32dd2977b744b8acd8 | 0e1e643e864bcb96cf06f14f4cb559b034e114d0 | /Exps_7_v3/I_to_M_Gk3_no_pad/pyramid_2side/bce_s001_tv_s0p1_L8/step11_L2345678.py | c03824dab2e889ccb93f4ca311d98915db768bee | [] | no_license | KongBOy/kong_model2 | 33a94a9d2be5b0f28f9d479b3744e1d0e0ebd307 | 1af20b168ffccf0d5293a393a40a9fa9519410b2 | refs/heads/master | 2022-10-14T03:09:22.543998 | 2022-10-06T11:33:42 | 2022-10-06T11:33:42 | 242,080,692 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,986 | py | #############################################################################################################################################################################################################
#############################################################################################################################################################################################################
### Add kong_model2 to sys.path
import os
code_exe_path = os.path.realpath(__file__)                          ### path of the currently running step10_b.py
code_exe_path_element = code_exe_path.split("\\")                   ### split the path so we can find which level kong_model sits at
kong_layer = code_exe_path_element.index("kong_model2")             ### find which level kong_model2 sits at
kong_model2_dir = "\\".join(code_exe_path_element[:kong_layer + 1]) ### locate the kong_model2 dir
import sys                                                          ### add kong_model2 to sys.path
sys.path.append(kong_model2_dir)
# print(__file__.split("\\")[-1])
# print("   code_exe_path:", code_exe_path)
# print("   code_exe_path_element:", code_exe_path_element)
# print("   kong_layer:", kong_layer)
# print("   kong_model2_dir:", kong_model2_dir)
###############################################################################################################################################################################################################
# When run with F5, if we are not inside the step10_b.py folder, switch to it automatically so that step10_a.py can be imported!
code_exe_dir = os.path.dirname(code_exe_path)                       ### dir of the currently running step10_b.py
if(os.getcwd() != code_exe_dir):                                    ### if not in the step10_b.py folder, switch to it automatically
    os.chdir(code_exe_dir)
# print("current_path:", os.getcwd())
###############################################################################################################################################################################################################
import Exps_7_v3.I_to_M_Gk3_no_pad.pyramid_0side.bce_s001_tv_s0p1_L8.step10_a as L8_0side
import Exps_7_v3.I_to_M_Gk3_no_pad.pyramid_1side.bce_s001_tv_s0p1_L8.step10_a as L8_1side
import step10_a as side2
#################################################################################################################################################################################################################################################################################################################################################################################################
ch032_1side_1__2side_all = [ L8_0side.ch032_0side, L8_1side.ch032_1side_1, side2.ch032_1side_1__2side_1, ]
ch032_1side_2__2side_all = [ L8_0side.ch032_0side, L8_1side.ch032_1side_1, side2.ch032_1side_2__2side_1, side2.ch032_1side_2__2side_2, ]
ch032_1side_3__2side_all = [ L8_0side.ch032_0side, L8_1side.ch032_1side_1, side2.ch032_1side_3__2side_1, side2.ch032_1side_3__2side_2, side2.ch032_1side_3__2side_3, ]
ch032_1side_4__2side_all = [ L8_0side.ch032_0side, L8_1side.ch032_1side_1, side2.ch032_1side_4__2side_1, side2.ch032_1side_4__2side_2, side2.ch032_1side_4__2side_3, side2.ch032_1side_4__2side_4, ]
ch032_1side_5__2side_all = [ L8_0side.ch032_0side, L8_1side.ch032_1side_1, side2.ch032_1side_5__2side_1, side2.ch032_1side_5__2side_2, side2.ch032_1side_5__2side_3, side2.ch032_1side_5__2side_4, side2.ch032_1side_5__2side_5, ]
ch032_1side_6__2side_all = [ L8_0side.ch032_0side, L8_1side.ch032_1side_1, side2.ch032_1side_6__2side_1, side2.ch032_1side_6__2side_2, side2.ch032_1side_6__2side_3, side2.ch032_1side_6__2side_4, side2.ch032_1side_6__2side_5, side2.ch032_1side_6__2side_6, ]
ch032_1side_7__2side_all = [ L8_0side.ch032_0side, L8_1side.ch032_1side_1, side2.ch032_1side_7__2side_1, side2.ch032_1side_7__2side_2, side2.ch032_1side_7__2side_3, side2.ch032_1side_7__2side_4, side2.ch032_1side_7__2side_5, side2.ch032_1side_7__2side_6, side2.ch032_1side_7__2side_7, ]
ch032_1side_8__2side_all = [ L8_0side.ch032_0side, L8_1side.ch032_1side_1, side2.ch032_1side_8__2side_1, side2.ch032_1side_8__2side_2, side2.ch032_1side_8__2side_3, side2.ch032_1side_8__2side_4, side2.ch032_1side_8__2side_5, side2.ch032_1side_8__2side_6, side2.ch032_1side_8__2side_7, side2.ch032_1side_8__2side_8, ]
ch032_1side_9__2side_all = [ L8_0side.ch032_0side, L8_1side.ch032_1side_1, side2.ch032_1side_9__2side_1, side2.ch032_1side_9__2side_2, side2.ch032_1side_9__2side_3, side2.ch032_1side_9__2side_4, side2.ch032_1side_9__2side_5, side2.ch032_1side_9__2side_6, side2.ch032_1side_9__2side_7, side2.ch032_1side_9__2side_8, side2.ch032_1side_9__2side_9, ]
ch032_1side_all__2side_all = [
ch032_1side_1__2side_all,
ch032_1side_2__2side_all,
ch032_1side_3__2side_all,
ch032_1side_4__2side_all,
ch032_1side_5__2side_all,
ch032_1side_6__2side_all,
ch032_1side_7__2side_all,
ch032_1side_8__2side_all,
ch032_1side_9__2side_all,
]
| [
"[email protected]"
] | |
930d498a17db90721c75d65299f8f10b0549e799 | 4a88daea1741ed6b5b5088b8d1f2787093abdf80 | /tests/test_gen_couroutine.py | 16e6479cc1bbc400493e0975a2f4a4ccec8b11d3 | [
"MIT"
] | permissive | Nigelzhf/torweb | 8e0bfeb6793ae9e50230e6b7b6284ec5e6da0d57 | 6859a878c126addb74410e872307ac9399f20eb7 | refs/heads/master | 2021-01-12T14:16:10.474945 | 2016-10-03T20:47:25 | 2016-10-03T20:47:25 | 69,928,142 | 0 | 1 | null | 2016-10-04T02:25:34 | 2016-10-04T02:25:33 | null | UTF-8 | Python | false | false | 414 | py | #encoding:utf-8
import sys, os
sys.path.append(os.path.dirname(sys.path[0]))
from tornado.httpclient import AsyncHTTPClient
from tornado import gen
import tornado.ioloop
@gen.coroutine
def fetch_coroutine(url):
http_client = AsyncHTTPClient()
response = yield http_client.fetch(url)
print('test')
print(response)
# fetch_coroutine('http://sxu.today')
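# Note: the call above is commented out, so starting the IOLoop below runs nothing.
# A minimal way to actually drive the coroutine (assumed usage, not in the original) would be:
#   tornado.ioloop.IOLoop.current().run_sync(lambda: fetch_coroutine('http://sxu.today'))
# which schedules it, waits for completion, and stops the loop automatically.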
tornado.ioloop.IOLoop.instance().start()
| [
"[email protected]"
] | |
8edd25a05553c2dfeaaf20dd6357cab4c4a87d07 | 4b5173af602439cdca0db985dcbdd25aef9b7a6b | /bin/topology-json-connector.py | fb1c33926b316f0d4ed9f1c45d8fbaeac5353af5 | [] | no_license | kevangel79/argo-egi-connectors | a3d4bf502708cdce08acf01b5c58812042e4d201 | dff42c3d4e5e7ea31ca839dd93f975ea27f4ffda | refs/heads/master | 2022-02-23T15:01:13.081358 | 2022-02-15T14:54:05 | 2022-02-15T14:54:05 | 250,012,827 | 0 | 0 | null | 2020-03-25T15:14:01 | 2020-03-25T15:14:00 | null | UTF-8 | Python | false | false | 7,746 | py | #!/usr/bin/python3
import argparse
import os
import sys
import json
import uvloop
import asyncio
from argo_egi_connectors.io.http import SessionWithRetry
from argo_egi_connectors.exceptions import ConnectorHttpError, ConnectorParseError
from argo_egi_connectors.io.webapi import WebAPI
from argo_egi_connectors.io.avrowrite import AvroWriter
from argo_egi_connectors.io.statewrite import state_write
from argo_egi_connectors.log import Logger
from argo_egi_connectors.config import Global, CustomerConf
from argo_egi_connectors.utils import filename_date, datestamp, date_check
from argo_egi_connectors.parse.flat_topology import ParseFlatEndpoints, ParseContacts
from argo_egi_connectors.mesh.contacts import attach_contacts_topodata
from urllib.parse import urlparse
logger = None
globopts = {}
custname = ''
def is_feed(feed):
data = urlparse(feed)
if not data.netloc:
return False
else:
return True
async def send_webapi(webapi_opts, data, topotype, fixed_date=None):
webapi = WebAPI(sys.argv[0], webapi_opts['webapihost'],
webapi_opts['webapitoken'], logger,
int(globopts['ConnectionRetry'.lower()]),
int(globopts['ConnectionTimeout'.lower()]),
int(globopts['ConnectionSleepRetry'.lower()]),
date=fixed_date)
await webapi.send(data, topotype)
def get_webapi_opts(cglob, confcust):
webapi_custopts = confcust.get_webapiopts()
webapi_opts = cglob.merge_opts(webapi_custopts, 'webapi')
webapi_complete, missopt = cglob.is_complete(webapi_opts, 'webapi')
if not webapi_complete:
logger.error('Customer:%s %s options incomplete, missing %s' % (logger.customer, 'webapi', ' '.join(missopt)))
raise SystemExit(1)
return webapi_opts
async def fetch_data(feed):
remote_topo = urlparse(feed)
session = SessionWithRetry(logger, custname, globopts)
res = await session.http_get('{}://{}{}'.format(remote_topo.scheme,
remote_topo.netloc,
remote_topo.path))
return res
def parse_source_topo(res, uidservtype, fetchtype):
# group_groups, group_endpoints = ParseEoscTopo(logger, res, uidservtype, fetchtype).get_data()
topo = ParseFlatEndpoints(logger, res, custname, uidservtype, fetchtype, scope=custname)
group_groups = topo.get_groupgroups()
group_endpoints = topo.get_groupendpoints()
return group_groups, group_endpoints
async def write_state(confcust, fixed_date, state):
cust = list(confcust.get_customers())[0]
jobstatedir = confcust.get_fullstatedir(globopts['InputStateSaveDir'.lower()], cust)
fetchtype = confcust.get_topofetchtype()
if fixed_date:
await state_write(sys.argv[0], jobstatedir, state,
globopts['InputStateDays'.lower()],
fixed_date.replace('-', '_'))
else:
await state_write(sys.argv[0], jobstatedir, state,
globopts['InputStateDays'.lower()])
def write_avro(confcust, group_groups, group_endpoints, fixed_date):
custdir = confcust.get_custdir()
if fixed_date:
filename = filename_date(logger, globopts['OutputTopologyGroupOfGroups'.lower()], custdir, fixed_date.replace('-', '_'))
else:
filename = filename_date(logger, globopts['OutputTopologyGroupOfGroups'.lower()], custdir)
avro = AvroWriter(globopts['AvroSchemasTopologyGroupOfGroups'.lower()], filename)
ret, excep = avro.write(group_groups)
if not ret:
logger.error('Customer:%s : %s' % (logger.customer, repr(excep)))
raise SystemExit(1)
if fixed_date:
filename = filename_date(logger, globopts['OutputTopologyGroupOfEndpoints'.lower()], custdir, fixed_date.replace('-', '_'))
else:
filename = filename_date(logger, globopts['OutputTopologyGroupOfEndpoints'.lower()], custdir)
avro = AvroWriter(globopts['AvroSchemasTopologyGroupOfEndpoints'.lower()], filename)
ret, excep = avro.write(group_endpoints)
if not ret:
logger.error('Customer:%s : %s' % (logger.customer, repr(excep)))
raise SystemExit(1)
def main():
global logger, globopts, confcust
parser = argparse.ArgumentParser(description="""Fetch and construct entities from EOSC-PORTAL feed""")
parser.add_argument('-c', dest='custconf', nargs=1, metavar='customer.conf', help='path to customer configuration file', type=str, required=False)
parser.add_argument('-g', dest='gloconf', nargs=1, metavar='global.conf', help='path to global configuration file', type=str, required=False)
parser.add_argument('-d', dest='date', metavar='YEAR-MONTH-DAY', help='write data for this date', type=str, required=False)
args = parser.parse_args()
group_endpoints, group_groups = list(), list()
logger = Logger(os.path.basename(sys.argv[0]))
fixed_date = None
if args.date and date_check(args.date):
fixed_date = args.date
confpath = args.gloconf[0] if args.gloconf else None
cglob = Global(sys.argv[0], confpath)
globopts = cglob.parse()
confpath = args.custconf[0] if args.custconf else None
confcust = CustomerConf(sys.argv[0], confpath)
confcust.parse()
confcust.make_dirstruct()
confcust.make_dirstruct(globopts['InputStateSaveDir'.lower()])
global custname
custname = confcust.get_custname()
# safely assume here one customer defined in customer file
cust = list(confcust.get_customers())[0]
jobstatedir = confcust.get_fullstatedir(globopts['InputStateSaveDir'.lower()], cust)
fetchtype = confcust.get_topofetchtype()[0]
state = None
logger.customer = custname
uidservtype = confcust.get_uidserviceendpoints()
topofeed = confcust.get_topofeed()
loop = uvloop.new_event_loop()
asyncio.set_event_loop(loop)
try:
if is_feed(topofeed):
res = loop.run_until_complete(fetch_data(topofeed))
group_groups, group_endpoints = parse_source_topo(res, uidservtype, fetchtype)
contacts = ParseContacts(logger, res, uidservtype, is_csv=False).get_contacts()
attach_contacts_topodata(logger, contacts, group_endpoints)
else:
try:
with open(topofeed) as fp:
js = json.load(fp)
group_groups, group_endpoints = parse_source_topo(js, uidservtype, fetchtype)
except IOError as exc:
logger.error('Customer:%s : Problem opening %s - %s' % (logger.customer, topofeed, repr(exc)))
loop.run_until_complete(
write_state(confcust, fixed_date, True)
)
webapi_opts = get_webapi_opts(cglob, confcust)
numge = len(group_endpoints)
numgg = len(group_groups)
# send concurrently to WEB-API in coroutines
if eval(globopts['GeneralPublishWebAPI'.lower()]):
loop.run_until_complete(
asyncio.gather(
send_webapi(webapi_opts, group_groups, 'groups', fixed_date),
send_webapi(webapi_opts, group_endpoints,'endpoints', fixed_date)
)
)
if eval(globopts['GeneralWriteAvro'.lower()]):
write_avro(confcust, group_groups, group_endpoints, fixed_date)
logger.info('Customer:' + custname + ' Fetched Endpoints:%d' % (numge) + ' Groups(%s):%d' % (fetchtype, numgg))
except (ConnectorHttpError, ConnectorParseError, KeyboardInterrupt) as exc:
logger.error(repr(exc))
loop.run_until_complete(
write_state(confcust, fixed_date, False )
)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
9db8984e6ebf73d18d04cec93f522f942fae5520 | 81539aba88c22cf75bd2e14f5e0e92f2bf54e962 | /DarkMatterMap2017/TTbarDMJets_Inclusive_pseudoscalar_LO_TuneCP5_13TeV_madgraph_mcatnlo_pythia8/TTbarDMJets_Inclusive_pseudoscalar_LO_Mchi-30_Mphi-100_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/TTbarDMJets_Inclusive_pseudoscalar_LO_TuneCP5_13TeV_madgraph_mcatnlo_pythia8_40000_0_cff.py | 05fe9e736ba8add5e80e5b30edd2c2a3536e5193 | [] | no_license | nistefan/RandomizedParametersSeparator | ad35b48b95e9745814c0bf9d8d8b6eb8aa479177 | 66a0e291b59113c6b5301768f1c10e36cf23d3c3 | refs/heads/master | 2021-01-03T00:41:17.415005 | 2020-02-19T13:30:54 | 2020-02-19T13:30:54 | 239,838,928 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,387 | py | import FWCore.ParameterSet.Config as cms
maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
readFiles = cms.untracked.vstring()
source = cms.Source ("PoolSource",fileNames = readFiles, lumisToProcess = cms.untracked.VLuminosityBlockRange(*('1:133', '1:356', '1:341', '1:357', '1:34613', '1:36531', '1:36858', '1:35693', '1:33790', '1:33399', '1:33453', '1:33471', '1:33558', '1:33574', '1:33580', '1:33516', '1:33526', '1:34945', '1:68369', '1:68373', '1:68376', '1:68334', '1:34145', '1:37618', '1:34437', '1:38912', '1:36860', '1:34742', '1:34782', '1:34792', '1:36173', '1:36245', '1:36272', '1:36275', '1:38949', '1:68100', '1:38496', '1:38895', '1:68060', '1:50577', '1:50614', '1:50769', '1:50827', '1:50926', '1:985', '1:50079', '1:38148', '1:37790', '1:33998', '1:34316', '1:38308', '1:33700', '1:34411', '1:37644', '1:63662', '1:63668', '1:63697', '1:63986', '1:63990', '1:69141', '1:69145', '1:69154', '1:69209', '1:99233', '1:99348', '1:99381', '1:99389', '1:99407', '1:99128', '1:99267', '1:99360', '1:99339', '1:99303', '1:99429', '1:99443', '1:99306', '1:99399', '1:99413', '1:100326', '1:35138', '1:35500', '1:35917', '1:37506', '1:33933', '1:36051', '1:36054', '1:37517', '1:37530', '1:37533', '1:99088', '1:99638', '1:34254', '1:36452', '1:33935', '1:36587', '1:50048', '1:34890', '1:38645', '1:68284', '1:50046', '1:63754', '1:69791', '1:63712', '1:63715', '1:63948', '1:69952', '1:100060', '1:290', '1:34021', '1:36044', '1:33100', '1:33285', '1:33305', '1:33430', '1:100257', '1:50424', '1:50615', '1:50724', '1:50297', '1:34994', '1:34885', '1:50630', '1:50684', '1:50842', '1:50865', '1:50599', '1:63452', '1:99699', '1:85', '1:401', '1:34231', '1:100271', '1:100607', '1:34019', '1:34253', '1:35866', '1:100917', '1:100969', '1:33011', '1:33017', '1:37850', '1:37874', '1:34049', '1:37558', '1:34472', '1:34468', '1:36071', '1:100529', '1:38439', '1:38729', '1:38775', '1:68138', '1:68190', '1:68205', '1:69088', '1:35228', '1:35741', '1:33881', '1:33887', '1:34975', '1:34979', '1:36664', '1:37298', '1:33646', '1:33693', '1:34045', '1:37363', '1:34057', '1:69692', '1:33592', '1:34334', '1:34381', '1:63194', '1:63542', '1:50247', '1:63562', ))
)
readFiles.extend( ['/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Inclusive_pseudoscalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/40000/ACA45E67-4710-EA11-993D-AC1F6B1AF142.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Inclusive_pseudoscalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/40000/EE07DEB0-7311-EA11-88E2-7CD30ACE1479.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Inclusive_pseudoscalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/40000/DEA7093A-5810-EA11-9AB2-0CC47AFF0454.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Inclusive_pseudoscalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/40000/62D36710-EC10-EA11-A1B6-98039B3B003A.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Inclusive_pseudoscalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/40000/46F6364E-2311-EA11-A748-0CC47A1DF800.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Inclusive_pseudoscalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/40000/8CF037CB-4710-EA11-80A5-0CC47A5FC281.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Inclusive_pseudoscalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/40000/88FDBA27-9A11-EA11-B345-A0369FC524AC.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Inclusive_pseudoscalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/40000/B0B5A181-2812-EA11-B322-B083FED1321B.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Inclusive_pseudoscalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/40000/B6F2C869-7914-EA11-8091-F01FAFD69D00.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Inclusive_pseudoscalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/40000/62A1C426-9D14-EA11-86E0-1418774A24C6.root']); | [
"[email protected]"
] | |
8dd0e8db5c106dc0a4a867b92314ee3975bfac32 | ac64fda7f1bfc92f7897efd60b8f3f0aeb22b4d7 | /syntactic_mutations/cifar/mutants/mutant85.py | 36cc2dc7a92ebacc6a2c035d43c75d2a8980ffb9 | [] | no_license | dlfaults/mutation_operators_evaluation | ea7f33459ba7bcf7d70092d9db8b40f9b338d516 | 7d1ff30e901931a46bf8908e9bb05cae3daa5f0f | refs/heads/master | 2020-12-27T15:45:07.262012 | 2020-02-03T12:22:01 | 2020-02-03T12:22:01 | 237,955,342 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,737 | py | import keras
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D
from keras.datasets import cifar10
from keras.layers import Dense, Activation, Flatten, Dropout, BatchNormalization
from keras.layers import Conv2D, MaxPooling2D
def train_model(x_train, y_train, x_test, y_test, model_name):
num_classes = 10
batch_size = 32
epochs = 25
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
model = Sequential()
model.add(Conv2D(32, (3, 3), padding='same', input_shape=\
x_train.shape[1:]))
model.add(Activation('relu'))
model.add(Conv2D(32, (3, 3)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Conv2D(64, (3, 3), padding='same'))
model.add(Activation('relu'))
model.add(Conv2D(64, (3, 3)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(512))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(num_classes))
model.add(Activation('softmax'))
opt = keras.optimizers.rmsprop(lr=0.0001, decay=1e-06)
pass
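    # NOTE: the usual model.compile(loss=..., optimizer=opt, metrics=...) call appears to have
    # been replaced by `pass` here (this file lives under mutants/, so that is presumably the
    # injected mutation); model.fit() below will fail unless the model is compiled elsewhere.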
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
model.fit(x_train, y_train, batch_size=\
batch_size, epochs=\
epochs, validation_data=\
(x_test, y_test), shuffle=\
True)
model.save(model_name)
scores = model.evaluate(x_test, y_test, verbose=1)
return (scores[0], scores[1]) | [
"[email protected]"
] | |
a8a7d2bf186db9e87192d299ecfbb8586f09ef41 | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/pax.py | 6ddd75e25638bed8cf1f184a2bb9f8a5d8e723e8 | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 62 | py | ii = [('DaltJMA.py', 1), ('MereHHB.py', 1), ('BrewDTO.py', 1)] | [
"[email protected]"
] | |
b707e83ed214fca7294e5bb95202740e88d02f5a | 8065409913cc84af9cab97703e9928700a7d6ee9 | /physix/bodyeditor/tools/transform/__init__.py | 062ea8e6e6be0c24ddefd2cb465bb620a8173151 | [] | no_license | 2xR/legacy | 678b6d77a11fe9705e6156589e67504e710d42de | 56099d07bdbc6feb7ebc848a37d5940cbd5f5552 | refs/heads/master | 2021-01-16T22:13:34.380662 | 2016-07-07T17:47:14 | 2016-07-07T17:47:14 | 62,826,787 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 65 | py | from physix.bodyeditor.tools.transform.tool import TransformTool
| [
"[email protected]"
] | |
00ba0a6e6947133adc26d002b22be559066ae5c3 | daf9b136e9f55f3f24e0e6b4ca93ae8bcd0c4b94 | /3puntos1.py | 03866df81eb300ff22fa2e5932df8cc4a24d96e0 | [] | no_license | jorszs/computacion_grafica | 432b60a87eceff2e50b86c8a27568621b9e27c1b | 4b60639beab3410b1349af749efd09f32d657412 | refs/heads/master | 2021-04-12T11:29:40.929664 | 2018-05-14T07:50:47 | 2018-05-14T07:50:47 | 126,243,449 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 46 | py |
def mostrar():  # "mostrar" = "show"
    print "esta es la prueba"  # "this is the test"
| [
"[email protected]"
] | |
3f867e03ffc111fa3e897382bcaacef5230d16eb | 5442e70e9a68b0a76601082e9aa307674fd4fb95 | /evaluation/draw.py | a8f5d430416dfc0ae686c2c9b91b4ba0ed113967 | [] | no_license | sycophant-stone/mmdetection_2.0 | 1871e85311fee3b81d3b1415787c837aadf6b543 | 6caabdcf81e7b003e612d053c94bd6c993dd5c3e | refs/heads/master | 2020-09-09T10:49:42.961409 | 2019-12-18T14:01:57 | 2019-12-18T14:01:57 | 221,426,479 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,120 | py | import cv2
import numpy as np  # needed for np.arange below; this import is missing from the original file
def drawline(img, pt1, pt2, color, thickness=1, style='dotted', gap=10):
dist = ((pt1[0] - pt2[0]) ** 2 + (pt1[1] - pt2[1]) ** 2) ** .5
pts= []
for i in np.arange(0, dist, gap):
r = i / dist
x = int((pt1[0] * (1 - r) + pt2[0] * r) + .5)
y = int((pt1[1] * (1 - r) + pt2[1] * r) + .5)
p = (x, y)
pts.append(p)
if style == 'dotted':
for p in pts:
cv2.circle(img, p, thickness, color, -1)
else:
s = pts[0]
e = pts[0]
i = 0
for p in pts:
s = e
e = p
if i % 2 == 1:
cv2.line(img,s,e,color,thickness)
i += 1
def drawpoly(img, pts, color, thickness=1, style='dotted'):
s = pts[0]
e = pts[0]
pts.append(pts.pop(0))
for p in pts:
s = e
e = p
drawline(img, s, e, color, thickness, style)
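# Usage sketch for these helpers (assumed, not part of the original module; drawrect is defined just below):
#   canvas = np.zeros((200, 300, 3), dtype=np.uint8)
#   drawrect(canvas, (20, 20), (280, 180), color=(0, 255, 0), thickness=2, style='dotted')
#   drawline(canvas, (20, 20), (280, 180), color=(0, 0, 255), thickness=2, style='dashed')
#   cv2.imwrite('canvas.png', canvas)   # 'canvas.png' is a made-up output name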
def drawrect(img, pt1, pt2, color, thickness=1, style='dotted'):
pts = [pt1, (pt2[0], pt1[1]), pt2, (pt1[0], pt2[1])]
drawpoly(img, pts, color, thickness, style) | [
"[email protected]"
] | |
b7883cd4b78b7653e74b59d2049a25ba4db5e551 | 5355d2e8576ed3afdc0b13f330ab38eda00a84e0 | /odin/ml/decompositions.py | a0d4c9ebb37857726bd7f7c0022f68966e95cdc4 | [
"MIT"
] | permissive | johndpope/odin-ai | 63ee1d6e5c8dc11c72d04c7cc7350b3d4e3208ec | b6eb0c7b89c50620fe88f41f421330a50965c88e | refs/heads/master | 2020-06-24T21:30:24.388734 | 2019-07-13T12:49:55 | 2019-07-13T12:49:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 39,660 | py | # -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import
import math
from numbers import Number
from six import string_types
import numpy as np
from scipy import linalg
from multiprocessing import Value, Array
from sklearn.decomposition import IncrementalPCA, PCA
from sklearn.utils import (check_array, gen_batches, check_random_state,
as_float_array)
from sklearn.utils.validation import check_is_fitted
from sklearn.utils.extmath import (svd_flip, _incremental_mean_and_var,
randomized_svd)
from odin.fuel import Data
from odin.utils.mpi import MPI
from odin.ml.base import TransformerMixin, BaseEstimator
from odin.utils import batching, ctext, flatten_list, Progbar
__all__ = [
"fast_pca",
"MiniBatchPCA",
"PPCA",
"SupervisedPPCA",
]
def fast_pca(*x, n_components=None, algo='rpca', y=None,
batch_size=1024, return_model=False,
random_state=1234):
""" A shortcut for many different PCA algorithms
Parameters
----------
x : {list, tuple}
list of matrices for transformation, the first matrix will
be used for training
n_components : {None, int}
number of PCA components
algo : {'pca', 'ipca', 'ppca', 'sppca', 'plda', 'rpca'}
different PCA algorithm:
'ipca' - IncrementalPCA,
'ppca' - Probabilistic PCA,
'sppca' - Supervised Probabilistic PCA,
'plda' - Probabilistic LDA,
'rpca' - randomized PCA using randomized SVD
y : {numpy.ndarray, None}
    class labels; required when `algo` is 'sppca' or 'plda'
batch_size : int (default: 1024)
batch size, only used for IncrementalPCA
return_model : bool (default: False)
if True, return the trained PCA model as the FIRST return
"""
batch_size = int(batch_size)
algo = str(algo).lower()
if algo not in ('pca', 'ipca', 'ppca', 'sppca', 'plda', 'rpca'):
raise ValueError("`algo` must be one of the following: 'pca', "
"'ppca', 'plda', 'sppca', or 'rpca'; but given: '%s'" % algo)
if algo in ('sppca', 'plda') and y is None:
    raise RuntimeError("`y` must not be None when `algo` is 'sppca' or 'plda'")
x = flatten_list(x, level=None)
x = [i[:] if i.__class__.__name__ == 'MmapData' else i
for i in x]
# ====== check input ====== #
x_train = x[0]
x_test = x[1:]
input_shape = None
if x_train.ndim > 2: # only 2D for PCA
input_shape = (-1,) + x_train.shape[1:]
new_shape = (-1, np.prod(input_shape[1:]))
x_train = np.reshape(x_train, new_shape)
x_test = [np.reshape(x, new_shape) for x in x_test]
if n_components is not None: # no need to reshape back
input_shape = None
# ====== train PCA ====== #
if algo == 'sppca':
pca = SupervisedPPCA(n_components=n_components, random_state=random_state)
pca.fit(x_train, y)
elif algo == 'plda':
from odin.ml import PLDA
pca = PLDA(n_phi=n_components, random_state=random_state)
pca.fit(x_train, y)
elif algo == 'pca':
pca = PCA(n_components=n_components, random_state=random_state)
pca.fit(x_train)
elif algo == 'rpca':
# we copy the implementation of RandomizedPCA because
# it is significantly faster than PCA(svd_solver='randomize')
pca = RandomizedPCA(n_components=n_components, iterated_power=2,
random_state=random_state)
pca.fit(x_train)
elif algo == 'ipca':
pca = IncrementalPCA(n_components=n_components, batch_size=batch_size)
prog = Progbar(target=x_train.shape[0],
print_report=False, print_summary=False, name="Fitting PCA")
for start, end in batching(batch_size=batch_size, n=x_train.shape[0],
seed=1234):
pca.partial_fit(x_train[start:end], check_input=False)
prog.add(end - start)
elif algo == 'ppca':
pca = PPCA(n_components=n_components, random_state=random_state)
pca.fit(x_train)
# ====== transform ====== #
x_train = pca.transform(x_train)
x_test = [pca.transform(x) for x in x_test]
# reshape back to original shape if necessary
if input_shape is not None:
x_train = np.reshape(x_train, input_shape)
x_test = [np.reshape(x, input_shape) for x in x_test]
# return the results
if len(x_test) == 0:
return x_train if not return_model else (pca, x_train)
return tuple([x_train] + x_test) if not return_model else tuple([pca, x_train] + x_test)
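# Minimal usage sketch for fast_pca (assumed shapes, not part of the original module):
#   from odin.ml.decompositions import fast_pca
#   X_train = np.random.rand(500, 64)
#   X_test = np.random.rand(100, 64)
#   Z_train, Z_test = fast_pca(X_train, X_test, n_components=2, algo='rpca')
#   # Z_train: [500, 2], Z_test: [100, 2]; pass return_model=True to also get the fitted model back.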
# ===========================================================================
# PPCA
# ===========================================================================
class PPCA(BaseEstimator, TransformerMixin):
""" Probabilistic Principal Components Analysis
(C) Copyright University of Eastern Finland (UEF).
Ville Vestman, [email protected],
Tomi Kinnunen, [email protected].
Parameters
----------
n_components : {int, None}
if None, keep the same dimensions as input features
bias : {vector, 'auto'} [feat_dim,]
if 'auto' take mean of training data
n_iter : {integer, 'auto'}
if 'auto', keep iterating until no more improvement (i.e. reduction in `sigma` value)
compared to the `improve_threshold`
improve_threshold : scalar
Only used in case `n_iter='auto'`
solver : {'traditional', 'simple'}
verbose: {0, 1}
showing logging information during fitting
random_state : {None, integer, numpy.random.RandomState}
Attributes
----------
V_ : [feat_dim, n_components]
total variability matrix
bias_ : [feat_dim]
bias vector
sigma_ : scalar
variance of error term
References
----------
[1] Ville Vestman and Tomi Kinnunen, "Supervector Compression
Strategies to Speed up i-vector System Development",
submitted to Speaker Odyssey 2018.
"""
def __init__(self, n_components=None, bias='auto',
n_iter='auto', improve_threshold=1e-3, solver='traditional',
verbose=0, random_state=None):
super(PPCA, self).__init__()
if isinstance(n_components, Number):
assert n_components > 0, \
"`n_components` must be greater than 0, but given: %d" % n_components
n_components = int(n_components)
elif n_components is not None:
raise ValueError("`n_components` can be None or integer")
self.n_components_ = n_components
# ====== checking bias ====== #
if isinstance(bias, string_types):
bias = bias.strip().lower()
assert bias == 'auto', 'Invalid value for `bias`: %s' % bias
elif not isinstance(bias, (np.ndarray, Number)):
raise ValueError("`bias` can be 'auto', numpy.ndarray or a number")
self.bias_ = bias
# ====== checking solver ====== #
if solver not in ('traditional', 'simple'):
raise ValueError("`solver` must be: 'traditional', or 'simple'")
self.solver_ = solver
# ====== checking n_iter ====== #
if isinstance(n_iter, string_types):
n_iter = n_iter.lower()
assert n_iter == 'auto', 'Invalid `n_iter` value: %s' % n_iter
elif isinstance(n_iter, Number):
assert n_iter > 0, "`n_iter` must greater than 0, but given: %d" % n_iter
self.n_iter_ = n_iter
# ====== checking random_state ====== #
if random_state is None:
rand = np.random.RandomState(seed=None)
elif isinstance(random_state, Number):
rand = np.random.RandomState(seed=None)
elif isinstance(random_state, np.random.RandomState):
rand = random_state
else:
raise ValueError("No suppport for `random_state` value: %s" % str(random_state))
self.random_state_ = rand
# ====== other dimension ====== #
self.improve_threshold_ = float(improve_threshold)
self.feat_dim_ = None
self.verbose_ = int(verbose)
def fit(self, X, y=None):
# ====== initialize ====== #
num_samples, feat_dim = X.shape
n_components = feat_dim if self.n_components_ is None else self.n_components_
if self.bias_ == 'auto':
bias = np.mean(X, 0)
elif isinstance(self.bias_, Number):
bias = np.full(shape=(feat_dim,), fill_value=self.bias_)
else:
bias = self.bias_
assert bias.shape == (feat_dim,), \
"Invialid `bias` given shape: %s, require shape: %s" % (str(bias.shape), str((feat_dim,)))
# ====== initialize parameters ====== #
V = self.random_state_.rand(feat_dim, n_components)
last_sigma = None
sigma = 1
centeredM = X - bias[np.newaxis, :]
varianceM = np.sum(centeredM**2) / (num_samples * feat_dim)
# ====== training ====== #
if self.verbose_:
print('[PPCA]n_components: %d n_sample: %d feat_dim: %d n_iter: %d threshold: %f solver: %s' %
(n_components, num_samples, feat_dim,
-1 if self.n_iter_ == 'auto' else self.n_iter_, self.improve_threshold_, self.solver_))
curr_n_iter = 0
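    # EM iterations (cf. Tipping & Bishop's probabilistic PCA): each pass computes the posterior
    # statistics of the latent factors given the current V and sigma (Sigma, my, sumEmm below),
    # then re-estimates V and sigma from those expected sufficient statistics.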
while True:
B = (V * 1 / sigma).T # [feat_dim, n_components]
Sigma = np.linalg.inv(np.eye(n_components) + np.dot(B, V)) # [n_components, n_components]
my = np.dot(np.dot(Sigma, B), centeredM.T) # [n_components, num_samples]
if self.solver_ == 'traditional':
sumEmm = num_samples * Sigma + np.dot(my, my.T)
elif self.solver_ == 'simple':
sumEmm = np.dot(my, my.T)
sumEmmInv = np.linalg.inv(sumEmm) # [n_components, n_components]
# updating V and sigma for next iteration
V = np.dot(np.dot(centeredM.T, my.T), sumEmmInv) # [feat_dim, n_components]
last_sigma = sigma
sigma = varianceM - np.sum(sumEmm * np.dot(V.T, V)) / (feat_dim * num_samples)
improvement = last_sigma - sigma
# log
if self.verbose_ > 0:
print("Iteration: %d sigma: %.3f improvement: %.3f" % (curr_n_iter, sigma, improvement))
# check iteration escape
curr_n_iter += 1
if isinstance(self.n_iter_, Number):
if curr_n_iter >= self.n_iter_:
break
elif curr_n_iter > 1 and improvement < self.improve_threshold_:
break
# ====== save the model ====== #
# record new dimensions
self.feat_dim_ = feat_dim
self.n_components_ = n_components
# trained vectors and matrices
self.V_ = V
self.bias_ = bias
self.sigma_ = sigma
# pre-calculate matrix for transform
B = (V * 1 / sigma).T
Sigma = np.linalg.inv(np.eye(n_components) + np.dot(B, V))
self.extractorMatrix_ = np.dot(Sigma, B) # [n_components, feat_dim]
def transform(self, X):
"""
Parameters
----------
X : matrix [num_samples, feat_dim]
"""
assert hasattr(self, 'extractorMatrix_'), "The model hasn't `fit` on data"
assert X.shape[1] == self.feat_dim_, \
"Expect input matrix with shape: [?, %d], but give: %s" % (self.feat_dim_, str(X.shape))
ivec = np.dot(self.extractorMatrix_, (X - self.bias_[np.newaxis, :]).T)
return ivec.T
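# Usage sketch for PPCA (assumed shapes; mirrors how fast_pca(algo='ppca') uses it above):
#   ppca = PPCA(n_components=10, verbose=1, random_state=1234)
#   ppca.fit(X_train)                   # X_train: [num_samples, feat_dim]
#   ivectors = ppca.transform(X_test)   # -> [num_samples, 10]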
class SupervisedPPCA(PPCA):
""" Supervised Probabilistic Principal Components Analysis
(C) Copyright University of Eastern Finland (UEF).
Ville Vestman, [email protected],
Tomi Kinnunen, [email protected].
Parameters
----------
n_components : {int, None}
if None, keep the same dimensions as input features
bias : {vector, 'auto'} [feat_dim,]
if 'auto' take mean of training data
beta : scalar (default: 1)
a weight parameter (use beta = 1 as default)
n_iter : {integer, 'auto'}
if 'auto', keep iterating until no more improvement (i.e. reduction in `sigma` value)
compared to the `improve_threshold`
improve_threshold : scalar
Only used in case `n_iter='auto'`
solver : {'traditional', 'simple'}
extractor : {'supervised', 'unsupervised'}
'supervised' is the probabilistic partial least squares extractor using
both unsupervised and supervised information
verbose: {0, 1}
showing logging information during fitting
random_state : {None, integer, numpy.random.RandomState}
Attributes
----------
V_ : [feat_dim, n_components]
total variability matrix
Q_ : [feat_dim, n_components]
matrix for mapping speaker-dependent supervectors to i-vectors
sigma_ : scalar
variance of error term
rho_ : scalar
variance of error term in speaker-dependent supervector model
bias_ : [feat_dim,]
bias vector
classBias_ : [feat_dim,]
mean of speaker-dependent supervectors
"""
def __init__(self, n_components=None, bias='auto', beta=1,
n_iter='auto', improve_threshold=1e-3,
solver='traditional', extractor='supervised',
verbose=0, random_state=None):
super(SupervisedPPCA, self).__init__(n_components=n_components, bias=bias,
n_iter=n_iter, solver=solver, improve_threshold=improve_threshold,
verbose=verbose, random_state=random_state)
self.beta_ = float(beta)
# ====== check extractor ====== #
extractor = str(extractor).lower()
if extractor not in ('supervised', 'unsupervised'):
raise ValueError("`extractor` can only be: 'unsupervised' or 'supervised'")
self.extractor_ = extractor
def fit(self, X, y, z=None):
"""
Parameters
----------
X : matrix [num_samples, feat_dim]
y : vector (int) [num_samples,]
z : matrix [num_classes, feat_dim]
class-dependent feature vectors for each class from 0 to `num_classes - 1`
(in this order).
"""
# ====== initialize ====== #
num_samples, feat_dim = X.shape
num_classes = z.shape[0] if z is not None else len(np.unique(y))
n_components = feat_dim if self.n_components_ is None else self.n_components_
if self.bias_ == 'auto':
bias = np.mean(X, 0)
elif isinstance(self.bias_, Number):
bias = np.full(shape=(feat_dim,), fill_value=self.bias_)
else:
bias = self.bias_
assert bias.shape == (feat_dim,), \
"Invialid `bias` given shape: %s, require shape: %s" % (str(bias.shape), str((feat_dim,)))
# checking `y`
y = y.ravel().astype('int32')
assert y.shape[0] == num_samples, \
"Number of samples incosistent in `X`(%s) and `y`(%s)" % (str(X.shape), str(y.shape))
# checking `z`
if z is None:
z = np.empty(shape=(max(np.max(y) + 1, num_classes), feat_dim),
dtype=X.dtype)
for i in np.unique(y):
z[i, :] = np.mean(X[y == i], axis=0, keepdims=True)
else:
assert z.shape[0] == num_classes
assert z.shape[1] == feat_dim
# ====== initialize parameters ====== #
V = self.random_state_.rand(feat_dim, n_components)
Q = self.random_state_.rand(feat_dim, n_components)
last_sigma = None
sigma = 1
last_rho = None
rho = 1
centeredM = X - bias[np.newaxis, :]
varianceM = np.sum(centeredM**2) / (num_samples * feat_dim)
centeredY = z[y]
classBias = np.mean(centeredY, 0)
centeredY = centeredY - classBias[np.newaxis, :]
varianceY = np.sum(centeredY**2) / (num_samples * feat_dim)
# ====== training ====== #
if self.verbose_:
print('[S-PPCA]n_components: %d n_sample: %d feat_dim: %d n_iter: %d threshold: %f solver: %s' %
(n_components, num_samples, feat_dim,
-1 if self.n_iter_ == 'auto' else self.n_iter_, self.improve_threshold_, self.solver_))
curr_n_iter = 0
while True:
B = (V * 1 / sigma).T # [feat_dim, n_components]
C = (Q * self.beta_ * 1 / rho).T # [feat_dim, n_components]
Sigma = np.linalg.inv(np.eye(n_components) + np.dot(B, V) + np.dot(C, Q)) # [n_components, n_components]
# [n_components, num_samples]
my = np.dot(Sigma, np.dot(B, centeredM.T) + np.dot(C, centeredY.T))
if self.solver_ == 'traditional':
sumEmm = num_samples * Sigma + np.dot(my, my.T)
elif self.solver_ == 'simple':
sumEmm = np.dot(my, my.T)
sumEmmInv = np.linalg.inv(sumEmm) # [n_components, n_components]
# updating V and sigma for next iteration
V = np.dot(np.dot(centeredM.T, my.T), sumEmmInv) # [feat_dim, n_components]
Q = np.dot(np.dot(centeredY.T, my.T), sumEmmInv) # [feat_dim, n_components]
last_sigma = sigma
sigma = varianceM - np.sum(sumEmm * np.dot(V.T, V)) / (feat_dim * num_samples)
improvement_sigma = last_sigma - sigma
last_rho = rho
rho = varianceY - np.sum(sumEmm * np.dot(Q.T, Q)) / (feat_dim * num_samples)
improvement_rho = last_rho - rho
# log
if self.verbose_ > 0:
print("Iteration: %d sigma: %.3f rho: %.3f improvement: %.3f:%.3f" %
(curr_n_iter, sigma, rho, improvement_sigma, improvement_rho))
# check iteration escape
curr_n_iter += 1
if isinstance(self.n_iter_, Number):
if curr_n_iter >= self.n_iter_:
break
elif curr_n_iter > 1 and \
improvement_sigma < self.improve_threshold_ and \
improvement_rho < self.improve_threshold_:
break
# ====== save the model ====== #
# record new dimensions
self.feat_dim_ = feat_dim
self.n_components_ = n_components
self.num_classes_ = num_classes
# trained vectors and matrices
self.V_ = V
self.Q_ = Q
self.bias_ = bias
self.classBias_ = classBias
self.sigma_ = sigma
self.rho_ = rho
# pre-calculate matrix for PPCA transform
B = (V * 1 / sigma).T
Sigma = np.linalg.inv(np.eye(n_components) + np.dot(B, V))
self.extractorMatrix_ = np.dot(Sigma, B) # [n_components, feat_dim]
# pre-calculate matrix for PPLS transform
A = np.concatenate([V, Q], axis=0) # [2 * feat_dim, n_components]
B = np.concatenate([(V * 1 / sigma).T, (Q * 1 / rho).T], axis=-1) # [n_components, 2 * feat_dim]
sigmaW = np.linalg.inv(np.eye(n_components) + np.dot(B, A)) # [n_components, n_components]
self.extractorMatrixPPLS_ = np.dot(sigmaW, B) # [n_components, 2 * feat_dim]
C = np.dot(V.T, V) + sigma * np.eye(n_components) # [n_components, n_components]
self.labelMatrix_ = np.dot(Q, np.linalg.solve(C, V.T)) # [feat_dim, feat_dim]
def transform(self, X):
if self.extractor_ == 'unsupervised':
return super(SupervisedPPCA, self).transform(X)
else:
centeredM = X - self.bias_[np.newaxis, :]
labels = np.dot(self.labelMatrix_, centeredM.T) + self.classBias_[:, np.newaxis]
ivec = np.dot(self.extractorMatrixPPLS_,
np.concatenate([X.T, labels], axis=0) -
np.concatenate([self.bias_, self.classBias_])[:, np.newaxis])
return ivec.T
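# Usage sketch for SupervisedPPCA (assumed shapes/labels; with extractor='supervised' the
# transform uses the probabilistic partial least squares extractor described in the docstring):
#   sppca = SupervisedPPCA(n_components=10, extractor='supervised', random_state=1234)
#   sppca.fit(X_train, y_train)          # X_train: [num_samples, feat_dim], y_train: int labels
#   ivectors = sppca.transform(X_test)   # -> [num_samples, 10]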
# ===========================================================================
# PCA
# ===========================================================================
class RandomizedPCA(BaseEstimator, TransformerMixin):
"""Principal component analysis (PCA) using randomized SVD
Linear dimensionality reduction using approximated Singular Value
Decomposition of the data and keeping only the most significant
singular vectors to project the data to a lower dimensional space.
Parameters
----------
n_components : int, optional
Maximum number of components to keep. When not given or None, this
is set to n_features (the second dimension of the training data).
copy : bool
If False, data passed to fit are overwritten and running
fit(X).transform(X) will not yield the expected results,
use fit_transform(X) instead.
iterated_power : int, default=2
Number of iterations for the power method.
whiten : bool, optional
When True (False by default) the `components_` vectors are multiplied
by the square root of (n_samples) and divided by the singular values to
ensure uncorrelated outputs with unit component-wise variances.
Whitening will remove some information from the transformed signal
(the relative variance scales of the components) but can sometime
improve the predictive accuracy of the downstream estimators by
making their data respect some hard-wired assumptions.
random_state : int, RandomState instance or None, optional, default=None
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
Attributes
----------
components_ : array, shape (n_components, n_features)
Components with maximum variance.
explained_variance_ratio_ : array, shape (n_components,)
Percentage of variance explained by each of the selected components.
If k is not set then all components are stored and the sum of explained
variances is equal to 1.0.
singular_values_ : array, shape (n_components,)
The singular values corresponding to each of the selected components.
The singular values are equal to the 2-norms of the ``n_components``
variables in the lower-dimensional space.
mean_ : array, shape (n_features,)
Per-feature empirical mean, estimated from the training set.
Examples
--------
>>> import numpy as np
>>> from sklearn.decomposition import RandomizedPCA
>>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
>>> pca = RandomizedPCA(n_components=2)
>>> pca.fit(X) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
RandomizedPCA(copy=True, iterated_power=2, n_components=2,
random_state=None, whiten=False)
>>> print(pca.explained_variance_ratio_) # doctest: +ELLIPSIS
[ 0.99244... 0.00755...]
>>> print(pca.singular_values_) # doctest: +ELLIPSIS
[ 6.30061... 0.54980...]
References
----------
.. [Halko2009] `Finding structure with randomness: Stochastic algorithms
for constructing approximate matrix decompositions Halko, et al., 2009
(arXiv:909)`
.. [MRT] `A randomized algorithm for the decomposition of matrices
Per-Gunnar Martinsson, Vladimir Rokhlin and Mark Tygert`
"""
def __init__(self, n_components=None, copy=True, iterated_power=2,
whiten=False, random_state=None):
self.n_components = n_components
self.copy = copy
self.iterated_power = iterated_power
self.whiten = whiten
self.random_state = random_state
def fit(self, X, y=None):
"""Fit the model with X by extracting the first principal components.
Parameters
----------
X : array-like, shape (n_samples, n_features)
            Training data, where n_samples is the number of samples
and n_features is the number of features.
y : Ignored.
Returns
-------
self : object
Returns the instance itself.
"""
self._fit(check_array(X))
return self
def _fit(self, X):
"""Fit the model to the data X.
Parameters
----------
X : array-like, shape (n_samples, n_features)
            Training vector, where n_samples is the number of samples and
n_features is the number of features.
Returns
-------
X : ndarray, shape (n_samples, n_features)
The input data, copied, centered and whitened when requested.
"""
random_state = check_random_state(self.random_state)
X = np.atleast_2d(as_float_array(X, copy=self.copy))
n_samples = X.shape[0]
# Center data
self.mean_ = np.mean(X, axis=0)
X -= self.mean_
if self.n_components is None:
n_components = X.shape[1]
else:
n_components = self.n_components
U, S, V = randomized_svd(X, n_components,
n_iter=self.iterated_power,
random_state=random_state)
self.explained_variance_ = exp_var = (S ** 2) / (n_samples - 1)
full_var = np.var(X, ddof=1, axis=0).sum()
self.explained_variance_ratio_ = exp_var / full_var
self.singular_values_ = S # Store the singular values.
if self.whiten:
self.components_ = V / S[:, np.newaxis] * math.sqrt(n_samples)
else:
self.components_ = V
return X
def transform(self, X):
"""Apply dimensionality reduction on X.
X is projected on the first principal components previous extracted
from a training set.
Parameters
----------
X : array-like, shape (n_samples, n_features)
            New data, where n_samples is the number of samples
and n_features is the number of features.
Returns
-------
X_new : array-like, shape (n_samples, n_components)
"""
check_is_fitted(self, 'mean_')
X = check_array(X)
if self.mean_ is not None:
X = X - self.mean_
X = np.dot(X, self.components_.T)
return X
def fit_transform(self, X, y=None):
"""Fit the model with X and apply the dimensionality reduction on X.
Parameters
----------
X : array-like, shape (n_samples, n_features)
            New data, where n_samples is the number of samples
and n_features is the number of features.
y : Ignored.
Returns
-------
X_new : array-like, shape (n_samples, n_components)
"""
X = check_array(X)
X = self._fit(X)
return np.dot(X, self.components_.T)
def inverse_transform(self, X):
"""Transform data back to its original space.
Returns an array X_original whose transform would be X.
Parameters
----------
X : array-like, shape (n_samples, n_components)
            New data, where n_samples is the number of samples
and n_components is the number of components.
Returns
-------
X_original array-like, shape (n_samples, n_features)
Notes
-----
If whitening is enabled, inverse_transform does not compute the
exact inverse operation of transform.
"""
check_is_fitted(self, 'mean_')
X_original = np.dot(X, self.components_)
if self.mean_ is not None:
X_original = X_original + self.mean_
return X_original
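# Hedged usage sketch for RandomizedPCA (illustrative only; it mirrors the
# docstring example above and the variable names are placeholders):
#
#   pca = RandomizedPCA(n_components=2, whiten=False)
#   X_low = pca.fit_transform(X)           # project onto 2 components
#   X_back = pca.inverse_transform(X_low)  # approximate reconstruction
#
# With whiten=False the mean squared reconstruction error is roughly the
# variance carried by the discarded components; with whiten=True
# inverse_transform is no longer an exact inverse (see its docstring).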
class MiniBatchPCA(IncrementalPCA):
""" A modified version of IncrementalPCA to effectively
support multi-processing (but not work)
Original Author: Kyle Kastner <[email protected]>
Giorgio Patrini
License: BSD 3 clause
Incremental principal components analysis (IPCA).
Linear dimensionality reduction using Singular Value Decomposition of
centered data, keeping only the most significant singular vectors to
project the data to a lower dimensional space.
Depending on the size of the input data, this algorithm can be much more
memory efficient than a PCA.
This algorithm has constant memory complexity, on the order
of ``batch_size``, enabling use of np.memmap files without loading the
entire file into memory.
The computational overhead of each SVD is
``O(batch_size * n_features ** 2)``, but only 2 * batch_size samples
remain in memory at a time. There will be ``n_samples / batch_size`` SVD
computations to get the principal components, versus 1 large SVD of
complexity ``O(n_samples * n_features ** 2)`` for PCA.
Read more in the :ref:`User Guide <IncrementalPCA>`.
Parameters
----------
n_components : int or None, (default=None)
        Number of components to keep. If ``n_components`` is ``None``,
then ``n_components`` is set to ``min(n_samples, n_features)``.
batch_size : int or None, (default=None)
The number of samples to use for each batch. Only used when calling
``fit``. If ``batch_size`` is ``None``, then ``batch_size``
is inferred from the data and set to ``5 * n_features``, to provide a
balance between approximation accuracy and memory consumption.
copy : bool, (default=True)
If False, X will be overwritten. ``copy=False`` can be used to
save memory but is unsafe for general use.
whiten : bool, optional
When True (False by default) the ``components_`` vectors are divided
by ``n_samples`` times ``components_`` to ensure uncorrelated outputs
with unit component-wise variances.
Whitening will remove some information from the transformed signal
(the relative variance scales of the components) but can sometimes
improve the predictive accuracy of the downstream estimators by
making data respect some hard-wired assumptions.
Attributes
----------
components_ : array, shape (n_components, n_features)
Components with maximum variance.
explained_variance_ : array, shape (n_components,)
Variance explained by each of the selected components.
explained_variance_ratio_ : array, shape (n_components,)
Percentage of variance explained by each of the selected components.
If all components are stored, the sum of explained variances is equal
to 1.0
mean_ : array, shape (n_features,)
Per-feature empirical mean, aggregate over calls to ``partial_fit``.
var_ : array, shape (n_features,)
Per-feature empirical variance, aggregate over calls to
``partial_fit``.
noise_variance_ : float
The estimated noise covariance following the Probabilistic PCA model
from Tipping and Bishop 1999. See "Pattern Recognition and
Machine Learning" by C. Bishop, 12.2.1 p. 574 or
http://www.miketipping.com/papers/met-mppca.pdf.
n_components_ : int
The estimated number of components. Relevant when
``n_components=None``.
n_samples_seen_ : int
The number of samples processed by the estimator. Will be reset on
new calls to fit, but increments across ``partial_fit`` calls.
Notes
-----
Implements the incremental PCA model from:
`D. Ross, J. Lim, R. Lin, M. Yang, Incremental Learning for Robust Visual
Tracking, International Journal of Computer Vision, Volume 77, Issue 1-3,
pp. 125-141, May 2008.`
See http://www.cs.toronto.edu/~dross/ivt/RossLimLinYang_ijcv.pdf
This model is an extension of the Sequential Karhunen-Loeve Transform from:
`A. Levy and M. Lindenbaum, Sequential Karhunen-Loeve Basis Extraction and
its Application to Images, IEEE Transactions on Image Processing, Volume 9,
Number 8, pp. 1371-1374, August 2000.`
See http://www.cs.technion.ac.il/~mic/doc/skl-ip.pdf
We have specifically abstained from an optimization used by authors of both
papers, a QR decomposition used in specific situations to reduce the
algorithmic complexity of the SVD. The source for this technique is
`Matrix Computations, Third Edition, G. Holub and C. Van Loan, Chapter 5,
section 5.4.4, pp 252-253.`. This technique has been omitted because it is
advantageous only when decomposing a matrix with ``n_samples`` (rows)
>= 5/3 * ``n_features`` (columns), and hurts the readability of the
implemented algorithm. This would be a good opportunity for future
optimization, if it is deemed necessary.
For `multiprocessing`, you can do parallelized `partial_fit` or `transform`
but you cannot do `partial_fit` in one process and `transform` in the others.
Application
-----------
In detail, in order for PCA to work well, informally we require that
(i) The features have approximately zero mean, and
(ii) The different features have similar variances to each other.
With natural images, (ii) is already satisfied even without variance
normalization, and so we won’t perform any variance normalization.
(If you are training on audio data—say, on spectrograms—or on text data—say,
bag-of-word vectors—we will usually not perform variance normalization
either.)
By using PCA, we aim for:
(i) the features are less correlated with each other, and
(ii) the features all have the same variance.
Original link: http://ufldl.stanford.edu/tutorial/unsupervised/PCAWhitening/
References
----------
D. Ross, J. Lim, R. Lin, M. Yang. Incremental Learning for Robust Visual
Tracking, International Journal of Computer Vision, Volume 77,
Issue 1-3, pp. 125-141, May 2008.
G. Golub and C. Van Loan. Matrix Computations, Third Edition, Chapter 5,
Section 5.4.4, pp. 252-253.
See also
--------
PCA
RandomizedPCA
KernelPCA
SparsePCA
TruncatedSVD
"""
def __init__(self, n_components=None, whiten=False, copy=True,
batch_size=None):
super(MiniBatchPCA, self).__init__(n_components=n_components,
whiten=whiten, copy=copy, batch_size=batch_size)
# some statistics
self.n_samples_seen_ = 0
self.mean_ = .0
self.var_ = .0
self.components_ = None
# if nb_samples < nb_components, then the mini batch is cached until
# we have enough samples
self._cache_batches = []
self._nb_cached_samples = 0
@property
def is_fitted(self):
return self.components_ is not None
# ==================== Training ==================== #
def fit(self, X, y=None):
"""Fit the model with X, using minibatches of size batch_size.
Parameters
----------
X: array-like, shape (n_samples, n_features)
Training data, where n_samples is the number of samples and
n_features is the number of features.
y: Passthrough for ``Pipeline`` compatibility.
Returns
-------
self: object
Returns the instance itself.
"""
if isinstance(X, Data):
X = X[:]
X = check_array(X, copy=self.copy, dtype=[np.float64, np.float32])
n_samples, n_features = X.shape
if self.batch_size is None:
batch_size = 12 * n_features
else:
batch_size = self.batch_size
for batch in gen_batches(n_samples, batch_size):
x = X[batch]
self.partial_fit(x, check_input=False)
return self
def partial_fit(self, X, y=None, check_input=True):
"""Incremental fit with X. All of X is processed as a single batch.
Parameters
----------
X: array-like, shape (n_samples, n_features)
Training data, where n_samples is the number of samples and
n_features is the number of features.
Returns
-------
self: object
Returns the instance itself.
"""
        # ====== check the samples and caches ====== #
if isinstance(X, Data):
X = X[:]
if check_input:
X = check_array(X, copy=self.copy, dtype=[np.float64, np.float32])
n_samples, n_features = X.shape
# check number of components
if self.n_components is None:
self.n_components_ = n_features
elif not 1 <= self.n_components <= n_features:
raise ValueError("n_components=%r invalid for n_features=%d, need "
"more rows than columns for IncrementalPCA "
"processing" % (self.n_components, n_features))
else:
self.n_components_ = self.n_components
# check the cache
if n_samples < n_features or self._nb_cached_samples > 0:
self._cache_batches.append(X)
self._nb_cached_samples += n_samples
# not enough samples yet
if self._nb_cached_samples < n_features:
return
else: # group mini batch into big batch
X = np.concatenate(self._cache_batches, axis=0)
self._cache_batches = []
self._nb_cached_samples = 0
n_samples = X.shape[0]
# ====== fit the model ====== #
if (self.components_ is not None) and (self.components_.shape[0] !=
self.n_components_):
raise ValueError("Number of input features has changed from %i "
"to %i between calls to partial_fit! Try "
"setting n_components to a fixed value." %
(self.components_.shape[0], self.n_components_))
        # Update stats - they are 0 if this is the first step
col_mean, col_var, n_total_samples = \
_incremental_mean_and_var(X, last_mean=self.mean_,
last_variance=self.var_,
last_sample_count=self.n_samples_seen_)
total_var = np.sum(col_var * n_total_samples)
if total_var == 0: # if variance == 0, make no sense to continue
return self
# Whitening
if self.n_samples_seen_ == 0:
# If it is the first step, simply whiten X
X -= col_mean
else:
col_batch_mean = np.mean(X, axis=0)
X -= col_batch_mean
# Build matrix of combined previous basis and new data
mean_correction = \
np.sqrt((self.n_samples_seen_ * n_samples) /
n_total_samples) * (self.mean_ - col_batch_mean)
X = np.vstack((self.singular_values_.reshape((-1, 1)) *
self.components_, X, mean_correction))
U, S, V = linalg.svd(X, full_matrices=False)
U, V = svd_flip(U, V, u_based_decision=False)
explained_variance = S ** 2 / n_total_samples
explained_variance_ratio = S ** 2 / total_var
self.n_samples_seen_ = n_total_samples
self.components_ = V[:self.n_components_]
self.singular_values_ = S[:self.n_components_]
self.mean_ = col_mean
self.var_ = col_var
self.explained_variance_ = explained_variance[:self.n_components_]
self.explained_variance_ratio_ = \
explained_variance_ratio[:self.n_components_]
if self.n_components_ < n_features:
self.noise_variance_ = \
explained_variance[self.n_components_:].mean()
else:
self.noise_variance_ = 0.
return self
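    # Note on the update above (comment added for clarity, not in the original
    # source): after the first batch the SVD is taken over the vertical stack
    #   [singular_values * components_;  centered new batch;  mean_correction]
    # so the previous basis (weighted by its singular values) and the shift of
    # the running mean are folded into one decomposition. This is what keeps
    # memory bounded to roughly batch_size + n_components rows per step.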
def transform(self, X, n_components=None):
# ====== check number of components ====== #
# specified percentage of explained variance
if n_components is not None:
# percentage of variances
if n_components < 1.:
_ = np.cumsum(self.explained_variance_ratio_)
n_components = (_ > n_components).nonzero()[0][0] + 1
# specific number of components
else:
n_components = int(n_components)
# ====== other info ====== #
n = X.shape[0]
if self.batch_size is None:
batch_size = 12 * len(self.mean_)
else:
batch_size = self.batch_size
# ====== start transforming ====== #
X_transformed = []
for start, end in batching(n=n, batch_size=batch_size):
x = super(MiniBatchPCA, self).transform(X=X[start:end])
if n_components is not None:
x = x[:, :n_components]
X_transformed.append(x)
return np.concatenate(X_transformed, axis=0)
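    # Hedged example for the n_components argument of transform() above
    # (illustrative; ``pca`` and ``X`` are placeholders). A float < 1.0 keeps
    # just enough leading components to reach that fraction of explained
    # variance, while an int keeps exactly that many columns:
    #
    #   Z95 = pca.transform(X, n_components=0.95)  # >= 95% of the variance
    #   Z10 = pca.transform(X, n_components=10)    # first 10 components only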
def invert_transform(self, X):
if isinstance(X, Data):
X = X[:]
return super(MiniBatchPCA, self).inverse_transform(X=X)
def transform_mpi(self, X, keep_order=True, ncpu=4,
n_components=None):
""" Sample as transform but using multiprocessing """
n = X.shape[0]
if self.batch_size is None:
batch_size = 12 * len(self.mean_)
else:
batch_size = self.batch_size
batch_list = [(i, min(i + batch_size, n))
for i in range(0, n + batch_size, batch_size) if i < n]
# ====== run MPI jobs ====== #
def map_func(batch):
start, end = batch
x = super(MiniBatchPCA, self).transform(X=X[start:end])
# doing dim reduction here save a lot of memory for
# inter-processors transfer
if n_components is not None:
x = x[:, :n_components]
# just need to return the start for ordering
yield start, x
mpi = MPI(batch_list, func=map_func,
ncpu=ncpu, batch=1, hwm=ncpu * 12,
backend='python')
# ====== process the return ====== #
X_transformed = []
for start, x in mpi:
X_transformed.append((start, x))
if keep_order:
X_transformed = sorted(X_transformed, key=lambda x: x[0])
X_transformed = np.concatenate([x[-1] for x in X_transformed], axis=0)
return X_transformed
def __str__(self):
if self.is_fitted:
explained_vars = ';'.join([ctext('%.2f' % i, 'cyan')
for i in self.explained_variance_ratio_[:8]])
else:
explained_vars = 0
s = '%s(batch_size=%s, #components=%s, #samples=%s, vars=%s)' % \
(ctext('MiniBatchPCA', 'yellow'),
ctext(self.batch_size, 'cyan'),
ctext(self.n_components, 'cyan'),
ctext(self.n_samples_seen_, 'cyan'),
explained_vars)
return s
| [
"[email protected]"
] | |
d534ae28d3aa0734296825c043a748eb4545a4d6 | 0f0f8b3b027f412930ca1890b0666538358a2807 | /dotop/osv/__init__.py | 8de54bfa5a534d8654b634c43bd9062f8c573ddc | [] | no_license | konsoar/dotop_pos_v11 | 741bd5ca944dfd52eb886cab6f4b17b6d646e131 | 576c860917edd25661a72726d0729c769977f39a | refs/heads/master | 2021-09-06T13:25:34.783729 | 2018-02-07T02:11:12 | 2018-02-07T02:11:12 | 111,168,355 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 112 | py | # -*- coding: utf-8 -*-
# Part of dotop. See LICENSE file for full copyright and licensing details.
import osv
| [
"Administrator@20nuo003-PC"
] | Administrator@20nuo003-PC |
b8db0ef08fd5c0b20c1bdbe3988364316d66243b | 35044ecc79fad5d1ad35a4f713ff22849c17212c | /hw1/hw1_1/part2/main_3.py | d54310ff19a1bb2f192f12e9c432975f5330478e | [] | no_license | pohanchi/MLDS2018SPRING | 8235f9bfe38eb861be24b9cdb2c0a7d30afdea21 | 694e5f07911ba0f780fb01bb47701dac90eeb7d2 | refs/heads/master | 2020-05-09T09:55:38.257016 | 2019-04-12T16:23:17 | 2019-04-12T16:23:17 | 166,774,093 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,511 | py | from tensorflow.examples.tutorials.mnist import input_data
import matplotlib.pyplot as plt
import numpy as np
import pickle
import random
import tensorflow as tf  # explicit import for the tf.* calls below (TF1-style API)
from model_2 import *
def unpickle(file):
with open(file, 'rb') as fo:
dict_ = pickle.load(fo, encoding='bytes')
return dict_
def calculate():
vars = tf.trainable_variables()
for var in vars:
print(var)
all_number=sum([np.prod(var.get_shape()) for var in vars])
print('you use %d parameters' %(all_number))
return
if __name__ == '__main__':
data_list=list()
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
train_img = mnist.train.images
train_label = mnist.train.labels
test_img = mnist.test.images
test_label = mnist.test.labels
index = list(range(len(train_img)))
random.shuffle(index)
# print(data_list[0:3])
epoch = 10
batch_size = 100
num_batch = int(55000 / 100)
CNN = CNN_3()
loss = CNN.loss
learning_rates= [random.uniform(0.001, 0.005),random.uniform(0.00001, 0.00005),random.uniform(0.0001, 0.0005),random.uniform(0.000001, 0.000005)]
for learning_rate in learning_rates:
CNN.learning_rate = learning_rate
Train_step = tf.train.AdamOptimizer(CNN.learning_rate).minimize(loss)
loss_array = []
step = 0
step_array = []
sess = tf.Session()
sess.run(tf.global_variables_initializer())
sess.run(tf.local_variables_initializer())
for i in range(epoch):
random.shuffle(index)
train_img = train_img[index]
train_label= train_label[index]
for j in range(num_batch):
x_data =train_img[j*100:(j+1)*100]
y_data=train_label[j*100:(j+1)*100]
_=sess.run(Train_step,feed_dict={CNN.x:x_data,CNN.y:y_data})
if (j+1) % 55 == 0:
loss_show=sess.run(loss,feed_dict={CNN.x:x_data,CNN.y:y_data})
print("epoch %d num_batch %2d loss = %.5f" %(i,j,loss_show))
loss_array+=[loss_show]
step_array+=[step]
step +=1
plt.plot(step_array,loss_array,label='learning_rate= {}'.format(learning_rate))
sess.close()
plt.yscale('symlog')
plt.title('CNN_3 Loss on training')
plt.legend(loc='upper left')
plt.style.use('ggplot')
plt.savefig('three_hidden_layer_loss_step.png')
plt.show()
| [
"[email protected]"
] | |
ecad6018c8e035a3d71ffae5d67fa4943aa1843e | decefb13f8a603c1f5cc7eb00634b4649915204f | /packages/node-mobile/deps/uv/uv.gyp | a9a3635570161b275e12e9ebd1d75699f599bba9 | [
"Zlib",
"CC0-1.0",
"ISC",
"Apache-2.0",
"LicenseRef-scancode-public-domain",
"ICU",
"MIT",
"LicenseRef-scancode-public-domain-disclaimer",
"Artistic-2.0",
"BSD-3-Clause",
"NTP",
"LicenseRef-scancode-unknown-license-reference",
"BSD-2-Clause",
"LicenseRef-scancode-openssl",
"LicenseRef-scancode-unicode",
"NAIST-2003",
"LicenseRef-scancode-free-unknown",
"CC-BY-4.0"
] | permissive | open-pwa/open-pwa | f092b377dc6cb04123a16ef96811ad09a9956c26 | 4c88c8520b4f6e7af8701393fd2cedbe1b209e8f | refs/heads/master | 2022-05-28T22:05:19.514921 | 2022-05-20T07:27:10 | 2022-05-20T07:27:10 | 247,925,596 | 24 | 1 | Apache-2.0 | 2021-08-10T07:38:42 | 2020-03-17T09:13:00 | C++ | UTF-8 | Python | false | false | 10,599 | gyp | {
'variables': {
'conditions': [
['OS=="win"', {
'shared_unix_defines': [ ],
}, 'OS=="android" and target_arch in ("arm","ia32")', {
# Android on API < 24 will miss function definitions for
#_FILE_OFFSET_BITS=64
'shared_unix_defines': [
'_LARGEFILE_SOURCE',
],
}, {
'shared_unix_defines': [
'_LARGEFILE_SOURCE',
'_FILE_OFFSET_BITS=64',
],
}],
['OS in "mac ios"', {
'shared_mac_defines': [ '_DARWIN_USE_64_BIT_INODE=1' ],
}, {
'shared_mac_defines': [ ],
}],
['OS=="zos"', {
'shared_zos_defines': [
'_UNIX03_THREADS',
'_UNIX03_SOURCE',
'_UNIX03_WITHDRAWN',
'_OPEN_SYS_IF_EXT',
'_OPEN_SYS_SOCK_EXT3',
'_OPEN_SYS_SOCK_IPV6',
'_OPEN_MSGQ_EXT',
'_XOPEN_SOURCE_EXTENDED',
'_ALL_SOURCE',
'_LARGE_TIME_API',
'_OPEN_SYS_FILE_EXT',
'_AE_BIMODAL',
'PATH_MAX=255'
],
}, {
'shared_zos_defines': [ ],
}],
],
},
'targets': [
{
'target_name': 'libuv',
'type': '<(uv_library)',
'include_dirs': [
'include',
'src/',
],
'defines': [
'<@(shared_mac_defines)',
'<@(shared_unix_defines)',
'<@(shared_zos_defines)',
],
'direct_dependent_settings': {
'defines': [
'<@(shared_mac_defines)',
'<@(shared_unix_defines)',
'<@(shared_zos_defines)',
],
'include_dirs': [ 'include' ],
'conditions': [
['OS == "linux"', {
'defines': [ '_POSIX_C_SOURCE=200112' ],
}],
],
},
'sources': [
'common.gypi',
'include/uv.h',
'include/uv/tree.h',
'include/uv/errno.h',
'include/uv/threadpool.h',
'include/uv/version.h',
'src/fs-poll.c',
'src/heap-inl.h',
'src/idna.c',
'src/idna.h',
'src/inet.c',
'src/queue.h',
'src/random.c',
'src/strscpy.c',
'src/strscpy.h',
'src/threadpool.c',
'src/timer.c',
'src/uv-data-getter-setters.c',
'src/uv-common.c',
'src/uv-common.h',
'src/version.c'
],
'xcode_settings': {
'GCC_SYMBOLS_PRIVATE_EXTERN': 'YES', # -fvisibility=hidden
'WARNING_CFLAGS': [
'-Wall',
'-Wextra',
'-Wno-unused-parameter',
'-Wstrict-prototypes',
],
'OTHER_CFLAGS': [ '-g', '--std=gnu89' ],
},
'conditions': [
[ 'OS=="win"', {
'defines': [
'_WIN32_WINNT=0x0600',
'_GNU_SOURCE',
],
'sources': [
'include/uv/win.h',
'src/win/async.c',
'src/win/atomicops-inl.h',
'src/win/core.c',
'src/win/detect-wakeup.c',
'src/win/dl.c',
'src/win/error.c',
'src/win/fs.c',
'src/win/fs-event.c',
'src/win/getaddrinfo.c',
'src/win/getnameinfo.c',
'src/win/handle.c',
'src/win/handle-inl.h',
'src/win/internal.h',
'src/win/loop-watcher.c',
'src/win/pipe.c',
'src/win/thread.c',
'src/win/poll.c',
'src/win/process.c',
'src/win/process-stdio.c',
'src/win/req-inl.h',
'src/win/signal.c',
'src/win/snprintf.c',
'src/win/stream.c',
'src/win/stream-inl.h',
'src/win/tcp.c',
'src/win/tty.c',
'src/win/udp.c',
'src/win/util.c',
'src/win/winapi.c',
'src/win/winapi.h',
'src/win/winsock.c',
'src/win/winsock.h',
],
'link_settings': {
'libraries': [
'-ladvapi32',
'-liphlpapi',
'-lpsapi',
'-lshell32',
'-luser32',
'-luserenv',
'-lws2_32'
],
},
}, { # Not Windows i.e. POSIX
'sources': [
'include/uv/unix.h',
'include/uv/linux.h',
'include/uv/sunos.h',
'include/uv/darwin.h',
'include/uv/bsd.h',
'include/uv/aix.h',
'src/unix/async.c',
'src/unix/atomic-ops.h',
'src/unix/core.c',
'src/unix/dl.c',
'src/unix/fs.c',
'src/unix/getaddrinfo.c',
'src/unix/getnameinfo.c',
'src/unix/internal.h',
'src/unix/loop.c',
'src/unix/loop-watcher.c',
'src/unix/pipe.c',
'src/unix/poll.c',
'src/unix/process.c',
'src/unix/random-devurandom.c',
'src/unix/signal.c',
'src/unix/spinlock.h',
'src/unix/stream.c',
'src/unix/tcp.c',
'src/unix/thread.c',
'src/unix/tty.c',
'src/unix/udp.c',
],
'link_settings': {
'libraries': [ '-lm' ],
'conditions': [
['OS=="solaris"', {
'ldflags': [ '-pthreads' ],
}],
[ 'OS=="zos" and uv_library=="shared_library"', {
'ldflags': [ '-Wl,DLL' ],
}],
['OS != "solaris" and OS != "android" and OS != "zos"', {
'ldflags': [ '-pthread' ],
}],
],
},
'conditions': [
['uv_library=="shared_library"', {
'conditions': [
['OS=="zos"', {
'cflags': [ '-qexportall' ],
}, {
'cflags': [ '-fPIC' ],
}],
],
}],
['uv_library=="shared_library" and OS!="mac" and OS!="zos"', {
# This will cause gyp to set soname
# Must correspond with UV_VERSION_MAJOR
# in include/uv/version.h
'product_extension': 'so.1',
}],
],
}],
[ 'OS in "linux mac ios android zos"', {
'sources': [ 'src/unix/proctitle.c' ],
}],
[ 'OS != "zos"', {
'cflags': [
'-fvisibility=hidden',
'-g',
'--std=gnu89',
'-Wall',
'-Wextra',
'-Wno-unused-parameter',
'-Wstrict-prototypes',
],
}],
[ 'OS in "mac ios"', {
'sources': [
'src/unix/darwin.c',
'src/unix/fsevents.c',
'src/unix/darwin-proctitle.c',
'src/unix/random-getentropy.c',
],
'defines': [
'_DARWIN_USE_64_BIT_INODE=1',
'_DARWIN_UNLIMITED_SELECT=1',
]
}],
[ 'OS=="linux"', {
'defines': [ '_GNU_SOURCE' ],
'sources': [
'src/unix/linux-core.c',
'src/unix/linux-inotify.c',
'src/unix/linux-syscalls.c',
'src/unix/linux-syscalls.h',
'src/unix/procfs-exepath.c',
'src/unix/random-getrandom.c',
'src/unix/random-sysctl-linux.c',
],
'link_settings': {
'libraries': [ '-ldl', '-lrt' ],
},
}],
[ 'OS=="android"', {
'sources': [
'src/unix/linux-core.c',
'src/unix/linux-inotify.c',
'src/unix/linux-syscalls.c',
'src/unix/linux-syscalls.h',
'src/unix/pthread-fixes.c',
'src/unix/android-ifaddrs.c',
'src/unix/procfs-exepath.c',
'src/unix/random-getrandom.c',
'src/unix/random-sysctl-linux.c',
],
'link_settings': {
'libraries': [ '-ldl' ],
},
}],
[ 'OS=="solaris"', {
'sources': [
'src/unix/no-proctitle.c',
'src/unix/sunos.c',
],
'defines': [
'__EXTENSIONS__',
'_XOPEN_SOURCE=500',
],
'link_settings': {
'libraries': [
'-lkstat',
'-lnsl',
'-lsendfile',
'-lsocket',
],
},
}],
[ 'OS=="aix"', {
'variables': {
'os_name': '<!(uname -s)',
},
'sources': [
'src/unix/aix-common.c',
],
'defines': [
'_ALL_SOURCE',
'_XOPEN_SOURCE=500',
'_LINUX_SOURCE_COMPAT',
'_THREAD_SAFE',
],
'conditions': [
[ '"<(os_name)"=="OS400"', {
'sources': [
'src/unix/ibmi.c',
'src/unix/posix-poll.c',
'src/unix/no-fsevents.c',
'src/unix/no-proctitle.c',
],
}, {
'sources': [
'src/unix/aix.c'
],
'defines': [
'HAVE_SYS_AHAFS_EVPRODS_H'
],
'link_settings': {
'libraries': [
'-lperfstat',
],
},
}],
]
}],
[ 'OS=="freebsd" or OS=="dragonflybsd"', {
'sources': [ 'src/unix/freebsd.c' ],
}],
[ 'OS=="freebsd"', {
'sources': [ 'src/unix/random-getrandom.c' ],
}],
[ 'OS=="openbsd"', {
'sources': [
'src/unix/openbsd.c',
'src/unix/random-getentropy.c',
],
}],
[ 'OS=="netbsd"', {
'link_settings': {
'libraries': [ '-lkvm' ],
},
'sources': [ 'src/unix/netbsd.c' ],
}],
[ 'OS in "freebsd dragonflybsd openbsd netbsd".split()', {
'sources': [
'src/unix/posix-hrtime.c',
'src/unix/bsd-proctitle.c'
],
}],
[ 'OS in "ios mac freebsd dragonflybsd openbsd netbsd".split()', {
'sources': [
'src/unix/bsd-ifaddrs.c',
'src/unix/kqueue.c',
],
}],
['uv_library=="shared_library"', {
'defines': [ 'BUILDING_UV_SHARED=1' ]
}],
['OS=="zos"', {
'sources': [
'src/unix/pthread-fixes.c',
'src/unix/os390.c',
'src/unix/os390-syscalls.c'
]
}],
]
},
]
}
| [
"[email protected]"
] | |
9f5d7e03d338bcee649e0798c30ea3227c1c60cc | b15a9d9c7374c4a1fa5ec3ef63603a8c57e8681f | /Design-Patterns-Python/bridge/circle.py | cd3638d219be793d909728245bf9ccdec57095a8 | [] | no_license | gohils/zemr_notebook | 3f7490ef7a2559655746c3e2e0dbfb835a83891e | 00d53cea9970df44160c51e6ad2bdeadfae2c91f | refs/heads/master | 2023-08-04T14:32:35.428016 | 2023-07-20T11:51:08 | 2023-07-20T11:51:08 | 222,027,451 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 325 | py | # pylint: disable=too-few-public-methods
"A Circle Abstraction"
from interface_shape import IShape
class Circle(IShape):
"The Circle is a Refined Abstraction"
def __init__(self, implementer):
self.implementer = implementer()
def draw(self):
self.implementer.draw_implementation()
| [
"[email protected]"
] | |
7098d0bd349101c9033469adeb130241fc910b6c | f88f900c0384f6da82eeb749371ad44115527700 | /course-book/04-basic-comp/0410-img-copy2.py | 2688bf316082da1a8c629fe6e1a12eaf1bf4b44d | [] | no_license | aaron-kr/learning-opencv | eff382e8f0c822400f765451d57b192a63cd1b74 | 158239f0140569aec519fc1fbf255c54ef2567d2 | refs/heads/main | 2023-08-21T11:02:49.775425 | 2021-10-27T00:04:01 | 2021-10-27T00:04:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 429 | py | # 0410.py
import cv2
import numpy as np
src = cv2.imread('../../img/spirit-week.jpg', cv2.IMREAD_GRAYSCALE)
shape = src.shape[0], src.shape[1], 3
dst = np.zeros(shape, dtype = np.uint8)
# dst[:,:,0] = src # B - Blue channel
# dst[:,:,1] = src # G - Green channel
dst[:,:,2] = src # R - Red channel
dst[100:400, 200:300, :] = [255,255,255]
cv2.imshow('src', src)
cv2.imshow('dst', dst)
cv2.waitKey()
cv2.destroyAllWindows() | [
"[email protected]"
] | |
76f963318fc279d6b0842011b2404e059f5081d7 | fb1e852da0a026fb59c8cb24aeb40e62005501f1 | /simlm/misc/compute_metrics_marco.py | 02ac181cd1a10310b7a44ccad6784c96a3b8f2b3 | [
"LGPL-2.1-or-later",
"LicenseRef-scancode-free-unknown",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | microsoft/unilm | 134aa44867c5ed36222220d3f4fd9616d02db573 | b60c741f746877293bb85eed6806736fc8fa0ffd | refs/heads/master | 2023-08-31T04:09:05.779071 | 2023-08-29T14:07:57 | 2023-08-29T14:07:57 | 198,350,484 | 15,313 | 2,192 | MIT | 2023-08-19T11:33:20 | 2019-07-23T04:15:28 | Python | UTF-8 | Python | false | false | 1,162 | py | import json
import sys
import argparse
sys.path.insert(0, './src')
from logger_config import logger
from metrics import compute_mrr, trec_eval
from utils import save_json_to_file
from data_utils import load_qrels, load_msmarco_predictions
parser = argparse.ArgumentParser(description='compute metrics for ms-marco predictions')
parser.add_argument('--in-path', default='', type=str, metavar='N',
help='path to predictions in msmarco output format')
parser.add_argument('--qrels', default='./data/msmarco/dev_qrels.txt', type=str, metavar='N',
help='path to qrels')
args = parser.parse_args()
logger.info('Args={}'.format(json.dumps(args.__dict__, ensure_ascii=False, indent=4)))
def main():
qrels = load_qrels(path=args.qrels)
predictions = load_msmarco_predictions(args.in_path)
all_metrics = trec_eval(qrels=qrels, predictions=predictions)
all_metrics['mrr'] = compute_mrr(qrels=qrels, predictions=predictions)
logger.info(json.dumps(all_metrics, ensure_ascii=False, indent=4))
save_json_to_file(all_metrics, '{}.metrics.json'.format(args.in_path))
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
e8034ae0e9e03f800a93d0b36f2691dfbdee9972 | f34e34e4a1fc4b606781633370ff5b9493446b89 | /HackerRank/Problems/HR python staircase.py | fffe84b83deb2d0878b9309965bbbe9c1da7a0fa | [] | no_license | YeasirArafatRatul/problem_solving | b44deddb63dc9ff5ec285e03c13720523473906d | 98267865a90483489f792bdb448319dbf0cc9a81 | refs/heads/master | 2023-02-13T17:59:35.863750 | 2021-01-05T04:01:13 | 2021-01-05T04:01:13 | 219,266,174 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 376 | py | def staircase(n):
for i in range(1,n+1):
#print the #, i times. so it will increase with each iteration
print(('#'*i).rjust(n,' '))
n = int(input())
staircase(n)
# str.rjust(width[, fillchar])
# Parameters:
#   width    - the total string length after padding
#   fillchar - the filler character; default is a space
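# Example (illustrative): for n = 4 the loop prints
#   ('#'*1).rjust(4, ' ')  ->  '   #'
#   ('#'*2).rjust(4, ' ')  ->  '  ##'
#   ('#'*3).rjust(4, ' ')  ->  ' ###'
#   ('#'*4).rjust(4, ' ')  ->  '####'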
| [
"[email protected]"
] | |
7c60be6add65fa3c6fee396f4fe5cbaf2ea6b95d | 94f8d393536a38136420b299555a47989cb95e06 | /shengsai/shengsailianxi/shengsailianxi/run.py | 772b13443311bf140c5a90309ca8d32b3d97648c | [] | no_license | nolan0536/weizhiBigDataPython | 9164ddc50cd0b850ec7536270d690dd0848b9f06 | ef4ab9d749159166fcfe48883d680ac058b12425 | refs/heads/main | 2023-04-21T21:15:11.235258 | 2021-05-08T01:28:51 | 2021-05-08T01:28:51 | 361,971,771 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 77 | py | from scrapy import cmdline
cmdline.execute("scrapy crawl xiaolianxi".split()) | [
"[email protected]"
] | |
dc5dbbd6e1de02ad6f948712a49636c56ba0f5a5 | a5af610fa01a2a77a99f094ceb6fa882df783139 | /core/deps/tornado/web.py | af5ea5127fe268df3a90ea1e7bd7cf616e22e112 | [
"MIT"
] | permissive | imclab/tilde | 87ee93449a18249a58712527b5164c506b2c7df9 | a9f9fec7be08d5a1647973c6b2b9a9816b347041 | refs/heads/master | 2021-01-24T00:53:21.601833 | 2014-09-02T22:55:27 | 2014-09-02T22:55:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 90,332 | py | #!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""``tornado.web`` provides a simple web framework with asynchronous
features that allow it to scale to large numbers of open connections,
making it ideal for `long polling
<http://en.wikipedia.org/wiki/Push_technology#Long_polling>`_.
Here is a simple "Hello, world" example app::
import tornado.ioloop
import tornado.web
class MainHandler(tornado.web.RequestHandler):
def get(self):
self.write("Hello, world")
if __name__ == "__main__":
application = tornado.web.Application([
(r"/", MainHandler),
])
application.listen(8888)
tornado.ioloop.IOLoop.instance().start()
See the :doc:`Tornado overview <overview>` for more details and a good getting
started guide.
Thread-safety notes
-------------------
In general, methods on `RequestHandler` and elsewhere in Tornado are
not thread-safe. In particular, methods such as
`~RequestHandler.write()`, `~RequestHandler.finish()`, and
`~RequestHandler.flush()` must only be called from the main thread. If
you use multiple threads it is important to use `.IOLoop.add_callback`
to transfer control back to the main thread before finishing the
request.
"""
from __future__ import absolute_import, division, print_function, with_statement
import base64
import binascii
import datetime
import email.utils
import functools
import gzip
import hashlib
import hmac
import mimetypes
import numbers
import os.path
import re
import stat
import sys
import threading
import time
import tornado
import traceback
import types
import uuid
from tornado.concurrent import Future
from tornado import escape
from tornado import httputil
from tornado import locale
from tornado.log import access_log, app_log, gen_log
from tornado import stack_context
from tornado import template
from tornado.escape import utf8, _unicode
from tornado.util import bytes_type, import_object, ObjectDict, raise_exc_info, unicode_type
try:
from io import BytesIO # python 3
except ImportError:
from cStringIO import StringIO as BytesIO # python 2
try:
import Cookie # py2
except ImportError:
import http.cookies as Cookie # py3
try:
import urlparse # py2
except ImportError:
import urllib.parse as urlparse # py3
try:
from urllib import urlencode # py2
except ImportError:
from urllib.parse import urlencode # py3
class RequestHandler(object):
"""Subclass this class and define `get()` or `post()` to make a handler.
If you want to support more methods than the standard GET/HEAD/POST, you
should override the class variable ``SUPPORTED_METHODS`` in your
`RequestHandler` subclass.
"""
SUPPORTED_METHODS = ("GET", "HEAD", "POST", "DELETE", "PATCH", "PUT",
"OPTIONS")
_template_loaders = {} # {path: template.BaseLoader}
_template_loader_lock = threading.Lock()
_remove_control_chars_regex = re.compile(r"[\x00-\x08\x0e-\x1f]")
def __init__(self, application, request, **kwargs):
super(RequestHandler, self).__init__()
self.application = application
self.request = request
self._headers_written = False
self._finished = False
self._auto_finish = True
self._transforms = None # will be set in _execute
self.path_args = None
self.path_kwargs = None
self.ui = ObjectDict((n, self._ui_method(m)) for n, m in
application.ui_methods.items())
# UIModules are available as both `modules` and `_tt_modules` in the
# template namespace. Historically only `modules` was available
# but could be clobbered by user additions to the namespace.
# The template {% module %} directive looks in `_tt_modules` to avoid
# possible conflicts.
self.ui["_tt_modules"] = _UIModuleNamespace(self,
application.ui_modules)
self.ui["modules"] = self.ui["_tt_modules"]
self.clear()
# Check since connection is not available in WSGI
if getattr(self.request, "connection", None):
self.request.connection.set_close_callback(
self.on_connection_close)
self.initialize(**kwargs)
def initialize(self):
"""Hook for subclass initialization.
A dictionary passed as the third argument of a url spec will be
supplied as keyword arguments to initialize().
Example::
class ProfileHandler(RequestHandler):
def initialize(self, database):
self.database = database
def get(self, username):
...
app = Application([
(r'/user/(.*)', ProfileHandler, dict(database=database)),
])
"""
pass
@property
def settings(self):
"""An alias for `self.application.settings <Application.settings>`."""
return self.application.settings
def head(self, *args, **kwargs):
raise HTTPError(405)
def get(self, *args, **kwargs):
raise HTTPError(405)
def post(self, *args, **kwargs):
raise HTTPError(405)
def delete(self, *args, **kwargs):
raise HTTPError(405)
def patch(self, *args, **kwargs):
raise HTTPError(405)
def put(self, *args, **kwargs):
raise HTTPError(405)
def options(self, *args, **kwargs):
raise HTTPError(405)
def prepare(self):
"""Called at the beginning of a request before `get`/`post`/etc.
Override this method to perform common initialization regardless
of the request method.
Asynchronous support: Decorate this method with `.gen.coroutine`
or `.return_future` to make it asynchronous (the
`asynchronous` decorator cannot be used on `prepare`).
If this method returns a `.Future` execution will not proceed
until the `.Future` is done.
"""
pass
def on_finish(self):
"""Called after the end of a request.
Override this method to perform cleanup, logging, etc.
This method is a counterpart to `prepare`. ``on_finish`` may
not produce any output, as it is called after the response
has been sent to the client.
"""
pass
def on_connection_close(self):
"""Called in async handlers if the client closed the connection.
Override this to clean up resources associated with
long-lived connections. Note that this method is called only if
the connection was closed during asynchronous processing; if you
need to do cleanup after every request override `on_finish`
instead.
Proxies may keep a connection open for a time (perhaps
indefinitely) after the client has gone away, so this method
may not be called promptly after the end user closes their
connection.
"""
pass
def clear(self):
"""Resets all headers and content for this response."""
self._headers = httputil.HTTPHeaders({
"Server": "TornadoServer/%s" % tornado.version,
"Content-Type": "text/html; charset=UTF-8",
"Date": httputil.format_timestamp(time.gmtime()),
})
self.set_default_headers()
if (not self.request.supports_http_1_1() and
getattr(self.request, 'connection', None) and
not self.request.connection.no_keep_alive):
conn_header = self.request.headers.get("Connection")
if conn_header and (conn_header.lower() == "keep-alive"):
self.set_header("Connection", "Keep-Alive")
self._write_buffer = []
self._status_code = 200
self._reason = httputil.responses[200]
def set_default_headers(self):
"""Override this to set HTTP headers at the beginning of the request.
For example, this is the place to set a custom ``Server`` header.
Note that setting such headers in the normal flow of request
processing may not do what you want, since headers may be reset
during error handling.
"""
pass
def set_status(self, status_code, reason=None):
"""Sets the status code for our response.
:arg int status_code: Response status code. If ``reason`` is ``None``,
it must be present in `httplib.responses <http.client.responses>`.
:arg string reason: Human-readable reason phrase describing the status
code. If ``None``, it will be filled in from
`httplib.responses <http.client.responses>`.
"""
self._status_code = status_code
if reason is not None:
self._reason = escape.native_str(reason)
else:
try:
self._reason = httputil.responses[status_code]
except KeyError:
raise ValueError("unknown status code %d", status_code)
def get_status(self):
"""Returns the status code for our response."""
return self._status_code
def set_header(self, name, value):
"""Sets the given response header name and value.
If a datetime is given, we automatically format it according to the
HTTP specification. If the value is not a string, we convert it to
a string. All header values are then encoded as UTF-8.
"""
self._headers[name] = self._convert_header_value(value)
def add_header(self, name, value):
"""Adds the given response header and value.
Unlike `set_header`, `add_header` may be called multiple times
to return multiple values for the same header.
"""
self._headers.add(name, self._convert_header_value(value))
def clear_header(self, name):
"""Clears an outgoing header, undoing a previous `set_header` call.
Note that this method does not apply to multi-valued headers
set by `add_header`.
"""
if name in self._headers:
del self._headers[name]
_INVALID_HEADER_CHAR_RE = re.compile(br"[\x00-\x1f]")
def _convert_header_value(self, value):
if isinstance(value, bytes_type):
pass
elif isinstance(value, unicode_type):
value = value.encode('utf-8')
elif isinstance(value, numbers.Integral):
# return immediately since we know the converted value will be safe
return str(value)
elif isinstance(value, datetime.datetime):
return httputil.format_timestamp(value)
else:
raise TypeError("Unsupported header value %r" % value)
# If \n is allowed into the header, it is possible to inject
# additional headers or split the request. Also cap length to
# prevent obviously erroneous values.
if (len(value) > 4000 or
RequestHandler._INVALID_HEADER_CHAR_RE.search(value)):
raise ValueError("Unsafe header value %r", value)
return value
_ARG_DEFAULT = []
def get_argument(self, name, default=_ARG_DEFAULT, strip=True):
"""Returns the value of the argument with the given name.
If default is not provided, the argument is considered to be
required, and we raise a `MissingArgumentError` if it is missing.
If the argument appears in the url more than once, we return the
last value.
The returned value is always unicode.
"""
args = self.get_arguments(name, strip=strip)
if not args:
if default is self._ARG_DEFAULT:
raise MissingArgumentError(name)
return default
return args[-1]
def get_arguments(self, name, strip=True):
"""Returns a list of the arguments with the given name.
If the argument is not present, returns an empty list.
The returned values are always unicode.
"""
values = []
for v in self.request.arguments.get(name, []):
v = self.decode_argument(v, name=name)
if isinstance(v, unicode_type):
# Get rid of any weird control chars (unless decoding gave
# us bytes, in which case leave it alone)
v = RequestHandler._remove_control_chars_regex.sub(" ", v)
if strip:
v = v.strip()
values.append(v)
return values
def decode_argument(self, value, name=None):
"""Decodes an argument from the request.
The argument has been percent-decoded and is now a byte string.
By default, this method decodes the argument as utf-8 and returns
a unicode string, but this may be overridden in subclasses.
This method is used as a filter for both `get_argument()` and for
values extracted from the url and passed to `get()`/`post()`/etc.
The name of the argument is provided if known, but may be None
(e.g. for unnamed groups in the url regex).
"""
return _unicode(value)
@property
def cookies(self):
"""An alias for `self.request.cookies <.httpserver.HTTPRequest.cookies>`."""
return self.request.cookies
def get_cookie(self, name, default=None):
"""Gets the value of the cookie with the given name, else default."""
if self.request.cookies is not None and name in self.request.cookies:
return self.request.cookies[name].value
return default
def set_cookie(self, name, value, domain=None, expires=None, path="/",
expires_days=None, **kwargs):
"""Sets the given cookie name/value with the given options.
Additional keyword arguments are set on the Cookie.Morsel
directly.
See http://docs.python.org/library/cookie.html#morsel-objects
for available attributes.
"""
# The cookie library only accepts type str, in both python 2 and 3
name = escape.native_str(name)
value = escape.native_str(value)
if re.search(r"[\x00-\x20]", name + value):
# Don't let us accidentally inject bad stuff
raise ValueError("Invalid cookie %r: %r" % (name, value))
if not hasattr(self, "_new_cookie"):
self._new_cookie = Cookie.SimpleCookie()
if name in self._new_cookie:
del self._new_cookie[name]
self._new_cookie[name] = value
morsel = self._new_cookie[name]
if domain:
morsel["domain"] = domain
if expires_days is not None and not expires:
expires = datetime.datetime.utcnow() + datetime.timedelta(
days=expires_days)
if expires:
morsel["expires"] = httputil.format_timestamp(expires)
if path:
morsel["path"] = path
for k, v in kwargs.items():
if k == 'max_age':
k = 'max-age'
morsel[k] = v
def clear_cookie(self, name, path="/", domain=None):
"""Deletes the cookie with the given name."""
expires = datetime.datetime.utcnow() - datetime.timedelta(days=365)
self.set_cookie(name, value="", path=path, expires=expires,
domain=domain)
def clear_all_cookies(self):
"""Deletes all the cookies the user sent with this request."""
for name in self.request.cookies:
self.clear_cookie(name)
def set_secure_cookie(self, name, value, expires_days=30, **kwargs):
"""Signs and timestamps a cookie so it cannot be forged.
You must specify the ``cookie_secret`` setting in your Application
to use this method. It should be a long, random sequence of bytes
to be used as the HMAC secret for the signature.
To read a cookie set with this method, use `get_secure_cookie()`.
Note that the ``expires_days`` parameter sets the lifetime of the
cookie in the browser, but is independent of the ``max_age_days``
parameter to `get_secure_cookie`.
Secure cookies may contain arbitrary byte values, not just unicode
strings (unlike regular cookies)
"""
self.set_cookie(name, self.create_signed_value(name, value),
expires_days=expires_days, **kwargs)
def create_signed_value(self, name, value):
"""Signs and timestamps a string so it cannot be forged.
Normally used via set_secure_cookie, but provided as a separate
method for non-cookie uses. To decode a value not stored
as a cookie use the optional value argument to get_secure_cookie.
"""
self.require_setting("cookie_secret", "secure cookies")
return create_signed_value(self.application.settings["cookie_secret"],
name, value)
def get_secure_cookie(self, name, value=None, max_age_days=31):
"""Returns the given signed cookie if it validates, or None.
The decoded cookie value is returned as a byte string (unlike
`get_cookie`).
"""
self.require_setting("cookie_secret", "secure cookies")
if value is None:
value = self.get_cookie(name)
return decode_signed_value(self.application.settings["cookie_secret"],
name, value, max_age_days=max_age_days)
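    # Hedged usage sketch for the secure-cookie helpers above (illustrative
    # only; it assumes the application was created with a cookie_secret
    # setting):
    #
    #   class LoginHandler(RequestHandler):
    #       def post(self):
    #           self.set_secure_cookie("user", self.get_argument("name"))
    #
    #   class MainHandler(RequestHandler):
    #       def get(self):
    #           user = self.get_secure_cookie("user")  # bytes or None
    #
    # The value is signed and timestamped, not encrypted: the client can read
    # it but cannot forge or alter it without the secret.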
def redirect(self, url, permanent=False, status=None):
"""Sends a redirect to the given (optionally relative) URL.
If the ``status`` argument is specified, that value is used as the
HTTP status code; otherwise either 301 (permanent) or 302
(temporary) is chosen based on the ``permanent`` argument.
The default is 302 (temporary).
"""
if self._headers_written:
raise Exception("Cannot redirect after headers have been written")
if status is None:
status = 301 if permanent else 302
else:
assert isinstance(status, int) and 300 <= status <= 399
self.set_status(status)
self.set_header("Location", urlparse.urljoin(utf8(self.request.uri),
utf8(url)))
self.finish()
def write(self, chunk):
"""Writes the given chunk to the output buffer.
To write the output to the network, use the flush() method below.
If the given chunk is a dictionary, we write it as JSON and set
the Content-Type of the response to be ``application/json``.
(if you want to send JSON as a different ``Content-Type``, call
set_header *after* calling write()).
Note that lists are not converted to JSON because of a potential
cross-site security vulnerability. All JSON output should be
wrapped in a dictionary. More details at
http://haacked.com/archive/2008/11/20/anatomy-of-a-subtle-json-vulnerability.aspx
"""
if self._finished:
raise RuntimeError("Cannot write() after finish(). May be caused "
"by using async operations without the "
"@asynchronous decorator.")
if isinstance(chunk, dict):
chunk = escape.json_encode(chunk)
self.set_header("Content-Type", "application/json; charset=UTF-8")
chunk = utf8(chunk)
self._write_buffer.append(chunk)
def render(self, template_name, **kwargs):
"""Renders the template with the given arguments as the response."""
html = self.render_string(template_name, **kwargs)
# Insert the additional JS and CSS added by the modules on the page
js_embed = []
js_files = []
css_embed = []
css_files = []
html_heads = []
html_bodies = []
for module in getattr(self, "_active_modules", {}).values():
embed_part = module.embedded_javascript()
if embed_part:
js_embed.append(utf8(embed_part))
file_part = module.javascript_files()
if file_part:
if isinstance(file_part, (unicode_type, bytes_type)):
js_files.append(file_part)
else:
js_files.extend(file_part)
embed_part = module.embedded_css()
if embed_part:
css_embed.append(utf8(embed_part))
file_part = module.css_files()
if file_part:
if isinstance(file_part, (unicode_type, bytes_type)):
css_files.append(file_part)
else:
css_files.extend(file_part)
head_part = module.html_head()
if head_part:
html_heads.append(utf8(head_part))
body_part = module.html_body()
if body_part:
html_bodies.append(utf8(body_part))
def is_absolute(path):
return any(path.startswith(x) for x in ["/", "http:", "https:"])
if js_files:
# Maintain order of JavaScript files given by modules
paths = []
unique_paths = set()
for path in js_files:
if not is_absolute(path):
path = self.static_url(path)
if path not in unique_paths:
paths.append(path)
unique_paths.add(path)
js = ''.join('<script src="' + escape.xhtml_escape(p) +
'" type="text/javascript"></script>'
for p in paths)
sloc = html.rindex(b'</body>')
html = html[:sloc] + utf8(js) + b'\n' + html[sloc:]
if js_embed:
js = b'<script type="text/javascript">\n//<![CDATA[\n' + \
b'\n'.join(js_embed) + b'\n//]]>\n</script>'
sloc = html.rindex(b'</body>')
html = html[:sloc] + js + b'\n' + html[sloc:]
if css_files:
paths = []
unique_paths = set()
for path in css_files:
if not is_absolute(path):
path = self.static_url(path)
if path not in unique_paths:
paths.append(path)
unique_paths.add(path)
css = ''.join('<link href="' + escape.xhtml_escape(p) + '" '
'type="text/css" rel="stylesheet"/>'
for p in paths)
hloc = html.index(b'</head>')
html = html[:hloc] + utf8(css) + b'\n' + html[hloc:]
if css_embed:
css = b'<style type="text/css">\n' + b'\n'.join(css_embed) + \
b'\n</style>'
hloc = html.index(b'</head>')
html = html[:hloc] + css + b'\n' + html[hloc:]
if html_heads:
hloc = html.index(b'</head>')
html = html[:hloc] + b''.join(html_heads) + b'\n' + html[hloc:]
if html_bodies:
hloc = html.index(b'</body>')
html = html[:hloc] + b''.join(html_bodies) + b'\n' + html[hloc:]
self.finish(html)
def render_string(self, template_name, **kwargs):
"""Generate the given template with the given arguments.
We return the generated byte string (in utf8). To generate and
write a template as a response, use render() above.
"""
# If no template_path is specified, use the path of the calling file
template_path = self.get_template_path()
if not template_path:
frame = sys._getframe(0)
web_file = frame.f_code.co_filename
while frame.f_code.co_filename == web_file:
frame = frame.f_back
template_path = os.path.dirname(frame.f_code.co_filename)
with RequestHandler._template_loader_lock:
if template_path not in RequestHandler._template_loaders:
loader = self.create_template_loader(template_path)
RequestHandler._template_loaders[template_path] = loader
else:
loader = RequestHandler._template_loaders[template_path]
t = loader.load(template_name)
namespace = self.get_template_namespace()
namespace.update(kwargs)
return t.generate(**namespace)
def get_template_namespace(self):
"""Returns a dictionary to be used as the default template namespace.
May be overridden by subclasses to add or modify values.
The results of this method will be combined with additional
defaults in the `tornado.template` module and keyword arguments
to `render` or `render_string`.
"""
namespace = dict(
handler=self,
request=self.request,
current_user=self.current_user,
locale=self.locale,
_=self.locale.translate,
static_url=self.static_url,
xsrf_form_html=self.xsrf_form_html,
reverse_url=self.reverse_url
)
namespace.update(self.ui)
return namespace
def create_template_loader(self, template_path):
"""Returns a new template loader for the given path.
May be overridden by subclasses. By default returns a
directory-based loader on the given path, using the
``autoescape`` application setting. If a ``template_loader``
application setting is supplied, uses that instead.
"""
settings = self.application.settings
if "template_loader" in settings:
return settings["template_loader"]
kwargs = {}
if "autoescape" in settings:
# autoescape=None means "no escaping", so we have to be sure
# to only pass this kwarg if the user asked for it.
kwargs["autoescape"] = settings["autoescape"]
return template.Loader(template_path, **kwargs)
def flush(self, include_footers=False, callback=None):
"""Flushes the current output buffer to the network.
The ``callback`` argument, if given, can be used for flow control:
it will be run when all flushed data has been written to the socket.
Note that only one flush callback can be outstanding at a time;
if another flush occurs before the previous flush's callback
has been run, the previous callback will be discarded.
"""
if self.application._wsgi:
raise Exception("WSGI applications do not support flush()")
chunk = b"".join(self._write_buffer)
self._write_buffer = []
if not self._headers_written:
self._headers_written = True
for transform in self._transforms:
self._status_code, self._headers, chunk = \
transform.transform_first_chunk(
self._status_code, self._headers, chunk, include_footers)
headers = self._generate_headers()
else:
for transform in self._transforms:
chunk = transform.transform_chunk(chunk, include_footers)
headers = b""
# Ignore the chunk and only write the headers for HEAD requests
if self.request.method == "HEAD":
if headers:
self.request.write(headers, callback=callback)
return
self.request.write(headers + chunk, callback=callback)
def finish(self, chunk=None):
"""Finishes this response, ending the HTTP request."""
if self._finished:
raise RuntimeError("finish() called twice. May be caused "
"by using async operations without the "
"@asynchronous decorator.")
if chunk is not None:
self.write(chunk)
# Automatically support ETags and add the Content-Length header if
# we have not flushed any content yet.
if not self._headers_written:
if (self._status_code == 200 and
self.request.method in ("GET", "HEAD") and
"Etag" not in self._headers):
etag = self.compute_etag()
if etag is not None:
self.set_header("Etag", etag)
inm = self.request.headers.get("If-None-Match")
if inm and inm.find(etag) != -1:
self._write_buffer = []
self.set_status(304)
if self._status_code == 304:
assert not self._write_buffer, "Cannot send body with 304"
self._clear_headers_for_304()
elif "Content-Length" not in self._headers:
content_length = sum(len(part) for part in self._write_buffer)
self.set_header("Content-Length", content_length)
if hasattr(self.request, "connection"):
# Now that the request is finished, clear the callback we
# set on the IOStream (which would otherwise prevent the
# garbage collection of the RequestHandler when there
# are keepalive connections)
self.request.connection.stream.set_close_callback(None)
if not self.application._wsgi:
self.flush(include_footers=True)
self.request.finish()
self._log()
self._finished = True
self.on_finish()
# Break up a reference cycle between this handler and the
# _ui_module closures to allow for faster GC on CPython.
self.ui = None
def send_error(self, status_code=500, **kwargs):
"""Sends the given HTTP error code to the browser.
If `flush()` has already been called, it is not possible to send
an error, so this method will simply terminate the response.
If output has been written but not yet flushed, it will be discarded
and replaced with the error page.
Override `write_error()` to customize the error page that is returned.
Additional keyword arguments are passed through to `write_error`.
"""
if self._headers_written:
gen_log.error("Cannot send error response after headers written")
if not self._finished:
self.finish()
return
self.clear()
reason = None
if 'exc_info' in kwargs:
exception = kwargs['exc_info'][1]
if isinstance(exception, HTTPError) and exception.reason:
reason = exception.reason
self.set_status(status_code, reason=reason)
try:
self.write_error(status_code, **kwargs)
except Exception:
app_log.error("Uncaught exception in write_error", exc_info=True)
if not self._finished:
self.finish()
def write_error(self, status_code, **kwargs):
"""Override to implement custom error pages.
``write_error`` may call `write`, `render`, `set_header`, etc
to produce output as usual.
If this error was caused by an uncaught exception (including
HTTPError), an ``exc_info`` triple will be available as
``kwargs["exc_info"]``. Note that this exception may not be
the "current" exception for purposes of methods like
``sys.exc_info()`` or ``traceback.format_exc``.
For historical reasons, if a method ``get_error_html`` exists,
it will be used instead of the default ``write_error`` implementation.
``get_error_html`` returned a string instead of producing output
normally, and had different semantics for exception handling.
Users of ``get_error_html`` are encouraged to convert their code
to override ``write_error`` instead.
"""
if hasattr(self, 'get_error_html'):
if 'exc_info' in kwargs:
exc_info = kwargs.pop('exc_info')
kwargs['exception'] = exc_info[1]
try:
# Put the traceback into sys.exc_info()
raise_exc_info(exc_info)
except Exception:
self.finish(self.get_error_html(status_code, **kwargs))
else:
self.finish(self.get_error_html(status_code, **kwargs))
return
if self.settings.get("debug") and "exc_info" in kwargs:
# in debug mode, try to send a traceback
self.set_header('Content-Type', 'text/plain')
for line in traceback.format_exception(*kwargs["exc_info"]):
self.write(line)
self.finish()
else:
self.finish("<html><title>%(code)d: %(message)s</title>"
"<body>%(code)d: %(message)s</body></html>" % {
"code": status_code,
"message": self._reason,
})
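    # Illustrative usage sketch: a subclass rendering a template for its error
    # pages; ``error.html`` is a hypothetical template name:
    #
    #     class MyHandler(RequestHandler):
    #         def write_error(self, status_code, **kwargs):
    #             self.render("error.html", status_code=status_code,
    #                         reason=self._reason)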
@property
def locale(self):
"""The local for the current session.
Determined by either `get_user_locale`, which you can override to
set the locale based on, e.g., a user preference stored in a
database, or `get_browser_locale`, which uses the ``Accept-Language``
header.
"""
if not hasattr(self, "_locale"):
self._locale = self.get_user_locale()
if not self._locale:
self._locale = self.get_browser_locale()
assert self._locale
return self._locale
def get_user_locale(self):
"""Override to determine the locale from the authenticated user.
If None is returned, we fall back to `get_browser_locale()`.
This method should return a `tornado.locale.Locale` object,
most likely obtained via a call like ``tornado.locale.get("en")``
"""
return None
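    # Illustrative usage sketch: returning a locale stored with the
    # authenticated user; the ``prefs`` attribute is a hypothetical per-user
    # preference mapping:
    #
    #     def get_user_locale(self):
    #         if self.current_user and "locale" in self.current_user.prefs:
    #             return locale.get(self.current_user.prefs["locale"])
    #         return None  # fall back to get_browser_locale()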
def get_browser_locale(self, default="en_US"):
"""Determines the user's locale from ``Accept-Language`` header.
See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.4
"""
if "Accept-Language" in self.request.headers:
languages = self.request.headers["Accept-Language"].split(",")
locales = []
for language in languages:
parts = language.strip().split(";")
if len(parts) > 1 and parts[1].startswith("q="):
try:
score = float(parts[1][2:])
except (ValueError, TypeError):
score = 0.0
else:
score = 1.0
locales.append((parts[0], score))
if locales:
locales.sort(key=lambda pair: pair[1], reverse=True)
codes = [l[0] for l in locales]
return locale.get(*codes)
return locale.get(default)
@property
def current_user(self):
"""The authenticated user for this request.
This is a cached version of `get_current_user`, which you can
override to set the user based on, e.g., a cookie. If that
method is not overridden, this method always returns None.
We lazy-load the current user the first time this method is called
and cache the result after that.
"""
if not hasattr(self, "_current_user"):
self._current_user = self.get_current_user()
return self._current_user
@current_user.setter
def current_user(self, value):
self._current_user = value
def get_current_user(self):
"""Override to determine the current user from, e.g., a cookie."""
return None
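    # Illustrative usage sketch: the common pattern of identifying the user
    # from a signed cookie set at login time:
    #
    #     class BaseHandler(RequestHandler):
    #         def get_current_user(self):
    #             return self.get_secure_cookie("user")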
def get_login_url(self):
"""Override to customize the login URL based on the request.
By default, we use the ``login_url`` application setting.
"""
self.require_setting("login_url", "@tornado.web.authenticated")
return self.application.settings["login_url"]
def get_template_path(self):
"""Override to customize template path for each handler.
By default, we use the ``template_path`` application setting.
Return None to load templates relative to the calling file.
"""
return self.application.settings.get("template_path")
@property
def xsrf_token(self):
"""The XSRF-prevention token for the current user/session.
To prevent cross-site request forgery, we set an '_xsrf' cookie
and include the same '_xsrf' value as an argument with all POST
requests. If the two do not match, we reject the form submission
as a potential forgery.
See http://en.wikipedia.org/wiki/Cross-site_request_forgery
"""
if not hasattr(self, "_xsrf_token"):
token = self.get_cookie("_xsrf")
if not token:
token = binascii.b2a_hex(uuid.uuid4().bytes)
expires_days = 30 if self.current_user else None
self.set_cookie("_xsrf", token, expires_days=expires_days)
self._xsrf_token = token
return self._xsrf_token
def check_xsrf_cookie(self):
"""Verifies that the ``_xsrf`` cookie matches the ``_xsrf`` argument.
To prevent cross-site request forgery, we set an ``_xsrf``
cookie and include the same value as a non-cookie
field with all ``POST`` requests. If the two do not match, we
reject the form submission as a potential forgery.
The ``_xsrf`` value may be set as either a form field named ``_xsrf``
or in a custom HTTP header named ``X-XSRFToken`` or ``X-CSRFToken``
(the latter is accepted for compatibility with Django).
See http://en.wikipedia.org/wiki/Cross-site_request_forgery
Prior to release 1.1.1, this check was ignored if the HTTP header
``X-Requested-With: XMLHTTPRequest`` was present. This exception
has been shown to be insecure and has been removed. For more
information please see
http://www.djangoproject.com/weblog/2011/feb/08/security/
http://weblog.rubyonrails.org/2011/2/8/csrf-protection-bypass-in-ruby-on-rails
"""
token = (self.get_argument("_xsrf", None) or
self.request.headers.get("X-Xsrftoken") or
self.request.headers.get("X-Csrftoken"))
if not token:
raise HTTPError(403, "'_xsrf' argument missing from POST")
if self.xsrf_token != token:
raise HTTPError(403, "XSRF cookie does not match POST argument")
def xsrf_form_html(self):
"""An HTML ``<input/>`` element to be included with all POST forms.
It defines the ``_xsrf`` input value, which we check on all POST
requests to prevent cross-site request forgery. If you have set
the ``xsrf_cookies`` application setting, you must include this
HTML within all of your HTML forms.
In a template, this method should be called with ``{% module
xsrf_form_html() %}``
See `check_xsrf_cookie()` above for more information.
"""
return '<input type="hidden" name="_xsrf" value="' + \
escape.xhtml_escape(self.xsrf_token) + '"/>'
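    # Illustrative usage sketch: with the ``xsrf_cookies`` application setting
    # enabled, every POST form in a template should embed the token; the form
    # fields shown here are hypothetical:
    #
    #     <form action="/new_message" method="post">
    #       {% module xsrf_form_html() %}
    #       <input type="text" name="message"/>
    #       <input type="submit" value="Post"/>
    #     </form>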
def static_url(self, path, include_host=None):
"""Returns a static URL for the given relative static file path.
This method requires you set the ``static_path`` setting in your
application (which specifies the root directory of your static
files).
We append ``?v=<signature>`` to the returned URL, which makes our
static file handler set an infinite expiration header on the
returned content. The signature is based on the content of the
file.
By default this method returns URLs relative to the current
host, but if ``include_host`` is true the URL returned will be
absolute. If this handler has an ``include_host`` attribute,
that value will be used as the default for all `static_url`
calls that do not pass ``include_host`` as a keyword argument.
"""
self.require_setting("static_path", "static_url")
static_handler_class = self.settings.get(
"static_handler_class", StaticFileHandler)
if include_host is None:
include_host = getattr(self, "include_host", False)
if include_host:
base = self.request.protocol + "://" + self.request.host
else:
base = ""
return base + static_handler_class.make_static_url(self.settings, path)
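    # Illustrative usage sketch: typical use inside a template, producing a
    # versioned URL such as ``/static/css/site.css?v=ab12c``; the file name is
    # hypothetical:
    #
    #     <link rel="stylesheet" href="{{ static_url("css/site.css") }}"/>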
def async_callback(self, callback, *args, **kwargs):
"""Obsolete - catches exceptions from the wrapped function.
This function is unnecessary since Tornado 1.1.
"""
if callback is None:
return None
if args or kwargs:
callback = functools.partial(callback, *args, **kwargs)
def wrapper(*args, **kwargs):
try:
return callback(*args, **kwargs)
except Exception as e:
if self._headers_written:
app_log.error("Exception after headers written",
exc_info=True)
else:
self._handle_request_exception(e)
return wrapper
def require_setting(self, name, feature="this feature"):
"""Raises an exception if the given app setting is not defined."""
if not self.application.settings.get(name):
raise Exception("You must define the '%s' setting in your "
"application to use %s" % (name, feature))
def reverse_url(self, name, *args):
"""Alias for `Application.reverse_url`."""
return self.application.reverse_url(name, *args)
def compute_etag(self):
"""Computes the etag header to be used for this request.
May be overridden to provide custom etag implementations,
or may return None to disable tornado's default etag support.
"""
hasher = hashlib.sha1()
for part in self._write_buffer:
hasher.update(part)
return '"%s"' % hasher.hexdigest()
def _stack_context_handle_exception(self, type, value, traceback):
try:
# For historical reasons _handle_request_exception only takes
# the exception value instead of the full triple,
# so re-raise the exception to ensure that it's in
# sys.exc_info()
raise_exc_info((type, value, traceback))
except Exception:
self._handle_request_exception(value)
return True
def _execute(self, transforms, *args, **kwargs):
"""Executes this request with the given output transforms."""
self._transforms = transforms
try:
if self.request.method not in self.SUPPORTED_METHODS:
raise HTTPError(405)
self.path_args = [self.decode_argument(arg) for arg in args]
self.path_kwargs = dict((k, self.decode_argument(v, name=k))
for (k, v) in kwargs.items())
# If XSRF cookies are turned on, reject form submissions without
# the proper cookie
if self.request.method not in ("GET", "HEAD", "OPTIONS") and \
self.application.settings.get("xsrf_cookies"):
self.check_xsrf_cookie()
self._when_complete(self.prepare(), self._execute_method)
except Exception as e:
self._handle_request_exception(e)
def _when_complete(self, result, callback):
try:
if result is None:
callback()
elif isinstance(result, Future):
if result.done():
if result.result() is not None:
raise ValueError('Expected None, got %r' % result)
callback()
else:
# Delayed import of IOLoop because it's not available
# on app engine
from tornado.ioloop import IOLoop
IOLoop.current().add_future(
result, functools.partial(self._when_complete,
callback=callback))
else:
raise ValueError("Expected Future or None, got %r" % result)
except Exception as e:
self._handle_request_exception(e)
def _execute_method(self):
method = getattr(self, self.request.method.lower())
self._when_complete(method(*self.path_args, **self.path_kwargs),
self._execute_finish)
def _execute_finish(self):
if self._auto_finish and not self._finished:
self.finish()
def _generate_headers(self):
reason = self._reason
lines = [utf8(self.request.version + " " +
str(self._status_code) +
" " + reason)]
lines.extend([utf8(n) + b": " + utf8(v) for n, v in self._headers.get_all()])
if hasattr(self, "_new_cookie"):
for cookie in self._new_cookie.values():
lines.append(utf8("Set-Cookie: " + cookie.OutputString(None)))
return b"\r\n".join(lines) + b"\r\n\r\n"
def _log(self):
"""Logs the current request.
Sort of deprecated since this functionality was moved to the
Application, but left in place for the benefit of existing apps
that have overridden this method.
"""
self.application.log_request(self)
def _request_summary(self):
return self.request.method + " " + self.request.uri + \
" (" + self.request.remote_ip + ")"
def _handle_request_exception(self, e):
self.log_exception(*sys.exc_info())
if self._finished:
# Extra errors after the request has been finished should
# be logged, but there is no reason to continue to try and
# send a response.
return
if isinstance(e, HTTPError):
if e.status_code not in httputil.responses and not e.reason:
gen_log.error("Bad HTTP status code: %d", e.status_code)
self.send_error(500, exc_info=sys.exc_info())
else:
self.send_error(e.status_code, exc_info=sys.exc_info())
else:
self.send_error(500, exc_info=sys.exc_info())
def log_exception(self, typ, value, tb):
"""Override to customize logging of uncaught exceptions.
By default logs instances of `HTTPError` as warnings without
stack traces (on the ``tornado.general`` logger), and all
other exceptions as errors with stack traces (on the
``tornado.application`` logger).
"""
if isinstance(value, HTTPError):
if value.log_message:
format = "%d %s: " + value.log_message
args = ([value.status_code, self._request_summary()] +
list(value.args))
gen_log.warning(format, *args)
else:
app_log.error("Uncaught exception %s\n%r", self._request_summary(),
self.request, exc_info=(typ, value, tb))
def _ui_module(self, name, module):
def render(*args, **kwargs):
if not hasattr(self, "_active_modules"):
self._active_modules = {}
if name not in self._active_modules:
self._active_modules[name] = module(self)
rendered = self._active_modules[name].render(*args, **kwargs)
return rendered
return render
def _ui_method(self, method):
return lambda *args, **kwargs: method(self, *args, **kwargs)
def _clear_headers_for_304(self):
# 304 responses should not contain entity headers (defined in
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec7.html#sec7.1)
# not explicitly allowed by
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.5
headers = ["Allow", "Content-Encoding", "Content-Language",
"Content-Length", "Content-MD5", "Content-Range",
"Content-Type", "Last-Modified"]
for h in headers:
self.clear_header(h)
def asynchronous(method):
"""Wrap request handler methods with this if they are asynchronous.
This decorator is unnecessary if the method is also decorated with
``@gen.coroutine`` (it is legal but unnecessary to use the two
decorators together, in which case ``@asynchronous`` must be
first).
This decorator should only be applied to the :ref:`HTTP verb
methods <verbs>`; its behavior is undefined for any other method.
This decorator does not *make* a method asynchronous; it tells
the framework that the method *is* asynchronous. For this decorator
to be useful the method must (at least sometimes) do something
asynchronous.
If this decorator is given, the response is not finished when the
method returns. It is up to the request handler to call
`self.finish() <RequestHandler.finish>` to finish the HTTP
request. Without this decorator, the request is automatically
finished when the ``get()`` or ``post()`` method returns. Example::
class MyRequestHandler(web.RequestHandler):
@web.asynchronous
def get(self):
http = httpclient.AsyncHTTPClient()
http.fetch("http://friendfeed.com/", self._on_download)
def _on_download(self, response):
self.write("Downloaded!")
self.finish()
"""
# Delay the IOLoop import because it's not available on app engine.
from tornado.ioloop import IOLoop
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
if self.application._wsgi:
raise Exception("@asynchronous is not supported for WSGI apps")
self._auto_finish = False
with stack_context.ExceptionStackContext(
self._stack_context_handle_exception):
result = method(self, *args, **kwargs)
if isinstance(result, Future):
# If @asynchronous is used with @gen.coroutine, (but
# not @gen.engine), we can automatically finish the
# request when the future resolves. Additionally,
# the Future will swallow any exceptions so we need
# to throw them back out to the stack context to finish
# the request.
def future_complete(f):
f.result()
if not self._finished:
self.finish()
IOLoop.current().add_future(result, future_complete)
return result
return wrapper
def removeslash(method):
"""Use this decorator to remove trailing slashes from the request path.
For example, a request to ``/foo/`` would redirect to ``/foo`` with this
decorator. Your request handler mapping should use a regular expression
like ``r'/foo/*'`` in conjunction with using the decorator.
"""
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
if self.request.path.endswith("/"):
if self.request.method in ("GET", "HEAD"):
uri = self.request.path.rstrip("/")
if uri: # don't try to redirect '/' to ''
if self.request.query:
uri += "?" + self.request.query
self.redirect(uri, permanent=True)
return
else:
raise HTTPError(404)
return method(self, *args, **kwargs)
return wrapper
def addslash(method):
"""Use this decorator to add a missing trailing slash to the request path.
For example, a request to ``/foo`` would redirect to ``/foo/`` with this
decorator. Your request handler mapping should use a regular expression
like ``r'/foo/?'`` in conjunction with using the decorator.
"""
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
if not self.request.path.endswith("/"):
if self.request.method in ("GET", "HEAD"):
uri = self.request.path + "/"
if self.request.query:
uri += "?" + self.request.query
self.redirect(uri, permanent=True)
return
raise HTTPError(404)
return method(self, *args, **kwargs)
return wrapper
class Application(object):
"""A collection of request handlers that make up a web application.
Instances of this class are callable and can be passed directly to
HTTPServer to serve the application::
application = web.Application([
(r"/", MainPageHandler),
])
http_server = httpserver.HTTPServer(application)
http_server.listen(8080)
ioloop.IOLoop.instance().start()
The constructor for this class takes in a list of `URLSpec` objects
or (regexp, request_class) tuples. When we receive requests, we
iterate over the list in order and instantiate an instance of the
first request class whose regexp matches the request path.
Each tuple can contain an optional third element, which should be
a dictionary if it is present. That dictionary is passed as
    keyword arguments to the constructor of the handler. This pattern
is used for the `StaticFileHandler` in this example (note that a
`StaticFileHandler` can be installed automatically with the
static_path setting described below)::
application = web.Application([
(r"/static/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
])
We support virtual hosts with the `add_handlers` method, which takes in
a host regular expression as the first argument::
application.add_handlers(r"www\.myhost\.com", [
(r"/article/([0-9]+)", ArticleHandler),
])
You can serve static files by sending the ``static_path`` setting
as a keyword argument. We will serve those files from the
``/static/`` URI (this is configurable with the
``static_url_prefix`` setting), and we will serve ``/favicon.ico``
and ``/robots.txt`` from the same directory. A custom subclass of
`StaticFileHandler` can be specified with the
``static_handler_class`` setting.
"""
def __init__(self, handlers=None, default_host="", transforms=None,
wsgi=False, **settings):
if transforms is None:
self.transforms = []
if settings.get("gzip"):
self.transforms.append(GZipContentEncoding)
self.transforms.append(ChunkedTransferEncoding)
else:
self.transforms = transforms
self.handlers = []
self.named_handlers = {}
self.default_host = default_host
self.settings = settings
self.ui_modules = {'linkify': _linkify,
'xsrf_form_html': _xsrf_form_html,
'Template': TemplateModule,
}
self.ui_methods = {}
self._wsgi = wsgi
self._load_ui_modules(settings.get("ui_modules", {}))
self._load_ui_methods(settings.get("ui_methods", {}))
if self.settings.get("static_path"):
path = self.settings["static_path"]
handlers = list(handlers or [])
static_url_prefix = settings.get("static_url_prefix",
"/static/")
static_handler_class = settings.get("static_handler_class",
StaticFileHandler)
static_handler_args = settings.get("static_handler_args", {})
static_handler_args['path'] = path
for pattern in [re.escape(static_url_prefix) + r"(.*)",
r"/(favicon\.ico)", r"/(robots\.txt)"]:
handlers.insert(0, (pattern, static_handler_class,
static_handler_args))
if handlers:
self.add_handlers(".*$", handlers)
# Automatically reload modified modules
if self.settings.get("debug") and not wsgi:
from tornado import autoreload
autoreload.start()
def listen(self, port, address="", **kwargs):
"""Starts an HTTP server for this application on the given port.
This is a convenience alias for creating an `.HTTPServer`
object and calling its listen method. Keyword arguments not
supported by `HTTPServer.listen <.TCPServer.listen>` are passed to the
`.HTTPServer` constructor. For advanced uses
(e.g. multi-process mode), do not use this method; create an
`.HTTPServer` and call its
`.TCPServer.bind`/`.TCPServer.start` methods directly.
Note that after calling this method you still need to call
``IOLoop.instance().start()`` to start the server.
"""
# import is here rather than top level because HTTPServer
# is not importable on appengine
from tornado.httpserver import HTTPServer
server = HTTPServer(self, **kwargs)
server.listen(port, address)
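    # Illustrative usage sketch: the simplest single-process startup sequence
    # using this convenience method (``MainPageHandler`` is hypothetical, as
    # in the class docstring above):
    #
    #     application = Application([(r"/", MainPageHandler)])
    #     application.listen(8888)
    #     tornado.ioloop.IOLoop.instance().start()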
def add_handlers(self, host_pattern, host_handlers):
"""Appends the given handlers to our handler list.
Host patterns are processed sequentially in the order they were
added. All matching patterns will be considered.
"""
if not host_pattern.endswith("$"):
host_pattern += "$"
handlers = []
# The handlers with the wildcard host_pattern are a special
# case - they're added in the constructor but should have lower
# precedence than the more-precise handlers added later.
# If a wildcard handler group exists, it should always be last
# in the list, so insert new groups just before it.
if self.handlers and self.handlers[-1][0].pattern == '.*$':
self.handlers.insert(-1, (re.compile(host_pattern), handlers))
else:
self.handlers.append((re.compile(host_pattern), handlers))
for spec in host_handlers:
if isinstance(spec, (tuple, list)):
assert len(spec) in (2, 3)
pattern = spec[0]
handler = spec[1]
if isinstance(handler, str):
# import the Module and instantiate the class
# Must be a fully qualified name (module.ClassName)
handler = import_object(handler)
if len(spec) == 3:
kwargs = spec[2]
else:
kwargs = {}
spec = URLSpec(pattern, handler, kwargs)
handlers.append(spec)
if spec.name:
if spec.name in self.named_handlers:
app_log.warning(
"Multiple handlers named %s; replacing previous value",
spec.name)
self.named_handlers[spec.name] = spec
def add_transform(self, transform_class):
self.transforms.append(transform_class)
def _get_host_handlers(self, request):
host = request.host.lower().split(':')[0]
matches = []
for pattern, handlers in self.handlers:
if pattern.match(host):
matches.extend(handlers)
# Look for default host if not behind load balancer (for debugging)
if not matches and "X-Real-Ip" not in request.headers:
for pattern, handlers in self.handlers:
if pattern.match(self.default_host):
matches.extend(handlers)
return matches or None
def _load_ui_methods(self, methods):
if isinstance(methods, types.ModuleType):
self._load_ui_methods(dict((n, getattr(methods, n))
for n in dir(methods)))
elif isinstance(methods, list):
for m in methods:
self._load_ui_methods(m)
else:
for name, fn in methods.items():
if not name.startswith("_") and hasattr(fn, "__call__") \
and name[0].lower() == name[0]:
self.ui_methods[name] = fn
def _load_ui_modules(self, modules):
if isinstance(modules, types.ModuleType):
self._load_ui_modules(dict((n, getattr(modules, n))
for n in dir(modules)))
elif isinstance(modules, list):
for m in modules:
self._load_ui_modules(m)
else:
assert isinstance(modules, dict)
for name, cls in modules.items():
try:
if issubclass(cls, UIModule):
self.ui_modules[name] = cls
except TypeError:
pass
def __call__(self, request):
"""Called by HTTPServer to execute the request."""
transforms = [t(request) for t in self.transforms]
handler = None
args = []
kwargs = {}
handlers = self._get_host_handlers(request)
if not handlers:
handler = RedirectHandler(
self, request, url="http://" + self.default_host + "/")
else:
for spec in handlers:
match = spec.regex.match(request.path)
if match:
handler = spec.handler_class(self, request, **spec.kwargs)
if spec.regex.groups:
# None-safe wrapper around url_unescape to handle
# unmatched optional groups correctly
def unquote(s):
if s is None:
return s
return escape.url_unescape(s, encoding=None,
plus=False)
# Pass matched groups to the handler. Since
# match.groups() includes both named and unnamed groups,
# we want to use either groups or groupdict but not both.
# Note that args are passed as bytes so the handler can
# decide what encoding to use.
if spec.regex.groupindex:
kwargs = dict(
(str(k), unquote(v))
for (k, v) in match.groupdict().items())
else:
args = [unquote(s) for s in match.groups()]
break
if not handler:
handler = ErrorHandler(self, request, status_code=404)
# In debug mode, re-compile templates and reload static files on every
# request so you don't need to restart to see changes
if self.settings.get("debug"):
with RequestHandler._template_loader_lock:
for loader in RequestHandler._template_loaders.values():
loader.reset()
StaticFileHandler.reset()
handler._execute(transforms, *args, **kwargs)
return handler
def reverse_url(self, name, *args):
"""Returns a URL path for handler named ``name``
The handler must be added to the application as a named `URLSpec`.
Args will be substituted for capturing groups in the `URLSpec` regex.
They will be converted to strings if necessary, encoded as utf8,
and url-escaped.
"""
if name in self.named_handlers:
return self.named_handlers[name].reverse(*args)
raise KeyError("%s not found in named urls" % name)
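    # Illustrative usage sketch: reversing a handler registered under a name;
    # ``ArticleHandler`` is a hypothetical handler class:
    #
    #     application = Application([
    #         url(r"/article/([0-9]+)", ArticleHandler, name="article"),
    #     ])
    #     application.reverse_url("article", 42)  # -> "/article/42"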
def log_request(self, handler):
"""Writes a completed HTTP request to the logs.
By default writes to the python root logger. To change
this behavior either subclass Application and override this method,
or pass a function in the application settings dictionary as
``log_function``.
"""
if "log_function" in self.settings:
self.settings["log_function"](handler)
return
if handler.get_status() < 400:
log_method = access_log.info
elif handler.get_status() < 500:
log_method = access_log.warning
else:
log_method = access_log.error
request_time = 1000.0 * handler.request.request_time()
log_method("%d %s %.2fms", handler.get_status(),
handler._request_summary(), request_time)
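    # Illustrative usage sketch: replacing the default access log with an
    # application-supplied ``log_function`` (the function shown is
    # hypothetical; it simply receives the finished handler):
    #
    #     def log_function(handler):
    #         print(handler.get_status(), handler.request.uri,
    #               handler.request.request_time())
    #
    #     application = Application(handlers, log_function=log_function)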
class HTTPError(Exception):
"""An exception that will turn into an HTTP error response.
Raising an `HTTPError` is a convenient alternative to calling
`RequestHandler.send_error` since it automatically ends the
current function.
:arg int status_code: HTTP status code. Must be listed in
`httplib.responses <http.client.responses>` unless the ``reason``
keyword argument is given.
:arg string log_message: Message to be written to the log for this error
(will not be shown to the user unless the `Application` is in debug
mode). May contain ``%s``-style placeholders, which will be filled
in with remaining positional parameters.
:arg string reason: Keyword-only argument. The HTTP "reason" phrase
to pass in the status line along with ``status_code``. Normally
        determined automatically from ``status_code``, but can be set
        explicitly when a non-standard numeric code is used.
"""
def __init__(self, status_code, log_message=None, *args, **kwargs):
self.status_code = status_code
self.log_message = log_message
self.args = args
self.reason = kwargs.get('reason', None)
def __str__(self):
message = "HTTP %d: %s" % (
self.status_code,
self.reason or httputil.responses.get(self.status_code, 'Unknown'))
if self.log_message:
return message + " (" + (self.log_message % self.args) + ")"
else:
return message
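    # Illustrative usage sketch: raising an HTTPError from a handler method;
    # ``self.db`` and ``article_id`` are hypothetical, and the log_message
    # placeholder is filled from the remaining positional argument (it is
    # logged, never shown to the user):
    #
    #     if not self.db.get(article_id):
    #         raise HTTPError(404, "missing article %s", article_id)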
class MissingArgumentError(HTTPError):
"""Exception raised by `RequestHandler.get_argument`.
This is a subclass of `HTTPError`, so if it is uncaught a 400 response
code will be used instead of 500 (and a stack trace will not be logged).
"""
def __init__(self, arg_name):
super(MissingArgumentError, self).__init__(
400, 'Missing argument %s' % arg_name)
self.arg_name = arg_name
class ErrorHandler(RequestHandler):
"""Generates an error response with ``status_code`` for all requests."""
def initialize(self, status_code):
self.set_status(status_code)
def prepare(self):
raise HTTPError(self._status_code)
def check_xsrf_cookie(self):
# POSTs to an ErrorHandler don't actually have side effects,
# so we don't need to check the xsrf token. This allows POSTs
# to the wrong url to return a 404 instead of 403.
pass
class RedirectHandler(RequestHandler):
"""Redirects the client to the given URL for all GET requests.
You should provide the keyword argument ``url`` to the handler, e.g.::
application = web.Application([
(r"/oldpath", web.RedirectHandler, {"url": "/newpath"}),
])
"""
def initialize(self, url, permanent=True):
self._url = url
self._permanent = permanent
def get(self):
self.redirect(self._url, permanent=self._permanent)
class StaticFileHandler(RequestHandler):
"""A simple handler that can serve static content from a directory.
To map a path to this handler for a static data directory ``/var/www``,
you would add a line to your application like::
application = web.Application([
(r"/static/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
])
The handler constructor requires a ``path`` argument, which specifies the
local root directory of the content to be served.
Note that a capture group in the regex is required to parse the value for
the ``path`` argument to the get() method (different than the constructor
argument above); see `URLSpec` for details.
To support aggressive browser caching, if the argument ``v`` is given
with the path, we set an infinite HTTP expiration header. So, if you
want browsers to cache a file indefinitely, send them to, e.g.,
``/static/images/myimage.png?v=xxx``. Override `get_cache_time` method for
more fine-grained cache control.
"""
CACHE_MAX_AGE = 86400 * 365 * 10 # 10 years
_static_hashes = {}
_lock = threading.Lock() # protects _static_hashes
def initialize(self, path, default_filename=None):
self.root = os.path.abspath(path) + os.path.sep
self.default_filename = default_filename
@classmethod
def reset(cls):
with cls._lock:
cls._static_hashes = {}
def head(self, path):
self.get(path, include_body=False)
def get(self, path, include_body=True):
path = self.parse_url_path(path)
abspath = os.path.abspath(os.path.join(self.root, path))
# os.path.abspath strips a trailing /
# it needs to be temporarily added back for requests to root/
if not (abspath + os.path.sep).startswith(self.root):
raise HTTPError(403, "%s is not in root static directory", path)
if os.path.isdir(abspath) and self.default_filename is not None:
# need to look at the request.path here for when path is empty
# but there is some prefix to the path that was already
# trimmed by the routing
if not self.request.path.endswith("/"):
self.redirect(self.request.path + "/")
return
abspath = os.path.join(abspath, self.default_filename)
if not os.path.exists(abspath):
raise HTTPError(404)
if not os.path.isfile(abspath):
raise HTTPError(403, "%s is not a file", path)
stat_result = os.stat(abspath)
modified = datetime.datetime.utcfromtimestamp(stat_result[stat.ST_MTIME])
self.set_header("Last-Modified", modified)
mime_type, encoding = mimetypes.guess_type(abspath)
if mime_type:
self.set_header("Content-Type", mime_type)
cache_time = self.get_cache_time(path, modified, mime_type)
if cache_time > 0:
self.set_header("Expires", datetime.datetime.utcnow() +
datetime.timedelta(seconds=cache_time))
self.set_header("Cache-Control", "max-age=" + str(cache_time))
self.set_extra_headers(path)
# Check the If-Modified-Since, and don't send the result if the
# content has not been modified
ims_value = self.request.headers.get("If-Modified-Since")
if ims_value is not None:
date_tuple = email.utils.parsedate(ims_value)
if_since = datetime.datetime(*date_tuple[:6])
if if_since >= modified:
self.set_status(304)
return
with open(abspath, "rb") as file:
data = file.read()
if include_body:
self.write(data)
else:
assert self.request.method == "HEAD"
self.set_header("Content-Length", len(data))
def set_extra_headers(self, path):
"""For subclass to add extra headers to the response"""
pass
def get_cache_time(self, path, modified, mime_type):
"""Override to customize cache control behavior.
Return a positive number of seconds to make the result
cacheable for that amount of time or 0 to mark resource as
cacheable for an unspecified amount of time (subject to
browser heuristics).
By default returns cache expiry of 10 years for resources requested
with ``v`` argument.
"""
return self.CACHE_MAX_AGE if "v" in self.request.arguments else 0
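    # Illustrative usage sketch: a subclass that caches every response for one
    # hour regardless of the ``v`` argument:
    #
    #     class CachedStaticFileHandler(StaticFileHandler):
    #         def get_cache_time(self, path, modified, mime_type):
    #             return 3600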
@classmethod
def make_static_url(cls, settings, path):
"""Constructs a versioned url for the given path.
This method may be overridden in subclasses (but note that it is
a class method rather than an instance method).
``settings`` is the `Application.settings` dictionary. ``path``
is the static path being requested. The url returned should be
relative to the current host.
"""
static_url_prefix = settings.get('static_url_prefix', '/static/')
version_hash = cls.get_version(settings, path)
if version_hash:
return static_url_prefix + path + "?v=" + version_hash
return static_url_prefix + path
@classmethod
def get_version(cls, settings, path):
"""Generate the version string to be used in static URLs.
This method may be overridden in subclasses (but note that it
is a class method rather than a static method). The default
implementation uses a hash of the file's contents.
``settings`` is the `Application.settings` dictionary and ``path``
is the relative location of the requested asset on the filesystem.
The returned value should be a string, or ``None`` if no version
could be determined.
"""
abs_path = os.path.join(settings["static_path"], path)
with cls._lock:
hashes = cls._static_hashes
if abs_path not in hashes:
try:
f = open(abs_path, "rb")
hashes[abs_path] = hashlib.md5(f.read()).hexdigest()
f.close()
except Exception:
gen_log.error("Could not open static file %r", path)
hashes[abs_path] = None
hsh = hashes.get(abs_path)
if hsh:
return hsh[:5]
return None
def parse_url_path(self, url_path):
"""Converts a static URL path into a filesystem path.
``url_path`` is the path component of the URL with
``static_url_prefix`` removed. The return value should be
filesystem path relative to ``static_path``.
"""
if os.path.sep != "/":
url_path = url_path.replace("/", os.path.sep)
return url_path
class FallbackHandler(RequestHandler):
"""A `RequestHandler` that wraps another HTTP server callback.
The fallback is a callable object that accepts an
`~.httpserver.HTTPRequest`, such as an `Application` or
`tornado.wsgi.WSGIContainer`. This is most useful to use both
Tornado ``RequestHandlers`` and WSGI in the same server. Typical
usage::
wsgi_app = tornado.wsgi.WSGIContainer(
django.core.handlers.wsgi.WSGIHandler())
application = tornado.web.Application([
(r"/foo", FooHandler),
(r".*", FallbackHandler, dict(fallback=wsgi_app),
])
"""
def initialize(self, fallback):
self.fallback = fallback
def prepare(self):
self.fallback(self.request)
self._finished = True
class OutputTransform(object):
"""A transform modifies the result of an HTTP request (e.g., GZip encoding)
A new transform instance is created for every request. See the
ChunkedTransferEncoding example below if you want to implement a
new Transform.
"""
def __init__(self, request):
pass
def transform_first_chunk(self, status_code, headers, chunk, finishing):
return status_code, headers, chunk
def transform_chunk(self, chunk, finishing):
return chunk
class GZipContentEncoding(OutputTransform):
"""Applies the gzip content encoding to the response.
See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11
"""
CONTENT_TYPES = set([
"text/plain", "text/html", "text/css", "text/xml", "application/javascript",
"application/x-javascript", "application/xml", "application/atom+xml",
"text/javascript", "application/json", "application/xhtml+xml"])
MIN_LENGTH = 5
def __init__(self, request):
self._gzipping = request.supports_http_1_1() and \
"gzip" in request.headers.get("Accept-Encoding", "")
def transform_first_chunk(self, status_code, headers, chunk, finishing):
if 'Vary' in headers:
headers['Vary'] += b', Accept-Encoding'
else:
headers['Vary'] = b'Accept-Encoding'
if self._gzipping:
ctype = _unicode(headers.get("Content-Type", "")).split(";")[0]
self._gzipping = (ctype in self.CONTENT_TYPES) and \
(not finishing or len(chunk) >= self.MIN_LENGTH) and \
(finishing or "Content-Length" not in headers) and \
("Content-Encoding" not in headers)
if self._gzipping:
headers["Content-Encoding"] = "gzip"
self._gzip_value = BytesIO()
self._gzip_file = gzip.GzipFile(mode="w", fileobj=self._gzip_value)
chunk = self.transform_chunk(chunk, finishing)
if "Content-Length" in headers:
headers["Content-Length"] = str(len(chunk))
return status_code, headers, chunk
def transform_chunk(self, chunk, finishing):
if self._gzipping:
self._gzip_file.write(chunk)
if finishing:
self._gzip_file.close()
else:
self._gzip_file.flush()
chunk = self._gzip_value.getvalue()
self._gzip_value.truncate(0)
self._gzip_value.seek(0)
return chunk
class ChunkedTransferEncoding(OutputTransform):
"""Applies the chunked transfer encoding to the response.
See http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.6.1
"""
def __init__(self, request):
self._chunking = request.supports_http_1_1()
def transform_first_chunk(self, status_code, headers, chunk, finishing):
# 304 responses have no body (not even a zero-length body), and so
# should not have either Content-Length or Transfer-Encoding headers.
if self._chunking and status_code != 304:
# No need to chunk the output if a Content-Length is specified
if "Content-Length" in headers or "Transfer-Encoding" in headers:
self._chunking = False
else:
headers["Transfer-Encoding"] = "chunked"
chunk = self.transform_chunk(chunk, finishing)
return status_code, headers, chunk
def transform_chunk(self, block, finishing):
if self._chunking:
# Don't write out empty chunks because that means END-OF-STREAM
# with chunked encoding
if block:
block = utf8("%x" % len(block)) + b"\r\n" + block + b"\r\n"
if finishing:
block += b"0\r\n\r\n"
return block
def authenticated(method):
"""Decorate methods with this to require that the user be logged in.
If the user is not logged in, they will be redirected to the configured
`login url <RequestHandler.get_login_url>`.
"""
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
if not self.current_user:
if self.request.method in ("GET", "HEAD"):
url = self.get_login_url()
if "?" not in url:
if urlparse.urlsplit(url).scheme:
# if login url is absolute, make next absolute too
next_url = self.request.full_url()
else:
next_url = self.request.uri
url += "?" + urlencode(dict(next=next_url))
self.redirect(url)
return
raise HTTPError(403)
return method(self, *args, **kwargs)
return wrapper
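# Illustrative usage sketch: protecting a handler method; unauthenticated
# browsers are redirected to the ``login_url`` application setting.
# ``AccountHandler`` and ``account.html`` are hypothetical names:
#
#     class AccountHandler(RequestHandler):
#         @authenticated
#         def get(self):
#             self.render("account.html", user=self.current_user)
#
#     application = Application([(r"/account", AccountHandler)],
#                               login_url="/login", cookie_secret="__SECRET__")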
class UIModule(object):
"""A re-usable, modular UI unit on a page.
UI modules often execute additional queries, and they can include
additional CSS and JavaScript that will be included in the output
page, which is automatically inserted on page render.
"""
def __init__(self, handler):
self.handler = handler
self.request = handler.request
self.ui = handler.ui
self.current_user = handler.current_user
self.locale = handler.locale
def render(self, *args, **kwargs):
"""Overridden in subclasses to return this module's output."""
raise NotImplementedError()
def embedded_javascript(self):
"""Returns a JavaScript string that will be embedded in the page."""
return None
def javascript_files(self):
"""Returns a list of JavaScript files required by this module."""
return None
def embedded_css(self):
"""Returns a CSS string that will be embedded in the page."""
return None
def css_files(self):
"""Returns a list of CSS files required by this module."""
return None
def html_head(self):
"""Returns a CSS string that will be put in the <head/> element"""
return None
def html_body(self):
"""Returns an HTML string that will be put in the <body/> element"""
return None
def render_string(self, path, **kwargs):
"""Renders a template and returns it as a string."""
return self.handler.render_string(path, **kwargs)
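    # Illustrative usage sketch: a minimal custom module registered through
    # the ``ui_modules`` application setting and invoked from templates as
    # ``{% module Entry(post) %}``; ``Entry`` and ``module-entry.html`` are
    # hypothetical names:
    #
    #     class Entry(UIModule):
    #         def render(self, entry, show_comments=False):
    #             return self.render_string("module-entry.html", entry=entry,
    #                                       show_comments=show_comments)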
class _linkify(UIModule):
def render(self, text, **kwargs):
return escape.linkify(text, **kwargs)
class _xsrf_form_html(UIModule):
def render(self):
return self.handler.xsrf_form_html()
class TemplateModule(UIModule):
"""UIModule that simply renders the given template.
{% module Template("foo.html") %} is similar to {% include "foo.html" %},
but the module version gets its own namespace (with kwargs passed to
Template()) instead of inheriting the outer template's namespace.
Templates rendered through this module also get access to UIModule's
automatic javascript/css features. Simply call set_resources
inside the template and give it keyword arguments corresponding to
the methods on UIModule: {{ set_resources(js_files=static_url("my.js")) }}
Note that these resources are output once per template file, not once
per instantiation of the template, so they must not depend on
any arguments to the template.
"""
def __init__(self, handler):
super(TemplateModule, self).__init__(handler)
# keep resources in both a list and a dict to preserve order
self._resource_list = []
self._resource_dict = {}
def render(self, path, **kwargs):
def set_resources(**kwargs):
if path not in self._resource_dict:
self._resource_list.append(kwargs)
self._resource_dict[path] = kwargs
else:
if self._resource_dict[path] != kwargs:
raise ValueError("set_resources called with different "
"resources for the same template")
return ""
return self.render_string(path, set_resources=set_resources,
**kwargs)
def _get_resources(self, key):
return (r[key] for r in self._resource_list if key in r)
def embedded_javascript(self):
return "\n".join(self._get_resources("embedded_javascript"))
def javascript_files(self):
result = []
for f in self._get_resources("javascript_files"):
if isinstance(f, (unicode_type, bytes_type)):
result.append(f)
else:
result.extend(f)
return result
def embedded_css(self):
return "\n".join(self._get_resources("embedded_css"))
def css_files(self):
result = []
for f in self._get_resources("css_files"):
if isinstance(f, (unicode_type, bytes_type)):
result.append(f)
else:
result.extend(f)
return result
def html_head(self):
return "".join(self._get_resources("html_head"))
def html_body(self):
return "".join(self._get_resources("html_body"))
class _UIModuleNamespace(object):
"""Lazy namespace which creates UIModule proxies bound to a handler."""
def __init__(self, handler, ui_modules):
self.handler = handler
self.ui_modules = ui_modules
def __getitem__(self, key):
return self.handler._ui_module(key, self.ui_modules[key])
def __getattr__(self, key):
try:
return self[key]
except KeyError as e:
raise AttributeError(str(e))
class URLSpec(object):
"""Specifies mappings between URLs and handlers."""
def __init__(self, pattern, handler_class, kwargs=None, name=None):
"""Parameters:
* ``pattern``: Regular expression to be matched. Any groups
in the regex will be passed in to the handler's get/post/etc
methods as arguments.
* ``handler_class``: `RequestHandler` subclass to be invoked.
* ``kwargs`` (optional): A dictionary of additional arguments
to be passed to the handler's constructor.
* ``name`` (optional): A name for this handler. Used by
`Application.reverse_url`.
"""
if not pattern.endswith('$'):
pattern += '$'
self.regex = re.compile(pattern)
assert len(self.regex.groupindex) in (0, self.regex.groups), \
("groups in url regexes must either be all named or all "
"positional: %r" % self.regex.pattern)
self.handler_class = handler_class
self.kwargs = kwargs or {}
self.name = name
self._path, self._group_count = self._find_groups()
def __repr__(self):
return '%s(%r, %s, kwargs=%r, name=%r)' % \
(self.__class__.__name__, self.regex.pattern,
self.handler_class, self.kwargs, self.name)
def _find_groups(self):
"""Returns a tuple (reverse string, group count) for a url.
For example: Given the url pattern /([0-9]{4})/([a-z-]+)/, this method
would return ('/%s/%s/', 2).
"""
pattern = self.regex.pattern
if pattern.startswith('^'):
pattern = pattern[1:]
if pattern.endswith('$'):
pattern = pattern[:-1]
if self.regex.groups != pattern.count('('):
# The pattern is too complicated for our simplistic matching,
# so we can't support reversing it.
return (None, None)
pieces = []
for fragment in pattern.split('('):
if ')' in fragment:
paren_loc = fragment.index(')')
if paren_loc >= 0:
pieces.append('%s' + fragment[paren_loc + 1:])
else:
pieces.append(fragment)
return (''.join(pieces), self.regex.groups)
def reverse(self, *args):
assert self._path is not None, \
"Cannot reverse url regex " + self.regex.pattern
assert len(args) == self._group_count, "required number of arguments "\
"not found"
if not len(args):
return self._path
converted_args = []
for a in args:
if not isinstance(a, (unicode_type, bytes_type)):
a = str(a)
converted_args.append(escape.url_escape(utf8(a), plus=False))
return self._path % tuple(converted_args)
url = URLSpec
if hasattr(hmac, 'compare_digest'): # python 3.3
_time_independent_equals = hmac.compare_digest
else:
def _time_independent_equals(a, b):
if len(a) != len(b):
return False
result = 0
if isinstance(a[0], int): # python3 byte strings
for x, y in zip(a, b):
result |= x ^ y
else: # python2
for x, y in zip(a, b):
result |= ord(x) ^ ord(y)
return result == 0
def create_signed_value(secret, name, value):
timestamp = utf8(str(int(time.time())))
value = base64.b64encode(utf8(value))
signature = _create_signature(secret, name, value, timestamp)
value = b"|".join([value, timestamp, signature])
return value
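# Illustrative note: the signed value produced above has the form
# ``base64(value)|timestamp|HMAC-SHA1-signature``, for example (timestamp
# shown is hypothetical):
#
#     create_signed_value("secret", "user", "alice")
#     # -> b"YWxpY2U=|1357924680|" + 40 hex signature characters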
def decode_signed_value(secret, name, value, max_age_days=31):
if not value:
return None
parts = utf8(value).split(b"|")
if len(parts) != 3:
return None
signature = _create_signature(secret, name, parts[0], parts[1])
if not _time_independent_equals(parts[2], signature):
gen_log.warning("Invalid cookie signature %r", value)
return None
timestamp = int(parts[1])
if timestamp < time.time() - max_age_days * 86400:
gen_log.warning("Expired cookie %r", value)
return None
if timestamp > time.time() + 31 * 86400:
# _cookie_signature does not hash a delimiter between the
# parts of the cookie, so an attacker could transfer trailing
# digits from the payload to the timestamp without altering the
# signature. For backwards compatibility, sanity-check timestamp
# here instead of modifying _cookie_signature.
gen_log.warning("Cookie timestamp in future; possible tampering %r", value)
return None
if parts[1].startswith(b"0"):
gen_log.warning("Tampered cookie %r", value)
return None
try:
return base64.b64decode(parts[0])
except Exception:
return None
def _create_signature(secret, *parts):
hash = hmac.new(utf8(secret), digestmod=hashlib.sha1)
for part in parts:
hash.update(utf8(part))
return utf8(hash.hexdigest())
| [
"[email protected]"
] | |
47a70babb3db84356361acf60b5e6d54a50c94ce | 38ba13df9ea6e53c7b924cad1f3bea2de59c7a6a | /nibbler/trading/collectors/Watcher.py | 19ea6b0adbe54c634a51182074af580ff6b54745 | [] | no_license | JizzFactoryEmployee/nibblerppman | 0fbc1ce662cf8b4868b41a97291250fae29dc41d | 160e557578a3e8a614450354f6ade233d32b052f | refs/heads/master | 2022-11-14T01:10:31.743000 | 2020-07-04T01:21:52 | 2020-07-04T01:21:52 | 273,835,770 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 772 | py | import time
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import nibbler.trading.collectors.futures_collector as futures
class MyHandler(FileSystemEventHandler):
def on_modified(self, event):
event_list = event.src_path.split('\\')
filename = event_list[-1]
print(filename)
if "BTC" in filename:
print('success')
def runner():
event_handler = MyHandler()
observer = Observer()
observer.schedule(event_handler, path=r'C:\Users\James\Documents\GitHub\Nibbler\nibbler\trading\collectors', recursive=False)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
runner() | [
"[email protected]"
] | |
fde45c72b38be988de28cfb02d09ce268e6a0ac8 | c29ee57cdba30d0099c55d7b138a361a4964af4b | /apps/blogs/urls.py | a6a724f8cca29d67b19a8b9d2ed788dfe37cdafc | [] | no_license | bakker4444/django_app | ea26be5fa7789a083b199d5094767e16ec0fa63e | 6b1cd0cfc55be11f31cdc5970d4787f15828ac94 | refs/heads/master | 2020-03-10T10:06:17.177952 | 2018-04-13T00:30:54 | 2018-04-13T00:30:54 | 129,322,929 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 345 | py | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index),
url(r'^new/', views.new),
url(r'^create/', views.create),
url(r'^(?P<number>[0-9]+)/$', views.show),
url(r'^(?P<number>[0-9]+)/edit/', views.edit),
url(r'^(?P<number>[0-9]+)/delete/', views.destroy),
]
| [
"[email protected]"
] | |
cdf12814d08e0941202a5da5abdf6a067a3b34f7 | 36126f91a2d5903483b84ba2d8be77e160803058 | /examples/python/empirical_transition_matrix.py | 61c92ac81e1bbe433c6a5a7ec9d083e8e9e8aaca | [
"Apache-2.0"
] | permissive | open-risk/transitionMatrix | 9962bb2656eb637ba56afc3adecf42bbe68f9593 | d05e75cbc251f01842dd8c5ce225894b988f4d99 | refs/heads/master | 2023-03-05T08:01:20.816425 | 2023-02-22T20:46:38 | 2023-02-22T20:46:38 | 110,365,127 | 73 | 29 | Apache-2.0 | 2022-12-08T11:37:12 | 2017-11-11T17:25:08 | Python | UTF-8 | Python | false | false | 5,000 | py | # encoding: utf-8
# (c) 2017-2022 Open Risk, all rights reserved
#
# TransitionMatrix is licensed under the Apache 2.0 license a copy of which is included
# in the source distribution of TransitionMatrix. This is notwithstanding any licenses of
# third-party software included in this distribution. You may not use this file except in
# compliance with the License.
#
# Unless required by applicable law or agreed to in writing, software distributed under
# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific language governing permissions and
# limitations under the License.
"""
Example workflows using transitionMatrix to estimate an empirical transition matrix from duration type data. The datasets are produced in examples/generate_synthetic_data.py
"""
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import transitionMatrix as tm
from transitionMatrix import source_path
from transitionMatrix.estimators import aalen_johansen_estimator as aj
from transitionMatrix.utils.converters import datetime_to_float
dataset_path = source_path + "datasets/"
# Example 1: Credit Rating Migration example
# Example 2: Simple 2x2 Matrix for testing
# Example 3: Credit Rating Migration example with timestamps in raw date format
example = 3
# Step 1
# Load the data set into a pandas frame
# Make sure state is read as a string and not as integer
# Second synthetic data example:
# n entities with ~10 observations each, [0,1] state, 50%/50% transition matrix
print("> Step 1: Load the data set into a pandas frame")
if example == 1:
data = pd.read_csv(dataset_path + 'synthetic_data7.csv', dtype={'State': str})
elif example == 2:
data = pd.read_csv(dataset_path + 'synthetic_data8.csv', dtype={'State': str})
elif example == 3:
data = pd.read_csv(dataset_path + 'synthetic_data9.csv', parse_dates=True)
# convert datetime data to floats, return also the observation window data
bounds, data = datetime_to_float(data)
print('Start and End dates', bounds)
sorted_data = data.sort_values(['Time', 'ID'], ascending=[True, True])
print(sorted_data.head(5))
print(sorted_data.describe())
# Step 2
# Describe and validate the State Space against the data
print("> Step 2: Describe and validate the State Space against the data")
# We insert the expected labels of the state space
if example == 1 or example == 3:
definition = [('0', "AAA"), ('1', "AA"), ('2', "A"), ('3', "BBB"),
('4', "BB"), ('5', "B"), ('6', "CCC"), ('7', "D")]
elif example == 2:
definition = [('0', "G"), ('1', "B")]
myState = tm.StateSpace(definition)
myState.describe()
# We validate that indeed the data set conforms to our expectations
labels = {'State': 'From'}
print(myState.validate_dataset(dataset=sorted_data, labels=labels))
labels = {'State': 'To'}
print(myState.validate_dataset(dataset=sorted_data, labels=labels))
# Step 3
# Estimate matrices using the Aalen-Johansen estimator
print("> Step 3: Estimate matrices using the Aalen-Johansen estimator")
myEstimator = aj.AalenJohansenEstimator(states=myState)
# labels = {'Timestamp': 'Time', 'From_State': 'From', 'To_State': 'To', 'ID': 'ID'}
labels = {'Time': 'Time', 'From': 'From', 'To': 'To', 'ID': 'ID'}
etm, times = myEstimator.fit(sorted_data, labels=labels)
# Step 4
# Print the cumulative computed matrix
print("> Step 4: Print the cumulative computed matrix")
print(etm[:, :, -1])
# Step 5
# Create a visualization of the transition rates
if example == 1 or example == 3:
# Now lets plot a collection of curves for all ratings
print("> Plot the transition curves")
Periods = 10
Ratings = 8
m = 4
n = 2
f, axarr = plt.subplots(m, n)
f.subplots_adjust(left=0.05, bottom=0.05, right=0.95, top=0.90, wspace=0.0, hspace=0.1)
# plt.style.use(['ggplot'])
for ri in range(0, Ratings):
axj = int(ri / 2)
axi = ri % 2
print(ri, axj, axi)
curves = []
for rf in range(0, Ratings):
cPD = etm[ri, rf, :]
curves.append(cPD)
# axarr[axj, axi].set_aspect(5)
axarr[axj, axi].set_ylabel('State ' + str(ri), fontsize=12)
axarr[axj, axi].set_xlabel("Time")
axarr[axj, axi].plot(times[1:], curves[rf], label="RI=%d" % (rf,))
# axarr[axj, axi].set_xticks(range(10), minor=False)
axarr[axj, axi].set_yticks(np.linspace(0, 1, 5), minor=False)
# axarr[axj, axi].yaxis.grid(True, which='minor')
axarr[axj, axi].margins(y=0.05, x=0.05)
axarr[axj, axi].autoscale()
axarr[axj, axi].grid(True)
# plt.tight_layout()
f.suptitle("Multi-period Transition Probabilities", fontsize=12)
# plt.title("Multi-period Transition Probabilities")
plt.savefig("transition_probabilities.png")
plt.show()
def main():
print("Done")
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
0cef33f9b804fb8d06480356283a48afa11a4d9c | 905020fce75b4b63517ec31c601e721f5c260cd1 | /Карточные расклады.py | 1853c600b2cf5ef636e2296518a51051750b2fef | [] | no_license | Dimaed90800/Python_Y | 7858ad46309281a89c5c1e83a0f09030996182a4 | 04092b854605cb05df439eeeb52003e585bb5a29 | refs/heads/main | 2023-01-24T04:11:17.858281 | 2020-11-17T20:42:45 | 2020-11-17T20:42:45 | 313,731,672 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 639 | py | import itertools
nominals = ['2','3','4','5','6','7','8','9','10','валет','дама','король','туз']
suits = ['пик','треф','бубен','червей']
allcomb = list(itertools.product(nominals,suits))
comb = []
for elem in allcomb:
if (elem not in comb) and (list(elem).reverse() not in comb):
comb.append(' '.join(elem))
comb.sort()
three = sorted(itertools.combinations(comb, 3))
for comb in three:
comb = list(comb)
comb.sort()
if any([x.split()[1] == 'червей' for x in comb]) and any([y.split()[0] in nominals[9:] for y in comb]):
print(', '.join(list(comb))) | [
"[email protected]"
] | |
2c735c06acb15668edfe36a3cf848f1a07cb956b | a418afb0ec01cb8f41d9fcaf167f59b0f05aea90 | /Hackerrank/Python Problems/itertools.combinations.py | 221a322634997b5c9b8b4e304a3db940458757d5 | [] | no_license | manjurulhoque/problem-solving | 95d05960e83edde5721b73348270b6123fd3bf12 | e5547a8f5593141ac87f9797ddb25b2467c44e57 | refs/heads/master | 2022-08-08T06:44:21.839224 | 2022-07-25T09:32:26 | 2022-07-25T09:32:26 | 209,125,784 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 170 | py | import itertools
x, y = input().split()
for i in range(1, int(y) + 1):
out = list(itertools.combinations(sorted(x), i))
for n in out:
print(*n, sep='') | [
"[email protected]"
] | |
9e27115696afc8fea8212d51a643e9d1c2702dc1 | b15a9d9c7374c4a1fa5ec3ef63603a8c57e8681f | /Design-Patterns-Python/flyweight/client.py | b7714e4c7fe83aaeb7505226e720f0509797c36d | [] | no_license | gohils/zemr_notebook | 3f7490ef7a2559655746c3e2e0dbfb835a83891e | 00d53cea9970df44160c51e6ad2bdeadfae2c91f | refs/heads/master | 2023-08-04T14:32:35.428016 | 2023-07-20T11:51:08 | 2023-07-20T11:51:08 | 222,027,451 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 947 | py | "The Flyweight Use Case Example"
from table import Table
from flyweight_factory import FlyweightFactory
TABLE = Table(3, 3)
TABLE.rows[0].columns[0].data = "abra"
TABLE.rows[0].columns[1].data = "112233"
TABLE.rows[0].columns[2].data = "cadabra"
TABLE.rows[1].columns[0].data = "racadab"
TABLE.rows[1].columns[1].data = "12345"
TABLE.rows[1].columns[2].data = "332211"
TABLE.rows[2].columns[0].data = "cadabra"
TABLE.rows[2].columns[1].data = "445566"
TABLE.rows[2].columns[2].data = "aa 22 bb"
TABLE.rows[0].columns[0].justify = 1
TABLE.rows[1].columns[0].justify = 1
TABLE.rows[2].columns[0].justify = 1
TABLE.rows[0].columns[2].justify = 2
TABLE.rows[1].columns[2].justify = 2
TABLE.rows[2].columns[2].justify = 2
TABLE.rows[0].columns[1].width = 15
TABLE.rows[1].columns[1].width = 15
TABLE.rows[2].columns[1].width = 15
TABLE.draw()
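# The count below reports how many distinct flyweight objects the factory created
# while the table above was being built and drawn.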
print(f"FlyweightFactory has {FlyweightFactory.get_count()} flyweights")
| [
"[email protected]"
] | |
3c959eed8a79ca30fbcb647176095c291b99e320 | 4f510470b3093ab2c60f929221af82c79b121ca7 | /linux/FinalKeyPress/smallplane.py | c63ffce47aa72b6d465ad959c7dcf92fe5873091 | [] | no_license | q737645224/python3 | ce98926c701214f0fc7da964af45ba0baf8edacf | 4bfabe3f4bf5ba4133a16102c51bf079d500e4eb | refs/heads/master | 2020-03-30T07:11:17.202996 | 2018-10-30T06:14:51 | 2018-10-30T06:14:51 | 150,921,088 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,695 | py | import tkinter
import mover
import config
class SmallPlane(mover.BaseMover):
'''
    Moving enemy aircraft - small plane
'''
def __init__(self, root, canvas, position, x, y, tags):
super().__init__(root, canvas, position, x, y, tags,
config.image_smallplane_width, config.image_smallplane_height, True)
        # The mover's step length
        self.steps = [config.step_length_smallplane_x, config.step_length_smallplane_y]
        # Movement direction - downward
        self.move_direction = [0, 1]
        # Load the mover's background image
        self.bg_image_fullname = config.images_path + config.filename_smallplane + config.filename_suffix
        self.bg_image = tkinter.PhotoImage(file=self.bg_image_fullname)
        # # Reset the number of lives
        # super().set_lives_num(config.lives_num_enemy)
def exec_move(self):
if self.nw[1] < config.window_boundary_row:
            # Move normally while inside the Y-axis boundary
x = self.steps[0] * self.move_direction[0]
y = self.steps[1] * self.move_direction[1]
self.base_move(self.bg_image_tags, x, y)
else:
            # Outside the Y-axis boundary - handle it by moving back up by the window height
self.base_move(self.bg_image_tags, 0, -config.window_boundary_row)
    # Get the death (explosion) animation images
def get_dead_images(self):
img = []
if self.do_dead_play:
for i in self.dead_image_index:
image_fullname = config.images_path + config.filename_smallplane + str(i) + config.filename_suffix
                image = tkinter.PhotoImage(file=image_fullname)
img.append(image)
return img
| [
"[email protected]"
] | |
974ce89e9bb593af97847898b2977b214a4b7980 | 1fe8d4133981e53e88abf633046060b56fae883e | /venv/lib/python3.8/site-packages/tensorflow/python/autograph/pyct/testing/decorators 2.py | 9e47729bef44976b842be804899c28aafffda65b | [] | no_license | Akira331/flask-cifar10 | 6c49db8485038731ce67d23f0972b9574746c7a7 | 283e7a2867c77d4b6aba7aea9013bf241d35d76c | refs/heads/master | 2023-06-14T16:35:06.384755 | 2021-07-05T14:09:15 | 2021-07-05T14:09:15 | 382,864,970 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:bc496cdd81c37eb5f9cddd2250b6ebe37eae2739e5ee1b3228a97b6571bfaac1
size 1298
| [
"[email protected]"
] | |
01eaa69624efccb456e86f8cfdcca70c39f9f0e4 | d81bbc8ef3e7916b0dd89bfe9fe32ea4f37aa0c6 | /jobs/ts3_ppump/v0/ts3_ppump.schema | 5d9771a3ee22f16fb2d27ebbc9e593d48ea0a27d | [
"BSD-2-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | lsst-camera-dh/ts3-analysis | 9fb16b2245f390b5518e67c18f0a549e12bc4018 | bf3400f286876c5ed4368e2dafe730a8598d0bf7 | refs/heads/master | 2016-09-05T21:14:10.599337 | 2015-09-10T22:27:47 | 2015-09-10T22:27:47 | 30,820,534 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 80 | schema | # -*- python -*-
{
'schema_name' : 'ts3_ppump',
'schema_version' : 0,
}
| [
"[email protected]"
] | |
f74f457ca2d73374d4fecada0faf53f0825cedef | 24fe1f54fee3a3df952ca26cce839cc18124357a | /servicegraph/lib/python2.7/site-packages/acimodel-4.0_3d-py2.7.egg/cobra/modelimpl/comp/ctrlrp.py | 7652ab3889aff4c21b3f159b874656994a53d27a | [] | no_license | aperiyed/servicegraph-cloudcenter | 4b8dc9e776f6814cf07fe966fbd4a3481d0f45ff | 9eb7975f2f6835e1c0528563a771526896306392 | refs/heads/master | 2023-05-10T17:27:18.022381 | 2020-01-20T09:18:28 | 2020-01-20T09:18:28 | 235,065,676 | 0 | 0 | null | 2023-05-01T21:19:14 | 2020-01-20T09:36:37 | Python | UTF-8 | Python | false | false | 8,396 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2019 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class CtrlrP(Mo):
meta = ClassMeta("cobra.model.comp.CtrlrP")
meta.isAbstract = True
meta.moClassName = "compCtrlrP"
meta.moClassName = "compCtrlrP"
meta.rnFormat = ""
meta.category = MoCategory.REGULAR
meta.label = "Abstraction of Controller Profile"
meta.writeAccessMask = 0x0
meta.readAccessMask = 0x1
meta.isDomainable = False
meta.isReadOnly = False
meta.isConfigurable = True
meta.isDeletable = False
meta.isContextRoot = False
meta.childClasses.add("cobra.model.fault.Delegate")
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Delegate", "fd-"))
meta.superClasses.add("cobra.model.naming.NamedObject")
meta.superClasses.add("cobra.model.pol.Obj")
meta.superClasses.add("cobra.model.pol.Ctrlr")
meta.concreteSubClasses.add("cobra.model.vmm.CtrlrP")
meta.concreteSubClasses.add("cobra.model.vmm.CtrlrPDef")
meta.rnPrefixes = [
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "configIssues", "configIssues", 1034, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "not-applicable"
prop._addConstant("domain-not-deployable", "parent-domain-is-not-deployable-due-to-configuration-issues.", 128)
prop._addConstant("enfpref-not-set", "switching-preference-not-set-yet.", 256)
prop._addConstant("invalid-CtrlrAssoc", "controller-is-not-associated-to-a-valid-vm-controller.-please-provide-the-association.", 4)
prop._addConstant("invalid-mcastAddr", "no-valid-multicast-address-block-specified-for-the-controller.", 16)
prop._addConstant("invalid-policy", "mac-pinning-policy-basing-on-physical-nic-load-is-not-supported-for-this-mode.", 1024)
prop._addConstant("invalid-statsP", "controller-is-not-associated-to-a-valid-statistics-collection-policy", 2)
prop._addConstant("invalid-usraccp", "controller-is-not-associated-to-a-valid-access-profile", 1)
prop._addConstant("invalid-vxlanNs", "no-valid-vxlan-pool-specified-for-the-controller.", 8)
prop._addConstant("missing-infra-vlan", "missing-infra-vlan-for-the-controller.", 64)
prop._addConstant("missing-ns-ctrlr-assoc", "vcenter-should-be-associated-to-a-vshield-or-the-parent-domain-should-be-associated-with-a-vlan-pool.", 32)
prop._addConstant("mode-not-set", "mode-not-set-yet.", 512)
prop._addConstant("not-applicable", "n/a", 0)
meta.props.add("configIssues", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "hostOrIp", "hostOrIp", 1031, PropCategory.REGULAR)
prop.label = "Hostname or IP Address"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.regex = ['[a-zA-Z0-9:][a-zA-Z0-9.:-]{0,254}']
meta.props.add("hostOrIp", prop)
prop = PropMeta("str", "inventoryTrigSt", "inventoryTrigSt", 16507, PropCategory.REGULAR)
prop.label = "Triggered Inventory Sync Status"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = 1
prop.defaultValueStr = "untriggered"
prop._addConstant("autoTriggered", "autotriggered", 2)
prop._addConstant("triggered", "triggered", 0)
prop._addConstant("untriggered", "untriggered", 1)
meta.props.add("inventoryTrigSt", prop)
prop = PropMeta("str", "msftConfigErrMsg", "msftConfigErrMsg", 35385, PropCategory.REGULAR)
prop.label = "None"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 256)]
prop.regex = ['[a-zA-Z0-9_.:-]+']
meta.props.add("msftConfigErrMsg", prop)
prop = PropMeta("str", "msftConfigIssues", "msftConfigIssues", 18487, PropCategory.REGULAR)
prop.label = "None"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "not-applicable"
prop._addConstant("aaacert-invalid", "scvmm-comp-inventory-is-missing-due-to-invalid-certificate-on-scvmm-server-or-aaavmmcertificaterule-is-missing-on-apic-controller.", 16)
prop._addConstant("duplicate-mac-in-inventory", "scvmm-agent-found-duplicate-mac-address-in-scvmm-comp-inventory.", 128)
prop._addConstant("duplicate-rootContName", "controller-is-not-deployable.-the-same-rootcontname-controller-also-exist-in-another-vmm-domain.", 2)
prop._addConstant("invalid-object-in-inventory", "scvmm-agent-has-ignored-duplicate-object(s)-in-scvmm-comp-inventory-.", 64)
prop._addConstant("invalid-rootContName", "scvmm-controller-does-not-have-cloudname-that-matches-with-rootcontname.-controller-is-not-deployable.", 1)
prop._addConstant("inventory-failed", "scvmm-agent-failed-to-post-comp-inventory-to-apic-controller.", 32)
prop._addConstant("missing-hostGroup-in-cloud", "no-valid-hostgroup-exist-under-the-cloud-resource-in-scvmm-controller.-controller-is-not-deployable", 4)
prop._addConstant("missing-rootContName", "scvmm-controller-has-missing-cloudname-that-matches-with-rootcontname-but-its-scvmm-hostgroup-is-still-exists-along-with-apic-vmmdomain-logical-switch.", 8)
prop._addConstant("not-applicable", "n/a", 0)
prop._addConstant("zero-mac-in-inventory", "scvmm-agent-found-zero-mac-address-in-scvmm-comp-inventory.", 256)
meta.props.add("msftConfigIssues", prop)
prop = PropMeta("str", "name", "name", 4991, PropCategory.REGULAR)
prop.label = "Name"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 64)]
prop.regex = ['[a-zA-Z0-9_.:-]+']
meta.props.add("name", prop)
prop = PropMeta("str", "nameAlias", "nameAlias", 28417, PropCategory.REGULAR)
prop.label = "Name alias"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 63)]
prop.regex = ['[a-zA-Z0-9_.-]+']
meta.props.add("nameAlias", prop)
prop = PropMeta("str", "port", "port", 1032, PropCategory.REGULAR)
prop.label = "Port"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 65535)]
prop.defaultValue = 0
prop.defaultValueStr = "0"
meta.props.add("port", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "rootContName", "rootContName", 1033, PropCategory.REGULAR)
prop.label = "Datacenter"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 512)]
meta.props.add("rootContName", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
def __init__(self, parentMoOrDn, markDirty=True, **creationProps):
namingVals = []
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"[email protected]"
] | |
9f4459a0e78eb6578c2a9bc931224ac195ef575f | edf79f6964b15ea61faa9ecd70871d1ce776eda2 | /Jupyter/first-python-notebook/lib/python3.6/site-packages/pandas/tseries/util.py | 5934f5843736cc9afbff1a5dcc1c59f9eb8f2180 | [
"MIT"
] | permissive | Roychenlei/LearnPython | 326bf7d59ebec904623ea7102a1e07f1d7cb112b | eaf44787b86fe6da69dc9ba4e4c907884db57fda | refs/heads/master | 2022-10-09T07:23:39.410429 | 2018-04-28T02:35:07 | 2018-04-28T02:35:07 | 115,184,834 | 0 | 1 | MIT | 2022-10-01T15:18:43 | 2017-12-23T09:19:47 | Python | UTF-8 | Python | false | false | 3,286 | py | import warnings
from pandas.compat import lrange
import numpy as np
from pandas.core.dtypes.common import _ensure_platform_int
from pandas.core.frame import DataFrame
import pandas.core.algorithms as algorithms
def pivot_annual(series, freq=None):
"""
Deprecated. Use ``pivot_table`` instead.
Group a series by years, taking leap years into account.
The output has as many rows as distinct years in the original series,
and as many columns as the length of a leap year in the units corresponding
to the original frequency (366 for daily frequency, 366*24 for hourly...).
    The first column of the output corresponds to Jan. 1st, 00:00:00,
while the last column corresponds to Dec, 31st, 23:59:59.
Entries corresponding to Feb. 29th are masked for non-leap years.
For example, if the initial series has a daily frequency, the 59th column
of the output always corresponds to Feb. 28th, the 61st column to Mar. 1st,
and the 60th column is masked for non-leap years.
With a hourly initial frequency, the (59*24)th column of the output always
correspond to Feb. 28th 23:00, the (61*24)th column to Mar. 1st, 00:00, and
the 24 columns between (59*24) and (61*24) are masked.
If the original frequency is less than daily, the output is equivalent to
``series.convert('A', func=None)``.
Parameters
----------
series : Series
freq : string or None, default None
Returns
-------
annual : DataFrame
"""
msg = "pivot_annual is deprecated. Use pivot_table instead"
warnings.warn(msg, FutureWarning)
index = series.index
year = index.year
years = algorithms.unique1d(year)
if freq is not None:
freq = freq.upper()
else:
freq = series.index.freq
if freq == 'D':
width = 366
offset = np.asarray(index.dayofyear) - 1
# adjust for leap year
offset[(~isleapyear(year)) & (offset >= 59)] += 1
columns = lrange(1, 367)
# todo: strings like 1/1, 1/25, etc.?
elif freq in ('M', 'BM'):
width = 12
offset = np.asarray(index.month) - 1
columns = lrange(1, 13)
elif freq == 'H':
width = 8784
grouped = series.groupby(series.index.year)
defaulted = grouped.apply(lambda x: x.reset_index(drop=True))
defaulted.index = defaulted.index.droplevel(0)
offset = np.asarray(defaulted.index)
offset[~isleapyear(year) & (offset >= 1416)] += 24
columns = lrange(1, 8785)
else:
raise NotImplementedError(freq)
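    # Map each observation into the (n_years x width) output grid: the row is the
    # year's offset from the earliest year, the column is the position within that year.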
flat_index = (year - years.min()) * width + offset
flat_index = _ensure_platform_int(flat_index)
values = np.empty((len(years), width))
values.fill(np.nan)
values.put(flat_index, series.values)
return DataFrame(values, index=years, columns=columns)
def isleapyear(year):
"""
Returns true if year is a leap year.
Parameters
----------
year : integer / sequence
A given (list of) year(s).
"""
msg = "isleapyear is deprecated. Use .is_leap_year property instead"
warnings.warn(msg, FutureWarning)
year = np.asarray(year)
return np.logical_or(year % 400 == 0,
np.logical_and(year % 4 == 0, year % 100 > 0))
| [
"[email protected]"
] | |
fcf5c448f6224169e2c92e0176375a92af21de67 | 59359e4821554f559c9ffc5bf1a7f52fff0c6051 | /descarteslabs/core/common/geo/geocontext.py | 965f6dfb85170924e722895c3fe498ef27177a2e | [
"Apache-2.0"
] | permissive | descarteslabs/descarteslabs-python | 706acfc594721a1087872744c9cb72fe2b3d2e5b | a8a3859b8ced6d4478b93ff205caad06d508501d | refs/heads/master | 2023-08-23T12:01:36.802085 | 2023-08-21T14:57:22 | 2023-08-21T15:20:01 | 84,609,153 | 176 | 49 | NOASSERTION | 2023-05-02T15:54:37 | 2017-03-10T23:27:12 | Python | UTF-8 | Python | false | false | 53,135 | py | # Copyright 2018-2023 Descartes Labs.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import threading
import warnings
import math
import reprlib
import mercantile
import shapely.geometry
from .. import shapely_support
from ..dltile import Tile, Grid
from .utils import (
is_geographic_crs,
is_wgs84_crs,
polygon_from_bounds,
valid_latlon_bounds,
)
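# Equatorial circumference of the WGS84 ellipsoid in meters (2 * pi * the 6378137 m semi-major axis)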
EARTH_CIRCUMFERENCE_WGS84 = 2 * math.pi * 6378137
class GeoContext(object):
"""
Specifies spatial parameters to use when loading a raster
from the Descartes Labs catalog.
Two Scenes loaded with the same GeoContext will result in images
with the same shape (in pixels), covering the same spatial extent,
regardless of the dimensions or projection of the original data.
Specifically, a fully-defined GeoContext specifies:
* geometry to use as a cutline (WGS84), and/or bounds
* resolution (m) or a shape defining the extent in pixels
* EPSG code of the output coordinate reference system
* whether to align pixels to the output CRS
(see docstring for `AOI.align_pixels` for more information)
GeoContexts are immutable.
"""
__slots__ = (
"_geometry_lock_",
"_all_touched",
)
# slots *suffixed* with an underscore will be ignored by `__eq__` and `__repr__`.
# a double-underscore prefix would be more conventional, but that actually breaks as a slot name.
def __init__(self, all_touched=False):
"""
Parameters
----------
all_touched: bool, default False
If True, this ensures that any source pixel which intersects the
AOI GeoContext contributes to the raster result. Normally this mode is
not enabled, and its use is strongly discouraged. However, it can be
useful when the AOI is smaller than a source pixel, which under many
situations will return no result at all (i.e. entirely masked).
"""
# Shapely objects are not thread-safe, due to the way the underlying GEOS library is used.
# Specifically, accessing `__geo_interface__` on the same geometry across threads
        # can cause bizarre exceptions. This makes `raster_params` and `__geo_interface__` thread-unsafe.
# Subclasses of GeoContext can use this lock to ensure `self._geometry.__geo_interface__`
# is accessed from at most 1 thread at a time.
self._geometry_lock_ = threading.Lock()
self._all_touched = bool(all_touched)
def __getstate__(self):
# Lock objects shouldn't be pickled or deepcopied, but recursively get all the other slots
return {
attr: getattr(self, attr)
for s in self.__class__.__mro__
for attr in getattr(s, "__slots__", [])
if not attr.endswith("_")
}
def __setstate__(self, state):
for attr, val in state.items():
setattr(self, attr, val)
self._geometry_lock_ = threading.Lock()
@property
def all_touched(self):
"""
bool: If True, this ensures that any source pixel which intersects the
GeoContext contributes to the raster result.
Normally this mode is not enabled, and its use is strongly discouraged.
However, it can be useful when the AOI is smaller than a source pixel,
which under many situations will return no result at all (i.e. entirely
masked).
"""
return self._all_touched
@property
def raster_params(self):
"""
dict: The properties of this GeoContext,
as keyword arguments to use for `Raster.ndarray` or `Raster.raster`.
"""
raster_params = {}
if self.all_touched:
raster_params["cutline_all_touched"] = True
return raster_params
def __eq__(self, other):
"""
Two GeoContexts are equal only if they are the same type,
and every property is equal.
"""
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
if attr.endswith("_"):
continue
if getattr(self, attr) != getattr(other, attr):
return False
return True
def __repr__(self):
classname = self.__class__.__name__
delim = ",\n" + " " * (len(classname) + 1)
props = delim.join(
"{}={}".format(attr.lstrip("_"), reprlib.repr(getattr(self, attr)))
for s in self.__class__.__mro__
for attr in getattr(s, "__slots__", [])
if not attr.endswith("_")
)
return "{}({})".format(classname, props)
class AOI(GeoContext):
"""
A GeoContext that clips imagery to a geometry, and/or to square bounds,
with any output resolution and CRS.
Examples
--------
.. code-block:: python
cutline_aoi = dl.scenes.AOI(my_geometry, resolution=40)
aoi_with_cutline_disabled = cutline_aoi.assign(geometry=None)
no_cutline_aoi = dl.scenes.AOI(geometry=None, resolution=15, bounds=(-40, 35, -39, 36))
aoi_without_auto_bounds = dl.scenes.AOI(geometry=my_geometry, resolution=15, bounds=(-40, 35, -39, 36))
aoi_with_specific_pixel_dimensions = dl.scenes.AOI(geometry=my_geometry, shape=(200, 400))
"""
__slots__ = (
"_geometry",
"_resolution",
"_crs",
"_align_pixels",
"_bounds",
"_bounds_crs",
"_shape",
)
def __init__(
self,
geometry=None,
resolution=None,
crs=None,
align_pixels=None,
bounds=None,
bounds_crs="EPSG:4326",
shape=None,
all_touched=False,
):
"""
Parameters
----------
geometry: GeoJSON-like dict, object with ``__geo_interface__``; optional
When searching, filter for elements which intersect this geometry.
When rastering, clip imagery to this geometry.
Coordinates must be WGS84 (lat-lon).
If :const:`None`, imagery will just be clipped to
:py:attr:`~descarteslabs.common.gecontext.AOI.bounds`.
resolution: float, optional
Distance, in native units of the CRS, that the edge of each pixel
represents on the ground. Do not assume this to always be either
degrees or meters.
Can only specify one of `resolution` and `shape`.
crs: str, optional
Coordinate Reference System into which imagery will be projected,
expressed as an EPSG code (like :const:`EPSG:4326`), a PROJ.4 definition,
or an OGC CRS Well-Known Text string.
align_pixels: bool, optional, default True if resolution is not None
If :const:`True`, this ensures that, in different images rasterized
with this same AOI GeoContext, pixels ``(i, j)`` correspond
to the same area in space. This is accomplished by snapping the
coordinates of the origin (top-left corner of top-left pixel)
to a non-fractional interval of `resolution`. Note that in cases
where `shape` has been specified, this may lead to the resulting
            image being one pixel larger in each dimension, so that the entire
bounds is included.
If `align_pixels` is :const:`False`, when using imagery with different
native resolutions and/or projections, pixels at the same indices
can be misaligned by a fraction of `resolution`
            (i.e. correspond to *slightly* different coordinates in space).
However, this requires warping of the original image, which can be
            undesirable when you want to work with the original data in its
native resolution and projection.
bounds: 4-tuple, optional
Clip imagery to these ``(min_x, min_y, max_x, max_y)`` bounds,
expressed in :py:attr:`~descarteslabs.common.geo.geocontext.AOI.bounds_crs`
(which defaults to WGS84 lat-lon).
:py:attr:`~descarteslabs.common.geo.geocontext.AOI.bounds`
are automatically computed from `geometry` if not specified.
Otherwise,
:py:attr:`~descarteslabs.common.geo.geocontext.AOI.bounds` are required.
bounds_crs: str, optional, default "EPSG:4326"
The Coordinate Reference System of the
:py:attr:`~descarteslabs.common.geo.geocontext.AOI.bounds`,
given as an EPSG code (like :const:`EPSG:4326`), a PROJ.4 definition,
or an OGC CRS Well-Known Text string.
shape: 2-tuple, optional
``(rows, columns)``, in pixels, the output raster should fit within;
the longer side of the raster will be min(shape).
Can only specify one of `resolution` and `shape`. Note that when
`align_pixels` is :const:`True`, the actual resulting raster may
be one pixel larger in each direction.
all_touched: bool, default False
If True, this ensures that any source pixel which intersects the
AOI GeoContext contributes to the raster result. Normally this mode is
not enabled, and its use is strongly discouraged. However, it can be
useful when the AOI is smaller than a source pixel, which under many
situations will return no result at all (i.e. entirely masked).
"""
super(AOI, self).__init__(all_touched=all_touched)
if bounds is None and geometry is not None:
bounds = "update"
# If no bounds were given, use the bounds of the geometry
self._assign(
geometry,
resolution,
crs,
align_pixels,
bounds,
bounds_crs,
shape,
"unchanged",
)
self._validate()
@property
def geometry(self):
"""
shapely geometry: Clip imagery to this geometry
Coordinates must be WGS84 (lat-lon).
If :const:`None`, imagery will just be clipped to
:py:attr:`~descarteslabs.common.geo.geocontext.AOI.bounds`.
"""
return self._geometry
@property
def resolution(self):
"""
float: Distance, in units of the CRS, that the edge of each pixel
represents on the ground.
"""
return self._resolution
@property
def crs(self):
"""
str: Coordinate reference system into which imagery will be projected,
expressed as an EPSG code (like :const:`EPSG:4326`), a PROJ.4 definition,
or an OGC CRS Well-Known Text string.
"""
return self._crs
@property
def align_pixels(self):
"""
bool: If True, this ensures that, in different images rasterized with
this same AOI GeoContext, pixels ``(i, j)`` correspond to the
same area in space. This is accomplished by snapping the coordinates of
the origin (top-left corner of top-left pixel) to a non-fractional
interval of `resolution`. Note that in cases where `shape` has been
specified, this may lead to the resulting image being one pixel larger
        in each dimension, so that the entire bounds is included.
If `align_pixels` is False, when using imagery with different native
        resolutions and/or projections, pixels at the same indices can be
        misaligned by a fraction of ``resolution`` (i.e. correspond to *slightly*
different coordinates in space).
However, this requires warping of the original image, which can be
        undesirable when you want to work with the original data in its native
resolution and projection.
"""
if self._align_pixels is None:
return self._resolution is not None
else:
return self._align_pixels
@property
def bounds(self):
"""
tuple: Clip imagery to these ``(min_x, min_y, max_x, max_y)`` bounds,
expressed in the coordinate reference system in
:py:attr:`~descarteslabs.common.geo.geocontext.AOI.bounds_crs`.
"""
return self._bounds
@property
def bounds_crs(self):
"""
str: The coordinate reference system of the
:py:attr:`~descarteslabs.common.geo.geocontext.AOI.bounds`,
given as an EPSG code (like :const:`EPSG:4326`), a PROJ.4 definition,
or an OGC CRS Well-Known Text string.
"""
return self._bounds_crs
@property
def shape(self):
"""
tuple: ``(rows, columns)``, in pixels, the output raster should fit within;
the longer side of the raster will be min(shape).
"""
return self._shape
@property
def raster_params(self):
"""
dict: The properties of this `AOI`,
as keyword arguments to use for
:class:`~descarteslabs.client.services.raster.raster.Raster.ndarray` or
:class:`~descarteslabs.client.services.raster.raster.Raster.raster`.
Raises ValueError if
:py:attr:`~descarteslabs.common.geo.geocontext.AOI.bounds`, `crs`,
:py:attr:`~descarteslabs.common.geo.geocontext.AOI.bounds_crs`,
`resolution`, or `align_pixels` is :const:`None`.
"""
# Ensure that there can be no ambiguity: every parameter must be specified,
# so every raster call using this context will return spatially equivalent data
if self._bounds is None:
raise ValueError("AOI must have bounds specified")
if self._bounds_crs is None:
raise ValueError("AOI must have bounds_crs specified")
if self._crs is None:
raise ValueError("AOI must have CRS specified")
if self._resolution is None and self._shape is None:
raise ValueError("AOI must have one of resolution or shape specified")
# align_pixels will always be True or False based on resolution
# all_touched doesn't affect the spatial equivalence
with self._geometry_lock_:
# see comment in `GeoContext.__init__` for why we need to prevent
# parallel access to `self._geometry.__geo_interface__`
cutline = (
self._geometry.__geo_interface__ if self._geometry is not None else None
)
dimensions = (
(self._shape[1], self._shape[0]) if self._shape is not None else None
)
return {
**super().raster_params,
"cutline": cutline,
"resolution": self._resolution,
"srs": self._crs,
"bounds_srs": self._bounds_crs,
"align_pixels": self.align_pixels,
"bounds": self._bounds,
"dimensions": dimensions,
}
@property
def __geo_interface__(self):
"""
dict: :py:attr:`~descarteslabs.common.geo.geocontext.AOI.geometry` as a GeoJSON Geometry dict,
otherwise
:py:attr:`~descarteslabs.common.geo.geocontext.AOI.bounds`
as a GeoJSON Polygon dict if
:py:attr:`~descarteslabs.common.geo.geocontext.AOI.geometry` is
:const:`None` and
:py:attr:`~descarteslabs.common.geo.geocontext.AOI.bounds_crs`
is :const:`EPSG:4326`, otherwise
raises :exc:`RuntimeError`.
"""
if self._geometry is not None:
with self._geometry_lock_:
# see comment in `GeoContext.__init__` for why we need to prevent
# parallel access to `self._geometry.__geo_interface__`
return self._geometry.__geo_interface__
elif self._bounds is not None and is_wgs84_crs(self._bounds_crs):
return polygon_from_bounds(self._bounds)
else:
raise RuntimeError(
"AOI GeoContext must have a geometry set, or bounds set and a WGS84 `bounds_crs`, "
"to have a __geo_interface__"
)
def assign(
self,
geometry="unchanged",
resolution="unchanged",
crs="unchanged",
align_pixels="unchanged",
bounds="unchanged",
bounds_crs="unchanged",
shape="unchanged",
all_touched="unchanged",
):
"""
Return a copy of the AOI with the given values assigned.
Note
----
        If you are assigning a new geometry and want bounds to be updated as
well, use ``bounds="update"``. This will also change
:py:attr:`~descarteslabs.common.geo.geocontext.AOI.bounds_crs`
to :const:`EPSG:4326`, since the geometry's coordinates are in WGS84
decimal degrees, so the new bounds determined from those coordinates
must be in that CRS as well.
If you assign
:py:attr:`~descarteslabs.common.geo.geocontext.AOI.geometry`
without changing
:py:attr:`~descarteslabs.common.geo.geocontext.AOI.bounds`,
the new AOI GeoContext will produce rasters with the same
shape and covering the same spatial area as the old one, just with
pixels masked out that fall outside your new geometry.
Returns
-------
new : `AOI`
"""
new = copy.deepcopy(self)
new._assign(
geometry,
resolution,
crs,
align_pixels,
bounds,
bounds_crs,
shape,
all_touched,
)
new._validate()
return new
def _validate(self):
# validate shape
if self._shape is not None:
if not isinstance(self._shape, (list, tuple)) or len(self._shape) != 2:
raise TypeError("Shape must be a tuple of (rows, columns) in pixels")
# validate resolution
if self._resolution is not None:
if not isinstance(self._resolution, (int, float)):
raise TypeError(
"Resolution must be an int or float, got type '{}'".format(
type(self._resolution).__name__
)
)
if self._resolution <= 0:
raise ValueError("Resolution must be greater than zero")
# can't set both resolution and shape
if self._resolution is not None and self._shape is not None:
raise ValueError("Cannot set both resolution and shape")
# test that bounds are sane
if self._bounds is not None:
shapely_support.check_valid_bounds(self._bounds)
# rough check that bounds values actually make sense for bounds_crs
if self._bounds_crs is not None and self._bounds is not None:
is_geographic, lon_wrap = is_geographic_crs(
self._bounds_crs, with_lon_wrap=True
)
if is_geographic:
# some whole-globe products are funky around the dateline. Try
# to allow up to a 1/2 pixel slop there. This will generally only
# occur with AOIs created automatically from Image properties.
if self._resolution and self._crs and is_geographic_crs(self._crs):
tol = self._resolution / 2
elif self._shape is not None:
tol = (
max(
(self._bounds[2] - self._bounds[0]) / self._shape[1],
(self._bounds[3] - self._bounds[1]) / self._shape[0],
)
/ 2
)
else:
tol = 0.001
if not valid_latlon_bounds(self._bounds, tol, lon_wrap=lon_wrap):
raise ValueError(
"Bounds must be in lat-lon coordinates, "
"but the given bounds are outside [-90, 90] for y or [-180, 180] for x."
)
else:
if valid_latlon_bounds(self._bounds):
# Warn that bounds are probably in the wrong CRS.
# But we can't be sure without a proper tool for working with CRSs,
# since bounds that look like valid lat-lon coords
# *could* be valid in a different CRS, though unlikely.
warnings.warn(
"You might have the wrong `bounds_crs` set.\n"
"Bounds appear to be in lat-lon decimal degrees, but the `bounds_crs` "
"does not seem to be a geographic coordinate reference system "
"(i.e. its units are not degrees, but meters, feet, etc.).\n\n"
"If this is unexpected, set `bounds_crs='EPSG:4326'`."
)
# check that bounds and geometry actually intersect (if bounds in wgs84)
if (
self._geometry is not None
and self._bounds is not None
and is_wgs84_crs(self._bounds_crs)
):
bounds_shp = shapely.geometry.box(*self._bounds)
if not bounds_shp.intersects(self._geometry):
raise ValueError(
"Geometry and bounds do not intersect. This would result in all data being masked. "
"If you're assigning new geometry, assign new bounds as well "
"(use `bounds='update'` to use the bounds of the new geometry)."
)
# Helpful warning about a common mistake: resolution < width
# The CRS of bounds and CRS of resolution must be the same to compare between those values
# This most often happens when switching from a projected to a geodetic CRS (i.e. UTM to WGS84)
# and not updating the (units of the) resolution accordingly, so you now have, say,
# 30 decimal degrees as your resolution. Probably not what you meant.
# TODO: better way of checking equivalence between CRSs than string equality
if (
not self._all_touched
and self._crs is not None
and self._resolution is not None
and self._bounds is not None
and self._bounds_crs == self._crs
):
crs_width = self._bounds[2] - self._bounds[0]
crs_height = self._bounds[3] - self._bounds[1]
msg = (
"Output raster's {dim} ({dim_len:.4f}) is smaller than its resolution "
"({res:.4f}), meaning it would be less than one pixel {dim_adj}.\n"
"Remember that resolution is specified in units of the output CRS, "
"which are not necessarily meters."
)
if is_geographic_crs(self._crs):
msg += "\nSince your CRS is in lat-lon coordinates, resolution must be given in decimal degrees."
msg += (
"\nIf you are intending to raster an area smaller than the source imagery resolution, then you"
"should set an appropriate value of resolution, shape, or all_touched=True on the supplied AOI"
" to signal your intentions."
)
if crs_width < self._resolution:
raise ValueError(
msg.format(
dim="width",
dim_len=crs_width,
res=self._resolution,
dim_adj="wide",
)
)
if crs_height < self._resolution:
raise ValueError(
msg.format(
dim="height",
dim_len=crs_height,
res=self._resolution,
dim_adj="tall",
)
)
def _assign(
self,
geometry,
resolution,
crs,
align_pixels,
bounds,
bounds_crs,
shape,
all_touched,
):
# we use "unchanged" as a sentinel value, because None is a valid thing to set attributes to.
if geometry is not None and geometry != "unchanged":
geometry = shapely_support.geometry_like_to_shapely(geometry)
if bounds is not None and bounds != "unchanged":
if bounds == "update":
if bounds_crs not in (None, "unchanged", "EPSG:4326"):
raise ValueError(
"Can't compute bounds from a geometry while also explicitly setting a `bounds_crs`.\n\n"
"To resolve: don't set `bounds_crs`. It will be set to 'EPSG:4326' for you. "
"(Though you can do so explicitly if you'd like.)\n\n"
"Explanation: the coordinates in a geometry are latitudes and longitudes "
"in decimal degrees, defined in the WGS84 coordinate reference system "
"(referred to by the code EPSG:4326). When we infer `bounds` from a `geometry`, "
"those bounds will be in the same coordinate reference system as the geometry---i.e., WGS84. "
"Therefore, setting `bounds_crs` to anything besides 'EPSG:4326' doesn't make sense."
)
bounds_crs = "EPSG:4326"
if geometry is not None and geometry != "unchanged":
bounds = geometry.bounds
else:
raise ValueError(
"A geometry must be given with which to update the bounds"
)
else:
bounds = tuple(bounds)
if geometry != "unchanged":
self._geometry = geometry
if resolution != "unchanged":
# To avoid breaking existing code, avoid a conflict with shape.
# getattr() to handle pre-init cases.
if (
getattr(self, "_resolution", None) is None
and getattr(self, "_shape", None) is not None
):
self._shape = None
self._resolution = resolution
if crs != "unchanged":
self._crs = crs
if align_pixels != "unchanged":
self._align_pixels = align_pixels
if bounds != "unchanged":
self._bounds = bounds
if bounds_crs != "unchanged":
self._bounds_crs = bounds_crs
if shape != "unchanged":
self._shape = shape
if all_touched != "unchanged":
self._all_touched = bool(all_touched)
class DLTile(GeoContext):
"""
A GeoContext that clips and projects imagery to a single DLTile.
DLTiles allow you to define a grid of arbitrary spacing, resolution,
and overlap that can cover the globe.
DLTiles are always in a UTM projection.
Example
-------
>>> import descarteslabs as dl
>>> from descarteslabs.geo import DLTile
>>> tile = DLTile.from_latlon(
... lat=35.691,
... lon=-105.944,
... tilesize=512,
... resolution=10,
... pad=0
... )
>>> scenes, ctx = dl.scenes.search(tile, "landsat:LC08:PRE:TOAR") # doctest: +SKIP
    >>> scenes # doctest: +SKIP
SceneCollection of 93 scenes
* Dates: Apr 19, 2013 to Apr 14, 2017
* Products: landsat:LC08:PRE:TOAR: 93
>>> ctx # doctest: +SKIP
DLTile(key='512:0:10.0:13:-17:771',
resolution=10.0,
tilesize=512,
pad=0,
crs='EPSG:32613',
bounds=(412960.0, 3947520.0, 418080.0, 3952640.0),
bounds_crs='EPSG:32613',
geometry=<shapely.geom...x7f121488c890>,
zone=13,
ti=-17,
tj=771,
geotrans=[
412960.0,... 0,
-10.0
], ...
"""
__slots__ = (
"_key",
"_resolution",
"_tilesize",
"_pad",
"_crs",
"_bounds",
"_bounds_crs",
"_geometry",
"_zone",
"_ti",
"_tj",
"_geotrans",
"_proj4",
"_wkt",
)
def __init__(self, dltile_dict, all_touched=False):
"""
Constructs a DLTile from a parameter dictionary.
It is preferred to use the
:meth:`DLTile.from_latlon, :meth:`DLTile.from_shape`, or :meth:`DLTile.from_key`
class methods to construct a DLTile GeoContext.
Parameters
----------
dltile_dict: Dict[Str, Any]
Dictionary for the tile.
all_touched: bool, default False
If True, this ensures that any source pixel which intersects the
AOI GeoContext contributes to the raster result. Normally this mode is
not enabled, and its use is strongly discouraged. However, it can be
useful when the AOI is smaller than a source pixel, which under many
situations will return no result at all (i.e. entirely masked).
"""
super(DLTile, self).__init__(all_touched=all_touched)
if isinstance(dltile_dict["geometry"], shapely.geometry.polygon.Polygon):
self._geometry = dltile_dict["geometry"]
else:
self._geometry = shapely.geometry.shape(dltile_dict["geometry"])
properties = dltile_dict["properties"]
self._key = properties["key"]
self._resolution = properties["resolution"]
self._tilesize = properties["tilesize"]
self._pad = properties["pad"]
self._crs = properties["cs_code"]
self._bounds = tuple(properties["outputBounds"])
self._bounds_crs = properties["cs_code"]
self._zone = properties["zone"]
self._ti = properties["ti"]
self._tj = properties["tj"]
# these properties may not be present
self._geotrans = properties.get("geotrans", None)
self._proj4 = properties.get("proj4", None)
self._wkt = properties.get("wkt", None)
@classmethod
def from_latlon(cls, lat, lon, resolution, tilesize, pad, all_touched=False):
"""
Return a DLTile GeoContext that covers a latitude/longitude.
Where the point falls within the tile will vary, depending on the point
and tiling parameters.
Parameters
----------
lat : float
Latitude (WGS84)
lon : float
Longitude (WGS84)
resolution : float
Distance, in meters, that the edge of each pixel represents on the ground
tilesize : int
Length of each side of the tile, in pixels
pad : int
Number of extra pixels by which each side of the tile is buffered.
This determines the number of pixels by which two tiles overlap.
all_touched: bool, default False
If True, this ensures that any source pixel which intersects the
AOI GeoContext contributes to the raster result. Normally this mode is
not enabled, and its use is strongly discouraged. However, it can be
useful when the AOI is smaller than a source pixel, which under many
situations will return no result at all (i.e. entirely masked).
Returns
-------
tile : DLTile
Example
-------
>>> from descarteslabs.geo import DLTile
>>> # make a tile with total size 100, centered on lat, lon
>>> # total tilesize == tilesize + 2 * pad
>>> params = {
... "lat": 30.0131,
... "lon": 31.2089,
... "resolution": 10,
... "tilesize": 2,
... "pad": 49,
... }
>>> tile = DLTile.from_latlon(**params)
>>> tile.key
'2:49:10.0:36:-8637:166079'
>>> tile.geometry.centroid.xy # doctest: +SKIP
(array('d', [31.20899205942612]), array('d', [30.013121672688087]))
"""
grid = Grid(resolution=resolution, tilesize=tilesize, pad=pad)
tile = grid.tile_from_lonlat(lat=lat, lon=lon)
return cls(tile.geocontext, all_touched=all_touched)
@classmethod
def from_shape(
cls, shape, resolution, tilesize, pad, keys_only=False, all_touched=False
):
"""
Return a list of DLTiles that intersect the given geometry.
Parameters
----------
shape : GeoJSON-like
A GeoJSON dict, or object with a ``__geo_interface__``. Must be in
:const:`EPSG:4326` (WGS84 lat-lon) projection.
resolution : float
Distance, in meters, that the edge of each pixel represents on the ground.
tilesize : int
Length of each side of the tile, in pixels.
pad : int
Number of extra pixels by which each side of the tile is buffered.
This determines the number of pixels by which two tiles overlap.
keys_only : bool, default False
Whether to return DLTile objects or only DLTile keys. Set to True when
returning a large number of tiles and you do not need the full objects.
all_touched: bool, default False
If True, this ensures that any source pixel which intersects the
AOI GeoContext contributes to the raster result. Normally this mode is
not enabled, and its use is strongly discouraged. However, it can be
useful when the AOI is smaller than a source pixel, which under many
situations will return no result at all (i.e. entirely masked).
Returns
-------
tiles : List[DLTile] or List[Str]
Example
-------
>>> from descarteslabs.geo import DLTile
>>> shape = {
... "type":"Feature",
... "geometry":{
... "type":"Polygon",
... "coordinates":[[
... [-122.51140471760839,37.77130087547876],
... [-122.45475646845254,37.77475476721895],
... [-122.45303985468301,37.76657207194229],
... [-122.51057242081689,37.763446782666094],
... [-122.51140471760839,37.77130087547876]]]
... },"properties": None
... }
>>> tiles = DLTile.from_shape(
... shape=shape,
... resolution=1,
... tilesize=500,
... pad=0,
... )
>>> len(tiles)
31
"""
grid = Grid(resolution=resolution, tilesize=tilesize, pad=pad)
if grid._estimate_ntiles_from_shape(shape) > 50000:
warnings.warn(
"DLTile.from_shape will return a large number of tiles. "
"Consider using DLTile.iter_from_shape instead."
)
tiles = grid.tiles_from_shape(shape=shape, keys_only=keys_only)
if keys_only:
result = [tile for tile in tiles]
else:
result = [cls(tile.geocontext, all_touched=all_touched) for tile in tiles]
return result
@classmethod
def iter_from_shape(
cls, shape, resolution, tilesize, pad, keys_only=False, all_touched=False
):
"""
Return a iterator for DLTiles that intersect the given geometry.
Parameters
----------
shape : GeoJSON-like
A GeoJSON dict, or object with a ``__geo_interface__``. Must be in
:const:`EPSG:4326` (WGS84 lat-lon) projection.
resolution : float
Distance, in meters, that the edge of each pixel represents on the ground.
tilesize : int
Length of each side of the tile, in pixels.
pad : int
Number of extra pixels by which each side of the tile is buffered.
This determines the number of pixels by which two tiles overlap.
keys_only : bool, default False
Whether to return DLTile objects or only DLTile keys. Set to True when
returning a large number of tiles and you do not need the full objects.
all_touched: bool, default False
If True, this ensures that any source pixel which intersects the
AOI GeoContext contributes to the raster result. Normally this mode is
not enabled, and its use is strongly discouraged. However, it can be
useful when the AOI is smaller than a source pixel, which under many
situations will return no result at all (i.e. entirely masked).
Returns
-------
Iterator of DLTiles or str
Example
-------
>>> from descarteslabs.geo import DLTile
>>> shape = {
... "type":"Feature",
... "geometry":{
... "type":"Polygon",
... "coordinates":[[
... [-122.51140471760839,37.77130087547876],
... [-122.45475646845254,37.77475476721895],
... [-122.45303985468301,37.76657207194229],
... [-122.51057242081689,37.763446782666094],
... [-122.51140471760839,37.77130087547876]]]
... },"properties": None
... }
        >>> gen = DLTile.iter_from_shape(
... shape=shape,
... resolution=1,
... tilesize=500,
... pad=0,
... keys_only=True
... )
>>> tiles = [tile for tile in gen] # doctest: +SKIP
>>> tiles[0] # doctest: +SKIP
'500:0:1.0:10:94:8359'
"""
grid = Grid(resolution=resolution, tilesize=tilesize, pad=pad)
tiles = grid.tiles_from_shape(shape=shape, keys_only=keys_only)
for tile in tiles:
if keys_only:
yield tile
else:
yield cls(tile.geocontext, all_touched=all_touched)
@classmethod
def from_key(cls, dltile_key, all_touched=False):
"""
Return a DLTile GeoContext from a DLTile key.
Parameters
----------
dltile_key : str
DLTile key, e.g. '128:16:960.0:15:-1:37'
all_touched: bool, default False
If True, this ensures that any source pixel which intersects the
AOI GeoContext contributes to the raster result. Normally this mode is
not enabled, and its use is strongly discouraged. However, it can be
useful when the AOI is smaller than a source pixel, which under many
situations will return no result at all (i.e. entirely masked).
Returns
-------
tile: DLTile
Example
-------
>>> from descarteslabs.geo import DLTile
>>> tile = DLTile.from_key("2048:16:30.0:15:3:80")
>>> tile # doctest: +SKIP
DLTile(key='2048:16:30.0:15:3:80',
resolution=30.0,
tilesize=2048,
pad=16,
crs='EPSG:32615',
bounds=(683840.0, 4914720.0, 746240.0, 4977120.0),
bounds_crs='EPSG:32615',
geometry=<shapely.geom...>,
zone=15,
ti=3,
tj=80,
geotrans=[
...
"""
tile = Tile.from_key(dltile_key)
return cls(tile.geocontext, all_touched=all_touched)
def subtile(self, subdivide, resolution=None, pad=None, keys_only=False):
"""
Return an iterator for new DLTiles that subdivide this tile.
        The DLTile will be sub-divided into subdivide^2 total sub-tiles, each with a side length
        of tile_size / subdivide. The resulting sub-tile size must be an integer.
        Each sub-tile will by default inherit the same resolution and pad as the original tile.
Parameters
----------
subdivide : int
The value to subdivide the tile. The total number of sub-tiles will be the
square of this value. This value must evenly divide the original tilesize.
resolution : None, float
A new resolution for the sub-tiles. None defaults to the original DLTile resolution.
            The new resolution must evenly divide the original tilesize divided by
the subdivide ratio.
pad : None, int
A new pad value for the sub-tiles. None defaults to the original DLTile pad value.
keys_only : bool, default False
Whether to return DLTile objects or only DLTile keys. Set to True when returning a large number of tiles
and you do not need the full objects.
Returns
-------
Iterator over DLTiles or str
Example:
-------
>>> from descarteslabs.geo import DLTile
>>> tile = DLTile.from_key("2048:0:30.0:15:3:80")
>>> tiles = [tile for tile in tile.subtile(8)]
>>> len(tiles)
64
>>> tiles[0].tilesize
256
"""
subtiles = Tile.from_key(self.key).subtile(
subdivide=subdivide,
new_resolution=resolution,
new_pad=pad,
)
for tile in subtiles:
if keys_only:
yield tile.key
else:
yield DLTile(tile.geocontext, all_touched=self.all_touched)
def rowcol_to_latlon(self, row, col):
"""
Convert pixel coordinates to lat, lon coordinates
Parameters
----------
row : int or List[int]
Pixel row coordinate or coordinates
col : int or List[int]
Pixel column coordinate or coordinates
Returns
-------
coords : List[Tuple[float], Tuple[float]]
List with the first element the latitude values and the second element longitude values
Example
-------
>>> from descarteslabs.geo import DLTile
>>> tile = DLTile.from_key("2048:0:30.0:15:3:80")
>>> tile.rowcol_to_latlon(row=56, col=1111)
[(44.894653081367544,), (-90.24334206726267,)]
"""
lonlat = Tile.from_key(self.key).rowcol_to_lonlat(row=row, col=col)
lonlat = lonlat.tolist()
if isinstance(lonlat[0], (int, float)):
result = [(lonlat[1],), (lonlat[0],)]
else:
result = list(zip(*lonlat))
result[0], result[1] = result[1], result[0]
return result
def latlon_to_rowcol(self, lat, lon):
"""
Convert lat, lon coordinates to pixel coordinates
Parameters
----------
lat: float or List[float]
Latitude coordinate or coordinates
lon: float or List[float]
Longitude coordinate or coordinates
Returns
-------
        coords: List[Tuple[int], Tuple[int]]
Tuple with the first element the row values and the second element column values
Example
-------
>>> from descarteslabs.geo import DLTile
>>> tile = DLTile.from_key("2048:0:30.0:15:3:80")
>>> tile.latlon_to_rowcol(lat=44.8, lon=-90.2)
[(403,), (1237,)]
"""
rowcol = Tile.from_key(self.key).lonlat_to_rowcol(lat=lat, lon=lon)
rowcol = rowcol.tolist()
if isinstance(rowcol[0], (int, float)):
result = [(rowcol[0],), (rowcol[1],)]
else:
result = list(zip(*rowcol))
return result
def assign(self, pad="unchanged", all_touched="unchanged"):
"""
Return a copy of the DLTile with the pad and/or all_touched value modified.
Parameters
----------
pad : int, default "unchanged"
New pad value
all_touched : bool, default "unchanged"
New all_touched value
Returns
-------
tile : DLTile
Example:
--------
>>> from descarteslabs.geo import DLTile
>>> tile = DLTile.from_key("2048:16:30.0:15:3:80")
>>> tile.pad
16
>>> tile = tile.assign(123)
>>> tile.pad
123
"""
tile = Tile.from_key(self.key)
if pad != "unchanged":
tile = tile.assign(pad=pad)
if all_touched == "unchanged":
all_touched = self.all_touched
return DLTile(tile.geocontext, all_touched=all_touched)
@property
def key(self):
"""
str: The DLTile's key, which encodes the tiling parameters,
and which number in the grid this tile is.
"""
return self._key
@property
def resolution(self):
"""float: Distance, in meters, that the edge of each pixel represents on the ground"""
return self._resolution
@property
def tilesize(self):
"""
int: Length of each side of the tile, in pixels.
Note that the total number of pixels along each side of an image is
``tile_size + 2 * padding``
"""
return self._tilesize
@property
def tile_extent(self):
"""
int: total extent of geocontext length in pixels, including pad.
Size is ``tile_size + 2 * pad``.
"""
return self._tilesize + 2 * self._pad
@property
def pad(self):
"""
int: Number of extra pixels by which each side of the tile is buffered.
This determines the number of pixels by which two tiles overlap.
"""
return self._pad
@property
def crs(self):
"""
str: Coordinate reference system into which imagery will be projected.
For DLTiles, this is always a UTM projection, given as an EPSG code.
"""
return self._crs
@property
def bounds(self):
"""
tuple: The ``(min_x, min_y, max_x, max_y)`` of the area covered by
this DLTile, in the UTM coordinate reference system given in
:py:attr:`~descarteslabs.common.geo.geocontext.DLTile.bounds_crs`.
"""
return self._bounds
@property
def bounds_crs(self):
"""
str: The coordinate reference system of the
:py:attr:`~descarteslabs.common.geo.geocontext.DLTile.bounds`,
given as an EPSG code (like :const:`EPSG:32615`).
A DLTile's CRS is always UTM.
"""
return self._bounds_crs
@property
def geometry(self):
"""
shapely.geometry.Polygon: The polygon covered by this DLTile
in WGS84 (lat-lon) coordinates
"""
return self._geometry
@property
def zone(self):
"""int: The UTM zone of this tile"""
return self._zone
@property
def ti(self):
"""int: The y-index of this tile in its grid"""
return self._ti
@property
def tj(self):
"""int: The x-index of this tile in its grid"""
return self._tj
@property
def raster_params(self):
"""
dict: The properties of this DLTile,
as keyword arguments to use for `Raster.ndarray` or `Raster.raster`.
"""
return {
**super().raster_params,
"dltile": self._key,
# QUESTION: shouldn't align_pixels be True?
# based on the GDAL documentation for `-tap`, seems like that should be true
# to ensure that pixels of images with different resolutions/projections
# are aligned with the same dltile. otherwise, pixel (0,0) in 1 image could be at
# different coordinates than the other
"align_pixels": False,
}
@property
def geotrans(self):
"""
tuple: The 6-tuple GDAL geotrans for this DLTile in the shape
``(a, b, c, d, e, f)`` where
| a is the top left pixel's x-coordinate
| b is the west-east pixel resolution
| c is the row rotation, always 0 for DLTiles
| d is the top left pixel's y-coordinate
| e is the column rotation, always 0 for DLTiles
| f is the north-south pixel resolution, always a negative value
"""
if self._geotrans is None:
return None
return tuple(self._geotrans)
@property
def proj4(self):
"""str: PROJ.4 definition for this DLTile's coordinate reference system"""
return self._proj4
@property
def wkt(self):
"""str: OGC Well-Known Text definition for this DLTile's coordinate reference system"""
return self._wkt
@property
def __geo_interface__(self):
"""dict: :py:attr:`~descarteslabs.common.geo.geocontext.DLTile.geometry` as a GeoJSON Polygon"""
with self._geometry_lock_:
# see comment in `GeoContext.__init__` for why we need to prevent
# parallel access to `self._geometry.__geo_interface__`
return self._geometry.__geo_interface__
class XYZTile(GeoContext):
"""
A GeoContext for XYZ tiles, such as those used in web maps.
The tiles are always 256x256 pixels, in the spherical Mercator
or "Web Mercator" coordinate reference system (:const:`EPSG:3857`).
"""
__slots__ = ("_x", "_y", "_z")
def __init__(self, x, y, z, all_touched=False):
"""
Parameters
----------
x: int
X-index of the tile (increases going east)
y: int
Y-index of the tile (increases going south)
z: int
Zoom level of the tile
all_touched: bool, default False
If True, this ensures that any source pixel which intersects the
AOI GeoContext contributes to the raster result. Normally this mode is
not enabled, and its use is strongly discouraged. However, it can be
useful when the AOI is smaller than a source pixel, which under many
situations will return no result at all (i.e. entirely masked).
"""
self._x = x
self._y = y
self._z = z
super(XYZTile, self).__init__(all_touched=all_touched)
@property
def x(self):
"int: X-index of the tile (increases going east)"
return self._x
@property
def y(self):
"int: Y-index of the tile (increases going south)"
return self._y
@property
def z(self):
"int: Zoom level of the tile"
return self._z
def parent(self):
"The parent XYZTile enclosing this one"
return self.__class__(*mercantile.parent(self._x, self._y, self._z))
def children(self):
"List of child XYZTiles contained within this one"
return [
self.__class__(*t) for t in mercantile.children(self._x, self._y, self._z)
]
@property
def geometry(self):
"""
shapely.geometry.Polygon: The polygon covered by this XYZTile
in :const:`WGS84` (lat-lon) coordinates
"""
return shapely.geometry.box(*mercantile.bounds(self._x, self._y, self._z))
@property
def bounds(self):
"""
tuple: The ``(min_x, min_y, max_x, max_y)`` of the area covered by
this XYZTile, in spherical Mercator coordinates (EPSG:3857).
"""
return tuple(mercantile.xy_bounds(self._x, self._y, self._z))
@property
def crs(self):
"""
str: Coordinate reference system into which common.geo will be projected.
Always :const:`EPSG:3857` (spherical Mercator, aka "Web Mercator")
"""
return "EPSG:3857"
@property
def bounds_crs(self):
"""
str: The coordinate reference system of the
:py:attr:`~descarteslabs.common.geo.geocontext.XYZTile.bounds`.
Always :const:`EPSG:3857` (spherical Mercator, aka "Web Mercator")
"""
return "EPSG:3857"
@property
def tilesize(self):
"""
int: Length of each side of the tile, in pixels. Always 256.
"""
return 256
@property
def resolution(self):
"""
float: Distance, in meters, that the edge of each pixel represents in the
spherical Mercator ("Web Mercator", EPSG:3857) projection.
"""
num_tiles = 1 << self.z
return EARTH_CIRCUMFERENCE_WGS84 / num_tiles / self.tilesize
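        # For example, at zoom level 2 there are 2**2 = 4 tiles across, so assuming
        # the standard WGS84 circumference of ~40075016.7 m, each pixel spans
        # roughly 40075016.7 / 4 / 256 ≈ 39135.8 m.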
@property
def __geo_interface__(self):
"dict: :py:attr:`~descarteslabs.common.geo.geocontext.XYZTile.geometry` as a GeoJSON Polygon"
return self.geometry.__geo_interface__
@property
def raster_params(self):
"""
dict: The properties of this XYZTile,
as keyword arguments to use for `Raster.ndarray` or `Raster.raster`.
"""
return {
**super().raster_params,
"bounds": self.bounds,
"srs": self.crs,
"bounds_srs": self.bounds_crs,
"align_pixels": False,
"resolution": self.resolution,
}
| [
"[email protected]"
] | |
bce7d775325fcad89567eb94275ce095229d738a | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02832/s307251036.py | 36c65bb99ff02d537361c6ecedc39e7b50f138b1 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 198 | py | n = int(input())
a = list(map(int,input().split()))
if 1 not in a:
print(-1)
exit()
x = 1
cnt = 0
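# Greedily keep the subsequence 1, 2, 3, ...; every element that cannot extend it
# must be removed, so cnt ends up being the minimum number of removals.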
for i in range(n):
if a[i] == x:
x += 1
else:
cnt += 1
print(cnt) | [
"[email protected]"
] | |
7a4711d2854fc4a7a832a3fb5083a348ad0af491 | 84d581219b8065cf8936070a7f1e1766000378df | /vdj/__init__.py | f80e58d17b35d19c93aa96a3ab5bb85e8774f385 | [
"MIT",
"CC-BY-4.0"
] | permissive | RPGroup-PBoC/vdj_recombination | 939e72539a134a61cc5aa8386b3a31f6de954d26 | a59214f878968e5958915b56983b0f52a0a0483e | refs/heads/publication | 2023-07-18T22:52:07.149767 | 2020-05-19T18:12:11 | 2020-05-19T18:12:11 | 187,516,995 | 0 | 0 | MIT | 2023-07-06T21:42:29 | 2019-05-19T19:05:04 | HTML | UTF-8 | Python | false | false | 169 | py | """Top level package for VDJ utilities"""
from . import viz
from . import bayes
from . import io
__author__ = "Soichi Hirokawa and Griffin Chure"
__version__ = "0.1.9" | [
"[email protected]"
] | |
e906d748fcf7530f9052fc37543b97a85ee771eb | 183fa1746e80102391926064c1628bf63690026c | /Order/migrations/0001_initial.py | ae12317b8af5a2d9fa16a2af4e2549debe7875c4 | [] | no_license | redbull2003/StoreMarketPlace | 7454fdf06a27f754f33b41532ef2d717f313eb1a | b9e915da32f23ca087bd29e5d10fa9b9299f81d2 | refs/heads/master | 2023-06-06T03:53:22.766012 | 2021-06-24T16:30:37 | 2021-06-24T16:30:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,515 | py | # Generated by Django 3.1.1 on 2021-06-24 06:56
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('Product', '0011_auto_20210623_1817'),
]
operations = [
migrations.CreateModel(
name='Order',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True)),
('is_paid', models.BooleanField(default=False)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='OrderItem',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('quantity', models.PositiveIntegerField()),
('order', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='order_item', to='Order.order')),
('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Product.product')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
| [
"[email protected]"
] | |
eefa2607d573abc7a8823d3be4870930c3cdb7ca | 09fa0b270697652f8322cda88ff867ecfc460117 | /docs/conf.py | 4d8d1790aa39c933dc8b9cac5d7b6a32b0c470f3 | [
"MIT"
] | permissive | metapix/django-paypal | c9a4711bed5bef7b0529d5a73210f738afe7f766 | 0a94b253e376bde5ce2bd2a8d876e8c3eb56bb49 | refs/heads/master | 2020-12-26T15:59:03.653470 | 2015-06-10T15:48:24 | 2015-06-10T16:09:25 | 37,200,887 | 0 | 0 | null | 2015-06-10T14:06:49 | 2015-06-10T14:06:49 | null | UTF-8 | Python | false | false | 7,619 | py | # -*- coding: utf-8 -*-
#
# documentation build configuration file, created by
# sphinx-quickstart on Sun Feb 17 11:46:20 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'django-paypal'
copyright = u'2014'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.2.4'
# The full version, including alpha/beta/rc tags.
release = '0.2.4'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'doc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', '.tex', u' Documentation',
u'', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', '', u' Documentation',
[u''], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', '', u' Documentation',
u'', '', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
| [
"[email protected]"
] | |
bb8ce98a689bc611fa0e0b27ab24b5b275fbf940 | 3d29d2dbb45ff0f2a66395e59acfee6c5762b2fc | /spider/58_tc/sell_car/jiangsu/58_sell_car_nanjing.py | d0e24e3f97849672dbcc551531bef647a5bc3731 | [] | no_license | southwolf/carinfo_spider | c1d3ffa2fda545028c72444624ddf199f2fb07df | 55065b43e0e08188675506a2d1d7bb2d9db07c7d | refs/heads/master | 2021-01-21T01:26:59.638174 | 2015-04-20T00:29:23 | 2015-04-20T00:29:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,801 | py | # -*- coding: utf-8 -*-
#!/usr/bin/env python
import urllib
import urllib2
import re
import MySQLdb
import threading
from pyvirtualdisplay import Display
from selenium import webdriver
import socket
#socket.setdefaulttimeout(30)
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
from bs4 import BeautifulSoup
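# Fetch one page of 58.com used-car listings, pull every Nanjing ("nj") detail-page
# link out of the result table, and hand each URL to get_qiugou_info() for scraping.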
def grabHref(url,localfile):
user_agent = 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6'
heads = {'User-Agent':user_agent}
req = urllib2.Request(url,headers=heads)
    fails = 0
    html = ''  # ensure html is defined even if every download attempt fails
while True:
try:
if fails >= 10:
break
response = urllib2.urlopen(req,timeout=30)
html = response.read()
except:
fails += 1
print "Handing brand,the network may be not Ok,please wait...",fails
else:
break
dictionary = {}
for content in BeautifulSoup(html).find_all('td',attrs={'class':'t'}):
for a in content.find_all('a',attrs={'class':'t'}):
#if str(a.get('href')) not in dictionary:
if len(a.get('href').split('/'))>2 and len(str(a.get('href')).split('/')[2].split('.'))>0:
if str(a.get('href')).split('/')[2].split('.')[0] == 'nj':
#dictionary[str(a.get('href'))] = ''
print str(a.get('href'))
get_qiugou_info(str(a.get('href')))
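# Scrape a single used-car detail page (title, configuration, price, seller name,
# phone, release time, owner notes) and insert it into the MySQL sell_car_info
# table, skipping URLs already stored and marking repeated phone numbers as dealers.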
def get_qiugou_info(myUrl):
#proxy = {'http':'http://202.106.16.36:3128'}
#proxy_support = urllib2.ProxyHandler(proxy)
#opener = urllib2.build_opener(proxy_support,urllib2.HTTPHandler)
#urllib2.install_opener(opener)
user_agent = 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6'
heads = {'User-Agent':user_agent}
req = urllib2.Request(myUrl,headers=heads)
html = ''
fails = 0
while True:
try:
if fails >= 10:
break
response = urllib2.urlopen(req,timeout=30)
html = response.read()
except:
fails += 1
print "Handing brand,the network may be not Ok,please wait...",fails
else:
break
if html != '':
soup = BeautifulSoup(html)
title = ''
title2 = ''
prices = ''
addrs = u'江苏南京'.encode('utf-8')
name = ''
release_time = ''
owner_readme = ''
telephone = ''
for div in soup.find_all('div',attrs={'id':'content_sumary_right'}):
for h1 in div.find_all('h1',attrs={'class':'h1'}):
title = str(h1.get_text())
for h2 in div.find_all('h2',attrs={'class':'h2'}):
title2 = str(h2.get_text()).replace('\n','')
for div in soup.find_all('div',attrs={'class':'content_price_left'}):
for span in div.find_all('span',attrs={'class':'font_jiage'}):
prices = str(span.get_text())
for div in soup.find_all('p',attrs={'class':'lineheight_2'}):
for a in div.find_all('a',attrs={'rel':'nofollow'}):
name = str(a.get_text())
for span in soup.find_all('span',attrs={'id':'t_phone','class':'font20'}):
RG = re.compile(r'\d+-?\d*-?\d*')
if len(RG.findall(str(span.get_text())))>0:
telephone = RG.findall(str(span.get_text()))[0]
for ul in soup.find_all('ul',attrs={'class':'mtit_con_left fl'}):
for li in ul.find_all('li',attrs={'class':'time'}):
release_time = str(li.get_text())
for div in soup.find_all('div',attrs={'class':'benchepeizhi'}):
owner_readme = str(div.get_text())
print title,title2,prices,name,telephone,release_time,owner_readme,addrs
if telephone != '':
print title,title2,prices,name,telephone,release_time,owner_readme,addrs,myUrl
#res = [title,title2,prices,name,telephone,release_time,owner_readme,addrs,myUrl]
try:
conn = MySQLdb.connect(host='127.0.0.1',user='root',passwd='dp')
curs = conn.cursor()
conn.select_db('spider')
curs.execute("select id from sell_car_info where url='%s'" % myUrl)
getrows=curs.fetchall()
if not getrows:
curs.execute("select id from sell_car_info where telephone_num='%s'" % telephone)
get_telephones = curs.fetchall()
if not get_telephones:
is_seller = u'个人'.encode('utf-8')
else:
is_seller = u'商家'.encode('utf-8')
info_src = '58'
res = [title,title2,name,telephone,addrs,release_time,prices,is_seller,owner_readme,info_src,myUrl]
print "The data is not exists in the database..."
curs.execute("insert into sell_car_info(title,car_config,name,telephone_num,addrs,release_time,prices,is_seller,owner_readme,info_src,url) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",res)
else:
print 'The data is already in the database...'
conn.commit()
curs.close()
conn.close()
except MySQLdb.Error,e:
print "Error %d %s" % (e.args[0],e.args[1])
sys.exit(1)
#print "=================================================================================="
#get_qiugou_info('http://jing.58.com/adJump?adType=3&target=pZwY0jCfsLPdsWN3shPfUiq-0MPCULRWpANfnWTdrjN1rHm3n10knj73sMPCIAd_sjT8nHnzrjD1rjmzrHE1nHmOnHnOrjnYnjNLPHbLP1cYPikQrHc3nj9Ynzk_FhQfuvIGU-qd0vRzgv-b5HThuA-107qWmgw-5HcOFhwG0LKxUAqWmykqPiuWUA--UMwxIgP-0-qGujYhuyOYpgwOpyEqnWTdrjN1rHm3n10knjDhIgP-0h-b5HczP1N1rH0vrjmYnjTvFh-VuybqFhR8IA-YXgwO0ANqnau-UMwGIZ-xmv7YuHYhuyOYpgwOgvQfmv7_5iubpgPkgLwGUyNqnHEznHnOnjcYn1DdPaukULPGIA-fUWY3riuWUA-Wpv-b5HnLPAPhuj7WsHbOm1nVPjNOmBYOnvDOsyc3uWnvPHFhPjcYuiukmgF6UHYh0AQ6IAuf0hYqsHD&end=end')
#print "=================================================================================="
#get_qiugou_info('http://jump.zhineng.58.com/clk?target=mv7V0A-b5HThmvqfpv--5HTduZGmR-nkuyuuHDFHHyGaI1R2Ey0q5iubpgPGujd-mvRWPWR6PiYvuWNdsHwbPjDVmyn3naYzm1DvnhwhuyFbPWNh0vQfIjd_pgPYUARhIaubpgPYUHYQPjcQn1bQPjEdrjmQFMF-uWdCIZwkrBtf0LN8PH98mvqVsvRz0v6fIyPCuitkszu1ULRzmvNqniuvpgPdpyEqnauvpgPdpgTqnH9ksWDkri3QPHT8nWDQFhR8IZwk5HTh0A7zmyYqFh-1ug6YuyOb5yu6UZP-FMK_uWYVniuGUyRG5iudpyEqnW91njn1rHm1rjmdPW0hmvQopyEqrjEOPycYmWnVuyN3PzYYm1TzsycYPHTVuAEzuAR-nH76nj01FMKGujYzFhR8IA-b5HckPj9OPjnQnjDvP1TLFhR8IZwk5HTh0ZFGmvNqnWTh0hRbpgcqpZwY0jCfsLPdsWN3shPfUiq-0MPCULRWpANfnWTYrjbYn1DknHmLnjI3sMPCIAd_FhwG0LKf01YQFMPdmh-b5HDknWmQrjcdFhPzuy7YuyEqnHnvPWEkrHNvPj0OPWTkFMK60h7VpyEqFhwG0vP65HcOFMP-UAuWmHYzriubpgP_U1YdFMP-UAu_U1YdFMDqna3knjDYPHbYrH9kPjDOrHNYnHDh0A7MuyOdUHYQFhFGujYznau6mMEqnWTQPHTQnj6xugF1pAqdmv6-gvF60vRtnE')
#print "=================================================================================="
#get_qiugou_info('http://jing.58.com/adJump?adType=3&target=pZwY0jCfsLPdsWN3shPfUiq-0MPCULRWpANfnWT1rHT3n1cOrHc1rjI3sMPCIAd_sjT8nHnzrjD1rjmzrHE1nHmOnHnOrjnYnjNLPHbLP1cYPikQrHcLrjDQrak_FhQfuvIGU-qd0vRzgv-b5HThuA-107qWmgw-5HcOFhwG0LKxUAqWmykqPiuWUA--UMwxIgP-0-qGujYhuyOYpgwOpyEqnWT1rHT3n1cOrHc1rj0hIgP-0h-b5HDdnjmkP1EzP1nLrHcLFh-VuybqFhR8IA-YXgwO0ANqnau-UMwGIZ-xmv7YuHYhuyOYpgwOgvQfmv7_5iubpgPkgLwGUyNqnHEznHnOnjcYn1DdPaukULPGIA-fUWYOniuWUA-Wpv-b5yFhujmvuW--sH0OPAEVPAFbnBY3PAnvsHRWuH6hPjKBuy7WPzukmgF6UHYh0AQ6IAuf0hYqsHD&end=end')
#print "=================================================================================="
#get_qiugou_info('http://jing.58.com/adJump?adType=3&target=pZwY0jCfsLPdsWN3shPfUiq-0MPCULRWpANfnWTdPW9vPWmzPWcQPHw3sMPCIAd_sjT8nHnzrjD1rjmzrHE1nHmOnHnOrjnYnjNLPHbLP1cYPikQrHc1njTvrak_FhQfuvIGU-qd0vRzgv-b5HThuA-107qWmgw-5HcOFhwG0LKxUAqWmykqPiuWUA--UMwxIgP-0-qGujYhuyOYpgwOpyEqnWTdPW9vPWmzPWcQPHEhIgP-0h-b5Hc3nWbOrj9dn10QrHDkFh-VuybqFhR8IA-YXgwO0ANqnau-UMwGIZ-xmv7YuHYhuyOYpgwOgvQfmv7_5iubpgPkgLwGUyNqnHEznHnOnjcYn1DdPaukULPGIA-fUWYOnzuWUA-Wpv-b5yDvmyEzPyEdsynzPjTVPjbQnzY3nhRBsHDzuhcOPWbznHmYuaukmgF6UHYh0AQ6IAuf0hYqsHD&end=end')
#print "=================================================================================="
#get_qiugou_info('http://jump.zhineng.58.com/clk?target=mv7V0A-b5HThmvqfpv--5HTduZGmR-nkuyuuHDFHHyGaI1R2Ey0q5iubpgPGujYQnjnvrHT3niYzuhNYsHEQrjbVmW0vPBdBnAP6PhDznvR6Pjnh0vQfIjd_pgPYUARhIaubpgPYUHYQPjcQn1bQPHTdn1DvFMF-uWdCIZwkrBtf0LN8PH98mvqVsvRz0v6fIyPCuitQszu1ULRzmvNqniuvpgPdpyEqnauvpgPdpgTqnH9ksWDkri3QPHT8nWDQFhR8IZwk5HTh0A7zmyYqFh-1ug6YuyOb5yu6UZP-FMK_uWYVniuGUyRG5iudpyEqnWmkrHDvnj0dP1DLnHbhmvQopyEqn1R6uhmvnHbVPhDvmBYYuhEdsycQmyNVnAuWPHbQrjEvPjK-FMKGujYzFhR8IA-b5HckPjnQnHbvrjcdrHbkFhR8IZwk5HTh0ZFGmvNqPHmh0hRbpgcqpZwY0jCfsLPdsWN3shPfUiq-0MPCULRWpANfnWTYn1DQrHm3nWNOrHK3sMPCIAd_FhwG0LKf01YQFMPdmh-b5HDknWm1PH0kFhPzuy7YuyEqnHnvPWE1nH0YnHTYnHmkFMK60h7VpyEqFhwG0vP65HcOFMP-UAuWmHYzriubpgP_U1YdFMP-UAu_U1YdFMDqna3knjDYPHbYrH9kPjDOrHNYnHDh0A7MuyOdUHYQFhFGujYdPBu6mMEqnWTQPHTQnj6xugF1pAqdmv6-gvF60vRtnE')
#print "=================================================================================="
#get_qiugou_info('http://jing.58.com/adJump?adType=3&target=pZwY0jCfsLPdsWN3shPfUiq-0MPCULRWpANfnWTYrHnkPHmkP19znHu3sMPCIAd_sjT8rHbkn101nWnkPHNknWnQP1NdrHmQnjnkPjTzrjEvPHn_nHbzPWDkrjN_sau_UvIMpyOxIgP-0-qGujYkFhwG0LKxmv7YuHYzriubpgPkgvQfmv7_5HNhmvQGuyOYgLR1ugFxpyEqFhR8IA-YXy-b5HckPjb1njNvnj03nWDvFMR1ugFGujYzn1TYnjE1n1DYnWTznzuGUyRG5iu-UMwGIZ-YXgK-5HThuyOYpgwOgvP6IANqFhR8IA-YXRq_UvP6UjYhuA-107qYpyd-5HDYnWD1rHDdnH0YPj9h0Aq1pgwGUv3qniuWUA-Wpv-b5HIhuWndmHcLsyw-m10VPjbdmzYOnHIWsyP6uj7-uHEQnj6BmiukmgF6UHYh0AQ6IAuf0hYqsHD&end=end')
#print "=================================================================================="
#get_qiugou_info('http://jump.zhineng.58.com/clk?target=mv7V0A-b5HThmvqfpv--5yOdUAkhuA-1pyEqnyu6P103rHcVmvDQuiYYnAEOsHbQPAcVPyNvrADYnH93nAPbFMP_ULEqUA-1IAQ-uMEhuA-1IAYqnHEznHEvPj03nWc3PzuzuymqpZwY0jCfsvOJsWN3shPfUiq-0MPCULRWpANf0A3Qszu1ULRzmvNqniuvpgPdpyEqnauvpgPdpgTqnHDYsWcznB3znHc8nHbOFhR8IZwk5HTh0A7zmyYqFh-1ug6YuyOb5yu6UZP-FMK_uWYVniuGUyRG5iudpyEqnW0dn1T3n1Ekrj0OPjchmvQopyEqmhmQP1uWrH0VuWc3raYYPH-bsycYmhmVnjnzrjcduHbYmycQFMKGujYzFhR8IA-b5HcknWbdnjnYPWb1PHTvFhR8IZwk5HTh0ZFGmvNqrHDh0hRbpgcqpZwY0jCfsvOJsWN3shPfUiq-0MPCULRWpANfnWTzrHNkn1EvrHndnju3sMPCIAd_FhwG0LKf01YQFMPdmh-b5HDknW0LnjcYFhPzuy7YuyEqnHnvP1cYn1n3rH93nHnvFMK60h7VpyEqFhwG0vP65HcOFMP-UAuWmHYzriubpgP_U1YQP1ch0vR_uhQf5HDLnBuQ5HDh0A7MuyOdUHYQFhFGujYOniu6mMEqNE')
def main():
url="http://nj.58.com/ershouche/0/pn"
localfile="Href.txt"
for i in range(1,5):
myUrl = url + str(i)
        t = threading.Thread(target=grabHref, args=(myUrl, localfile))
t.start()
if __name__=="__main__":
main()
| [
"[email protected]"
] | |
e6cb4be5d1e990d0282b7cc658324aaed21af371 | 6e8f2e28479566dbaa338300b2d61f784ff83f97 | /.history/code/run_20210414104418.py | 19fc6bf93bf6fcf7f02737538f9ee64f112f19a1 | [] | no_license | eeng5/CV-final-project | 55a7d736f75602858233ebc380c4e1d67ab2b866 | 580e28819560b86f6974959efb1d31ef138198fc | refs/heads/main | 2023-04-09T21:28:21.531293 | 2021-04-21T19:57:22 | 2021-04-21T19:57:22 | 352,703,734 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,308 | py | """
Project 4 - CNNs
CS1430 - Computer Vision
Brown University
"""
import os
import sys
import argparse
import re
from datetime import datetime
import tensorflow as tf
from tensorflow import keras
import hyperparameters as hp
from models import SimpleModel, ComplexModel
from preprocess import Datasets
from skimage.transform import resize
from tensorboard_utils import \
ImageLabelingLogger, ConfusionMatrixLogger, CustomModelSaver
from skimage.io import imread
from lime import lime_image
from skimage.segmentation import mark_boundaries
from matplotlib import pyplot as plt
import numpy as np
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
def parse_args():
""" Perform command-line argument parsing. """
parser = argparse.ArgumentParser(
description="Let's train some neural nets!",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument(
'--task',
required=True,
choices=['1', '3'], # task 1 is simpleModel and task 3 is complexModel
help='''Which task of the assignment to run -
training the SimpleModel (1), or training the ComplexModel(3).''')
parser.add_argument(
'--data',
default='..'+os.sep+'data'+os.sep,
help='Location where the dataset is stored.')
parser.add_argument(
'--load-checkpoint',
default=None,
help='''Path to model checkpoint file (should end with the
extension .h5). Checkpoints are automatically saved when you
train your model. If you want to continue training from where
you left off, this is how you would load your weights.''')
parser.add_argument(
'--confusion',
action='store_true',
help='''Log a confusion matrix at the end of each
epoch (viewable in Tensorboard). This is turned off
by default as it takes a little bit of time to complete.''')
parser.add_argument(
'--evaluate',
action='store_true',
help='''Skips training and evaluates on the test set once.
You can use this to test an already trained model by loading
its checkpoint.''')
parser.add_argument(
'--lime-image',
default='test/Bedroom/image_0003.jpg',
help='''Name of an image in the dataset to use for LIME evaluation.''')
parser.add_argument(
'--aug',
default='1',
help='''Either 1 for less augmented data and 3 for more augmented data.''')
return parser.parse_args()
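# Example invocations (paths and checkpoint names below are only illustrative):
#   python run.py --task 1 --data ../data/
#   python run.py --task 3 --load-checkpoint checkpoints/complex_model/043021-120000/weights.e005-acc0.42.h5 --evaluate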
def LIME_explainer(model, path, preprocess_fn):
"""
This function takes in a trained model and a path to an image and outputs 5
visual explanations using the LIME model
"""
def image_and_mask(title, positive_only=True, num_features=5,
hide_rest=True):
temp, mask = explanation.get_image_and_mask(
explanation.top_labels[0], positive_only=positive_only,
num_features=num_features, hide_rest=hide_rest)
plt.imshow(mark_boundaries(temp / 2 + 0.5, mask))
plt.title(title)
plt.show()
image = imread(path)
if len(image.shape) == 2:
image = np.stack([image, image, image], axis=-1)
image = preprocess_fn(image)
image = resize(image, (hp.img_size, hp.img_size, 3))
explainer = lime_image.LimeImageExplainer()
explanation = explainer.explain_instance(
image.astype('double'), model.predict, top_labels=5, hide_color=0,
num_samples=1000)
# The top 5 superpixels that are most positive towards the class with the
# rest of the image hidden
image_and_mask("Top 5 superpixels", positive_only=True, num_features=5,
hide_rest=True)
# The top 5 superpixels with the rest of the image present
image_and_mask("Top 5 with the rest of the image present",
positive_only=True, num_features=5, hide_rest=False)
# The 'pros and cons' (pros in green, cons in red)
image_and_mask("Pros(green) and Cons(red)",
positive_only=False, num_features=10, hide_rest=False)
# Select the same class explained on the figures above.
ind = explanation.top_labels[0]
# Map each explanation weight to the corresponding superpixel
dict_heatmap = dict(explanation.local_exp[ind])
heatmap = np.vectorize(dict_heatmap.get)(explanation.segments)
plt.imshow(heatmap, cmap='RdBu', vmin=-heatmap.max(), vmax=heatmap.max())
plt.colorbar()
plt.title("Map each explanation weight to the corresponding superpixel")
plt.show()
def train(model, datasets, checkpoint_path, logs_path, init_epoch):
""" Training routine. """
# Keras callbacks for training
callback_list = [
tf.keras.callbacks.TensorBoard(
log_dir=logs_path,
update_freq='batch',
profile_batch=0),
ImageLabelingLogger(logs_path, datasets),
CustomModelSaver(checkpoint_path, ARGS.task, hp.max_num_weights)
]
# Include confusion logger in callbacks if flag set
if ARGS.confusion:
callback_list.append(ConfusionMatrixLogger(logs_path, datasets))
# Begin training
model.fit(
x=datasets.train_data,
validation_data=datasets.test_data,
epochs=hp.num_epochs,
batch_size=None,
callbacks=callback_list,
initial_epoch=init_epoch,
)
def test(model, test_data):
""" Testing routine. """
# Run model on test set
model.evaluate(
x=test_data,
verbose=1,
)
def main():
""" Main function. """
time_now = datetime.now()
timestamp = time_now.strftime("%m%d%y-%H%M%S")
init_epoch = 0
# If loading from a checkpoint, the loaded checkpoint's directory
# will be used for future checkpoints
if ARGS.load_checkpoint is not None:
ARGS.load_checkpoint = os.path.abspath(ARGS.load_checkpoint)
# Get timestamp and epoch from filename
regex = r"(?:.+)(?:\.e)(\d+)(?:.+)(?:.h5)"
init_epoch = int(re.match(regex, ARGS.load_checkpoint).group(1)) + 1
timestamp = os.path.basename(os.path.dirname(ARGS.load_checkpoint))
# If paths provided by program arguments are accurate, then this will
# ensure they are used. If not, these directories/files will be
# set relative to the directory of run.py
if os.path.exists(ARGS.data):
ARGS.data = os.path.abspath(ARGS.data)
# Run script from location of run.py
os.chdir(sys.path[0])
datasets = Datasets(ARGS.data, ARGS.task)
print("Data set up done")
if ARGS.task == '1':
model = SimpleModel()
model(tf.keras.Input(shape=(hp.img_size, hp.img_size, 3)))
checkpoint_path = "checkpoints" + os.sep + \
"simple_model" + os.sep + timestamp + os.sep
logs_path = "logs" + os.sep + "simple_model" + \
os.sep + timestamp + os.sep
# Print summary of model
model.summary()
else:
model = ComplexModel()
model(tf.keras.Input(shape=(hp.img_size, hp.img_size, 3)))
checkpoint_path = "checkpoints" + os.sep + \
"complex_model" + os.sep + timestamp + os.sep
logs_path = "logs" + os.sep + "complex_model" + \
os.sep + timestamp + os.sep
# Print summary of model
model.summary()
# Load checkpoints
if ARGS.load_checkpoint is not None:
model.load_weights(ARGS.load_checkpoint, by_name=False)
# Make checkpoint directory if needed
if not ARGS.evaluate and not os.path.exists(checkpoint_path):
os.makedirs(checkpoint_path)
# Compile model graph
model.compile(
optimizer=model.optimizer,
loss=model.loss_fn,
metrics=["sparse_categorical_accuracy"])
if ARGS.evaluate:
test(model, datasets.test_data)
# TODO: change the image path to be the image of your choice by changing
# the lime-image flag when calling run.py to investigate
# i.e. python run.py --evaluate --lime-image test/Bedroom/image_003.jpg
path = ARGS.data + os.sep + ARGS.lime_image
LIME_explainer(model, path, datasets.preprocess_fn)
else:
train(model, datasets, checkpoint_path, logs_path, init_epoch)
# Make arguments global
ARGS = parse_args()
main()
| [
"[email protected]"
] | |
a86ce8417607db70a904b3ff9754c6950ed06c89 | faabe34af6297530617395bcc6811350765da847 | /platforms/leetcode/NthDigit.py | 6e67cbfe78c483aff5ee596ef9b57abc3fd7bb68 | [] | no_license | pqnguyen/CompetitiveProgramming | 44a542aea299bd553dd022a9e737e087285b8b6d | 27330e7ff79c4ac883d7e1fcdf2f0d30939c3f78 | refs/heads/master | 2021-07-21T12:15:47.366599 | 2021-06-27T14:58:48 | 2021-06-27T14:58:48 | 132,837,831 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 368 | py | class Solution:
def findNthDigit(self, n: int) -> int:
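        # Digits are grouped by the length of the numbers producing them: the 9
        # one-digit numbers contribute 9 digits, the 90 two-digit numbers 180,
        # the 900 three-digit numbers 2700, and so on. Skip whole groups until n
        # falls inside one, then locate the exact number (`start`) and the digit.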
length, start, count = 1, 1, 9
while n > length * count:
n -= length * count
length += 1
count *= 10
start *= 10
start += (n - 1) // length
return int(str(start)[(n - 1) % length])
res = Solution().findNthDigit(15)
print(res)
| [
"[email protected]"
] | |
00a759bd71d11f942a7195e2ce3a0f9237ad4f89 | eb91f8764a466c20d3ed53f0df43e2e91fc095ea | /HFhtml/migrations/0007_auto_20210106_2045.py | efdaed8cb955a4052bb217f5edcee04b059ca0bf | [] | no_license | blackw00d/HappyFamily | 61733c46a2f2c26919a532e5c07ea143fabd14a9 | 6b0d99a498c7a92170de0bbdb18907d6d951da31 | refs/heads/master | 2023-03-08T21:20:41.481293 | 2021-02-27T04:13:39 | 2021-02-27T04:13:39 | 291,481,548 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 524 | py | # Generated by Django 3.0.8 on 2021-01-06 15:45
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('HFhtml', '0006_auto_20210106_1951'),
]
operations = [
migrations.AlterField(
model_name='orders',
name='pay',
field=models.TextField(choices=[('Онлайн', 'Онлайн'), ('При получении', 'При получении')], default='Онлайн', verbose_name='Оплата'),
),
]
| [
"email"
] | email |
5497982226fc1fd6f795a09becbbfa23349d3f38 | 7000895fad6f4c23084122ef27b3292d5e57df9f | /tests/core/test_ChainManager.py | b9d94e4c26c5c584e44b7f9d86c8c5184b4ec985 | [
"MIT"
] | permissive | jack3343/xrd-core | 1302cefe2a231895a53fcef73e558cdbc1196884 | 48a6d890d62485c627060b017eadf85602268caf | refs/heads/master | 2022-12-15T07:36:16.618507 | 2020-08-27T09:21:36 | 2020-08-27T09:21:36 | 290,652,706 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 258,142 | py | # coding=utf-8
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
from unittest import TestCase
from os import urandom
from mock import Mock, patch, PropertyMock, MagicMock
from pyxrdlib.pyxrdlib import hstr2bin
from pyqryptonight.pyqryptonight import StringToUInt256
from xrd.generated import xrdstateinfo_pb2
from xrd.core import config
from xrd.crypto.xmss import XMSS
from xrd.crypto.QRandomX import QRandomX
from xrd.core.OptimizedAddressState import OptimizedAddressState
from xrd.core.MultiSigAddressState import MultiSigAddressState
from xrd.core.Block import Block
from xrd.core.BlockMetadata import BlockMetadata
from xrd.core.ChainManager import ChainManager
from xrd.core.DifficultyTracker import DifficultyTracker
from xrd.core.GenesisBlock import GenesisBlock
from xrd.core.PaginatedBitfield import PaginatedBitfield
from xrd.core.State import State
from xrd.core.txs.SlaveTransaction import SlaveTransaction
from xrd.core.txs.TransferTransaction import TransferTransaction
from xrd.core.txs.LatticeTransaction import LatticeTransaction
from xrd.core.txs.multisig.MultiSigCreate import MultiSigCreate
from xrd.core.txs.multisig.MultiSigSpend import MultiSigSpend
from xrd.core.txs.multisig.MultiSigVote import MultiSigVote
from tests.misc.helper import get_alice_xmss, get_bob_xmss, set_default_balance_size, set_hard_fork_block_number, \
set_xrd_dir, replacement_getTime, get_some_address, gen_blocks
alice = get_alice_xmss()
bob = get_bob_xmss()
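# Helper: convert a difficulty given as a decimal string into its 256-bit form
# and ask the DifficultyTracker for the matching proof-of-work target.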
def ask_difficulty_tracker(difficulty: str, dev_config: config.DevConfig):
dt = DifficultyTracker()
tmp_difficulty = StringToUInt256(difficulty)
tmp_target = dt.get_target(tmp_difficulty, dev_config)
return tmp_difficulty, tmp_target
def create_block(block_number, previous_block, miner_address):
return Block.create(dev_config=config.dev,
block_number=block_number,
prev_headerhash=previous_block.headerhash,
prev_timestamp=previous_block.timestamp,
transactions=[],
miner_address=miner_address,
seed_height=None,
seed_hash=None)
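# Lightweight Mock stand-in for a Block, so fork/branch bookkeeping can be
# exercised in tests without any real mining, signing or validation work.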
def create_m_block(block_number, previous_block, miner_address):
mock_block = Mock(
autospec=Block,
name="Mock Block {}".format(block_number),
block_number=block_number,
prev_headerhash=previous_block.headerhash,
prev_block_timestamp=previous_block.timestamp,
transactions=[],
miner_address=miner_address,
headerhash="Mock Block {} {}".format(block_number, urandom(6)).encode(),
timestamp=replacement_getTime()
)
mock_block.serialize.return_value = "Mock Block {}".format(block_number).encode()
return mock_block
class TestChainManagerReal(TestCase):
def setUp(self):
with set_xrd_dir('no_data'):
self.state = State()
self.state.get_measurement = Mock(return_value=10000000)
self._qn = QRandomX()
try:
del GenesisBlock.instance # Removing Singleton instance
except Exception: # noqa
pass
self.genesis_block = GenesisBlock()
self.chain_manager = ChainManager(self.state)
self.chain_manager._difficulty_tracker = Mock()
def test_load(self):
# load() has the following tasks:
# Write Genesis Block into State immediately
# Register block_number <-> blockhash mapping
# Calculate difficulty Metadata for Genesis Block
# Generate AddressStates from Genesis Block balances
# Apply Genesis Block's transactions to the state
self.chain_manager.load(self.genesis_block)
block = Block.get_block(self.state, GenesisBlock().headerhash)
self.assertIsNotNone(block)
def test_load_twice(self):
self.chain_manager.load(self.genesis_block)
# If we call load() a second time, it should check to see if we are forked and initiate recovery.
# First we pretend we are not forked.
self.state.get_fork_state = Mock(return_value=None)
self.chain_manager._fork_recovery = Mock(name='mock _fork_recovery()')
self.chain_manager.load(self.genesis_block)
self.chain_manager._fork_recovery.assert_not_called()
# If we pretend we are forked, it should call _fork_recovery().
m_fork_state = Mock(autospec=xrdstateinfo_pb2.ForkState, initiator_headerhash=self.genesis_block.headerhash)
self.state.get_fork_state.return_value = m_fork_state
self.chain_manager.load(self.genesis_block)
self.chain_manager._fork_recovery.assert_called_with(self.genesis_block, m_fork_state)
@patch('xrd.core.misc.ntp.getTime')
def test_simple_add_block(self, time_mock):
# Simply test that adding a block on top of the genesis block works.
with patch.object(DifficultyTracker, 'get', return_value=ask_difficulty_tracker('2', config.dev)):
self.chain_manager.load(self.genesis_block)
time_mock.return_value = 1615270948 # Very high to get an easy difficulty
block_1 = Block.create(dev_config=config.dev,
block_number=1,
prev_headerhash=self.genesis_block.headerhash,
prev_timestamp=self.genesis_block.timestamp,
transactions=[],
miner_address=alice.address,
seed_hash=None,
seed_height=None)
block_1.set_nonces(config.dev, 201, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# from xrd.core.PoWValidator import PoWValidator
# while not PoWValidator().validate_mining_nonce(self.state, block_1.blockheader, False):
# block_1.set_nonces(block_1.mining_nonce + 1)
# print(block_1.mining_nonce)
self.assertTrue(block_1.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_1)
self.assertTrue(result)
self.assertEqual(self.chain_manager.last_block, block_1)
@set_default_balance_size()
@patch('xrd.core.misc.ntp.getTime')
def test_multi_output_transaction_add_block(self, time_mock):
# Test that adding block with a multi-output Transaction updates everybody's balances correctly.
with patch.object(DifficultyTracker, 'get', return_value=ask_difficulty_tracker('2', config.dev)):
self.chain_manager.load(self.genesis_block)
extended_seed = "010300cebc4e25553afa0aab899f7838e59e18a48852fa9dfd5" \
"ae78278c371902aa9e6e9c1fa8a196d2dba0cbfd2f2d212d16c"
random_xmss = XMSS.from_extended_seed(hstr2bin(extended_seed))
transfer_transaction = TransferTransaction.create(addrs_to=[alice.address, random_xmss.address],
amounts=[40 * int(config.dev.shor_per_quanta),
59 * int(config.dev.shor_per_quanta)],
message_data=None,
fee=1 * config.dev.shor_per_quanta,
xmss_pk=bob.pk)
transfer_transaction._data.nonce = 1
transfer_transaction.sign(bob)
time_mock.return_value = 1615270948 # Very high to get an easy difficulty
block_1 = Block.create(dev_config=config.dev,
block_number=1,
prev_headerhash=self.genesis_block.headerhash,
prev_timestamp=self.genesis_block.timestamp,
transactions=[transfer_transaction],
miner_address=alice.address,
seed_height=None,
seed_hash=None)
block_1.set_nonces(config.dev, 129, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not PoWValidator().validate_mining_nonce(self.state, block_1.blockheader, False):
# block_1.set_nonces(block_1.mining_nonce + 1)
# print(block_1.mining_nonce)
self.assertTrue(block_1.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_1)
self.assertTrue(result)
self.assertEqual(self.chain_manager.last_block, block_1)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
self.assertEqual(bob_addr_state.balance, 0)
self.assertEqual(alice_addr_state.balance,
140 * int(config.dev.shor_per_quanta) + block_1.block_reward + block_1.fee_reward)
self.assertEqual(random_addr_state.balance, 159 * int(config.dev.shor_per_quanta))
@patch("xrd.core.DifficultyTracker.DifficultyTracker.get")
def test_add_block(self, mock_difficulty_tracker_get):
"""
Add block_1 on genesis block (that registers Bob as Alice's slave)
Add a competing fork_block on genesis block (without the SlaveTransaction)
Add block_2 on fork_block (without the SlaveTransaction)
Bob should be free from slavery now.
"""
with patch.object(DifficultyTracker, 'get', return_value=ask_difficulty_tracker('2', config.dev)):
self.chain_manager.load(self.genesis_block)
# Add block_1 on genesis block.
slave_tx = SlaveTransaction.create(slave_pks=[bob.pk],
access_types=[0],
fee=0,
xmss_pk=alice.pk)
slave_tx.sign(alice)
slave_tx._data.nonce = 1
self.assertTrue(slave_tx.validate())
with patch('xrd.core.misc.ntp.getTime') as time_mock:
time_mock.return_value = 1615270948 # Very high to get an easy difficulty
block_1 = Block.create(dev_config=config.dev,
block_number=1,
prev_headerhash=self.genesis_block.headerhash,
prev_timestamp=self.genesis_block.timestamp,
transactions=[slave_tx],
miner_address=alice.address,
seed_height=None,
seed_hash=None)
block_1.set_nonces(config.dev, 2, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# from xrd.core.PoWValidator import PoWValidator
# while not PoWValidator().validate_mining_nonce(self.state, block_1.blockheader, False):
# block_1.set_nonces(block_1.mining_nonce + 1)
# print(block_1.mining_nonce)
self.assertTrue(block_1.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_1)
self.assertTrue(result)
self.assertEqual(self.chain_manager.last_block, block_1)
# Yes, Bob is Alice's slave.
alice_state = self.chain_manager.get_optimized_address_state(alice.address)
self.assertEqual(alice_state.slaves_count(), 1)
bob_access_type = self.chain_manager.get_slave_pk_access_type(alice.address, bob.pk)
self.assertEqual(bob_access_type.access_type, 0)
# Add fork block on genesis block
with patch('xrd.core.misc.ntp.getTime') as time_mock:
time_mock.return_value = 1715270948 # Very high to get an easy difficulty
fork_block = Block.create(dev_config=config.dev,
block_number=1,
prev_headerhash=self.genesis_block.headerhash,
prev_timestamp=self.genesis_block.timestamp,
transactions=[],
miner_address=bob.address,
seed_height=None,
seed_hash=None)
fork_block.set_nonces(config.dev, 4, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# from xrd.core.PoWValidator import PoWValidator
# while not PoWValidator().validate_mining_nonce(self.state, fork_block.blockheader, False):
# fork_block.set_nonces(fork_block.mining_nonce + 1)
# print(fork_block.mining_nonce)
self.assertTrue(fork_block.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(fork_block)
self.assertTrue(result)
self.assertEqual(self.chain_manager.last_block, block_1)
fork_block = Block.get_block(self.state, fork_block.headerhash)
self.assertIsNotNone(fork_block)
# Add block_2 on fork_block.
with patch('xrd.core.misc.ntp.getTime') as time_mock:
time_mock.return_value = 1815270948 # Very high to get an easy difficulty
block_2 = fork_block.create(dev_config=config.dev,
block_number=2,
prev_headerhash=fork_block.headerhash,
prev_timestamp=fork_block.timestamp,
transactions=[],
miner_address=bob.address,
seed_height=None,
seed_hash=None)
block_2.set_nonces(config.dev, 1, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# from xrd.core.PoWValidator import PoWValidator
# while not PoWValidator().validate_mining_nonce(state, block_2.blockheader, False):
# block_2.set_nonces(block_2.mining_nonce + 1, 0)
# print(block_2.mining_nonce)
self.assertTrue(block_2.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_2)
self.assertTrue(result)
self.assertEqual(self.chain_manager.last_block.block_number, block_2.block_number)
self.assertEqual(self.chain_manager.last_block.serialize(), block_2.serialize())
# Now we are on the forked chain, Bob is no longer Alice's slave.
bob_access_type = self.chain_manager.get_slave_pk_access_type(alice.address, bob.pk)
self.assertIsNone(bob_access_type)
@set_default_balance_size()
@set_hard_fork_block_number()
@patch('xrd.core.misc.ntp.getTime')
def test_add_block2(self, time_mock):
"""
Features Tested
- Multi Sig Create, Spend & Vote Txn
- Vote on a multi sig spend
Expectation
        - Multi Sig Spend transaction must be executed as it has received sufficient votes and reached the threshold.
:param time_mock:
:return:
"""
with patch.object(DifficultyTracker, 'get', return_value=ask_difficulty_tracker('2', config.dev)):
self.chain_manager.load(self.genesis_block)
extended_seed = "010300cebc4e25553afa0aab899f7838e59e18a48852fa9dfd5" \
"ae78278c371902aa9e6e9c1fa8a196d2dba0cbfd2f2d212d16c"
random_xmss = XMSS.from_extended_seed(hstr2bin(extended_seed))
alice_xmss = get_alice_xmss(4)
bob_xmss = get_bob_xmss(4)
multi_sig_create = MultiSigCreate.create(signatories=[alice_xmss.address,
bob_xmss.address],
weights=[4, 6],
threshold=5,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_create.sign(alice_xmss)
multi_sig_create.pbdata.nonce = 1
multi_sig_address = MultiSigAddressState.generate_multi_sig_address(multi_sig_create.txhash)
transfer_transaction = TransferTransaction.create(addrs_to=[multi_sig_address],
amounts=[40 * int(config.dev.shor_per_quanta)],
message_data=None,
fee=1 * config.dev.shor_per_quanta,
xmss_pk=bob_xmss.pk)
transfer_transaction._data.nonce = 1
transfer_transaction.sign(bob_xmss)
self.assertTrue(transfer_transaction.validate_or_raise(True))
multi_sig_spend = MultiSigSpend.create(multi_sig_address=multi_sig_address,
addrs_to=[random_xmss.address, alice_xmss.address],
amounts=[10, 5],
expiry_block_number=100,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_spend.sign(alice_xmss)
multi_sig_spend.pbdata.nonce = 2
multi_sig_vote1 = MultiSigVote.create(shared_key=multi_sig_spend.txhash,
unvote=False,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_vote1.sign(alice_xmss)
multi_sig_vote1.pbdata.nonce = 3
multi_sig_vote2 = MultiSigVote.create(shared_key=multi_sig_spend.txhash,
unvote=False,
fee=0,
xmss_pk=bob_xmss.pk)
multi_sig_vote2.sign(bob_xmss)
multi_sig_vote2.pbdata.nonce = 2
time_mock.return_value = 1615270948 # Very high to get an easy difficulty
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(1))
block_1 = Block.create(dev_config=config.dev,
block_number=1,
prev_headerhash=self.genesis_block.headerhash,
prev_timestamp=self.genesis_block.timestamp,
transactions=[multi_sig_create],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_1.set_nonces(config.dev, 129, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# from xrd.core.PoWValidator import PoWValidator
# while not PoWValidator().validate_mining_nonce(self.state, block_1.blockheader, False):
# block_1.set_nonces(block_1.mining_nonce + 1)
# print(block_1.mining_nonce)
self.assertTrue(block_1.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_1)
self.assertTrue(result)
self.assertEqual(self.chain_manager.last_block, block_1)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(multi_sig_address_state.transaction_hash_count(), 1)
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(2))
block_2 = Block.create(dev_config=config.dev,
block_number=2,
prev_headerhash=block_1.headerhash,
prev_timestamp=block_1.timestamp,
transactions=[transfer_transaction],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_2.set_nonces(config.dev, 129, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# from xrd.core.PoWValidator import PoWValidator
# while not PoWValidator().validate_mining_nonce(self.state, block_2.blockheader, False):
# block_2.set_nonces(block_2.mining_nonce + 1)
# print(block_2.mining_nonce)
self.assertTrue(block_2.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_2)
self.assertTrue(result)
self.assertEqual(self.chain_manager.last_block, block_2)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(multi_sig_address_state.transaction_hash_count(), 2)
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(3))
block_3 = Block.create(dev_config=config.dev,
block_number=3,
prev_headerhash=block_2.headerhash,
prev_timestamp=block_2.timestamp,
transactions=[multi_sig_spend],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_3.set_nonces(config.dev, 0, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_3.blockheader, config.dev, False):
# block_3.set_nonces(config.dev, block_3.mining_nonce + 1)
# print(block_3.mining_nonce)
self.assertTrue(block_3.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_3)
self.assertTrue(result)
self.assertEqual(self.chain_manager.last_block, block_3)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(multi_sig_address_state.transaction_hash_count(), 3)
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(4))
block_4 = Block.create(dev_config=config.dev,
block_number=4,
prev_headerhash=block_3.headerhash,
prev_timestamp=block_3.timestamp,
transactions=[multi_sig_vote1],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_4.set_nonces(config.dev, 0, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_4.blockheader, config.dev, False):
# block_4.set_nonces(config.dev, block_4.mining_nonce + 1)
# print(block_4.mining_nonce)
self.assertTrue(block_4.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_4)
self.assertTrue(result)
self.assertEqual(self.chain_manager.last_block, block_4)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(multi_sig_address_state.transaction_hash_count(), 3)
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(5))
block_5 = Block.create(dev_config=config.dev,
block_number=5,
prev_headerhash=block_4.headerhash,
prev_timestamp=block_4.timestamp,
transactions=[multi_sig_vote2],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_5.set_nonces(config.dev, 2, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_5.blockheader, config.dev, False):
# block_5.set_nonces(config.dev, block_5.mining_nonce + 1)
# print(block_5.mining_nonce)
self.assertTrue(block_5.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_5)
self.assertTrue(result)
self.assertEqual(self.chain_manager.last_block, block_5)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(bob_addr_state.balance, 59 * int(config.dev.shor_per_quanta))
self.assertEqual(alice_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
block_1.block_reward + block_1.fee_reward +
block_2.block_reward + block_2.fee_reward +
block_3.block_reward + block_3.fee_reward +
block_4.block_reward + block_4.fee_reward +
block_5.block_reward + block_5.fee_reward +
5)
self.assertEqual(random_addr_state.balance, 100 * int(config.dev.shor_per_quanta) + 10)
self.assertEqual(multi_sig_address_state.balance, 40 * int(config.dev.shor_per_quanta) - 15)
self.assertEqual(multi_sig_address_state.transaction_hash_count(), 3)
@set_default_balance_size()
@set_hard_fork_block_number()
@patch('xrd.core.misc.ntp.getTime')
def test_add_block3(self, time_mock):
"""
Features Tested
- Multi Sig Create, Spend & Vote Txn
- Vote on an expired multi sig spend
Expectation
- Block including the vote for multi sig vote txn must be rejected due to failure in validation.
- Multi Sig Spend transaction must not have executed as it expired without sufficient vote reaching threshold.
:param time_mock:
:return:
"""
with patch.object(DifficultyTracker, 'get', return_value=ask_difficulty_tracker('2', config.dev)):
self.chain_manager.load(self.genesis_block)
extended_seed = "010300cebc4e25553afa0aab899f7838e59e18a48852fa9dfd5" \
"ae78278c371902aa9e6e9c1fa8a196d2dba0cbfd2f2d212d16c"
random_xmss = XMSS.from_extended_seed(hstr2bin(extended_seed))
alice_xmss = get_alice_xmss(4)
bob_xmss = get_bob_xmss(4)
multi_sig_create = MultiSigCreate.create(signatories=[alice_xmss.address,
bob_xmss.address],
weights=[4, 6],
threshold=5,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_create.sign(alice_xmss)
multi_sig_create.pbdata.nonce = 1
multi_sig_address = MultiSigAddressState.generate_multi_sig_address(multi_sig_create.txhash)
transfer_transaction = TransferTransaction.create(addrs_to=[multi_sig_address],
amounts=[40 * int(config.dev.shor_per_quanta)],
message_data=None,
fee=1 * config.dev.shor_per_quanta,
xmss_pk=bob_xmss.pk)
transfer_transaction._data.nonce = 1
transfer_transaction.sign(bob_xmss)
self.assertTrue(transfer_transaction.validate_or_raise(True))
multi_sig_spend = MultiSigSpend.create(multi_sig_address=multi_sig_address,
addrs_to=[random_xmss.address, alice_xmss.address],
amounts=[10, 5],
expiry_block_number=4,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_spend.sign(alice_xmss)
multi_sig_spend.pbdata.nonce = 2
multi_sig_vote1 = MultiSigVote.create(shared_key=multi_sig_spend.txhash,
unvote=False,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_vote1.sign(alice_xmss)
multi_sig_vote1.pbdata.nonce = 3
multi_sig_vote2 = MultiSigVote.create(shared_key=multi_sig_spend.txhash,
unvote=False,
fee=0,
xmss_pk=bob_xmss.pk)
multi_sig_vote2.sign(bob_xmss)
multi_sig_vote2.pbdata.nonce = 2
time_mock.return_value = 1615270948 # Very high to get an easy difficulty
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(1))
block_1 = Block.create(dev_config=config.dev,
block_number=1,
prev_headerhash=self.genesis_block.headerhash,
prev_timestamp=self.genesis_block.timestamp,
transactions=[multi_sig_create],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_1.set_nonces(config.dev, 129, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not PoWValidator().validate_mining_nonce(self.state, block_1.blockheader, False):
# block_1.set_nonces(block_1.mining_nonce + 1)
# print(block_1.mining_nonce)
self.assertTrue(block_1.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_1)
self.assertTrue(result)
self.assertEqual(self.chain_manager.last_block, block_1)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(multi_sig_address_state.transaction_hash_count(), 1)
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(2))
block_2 = Block.create(dev_config=config.dev,
block_number=2,
prev_headerhash=block_1.headerhash,
prev_timestamp=block_1.timestamp,
transactions=[transfer_transaction],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_2.set_nonces(config.dev, 129, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# from xrd.core.PoWValidator import PoWValidator
# while not PoWValidator().validate_mining_nonce(self.state, block_2.blockheader, False):
# block_2.set_nonces(block_2.mining_nonce + 1)
# print(block_2.mining_nonce)
self.assertTrue(block_2.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_2)
self.assertTrue(result)
self.assertEqual(self.chain_manager.last_block, block_2)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(multi_sig_address_state.transaction_hash_count(), 2)
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(3))
block_3 = Block.create(dev_config=config.dev,
block_number=3,
prev_headerhash=block_2.headerhash,
prev_timestamp=block_2.timestamp,
transactions=[multi_sig_spend],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_3.set_nonces(config.dev, 129, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# from xrd.core.PoWValidator import PoWValidator
# while not PoWValidator().validate_mining_nonce(self.state, block_3.blockheader, False):
# block_3.set_nonces(block_3.mining_nonce + 1)
# print(block_3.mining_nonce)
self.assertTrue(block_3.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_3)
self.assertTrue(result)
self.assertEqual(self.chain_manager.last_block, block_3)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(multi_sig_address_state.transaction_hash_count(), 3)
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(4))
block_4 = Block.create(dev_config=config.dev,
block_number=4,
prev_headerhash=block_3.headerhash,
prev_timestamp=block_3.timestamp,
transactions=[multi_sig_vote1],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_4.set_nonces(config.dev, 0, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# from xrd.core.PoWValidator import PoWValidator
# while not PoWValidator().validate_mining_nonce(self.state, block_4.blockheader, False):
# block_4.set_nonces(block_4.mining_nonce + 1)
# print(block_4.mining_nonce)
self.assertTrue(block_4.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_4)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(multi_sig_address_state.transaction_hash_count(), 3)
self.assertTrue(result)
self.assertEqual(self.chain_manager.last_block, block_4)
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(5))
block_5 = Block.create(dev_config=config.dev,
block_number=5,
prev_headerhash=block_4.headerhash,
prev_timestamp=block_4.timestamp,
transactions=[multi_sig_vote2],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_5.set_nonces(config.dev, 0, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# from xrd.core.PoWValidator import PoWValidator
# while not self.chain_manager.validate_mining_nonce(block_5.blockheader, False):
# block_5.set_nonces(block_5.mining_nonce + 1)
# print(block_5.mining_nonce)
self.assertTrue(block_5.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_5)
            # Block rejected as it includes a vote for the expired multi sig spend txn.
self.assertFalse(result)
self.assertEqual(self.chain_manager.last_block, block_4)
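            # The spend never executed, so the multi sig address still holds the full 40 Quanta;
            # Bob only paid the funding transfer and its fee, and Alice only gained block rewards and fees.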
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(bob_addr_state.balance, 59 * int(config.dev.shor_per_quanta))
self.assertEqual(alice_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
block_1.block_reward + block_1.fee_reward +
block_2.block_reward + block_2.fee_reward +
block_3.block_reward + block_3.fee_reward +
block_4.block_reward + block_4.fee_reward)
self.assertEqual(random_addr_state.balance, 100 * int(config.dev.shor_per_quanta))
self.assertEqual(multi_sig_address_state.balance, 40 * int(config.dev.shor_per_quanta))
self.assertEqual(multi_sig_address_state.transaction_hash_count(), 3)
@set_default_balance_size()
@set_hard_fork_block_number()
@patch('xrd.core.misc.ntp.getTime')
def test_add_block4(self, time_mock):
"""
Features Tested
        - Behavior of Block validation with lattice transactions
Expectation
        - Blocks 1 and 2 must be added, as both of them carry a valid lattice transaction
- Block 3 must not be added, as it includes a lattice txn adding duplicate public keys
:param time_mock:
:return:
"""
with patch.object(DifficultyTracker, 'get', return_value=ask_difficulty_tracker('2', config.dev)):
self.chain_manager.load(self.genesis_block)
alice_xmss = get_alice_xmss(4)
bob_xmss = get_bob_xmss(4)
lattice_pk = LatticeTransaction.create(pk1=b'alice_pk1',
pk2=b'alice_pk2',
pk3=b'alice_pk3',
fee=5,
xmss_pk=alice_xmss.pk)
lattice_pk.sign(alice_xmss)
lattice_pk.pbdata.nonce = 1
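            # lattice_pk registers three placeholder public keys (pk1..pk3, dummy byte strings)
            # against Alice's address for a fee of 5 shor.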
time_mock.return_value = 1615270948 # Very high to get an easy difficulty
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(1))
block_1 = Block.create(dev_config=config.dev,
block_number=1,
prev_headerhash=self.genesis_block.headerhash,
prev_timestamp=self.genesis_block.timestamp,
transactions=[lattice_pk],
miner_address=bob_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_1.set_nonces(config.dev, 1, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_1.blockheader, config.dev, False):
# block_1.set_nonces(config.dev, block_1.mining_nonce + 1)
# print(block_1.mining_nonce)
self.assertTrue(block_1.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_1)
self.assertTrue(result)
self.assertEqual(self.chain_manager.last_block, block_1)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
self.assertEqual(alice_addr_state.balance,
100 * int(config.dev.shor_per_quanta) - 5)
self.assertEqual(alice_addr_state.lattice_pk_count(), 1)
self.assertEqual(bob_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
block_1.block_reward + 5)
lattice_pk2 = LatticeTransaction.create(pk1=b'alice_pk11',
pk2=b'alice_pk12',
pk3=b'alice_pk13',
fee=5,
xmss_pk=alice_xmss.pk)
lattice_pk2.sign(alice_xmss)
lattice_pk2.pbdata.nonce = 2
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(2))
block_2 = Block.create(dev_config=config.dev,
block_number=2,
prev_headerhash=block_1.headerhash,
prev_timestamp=block_1.timestamp,
transactions=[lattice_pk2],
miner_address=bob_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_2.set_nonces(config.dev, 10, 0)
self.assertTrue(block_2.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_2)
self.assertTrue(result)
self.assertEqual(self.chain_manager.last_block, block_2)
# Duplicate set of public keys in lattice transaction
lattice_pk3 = LatticeTransaction.create(pk1=b'alice_pk11',
pk2=b'alice_pk12',
pk3=b'alice_pk13',
fee=5,
xmss_pk=alice_xmss.pk)
lattice_pk3.sign(alice_xmss)
lattice_pk3.pbdata.nonce = 3
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(3))
block_3 = Block.create(dev_config=config.dev,
block_number=3,
prev_headerhash=block_2.headerhash,
prev_timestamp=block_2.timestamp,
transactions=[lattice_pk3],
miner_address=bob_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_3.set_nonces(config.dev, 1, 0)
self.assertTrue(block_3.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_3)
self.assertFalse(result)
self.assertEqual(self.chain_manager.last_block, block_2)
@set_default_balance_size()
@set_hard_fork_block_number()
@patch('xrd.core.misc.ntp.getTime')
def test_add_block5(self, time_mock):
"""
Features Tested
- Behavior of Block validation when multisig create txn, transfer txn, multisig spend txn
          & multi sig vote txn are added into the same block
Expectation
- Block must have been added
        - multi_sig_spend txn must not be executed as the threshold has not been reached
:param time_mock:
:return:
"""
with patch.object(DifficultyTracker, 'get', return_value=ask_difficulty_tracker('2', config.dev)):
self.chain_manager.load(self.genesis_block)
extended_seed = "010300cebc4e25553afa0aab899f7838e59e18a48852fa9dfd5" \
"ae78278c371902aa9e6e9c1fa8a196d2dba0cbfd2f2d212d16c"
random_xmss = XMSS.from_extended_seed(hstr2bin(extended_seed))
alice_xmss = get_alice_xmss(4)
bob_xmss = get_bob_xmss(4)
multi_sig_create = MultiSigCreate.create(signatories=[alice_xmss.address,
bob_xmss.address],
weights=[4, 6],
threshold=5,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_create.sign(alice_xmss)
multi_sig_create.pbdata.nonce = 1
multi_sig_address = MultiSigAddressState.generate_multi_sig_address(multi_sig_create.txhash)
transfer_transaction = TransferTransaction.create(addrs_to=[multi_sig_address],
amounts=[40 * int(config.dev.shor_per_quanta)],
message_data=None,
fee=1 * config.dev.shor_per_quanta,
xmss_pk=bob_xmss.pk)
transfer_transaction._data.nonce = 1
transfer_transaction.sign(bob_xmss)
self.assertTrue(transfer_transaction.validate_or_raise(True))
multi_sig_spend = MultiSigSpend.create(multi_sig_address=multi_sig_address,
addrs_to=[random_xmss.address, alice_xmss.address],
amounts=[10, 5],
expiry_block_number=100,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_spend.sign(alice_xmss)
multi_sig_spend.pbdata.nonce = 2
multi_sig_vote1 = MultiSigVote.create(shared_key=multi_sig_spend.txhash,
unvote=False,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_vote1.sign(alice_xmss)
multi_sig_vote1.pbdata.nonce = 3
time_mock.return_value = 1615270948 # Very high to get an easy difficulty
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(1))
block_1 = Block.create(dev_config=config.dev,
block_number=1,
prev_headerhash=self.genesis_block.headerhash,
prev_timestamp=self.genesis_block.timestamp,
transactions=[multi_sig_create, transfer_transaction,
multi_sig_spend, multi_sig_vote1],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_1.set_nonces(config.dev, 0, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# from xrd.core.PoWValidator import PoWValidator
# while not self.chain_manager.validate_mining_nonce(block_1.blockheader, config.dev, False):
# block_1.set_nonces(config.dev, block_1.mining_nonce + 1)
# print(block_1.mining_nonce)
self.assertTrue(block_1.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_1)
self.assertTrue(result)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend.txhash)
self.assertFalse(vote_stats.executed)
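            # Only Alice's vote (weight 4) is present, which is below the threshold of 5,
            # so the spend stays pending even though it was included in the same block.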
@set_default_balance_size()
@set_hard_fork_block_number()
@patch('xrd.core.misc.ntp.getTime')
def test_add_block6(self, time_mock):
"""
Features Tested
- Behavior of Block validation when multisig create txn, transfer txn, multisig spend txn,
          two multisig vote txns are added into the same block.
- Behavior of Rollback
- Transaction storage in State
- OTS index usage before and after rollback
Expectation
- Block must have been added
        - multi_sig_spend txn must be executed as the threshold has been reached
- Rollback must happen successfully
- Used OTS indexes after rollback must be found as unused
- Transaction must be found in State before roll back
- Transaction must not be found in State after roll back
:param time_mock:
:return:
"""
with patch.object(DifficultyTracker, 'get', return_value=ask_difficulty_tracker('2', config.dev)):
self.chain_manager.load(self.genesis_block)
extended_seed = "010300cebc4e25553afa0aab899f7838e59e18a48852fa9dfd5" \
"ae78278c371902aa9e6e9c1fa8a196d2dba0cbfd2f2d212d16c"
random_xmss = XMSS.from_extended_seed(hstr2bin(extended_seed))
alice_xmss = get_alice_xmss(4)
bob_xmss = get_bob_xmss(4)
multi_sig_create = MultiSigCreate.create(signatories=[alice_xmss.address,
bob_xmss.address],
weights=[4, 6],
threshold=5,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_create.sign(alice_xmss)
multi_sig_create.pbdata.nonce = 1
multi_sig_address = MultiSigAddressState.generate_multi_sig_address(multi_sig_create.txhash)
transfer_transaction = TransferTransaction.create(addrs_to=[multi_sig_address],
amounts=[40 * int(config.dev.shor_per_quanta)],
message_data=None,
fee=1 * config.dev.shor_per_quanta,
xmss_pk=bob_xmss.pk)
transfer_transaction._data.nonce = 1
transfer_transaction.sign(bob_xmss)
self.assertTrue(transfer_transaction.validate_or_raise(True))
multi_sig_spend = MultiSigSpend.create(multi_sig_address=multi_sig_address,
addrs_to=[random_xmss.address, alice_xmss.address],
amounts=[10, 5],
expiry_block_number=100,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_spend.sign(alice_xmss)
multi_sig_spend.pbdata.nonce = 2
multi_sig_vote1 = MultiSigVote.create(shared_key=multi_sig_spend.txhash,
unvote=False,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_vote1.sign(alice_xmss)
multi_sig_vote1.pbdata.nonce = 3
multi_sig_vote2 = MultiSigVote.create(shared_key=multi_sig_spend.txhash,
unvote=False,
fee=0,
xmss_pk=bob_xmss.pk)
multi_sig_vote2.sign(bob_xmss)
multi_sig_vote2.pbdata.nonce = 2
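            # With both votes in the same block the combined weight is 4 + 6 = 10, which
            # exceeds the threshold of 5, so the spend should execute immediately.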
time_mock.return_value = 1615270948 # Very high to get an easy difficulty
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(1))
block_1 = Block.create(dev_config=config.dev,
block_number=1,
prev_headerhash=self.genesis_block.headerhash,
prev_timestamp=self.genesis_block.timestamp,
transactions=[multi_sig_create, transfer_transaction, multi_sig_spend,
multi_sig_vote1, multi_sig_vote2],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_1.set_nonces(config.dev, 3, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# from xrd.core.PoWValidator import PoWValidator
# while not self.chain_manager.validate_mining_nonce(block_1.blockheader, config.dev, False):
# block_1.set_nonces(config.dev, block_1.mining_nonce + 1)
# print(block_1.mining_nonce)
self.assertTrue(block_1.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_1)
self.assertTrue(result)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend.txhash)
self.assertTrue(vote_stats.executed)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(bob_addr_state.balance, 59 * int(config.dev.shor_per_quanta))
self.assertEqual(alice_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
block_1.block_reward + block_1.fee_reward +
multi_sig_spend.amounts[1])
self.assertEqual(random_addr_state.balance,
100 * int(config.dev.shor_per_quanta) + multi_sig_spend.amounts[0])
self.assertEqual(multi_sig_address_state.balance,
40 * int(config.dev.shor_per_quanta) - multi_sig_spend.total_amount)
self.assertEqual(multi_sig_address_state.transaction_hash_count(), 3)
# Check Txns in State
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend.txhash)
self.assertIsNotNone(vote_stats)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_create.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_create.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_spend.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_spend.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote1.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_vote1.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote2.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_vote2.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(transfer_transaction.txhash)
self.assertEqual(tx_meta_data[0].pbdata, transfer_transaction.pbdata)
# Check OTS key usage
p = PaginatedBitfield(False, self.state._db)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_create.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_spend.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_vote1.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, transfer_transaction.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, multi_sig_vote2.ots_key)
self.assertTrue(result)
# Rollback state to genesis block
self.chain_manager._rollback(self.genesis_block.headerhash)
# Post Rollback tests
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend.txhash)
self.assertIsNone(vote_stats)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_create.txhash)
self.assertIsNone(tx_meta_data)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_spend.txhash)
self.assertIsNone(tx_meta_data)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote1.txhash)
self.assertIsNone(tx_meta_data)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote2.txhash)
self.assertIsNone(tx_meta_data)
tx_meta_data = self.chain_manager.get_tx_metadata(transfer_transaction.txhash)
self.assertIsNone(tx_meta_data)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(bob_addr_state.nonce, 0)
self.assertEqual(alice_addr_state.nonce, 0)
self.assertEqual(random_addr_state.nonce, 0)
self.assertEqual(bob_addr_state.balance, 100 * int(config.dev.shor_per_quanta))
self.assertEqual(alice_addr_state.balance, 100 * int(config.dev.shor_per_quanta))
self.assertEqual(random_addr_state.balance, 100 * int(config.dev.shor_per_quanta))
self.assertEqual(multi_sig_address_state, None)
# Check OTS key usage
p = PaginatedBitfield(False, self.state._db)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_create.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_spend.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_vote1.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, multi_sig_vote2.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, transfer_transaction.ots_key)
self.assertFalse(result)
@set_default_balance_size()
@set_hard_fork_block_number()
@patch('xrd.core.misc.ntp.getTime')
def test_add_block7(self, time_mock):
"""
Features Tested
- Behavior of Block validation when multisig create txn, transfer txn, multisig spend txn,
          three multisig vote txns are added into the same block.
First, MultiSig vote txn signed by Alice to add vote
Second, MultiSig vote txn signed by Bob to add vote
Third, MultiSig vote txn signed by Alice to unvote
- Behavior of Rollback
- Transaction storage in State
- OTS index usage before and after rollback
Expectation
- Block must have been added.
        - The order of the vote transactions in the block is Vote1, Vote3, Vote2, so the threshold is not reached.
- multi_sig_spend transaction must not execute.
- Rollback must happen successfully
- Used OTS indexes after rollback must be found as unused
- Transaction must be found in State before roll back
- Transaction must not be found in State after roll back
:param time_mock:
:return:
"""
with patch.object(DifficultyTracker, 'get', return_value=ask_difficulty_tracker('2', config.dev)):
self.chain_manager.load(self.genesis_block)
extended_seed = "010300cebc4e25553afa0aab899f7838e59e18a48852fa9dfd5" \
"ae78278c371902aa9e6e9c1fa8a196d2dba0cbfd2f2d212d16c"
random_xmss = XMSS.from_extended_seed(hstr2bin(extended_seed))
alice_xmss = get_alice_xmss(4)
bob_xmss = get_bob_xmss(4)
multi_sig_create = MultiSigCreate.create(signatories=[alice_xmss.address,
bob_xmss.address],
weights=[4, 6],
threshold=8,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_create.sign(alice_xmss)
multi_sig_create.pbdata.nonce = 1
multi_sig_address = MultiSigAddressState.generate_multi_sig_address(multi_sig_create.txhash)
transfer_transaction = TransferTransaction.create(addrs_to=[multi_sig_address],
amounts=[40 * int(config.dev.shor_per_quanta)],
message_data=None,
fee=1 * config.dev.shor_per_quanta,
xmss_pk=bob_xmss.pk)
transfer_transaction._data.nonce = 1
transfer_transaction.sign(bob_xmss)
self.assertTrue(transfer_transaction.validate_or_raise(True))
multi_sig_spend = MultiSigSpend.create(multi_sig_address=multi_sig_address,
addrs_to=[random_xmss.address, alice_xmss.address],
amounts=[10, 5],
expiry_block_number=100,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_spend.sign(alice_xmss)
multi_sig_spend.pbdata.nonce = 2
multi_sig_vote1 = MultiSigVote.create(shared_key=multi_sig_spend.txhash,
unvote=False,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_vote1.sign(alice_xmss)
multi_sig_vote1.pbdata.nonce = 3
multi_sig_vote2 = MultiSigVote.create(shared_key=multi_sig_spend.txhash,
unvote=False,
fee=0,
xmss_pk=bob_xmss.pk)
multi_sig_vote2.sign(bob_xmss)
multi_sig_vote2.pbdata.nonce = 2
multi_sig_vote3 = MultiSigVote.create(shared_key=multi_sig_spend.txhash,
unvote=True,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_vote3.sign(alice_xmss)
multi_sig_vote3.pbdata.nonce = 4
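            # Block 1 orders the votes as vote1, vote3, vote2: Alice votes (weight 4), then
            # unvotes, leaving only Bob's weight of 6, which is below the threshold of 8,
            # so the spend must not execute.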
time_mock.return_value = 1615270948 # Very high to get an easy difficulty
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(1))
block_1 = Block.create(dev_config=config.dev,
block_number=1,
prev_headerhash=self.genesis_block.headerhash,
prev_timestamp=self.genesis_block.timestamp,
transactions=[multi_sig_create, transfer_transaction, multi_sig_spend,
multi_sig_vote1, multi_sig_vote3, multi_sig_vote2],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_1.set_nonces(config.dev, 2, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_1.blockheader, config.dev, False):
# block_1.set_nonces(config.dev, block_1.mining_nonce + 1)
# print(block_1.mining_nonce)
self.assertTrue(block_1.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_1)
self.assertTrue(result)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend.txhash)
self.assertFalse(vote_stats.executed)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(bob_addr_state.balance, 59 * int(config.dev.shor_per_quanta))
self.assertEqual(alice_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
block_1.block_reward + block_1.fee_reward)
self.assertEqual(random_addr_state.balance,
100 * int(config.dev.shor_per_quanta))
self.assertEqual(multi_sig_address_state.balance,
40 * int(config.dev.shor_per_quanta))
self.assertEqual(multi_sig_address_state.transaction_hash_count(), 3)
# Check Txns in State
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend.txhash)
self.assertIsNotNone(vote_stats)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_create.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_create.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_spend.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_spend.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote1.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_vote1.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote2.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_vote2.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote3.txhash)
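            # The stored copy of vote3 appears to carry prev_tx_hash pointing at Alice's earlier
            # vote (vote1); set it on the local object so the pbdata comparison below matches.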
multi_sig_vote3.set_prev_tx_hash(multi_sig_vote1.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_vote3.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(transfer_transaction.txhash)
self.assertEqual(tx_meta_data[0].pbdata, transfer_transaction.pbdata)
# Check OTS key usage
p = PaginatedBitfield(False, self.state._db)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_create.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_spend.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_vote1.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, multi_sig_vote2.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_vote3.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, transfer_transaction.ots_key)
self.assertTrue(result)
# Rollback state to genesis block
self.chain_manager._rollback(self.genesis_block.headerhash)
# Post Rollback tests
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend.txhash)
self.assertIsNone(vote_stats)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_create.txhash)
self.assertIsNone(tx_meta_data)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_spend.txhash)
self.assertIsNone(tx_meta_data)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote1.txhash)
self.assertIsNone(tx_meta_data)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote2.txhash)
self.assertIsNone(tx_meta_data)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote3.txhash)
self.assertIsNone(tx_meta_data)
tx_meta_data = self.chain_manager.get_tx_metadata(transfer_transaction.txhash)
self.assertIsNone(tx_meta_data)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(bob_addr_state.nonce, 0)
self.assertEqual(alice_addr_state.nonce, 0)
self.assertEqual(random_addr_state.nonce, 0)
self.assertEqual(bob_addr_state.balance, 100 * int(config.dev.shor_per_quanta))
self.assertEqual(alice_addr_state.balance, 100 * int(config.dev.shor_per_quanta))
self.assertEqual(random_addr_state.balance, 100 * int(config.dev.shor_per_quanta))
self.assertEqual(multi_sig_address_state, None)
# Check OTS key usage
p = PaginatedBitfield(False, self.state._db)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_create.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_spend.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_vote1.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, multi_sig_vote2.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_vote3.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, transfer_transaction.ots_key)
self.assertFalse(result)
@set_default_balance_size()
@patch('xrd.core.misc.ntp.getTime')
def test_add_block8(self, time_mock):
"""
Features Tested
- Behavior of Block validation when multisig create txn, transfer txn, multisig spend txn,
          three multisig vote txns are added into the same block.
First, MultiSig vote txn signed by Alice to add vote
Second, MultiSig vote txn signed by Bob to add vote
Third, MultiSig vote txn signed by Alice to unvote
Expectation
- Block must not be added.
        - The order of the vote transactions in the block is Vote1, Vote2, Vote3, so the threshold is reached;
          Multi Sig Vote3 then becomes invalid because an unvote txn appears after the threshold has been reached,
          so the block is invalid and rejected.
:param time_mock:
:return:
"""
with patch.object(DifficultyTracker, 'get', return_value=ask_difficulty_tracker('2', config.dev)):
self.chain_manager.load(self.genesis_block)
extended_seed = "010300cebc4e25553afa0aab899f7838e59e18a48852fa9dfd5" \
"ae78278c371902aa9e6e9c1fa8a196d2dba0cbfd2f2d212d16c"
random_xmss = XMSS.from_extended_seed(hstr2bin(extended_seed))
alice_xmss = get_alice_xmss(4)
bob_xmss = get_bob_xmss(4)
multi_sig_create = MultiSigCreate.create(signatories=[alice_xmss.address,
bob_xmss.address],
weights=[4, 6],
threshold=8,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_create.sign(alice_xmss)
multi_sig_create.pbdata.nonce = 1
multi_sig_address = MultiSigAddressState.generate_multi_sig_address(multi_sig_create.txhash)
transfer_transaction = TransferTransaction.create(addrs_to=[multi_sig_address],
amounts=[40 * int(config.dev.shor_per_quanta)],
message_data=None,
fee=1 * config.dev.shor_per_quanta,
xmss_pk=bob_xmss.pk)
transfer_transaction._data.nonce = 1
transfer_transaction.sign(bob_xmss)
self.assertTrue(transfer_transaction.validate_or_raise(True))
multi_sig_spend = MultiSigSpend.create(multi_sig_address=multi_sig_address,
addrs_to=[random_xmss.address, alice_xmss.address],
amounts=[10, 5],
expiry_block_number=100,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_spend.sign(alice_xmss)
multi_sig_spend.pbdata.nonce = 2
multi_sig_vote1 = MultiSigVote.create(shared_key=multi_sig_spend.txhash,
unvote=False,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_vote1.sign(alice_xmss)
multi_sig_vote1.pbdata.nonce = 3
multi_sig_vote2 = MultiSigVote.create(shared_key=multi_sig_spend.txhash,
unvote=False,
fee=0,
xmss_pk=bob_xmss.pk)
multi_sig_vote2.sign(bob_xmss)
multi_sig_vote2.pbdata.nonce = 2
multi_sig_vote3 = MultiSigVote.create(shared_key=multi_sig_spend.txhash,
unvote=True,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_vote3.sign(alice_xmss)
multi_sig_vote3.pbdata.nonce = 4
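            # Here the votes are ordered vote1, vote2, vote3: the threshold of 8 is reached
            # first (4 + 6 = 10), so the trailing unvote is invalid and the block must be rejected.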
time_mock.return_value = 1615270948 # Very high to get an easy difficulty
block_1 = Block.create(dev_config=config.dev,
block_number=1,
prev_headerhash=self.genesis_block.headerhash,
prev_timestamp=self.genesis_block.timestamp,
transactions=[multi_sig_create, transfer_transaction, multi_sig_spend,
multi_sig_vote1, multi_sig_vote2, multi_sig_vote3],
miner_address=alice_xmss.address,
seed_height=None,
seed_hash=None)
block_1.set_nonces(config.dev, 3, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_1.blockheader, config.dev, False):
# block_1.set_nonces(config.dev, block_1.mining_nonce + 1)
# print(block_1.mining_nonce)
self.assertTrue(block_1.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_1)
self.assertFalse(result)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend.txhash)
self.assertIsNone(vote_stats)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(bob_addr_state.balance, 100 * int(config.dev.shor_per_quanta))
self.assertEqual(alice_addr_state.balance,
100 * int(config.dev.shor_per_quanta))
self.assertEqual(random_addr_state.balance,
100 * int(config.dev.shor_per_quanta))
self.assertIsNone(multi_sig_address_state)
@set_default_balance_size()
@patch('xrd.core.misc.ntp.getTime')
def test_add_block9(self, time_mock):
"""
Features Tested
- Behavior of Block validation when multisig create txn, transfer txn, multisig spend txn,
          two multisig vote txns are added into the same block.
Expectation
        - Block must not be added, as it includes a duplicate multi sig vote txn.
:param time_mock:
:return:
"""
with patch.object(DifficultyTracker, 'get', return_value=ask_difficulty_tracker('2', config.dev)):
self.chain_manager.load(self.genesis_block)
extended_seed = "010300cebc4e25553afa0aab899f7838e59e18a48852fa9dfd5" \
"ae78278c371902aa9e6e9c1fa8a196d2dba0cbfd2f2d212d16c"
random_xmss = XMSS.from_extended_seed(hstr2bin(extended_seed))
alice_xmss = get_alice_xmss(4)
bob_xmss = get_bob_xmss(4)
multi_sig_create = MultiSigCreate.create(signatories=[alice_xmss.address,
bob_xmss.address],
weights=[4, 6],
threshold=8,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_create.sign(alice_xmss)
multi_sig_create.pbdata.nonce = 1
multi_sig_address = MultiSigAddressState.generate_multi_sig_address(multi_sig_create.txhash)
transfer_transaction = TransferTransaction.create(addrs_to=[multi_sig_address],
amounts=[40 * int(config.dev.shor_per_quanta)],
message_data=None,
fee=1 * config.dev.shor_per_quanta,
xmss_pk=bob_xmss.pk)
transfer_transaction._data.nonce = 1
transfer_transaction.sign(bob_xmss)
self.assertTrue(transfer_transaction.validate_or_raise(True))
multi_sig_spend = MultiSigSpend.create(multi_sig_address=multi_sig_address,
addrs_to=[random_xmss.address, alice_xmss.address],
amounts=[10, 5],
expiry_block_number=100,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_spend.sign(alice_xmss)
multi_sig_spend.pbdata.nonce = 2
multi_sig_vote1 = MultiSigVote.create(shared_key=multi_sig_spend.txhash,
unvote=False,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_vote1.sign(alice_xmss)
multi_sig_vote1.pbdata.nonce = 3
multi_sig_vote2 = MultiSigVote.create(shared_key=multi_sig_spend.txhash,
unvote=False,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_vote2.sign(alice_xmss)
multi_sig_vote2.pbdata.nonce = 4
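            # vote1 and vote2 are both signed by Alice for the same shared_key with unvote=False,
            # i.e. a duplicate vote, which should invalidate the block.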
time_mock.return_value = 1615270948 # Very high to get an easy difficulty
block_1 = Block.create(dev_config=config.dev,
block_number=1,
prev_headerhash=self.genesis_block.headerhash,
prev_timestamp=self.genesis_block.timestamp,
transactions=[multi_sig_create, transfer_transaction, multi_sig_spend,
multi_sig_vote1, multi_sig_vote2],
miner_address=alice_xmss.address,
seed_height=None,
seed_hash=None)
block_1.set_nonces(config.dev, 3, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_1.blockheader, config.dev, False):
# block_1.set_nonces(config.dev, block_1.mining_nonce + 1)
# print(block_1.mining_nonce)
self.assertTrue(block_1.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_1)
self.assertFalse(result)
@set_default_balance_size()
@set_hard_fork_block_number()
@patch('xrd.core.misc.ntp.getTime')
def test_add_block10(self, time_mock):
"""
Features Tested
- Behavior of Block validation when two multi sig spend transactions are made, such that
          the last multi sig spend transaction gets enough votes but the multi sig address doesn't
          have enough funds for the last multi sig spend transaction.
- Behavior of Rollback
- Transaction storage in State
- OTS index usage before and after rollback
Expectation
- Both blocks must be added.
- multi_sig_spend1 must be executed at block 1
        - multi_sig_spend2 must have received enough votes at block 2,
but must not execute due to lack of funds.
- Rollback must happen successfully
- Used OTS indexes after rollback must be found as unused
- Transaction must be found in State before roll back
- Transaction must not be found in State after roll back
:param time_mock:
:return:
"""
with patch.object(DifficultyTracker, 'get', return_value=ask_difficulty_tracker('2', config.dev)):
self.chain_manager.load(self.genesis_block)
extended_seed = "010300cebc4e25553afa0aab899f7838e59e18a48852fa9dfd5" \
"ae78278c371902aa9e6e9c1fa8a196d2dba0cbfd2f2d212d16c"
random_xmss = XMSS.from_extended_seed(hstr2bin(extended_seed))
alice_xmss = get_alice_xmss(4)
bob_xmss = get_bob_xmss(4)
multi_sig_create = MultiSigCreate.create(signatories=[alice_xmss.address,
bob_xmss.address],
weights=[4, 6],
threshold=8,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_create.sign(alice_xmss)
multi_sig_create.pbdata.nonce = 1
multi_sig_address = MultiSigAddressState.generate_multi_sig_address(multi_sig_create.txhash)
transfer_transaction = TransferTransaction.create(addrs_to=[multi_sig_address],
amounts=[40],
message_data=None,
fee=1 * config.dev.shor_per_quanta,
xmss_pk=bob_xmss.pk)
transfer_transaction._data.nonce = 1
transfer_transaction.sign(bob_xmss)
self.assertTrue(transfer_transaction.validate_or_raise(True))
multi_sig_spend1 = MultiSigSpend.create(multi_sig_address=multi_sig_address,
addrs_to=[random_xmss.address, alice_xmss.address],
amounts=[20, 15],
expiry_block_number=100,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_spend1.sign(alice_xmss)
multi_sig_spend1.pbdata.nonce = 2
multi_sig_spend2 = MultiSigSpend.create(multi_sig_address=multi_sig_address,
addrs_to=[random_xmss.address, alice_xmss.address],
amounts=[10, 10],
expiry_block_number=100,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_spend2.sign(alice_xmss)
multi_sig_spend2.pbdata.nonce = 3
multi_sig_vote1 = MultiSigVote.create(shared_key=multi_sig_spend1.txhash,
unvote=False,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_vote1.sign(alice_xmss)
multi_sig_vote1.pbdata.nonce = 4
multi_sig_vote2 = MultiSigVote.create(shared_key=multi_sig_spend1.txhash,
unvote=False,
fee=0,
xmss_pk=bob_xmss.pk)
multi_sig_vote2.sign(bob_xmss)
multi_sig_vote2.pbdata.nonce = 2
multi_sig_vote3 = MultiSigVote.create(shared_key=multi_sig_spend2.txhash,
unvote=False,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_vote3.sign(alice_xmss)
multi_sig_vote3.pbdata.nonce = 5
multi_sig_vote4 = MultiSigVote.create(shared_key=multi_sig_spend2.txhash,
unvote=False,
fee=0,
xmss_pk=bob_xmss.pk)
multi_sig_vote4.sign(bob_xmss)
multi_sig_vote4.pbdata.nonce = 3
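            # Funding is 40 shor; spend1 (20 + 15 = 35 shor) executes in block 1, leaving only
            # 5 shor, so spend2 (10 + 10 = 20 shor) cannot be funded when it reaches the
            # threshold in block 2.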
time_mock.return_value = 1615270948 # Very high to get an easy difficulty
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(1))
block_1 = Block.create(dev_config=config.dev,
block_number=1,
prev_headerhash=self.genesis_block.headerhash,
prev_timestamp=self.genesis_block.timestamp,
transactions=[multi_sig_create, transfer_transaction, multi_sig_spend1,
multi_sig_spend2, multi_sig_vote1, multi_sig_vote2],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_1.set_nonces(config.dev, 2, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_1.blockheader, config.dev, False):
# block_1.set_nonces(config.dev, block_1.mining_nonce + 1)
# print(block_1.mining_nonce)
self.assertTrue(block_1.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_1)
self.assertTrue(result)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend1.txhash)
self.assertTrue(vote_stats.executed)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(bob_addr_state.balance,
100 * int(config.dev.shor_per_quanta) -
transfer_transaction.fee -
transfer_transaction.total_amount)
self.assertEqual(alice_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[1] +
block_1.fee_reward +
block_1.block_reward)
self.assertEqual(random_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[0])
self.assertIsNotNone(multi_sig_address_state)
self.assertEqual(multi_sig_address_state.balance,
transfer_transaction.total_amount -
multi_sig_spend1.total_amount)
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(2))
block_2 = Block.create(dev_config=config.dev,
block_number=2,
prev_headerhash=block_1.headerhash,
prev_timestamp=block_1.timestamp,
transactions=[multi_sig_vote3, multi_sig_vote4],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_2.set_nonces(config.dev, 0, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
            # while not self.chain_manager.validate_mining_nonce(block_2.blockheader, config.dev, False):
            #     block_2.set_nonces(config.dev, block_2.mining_nonce + 1)
            # print(block_2.mining_nonce)
self.assertTrue(block_2.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_2)
self.assertTrue(result)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend2.txhash)
self.assertFalse(vote_stats.executed)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(bob_addr_state.balance,
100 * int(config.dev.shor_per_quanta) -
transfer_transaction.fee -
transfer_transaction.total_amount)
self.assertEqual(alice_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[1] +
block_1.fee_reward +
block_1.block_reward +
block_2.fee_reward +
block_2.block_reward)
self.assertEqual(random_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[0])
self.assertIsNotNone(multi_sig_address_state)
self.assertEqual(multi_sig_address_state.balance,
transfer_transaction.total_amount -
multi_sig_spend1.total_amount)
# Check Txns in State
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend1.txhash)
self.assertIsNotNone(vote_stats)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend2.txhash)
self.assertIsNotNone(vote_stats)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_create.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_create.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_spend1.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_spend1.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_spend2.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_spend2.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote1.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_vote1.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote2.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_vote2.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote3.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_vote3.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote4.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_vote4.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(transfer_transaction.txhash)
self.assertEqual(tx_meta_data[0].pbdata, transfer_transaction.pbdata)
# Check OTS key usage
p = PaginatedBitfield(False, self.state._db)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_create.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_spend1.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_spend2.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_vote1.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, multi_sig_vote2.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_vote3.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, multi_sig_vote4.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, transfer_transaction.ots_key)
self.assertTrue(result)
# Rollback state to genesis block
self.chain_manager._rollback(self.genesis_block.headerhash)
# Post Rollback tests
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend1.txhash)
self.assertIsNone(vote_stats)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend2.txhash)
self.assertIsNone(vote_stats)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_create.txhash)
self.assertIsNone(tx_meta_data)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_spend1.txhash)
self.assertIsNone(tx_meta_data)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_spend2.txhash)
self.assertIsNone(tx_meta_data)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote1.txhash)
self.assertIsNone(tx_meta_data)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote2.txhash)
self.assertIsNone(tx_meta_data)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote3.txhash)
self.assertIsNone(tx_meta_data)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote4.txhash)
self.assertIsNone(tx_meta_data)
tx_meta_data = self.chain_manager.get_tx_metadata(transfer_transaction.txhash)
self.assertIsNone(tx_meta_data)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(bob_addr_state.nonce, 0)
self.assertEqual(alice_addr_state.nonce, 0)
self.assertEqual(random_addr_state.nonce, 0)
self.assertEqual(bob_addr_state.balance, 100 * int(config.dev.shor_per_quanta))
self.assertEqual(alice_addr_state.balance, 100 * int(config.dev.shor_per_quanta))
self.assertEqual(random_addr_state.balance, 100 * int(config.dev.shor_per_quanta))
self.assertEqual(multi_sig_address_state, None)
# Check OTS key usage
p = PaginatedBitfield(False, self.state._db)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_create.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_spend1.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_spend2.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_vote1.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, multi_sig_vote2.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_vote3.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, multi_sig_vote4.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, transfer_transaction.ots_key)
self.assertFalse(result)
@set_default_balance_size()
@set_hard_fork_block_number()
@patch('xrd.core.misc.ntp.getTime')
def test_add_block11(self, time_mock):
"""
Features Tested
- Behavior of Block validation when two multi sig spend transactions are made, such that
          the last multi sig spend transaction gets enough votes but the multi sig address doesn't
          have enough funds for the last multi sig spend transaction.
          The transaction in block 3 then sends more funds to the Multi Sig Address, making the funds sufficient.
          However, to execute the multi_sig_spend2 txn, its votes need to be re-checked by executing another
          vote txn. So in block 4, a multi_sig_vote txn is added to unvote it.
          In block 5, a multi_sig_vote txn is added to vote again, reaching the threshold.
- Behavior of Rollback
- Transaction storage in State
- OTS index usage before and after rollback
Expectation
        - All blocks must be added.
        - multi_sig_spend1 must be executed at block 1
        - multi_sig_spend2 must have received enough votes at block 2,
          but must not execute due to lack of funds.
        - In block 5, when re-voted, there will be enough funds in the multi sig address, so the
          multi_sig_spend2 txn must be executed.
- Rollback must happen successfully
- Used OTS indexes after rollback must be found as unused
- Transaction must be found in State before roll back
- Transaction must not be found in State after roll back
:param time_mock:
:return:
"""
with patch.object(DifficultyTracker, 'get', return_value=ask_difficulty_tracker('2', config.dev)):
self.chain_manager.load(self.genesis_block)
extended_seed = "010300cebc4e25553afa0aab899f7838e59e18a48852fa9dfd5" \
"ae78278c371902aa9e6e9c1fa8a196d2dba0cbfd2f2d212d16c"
random_xmss = XMSS.from_extended_seed(hstr2bin(extended_seed))
alice_xmss = get_alice_xmss(4)
bob_xmss = get_bob_xmss(4)
multi_sig_create = MultiSigCreate.create(signatories=[alice_xmss.address,
bob_xmss.address],
weights=[4, 6],
threshold=8,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_create.sign(alice_xmss)
multi_sig_create.pbdata.nonce = 1
multi_sig_address = MultiSigAddressState.generate_multi_sig_address(multi_sig_create.txhash)
transfer_transaction1 = TransferTransaction.create(addrs_to=[multi_sig_address],
amounts=[40],
message_data=None,
fee=1 * config.dev.shor_per_quanta,
xmss_pk=bob_xmss.pk)
transfer_transaction1._data.nonce = 1
transfer_transaction1.sign(bob_xmss)
self.assertTrue(transfer_transaction1.validate_or_raise(True))
multi_sig_spend1 = MultiSigSpend.create(multi_sig_address=multi_sig_address,
addrs_to=[random_xmss.address, alice_xmss.address],
amounts=[20, 15],
expiry_block_number=100,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_spend1.sign(alice_xmss)
multi_sig_spend1.pbdata.nonce = 2
multi_sig_spend2 = MultiSigSpend.create(multi_sig_address=multi_sig_address,
addrs_to=[random_xmss.address, alice_xmss.address],
amounts=[10, 10],
expiry_block_number=100,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_spend2.sign(alice_xmss)
multi_sig_spend2.pbdata.nonce = 3
multi_sig_vote1 = MultiSigVote.create(shared_key=multi_sig_spend1.txhash,
unvote=False,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_vote1.sign(alice_xmss)
multi_sig_vote1.pbdata.nonce = 4
multi_sig_vote2 = MultiSigVote.create(shared_key=multi_sig_spend1.txhash,
unvote=False,
fee=0,
xmss_pk=bob_xmss.pk)
multi_sig_vote2.sign(bob_xmss)
multi_sig_vote2.pbdata.nonce = 2
multi_sig_vote3 = MultiSigVote.create(shared_key=multi_sig_spend2.txhash,
unvote=False,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_vote3.sign(alice_xmss)
multi_sig_vote3.pbdata.nonce = 5
multi_sig_vote4 = MultiSigVote.create(shared_key=multi_sig_spend2.txhash,
unvote=False,
fee=0,
xmss_pk=bob_xmss.pk)
multi_sig_vote4.sign(bob_xmss)
multi_sig_vote4.pbdata.nonce = 3
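            # Blocks 1 and 2 repeat the test_add_block10 setup; blocks 3-5 then top up the
            # multi sig address, unvote spend2 and re-vote it so that it finally executes.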
time_mock.return_value = 1615270948 # Very high to get an easy difficulty
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(1))
block_1 = Block.create(dev_config=config.dev,
block_number=1,
prev_headerhash=self.genesis_block.headerhash,
prev_timestamp=self.genesis_block.timestamp,
transactions=[multi_sig_create, transfer_transaction1, multi_sig_spend1,
multi_sig_spend2, multi_sig_vote1, multi_sig_vote2],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_1.set_nonces(config.dev, 2, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_1.blockheader, config.dev, False):
# block_1.set_nonces(config.dev, block_1.mining_nonce + 1)
# print(block_1.mining_nonce)
self.assertTrue(block_1.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_1)
self.assertTrue(result)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend1.txhash)
self.assertTrue(vote_stats.executed)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(bob_addr_state.balance,
100 * int(config.dev.shor_per_quanta) -
transfer_transaction1.fee -
transfer_transaction1.total_amount)
self.assertEqual(alice_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[1] +
block_1.fee_reward +
block_1.block_reward)
self.assertEqual(random_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[0])
self.assertIsNotNone(multi_sig_address_state)
self.assertEqual(multi_sig_address_state.balance,
transfer_transaction1.total_amount -
multi_sig_spend1.total_amount)
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(2))
block_2 = Block.create(dev_config=config.dev,
block_number=2,
prev_headerhash=block_1.headerhash,
prev_timestamp=block_1.timestamp,
transactions=[multi_sig_vote3, multi_sig_vote4],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_2.set_nonces(config.dev, 0, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
            # while not self.chain_manager.validate_mining_nonce(block_2.blockheader, config.dev, False):
            #     block_2.set_nonces(config.dev, block_2.mining_nonce + 1)
            # print(block_2.mining_nonce)
self.assertTrue(block_2.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_2)
self.assertTrue(result)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend2.txhash)
self.assertFalse(vote_stats.executed)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(bob_addr_state.balance,
100 * int(config.dev.shor_per_quanta) -
transfer_transaction1.fee -
transfer_transaction1.total_amount)
self.assertEqual(alice_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[1] +
block_1.fee_reward +
block_1.block_reward +
block_2.fee_reward +
block_2.block_reward)
self.assertEqual(random_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[0])
self.assertIsNotNone(multi_sig_address_state)
self.assertEqual(multi_sig_address_state.balance,
transfer_transaction1.total_amount -
multi_sig_spend1.total_amount)
transfer_transaction2 = TransferTransaction.create(addrs_to=[multi_sig_address],
amounts=[40],
message_data=None,
fee=1 * config.dev.shor_per_quanta,
xmss_pk=bob_xmss.pk)
transfer_transaction2._data.nonce = 4
transfer_transaction2.sign(bob_xmss)
self.assertTrue(transfer_transaction2.validate_or_raise(True))
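            # transfer_transaction2 adds another 40 shor to the multi sig address, so the
            # remaining balance (5 + 40 shor) now covers spend2's total of 20 shor.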
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(3))
block_3 = Block.create(dev_config=config.dev,
block_number=3,
prev_headerhash=block_2.headerhash,
prev_timestamp=block_2.timestamp,
transactions=[transfer_transaction2],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_3.set_nonces(config.dev, 0, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_3.blockheader, config.dev, False):
# block_3.set_nonces(config.dev, block_3.mining_nonce + 1)
# print(block_3.mining_nonce)
self.assertTrue(block_3.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_3)
self.assertTrue(result)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend2.txhash)
self.assertFalse(vote_stats.executed)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(bob_addr_state.balance,
100 * int(config.dev.shor_per_quanta) -
transfer_transaction1.fee -
transfer_transaction1.total_amount -
transfer_transaction2.fee -
transfer_transaction2.total_amount)
self.assertEqual(alice_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[1] +
block_1.fee_reward +
block_1.block_reward +
block_2.fee_reward +
block_2.block_reward +
block_3.fee_reward +
block_3.block_reward)
self.assertEqual(random_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[0])
self.assertIsNotNone(multi_sig_address_state)
self.assertEqual(multi_sig_address_state.balance,
transfer_transaction1.total_amount +
transfer_transaction2.total_amount -
multi_sig_spend1.total_amount)
multi_sig_vote5 = MultiSigVote.create(shared_key=multi_sig_spend2.txhash,
unvote=True,
fee=0,
xmss_pk=bob_xmss.pk)
multi_sig_vote5.sign(bob_xmss)
multi_sig_vote5.pbdata.nonce = 5
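            # Bob unvotes spend2 so that a later re-vote forces the vote stats (and the
            # now-sufficient balance) to be evaluated again.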
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(4))
block_4 = Block.create(dev_config=config.dev,
block_number=4,
prev_headerhash=block_3.headerhash,
prev_timestamp=block_3.timestamp,
transactions=[multi_sig_vote5],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_4.set_nonces(config.dev, 4, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_4.blockheader, config.dev, False):
#     block_4.set_nonces(config.dev, block_4.mining_nonce + 1)
#     print(block_4.mining_nonce)
self.assertTrue(block_4.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_4)
self.assertTrue(result)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend2.txhash)
self.assertFalse(vote_stats.executed)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(bob_addr_state.balance,
100 * int(config.dev.shor_per_quanta) -
transfer_transaction1.fee -
transfer_transaction1.total_amount -
transfer_transaction2.fee -
transfer_transaction2.total_amount -
multi_sig_vote5.fee)
self.assertEqual(alice_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[1] +
block_1.fee_reward +
block_1.block_reward +
block_2.fee_reward +
block_2.block_reward +
block_3.fee_reward +
block_3.block_reward +
block_4.fee_reward +
block_4.block_reward)
self.assertEqual(random_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[0])
self.assertIsNotNone(multi_sig_address_state)
self.assertEqual(multi_sig_address_state.balance,
transfer_transaction1.total_amount +
transfer_transaction2.total_amount -
multi_sig_spend1.total_amount)
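# Block 5: Bob votes for multi_sig_spend2 again; the threshold is reached and the spend is executed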
multi_sig_vote6 = MultiSigVote.create(shared_key=multi_sig_spend2.txhash,
unvote=False,
fee=0,
xmss_pk=bob_xmss.pk)
multi_sig_vote6.sign(bob_xmss)
multi_sig_vote6.pbdata.nonce = 6
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(5))
block_5 = Block.create(dev_config=config.dev,
block_number=5,
prev_headerhash=block_4.headerhash,
prev_timestamp=block_4.timestamp,
transactions=[multi_sig_vote6],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_5.set_nonces(config.dev, 3, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_5.blockheader, config.dev, False):
# block_5.set_nonces(config.dev, block_5.mining_nonce + 1)
# print(block_5.mining_nonce)
self.assertTrue(block_5.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_5)
self.assertTrue(result)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend2.txhash)
self.assertTrue(vote_stats.executed)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(bob_addr_state.balance,
100 * int(config.dev.shor_per_quanta) -
transfer_transaction1.fee -
transfer_transaction1.total_amount -
transfer_transaction2.fee -
transfer_transaction2.total_amount -
multi_sig_vote5.fee -
multi_sig_vote6.fee)
self.assertEqual(alice_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[1] +
multi_sig_spend2.amounts[1] +
block_1.fee_reward +
block_1.block_reward +
block_2.fee_reward +
block_2.block_reward +
block_3.fee_reward +
block_3.block_reward +
block_4.fee_reward +
block_4.block_reward +
block_5.fee_reward +
block_5.block_reward)
self.assertEqual(random_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[0] +
multi_sig_spend2.amounts[1])
self.assertIsNotNone(multi_sig_address_state)
self.assertEqual(multi_sig_address_state.balance,
transfer_transaction1.total_amount +
transfer_transaction2.total_amount -
multi_sig_spend1.total_amount -
multi_sig_spend2.total_amount)
# Check Txns in State
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend1.txhash)
self.assertIsNotNone(vote_stats)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend2.txhash)
self.assertIsNotNone(vote_stats)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_create.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_create.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_spend1.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_spend1.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_spend2.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_spend2.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote1.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_vote1.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote2.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_vote2.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote3.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_vote3.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote4.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_vote4.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote5.txhash)
multi_sig_vote5.set_prev_tx_hash(multi_sig_vote4.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_vote5.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote6.txhash)
multi_sig_vote6.set_prev_tx_hash(multi_sig_vote5.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_vote6.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(transfer_transaction1.txhash)
self.assertEqual(tx_meta_data[0].pbdata, transfer_transaction1.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(transfer_transaction2.txhash)
self.assertEqual(tx_meta_data[0].pbdata, transfer_transaction2.pbdata)
# Check OTS key usage
p = PaginatedBitfield(False, self.state._db)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_create.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_spend1.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_spend2.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_vote1.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, multi_sig_vote2.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_vote3.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, multi_sig_vote4.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, multi_sig_vote5.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, multi_sig_vote6.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, transfer_transaction1.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, transfer_transaction2.ots_key)
self.assertTrue(result)
# Rollback state to genesis block
self.assertTrue(self.chain_manager._rollback(self.genesis_block.headerhash))
# Post Rollback tests
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend1.txhash)
self.assertIsNone(vote_stats)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend2.txhash)
self.assertIsNone(vote_stats)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_create.txhash)
self.assertIsNone(tx_meta_data)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_spend1.txhash)
self.assertIsNone(tx_meta_data)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_spend2.txhash)
self.assertIsNone(tx_meta_data)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote1.txhash)
self.assertIsNone(tx_meta_data)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote2.txhash)
self.assertIsNone(tx_meta_data)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote3.txhash)
self.assertIsNone(tx_meta_data)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote4.txhash)
self.assertIsNone(tx_meta_data)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote5.txhash)
self.assertIsNone(tx_meta_data)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote6.txhash)
self.assertIsNone(tx_meta_data)
tx_meta_data = self.chain_manager.get_tx_metadata(transfer_transaction1.txhash)
self.assertIsNone(tx_meta_data)
tx_meta_data = self.chain_manager.get_tx_metadata(transfer_transaction2.txhash)
self.assertIsNone(tx_meta_data)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(bob_addr_state.nonce, 0)
self.assertEqual(alice_addr_state.nonce, 0)
self.assertEqual(random_addr_state.nonce, 0)
self.assertEqual(bob_addr_state.balance, 100 * int(config.dev.shor_per_quanta))
self.assertEqual(alice_addr_state.balance, 100 * int(config.dev.shor_per_quanta))
self.assertEqual(random_addr_state.balance, 100 * int(config.dev.shor_per_quanta))
self.assertEqual(multi_sig_address_state, None)
# Check OTS key usage
p = PaginatedBitfield(False, self.state._db)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_create.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_spend1.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_spend2.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_vote1.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, multi_sig_vote2.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_vote3.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, multi_sig_vote4.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, multi_sig_vote5.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, multi_sig_vote6.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, transfer_transaction1.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, transfer_transaction2.ots_key)
self.assertFalse(result)
@set_default_balance_size()
@set_hard_fork_block_number()
@patch('xrd.core.misc.ntp.getTime')
def test_add_block12(self, time_mock):
"""
Features Tested
- Behavior of Block validation after expiry.
Expectation
- Block 1 must be added, as it includes valid transactions.
- Block 2 must be added.
- Block 3 must fail as it includes a vote for an expired multi_sig_spend transaction.
- multi_sig_spend txn must not be executed as the threshold has not been reached.
:param time_mock:
:return:
"""
with patch.object(DifficultyTracker, 'get', return_value=ask_difficulty_tracker('2', config.dev)):
self.chain_manager.load(self.genesis_block)
extended_seed = "010300cebc4e25553afa0aab899f7838e59e18a48852fa9dfd5" \
"ae78278c371902aa9e6e9c1fa8a196d2dba0cbfd2f2d212d16c"
random_xmss = XMSS.from_extended_seed(hstr2bin(extended_seed))
alice_xmss = get_alice_xmss(4)
bob_xmss = get_bob_xmss(4)
multi_sig_create = MultiSigCreate.create(signatories=[alice_xmss.address,
bob_xmss.address],
weights=[4, 6],
threshold=5,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_create.sign(alice_xmss)
multi_sig_create.pbdata.nonce = 1
multi_sig_address = MultiSigAddressState.generate_multi_sig_address(multi_sig_create.txhash)
transfer_transaction = TransferTransaction.create(addrs_to=[multi_sig_address],
amounts=[40 * int(config.dev.shor_per_quanta)],
message_data=None,
fee=1 * config.dev.shor_per_quanta,
xmss_pk=bob_xmss.pk)
transfer_transaction._data.nonce = 1
transfer_transaction.sign(bob_xmss)
self.assertTrue(transfer_transaction.validate_or_raise(True))
multi_sig_spend = MultiSigSpend.create(multi_sig_address=multi_sig_address,
addrs_to=[random_xmss.address, alice_xmss.address],
amounts=[10, 5],
expiry_block_number=2,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_spend.sign(alice_xmss)
multi_sig_spend.pbdata.nonce = 2
multi_sig_vote1 = MultiSigVote.create(shared_key=multi_sig_spend.txhash,
unvote=False,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_vote1.sign(alice_xmss)
multi_sig_vote1.pbdata.nonce = 3
multi_sig_unvote1 = MultiSigVote.create(shared_key=multi_sig_spend.txhash,
                                        unvote=True,
                                        fee=0,
                                        xmss_pk=alice_xmss.pk)
multi_sig_unvote1.sign(alice_xmss)
multi_sig_unvote1.pbdata.nonce = 4
multi_sig_vote2 = MultiSigVote.create(shared_key=multi_sig_spend.txhash,
unvote=False,
fee=0,
xmss_pk=bob_xmss.pk)
multi_sig_vote2.sign(bob_xmss)
multi_sig_vote2.pbdata.nonce = 2
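# multi_sig_vote2 is only included in block 3, after multi_sig_spend has expired (expiry_block_number=2)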
time_mock.return_value = 1615270948 # Very high to get an easy difficulty
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(1))
block_1 = Block.create(dev_config=config.dev,
block_number=1,
prev_headerhash=self.genesis_block.headerhash,
prev_timestamp=self.genesis_block.timestamp,
transactions=[multi_sig_create, transfer_transaction,
multi_sig_spend, multi_sig_vote1],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_1.set_nonces(config.dev, 1, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_1.blockheader, config.dev, False):
# block_1.set_nonces(config.dev, block_1.mining_nonce + 1)
# print(block_1.mining_nonce)
self.assertTrue(block_1.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_1)
self.assertTrue(result)
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(2))
block_2 = Block.create(dev_config=config.dev,
block_number=2,
prev_headerhash=block_1.headerhash,
prev_timestamp=block_1.timestamp,
transactions=[],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_2.set_nonces(config.dev, 1, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_2.blockheader, config.dev, False):
# block_2.set_nonces(config.dev, block_2.mining_nonce + 1)
# print(block_2.mining_nonce)
self.assertTrue(block_2.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_2)
self.assertTrue(result)
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(3))
# Trying to add block_3 with a vote txn for an expired multi_sig_spend txn to meet threshold
block_3 = Block.create(dev_config=config.dev,
block_number=3,
prev_headerhash=block_2.headerhash,
prev_timestamp=block_2.timestamp,
transactions=[multi_sig_vote2],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_3.set_nonces(config.dev, 0, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_3.blockheader, config.dev, False):
# block_3.set_nonces(config.dev, block_3.mining_nonce + 1)
# print(block_3.mining_nonce)
self.assertTrue(block_3.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_3)
self.assertFalse(result)
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(3))
# Trying to add block_3 with an unvote txn for an expired multi_sig_spend txn
block_3 = Block.create(dev_config=config.dev,
block_number=3,
prev_headerhash=block_2.headerhash,
prev_timestamp=block_2.timestamp,
transactions=[multi_sig_unvote1],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_3.set_nonces(config.dev, 1, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_3.blockheader, config.dev, False):
# block_3.set_nonces(config.dev, block_3.mining_nonce + 1)
# print(block_3.mining_nonce)
self.assertTrue(block_3.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_3)
self.assertFalse(result)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend.txhash)
self.assertFalse(vote_stats.executed)
@set_default_balance_size()
@patch('xrd.core.misc.ntp.getTime')
def test_add_block13(self, time_mock):
"""
Features Tested
- Slave transactions
- Transfer Transaction made by slave address
- Rollback of slave transaction
Expectation
- block_1 must be accepted as all transactions are valid
- block_2 must be rejected as the slave transaction registers a slave public key that is
already associated with the master address
- After rollback, the slave metadata containing the access type must not be found in the state
:param time_mock:
:return:
"""
with patch.object(DifficultyTracker, 'get', return_value=ask_difficulty_tracker('2', config.dev)):
self.chain_manager.load(self.genesis_block)
extended_seed1 = "010300cebc4e25553afa0aab899f7838e59e18a48852fa9dfd5" \
"ae78278c371902aa9e6e9c1fa8a196d2dba0cbfd2f2d212d16c"
extended_seed2 = "01040097cbf736e725a5da3ccbdb78688f9261d54a9d752108f" \
"c331f51e46aca23757d42d49f9aeea3ba2818ed378e755b6c17"
random_xmss1 = XMSS.from_extended_seed(hstr2bin(extended_seed1))
random_xmss2 = XMSS.from_extended_seed(hstr2bin(extended_seed2))
alice_xmss = get_alice_xmss(4)
bob_xmss = get_bob_xmss(4)
slave_txn1 = SlaveTransaction.create(slave_pks=[random_xmss1.pk, random_xmss2.pk],
access_types=[0, 0],
fee=5,
xmss_pk=alice_xmss.pk)
slave_txn1.sign(alice_xmss)
slave_txn1.pbdata.nonce = 1
transfer_txn = TransferTransaction.create(addrs_to=[random_xmss1.address],
amounts=[40 * int(config.dev.shor_per_quanta)],
message_data=None,
fee=1 * config.dev.shor_per_quanta,
xmss_pk=random_xmss2.pk,
master_addr=alice_xmss.address)
transfer_txn.sign(random_xmss2)
transfer_txn.pbdata.nonce = 1
slave_txn2 = SlaveTransaction.create(slave_pks=[random_xmss1.pk],
access_types=[0],
fee=5,
xmss_pk=alice_xmss.pk)
slave_txn2.sign(alice_xmss)
slave_txn2.pbdata.nonce = 2
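# slave_txn2 re-registers random_xmss1.pk, which block 1 already associates with Alice, so block 2 must be rejected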
time_mock.return_value = 1615270948 # Very high to get an easy difficulty
block_1 = Block.create(dev_config=config.dev,
block_number=1,
prev_headerhash=self.genesis_block.headerhash,
prev_timestamp=self.genesis_block.timestamp,
transactions=[slave_txn1, transfer_txn],
miner_address=alice_xmss.address,
seed_height=None,
seed_hash=None)
block_1.set_nonces(config.dev, 3, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_1.blockheader, config.dev, False):
# block_1.set_nonces(config.dev, block_1.mining_nonce + 1)
# print(block_1.mining_nonce)
self.assertTrue(block_1.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_1)
self.assertTrue(result)
self.assertEqual(self.chain_manager.last_block, block_1)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state1 = self.chain_manager.get_optimized_address_state(random_xmss1.address)
random_addr_state2 = self.chain_manager.get_optimized_address_state(random_xmss2.address)
self.assertEqual(bob_addr_state.balance,
100 * int(config.dev.shor_per_quanta))
self.assertEqual(alice_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
block_1.block_reward + block_1.fee_reward -
slave_txn1.fee -
transfer_txn.fee -
transfer_txn.total_amount)
self.assertEqual(random_addr_state1.balance,
100 * int(config.dev.shor_per_quanta) +
transfer_txn.amounts[0])
self.assertEqual(random_addr_state2.balance, 100 * int(config.dev.shor_per_quanta))
slave_metadata = self.chain_manager.get_slave_pk_access_type(alice_addr_state.address, random_xmss1.pk)
self.assertEqual(slave_metadata.access_type, 0)
slave_metadata = self.chain_manager.get_slave_pk_access_type(alice_addr_state.address, random_xmss2.pk)
self.assertEqual(slave_metadata.access_type, 0)
self.assertEqual(bob_addr_state.nonce, 0)
self.assertEqual(alice_addr_state.nonce, 1)
self.assertEqual(random_addr_state1.nonce, 0)
self.assertEqual(random_addr_state2.nonce, 1)
block_2 = Block.create(dev_config=config.dev,
block_number=2,
prev_headerhash=block_1.headerhash,
prev_timestamp=block_1.timestamp,
transactions=[slave_txn2],
miner_address=alice_xmss.address,
seed_height=None,
seed_hash=None)
block_2.set_nonces(config.dev, 1, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_2.blockheader, config.dev, False):
# block_2.set_nonces(config.dev, block_2.mining_nonce + 1)
# print(block_2.mining_nonce)
self.assertTrue(block_2.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_2)
self.assertFalse(result)
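# Roll back to genesis and verify that balances, nonces and slave metadata are reverted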
self.assertTrue(self.chain_manager._rollback(self.genesis_block.headerhash))
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state1 = self.chain_manager.get_optimized_address_state(random_xmss1.address)
random_addr_state2 = self.chain_manager.get_optimized_address_state(random_xmss2.address)
self.assertEqual(bob_addr_state.balance, 100 * int(config.dev.shor_per_quanta))
self.assertEqual(alice_addr_state.balance, 100 * int(config.dev.shor_per_quanta))
self.assertEqual(random_addr_state1.balance, 100 * int(config.dev.shor_per_quanta))
self.assertEqual(random_addr_state2.balance, 100 * int(config.dev.shor_per_quanta))
slave_metadata = self.chain_manager.get_slave_pk_access_type(alice_addr_state.address, random_xmss1.pk)
self.assertIsNone(slave_metadata)
slave_metadata = self.chain_manager.get_slave_pk_access_type(alice_addr_state.address, random_xmss2.pk)
self.assertIsNone(slave_metadata)
self.assertEqual(bob_addr_state.nonce, 0)
self.assertEqual(alice_addr_state.nonce, 0)
self.assertEqual(random_addr_state1.nonce, 0)
self.assertEqual(random_addr_state2.nonce, 0)
@set_default_balance_size()
@patch('xrd.core.misc.ntp.getTime')
def test_add_block14(self, time_mock):
"""
Features Tested
- Transfer Transaction made by a slave address which is not associated with the master address
Expectation
- block_1 must be rejected as the slave is not associated with the master address
:param time_mock:
:return:
"""
with patch.object(DifficultyTracker, 'get', return_value=ask_difficulty_tracker('2', config.dev)):
self.chain_manager.load(self.genesis_block)
extended_seed1 = "010300cebc4e25553afa0aab899f7838e59e18a48852fa9dfd5" \
"ae78278c371902aa9e6e9c1fa8a196d2dba0cbfd2f2d212d16c"
extended_seed2 = "01040097cbf736e725a5da3ccbdb78688f9261d54a9d752108f" \
"c331f51e46aca23757d42d49f9aeea3ba2818ed378e755b6c17"
random_xmss1 = XMSS.from_extended_seed(hstr2bin(extended_seed1))
random_xmss2 = XMSS.from_extended_seed(hstr2bin(extended_seed2))
alice_xmss = get_alice_xmss(4)
bob_xmss = get_bob_xmss(4)
transfer_txn = TransferTransaction.create(addrs_to=[random_xmss1.address],
amounts=[40 * int(config.dev.shor_per_quanta)],
message_data=None,
fee=1 * config.dev.shor_per_quanta,
xmss_pk=random_xmss2.pk,
master_addr=alice_xmss.address)
transfer_txn.sign(random_xmss2)
transfer_txn.pbdata.nonce = 1
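# transfer_txn is signed by random_xmss2, which has never been registered as a slave of Alice, so block 1 must fail validation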
time_mock.return_value = 1615270948 # Very high to get an easy difficulty
block_1 = Block.create(dev_config=config.dev,
block_number=1,
prev_headerhash=self.genesis_block.headerhash,
prev_timestamp=self.genesis_block.timestamp,
transactions=[transfer_txn],
miner_address=alice_xmss.address,
seed_height=None,
seed_hash=None)
block_1.set_nonces(config.dev, 0, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_1.blockheader, config.dev, False):
# block_1.set_nonces(config.dev, block_1.mining_nonce + 1)
# print(block_1.mining_nonce)
self.assertFalse(block_1.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_1)
self.assertFalse(result)
self.assertEqual(self.chain_manager.last_block, self.genesis_block)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state1 = self.chain_manager.get_optimized_address_state(random_xmss1.address)
random_addr_state2 = self.chain_manager.get_optimized_address_state(random_xmss2.address)
self.assertEqual(bob_addr_state.balance, 100 * int(config.dev.shor_per_quanta))
self.assertEqual(alice_addr_state.balance, 100 * int(config.dev.shor_per_quanta))
self.assertEqual(random_addr_state1.balance, 100 * int(config.dev.shor_per_quanta))
self.assertEqual(random_addr_state2.balance, 100 * int(config.dev.shor_per_quanta))
self.assertEqual(bob_addr_state.nonce, 0)
self.assertEqual(alice_addr_state.nonce, 0)
self.assertEqual(random_addr_state1.nonce, 0)
self.assertEqual(random_addr_state2.nonce, 0)
slave_metadata = self.chain_manager.get_slave_pk_access_type(alice_addr_state.address, random_xmss1.pk)
self.assertIsNone(slave_metadata)
slave_metadata = self.chain_manager.get_slave_pk_access_type(alice_addr_state.address, random_xmss2.pk)
self.assertIsNone(slave_metadata)
@set_default_balance_size()
@patch('xrd.core.misc.ntp.getTime')
def test_add_block15(self, time_mock):
"""
Features Tested
- Behavior of Slave txn when duplicate slave transactions are added into a single block
Expectation
- block_1 must be rejected as the two slave transactions register the same slave public key
for the master address
:param time_mock:
:return:
"""
with patch.object(DifficultyTracker, 'get', return_value=ask_difficulty_tracker('2', config.dev)):
self.chain_manager.load(self.genesis_block)
extended_seed1 = "010300cebc4e25553afa0aab899f7838e59e18a48852fa9dfd5" \
"ae78278c371902aa9e6e9c1fa8a196d2dba0cbfd2f2d212d16c"
extended_seed2 = "01040097cbf736e725a5da3ccbdb78688f9261d54a9d752108f" \
"c331f51e46aca23757d42d49f9aeea3ba2818ed378e755b6c17"
random_xmss1 = XMSS.from_extended_seed(hstr2bin(extended_seed1))
random_xmss2 = XMSS.from_extended_seed(hstr2bin(extended_seed2))
alice_xmss = get_alice_xmss(4)
slave_txn1 = SlaveTransaction.create(slave_pks=[random_xmss1.pk, random_xmss2.pk],
access_types=[0, 0],
fee=5,
xmss_pk=alice_xmss.pk)
slave_txn1.sign(alice_xmss)
slave_txn1.pbdata.nonce = 1
slave_txn2 = SlaveTransaction.create(slave_pks=[random_xmss1.pk],
access_types=[0],
fee=5,
xmss_pk=alice_xmss.pk)
slave_txn2.sign(alice_xmss)
slave_txn2.pbdata.nonce = 2
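# Both slave transactions register random_xmss1.pk for Alice; including them in the same block must cause block 1 to be rejected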
time_mock.return_value = 1615270948 # Very high to get an easy difficulty
block_1 = Block.create(dev_config=config.dev,
block_number=1,
prev_headerhash=self.genesis_block.headerhash,
prev_timestamp=self.genesis_block.timestamp,
transactions=[slave_txn1, slave_txn2],
miner_address=alice_xmss.address,
seed_height=None,
seed_hash=None)
block_1.set_nonces(config.dev, 3, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_1.blockheader, config.dev, False):
# block_1.set_nonces(config.dev, block_1.mining_nonce + 1)
# print(block_1.mining_nonce)
self.assertTrue(block_1.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_1)
self.assertFalse(result)
@set_default_balance_size()
@set_hard_fork_block_number()
@patch('xrd.core.misc.ntp.getTime')
def test_rollback(self, time_mock):
"""
Features Tested
- Multi Sig Create, Spend and Vote transactions spread across multiple blocks
- Rollback of all multi sig state changes to the genesis block
Expectation
- Blocks 1 to 5 must be added as they include valid transactions
- multi_sig_spend must be executed once both signatories have voted and the threshold is reached
- After rollback to the genesis block, the multi sig address state must be removed and all balances restored
:param time_mock:
:return:
"""
with patch.object(DifficultyTracker, 'get', return_value=ask_difficulty_tracker('2', config.dev)):
self.chain_manager.load(self.genesis_block)
extended_seed = "010300cebc4e25553afa0aab899f7838e59e18a48852fa9dfd5" \
"ae78278c371902aa9e6e9c1fa8a196d2dba0cbfd2f2d212d16c"
random_xmss = XMSS.from_extended_seed(hstr2bin(extended_seed))
alice_xmss = get_alice_xmss(4)
bob_xmss = get_bob_xmss(4)
multi_sig_create = MultiSigCreate.create(signatories=[alice_xmss.address,
bob_xmss.address],
weights=[4, 6],
threshold=5,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_create.sign(alice_xmss)
multi_sig_create.pbdata.nonce = 1
multi_sig_address = MultiSigAddressState.generate_multi_sig_address(multi_sig_create.txhash)
transfer_transaction = TransferTransaction.create(addrs_to=[multi_sig_address],
amounts=[40 * int(config.dev.shor_per_quanta)],
message_data=None,
fee=1 * config.dev.shor_per_quanta,
xmss_pk=bob_xmss.pk)
transfer_transaction._data.nonce = 1
transfer_transaction.sign(bob_xmss)
self.assertTrue(transfer_transaction.validate_or_raise(True))
multi_sig_spend = MultiSigSpend.create(multi_sig_address=multi_sig_address,
addrs_to=[random_xmss.address, alice_xmss.address],
amounts=[10, 5],
expiry_block_number=100,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_spend.sign(alice_xmss)
multi_sig_spend.pbdata.nonce = 2
multi_sig_vote1 = MultiSigVote.create(shared_key=multi_sig_spend.txhash,
unvote=False,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_vote1.sign(alice_xmss)
multi_sig_vote1.pbdata.nonce = 3
multi_sig_vote2 = MultiSigVote.create(shared_key=multi_sig_spend.txhash,
unvote=False,
fee=0,
xmss_pk=bob_xmss.pk)
multi_sig_vote2.sign(bob_xmss)
multi_sig_vote2.pbdata.nonce = 2
time_mock.return_value = 1615270948 # Very high to get an easy difficulty
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(1))
block_1 = Block.create(dev_config=config.dev,
block_number=1,
prev_headerhash=self.genesis_block.headerhash,
prev_timestamp=self.genesis_block.timestamp,
transactions=[multi_sig_create],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_1.set_nonces(config.dev, 129, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_1.blockheader, config.dev, False):
#     block_1.set_nonces(config.dev, block_1.mining_nonce + 1)
#     print(block_1.mining_nonce)
self.assertTrue(block_1.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_1)
self.assertTrue(result)
self.assertEqual(self.chain_manager.last_block, block_1)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(multi_sig_address_state.transaction_hash_count(), 1)
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(2))
block_2 = Block.create(dev_config=config.dev,
block_number=2,
prev_headerhash=block_1.headerhash,
prev_timestamp=block_1.timestamp,
transactions=[transfer_transaction],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_2.set_nonces(config.dev, 0, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_2.blockheader, config.dev, False):
#     block_2.set_nonces(config.dev, block_2.mining_nonce + 1)
#     print(block_2.mining_nonce)
self.assertTrue(block_2.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_2)
self.assertTrue(result)
self.assertEqual(self.chain_manager.last_block, block_2)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(multi_sig_address_state.transaction_hash_count(), 2)
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(3))
block_3 = Block.create(dev_config=config.dev,
block_number=3,
prev_headerhash=block_2.headerhash,
prev_timestamp=block_2.timestamp,
transactions=[multi_sig_spend],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_3.set_nonces(config.dev, 2, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_3.blockheader, config.dev, False):
# block_3.set_nonces(config.dev, block_3.mining_nonce + 1)
# print(block_3.mining_nonce)
self.assertTrue(block_3.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_3)
self.assertTrue(result)
self.assertEqual(self.chain_manager.last_block, block_3)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(multi_sig_address_state.transaction_hash_count(), 3)
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(4))
block_4 = Block.create(dev_config=config.dev,
block_number=4,
prev_headerhash=block_3.headerhash,
prev_timestamp=block_3.timestamp,
transactions=[multi_sig_vote1],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_4.set_nonces(config.dev, 4, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_4.blockheader, config.dev, False):
# block_4.set_nonces(config.dev, block_4.mining_nonce + 1)
# print(block_4.mining_nonce)
self.assertTrue(block_4.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_4)
self.assertTrue(result)
self.assertEqual(self.chain_manager.last_block, block_4)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(multi_sig_address_state.transaction_hash_count(), 3)
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(5))
block_5 = Block.create(dev_config=config.dev,
block_number=5,
prev_headerhash=block_4.headerhash,
prev_timestamp=block_4.timestamp,
transactions=[multi_sig_vote2],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_5.set_nonces(config.dev, 0, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_5.blockheader, config.dev, False):
#     block_5.set_nonces(config.dev, block_5.mining_nonce + 1)
#     print(block_5.mining_nonce)
self.assertTrue(block_5.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_5)
self.assertTrue(result)
self.assertEqual(self.chain_manager.last_block, block_5)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(bob_addr_state.balance, 59 * int(config.dev.shor_per_quanta))
self.assertEqual(alice_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
block_1.block_reward + block_1.fee_reward +
block_2.block_reward + block_2.fee_reward +
block_3.block_reward + block_3.fee_reward +
block_4.block_reward + block_4.fee_reward +
block_5.block_reward + block_5.fee_reward +
5)
self.assertEqual(random_addr_state.balance, 100 * int(config.dev.shor_per_quanta) + 10)
self.assertEqual(multi_sig_address_state.balance, 40 * int(config.dev.shor_per_quanta) - 15)
self.assertEqual(multi_sig_address_state.transaction_hash_count(), 3)
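# Roll back the whole chain to the genesis block and verify all state changes are reverted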
self.chain_manager._rollback(self.genesis_block.headerhash)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
self.assertIsNone(MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db, multi_sig_address))
self.assertEqual(bob_addr_state.balance, 100 * int(config.dev.shor_per_quanta))
self.assertEqual(alice_addr_state.balance, 100 * int(config.dev.shor_per_quanta))
self.assertEqual(random_addr_state.balance, 100 * int(config.dev.shor_per_quanta))
@set_default_balance_size()
@set_hard_fork_block_number()
@patch('xrd.core.misc.ntp.getTime')
def test_rollback2(self, time_mock):
"""
Features Tested
- Behavior of Block validation when two multi sig spend transactions are made, such that
the last multi sig spend transaction gets enough votes but the multi sig address doesn't
have enough funds for the last multi sig spend transaction.
- Behavior of Rollback for block number 2
- Transaction storage in State
- OTS index usage before and after rollback
Expectation
- Both blocks must be added.
- multi_sig_spend1 must be executed at block 1
- multi_sig_spend2 must have received enough votes at block 2,
but must not execute due to lack of funds.
- Rollback must happen successfully
- multi_sig_vote3 and multi_sig_vote4 must be found in the state before rollback
- multi_sig_vote3 and multi_sig_vote4 must be deleted from the state after the rollback
- Used OTS indexes for multi_sig_vote3 and multi_sig_vote4 after rollback must be found as unused
:param time_mock:
:return:
"""
with patch.object(DifficultyTracker, 'get', return_value=ask_difficulty_tracker('2', config.dev)):
self.chain_manager.load(self.genesis_block)
extended_seed = "010300cebc4e25553afa0aab899f7838e59e18a48852fa9dfd5" \
"ae78278c371902aa9e6e9c1fa8a196d2dba0cbfd2f2d212d16c"
random_xmss = XMSS.from_extended_seed(hstr2bin(extended_seed))
alice_xmss = get_alice_xmss(4)
bob_xmss = get_bob_xmss(4)
multi_sig_create = MultiSigCreate.create(signatories=[alice_xmss.address,
bob_xmss.address],
weights=[4, 6],
threshold=8,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_create.sign(alice_xmss)
multi_sig_create.pbdata.nonce = 1
multi_sig_address = MultiSigAddressState.generate_multi_sig_address(multi_sig_create.txhash)
transfer_transaction = TransferTransaction.create(addrs_to=[multi_sig_address],
amounts=[40],
message_data=None,
fee=1 * config.dev.shor_per_quanta,
xmss_pk=bob_xmss.pk)
transfer_transaction._data.nonce = 1
transfer_transaction.sign(bob_xmss)
self.assertTrue(transfer_transaction.validate_or_raise(True))
multi_sig_spend1 = MultiSigSpend.create(multi_sig_address=multi_sig_address,
addrs_to=[random_xmss.address, alice_xmss.address],
amounts=[20, 15],
expiry_block_number=100,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_spend1.sign(alice_xmss)
multi_sig_spend1.pbdata.nonce = 2
multi_sig_spend2 = MultiSigSpend.create(multi_sig_address=multi_sig_address,
addrs_to=[random_xmss.address, alice_xmss.address],
amounts=[10, 10],
expiry_block_number=100,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_spend2.sign(alice_xmss)
multi_sig_spend2.pbdata.nonce = 3
multi_sig_vote1 = MultiSigVote.create(shared_key=multi_sig_spend1.txhash,
unvote=False,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_vote1.sign(alice_xmss)
multi_sig_vote1.pbdata.nonce = 4
multi_sig_vote2 = MultiSigVote.create(shared_key=multi_sig_spend1.txhash,
unvote=False,
fee=0,
xmss_pk=bob_xmss.pk)
multi_sig_vote2.sign(bob_xmss)
multi_sig_vote2.pbdata.nonce = 2
multi_sig_vote3 = MultiSigVote.create(shared_key=multi_sig_spend2.txhash,
unvote=False,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_vote3.sign(alice_xmss)
multi_sig_vote3.pbdata.nonce = 5
multi_sig_vote4 = MultiSigVote.create(shared_key=multi_sig_spend2.txhash,
unvote=False,
fee=0,
xmss_pk=bob_xmss.pk)
multi_sig_vote4.sign(bob_xmss)
multi_sig_vote4.pbdata.nonce = 3
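# Block 1 packs the multi sig creation, the funding transfer, both spends and both votes for multi_sig_spend1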
time_mock.return_value = 1615270948 # Very high to get an easy difficulty
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(1))
block_1 = Block.create(dev_config=config.dev,
block_number=1,
prev_headerhash=self.genesis_block.headerhash,
prev_timestamp=self.genesis_block.timestamp,
transactions=[multi_sig_create, transfer_transaction, multi_sig_spend1,
multi_sig_spend2, multi_sig_vote1, multi_sig_vote2],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_1.set_nonces(config.dev, 2, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_1.blockheader, config.dev, False):
# block_1.set_nonces(config.dev, block_1.mining_nonce + 1)
# print(block_1.mining_nonce)
self.assertTrue(block_1.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_1)
self.assertTrue(result)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend1.txhash)
self.assertTrue(vote_stats.executed)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(bob_addr_state.balance,
100 * int(config.dev.shor_per_quanta) -
transfer_transaction.fee -
transfer_transaction.total_amount)
self.assertEqual(alice_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[1] +
block_1.fee_reward +
block_1.block_reward)
self.assertEqual(random_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[0])
self.assertIsNotNone(multi_sig_address_state)
self.assertEqual(multi_sig_address_state.balance,
transfer_transaction.total_amount -
multi_sig_spend1.total_amount)
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(2))
block_2 = Block.create(dev_config=config.dev,
block_number=2,
prev_headerhash=block_1.headerhash,
prev_timestamp=block_1.timestamp,
transactions=[multi_sig_vote3, multi_sig_vote4],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_2.set_nonces(config.dev, 0, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_2.blockheader, config.dev, False):
#     block_2.set_nonces(config.dev, block_2.mining_nonce + 1)
#     print(block_2.mining_nonce)
self.assertTrue(block_2.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_2)
self.assertTrue(result)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend2.txhash)
self.assertFalse(vote_stats.executed)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(bob_addr_state.balance,
100 * int(config.dev.shor_per_quanta) -
transfer_transaction.fee -
transfer_transaction.total_amount)
self.assertEqual(alice_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[1] +
block_1.fee_reward +
block_1.block_reward +
block_2.fee_reward +
block_2.block_reward)
self.assertEqual(random_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[0])
self.assertIsNotNone(multi_sig_address_state)
self.assertEqual(multi_sig_address_state.balance,
transfer_transaction.total_amount -
multi_sig_spend1.total_amount)
# Check Txns in State
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend1.txhash)
self.assertIsNotNone(vote_stats)
self.assertTrue(vote_stats.executed)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend2.txhash)
self.assertIsNotNone(vote_stats)
self.assertFalse(vote_stats.executed)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_create.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_create.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_spend1.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_spend1.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_spend2.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_spend2.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote1.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_vote1.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote2.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_vote2.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote3.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_vote3.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote4.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_vote4.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(transfer_transaction.txhash)
self.assertEqual(tx_meta_data[0].pbdata, transfer_transaction.pbdata)
# Check OTS key usage
p = PaginatedBitfield(False, self.state._db)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_create.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_spend1.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_spend2.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_vote1.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, multi_sig_vote2.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_vote3.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, multi_sig_vote4.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, transfer_transaction.ots_key)
self.assertTrue(result)
# Rollback state to block number 1
self.chain_manager._rollback(block_1.headerhash)
# Post Rollback tests
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend1.txhash)
self.assertTrue(vote_stats.executed)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend2.txhash)
self.assertFalse(vote_stats.executed)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote3.txhash)
self.assertIsNone(tx_meta_data)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote4.txhash)
self.assertIsNone(tx_meta_data)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
self.assertEqual(bob_addr_state.nonce, 2)
self.assertEqual(alice_addr_state.nonce, 4)
self.assertEqual(random_addr_state.nonce, 0)
self.assertEqual(bob_addr_state.balance,
100 * int(config.dev.shor_per_quanta) -
transfer_transaction.total_amount -
transfer_transaction.fee)
self.assertEqual(alice_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[1] +
block_1.fee_reward +
block_1.block_reward)
self.assertEqual(random_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[0])
# Check OTS key usage
p = PaginatedBitfield(False, self.state._db)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_create.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_spend1.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_spend2.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_vote1.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, multi_sig_vote2.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_vote3.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, multi_sig_vote4.ots_key)
self.assertFalse(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, transfer_transaction.ots_key)
self.assertTrue(result)
@set_default_balance_size()
@set_hard_fork_block_number()
@patch('xrd.core.misc.ntp.getTime')
def test_roll_back3(self, time_mock):
"""
Features Tested
- Behavior of Block validation when two multi sig spend transactions are made, such that
the last multi sig spend transaction gets enough votes but the multi sig address doesn't
have enough funds for the last multi sig spend transaction.
The transaction in block 3 then sends more funds to the multi sig address, making the funds sufficient.
However, multi_sig_spend2 is only re-evaluated when another vote txn for it is executed, so in
block 4 a multi_sig_vote txn is added to unvote it, and in block 5 a multi_sig_vote txn is added
to vote again, reaching the threshold.
- Behavior of Rollback
- Transaction storage in State
- OTS index usage before and after rollback
Expectation
- All five blocks must be added.
- multi_sig_spend1 must be executed at block 1
- multi_sig_spend2 must have received enough votes at block 2,
but must not execute due to lack of funds.
- In block 5, when re-voted there will be enough funds in the multi sig address, so the
multi_sig_spend2 txn must be executed.
- Rollback must happen successfully
- Used OTS indexes after rollback must be found as unused
- Transaction must be found in State before roll back
- Transaction must not be found in State after roll back
:param time_mock:
:return:
"""
with patch.object(DifficultyTracker, 'get', return_value=ask_difficulty_tracker('2', config.dev)):
self.chain_manager.load(self.genesis_block)
extended_seed = "010300cebc4e25553afa0aab899f7838e59e18a48852fa9dfd5" \
"ae78278c371902aa9e6e9c1fa8a196d2dba0cbfd2f2d212d16c"
random_xmss = XMSS.from_extended_seed(hstr2bin(extended_seed))
alice_xmss = get_alice_xmss(4)
bob_xmss = get_bob_xmss(4)
multi_sig_create = MultiSigCreate.create(signatories=[alice_xmss.address,
bob_xmss.address],
weights=[4, 6],
threshold=8,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_create.sign(alice_xmss)
multi_sig_create.pbdata.nonce = 1
multi_sig_address = MultiSigAddressState.generate_multi_sig_address(multi_sig_create.txhash)
transfer_transaction1 = TransferTransaction.create(addrs_to=[multi_sig_address],
amounts=[40],
message_data=None,
fee=1 * config.dev.shor_per_quanta,
xmss_pk=bob_xmss.pk)
transfer_transaction1._data.nonce = 1
transfer_transaction1.sign(bob_xmss)
self.assertTrue(transfer_transaction1.validate_or_raise(True))
multi_sig_spend1 = MultiSigSpend.create(multi_sig_address=multi_sig_address,
addrs_to=[random_xmss.address, alice_xmss.address],
amounts=[20, 15],
expiry_block_number=100,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_spend1.sign(alice_xmss)
multi_sig_spend1.pbdata.nonce = 2
multi_sig_spend2 = MultiSigSpend.create(multi_sig_address=multi_sig_address,
addrs_to=[random_xmss.address, alice_xmss.address],
amounts=[10, 10],
expiry_block_number=100,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_spend2.sign(alice_xmss)
multi_sig_spend2.pbdata.nonce = 3
multi_sig_vote1 = MultiSigVote.create(shared_key=multi_sig_spend1.txhash,
unvote=False,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_vote1.sign(alice_xmss)
multi_sig_vote1.pbdata.nonce = 4
multi_sig_vote2 = MultiSigVote.create(shared_key=multi_sig_spend1.txhash,
unvote=False,
fee=0,
xmss_pk=bob_xmss.pk)
multi_sig_vote2.sign(bob_xmss)
multi_sig_vote2.pbdata.nonce = 2
multi_sig_vote3 = MultiSigVote.create(shared_key=multi_sig_spend2.txhash,
unvote=False,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_vote3.sign(alice_xmss)
multi_sig_vote3.pbdata.nonce = 5
multi_sig_vote4 = MultiSigVote.create(shared_key=multi_sig_spend2.txhash,
unvote=False,
fee=0,
xmss_pk=bob_xmss.pk)
multi_sig_vote4.sign(bob_xmss)
multi_sig_vote4.pbdata.nonce = 3
time_mock.return_value = 1615270948 # Very high to get an easy difficulty
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(1))
block_1 = Block.create(dev_config=config.dev,
block_number=1,
prev_headerhash=self.genesis_block.headerhash,
prev_timestamp=self.genesis_block.timestamp,
transactions=[multi_sig_create, transfer_transaction1, multi_sig_spend1,
multi_sig_spend2, multi_sig_vote1, multi_sig_vote2],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_1.set_nonces(config.dev, 2, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_1.blockheader, config.dev, False):
# block_1.set_nonces(config.dev, block_1.mining_nonce + 1)
# print(block_1.mining_nonce)
self.assertTrue(block_1.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_1)
self.assertTrue(result)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend1.txhash)
self.assertTrue(vote_stats.executed)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(bob_addr_state.balance,
100 * int(config.dev.shor_per_quanta) -
transfer_transaction1.fee -
transfer_transaction1.total_amount)
self.assertEqual(alice_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[1] +
block_1.fee_reward +
block_1.block_reward)
self.assertEqual(random_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[0])
self.assertIsNotNone(multi_sig_address_state)
self.assertEqual(multi_sig_address_state.balance,
transfer_transaction1.total_amount -
multi_sig_spend1.total_amount)
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(2))
block_2 = Block.create(dev_config=config.dev,
block_number=2,
prev_headerhash=block_1.headerhash,
prev_timestamp=block_1.timestamp,
transactions=[multi_sig_vote3, multi_sig_vote4],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_2.set_nonces(config.dev, 0, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_2.blockheader, config.dev, False):
#     block_2.set_nonces(config.dev, block_2.mining_nonce + 1)
#     print(block_2.mining_nonce)
self.assertTrue(block_2.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_2)
self.assertTrue(result)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend2.txhash)
self.assertFalse(vote_stats.executed)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(bob_addr_state.balance,
100 * int(config.dev.shor_per_quanta) -
transfer_transaction1.fee -
transfer_transaction1.total_amount)
self.assertEqual(alice_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[1] +
block_1.fee_reward +
block_1.block_reward +
block_2.fee_reward +
block_2.block_reward)
self.assertEqual(random_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[0])
self.assertIsNotNone(multi_sig_address_state)
self.assertEqual(multi_sig_address_state.balance,
transfer_transaction1.total_amount -
multi_sig_spend1.total_amount)
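# Block 3: transfer additional funds to the multi sig address so that multi_sig_spend2 can be funded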
transfer_transaction2 = TransferTransaction.create(addrs_to=[multi_sig_address],
amounts=[40],
message_data=None,
fee=1 * config.dev.shor_per_quanta,
xmss_pk=bob_xmss.pk)
transfer_transaction2._data.nonce = 4
transfer_transaction2.sign(bob_xmss)
self.assertTrue(transfer_transaction2.validate_or_raise(True))
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(3))
block_3 = Block.create(dev_config=config.dev,
block_number=3,
prev_headerhash=block_2.headerhash,
prev_timestamp=block_2.timestamp,
transactions=[transfer_transaction2],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_3.set_nonces(config.dev, 0, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
        # while not self.chain_manager.validate_mining_nonce(block_3.blockheader, config.dev, False):
        #     block_3.set_nonces(config.dev, block_3.mining_nonce + 1)
        # print(block_3.mining_nonce)
self.assertTrue(block_3.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_3)
self.assertTrue(result)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend2.txhash)
self.assertFalse(vote_stats.executed)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(bob_addr_state.balance,
100 * int(config.dev.shor_per_quanta) -
transfer_transaction1.fee -
transfer_transaction1.total_amount -
transfer_transaction2.fee -
transfer_transaction2.total_amount)
self.assertEqual(alice_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[1] +
block_1.fee_reward +
block_1.block_reward +
block_2.fee_reward +
block_2.block_reward +
block_3.fee_reward +
block_3.block_reward)
self.assertEqual(random_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[0])
self.assertIsNotNone(multi_sig_address_state)
self.assertEqual(multi_sig_address_state.balance,
transfer_transaction1.total_amount +
transfer_transaction2.total_amount -
multi_sig_spend1.total_amount)
multi_sig_vote5 = MultiSigVote.create(shared_key=multi_sig_spend2.txhash,
unvote=True,
fee=0,
xmss_pk=bob_xmss.pk)
multi_sig_vote5.sign(bob_xmss)
multi_sig_vote5.pbdata.nonce = 5
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(4))
block_4 = Block.create(dev_config=config.dev,
block_number=4,
prev_headerhash=block_3.headerhash,
prev_timestamp=block_3.timestamp,
transactions=[multi_sig_vote5],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_4.set_nonces(config.dev, 4, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
        # while not self.chain_manager.validate_mining_nonce(block_4.blockheader, config.dev, False):
        #     block_4.set_nonces(config.dev, block_4.mining_nonce + 1)
        # print(block_4.mining_nonce)
self.assertTrue(block_4.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_4)
self.assertTrue(result)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend2.txhash)
self.assertFalse(vote_stats.executed)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(bob_addr_state.balance,
100 * int(config.dev.shor_per_quanta) -
transfer_transaction1.fee -
transfer_transaction1.total_amount -
transfer_transaction2.fee -
transfer_transaction2.total_amount -
multi_sig_vote5.fee)
self.assertEqual(alice_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[1] +
block_1.fee_reward +
block_1.block_reward +
block_2.fee_reward +
block_2.block_reward +
block_3.fee_reward +
block_3.block_reward +
block_4.fee_reward +
block_4.block_reward)
self.assertEqual(random_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[0])
self.assertIsNotNone(multi_sig_address_state)
self.assertEqual(multi_sig_address_state.balance,
transfer_transaction1.total_amount +
transfer_transaction2.total_amount -
multi_sig_spend1.total_amount)
multi_sig_vote6 = MultiSigVote.create(shared_key=multi_sig_spend2.txhash,
unvote=False,
fee=0,
xmss_pk=bob_xmss.pk)
multi_sig_vote6.sign(bob_xmss)
multi_sig_vote6.pbdata.nonce = 6
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(5))
block_5 = Block.create(dev_config=config.dev,
block_number=5,
prev_headerhash=block_4.headerhash,
prev_timestamp=block_4.timestamp,
transactions=[multi_sig_vote6],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_5.set_nonces(config.dev, 3, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_5.blockheader, config.dev, False):
# block_5.set_nonces(config.dev, block_5.mining_nonce + 1)
# print(block_5.mining_nonce)
self.assertTrue(block_5.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_5)
self.assertTrue(result)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend2.txhash)
self.assertTrue(vote_stats.executed)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(bob_addr_state.balance,
100 * int(config.dev.shor_per_quanta) -
transfer_transaction1.fee -
transfer_transaction1.total_amount -
transfer_transaction2.fee -
transfer_transaction2.total_amount -
multi_sig_vote5.fee -
multi_sig_vote6.fee)
self.assertEqual(alice_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[1] +
multi_sig_spend2.amounts[1] +
block_1.fee_reward +
block_1.block_reward +
block_2.fee_reward +
block_2.block_reward +
block_3.fee_reward +
block_3.block_reward +
block_4.fee_reward +
block_4.block_reward +
block_5.fee_reward +
block_5.block_reward)
self.assertEqual(random_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[0] +
multi_sig_spend2.amounts[1])
self.assertIsNotNone(multi_sig_address_state)
self.assertEqual(multi_sig_address_state.balance,
transfer_transaction1.total_amount +
transfer_transaction2.total_amount -
multi_sig_spend1.total_amount -
multi_sig_spend2.total_amount)
# Check Txns in State
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend1.txhash)
self.assertIsNotNone(vote_stats)
self.assertTrue(vote_stats.executed)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend2.txhash)
self.assertIsNotNone(vote_stats)
self.assertTrue(vote_stats.executed)
self.assertEqual(vote_stats.total_weight, 10)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_create.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_create.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_spend1.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_spend1.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_spend2.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_spend2.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote1.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_vote1.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote2.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_vote2.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote3.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_vote3.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote4.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_vote4.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote5.txhash)
multi_sig_vote5.set_prev_tx_hash(multi_sig_vote4.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_vote5.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote6.txhash)
multi_sig_vote6.set_prev_tx_hash(multi_sig_vote5.txhash)
self.assertEqual(tx_meta_data[0].pbdata, multi_sig_vote6.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(transfer_transaction1.txhash)
self.assertEqual(tx_meta_data[0].pbdata, transfer_transaction1.pbdata)
tx_meta_data = self.chain_manager.get_tx_metadata(transfer_transaction2.txhash)
self.assertEqual(tx_meta_data[0].pbdata, transfer_transaction2.pbdata)
# Check OTS key usage
p = PaginatedBitfield(False, self.state._db)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_create.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_spend1.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_spend2.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_vote1.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, multi_sig_vote2.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(alice_addr_state.address, multi_sig_vote3.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, multi_sig_vote4.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, multi_sig_vote5.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, multi_sig_vote6.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, transfer_transaction1.ots_key)
self.assertTrue(result)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, transfer_transaction2.ots_key)
self.assertTrue(result)
# Rollback state to block number 4
self.assertTrue(self.chain_manager._rollback(block_4.headerhash))
self.assertEqual(self.chain_manager.last_block.block_number, block_4.block_number)
# Post Rollback tests
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend1.txhash)
self.assertTrue(vote_stats.executed)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend2.txhash)
self.assertFalse(vote_stats.executed)
self.assertEqual(vote_stats.total_weight, 4)
tx_meta_data = self.chain_manager.get_tx_metadata(multi_sig_vote6.txhash)
self.assertIsNone(tx_meta_data)
bob_addr_state = self.chain_manager.get_optimized_address_state(bob_xmss.address)
alice_addr_state = self.chain_manager.get_optimized_address_state(alice_xmss.address)
random_addr_state = self.chain_manager.get_optimized_address_state(random_xmss.address)
multi_sig_address_state = MultiSigAddressState.get_multi_sig_address_state_by_address(self.state._db,
multi_sig_address)
self.assertEqual(bob_addr_state.nonce, 5)
self.assertEqual(alice_addr_state.nonce, 5)
self.assertEqual(random_addr_state.nonce, 0)
self.assertEqual(bob_addr_state.balance,
100 * int(config.dev.shor_per_quanta) -
transfer_transaction1.fee -
transfer_transaction1.total_amount -
transfer_transaction2.fee -
transfer_transaction2.total_amount -
multi_sig_vote5.fee)
self.assertEqual(alice_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[1] +
block_1.fee_reward +
block_1.block_reward +
block_2.fee_reward +
block_2.block_reward +
block_3.fee_reward +
block_3.block_reward +
block_4.fee_reward +
block_4.block_reward)
self.assertEqual(random_addr_state.balance,
100 * int(config.dev.shor_per_quanta) +
multi_sig_spend1.amounts[0])
self.assertIsNotNone(multi_sig_address_state)
self.assertEqual(multi_sig_address_state.balance,
transfer_transaction1.total_amount +
transfer_transaction2.total_amount -
multi_sig_spend1.total_amount)
# Check OTS key usage
p = PaginatedBitfield(False, self.state._db)
result = p.load_bitfield_and_ots_key_reuse(bob_addr_state.address, multi_sig_vote6.ots_key)
self.assertFalse(result)
# Rollback state to block number 3
self.assertTrue(self.chain_manager._rollback(block_3.headerhash))
self.assertEqual(self.chain_manager.last_block.block_number, 3)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend1.txhash)
self.assertTrue(vote_stats.executed)
vote_stats = self.chain_manager.get_vote_stats(multi_sig_spend2.txhash)
self.assertFalse(vote_stats.executed)
self.assertEqual(vote_stats.total_weight, 10)
@patch('xrd.core.misc.ntp.getTime', new=replacement_getTime)
def test_get_headerhashes(self):
block_1 = create_block(1, self.genesis_block, alice.address)
block_2 = create_block(2, block_1, alice.address)
self.chain_manager.load(self.genesis_block)
self.chain_manager.add_block(block_1)
self.chain_manager.add_block(block_2)
node_header_hash = self.chain_manager.get_headerhashes(0)
self.assertEqual(node_header_hash.headerhashes,
[self.genesis_block.headerhash, block_1.headerhash, block_2.headerhash])
@patch('xrd.core.misc.ntp.getTime', new=replacement_getTime)
def test_fork_recovery(self):
# When the node finds that it has been on the slower chain all this time, it runs _fork_recovery() to _rollback
# to an earlier state and switch to the longer chain.
block_1 = create_block(1, self.genesis_block, alice.address)
block_2 = create_block(2, block_1, alice.address)
block_3 = create_block(3, block_2, alice.address)
self.main_chain = [block_2, block_3]
block_2_alt = create_block(2, block_1, bob.address)
block_3_alt = create_block(3, block_2_alt, bob.address)
block_4_alt = create_block(4, block_3_alt, bob.address)
self.alt_chain = [block_2_alt, block_3_alt, block_4_alt]
self.chain_manager.load(self.genesis_block)
self.chain_manager.add_block(block_1)
self.chain_manager.add_block(block_2)
self.chain_manager.add_block(block_3)
# Start adding the forked block to the chain manager. It accepts, and does nothing else.
self.chain_manager.add_block(block_2_alt)
self.assertEqual(self.chain_manager.last_block, block_3)
# We lengthen the fork. Still, the chain manager stays on the first chain.
self.chain_manager.add_block(block_3_alt)
self.assertEqual(self.chain_manager.last_block, block_3)
# When it is obvious that the fork is longer (has a higher cum. diff), the chain manager invokes
# _fork_recovery() and switches over to the fork
self.chain_manager.add_block(block_4_alt)
self.assertEqual(self.chain_manager.last_block, block_4_alt)
@set_default_balance_size()
@set_hard_fork_block_number()
@patch('xrd.core.misc.ntp.getTime')
def test_update_state_container(self, time_mock):
"""
Features Tested
- Behavior of update_state_container when a multisig vote txn is made for an unknown multi sig spend txn
Expectation
- update_state_container must return false
:param time_mock:
:return:
"""
with patch.object(DifficultyTracker, 'get', return_value=ask_difficulty_tracker('2', config.dev)):
self.chain_manager.load(self.genesis_block)
extended_seed = "010300cebc4e25553afa0aab899f7838e59e18a48852fa9dfd5" \
"ae78278c371902aa9e6e9c1fa8a196d2dba0cbfd2f2d212d16c"
random_xmss = XMSS.from_extended_seed(hstr2bin(extended_seed))
alice_xmss = get_alice_xmss(4)
bob_xmss = get_bob_xmss(4)
multi_sig_create = MultiSigCreate.create(signatories=[alice_xmss.address,
bob_xmss.address],
weights=[4, 6],
threshold=5,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_create.sign(alice_xmss)
multi_sig_create.pbdata.nonce = 1
multi_sig_address = MultiSigAddressState.generate_multi_sig_address(multi_sig_create.txhash)
transfer_transaction = TransferTransaction.create(addrs_to=[multi_sig_address],
amounts=[40 * int(config.dev.shor_per_quanta)],
message_data=None,
fee=1 * config.dev.shor_per_quanta,
xmss_pk=bob_xmss.pk)
transfer_transaction._data.nonce = 1
transfer_transaction.sign(bob_xmss)
self.assertTrue(transfer_transaction.validate_or_raise(True))
multi_sig_spend = MultiSigSpend.create(multi_sig_address=multi_sig_address,
addrs_to=[random_xmss.address, alice_xmss.address],
amounts=[10, 5],
expiry_block_number=100,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_spend.sign(alice_xmss)
multi_sig_spend.pbdata.nonce = 2
multi_sig_vote1 = MultiSigVote.create(shared_key=multi_sig_spend.txhash,
unvote=False,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_vote1.sign(alice_xmss)
multi_sig_vote1.pbdata.nonce = 3
time_mock.return_value = 1615270948 # Very high to get an easy difficulty
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(1))
block_1 = Block.create(dev_config=config.dev,
block_number=1,
prev_headerhash=self.genesis_block.headerhash,
prev_timestamp=self.genesis_block.timestamp,
transactions=[multi_sig_create],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_1.set_nonces(config.dev, 3, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_1.blockheader, config.dev, False):
# block_1.set_nonces(config.dev, block_1.mining_nonce + 1)
# print(block_1.mining_nonce)
self.assertTrue(block_1.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_1)
self.assertTrue(result)
state_container = self.chain_manager.new_state_container(set(),
10,
False,
None)
self.assertFalse(self.chain_manager.update_state_container(multi_sig_vote1, state_container))
@set_default_balance_size()
@set_hard_fork_block_number()
@patch('xrd.core.misc.ntp.getTime')
def test_update_state_container2(self, time_mock):
"""
Features Tested
- Behavior of Block validation when multisig create txn & multisig spend txn
          both are added into the same block
- Behavior of update_state_container for multi_sig_vote1 and multi_sig_vote2 txn
Expectation
        - update_state_container must return false and thus the block should not be added, as multi_sig_create
          doesn't have any balance
- update_state_container must return false for both multi_sig_vote1 and multi_sig_vote2 txn
:param time_mock:
:return:
"""
with patch.object(DifficultyTracker, 'get', return_value=ask_difficulty_tracker('2', config.dev)):
self.chain_manager.load(self.genesis_block)
extended_seed = "010300cebc4e25553afa0aab899f7838e59e18a48852fa9dfd5" \
"ae78278c371902aa9e6e9c1fa8a196d2dba0cbfd2f2d212d16c"
random_xmss = XMSS.from_extended_seed(hstr2bin(extended_seed))
alice_xmss = get_alice_xmss(4)
bob_xmss = get_bob_xmss(4)
multi_sig_create = MultiSigCreate.create(signatories=[alice_xmss.address,
bob_xmss.address],
weights=[4, 6],
threshold=5,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_create.sign(alice_xmss)
multi_sig_create.pbdata.nonce = 1
multi_sig_address = MultiSigAddressState.generate_multi_sig_address(multi_sig_create.txhash)
transfer_transaction = TransferTransaction.create(addrs_to=[multi_sig_address],
amounts=[40 * int(config.dev.shor_per_quanta)],
message_data=None,
fee=1 * config.dev.shor_per_quanta,
xmss_pk=bob_xmss.pk)
transfer_transaction._data.nonce = 1
transfer_transaction.sign(bob_xmss)
self.assertTrue(transfer_transaction.validate_or_raise(True))
multi_sig_spend = MultiSigSpend.create(multi_sig_address=multi_sig_address,
addrs_to=[random_xmss.address, alice_xmss.address],
amounts=[10, 5],
expiry_block_number=100,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_spend.sign(alice_xmss)
multi_sig_spend.pbdata.nonce = 2
multi_sig_vote1 = MultiSigVote.create(shared_key=multi_sig_spend.txhash,
unvote=False,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_vote1.sign(alice_xmss)
multi_sig_vote1.pbdata.nonce = 3
multi_sig_vote2 = MultiSigVote.create(shared_key=multi_sig_spend.txhash,
unvote=False,
fee=0,
xmss_pk=bob_xmss.pk)
multi_sig_vote2.sign(bob_xmss)
multi_sig_vote2.pbdata.nonce = 2
time_mock.return_value = 1615270948 # Very high to get an easy difficulty
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(1))
block_1 = Block.create(dev_config=config.dev,
block_number=1,
prev_headerhash=self.genesis_block.headerhash,
prev_timestamp=self.genesis_block.timestamp,
transactions=[multi_sig_create, multi_sig_spend],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_1.set_nonces(config.dev, 0, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# from xrd.core.PoWValidator import PoWValidator
# while not self.chain_manager.validate_mining_nonce(block_1.blockheader, config.dev, False):
# block_1.set_nonces(config.dev, block_1.mining_nonce + 1)
# print(block_1.mining_nonce)
self.assertTrue(block_1.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_1)
self.assertFalse(result)
state_container = self.chain_manager.new_state_container(set(),
10,
False,
None)
self.assertFalse(self.chain_manager.update_state_container(multi_sig_vote1, state_container))
self.assertFalse(self.chain_manager.update_state_container(multi_sig_vote2, state_container))
@set_default_balance_size()
@set_hard_fork_block_number()
@patch('xrd.core.misc.ntp.getTime')
def test_update_state_container3(self, time_mock):
"""
Features Tested
- Behavior of Block validation when multisig create txn, transfer txn & multisig spend txn
          are added into the same block
Expectation
        - update_state_container must return true when provided with txns multi_sig_vote1 and multi_sig_vote2,
          as both the multi-sig create and spend txns are already available in the state
:param time_mock:
:return:
"""
with patch.object(DifficultyTracker, 'get', return_value=ask_difficulty_tracker('2', config.dev)):
self.chain_manager.load(self.genesis_block)
extended_seed = "010300cebc4e25553afa0aab899f7838e59e18a48852fa9dfd5" \
"ae78278c371902aa9e6e9c1fa8a196d2dba0cbfd2f2d212d16c"
random_xmss = XMSS.from_extended_seed(hstr2bin(extended_seed))
alice_xmss = get_alice_xmss(4)
bob_xmss = get_bob_xmss(4)
multi_sig_create = MultiSigCreate.create(signatories=[alice_xmss.address,
bob_xmss.address],
weights=[4, 6],
threshold=5,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_create.sign(alice_xmss)
multi_sig_create.pbdata.nonce = 1
multi_sig_address = MultiSigAddressState.generate_multi_sig_address(multi_sig_create.txhash)
transfer_transaction = TransferTransaction.create(addrs_to=[multi_sig_address],
amounts=[40 * int(config.dev.shor_per_quanta)],
message_data=None,
fee=1 * config.dev.shor_per_quanta,
xmss_pk=bob_xmss.pk)
transfer_transaction._data.nonce = 1
transfer_transaction.sign(bob_xmss)
self.assertTrue(transfer_transaction.validate_or_raise(True))
multi_sig_spend = MultiSigSpend.create(multi_sig_address=multi_sig_address,
addrs_to=[random_xmss.address, alice_xmss.address],
amounts=[10, 5],
expiry_block_number=100,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_spend.sign(alice_xmss)
multi_sig_spend.pbdata.nonce = 2
multi_sig_vote1 = MultiSigVote.create(shared_key=multi_sig_spend.txhash,
unvote=False,
fee=0,
xmss_pk=alice_xmss.pk)
multi_sig_vote1.sign(alice_xmss)
multi_sig_vote1.pbdata.nonce = 3
multi_sig_vote2 = MultiSigVote.create(shared_key=multi_sig_spend.txhash,
unvote=False,
fee=0,
xmss_pk=bob_xmss.pk)
multi_sig_vote2.sign(bob_xmss)
multi_sig_vote2.pbdata.nonce = 2
time_mock.return_value = 1615270948 # Very high to get an easy difficulty
seed_block = self.chain_manager.get_block_by_number(self._qn.get_seed_height(1))
block_1 = Block.create(dev_config=config.dev,
block_number=1,
prev_headerhash=self.genesis_block.headerhash,
prev_timestamp=self.genesis_block.timestamp,
transactions=[multi_sig_create, transfer_transaction, multi_sig_spend],
miner_address=alice_xmss.address,
seed_height=seed_block.block_number,
seed_hash=seed_block.headerhash)
block_1.set_nonces(config.dev, 0, 0)
# Uncomment only to determine the correct mining_nonce of above blocks
# while not self.chain_manager.validate_mining_nonce(block_1.blockheader, config.dev, False):
# block_1.set_nonces(config.dev, block_1.mining_nonce + 1)
# print(block_1.mining_nonce)
self.assertTrue(block_1.validate(self.chain_manager, {}))
result = self.chain_manager.add_block(block_1)
self.assertTrue(result)
state_container = self.chain_manager.new_state_container(set(),
10,
False,
None)
self.assertTrue(self.chain_manager.update_state_container(multi_sig_vote1, state_container))
self.assertTrue(self.chain_manager.update_state_container(multi_sig_vote2, state_container))
class TestChainManager(TestCase):
def setUp(self):
self.state = Mock(autospec=State)
self.state.get_measurement.return_value = 10000000
try:
del GenesisBlock.instance # Removing Singleton instance
except Exception: # noqa
pass
self.genesis_block = GenesisBlock()
self.chain_manager = ChainManager(self.state)
self.chain_manager.tx_pool = Mock()
self.chain_manager._difficulty_tracker = Mock()
@patch('xrd.core.Block.Block', autospec=True)
def test_fork_recovery_failed(self, mock_block):
# When switching to the longer chain fails, _fork_recovery() must _rollback and restore the shorter chain.
# Mock out irrelevant functions
self.chain_manager._update_block_number_mapping = Mock()
# Switching to the new chain should fail!
self.chain_manager.add_chain = Mock(return_value=False)
self.chain_manager._rollback = Mock()
block_1 = create_m_block(1, self.genesis_block, alice.address)
block_2 = create_m_block(2, block_1, alice.address)
block_1_alt = create_m_block(1, self.genesis_block, alice.address)
block_2_alt = create_m_block(2, block_1_alt, alice.address)
block_3_alt = create_m_block(3, block_2_alt, alice.address)
fork_state = xrdstateinfo_pb2.ForkState(
initiator_headerhash=block_3_alt.headerhash,
fork_point_headerhash=self.genesis_block.headerhash,
old_mainchain_hash_path=[b.headerhash for b in [block_1, block_2]],
new_mainchain_hash_path=[b.headerhash for b in [block_1_alt, block_2_alt, block_3_alt]]
)
# _fork_recovery() will _rollback() to the genesis block and go on the longer chain.
# At this point, _rollback() should return the old hash path as a backup
# in case switching to the longer chain fails.
self.chain_manager._rollback.return_value = [block_2.headerhash, block_1.headerhash], True
self.chain_manager._fork_recovery(block_3_alt, fork_state)
# _fork_recovery() should have _rollback()ed when trying to switch to the longer chain
self.chain_manager._rollback.assert_any_call(self.genesis_block.headerhash, fork_state)
# _fork_recovery() should have _rollback()ed to the genesis block when trying to restore the shorter chain
self.chain_manager._rollback.assert_called_with(self.genesis_block.headerhash)
def test_fork_recovery_rollbacked_already(self):
# Switching to the new chain works, but test that if the _rollback() has already happened, _fork_recovery() does
# not hiccup
# Mock out irrelevant functions
self.chain_manager._update_block_number_mapping = Mock()
# Switching to the new chain should succeed!
self.chain_manager.add_chain = Mock(return_value=True)
self.chain_manager._rollback = Mock()
block_1 = create_m_block(1, self.genesis_block, alice.address)
block_2 = create_m_block(2, block_1, alice.address)
block_1_alt = create_m_block(1, self.genesis_block, alice.address)
block_2_alt = create_m_block(2, block_1_alt, alice.address)
block_3_alt = create_m_block(3, block_2_alt, alice.address)
fork_state = xrdstateinfo_pb2.ForkState(
initiator_headerhash=block_3_alt.headerhash,
fork_point_headerhash=self.genesis_block.headerhash,
old_mainchain_hash_path=[b.headerhash for b in [block_1, block_2]],
new_mainchain_hash_path=[b.headerhash for b in [block_1_alt, block_2_alt, block_3_alt]]
)
        # Block.get_block() should say that we are already on block_1_alt
with patch.object(Block, 'get_block', return_value=block_1_alt):
# _fork_recovery() will not call _rollback(), because it has already happened.
self.chain_manager._fork_recovery(block_3_alt, fork_state)
# _fork_recovery() should have _rollback()ed when trying to switch to the longer chain
self.chain_manager._rollback.assert_not_called()
@patch('xrd.core.config')
@patch('xrd.core.ChainManager.ChainManager.re_org_limit', new_callable=PropertyMock)
def test_add_block_doesnt_add_blocks_beyond_reorg_limit(self, m_re_org_limit, m_height):
# If we are at height 40000, what's the use of adding a block that's height 1? Simply ignore that block.
m_re_org_limit.return_value = 40000
m_height.return_value = 40000
block_1 = create_m_block(1, self.genesis_block, alice.address)
ans = self.chain_manager.add_block(block_1)
self.assertFalse(ans)
@patch('xrd.core.ChainManager.ChainManager.re_org_limit', new_callable=PropertyMock)
def test_add_block_refuses_to_add_too_large_blocks(self, m_re_org_limit):
# State.get_block_size_limit() calculates how large each Block should be from the last 10 confirmed blocks.
m_re_org_limit.return_value = 0
self.state.get_block_size_limit.return_value = 5000000
block_1 = create_m_block(1, self.genesis_block, alice.address)
block_1.size = 6000000
ans = self.chain_manager.add_block(block_1)
self.assertFalse(ans)
@patch('xrd.core.Block.Block', autospec=True)
def test_get_fork_point_failure_modes(self, mock_block):
block_0 = create_m_block(0, Mock(headerhash=b'Fake Genesis', timestamp=replacement_getTime()), alice.address)
block_1 = create_m_block(1, block_0, alice.address)
block_2 = create_m_block(2, block_1, alice.address)
fork_block_0 = create_m_block(0, Mock(headerhash=b'Fake Genesis', timestamp=replacement_getTime()), alice.address)
fork_block_1 = create_m_block(1, fork_block_0, alice.address)
fork_block_2 = create_m_block(2, fork_block_1, alice.address)
# If _get_fork_point() ever reaches block_number 0, that means the genesis block is different!
# Mock self.state leads us back to block_0
mock_block.deserialize = MagicMock(side_effect=[fork_block_1, fork_block_0])
with self.assertRaises(Exception):
self.chain_manager._get_fork_point(fork_block_2)
# If _get_fork_point() cannot find a particular block while walking back to the fork point, something has gone
# very wrong
# Mock self.state leads us back through a broken chain
mock_block.deserialize.side_effect = [block_2, None, block_0]
with self.assertRaises(Exception):
self.chain_manager._get_fork_point(block_2)
def test_try_branch_add_block_fails_if_apply_state_changes_fails(self):
# ChainManager._try_branch_add_block() should fail if ChainManager._apply_state_changes() fails
self.chain_manager._apply_state_changes = Mock(return_value=False)
block = create_m_block(50, self.genesis_block, alice.address)
block_added = self.chain_manager._try_branch_add_block(block, config.dev)
self.assertFalse(block_added)
def test_add_chain_fails_if_fork_recovery_didnt_complete_successfully(self):
block_1 = create_m_block(1, self.genesis_block, alice.address)
block_2 = create_m_block(2, block_1, alice.address)
block_1_alt = create_m_block(1, self.genesis_block, alice.address)
block_2_alt = create_m_block(2, block_1_alt, alice.address)
block_3_alt = create_m_block(3, block_2_alt, alice.address)
fork_state = xrdstateinfo_pb2.ForkState(
initiator_headerhash=block_3_alt.headerhash,
fork_point_headerhash=self.genesis_block.headerhash,
old_mainchain_hash_path=[b.headerhash for b in [block_1, block_2]],
new_mainchain_hash_path=[b.headerhash for b in [block_1_alt, block_2_alt, block_3_alt]]
)
# We want to add_chain(block_*_alt chain), but we're still on block_1 (we should have rolled back to genesis)
self.chain_manager._last_block = block_1
ans = self.chain_manager.add_chain([block_1_alt.headerhash, block_2_alt.headerhash], fork_state)
self.assertFalse(ans)
@patch('xrd.core.Block.Block', autospec=True)
def test_add_chain_fails_if_apply_state_changes_fails(self, mock_block_deserialize):
block_1 = create_m_block(1, self.genesis_block, alice.address)
block_2 = create_m_block(2, block_1, alice.address)
block_1_alt = create_m_block(1, self.genesis_block, alice.address)
block_2_alt = create_m_block(2, block_1_alt, alice.address)
block_3_alt = create_m_block(3, block_2_alt, alice.address)
fork_state = xrdstateinfo_pb2.ForkState(
initiator_headerhash=block_3_alt.headerhash,
fork_point_headerhash=self.genesis_block.headerhash,
old_mainchain_hash_path=[b.headerhash for b in [block_1, block_2]],
new_mainchain_hash_path=[b.headerhash for b in [block_1_alt, block_2_alt, block_3_alt]]
)
# we want to add_chain(block_*_alt chain), but for some reason applying a Block to the State didn't work.
self.chain_manager._apply_state_changes = Mock(return_value=False)
ans = self.chain_manager.add_chain([block_1_alt.headerhash, block_2_alt.headerhash], fork_state)
self.assertFalse(ans)
@patch("xrd.core.Block.Block.get_block")
def test_get_measurement(self, mock_get_block):
def block(state, headerhash):
nth_block = Block()
if headerhash == b'test_block_1':
nth_block.blockheader._data.timestamp_seconds = 50000
elif headerhash == b'test_block_2':
nth_block.blockheader._data.timestamp_seconds = 80000
elif headerhash == b'test_block_3':
nth_block.blockheader._data.timestamp_seconds = 90000
return nth_block
parent_metadata = BlockMetadata.create(block_difficulty=b'\x00' * 32,
cumulative_difficulty=b'\x00' * 32,
child_headerhashes=[])
measurement = self.chain_manager.get_measurement(dev_config=config.dev,
block_timestamp=100000,
parent_headerhash=b'',
parent_metadata=parent_metadata)
# Test Case, when count_headerhashes equals 0
self.assertEqual(measurement, config.dev.block_timing_in_seconds)
mock_get_block.side_effect = block
parent_metadata.update_last_headerhashes([], b'test_block_1')
measurement = self.chain_manager.get_measurement(dev_config=config.dev,
block_timestamp=100000,
parent_headerhash=b'test_block_1',
parent_metadata=parent_metadata)
# Test Case, when count_headerhashes equals 1
self.assertEqual(measurement,
(100000 - 50000 + config.dev.block_timing_in_seconds) // 2)
parent_metadata.update_last_headerhashes([b'test_block_1'], b'test_block_2')
measurement = self.chain_manager.get_measurement(dev_config=config.dev,
block_timestamp=100000,
parent_headerhash=b'test_block_2',
parent_metadata=parent_metadata)
# Test Case, when count_headerhashes is greater than 1
# but less than config.dev.N_measurement
self.assertEqual(measurement,
(100000 - 80000 + config.dev.block_timing_in_seconds) // 2)
parent_metadata.update_last_headerhashes([b'test_block_3'] * config.dev.N_measurement,
b'test_block_2')
measurement = self.chain_manager.get_measurement(dev_config=config.dev,
block_timestamp=100000,
parent_headerhash=b'test_block_2',
parent_metadata=parent_metadata)
# Test Case, when count_headerhashes is greater than config.dev.N_measurement
self.assertEqual(measurement,
(100000 - 90000) // config.dev.N_measurement)
def test_get_all_address_state(self):
with set_xrd_dir('no_data'):
with State() as state:
chain_manager = ChainManager(state)
addresses_state = chain_manager.get_all_address_state()
self.assertEqual(len(addresses_state), 0)
alice_xmss = get_alice_xmss()
alice_address = alice_xmss.address
address_state = OptimizedAddressState.get_optimized_address_state(state, alice_address)
addresses_state = {
alice_address: address_state
}
self.assertTrue(isinstance(address_state.address, bytes))
OptimizedAddressState.put_optimized_addresses_state(state, addresses_state)
addresses_state = chain_manager.get_all_address_state()
self.assertEqual(len(addresses_state), 1)
bob_xmss = get_bob_xmss()
bob_address = bob_xmss.address
address_state = OptimizedAddressState.get_optimized_address_state(state, bob_address)
addresses_state = {
bob_address: address_state
}
self.assertTrue(isinstance(address_state.address, bytes))
OptimizedAddressState.put_optimized_addresses_state(state, addresses_state)
addresses_state = chain_manager.get_all_address_state()
self.assertEqual(len(addresses_state), 2)
def test_set_affected_address(self):
block = Block.create(dev_config=config.dev,
block_number=10,
prev_headerhash=b'',
prev_timestamp=10,
transactions=[],
miner_address=get_some_address(1),
seed_height=None,
seed_hash=None)
# Test Case: without any transactions of block
self.assertEqual(self.chain_manager.set_affected_address(block),
{config.dev.coinbase_address, get_some_address(1)})
alice_xmss = get_alice_xmss()
block = Block.create(dev_config=config.dev,
block_number=10,
prev_headerhash=b'',
prev_timestamp=10,
transactions=[TransferTransaction.create(addrs_to=[get_some_address(2),
get_some_address(3)],
amounts=[100, 100],
message_data=None,
fee=0,
xmss_pk=alice_xmss.pk)],
miner_address=get_some_address(1),
seed_height=None,
seed_hash=None)
# Test Case, with one Transaction
self.assertEqual(self.chain_manager.set_affected_address(block),
{config.dev.coinbase_address,
get_some_address(1),
get_some_address(2),
get_some_address(3),
alice_xmss.address})
def test_get_block_datapoint(self):
with set_xrd_dir('no_data'):
with State() as state:
chain_manager = ChainManager(state)
# Test Case: When block not found
self.assertIsNone(chain_manager.get_block_datapoint(b'test'))
alice_xmss = get_alice_xmss()
blocks = gen_blocks(20, state, alice_xmss.address)
for i in range(1, 20):
datapoint = chain_manager.get_block_datapoint(blocks[i].headerhash)
self.assertEqual(datapoint.difficulty, "256")
self.assertEqual(datapoint.timestamp, 1615270947 + i)
self.assertEqual(datapoint.header_hash, blocks[i].headerhash)
self.assertEqual(datapoint.header_hash_prev, blocks[i - 1].headerhash)
def test_get_state_mainchain(self):
with set_xrd_dir('no_data'):
with State() as state:
chain_manager = ChainManager(state)
alice_xmss = get_alice_xmss()
alice_state = OptimizedAddressState.get_default(alice_xmss.address)
alice_state.increase_nonce()
alice_state.update_balance(None, 1000)
addresses_state = {
alice_state.address: alice_state,
}
OptimizedAddressState.put_optimized_addresses_state(state, addresses_state, None)
addresses_state1, success = chain_manager.get_state_mainchain({alice_state.address})
self.assertTrue(success)
self.assertEqual(addresses_state[alice_state.address].serialize(),
addresses_state1[alice_state.address].serialize())
| [
"[email protected]"
] | |
175362d65ed956693d742ed37d75ce17e75b88dc | 01361e80df13a173264c369f63dc42f5fb1eb9b5 | /carts/migrations/0002_initial.py | 642b8aa58314767eaafc395fd7cdd75fb278ce99 | [] | no_license | Angshumaan/greatkart-django | cb63cf1cc1c094f504adb7ff8fea347c43a6fa81 | 3ff1ac8fc7e7f23bdc3185167e2d8b678256a82c | refs/heads/main | 2023-06-05T10:05:21.408371 | 2021-06-12T06:53:15 | 2021-06-12T06:53:15 | 363,017,306 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 693 | py | # Generated by Django 3.2 on 2021-05-22 03:32
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('store', '0001_initial'),
('carts', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='cartitem',
name='product',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='store.product'),
),
migrations.AddField(
model_name='cartitem',
name='variations',
field=models.ManyToManyField(blank=True, to='store.Variation'),
),
]
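# Illustrative sketch (not part of this migration): the model-side declaration that the two
# AddField operations above correspond to would look roughly like this in carts/models.py.
# Field names are taken from the operations; everything else here is assumed.
#
#     class CartItem(models.Model):
#         product = models.ForeignKey('store.Product', on_delete=models.CASCADE)
#         variations = models.ManyToManyField('store.Variation', blank=True)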
| [
"="
] | = |
ebbd48730fbd9939ec1d72fb2ef3df5c198e09f2 | 1eb72c0482efd06e1b06a7f3af8f6a5250b82584 | /code/lang/complex_if.py | 707a5937eca1f84dfba9e07c8e611c1bd8536da4 | [] | no_license | Bubujka/python-learning | dcb8e5f8fc40600b431a2ec1f4a16814fbbfa5c9 | 79bbe16bec01d429c34f9c36f1d15b4f77c811bf | refs/heads/master | 2021-09-07T15:31:54.822139 | 2018-02-25T03:08:06 | 2018-02-25T03:08:06 | 109,488,504 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 217 | py | #!/usr/bin/env python3
"""
Complex conditions in if (and, or)
"""
if True and True and False:
print("NOPE")
if not True or False:
print("NOPE")
if None or None or 1:
print("yeeep")
| [
"[email protected]"
] | |
edfde233b9fd6e03f085f971ebe3dd9459d23689 | a838d4bed14d5df5314000b41f8318c4ebe0974e | /sdk/datalake/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/_models.py | d27e5d49e658fa08f7750ab5a048c24f6badf3ae | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | scbedd/azure-sdk-for-python | ee7cbd6a8725ddd4a6edfde5f40a2a589808daea | cc8bdfceb23e5ae9f78323edc2a4e66e348bb17a | refs/heads/master | 2023-09-01T08:38:56.188954 | 2021-06-17T22:52:28 | 2021-06-17T22:52:28 | 159,568,218 | 2 | 0 | MIT | 2019-08-11T21:16:01 | 2018-11-28T21:34:49 | Python | UTF-8 | Python | false | false | 66,522 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import msrest.serialization
class CapabilityInformation(msrest.serialization.Model):
"""Subscription-level properties and limits for Data Lake Store.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar subscription_id: The subscription credentials that uniquely identifies the subscription.
:vartype subscription_id: str
:ivar state: The subscription state. Possible values include: "Registered", "Suspended",
"Deleted", "Unregistered", "Warned".
:vartype state: str or ~azure.mgmt.datalake.store.models.SubscriptionState
:ivar max_account_count: The maximum supported number of accounts under this subscription.
:vartype max_account_count: int
:ivar account_count: The current number of accounts under this subscription.
:vartype account_count: int
:ivar migration_state: The Boolean value of true or false to indicate the maintenance state.
:vartype migration_state: bool
"""
_validation = {
'subscription_id': {'readonly': True},
'state': {'readonly': True},
'max_account_count': {'readonly': True},
'account_count': {'readonly': True},
'migration_state': {'readonly': True},
}
_attribute_map = {
'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'},
'max_account_count': {'key': 'maxAccountCount', 'type': 'int'},
'account_count': {'key': 'accountCount', 'type': 'int'},
'migration_state': {'key': 'migrationState', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
super(CapabilityInformation, self).__init__(**kwargs)
self.subscription_id = None
self.state = None
self.max_account_count = None
self.account_count = None
self.migration_state = None
class CheckNameAvailabilityParameters(msrest.serialization.Model):
"""Data Lake Store account name availability check parameters.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param name: Required. The Data Lake Store name to check availability for.
:type name: str
:ivar type: Required. The resource type. Note: This should not be set by the user, as the
constant value is Microsoft.DataLakeStore/accounts. Default value:
"Microsoft.DataLakeStore/accounts".
:vartype type: str
"""
_validation = {
'name': {'required': True},
'type': {'required': True, 'constant': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
type = "Microsoft.DataLakeStore/accounts"
def __init__(
self,
**kwargs
):
super(CheckNameAvailabilityParameters, self).__init__(**kwargs)
self.name = kwargs['name']
class CreateDataLakeStoreAccountParameters(msrest.serialization.Model):
"""CreateDataLakeStoreAccountParameters.
All required parameters must be populated in order to send to Azure.
:param location: Required. The resource location.
:type location: str
:param tags: A set of tags. The resource tags.
:type tags: dict[str, str]
:param identity: The Key Vault encryption identity, if any.
:type identity: ~azure.mgmt.datalake.store.models.EncryptionIdentity
:param default_group: The default owner group for all new folders and files created in the Data
Lake Store account.
:type default_group: str
:param encryption_config: The Key Vault encryption configuration.
:type encryption_config: ~azure.mgmt.datalake.store.models.EncryptionConfig
:param encryption_state: The current state of encryption for this Data Lake Store account.
Possible values include: "Enabled", "Disabled".
:type encryption_state: str or ~azure.mgmt.datalake.store.models.EncryptionState
:param firewall_rules: The list of firewall rules associated with this Data Lake Store account.
:type firewall_rules:
list[~azure.mgmt.datalake.store.models.CreateFirewallRuleWithAccountParameters]
:param virtual_network_rules: The list of virtual network rules associated with this Data Lake
Store account.
:type virtual_network_rules:
list[~azure.mgmt.datalake.store.models.CreateVirtualNetworkRuleWithAccountParameters]
:param firewall_state: The current state of the IP address firewall for this Data Lake Store
account. Possible values include: "Enabled", "Disabled".
:type firewall_state: str or ~azure.mgmt.datalake.store.models.FirewallState
:param firewall_allow_azure_ips: The current state of allowing or disallowing IPs originating
within Azure through the firewall. If the firewall is disabled, this is not enforced. Possible
values include: "Enabled", "Disabled".
:type firewall_allow_azure_ips: str or
~azure.mgmt.datalake.store.models.FirewallAllowAzureIpsState
:param trusted_id_providers: The list of trusted identity providers associated with this Data
Lake Store account.
:type trusted_id_providers:
list[~azure.mgmt.datalake.store.models.CreateTrustedIdProviderWithAccountParameters]
:param trusted_id_provider_state: The current state of the trusted identity provider feature
for this Data Lake Store account. Possible values include: "Enabled", "Disabled".
:type trusted_id_provider_state: str or
~azure.mgmt.datalake.store.models.TrustedIdProviderState
:param new_tier: The commitment tier to use for next month. Possible values include:
"Consumption", "Commitment_1TB", "Commitment_10TB", "Commitment_100TB", "Commitment_500TB",
"Commitment_1PB", "Commitment_5PB".
:type new_tier: str or ~azure.mgmt.datalake.store.models.TierType
"""
_validation = {
'location': {'required': True},
}
_attribute_map = {
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'identity': {'key': 'identity', 'type': 'EncryptionIdentity'},
'default_group': {'key': 'properties.defaultGroup', 'type': 'str'},
'encryption_config': {'key': 'properties.encryptionConfig', 'type': 'EncryptionConfig'},
'encryption_state': {'key': 'properties.encryptionState', 'type': 'str'},
'firewall_rules': {'key': 'properties.firewallRules', 'type': '[CreateFirewallRuleWithAccountParameters]'},
'virtual_network_rules': {'key': 'properties.virtualNetworkRules', 'type': '[CreateVirtualNetworkRuleWithAccountParameters]'},
'firewall_state': {'key': 'properties.firewallState', 'type': 'str'},
'firewall_allow_azure_ips': {'key': 'properties.firewallAllowAzureIps', 'type': 'str'},
'trusted_id_providers': {'key': 'properties.trustedIdProviders', 'type': '[CreateTrustedIdProviderWithAccountParameters]'},
'trusted_id_provider_state': {'key': 'properties.trustedIdProviderState', 'type': 'str'},
'new_tier': {'key': 'properties.newTier', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(CreateDataLakeStoreAccountParameters, self).__init__(**kwargs)
self.location = kwargs['location']
self.tags = kwargs.get('tags', None)
self.identity = kwargs.get('identity', None)
self.default_group = kwargs.get('default_group', None)
self.encryption_config = kwargs.get('encryption_config', None)
self.encryption_state = kwargs.get('encryption_state', None)
self.firewall_rules = kwargs.get('firewall_rules', None)
self.virtual_network_rules = kwargs.get('virtual_network_rules', None)
self.firewall_state = kwargs.get('firewall_state', None)
self.firewall_allow_azure_ips = kwargs.get('firewall_allow_azure_ips', None)
self.trusted_id_providers = kwargs.get('trusted_id_providers', None)
self.trusted_id_provider_state = kwargs.get('trusted_id_provider_state', None)
self.new_tier = kwargs.get('new_tier', None)
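# A minimal usage sketch (illustrative only, not taken from the SDK docs): the model accepts
# its documented fields as keyword arguments, so a creation payload could be built roughly
# like this; the location, tag and tier values below are placeholders.
#
#     params = CreateDataLakeStoreAccountParameters(
#         location="eastus2",
#         tags={"env": "dev"},
#         encryption_state="Enabled",
#         new_tier="Consumption",
#     )
#
# The object is then serialized for the account create operation using the _attribute_map and
# _validation metadata declared on the class above.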
class CreateFirewallRuleWithAccountParameters(msrest.serialization.Model):
"""The parameters used to create a new firewall rule while creating a new Data Lake Store account.
All required parameters must be populated in order to send to Azure.
:param name: Required. The unique name of the firewall rule to create.
:type name: str
:param start_ip_address: Required. The start IP address for the firewall rule. This can be
either ipv4 or ipv6. Start and End should be in the same protocol.
:type start_ip_address: str
:param end_ip_address: Required. The end IP address for the firewall rule. This can be either
ipv4 or ipv6. Start and End should be in the same protocol.
:type end_ip_address: str
"""
_validation = {
'name': {'required': True},
'start_ip_address': {'required': True},
'end_ip_address': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'start_ip_address': {'key': 'properties.startIpAddress', 'type': 'str'},
'end_ip_address': {'key': 'properties.endIpAddress', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(CreateFirewallRuleWithAccountParameters, self).__init__(**kwargs)
self.name = kwargs['name']
self.start_ip_address = kwargs['start_ip_address']
self.end_ip_address = kwargs['end_ip_address']
class CreateOrUpdateFirewallRuleParameters(msrest.serialization.Model):
"""The parameters used to create a new firewall rule.
All required parameters must be populated in order to send to Azure.
:param start_ip_address: Required. The start IP address for the firewall rule. This can be
either ipv4 or ipv6. Start and End should be in the same protocol.
:type start_ip_address: str
:param end_ip_address: Required. The end IP address for the firewall rule. This can be either
ipv4 or ipv6. Start and End should be in the same protocol.
:type end_ip_address: str
"""
_validation = {
'start_ip_address': {'required': True},
'end_ip_address': {'required': True},
}
_attribute_map = {
'start_ip_address': {'key': 'properties.startIpAddress', 'type': 'str'},
'end_ip_address': {'key': 'properties.endIpAddress', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(CreateOrUpdateFirewallRuleParameters, self).__init__(**kwargs)
self.start_ip_address = kwargs['start_ip_address']
self.end_ip_address = kwargs['end_ip_address']
class CreateOrUpdateTrustedIdProviderParameters(msrest.serialization.Model):
"""The parameters used to create a new trusted identity provider.
All required parameters must be populated in order to send to Azure.
:param id_provider: Required. The URL of this trusted identity provider.
:type id_provider: str
"""
_validation = {
'id_provider': {'required': True},
}
_attribute_map = {
'id_provider': {'key': 'properties.idProvider', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(CreateOrUpdateTrustedIdProviderParameters, self).__init__(**kwargs)
self.id_provider = kwargs['id_provider']
class CreateOrUpdateVirtualNetworkRuleParameters(msrest.serialization.Model):
"""The parameters used to create a new virtual network rule.
All required parameters must be populated in order to send to Azure.
:param subnet_id: Required. The resource identifier for the subnet.
:type subnet_id: str
"""
_validation = {
'subnet_id': {'required': True},
}
_attribute_map = {
'subnet_id': {'key': 'properties.subnetId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(CreateOrUpdateVirtualNetworkRuleParameters, self).__init__(**kwargs)
self.subnet_id = kwargs['subnet_id']
class CreateTrustedIdProviderWithAccountParameters(msrest.serialization.Model):
"""The parameters used to create a new trusted identity provider while creating a new Data Lake Store account.
All required parameters must be populated in order to send to Azure.
:param name: Required. The unique name of the trusted identity provider to create.
:type name: str
:param id_provider: Required. The URL of this trusted identity provider.
:type id_provider: str
"""
_validation = {
'name': {'required': True},
'id_provider': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'id_provider': {'key': 'properties.idProvider', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(CreateTrustedIdProviderWithAccountParameters, self).__init__(**kwargs)
self.name = kwargs['name']
self.id_provider = kwargs['id_provider']
class CreateVirtualNetworkRuleWithAccountParameters(msrest.serialization.Model):
"""The parameters used to create a new virtual network rule while creating a new Data Lake Store account.
All required parameters must be populated in order to send to Azure.
:param name: Required. The unique name of the virtual network rule to create.
:type name: str
:param subnet_id: Required. The resource identifier for the subnet.
:type subnet_id: str
"""
_validation = {
'name': {'required': True},
'subnet_id': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'subnet_id': {'key': 'properties.subnetId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(CreateVirtualNetworkRuleWithAccountParameters, self).__init__(**kwargs)
self.name = kwargs['name']
self.subnet_id = kwargs['subnet_id']
class Resource(msrest.serialization.Model):
"""The resource model definition.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The resource identifier.
:vartype id: str
:ivar name: The resource name.
:vartype name: str
:ivar type: The resource type.
:vartype type: str
:ivar location: The resource location.
:vartype location: str
:ivar tags: A set of tags. The resource tags.
:vartype tags: dict[str, str]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'location': {'readonly': True},
'tags': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
super(Resource, self).__init__(**kwargs)
self.id = None
self.name = None
self.type = None
self.location = None
self.tags = None
class DataLakeStoreAccount(Resource):
"""Data Lake Store account information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The resource identifier.
:vartype id: str
:ivar name: The resource name.
:vartype name: str
:ivar type: The resource type.
:vartype type: str
:ivar location: The resource location.
:vartype location: str
:ivar tags: A set of tags. The resource tags.
:vartype tags: dict[str, str]
:ivar identity: The Key Vault encryption identity, if any.
:vartype identity: ~azure.mgmt.datalake.store.models.EncryptionIdentity
:ivar account_id: The unique identifier associated with this Data Lake Store account.
:vartype account_id: str
:ivar provisioning_state: The provisioning status of the Data Lake Store account. Possible
values include: "Failed", "Creating", "Running", "Succeeded", "Patching", "Suspending",
"Resuming", "Deleting", "Deleted", "Undeleting", "Canceled".
:vartype provisioning_state: str or
~azure.mgmt.datalake.store.models.DataLakeStoreAccountStatus
:ivar state: The state of the Data Lake Store account. Possible values include: "Active",
"Suspended".
:vartype state: str or ~azure.mgmt.datalake.store.models.DataLakeStoreAccountState
:ivar creation_time: The account creation time.
:vartype creation_time: ~datetime.datetime
:ivar last_modified_time: The account last modified time.
:vartype last_modified_time: ~datetime.datetime
:ivar endpoint: The full CName endpoint for this account.
:vartype endpoint: str
:ivar default_group: The default owner group for all new folders and files created in the Data
Lake Store account.
:vartype default_group: str
:ivar encryption_config: The Key Vault encryption configuration.
:vartype encryption_config: ~azure.mgmt.datalake.store.models.EncryptionConfig
:ivar encryption_state: The current state of encryption for this Data Lake Store account.
Possible values include: "Enabled", "Disabled".
:vartype encryption_state: str or ~azure.mgmt.datalake.store.models.EncryptionState
:ivar encryption_provisioning_state: The current state of encryption provisioning for this Data
Lake Store account. Possible values include: "Creating", "Succeeded".
:vartype encryption_provisioning_state: str or
~azure.mgmt.datalake.store.models.EncryptionProvisioningState
:ivar firewall_rules: The list of firewall rules associated with this Data Lake Store account.
:vartype firewall_rules: list[~azure.mgmt.datalake.store.models.FirewallRule]
:ivar virtual_network_rules: The list of virtual network rules associated with this Data Lake
Store account.
:vartype virtual_network_rules: list[~azure.mgmt.datalake.store.models.VirtualNetworkRule]
:ivar firewall_state: The current state of the IP address firewall for this Data Lake Store
account. Possible values include: "Enabled", "Disabled".
:vartype firewall_state: str or ~azure.mgmt.datalake.store.models.FirewallState
:ivar firewall_allow_azure_ips: The current state of allowing or disallowing IPs originating
within Azure through the firewall. If the firewall is disabled, this is not enforced. Possible
values include: "Enabled", "Disabled".
:vartype firewall_allow_azure_ips: str or
~azure.mgmt.datalake.store.models.FirewallAllowAzureIpsState
:ivar trusted_id_providers: The list of trusted identity providers associated with this Data
Lake Store account.
:vartype trusted_id_providers: list[~azure.mgmt.datalake.store.models.TrustedIdProvider]
:ivar trusted_id_provider_state: The current state of the trusted identity provider feature for
this Data Lake Store account. Possible values include: "Enabled", "Disabled".
:vartype trusted_id_provider_state: str or
~azure.mgmt.datalake.store.models.TrustedIdProviderState
:ivar new_tier: The commitment tier to use for next month. Possible values include:
"Consumption", "Commitment_1TB", "Commitment_10TB", "Commitment_100TB", "Commitment_500TB",
"Commitment_1PB", "Commitment_5PB".
:vartype new_tier: str or ~azure.mgmt.datalake.store.models.TierType
:ivar current_tier: The commitment tier in use for the current month. Possible values include:
"Consumption", "Commitment_1TB", "Commitment_10TB", "Commitment_100TB", "Commitment_500TB",
"Commitment_1PB", "Commitment_5PB".
:vartype current_tier: str or ~azure.mgmt.datalake.store.models.TierType
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'location': {'readonly': True},
'tags': {'readonly': True},
'identity': {'readonly': True},
'account_id': {'readonly': True},
'provisioning_state': {'readonly': True},
'state': {'readonly': True},
'creation_time': {'readonly': True},
'last_modified_time': {'readonly': True},
'endpoint': {'readonly': True},
'default_group': {'readonly': True},
'encryption_config': {'readonly': True},
'encryption_state': {'readonly': True},
'encryption_provisioning_state': {'readonly': True},
'firewall_rules': {'readonly': True},
'virtual_network_rules': {'readonly': True},
'firewall_state': {'readonly': True},
'firewall_allow_azure_ips': {'readonly': True},
'trusted_id_providers': {'readonly': True},
'trusted_id_provider_state': {'readonly': True},
'new_tier': {'readonly': True},
'current_tier': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'identity': {'key': 'identity', 'type': 'EncryptionIdentity'},
'account_id': {'key': 'properties.accountId', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'state': {'key': 'properties.state', 'type': 'str'},
'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'},
'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'},
'endpoint': {'key': 'properties.endpoint', 'type': 'str'},
'default_group': {'key': 'properties.defaultGroup', 'type': 'str'},
'encryption_config': {'key': 'properties.encryptionConfig', 'type': 'EncryptionConfig'},
'encryption_state': {'key': 'properties.encryptionState', 'type': 'str'},
'encryption_provisioning_state': {'key': 'properties.encryptionProvisioningState', 'type': 'str'},
'firewall_rules': {'key': 'properties.firewallRules', 'type': '[FirewallRule]'},
'virtual_network_rules': {'key': 'properties.virtualNetworkRules', 'type': '[VirtualNetworkRule]'},
'firewall_state': {'key': 'properties.firewallState', 'type': 'str'},
'firewall_allow_azure_ips': {'key': 'properties.firewallAllowAzureIps', 'type': 'str'},
'trusted_id_providers': {'key': 'properties.trustedIdProviders', 'type': '[TrustedIdProvider]'},
'trusted_id_provider_state': {'key': 'properties.trustedIdProviderState', 'type': 'str'},
'new_tier': {'key': 'properties.newTier', 'type': 'str'},
'current_tier': {'key': 'properties.currentTier', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(DataLakeStoreAccount, self).__init__(**kwargs)
self.identity = None
self.account_id = None
self.provisioning_state = None
self.state = None
self.creation_time = None
self.last_modified_time = None
self.endpoint = None
self.default_group = None
self.encryption_config = None
self.encryption_state = None
self.encryption_provisioning_state = None
self.firewall_rules = None
self.virtual_network_rules = None
self.firewall_state = None
self.firewall_allow_azure_ips = None
self.trusted_id_providers = None
self.trusted_id_provider_state = None
self.new_tier = None
self.current_tier = None
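# --- Usage sketch (added commentary, not part of the generated model code) -----
# Every field on DataLakeStoreAccount above is readonly/server-populated, so
# instances are normally returned by the management client rather than built by
# hand. The client class and operation names below assume the generated
# azure-mgmt-datalake-store client:
#
#   client = DataLakeStoreAccountManagementClient(credential, subscription_id)
#   account = client.accounts.get("my-resource-group", "myadlsaccount")
#   print(account.endpoint, account.provisioning_state, account.current_tier)
# --------------------------------------------------------------------------------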
class DataLakeStoreAccountBasic(Resource):
"""Basic Data Lake Store account information, returned on list calls.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The resource identifier.
:vartype id: str
:ivar name: The resource name.
:vartype name: str
:ivar type: The resource type.
:vartype type: str
:ivar location: The resource location.
:vartype location: str
:ivar tags: A set of tags. The resource tags.
:vartype tags: dict[str, str]
:ivar account_id: The unique identifier associated with this Data Lake Store account.
:vartype account_id: str
:ivar provisioning_state: The provisioning status of the Data Lake Store account. Possible
values include: "Failed", "Creating", "Running", "Succeeded", "Patching", "Suspending",
"Resuming", "Deleting", "Deleted", "Undeleting", "Canceled".
:vartype provisioning_state: str or
~azure.mgmt.datalake.store.models.DataLakeStoreAccountStatus
:ivar state: The state of the Data Lake Store account. Possible values include: "Active",
"Suspended".
:vartype state: str or ~azure.mgmt.datalake.store.models.DataLakeStoreAccountState
:ivar creation_time: The account creation time.
:vartype creation_time: ~datetime.datetime
:ivar last_modified_time: The account last modified time.
:vartype last_modified_time: ~datetime.datetime
:ivar endpoint: The full CName endpoint for this account.
:vartype endpoint: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'location': {'readonly': True},
'tags': {'readonly': True},
'account_id': {'readonly': True},
'provisioning_state': {'readonly': True},
'state': {'readonly': True},
'creation_time': {'readonly': True},
'last_modified_time': {'readonly': True},
'endpoint': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'account_id': {'key': 'properties.accountId', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'state': {'key': 'properties.state', 'type': 'str'},
'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'},
'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'},
'endpoint': {'key': 'properties.endpoint', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(DataLakeStoreAccountBasic, self).__init__(**kwargs)
self.account_id = None
self.provisioning_state = None
self.state = None
self.creation_time = None
self.last_modified_time = None
self.endpoint = None
class DataLakeStoreAccountListResult(msrest.serialization.Model):
"""Data Lake Store account list information response.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: The results of the list operation.
:vartype value: list[~azure.mgmt.datalake.store.models.DataLakeStoreAccountBasic]
:ivar next_link: The link (url) to the next page of results.
:vartype next_link: str
"""
_validation = {
'value': {'readonly': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[DataLakeStoreAccountBasic]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(DataLakeStoreAccountListResult, self).__init__(**kwargs)
self.value = None
self.next_link = None
class DataLakeStoreAccountPropertiesBasic(msrest.serialization.Model):
"""The basic account specific properties that are associated with an underlying Data Lake Store account.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar account_id: The unique identifier associated with this Data Lake Store account.
:vartype account_id: str
:ivar provisioning_state: The provisioning status of the Data Lake Store account. Possible
values include: "Failed", "Creating", "Running", "Succeeded", "Patching", "Suspending",
"Resuming", "Deleting", "Deleted", "Undeleting", "Canceled".
:vartype provisioning_state: str or
~azure.mgmt.datalake.store.models.DataLakeStoreAccountStatus
:ivar state: The state of the Data Lake Store account. Possible values include: "Active",
"Suspended".
:vartype state: str or ~azure.mgmt.datalake.store.models.DataLakeStoreAccountState
:ivar creation_time: The account creation time.
:vartype creation_time: ~datetime.datetime
:ivar last_modified_time: The account last modified time.
:vartype last_modified_time: ~datetime.datetime
:ivar endpoint: The full CName endpoint for this account.
:vartype endpoint: str
"""
_validation = {
'account_id': {'readonly': True},
'provisioning_state': {'readonly': True},
'state': {'readonly': True},
'creation_time': {'readonly': True},
'last_modified_time': {'readonly': True},
'endpoint': {'readonly': True},
}
_attribute_map = {
'account_id': {'key': 'accountId', 'type': 'str'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'},
'creation_time': {'key': 'creationTime', 'type': 'iso-8601'},
'last_modified_time': {'key': 'lastModifiedTime', 'type': 'iso-8601'},
'endpoint': {'key': 'endpoint', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(DataLakeStoreAccountPropertiesBasic, self).__init__(**kwargs)
self.account_id = None
self.provisioning_state = None
self.state = None
self.creation_time = None
self.last_modified_time = None
self.endpoint = None
class DataLakeStoreAccountProperties(DataLakeStoreAccountPropertiesBasic):
"""Data Lake Store account properties information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar account_id: The unique identifier associated with this Data Lake Store account.
:vartype account_id: str
:ivar provisioning_state: The provisioning status of the Data Lake Store account. Possible
values include: "Failed", "Creating", "Running", "Succeeded", "Patching", "Suspending",
"Resuming", "Deleting", "Deleted", "Undeleting", "Canceled".
:vartype provisioning_state: str or
~azure.mgmt.datalake.store.models.DataLakeStoreAccountStatus
:ivar state: The state of the Data Lake Store account. Possible values include: "Active",
"Suspended".
:vartype state: str or ~azure.mgmt.datalake.store.models.DataLakeStoreAccountState
:ivar creation_time: The account creation time.
:vartype creation_time: ~datetime.datetime
:ivar last_modified_time: The account last modified time.
:vartype last_modified_time: ~datetime.datetime
:ivar endpoint: The full CName endpoint for this account.
:vartype endpoint: str
:ivar default_group: The default owner group for all new folders and files created in the Data
Lake Store account.
:vartype default_group: str
:ivar encryption_config: The Key Vault encryption configuration.
:vartype encryption_config: ~azure.mgmt.datalake.store.models.EncryptionConfig
:ivar encryption_state: The current state of encryption for this Data Lake Store account.
Possible values include: "Enabled", "Disabled".
:vartype encryption_state: str or ~azure.mgmt.datalake.store.models.EncryptionState
:ivar encryption_provisioning_state: The current state of encryption provisioning for this Data
Lake Store account. Possible values include: "Creating", "Succeeded".
:vartype encryption_provisioning_state: str or
~azure.mgmt.datalake.store.models.EncryptionProvisioningState
:ivar firewall_rules: The list of firewall rules associated with this Data Lake Store account.
:vartype firewall_rules: list[~azure.mgmt.datalake.store.models.FirewallRule]
:ivar virtual_network_rules: The list of virtual network rules associated with this Data Lake
Store account.
:vartype virtual_network_rules: list[~azure.mgmt.datalake.store.models.VirtualNetworkRule]
:ivar firewall_state: The current state of the IP address firewall for this Data Lake Store
account. Possible values include: "Enabled", "Disabled".
:vartype firewall_state: str or ~azure.mgmt.datalake.store.models.FirewallState
:ivar firewall_allow_azure_ips: The current state of allowing or disallowing IPs originating
within Azure through the firewall. If the firewall is disabled, this is not enforced. Possible
values include: "Enabled", "Disabled".
:vartype firewall_allow_azure_ips: str or
~azure.mgmt.datalake.store.models.FirewallAllowAzureIpsState
:ivar trusted_id_providers: The list of trusted identity providers associated with this Data
Lake Store account.
:vartype trusted_id_providers: list[~azure.mgmt.datalake.store.models.TrustedIdProvider]
:ivar trusted_id_provider_state: The current state of the trusted identity provider feature for
this Data Lake Store account. Possible values include: "Enabled", "Disabled".
:vartype trusted_id_provider_state: str or
~azure.mgmt.datalake.store.models.TrustedIdProviderState
:ivar new_tier: The commitment tier to use for next month. Possible values include:
"Consumption", "Commitment_1TB", "Commitment_10TB", "Commitment_100TB", "Commitment_500TB",
"Commitment_1PB", "Commitment_5PB".
:vartype new_tier: str or ~azure.mgmt.datalake.store.models.TierType
:ivar current_tier: The commitment tier in use for the current month. Possible values include:
"Consumption", "Commitment_1TB", "Commitment_10TB", "Commitment_100TB", "Commitment_500TB",
"Commitment_1PB", "Commitment_5PB".
:vartype current_tier: str or ~azure.mgmt.datalake.store.models.TierType
"""
_validation = {
'account_id': {'readonly': True},
'provisioning_state': {'readonly': True},
'state': {'readonly': True},
'creation_time': {'readonly': True},
'last_modified_time': {'readonly': True},
'endpoint': {'readonly': True},
'default_group': {'readonly': True},
'encryption_config': {'readonly': True},
'encryption_state': {'readonly': True},
'encryption_provisioning_state': {'readonly': True},
'firewall_rules': {'readonly': True},
'virtual_network_rules': {'readonly': True},
'firewall_state': {'readonly': True},
'firewall_allow_azure_ips': {'readonly': True},
'trusted_id_providers': {'readonly': True},
'trusted_id_provider_state': {'readonly': True},
'new_tier': {'readonly': True},
'current_tier': {'readonly': True},
}
_attribute_map = {
'account_id': {'key': 'accountId', 'type': 'str'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'},
'creation_time': {'key': 'creationTime', 'type': 'iso-8601'},
'last_modified_time': {'key': 'lastModifiedTime', 'type': 'iso-8601'},
'endpoint': {'key': 'endpoint', 'type': 'str'},
'default_group': {'key': 'defaultGroup', 'type': 'str'},
'encryption_config': {'key': 'encryptionConfig', 'type': 'EncryptionConfig'},
'encryption_state': {'key': 'encryptionState', 'type': 'str'},
'encryption_provisioning_state': {'key': 'encryptionProvisioningState', 'type': 'str'},
'firewall_rules': {'key': 'firewallRules', 'type': '[FirewallRule]'},
'virtual_network_rules': {'key': 'virtualNetworkRules', 'type': '[VirtualNetworkRule]'},
'firewall_state': {'key': 'firewallState', 'type': 'str'},
'firewall_allow_azure_ips': {'key': 'firewallAllowAzureIps', 'type': 'str'},
'trusted_id_providers': {'key': 'trustedIdProviders', 'type': '[TrustedIdProvider]'},
'trusted_id_provider_state': {'key': 'trustedIdProviderState', 'type': 'str'},
'new_tier': {'key': 'newTier', 'type': 'str'},
'current_tier': {'key': 'currentTier', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(DataLakeStoreAccountProperties, self).__init__(**kwargs)
self.default_group = None
self.encryption_config = None
self.encryption_state = None
self.encryption_provisioning_state = None
self.firewall_rules = None
self.virtual_network_rules = None
self.firewall_state = None
self.firewall_allow_azure_ips = None
self.trusted_id_providers = None
self.trusted_id_provider_state = None
self.new_tier = None
self.current_tier = None
class EncryptionConfig(msrest.serialization.Model):
"""The encryption configuration for the account.
All required parameters must be populated in order to send to Azure.
:param type: Required. The type of encryption configuration being used. Currently the only
supported types are 'UserManaged' and 'ServiceManaged'. Possible values include: "UserManaged",
"ServiceManaged".
:type type: str or ~azure.mgmt.datalake.store.models.EncryptionConfigType
:param key_vault_meta_info: The Key Vault information for connecting to user managed encryption
keys.
:type key_vault_meta_info: ~azure.mgmt.datalake.store.models.KeyVaultMetaInfo
"""
_validation = {
'type': {'required': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'key_vault_meta_info': {'key': 'keyVaultMetaInfo', 'type': 'KeyVaultMetaInfo'},
}
def __init__(
self,
**kwargs
):
super(EncryptionConfig, self).__init__(**kwargs)
self.type = kwargs['type']
self.key_vault_meta_info = kwargs.get('key_vault_meta_info', None)
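# --- Usage sketch (added commentary, not part of the generated model code) -----
# EncryptionConfig is kwargs-only and 'type' is required. A user-managed
# configuration also needs a KeyVaultMetaInfo (defined further below); the
# Key Vault resource id, key name and key version here are placeholders:
#
#   config = EncryptionConfig(
#       type="UserManaged",
#       key_vault_meta_info=KeyVaultMetaInfo(
#           key_vault_resource_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.KeyVault/vaults/<vault>",
#           encryption_key_name="adls-key",
#           encryption_key_version="<key-version>",
#       ),
#   )
# --------------------------------------------------------------------------------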
class EncryptionIdentity(msrest.serialization.Model):
"""The encryption identity properties.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar type: Required. The type of encryption being used. Currently the only supported type is
'SystemAssigned'. Default value: "SystemAssigned".
:vartype type: str
:ivar principal_id: The principal identifier associated with the encryption.
:vartype principal_id: str
:ivar tenant_id: The tenant identifier associated with the encryption.
:vartype tenant_id: str
"""
_validation = {
'type': {'required': True, 'constant': True},
'principal_id': {'readonly': True},
'tenant_id': {'readonly': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'principal_id': {'key': 'principalId', 'type': 'str'},
'tenant_id': {'key': 'tenantId', 'type': 'str'},
}
type = "SystemAssigned"
def __init__(
self,
**kwargs
):
super(EncryptionIdentity, self).__init__(**kwargs)
self.principal_id = None
self.tenant_id = None
class SubResource(msrest.serialization.Model):
"""The resource model definition for a nested resource.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The resource identifier.
:vartype id: str
:ivar name: The resource name.
:vartype name: str
:ivar type: The resource type.
:vartype type: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(SubResource, self).__init__(**kwargs)
self.id = None
self.name = None
self.type = None
class FirewallRule(SubResource):
"""Data Lake Store firewall rule information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The resource identifier.
:vartype id: str
:ivar name: The resource name.
:vartype name: str
:ivar type: The resource type.
:vartype type: str
:ivar start_ip_address: The start IP address for the firewall rule. This can be either ipv4 or
ipv6. Start and End should be in the same protocol.
:vartype start_ip_address: str
:ivar end_ip_address: The end IP address for the firewall rule. This can be either ipv4 or
ipv6. Start and End should be in the same protocol.
:vartype end_ip_address: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'start_ip_address': {'readonly': True},
'end_ip_address': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'start_ip_address': {'key': 'properties.startIpAddress', 'type': 'str'},
'end_ip_address': {'key': 'properties.endIpAddress', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(FirewallRule, self).__init__(**kwargs)
self.start_ip_address = None
self.end_ip_address = None
class FirewallRuleListResult(msrest.serialization.Model):
"""Data Lake Store firewall rule list information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: The results of the list operation.
:vartype value: list[~azure.mgmt.datalake.store.models.FirewallRule]
:ivar next_link: The link (url) to the next page of results.
:vartype next_link: str
"""
_validation = {
'value': {'readonly': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[FirewallRule]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(FirewallRuleListResult, self).__init__(**kwargs)
self.value = None
self.next_link = None
class KeyVaultMetaInfo(msrest.serialization.Model):
"""Metadata information used by account encryption.
All required parameters must be populated in order to send to Azure.
:param key_vault_resource_id: Required. The resource identifier for the user managed Key Vault
being used to encrypt.
:type key_vault_resource_id: str
:param encryption_key_name: Required. The name of the user managed encryption key.
:type encryption_key_name: str
:param encryption_key_version: Required. The version of the user managed encryption key.
:type encryption_key_version: str
"""
_validation = {
'key_vault_resource_id': {'required': True},
'encryption_key_name': {'required': True},
'encryption_key_version': {'required': True},
}
_attribute_map = {
'key_vault_resource_id': {'key': 'keyVaultResourceId', 'type': 'str'},
'encryption_key_name': {'key': 'encryptionKeyName', 'type': 'str'},
'encryption_key_version': {'key': 'encryptionKeyVersion', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(KeyVaultMetaInfo, self).__init__(**kwargs)
self.key_vault_resource_id = kwargs['key_vault_resource_id']
self.encryption_key_name = kwargs['encryption_key_name']
self.encryption_key_version = kwargs['encryption_key_version']
class NameAvailabilityInformation(msrest.serialization.Model):
"""Data Lake Store account name availability result information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar name_available: The Boolean value of true or false to indicate whether the Data Lake
Store account name is available or not.
:vartype name_available: bool
:ivar reason: The reason why the Data Lake Store account name is not available, if
nameAvailable is false.
:vartype reason: str
:ivar message: The message describing why the Data Lake Store account name is not available, if
nameAvailable is false.
:vartype message: str
"""
_validation = {
'name_available': {'readonly': True},
'reason': {'readonly': True},
'message': {'readonly': True},
}
_attribute_map = {
'name_available': {'key': 'nameAvailable', 'type': 'bool'},
'reason': {'key': 'reason', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NameAvailabilityInformation, self).__init__(**kwargs)
self.name_available = None
self.reason = None
self.message = None
class Operation(msrest.serialization.Model):
"""An available operation for Data Lake Store.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar name: The name of the operation.
:vartype name: str
:param display: The display information for the operation.
:type display: ~azure.mgmt.datalake.store.models.OperationDisplay
:ivar origin: The intended executor of the operation. Possible values include: "user",
"system", "user,system".
:vartype origin: str or ~azure.mgmt.datalake.store.models.OperationOrigin
"""
_validation = {
'name': {'readonly': True},
'origin': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display': {'key': 'display', 'type': 'OperationDisplay'},
'origin': {'key': 'origin', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Operation, self).__init__(**kwargs)
self.name = None
self.display = kwargs.get('display', None)
self.origin = None
class OperationDisplay(msrest.serialization.Model):
"""The display information for a particular operation.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar provider: The resource provider of the operation.
:vartype provider: str
:ivar resource: The resource type of the operation.
:vartype resource: str
:ivar operation: A friendly name of the operation.
:vartype operation: str
:ivar description: A friendly description of the operation.
:vartype description: str
"""
_validation = {
'provider': {'readonly': True},
'resource': {'readonly': True},
'operation': {'readonly': True},
'description': {'readonly': True},
}
_attribute_map = {
'provider': {'key': 'provider', 'type': 'str'},
'resource': {'key': 'resource', 'type': 'str'},
'operation': {'key': 'operation', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(OperationDisplay, self).__init__(**kwargs)
self.provider = None
self.resource = None
self.operation = None
self.description = None
class OperationListResult(msrest.serialization.Model):
"""The list of available operations for Data Lake Store.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: The results of the list operation.
:vartype value: list[~azure.mgmt.datalake.store.models.Operation]
:ivar next_link: The link (url) to the next page of results.
:vartype next_link: str
"""
_validation = {
'value': {'readonly': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Operation]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(OperationListResult, self).__init__(**kwargs)
self.value = None
self.next_link = None
class TrustedIdProvider(SubResource):
"""Data Lake Store trusted identity provider information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The resource identifier.
:vartype id: str
:ivar name: The resource name.
:vartype name: str
:ivar type: The resource type.
:vartype type: str
:ivar id_provider: The URL of this trusted identity provider.
:vartype id_provider: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'id_provider': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'id_provider': {'key': 'properties.idProvider', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(TrustedIdProvider, self).__init__(**kwargs)
self.id_provider = None
class TrustedIdProviderListResult(msrest.serialization.Model):
"""Data Lake Store trusted identity provider list information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: The results of the list operation.
:vartype value: list[~azure.mgmt.datalake.store.models.TrustedIdProvider]
:ivar next_link: The link (url) to the next page of results.
:vartype next_link: str
"""
_validation = {
'value': {'readonly': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[TrustedIdProvider]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(TrustedIdProviderListResult, self).__init__(**kwargs)
self.value = None
self.next_link = None
class UpdateDataLakeStoreAccountParameters(msrest.serialization.Model):
"""Data Lake Store account information to update.
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param default_group: The default owner group for all new folders and files created in the Data
Lake Store account.
:type default_group: str
:param encryption_config: Used for rotation of user managed Key Vault keys. Can only be used to
rotate a user managed encryption Key Vault key.
:type encryption_config: ~azure.mgmt.datalake.store.models.UpdateEncryptionConfig
:param firewall_rules: The list of firewall rules associated with this Data Lake Store account.
:type firewall_rules:
list[~azure.mgmt.datalake.store.models.UpdateFirewallRuleWithAccountParameters]
:param virtual_network_rules: The list of virtual network rules associated with this Data Lake
Store account.
:type virtual_network_rules:
list[~azure.mgmt.datalake.store.models.UpdateVirtualNetworkRuleWithAccountParameters]
:param firewall_state: The current state of the IP address firewall for this Data Lake Store
account. Disabling the firewall does not remove existing rules, they will just be ignored until
the firewall is re-enabled. Possible values include: "Enabled", "Disabled".
:type firewall_state: str or ~azure.mgmt.datalake.store.models.FirewallState
:param firewall_allow_azure_ips: The current state of allowing or disallowing IPs originating
within Azure through the firewall. If the firewall is disabled, this is not enforced. Possible
values include: "Enabled", "Disabled".
:type firewall_allow_azure_ips: str or
~azure.mgmt.datalake.store.models.FirewallAllowAzureIpsState
:param trusted_id_providers: The list of trusted identity providers associated with this Data
Lake Store account.
:type trusted_id_providers:
list[~azure.mgmt.datalake.store.models.UpdateTrustedIdProviderWithAccountParameters]
:param trusted_id_provider_state: The current state of the trusted identity provider feature
for this Data Lake Store account. Disabling trusted identity provider functionality does not
remove the providers, they will just be ignored until this feature is re-enabled. Possible
values include: "Enabled", "Disabled".
:type trusted_id_provider_state: str or
~azure.mgmt.datalake.store.models.TrustedIdProviderState
:param new_tier: The commitment tier to use for next month. Possible values include:
"Consumption", "Commitment_1TB", "Commitment_10TB", "Commitment_100TB", "Commitment_500TB",
"Commitment_1PB", "Commitment_5PB".
:type new_tier: str or ~azure.mgmt.datalake.store.models.TierType
"""
_attribute_map = {
'tags': {'key': 'tags', 'type': '{str}'},
'default_group': {'key': 'properties.defaultGroup', 'type': 'str'},
'encryption_config': {'key': 'properties.encryptionConfig', 'type': 'UpdateEncryptionConfig'},
'firewall_rules': {'key': 'properties.firewallRules', 'type': '[UpdateFirewallRuleWithAccountParameters]'},
'virtual_network_rules': {'key': 'properties.virtualNetworkRules', 'type': '[UpdateVirtualNetworkRuleWithAccountParameters]'},
'firewall_state': {'key': 'properties.firewallState', 'type': 'str'},
'firewall_allow_azure_ips': {'key': 'properties.firewallAllowAzureIps', 'type': 'str'},
'trusted_id_providers': {'key': 'properties.trustedIdProviders', 'type': '[UpdateTrustedIdProviderWithAccountParameters]'},
'trusted_id_provider_state': {'key': 'properties.trustedIdProviderState', 'type': 'str'},
'new_tier': {'key': 'properties.newTier', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(UpdateDataLakeStoreAccountParameters, self).__init__(**kwargs)
self.tags = kwargs.get('tags', None)
self.default_group = kwargs.get('default_group', None)
self.encryption_config = kwargs.get('encryption_config', None)
self.firewall_rules = kwargs.get('firewall_rules', None)
self.virtual_network_rules = kwargs.get('virtual_network_rules', None)
self.firewall_state = kwargs.get('firewall_state', None)
self.firewall_allow_azure_ips = kwargs.get('firewall_allow_azure_ips', None)
self.trusted_id_providers = kwargs.get('trusted_id_providers', None)
self.trusted_id_provider_state = kwargs.get('trusted_id_provider_state', None)
self.new_tier = kwargs.get('new_tier', None)
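# --- Usage sketch (added commentary, not part of the generated model code) -----
# A typical update payload built from this model; account and rule names are
# placeholders, and the exact client call (accounts.update / begin_update)
# depends on the SDK version in use:
#
#   update = UpdateDataLakeStoreAccountParameters(
#       tags={"env": "dev"},
#       new_tier="Consumption",
#       firewall_state="Enabled",
#       firewall_rules=[
#           UpdateFirewallRuleWithAccountParameters(
#               name="office",
#               start_ip_address="131.107.0.1",
#               end_ip_address="131.107.255.254"),
#       ],
#   )
#   # client.accounts.update("my-resource-group", "myadlsaccount", update)
# --------------------------------------------------------------------------------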
class UpdateEncryptionConfig(msrest.serialization.Model):
"""The encryption configuration used to update a user managed Key Vault key.
:param key_vault_meta_info: The updated Key Vault key to use in user managed key rotation.
:type key_vault_meta_info: ~azure.mgmt.datalake.store.models.UpdateKeyVaultMetaInfo
"""
_attribute_map = {
'key_vault_meta_info': {'key': 'keyVaultMetaInfo', 'type': 'UpdateKeyVaultMetaInfo'},
}
def __init__(
self,
**kwargs
):
super(UpdateEncryptionConfig, self).__init__(**kwargs)
self.key_vault_meta_info = kwargs.get('key_vault_meta_info', None)
class UpdateFirewallRuleParameters(msrest.serialization.Model):
"""The parameters used to update a firewall rule.
:param start_ip_address: The start IP address for the firewall rule. This can be either ipv4 or
ipv6. Start and End should be in the same protocol.
:type start_ip_address: str
:param end_ip_address: The end IP address for the firewall rule. This can be either ipv4 or
ipv6. Start and End should be in the same protocol.
:type end_ip_address: str
"""
_attribute_map = {
'start_ip_address': {'key': 'properties.startIpAddress', 'type': 'str'},
'end_ip_address': {'key': 'properties.endIpAddress', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(UpdateFirewallRuleParameters, self).__init__(**kwargs)
self.start_ip_address = kwargs.get('start_ip_address', None)
self.end_ip_address = kwargs.get('end_ip_address', None)
class UpdateFirewallRuleWithAccountParameters(msrest.serialization.Model):
"""The parameters used to update a firewall rule while updating a Data Lake Store account.
All required parameters must be populated in order to send to Azure.
:param name: Required. The unique name of the firewall rule to update.
:type name: str
:param start_ip_address: The start IP address for the firewall rule. This can be either ipv4 or
ipv6. Start and End should be in the same protocol.
:type start_ip_address: str
:param end_ip_address: The end IP address for the firewall rule. This can be either ipv4 or
ipv6. Start and End should be in the same protocol.
:type end_ip_address: str
"""
_validation = {
'name': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'start_ip_address': {'key': 'properties.startIpAddress', 'type': 'str'},
'end_ip_address': {'key': 'properties.endIpAddress', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(UpdateFirewallRuleWithAccountParameters, self).__init__(**kwargs)
self.name = kwargs['name']
self.start_ip_address = kwargs.get('start_ip_address', None)
self.end_ip_address = kwargs.get('end_ip_address', None)
class UpdateKeyVaultMetaInfo(msrest.serialization.Model):
"""The Key Vault update information used for user managed key rotation.
:param encryption_key_version: The version of the user managed encryption key to update through
a key rotation.
:type encryption_key_version: str
"""
_attribute_map = {
'encryption_key_version': {'key': 'encryptionKeyVersion', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(UpdateKeyVaultMetaInfo, self).__init__(**kwargs)
self.encryption_key_version = kwargs.get('encryption_key_version', None)
class UpdateTrustedIdProviderParameters(msrest.serialization.Model):
"""The parameters used to update a trusted identity provider.
:param id_provider: The URL of this trusted identity provider.
:type id_provider: str
"""
_attribute_map = {
'id_provider': {'key': 'properties.idProvider', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(UpdateTrustedIdProviderParameters, self).__init__(**kwargs)
self.id_provider = kwargs.get('id_provider', None)
class UpdateTrustedIdProviderWithAccountParameters(msrest.serialization.Model):
"""The parameters used to update a trusted identity provider while updating a Data Lake Store account.
All required parameters must be populated in order to send to Azure.
:param name: Required. The unique name of the trusted identity provider to update.
:type name: str
:param id_provider: The URL of this trusted identity provider.
:type id_provider: str
"""
_validation = {
'name': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'id_provider': {'key': 'properties.idProvider', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(UpdateTrustedIdProviderWithAccountParameters, self).__init__(**kwargs)
self.name = kwargs['name']
self.id_provider = kwargs.get('id_provider', None)
class UpdateVirtualNetworkRuleParameters(msrest.serialization.Model):
"""The parameters used to update a virtual network rule.
:param subnet_id: The resource identifier for the subnet.
:type subnet_id: str
"""
_attribute_map = {
'subnet_id': {'key': 'properties.subnetId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(UpdateVirtualNetworkRuleParameters, self).__init__(**kwargs)
self.subnet_id = kwargs.get('subnet_id', None)
class UpdateVirtualNetworkRuleWithAccountParameters(msrest.serialization.Model):
"""The parameters used to update a virtual network rule while updating a Data Lake Store account.
All required parameters must be populated in order to send to Azure.
:param name: Required. The unique name of the virtual network rule to update.
:type name: str
:param subnet_id: The resource identifier for the subnet.
:type subnet_id: str
"""
_validation = {
'name': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'subnet_id': {'key': 'properties.subnetId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(UpdateVirtualNetworkRuleWithAccountParameters, self).__init__(**kwargs)
self.name = kwargs['name']
self.subnet_id = kwargs.get('subnet_id', None)
class Usage(msrest.serialization.Model):
"""Describes the Resource Usage.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar unit: Gets the unit of measurement. Possible values include: "Count", "Bytes", "Seconds",
"Percent", "CountsPerSecond", "BytesPerSecond".
:vartype unit: str or ~azure.mgmt.datalake.store.models.UsageUnit
:ivar id: Resource identifier.
:vartype id: str
:ivar current_value: Gets the current count of the allocated resources in the subscription.
:vartype current_value: int
:ivar limit: Gets the maximum count of the resources that can be allocated in the subscription.
:vartype limit: int
:ivar name: Gets the name of the type of usage.
:vartype name: ~azure.mgmt.datalake.store.models.UsageName
"""
_validation = {
'unit': {'readonly': True},
'id': {'readonly': True},
'current_value': {'readonly': True},
'limit': {'readonly': True},
'name': {'readonly': True},
}
_attribute_map = {
'unit': {'key': 'unit', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'current_value': {'key': 'currentValue', 'type': 'int'},
'limit': {'key': 'limit', 'type': 'int'},
'name': {'key': 'name', 'type': 'UsageName'},
}
def __init__(
self,
**kwargs
):
super(Usage, self).__init__(**kwargs)
self.unit = None
self.id = None
self.current_value = None
self.limit = None
self.name = None
class UsageListResult(msrest.serialization.Model):
"""The response from the List Usages operation.
:param value: Gets or sets the list of Storage Resource Usages.
:type value: list[~azure.mgmt.datalake.store.models.Usage]
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[Usage]'},
}
def __init__(
self,
**kwargs
):
super(UsageListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
class UsageName(msrest.serialization.Model):
"""The usage names that can be used.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: Gets a string describing the resource name.
:vartype value: str
:ivar localized_value: Gets a localized string describing the resource name.
:vartype localized_value: str
"""
_validation = {
'value': {'readonly': True},
'localized_value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': 'str'},
'localized_value': {'key': 'localizedValue', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(UsageName, self).__init__(**kwargs)
self.value = None
self.localized_value = None
class VirtualNetworkRule(SubResource):
"""Data Lake Store virtual network rule information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The resource identifier.
:vartype id: str
:ivar name: The resource name.
:vartype name: str
:ivar type: The resource type.
:vartype type: str
:ivar subnet_id: The resource identifier for the subnet.
:vartype subnet_id: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'subnet_id': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'subnet_id': {'key': 'properties.subnetId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkRule, self).__init__(**kwargs)
self.subnet_id = None
class VirtualNetworkRuleListResult(msrest.serialization.Model):
"""Data Lake Store virtual network rule list information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: The results of the list operation.
:vartype value: list[~azure.mgmt.datalake.store.models.VirtualNetworkRule]
:ivar next_link: The link (url) to the next page of results.
:vartype next_link: str
"""
_validation = {
'value': {'readonly': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[VirtualNetworkRule]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkRuleListResult, self).__init__(**kwargs)
self.value = None
self.next_link = None
| [ "[email protected]" ] | |
f6f83d8fcc21b13c0e3b8d60e21349fc3e1accdd | eb40dce4039d528b9cd06dbeda75da09d09d7fc5 | /need_install/Django-1.8.17/tests/admin_changelist/models.py | c01bf320621803e3f82fa4938596e56fe61caefe | [
"Apache-2.0",
"BSD-3-Clause"
] | permissive | MulticsYin/MulticsSH | 39b62189446787c7f0f037b1640c9c780bd1dddd | 5837a0bff0e7da0e8535e4e0b31ef6baf24274b4 | refs/heads/master | 2021-08-28T07:53:51.759679 | 2017-12-11T15:31:03 | 2017-12-11T15:31:03 | 82,428,902 | 4 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,666 | py | from django.db import models
from django.utils.encoding import python_2_unicode_compatible
class Event(models.Model):
# Oracle can have problems with a column named "date"
date = models.DateField(db_column="event_date")
class Parent(models.Model):
name = models.CharField(max_length=128)
class Child(models.Model):
parent = models.ForeignKey(Parent, editable=False, null=True)
name = models.CharField(max_length=30, blank=True)
age = models.IntegerField(null=True, blank=True)
class Genre(models.Model):
name = models.CharField(max_length=20)
class Band(models.Model):
name = models.CharField(max_length=20)
nr_of_members = models.PositiveIntegerField()
genres = models.ManyToManyField(Genre)
@python_2_unicode_compatible
class Musician(models.Model):
name = models.CharField(max_length=30)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Group(models.Model):
name = models.CharField(max_length=30)
members = models.ManyToManyField(Musician, through='Membership')
def __str__(self):
return self.name
class Membership(models.Model):
music = models.ForeignKey(Musician)
group = models.ForeignKey(Group)
role = models.CharField(max_length=15)
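# Added usage sketch (not part of the original Django test models): because Group
# declares Membership as its "through" model, members are attached by creating
# Membership rows explicitly rather than with group.members.add(), e.g.
#   band = Group.objects.create(name="The Doors")
#   jim = Musician.objects.create(name="Jim Morrison")
#   Membership.objects.create(music=jim, group=band, role="vocals")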
class Quartet(Group):
pass
class ChordsMusician(Musician):
pass
class ChordsBand(models.Model):
name = models.CharField(max_length=30)
members = models.ManyToManyField(ChordsMusician, through='Invitation')
class Invitation(models.Model):
player = models.ForeignKey(ChordsMusician)
band = models.ForeignKey(ChordsBand)
instrument = models.CharField(max_length=15)
class Swallow(models.Model):
origin = models.CharField(max_length=255)
load = models.FloatField()
speed = models.FloatField()
class Meta:
ordering = ('speed', 'load')
class SwallowOneToOne(models.Model):
swallow = models.OneToOneField(Swallow)
class UnorderedObject(models.Model):
"""
Model without any defined `Meta.ordering`.
Refs #17198.
"""
bool = models.BooleanField(default=True)
class OrderedObjectManager(models.Manager):
def get_queryset(self):
return super(OrderedObjectManager, self).get_queryset().order_by('number')
class OrderedObject(models.Model):
"""
Model with Manager that defines a default order.
Refs #17198.
"""
name = models.CharField(max_length=255)
bool = models.BooleanField(default=True)
number = models.IntegerField(default=0, db_column='number_val')
objects = OrderedObjectManager()
class CustomIdUser(models.Model):
uuid = models.AutoField(primary_key=True)
| [ "[email protected]" ] | |
e8364f58a88fcdb2c10225d9f1521f10130693d9 | 083558f27c674112dc0811eed9915a91d99fa0a0 | /Строковый тип данных/Методы строк часть 2/Очень странные дела.py | c2e22fcf82e03e35667efe7d4a215fca71dcd999 | [] | no_license | Kanres-GH/Pokolenie-Python-Stepik | e677639af1eabfd29635b792192b4fd5f107f5bb | 54b8d0ae7dbf7aaf2ea97300e643e1787b87ac7f | refs/heads/main | 2023-04-23T04:42:59.662055 | 2021-05-01T17:42:25 | 2021-05-01T17:42:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 116 | py | n = int(input())
cnt = 0
for i in range(n):
s = input()
if s.count('11') >= 3:  # the line contains "11" at least three times (non-overlapping count)
cnt += 1
print(cnt)
| [ "[email protected]" ] | |
9f5fd565d7ba9817e9f3eaa0b4c21caf0e3a5c5b | 157efc8223be28882fff6e6d64b04a549b16e84e | /MODULES_DATA/DefenseEvasion_CodeSigning_StolenMircosoftWindowsSignature/sigthief.py | 5f73f13dfddf5630caa3eaadf39563fb4d0db16e | [
"BSD-3-Clause"
] | permissive | qq529952515/viperpython | e44161494ce23b4e1515f8c1a651865855467f03 | af277c1f1008332c0e99e507851c3c50758baf4c | refs/heads/main | 2023-07-14T14:11:46.965259 | 2021-08-24T12:29:09 | 2021-08-24T12:29:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,636 | py | #!/usr/bin/env python3
# LICENSE: BSD-3
# Copyright: Josh Pitts @midnite_runr
import io
import shutil
import struct
import sys
from optparse import OptionParser
def gather_file_info_win(binary):
"""
Borrowed from BDF...
I could just skip to certLOC... *shrug*
"""
flItms = {}
binary = open(binary, 'rb')
binary.seek(int('3C', 16))
flItms['buffer'] = 0
flItms['JMPtoCodeAddress'] = 0
flItms['dis_frm_pehdrs_sectble'] = 248
flItms['pe_header_location'] = struct.unpack('<i', binary.read(4))[0]
# Start of COFF
flItms['COFF_Start'] = flItms['pe_header_location'] + 4
binary.seek(flItms['COFF_Start'])
flItms['MachineType'] = struct.unpack('<H', binary.read(2))[0]
binary.seek(flItms['COFF_Start'] + 2, 0)
flItms['NumberOfSections'] = struct.unpack('<H', binary.read(2))[0]
flItms['TimeDateStamp'] = struct.unpack('<I', binary.read(4))[0]
binary.seek(flItms['COFF_Start'] + 16, 0)
flItms['SizeOfOptionalHeader'] = struct.unpack('<H', binary.read(2))[0]
flItms['Characteristics'] = struct.unpack('<H', binary.read(2))[0]
# End of COFF
flItms['OptionalHeader_start'] = flItms['COFF_Start'] + 20
# if flItms['SizeOfOptionalHeader']:
# Begin Standard Fields section of Optional Header
binary.seek(flItms['OptionalHeader_start'])
flItms['Magic'] = struct.unpack('<H', binary.read(2))[0]
flItms['MajorLinkerVersion'] = struct.unpack("!B", binary.read(1))[0]
flItms['MinorLinkerVersion'] = struct.unpack("!B", binary.read(1))[0]
flItms['SizeOfCode'] = struct.unpack("<I", binary.read(4))[0]
flItms['SizeOfInitializedData'] = struct.unpack("<I", binary.read(4))[0]
flItms['SizeOfUninitializedData'] = struct.unpack("<I",
binary.read(4))[0]
flItms['AddressOfEntryPoint'] = struct.unpack('<I', binary.read(4))[0]
flItms['PatchLocation'] = flItms['AddressOfEntryPoint']
flItms['BaseOfCode'] = struct.unpack('<I', binary.read(4))[0]
if flItms['Magic'] != 0x20B:
flItms['BaseOfData'] = struct.unpack('<I', binary.read(4))[0]
# End Standard Fields section of Optional Header
# Begin Windows-Specific Fields of Optional Header
if flItms['Magic'] == 0x20B:
flItms['ImageBase'] = struct.unpack('<Q', binary.read(8))[0]
else:
flItms['ImageBase'] = struct.unpack('<I', binary.read(4))[0]
flItms['SectionAlignment'] = struct.unpack('<I', binary.read(4))[0]
flItms['FileAlignment'] = struct.unpack('<I', binary.read(4))[0]
flItms['MajorOperatingSystemVersion'] = struct.unpack('<H',
binary.read(2))[0]
flItms['MinorOperatingSystemVersion'] = struct.unpack('<H',
binary.read(2))[0]
flItms['MajorImageVersion'] = struct.unpack('<H', binary.read(2))[0]
flItms['MinorImageVersion'] = struct.unpack('<H', binary.read(2))[0]
flItms['MajorSubsystemVersion'] = struct.unpack('<H', binary.read(2))[0]
flItms['MinorSubsystemVersion'] = struct.unpack('<H', binary.read(2))[0]
flItms['Win32VersionValue'] = struct.unpack('<I', binary.read(4))[0]
flItms['SizeOfImageLoc'] = binary.tell()
flItms['SizeOfImage'] = struct.unpack('<I', binary.read(4))[0]
flItms['SizeOfHeaders'] = struct.unpack('<I', binary.read(4))[0]
flItms['CheckSum'] = struct.unpack('<I', binary.read(4))[0]
flItms['Subsystem'] = struct.unpack('<H', binary.read(2))[0]
flItms['DllCharacteristics'] = struct.unpack('<H', binary.read(2))[0]
if flItms['Magic'] == 0x20B:
flItms['SizeOfStackReserve'] = struct.unpack('<Q', binary.read(8))[0]
flItms['SizeOfStackCommit'] = struct.unpack('<Q', binary.read(8))[0]
flItms['SizeOfHeapReserve'] = struct.unpack('<Q', binary.read(8))[0]
flItms['SizeOfHeapCommit'] = struct.unpack('<Q', binary.read(8))[0]
else:
flItms['SizeOfStackReserve'] = struct.unpack('<I', binary.read(4))[0]
flItms['SizeOfStackCommit'] = struct.unpack('<I', binary.read(4))[0]
flItms['SizeOfHeapReserve'] = struct.unpack('<I', binary.read(4))[0]
flItms['SizeOfHeapCommit'] = struct.unpack('<I', binary.read(4))[0]
flItms['LoaderFlags'] = struct.unpack('<I', binary.read(4))[0] # zero
flItms['NumberofRvaAndSizes'] = struct.unpack('<I', binary.read(4))[0]
# End Windows-Specific Fields of Optional Header
# Begin Data Directories of Optional Header
flItms['ExportTableRVA'] = struct.unpack('<I', binary.read(4))[0]
flItms['ExportTableSize'] = struct.unpack('<I', binary.read(4))[0]
flItms['ImportTableLOCInPEOptHdrs'] = binary.tell()
# ImportTable SIZE|LOC
flItms['ImportTableRVA'] = struct.unpack('<I', binary.read(4))[0]
flItms['ImportTableSize'] = struct.unpack('<I', binary.read(4))[0]
flItms['ResourceTable'] = struct.unpack('<Q', binary.read(8))[0]
flItms['ExceptionTable'] = struct.unpack('<Q', binary.read(8))[0]
flItms['CertTableLOC'] = binary.tell()
flItms['CertLOC'] = struct.unpack("<I", binary.read(4))[0]
flItms['CertSize'] = struct.unpack("<I", binary.read(4))[0]
binary.close()
return flItms
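# --- Note (added commentary, not part of the original tool) --------------------
# 'CertTableLOC' is the file offset of the security data-directory entry inside
# the optional header; 'CertLOC' and 'CertSize' are that entry's values, i.e. the
# raw file offset and length of the appended WIN_CERTIFICATE blob that holds the
# Authenticode signature (unlike most data-directory entries, this one is a file
# offset, not an RVA). Quick sanity check:
#
#   info = gather_file_info_win("some_signed.exe")
#   print(hex(info['CertLOC']), info['CertSize'])
# --------------------------------------------------------------------------------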
def copyCert(exe):
flItms = gather_file_info_win(exe)
if flItms['CertLOC'] == 0 or flItms['CertSize'] == 0:
# not signed
print("Input file Not signed!")
sys.exit(-1)
with open(exe, 'rb') as f:
f.seek(flItms['CertLOC'], 0)
cert = f.read(flItms['CertSize'])
return cert
def writeCert(cert, exe, output):
flItms = gather_file_info_win(exe)
if not output:
output = str(exe) + "_signed"
shutil.copy2(exe, output)
print("Output file: {0}".format(output))
with open(exe, 'rb') as g:
with open(output, 'wb') as f:
f.write(g.read())
f.seek(0)
f.seek(flItms['CertTableLOC'], 0)
f.write(struct.pack("<I", len(open(exe, 'rb').read())))
f.write(struct.pack("<I", len(cert)))
f.seek(0, io.SEEK_END)
f.write(cert)
print("Signature appended. \nFIN.")
def outputCert(exe, output):
cert = copyCert(exe)
if not output:
output = str(exe) + "_sig"
print("Output file: {0}".format(output))
open(output, 'wb').write(cert)
print("Signature ripped. \nFIN.")
def check_sig(exe):
flItms = gather_file_info_win(exe)
if flItms['CertLOC'] == 0 or flItms['CertSize'] == 0:
# not signed
print("Inputfile Not signed!")
else:
print("Inputfile is signed!")
def truncate(exe, output):
flItms = gather_file_info_win(exe)
if flItms['CertLOC'] == 0 or flItms['CertSize'] == 0:
# not signed
print("Inputfile Not signed!")
sys.exit(-1)
else:
print("Inputfile is signed!")
if not output:
output = str(exe) + "_nosig"
print("Output file: {0}".format(output))
shutil.copy2(exe, output)
with open(output, "r+b") as binary:
print('Overwriting certificate table pointer and truncating binary')
binary.seek(-flItms['CertSize'], io.SEEK_END)
binary.truncate()
binary.seek(flItms['CertTableLOC'], 0)
binary.write(b"\x00\x00\x00\x00\x00\x00\x00\x00")
print("Signature removed. \nFIN.")
def signfile(exe, sigfile, output):
flItms = gather_file_info_win(exe)
cert = open(sigfile, 'rb').read()
if not output:
output = str(exe) + "_signed"
shutil.copy2(exe, output)
print("Output file: {0}".format(output))
with open(exe, 'rb') as g:
with open(output, 'wb') as f:
f.write(g.read())
f.seek(0)
f.seek(flItms['CertTableLOC'], 0)
f.write(struct.pack("<I", len(open(exe, 'rb').read())))
f.write(struct.pack("<I", len(cert)))
f.seek(0, io.SEEK_END)
f.write(cert)
print("Signature appended. \nFIN.")
def signbin(exe, sigfile_bin, output):
flItms = gather_file_info_win(exe)
cert = sigfile_bin
shutil.copy2(exe, output)
with open(exe, 'rb') as g:
with open(output, 'wb') as f:
f.write(g.read())
f.seek(0)
f.seek(flItms['CertTableLOC'], 0)
f.write(struct.pack("<I", len(open(exe, 'rb').read())))
f.write(struct.pack("<I", len(cert)))
f.seek(0, io.SEEK_END)
f.write(cert)
return True
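# --- Programmatic usage sketch (added commentary, not part of the original tool) ---
# The functions above can be driven from Python instead of the CLI, e.g. to copy
# the certificate table from a signed binary onto another file:
#
#   cert = copyCert("signed_input.exe")                  # rip the signature bytes
#   writeCert(cert, "target.exe", "target_signed.exe")   # append + patch cert table
#   # or, when the signature bytes are already in memory:
#   signbin("target.exe", cert, "target_signed.exe")
#
# The appended signature does not validate for the new file; the point is only
# that the output binary carries a certificate table again.
# ------------------------------------------------------------------------------------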
if __name__ == "__main__":
usage = 'usage: %prog [options]'
parser = OptionParser(usage)
parser.add_option("-i", "--file", dest="inputfile",
help="input file", metavar="FILE")
parser.add_option('-r', '--rip', dest='ripsig', action='store_true',
help='rip signature off inputfile')
parser.add_option('-a', '--add', dest='addsig', action='store_true',
help='add signautre to targetfile')
parser.add_option('-o', '--output', dest='outputfile',
help='output file')
parser.add_option('-s', '--sig', dest='sigfile',
help='binary signature from disk')
parser.add_option('-t', '--target', dest='targetfile',
help='file to append signature to')
parser.add_option('-c', '--checksig', dest='checksig', action='store_true',
help='file to check if signed; does not verify signature')
parser.add_option('-T', '--truncate', dest="truncate", action='store_true',
help='truncate signature (i.e. remove sig)')
(options, args) = parser.parse_args()
# rip signature
# inputfile and rip to outputfile
if options.inputfile and options.ripsig:
print("Ripping signature to file!")
outputCert(options.inputfile, options.outputfile)
sys.exit()
# copy from one to another
# inputfile and rip to targetfile to outputfile
if options.inputfile and options.targetfile:
cert = copyCert(options.inputfile)
writeCert(cert, options.targetfile, options.outputfile)
sys.exit()
# check signature
# inputfile
if options.inputfile and options.checksig:
check_sig(options.inputfile)
sys.exit()
# add sig to target file
if options.targetfile and options.sigfile:
signfile(options.targetfile, options.sigfile, options.outputfile)
sys.exit()
# truncate
if options.inputfile and options.truncate:
truncate(options.inputfile, options.outputfile)
sys.exit()
parser.print_help()
parser.error("You must do something!")
| [ "none" ] | none |
5e6ee8b555a309e5ce5e7845954d392ab41207eb | 0e1e643e864bcb96cf06f14f4cb559b034e114d0 | /Exps_7_v3/doc3d/Ablation4_ch016_ep010/I_w_M_to_W_pyr/pyr_0s/L7/step10_a.py | 985e11f6a4a139c0e01e9166d0f6ff8d31fb4b6d | [] | no_license | KongBOy/kong_model2 | 33a94a9d2be5b0f28f9d479b3744e1d0e0ebd307 | 1af20b168ffccf0d5293a393a40a9fa9519410b2 | refs/heads/master | 2022-10-14T03:09:22.543998 | 2022-10-06T11:33:42 | 2022-10-06T11:33:42 | 242,080,692 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,009 | py | #############################################################################################################################################################################################################
#############################################################################################################################################################################################################
### Add kong_model2 to sys.path
import os
code_exe_path = os.path.realpath(__file__) ### path of the step10_b.py currently being executed
code_exe_path_element = code_exe_path.split("\\") ### split the path; we still need to find at which level kong_model sits
code_dir = "\\".join(code_exe_path_element[:-1])
kong_layer = code_exe_path_element.index("kong_model2") ### find at which level kong_model2 sits
kong_model2_dir = "\\".join(code_exe_path_element[:kong_layer + 1]) ### locate the kong_model2 dir
import sys ### add kong_model2 to sys.path
sys.path.append(kong_model2_dir)
sys.path.append(code_dir)
# print(__file__.split("\\")[-1])
# print(" code_exe_path:", code_exe_path)
# print(" code_exe_path_element:", code_exe_path_element)
# print(" code_dir:", code_dir)
# print(" kong_layer:", kong_layer)
# print(" kong_model2_dir:", kong_model2_dir)
#############################################################################################################################################################################################################
kong_to_py_layer = len(code_exe_path_element) - 1 - kong_layer ### the -1 converts a length into an index
# print(" kong_to_py_layer:", kong_to_py_layer)
if (kong_to_py_layer == 0): template_dir = ""
elif(kong_to_py_layer == 2): template_dir = code_exe_path_element[kong_layer + 1][0:] ### [7:] used to strip the step1x_ prefix; later decided meaningful names need not be stripped, so changed to 0
elif(kong_to_py_layer == 3): template_dir = code_exe_path_element[kong_layer + 1][0:] + "/" + code_exe_path_element[kong_layer + 2][0:] ### [5:] used to strip the mask_ prefix (added because a Python module name cannot start with a digit); the automatic ordering turned out to be acceptable, so changed to 0
elif(kong_to_py_layer > 3): template_dir = code_exe_path_element[kong_layer + 1][0:] + "/" + code_exe_path_element[kong_layer + 2][0:] + "/" + "/".join(code_exe_path_element[kong_layer + 3: -1])
# print(" template_dir:", template_dir) ### 舉例: template_dir: 7_mask_unet/5_os_book_and_paper_have_dtd_hdr_mix_bg_tv_s04_mae
#############################################################################################################################################################################################################
exp_dir = template_dir
#############################################################################################################################################################################################################
from step06_a_datas_obj import *
from step09_0side_L7 import *
from step10_a2_loss_info_obj import *
from step10_b2_exp_builder import Exp_builder
rm_paths = [path for path in sys.path if code_dir in path]
for rm_path in rm_paths: sys.path.remove(rm_path)
rm_moduless = [module for module in sys.modules if "step09" in module]
for rm_module in rm_moduless: del sys.modules[rm_module]
#############################################################################################################################################################################################################
'''
exp_dir is the name of the folder one level above result_dir; a nested exp_dir works too~
For example, with exp_dir = "6_mask_unet/name_you_choose", every result_dir lives under:
6_mask_unet/name_you_choose/result_a
6_mask_unet/name_you_choose/result_b
6_mask_unet/name_you_choose/...
'''
use_db_obj = type8_blender_kong_doc3d_in_I_gt_W_ch_norm_v2
use_loss_obj = [mae_s001_sobel_k9_s001_loss_info_builder.set_loss_target("UNet_W").copy()] ### the z, y, x order follows step07_b_0b_Multi_UNet
#############################################################
### Build an empty Exp_builder so result_analyze can draw blank figures
empty = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_0side, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_0side.kong_model.model_describe) .set_train_args(epochs= 10) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="為了resul_analyze畫空白的圖,建一個empty的 Exp_builder")
#############################################################
ch032_0side = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_0side, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_0side.kong_model.model_describe) .set_train_args(epochs= 10) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
#############################################################
if(__name__ == "__main__"):
print("build exps cost time:", time.time() - start_time)
if len(sys.argv) < 2:
############################################################################################################
        ### Press F5 directly, or run "python step10_b1_exp_obj_load_and_train_and_test.py" with nothing after it, so the code below (meant for step10_b_subprocess.py) is not executed~~~
ch032_0side.build().run()
# print('no argument')
sys.exit()
    ### The code below is for step10_b_subprocess.py; it is equivalent to running "python step10_b1_exp_obj_load_and_train_and_test.py some_exp.build().run()" from the cmd
eval(sys.argv[1])
| [
"[email protected]"
] | |
1b5f7970359cd50efcaeb24cab8c592711e3bd4d | 2ca88d41f1bb5042338faec50b2af11931db0bdd | /test/test_forecaster_entrypoints.py | 90ba5d0e6c33b7c209c70ac83d0eba7bdfead759 | [
"Apache-2.0"
] | permissive | canerturkmen/gluon-ts | 2f2d46f9b01f5ee07a51a11e822b1c72c2475caa | 57ae07f571ff123eac04af077870c1f216f99d5c | refs/heads/master | 2022-09-10T23:30:26.162245 | 2022-04-20T12:44:01 | 2022-04-20T12:44:01 | 192,873,578 | 1 | 2 | Apache-2.0 | 2020-08-04T16:58:48 | 2019-06-20T07:43:07 | Python | UTF-8 | Python | false | false | 746 | py | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
import pkg_resources
# def test_forecaster_entrypoints():
# for entry_point in pkg_resources.iter_entry_points("gluonts_forecasters"):
# entry_point.load()
| [
"[email protected]"
] | |
50f104e0f6bb819ed7e3260cd1671e57d0744183 | f445450ac693b466ca20b42f1ac82071d32dd991 | /generated_tempdir_2019_09_15_163300/generated_part003381.py | 5237a05fd13b91ed25973d14234596c3dec60fd5 | [] | no_license | Upabjojr/rubi_generated | 76e43cbafe70b4e1516fb761cabd9e5257691374 | cd35e9e51722b04fb159ada3d5811d62a423e429 | refs/heads/master | 2020-07-25T17:26:19.227918 | 2019-09-15T15:41:48 | 2019-09-15T15:41:48 | 208,357,412 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,655 | py | from sympy.abc import *
from matchpy.matching.many_to_one import CommutativeMatcher
from matchpy import *
from matchpy.utils import VariableWithCount
from collections import deque
from multiset import Multiset
from sympy.integrals.rubi.constraints import *
from sympy.integrals.rubi.utility_function import *
from sympy.integrals.rubi.rules.miscellaneous_integration import *
from sympy import *
class CommutativeMatcher37234(CommutativeMatcher):
_instance = None
patterns = {
0: (0, Multiset({0: 1}), [
(VariableWithCount('i2.2.1.2.2.2.1.0', 1, 1, S(1)), Mul)
])
}
subjects = {}
subjects_by_id = {}
bipartite = BipartiteGraph()
associative = Mul
max_optional_count = 1
anonymous_patterns = set()
def __init__(self):
self.add_subject(None)
@staticmethod
def get():
if CommutativeMatcher37234._instance is None:
CommutativeMatcher37234._instance = CommutativeMatcher37234()
return CommutativeMatcher37234._instance
@staticmethod
def get_match_iter(subject):
subjects = deque([subject]) if subject is not None else deque()
subst0 = Substitution()
# State 37233
if len(subjects) >= 1 and isinstance(subjects[0], Pow):
tmp1 = subjects.popleft()
subjects2 = deque(tmp1._args)
# State 37235
if len(subjects2) >= 1:
tmp3 = subjects2.popleft()
subst1 = Substitution(subst0)
try:
subst1.try_add_variable('i2.2.1.1', tmp3)
except ValueError:
pass
else:
pass
# State 37236
if len(subjects2) >= 1:
tmp5 = subjects2.popleft()
subst2 = Substitution(subst1)
try:
subst2.try_add_variable('i2.2.1.2', tmp5)
except ValueError:
pass
else:
pass
# State 37237
if len(subjects2) == 0:
pass
# State 37238
if len(subjects) == 0:
pass
# 0: x**j
yield 0, subst2
subjects2.appendleft(tmp5)
subjects2.appendleft(tmp3)
subjects.appendleft(tmp1)
return
yield
from collections import deque | [
"[email protected]"
] | |
8baa9d2d34c28cf039e89bca5f34422e62280386 | e488d6d5300b84065ddb50320aa3930d64798047 | /mkt/constants/__init__.py | 5c29991237eb8686d3066ac298329ca61909b66c | [] | no_license | potch/zamboni | 81b2bf01a1d93bbf933a0b3f30db56b438b7b75f | 4e0bfa8be5af334e2c4be3917e3794216498291c | refs/heads/master | 2021-01-15T21:15:25.102909 | 2013-01-02T17:07:02 | 2013-01-02T20:16:10 | 634,135 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 192 | py | from .platforms import DEVICE_LOOKUP, FREE_PLATFORMS, PAID_PLATFORMS
from .submit import (APP_IMAGE_SIZES, APP_PREVIEW_MINIMUMS, APP_PREVIEW_SIZES,
MAX_PACKAGED_APP_SIZE)
| [
"[email protected]"
] | |
d4b70a9017046975bcd5e5d8feeaa0247d6923b0 | 753a70bc416e8dced2853f278b08ef60cdb3c768 | /models/research/slim/nets/resnet_v2_test.py | d06cbde94299ddd2b49f48504dd4119a6f6083ca | [
"MIT",
"Apache-2.0"
] | permissive | finnickniu/tensorflow_object_detection_tflite | ef94158e5350613590641880cb3c1062f7dd0efb | a115d918f6894a69586174653172be0b5d1de952 | refs/heads/master | 2023-04-06T04:59:24.985923 | 2022-09-20T16:29:08 | 2022-09-20T16:29:08 | 230,891,552 | 60 | 19 | MIT | 2023-03-25T00:31:18 | 2019-12-30T09:58:41 | C++ | UTF-8 | Python | false | false | 20,270 | py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for slim.nets.resnet_v2."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow.compat.v1 as tf
import tf_slim as slim
from nets import resnet_utils
from nets import resnet_v2
tf.disable_resource_variables()
def create_test_input(batch_size, height, width, channels):
"""Create test input tensor.
Args:
batch_size: The number of images per batch or `None` if unknown.
height: The height of each image or `None` if unknown.
width: The width of each image or `None` if unknown.
channels: The number of channels per image or `None` if unknown.
Returns:
Either a placeholder `Tensor` of dimension
[batch_size, height, width, channels] if any of the inputs are `None` or a
constant `Tensor` with the mesh grid values along the spatial dimensions.
"""
if None in [batch_size, height, width, channels]:
return tf.placeholder(tf.float32, (batch_size, height, width, channels))
else:
return tf.cast(
np.tile(
np.reshape(
np.reshape(np.arange(height), [height, 1]) +
np.reshape(np.arange(width), [1, width]),
[1, height, width, 1]), [batch_size, 1, 1, channels]),
dtype=tf.float32)
class ResnetUtilsTest(tf.test.TestCase):
def testSubsampleThreeByThree(self):
x = tf.reshape(tf.cast(tf.range(9), dtype=tf.float32), [1, 3, 3, 1])
x = resnet_utils.subsample(x, 2)
expected = tf.reshape(tf.constant([0, 2, 6, 8]), [1, 2, 2, 1])
with self.test_session():
self.assertAllClose(x.eval(), expected.eval())
def testSubsampleFourByFour(self):
x = tf.reshape(tf.cast(tf.range(16), dtype=tf.float32), [1, 4, 4, 1])
x = resnet_utils.subsample(x, 2)
expected = tf.reshape(tf.constant([0, 2, 8, 10]), [1, 2, 2, 1])
with self.test_session():
self.assertAllClose(x.eval(), expected.eval())
def testConv2DSameEven(self):
n, n2 = 4, 2
# Input image.
x = create_test_input(1, n, n, 1)
# Convolution kernel.
w = create_test_input(1, 3, 3, 1)
w = tf.reshape(w, [3, 3, 1, 1])
tf.get_variable('Conv/weights', initializer=w)
tf.get_variable('Conv/biases', initializer=tf.zeros([1]))
tf.get_variable_scope().reuse_variables()
y1 = slim.conv2d(x, 1, [3, 3], stride=1, scope='Conv')
y1_expected = tf.cast([[14, 28, 43, 26], [28, 48, 66, 37], [43, 66, 84, 46],
[26, 37, 46, 22]],
dtype=tf.float32)
y1_expected = tf.reshape(y1_expected, [1, n, n, 1])
y2 = resnet_utils.subsample(y1, 2)
y2_expected = tf.cast([[14, 43], [43, 84]], dtype=tf.float32)
y2_expected = tf.reshape(y2_expected, [1, n2, n2, 1])
y3 = resnet_utils.conv2d_same(x, 1, 3, stride=2, scope='Conv')
y3_expected = y2_expected
y4 = slim.conv2d(x, 1, [3, 3], stride=2, scope='Conv')
y4_expected = tf.cast([[48, 37], [37, 22]], dtype=tf.float32)
y4_expected = tf.reshape(y4_expected, [1, n2, n2, 1])
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
self.assertAllClose(y1.eval(), y1_expected.eval())
self.assertAllClose(y2.eval(), y2_expected.eval())
self.assertAllClose(y3.eval(), y3_expected.eval())
self.assertAllClose(y4.eval(), y4_expected.eval())
def testConv2DSameOdd(self):
n, n2 = 5, 3
# Input image.
x = create_test_input(1, n, n, 1)
# Convolution kernel.
w = create_test_input(1, 3, 3, 1)
w = tf.reshape(w, [3, 3, 1, 1])
tf.get_variable('Conv/weights', initializer=w)
tf.get_variable('Conv/biases', initializer=tf.zeros([1]))
tf.get_variable_scope().reuse_variables()
y1 = slim.conv2d(x, 1, [3, 3], stride=1, scope='Conv')
y1_expected = tf.cast(
[[14, 28, 43, 58, 34], [28, 48, 66, 84, 46], [43, 66, 84, 102, 55],
[58, 84, 102, 120, 64], [34, 46, 55, 64, 30]],
dtype=tf.float32)
y1_expected = tf.reshape(y1_expected, [1, n, n, 1])
y2 = resnet_utils.subsample(y1, 2)
y2_expected = tf.cast([[14, 43, 34], [43, 84, 55], [34, 55, 30]],
dtype=tf.float32)
y2_expected = tf.reshape(y2_expected, [1, n2, n2, 1])
y3 = resnet_utils.conv2d_same(x, 1, 3, stride=2, scope='Conv')
y3_expected = y2_expected
y4 = slim.conv2d(x, 1, [3, 3], stride=2, scope='Conv')
y4_expected = y2_expected
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
self.assertAllClose(y1.eval(), y1_expected.eval())
self.assertAllClose(y2.eval(), y2_expected.eval())
self.assertAllClose(y3.eval(), y3_expected.eval())
self.assertAllClose(y4.eval(), y4_expected.eval())
def _resnet_plain(self, inputs, blocks, output_stride=None, scope=None):
"""A plain ResNet without extra layers before or after the ResNet blocks."""
with tf.variable_scope(scope, values=[inputs]):
with slim.arg_scope([slim.conv2d], outputs_collections='end_points'):
net = resnet_utils.stack_blocks_dense(inputs, blocks, output_stride)
end_points = slim.utils.convert_collection_to_dict('end_points')
return net, end_points
def testEndPointsV2(self):
"""Test the end points of a tiny v2 bottleneck network."""
blocks = [
resnet_v2.resnet_v2_block(
'block1', base_depth=1, num_units=2, stride=2),
resnet_v2.resnet_v2_block(
'block2', base_depth=2, num_units=2, stride=1),
]
inputs = create_test_input(2, 32, 16, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
_, end_points = self._resnet_plain(inputs, blocks, scope='tiny')
expected = [
'tiny/block1/unit_1/bottleneck_v2/shortcut',
'tiny/block1/unit_1/bottleneck_v2/conv1',
'tiny/block1/unit_1/bottleneck_v2/conv2',
'tiny/block1/unit_1/bottleneck_v2/conv3',
'tiny/block1/unit_2/bottleneck_v2/conv1',
'tiny/block1/unit_2/bottleneck_v2/conv2',
'tiny/block1/unit_2/bottleneck_v2/conv3',
'tiny/block2/unit_1/bottleneck_v2/shortcut',
'tiny/block2/unit_1/bottleneck_v2/conv1',
'tiny/block2/unit_1/bottleneck_v2/conv2',
'tiny/block2/unit_1/bottleneck_v2/conv3',
'tiny/block2/unit_2/bottleneck_v2/conv1',
'tiny/block2/unit_2/bottleneck_v2/conv2',
'tiny/block2/unit_2/bottleneck_v2/conv3']
self.assertItemsEqual(expected, end_points.keys())
def _stack_blocks_nondense(self, net, blocks):
"""A simplified ResNet Block stacker without output stride control."""
for block in blocks:
with tf.variable_scope(block.scope, 'block', [net]):
for i, unit in enumerate(block.args):
with tf.variable_scope('unit_%d' % (i + 1), values=[net]):
net = block.unit_fn(net, rate=1, **unit)
return net
def testAtrousValuesBottleneck(self):
"""Verify the values of dense feature extraction by atrous convolution.
Make sure that dense feature extraction by stack_blocks_dense() followed by
subsampling gives identical results to feature extraction at the nominal
network output stride using the simple self._stack_blocks_nondense() above.
"""
block = resnet_v2.resnet_v2_block
blocks = [
block('block1', base_depth=1, num_units=2, stride=2),
block('block2', base_depth=2, num_units=2, stride=2),
block('block3', base_depth=4, num_units=2, stride=2),
block('block4', base_depth=8, num_units=2, stride=1),
]
nominal_stride = 8
# Test both odd and even input dimensions.
height = 30
width = 31
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
with slim.arg_scope([slim.batch_norm], is_training=False):
for output_stride in [1, 2, 4, 8, None]:
with tf.Graph().as_default():
with self.test_session() as sess:
tf.set_random_seed(0)
inputs = create_test_input(1, height, width, 3)
# Dense feature extraction followed by subsampling.
output = resnet_utils.stack_blocks_dense(inputs,
blocks,
output_stride)
if output_stride is None:
factor = 1
else:
factor = nominal_stride // output_stride
output = resnet_utils.subsample(output, factor)
# Make the two networks use the same weights.
tf.get_variable_scope().reuse_variables()
# Feature extraction at the nominal network rate.
expected = self._stack_blocks_nondense(inputs, blocks)
sess.run(tf.global_variables_initializer())
output, expected = sess.run([output, expected])
self.assertAllClose(output, expected, atol=1e-4, rtol=1e-4)
class ResnetCompleteNetworkTest(tf.test.TestCase):
"""Tests with complete small ResNet v2 networks."""
def _resnet_small(self,
inputs,
num_classes=None,
is_training=True,
global_pool=True,
output_stride=None,
include_root_block=True,
spatial_squeeze=True,
reuse=None,
scope='resnet_v2_small'):
"""A shallow and thin ResNet v2 for faster tests."""
block = resnet_v2.resnet_v2_block
blocks = [
block('block1', base_depth=1, num_units=3, stride=2),
block('block2', base_depth=2, num_units=3, stride=2),
block('block3', base_depth=4, num_units=3, stride=2),
block('block4', base_depth=8, num_units=2, stride=1),
]
return resnet_v2.resnet_v2(inputs, blocks, num_classes,
is_training=is_training,
global_pool=global_pool,
output_stride=output_stride,
include_root_block=include_root_block,
spatial_squeeze=spatial_squeeze,
reuse=reuse,
scope=scope)
def testClassificationEndPoints(self):
global_pool = True
num_classes = 10
inputs = create_test_input(2, 224, 224, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
logits, end_points = self._resnet_small(inputs, num_classes,
global_pool=global_pool,
spatial_squeeze=False,
scope='resnet')
self.assertTrue(logits.op.name.startswith('resnet/logits'))
self.assertListEqual(logits.get_shape().as_list(), [2, 1, 1, num_classes])
self.assertTrue('predictions' in end_points)
self.assertListEqual(end_points['predictions'].get_shape().as_list(),
[2, 1, 1, num_classes])
self.assertTrue('global_pool' in end_points)
self.assertListEqual(end_points['global_pool'].get_shape().as_list(),
[2, 1, 1, 32])
def testEndpointNames(self):
# Like ResnetUtilsTest.testEndPointsV2(), but for the public API.
global_pool = True
num_classes = 10
inputs = create_test_input(2, 224, 224, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
_, end_points = self._resnet_small(inputs, num_classes,
global_pool=global_pool,
scope='resnet')
expected = ['resnet/conv1']
for block in range(1, 5):
for unit in range(1, 4 if block < 4 else 3):
for conv in range(1, 4):
expected.append('resnet/block%d/unit_%d/bottleneck_v2/conv%d' %
(block, unit, conv))
expected.append('resnet/block%d/unit_%d/bottleneck_v2' % (block, unit))
expected.append('resnet/block%d/unit_1/bottleneck_v2/shortcut' % block)
expected.append('resnet/block%d' % block)
expected.extend(['global_pool', 'resnet/logits', 'resnet/spatial_squeeze',
'predictions'])
self.assertItemsEqual(end_points.keys(), expected)
def testClassificationShapes(self):
global_pool = True
num_classes = 10
inputs = create_test_input(2, 224, 224, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
_, end_points = self._resnet_small(inputs, num_classes,
global_pool=global_pool,
scope='resnet')
endpoint_to_shape = {
'resnet/block1': [2, 28, 28, 4],
'resnet/block2': [2, 14, 14, 8],
'resnet/block3': [2, 7, 7, 16],
'resnet/block4': [2, 7, 7, 32]}
for endpoint in endpoint_to_shape:
shape = endpoint_to_shape[endpoint]
self.assertListEqual(end_points[endpoint].get_shape().as_list(), shape)
def testFullyConvolutionalEndpointShapes(self):
global_pool = False
num_classes = 10
inputs = create_test_input(2, 321, 321, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
_, end_points = self._resnet_small(inputs, num_classes,
global_pool=global_pool,
spatial_squeeze=False,
scope='resnet')
endpoint_to_shape = {
'resnet/block1': [2, 41, 41, 4],
'resnet/block2': [2, 21, 21, 8],
'resnet/block3': [2, 11, 11, 16],
'resnet/block4': [2, 11, 11, 32]}
for endpoint in endpoint_to_shape:
shape = endpoint_to_shape[endpoint]
self.assertListEqual(end_points[endpoint].get_shape().as_list(), shape)
def testRootlessFullyConvolutionalEndpointShapes(self):
global_pool = False
num_classes = 10
inputs = create_test_input(2, 128, 128, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
_, end_points = self._resnet_small(inputs, num_classes,
global_pool=global_pool,
include_root_block=False,
spatial_squeeze=False,
scope='resnet')
endpoint_to_shape = {
'resnet/block1': [2, 64, 64, 4],
'resnet/block2': [2, 32, 32, 8],
'resnet/block3': [2, 16, 16, 16],
'resnet/block4': [2, 16, 16, 32]}
for endpoint in endpoint_to_shape:
shape = endpoint_to_shape[endpoint]
self.assertListEqual(end_points[endpoint].get_shape().as_list(), shape)
def testAtrousFullyConvolutionalEndpointShapes(self):
global_pool = False
num_classes = 10
output_stride = 8
inputs = create_test_input(2, 321, 321, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
_, end_points = self._resnet_small(inputs,
num_classes,
global_pool=global_pool,
output_stride=output_stride,
spatial_squeeze=False,
scope='resnet')
endpoint_to_shape = {
'resnet/block1': [2, 41, 41, 4],
'resnet/block2': [2, 41, 41, 8],
'resnet/block3': [2, 41, 41, 16],
'resnet/block4': [2, 41, 41, 32]}
for endpoint in endpoint_to_shape:
shape = endpoint_to_shape[endpoint]
self.assertListEqual(end_points[endpoint].get_shape().as_list(), shape)
def testAtrousFullyConvolutionalValues(self):
"""Verify dense feature extraction with atrous convolution."""
nominal_stride = 32
for output_stride in [4, 8, 16, 32, None]:
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
with tf.Graph().as_default():
with self.test_session() as sess:
tf.set_random_seed(0)
inputs = create_test_input(2, 81, 81, 3)
# Dense feature extraction followed by subsampling.
output, _ = self._resnet_small(inputs, None,
is_training=False,
global_pool=False,
output_stride=output_stride)
if output_stride is None:
factor = 1
else:
factor = nominal_stride // output_stride
output = resnet_utils.subsample(output, factor)
# Make the two networks use the same weights.
tf.get_variable_scope().reuse_variables()
# Feature extraction at the nominal network rate.
expected, _ = self._resnet_small(inputs, None,
is_training=False,
global_pool=False)
sess.run(tf.global_variables_initializer())
self.assertAllClose(output.eval(), expected.eval(),
atol=1e-4, rtol=1e-4)
def testUnknownBatchSize(self):
batch = 2
height, width = 65, 65
global_pool = True
num_classes = 10
inputs = create_test_input(None, height, width, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
logits, _ = self._resnet_small(inputs, num_classes,
global_pool=global_pool,
spatial_squeeze=False,
scope='resnet')
self.assertTrue(logits.op.name.startswith('resnet/logits'))
self.assertListEqual(logits.get_shape().as_list(),
[None, 1, 1, num_classes])
images = create_test_input(batch, height, width, 3)
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
output = sess.run(logits, {inputs: images.eval()})
self.assertEqual(output.shape, (batch, 1, 1, num_classes))
def testFullyConvolutionalUnknownHeightWidth(self):
batch = 2
height, width = 65, 65
global_pool = False
inputs = create_test_input(batch, None, None, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
output, _ = self._resnet_small(inputs, None,
global_pool=global_pool)
self.assertListEqual(output.get_shape().as_list(),
[batch, None, None, 32])
images = create_test_input(batch, height, width, 3)
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
output = sess.run(output, {inputs: images.eval()})
self.assertEqual(output.shape, (batch, 3, 3, 32))
def testAtrousFullyConvolutionalUnknownHeightWidth(self):
batch = 2
height, width = 65, 65
global_pool = False
output_stride = 8
inputs = create_test_input(batch, None, None, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
output, _ = self._resnet_small(inputs,
None,
global_pool=global_pool,
output_stride=output_stride)
self.assertListEqual(output.get_shape().as_list(),
[batch, None, None, 32])
images = create_test_input(batch, height, width, 3)
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
output = sess.run(output, {inputs: images.eval()})
self.assertEqual(output.shape, (batch, 9, 9, 32))
if __name__ == '__main__':
tf.test.main()
| [
"[email protected]"
] | |
2b74909d2d1d7762214f31b2628489b91f3c0436 | 508c5e01aa7dce530093d5796250eff8d74ba06c | /code/venv/lib/python3.6/site-packages/passlib/utils/_blowfish/__init__.py | 16b85443b77e5ba7fccc0601e0d5299282f0083b | [
"MIT"
] | permissive | jhkuang11/UniTrade | f220b0d84db06ff17626b3daa18d4cb8b72a5d3f | 5f68b853926e167936b58c8543b8f95ebd6f5211 | refs/heads/master | 2022-12-12T15:58:30.013516 | 2019-02-01T21:07:15 | 2019-02-01T21:07:15 | 166,479,655 | 0 | 0 | MIT | 2022-12-07T03:59:47 | 2019-01-18T22:19:45 | Python | UTF-8 | Python | false | false | 6,509 | py | """passlib.utils._blowfish - pure-python eks-blowfish implementation for bcrypt
This is a pure-python implementation of the EKS-Blowfish algorithm described by
Provos and Mazieres in `A Future-Adaptable Password Scheme
<http://www.openbsd.org/papers/bcrypt-paper.ps>`_.
This package contains two submodules:
* ``_blowfish/base.py`` contains a class implementing the eks-blowfish algorithm
using easy-to-examine code.
* ``_blowfish/unrolled.py`` contains a subclass which replaces some methods
of the original class with sped-up versions, mainly using unrolled loops
and local variables. this is the class which is actually used by
Passlib to perform BCrypt in pure python.
This module is auto-generated by a script, ``_blowfish/_gen_files.py``.
Status
------
This implementation is usable, but is an order of magnitude too slow to be
usable with real security. For "ok" security, BCrypt hashes should have at
least 2**11 rounds (as of 2011). Assuming a desired response time <= 100ms,
this means a BCrypt implementation should get at least 20 rounds/ms in order
to be both usable *and* secure. On a 2 ghz cpu, this implementation gets
roughly 0.09 rounds/ms under CPython (220x too slow), and 1.9 rounds/ms
under PyPy (10x too slow).
History
-------
While subsequently modified considerably for Passlib, this code was originally
based on `jBcrypt 0.2 <http://www.mindrot.org/projects/jBCrypt/>`_, which was
released under the BSD license::
Copyright (c) 2006 Damien Miller <[email protected]>
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
#=============================================================================
# imports
#=============================================================================
# core
from itertools import chain
import struct
# pkg
from passlib.utils import bcrypt64, getrandbytes, rng
from passlib.utils.compat import b, bytes, BytesIO, unicode, u
from passlib.utils._blowfish.unrolled import BlowfishEngine
# local
__all__ = [
'BlowfishEngine',
'raw_bcrypt',
]
#=============================================================================
# bcrypt constants
#=============================================================================
# bcrypt constant data "OrpheanBeholderScryDoubt" as 6 integers
BCRYPT_CDATA = [
0x4f727068, 0x65616e42, 0x65686f6c,
0x64657253, 0x63727944, 0x6f756274
]
# struct used to encode ciphertext as digest (last output byte discarded)
digest_struct = struct.Struct(">6I")
#=============================================================================
# base bcrypt helper
#
# interface designed only for use by passlib.handlers.bcrypt:BCrypt
# probably not suitable for other purposes
#=============================================================================
BNULL = b('\x00')
def raw_bcrypt(password, ident, salt, log_rounds):
"""perform central password hashing step in bcrypt scheme.
:param password: the password to hash
:param ident: identifier w/ minor version (e.g. 2, 2a)
:param salt: the binary salt to use (encoded in bcrypt-base64)
    :param log_rounds: the log2 of the number of rounds (as int)
:returns: bcrypt-base64 encoded checksum
"""
#===================================================================
# parse inputs
#===================================================================
# parse ident
assert isinstance(ident, unicode)
if ident == u('2'):
minor = 0
elif ident == u('2a'):
minor = 1
# XXX: how to indicate caller wants to use crypt_blowfish's
# workaround variant of 2a?
elif ident == u('2x'):
raise ValueError("crypt_blowfish's buggy '2x' hashes are not "
"currently supported")
elif ident == u('2y'):
# crypt_blowfish compatibility ident which guarantees compat w/ 2a
minor = 1
else:
raise ValueError("unknown ident: %r" % (ident,))
# decode & validate salt
assert isinstance(salt, bytes)
salt = bcrypt64.decode_bytes(salt)
if len(salt) < 16:
raise ValueError("Missing salt bytes")
elif len(salt) > 16:
salt = salt[:16]
# prepare password
assert isinstance(password, bytes)
if minor > 0:
password += BNULL
# validate rounds
if log_rounds < 4 or log_rounds > 31:
raise ValueError("Bad number of rounds")
#===================================================================
#
# run EKS-Blowfish algorithm
#
# This uses the "enhanced key schedule" step described by
# Provos and Mazieres in "A Future-Adaptable Password Scheme"
# http://www.openbsd.org/papers/bcrypt-paper.ps
#
#===================================================================
engine = BlowfishEngine()
# convert password & salt into list of 18 32-bit integers (72 bytes total).
pass_words = engine.key_to_words(password)
salt_words = engine.key_to_words(salt)
# truncate salt_words to original 16 byte salt, or loop won't wrap
# correctly when passed to .eks_salted_expand()
salt_words16 = salt_words[:4]
# do EKS key schedule setup
engine.eks_salted_expand(pass_words, salt_words16)
# apply password & salt keys to key schedule a bunch more times.
rounds = 1<<log_rounds
engine.eks_repeated_expand(pass_words, salt_words, rounds)
# encipher constant data, and encode to bytes as digest.
data = list(BCRYPT_CDATA)
i = 0
while i < 6:
data[i], data[i+1] = engine.repeat_encipher(data[i], data[i+1], 64)
i += 2
raw = digest_struct.pack(*data)[:-1]
return bcrypt64.encode_bytes(raw)
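
# Illustrative usage sketch (editor's addition, not part of the original module):
# raw_bcrypt() is normally driven by passlib.handlers.bcrypt, but it can be
# exercised directly.  The salt below is just 22 arbitrary bcrypt-base64
# characters chosen for the example (an assumption, not a passlib constant):
#
#     from passlib.utils.compat import b, u
#     checksum = raw_bcrypt(b("my password"), u("2a"),
#                           b("abcdefghijklmnopqrstuv"), log_rounds=12)
#     # `checksum` is the bcrypt-base64 encoded 31-character digest that the
#     # handler embeds after the salt in a "$2a$12$..." hash string.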
#=============================================================================
# eof
#=============================================================================
| [
"[email protected]"
] | |
50af0ba1fe035e61753c470d57f2f6ec17a41e80 | 7fc26864e08354acb64d75047ccf8f3d0d0e206d | /dryadic/learning/kbtl/__init__.py | 691319ac52de1e0bf79a30e043a48aa7953865d1 | [
"MIT"
] | permissive | ohsu-comp-bio/dryads | ad56f5655cba87adfa800ff30850e0810d3cc2e2 | 015f6d3186a5146809334e2490c072e675b22891 | refs/heads/master | 2023-03-28T02:52:32.920200 | 2021-03-27T16:30:34 | 2021-03-27T16:30:34 | 142,361,927 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 67 | py |
from .multi_domain import MultiDomain
__all__ = ['MultiDomain']
| [
"[email protected]"
] | |
816522a328e2247a1e43e6f0537dd652c8266d33 | 6a2c2af113bb8b4d55db6ceabc6e78a0bbcd1f91 | /genus processing/Double Breasted.py | 01a6ae3e0307df77a4447697a686b3f1c6324fc1 | [] | no_license | JinghongM/Everlasting_Data_Cleansing | 4a966aca5cba102961f64338411d76e51f60f51e | 237073980b2bd1697db578013c7463dcbc1492fb | refs/heads/master | 2021-04-26T23:48:38.083155 | 2018-06-21T20:00:11 | 2018-06-21T20:00:11 | 123,861,020 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 523 | py | import pandas as pd
import copy
import os.path
Pattern=6
Material=7
Species=4
CGP = pd.read_excel("../Realdata.xlsx")
for row in range(1,CGP.shape[0]):
genus = str(CGP.iat[row,3])
if "Double Breasted" in genus:
print(row)
CGP.iat[row,3] = genus.replace("Double Breasted ","")
CGP.iat[row,Species] = "Double Breasted"
i=0 #process headers
while i<len(CGP.columns.values):
if "Unnamed" in CGP.columns.values[i]:
CGP.columns.values[i] = ''
i+=1
CGP.to_excel('../Realdata.xlsx',index=False) | [
"[email protected]"
] | |
5e7fd79ebbe73607afa51ba6a52d8e1ee3a6c9b5 | 2579f37a13cfbb47944c5b81c6e83ca710b29f88 | /Client/config/Client_config_info.py | 4f407858ee9bb1f511c54d87662edd44d4154b42 | [] | no_license | YangQian1992/FTP | 932f32d5ed934bae295bd674757f7af23d0ad1ba | 87d3a78522e7eb8058def1d74d7f32f0f61f1b86 | refs/heads/master | 2020-03-31T16:28:35.146329 | 2018-10-10T06:53:12 | 2018-10-10T06:53:12 | 152,376,641 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 136 | py | personal_config_info = {
'SERVER_IP':'127.0.0.1',
'SERVER_PORT':8082,
    'CODING':'utf-8',  # character encoding
'BLOCK_SIZE':1024
} | [
"[email protected]"
] | |
0c4aa77a3094093376053b5f19d0e964a4b1427a | 9e15ada895e90d033bc3b65c2666065bddd62605 | /12/12.1/Path_test1.py | 3093e21b3c397c3f1a090b05e991c3d7de8b5f0f | [] | no_license | zhyErick/fengkuang_python | b0f0c78273420fd862691799bfd7e4f1b6eadf80 | 6d50ad3b7d4ae05d06379c2dc87d91081964ec6d | refs/heads/master | 2021-02-14T08:23:26.616211 | 2020-05-06T13:08:07 | 2020-05-06T13:08:07 | 244,788,500 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 463 | py | from pathlib import *
# Get the parent directory
p = Path('../')
# Iterate over every file and subdirectory inside it
for x in p.iterdir():
    print(x)
# Get the parent directory
p = Path('../')
# Get the .py files under the parent directory and all of its subdirectories
for x in p.glob('**/*.py'):
    print(x)
# Get the directory C:\python\oldboy_python
p = Path('C:\python\oldboy_python')
# Get the .py files in its immediate subdirectories
for x in p.glob('*/*.py'):
print(x) | [
"[email protected]"
] | |
16333d2fe48ad23cc47270a1b8fc53973efafdf3 | 90dc57404ea2e98006adba91f53ea9cc92124bb4 | /spare_parts/forms.py | a6679136c699e7b1b3cdd28e55c4d7b38fcc7460 | [] | no_license | AndreySkryl/flask-test | e57409c779f113980a9acf1c6f9a37be5106e119 | 8979e15789160e65a7ef6123e3ba638e764d8131 | refs/heads/main | 2023-04-24T18:33:59.675028 | 2021-05-10T15:16:56 | 2021-05-10T15:16:56 | 365,334,649 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 281 | py | from wtforms import Form, StringField, TextAreaField, IntegerField
class SparePartForm(Form):
    title = StringField('Заголовок')         # Russian label: "Title"
    price = IntegerField('Цена')             # "Price"
    amount = IntegerField('Количество')      # "Quantity"
    description = TextAreaField('Описание')  # "Description"
| [
"[email protected]"
] | |
f1cd67923e90294c3a5c457d1925664b58b06270 | e23a4f57ce5474d468258e5e63b9e23fb6011188 | /125_algorithms/_exercises/exercises/Python_Hand-on_Solve_200_Problems/Section 7 Dictionary/check_key_exist_solution.py | 637344c193288ad01e3c9e4394cbd7653b485f05 | [] | no_license | syurskyi/Python_Topics | 52851ecce000cb751a3b986408efe32f0b4c0835 | be331826b490b73f0a176e6abed86ef68ff2dd2b | refs/heads/master | 2023-06-08T19:29:16.214395 | 2023-05-29T17:09:11 | 2023-05-29T17:09:11 | 220,583,118 | 3 | 2 | null | 2023-02-16T03:08:10 | 2019-11-09T02:58:47 | Python | UTF-8 | Python | false | false | 753 | py | # # To add a new cell, type '# %%'
# # To add a new markdown cell, type '# %% [markdown]'
# # %%
# # ---------------------------------------------------------------
# # python best courses https://courses.tanpham.org/
# # ---------------------------------------------------------------
# # Check if a given key already exists in a dictionary
# # input
# # d = {1: 10, 2: 20, 3: 30, 4: 40, 5: 50, 6: 60}
# # is_key_present(5)
# # is_key_present(9)
# # output
# # Key is present in the dictionary
# # Key is not present in the dictionary
#
d = {1: 10, 2: 20, 3: 30, 4: 40, 5: 50, 6: 60}
def is_key_present(x):
    if x in d:
        print('Key is present in the dictionary')
    else:
        print('Key is not present in the dictionary')
is_key_present(5)
is_key_present(9)
#
#
| [
"[email protected]"
] | |
b0dad98cc918993b31888c8234bf8e6d804cb304 | 40eb94673bb2e015c1640665a639c508279e3df4 | /cursos/_flask/flask/ejemplos/u29/aplicacion/models.py | defde3e73d247258609be8cc99c0eaf13f0671aa | [] | no_license | josedom24/plataforma_pledin | 3ab5c59abd87c90f066ba9164d6d0cbc02ea816f | e0521eb103013e0f8d9e2b2ea50e6acac0d09784 | refs/heads/master | 2023-07-20T15:43:30.796223 | 2023-07-17T19:07:51 | 2023-07-17T19:07:51 | 138,278,487 | 2 | 0 | null | 2023-05-01T20:05:34 | 2018-06-22T08:44:58 | Python | UTF-8 | Python | false | false | 2,007 | py | from sqlalchemy import Boolean, Column , ForeignKey
from sqlalchemy import DateTime, Integer, String, Text, Float
from sqlalchemy.orm import relationship
from aplicacion.app import db
from werkzeug.security import generate_password_hash, check_password_hash
class Categorias(db.Model):
"""Categorías de los artículos"""
__tablename__ = 'categorias'
id = Column(Integer, primary_key=True)
nombre = Column(String(100))
articulos = relationship("Articulos", cascade="all, delete-orphan", backref="Categorias",lazy='dynamic')
def __repr__(self):
return (u'<{self.__class__.__name__}: {self.id}>'.format(self=self))
class Articulos(db.Model):
"""Artículos de nuestra tienda"""
__tablename__ = 'articulos'
id = Column(Integer, primary_key=True)
nombre = Column(String(100),nullable=False)
precio = Column(Float,default=0)
iva = Column(Integer,default=21)
descripcion = Column(String(255))
image = Column(String(255))
stock = Column(Integer,default=0)
CategoriaId=Column(Integer,ForeignKey('categorias.id'), nullable=False)
categoria = relationship("Categorias", backref="Articulos")
def precio_final(self):
return self.precio+(self.precio*self.iva/100)
def __repr__(self):
return (u'<{self.__class__.__name__}: {self.id}>'.format(self=self))
class Usuarios(db.Model):
"""Usuarios"""
__tablename__ = 'usuarios'
id = Column(Integer, primary_key=True)
username = Column(String(100),nullable=False)
password_hash = Column(String(128),nullable=False)
nombre = Column(String(200),nullable=False)
email = Column(String(200),nullable=False)
admin = Column(Boolean, default=False)
def __repr__(self):
return (u'<{self.__class__.__name__}: {self.id}>'.format(self=self))
@property
def password(self):
raise AttributeError('password is not a readable attribute')
@password.setter
def password(self, password):
self.password_hash = generate_password_hash(password)
def verify_password(self, password):
return check_password_hash(self.password_hash, password)
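
# Illustrative usage (editor's note, not from the original project): the
# write-only `password` property above means callers assign the plain text and
# read back only the hash.  The values here are made up for the example:
#
#     u = Usuarios(username='ana', nombre='Ana', email='[email protected]')
#     u.password = 'secret'          # stores a Werkzeug hash in password_hash
#     u.verify_password('secret')    # -> True
#     u.password                     # -> raises AttributeError (not readable)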
| [
"[email protected]"
] | |
10da254f4b916fe94485339bbb590bc46404c718 | 46e271e27afe50b8b62be0651d78164490911bb3 | /ws_moveit/src/example/example_pkg/scripts/red.py | 1ec501210d7051649bb4e99b72defa0eede3bac3 | [] | no_license | Nidhiks2000/Vargi-bot | 8a43af1e470b6fc84d468003f67471a1e1f47aad | 3e2e7be310ed7372cb6960eea8faabec75d9fbcf | refs/heads/master | 2023-07-24T01:05:10.049800 | 2021-09-08T16:31:08 | 2021-09-08T16:31:08 | 403,935,308 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,639 | py | #!/usr/bin/env python
import rospy
import cv2
import sys
from std_msgs.msg import String
from sensor_msgs.msg import Image
from cv_bridge import CvBridge, CvBridgeError
import numpy as np
class Camera1:
def __init__(self):
self.bridge = CvBridge()
self.image_sub = rospy.Subscriber("/eyrc/vb/camera_1/image_raw", Image,self.callback)
def callback(self,data):
try:
cv_image = self.bridge.imgmsg_to_cv2(data, "bgr8")
except CvBridgeError as e:
rospy.logerr(e)
(rows,cols,channels) = cv_image.shape
image = cv_image
# Resize a 720x1280 image to 360x640 to fit it on the screen
resized_image = cv2.resize(image, (720/2, 1280/2))
green = np.uint8([[[0, 255, 0]]]) #green color
hsvGreen = cv2.cvtColor(green, cv2.COLOR_BGR2HSV) #hsv value of green color
lowerLimit = hsvGreen[0][0][0] - 10, 100, 100 # range of green color lower limit and upper limit
upperLimit = hsvGreen[0][0][0] + 10, 255, 255
red = np.uint8([[[0, 0, 255]]]) #red color
hsvred = cv2.cvtColor(red, cv2.COLOR_BGR2HSV) #hsv value of red color
lower = hsvred[0][0][0] - 10, 100, 100 # range of red color lower limit and upper limit
upper = hsvred[0][0][0] + 10, 255, 255
hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV) #convert the image into hsv
lg = np.array(lowerLimit) #range of green color
ug = np.array(upperLimit)
green_mask = cv2.inRange(hsv, lg, ug) #green masked image
cv2.imshow('green_packages', green_mask) #show the image
lr = np.array(lower) #range of red color
ur = np.array(upper)
red_mask = cv2.inRange(hsv, lr, ur) #red masked image
cv2.imshow('red_packages', red_mask) #show the image
original = image.copy()
image = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
lower = np.array([22, 93, 0], dtype="uint8") #yellow colour lower range and upper range
upper = np.array([45, 255, 255], dtype="uint8")
mask = cv2.inRange(image, lower, upper)
cnts = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
cnts = cnts[0] if len(cnts) == 2 else cnts[1]
for c in cnts:
x,y,w,h = cv2.boundingRect(c)
cv2.rectangle(original, (x, y), (x + w, y + h), (36,255,12), 2)
cv2.imshow('yellow_packages', mask)
cv2.imshow("/eyrc/vb/camera_1/image_raw", resized_image)
cv2.waitKey(3)
def main(args):
rospy.init_node('node_eg1_read_camera', anonymous=True)
ic = Camera1()
try:
rospy.spin()
except KeyboardInterrupt:
rospy.loginfo("Shutting down")
cv2.destroyAllWindows()
if __name__ == '__main__':
main(sys.argv)
| [
"Happysunshine.disroot.org"
] | Happysunshine.disroot.org |
5e1380e7e83fc9ec185495578654737b55934163 | 13f836eb4770d3d2b0e4be27067411a9d71b8e0d | /__init__.py | f61e61e171dabbbe82865b96ec71b02b37e4a5a4 | [
"ISC"
] | permissive | willsion/push_api | b6f5395178543a6139bffa2406a8955b69c8b393 | 91b5ab8f15029a698216791365b2f589dc340d5c | refs/heads/master | 2021-01-15T14:23:45.981417 | 2016-09-07T02:41:35 | 2016-09-07T02:41:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 390 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# =============================================================================
# FileName: __init__.py
# Desc: 2015-15/1/7: 1:32 PM
# Author: 苦咖啡
# Email: [email protected]
# HomePage: http://blog.kukafei520.net
# History:
# =============================================================================
| [
"[email protected]"
] | |
1d5d22515ceb69a934cb6bae79eabbf50bc1f463 | 47744b621bd0bc03f2eb6c0fead3ad2347a70aac | /ud120-projects/k_means/k_means_3_features.py | 54960b687f087c22669e3e9627fb3600a5022b27 | [] | no_license | shivam04/udacity-intro-to-machine-learning | 55be33ab1c426d7578bac4cf6c23486feca52c0d | 5e3a535bc31ec3d29088db832a0fa921a6b4b467 | refs/heads/master | 2020-04-06T04:20:40.663517 | 2017-06-24T07:46:43 | 2017-06-24T07:46:43 | 82,966,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,623 | py | #!/usr/bin/python
"""
Skeleton code for k-means clustering mini-project.
"""
import pickle
import numpy
import matplotlib.pyplot as plt
import sys
sys.path.append("../tools/")
from feature_format import featureFormat, targetFeatureSplit
def Draw(pred, features, poi, mark_poi=False, name="image.png", f1_name="feature 1", f2_name="feature 2"):
""" some plotting code designed to help you visualize your clusters """
### plot each cluster with a different color--add more colors for
### drawing more than five clusters
colors = ["b", "c", "k", "m", "g"]
for ii, pp in enumerate(pred):
plt.scatter(features[ii][0], features[ii][1], color = colors[pred[ii]])
### if you like, place red stars over points that are POIs (just for funsies)
if mark_poi:
for ii, pp in enumerate(pred):
if poi[ii]:
plt.scatter(features[ii][0], features[ii][1], color="r", marker="*")
plt.xlabel(f1_name)
plt.ylabel(f2_name)
plt.savefig(name)
plt.show()
### load in the dict of dicts containing all the data on each person in the dataset
data_dict = pickle.load( open("../final_project/final_project_dataset.pkl", "r") )
### there's an outlier--remove it!
data_dict.pop("TOTAL", 0)
#print data_dict
### the input features we want to use
### can be any key in the person-level dictionary (salary, director_fees, etc.)
feature_1 = "salary"
feature_2 = "exercised_stock_options"
feature_3 = "total_payments"
poi = "poi"
features_list = [poi, feature_1, feature_2,feature_3]
data = featureFormat(data_dict, features_list )
poi, finance_features = targetFeatureSplit( data )
### in the "clustering with 3 features" part of the mini-project,
### you'll want to change this line to
### for f1, f2, _ in finance_features:
### (as it's currently written, the line below assumes 2 features)
for f1, f2 ,f3 in finance_features:
plt.scatter( f1, f2, f3 )
plt.show()
### cluster here; create predictions of the cluster labels
### for the data and store them to a list called pred
from sklearn.cluster import KMeans
data2 = featureFormat(data_dict, features_list )
poi, finance_features = targetFeatureSplit( data2 )
clf = KMeans(n_clusters=2, random_state=0)
pred = clf.fit_predict( finance_features )
### rename the "name" parameter when you change the number of features
### so that the figure gets saved to a different file
try:
Draw(pred, finance_features, poi, mark_poi=False, name="clusters3.pdf", f1_name=feature_1, f2_name=feature_2)
except NameError:
print "no predictions object named pred found, no clusters to plot"
| [
"[email protected]"
] | |
8bb8872a18f1c5eafc368fbb1198a76a957a0687 | 9a41558b414f404c119e504df8b3627e37c8b8d0 | /nappy/mkcell/cell_maker.py | 1f0d2845cb9176af52414be290c4d573aef49319 | [
"MIT"
] | permissive | medgbb/nap | c5dfd099d301d72c87006b436376ed862df566b2 | 6e7b5c47fd33fed1ebf7a8c3890b2a7921101213 | refs/heads/master | 2022-12-10T01:26:12.329772 | 2020-09-10T02:50:49 | 2020-09-10T02:50:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,797 | py | #!/usr/bin/env python
"""
Make typical crystalline structures of conventional cell.
Usage:
cell_maker.py (sc|bcc|fcc|hcp|dia|nacl|zb|wz) [options]
Options:
-h, --help Show this help message and exit.
-s, --size=SIZE
Number of copies of the unit cell, in comma-separated format, nx,ny,nz. [default: 1,1,1]
-l, --lattice-constant=LATCONST
Lattice constant of an axis. [default: 5.427]
-o OUTFILE
Output file name. Format is detected automatically. [default: POSCAR]
--orientation ORIENTATION
Orientation of z-axis in Miller index. [default: 0,0,1]
--celltype CELLTYPE
Conventional or primitive. [default: conventional]
--specorder SPECORDER
Species order. [default: None]
"""
from __future__ import print_function
import sys
import numpy as np
from docopt import docopt
from nappy.napsys import NAPSystem
_default_specorder=['Si']
def make_sc(latconst=1.0):
"""
Make a cell of simple cubic structure.
"""
s= NAPSystem(specorder=_default_specorder)
#...lattice
a1= np.array([ 1.0, 0.0, 0.0 ])
a2= np.array([ 0.0, 1.0, 0.0 ])
a3= np.array([ 0.0, 0.0, 1.0 ])
s.set_lattice(latconst,a1,a2,a3)
symbol = _default_specorder[0]
symbols = [ symbol ]
poss = [[0.00, 0.00, 0.00]]
vels = [[0., 0., 0.]]
frcs = [[0., 0., 0.]]
s.add_atoms(symbols,poss,vels,frcs)
return s
def make_bcc(latconst=1.0,specorder=None):
"""
Make a cell of bcc structure with z along [001].
"""
if specorder is None:
specorder = ['Fe']
s= NAPSystem(specorder=specorder)
#...lattice
a1= np.array([ 1.0, 0.0, 0.0 ])
a2= np.array([ 0.0, 1.0, 0.0 ])
a3= np.array([ 0.0, 0.0, 1.0 ])
s.set_lattice(latconst,a1,a2,a3)
poss = [[0.00, 0.00, 0.00],
            [0.50, 0.50, 0.50]]
    symbol = specorder[0]  # use the requested species (as make_fcc does), not the module default
symbols = [ symbol for i in range(len(poss)) ]
vels = [ [0., 0., 0.] for i in range(len(poss)) ]
frcs = [ [0., 0., 0.] for i in range(len(poss)) ]
s.add_atoms(symbols,poss,vels,frcs)
return s
def make_bcc110(latconst=1.0):
"""
Make a cell of bcc structure with z along [110].
"""
s= NAPSystem(specorder=_default_specorder)
#...lattice
a1= np.array([ 1.0, 0.0, 0.0 ])
a2= np.array([ 0.0, 1.414, 0.0 ])
a3= np.array([ 0.0, 0.0, 1.414 ])
s.set_lattice(latconst,a1,a2,a3)
symbol = _default_specorder[0]
symbols = [ symbol, symbol, symbol, symbol]
poss = [[0.00, 0.00, 0.00],
[0.00, 0.50, 0.50],
[0.50, 0.50, 0.00],
[0.50, 0.00, 0.50]]
vels = [ [0., 0., 0.] for i in range(4) ]
frcs = [ [0., 0., 0.] for i in range(4) ]
s.add_atoms(symbols, poss, vels, frcs)
return s
def make_bcc111(latconst=1.0):
"""
Make a cell of bcc structure with z along [111].
"""
s= NAPSystem(specorder=_default_specorder)
#...lattice
a1= np.array([ 1.414, 0.0, 0.0 ])
a2= np.array([ 0.0, 2.449, 0.0 ])
a3= np.array([ 0.0, 0.0, 1.732 ])
s.set_lattice(latconst,a1,a2,a3)
symbol = _default_specorder[0]
poss=[[0.00, 0.00, 0.00],
[0.00, 0.00, 0.50],
[0.00, 0.333, 0.167],
[0.00, 0.333, 0.667],
[0.00, 0.667, 0.333],
[0.00, 0.667, 0.833],
[0.50, 0.167, 0.333],
[0.50, 0.167, 0.833],
[0.50, 0.50, 0.00],
[0.50, 0.50, 0.50],
[0.50, 0.833, 0.167],
[0.50, 0.833, 0.667]]
symbols = [ symbol for i in range(len(poss)) ]
vels = [ [0., 0., 0.] for i in range(len(poss)) ]
frcs = [ [0., 0., 0.] for i in range(len(poss)) ]
s.add_atoms(symbols,poss,vels,frcs)
return s
def make_fcc(latconst=1.0,specorder=None):
"""
Make a cell of fcc structure.
"""
if specorder is None:
specorder = ['Al']
s= NAPSystem(specorder=specorder)
#...lattice
a1= np.array([ 1.0, 0.0, 0.0 ])
a2= np.array([ 0.0, 1.0, 0.0 ])
a3= np.array([ 0.0, 0.0, 1.0 ])
s.set_lattice(latconst,a1,a2,a3)
poss = [[0.00, 0.00, 0.00],
[0.50, 0.50, 0.00],
[0.50, 0.00, 0.50],
[0.00, 0.50, 0.50]]
symbol = specorder[0]
symbols = [ symbol for i in range(len(poss)) ]
vels = [ [0., 0., 0.] for i in range(len(poss)) ]
frcs = [ [0., 0., 0.] for i in range(len(poss)) ]
s.add_atoms(symbols,poss,vels,frcs)
return s
def make_fcc110(latconst=1.0,specorder=None):
"""
Make a cell of fcc structure with z along [110].
"""
if specorder is None:
specorder = ['Al']
s= NAPSystem(specorder=specorder)
#...lattice
a1= np.array([ 1.0, 0.0, 0.0 ])
a2= np.array([ 0.0, 1.414, 0.0 ])
a3= np.array([ 0.0, 0.0, 1.414 ])
s.set_lattice(latconst,a1,a2,a3)
poss = [[0.00, 0.00, 0.00],
[0.00, 0.50, 0.00],
[0.00, 0.00, 0.50],
[0.00, 0.50, 0.50],
[0.50, 0.25, 0.25],
[0.50, 0.25, 0.75],
[0.50, 0.75, 0.25],
[0.50, 0.75, 0.75]]
symbol = specorder[0]
symbols = [ symbol for i in range(len(poss)) ]
vels = [ [0., 0., 0.] for i in range(len(poss)) ]
frcs = [ [0., 0., 0.] for i in range(len(poss)) ]
s.add_atoms(symbols,poss,vels,frcs)
return s
def make_honeycomb(latconst=1.0):
"""
Make a cell of 2D honeycomb structure.
"""
s= NAPSystem(specorder=_default_specorder)
#...lattice
a1= np.array([ 1.0, 0.0, 0.0 ])
a2= np.array([ 0.0, 1.5, 0.0 ])
a3= np.array([ 0.0, 0.0, np.sqrt(3.0) ])
s.set_lattice(latconst,a1,a2,a3)
poss = [[0.00, 0.50, 0.00],
[0.50, 0.50, 1./6],
[0.50, 0.50, 0.50],
[0.00, 0.50, 0.5 +1.0/6] ]
symbol = _default_specorder[0]
symbols = [ symbol for i in range(len(poss)) ]
vels = [ [0., 0., 0.] for i in range(len(poss)) ]
frcs = [ [0., 0., 0.] for i in range(len(poss)) ]
s.add_atoms(symbols,poss,vels,frcs)
return s
def make_diamond(latconst=1.0):
"""
Make a cell of diamond structure.
"""
s= NAPSystem(specorder=_default_specorder)
#...lattice
a1= np.array([ 1.0, 0.0, 0.0 ])
a2= np.array([ 0.0, 1.0, 0.0 ])
a3= np.array([ 0.0, 0.0, 1.0 ])
s.set_lattice(latconst,a1,a2,a3)
poss = [[0.00, 0.00, 0.00],
[0.50, 0.50, 0.00],
[0.50, 0.00, 0.50],
[0.00, 0.50, 0.50],
[0.25, 0.25, 0.25],
[0.75, 0.75, 0.25],
[0.75, 0.25, 0.75],
[0.25, 0.75, 0.75]]
symbol = _default_specorder[0]
symbols = [ symbol for i in range(len(poss)) ]
vels = [ [0., 0., 0.] for i in range(len(poss)) ]
frcs = [ [0., 0., 0.] for i in range(len(poss)) ]
s.add_atoms(symbols,poss,vels,frcs)
return s
def make_hcp(latconst=1.0):
"""
Make a cell of hcp structure.
"""
s= NAPSystem(specorder=_default_specorder)
#...lattice
a1= np.array([ 1.0, 0.0, 0.0 ])
a2= np.array([-0.5, np.sqrt(3.0)/2, 0.0 ])
a3= np.array([ 0.0, 0.0, 1.633 ])
s.set_lattice(latconst,a1,a2,a3)
poss = [[0.00, 0.00, 0.00],
[1.0/3, 2.0/3, 0.50] ]
symbol = _default_specorder[0]
symbols = [ symbol for i in range(len(poss)) ]
vels = [ [0., 0., 0.] for i in range(len(poss)) ]
frcs = [ [0., 0., 0.] for i in range(len(poss)) ]
s.add_atoms(symbols,poss,vels,frcs)
return s
def make_graphene(latconst=2.467,size=(1,1,1)):
"""
Make graphene.
"""
napsys = make_honeycomb(latconst=latconst)
napsys.repeat(*size)
napsys.add_vacuum(2.*latconst, 0.0, 10.*latconst*np.sqrt(3))
return napsys
def make_2D_triangle(latconst=3.8,size=(1,1,1)):
"""
Make 2D triangle lattice on x-z plane.
Note that it is not x-y plane.
"""
specorder = ['Ar']
s = NAPSystem(specorder=specorder)
#...lattice
a1= np.array([ 1.0, 0.0, 0.0 ])
a2= np.array([ 0.0, 10.0, 0.0 ])
a3= np.array([ 0.0, 0.0, np.sqrt(3.0) ])
s.set_lattice(latconst,a1,a2,a3)
poss = [[0.00, 0.50, 0.00],
            [0.50, 0.50, 0.50]]
    symbol = specorder[0]  # 'Ar' from the local specorder, not the module default
symbols = [ symbol for i in range(len(poss)) ]
vels = [ [0., 0., 0.] for i in range(len(poss)) ]
frcs = [ [0., 0., 0.] for i in range(len(poss)) ]
s.add_atoms(symbols,poss,vels,frcs)
s.repeat(*size)
s.add_vacuum(2.*latconst, 0.0, 10.*latconst*np.sqrt(3))
return s
def make_nacl(latconst=1.0,specorder=None):
if specorder is None:
specorder = ['Na','Cl']
if len(specorder) < 2:
specorder = ['Na','Cl']
print('Since len(specorder) < 2, specorder is reset to ',specorder)
s = NAPSystem(specorder=specorder)
#...lattice
a1= np.array([ 1.0, 0.0, 0.0 ])
a2= np.array([ 0.0, 1.0, 0.0 ])
a3= np.array([ 0.0, 0.0, 1.0 ])
s.set_lattice(latconst,a1,a2,a3)
poss = [[0.00, 0.00, 0.00],
[0.50, 0.00, 0.00],
[0.00, 0.50, 0.00],
[0.00, 0.00, 0.50],
[0.50, 0.50, 0.00],
[0.50, 0.00, 0.50],
[0.00, 0.50, 0.50],
[0.50, 0.50, 0.50],]
symbols = ['Na','Cl','Cl','Cl','Na','Na','Na','Cl']
vels = [ [0., 0., 0.] for i in range(len(poss)) ]
frcs = [ [0., 0., 0.] for i in range(len(poss)) ]
s.add_atoms(symbols,poss,vels,frcs)
return s
def make_zincblend(latconst=1.0,specorder=None):
"""
Make a cell of diamond structure.
"""
if specorder is None:
specorder = ['Ga','N']
if len(specorder) < 2:
specorder = ['Ga','N']
print('Since len(specorder) < 2, specorder is reset to ',specorder)
s= NAPSystem(specorder=specorder)
#...lattice
a1= np.array([ 1.0, 0.0, 0.0 ])
a2= np.array([ 0.0, 1.0, 0.0 ])
a3= np.array([ 0.0, 0.0, 1.0 ])
s.set_lattice(latconst,a1,a2,a3)
poss = [[0.00, 0.00, 0.00],
[0.50, 0.50, 0.00],
[0.50, 0.00, 0.50],
[0.00, 0.50, 0.50],
[0.25, 0.25, 0.25],
[0.75, 0.75, 0.25],
[0.75, 0.25, 0.75],
[0.25, 0.75, 0.75]]
symbols = [ specorder[0] if i<4 else specorder[1] for i in range(len(poss)) ]
vels = [ [0., 0., 0.] for i in range(len(poss)) ]
frcs = [ [0., 0., 0.] for i in range(len(poss)) ]
s.add_atoms(symbols,poss,vels,frcs)
return s
def make_wurtzite(latconst=1.0,specorder=None,celltype='conventional'):
"""
Make a cell of wurtzite structure.
- celltype: conventional or primitive
"""
if specorder is None:
specorder = ['Ga','N']
if len(specorder) < 2:
specorder = ['Ga','N']
print('Since len(specorder) < 2, specorder is reset to ',specorder)
s = NAPSystem(specorder=specorder)
if celltype[0] == 'c':
#...conventional cell
a1= np.array([ 1.00, 0.00, 0.00 ])
a2= np.array([ 0.00, np.sqrt(3.0), 0.00 ])
a3= np.array([ 0.00, 0.00, 1.633 ])
s.set_lattice(latconst,a1,a2,a3)
poss = [[0.00, 0.00, 0.00],
[0.50, 0.50, 0.00],
[0.50, 0.5/3, 0.50],
[0.00, 0.5/3+0.5, 0.50],
[0.50, 0.5/3, 0.125],
[0.00, 0.5/3+0.5, 0.125],
[0.00, 0.00, 0.625],
[0.50, 0.50, 0.625],]
symbols = [ specorder[0] if i<4 else specorder[1] for i in range(len(poss)) ]
    elif celltype[0] == 'p':
#...primitive cell
a1= np.array([ 1.0, 0.0, 0.0 ])
a2= np.array([-0.5, np.sqrt(3.0)/2, 0.0 ])
a3= np.array([ 0.0, 0.0, 1.633 ])
s.set_lattice(latconst,a1,a2,a3)
poss = [[0.00, 0.00, 0.00],
[2.0/3, 1.0/3, 0.125],
[2.0/3, 1.0/3, 0.50],
[0.00, 0.00, 0.625],]
symbols = [ specorder[0] if i<2 else specorder[1] for i in range(len(poss)) ]
vels = [ [0., 0., 0.] for i in range(len(poss)) ]
frcs = [ [0., 0., 0.] for i in range(len(poss)) ]
s.add_atoms(symbols,poss,vels,frcs)
return s
#=======================================================================
if __name__ == "__main__":
args= docopt(__doc__)
# nx= int(args['--nx'])
# ny= int(args['--ny'])
# nz= int(args['--nz'])
nx,ny,nz = [ int(x) for x in args['--size'].split(',') ]
latconst= float(args['--lattice-constant'])
ofname= args['-o']
orient = [ int(x) for x in args['--orientation'].split(',')]
celltype = args['--celltype']
specorder = [ x for x in args['--specorder'].split(',')]
    if specorder[0] == 'None':  # docopt leaves the default as the literal string "None"
specorder = None
struct= None
if args['sc']:
struct= make_sc(latconst)
elif args['bcc']:
if orient == [0,0,1]:
struct= make_bcc(latconst)
elif orient == [1,1,0]:
struct= make_bcc110(latconst)
elif orient == [1,1,1]:
struct= make_bcc111(latconst)
else:
raise ValueError('The orientation is not available: ',orient)
elif args['fcc']:
if orient == [0,0,1]:
struct= make_fcc(latconst)
elif orient == [1,1,0]:
struct= make_fcc110(latconst)
else:
raise ValueError('The orientation is not available: ',orient)
elif args['hcp']:
struct= make_hcp(latconst)
elif args['dia']:
struct= make_diamond(latconst)
elif args['nacl']:
struct = make_nacl(latconst)
elif args['zb']:
struct = make_zincblend(latconst,specorder=specorder)
elif args['wz']:
struct = make_wurtzite(latconst,celltype=celltype,specorder=specorder)
if struct is None:
print("Something wrong: structure is not created...")
sys.exit()
struct.repeat(nx,ny,nz)
struct.write(ofname)
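# Example invocation (assumed; the authoritative usage string is the docopt
# docstring passed to docopt(__doc__) above, which may spell options slightly
# differently):
#
#   python <this_script>.py wz --size=2,2,2 --lattice-constant=3.19 \
#          --specorder=Ga,N --celltype=conventional -o POSCAR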
| [
"[email protected]"
] | |
75ae57ff4ee114e75afc618e0e52b41489e7628d | f6e2744ba52b0655d82071edc741d36cb840e1ff | /Lab1/DataGenerator/CsvParser.py | 50f2e1699047b5e143d5ee2e059879b9de1ecd62 | [] | no_license | PinarTurkyilmaz/SDM-Lab1 | abb77192cf4c85b1751263e1939434cb2de8d3c2 | 0df7b8d2c47d588e15633a684c857ddda5ebefa7 | refs/heads/master | 2020-04-28T18:15:46.867738 | 2019-03-13T18:28:40 | 2019-03-13T18:28:40 | 175,473,072 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 444 | py | import csv
with open('Populate.csv', 'r') as csv_file:
csv_reader = csv.DictReader(csv_file)
    with open('Cititation.csv', 'w', newline='') as new_file:
        fieldnames = ['title']
        # extrasaction='ignore' keeps only the 'title' column even if the
        # source file has more columns than just 'author' and 'title'
        csv_writer = csv.DictWriter(new_file, fieldnames, extrasaction='ignore')
csv_writer.writeheader()
for line in csv_reader:
del line['author'] #remove this column
csv_writer.writerow(line)
print(line)
#upload title to another csv
| [
"[email protected]"
] | |
ec1ff974949d84acfe1277e786b564a0462c7d31 | c20a7a651e63c1e7b1c5e6b5c65c8150898bbaf2 | OJ/LeetCode/74. Search a 2D Matrix.py | bafc00d5a9be337a73709ca26357f10eba16536e | [] | no_license | Nobody0321/MyCodes | 08dbc878ae1badf82afaf0c9fc608b70dfce5cea | b60e2b7a8f2ad604c7d28b21498991da60066dc3 | refs/heads/master | 2023-08-19T14:34:23.169792 | 2023-08-15T15:50:24 | 2023-08-15T15:50:24 | 175,770,050 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 712 | py | # This problem also appears in "剑指offer" (Coding Interviews)
# Each row increases from left to right and each column increases from top to
# bottom, so start from the bottom-left corner and narrow down, much like a
# binary search.
class Solution:
def searchMatrix(self, matrix, target):
        if not matrix or not matrix[0]:
            return False
        height, width = len(matrix), len(matrix[0])
        i, j = height - 1, 0          # start at the bottom-left corner
        while i >= 0 and j <= width - 1:
            if matrix[i][j] < target:
                j += 1                # too small: move right
            elif matrix[i][j] > target:
                i -= 1                # too large: move up
            else:
                return True
        return False
if __name__ == "__main__":
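    # Extra illustrative checks (test values assumed, not from the original):
    print(Solution().searchMatrix([[1, 3, 5, 7],
                                   [10, 11, 16, 20],
                                   [23, 30, 34, 60]], 3))   # expected: True
    print(Solution().searchMatrix([], 0))                    # expected: False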
print(Solution().searchMatrix([[1]], 2)) | [
"[email protected]"
] | |
07d0b39146ea6065f2b0c58807ac66f5d96a0bdf | 568af6a97c4dab00c8ff2a3c9e608fc8f912e2c1 | /207.py | 6339a2b201d487598fa68a3aeab974e43718e875 | [] | no_license | Deepakdk7/PlayerSet21 | b619e51ce2b601f3b5b46d188417a9a5a377a4ad | c01a6d69108cc8ce099b09c5d025e4828b6dab71 | refs/heads/master | 2020-06-08T16:22:08.421915 | 2019-06-25T16:19:15 | 2019-06-25T16:19:15 | 193,262,306 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 58 | py | ax=list(map(int,input().split()))
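# Prints floor(a*b/2) for the two integers read above; presumably the area of
# a right triangle with legs a and b (an assumption, as the task text is not given).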
print((ax[0]*ax[1])//2)
| [
"[email protected]"
] | |
7035acb8a40194eba9b97b8e70803602562936bc | a13ffbab0f24047e43f003131b06052c4a29adff | /Chapter_07/shop/admin.py | c416b49b87b43f83b8f9e5857d303c64c142eaee | [
"Unlicense"
] | permissive | codingEzio/code_py_book_django2_by_example | 58a02b7b8e6a549804834d28488412243df94ea2 | d215d0c87a557685824286822186966b06fa8d59 | refs/heads/master | 2020-04-09T18:24:58.821192 | 2019-01-24T08:49:26 | 2019-01-24T08:49:26 | 160,511,430 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 964 | py | from django.contrib import admin
from parler.admin import TranslatableAdmin
from .models import Category, Product
@admin.register(Category)
class CategoryAdmin(TranslatableAdmin):
"""
For the "translated_fields", you need to
use `get_prepopulated_fields` instead of `prepopulated_fields`
Since it provides the same functionality,
there's no difference actually, just a different way to get it :P
"""
list_display = ['name', 'slug']
def get_prepopulated_fields(self, request, obj=None):
return { 'slug': ('name',) }
@admin.register(Product)
class ProductAdmin(TranslatableAdmin):
list_display = ['name', 'slug', 'price',
'available', 'created', 'updated']
list_filter = ['available', 'created', 'updated']
list_editable = ['price', 'available']
def get_prepopulated_fields(self, request, obj=None):
return { 'slug': ('name',) } | [
"[email protected]"
] | |
ea590224b5586f898dbc17f6cb755bd3676f56a1 | ee01a1f16e63483ebfd304b838f015f9f2d168b7 | /streamtools/web/main.py | 0188e48a1b050926b6b4c97b32e5a60899ae5eef | [
"MIT"
] | permissive | mariocesar/stream-tools | 284aa494676d27204d71da3a0bdb9a196bcab861 | 384c10d364d8b40b9dfa15eeebed15da6f90ed31 | refs/heads/master | 2022-12-22T14:52:01.033784 | 2021-07-19T01:32:17 | 2021-07-19T01:32:17 | 250,092,686 | 1 | 1 | MIT | 2022-12-12T08:22:55 | 2020-03-25T21:14:04 | Python | UTF-8 | Python | false | false | 867 | py | import asyncio
from asyncio import Queue
from aiohttp import web
from streamtools.relay.main import fetch_events
routes = web.RouteTableDef()
@routes.get("/ws/")
async def websocket_handler(request):
ws = web.WebSocketResponse()
await ws.prepare(request)
task = asyncio.create_task(fetch_events(request.app.queue))
try:
while True:
message = await request.app.queue.get()
print(message)
await ws.send_json(message.asdata())
finally:
await ws.close()
task.cancel()
return ws
@routes.get("/")
async def landing(request):
return web.Response(text="Hello!")
def get_application():
app = web.Application()
    app.queue = Queue()   # a freshly created Queue() is already empty
app.add_routes(routes)
return app
if __name__ == "__main__":
web.run_app(get_application(), port=3000)
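# Minimal client sketch (assumed; not part of this module): consume the /ws/
# stream with aiohttp and print each JSON event as it arrives.
#
#   import asyncio
#   import aiohttp
#
#   async def consume():
#       async with aiohttp.ClientSession() as session:
#           async with session.ws_connect("http://localhost:3000/ws/") as ws:
#               async for msg in ws:
#                   print(msg.json())
#
#   asyncio.run(consume())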
| [
"[email protected]"
] | |
636e89e816adde63d47a7c4d4e3d83e62438d8d6 | c3dc08fe8319c9d71f10473d80b055ac8132530e | /challenge-133/paulo-custodio/python/ch-1.py | 0c9b9ec563b3fa36246755408c816a2874d78618 | [] | no_license | southpawgeek/perlweeklychallenge-club | d4b70d9d8e4314c4dfc4cf7a60ddf457bcaa7a1e | 63fb76188e132564e50feefd2d9d5b8491568948 | refs/heads/master | 2023-01-08T19:43:56.982828 | 2022-12-26T07:13:05 | 2022-12-26T07:13:05 | 241,471,631 | 1 | 0 | null | 2020-02-18T21:30:34 | 2020-02-18T21:30:33 | null | UTF-8 | Python | false | false | 760 | py | #!/usr/bin/env python3
# Challenge 133
#
# TASK #1 > Integer Square Root
# Submitted by: Mohammad S Anwar
# You are given a positive integer $N.
#
# Write a script to calculate the integer square root of the given number.
#
# Please avoid using built-in function. Find out more about it here.
#
# Examples
# Input: $N = 10
# Output: 3
#
# Input: $N = 27
# Output: 5
#
# Input: $N = 85
# Output: 9
#
# Input: $N = 101
# Output: 10
# solution: https://en.wikipedia.org/wiki/Integer_square_root
import sys
def isqrt(n):
    x0 = n >> 1   # initial estimate
    if x0 == 0:   # n is 0 or 1
        return n

    # Newton's (Heron's) iteration, kept in integer arithmetic so the result
    # stays exact even for very large n
    x1 = (x0 + n // x0) >> 1
    while x1 < x0:
        x0 = x1
        x1 = (x0 + n // x0) >> 1
    return x0
n = int(sys.argv[1])
print(isqrt(n))
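# Sanity check against the examples in the task description:
#   [isqrt(x) for x in (10, 27, 85, 101)]  ->  [3, 5, 9, 10]
# (On Python 3.8+ the results can also be compared with math.isqrt.)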
| [
"[email protected]"
] |