| column | dtype | values |
|---|---|---|
| blob_id | stringlengths | 40 to 40 |
| directory_id | stringlengths | 40 to 40 |
| path | stringlengths | 3 to 616 |
| content_id | stringlengths | 40 to 40 |
| detected_licenses | sequencelengths | 0 to 112 |
| license_type | stringclasses | 2 values |
| repo_name | stringlengths | 5 to 115 |
| snapshot_id | stringlengths | 40 to 40 |
| revision_id | stringlengths | 40 to 40 |
| branch_name | stringclasses | 777 values |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 to 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 to 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 to 2023-09-06 01:08:06 |
| github_id | int64 | 4.92k to 681M (nullable) |
| star_events_count | int64 | 0 to 209k |
| fork_events_count | int64 | 0 to 110k |
| gha_license_id | stringclasses | 22 values |
| gha_event_created_at | timestamp[us] | 2012-06-04 01:52:49 to 2023-09-14 21:59:50 (nullable) |
| gha_created_at | timestamp[us] | 2008-05-22 07:58:19 to 2023-08-21 12:35:19 (nullable) |
| gha_language | stringclasses | 149 values |
| src_encoding | stringclasses | 26 values |
| language | stringclasses | 1 value |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 to 10.2M |
| extension | stringclasses | 188 values |
| content | stringlengths | 3 to 10.2M |
| authors | sequencelengths | 1 to 1 |
| author_id | stringlengths | 1 to 132 |

blob_id | directory_id | path | content_id | detected_licenses | license_type | repo_name | snapshot_id | revision_id | branch_name | visit_date | revision_date | committer_date | github_id | star_events_count | fork_events_count | gha_license_id | gha_event_created_at | gha_created_at | gha_language | src_encoding | language | is_vendor | is_generated | length_bytes | extension | content | authors | author_id |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
a6fb2197fbf80b1c53e59f37f84370f5749ed5e1 | b5dd8d1b798c94731a84c02d98aafb9147200a85 | /sequence_labeling/SLBaselineSYNLinear/data/Instance.py | 6ed47e34116c6f3ff8176de9230c270b70bc070a | [] | no_license | zhangmeishan/DepSAWR | 1ae348dd04ec5e46bc5a75c8972b4bc4008528fe | 104f44fd962a42fdee9b1a9332997d35e8461ff4 | refs/heads/master | 2021-07-09T20:56:56.897774 | 2020-10-27T05:41:08 | 2020-10-27T05:41:08 | 206,974,879 | 15 | 3 | null | null | null | null | UTF-8 | Python | false | false | 8,126 | py | class Word:
def __init__(self, id, form, label):
self.id = id
self.org_form = form
self.form = form.lower()
self.label = label
# 1 indicates word, 0 indicates syn
self.wtype = 0 if label == "###" else 1
def __str__(self):
values = [str(self.id), self.org_form, self.label]
return '\t'.join(values)
class Sentence:
def __init__(self, words):
self.words = list(words)
self.length = len(self.words)
self.key_head = -1
self.key_start = -1
self.key_end = -1
self.key_label = ""
self.span = False
self.wkey_head = -1
self.wkey_start = -1
self.wkey_end = -1
self.wlength, self.forms, self.labels = 0, [], []
self.wposis, self.r_wposis = [], []
for idx in range(self.length):
if words[idx].wtype == 1:
self.wlength = self.wlength + 1
self.forms.append(words[idx].org_form)
self.labels.append(words[idx].label)
num_words = len(self.wposis)
self.r_wposis.append(num_words)
self.wposis.append(idx)
else:
self.r_wposis.append(-1)
self.sentence = ' '.join(self.forms)
for idx in range(self.length):
if words[idx].label.endswith("-*"):
self.key_head = idx
self.wkey_head = self.r_wposis[idx]
self.key_label = words[idx].label[2:-2]
break
if self.key_head != -1:
self.span = True
for idx in range(self.length):
cur_label = words[idx].label
if cur_label.startswith("B-"+self.key_label) \
or cur_label.startswith("S-"+self.key_label):
self.key_start = idx
self.wkey_start = self.r_wposis[idx]
if cur_label.startswith("E-"+self.key_label) \
or cur_label.startswith("S-"+self.key_label):
self.key_end = idx
self.wkey_end = self.r_wposis[idx]
else:
self.key_start, self.wkey_start = self.length, self.wlength
self.key_end, self.wkey_end = -1, -1
def label_to_entity(labels):
length = len(labels)
entities = set()
idx = 0
while idx < length:
if labels[idx] == "O":
idx = idx + 1
elif labels[idx].startswith("B-"):
label = labels[idx][2:]
predict = False
if label.endswith("-*"):
label = label[0:-2]
predict = True
next_idx = idx + 1
end_idx = idx
while next_idx < length:
if labels[next_idx] == "O" or labels[next_idx].startswith("B-") \
or labels[next_idx].startswith("S-"):
break
next_label = labels[next_idx][2:]
if next_label.endswith("-*"):
next_label = next_label[0:-2]
predict = True
if next_label != label:
break
end_idx = next_idx
next_idx = next_idx + 1
if end_idx == idx:
new_label = "S-" + labels[idx][2:]
print("Change %s to %s" % (labels[idx], new_label))
labels[idx] = new_label
if not predict:
entities.add("[%d,%d]%s"%(idx, end_idx, label))
idx = end_idx + 1
elif labels[idx].startswith("S-"):
label = labels[idx][2:]
predict = False
if label.endswith("-*"):
label = label[0:-2]
predict = True
if not predict:
entities.add("[%d,%d]%s"%(idx, idx, label))
idx = idx + 1
elif labels[idx].startswith("M-"):
new_label = "B-" + labels[idx][2:]
print("Change %s to %s" % (labels[idx], new_label))
labels[idx] = new_label
else:
new_label = "S-" + labels[idx][2:]
print("Change %s to %s" % (labels[idx], new_label))
labels[idx] = new_label
return entities
def normalize_labels(labels):
length = len(labels)
change = 0
normed_labels = []
for idx in range(length):
normed_labels.append(labels[idx])
idx = 0
while idx < length:
if labels[idx] == "O":
idx = idx + 1
elif labels[idx].startswith("B-"):
label = labels[idx][2:]
if label.endswith("-*"):
label = label[0:-2]
next_idx = idx + 1
end_idx = idx
while next_idx < length:
if labels[next_idx] == "O" or labels[next_idx].startswith("B-") \
or labels[next_idx].startswith("S-"):
break
next_label = labels[next_idx][2:]
if next_label.endswith("-*"):
next_label = next_label[0:-2]
if next_label != label:
break
end_idx = next_idx
next_idx = next_idx + 1
if end_idx == idx:
new_label = "S-" + labels[idx][2:]
# print("Change %s to %s" % (labels[idx], new_label))
labels[idx] = new_label
normed_labels[idx] = new_label
change = change + 1
idx = end_idx + 1
elif labels[idx].startswith("S-"):
idx = idx + 1
elif labels[idx].startswith("M-"):
new_label = "B-" + labels[idx][2:]
# print("Change %s to %s" % (labels[idx], new_label))
normed_labels[idx] = new_label
labels[idx] = new_label
change = change + 1
else:
new_label = "S-" + labels[idx][2:]
# print("Change %s to %s" % (labels[idx], new_label))
normed_labels[idx] = new_label
labels[idx] = new_label
change = change + 1
return normed_labels, change
def evalInstance(gold, predict):
glength, plength = gold.length, predict.length
if glength != plength:
raise Exception('gold length does not match predict length.')
gold_entity_num, predict_entity_num, correct_entity_num = 0, 0, 0
goldlabels, predictlabels = gold.labels, predict.labels
if gold.span:
gold_entities = label_to_entity(goldlabels)
predict_entities = label_to_entity(predictlabels)
gold_entity_num, predict_entity_num = len(gold_entities), len(predict_entities)
for one_entity in gold_entities:
if one_entity in predict_entities:
correct_entity_num = correct_entity_num + 1
else:
gold_entity_num, predict_entity_num = len(goldlabels), len(predictlabels)
for idx in range(glength):
if goldlabels[idx] == predictlabels[idx]:
correct_entity_num = correct_entity_num + 1
return gold_entity_num, predict_entity_num, correct_entity_num
def readInstance(file):
min_count = 1
total = 0
words = []
for line in file:
tok = line.strip().split('\t')
if not tok or line.strip() == '' or line.strip().startswith('#'):
if len(words) > min_count:
total += 1
yield Sentence(words)
words = []
elif len(tok) == 3:
try:
words.append(Word(int(tok[0]), tok[1], tok[2]))
except Exception:
pass
else:
pass
if len(words) > min_count:
total += 1
yield Sentence(words)
print("Total num: ", total)
def writeInstance(filename, sentences):
with open(filename, 'w') as file:
for sentence in sentences:
for entry in sentence.words:
file.write(str(entry) + '\n')
file.write('\n')
def printInstance(output, sentence):
for entry in sentence.words:
output.write(str(entry) + '\n')
output.write('\n') | [
"[email protected]"
] | |
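A minimal usage sketch for the Instance.py content above, assuming gold and predicted files in the tab-separated id/form/label format that readInstance expects; the file names and the import path are illustrative:

from Instance import readInstance, evalInstance

with open('gold.conll') as f:
    gold_sentences = list(readInstance(f))
with open('predict.conll') as f:
    pred_sentences = list(readInstance(f))

# accumulate micro precision/recall/F1 over all sentence pairs
gold_num, pred_num, correct_num = 0, 0, 0
for g, p in zip(gold_sentences, pred_sentences):
    g_n, p_n, c_n = evalInstance(g, p)
    gold_num += g_n
    pred_num += p_n
    correct_num += c_n
precision = correct_num / pred_num if pred_num else 0.0
recall = correct_num / gold_num if gold_num else 0.0
f1 = 2 * precision * recall / (precision + recall) if precision + recall else 0.0
print("P=%.4f R=%.4f F=%.4f" % (precision, recall, f1))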
4d59b6d7525d2424cccd3c6215409bdfb7e78f33 | 171a89102edf10901e18a2c0f41c3313608d2324 | /src/rogerthat/bizz/job/unschedule_service_api_callback_records.py | 0a04dd74317213aea6716a58732b45ec57e5498c | [
"Apache-2.0"
] | permissive | gitter-badger/rogerthat-backend | 7e9c12cdd236ef59c76a62ac644fcd0a7a712baf | ab92dc9334c24d1b166972b55f1c3a88abe2f00b | refs/heads/master | 2021-01-18T06:08:11.435313 | 2016-05-11T08:50:20 | 2016-05-11T08:50:20 | 58,615,985 | 0 | 0 | null | 2016-05-12T06:54:07 | 2016-05-12T06:54:07 | null | UTF-8 | Python | false | false | 1,170 | py | # -*- coding: utf-8 -*-
# Copyright 2016 Mobicage NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @@license_version:1.1@@
from rogerthat.dal.service import get_service_api_callback_records_query
from google.appengine.ext import db, deferred
def run(service_user, cursor=None):
query = get_service_api_callback_records_query(service_user)
query.with_cursor(cursor)
records = query.fetch(100)
put = list()
for rec in records:
rec.timestamp = 0 - abs(rec.timestamp)
put.append(rec)
db.put(put)
if len(records) > 0:
return deferred.defer(run, service_user, query.cursor(), _transactional=db.is_in_transaction())
| [
"[email protected]"
] | |
7e72fb11137d1cc82500a43c590445b6d4222f54 | 11334e46d3575968de5062c7b0e8578af228265b | /Projects/subsumption_lewis/test_escape_behavior.py | 4a60ca86e27a79e1aadad1e7cc150c9a55c47a09 | [] | no_license | slowrunner/Carl | 99262f16eaf6d53423778448dee5e5186c2aaa1e | 1a3cfb16701b9a3798cd950e653506774c2df25e | refs/heads/master | 2023-06-08T05:55:55.338828 | 2023-06-04T02:39:18 | 2023-06-04T02:39:18 | 145,750,624 | 19 | 2 | null | 2023-06-04T02:39:20 | 2018-08-22T18:59:34 | Roff | UTF-8 | Python | false | false | 1,653 | py | #!/usr/bin/env python3
"""
FILE: test_escape_behavior.py
PURPOSE: Test an subsumption architecture escape behavior
REFERENCES:
"Mobile Robots: Inspiration To Implementation", Jones, Flynn, Seiger p318
"""
import subsumption
import time
import logging
subsumption.inhibit_scan = False
subsumption.inhibit_drive = False
subsumption.TALK = False
def stop():
subsumption.mot_trans = 0
subsumption.mot_rot = 0
time.sleep(3)
def test_escape_behavior():
logging.info("==== TEST ESCAPE BEHAVIOR ====")
subsumption.say("Escape Behavior Test Will Begin In 5 seconds")
time.sleep(5)
try:
while True:
time.sleep(1.0)
except KeyboardInterrupt:
logging.info("==== ESCAPE BEHAVIOR TEST COMPLETE ====")
subsumption.say("Escape Behavior Test Complete")
# MAIN
def main():
logging.basicConfig(level=logging.INFO, format='%(asctime)s %(funcName)s: %(message)s')
logging.info("==== TEST SUBSUMPTION ====")
subsumption.say("Test subsumption.")
try:
subsumption.setup()
# while True:
# do main things
test_escape_behavior()
except KeyboardInterrupt:
print("")
msg="Ctrl-C Detected in Main"
logging.info(msg)
subsumption.say(msg)
except Exception as e:
logging.info("Handling main exception: %s",e)
finally:
subsumption.teardown()
logging.info("==== Subsumption Test Done ====")
subsumption.say("Subsumption test done")
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
c42ee65059fd84127b788c9f61f22f6091572c64 | 353def93fa77384ee3a5e3de98cfed318c480634 | /.history/week01/hoework01/gettop10frommaoyam01_20200625172155.py | 6673c7bd655b35c14f885d7566123eee9d12b9b9 | [] | no_license | ydbB/Python001-class01 | d680abc3ea1ccaeb610751e3488421417d381156 | ad80037ccfc68d39125fa94d2747ab7394ac1be8 | refs/heads/master | 2022-11-25T11:27:45.077139 | 2020-07-19T12:35:12 | 2020-07-19T12:35:12 | 272,783,233 | 0 | 0 | null | 2020-06-16T18:28:15 | 2020-06-16T18:28:15 | null | UTF-8 | Python | false | false | 1,024 | py | # Use the requests and bs4 libraries to crawl the title, genre and release date of the Maoyan top-10 movies, and save them to a CSV file in the utf-8 character set
import requests
maoyanUrl = "https://maoyan.com/films?showType=3";
user_agent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36'
header = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36',
'Accept': "*/*",
'Accept-Encoding': 'gazip, deflate, br',
'Accept-Language': 'en-AU,en;q=0.9,zh-CN;q=0.8,zh;q=0.7,la;q=0.6',
'Content-Type': 'text/plain',
'Connection': 'keep-alive',
# 'Host': 'wreport1.meituan.net',
'Origin': 'https://maoyan.com',
'Referer': 'https://maoyan.com/films?showType=3',
'Sec-Fetch-Dest': 'empty',
'Sec-Fetch-Mode': 'cors',
'Sec-Fetch-Site': 'cross-site',
}
response = requests.get(maoyanUrl,headers=header)
response.encoding = 'utf-8'
print(response.text) | [
"[email protected]"
] | |
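The snapshot above stops after fetching and printing the page; a sketch of the remaining steps its header comment describes, continuing from the `response` object the snapshot creates. The BeautifulSoup selectors are assumptions about the page layout and may need adjusting:

from bs4 import BeautifulSoup
import csv

soup = BeautifulSoup(response.text, 'html.parser')
rows = []
for item in soup.find_all('div', class_='movie-item')[:10]:
    title = item.find('a')
    infos = [p.get_text(strip=True) for p in item.find_all('p')]
    rows.append([title.get_text(strip=True) if title else ''] + infos[:2])

with open('maoyan_top10.csv', 'w', encoding='utf-8', newline='') as f:
    writer = csv.writer(f)
    writer.writerow(['name', 'type', 'release_date'])
    writer.writerows(rows)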
7b95fcc33b3aa2249ed1f27138745f475927c2d6 | cf14b6ee602bff94d3fc2d7e712b06458540eed7 | /gs82/gs82/urls.py | 0aecc6d4eeb66d7fa733fff9c8bcaddef8e0841a | [] | no_license | ManishShah120/Learning-Django | 8b0d7bfe7e7c13dcb71bb3d0dcdf3ebe7c36db27 | 8fe70723d18884e103359c745fb0de5498b8d594 | refs/heads/master | 2023-03-29T09:49:47.694123 | 2021-03-28T16:04:34 | 2021-03-28T16:04:34 | 328,925,596 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 307 | py | from django.contrib import admin
from django.urls import path
from enroll import views
from django.views.decorators.cache import cache_page
urlpatterns = [
path('admin/', admin.site.urls),
path('', cache_page(30)(views.home)),
path('home/', views.home),
path('contact/', views.contact),
]
| [
"[email protected]"
] | |
22a5082162b8e3e3900e02a08ce7e3931b946ac7 | f6faeb43b394bebb2c66b270ece4a5422cece0f6 | /Input.py | 0a45338c9ddf096ffbf6f1a13214ef459aedce03 | [] | no_license | Khun-Cho-Lwin/Programming-Basic-with-Python | a57b6445d0fdfca23017aa691208899935fcf5e7 | 1e8cc924143771b7737bb54ad8f04ae5b88c1e81 | refs/heads/master | 2022-11-13T05:56:11.881552 | 2020-06-29T21:58:29 | 2020-06-29T21:58:29 | 267,246,983 | 0 | 4 | null | 2020-06-29T08:00:57 | 2020-05-27T07:10:11 | Python | UTF-8 | Python | false | false | 166 | py | input1 = int(input("Please enter first number:"))
input2 = int(input("Please enter second number:"))
result = input1 + input2
print(input1,"+",input2,"=",result)
| [
"[email protected]"
] | |
01a356f1fac842936aef6aadf37335b90cd1c87b | e10a6d844a286db26ef56469e31dc8488a8c6f0e | /enas_lm/src/controller.py | a737235c5997a2e2944bca765b591e48869fbeda | [
"Apache-2.0",
"CC-BY-4.0"
] | permissive | Jimmy-INL/google-research | 54ad5551f97977f01297abddbfc8a99a7900b791 | 5573d9c5822f4e866b6692769963ae819cb3f10d | refs/heads/master | 2023-04-07T19:43:54.483068 | 2023-03-24T16:27:28 | 2023-03-24T16:32:17 | 282,682,170 | 1 | 0 | Apache-2.0 | 2020-07-26T15:50:32 | 2020-07-26T15:50:31 | null | UTF-8 | Python | false | false | 9,675 | py | # coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""ENAS controller."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow
tf = tensorflow.compat.v1
flags = tf.app.flags
FLAGS = flags.FLAGS
flags.DEFINE_float('controller_baseline_dec', 0.999, '')
flags.DEFINE_float('controller_entropy_weight', 1e-5, '')
flags.DEFINE_float('controller_temperature', 5., '')
flags.DEFINE_float('controller_tanh_constant', 2.25, '')
flags.DEFINE_float('controller_learning_rate', 5e-5, '')
flags.DEFINE_integer('controller_num_layers', 9, '')
REWARD_CONSTANT = 80.0
def _build_train_op(loss, tf_vars, learning_rate, train_step, num_aggregate):
"""Build training ops from `loss` tensor."""
optim = tf.train.AdamOptimizer(learning_rate)
optim = tf.train.SyncReplicasOptimizer(
optim, replicas_to_aggregate=num_aggregate, total_num_replicas=1)
grads = tf.gradients(loss, tf_vars)
train_op = optim.apply_gradients(zip(grads, tf_vars), global_step=train_step)
grad_norm = tf.global_norm(grads)
return train_op, optim, grad_norm
def _lstm(x, prev_c, prev_h, w_lstm):
"""LSTM subgraph."""
ifog = tf.matmul(tf.concat([x, prev_h], axis=1), w_lstm)
i, f, o, g = tf.split(ifog, 4, axis=1)
i = tf.sigmoid(i)
f = tf.sigmoid(f)
o = tf.sigmoid(o)
g = tf.tanh(g)
next_c = i * g + f * prev_c
next_h = o * tf.tanh(next_c)
return next_c, next_h
def _set_default_params(params):
"""Add controller's default params."""
params.add_hparam('controller_hidden_size', 64)
params.add_hparam('controller_num_layers', FLAGS.controller_num_layers)
params.add_hparam('controller_num_functions', 4) # tanh, relu, sigmoid, iden
params.add_hparam('controller_baseline_dec', FLAGS.controller_baseline_dec)
params.add_hparam('controller_entropy_weight',
FLAGS.controller_entropy_weight)
params.add_hparam('controller_temperature', FLAGS.controller_temperature)
params.add_hparam('controller_tanh_constant', FLAGS.controller_tanh_constant)
params.add_hparam('controller_learning_rate', FLAGS.controller_learning_rate)
params.add_hparam('controller_num_aggregate', 10)
params.add_hparam('controller_num_train_steps', 25)
return params
class Controller(object):
"""ENAS controller. Samples architectures and creates training ops."""
def __init__(self, params, name='controller'):
print('-' * 80)
print('Create a controller')
self.params = _set_default_params(params)
self.name = name
self._build_params()
self._build_sampler()
def _build_params(self):
"""Create TF parameters."""
initializer = tf.random_uniform_initializer(minval=-0.01, maxval=0.01)
num_funcs = self.params.controller_num_functions
hidden_size = self.params.controller_hidden_size
with tf.variable_scope(self.name, initializer=initializer):
with tf.variable_scope('lstm'):
self.w_lstm = tf.get_variable('w', [2 * hidden_size, 4 * hidden_size])
with tf.variable_scope('embedding'):
self.g_emb = tf.get_variable('g', [1, hidden_size])
self.w_emb = tf.get_variable('w', [num_funcs, hidden_size])
with tf.variable_scope('attention'):
self.attn_w_1 = tf.get_variable('w_1', [hidden_size, hidden_size])
self.attn_w_2 = tf.get_variable('w_2', [hidden_size, hidden_size])
self.attn_v = tf.get_variable('v', [hidden_size, 1])
num_params = sum([np.prod(v.shape) for v in tf.trainable_variables()
if v.name.startswith(self.name)])
print('Controller has {0} params'.format(num_params))
def _build_sampler(self):
"""Build the sampler ops and the log_prob ops."""
hidden_size = self.params.controller_hidden_size
num_layers = self.params.controller_num_layers
arc_seq = []
sample_log_probs = []
sample_entropy = []
all_h = [tf.zeros([1, hidden_size], dtype=tf.float32)]
all_h_w = [tf.zeros([1, hidden_size], dtype=tf.float32)]
# sampler ops
inputs = self.g_emb
prev_c = tf.zeros([1, hidden_size], dtype=tf.float32)
prev_h = tf.zeros([1, hidden_size], dtype=tf.float32)
inputs = self.g_emb
for layer_id in range(1, num_layers+1):
next_c, next_h = _lstm(inputs, prev_c, prev_h, self.w_lstm)
prev_c, prev_h = next_c, next_h
all_h.append(next_h)
all_h_w.append(tf.matmul(next_h, self.attn_w_1))
query = tf.matmul(next_h, self.attn_w_2)
query = query + tf.concat(all_h_w[:-1], axis=0)
query = tf.tanh(query)
logits = tf.matmul(query, self.attn_v)
logits = tf.reshape(logits, [1, layer_id])
if self.params.controller_temperature:
logits /= self.params.controller_temperature
if self.params.controller_tanh_constant:
logits = self.params.controller_tanh_constant * tf.tanh(logits)
diff = tf.to_float(layer_id - tf.range(0, layer_id)) ** 2
logits -= tf.reshape(diff, [1, layer_id]) / 6.0
skip_index = tf.multinomial(logits, 1)
skip_index = tf.to_int32(skip_index)
skip_index = tf.reshape(skip_index, [1])
arc_seq.append(skip_index)
log_prob = tf.nn.sparse_softmax_cross_entropy_with_logits(
logits=logits, labels=skip_index)
sample_log_probs.append(log_prob)
entropy = log_prob * tf.exp(-log_prob)
sample_entropy.append(tf.stop_gradient(entropy))
inputs = tf.nn.embedding_lookup(
tf.concat(all_h[:-1], axis=0), skip_index)
inputs /= (0.1 + tf.to_float(layer_id - skip_index))
next_c, next_h = _lstm(inputs, prev_c, prev_h, self.w_lstm)
prev_c, prev_h = next_c, next_h
logits = tf.matmul(next_h, self.w_emb, transpose_b=True)
if self.params.controller_temperature:
logits /= self.params.controller_temperature
if self.params.controller_tanh_constant:
logits = self.params.controller_tanh_constant * tf.tanh(logits)
func = tf.multinomial(logits, 1)
func = tf.to_int32(func)
func = tf.reshape(func, [1])
arc_seq.append(func)
log_prob = tf.nn.sparse_softmax_cross_entropy_with_logits(
logits=logits, labels=func)
sample_log_probs.append(log_prob)
entropy = log_prob * tf.exp(-log_prob)
sample_entropy.append(tf.stop_gradient(entropy))
inputs = tf.nn.embedding_lookup(self.w_emb, func)
arc_seq = tf.concat(arc_seq, axis=0)
self.sample_arc = arc_seq
self.sample_log_probs = tf.concat(sample_log_probs, axis=0)
self.ppl = tf.exp(tf.reduce_mean(self.sample_log_probs))
sample_entropy = tf.concat(sample_entropy, axis=0)
self.sample_entropy = tf.reduce_sum(sample_entropy)
self.all_h = all_h
def build_trainer(self, child_model):
"""Build the train ops by connecting Controller with a Child."""
# actor
self.valid_loss = tf.to_float(child_model.rl_loss)
self.valid_loss = tf.stop_gradient(self.valid_loss)
self.valid_ppl = tf.exp(self.valid_loss)
self.reward = REWARD_CONSTANT / self.valid_ppl
if self.params.controller_entropy_weight:
self.reward += self.params.controller_entropy_weight * self.sample_entropy
# or baseline
self.sample_log_probs = tf.reduce_sum(self.sample_log_probs)
self.baseline = tf.Variable(0.0, dtype=tf.float32, trainable=False)
baseline_update = tf.assign_sub(self.baseline,
((1 - self.params.controller_baseline_dec) *
(self.baseline - self.reward)))
with tf.control_dependencies([baseline_update]):
self.reward = tf.identity(self.reward)
self.loss = self.sample_log_probs * (self.reward - self.baseline)
self.train_step = tf.Variable(
0, dtype=tf.int32, trainable=False, name='train_step')
tf_vars = [var for var in tf.trainable_variables()
if var.name.startswith(self.name)]
self.train_op, self.optimizer, self.grad_norm = _build_train_op(
loss=self.loss,
tf_vars=tf_vars,
learning_rate=self.params.controller_learning_rate,
train_step=self.train_step,
num_aggregate=self.params.controller_num_aggregate)
def train(self, sess, reset_op, log_every=10):
"""Train the controller for `num_steps`."""
print('-' * 80)
print('Training controller')
num_steps = (self.params.controller_num_aggregate *
self.params.controller_num_train_steps)
run_ops = [self.sample_arc,
self.sample_entropy,
self.reward,
self.baseline,
self.train_op]
for step in range(num_steps):
arc, ent, reward, baseline, _ = sess.run(run_ops)
sess.run(reset_op)
if step % log_every == 0:
log_string = 'step={0:<5d}'.format(step)
log_string += ' ent={0:<7.3f}'.format(ent)
log_string += ' ppl={0:<7.2f}'.format(REWARD_CONSTANT / reward)
log_string += ' rw={0:<7.4f}'.format(reward)
log_string += ' bl={0:<7.4f}'.format(baseline)
log_string += ' arc=[{0}]'.format(' '.join([str(v) for v in arc]))
print(log_string)
| [
"[email protected]"
] | |
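A small numeric illustration of the moving-baseline update applied in build_trainer above (the values are made up):

# baseline <- baseline - (1 - dec) * (baseline - reward), an exponential moving average of the reward
baseline, reward, dec = 0.0, 0.5, 0.999
baseline -= (1 - dec) * (baseline - reward)
print(baseline)  # about 0.0005; each update moves the baseline 0.1% of the remaining gap toward the reward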
bcd2c7e9609e8220daa20bb688620b26c45ef2b3 | e0cbea0cb68f0ba5dba837dbe60067eb88e3d151 | /BeautifulSoup/soup_env/lib/python3.6/site-packages/urllib3/util/timeout.py | 601f7fc9dafaae86b17dc8da5fc0b56b5511cab2 | [] | no_license | arossbrian/my_short_scripts | 74fb689ac25feaffb14437496902ee1a0dcc5b60 | a34923f7ecbf027d8a0704400fcfb3e71ed662fd | refs/heads/master | 2023-05-24T16:45:51.050321 | 2023-05-11T16:20:30 | 2023-05-11T16:20:30 | 195,057,250 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,132 | py | from __future__ import absolute_import
# The default socket timeout, used by httplib to indicate that no timeout was
# specified by the user
from socket import _GLOBAL_DEFAULT_TIMEOUT
import time
from ..exceptions import TimeoutStateError
# A sentinel value to indicate that no timeout was specified by the user in
# urllib3
_Default = object()
# Use time.monotonic if available.
current_time = getattr(time, "monotonic", time.time)
class Timeout(object):
""" Timeout configuration.
Timeouts can be defined as a default for a pool::
timeout = Timeout(connect=2.0, read=7.0)
http = PoolManager(timeout=timeout)
response = http.request('GET', 'http://example.com/')
Or per-request (which overrides the default for the pool)::
response = http.request('GET', 'http://example.com/', timeout=Timeout(10))
Timeouts can be disabled by setting all the parameters to ``None``::
no_timeout = Timeout(connect=None, read=None)
response = http.request('GET', 'http://example.com/, timeout=no_timeout)
:param total:
This combines the connect and read timeouts into one; the read timeout
will be set to the time leftover from the connect attempt. In the
event that both a connect timeout and a total are specified, or a read
timeout and a total are specified, the shorter timeout will be applied.
Defaults to None.
:type total: integer, float, or None
:param connect:
The maximum amount of time (in seconds) to wait for a connection
attempt to a server to succeed. Omitting the parameter will default the
connect timeout to the system default, probably `the global default
timeout in socket.py
<http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
None will set an infinite timeout for connection attempts.
:type connect: integer, float, or None
:param read:
The maximum amount of time (in seconds) to wait between consecutive
read operations for a response from the server. Omitting the parameter
will default the read timeout to the system default, probably `the
global default timeout in socket.py
<http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
None will set an infinite timeout.
:type read: integer, float, or None
.. note::
Many factors can affect the total amount of time for urllib3 to return
an HTTP response.
For example, Python's DNS resolver does not obey the timeout specified
on the socket. Other factors that can affect total request time include
high CPU load, high swap, the program running at a low priority level,
or other behaviors.
In addition, the read and total timeouts only measure the time between
read operations on the socket connecting the client and the server,
not the total amount of time for the request to return a complete
response. For most requests, the timeout is raised because the server
has not sent the first byte in the specified time. This is not always
the case; if a server streams one byte every fifteen seconds, a timeout
of 20 seconds will not trigger, even though the request will take
several minutes to complete.
If your goal is to cut off any request after a set amount of wall clock
time, consider having a second "watcher" thread to cut off a slow
request.
"""
#: A sentinel object representing the default timeout value
DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT
def __init__(self, total=None, connect=_Default, read=_Default):
self._connect = self._validate_timeout(connect, "connect")
self._read = self._validate_timeout(read, "read")
self.total = self._validate_timeout(total, "total")
self._start_connect = None
def __str__(self):
return "%s(connect=%r, read=%r, total=%r)" % (
type(self).__name__,
self._connect,
self._read,
self.total,
)
@classmethod
def _validate_timeout(cls, value, name):
""" Check that a timeout attribute is valid.
:param value: The timeout value to validate
:param name: The name of the timeout attribute to validate. This is
used to specify in error messages.
:return: The validated and casted version of the given value.
:raises ValueError: If it is a numeric value less than or equal to
zero, or the type is not an integer, float, or None.
"""
if value is _Default:
return cls.DEFAULT_TIMEOUT
if value is None or value is cls.DEFAULT_TIMEOUT:
return value
if isinstance(value, bool):
raise ValueError(
"Timeout cannot be a boolean value. It must "
"be an int, float or None."
)
try:
float(value)
except (TypeError, ValueError):
raise ValueError(
"Timeout value %s was %s, but it must be an "
"int, float or None." % (name, value)
)
try:
if value <= 0:
raise ValueError(
"Attempted to set %s timeout to %s, but the "
"timeout cannot be set to a value less "
"than or equal to 0." % (name, value)
)
except TypeError:
# Python 3
raise ValueError(
"Timeout value %s was %s, but it must be an "
"int, float or None." % (name, value)
)
return value
@classmethod
def from_float(cls, timeout):
""" Create a new Timeout from a legacy timeout value.
The timeout value used by httplib.py sets the same timeout on the
connect(), and recv() socket requests. This creates a :class:`Timeout`
object that sets the individual timeouts to the ``timeout`` value
passed to this function.
:param timeout: The legacy timeout value.
:type timeout: integer, float, sentinel default object, or None
:return: Timeout object
:rtype: :class:`Timeout`
"""
return Timeout(read=timeout, connect=timeout)
def clone(self):
""" Create a copy of the timeout object
Timeout properties are stored per-pool but each request needs a fresh
Timeout object to ensure each one has its own start/stop configured.
:return: a copy of the timeout object
:rtype: :class:`Timeout`
"""
# We can't use copy.deepcopy because that will also create a new object
# for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to
# detect the user default.
return Timeout(connect=self._connect, read=self._read, total=self.total)
def start_connect(self):
""" Start the timeout clock, used during a connect() attempt
:raises urllib3.exceptions.TimeoutStateError: if you attempt
to start a timer that has been started already.
"""
if self._start_connect is not None:
raise TimeoutStateError("Timeout timer has already been started.")
self._start_connect = current_time()
return self._start_connect
def get_connect_duration(self):
""" Gets the time elapsed since the call to :meth:`start_connect`.
:return: Elapsed time in seconds.
:rtype: float
:raises urllib3.exceptions.TimeoutStateError: if you attempt
to get duration for a timer that hasn't been started.
"""
if self._start_connect is None:
raise TimeoutStateError(
"Can't get connect duration for timer " "that has not started."
)
return current_time() - self._start_connect
@property
def connect_timeout(self):
""" Get the value to use when setting a connection timeout.
This will be a positive float or integer, the value None
(never timeout), or the default system timeout.
:return: Connect timeout.
:rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
"""
if self.total is None:
return self._connect
if self._connect is None or self._connect is self.DEFAULT_TIMEOUT:
return self.total
return min(self._connect, self.total)
@property
def read_timeout(self):
""" Get the value for the read timeout.
This assumes some time has elapsed in the connection timeout and
computes the read timeout appropriately.
If self.total is set, the read timeout is dependent on the amount of
time taken by the connect timeout. If the connection time has not been
established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be
raised.
:return: Value to use for the read timeout.
:rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
:raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect`
has not yet been called on this object.
"""
if (
self.total is not None
and self.total is not self.DEFAULT_TIMEOUT
and self._read is not None
and self._read is not self.DEFAULT_TIMEOUT
):
# In case the connect timeout has not yet been established.
if self._start_connect is None:
return self._read
return max(0, min(self.total - self.get_connect_duration(), self._read))
elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT:
return max(0, self.total - self.get_connect_duration())
else:
return self._read
| [
"[email protected]"
] | |
9c665f2636f0506c2191a10ad99da160b277c34e | eda12fedf7db9ba55c0f5819eb0df90e9889060b | /33_Type_C_TF_acrobot_discrete/03_TF_type_bc1_acrobot_a2c_GREEN.py | 340490526f43de0b81072517749f3a05058bff52 | [] | no_license | RichardMinsooGo-RL-Gym/TF1_4_Reinforcement_Learning_A3C_TF_Gym | 1e816ffc747367d8e40100a64f332d6406738a4a | 75c88b86a2d001b67971bafb37dbfd097a59932a | refs/heads/master | 2022-12-12T14:19:54.537542 | 2020-09-13T07:29:14 | 2020-09-13T07:29:14 | 277,669,264 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,441 | py | import os
import sys
import gym
import pylab
import numpy as np
import time
import tensorflow as tf
env_name = "Acrobot-v1"
env = gym.make(env_name)
# env.seed(1) # reproducible, general Policy gradient has high variance
# np.random.seed(123)
# tf.set_random_seed(456) # reproducible
env = env.unwrapped
# get size of state and action from environment
state_size = env.observation_space.shape[0]
action_size = env.action_space.n
MAX_EP_STEP = 3000
ENTROPY_BETA = 0.001
model_lr = 0.005
model_path = os.path.join(os.getcwd(), 'save_model')
graph_path = os.path.join(os.getcwd(), 'save_graph')
if not os.path.isdir(model_path):
os.mkdir(model_path)
if not os.path.isdir(graph_path):
os.mkdir(graph_path)
# Network for the Actor Critic
class A2C_agent(object):
def __init__(self, sess, scope):
self.sess = sess
# get size of state and action
self.action_size = action_size
self.state_size = state_size
self.value_size = 1
        # these are hyper parameters for the ActorCritic
self.discount_factor = 0.99 # decay rate
self.hidden1, self.hidden2 = 128, 128
self.scope = scope
# create model for actor and critic network
with tf.variable_scope(self.scope):
self._init_input()
self.build_model()
self._init_op()
def _init_input(self):
# with tf.variable_scope('input'):
self.state = tf.placeholder(tf.float32, [None, self.state_size], name='state')
self.action = tf.placeholder(tf.int32, [None, ], name='action')
self.q_target = tf.placeholder(tf.float32, name="q_target")
def _init_op(self):
# with tf.variable_scope('td_error'):
# A_t = R_t - V(S_t)
# self.td_error = tf.subtract(self.q_target, self.value, name='td_error')
self.td_error = self.q_target - self.value
# with tf.variable_scope('critic_loss'):
# Value loss
# self.critic_loss = tf.reduce_mean(tf.square(self.td_error))
self.critic_loss = tf.reduce_mean(tf.square(self.value - self.q_target), axis=1)
# with tf.variable_scope('actor_loss'):
log_prob = tf.reduce_sum(tf.log(self.policy + 1e-5) * tf.one_hot(self.action, self.action_size, dtype=tf.float32), axis=1, keep_dims=True)
exp_v = log_prob * tf.stop_gradient(self.td_error)
entropy = -tf.reduce_sum(self.policy * tf.log(self.policy + 1e-5),
axis=1, keep_dims=True) # encourage exploration
self.exp_v = ENTROPY_BETA * entropy + exp_v
self.actor_loss = tf.reduce_mean(-self.exp_v)
self.loss_total = self.actor_loss + self.critic_loss
# with tf.variable_scope('train'):
self.train_op = tf.train.AdamOptimizer(model_lr).minimize(self.loss_total)
# neural network structure of the actor and critic
def build_model(self):
w_init, b_init = tf.random_normal_initializer(.0, .3), tf.constant_initializer(0.1)
with tf.variable_scope("actor"):
actor_hidden = tf.layers.dense(self.state, self.hidden1, tf.nn.tanh, kernel_initializer=w_init,
bias_initializer=b_init)
self.actor_predict = tf.layers.dense(actor_hidden, self.action_size, kernel_initializer=w_init,
bias_initializer=b_init)
self.policy = tf.nn.softmax(self.actor_predict)
with tf.variable_scope("critic"):
critic_hidden = tf.layers.dense(inputs=self.state, units = self.hidden1, activation=tf.nn.tanh, # tanh activation
kernel_initializer=w_init, bias_initializer=b_init, name='fc1_c')
critic_predict = tf.layers.dense(inputs=critic_hidden, units = self.value_size, activation=None,
kernel_initializer=w_init, bias_initializer=b_init, name='fc2_c')
self.value = critic_predict
# get action from policy network
def get_action(self, state):
"""
Choose action based on observation
Arguments:
state: array of state, has shape (num_features)
Returns: index of action we want to choose
"""
# Reshape observation to (num_features, 1)
state_t = state[np.newaxis, :]
# Run forward propagation to get softmax probabilities
prob_weights = self.sess.run(self.policy, feed_dict={self.state: state_t})
# Select action using a biased sample
# this will return the index of the action we've sampled
action = np.random.choice(range(prob_weights.shape[1]), p=prob_weights.ravel())
return action
# save <s, a ,r> of each step
# this is used for calculating discounted rewards
def append_sample(self, state, action, reward):
self.buffer_state.append(state)
self.buffer_action.append(action)
self.buffer_reward.append(reward)
# update policy network and value network every episode
def train_model(self, next_state, done):
if done:
value_next_state = 0 # terminal
else:
value_next_state = self.sess.run(self.value, {self.state: next_state[np.newaxis, :]})[0][0]
for reward in self.buffer_reward[::-1]: # reverse buffer r
value_next_state = reward + self.discount_factor * value_next_state
self.buffer_q_target.append(value_next_state)
self.buffer_q_target.reverse()
feed_dict={
self.state: np.vstack(self.buffer_state),
self.action: np.array(self.buffer_action),
self.q_target: np.vstack(self.buffer_q_target)
}
self.sess.run(self.train_op, feed_dict)
self.buffer_state, self.buffer_action, self.buffer_reward = [], [], []
self.buffer_q_target = []
def main():
with tf.Session() as sess:
agent = A2C_agent(sess, "model")
agent.sess.run(tf.global_variables_initializer())
train_steps = 0
agent.buffer_state, agent.buffer_action, agent.buffer_reward = [], [], []
agent.buffer_q_target = []
scores, episodes = [], []
episode = 0
avg_score = MAX_EP_STEP
start_time = time.time()
while time.time() - start_time < 5 * 60 and avg_score > 90:
done = False
score = 0
state = env.reset()
while not done and score < MAX_EP_STEP:
# every time step we do train from the replay memory
score += 1
# fresh env
# if agent.render:
# env.render()
train_steps += 1
# get action for the current state and go one step in environment
action = agent.get_action(state)
# make step in environment
next_state, reward, done, _ = env.step(action)
# save the sample <state, action, reward> to the memory
agent.append_sample(state, action, reward)
# if train_steps % 10 == 0 or done: # update global and assign to local net
# agent.train_model(next_state, done)
# swap observation
state = next_state
# train when epsisode finished
if done or score == MAX_EP_STEP:
episode += 1
agent.train_model(next_state, done)
# every episode, plot the play time
scores.append(score)
episodes.append(episode)
avg_score = np.mean(scores[-min(30, len(scores)):])
print("episode :{:5d}".format(episode), "/ score :{:5d}".format(score))
break
pylab.plot(episodes, scores, 'b')
pylab.savefig("./save_graph/Cartpole_PG_TF.png")
e = int(time.time() - start_time)
print('Elasped time :{:02d}:{:02d}:{:02d}'.format(e // 3600, (e % 3600 // 60), e % 60))
sys.exit()
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
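A small worked example of how train_model above turns the reward buffer into critic targets by bootstrapping from the value of the final next_state (numbers are illustrative):

discount_factor = 0.99
buffer_reward = [1.0, 1.0, 1.0]
value_next_state = 0.5            # critic's estimate for the state after the last step
buffer_q_target = []
for reward in buffer_reward[::-1]:
    value_next_state = reward + discount_factor * value_next_state
    buffer_q_target.append(value_next_state)
buffer_q_target.reverse()
print(buffer_q_target)            # approximately [3.4552, 2.4801, 1.495]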
fbcf4dff0606fafa97cc778c0778a49cc9e1a8e6 | 8830831a87f35ff2628f379d8230928ec6b5641a | /Homedepot/code/stem2.py | 3a3c13edf6434f0161556c5b49e294bd64829972 | [] | no_license | nickmcadden/Kaggle | e5882c9d68a81700d8d969328d91c059a0643868 | cbc5347dec90e4bf64d4dbaf28b8ffb362efc64f | refs/heads/master | 2019-07-18T08:09:40.683168 | 2018-01-26T14:35:38 | 2018-01-26T14:35:38 | 40,735,982 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,710 | py | import pandas as pd
import numpy as np
from nltk.stem.snowball import EnglishStemmer
from nltk.tokenize import wordpunct_tokenize
import sys
import csv
reload(sys)
sys.setdefaultencoding('ISO-8859-1')
stemmer = EnglishStemmer()
print("Reading data\n")
train = pd.read_csv('./input/train.csv', encoding="ISO-8859-1")
test = pd.read_csv('./input/test.csv', encoding="ISO-8859-1")
desc = pd.read_csv('./input/product_descriptions.csv', encoding="ISO-8859-1")
print("Stemming train file\n")
for index, row in train.iterrows():
train.ix[index,'product_title'] = " ".join([stemmer.stem(word.lower()) for word in wordpunct_tokenize(row['product_title'])])
train.ix[index,'search_term'] = " ".join([stemmer.stem(word.lower()) for word in wordpunct_tokenize(row['search_term'])])
if index % 1000 == 0:
print(index)
train.to_csv('./input/train_stemmed_snowball.csv', index=False, quoting=csv.QUOTE_NONNUMERIC)
print("\nStemming test file\n")
for index, row in test.iterrows():
test.ix[index,'product_title'] = " ".join([stemmer.stem(word.lower()) for word in wordpunct_tokenize(row['product_title'])])
test.ix[index,'search_term'] = " ".join([stemmer.stem(word.lower()) for word in wordpunct_tokenize(row['search_term'])])
if index % 1000 == 0:
print(index)
test.to_csv('./input/test_stemmed_snowball.csv', index=False, quoting=csv.QUOTE_NONNUMERIC)
'''
print("\nStemming description file\n")
for index, row in desc.iterrows():
desc.ix[index,'product_description'] = " ".join([stemmer.stem(word.lower()) for word in wordpunct_tokenize(row['product_description'])])
if index % 1000 == 0:
print(index)
desc.to_csv('./input/desc_stemmed_snowball.csv', index=False, quoting=csv.QUOTE_NONNUMERIC)
'''
| [
"[email protected]"
] | |
b217ba63eaddc9616214a06e614c6246f5c30edf | c1bd12405d244c5924a4b069286cd9baf2c63895 | /azure-mgmt-recoveryservicesbackup/azure/mgmt/recoveryservicesbackup/models/restore_request.py | e662315f9bdfdbec34fe2249cdb69996c797c338 | [
"MIT"
] | permissive | lmazuel/azure-sdk-for-python | 972708ad5902778004680b142874582a284a8a7c | b40e0e36cc00a82b7f8ca2fa599b1928240c98b5 | refs/heads/master | 2022-08-16T02:32:14.070707 | 2018-03-29T17:16:15 | 2018-03-29T17:16:15 | 21,287,134 | 1 | 3 | MIT | 2019-10-25T15:56:00 | 2014-06-27T19:40:56 | Python | UTF-8 | Python | false | false | 1,055 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class RestoreRequest(Model):
"""Base class for restore request. Workload-specific restore requests are
derived from this class.
:param object_type: Polymorphic Discriminator
:type object_type: str
"""
_validation = {
'object_type': {'required': True},
}
_attribute_map = {
'object_type': {'key': 'objectType', 'type': 'str'},
}
_subtype_map = {
'object_type': {'IaasVMRestoreRequest': 'IaasVMRestoreRequest'}
}
def __init__(self):
self.object_type = None
| [
"[email protected]"
] | |
4b7ad1257588f9d861614a07ee2bc059ad96ebde | b34f07d217cdda9f59e7f58f89dad17fae1ee132 | /malaya_speech/model/frame.py | 95fde8af773361726a61fb74e10e57b9e3e60f0e | [
"MIT"
] | permissive | Ariffleng/malaya-speech | 965cea504e364c77ca513d43bf340fc122b97672 | 4343c409340c608a426cc6f0926fbe2c1661783e | refs/heads/master | 2023-08-12T23:23:39.983006 | 2021-10-02T09:14:52 | 2021-10-02T09:14:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,386 | py | import numpy as np
from dataclasses import dataclass
SEGMENT_PRECISION = 1e-6
class Frame:
def __init__(self, array, timestamp, duration):
if not isinstance(array, np.ndarray):
array = np.array(array)
self.array = array
self.timestamp = timestamp
self.duration = duration
@dataclass(frozen=True, order=True)
class Segment:
start: float = 0.0
end: float = 0.0
def __bool__(self):
return bool((self.end - self.start) > SEGMENT_PRECISION)
@property
def duration(self) -> float:
"""
Segment duration (read-only)
"""
return self.end - self.start if self else 0.0
@property
def middle(self) -> float:
"""Segment mid-time (read-only)"""
return 0.5 * (self.start + self.end)
def __contains__(self, other: 'Segment'):
"""Inclusion
>>> segment = Segment(start=0, end=10)
>>> Segment(start=3, end=10) in segment:
True
>>> Segment(start=5, end=15) in segment:
False
"""
return (self.start <= other.start) and (self.end >= other.end)
def __and__(self, other):
"""
Intersection
>>> segment = Segment(0, 10)
>>> other_segment = Segment(5, 15)
>>> segment & other_segment
<Segment(5, 10)>
Note
----
When the intersection is empty, an empty segment is returned:
>>> segment = Segment(0, 10)
>>> other_segment = Segment(15, 20)
>>> intersection = segment & other_segment
>>> if not intersection:
... # intersection is empty.
"""
start = max(self.start, other.start)
end = min(self.end, other.end)
return Segment(start=start, end=end)
def intersects(self, other: 'Segment') -> bool:
"""
Check whether two segments intersect each other
Parameters
----------
other : Segment
Other segment
Returns
-------
intersect : bool
True if segments intersect, False otherwise
"""
return (
(
self.start < other.start
and other.start < self.end - SEGMENT_PRECISION
)
or (
self.start > other.start
and self.start < other.end - SEGMENT_PRECISION
)
or (self.start == other.start)
)
def overlaps(self, t: float):
"""
Check if segment overlaps a given time
Parameters
----------
t : float
Time, in seconds.
Returns
-------
overlap: bool
True if segment overlaps time t, False otherwise.
"""
return self.start <= t and self.end >= t
def __or__(self, other):
"""
Union
>>> segment = Segment(0, 10)
>>> other_segment = Segment(5, 15)
>>> segment | other_segment
<Segment(0, 15)>
Note
----
When a gap exists between the segment, their union covers the gap as well:
>>> segment = Segment(0, 10)
>>> other_segment = Segment(15, 20)
>>> segment | other_segment
<Segment(0, 20)
"""
if not self:
return other
if not other:
return self
start = min(self.start, other.start)
end = max(self.end, other.end)
return Segment(start=start, end=end)
def __xor__(self, other):
"""
Gap
>>> segment = Segment(0, 10)
>>> other_segment = Segment(15, 20)
>>> segment ^ other_segment
<Segment(10, 15)
Note
----
The gap between a segment and an empty segment is not defined.
>>> segment = Segment(0, 10)
>>> empty_segment = Segment(11, 11)
>>> segment ^ empty_segment
ValueError: The gap between a segment and an empty segment is not defined.
"""
if (not self) or (not other):
raise ValueError(
'The gap between a segment and an empty segment '
'is not defined.'
)
start = min(self.end, other.end)
end = max(self.start, other.start)
return Segment(start=start, end=end)
def _str_helper(self, seconds: float):
from datetime import timedelta
negative = seconds < 0
seconds = abs(seconds)
td = timedelta(seconds=seconds)
seconds = td.seconds + 86400 * td.days
microseconds = td.microseconds
hours, remainder = divmod(seconds, 3600)
minutes, seconds = divmod(remainder, 60)
return '%s%02d:%02d:%02d.%03d' % (
'-' if negative else ' ',
hours,
minutes,
seconds,
microseconds / 1000,
)
def __str__(self):
"""
Human-readable representation
>>> print(Segment(1337, 1337 + 0.42))
[ 00:22:17.000 --> 00:22:17.420]
Note
----
Empty segments are printed as "[]"
"""
return '<Segment(%g, %g)>' % (self.start, self.end)
def __repr__(self):
"""
Computer-readable representation
>>> Segment(1337, 1337 + 0.42)
<Segment(1337, 1337.42)>
"""
return '<Segment(%g, %g)>' % (self.start, self.end)
| [
"[email protected]"
] | |
56d61b52a986db759e27b224e6f9af02a912baf9 | b2605c93db0c5b3dd0ac7f7cfa80674e82ff9439 | /sandbox/filter-max255.py | ee9f129f2aff3855cfde263f3b5c214ef661e5e1 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | adnbsr/khmer | 76728708b60a5662e93b83c6559502d31b92445d | 64612c1140d17c0988fa01f3c6c627913b509700 | refs/heads/master | 2021-01-18T13:20:23.385284 | 2013-08-01T21:13:42 | 2013-08-01T21:13:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,254 | py | import sys, screed.fasta, os
import khmer
from khmer.thread_utils import ThreadedSequenceProcessor, verbose_fastq_iter
K = 32
HT_SIZE=4e9
N_HT=4
WORKER_THREADS=8
GROUPSIZE=100
###
def main():
repfile = sys.argv[1]
infile = sys.argv[2]
outfile = os.path.basename(infile) + '.fno255'
if len(sys.argv) >= 4:
outfile = sys.argv[3]
print 'file to count from: %s' % repfile
print 'input file to filter: %s' % infile
print 'filtering to output:', outfile
print '-- settings:'
print 'K', K
print 'N THREADS', WORKER_THREADS
print '--'
print 'making hashtable'
ht = khmer.new_counting_hash(K, HT_SIZE, N_HT)
print 'consuming input', repfile
ht.consume_fasta(repfile)
outfp = open(outfile, 'w')
def process_fn(record, ht=ht):
name = record['name']
seq = record['sequence']
if 'N' in seq:
return None, None
if len(seq) < K:
return None, None
if ht.get_max_count(seq) >= 255:
return None, None
return name, seq
tsp = ThreadedSequenceProcessor(process_fn, WORKER_THREADS, GROUPSIZE)
###
tsp.start(verbose_fastq_iter(infile), outfp)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
97d23fdb9293035257f2b63f7223884d29f25b32 | 3034e86347c71bf7e7af9e5f7aa44ab5ad61e14b | /mongodb/day04/grid.py | 6a756f9c08f808f46ec135295e3d86b64827d34a | [] | no_license | jason12360/AID1803 | bda039b82f43d6609aa8028b0d9598f2037c23d5 | f0c54a3a2f06881b3523fba7501ab085cceae75d | refs/heads/master | 2020-03-17T00:43:42.541761 | 2018-06-29T10:07:44 | 2018-06-29T10:07:44 | 133,127,628 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 495 | py | # retrieves files stored in the database with GridFS
from pymongo import MongoClient
# gridfs ships together with pymongo
import gridfs
conn = MongoClient('localhost',27017)
db = conn.get_database('grid')
# get a GridFS object
fs = gridfs.GridFS(db)
files = fs.find()
for file in files:
if file.filename =='./生日快乐歌.mp3':
with open(file.filename,'wb') as f:
while True:
                # the file.read() method returns the file content
data = file.read(64)
if not data:
break
f.write(data)
conn.close()
| [
"[email protected]"
] | |
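The script above reads a file back out of GridFS; a sketch of the matching write path using the same pymongo/gridfs API (the file name is simply the one referenced above):

from pymongo import MongoClient
import gridfs

conn = MongoClient('localhost', 27017)
fs = gridfs.GridFS(conn.get_database('grid'))
# put() stores the data in chunks and returns the new file's _id
with open('./生日快乐歌.mp3', 'rb') as f:
    fs.put(f, filename='./生日快乐歌.mp3')
conn.close()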
a783bdb2cbac71f57900c83b05288050df71ca1a | a161999b8a9009b6bf961288b68d651541882f2d | /process_news.py | e653f8d7622888988beeeccb4c26faee2e2b6d09 | [] | no_license | kkb-Projects/P1-news-summarization | 788896460aa11712812a86eaf7c7c066c5028d0b | 85122968d92b84741fd2fa8dbb81410e807c7eac | refs/heads/master | 2021-01-09T14:39:09.941508 | 2020-03-19T02:44:17 | 2020-03-19T02:44:17 | 242,340,799 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,912 | py | # -*- coding:utf8 -*-
# author:yaolinxia
# datetime:2020/3/11
# software: PyCharm
import random
import re
import pandas as pd
from collections import Counter
import jieba
from functools import reduce
"""
Chinese news corpus processing
"""
def token(string):
# we will learn the regular expression next course.
return re.findall('\w+', string)
# save the processed text to a file
def to_txt(articles_clean,outpath='news_articles.txt'):
with open(outpath, 'w') as f:
for a in articles_clean:
f.write(a + '\n')
# 分词
def cut(string):
return list(jieba.cut(string))
# 将token保存到dict在存储起来
def to_dict(Token, out_path='news_articles_dict.txt'):
line_dict = {}
with open(out_path, 'w') as f:
for i, line in enumerate(Token):
line_dict[i] = line
f.write(str(line_dict))
print(line_dict[2])
def seg2txt(Token, out_path='news_articles_cut.txt'):
with open(out_path, 'w') as f:
for line in Token:
f.write(line+' ')
# compute term frequencies
def seg2num(cut_txt):
    c = Counter()
    with open(cut_txt, 'r') as f:
        for lines in f.readlines():
            for l in lines.strip().split():
                c[l] += 1
    for (k, v) in c.most_common(2):  # print the two most frequent terms
        print("%s:%d" % (k, v))
if __name__ == '__main__':
filename = 'data/sqlResult_1558435.csv'
wiki_file = "data/wiki_00"
wiki_out = "data/output/wiki_less.txt"
"""
outpath = 'news_articles.txt'
content = pd.read_csv(filename, encoding='gb18030')
articles = content['content'].tolist()
articles_clean = [''.join(token(str(a))) for a in articles]
Token = []
Token = cut(open(outpath).read())
print("Token", Token)
# to_dict(Token)
seg2txt(Token)
"""
seg2num("data/output/wiki_cut.txt")
| [
"[email protected]"
] | |
46abac533c1ec9a572a565d59cc930bd692ad94d | f3b233e5053e28fa95c549017bd75a30456eb50c | /ptp1b_input/L66/66-77_MD_NVT_rerun/set_7.py | 9c5ad84b39e109861815092ca2f3a6a6735a91e4 | [] | no_license | AnguseZhang/Input_TI | ddf2ed40ff1c0aa24eea3275b83d4d405b50b820 | 50ada0833890be9e261c967d00948f998313cb60 | refs/heads/master | 2021-05-25T15:02:38.858785 | 2020-02-18T16:57:04 | 2020-02-18T16:57:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 741 | py | import os
dir = '/mnt/scratch/songlin3/run/ptp1b/L66/MD_NVT_rerun/ti_one-step/66_77/'
filesdir = dir + 'files/'
temp_prodin = filesdir + 'temp_prod_7.in'
temp_pbs = filesdir + 'temp_7.pbs'
lambd = [ 0.00922, 0.04794, 0.11505, 0.20634, 0.31608, 0.43738, 0.56262, 0.68392, 0.79366, 0.88495, 0.95206, 0.99078]
for j in lambd:
os.chdir("%6.5f" %(j))
workdir = dir + "%6.5f" %(j) + '/'
#prodin
prodin = workdir + "%6.5f_prod_7.in" %(j)
os.system("cp %s %s" %(temp_prodin, prodin))
os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, prodin))
#PBS
pbs = workdir + "%6.5f_7.pbs" %(j)
os.system("cp %s %s" %(temp_pbs, pbs))
os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, pbs))
#submit pbs
#os.system("qsub %s" %(pbs))
os.chdir(dir)
| [
"[email protected]"
] | |
a9cad12e0ab2aaafb4dab18f953262b068081272 | a838d4bed14d5df5314000b41f8318c4ebe0974e | /sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_11_01/aio/operations/_virtual_network_taps_operations.py | aa759ad03ff8ad071e1733e6df02900161dfbadb | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | scbedd/azure-sdk-for-python | ee7cbd6a8725ddd4a6edfde5f40a2a589808daea | cc8bdfceb23e5ae9f78323edc2a4e66e348bb17a | refs/heads/master | 2023-09-01T08:38:56.188954 | 2021-06-17T22:52:28 | 2021-06-17T22:52:28 | 159,568,218 | 2 | 0 | MIT | 2019-08-11T21:16:01 | 2018-11-28T21:34:49 | Python | UTF-8 | Python | false | false | 29,365 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class VirtualNetworkTapsOperations:
"""VirtualNetworkTapsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_11_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
tap_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-11-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'tapName': self._serialize.url("tap_name", tap_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkTaps/{tapName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
tap_name: str,
**kwargs
) -> AsyncLROPoller[None]:
"""Deletes the specified virtual network tap.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param tap_name: The name of the virtual network tap.
:type tap_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
tap_name=tap_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'tapName': self._serialize.url("tap_name", tap_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkTaps/{tapName}'} # type: ignore
async def get(
self,
resource_group_name: str,
tap_name: str,
**kwargs
) -> "_models.VirtualNetworkTap":
"""Gets information about the specified virtual network tap.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param tap_name: The name of virtual network tap.
:type tap_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualNetworkTap, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_11_01.models.VirtualNetworkTap
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkTap"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-11-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'tapName': self._serialize.url("tap_name", tap_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualNetworkTap', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkTaps/{tapName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
tap_name: str,
parameters: "_models.VirtualNetworkTap",
**kwargs
) -> "_models.VirtualNetworkTap":
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkTap"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-11-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'tapName': self._serialize.url("tap_name", tap_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VirtualNetworkTap')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VirtualNetworkTap', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualNetworkTap', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkTaps/{tapName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
tap_name: str,
parameters: "_models.VirtualNetworkTap",
**kwargs
) -> AsyncLROPoller["_models.VirtualNetworkTap"]:
"""Creates or updates a Virtual Network Tap.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param tap_name: The name of the virtual network tap.
:type tap_name: str
:param parameters: Parameters supplied to the create or update virtual network tap operation.
:type parameters: ~azure.mgmt.network.v2018_11_01.models.VirtualNetworkTap
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VirtualNetworkTap or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2018_11_01.models.VirtualNetworkTap]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkTap"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
tap_name=tap_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualNetworkTap', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'tapName': self._serialize.url("tap_name", tap_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkTaps/{tapName}'} # type: ignore
async def _update_tags_initial(
self,
resource_group_name: str,
tap_name: str,
tap_parameters: "_models.TagsObject",
**kwargs
) -> "_models.VirtualNetworkTap":
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkTap"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-11-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._update_tags_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'tapName': self._serialize.url("tap_name", tap_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(tap_parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualNetworkTap', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkTaps/{tapName}'} # type: ignore
async def begin_update_tags(
self,
resource_group_name: str,
tap_name: str,
tap_parameters: "_models.TagsObject",
**kwargs
) -> AsyncLROPoller["_models.VirtualNetworkTap"]:
"""Updates an VirtualNetworkTap tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param tap_name: The name of the tap.
:type tap_name: str
:param tap_parameters: Parameters supplied to update VirtualNetworkTap tags.
:type tap_parameters: ~azure.mgmt.network.v2018_11_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VirtualNetworkTap or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2018_11_01.models.VirtualNetworkTap]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkTap"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._update_tags_initial(
resource_group_name=resource_group_name,
tap_name=tap_name,
tap_parameters=tap_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualNetworkTap', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'tapName': self._serialize.url("tap_name", tap_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkTaps/{tapName}'} # type: ignore
def list_all(
self,
**kwargs
) -> AsyncIterable["_models.VirtualNetworkTapListResult"]:
"""Gets all the VirtualNetworkTaps in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VirtualNetworkTapListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_11_01.models.VirtualNetworkTapListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkTapListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-11-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('VirtualNetworkTapListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/virtualNetworkTaps'} # type: ignore
def list_by_resource_group(
self,
resource_group_name: str,
**kwargs
) -> AsyncIterable["_models.VirtualNetworkTapListResult"]:
"""Gets all the VirtualNetworkTaps in a subscription.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VirtualNetworkTapListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_11_01.models.VirtualNetworkTapListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkTapListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-11-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('VirtualNetworkTapListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkTaps'} # type: ignore
| [
"[email protected]"
] | |
b4f598bb7e606e584899aaf66f8c72decb8fa123 | c60c071bc5cf72917883cddbcb5b6a42b6e71f2b | /ja_craiglist_djo/manage.py | 36d599e26d9d291d582787680bf01232cfa9b030 | [] | no_license | CyborgVillager/ja-django-git | bbf17927c8b2d3e774dc4d3bc363f96f3ec49216 | 7755d5996f91ecb5014ae720a4212b44a2e863ef | refs/heads/master | 2020-09-25T19:41:34.297549 | 2019-12-05T16:18:09 | 2019-12-05T16:18:09 | 226,074,493 | 0 | 0 | null | 2019-12-05T16:07:51 | 2019-12-05T10:20:53 | Python | UTF-8 | Python | false | false | 548 | py | #!/usr/bin/env python
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ja_craiglist_djo.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
| [
"[email protected]"
] | |
c0e3f33560d87b12dfd4c8a1b7dbed40257625b5 | 01c33443db4c4ac74087d220a2a3a6967ee3930f | /ccxt/async_support/bitflyer.py | 3aec9da4036433720f813bbe283bea39e187d4cb | [] | no_license | arques-changhwan/ccxt | 74de1790ab2e2cc07fa55f418817c988b3af6a28 | ac26599695af742aaffc16a8fd4dda4f8cb63588 | refs/heads/master | 2022-09-05T11:31:35.903127 | 2020-05-26T06:36:33 | 2020-05-26T06:36:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,801 | py | # -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async_support.base.exchange import Exchange
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import OrderNotFound
class bitflyer(Exchange):
def describe(self):
return self.deep_extend(super(bitflyer, self).describe(), {
'id': 'bitflyer',
'name': 'bitFlyer',
'countries': ['JP'],
'version': 'v1',
'rateLimit': 1000, # their nonce-timestamp is in seconds...
'has': {
'CORS': False,
'withdraw': True,
'fetchMyTrades': True,
'fetchOrders': True,
'fetchOrder': 'emulated',
'fetchOpenOrders': 'emulated',
'fetchClosedOrders': 'emulated',
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/28051642-56154182-660e-11e7-9b0d-6042d1e6edd8.jpg',
'api': 'https://api.bitflyer.jp',
'www': 'https://bitflyer.jp',
'doc': 'https://lightning.bitflyer.com/docs?lang=en',
},
'api': {
'public': {
'get': [
'getmarkets/usa', # new(wip)
'getmarkets/eu', # new(wip)
'getmarkets', # or 'markets'
'getboard', # ...
'getticker',
'getexecutions',
'gethealth',
'getboardstate',
'getchats',
],
},
'private': {
'get': [
'getpermissions',
'getbalance',
'getbalancehistory',
'getcollateral',
'getcollateralhistory',
'getcollateralaccounts',
'getaddresses',
'getcoinins',
'getcoinouts',
'getbankaccounts',
'getdeposits',
'getwithdrawals',
'getchildorders',
'getparentorders',
'getparentorder',
'getexecutions',
'getpositions',
'gettradingcommission',
],
'post': [
'sendcoin',
'withdraw',
'sendchildorder',
'cancelchildorder',
'sendparentorder',
'cancelparentorder',
'cancelallchildorders',
],
},
},
'fees': {
'trading': {
'maker': 0.2 / 100,
'taker': 0.2 / 100,
},
'BTC/JPY': {
'maker': 0.15 / 100,
'taker': 0.15 / 100,
},
},
})
async def fetch_markets(self, params={}):
jp_markets = await self.publicGetGetmarkets(params)
us_markets = await self.publicGetGetmarketsUsa(params)
eu_markets = await self.publicGetGetmarketsEu(params)
markets = self.array_concat(jp_markets, us_markets)
markets = self.array_concat(markets, eu_markets)
result = []
for i in range(0, len(markets)):
market = markets[i]
id = self.safe_string(market, 'product_code')
currencies = id.split('_')
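            # product codes are either plain pairs like "BTC_JPY", prefixed markets
            # like "FX_BTC_JPY", or single-token ids whose first six characters
            # encode the pair, so the number of '_'-separated tokens below decides
            # how the base / quote currencies are derived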
baseId = None
quoteId = None
base = None
quote = None
numCurrencies = len(currencies)
if numCurrencies == 1:
baseId = id[0:3]
quoteId = id[3:6]
elif numCurrencies == 2:
baseId = currencies[0]
quoteId = currencies[1]
else:
baseId = currencies[1]
quoteId = currencies[2]
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = (base + '/' + quote) if (numCurrencies == 2) else id
fees = self.safe_value(self.fees, symbol, self.fees['trading'])
maker = self.safe_value(fees, 'maker', self.fees['trading']['maker'])
taker = self.safe_value(fees, 'taker', self.fees['trading']['taker'])
spot = True
future = False
type = 'spot'
if ('alias' in market) or (currencies[0] == 'FX'):
type = 'future'
future = True
spot = False
maker = 0.0
taker = 0.0
result.append({
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': baseId,
'quoteId': quoteId,
'maker': maker,
'taker': taker,
'type': type,
'spot': spot,
'future': future,
'info': market,
})
return result
async def fetch_balance(self, params={}):
await self.load_markets()
response = await self.privateGetGetbalance(params)
#
# [
# {
# "currency_code": "JPY",
# "amount": 1024078,
# "available": 508000
# },
# {
# "currency_code": "BTC",
# "amount": 10.24,
# "available": 4.12
# },
# {
# "currency_code": "ETH",
# "amount": 20.48,
# "available": 16.38
# }
# ]
#
result = {'info': response}
for i in range(0, len(response)):
balance = response[i]
currencyId = self.safe_string(balance, 'currency_code')
code = self.safe_currency_code(currencyId)
account = self.account()
account['total'] = self.safe_float(balance, 'amount')
account['free'] = self.safe_float(balance, 'available')
result[code] = account
return self.parse_balance(result)
async def fetch_order_book(self, symbol, limit=None, params={}):
await self.load_markets()
request = {
'product_code': self.market_id(symbol),
}
orderbook = await self.publicGetGetboard(self.extend(request, params))
return self.parse_order_book(orderbook, None, 'bids', 'asks', 'price', 'size')
async def fetch_ticker(self, symbol, params={}):
await self.load_markets()
request = {
'product_code': self.market_id(symbol),
}
ticker = await self.publicGetGetticker(self.extend(request, params))
timestamp = self.parse8601(self.safe_string(ticker, 'timestamp'))
last = self.safe_float(ticker, 'ltp')
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': None,
'low': None,
'bid': self.safe_float(ticker, 'best_bid'),
'bidVolume': None,
'ask': self.safe_float(ticker, 'best_ask'),
'askVolume': None,
'vwap': None,
'open': None,
'close': last,
'last': last,
'previousClose': None,
'change': None,
'percentage': None,
'average': None,
'baseVolume': self.safe_float(ticker, 'volume_by_product'),
'quoteVolume': None,
'info': ticker,
}
def parse_trade(self, trade, market=None):
side = self.safe_string_lower(trade, 'side')
if side is not None:
if len(side) < 1:
side = None
order = None
if side is not None:
id = side + '_child_order_acceptance_id'
if id in trade:
order = trade[id]
if order is None:
order = self.safe_string(trade, 'child_order_acceptance_id')
timestamp = self.parse8601(self.safe_string(trade, 'exec_date'))
price = self.safe_float(trade, 'price')
amount = self.safe_float(trade, 'size')
cost = None
if amount is not None:
if price is not None:
cost = price * amount
id = self.safe_string(trade, 'id')
symbol = None
if market is not None:
symbol = market['symbol']
return {
'id': id,
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'order': order,
'type': None,
'side': side,
'takerOrMaker': None,
'price': price,
'amount': amount,
'cost': cost,
'fee': None,
}
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'product_code': market['id'],
}
response = await self.publicGetGetexecutions(self.extend(request, params))
return self.parse_trades(response, market, since, limit)
async def create_order(self, symbol, type, side, amount, price=None, params={}):
await self.load_markets()
request = {
'product_code': self.market_id(symbol),
'child_order_type': type.upper(),
'side': side.upper(),
'price': price,
'size': amount,
}
result = await self.privatePostSendchildorder(self.extend(request, params))
# {"status": - 200, "error_message": "Insufficient funds", "data": null}
id = self.safe_string(result, 'child_order_acceptance_id')
return {
'info': result,
'id': id,
}
async def cancel_order(self, id, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelOrder() requires a `symbol` argument')
await self.load_markets()
request = {
'product_code': self.market_id(symbol),
'child_order_acceptance_id': id,
}
return await self.privatePostCancelchildorder(self.extend(request, params))
def parse_order_status(self, status):
statuses = {
'ACTIVE': 'open',
'COMPLETED': 'closed',
'CANCELED': 'canceled',
'EXPIRED': 'canceled',
'REJECTED': 'canceled',
}
return self.safe_string(statuses, status, status)
def parse_order(self, order, market=None):
timestamp = self.parse8601(self.safe_string(order, 'child_order_date'))
amount = self.safe_float(order, 'size')
remaining = self.safe_float(order, 'outstanding_size')
filled = self.safe_float(order, 'executed_size')
price = self.safe_float(order, 'price')
cost = price * filled
status = self.parse_order_status(self.safe_string(order, 'child_order_state'))
type = self.safe_string_lower(order, 'child_order_type')
side = self.safe_string_lower(order, 'side')
symbol = None
if market is None:
marketId = self.safe_string(order, 'product_code')
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
if market is not None:
symbol = market['symbol']
fee = None
feeCost = self.safe_float(order, 'total_commission')
if feeCost is not None:
fee = {
'cost': feeCost,
'currency': None,
'rate': None,
}
id = self.safe_string(order, 'child_order_acceptance_id')
return {
'id': id,
'clientOrderId': None,
'info': order,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': None,
'status': status,
'symbol': symbol,
'type': type,
'side': side,
'price': price,
'cost': cost,
'amount': amount,
'filled': filled,
'remaining': remaining,
'fee': fee,
'average': None,
'trades': None,
}
async def fetch_orders(self, symbol=None, since=None, limit=100, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrders() requires a `symbol` argument')
await self.load_markets()
market = self.market(symbol)
request = {
'product_code': market['id'],
'count': limit,
}
response = await self.privateGetGetchildorders(self.extend(request, params))
orders = self.parse_orders(response, market, since, limit)
if symbol is not None:
orders = self.filter_by(orders, 'symbol', symbol)
return orders
async def fetch_open_orders(self, symbol=None, since=None, limit=100, params={}):
request = {
'child_order_state': 'ACTIVE',
}
return await self.fetch_orders(symbol, since, limit, self.extend(request, params))
async def fetch_closed_orders(self, symbol=None, since=None, limit=100, params={}):
request = {
'child_order_state': 'COMPLETED',
}
return await self.fetch_orders(symbol, since, limit, self.extend(request, params))
async def fetch_order(self, id, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrder() requires a `symbol` argument')
orders = await self.fetch_orders(symbol)
ordersById = self.index_by(orders, 'id')
if id in ordersById:
return ordersById[id]
raise OrderNotFound(self.id + ' No order found with id ' + id)
async def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchMyTrades requires a `symbol` argument')
await self.load_markets()
market = self.market(symbol)
request = {
'product_code': market['id'],
}
if limit is not None:
request['count'] = limit
response = await self.privateGetGetexecutions(self.extend(request, params))
return self.parse_trades(response, market, since, limit)
async def withdraw(self, code, amount, address, tag=None, params={}):
self.check_address(address)
await self.load_markets()
if code != 'JPY' and code != 'USD' and code != 'EUR':
raise ExchangeError(self.id + ' allows withdrawing JPY, USD, EUR only, ' + code + ' is not supported')
currency = self.currency(code)
request = {
'currency_code': currency['id'],
'amount': amount,
# 'bank_account_id': 1234,
}
response = await self.privatePostWithdraw(self.extend(request, params))
id = self.safe_string(response, 'message_id')
return {
'info': response,
'id': id,
}
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
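        # private endpoints are authenticated by signing nonce + method + request
        # path (including the query string, plus the JSON body for non-GET calls)
        # with HMAC-SHA256 using the API secret, and sending the digest in the
        # ACCESS-SIGN header next to ACCESS-KEY and ACCESS-TIMESTAMP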
request = '/' + self.version + '/'
if api == 'private':
request += 'me/'
request += path
if method == 'GET':
if params:
request += '?' + self.urlencode(params)
url = self.urls['api'] + request
if api == 'private':
self.check_required_credentials()
nonce = str(self.nonce())
auth = ''.join([nonce, method, request])
if params:
if method != 'GET':
body = self.json(params)
auth += body
headers = {
'ACCESS-KEY': self.apiKey,
'ACCESS-TIMESTAMP': nonce,
'ACCESS-SIGN': self.hmac(self.encode(auth), self.encode(self.secret)),
'Content-Type': 'application/json',
}
return {'url': url, 'method': method, 'body': body, 'headers': headers}
| [
"[email protected]"
] | |
ac242bd9428a8e8c909b8ceebdba6c1129a468c2 | f2673cd07770dca1bc5017341e8293aebbfd66c7 | /models/attention/encoders/pyramidal_blstm_encoder.py | fc78e88348e5a7cb29ead0d799c4e24a19c25a9f | [
"MIT"
] | permissive | xiao2mo/tensorflow_end2end_speech_recognition | 52d2c8d32b2f6e9f9f11dfaf8ddf434da16ff2ea | 9b4bdcacd9d73c3db19205b74f4d48419584834d | refs/heads/master | 2020-06-03T04:54:34.127500 | 2017-06-12T02:47:51 | 2017-06-12T02:47:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,634 | py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Pyramidal Bidirectional LSTM Encoder class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from .encoder_base import EncoderOutput, EncoderBase
class PyramidalBLSTMEncoder(EncoderBase):
"""Pyramidal Bidirectional LSTM Encoder.
Args:
num_units:
num_layer:
keep_prob_input:
keep_prob_hidden:
parameter_init:
clip_activation:
num_proj:
"""
def __init__(self,
num_units,
num_layer,
keep_prob_input=1.0,
keep_prob_hidden=1.0,
parameter_init=0.1,
clip_activation=50,
num_proj=None,
name='pblstm_encoder'):
EncoderBase.__init__(self, num_units, num_layer, keep_prob_input,
keep_prob_hidden, parameter_init, clip_activation,
num_proj, name)
def _build(self, inputs, inputs_seq_len):
"""Construct Pyramidal Bidirectional LSTM encoder.
Args:
inputs:
inputs_seq_len:
Returns:
EncoderOutput: A tuple of
`(outputs, final_state,
attention_values, attention_values_length)`
outputs:
final_state:
attention_values:
attention_values_length:
"""
self.inputs = inputs
self.inputs_seq_len = inputs_seq_len
raise NotImplementedError
| [
"[email protected]"
] | |
152553eda650901c21d5c57c5c78ebcc75106dfa | 0b16b44e4fc8c98c9ea3f9d4b8b470f4f62f918d | /Core/migrations/0002_auto_20201101_2120.py | 26e849d4a31082849e63d44fac1fcb8360cb5f66 | [] | no_license | AthifSaheer/DipakNiroula-Django-Ecom | 342eece90211fe80c41ba72bf69a50e63c5ea901 | 94ead608919c5bb076387e26f396e6c38319433e | refs/heads/main | 2023-02-05T06:52:24.204206 | 2020-12-24T13:19:13 | 2020-12-24T13:19:13 | 324,160,212 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,039 | py | # Generated by Django 2.2.14 on 2020-11-01 15:50
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('Core', '0001_initial'),
]
operations = [
migrations.RenameField(
model_name='product',
old_name='return_POlicy',
new_name='return_Policy',
),
migrations.CreateModel(
name='Admin',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('full_name', models.CharField(max_length=50)),
('image', models.ImageField(upload_to='admins')),
('mobile', models.CharField(max_length=20)),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
| [
"[email protected]"
] | |
009e53c59746e5e95ef1681b709b7a2b28c2339c | 267aafa3826d216f70a0197369c334bc542aee40 | /setup.py | a4b523deefdb6153c1331d6b30709c5c10b95b35 | [] | no_license | research-core/core-orders | 7ccc199e6b89e6cd86affd4d8e5bab4fe845589b | 37566b742b1423d30f9dc8e67641d828dc22e4a6 | refs/heads/master | 2020-06-29T02:37:00.250110 | 2019-08-26T17:10:48 | 2019-08-26T17:10:48 | 200,413,947 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,044 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import re
from setuptools import setup, find_packages
version, license = None, None
with open('orders/__init__.py', 'r') as fd:
content = fd.read()
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', content, re.MULTILINE).group(1)
license = re.search(r'^__license__\s*=\s*[\'"]([^\'"]*)[\'"]', content, re.MULTILINE).group(1)
if version is None: raise RuntimeError('Cannot find version information')
if license is None: raise RuntimeError('Cannot find license information')
with open('README.md', 'r') as fd:
long_description = fd.read()
setup(
name='core-orders',
version=version,
description='Research CORE ERM - orders module',
author='Ricardo Ribeiro, Hugo Cachitas',
author_email='[email protected], [email protected]',
url='https://github.com/research-core/core-orders',
long_description=long_description,
long_description_content_type='text/markdown',
packages=find_packages(),
license=license,
)
| [
"[email protected]"
] | |
a9682c31beb5aa6a6e2cacc7e42da087c161cd63 | 7ec04fc867d0a48fffc05c65bff9217cfe211fe7 | /HW/统计字符串/teachers.py | f3e81a089bc6a999b09cf50c7dafa2466777ca3b | [] | no_license | Cherry93/pythonPractic | 3b9d1f99803503073bbb2f3a58009665338bd278 | 2889183af6c9a01ab47895b23e2d6ce8c288fd4d | refs/heads/master | 2021-08-31T16:41:56.655989 | 2017-12-22T03:53:18 | 2017-12-22T03:53:18 | 115,008,198 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 892 | py | '''
Define a Teacher class with name, title and salary attributes, create 1000 instances, write them to teachers.dat with pickle and read them back.
'''
import pickle
from tkinter import filedialog
class Teachers:
def __init__(self,name,call,rmb):
self.name =name
self.call = call
self.rmb = rmb
def __str__(self):
return "name:"+str(self.name)+"call:"+str(self.call)+"rmb:"+str(self.rmb)
c = Teachers("王小星","高级",1000)
#print(c)
def writeDemo():
global file
#print(c)
savePath = filedialog.asksaveasfilename()
file = open(savePath, mode="ab")
    for i in range(1000):
        data = Teachers("王小星", "高级", 1000)
pickle.dump(data, file)
file.close()
writeDemo()
def readMode():
global file
with open(filedialog.askopenfilename(), mode="rb") as file:
        for i in range(1000):
data = pickle.load(file)
print(data)
readMode()
| [
"[email protected]"
] | |
eb3d54dc1db886b98008f3a576109aa33e101d6d | 5e734cd4e071272688ab635243290936c5c2db40 | /lib/paths.py | 26971a871946a307647c399e9c700320a62ab114 | [
"MIT"
] | permissive | jwilk/i18nspector | a2a4aecee00de9cfb8d9a0354614f7413e19f1b9 | d9762416937399b81abaedc9ddcdc36dbda1c318 | refs/heads/master | 2023-09-04T12:32:35.255101 | 2023-08-22T08:41:50 | 2023-08-22T08:41:50 | 29,258,684 | 2 | 3 | MIT | 2022-06-27T19:04:57 | 2015-01-14T18:22:23 | Python | UTF-8 | Python | false | false | 1,388 | py | # Copyright © 2013 Jakub Wilk <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the “Software”), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
'''
paths to code and data
'''
import os
basedir = os.path.normpath(os.path.join(
os.path.dirname(__file__),
os.path.pardir,
'',
))
datadir = os.path.join(basedir, 'data', '')
def check():
os.stat(basedir)
os.stat(datadir)
# vim:ts=4 sts=4 sw=4 et
| [
"[email protected]"
] | |
9b3d001951b24200fcdb3bd49fa67280cf2503c4 | 6659f860ddbb7550f66ea712753d3d2aab1cc6ff | /Note_3/Example_36.py | 2671dcc8e106d4ba64273a5f63c1cda83dfc50f5 | [] | no_license | ianhom/Python-Noob | adf077bee78727eac43da2804a90528ace6c38a6 | e12f0159d68d7c4962cafa3cb8b68a8761037f21 | refs/heads/master | 2020-12-08T12:06:01.909463 | 2018-07-03T00:42:41 | 2018-07-03T00:42:41 | 67,806,200 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 501 | py | #!/usr/bin/python
# -*- coding: UTF-8 -*-
'''
Problem: find the prime numbers within 100.
'''
lower = int(input("Enter the lower bound of the range: "))
upper = int(input("Enter the upper bound of the range: "))
for num in range(lower,upper + 1):
    # primes are greater than 1
if num > 1:
for i in range(2,num):
if (num % i) == 0:
break
else:
print(num)
# result
'''
Enter the lower bound of the range: 2
Enter the upper bound of the range: 78
2
3
5
7
11
13
17
19
23
29
31
37
41
43
47
53
59
61
67
71
73
'''
| [
"[email protected]"
] | |
4581172461ca6e272ba66f94b453f7e3627ebeb2 | e617affbb9292944465969a7f7a6a02b1c88f10a | /offer_algri/数组中出现次数超过一半的数字/p.py | 2be851787656e28518166bb8ce3645d671b6563e | [] | no_license | darr/offer_algri | 92904d02c7bbd721aa47b4836f2190c3e9407f24 | 724fd689cfe7bd2f8aaed19ef912eecbf00a2df3 | refs/heads/master | 2020-03-25T04:18:40.491916 | 2018-09-07T08:52:39 | 2018-09-07T08:52:39 | 143,388,188 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 934 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#####################################
# File name : p.py
# Create date : 2018-07-23 08:49
# Modified date : 2018-07-23 13:04
# Author : DARREN
# Describe : not set
# Email : [email protected]
#####################################
class Solution:
#run:32ms memorry:5624k
def MoreThanHalfNum_Solution(self,numbers):
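        # Boyer-Moore majority vote: keep one candidate and a counter; a matching
        # element increments the counter, a mismatch decrements it, and when the
        # counter reaches zero the next element becomes the new candidate.
        # A final counting pass verifies the candidate really occurs more than
        # len(numbers) / 2 times, otherwise 0 is returned.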
lenN = len(numbers)
if numbers == None or lenN <= 0:
return 0
num = numbers[0]
times =1
for i in range(1,lenN):
            if times == 0:
                num = numbers[i]
                times = 1
elif numbers[i] == num:
times +=1
else:
times -=1
count = 0
for i in range(lenN):
if numbers[i] == num:
count +=1
if count > lenN/2:
return num
return 0 | [
"[email protected]"
] | |
6ee3ad7ed2666cd3c2c2e7bb9947e9d2975cadf8 | ef243d91a1826b490e935fa3f3e6c29c3cc547d0 | /PyQt5/QtSensors/QAltimeterFilter.py | 7f6d16487b33ff0384829272d015bff8aad4003c | [] | no_license | VentiFang/Python_local_module | 6b3d0b22399e817057dfd15d647a14bb1e41980e | c44f55379eca2818b29732c2815480ee755ae3fb | refs/heads/master | 2020-11-29T11:24:54.932967 | 2019-12-25T12:57:14 | 2019-12-25T12:57:14 | 230,101,875 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 668 | py | # encoding: utf-8
# module PyQt5.QtSensors
# from F:\Python\Python36\lib\site-packages\PyQt5\QtSensors.pyd
# by generator 1.147
# no doc
# imports
import PyQt5.QtCore as __PyQt5_QtCore
import sip as __sip
from .QSensorFilter import QSensorFilter
class QAltimeterFilter(QSensorFilter):
"""
QAltimeterFilter()
QAltimeterFilter(QAltimeterFilter)
"""
def filter(self, QAltimeterReading): # real signature unknown; restored from __doc__
""" filter(self, QAltimeterReading) -> bool """
return False
def __init__(self, QAltimeterFilter=None): # real signature unknown; restored from __doc__ with multiple overloads
pass
| [
"[email protected]"
] | |
76a9acaf06ed647f5329818ed4650ab73952cbb8 | 7246faf9a222269ce2612613f58dc5ff19091f10 | /leetcode/1662.py | d793883d41ed3cb54e390d971c41a4c5ca4f7ffd | [] | no_license | gusdn3477/Algorithm_Study | 87a2eb72a8488d9263a86db70dadc7944434d41d | 3fefe1dcb40122157845ffc542f41cb097711cc8 | refs/heads/main | 2023-08-30T12:18:21.412945 | 2021-09-28T13:00:11 | 2021-09-28T13:00:11 | 308,364,230 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 236 | py | class Solution:
def arrayStringsAreEqual(self, word1: List[str], word2: List[str]) -> bool:
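        # concatenate each list of string pieces and compare the resulting strings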
a = ''
b = ''
for i in word1:
a += i
for i in word2:
b += i
return a == b | [
"[email protected]"
] | |
2b8e0f7cc47c50698ff14eb2bb688b25f20ccf77 | 76d4430567b68151df1855f45ea4408f9bebe025 | /test/functional/wallet_importmulti.py | 9b417874ed72b862538be622fc4351394626492d | [
"MIT"
] | permissive | MicroBitcoinOrg/MicroBitcoin | f761b2ff04bdcb650d7c0ddbef431ef95cd69541 | db7911968445606bf8899903322d5d818d393d88 | refs/heads/master | 2022-12-27T10:04:21.040945 | 2022-12-18T05:05:17 | 2022-12-18T05:05:17 | 132,959,214 | 21 | 33 | MIT | 2020-06-12T04:38:45 | 2018-05-10T22:07:51 | C++ | UTF-8 | Python | false | false | 44,483 | py | #!/usr/bin/env python3
# Copyright (c) 2014-2020 The MicroBitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the importmulti RPC.
Test importmulti by generating keys on node0, importing the scriptPubKeys and
addresses on node1 and then testing the address info for the different address
variants.
- `get_key()` and `get_multisig()` are called to generate keys on node0 and
return the privkeys, pubkeys and all variants of scriptPubKey and address.
- `test_importmulti()` is called to send an importmulti call to node1, test
success, and (if unsuccessful) test the error code and error message returned.
- `test_address()` is called to call getaddressinfo for an address on node1
and test the values returned."""
from test_framework.blocktools import COINBASE_MATURITY
from test_framework.script import (
CScript,
OP_NOP,
)
from test_framework.test_framework import MicroBitcoinTestFramework
from test_framework.descriptors import descsum_create
from test_framework.util import (
assert_equal,
assert_greater_than,
assert_raises_rpc_error,
)
from test_framework.wallet_util import (
get_key,
get_multisig,
test_address,
)
class ImportMultiTest(MicroBitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.extra_args = [["-addresstype=legacy"], ["-addresstype=legacy"]]
self.setup_clean_chain = True
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def setup_network(self):
self.setup_nodes()
def test_importmulti(self, req, success, error_code=None, error_message=None, warnings=None):
"""Run importmulti and assert success"""
if warnings is None:
warnings = []
result = self.nodes[1].importmulti([req])
observed_warnings = []
if 'warnings' in result[0]:
observed_warnings = result[0]['warnings']
assert_equal("\n".join(sorted(warnings)), "\n".join(sorted(observed_warnings)))
assert_equal(result[0]['success'], success)
if error_code is not None:
assert_equal(result[0]['error']['code'], error_code)
assert_equal(result[0]['error']['message'], error_message)
def run_test(self):
self.log.info("Mining blocks...")
self.nodes[0].generate(1)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
self.nodes[1].syncwithvalidationinterfacequeue() # Sync the timestamp to the wallet, so that importmulti works
node0_address1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
# Check only one address
assert_equal(node0_address1['ismine'], True)
# Node 1 sync test
assert_equal(self.nodes[1].getblockcount(), 1)
# Address Test - before import
address_info = self.nodes[1].getaddressinfo(node0_address1['address'])
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], False)
# RPC importmulti -----------------------------------------------
# MicroBitcoin Address (implicit non-internal)
self.log.info("Should import an address")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
"timestamp": "now"},
success=True)
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=True,
ismine=False,
timestamp=timestamp,
ischange=False)
watchonly_address = key.p2pkh_addr
watchonly_timestamp = timestamp
self.log.info("Should not import an invalid address")
self.test_importmulti({"scriptPubKey": {"address": "not valid address"},
"timestamp": "now"},
success=False,
error_code=-5,
error_message='Invalid address \"not valid address\"')
# ScriptPubKey + internal
self.log.info("Should import a scriptPubKey with internal flag")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": key.p2pkh_script,
"timestamp": "now",
"internal": True},
success=True)
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=True,
ismine=False,
timestamp=timestamp,
ischange=True)
# ScriptPubKey + internal + label
self.log.info("Should not allow a label to be specified when internal is true")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": key.p2pkh_script,
"timestamp": "now",
"internal": True,
"label": "Unsuccessful labelling for internal addresses"},
success=False,
error_code=-8,
error_message='Internal addresses should not have a label')
# Nonstandard scriptPubKey + !internal
self.log.info("Should not import a nonstandard scriptPubKey without internal flag")
nonstandardScriptPubKey = key.p2pkh_script + CScript([OP_NOP]).hex()
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": nonstandardScriptPubKey,
"timestamp": "now"},
success=False,
error_code=-8,
error_message='Internal must be set to true for nonstandard scriptPubKey imports.')
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=False,
ismine=False,
timestamp=None)
# Address + Public key + !Internal(explicit)
self.log.info("Should import an address with public key")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
"timestamp": "now",
"pubkeys": [key.pubkey],
"internal": False},
success=True,
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=True,
ismine=False,
timestamp=timestamp)
# ScriptPubKey + Public key + internal
self.log.info("Should import a scriptPubKey with internal and with public key")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": key.p2pkh_script,
"timestamp": "now",
"pubkeys": [key.pubkey],
"internal": True},
success=True,
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=True,
ismine=False,
timestamp=timestamp)
# Nonstandard scriptPubKey + Public key + !internal
self.log.info("Should not import a nonstandard scriptPubKey without internal and with public key")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": nonstandardScriptPubKey,
"timestamp": "now",
"pubkeys": [key.pubkey]},
success=False,
error_code=-8,
error_message='Internal must be set to true for nonstandard scriptPubKey imports.')
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=False,
ismine=False,
timestamp=None)
# Address + Private key + !watchonly
self.log.info("Should import an address with private key")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
"timestamp": "now",
"keys": [key.privkey]},
success=True)
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=False,
ismine=True,
timestamp=timestamp)
self.log.info("Should not import an address with private key if is already imported")
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
"timestamp": "now",
"keys": [key.privkey]},
success=False,
error_code=-4,
error_message='The wallet already contains the private key for this address or script ("' + key.p2pkh_script + '")')
# Address + Private key + watchonly
self.log.info("Should import an address with private key and with watchonly")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
"timestamp": "now",
"keys": [key.privkey],
"watchonly": True},
success=True,
warnings=["All private keys are provided, outputs will be considered spendable. If this is intentional, do not specify the watchonly flag."])
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=False,
ismine=True,
timestamp=timestamp)
# ScriptPubKey + Private key + internal
self.log.info("Should import a scriptPubKey with internal and with private key")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": key.p2pkh_script,
"timestamp": "now",
"keys": [key.privkey],
"internal": True},
success=True)
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=False,
ismine=True,
timestamp=timestamp)
# Nonstandard scriptPubKey + Private key + !internal
self.log.info("Should not import a nonstandard scriptPubKey without internal and with private key")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": nonstandardScriptPubKey,
"timestamp": "now",
"keys": [key.privkey]},
success=False,
error_code=-8,
error_message='Internal must be set to true for nonstandard scriptPubKey imports.')
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=False,
ismine=False,
timestamp=None)
# P2SH address
multisig = get_multisig(self.nodes[0])
self.nodes[1].generate(COINBASE_MATURITY)
self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
self.nodes[1].syncwithvalidationinterfacequeue()
self.log.info("Should import a p2sh")
self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
"timestamp": "now"},
success=True)
test_address(self.nodes[1],
multisig.p2sh_addr,
isscript=True,
iswatchonly=True,
timestamp=timestamp)
p2shunspent = self.nodes[1].listunspent(0, 999999, [multisig.p2sh_addr])[0]
assert_equal(p2shunspent['spendable'], False)
assert_equal(p2shunspent['solvable'], False)
# P2SH + Redeem script
multisig = get_multisig(self.nodes[0])
self.nodes[1].generate(COINBASE_MATURITY)
self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
self.nodes[1].syncwithvalidationinterfacequeue()
self.log.info("Should import a p2sh with respective redeem script")
self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
"timestamp": "now",
"redeemscript": multisig.redeem_script},
success=True,
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
multisig.p2sh_addr, timestamp=timestamp, iswatchonly=True, ismine=False, solvable=True)
p2shunspent = self.nodes[1].listunspent(0, 999999, [multisig.p2sh_addr])[0]
assert_equal(p2shunspent['spendable'], False)
assert_equal(p2shunspent['solvable'], True)
# P2SH + Redeem script + Private Keys + !Watchonly
multisig = get_multisig(self.nodes[0])
self.nodes[1].generate(COINBASE_MATURITY)
self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
self.nodes[1].syncwithvalidationinterfacequeue()
self.log.info("Should import a p2sh with respective redeem script and private keys")
self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
"timestamp": "now",
"redeemscript": multisig.redeem_script,
"keys": multisig.privkeys[0:2]},
success=True,
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
multisig.p2sh_addr,
timestamp=timestamp,
ismine=False,
iswatchonly=True,
solvable=True)
p2shunspent = self.nodes[1].listunspent(0, 999999, [multisig.p2sh_addr])[0]
assert_equal(p2shunspent['spendable'], False)
assert_equal(p2shunspent['solvable'], True)
# P2SH + Redeem script + Private Keys + Watchonly
multisig = get_multisig(self.nodes[0])
self.nodes[1].generate(COINBASE_MATURITY)
self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
self.nodes[1].syncwithvalidationinterfacequeue()
self.log.info("Should import a p2sh with respective redeem script and private keys")
self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
"timestamp": "now",
"redeemscript": multisig.redeem_script,
"keys": multisig.privkeys[0:2],
"watchonly": True},
success=True)
test_address(self.nodes[1],
multisig.p2sh_addr,
iswatchonly=True,
ismine=False,
solvable=True,
timestamp=timestamp)
# Address + Public key + !Internal + Wrong pubkey
        self.log.info("Should import an address with a wrong public key as non-solvable")
key = get_key(self.nodes[0])
wrong_key = get_key(self.nodes[0]).pubkey
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
"timestamp": "now",
"pubkeys": [wrong_key]},
success=True,
warnings=["Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys, witnessscript, or redeemscript.", "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=True,
ismine=False,
solvable=False,
timestamp=timestamp)
# ScriptPubKey + Public key + internal + Wrong pubkey
self.log.info("Should import a scriptPubKey with internal and with a wrong public key as non-solvable")
key = get_key(self.nodes[0])
wrong_key = get_key(self.nodes[0]).pubkey
self.test_importmulti({"scriptPubKey": key.p2pkh_script,
"timestamp": "now",
"pubkeys": [wrong_key],
"internal": True},
success=True,
warnings=["Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys, witnessscript, or redeemscript.", "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=True,
ismine=False,
solvable=False,
timestamp=timestamp)
# Address + Private key + !watchonly + Wrong private key
self.log.info("Should import an address with a wrong private key as non-solvable")
key = get_key(self.nodes[0])
wrong_privkey = get_key(self.nodes[0]).privkey
self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
"timestamp": "now",
"keys": [wrong_privkey]},
success=True,
warnings=["Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys, witnessscript, or redeemscript.", "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=True,
ismine=False,
solvable=False,
timestamp=timestamp)
# ScriptPubKey + Private key + internal + Wrong private key
self.log.info("Should import a scriptPubKey with internal and with a wrong private key as non-solvable")
key = get_key(self.nodes[0])
wrong_privkey = get_key(self.nodes[0]).privkey
self.test_importmulti({"scriptPubKey": key.p2pkh_script,
"timestamp": "now",
"keys": [wrong_privkey],
"internal": True},
success=True,
warnings=["Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys, witnessscript, or redeemscript.", "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
key.p2pkh_addr,
iswatchonly=True,
ismine=False,
solvable=False,
timestamp=timestamp)
# Importing existing watch only address with new timestamp should replace saved timestamp.
assert_greater_than(timestamp, watchonly_timestamp)
self.log.info("Should replace previously saved watch only timestamp.")
self.test_importmulti({"scriptPubKey": {"address": watchonly_address},
"timestamp": "now"},
success=True)
test_address(self.nodes[1],
watchonly_address,
iswatchonly=True,
ismine=False,
timestamp=timestamp)
watchonly_timestamp = timestamp
# restart nodes to check for proper serialization/deserialization of watch only address
self.stop_nodes()
self.start_nodes()
test_address(self.nodes[1],
watchonly_address,
iswatchonly=True,
ismine=False,
timestamp=watchonly_timestamp)
# Bad or missing timestamps
self.log.info("Should throw on invalid or missing timestamp values")
assert_raises_rpc_error(-3, 'Missing required timestamp field for key',
self.nodes[1].importmulti, [{"scriptPubKey": key.p2pkh_script}])
assert_raises_rpc_error(-3, 'Expected number or "now" timestamp value for key. got type string',
self.nodes[1].importmulti, [{
"scriptPubKey": key.p2pkh_script,
"timestamp": ""
}])
# Import P2WPKH address as watch only
self.log.info("Should import a P2WPKH address as watch only")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": {"address": key.p2wpkh_addr},
"timestamp": "now"},
success=True)
test_address(self.nodes[1],
key.p2wpkh_addr,
iswatchonly=True,
solvable=False)
# Import P2WPKH address with public key but no private key
self.log.info("Should import a P2WPKH address and public key as solvable but not spendable")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": {"address": key.p2wpkh_addr},
"timestamp": "now",
"pubkeys": [key.pubkey]},
success=True,
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
key.p2wpkh_addr,
ismine=False,
solvable=True)
# Import P2WPKH address with key and check it is spendable
self.log.info("Should import a P2WPKH address with key")
key = get_key(self.nodes[0])
self.test_importmulti({"scriptPubKey": {"address": key.p2wpkh_addr},
"timestamp": "now",
"keys": [key.privkey]},
success=True)
test_address(self.nodes[1],
key.p2wpkh_addr,
iswatchonly=False,
ismine=True)
# P2WSH multisig address without scripts or keys
multisig = get_multisig(self.nodes[0])
self.log.info("Should import a p2wsh multisig as watch only without respective redeem script and private keys")
self.test_importmulti({"scriptPubKey": {"address": multisig.p2wsh_addr},
"timestamp": "now"},
success=True)
test_address(self.nodes[1],
multisig.p2sh_addr,
solvable=False)
# Same P2WSH multisig address as above, but now with witnessscript + private keys
self.log.info("Should import a p2wsh with respective witness script and private keys")
self.test_importmulti({"scriptPubKey": {"address": multisig.p2wsh_addr},
"timestamp": "now",
"witnessscript": multisig.redeem_script,
"keys": multisig.privkeys},
success=True)
test_address(self.nodes[1],
multisig.p2sh_addr,
solvable=True,
ismine=True,
sigsrequired=2)
# P2SH-P2WPKH address with no redeemscript or public or private key
key = get_key(self.nodes[0])
self.log.info("Should import a p2sh-p2wpkh without redeem script or keys")
self.test_importmulti({"scriptPubKey": {"address": key.p2sh_p2wpkh_addr},
"timestamp": "now"},
success=True)
test_address(self.nodes[1],
key.p2sh_p2wpkh_addr,
solvable=False,
ismine=False)
# P2SH-P2WPKH address + redeemscript + public key with no private key
self.log.info("Should import a p2sh-p2wpkh with respective redeem script and pubkey as solvable")
self.test_importmulti({"scriptPubKey": {"address": key.p2sh_p2wpkh_addr},
"timestamp": "now",
"redeemscript": key.p2sh_p2wpkh_redeem_script,
"pubkeys": [key.pubkey]},
success=True,
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
key.p2sh_p2wpkh_addr,
solvable=True,
ismine=False)
# P2SH-P2WPKH address + redeemscript + private key
key = get_key(self.nodes[0])
self.log.info("Should import a p2sh-p2wpkh with respective redeem script and private keys")
self.test_importmulti({"scriptPubKey": {"address": key.p2sh_p2wpkh_addr},
"timestamp": "now",
"redeemscript": key.p2sh_p2wpkh_redeem_script,
"keys": [key.privkey]},
success=True)
test_address(self.nodes[1],
key.p2sh_p2wpkh_addr,
solvable=True,
ismine=True)
# P2SH-P2WSH multisig + redeemscript with no private key
multisig = get_multisig(self.nodes[0])
self.log.info("Should import a p2sh-p2wsh with respective redeem script but no private key")
self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_p2wsh_addr},
"timestamp": "now",
"redeemscript": multisig.p2wsh_script,
"witnessscript": multisig.redeem_script},
success=True,
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
multisig.p2sh_p2wsh_addr,
solvable=True,
ismine=False)
# Test importing of a P2SH-P2WPKH address via descriptor + private key
key = get_key(self.nodes[0])
self.log.info("Should not import a p2sh-p2wpkh address from descriptor without checksum and private key")
self.test_importmulti({"desc": "sh(wpkh(" + key.pubkey + "))",
"timestamp": "now",
"label": "Unsuccessful P2SH-P2WPKH descriptor import",
"keys": [key.privkey]},
success=False,
error_code=-5,
error_message="Missing checksum")
# Test importing of a P2SH-P2WPKH address via descriptor + private key
key = get_key(self.nodes[0])
p2sh_p2wpkh_label = "Successful P2SH-P2WPKH descriptor import"
self.log.info("Should import a p2sh-p2wpkh address from descriptor and private key")
self.test_importmulti({"desc": descsum_create("sh(wpkh(" + key.pubkey + "))"),
"timestamp": "now",
"label": p2sh_p2wpkh_label,
"keys": [key.privkey]},
success=True)
test_address(self.nodes[1],
key.p2sh_p2wpkh_addr,
solvable=True,
ismine=True,
labels=[p2sh_p2wpkh_label])
# Test ranged descriptor fails if range is not specified
xpriv = "tprv8ZgxMBicQKsPeuVhWwi6wuMQGfPKi9Li5GtX35jVNknACgqe3CY4g5xgkfDDJcmtF7o1QnxWDRYw4H5P26PXq7sbcUkEqeR4fg3Kxp2tigg"
addresses = ["2N7yv4p8G8yEaPddJxY41kPihnWvs39qCMf", "2MsHxyb2JS3pAySeNUsJ7mNnurtpeenDzLA"] # hdkeypath=m/0'/0'/0' and 1'
addresses += ["bcrt1qrd3n235cj2czsfmsuvqqpr3lu6lg0ju7scl8gn", "bcrt1qfqeppuvj0ww98r6qghmdkj70tv8qpchehegrg8"] # wpkh subscripts corresponding to the above addresses
desc = "sh(wpkh(" + xpriv + "/0'/0'/*'" + "))"
self.log.info("Ranged descriptor import should fail without a specified range")
self.test_importmulti({"desc": descsum_create(desc),
"timestamp": "now"},
success=False,
error_code=-8,
error_message='Descriptor is ranged, please specify the range')
# Test importing of a ranged descriptor with xpriv
self.log.info("Should import the ranged descriptor with specified range as solvable")
self.test_importmulti({"desc": descsum_create(desc),
"timestamp": "now",
"range": 1},
success=True)
for address in addresses:
test_address(self.nodes[1],
address,
solvable=True,
ismine=True)
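        # Invalid "range" values for a ranged descriptor import should all be rejected with RPC error -8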
self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now", "range": -1},
success=False, error_code=-8, error_message='End of range is too high')
self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now", "range": [-1, 10]},
success=False, error_code=-8, error_message='Range should be greater or equal than 0')
self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now", "range": [(2 << 31 + 1) - 1000000, (2 << 31 + 1)]},
success=False, error_code=-8, error_message='End of range is too high')
self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now", "range": [2, 1]},
success=False, error_code=-8, error_message='Range specified as [begin,end] must not have begin after end')
self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now", "range": [0, 1000001]},
success=False, error_code=-8, error_message='Range is too large')
# Test importing a descriptor containing a WIF private key
wif_priv = "cTe1f5rdT8A8DFgVWTjyPwACsDPJM9ff4QngFxUixCSvvbg1x6sh"
address = "2MuhcG52uHPknxDgmGPsV18jSHFBnnRgjPg"
desc = "sh(wpkh(" + wif_priv + "))"
self.log.info("Should import a descriptor with a WIF private key as spendable")
self.test_importmulti({"desc": descsum_create(desc),
"timestamp": "now"},
success=True)
test_address(self.nodes[1],
address,
solvable=True,
ismine=True)
# dump the private key to ensure it matches what was imported
privkey = self.nodes[1].dumpprivkey(address)
assert_equal(privkey, wif_priv)
# Test importing of a P2PKH address via descriptor
key = get_key(self.nodes[0])
p2pkh_label = "P2PKH descriptor import"
self.log.info("Should import a p2pkh address from descriptor")
self.test_importmulti({"desc": descsum_create("pkh(" + key.pubkey + ")"),
"timestamp": "now",
"label": p2pkh_label},
True,
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
test_address(self.nodes[1],
key.p2pkh_addr,
solvable=True,
ismine=False,
labels=[p2pkh_label])
# Test import fails if both desc and scriptPubKey are provided
key = get_key(self.nodes[0])
self.log.info("Import should fail if both scriptPubKey and desc are provided")
self.test_importmulti({"desc": descsum_create("pkh(" + key.pubkey + ")"),
"scriptPubKey": {"address": key.p2pkh_addr},
"timestamp": "now"},
success=False,
error_code=-8,
error_message='Both a descriptor and a scriptPubKey should not be provided.')
# Test import fails if neither desc nor scriptPubKey are present
key = get_key(self.nodes[0])
self.log.info("Import should fail if neither a descriptor nor a scriptPubKey are provided")
self.test_importmulti({"timestamp": "now"},
success=False,
error_code=-8,
error_message='Either a descriptor or scriptPubKey must be provided.')
# Test importing of a multisig via descriptor
key1 = get_key(self.nodes[0])
key2 = get_key(self.nodes[0])
self.log.info("Should import a 1-of-2 bare multisig from descriptor")
self.test_importmulti({"desc": descsum_create("multi(1," + key1.pubkey + "," + key2.pubkey + ")"),
"timestamp": "now"},
success=True,
warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
self.log.info("Should not treat individual keys from the imported bare multisig as watchonly")
test_address(self.nodes[1],
key1.p2pkh_addr,
ismine=False,
iswatchonly=False)
# Import pubkeys with key origin info
self.log.info("Addresses should have hd keypath and master key id after import with key origin")
pub_addr = self.nodes[1].getnewaddress()
pub_addr = self.nodes[1].getnewaddress(address_type="bech32")
info = self.nodes[1].getaddressinfo(pub_addr)
pub = info['pubkey']
pub_keypath = info['hdkeypath']
pub_fpr = info['hdmasterfingerprint']
result = self.nodes[0].importmulti(
[{
'desc' : descsum_create("wpkh([" + pub_fpr + pub_keypath[1:] +"]" + pub + ")"),
"timestamp": "now",
}]
)
assert result[0]['success']
pub_import_info = self.nodes[0].getaddressinfo(pub_addr)
assert_equal(pub_import_info['hdmasterfingerprint'], pub_fpr)
assert_equal(pub_import_info['pubkey'], pub)
assert_equal(pub_import_info['hdkeypath'], pub_keypath)
# Import privkeys with key origin info
priv_addr = self.nodes[1].getnewaddress(address_type="bech32")
info = self.nodes[1].getaddressinfo(priv_addr)
priv = self.nodes[1].dumpprivkey(priv_addr)
priv_keypath = info['hdkeypath']
priv_fpr = info['hdmasterfingerprint']
result = self.nodes[0].importmulti(
[{
'desc' : descsum_create("wpkh([" + priv_fpr + priv_keypath[1:] + "]" + priv + ")"),
"timestamp": "now",
}]
)
assert result[0]['success']
priv_import_info = self.nodes[0].getaddressinfo(priv_addr)
assert_equal(priv_import_info['hdmasterfingerprint'], priv_fpr)
assert_equal(priv_import_info['hdkeypath'], priv_keypath)
# Make sure the key origin info are still there after a restart
self.stop_nodes()
self.start_nodes()
import_info = self.nodes[0].getaddressinfo(pub_addr)
assert_equal(import_info['hdmasterfingerprint'], pub_fpr)
assert_equal(import_info['hdkeypath'], pub_keypath)
import_info = self.nodes[0].getaddressinfo(priv_addr)
assert_equal(import_info['hdmasterfingerprint'], priv_fpr)
assert_equal(import_info['hdkeypath'], priv_keypath)
# Check legacy import does not import key origin info
self.log.info("Legacy imports don't have key origin info")
pub_addr = self.nodes[1].getnewaddress()
info = self.nodes[1].getaddressinfo(pub_addr)
pub = info['pubkey']
result = self.nodes[0].importmulti(
[{
'scriptPubKey': {'address': pub_addr},
'pubkeys': [pub],
"timestamp": "now",
}]
)
assert result[0]['success']
pub_import_info = self.nodes[0].getaddressinfo(pub_addr)
assert_equal(pub_import_info['pubkey'], pub)
assert 'hdmasterfingerprint' not in pub_import_info
assert 'hdkeypath' not in pub_import_info
# Bech32m addresses and descriptors cannot be imported
self.log.info("Bech32m addresses and descriptors cannot be imported")
self.test_importmulti(
{
"scriptPubKey": {"address": "bcrt1p0xlxvlhemja6c4dqv22uapctqupfhlxm9h8z3k2e72q4k9hcz7vqc8gma6"},
"timestamp": "now",
},
success=False,
error_code=-5,
error_message="Bech32m addresses cannot be imported into legacy wallets",
)
self.test_importmulti(
{
"desc": descsum_create("tr({})".format(pub)),
"timestamp": "now",
},
success=False,
error_code=-5,
error_message="Bech32m descriptors cannot be imported into legacy wallets",
)
# Import some public keys to the keypool of a no privkey wallet
self.log.info("Adding pubkey to keypool of disableprivkey wallet")
self.nodes[1].createwallet(wallet_name="noprivkeys", disable_private_keys=True)
wrpc = self.nodes[1].get_wallet_rpc("noprivkeys")
addr1 = self.nodes[0].getnewaddress(address_type="bech32")
addr2 = self.nodes[0].getnewaddress(address_type="bech32")
pub1 = self.nodes[0].getaddressinfo(addr1)['pubkey']
pub2 = self.nodes[0].getaddressinfo(addr2)['pubkey']
result = wrpc.importmulti(
[{
'desc': descsum_create('wpkh(' + pub1 + ')'),
'keypool': True,
"timestamp": "now",
},
{
'desc': descsum_create('wpkh(' + pub2 + ')'),
'keypool': True,
"timestamp": "now",
}]
)
assert result[0]['success']
assert result[1]['success']
assert_equal(wrpc.getwalletinfo()["keypoolsize"], 2)
newaddr1 = wrpc.getnewaddress(address_type="bech32")
assert_equal(addr1, newaddr1)
newaddr2 = wrpc.getnewaddress(address_type="bech32")
assert_equal(addr2, newaddr2)
# Import some public keys to the internal keypool of a no privkey wallet
self.log.info("Adding pubkey to internal keypool of disableprivkey wallet")
addr1 = self.nodes[0].getnewaddress(address_type="bech32")
addr2 = self.nodes[0].getnewaddress(address_type="bech32")
pub1 = self.nodes[0].getaddressinfo(addr1)['pubkey']
pub2 = self.nodes[0].getaddressinfo(addr2)['pubkey']
result = wrpc.importmulti(
[{
'desc': descsum_create('wpkh(' + pub1 + ')'),
'keypool': True,
'internal': True,
"timestamp": "now",
},
{
'desc': descsum_create('wpkh(' + pub2 + ')'),
'keypool': True,
'internal': True,
"timestamp": "now",
}]
)
assert result[0]['success']
assert result[1]['success']
assert_equal(wrpc.getwalletinfo()["keypoolsize_hd_internal"], 2)
newaddr1 = wrpc.getrawchangeaddress(address_type="bech32")
assert_equal(addr1, newaddr1)
newaddr2 = wrpc.getrawchangeaddress(address_type="bech32")
assert_equal(addr2, newaddr2)
# Import a multisig and make sure the keys don't go into the keypool
self.log.info('Imported scripts with pubkeys should not have their pubkeys go into the keypool')
addr1 = self.nodes[0].getnewaddress(address_type="bech32")
addr2 = self.nodes[0].getnewaddress(address_type="bech32")
pub1 = self.nodes[0].getaddressinfo(addr1)['pubkey']
pub2 = self.nodes[0].getaddressinfo(addr2)['pubkey']
result = wrpc.importmulti(
[{
'desc': descsum_create('wsh(multi(2,' + pub1 + ',' + pub2 + '))'),
'keypool': True,
"timestamp": "now",
}]
)
assert result[0]['success']
assert_equal(wrpc.getwalletinfo()["keypoolsize"], 0)
# Cannot import those pubkeys to keypool of wallet with privkeys
self.log.info("Pubkeys cannot be added to the keypool of a wallet with private keys")
wrpc = self.nodes[1].get_wallet_rpc(self.default_wallet_name)
assert wrpc.getwalletinfo()['private_keys_enabled']
result = wrpc.importmulti(
[{
'desc': descsum_create('wpkh(' + pub1 + ')'),
'keypool': True,
"timestamp": "now",
}]
)
assert_equal(result[0]['error']['code'], -8)
assert_equal(result[0]['error']['message'], "Keys can only be imported to the keypool when private keys are disabled")
# Make sure ranged imports import keys in order
self.log.info('Key ranges should be imported in order')
wrpc = self.nodes[1].get_wallet_rpc("noprivkeys")
assert_equal(wrpc.getwalletinfo()["keypoolsize"], 0)
assert_equal(wrpc.getwalletinfo()["private_keys_enabled"], False)
xpub = "tpubDAXcJ7s7ZwicqjprRaEWdPoHKrCS215qxGYxpusRLLmJuT69ZSicuGdSfyvyKpvUNYBW1s2U3NSrT6vrCYB9e6nZUEvrqnwXPF8ArTCRXMY"
addresses = [
'bcrt1qtmp74ayg7p24uslctssvjm06q5phz4yrxucgnv', # m/0'/0'/0
'bcrt1q8vprchan07gzagd5e6v9wd7azyucksq2xc76k8', # m/0'/0'/1
'bcrt1qtuqdtha7zmqgcrr26n2rqxztv5y8rafjp9lulu', # m/0'/0'/2
'bcrt1qau64272ymawq26t90md6an0ps99qkrse58m640', # m/0'/0'/3
'bcrt1qsg97266hrh6cpmutqen8s4s962aryy77jp0fg0', # m/0'/0'/4
]
result = wrpc.importmulti(
[{
'desc': descsum_create('wpkh([80002067/0h/0h]' + xpub + '/*)'),
'keypool': True,
'timestamp': 'now',
'range' : [0, 4],
}]
)
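        # getnewaddress should now hand out the imported keys in derivation order (m/0'/0'/0 ... m/0'/0'/4)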
for i in range(0, 5):
addr = wrpc.getnewaddress('', 'bech32')
assert_equal(addr, addresses[i])
if __name__ == '__main__':
ImportMultiTest().main()
| [
"[email protected]"
] | |
3843f381b3d6a2e009ea019d635947124fc99156 | fc05249c73f910a4d36f471eb91e05256a64cdfe | /roms/make_bry_phd16.py | 991513bd9be6ce35aac02cc3ffdedfe61ec96daf | [] | no_license | rsoutelino/sandbox | f51b37619cd7a61a0446d83e2e1c2af58f14802a | 814d215582d8e14514ba93daf1b41f6d118b906c | refs/heads/master | 2023-03-02T12:05:18.703732 | 2023-03-02T01:58:15 | 2023-03-02T01:58:15 | 28,204,889 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 29,082 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Creates boundary conditions netCDF file for ROMS
#
# Rafael Soutelino - [email protected]
#
# Using some material from Matlab scripts by
# "Copyright (c) 2003 UCLA - Patrick Marchesiello"
#
# Last modification: Aug, 2010
#####################################################################
print ' \n' + '==> ' + ' IMPORTING MODULES ...\n' + ' '
# IMPORTING MODULES #################################################
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import delaunay
from matplotlib.mlab import griddata
from mpl_toolkits.basemap import Basemap
import datetime as dt
import netCDF4
import scipy.io as sp
# classes and functions for the computations
from roms_setup import run_setup, zlev, ztosigma
#####################################################################
# SCRIPT START ######################################################
# Basic Settings:
filenamestr = '_bry.nc'
filetypestr = 'ROMS Boundary Conditions file'
# READING PREVIOUSLY BUILT RELEVANT FILES: ###########################
# metadata ascii file
# OA-created netcdf initial T, S file
# grid netcdf file
print ' \n' + '==> ' + ' READING ASCII METADATA FILE ...\n' + ' '
run = run_setup('../phd16_run.setup')
print ' \n' + '==> ' + ' READING FEATURE MODEL FIELD ...\n' + ' '
datafile = sp.loadmat(run.datadir + run.ini_filename)
# assigning some variables from data file
Zlev = datafile['z'][:].ravel(); Zlev = np.abs(Zlev); Zlev = -Zlev
N1 = Zlev.size
lon = datafile['lon'][:]
lat = datafile['lat'][:]
temp = datafile['temp'][:]
salt = datafile['salt'][:]
u = datafile['u'][:]
v = datafile['v'][:]
ubar = datafile['ubar'][:]
vbar = datafile['vbar'][:]
zeta = datafile['ssh'][:]
print ' \n' + '==> ' + ' READING GRID NETCDF FILE ...\n' + ' '
grdfile = netCDF4.Dataset(run.rundir + run.run_name + '_grd.nc')
# assigning some variables from grid file
rlon2 = grdfile.variables['lon_rho'][:]
rlat2 = grdfile.variables['lat_rho'][:]
vlon2 = grdfile.variables['lon_v'][:]
vlat2 = grdfile.variables['lat_v'][:]
ulon2 = grdfile.variables['lon_u'][:]
ulat2 = grdfile.variables['lat_u'][:]
angle = grdfile.variables['angle'][:]
h2 = grdfile.variables['h'][:]
rmask2 = grdfile.variables['mask_rho'][:]
# DOING COMPUTATIONS TO INTERPOLATE THE FIELDS TO ROMS GRID #########
# Cap the bathymetry at 5000 m
f = np.where(h2 >= 5000)
h2[f] = 5000; del f
N = int(run.klevels)
Jrho, Irho = rlon2.shape
Mr2, Lr2 = rlon2.shape
Lu2 = Lr2-1; Mu2 = Mr2
Lv2 = Lr2; Mv2 = Mr2-1
cosa = np.cos(angle); sina = np.sin(angle); del angle
rmask2 = np.ma.masked_where(rmask2 == 0, rmask2)
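# Interpolate the bathymetry onto u- and v-points and build the sigma-level depth arrays
# (Zsig, ZsigU, ZsigV) that the z-to-sigma interpolation below relies on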
hu = griddata(rlon2.ravel(), rlat2.ravel(), h2.ravel(), ulon2, ulat2)
hv = griddata(rlon2.ravel(), rlat2.ravel(), h2.ravel(), vlon2, vlat2)
[Zsig,dZsig] = zlev(h2,run.theta_s,run.theta_b,run.tcline,run.klevels)
[ZsigU,dZsigU] = zlev(hu,run.theta_s,run.theta_b,run.tcline,run.klevels)
[ZsigV,dZsigV] = zlev(hv,run.theta_s,run.theta_b,run.tcline,run.klevels)
### Interpolating T, S to ROMS 3D S-COORD grid ###############################
lN = run.klevels
lt = np.size(run.time)
ZETA = np.zeros([lt, Jrho, Irho])
UBAR = np.zeros([lt, Mu2, Lu2])
VBAR = np.zeros([lt, Mv2, Lv2])
TEMP = np.zeros([lt, N, Mv2, Lv2])
SALT = np.zeros([lt, N, Mv2, Lv2])
U = np.zeros([lt, N, Mu2, Lu2])
V = np.zeros([lt, N, Mv2, Lv2])
z2 = np.zeros([N1, Jrho, Irho])
Zlev2 = np.zeros([N1, 1])
print ' \n' + '==> ' + ' INTERPOLATING TEMPERATURE ...\n' + ' '
for k in np.arange(0, N1, 1):
print 'TEMP: Z Level = ' + str(-1*Zlev[k]) + ' m'
z1 = np.squeeze(temp[k,:,:])
z2[N1-k-1,:,:] = griddata(lon.ravel(),lat.ravel(),z1.ravel(),rlon2,rlat2)
Zlev2[N1-k-1] = Zlev[k]
print ' \n' + '==> ' + ' INTERPOLATING TEMP FROM Z --> S COORD ...\n' + ' '
TEMP = ztosigma(z2,Zsig,Zlev2); del z1, z2
###
z2 = np.zeros([N1, Jrho, Irho])
print ' \n' + '==> ' + ' INTERPOLATING SALINITY ...\n' + ' '
for k in np.arange(0, N1, 1):
print 'SALT: Z Level = ' + str(-1*Zlev[k]) + ' m'
z1 = np.squeeze(salt[k,:,:])
z2[N1-k-1,:,:] = griddata(lon.ravel(),lat.ravel(),z1.ravel(),rlon2,rlat2)
Zlev2[N1-k-1] = Zlev[k]
print ' \n' + '==> ' + ' INTERPOLATING SALT FROM Z --> S COORD ...\n' + ' '
SALT = ztosigma(z2,Zsig,Zlev2);
###
z2 = np.zeros([N1, Mu2, Lu2])
print ' \n' + '==> ' + ' INTERPOLATING U-velocity ...\n' + ' '
for k in np.arange(0, N1, 1):
print 'U-Vel: Z Level = ' + str(-1*Zlev[k]) + ' m'
z1 = np.squeeze(u[k,:,:])
z2[N1-k-1,:,:] = griddata(lon.ravel(),lat.ravel(),z1.ravel(),ulon2,ulat2)
Zlev2[N1-k-1] = Zlev[k]
print ' \n' + '==> ' + ' INTERPOLATING U-Vel FROM Z --> S COORD ...\n' + ' '
U = ztosigma(z2,ZsigU,Zlev2);
###
z2 = np.zeros([N1, Mv2, Lv2])
print ' \n' + '==> ' + ' INTERPOLATING V-velocity ...\n' + ' '
for k in np.arange(0, N1, 1):
print 'V-Vel: Z Level = ' + str(-1*Zlev[k]) + ' m'
z1 = np.squeeze(v[k,:,:])
z2[N1-k-1,:,:] = griddata(lon.ravel(),lat.ravel(),z1.ravel(),vlon2,vlat2)
Zlev2[N1-k-1] = Zlev[k]
print ' \n' + '==> ' + ' INTERPOLATING V-Vel FROM Z --> S COORD ...\n' + ' '
V = ztosigma(z2,ZsigV,Zlev2);
###
print ' \n' + '==> ' + ' INTERPOLATING UBAR-velocity ...\n' + ' '
UBAR = griddata(lon.ravel(),lat.ravel(),ubar.ravel(),ulon2,ulat2)
print ' \n' + '==> ' + ' INTERPOLATING VBAR-velocity ...\n' + ' '
VBAR = griddata(lon.ravel(),lat.ravel(),vbar.ravel(),vlon2,vlat2)
print ' \n' + '==> ' + ' INTERPOLATING FREE-SURFACE ...\n' + ' '
ZETA = griddata(lon.ravel(),lat.ravel(),zeta.ravel(),rlon2,rlat2)
# WRITING THE NETCDF FILE ####################################################
# Based on "bry_limit.cdl" NETCDF sample structure
# some computings regarding netcdf variables:
t = np.arange(0, run.time);
N = int(run.klevels)
theta_s = run.theta_s
theta_b = run.theta_b
Mp, Lp = h2.shape
L = Lp - 1
M = Mp - 1
Np = N + 1
if run.spherical == 1:
spherical = 'T'
else:
spherical = 'F'
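# Vertical S-coordinate setup: fractional levels at rho-points (sc) and w-points (scw) and the
# corresponding sinh/tanh stretching curves (Cs, Csw) controlled by theta_s/theta_b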
ds = 1.0 / N
lev = np.arange(1, N+1, 1)
sc = -1 + (lev-0.5)*ds
Ptheta = np.sinh(theta_s*sc) / np.sinh(theta_s)
Rtheta = np.tanh( theta_s*(sc+0.5) ) / ( 2* np.tanh(0.5*theta_s) ) - 0.5
Cs = (1-theta_b)*Ptheta + theta_b * Rtheta
scw = np.arange(-1, 0+ds, ds)
Pthetaw = np.sinh( theta_s*scw ) / np.sinh(theta_s)
Rthetaw = np.tanh( theta_s*(scw+0.5) ) / (2*np.tanh(0.5*theta_s)) - 0.5
Csw = (1-theta_b)*Pthetaw + theta_b*Rthetaw
### GETTING SLICES TO PROVIDE EXTERNAL BOUNDARY CONDITIONS ####################
# NORTH #####
# getting the northern slice to use as boundary condition
temp_north = TEMP[:,-1,:]; temp_north.shape = (1, run.klevels, Lp)
salt_north = SALT[:,-1,:]; salt_north.shape = (1, run.klevels, Lp)
#zeta_north = ZETA[:,-1,:]; zeta_north.shape = (1, Lp)
u_north = U[:,-1,:]; u_north.shape = (1, run.klevels, L)
v_north = V[:,-1,:]; v_north.shape = (1, run.klevels, Lp)
ubar_north = UBAR[-1,:]; ubar_north.shape = (1, L)
vbar_north = VBAR[-1,:]; vbar_north.shape = (1, Lp)
zeta_north = ZETA[-1,:]; zeta_north.shape = (1, Lp)
# repeating as many times as the model will run
temp_north = temp_north.repeat(t.size, axis=0)
salt_north = salt_north.repeat(t.size, axis=0)
#zeta_north = zeta_north.repeat(t.size, axis=0)
u_north = u_north.repeat(t.size, axis=0)
v_north = v_north.repeat(t.size, axis=0)
ubar_north = ubar_north.repeat(t.size, axis=0)
vbar_north = vbar_north.repeat(t.size, axis=0)
zeta_north = zeta_north.repeat(t.size, axis=0)
# EAST #######
# getting the eastern slice to use as boundary condition
temp_east = TEMP[:,:,-1]; temp_east.shape = (1, run.klevels, Mp)
salt_east = SALT[:,:,-1]; salt_east.shape = (1, run.klevels, Mp)
u_east = U[:,:,-1]; u_east.shape = (1, run.klevels, Mp)
v_east = V[:,:,-1]; v_east.shape = (1, run.klevels, M)
ubar_east = UBAR[:,-1]; ubar_east.shape = (1, Mp)
vbar_east = VBAR[:,-1]; vbar_east.shape = (1, M)
zeta_east = ZETA[:,-1]; zeta_east.shape = (1, Mp)
# repeating as many times as the model will run
temp_east = temp_east.repeat(t.size, axis=0)
salt_east = salt_east.repeat(t.size, axis=0)
u_east = u_east.repeat(t.size, axis=0)
v_east = v_east.repeat(t.size, axis=0)
ubar_east = ubar_east.repeat(t.size, axis=0)
vbar_east = vbar_east.repeat(t.size, axis=0)
zeta_east = zeta_east.repeat(t.size, axis=0)
# SOUTH #####
# getting the southern slice to use as boundary condition
temp_south = TEMP[:,1,:]; temp_south.shape = (1, run.klevels, Lp)
salt_south = SALT[:,1,:]; salt_south.shape = (1, run.klevels, Lp)
u_south = U[:,1,:]; u_south.shape = (1, run.klevels, L)
v_south = V[:,1,:]; v_south.shape = (1, run.klevels, Lp)
ubar_south = UBAR[1,:]; ubar_south.shape = (1, L)
vbar_south = VBAR[1,:]; vbar_south.shape = (1, Lp)
zeta_south = ZETA[1,:]; zeta_south.shape = (1, Lp)
# repeating as many times as the model will run
temp_south = temp_south.repeat(t.size, axis=0)
salt_south = salt_south.repeat(t.size, axis=0)
u_south = u_south.repeat(t.size, axis=0)
v_south = v_south.repeat(t.size, axis=0)
ubar_south = ubar_south.repeat(t.size, axis=0)
vbar_south = vbar_south.repeat(t.size, axis=0)
zeta_south = zeta_south.repeat(t.size, axis=0)
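# Note: only the northern, eastern and southern boundaries are assembled; no *_west slices are
# built (the corresponding western variables below stay commented out), which suggests the
# western edge of this grid is closed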
#################################################################################
print ' \n' + '==> ' + ' WRITING NETCDF BOUNDARY CONDITIONS FILE ...\n' + ' '
ncfile = netCDF4.Dataset(run.rundir + run.run_name + filenamestr, mode='w',
clobber='true', format='NETCDF3_CLASSIC')
# creating DIMENSIONS
ncfile.createDimension('xi_rho', size=Lp)
ncfile.createDimension('xi_u', size=L)
ncfile.createDimension('xi_v', size=Lp)
ncfile.createDimension('eta_rho', size=Mp)
ncfile.createDimension('eta_u', size=Mp)
ncfile.createDimension('eta_v', size=M)
ncfile.createDimension('s_rho', size=N)
ncfile.createDimension('s_w', size=Np)
ncfile.createDimension('zeta_time', size=run.time)
ncfile.createDimension('v2d_time', size=run.time)
ncfile.createDimension('v3d_time', size=run.time)
ncfile.createDimension('temp_time', size=run.time)
ncfile.createDimension('salt_time', size=run.time)
ncfile.createDimension('one', size=1)
# creating GLOBAL ATTRIBUTES
setattr(ncfile, 'type', filetypestr)
setattr(ncfile, 'title', run.ini_info)
setattr(ncfile, 'out_file', run.run_name + filenamestr)
setattr(ncfile, 'grd_file', run.run_name + '_grd.nc')
now = dt.datetime.now()
setattr(ncfile,'history',np.str(now))
# creating VARIABLES, ATTRIBUTES and ASSIGNING VALUES
# ---------------------------------------------------------------------------
ncfile.createVariable('spherical', 'c')
setattr(ncfile.variables['spherical'], 'long_name', 'grid type logical switch')
setattr(ncfile.variables['spherical'], 'flag_values', 'T, F')
setattr(ncfile.variables['spherical'], 'flag_meanings', 'spherical, cartesian')
ncfile.variables['spherical'][:] = spherical
# ---------------------------------------------------------------------------
ncfile.createVariable('Vtransform', 'd', dimensions=('one'))
setattr(ncfile.variables['Vtransform'], 'long_name',
'vertical terrain-following transformation equation')
ncfile.variables['Vtransform'][:] = run.vtransform
# ---------------------------------------------------------------------------
ncfile.createVariable('Vstretching', 'd', dimensions=('one'))
setattr(ncfile.variables['Vstretching'], 'long_name',
'vertical terrain-following stretching function')
ncfile.variables['Vstretching'][:] = run.vstretching
# ---------------------------------------------------------------------------
ncfile.createVariable('theta_s', 'd', dimensions=('one'))
setattr(ncfile.variables['theta_s'], 'long_name',
'S-coordinate surface control parameter')
ncfile.variables['theta_s'][:] = run.theta_s
# ---------------------------------------------------------------------------
ncfile.createVariable('theta_b', 'd', dimensions=('one'))
setattr(ncfile.variables['theta_b'], 'long_name',
'S-coordinate bottom control parameter')
ncfile.variables['theta_b'][:] = run.theta_b
# ---------------------------------------------------------------------------
ncfile.createVariable('Tcline', 'd', dimensions=('one'))
setattr(ncfile.variables['Tcline'], 'long_name',
'S-coordinate surface/bottom layer width')
setattr(ncfile.variables['Tcline'], 'units', 'meter')
ncfile.variables['Tcline'][:] = run.tcline
# ---------------------------------------------------------------------------
ncfile.createVariable('hc', 'd', dimensions=('one'))
setattr(ncfile.variables['hc'],'long_name',
'S-coordinate parameter, critical depth')
setattr(ncfile.variables['hc'], 'units', 'meter')
ncfile.variables['hc'][:] = run.hc
# ---------------------------------------------------------------------------
ncfile.createVariable('s_rho', 'd', dimensions=('s_rho'))
setattr(ncfile.variables['s_rho'], 'long_name', 'S-coordinate at RHO-points')
setattr(ncfile.variables['s_rho'], 'valid_min', -1.0)
setattr(ncfile.variables['s_rho'], 'valid_max', 0.0)
setattr(ncfile.variables['s_rho'], 'positive', 'up')
setattr(ncfile.variables['s_rho'], 'standard_name', 'ocean_s_coordinate_g1')
setattr(ncfile.variables['s_rho'], 'formula_terms',
's: s_rho C: Cs_r eta: zeta depth: h depth_c: hc')
ncfile.variables['s_rho'][:] = sc
# ---------------------------------------------------------------------------
ncfile.createVariable('s_w', 'd', dimensions=('s_w'))
setattr(ncfile.variables['s_w'], 'long_name', 'S-coordinate at W-points')
setattr(ncfile.variables['s_w'], 'valid_min', -1.0)
setattr(ncfile.variables['s_w'], 'valid_max', 0.0)
setattr(ncfile.variables['s_w'], 'positive', 'up')
setattr(ncfile.variables['s_w'], 'standard_name', 'ocean_s_coordinate_g1')
setattr(ncfile.variables['s_w'], 'formula_terms',
's: s_rho C: Cs_w eta: zeta depth: h depth_c: hc')
ncfile.variables['s_w'][:] = scw
# ---------------------------------------------------------------------------
ncfile.createVariable('Cs_r', 'd', dimensions=('s_rho'))
setattr(ncfile.variables['Cs_r'], 'long_name',
'S-coordinate stretching curves at RHO-points')
setattr(ncfile.variables['Cs_r'], 'valid_min', -1.0)
setattr(ncfile.variables['Cs_r'], 'valid_max', 0.0)
ncfile.variables['Cs_r'][:] = Cs
# ---------------------------------------------------------------------------
ncfile.createVariable('Cs_w', 'd', dimensions=('s_w'))
setattr(ncfile.variables['Cs_w'], 'long_name',
'S-coordinate stretching curves at W-points')
setattr(ncfile.variables['Cs_w'], 'valid_min', -1.0)
setattr(ncfile.variables['Cs_w'], 'valid_max', 0.0)
ncfile.variables['Cs_w'][:] = Csw
# ---------------------------------------------------------------------------
ncfile.createVariable('h', 'd', dimensions=('eta_rho', 'xi_rho'))
setattr(ncfile.variables['h'], 'long_name', 'bathymetry at RHO-points')
setattr(ncfile.variables['h'], 'units', 'meter')
setattr(ncfile.variables['h'], 'coordinates', 'lon_rho lat_rho')
ncfile.variables['h'][:] = h2
# ---------------------------------------------------------------------------
ncfile.createVariable('lon_rho', 'd', dimensions=('eta_rho', 'xi_rho'))
setattr(ncfile.variables['lon_rho'], 'long_name', 'longitude of RHO-points')
setattr(ncfile.variables['lon_rho'], 'units', 'degree_east')
setattr(ncfile.variables['lon_rho'], 'standard_name', 'longitude')
ncfile.variables['lon_rho'][:] = grdfile.variables['lon_rho'][:]
# ---------------------------------------------------------------------------
ncfile.createVariable('lat_rho', 'd', dimensions=('eta_rho', 'xi_rho'))
setattr(ncfile.variables['lat_rho'], 'long_name', 'latitude of RHO-points')
setattr(ncfile.variables['lat_rho'], 'units', 'degree_north')
setattr(ncfile.variables['lat_rho'], 'standard_name', 'latitude')
ncfile.variables['lat_rho'][:] = grdfile.variables['lat_rho'][:]
# ---------------------------------------------------------------------------
ncfile.createVariable('lon_u', 'd', dimensions=('eta_u', 'xi_u'))
setattr(ncfile.variables['lon_u'], 'long_name', 'longitude of U-points')
setattr(ncfile.variables['lon_u'], 'units', 'degree_east')
setattr(ncfile.variables['lon_u'], 'standard_name', 'longitude')
ncfile.variables['lon_u'][:] = grdfile.variables['lon_u'][:]
# ---------------------------------------------------------------------------
ncfile.createVariable('lat_u', 'd', dimensions=('eta_u', 'xi_u'))
setattr(ncfile.variables['lat_u'], 'long_name', 'latitude of U-points')
setattr(ncfile.variables['lat_u'], 'units', 'degree_north')
setattr(ncfile.variables['lat_u'], 'standard_name', 'latitude')
ncfile.variables['lat_u'][:] = grdfile.variables['lat_u'][:]
# ---------------------------------------------------------------------------
ncfile.createVariable('lon_v', 'd', dimensions=('eta_v', 'xi_v'))
setattr(ncfile.variables['lon_v'], 'long_name', 'longitude of V-points')
setattr(ncfile.variables['lon_v'], 'units', 'degree_east')
setattr(ncfile.variables['lon_v'], 'standard_name', 'longitude')
ncfile.variables['lon_v'][:] = grdfile.variables['lon_v'][:]
# ---------------------------------------------------------------------------
ncfile.createVariable('lat_v', 'd', dimensions=('eta_v', 'xi_v'))
setattr(ncfile.variables['lat_v'], 'long_name', 'latitude of V-points')
setattr(ncfile.variables['lat_v'], 'units', 'degree_north')
setattr(ncfile.variables['lat_v'], 'standard_name', 'latitude')
ncfile.variables['lat_v'][:] = grdfile.variables['lat_v'][:]
# ---------------------------------------------------------------------------
ncfile.createVariable('v3d_time', 'd', dimensions=('v3d_time'))
setattr(ncfile.variables['v3d_time'], 'long_name', '3D momentum time')
setattr(ncfile.variables['v3d_time'], 'units', 'days since 0000-01-01 00:00:00')
ncfile.variables['v3d_time'][:] = t
# ---------------------------------------------------------------------------
ncfile.createVariable('temp_time', 'd', dimensions=('temp_time'))
setattr(ncfile.variables['temp_time'], 'long_name', 'potential temperature time')
setattr(ncfile.variables['temp_time'], 'units', 'days since 0000-01-01 00:00:00')
ncfile.variables['temp_time'][:] = t
# ---------------------------------------------------------------------------
ncfile.createVariable('salt_time', 'd', dimensions=('salt_time'))
setattr(ncfile.variables['salt_time'], 'long_name', 'salinity time')
setattr(ncfile.variables['salt_time'], 'units', 'days since 0000-01-01 00:00:00')
ncfile.variables['salt_time'][:] = t
# ---------------------------------------------------------------------------
#ncfile.createVariable('u_west', 'd', dimensions=('v3d_time', 's_rho', 'eta_u'))
#setattr(ncfile.variables['u_west'], 'long_name', '3D u-momentum western boundary condition')
#setattr(ncfile.variables['u_west'], 'units', 'meter second-1')
#setattr(ncfile.variables['u_west'], 'time', 'v3d_time')
#ncfile.variables['u_west'][:] = u_west
# ---------------------------------------------------------------------------
ncfile.createVariable('u_east', 'd', dimensions=('v3d_time', 's_rho', 'eta_u'))
setattr(ncfile.variables['u_east'], 'long_name', '3D u-momentum eastern boundary condition')
setattr(ncfile.variables['u_east'], 'units', 'meter second-1')
setattr(ncfile.variables['u_east'], 'time', 'v3d_time')
ncfile.variables['u_east'][:] = u_east
# ---------------------------------------------------------------------------
ncfile.createVariable('u_south', 'd', dimensions=('v3d_time', 's_rho', 'xi_u'))
setattr(ncfile.variables['u_south'], 'long_name', '3D u-momentum southern boundary condition')
setattr(ncfile.variables['u_south'], 'units', 'meter second-1')
setattr(ncfile.variables['u_south'], 'time', 'v3d_time')
ncfile.variables['u_south'][:] = u_south
## ---------------------------------------------------------------------------
ncfile.createVariable('u_north', 'd', dimensions=('v3d_time', 's_rho', 'xi_u'))
setattr(ncfile.variables['u_north'], 'long_name', '3D u-momentum northern boundary condition')
setattr(ncfile.variables['u_north'], 'units', 'meter second-1')
setattr(ncfile.variables['u_north'], 'time', 'v3d_time')
ncfile.variables['u_north'][:] = u_north
# ---------------------------------------------------------------------------
ncfile.createVariable('ubar_east', 'd', dimensions=('v3d_time', 'eta_u'))
setattr(ncfile.variables['ubar_east'], 'long_name', '2D u-momentum eastern boundary condition')
setattr(ncfile.variables['ubar_east'], 'units', 'meter second-1')
setattr(ncfile.variables['ubar_east'], 'time', 'v3d_time')
ncfile.variables['ubar_east'][:] = ubar_east
# ---------------------------------------------------------------------------
ncfile.createVariable('ubar_south', 'd', dimensions=('v3d_time', 'xi_u'))
setattr(ncfile.variables['ubar_south'], 'long_name', '2D u-momentum southern boundary condition')
setattr(ncfile.variables['ubar_south'], 'units', 'meter second-1')
setattr(ncfile.variables['ubar_south'], 'time', 'v3d_time')
ncfile.variables['ubar_south'][:] = ubar_south
## ---------------------------------------------------------------------------
ncfile.createVariable('ubar_north', 'd', dimensions=('v3d_time', 'xi_u'))
setattr(ncfile.variables['ubar_north'], 'long_name', '2D u-momentum northern boundary condition')
setattr(ncfile.variables['ubar_north'], 'units', 'meter second-1')
setattr(ncfile.variables['ubar_north'], 'time', 'v3d_time')
ncfile.variables['ubar_north'][:] = ubar_north
## ---------------------------------------------------------------------------
#ncfile.createVariable('v_west', 'd', dimensions=('v3d_time', 's_rho', 'eta_v'))
#setattr(ncfile.variables['v_west'], 'long_name', '3D v-momentum western boundary condition')
#setattr(ncfile.variables['v_west'], 'units', 'meter second-1')
#setattr(ncfile.variables['v_west'], 'time', 'v3d_time')
#ncfile.variables['v_west'][:] = v_west
# ---------------------------------------------------------------------------
ncfile.createVariable('v_east', 'd', dimensions=('v3d_time', 's_rho', 'eta_v'))
setattr(ncfile.variables['v_east'], 'long_name', '3D v-momentum eastern boundary condition')
setattr(ncfile.variables['v_east'], 'units', 'meter second-1')
setattr(ncfile.variables['v_east'], 'time', 'v3d_time')
ncfile.variables['v_east'][:] = v_east
# ---------------------------------------------------------------------------
ncfile.createVariable('v_south', 'd', dimensions=('v3d_time', 's_rho', 'xi_v'))
setattr(ncfile.variables['v_south'], 'long_name', '3D v-momentum southern boundary condition')
setattr(ncfile.variables['v_south'], 'units', 'meter second-1')
setattr(ncfile.variables['v_south'], 'time', 'v3d_time')
ncfile.variables['v_south'][:] = v_south
## ---------------------------------------------------------------------------
ncfile.createVariable('v_north', 'd', dimensions=('v3d_time', 's_rho', 'xi_v'))
setattr(ncfile.variables['v_north'], 'long_name', '3D v-momentum northern boundary condition')
setattr(ncfile.variables['v_north'], 'units', 'meter second-1')
setattr(ncfile.variables['v_north'], 'time', 'v3d_time')
ncfile.variables['v_north'][:] = v_north
# ---------------------------------------------------------------------------
ncfile.createVariable('vbar_east', 'd', dimensions=('v3d_time', 'eta_v'))
setattr(ncfile.variables['vbar_east'], 'long_name', '2D v-momentum eastern boundary condition')
setattr(ncfile.variables['vbar_east'], 'units', 'meter second-1')
setattr(ncfile.variables['vbar_east'], 'time', 'v3d_time')
ncfile.variables['vbar_east'][:] = vbar_east
# ---------------------------------------------------------------------------
ncfile.createVariable('vbar_south', 'd', dimensions=('v3d_time', 'xi_v'))
setattr(ncfile.variables['vbar_south'], 'long_name', '2D v-momentum southern boundary condition')
setattr(ncfile.variables['vbar_south'], 'units', 'meter second-1')
setattr(ncfile.variables['vbar_south'], 'time', 'v3d_time')
ncfile.variables['vbar_south'][:] = vbar_south
## ---------------------------------------------------------------------------
ncfile.createVariable('vbar_north', 'd', dimensions=('v3d_time', 'xi_v'))
setattr(ncfile.variables['vbar_north'], 'long_name', '2D v-momentum northern boundary condition')
setattr(ncfile.variables['vbar_north'], 'units', 'meter second-1')
setattr(ncfile.variables['vbar_north'], 'time', 'v3d_time')
ncfile.variables['vbar_north'][:] = vbar_north
# ---------------------------------------------------------------------------
ncfile.createVariable('zeta_east', 'd', dimensions=('temp_time', 'eta_rho'))
setattr(ncfile.variables['zeta_east'], 'long_name', 'free-surface eastern boundary condition')
setattr(ncfile.variables['zeta_east'], 'units', 'meter')
setattr(ncfile.variables['zeta_east'], 'time', 'temp_time')
ncfile.variables['zeta_east'][:] = zeta_east
# ---------------------------------------------------------------------------
ncfile.createVariable('zeta_south', 'd', dimensions=('temp_time', 'xi_rho'))
setattr(ncfile.variables['zeta_south'], 'long_name', 'free-surface southern boundary condition')
setattr(ncfile.variables['zeta_south'], 'units', 'meter')
setattr(ncfile.variables['zeta_south'], 'time', 'temp_time')
ncfile.variables['zeta_south'][:] = zeta_south
## ---------------------------------------------------------------------------
ncfile.createVariable('zeta_north', 'd', dimensions=('temp_time', 'xi_rho'))
setattr(ncfile.variables['zeta_north'], 'long_name', 'free-surface northern boundary condition')
setattr(ncfile.variables['zeta_north'], 'units', 'meter')
setattr(ncfile.variables['zeta_north'], 'time', 'temp_time')
ncfile.variables['zeta_north'][:] = zeta_north
## ---------------------------------------------------------------------------
#ncfile.createVariable('temp_west', 'd', dimensions=('temp_time', 's_rho', 'eta_rho'))
#setattr(ncfile.variables['temp_west'], 'long_name', 'potential temperature western boundary condition')
#setattr(ncfile.variables['temp_west'], 'units', 'celcius')
#setattr(ncfile.variables['temp_west'], 'time', 'temp_time')
#ncfile.variables['temp_west'][:] = temp_west
# ---------------------------------------------------------------------------
ncfile.createVariable('temp_east', 'd', dimensions=('temp_time', 's_rho', 'eta_rho'))
setattr(ncfile.variables['temp_east'], 'long_name', 'potential temperature eastern boundary condition')
setattr(ncfile.variables['temp_east'], 'units', 'celsius')
setattr(ncfile.variables['temp_east'], 'time', 'temp_time')
ncfile.variables['temp_east'][:] = temp_east
# ---------------------------------------------------------------------------
ncfile.createVariable('temp_south', 'd', dimensions=('temp_time', 's_rho', 'xi_rho'))
setattr(ncfile.variables['temp_south'], 'long_name', 'potential temperature southern boundary condition')
setattr(ncfile.variables['temp_south'], 'units', 'celsius')
setattr(ncfile.variables['temp_south'], 'time', 'temp_time')
ncfile.variables['temp_south'][:] = temp_south
## ---------------------------------------------------------------------------
ncfile.createVariable('temp_north', 'd', dimensions=('temp_time', 's_rho', 'xi_rho'))
setattr(ncfile.variables['temp_north'], 'long_name', 'potential temperature northern boundary condition')
setattr(ncfile.variables['temp_north'], 'units', 'celsius')
setattr(ncfile.variables['temp_north'], 'time', 'temp_time')
ncfile.variables['temp_north'][:] = temp_north
## ---------------------------------------------------------------------------
#ncfile.createVariable('salt_west', 'd', dimensions=('salt_time', 's_rho', 'eta_rho'))
#setattr(ncfile.variables['salt_west'], 'long_name', 'salinity western boundary condition')
#setattr(ncfile.variables['salt_west'], 'time', 'salt_time')
#ncfile.variables['salt_west'][:] = salt_west
# ---------------------------------------------------------------------------
ncfile.createVariable('salt_east', 'd', dimensions=('salt_time', 's_rho', 'eta_rho'))
setattr(ncfile.variables['salt_east'], 'long_name', 'salinity eastern boundary condition')
setattr(ncfile.variables['salt_east'], 'time', 'salt_time')
ncfile.variables['salt_east'][:] = salt_east
# ---------------------------------------------------------------------------
ncfile.createVariable('salt_south', 'd', dimensions=('salt_time', 's_rho', 'xi_rho'))
setattr(ncfile.variables['salt_south'], 'long_name', 'salinity southern boundary condition')
setattr(ncfile.variables['salt_south'], 'time', 'salt_time')
ncfile.variables['salt_south'][:] = salt_south
## ---------------------------------------------------------------------------
ncfile.createVariable('salt_north', 'd', dimensions=('salt_time', 's_rho', 'xi_rho'))
setattr(ncfile.variables['salt_north'], 'long_name', 'salinity northern boundary condition')
setattr(ncfile.variables['salt_north'], 'time', 'salt_time')
ncfile.variables['salt_north'][:] = salt_north
ncfile.sync()
print ' \n' + '==> ' + ' ############################################# ...\n' + ' '
print ' \n' + '==> ' + ' BOUNDARY CONDITIONS FILE SUCCESSFULLY CREATED ...\n' + ' '
print ' \n' + '==> ' + ' ############################################# ...\n' + ' '
| [
"[email protected]"
] | |
1f0ba2eb90839c85462d5f63334dbc88a90db375 | 1d672c52ada009c6aeeafec6caeae0adf064060d | /docs/source/conf.py | a97a5551e3ae8bef36af79a0972f0eb8404b6190 | [
"BSD-3-Clause"
] | permissive | sakshiseth/fury | 9927487aaf5dd1b2dc0db5cd31facdb4743f86dd | 5799e445a5a306852a674396803bbefa922f0ae6 | refs/heads/master | 2021-01-13T20:18:49.848717 | 2020-02-22T20:54:59 | 2020-02-22T20:54:59 | 242,483,253 | 0 | 1 | NOASSERTION | 2020-02-23T08:39:05 | 2020-02-23T08:39:04 | null | UTF-8 | Python | false | false | 7,705 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# FURY documentation build configuration file, created by
# sphinx-quickstart on Thu Jun 28 12:35:56 2018.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import re
import sys
from datetime import datetime
# Add current path
sys.path.insert(0, os.path.abspath('.'))
# Add doc in path for finding tutorial and examples
sys.path.insert(0, os.path.abspath('../..'))
# Add custom extensions
sys.path.insert(0, os.path.abspath('./ext'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
needs_sphinx = '2.1'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.autosummary',
'sphinx.ext.githubpages',
'sphinx.ext.intersphinx',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
'IPython.sphinxext.ipython_directive',
'IPython.sphinxext.ipython_console_highlighting',
'matplotlib.sphinxext.plot_directive',
'numpydoc',
'sphinx_copybutton',
'sphinx_gallery.gen_gallery',
'ext.build_modref_templates',
'ext.github',
'ext.github_tools',
'ext.rstjinja'
]
# Configuration options for plot_directive. See:
# https://github.com/matplotlib/matplotlib/blob/f3ed922d935751e08494e5fb5311d3050a3b637b/lib/matplotlib/sphinxext/plot_directive.py#L81
plot_html_show_source_link = False
plot_html_show_formats = False
# Generate the API documentation when building
autosummary_generate = []
numpydoc_show_class_members = False
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'FURY'
copyright = '2010-{0}, FURY'.format(datetime.now().year)
author = 'FURY'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
import fury
# The short X.Y version.
version = fury.__version__
# The full version, including alpha/beta/rc tags.
release = fury.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
import sphinx_rtd_theme
html_theme_path = [sphinx_rtd_theme.get_html_theme_path(), ]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
html_sidebars = {
'**': [
'relations.html', # needs 'show_related': True theme option to display
'searchbox.html',
'versions.html',
]
}
# ghissue config
github_project_url = "https://github.com/fury-gl/fury"
import github_tools as ght
all_versions = ght.get_all_versions(ignore='micro')
html_context = {'all_versions': all_versions,
'versions_list': ['dev', 'latest'] + all_versions,
'basic_stats': ght.fetch_basic_stats(),
'contributors': ght.fetch_contributor_stats(),
}
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'fury'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'fury.tex', 'FURY Documentation',
'Contributors', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'fury', 'FURY Documentation',
[author], 1)
]
# -- Options for sphinx gallery -------------------------------------------
from scrap import ImageFileScraper
sc = ImageFileScraper()
sphinx_gallery_conf = {
'doc_module': ('fury',),
    # path to your example scripts
    'examples_dirs': ['../examples', '../tutorials'],
    # path where the generated gallery examples are saved
'gallery_dirs': ['auto_examples', 'auto_tutorials'],
    'image_scrapers': (sc,),
'backreferences_dir': 'api',
'reference_url': {'fury': None, },
'filename_pattern': re.escape(os.sep)
}
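# 'filename_pattern' is matched as a regular expression against each example's full path;
# escaping the OS path separator alone matches every script, so all examples and
# tutorials above are executed when the gallery is built.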
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'fury', 'FURY Documentation',
author, 'fury', 'Free Unified Rendering in Python',
'Miscellaneous'),
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
'python': ('https://docs.python.org/3/', None),
'numpy': ('https://docs.scipy.org/doc/numpy/', None),
'scipy': ('https://docs.scipy.org/doc/scipy/reference/', None),
'pandas': ('https://pandas.pydata.org/pandas-docs/stable', None),
'matplotlib': ('https://matplotlib.org', None),
'dipy': ('https://dipy.org/documentation/latest',
'https://dipy.org/documentation/1.0.0./objects.inv/'),
}
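# With the mapping above, reST prose in these docs can cross-reference the listed
# projects through intersphinx roles, for example (illustrative only):
#   :class:`numpy.ndarray`          -> links into the NumPy reference
#   :func:`scipy.ndimage.convolve`  -> links into the SciPy reference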
| [
"[email protected]"
] | |
4513165496d6f2e83579ac9cf0684f88a705068e | d020606f5e9174aa669e4b6b316bdb0fcb05ce02 | /run_test.py | dc79693a586c6b2f47af9c3cd513684781ca785c | [] | no_license | Hanlen520/AutomationProject | 4c1270fba570b256493cd6681d715e0b5136a4f5 | 95a7cb61d8b339a6409483d738de5a0d9d85b321 | refs/heads/master | 2023-04-02T20:23:07.696753 | 2021-04-07T07:57:04 | 2021-04-07T07:57:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,342 | py | # coding = utf8
import logging
import multiprocessing
import subprocess
import pytest
from airtest.core.api import *
from poco.drivers.android.uiautomation import AndroidUiautomationPoco
from config import install_app_necessary, SERIAL_NUMBER
from page.fota.fota_page import Fota_Page
from page.main_page import Main_Page
from page.system.system import System
from toolsbar.common import test_device
from toolsbar.permissionGrant import grant_permission
os.path.abspath(".")
# Filter the airtest log so that only ERROR-level messages are printed
logger_airtest = logging.getLogger("airtest")
logger_airtest.setLevel(logging.ERROR)
cur_time = time.strftime("%Y%m%d_%H%M%S")
"""
@File:run_test.py
@Author:Bruce
@Date:2020/12/15
@Description: project run functions; holds the test and debug functions
"""
"""
For a single device, poco and device do not need to be re-initialized.
For multiple devices, create new poco_item/device_item objects per device,
then simply pass poco_item in where needed; airtest-related APIs are called on the
corresponding device_item (see the illustrative helper sketched just below).
Test cases do not need to be written twice.
Do not run the UI process and the underlying device process in the same process; it easily causes problems.
"""
# Process pool for multi-device testing: compatible with both single-device and multi-device runs
"""
@description: spawn one process per device to run the tests on multiple devices in parallel
@tip:
    PyCharm has a flaw when invoking adb; start PyCharm from a terminal with the `charm` command so it gets the shell (dash) permissions.
    Before executing the cases, manually install pocoservice.apk (tap continue) and dismiss the authorization screen to prevent later failures.
"""
def start_test():
print("当前设备数量:" + str(len(SERIAL_NUMBER)))
if len(SERIAL_NUMBER) > 1:
for i in test_device:
install_app_necessary(i)
grant_permission(i)
else:
install_app_necessary(test_device)
grant_permission(test_device)
test_pool = multiprocessing.Pool(len(SERIAL_NUMBER))
for device_ in SERIAL_NUMBER:
test_pool.apply_async(func=fota_test_area, args=(device_,))
sleep(10)
test_pool.close()
test_pool.join()
"""
@description: execution area for the Fota checklist test functions
@param:
    device_: device serial number
"""
def fota_test_area(device_):
pytest.main(["-v", "-s", "--cmdopt={}".format(device_), "{}".format("./test_case/test_before_fota.py"),
"--reruns={}".format(1),
"--alluredir={}".format("./temp/need_data[{}_{}]/".format(cur_time, device_))])
    # differentiate the report output per device
subprocess.Popen(
args=["allure", "generate", "./temp/need_data[{}_{}]/".format(cur_time, device_), "-o",
"./report/test_report[{}_{}]/".format(cur_time, device_),
"--clean"],
shell=False).communicate()[0]
updatesw(device_)
# subprocess.Popen(
# "allure generate ./temp/need_data[{}_{}] -o ./report/test_report[{}_{}]/ --clean".format(cur_time, device_,
# cur_time, device_),
# shell=True).communicate()[0]
"""
@description: execution area for the software-upgrade functions of the Fota checklist test
@param:
    device_: device serial number
"""
def updatesw(device_):
print("开始新版本升级")
try:
device_c = connect_device("Android:///{}".format(device_))
poco = AndroidUiautomationPoco(device=device_c, use_airtest_input=False,
screenshot_each_action=False)
main_page = Main_Page(device_c, poco)
system = System(main_page)
system.unlock_screen()
fota_page = Fota_Page(main_page)
fota_page.start_fota_page()
fota_page.skip_guide()
fota_page.updatesw()
print("升级结果:" + str(fota_page.check_update_result(device_)))
print("Fota升级测试结束")
except Exception as ex:
print(str(ex))
"""
@description: Fota checklist test function area
"""
def fota_checklist_test_module():
start_test()
"""
@description: main function; the primary run function
"""
if __name__ == '__main__':
print("脚本开始测试,Fota checklist模块测试正在运行中……")
for i in range(5):
print("这是第{}次测试该脚本".format(i))
fota_checklist_test_module()
print("This is {} times running and time is {}".format(str(i), time.strftime("%Y%m%d_%H%M%S")))
print("脚本测试结束,请检查测试结果")
| [
"[email protected]"
] | |
eb525e2ac4b98dac4261e2d6857bca7619fda42c | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/verbs/_disgraced.py | e19865eec3d6c3da4393171f3fa501464fae3db9 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 252 | py |
from xai.brain.wordbase.verbs._disgrace import _DISGRACE
# class header
class _DISGRACED(_DISGRACE, ):
def __init__(self,):
_DISGRACE.__init__(self)
self.name = "DISGRACED"
self.specie = 'verbs'
self.basic = "disgrace"
self.jsondata = {}
| [
"[email protected]"
] | |
03cf906edb96cb427cd37f0de2a53228c70ea321 | 2bcf18252fa9144ece3e824834ac0e117ad0bdf3 | /zpt/trunk/site-packages/zpt/_pytz/zoneinfo/Africa/Asmera.py | 9ccd9c3141892ef8d1d76acc48c773f5c5c4c4cf | [
"MIT",
"ZPL-2.1"
] | permissive | chadwhitacre/public | 32f65ba8e35d38c69ed4d0edd333283a239c5e1d | 0c67fd7ec8bce1d8c56c7ff3506f31a99362b502 | refs/heads/master | 2021-05-10T14:32:03.016683 | 2010-05-13T18:24:20 | 2010-05-13T18:24:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 498 | py | '''tzinfo timezone information for Africa/Asmera.'''
from zpt._pytz.tzinfo import DstTzInfo
from zpt._pytz.tzinfo import memorized_datetime as d
from zpt._pytz.tzinfo import memorized_ttinfo as i
class Asmera(DstTzInfo):
'''Africa/Asmera timezone definition. See datetime.tzinfo for details'''
zone = 'Africa/Asmera'
_utc_transition_times = [
d(1,1,1,0,0,0),
d(1936,5,4,21,24,40),
]
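    # Each tuple below is (UTC offset in seconds, DST offset in seconds, tzname) and applies
    # from the corresponding transition time listed above.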
_transition_info = [
i(9300,0,'ADMT'),
i(10800,0,'EAT'),
]
Asmera = Asmera()
| [
"[email protected]"
] | |
2736cd03881b87e222ecd21b6b92c7e5445f98f5 | 31d5bebb9667b17a17fe98a5c3064cac5a0fd4dd | /calisanProfil/urls.py | 219a0406652a00ff45e0ff330c77ec07ab045d24 | [] | no_license | refik/audio | d6b8829fafcfa2c54e6f477ceede210a2f5d4f41 | 011b7b0f01d14529b91bf6f4d3c5919823e19e6b | refs/heads/master | 2021-01-17T17:06:06.202561 | 2015-09-17T05:38:22 | 2015-09-17T05:38:22 | 1,948,617 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 138 | py | from django.conf.urls.defaults import *
urlpatterns = patterns('audio.calisanProfil.views',
(r'^musteri-temsilcisi/', 'temsilci'),
)
| [
"[email protected]"
] | |
0fe9c746bbc3125b3782ee930a2c84333c287b39 | 993ef8924418866f932396a58e3ad0c2a940ddd3 | /Production/python/Summer20UL17/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8_cff.py | 554733df78bf4f09d59066ea4d14db9921190388 | [] | no_license | TreeMaker/TreeMaker | 48d81f6c95a17828dbb599d29c15137cd6ef009a | 15dd7fe9e9e6f97d9e52614c900c27d200a6c45f | refs/heads/Run2_UL | 2023-07-07T15:04:56.672709 | 2023-07-03T16:43:17 | 2023-07-03T16:43:17 | 29,192,343 | 16 | 92 | null | 2023-07-03T16:43:28 | 2015-01-13T13:59:30 | Python | UTF-8 | Python | false | false | 105,012 | py | import FWCore.ParameterSet.Config as cms
maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
readFiles = cms.untracked.vstring()
secFiles = cms.untracked.vstring()
source = cms.Source ("PoolSource",fileNames = readFiles, secondaryFileNames = secFiles)
readFiles.extend( [
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/040F76C0-87F9-1E40-AF31-3839718CE4FA.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/0ED5D3A8-76D6-834D-9D57-11AE93630524.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/104E86BB-B4B5-A440-9900-9CEDC0E4462C.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/169B7151-D553-294C-B6FC-A61CBC65E981.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/17F1FBF2-CEB1-D046-A12F-2E656895323A.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/1A6EF3DD-96B3-D74F-926F-D13178E2BAA1.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/20158F70-DF2A-BA4F-9AD4-B293D000B25C.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/29BCEAE2-A276-A644-A311-4C7B36915430.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/2E3EFFAB-C362-4B42-BC6D-B5B5FD3FA319.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/2F3018CB-5898-0846-9A6D-CD37A7639B4E.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/33FA73F1-2420-B94F-BD40-717F23AB713E.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/34A39E04-F0E4-A548-9DFF-5528F8882B96.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/37C16094-B604-F34B-BFC0-021DD99CF58A.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/39314427-5F22-394C-A4BA-ABDE88A80F64.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/3C168CF8-96F0-BF49-9BF9-846DA1A717F9.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/3E7712CF-6072-EB4D-87D4-E66B19A62D7B.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/44F8C83B-C4A2-9841-8146-2375B18FDC64.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/46FAC75C-262E-234F-8165-F035016EEF34.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/4ADED794-FD71-E849-9B1F-E84F9EAA192E.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/4CEDDD7C-0A0E-6043-BBB6-10ACF6092138.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/4D533C05-E4FA-EF44-8887-79732CA99DD8.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/4E7C6A46-B7BD-AD40-835F-20A03A3CFD3E.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/52663E9C-0CDC-2449-BC02-762868681005.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/543FAAE3-8C2C-CE42-BF60-DFC2B41135D7.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/54AD8154-7315-3145-B13D-8A08C9DFDE01.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/5E3D50D1-3AA2-9D4F-AEE7-F296C15C78A2.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/6173FC89-EF0E-424B-90CF-DD683F164A83.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/64068C1C-3644-8149-ABA5-83FE60FDAA5C.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/645F42A6-BB7B-5444-8505-F3F9E55C23DD.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/6663F93E-7CAF-0F4D-85A0-E7382074942B.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/6B3C235F-193D-8C4B-82B9-72BEBB7C8E74.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/6E5EE289-F0F6-9B40-9197-DCFBF36E114F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/716CCBE8-72BE-DB45-80AF-251FF614A1F8.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/73247DD8-2F4B-064B-92E5-F65B1B234C09.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/7D422F0D-CA1F-E24F-BD59-FABA861FB6A2.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/81F70542-41B0-014D-B077-049CE4BE3804.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/943A82EC-7B16-8747-AE44-5C42207BC8D1.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/9B54A8B8-C366-284A-BEE4-0B4F13DB6640.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/9E098250-B3F0-B34F-B229-59E6C02DE706.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/9F562254-F439-284C-BEE0-4AAF92F2282F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/9F73D23D-BCC9-0144-A87F-EEBD4820B8D6.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/9FD2B99B-1A10-B944-9B8D-F3712E34A989.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/B0606CC2-EE18-114A-B5FA-B34D7339B7BD.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/B0646D47-3D12-FE48-8160-5676096E3FEE.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/B7FEA6F8-47DD-D64A-B551-9FD590B18AEE.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/B8BFE5FB-1A8F-9F40-85D1-149212DC7179.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/C057013A-EE37-2C4A-B869-BE0F72BF25F2.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/C38C148A-F608-4B43-BB32-1D4C99D237E3.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/C44D29C2-87EB-DF4B-97CA-C0B79B10494B.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/C786572C-6711-8541-8E66-FB4AE5F7D286.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/CB1C829B-D783-0442-9CED-1F6631C0FD18.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/D2616535-6FB7-544B-A3C5-9BE5BBA010B6.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/D4561D48-4A88-2E48-83F9-98CAC8761578.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/D8151204-D88F-CC48-9E1B-0BC197294FA3.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/D91BA312-2BCB-744D-9F39-BD59DFB1F3C4.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/DDCCCC66-401B-4D4C-BF8D-538E5C416240.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/DF79181F-6FF4-554F-8BF9-0C2C8061C9F9.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/E3211257-9E34-3448-A29E-DB93425435D0.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/E3D8D243-29B4-104E-942F-3945370859B9.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/F5F20E96-B3AB-C749-BEFB-D5165198BD38.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/F62B67C4-8AE2-1745-930D-7D7D2C10E31F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2430000/F976AE76-E95C-9840-9EA3-09EBC715FF99.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/00777B57-81C2-4D45-BBBC-DC66CB13205C.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/00C74ACE-A715-2D44-B290-955D8DB034CB.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/029DEC73-933B-2A4B-AA7B-4454475F6065.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/042EF133-E994-2042-9581-E8F292D4E693.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/047EE3D3-8D9C-1843-BA16-1F2848C97630.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/05196994-4E58-134B-94FD-8A59FDB6E08B.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/07600D68-2C77-E246-ACFC-6F8FEF4E1427.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/0A58321F-A7FE-9940-B0F0-BFBF7B4B206F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/0CCD1841-7EBE-A442-B5CC-1170A7167B10.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/0CD86BC1-E37F-2B43-99A5-C4899386C5C9.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/0CE6A61F-C334-A04A-AD05-69FB574F54D2.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/0D367BF0-846B-3144-B664-22F52562E3A5.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/0F0977FE-63D3-2045-9797-63255D22B534.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/0FA5F865-303E-004B-9E63-5F20EC317022.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/118B8A9F-64D5-D34B-A4B7-EEDDD299FF0F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/13694B20-2734-BD4E-A250-889BF0783246.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/138AC7AE-7FDE-2645-8B64-462D4F44C76F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/1496EB1E-5BDA-864A-8F48-52F4A83951F5.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/14E9B5A4-82FF-D944-A553-91B67E4A5CBE.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/150B3888-30BE-DC4E-8A7C-802DEE569E31.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/1872EF53-7DBD-C543-AD01-12005F7B2185.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/188125E4-FF59-CF40-A4CA-B8808D429D21.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/1B956203-923F-B54F-A604-DF3941A327CF.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/1C384746-185C-D64D-A80F-23EFA3AEAE3C.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/1C5C319F-A2CD-B941-A897-E83550ECBA8B.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/1D82469C-BC77-3D4F-B878-EFFD7691D574.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/1E73EA4C-F196-DF4C-8EAB-756F94441BEB.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/1F40E503-923F-9542-BBF4-5164C1D17F5F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/20377682-6C0A-A042-BE0C-868C876DDB84.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/2054F4C0-30DC-CD4B-9924-C35473D92413.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/217E51A2-4774-4A48-AB02-13FE41026084.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/21C7EF48-46FF-5F49-837A-A3D708BC17EE.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/2253EEEA-98AE-7A4B-B6E3-EFC1EC009844.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/236C4A5A-32F8-1A46-BD79-727223E10DC9.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/23EFCD49-94F2-7D40-90AE-FC481386888A.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/25D7368A-9F2B-794B-932A-CED5EA6D783F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/26949464-3F9B-6147-865A-99202ED63217.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/27AAF8AC-0FE4-6C4F-9E87-AA943C67C33C.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/28547707-F9B9-8F44-A3B9-64E56D962879.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/29B93AA2-AC7D-134E-AB56-65D31599A5AF.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/2A01401E-9E21-7A4F-8D0B-2B77BBD8F623.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/2C325FB6-F0FC-814E-A3B5-9E917B22B7F9.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/2C4D7DC4-9011-9545-B411-BEF6BBC7D444.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/2D779E68-18A7-3043-A4B1-7D6C2A27EA05.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/2E4D91B0-CBB5-F449-9B18-00185293EAC8.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/301C771F-C079-7B41-B6BC-6CE2B72D48E9.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/30350157-53A6-E542-8300-9190A499D14E.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/303CB3E5-4DCA-8446-9793-1D8676709ACF.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/30968F79-4C3B-AA45-8596-F144C6988052.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/32A088F9-9769-8B45-831E-27E839133333.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/347D95F0-31D1-FC4F-98BF-F6C421D8A317.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/36A6DF53-EE63-0248-AD6E-A48497621D31.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/38926556-9AEF-474C-A1AF-9A17D099D9C2.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/3E41755F-03BC-1046-8569-E1C26C74EC40.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/3F2A3ECE-657F-8A4C-B3EE-08EAE92FFC9A.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/3F7AD088-E8F3-8B43-889C-355038DD99E9.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/4078FD40-487B-DB40-9DD2-CB5C47157624.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/409E4B19-0090-4B41-A8FC-20AA80666423.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/4100F425-1303-8344-AD7E-86C25ECE34AB.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/4140939B-1857-F947-ACFB-5BF9ABF594AB.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/44AD1EA1-D2AE-834F-9DEC-F6D782DD962E.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/467F3890-B4D2-6F41-8285-B31F70D5E955.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/473B6D39-9340-714A-AF88-AAB81D130444.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/493A474C-2429-4146-BB04-2A12FF6AA7B4.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/4D1FA4DB-6045-FC48-8645-593F7A1E31D4.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/4E7C714F-B9D3-9949-BC70-9969A126A437.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/4E815F0C-EE82-E842-9992-6BA1549A204B.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/4E8A1EC0-6230-C14E-9F04-EDD03BBDD669.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/4E9BC393-E042-B040-BFD7-4CD0C12E4D20.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/50BCFA54-FFF5-D640-8A78-F081624A63EE.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/52D9D3D5-0E65-8B4C-BBBC-39AC8E771464.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/540AF028-C653-474D-AFAB-A00752162AF9.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/5850C4E9-6A60-A641-ABE4-E0585CE77FAC.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/58A8BBA9-8397-C74D-BFF0-4710743CB416.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/58B6325E-4030-9D4A-9B0D-F1756BD74B87.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/5914FC48-8226-2F42-A004-1F6870DA7084.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/59191BDD-BD28-B445-BD04-0061ED5912F7.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/5A65EA71-B4BE-F446-84E4-0B777716BBF1.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/5B2D2379-BC9C-3349-B51E-83C6E0BF763C.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/5D476D0E-AFE0-7B43-A12F-80943C4CA1AF.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/5F6BAD25-EBCF-D14F-9F8E-155F697532E6.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/5FF9033B-F9B7-DF45-AD9F-96B40F4AFE24.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/603A9239-242C-A14A-822A-2BA55A1E119B.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/630748BD-6677-0B4C-91AD-ACA0DE286CDF.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/63175522-C9DC-384A-B28D-ABA6F6DA3FE3.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/63768F97-657D-E544-9D94-F26A97C4B8C4.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/656EF9F6-364C-9F4A-9AF7-DB2F7BFB58E0.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/657A6F28-7626-3B4A-9729-35DC5B495113.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/660A400D-E7FF-F140-A6AC-523B44144647.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/66B72500-28E3-F146-B70D-D92AD6777C85.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/677B2B97-F067-2047-9AD1-88DAD65C8344.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/6883B711-4C3A-C444-9045-9FB8D8C6C1FA.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/6901DF07-5E7B-644F-B569-E8EBD0D9A514.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/6945D3FE-E708-5F42-A6CC-FD4106E83A4F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/6C08B1DF-9F2D-7D45-8070-106A106A31C1.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/6CDE63A0-7240-9F49-B7EB-054451468AC8.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/6D525D4E-9655-0945-AD4C-B590910D8D79.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/6DCA647B-49C2-7648-9B4D-DFBB94955F40.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/6DF6E314-3066-B348-8617-399858EEBB55.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/6E47A340-692A-0142-B016-B5A546733A31.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/6F7099A7-5839-AA42-9555-5ADB85166F53.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/70174527-7019-A04E-82C1-111A6E8376EC.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/730A10E2-0FF7-BA4B-A4F3-CC03FF4D8552.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/7321B551-1AEC-BA4C-9C85-B8866151E187.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/7369D6B0-7F37-C440-AF74-7E83BE2AAC52.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/73893747-4CD5-3444-9F52-5E991645AF69.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/748828EC-C328-E345-8304-F9B07F1A30C5.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/76DB2C7D-609A-DA4E-A3BC-791EC58DC2EC.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/7A734FE7-992B-9A45-8270-CA5D25453153.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/7B8432C3-CFBB-084D-ABD8-B490B8BDB458.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/7C34F013-233F-B74E-BD0A-82E37290078D.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/7CAD78A4-0E90-AD4C-A915-D90704C310AC.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/8016FE8B-02E4-6B46-849F-52C3E3831F07.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/80A5DF73-B8D0-F942-84E3-8CA42423526E.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/815E8E47-4259-1E45-861E-0129D94328B8.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/8181B93D-4C5B-1149-8B56-CA475E94F8FE.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/82C1426D-9FD4-F148-BD26-D211981D0321.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/83595795-6C5E-0648-9529-BA841DD40E2A.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/8AE4C08C-7E77-334E-A757-78F8EC68C993.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/8BD286EE-55B7-DC40-82D9-2B9CC5746262.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/8C096C9F-EFE5-5B4F-8684-66D0AF896840.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/8E54D8A6-36A3-2940-B5B1-D2D8B11BFD44.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/913EB983-AD4D-2543-9862-559CBF6D393C.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/91858A48-D4F0-224A-9B02-9F4AD031ABCB.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/9269975D-E47C-E842-AE09-27F761955122.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/932005BA-22BB-F64B-BEA7-BE25E9597FBE.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/9571BCA1-38E9-AA4A-ACD2-6108F90AAE9F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/97B2B0DE-D228-174D-B372-CF64FDA0D119.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/9BBBC218-2BF8-554B-B285-E117DD7A42E1.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/9C4F84E3-F633-5549-AA05-211F5A147D4F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/9DB7913C-2BD5-7D46-BDB6-6A10350AF781.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/9E8E7394-6AFA-0846-AE2B-DF2B90D4C490.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/9E924120-35AC-EE44-9BF3-2B75250B863F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/A00995CB-BDDE-4543-BA66-0FF163787686.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/A054F21E-756F-B94E-B329-2D3EF5880AFB.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/A1887F2D-2BA2-2643-9A46-110687978AB0.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/A3293DA1-54E6-6342-A912-2817A2795D58.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/A3EBE262-065B-C449-9F50-BFCBAE5203ED.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/A422A9AF-D765-BC4B-B2B2-5F3457B77743.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/A4C74C16-BFAD-0E4E-9FBF-14A7A07C17A5.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/A68D0205-9C36-FE41-8BA2-0CAE5E5E4B71.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/AA157633-EC08-604D-A9CA-60E80718CA97.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/AA6BA623-FA6B-6A4E-8887-9A35EBC45A34.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/AC224873-2AAF-2B4A-B101-638F74F13AD1.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/AC5AEAA0-3BB3-A942-9C77-862E75404AD2.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/B07D9AFE-26A1-EC4C-9757-542A49D28EDC.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/B0AF0C65-9E94-9645-A09D-6806730E3B92.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/B0B689DB-8D3D-7740-89CC-60E6E3A1B21E.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/B12D9EBF-4626-E04F-A907-DF3AF675F37F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/B36AD251-69A8-3840-9485-5DB32E878778.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/B42422B4-CAF8-5542-B975-21C342831460.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/B91D50CD-29BE-DB48-8688-2E44E4781822.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/BA915018-4BBC-6344-B6ED-500168508D09.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/BB087B68-BC41-564B-937F-56D5C31D8F06.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/BC50CD01-3D2E-CA40-B2E4-D3B2631555DA.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/BC7DB403-843B-3749-8C57-5E02A6AABF1C.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/BCF598D1-4A70-1643-9419-3889EEEC1D13.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/BD1A1455-3AD4-2543-B447-209C65DF60F1.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/BE6B1817-42B7-C844-AFF6-A2A3BA55C9F3.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/BF0AB3EE-3055-6845-8D43-7E2E9F5010F5.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/C255AE8D-B549-A742-B9E2-3A1B3FD774F0.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/C2651212-9D07-1945-B9E6-37D57B7BC8C5.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/C27E1A28-7E5B-B04F-9C46-7351239AE26D.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/C32C49A7-7157-3D46-B8B6-2D13C91E9A39.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/C36E9FA8-B17B-B74A-B136-6E20AA18BF74.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/C5AB0CBE-1900-DD45-A2AB-300C20E7D68C.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/C5C8B096-061D-9445-9B0E-EA210E1E0C0C.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/C66B00CC-593E-604F-875E-A5DDB0281772.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/C679F22C-6826-D847-B78F-3378FFDC3F5D.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/C8CFCA4C-10AF-984C-9BAB-6602DF98F4CE.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/CAFA98B8-9DB6-7C49-94DF-9D4B8A9CFEF6.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/CB081021-1483-1544-80AA-0ECBB0035DF6.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/CBB986AC-E0A7-3247-A78B-352EE8597431.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/CC2BE306-7B87-A64D-AE1C-C00E52EBEF9F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/CC85DBB2-DD3B-7147-BE42-8AF22A8B5708.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/CFF969A0-9E4A-9B46-AE9C-2E8FC182356C.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/D2B26CB8-F23C-444A-8CBA-B8CEF4F39293.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/D3B19CEA-7280-BE42-AB91-26B9F03FAA8D.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/D4FD49B4-A4BE-BD44-A371-1CE0A869C34D.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/D5A93A11-11E9-9B45-809A-42521078D7AC.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/D8EF8F5F-3E25-4F4E-A9B5-B16E3FCB4E9F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/DB38C316-013B-B142-A4AB-D23AA834581B.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/DB978157-77AC-B046-8508-2793B54792EE.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/DBD2E541-4D63-D94D-961A-6C9849CF0093.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/DBECB541-787A-E54A-9A10-EFD68B00EAE4.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/DC71ED02-9857-3B41-B6C6-241C65AD3B5E.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/DCC2F861-2B3C-AB46-A907-AAEF041A0354.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/DCD15F46-4252-604D-A503-90F6B8BD9B7A.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/DCED4A3A-24EC-5345-B18E-DF4A0EEF5AC4.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/DD684B78-5DB6-CF44-86BB-65C7B210A51A.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/DDD69870-35F1-E149-B14A-4522C9FB01A8.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/DE800EF6-CDEA-B44C-A3A8-2BE2415344EC.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/DF4FBE57-94D4-F54A-873C-80CFF8594ADC.root',
] )
readFiles.extend( [
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/DF5CAC58-ACF0-E84B-8FE9-016F0F6C832A.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/DF6D18B3-8156-1049-B293-94D3AEB405C4.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/DFCCFFF1-88E9-1943-BFCC-7275E0331484.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/DFFBCEFF-D35D-D447-936D-6842EEF781FB.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/E1DD9338-9B03-8240-852D-7481CEA7CFB5.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/E36CF180-85D6-9D45-AB3A-5242746091B4.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/E3FC0EC0-EEA9-DA48-BDE0-8BAEA5766FD4.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/E43EC89A-0667-1744-901A-218E164C0BD6.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/E6E1D413-D403-D443-BEA1-14A7692C1020.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/E9FE08D4-542C-824D-9A9C-5DF0C7A349FF.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/ED487309-DE19-7F47-A752-4533439BD2C5.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/EDE1BE0C-E3CE-7A41-97E4-7E275A50655E.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/EDE5F7CB-72EC-1543-B6E8-49542DF59F93.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/EE476D5D-4894-514D-B8F7-8B861873316F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/EE4B7ECE-17A2-3E4A-97B6-739056B7ACC3.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/EE6A5BCA-DE5E-AE4C-BDF5-6920A3643D8B.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/F0980C96-C55A-6446-A15F-C76B41B663A2.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/F0D0F5A4-B723-7F4B-86B1-30BBDE5BB961.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/F10BE888-DDC0-C345-9FE1-D8B4EC550C01.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/F24FB930-872E-AA47-B380-2F2E97E86EB8.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/F2D05199-1AB3-B74B-8F8E-CB7F9F12AF76.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/F4A2597D-BA5A-0540-B8BE-F42BFACD41E7.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/F7E242C9-A334-F745-A27D-83C097097377.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/F974938D-1D61-B041-BE1A-AF781C4F3988.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/FA617A96-8E3F-A44F-8B24-589D3B603486.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/FACDD593-B8E0-2D4D-A033-D20DF72E0FC4.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/FBE301F7-7320-1640-B60B-B9AF11F098C6.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/FC01AAF4-C620-5D42-AE67-84B6BD99EC19.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/FC7E7C44-BF7D-184A-A2CC-9938CD34A848.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/FD9BA385-76A0-4846-ADFB-4756287282FE.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/FEC364E5-55EC-A446-9F70-51BCA250A75B.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/2530000/FFEBEA04-69CA-004B-A6E1-A966999B7E9D.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/25310000/015D2D67-DB95-DB4B-B9C7-52E124208AFE.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/25310000/190B02DC-C207-4E4B-8D72-5181673C11FA.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/25310000/1C494398-6963-B942-846B-E6AB14C90046.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/25310000/26658BF2-5863-3740-B26D-4C0B08684982.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/25310000/29A19B48-E90C-CC43-9441-298767DBEEA4.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/25310000/37263419-6E54-6C47-9505-4CD485603CEF.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/25310000/4CFC154D-A3E6-A745-93E2-AF8E6CDEEE6F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/25310000/52D50C21-0DFD-BF45-A0B0-B1107F34D541.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/25310000/546167FB-AC19-CC4B-9EC1-F7B006EB0890.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/25310000/56B0C283-3F35-E443-A07B-E262C2DE4BDD.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/25310000/5D7C59F8-EADC-8446-930E-ABD916915004.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/25310000/8DDDE812-094C-0A46-AE18-FA8228CDF520.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/25310000/97A1D404-28EB-1148-94C5-7FF74B3EE847.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/25310000/98B20C9C-3EE9-4943-9686-47D85630328B.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/25310000/ADA95267-DC42-0144-8E3E-E3A5A997F354.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/25310000/AE92704B-C447-4C44-B2E9-D59E16B45AD8.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/25310000/BA2EC183-F9C6-BA47-998F-D8CDBCC443CF.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/25310000/BCBE3B75-D824-084D-B996-557B2DB8F1B4.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/25310000/D671A052-F742-E444-BF22-BF8F8103006A.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/25310000/DCDAB40E-952A-4743-994C-B3B0F59CD098.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/25310000/ECCFDA7B-9649-4A49-8043-32BDD5493154.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/25310000/EE3C5F93-0552-EA4E-A694-9AAFCA6BB5A3.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/25310000/F1FE5CF5-ED94-8D45-9EFB-6725C1A28126.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/25310000/F22C3E97-FFF9-4348-9AD1-25D8D5B08DEB.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/00740E9A-8BA7-484C-A0D4-FF86E01EEFAB.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/0161D154-B4D0-5F48-B902-0AE570CC2DAC.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/0B0912CB-72EB-6E4D-A6FE-68F241CCBA71.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/0BCA6920-2CBD-0441-AF2A-69884804D521.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/0D1A9BAA-AD22-B243-970D-DE6E5BA9DC41.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/0D988CAF-C1EF-3646-8D0A-8876BFCF4FC8.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/111C953D-02FB-5B48-BE40-BD604D911B04.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/11682C0D-C993-2047-B87F-63A4E2646499.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/150B096D-1E45-8C49-9E16-86E643BC9A7B.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/18C5CDD7-7570-E141-9807-7BE8BA018B7B.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/1CEA1288-7E80-684E-BDBC-2A3BDB69D553.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/1E2F6545-93DB-C142-8480-640AD39F09B1.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/1E5B4168-69A5-C44F-98CF-F92E246AEED1.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/1ECFCAE5-2C1B-144B-BB4B-7A913CCAF500.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/1FAFC742-EED5-B043-957E-69151D17068C.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/201C51A3-D79D-C74E-A106-0A65469B863E.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/2040FD3F-6A17-4949-ABB9-61DFDAB8737E.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/23438FEE-970D-D349-9817-098CA28964F2.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/23B4FA32-BF44-1044-8C6B-C7BEACE56216.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/270BAE0E-589E-514B-82DA-33D2016D38D4.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/27B38C51-94F7-444B-AAE7-5505214055BA.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/2B1BDEFD-03E6-8B48-AEE3-23E56644F52E.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/2C09245D-E719-A844-8363-4F8A1D7516AE.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/2DE808EC-BEF2-DB4F-81CB-96EBAA5B3EC6.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/39317E16-5FC7-3647-ACC5-FC74B734810D.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/3AC09085-3618-1C4E-8E25-FAE720990F3C.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/3C72D0C2-5734-D745-9EDA-D3F165A43BAA.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/3CB21D18-8689-B647-83B2-FE387CEA713A.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/3FD63C13-3AEB-9742-B101-EE4E9D56AEE7.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/43B2FB16-C73E-1640-8A66-85F7E4FDAA98.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/477DB1EE-77B4-0149-A974-7DC3EBA5BAE5.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/4B43C2B9-3B9C-1344-B0F3-A3BBFB14488D.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/4D846B54-EBF9-744B-9B72-2451F6D529AC.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/51494C26-D3AC-1848-B076-298BCA6ACB8E.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/51BCE606-959A-A24C-B622-AEC39CBAB5DC.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/59F9BDB8-0587-864C-922C-4C332BC1F7B9.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/5A12621D-31C9-E042-9039-0538D941D476.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/5A6018B8-A63F-6846-8B67-3786AC748981.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/5AA76158-A842-B346-B00B-63B841080D54.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/5ED16D3D-3FC1-864E-998C-1924C5F5E08C.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/6271C35B-30F1-E644-B281-FB1876C632E7.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/62E5E210-E7A1-BA4A-BB5A-853BF9F6AFE6.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/645FBC5E-F83F-4C4A-AA89-AC550ACCB9C0.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/66115F82-6336-204D-B4F4-28119BC6EC0B.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/666805FC-2772-CD47-8AF1-27770B2F7FC1.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/69187B03-3343-F846-A791-F2C00D533B6E.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/6A5022EB-A38F-F94A-97F9-7491EE238E01.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/6C564911-A235-254D-9122-14A6A65758B4.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/6CFD655D-0B98-4C4F-B92A-CE21534AA5E6.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/6D1783E8-E980-6844-9D7C-B2E445B1A44F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/6D9C072A-5D50-004F-9BC8-B949A8EDFC6B.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/6F301DAF-90B4-D149-A0E8-896E68C28AA9.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/727319D4-0ABC-3E40-8F24-BCEF44BACAE5.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/73A53631-14C2-D542-863B-74D9752F810C.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/74C1D45C-DEB0-4A4C-B712-204E68AC0635.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/75915C8A-0B87-8745-8AB0-704691127803.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/76D44B34-98A1-B74B-B308-1F44CFF9906B.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/7B758ECE-1CFD-A34D-9784-13E8FF1A0581.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/7F3CC92B-2F07-484C-894D-58DF2E0A833C.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/83B30849-ECA4-BD4E-AF8E-A7564C49F886.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/863497D0-819C-7549-A4AD-32DA2DF49359.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/8B265EE9-717B-C544-980F-056D5E878827.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/8D207266-97BE-DE42-A740-51739F207CAD.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/8D97B67D-2FCC-944C-8642-BDFCDC2C5AAD.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/8D998915-AA99-384B-814F-53DF381B63AD.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/8E31A25B-1959-A14B-A93B-2626004C48B6.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/9070F146-796D-B343-AE70-666F5688F8A3.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/91F37FB2-159D-6840-8A90-C486D9BEFB7C.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/9454403E-3128-D54A-9799-4CA0D88CD3E5.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/95FB9626-980F-144E-A9CA-68DF9140B843.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/9700E9DB-8F15-8346-AF8D-65FF3B81967E.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/99120665-72E5-B243-B51C-9757E49DE3B0.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/9C8B4769-6DC6-9840-BBD1-D9ADA445F200.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/9D15E50A-12E9-C540-A97E-0CF9D06DB205.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/A48CB09B-6C7F-4646-A353-249D80B106E3.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/A527FC42-3039-AE45-8B75-FB3C57FA218A.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/A7EB113F-98A5-694A-802E-E60D359B97D1.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/A84E717D-CAEB-8847-AC4E-60574936D16B.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/A87C29FF-C57F-A343-9E7E-157D07DA466D.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/AAB88C15-B0C5-464A-8B6D-9D493A77AB2F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/ABE33CFC-34E2-4547-B1F3-5F76FFF265BE.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/B1724A97-BFFA-C348-96D9-4B7846984F0E.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/B26F1F1E-1C0F-B441-BE6D-2370F2A076E7.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/B3E5071B-EF8A-C649-8631-7499ECA2B401.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/B58486B8-71A4-D840-9666-796E8F6862FE.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/B5D7799D-0AEE-9846-9DF7-B5F5945DA332.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/B779845F-AAB2-4947-BCAD-9A05173498C6.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/B86D9958-F78F-7D47-A3BD-F2B5499E2C69.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/B9648D77-F101-1344-96C2-174877651376.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/B9CFD511-A1F5-604F-AAD1-EC02D3DAAABF.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/BB3E06DA-8135-884E-9696-69FFFA99078E.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/BE3A8576-2C80-CD4B-AAA7-CA3A77652079.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/C2DA763E-98A9-6044-B832-BCC50B97D3FA.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/C33DF600-0728-7F45-B017-B55727DCFF92.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/C3765AEF-05BC-BA43-A994-C0F3C90B6C41.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/C554B73E-3BC0-1F4A-9139-D9864E88F744.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/C647F26B-54CC-AE4D-9313-04837AB13A3B.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/C67F7B3C-0B07-0D46-A70E-F96E3183FFBF.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/C92BACB5-26A9-BF44-8FB7-0F0A2BE0D696.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/C9F26C3D-CD7E-F14A-8150-D2920AB85462.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/CA3654A7-07F3-774D-AC38-8673D2961481.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/CB4A76B1-1962-514E-91FB-5077230CBD6F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/CE714D4E-EBDD-314D-BED9-1B31B810B315.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/CFBA3BD4-42D3-804A-8D0B-D8D83B4D740F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/CFF5C01D-6448-2D45-B3ED-DAD2AA8C97F0.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/D2051375-B0F7-0645-84ED-79DEF894D9D1.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/D3A6847E-9372-6A43-9C39-D453102974B0.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/D46E0B1F-8A95-3448-A2E3-F6408F88A6D7.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/D613D5C6-1205-DF41-9E35-57B691B051CC.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/D691450B-6947-CD47-9DD3-2DB402AB51FF.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/D70B18A4-40F4-9648-B0B2-0C7D44027C53.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/D7ED8FF1-F45D-3B4B-9B9B-ABD1E91E8C18.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/D9564D5A-3A56-3F44-9762-F68091A92DA4.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/DD5D4C75-43A5-8A48-B153-BBD06CD17AD8.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/DE473EB5-9EC7-6F4D-A181-44C8EFC374B3.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/DEE650CC-9296-7743-847E-9BDF78D597A2.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/E1C86D1E-01D7-814B-879B-7794C7CF2EF7.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/E1E06E86-E35B-9E4D-A042-C3DBB15A0B63.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/E9551DD6-0AAF-1B4F-9A62-D02F0A6083AE.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/E9B09FB5-7C40-8E4B-8C33-B965D166C83E.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/EBD3EEA4-308A-234E-81C3-712EB672CB76.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/EC3E4F45-57C7-644D-AF86-647F53A629BB.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/ECAFE7B1-DD96-F144-A978-BB8697D44285.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/ECD42A40-6C6E-DE4D-9108-4F4001C7169C.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/ED847D37-8300-FC4B-B438-505E7CCA6E53.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/EF17F264-BE9B-1F44-BF26-47C6611D0960.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/F1BAF12A-9A5A-AA4B-9C47-C02DFDE28A2F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/F270FA26-52AD-1944-878F-C129305D475F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/F5442211-FE04-2049-822C-3E6764A210BD.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/F5BEA595-F4E8-5040-AA06-8362E08AD004.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/F6009DF0-02CF-9040-B0DF-C7BE58696855.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/F77C29C4-33F5-DB44-AE70-6FEF217F5098.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/FA51E14F-705E-FE41-B213-402616DAF0E9.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/FA6E521E-38EC-E84D-A6A2-E9958E0624C2.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/FC4BFA0C-1799-3F44-9DAD-76D15EB59912.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/40000/FC887885-0F30-534C-89FD-1657891C36D3.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/002D4B3D-C7F2-2B4F-9034-FF97937139B9.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/01F1F9C3-7A93-8645-BD96-EA968C632EA1.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/029C34F8-3D3F-1A43-9FB2-D93D3CDB0ED8.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/0387D222-84DA-374C-BA40-A059740A42D6.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/0498BA3D-B69F-6344-ADF5-A3A2A4B018A6.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/0782DE15-D7C7-2341-8D0D-CAFC19777513.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/0866115E-2911-204D-9727-A22AEDF7BC60.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/09AB6FB8-2460-4748-B434-4A53D642F5BB.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/149600D5-0583-C248-8CB6-FC83C14291A0.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/183CC50D-827F-A446-AA85-7BD5099F0AA0.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/18A37D02-5305-2546-9D6A-6077FD0A61B3.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/1DC8B9CA-C76B-6146-AC55-D828F9D3E649.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/1E445232-BBB3-0D48-BBAB-52A8B94E2CCA.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/27BAC5BD-0B1C-174F-BCCD-11628C3E3684.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/2AA6BA7B-D8D6-9144-A725-9774C67D4F3D.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/2B3B691F-5605-A042-BF3E-9EBA58FF961C.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/2DB0C2CC-616D-A840-A116-DA1C3179291E.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/2FBFDD3D-BC8E-1E4A-BD31-220003966D81.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/33556457-C263-CB47-98EB-63B6ED8631DE.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/3414B9AB-03CC-5F40-B249-5C6D1A2BF1BB.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/3BDF6440-D74E-F644-8FFB-9FB223A41E73.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/3C27867F-56D4-3740-BE71-9F5E64C1B36A.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/3E32040F-7EFA-4541-A44F-93A553083BF8.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/436AA578-58CB-C342-8D37-50F3EB02CB4E.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/4449D2D9-412C-6A4C-8F22-94A5EE227F33.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/476A394F-1E0B-B74A-B5A3-F6F304542449.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/47E3965D-5403-264C-B08F-A457CF8B8E0F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/4864425B-A42B-694D-9965-CCFC448D548E.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/48AB27E3-0456-FF45-B905-B31AF06F2755.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/4A5B93D3-8721-8043-A1D0-422E7E113D15.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/4C03BB5B-1C90-A84E-B4A7-EFC7E080B44D.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/4D392416-0BA7-EC4B-853A-A553B0B94CEB.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/4E06634D-E6F3-A349-A88B-5B3DD6330328.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/518BF959-1B26-4A4D-A655-C639F52588F5.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/52BECA74-432F-514B-98FE-86CC9E64C41F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/58B1F9B2-8A32-B742-B9F8-055D30325BCC.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/58E32EEF-D979-F74C-8881-26DC6686FE81.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/5B77D5BC-500E-684A-B372-F0C8137EC8AC.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/5E3DE40E-8DF6-E440-BFF9-032ECE898990.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/6799572D-265E-D94E-A865-FB85AA45430F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/6A57A94C-0E8E-1041-8EB9-91D90E24CF4F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/6C4EA686-81AC-2546-B402-620ED5EFD878.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/727BF05F-1445-D349-A70B-2067A714B486.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/77238886-635D-FB47-A67A-868D3E09AB87.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/7A2D12B1-072C-BC42-AA7E-3440E685B938.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/81724AAC-E114-C049-BC66-958EBD3F2E93.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/818DEDDB-2F8B-3847-B665-D61CC080E26D.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/8BC7F343-2A7F-E84E-B48D-9BBA6263647D.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/8E445F22-DE00-154A-9F00-537D5C960137.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/929A28EA-D8A6-494F-BF55-AF2C4B7A1679.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/93002E71-837B-E243-8176-38FDEABC6B81.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/94042E3D-7253-5946-98CE-95852FC13109.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/941F8DDF-FBA7-F948-AF98-EC16FB688C2B.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/97B8E8DC-CDBC-014C-877F-55532F55320B.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/9A5308B5-8FA4-4347-B0DC-78144B057D74.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/9AC9838A-830E-D440-80C3-9D0B4F91C9AE.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/9DC72B7C-94DE-0D4B-9078-D4AF239C1379.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/A3B615BF-A504-704B-A032-DF3CE690D525.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/AA08B693-16F9-A841-8CE1-BDF1E445C3B4.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/ACC54A2F-4FCE-F541-BDE7-35C11D3647C7.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/AD8BFABD-2140-D44C-99B6-45CE7408FD53.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/B01B2D68-5B68-6B4E-B0B0-A27F3E3CDB14.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/B0750808-F74E-7949-9940-DC4D42857569.root',
] )
readFiles.extend( [
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/B1CEDBDD-D5A8-4542-B097-6194E8535B04.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/B364313C-47CD-084E-9F13-9456CF1C9EA5.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/B80E64D9-0647-7A40-9C54-0B39C1021084.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/B90C3C5D-665C-0348-B27F-DD55C6E72172.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/B910BE35-2775-224D-9CE9-9787481E3508.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/BC4DF0D3-E259-3749-B1E0-0B4C75D20908.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/BE5B3DF0-F2F4-8F4A-9282-DCB5399F11DA.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/C03A467F-9F32-154F-99EE-D2CAEBE53DC8.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/CB14F6DF-E1A7-B542-B4DE-1A8B68E6E5D8.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/CBDBB752-6D3A-C945-900E-B7A65675B917.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/CBE06EA5-B9B9-7D46-A3FE-8A7C89D92CBD.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/CF147427-4A05-8545-B69D-342EFD660081.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/D17C9D76-AD69-5B47-8D83-DE921D7104A0.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/D47025BC-CD06-C749-88CF-8FED3212BCF5.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/D5030B38-D321-484E-9B57-B33BF1FC6EDF.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/D5C096A2-435D-864C-9C23-FB87B4AE1EC7.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/D7D8FDD3-F69B-2545-A96C-4F93FFDD7AE7.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/DAD23E9E-6ADC-2F45-8072-0DEA8ADCEBE9.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/E734BC86-A13C-9D4B-A552-9ABC21C2EDA9.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/E92B481B-1B83-0648-9F9C-B55511883031.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/EE036D0B-EEA4-F145-AC50-32E355FC9754.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/F1AFB333-9EEC-E249-9511-AAB0E230B06A.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/F51D1953-4491-494B-81E0-6600E08B0B1D.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/F735D96C-E721-6540-B179-E13A4660739F.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/F86185C5-320A-1D42-AF9F-1229F83C1E53.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/FD39B205-B180-5F4A-885F-AC6F14E102BD.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/FD827B02-7AC3-6C45-A408-B69C0BD256BE.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/FDDA9BB9-DA38-2D46-95F7-5308BA76BED1.root',
'/store/mc/RunIISummer20UL17MiniAODv2/TTJets_DiLept_genMET-150_TuneCP5_13TeV-madgraphMLM-pythia8/MINIAODSIM/106X_mc2017_realistic_v9-v2/50000/FFF16FD3-A17E-F346-B744-5B757955DACF.root',
] )
| [
"[email protected]"
] | |
0d74495bd1cc1679a451768d66fda5ef8194d179 | ce4f7f8e9336b8bbf9cbfe147d922e37034ab6c3 | /code-festival-2016-qualc/b/main.py | 987ac90bc403547651d6d2456180210a150a8701 | [] | no_license | kussy-tessy/atcoder | 5604919747242ee9740b9131bb6e168e96af0151 | ee917fa5a5218d4a9e72f710d0d844e7c203f13b | refs/heads/master | 2023-07-21T09:25:15.464881 | 2021-09-04T14:06:02 | 2021-09-04T14:06:02 | 311,221,203 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 241 | py | #!/usr/bin/env python3
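# Illustrative behaviour (made-up inputs, not the official test cases):
#   counts 3 1 3 -> max(0, 3 - 4 - 1) = 0 unavoidable same-kind adjacencies
#   counts 6 1   -> max(0, 6 - 1 - 1) = 4 unavoidable same-kind adjacencies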
# Read K and T, then the T per-kind counts.
K, T = map(int, input().split())
As = list(map(int, input().split()))
As.sort()
As_max = As[-1]
As_other = sum(As[:-1])
# The largest kind stays fully separated only while the other kinds supply
# enough slots (As_other + 1); anything beyond that forces an adjacency.
print(max(0, As_max - As_other - 1)) | [
"[email protected]"
] | |
7c562bd59160cfb964891453b9d4a28be9ae4772 | c6b74df572dc2dcf7034c25860d18cb3c9143d4f | /linearizer/generative.py | ec00a10615d362bf6ab18a3138b457f78f83330b | [] | no_license | kadarakos/LinearAMR | 55f2dfedb5d100988be5511004be690e2808ad17 | d8408924171960e84c92cfe46bb531b3b3ee17e0 | refs/heads/master | 2021-01-19T23:19:41.226183 | 2017-04-21T11:29:02 | 2017-04-21T11:29:02 | 88,961,862 | 0 | 0 | null | 2017-04-21T08:37:38 | 2017-04-21T08:37:38 | null | UTF-8 | Python | false | false | 3,595 | py | __author__ = 'thiagocastroferreira'
from sys import path
path.append('/home/tcastrof/amr/scp_repo')
path.append('/home/tcastrof/amr/Grammar')
path.append('../')
from compression_tree.compressor import Compressor
from ERG import AMR
import kenlm
import os
import utils
import itertools
class Generative(object):
    def __init__(self, lm_path, compressor=None):
        self.model = kenlm.Model(lm_path)
        # Sentence compressor used by the pipeline; passed in explicitly so the
        # class does not depend on a module-level global.
        self.compressor = compressor
def process(self, amr):
self.amr = amr
return self.linearize(self.amr.root)
def ranking(self, base):
candidates = []
for candidate in itertools.permutations(base):
snt = []
for e in candidate:
for span in e.split():
snt.extend(span.split('~'))
snt = ' '.join(snt)
score = self.model.score(snt)
candidates.append((' '.join(candidate), score))
return sorted(candidates, key=lambda x: x[1], reverse=True)
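    # Sketch of ranking() on hypothetical fragments (not real model scores):
    # with base = ['boy', 'want~go'] it flattens each permutation to plain
    # tokens ('boy want go', 'want go boy'), scores both with the n-gram LM,
    # and returns the orderings best-first, so ranking(base)[0][0] is the
    # highest-scoring ordering (still '~'-joined).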
def linearize(self, root):
linear = []
for edge in self.amr.edges[root]:
linear_child = self.linearize(edge.node_id)
if linear_child.strip() != '':
if edge.status == '+':
linear_child = edge.name + '~' + linear_child
linear.append(linear_child)
status = self.amr.nodes[root].status
name = self.amr.nodes[root].name
if 0 < len(linear) <= 9:
if status == '+':
linear.append(name)
rank = self.ranking(linear)
return rank[0][0]
elif len(linear) > 9:
if status == '+':
linear.insert(len(linear)-1, name)
return ' '.join(linear)
else:
if status == '+':
return name
else:
return ''
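    # Note: linearize() walks the graph from the root, recursively linearizing
    # each child and emitting an edge label or concept name only when its
    # status is '+'. With 1-9 realized children the ordering is delegated to
    # ranking(); beyond that the permutation space is presumably too large,
    # so the original child order is kept.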
if __name__ == '__main__':
CLF_NODE_PATH = '../compression/results/clf_node.cPickle'
CLF_EDGE_PATH = '../compression/results/clf_edge.cPickle'
EDGE_PATH = '../compression/validation/edge_feat.cPickle'
EDGE_PARENT_PATH = '../compression/validation/edge_parent_feat.cPickle'
EDGE_CHILD_PATH = '../compression/validation/edge_child_feat.cPickle'
NODE_PATH = '../compression/validation/node_feat.cPickle'
NODE_PARENT_PATH = '../compression/validation/node_parent_feat.cPickle'
LM_PATH = 'lm/6gram.arpa'
compressor = Compressor(clf_node_path=CLF_NODE_PATH,
clf_edge_path=CLF_EDGE_PATH,
edge_path=EDGE_PATH,
edge_parent_path=EDGE_PARENT_PATH,
edge_child_path=EDGE_CHILD_PATH,
node_path=NODE_PATH,
node_parent_path=NODE_PARENT_PATH)
    linearizer = Generative(lm_path=LM_PATH, compressor=compressor)
amrs_path = '../data/LDC2016E25/data/amrs/split/test'
amrs = []
for fname in os.listdir(amrs_path):
f = os.path.join(amrs_path, fname)
amrs.extend(utils.parse_corpus(f, False))
linears = []
for amr in amrs:
        print(amr['sentence'])
linear = linearizer.process(amr['amr'].lower())
final = []
for l in linear.split():
final.extend(l.split('~'))
linears.append(' '.join(final))
de = open('../data/LDC2016E25/corpus/test.gen', 'w')
# en = open('../data/LDC2016E25/corpus/dev.lex', 'w')
for i, linear in enumerate(linears):
de.write(linear)
de.write('\n')
# en.write(amrs[i]['sentence'].lower())
# en.write('\n')
de.close()
# en.close() | [
"[email protected]"
] | |
4a0570c65c81d3d58ef799132c1206c6d01be707 | bcf88b912b9443c3326466c226f68a7e7ad5aa9d | /bdbag/__init__.py | ab5519ea26b97ecb75b741254c95bea69f7adaf3 | [
"Apache-2.0"
] | permissive | mvdbeek/bdbag | 33bc7e0275c720104af77654b0016024cb6ab012 | fe67b5bffc68b7dac823ce03d450ede3affccbef | refs/heads/master | 2020-03-25T05:17:09.646537 | 2018-07-12T03:58:06 | 2018-07-12T03:58:06 | 143,438,809 | 0 | 0 | null | 2018-08-03T14:42:27 | 2018-08-03T14:42:27 | null | UTF-8 | Python | false | false | 6,188 | py | import os
import re
import sys
import json
import logging
import mimetypes
from requests.utils import requote_uri
from pkg_resources import get_distribution, DistributionNotFound
__version__ = "1.4.2"
if sys.version_info > (3,):
from urllib.parse import quote as urlquote, unquote as urlunquote, urlsplit, urlunsplit
from urllib.request import urlretrieve, urlopen
else:
from urllib import quote as urlquote, unquote as urlunquote, urlretrieve, urlopen
from urlparse import urlsplit, urlunsplit
try:
VERSION = get_distribution("bdbag").version
except DistributionNotFound:
VERSION = __version__ + '-dev'
PROJECT_URL = 'https://github.com/fair-research/bdbag'
try:
BAGIT_VERSION = get_distribution("bagit").version
except DistributionNotFound:
BAGIT_VERSION = 'unknown'
BAG_PROFILE_TAG = 'BagIt-Profile-Identifier'
BDBAG_PROFILE_ID = 'https://raw.githubusercontent.com/fair-research/bdbag/master/profiles/bdbag-profile.json'
BDBAG_RO_PROFILE_ID = 'https://raw.githubusercontent.com/fair-research/bdbag/master/profiles/bdbag-ro-profile.json'
ID_RESOLVER_TAG = 'identifier_resolvers'
DEFAULT_ID_RESOLVERS = ['n2t.net', 'identifiers.org']
DEFAULT_CONFIG_PATH = os.path.join(os.path.expanduser('~'), '.bdbag')
DEFAULT_CONFIG_FILE = os.path.join(DEFAULT_CONFIG_PATH, 'bdbag.json')
DEFAULT_CONFIG = {
'bag_config':
{
'bag_algorithms': ['md5', 'sha256'],
'bag_processes': 1,
'bag_metadata':
{
BAG_PROFILE_TAG: BDBAG_PROFILE_ID
}
},
ID_RESOLVER_TAG: DEFAULT_ID_RESOLVERS
}
CONTENT_DISP_REGEX = re.compile(r"^filename[*]=UTF-8''(?P<name>[-_.~A-Za-z0-9%]+)$")
FILTER_REGEX = re.compile(r"(?P<column>^.*)(?P<operator>==|!=|=\*|!\*|\^\*|\$\*|>=|>|<=|<)(?P<value>.*$)")
FILTER_DOCSTRING = "\"==\" (equal), " \
"\"!=\" (not equal), " \
"\"=*\" (wildcard substring equal), " \
"\"!*\" (wildcard substring not equal), " \
"\"^*\" (wildcard starts with), " \
"\"$*\" (wildcard ends with), " \
"or \">\", \">=\", \"<\", \"<=\""
if not mimetypes.inited:
mimetypes.init()
def get_typed_exception(e):
exc = "".join(("[", type(e).__name__, "] "))
return "".join((exc, str(e)))
def add_mime_types(types):
if not types:
return
for t in types.keys():
for e in types[t]:
mimetypes.add_type(type=t, ext=e if e.startswith(".") else "".join([".", e]))
def guess_mime_type(file_path):
mtype = mimetypes.guess_type(file_path)
content_type = 'application/octet-stream'
if mtype[0] is not None and mtype[1] is not None:
content_type = "+".join([mtype[0], mtype[1]])
elif mtype[0] is not None:
content_type = mtype[0]
elif mtype[1] is not None:
content_type = mtype[1]
return content_type
def parse_content_disposition(value):
m = CONTENT_DISP_REGEX.match(value)
if not m:
raise ValueError('Cannot parse content-disposition "%s".' % value)
n = m.groupdict()['name']
try:
n = urlunquote(str(n))
except Exception as e:
raise ValueError('Invalid URL encoding of content-disposition filename component. %s.' % e)
try:
if sys.version_info < (3,):
n = n.decode('utf8')
except Exception as e:
raise ValueError('Invalid UTF-8 encoding of content-disposition filename component. %s.' % e)
return n
def escape_uri(uri, illegal_only=True, safe="/"):
if not uri:
return uri
if illegal_only:
return requote_uri(uri)
else:
urlparts = urlsplit(uri)
path = urlquote(urlunquote(urlparts.path), safe=safe)
query = urlquote(urlunquote(urlparts.query), safe=safe)
fragment = urlquote(urlunquote(urlparts.fragment), safe=safe)
return urlunsplit((urlparts.scheme, urlparts.netloc, path, query, fragment))
def filter_dict(expr, entry):
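    # Evaluate one "column<operator>value" filter expression against a dict-like entry; returns True when the entry passes (or when no expression is given).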
if not expr:
return True
match = FILTER_REGEX.search(expr)
if not match:
raise ValueError("Unable to parse expression: %s" % expr)
expr_dict = match.groupdict()
filter_col = expr_dict["column"]
filter_val = expr_dict["value"]
operator = expr_dict["operator"]
filter_neg = filter_substring = filter_relation = filter_startswith = filter_endswith = False
if "==" == operator:
pass
elif "!=" == operator:
filter_neg = True
elif "=*" == operator:
filter_substring = True
elif "^*" == operator:
filter_startswith = True
elif "$*" == operator:
filter_endswith = True
elif "!*" == operator:
filter_substring = True
filter_neg = True
elif (">" == operator) or (">=" == operator) or ("<" == operator) or ("<=" == operator):
filter_relation = True
else:
raise ValueError("Unsupported operator type in filter expression: %s" % expr)
result = False
filter_val = filter_val.strip()
filter_col = filter_col.strip()
if filter_col in set(entry.keys()):
value = entry[filter_col]
if filter_neg:
if filter_substring:
result = filter_val not in str(value)
else:
result = filter_val != value
else:
if filter_substring:
result = filter_val in str(value)
elif filter_startswith:
result = str(value).startswith(filter_val)
elif filter_endswith:
result = str(value).endswith(filter_val)
elif filter_relation:
try:
statement = "%d%s%d" % (int(value), operator, int(filter_val))
result = eval(statement)
except Exception as e:
logging.warning("Unable to evaluate filter expression [%s]: %s" %
(expr, get_typed_exception(e)))
else:
result = filter_val == value
if not result:
logging.debug(
"Excluding %s because it does not match the filter expression: [%s]." %
(json.dumps(entry), expr))
return result
| [
"[email protected]"
] | |
ca893e5aeee0c7456739c4457ae664105c5c96c6 | 46c3fd904e7b1c45541ffe0518afe50dfdafb089 | /movie/migrations/0003_movielink_link.py | 84dc7889b0f52b99ff6733291f6811344b4d8db2 | [] | no_license | Shirhussain/Movies | 6ab10b27748bc1cdd3a904861092e5246ce01190 | 4f6639491a86708a5d04a8de7f928500ecba3fdc | refs/heads/master | 2023-01-01T07:52:25.639564 | 2020-10-26T02:15:23 | 2020-10-26T02:15:23 | 306,643,126 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 413 | py | # Generated by Django 3.1 on 2020-10-24 17:46
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('movie', '0002_auto_20201024_1659'),
]
operations = [
migrations.AddField(
model_name='movielink',
name='link',
field=models.URLField(default=''),
preserve_default=False,
),
]
| [
"[email protected]"
] | |
47a724810b4e9c7cfd2870858a2472067fe6ff19 | 1cceef957954ec516cd8bcd9e9d51e8d9120200b | /test_retring_async.py | 1c13088f03af4ed39ea5ab6e8ea213792c02dbe7 | [
"MIT"
] | permissive | coneagoe/retrying-async | 3b8c4a51a7adcbaa2149b110199e6d0b6b5a1f7e | 54eec24e4183b4ea31c0e133ed11ec0f0535a194 | refs/heads/master | 2022-12-21T05:12:17.930689 | 2020-09-21T02:38:42 | 2020-09-21T02:38:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 610 | py | # coding: utf-8
import asyncio
import requests
from retrying_async import retry
def request_api_sync():
    print('Fetching...')
response = requests.get(url="http://www.baidu.com")
print(response.status_code, response.content)
raise Exception("异常")
@retry(attempts=3, delay=3)
async def request_api_async():
    print('Fetching...')
response = requests.get(url="http://www.baidu.com")
print(response.status_code, response.content)
raise Exception("异常")
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(request_api_async())
| [
"[email protected]"
] | |
d751ba839e41585536769b62bfa2c50a150fb12d | 6559d2c69ddcd73df844f9e26470c8ea06d92a6c | /xnr_0429/xnr/_facebook/feedback_comment.py | 550d853c6fbd9b7769168390aeafe3c05e801dbe | [] | no_license | yuanhuiru/xnr2 | cc4199fbb136fa5bdf18d879bb77ceb5155627f3 | b37ec9beccf7332efcda9bdff0c34fa3198b816c | refs/heads/master | 2020-03-21T12:22:17.392966 | 2020-01-14T06:40:55 | 2020-01-14T06:40:55 | 138,549,389 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 1,505 | py | #!/usr/bin/env python
#encoding: utf-8
from launcher import Launcher
import time
from es import Es_fb
class Comment():
def __init__(self):
self.list = []
def get_comment(self):
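        # For every post URL in comment_list, load the page and collect each comment's author name/id, avatar URL, text and timestamp.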
for url in comment_list:
driver.get(url)
root_content = driver.find_element_by_xpath('//div[@class="_58jw"]/p').text
root_time = driver.find_element_by_xpath('//abbr[@class="_5ptz"]').get_attribute('data-utime')
for each in driver.find_elements_by_xpath('//div[@aria-label="评论"]'):
author_name = each.find_element_by_xpath('./div/div/div/div[2]/div/div/div/span/span[1]/a').text
author_id = ''.join(re.findall(re.compile('id=(\d+)'),each.find_element_by_xpath('./div/div/div/div[2]/div/div/div/span/span[1]/a').get_attribute('data-hovercard')))
pic_url = each.find_element_by_xpath('./div/div/div/div[1]/a/img').get_attribute('src')
content = each.find_element_by_xpath('./div/div/div/div[2]/div/div/div/span/span[2]/span/span/span/span').text
time = each.find_element_by_xpath('./div/div/div/div[2]/div/div/div[2]/span[4]/a/abbr').get_attribute('data-utime')
self.list.append({'author_name':author_name,'author_id':author_id,'pic_url':pic_url,'content':content,'time':time})
return self.list
def save(self,indexName,typeName,item):
es.executeES(indexName,typeName,item)
if __name__ == '__main__':
fb = Launcher('18538728360','zyxing,0513')
es = es_twitter()
comment_list = fb.get_comment_list()
comment = Comment()
list = comment.get_comment()
comment.save(list)
| [
"[email protected]"
] | |
1aaafa9b5403e7331b1d730439c5a8e67fa3debb | d1e4f29e583ee964d63bc48554eaa73d67d58eb2 | /zerver/migrations/0264_migrate_is_announcement_only.py | 073eb22a23670741fdc4d7155701549b168dfc77 | [
"LicenseRef-scancode-free-unknown",
"Apache-2.0"
] | permissive | hygolei/zulip | 299f636f9238f50b0d2746f1c371748f182f1f4e | 39fe66ab0824bc439929debeb9883c3046c6ed70 | refs/heads/master | 2023-07-11T22:50:27.434398 | 2021-08-09T10:07:35 | 2021-08-09T10:07:35 | 375,401,165 | 1 | 1 | Apache-2.0 | 2021-08-09T10:07:36 | 2021-06-09T15:20:09 | Python | UTF-8 | Python | false | false | 972 | py | # Generated by Django 1.11.26 on 2020-01-25 23:47
from django.db import migrations
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def upgrade_stream_post_policy(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
Stream = apps.get_model("zerver", "Stream")
Stream.STREAM_POST_POLICY_EVERYONE = 1
Stream.STREAM_POST_POLICY_ADMINS = 2
Stream.objects.filter(is_announcement_only=False).update(
stream_post_policy=Stream.STREAM_POST_POLICY_EVERYONE
)
Stream.objects.filter(is_announcement_only=True).update(
stream_post_policy=Stream.STREAM_POST_POLICY_ADMINS
)
class Migration(migrations.Migration):
dependencies = [
("zerver", "0263_stream_stream_post_policy"),
]
operations = [
migrations.RunPython(
upgrade_stream_post_policy, reverse_code=migrations.RunPython.noop, elidable=True
),
]
| [
"[email protected]"
] | |
9f0fe44398ecdc7bda9c8cb213e2256c43819598 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/380/usersdata/334/90931/submittedfiles/testes.py | 12f71f0bb7b483e5f7d1e3b92c7403e72b738f64 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 145 | py | # -*- coding: utf-8 -*-
#COMECE AQUI ABAIXO
def maximo (a,b):
if a>b:
return a
else:
return b
x = int(input())
y = int(input())
print(maximo(x, y))
"[email protected]"
] | |
1fd6f92f79cd265af470244e0460ad9853def643 | e18a353582609732c795401f1a01bc762bd939f2 | /top/python/MuonTracking.RunII.py | 9f1a8ca4dcada005ae643bc5e39eb41edab8c6d8 | [] | no_license | s-farry/workspaces | 06741807bb464bb0712d52108c2d1b7ae62b1353 | 0dcf3868dcbe110206ea88ff5c9e04a3b44b1ca1 | refs/heads/master | 2020-04-03T00:45:39.152227 | 2017-06-15T16:33:33 | 2017-06-15T16:33:33 | 64,213,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,463 | py | from Jawa import EfficiencyClass
from ROOT import TFile, TCut, TTree, TMath
phicut= TCut("(abs(tag_PHI-probe_PHI)<TMath::Pi() ? abs(tag_PHI-probe_PHI) : 2*TMath::Pi()-abs(tag_PHI-probe_PHI))>0.1")
ptcut = TCut("tag_PT > 20000 && probe_PT > 20000")
triggercut = TCut("tag_Hlt2EWSingleMuonVHighPtDecision_TOS==1 && tag_Hlt1SingleMuonHighPTDecision_TOS == 1 && tag_L0MuonEWDecision_TOS ==1")
run1triggercut = TCut("tag_Hlt2SingleMuonHighPTDecision_TOS==1 && tag_Hlt1SingleMuonHighPTDecision_TOS == 1 && tag_L0MuonDecision_TOS ==1")
trkqual = TCut("(sqrt(tag_PERR2)/tag_P) < 0.1")
eta = TCut("tag_ETA > 2 && tag_ETA < 4.5 && probe_ETA > 2 && probe_ETA < 4.5")
vtxcut = TCut("boson_ENDVERTEX_CHI2/boson_ENDVERTEX_NDOF < 5")
isocut = TCut("tag_cpt_0.50 < 2000")
pt25 = TCut("probe_PT > 25000")
pt30 = TCut("probe_PT > 30000")
passcut = TCut("probe_AssocZM == 1")
passcutW = TCut("probe_AssocWM == 1")
passcutStd = TCut("probe_AssocStdM == 1")
mass = TCut("boson_M > 70000 && boson_M < 110000")
selcut = ptcut + phicut + triggercut + vtxcut + eta + mass
f = TFile.Open('root://hepgrid11.ph.liv.ac.uk///dpm/ph.liv.ac.uk/home/lhcb/Run2Effs/MuonTracking_WLine.MD.2016.root')
g = TFile.Open('root://hepgrid11.ph.liv.ac.uk///dpm/ph.liv.ac.uk/home/lhcb/Run2Effs/MuonTracking_WLine.MU.2016.root')
h = TFile.Open('root://hepgrid11.ph.liv.ac.uk///dpm/ph.liv.ac.uk/home/lhcb/Run2Effs/MuonTracking_WLine.MD.2015.root')
i = TFile.Open('root://hepgrid11.ph.liv.ac.uk///dpm/ph.liv.ac.uk/home/lhcb/Run2Effs/MuonTracking_WLine.MU.2015.root')
t = f.Get("PlusTag/DecayTree")
u = f.Get("MinusTag/DecayTree")
v = g.Get("PlusTag/DecayTree")
w = g.Get("MinusTag/DecayTree")
tt = h.Get("PlusTag/DecayTree")
uu = h.Get("MinusTag/DecayTree")
vv = i.Get("PlusTag/DecayTree")
ww = i.Get("MinusTag/DecayTree")
magup = TCut("Polarity == 1")
magdown = TCut("Polarity == -1")
selcutMU = selcut + magup
selcutMD = selcut + magdown
'''
etabins = [2.0 , 2.25 , 2.5 , 2.75 , 3.00 , 3.25 , 3.5 , 4.0 , 4.5]
etabins2 = [2.0 , 2.25 , 2.5 , 2.75 , 2.875, 3.00 , 3.1225, 3.25 , 3.375, 3.5 , 4.0 , 4.5]
tckbins = [3500000.0, 4600000.0, 4800000.0, 5700000.0, 5900000.0, 6000000.0, 7100000.0, 7300000.0, 7400000.0,
7500000.0, 7600000.0, 7700000.0, 7900000.0, 7929912.0, 8000000.0]
effvars = [
["ETA", "probe_ETA", 10 , 2 , 4.5 ],
["ETA5", "probe_ETA", 5 , 2 , 4.5 ],
["ETA8", "probe_ETA", etabins ],
["PT", "probe_PT", 10 , 20000 , 70000],
["PT5", "probe_PT", 5 , 20000 , 70000],
["P", "probe_P", 8 , 100000 , 500000],
["PHI", "probe_PHI", 10 , -TMath.Pi() , TMath.Pi()],
["PHI5", "probe_PHI", 5 , -TMath.Pi() , TMath.Pi()],
["VeloClusters", "nVeloClusters", 8 , 0 , 4000 , "I"],
["ITClusters", "nITClusters", 8 , 0 , 2000 , "I"],
["PVs", "nPVs", 6 , -0.5 , 5.5 , "I"],
["TCK", "OdinTCK", tckbins, "I"],
["SPDHits", "nSPDHits", 20 , 0 , 1000, "I"]
]
eff2dvars = [
["ETA_PHI", "ETA5","PHI5"],
["ETA_PT" , "ETA5","PT5"]
]
'''
from effbins_config import *
def makeMuonTrackingRunII(name, selcut, passcut):
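    # Build tag-and-probe tracking efficiencies separately for each magnet polarity and muon charge, then combine them into per-polarity, per-charge and overall efficiencies.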
MuonTrackingRunIIMagUpMuPlus = EfficiencyClass("Muon"+name+"TrackingRunIIMagUpMuPlus")
MuonTrackingRunIIMagDownMuPlus = EfficiencyClass("Muon"+name+"TrackingRunIIMagDownMuPlus")
MuonTrackingRunIIMagUpMuMinus = EfficiencyClass("Muon"+name+"TrackingRunIIMagUpMuMinus")
MuonTrackingRunIIMagDownMuMinus = EfficiencyClass("Muon"+name+"TrackingRunIIMagDownMuMinus")
MuonTrackingRunIIMagUpMuMinus.AddTree(v)
MuonTrackingRunIIMagUpMuMinus.AddTree(vv)
MuonTrackingRunIIMagUpMuMinus.SetSelectionCut(selcut + magup)
MuonTrackingRunIIMagUpMuMinus.SetPassCut(passcut)
MuonTrackingRunIIMagUpMuMinus.AddVars(effvars + trkeffvars)
MuonTrackingRunIIMagUpMuMinus.Add2DVars(trk2dvars)
MuonTrackingRunIIMagUpMuMinus.Run()
MuonTrackingRunIIMagUpMuMinus.SaveToFile()
MuonTrackingRunIIMagUpMuPlus.AddTree(w)
MuonTrackingRunIIMagUpMuPlus.AddTree(ww)
MuonTrackingRunIIMagUpMuPlus.SetSelectionCut(selcut + magup)
MuonTrackingRunIIMagUpMuPlus.SetPassCut(passcut)
MuonTrackingRunIIMagUpMuPlus.AddVars(effvars + trkeffvars)
MuonTrackingRunIIMagUpMuPlus.Add2DVars(trk2dvars)
MuonTrackingRunIIMagUpMuPlus.Run()
MuonTrackingRunIIMagUpMuPlus.SaveToFile()
MuonTrackingRunIIMagDownMuMinus.AddTree(t)
MuonTrackingRunIIMagDownMuMinus.AddTree(tt)
MuonTrackingRunIIMagDownMuMinus.SetSelectionCut(selcut + magdown)
MuonTrackingRunIIMagDownMuMinus.SetPassCut(passcut)
MuonTrackingRunIIMagDownMuMinus.AddVars(effvars + trkeffvars)
MuonTrackingRunIIMagDownMuMinus.Add2DVars(trk2dvars)
MuonTrackingRunIIMagDownMuMinus.Run()
MuonTrackingRunIIMagDownMuMinus.SaveToFile()
MuonTrackingRunIIMagDownMuPlus.AddTree(u)
MuonTrackingRunIIMagDownMuPlus.AddTree(uu)
MuonTrackingRunIIMagDownMuPlus.SetSelectionCut(selcut + magdown)
MuonTrackingRunIIMagDownMuPlus.SetPassCut(passcut)
MuonTrackingRunIIMagDownMuPlus.AddVars(effvars + trkeffvars)
MuonTrackingRunIIMagDownMuPlus.Add2DVars(trk2dvars)
MuonTrackingRunIIMagDownMuPlus.Run()
MuonTrackingRunIIMagDownMuPlus.SaveToFile()
MuonTrackingRunIIMagDown = EfficiencyClass("Muon"+name+"TrackingRunIIMagDown", MuonTrackingRunIIMagDownMuPlus, MuonTrackingRunIIMagDownMuMinus)
MuonTrackingRunIIMagDown.MakeEfficiencyGraph()
MuonTrackingRunIIMagDown.SaveToFile()
MuonTrackingRunIIMagUp = EfficiencyClass("Muon"+name+"TrackingRunIIMagUp", MuonTrackingRunIIMagUpMuPlus, MuonTrackingRunIIMagUpMuMinus)
MuonTrackingRunIIMagUp.MakeEfficiencyGraph()
MuonTrackingRunIIMagUp.SaveToFile()
MuonTrackingRunIIMuPlus = EfficiencyClass("Muon"+name+"TrackingRunIIMuPlus", MuonTrackingRunIIMagDownMuPlus, MuonTrackingRunIIMagUpMuPlus)
MuonTrackingRunIIMuPlus.MakeEfficiencyGraph()
MuonTrackingRunIIMuPlus.SaveToFile()
MuonTrackingRunIIMuMinus = EfficiencyClass("Muon"+name+"TrackingRunIIMuMinus", MuonTrackingRunIIMagDownMuMinus, MuonTrackingRunIIMagUpMuMinus)
MuonTrackingRunIIMuMinus.MakeEfficiencyGraph()
MuonTrackingRunIIMuMinus.PrintEfficiencies("ETA")
MuonTrackingRunIIMuMinus.SaveToFile()
MuonTrackingRunII = EfficiencyClass("Muon"+name+"TrackingRunII", MuonTrackingRunIIMagDown, MuonTrackingRunIIMagUp)
MuonTrackingRunII.MakeEfficiencyGraph()
MuonTrackingRunII.SaveToFile()
makeMuonTrackingRunII("",selcut,passcut)
#makeMuonTrackingRunII("W",selcut,passcutW)
| [
"[email protected]"
] | |
17a4c3efc94fc1e6caad8a5a7ade5f392c075824 | 5c7db30d59cd28fe1923bb5fdb9280ffe2070b70 | /django-polls/polls/migrations/0001_initial.py | cca72afb3465cec2f3f673e3e259b8a64609593e | [] | no_license | golkedj/django_test | 6816b640e675aabd311de98907ff38fc8034b7d5 | d1ab4b5bf6984aee78163a94638460f187ca12a9 | refs/heads/master | 2021-01-22T16:44:30.569480 | 2017-09-06T16:56:23 | 2017-09-06T16:56:23 | 100,724,483 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,230 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-18 14:44
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Choice',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('choice_text', models.CharField(max_length=200)),
('votes', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='Question',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('question_text', models.CharField(max_length=200)),
('pub_date', models.DateTimeField(verbose_name='date published')),
],
),
migrations.AddField(
model_name='choice',
name='question',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='polls.Question'),
),
]
| [
"="
] | = |
778373ee38e2b8e500a508492b5c81d519f80a09 | f8671d120f8f32b0febe94f4dc84570603e34fac | /utils_driver.py | c9b9a0185636c8784dadc34512484fe9360420ca | [] | no_license | ahashisyuu/OpenSpider | f35772a53c4de4217df9dc1ee8f2078e1c2eb281 | 31da122dc2ab658142c34089f3cc0fe71a5016ca | refs/heads/master | 2022-03-19T01:37:58.965682 | 2019-12-10T12:40:02 | 2019-12-10T12:40:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 418 | py | from selenium import webdriver
import platform
#print platform.system()
def get_driver():
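    # Headless PhantomJS on Linux servers, local Chrome everywhere else.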
system = platform.system()
if system == "Linux":
return webdriver.PhantomJS(executable_path='/home/ubuntu/phantomjs-2.1.1-linux-x86_64/bin/phantomjs')
else:
return webdriver.Chrome()
#return webdriver.PhantomJS()
#driver = get_driver()
#driver.get("http://www.baidu.com")
#driver.close()
| [
"[email protected]"
] | |
d5706657c7a3d28103d085bb0dbf7d12e11bac82 | 173b7e08d9fdbfeda8349570f7ccd93cbd6c02d4 | /example_model/model_node_label.py | 84ea201452534e2e144905c11f081a4272f8ac42 | [
"LicenseRef-scancode-other-permissive"
] | permissive | embeddedsamurai/kGCN-1 | ef647d539fb79d6b5ebe090a3b27b349933d6ca4 | 7bc4dc32afd7a76e31b3bd37e2cb71611ba1fc5f | refs/heads/master | 2020-08-04T16:51:36.430607 | 2019-10-01T05:02:31 | 2019-10-01T05:02:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,427 | py | import tensorflow as tf
import tensorflow.contrib.keras as K
import kgcn.layers
from kgcn.default_model import DefaultModel
import tensorflow.contrib.keras as K
class GCN(DefaultModel):
def build_placeholders(self,info,config,batch_size):
# input data types (placeholders) of this neural network
return self.get_placeholders(info,config,batch_size,
['adjs','nodes','mask','dropout_rate',
'node_label','mask_node_label',
'enabled_node_nums','is_train','features'])
def build_model(self,placeholders,info,config,batch_size):
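        # Two GraphConv(64) layers with graph batch-norm and ReLU, a final GraphConv(2) with softmax node-label prediction, and a node-mask-weighted cross-entropy loss.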
adj_channel_num=info.adj_channel_num
embedding_dim=config["embedding_dim"]
in_adjs=placeholders["adjs"]
features=placeholders["features"]
in_nodes=placeholders["nodes"]
labels=placeholders["node_label"]
mask_labels=placeholders["mask_node_label"]
mask=placeholders["mask"]
enabled_node_nums=placeholders["enabled_node_nums"]
is_train=placeholders["is_train"]
layer=features
input_dim=info.feature_dim
if features is None:
layer=K.layers.Embedding(info.all_node_num,embedding_dim)(in_nodes)
input_dim=embedding_dim
# layer: batch_size x graph_node_num x dim
layer=kgcn.layers.GraphConv(64,adj_channel_num)(layer,adj=in_adjs)
layer=kgcn.layers.GraphBatchNormalization()(layer,
max_node_num=info.graph_node_num,enabled_node_nums=enabled_node_nums)
layer=tf.nn.relu(layer)
layer=kgcn.layers.GraphConv(64,adj_channel_num)(layer,adj=in_adjs)
layer=kgcn.layers.GraphBatchNormalization()(layer,
max_node_num=info.graph_node_num,enabled_node_nums=enabled_node_nums)
layer=tf.nn.relu(layer)
layer=kgcn.layers.GraphConv(2,adj_channel_num)(layer,adj=in_adjs)
prediction=tf.nn.softmax(layer)
# computing cost and metrics
cost=tf.nn.softmax_cross_entropy_with_logits(labels=labels,logits=layer)
cost=mask*tf.reduce_mean(cost,axis=1)
cost_opt=tf.reduce_mean(cost)
metrics={}
cost_sum=tf.reduce_sum(cost)
pre_count=tf.cast(tf.equal(tf.argmax(prediction,2), tf.argmax(labels,2)),tf.float32)
correct_count=mask*tf.reduce_mean(pre_count,axis=1)
metrics["correct_count"]=tf.reduce_sum(correct_count)
return layer,prediction,cost_opt,cost_sum,metrics
| [
"[email protected]"
] | |
725223f8d060081f839ffe104c2a1a8f0c49e687 | 920f81d8f5fbd45eb15f2970d0bd528b921a3d46 | /pyplot/plot_loss.py | 81bb50f95b08e5d8fafdc78fc8d47652605f5877 | [] | no_license | minhnd3796/RIVF2019_Minh | 740a4015b7741bea9d2503088e99bc1a97a1f18f | c2439421efcbae3bad09f459a3d582b7fcf735c4 | refs/heads/master | 2020-03-25T03:49:20.533009 | 2018-08-03T01:27:14 | 2018-08-03T01:27:14 | 143,361,843 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,254 | py | from pandas import read_csv
import pylab
from sys import argv
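# Overlay the training and validation loss curves exported from TensorBoard for the 2-skip (FCN-8s) and 3-skip (FCN-4s) models.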
train_data_8s = read_csv('FCN-8s-ResNet101_Vaihingen/run_train-tag-entropy_1.csv')
train_step_8s = train_data_8s.iloc[:, 1].values
train_acc_8s = train_data_8s.iloc[:, 2].values
validation_data_8s = read_csv('FCN-8s-ResNet101_Vaihingen/run_validation-tag-entropy_1.csv')
validation_step_8s = validation_data_8s.iloc[:, 1].values
validation_acc_8s = validation_data_8s.iloc[:, 2].values
pylab.plot(train_step_8s, train_acc_8s, 'green', label='Training with 2 skips')
pylab.plot(validation_step_8s, validation_acc_8s, 'purple', label='Validation with 2 skips')
train_data_4s = read_csv('FCN-4s-ResNet101_Vaihingen/run_train-tag-entropy_1.csv')
train_step_4s = train_data_4s.iloc[:, 1].values
train_acc_4s = train_data_4s.iloc[:, 2].values
validation_data_4s = read_csv('FCN-4s-ResNet101_Vaihingen/run_validation-tag-entropy_1.csv')
validation_step_4s = validation_data_4s.iloc[:, 1].values
validation_acc_4s = validation_data_4s.iloc[:, 2].values
pylab.plot(train_step_4s, train_acc_4s, 'r', label='Training with 3 skips')
pylab.plot(validation_step_4s, validation_acc_4s, 'b', label='Validation with 3 skips')
pylab.legend(loc='upper left')
pylab.xlabel('Step')
pylab.ylabel('Loss')
pylab.show()
| [
"[email protected]"
] | |
fe40af596e008133901e8eb437974a14e6f29f8f | 82770c7bc5e2f27a48b8c370b0bab2ee41f24d86 | /microblog/flask/venv/lib/python2.7/site-packages/scipy/fftpack/tests/test_basic.py | 15826d4685a7eefdaf622f6b05ad0ecfe5c8f25c | [
"Apache-2.0"
] | permissive | johankaito/fufuka | 77ddb841f27f6ce8036d7b38cb51dc62e85b2679 | 32a96ecf98ce305c2206c38443e58fdec88c788d | refs/heads/master | 2022-07-20T00:51:55.922063 | 2015-08-21T20:56:48 | 2015-08-21T20:56:48 | 39,845,849 | 2 | 0 | Apache-2.0 | 2022-06-29T23:30:11 | 2015-07-28T16:39:54 | Python | UTF-8 | Python | false | false | 29,654 | py | #!/usr/bin/env python
# Created by Pearu Peterson, September 2002
from __future__ import division, print_function, absolute_import
__usage__ = """
Build fftpack:
python setup_fftpack.py build
Run tests if scipy is installed:
python -c 'import scipy;scipy.fftpack.test()'
Run tests if fftpack is not installed:
python tests/test_basic.py
"""
from numpy.testing import (assert_equal, assert_array_almost_equal,
assert_array_almost_equal_nulp, assert_raises, run_module_suite,
assert_array_less, TestCase, dec)
from scipy.fftpack import ifft,fft,fftn,ifftn,rfft,irfft, fft2
from scipy.fftpack import _fftpack as fftpack
from scipy.fftpack.basic import _is_safe_size
from numpy import (arange, add, array, asarray, zeros, dot, exp, pi,
swapaxes, double, cdouble)
import numpy as np
import numpy.fft
# "large" composite numbers supported by FFTPACK
LARGE_COMPOSITE_SIZES = [
2**13,
2**5 * 3**5,
2**3 * 3**3 * 5**2,
]
SMALL_COMPOSITE_SIZES = [
2,
2*3*5,
2*2*3*3,
]
# prime
LARGE_PRIME_SIZES = [
2011
]
SMALL_PRIME_SIZES = [
29
]
from numpy.random import rand
def _assert_close_in_norm(x, y, rtol, size, rdt):
# helper function for testing
err_msg = "size: %s rdt: %s" % (size, rdt)
assert_array_less(np.linalg.norm(x - y), rtol*np.linalg.norm(x), err_msg)
def random(size):
return rand(*size)
def get_mat(n):
data = arange(n)
data = add.outer(data,data)
return data
def direct_dft(x):
x = asarray(x)
n = len(x)
y = zeros(n,dtype=cdouble)
w = -arange(n)*(2j*pi/n)
for i in range(n):
y[i] = dot(exp(i*w),x)
return y
def direct_idft(x):
x = asarray(x)
n = len(x)
y = zeros(n,dtype=cdouble)
w = arange(n)*(2j*pi/n)
for i in range(n):
y[i] = dot(exp(i*w),x)/n
return y
def direct_dftn(x):
x = asarray(x)
for axis in range(len(x.shape)):
x = fft(x,axis=axis)
return x
def direct_idftn(x):
x = asarray(x)
for axis in range(len(x.shape)):
x = ifft(x,axis=axis)
return x
def direct_rdft(x):
x = asarray(x)
n = len(x)
w = -arange(n)*(2j*pi/n)
r = zeros(n,dtype=double)
for i in range(n//2+1):
y = dot(exp(i*w),x)
if i:
r[2*i-1] = y.real
if 2*i < n:
r[2*i] = y.imag
else:
r[0] = y.real
return r
def direct_irdft(x):
x = asarray(x)
n = len(x)
x1 = zeros(n,dtype=cdouble)
for i in range(n//2+1):
if i:
if 2*i < n:
x1[i] = x[2*i-1] + 1j*x[2*i]
x1[n-i] = x[2*i-1] - 1j*x[2*i]
else:
x1[i] = x[2*i-1]
else:
x1[0] = x[0]
return direct_idft(x1).real
class _TestFFTBase(TestCase):
def setUp(self):
self.cdt = None
self.rdt = None
np.random.seed(1234)
def test_definition(self):
x = np.array([1,2,3,4+1j,1,2,3,4+2j], dtype=self.cdt)
y = fft(x)
assert_equal(y.dtype, self.cdt)
y1 = direct_dft(x)
assert_array_almost_equal(y,y1)
x = np.array([1,2,3,4+0j,5], dtype=self.cdt)
assert_array_almost_equal(fft(x),direct_dft(x))
def test_n_argument_real(self):
x1 = np.array([1,2,3,4], dtype=self.rdt)
x2 = np.array([1,2,3,4], dtype=self.rdt)
y = fft([x1,x2],n=4)
assert_equal(y.dtype, self.cdt)
assert_equal(y.shape,(2,4))
assert_array_almost_equal(y[0],direct_dft(x1))
assert_array_almost_equal(y[1],direct_dft(x2))
def _test_n_argument_complex(self):
x1 = np.array([1,2,3,4+1j], dtype=self.cdt)
x2 = np.array([1,2,3,4+1j], dtype=self.cdt)
y = fft([x1,x2],n=4)
assert_equal(y.dtype, self.cdt)
assert_equal(y.shape,(2,4))
assert_array_almost_equal(y[0],direct_dft(x1))
assert_array_almost_equal(y[1],direct_dft(x2))
def test_djbfft(self):
for i in range(2,14):
n = 2**i
x = list(range(n))
y = fftpack.zfft(x)
y2 = numpy.fft.fft(x)
assert_array_almost_equal(y,y2)
y = fftpack.zrfft(x)
assert_array_almost_equal(y,y2)
def test_invalid_sizes(self):
assert_raises(ValueError, fft, [])
assert_raises(ValueError, fft, [[1,1],[2,2]], -5)
def test__is_safe_size(self):
vals = [(0, True), (1, True), (2, True), (3, True), (4, True), (5, True), (6, True), (7, False),
(15, True), (16, True), (17, False), (18, True), (21, False), (25, True), (50, True),
(120, True), (210, False)]
for n, is_safe in vals:
assert_equal(_is_safe_size(n), is_safe)
class TestDoubleFFT(_TestFFTBase):
def setUp(self):
self.cdt = np.cdouble
self.rdt = np.double
class TestSingleFFT(_TestFFTBase):
def setUp(self):
self.cdt = np.complex64
self.rdt = np.float32
@dec.knownfailureif(True, "single-precision FFT implementation is partially disabled, until accuracy issues with large prime powers are resolved")
def test_notice(self):
pass
class _TestIFFTBase(TestCase):
def setUp(self):
np.random.seed(1234)
def test_definition(self):
x = np.array([1,2,3,4+1j,1,2,3,4+2j], self.cdt)
y = ifft(x)
y1 = direct_idft(x)
assert_equal(y.dtype, self.cdt)
assert_array_almost_equal(y,y1)
x = np.array([1,2,3,4+0j,5], self.cdt)
assert_array_almost_equal(ifft(x),direct_idft(x))
def test_definition_real(self):
x = np.array([1,2,3,4,1,2,3,4], self.rdt)
y = ifft(x)
assert_equal(y.dtype, self.cdt)
y1 = direct_idft(x)
assert_array_almost_equal(y,y1)
x = np.array([1,2,3,4,5], dtype=self.rdt)
assert_equal(y.dtype, self.cdt)
assert_array_almost_equal(ifft(x),direct_idft(x))
def test_djbfft(self):
for i in range(2,14):
n = 2**i
x = list(range(n))
y = fftpack.zfft(x,direction=-1)
y2 = numpy.fft.ifft(x)
assert_array_almost_equal(y,y2)
y = fftpack.zrfft(x,direction=-1)
assert_array_almost_equal(y,y2)
def test_random_complex(self):
for size in [1,51,111,100,200,64,128,256,1024]:
x = random([size]).astype(self.cdt)
x = random([size]).astype(self.cdt) + 1j*x
y1 = ifft(fft(x))
y2 = fft(ifft(x))
assert_equal(y1.dtype, self.cdt)
assert_equal(y2.dtype, self.cdt)
assert_array_almost_equal(y1, x)
assert_array_almost_equal(y2, x)
def test_random_real(self):
for size in [1,51,111,100,200,64,128,256,1024]:
x = random([size]).astype(self.rdt)
y1 = ifft(fft(x))
y2 = fft(ifft(x))
assert_equal(y1.dtype, self.cdt)
assert_equal(y2.dtype, self.cdt)
assert_array_almost_equal(y1, x)
assert_array_almost_equal(y2, x)
def test_size_accuracy(self):
# Sanity check for the accuracy for prime and non-prime sized inputs
if self.rdt == np.float32:
rtol = 1e-5
elif self.rdt == np.float64:
rtol = 1e-10
for size in LARGE_COMPOSITE_SIZES + LARGE_PRIME_SIZES:
np.random.seed(1234)
x = np.random.rand(size).astype(self.rdt)
y = ifft(fft(x))
_assert_close_in_norm(x, y, rtol, size, self.rdt)
y = fft(ifft(x))
_assert_close_in_norm(x, y, rtol, size, self.rdt)
x = (x + 1j*np.random.rand(size)).astype(self.cdt)
y = ifft(fft(x))
_assert_close_in_norm(x, y, rtol, size, self.rdt)
y = fft(ifft(x))
_assert_close_in_norm(x, y, rtol, size, self.rdt)
def test_invalid_sizes(self):
assert_raises(ValueError, ifft, [])
assert_raises(ValueError, ifft, [[1,1],[2,2]], -5)
class TestDoubleIFFT(_TestIFFTBase):
def setUp(self):
self.cdt = np.cdouble
self.rdt = np.double
class TestSingleIFFT(_TestIFFTBase):
def setUp(self):
self.cdt = np.complex64
self.rdt = np.float32
class _TestRFFTBase(TestCase):
def setUp(self):
np.random.seed(1234)
def test_definition(self):
for t in [[1, 2, 3, 4, 1, 2, 3, 4], [1, 2, 3, 4, 1, 2, 3, 4, 5]]:
x = np.array(t, dtype=self.rdt)
y = rfft(x)
y1 = direct_rdft(x)
assert_array_almost_equal(y,y1)
assert_equal(y.dtype, self.rdt)
def test_djbfft(self):
from numpy.fft import fft as numpy_fft
for i in range(2,14):
n = 2**i
x = list(range(n))
y2 = numpy_fft(x)
y1 = zeros((n,),dtype=double)
y1[0] = y2[0].real
y1[-1] = y2[n//2].real
for k in range(1, n//2):
y1[2*k-1] = y2[k].real
y1[2*k] = y2[k].imag
y = fftpack.drfft(x)
assert_array_almost_equal(y,y1)
def test_invalid_sizes(self):
assert_raises(ValueError, rfft, [])
assert_raises(ValueError, rfft, [[1,1],[2,2]], -5)
class TestRFFTDouble(_TestRFFTBase):
def setUp(self):
self.cdt = np.cdouble
self.rdt = np.double
class TestRFFTSingle(_TestRFFTBase):
def setUp(self):
self.cdt = np.complex64
self.rdt = np.float32
class _TestIRFFTBase(TestCase):
def setUp(self):
np.random.seed(1234)
def test_definition(self):
x1 = [1,2,3,4,1,2,3,4]
x1_1 = [1,2+3j,4+1j,2+3j,4,2-3j,4-1j,2-3j]
x2 = [1,2,3,4,1,2,3,4,5]
x2_1 = [1,2+3j,4+1j,2+3j,4+5j,4-5j,2-3j,4-1j,2-3j]
def _test(x, xr):
y = irfft(np.array(x, dtype=self.rdt))
y1 = direct_irdft(x)
assert_equal(y.dtype, self.rdt)
assert_array_almost_equal(y,y1, decimal=self.ndec)
assert_array_almost_equal(y,ifft(xr), decimal=self.ndec)
_test(x1, x1_1)
_test(x2, x2_1)
def test_djbfft(self):
from numpy.fft import ifft as numpy_ifft
for i in range(2,14):
n = 2**i
x = list(range(n))
x1 = zeros((n,),dtype=cdouble)
x1[0] = x[0]
for k in range(1, n//2):
x1[k] = x[2*k-1]+1j*x[2*k]
x1[n-k] = x[2*k-1]-1j*x[2*k]
x1[n//2] = x[-1]
y1 = numpy_ifft(x1)
y = fftpack.drfft(x,direction=-1)
assert_array_almost_equal(y,y1)
def test_random_real(self):
for size in [1,51,111,100,200,64,128,256,1024]:
x = random([size]).astype(self.rdt)
y1 = irfft(rfft(x))
y2 = rfft(irfft(x))
assert_equal(y1.dtype, self.rdt)
assert_equal(y2.dtype, self.rdt)
assert_array_almost_equal(y1, x, decimal=self.ndec,
err_msg="size=%d" % size)
assert_array_almost_equal(y2, x, decimal=self.ndec,
err_msg="size=%d" % size)
def test_size_accuracy(self):
# Sanity check for the accuracy for prime and non-prime sized inputs
if self.rdt == np.float32:
rtol = 1e-5
elif self.rdt == np.float64:
rtol = 1e-10
for size in LARGE_COMPOSITE_SIZES + LARGE_PRIME_SIZES:
np.random.seed(1234)
x = np.random.rand(size).astype(self.rdt)
y = irfft(rfft(x))
_assert_close_in_norm(x, y, rtol, size, self.rdt)
y = rfft(irfft(x))
_assert_close_in_norm(x, y, rtol, size, self.rdt)
def test_invalid_sizes(self):
assert_raises(ValueError, irfft, [])
assert_raises(ValueError, irfft, [[1,1],[2,2]], -5)
# self.ndec is bogus; we should have a assert_array_approx_equal for number of
# significant digits
class TestIRFFTDouble(_TestIRFFTBase):
def setUp(self):
self.cdt = np.cdouble
self.rdt = np.double
self.ndec = 14
class TestIRFFTSingle(_TestIRFFTBase):
def setUp(self):
self.cdt = np.complex64
self.rdt = np.float32
self.ndec = 5
class Testfft2(TestCase):
def setUp(self):
np.random.seed(1234)
def test_regression_244(self):
"""fft returns wrong result with axes parameter."""
# fftn (and hence fft2) used to break when both axes and shape were
# used
x = numpy.ones((4,4,2))
y = fft2(x, shape=(8,8), axes=(-3,-2))
y_r = numpy.fft.fftn(x, s=(8, 8), axes=(-3, -2))
assert_array_almost_equal(y, y_r)
def test_invalid_sizes(self):
assert_raises(ValueError, fft2, [[]])
assert_raises(ValueError, fft2, [[1,1],[2,2]], (4, -3))
class TestFftnSingle(TestCase):
def setUp(self):
np.random.seed(1234)
def test_definition(self):
x = [[1,2,3],[4,5,6],[7,8,9]]
y = fftn(np.array(x, np.float32))
if not y.dtype == np.complex64:
raise ValueError("double precision output with single precision")
y_r = np.array(fftn(x), np.complex64)
assert_array_almost_equal_nulp(y, y_r)
def test_size_accuracy(self):
for size in SMALL_COMPOSITE_SIZES + SMALL_PRIME_SIZES:
np.random.seed(1234)
x = np.random.rand(size, size) + 1j*np.random.rand(size, size)
y1 = fftn(x.real.astype(np.float32))
y2 = fftn(x.real.astype(np.float64)).astype(np.complex64)
assert_equal(y1.dtype, np.complex64)
assert_array_almost_equal_nulp(y1, y2, 2000)
for size in LARGE_COMPOSITE_SIZES + LARGE_PRIME_SIZES:
np.random.seed(1234)
x = np.random.rand(size, 3) + 1j*np.random.rand(size, 3)
y1 = fftn(x.real.astype(np.float32))
y2 = fftn(x.real.astype(np.float64)).astype(np.complex64)
assert_equal(y1.dtype, np.complex64)
assert_array_almost_equal_nulp(y1, y2, 2000)
class TestFftn(TestCase):
def setUp(self):
np.random.seed(1234)
def test_definition(self):
x = [[1,2,3],[4,5,6],[7,8,9]]
y = fftn(x)
assert_array_almost_equal(y,direct_dftn(x))
x = random((20,26))
assert_array_almost_equal(fftn(x),direct_dftn(x))
x = random((5,4,3,20))
assert_array_almost_equal(fftn(x),direct_dftn(x))
def test_axes_argument(self):
# plane == ji_plane, x== kji_space
plane1 = [[1,2,3],[4,5,6],[7,8,9]]
plane2 = [[10,11,12],[13,14,15],[16,17,18]]
plane3 = [[19,20,21],[22,23,24],[25,26,27]]
ki_plane1 = [[1,2,3],[10,11,12],[19,20,21]]
ki_plane2 = [[4,5,6],[13,14,15],[22,23,24]]
ki_plane3 = [[7,8,9],[16,17,18],[25,26,27]]
jk_plane1 = [[1,10,19],[4,13,22],[7,16,25]]
jk_plane2 = [[2,11,20],[5,14,23],[8,17,26]]
jk_plane3 = [[3,12,21],[6,15,24],[9,18,27]]
kj_plane1 = [[1,4,7],[10,13,16],[19,22,25]]
kj_plane2 = [[2,5,8],[11,14,17],[20,23,26]]
kj_plane3 = [[3,6,9],[12,15,18],[21,24,27]]
ij_plane1 = [[1,4,7],[2,5,8],[3,6,9]]
ij_plane2 = [[10,13,16],[11,14,17],[12,15,18]]
ij_plane3 = [[19,22,25],[20,23,26],[21,24,27]]
ik_plane1 = [[1,10,19],[2,11,20],[3,12,21]]
ik_plane2 = [[4,13,22],[5,14,23],[6,15,24]]
ik_plane3 = [[7,16,25],[8,17,26],[9,18,27]]
ijk_space = [jk_plane1,jk_plane2,jk_plane3]
ikj_space = [kj_plane1,kj_plane2,kj_plane3]
jik_space = [ik_plane1,ik_plane2,ik_plane3]
jki_space = [ki_plane1,ki_plane2,ki_plane3]
kij_space = [ij_plane1,ij_plane2,ij_plane3]
x = array([plane1,plane2,plane3])
assert_array_almost_equal(fftn(x),fftn(x,axes=(-3,-2,-1))) # kji_space
assert_array_almost_equal(fftn(x),fftn(x,axes=(0,1,2)))
y = fftn(x,axes=(2,1,0)) # ijk_space
assert_array_almost_equal(swapaxes(y,-1,-3),fftn(ijk_space))
y = fftn(x,axes=(2,0,1)) # ikj_space
assert_array_almost_equal(swapaxes(swapaxes(y,-1,-3),
-1,-2),
fftn(ikj_space))
y = fftn(x,axes=(1,2,0)) # jik_space
assert_array_almost_equal(swapaxes(swapaxes(y,-1,-3),
-3,-2),
fftn(jik_space))
y = fftn(x,axes=(1,0,2)) # jki_space
assert_array_almost_equal(swapaxes(y,-2,-3),fftn(jki_space))
y = fftn(x,axes=(0,2,1)) # kij_space
assert_array_almost_equal(swapaxes(y,-2,-1),
fftn(kij_space))
y = fftn(x,axes=(-2,-1)) # ji_plane
assert_array_almost_equal(fftn(plane1),y[0])
assert_array_almost_equal(fftn(plane2),y[1])
assert_array_almost_equal(fftn(plane3),y[2])
y = fftn(x,axes=(1,2)) # ji_plane
assert_array_almost_equal(fftn(plane1),y[0])
assert_array_almost_equal(fftn(plane2),y[1])
assert_array_almost_equal(fftn(plane3),y[2])
y = fftn(x,axes=(-3,-2)) # kj_plane
assert_array_almost_equal(fftn(x[:,:,0]),y[:,:,0])
assert_array_almost_equal(fftn(x[:,:,1]),y[:,:,1])
assert_array_almost_equal(fftn(x[:,:,2]),y[:,:,2])
y = fftn(x,axes=(-3,-1)) # ki_plane
assert_array_almost_equal(fftn(x[:,0,:]),y[:,0,:])
assert_array_almost_equal(fftn(x[:,1,:]),y[:,1,:])
assert_array_almost_equal(fftn(x[:,2,:]),y[:,2,:])
y = fftn(x,axes=(-1,-2)) # ij_plane
assert_array_almost_equal(fftn(ij_plane1),swapaxes(y[0],-2,-1))
assert_array_almost_equal(fftn(ij_plane2),swapaxes(y[1],-2,-1))
assert_array_almost_equal(fftn(ij_plane3),swapaxes(y[2],-2,-1))
y = fftn(x,axes=(-1,-3)) # ik_plane
assert_array_almost_equal(fftn(ik_plane1),swapaxes(y[:,0,:],-1,-2))
assert_array_almost_equal(fftn(ik_plane2),swapaxes(y[:,1,:],-1,-2))
assert_array_almost_equal(fftn(ik_plane3),swapaxes(y[:,2,:],-1,-2))
y = fftn(x,axes=(-2,-3)) # jk_plane
assert_array_almost_equal(fftn(jk_plane1),swapaxes(y[:,:,0],-1,-2))
assert_array_almost_equal(fftn(jk_plane2),swapaxes(y[:,:,1],-1,-2))
assert_array_almost_equal(fftn(jk_plane3),swapaxes(y[:,:,2],-1,-2))
y = fftn(x,axes=(-1,)) # i_line
for i in range(3):
for j in range(3):
assert_array_almost_equal(fft(x[i,j,:]),y[i,j,:])
y = fftn(x,axes=(-2,)) # j_line
for i in range(3):
for j in range(3):
assert_array_almost_equal(fft(x[i,:,j]),y[i,:,j])
y = fftn(x,axes=(0,)) # k_line
for i in range(3):
for j in range(3):
assert_array_almost_equal(fft(x[:,i,j]),y[:,i,j])
y = fftn(x,axes=()) # point
assert_array_almost_equal(y,x)
def test_shape_argument(self):
small_x = [[1,2,3],[4,5,6]]
large_x1 = [[1,2,3,0],[4,5,6,0],[0,0,0,0],[0,0,0,0]]
y = fftn(small_x,shape=(4,4))
assert_array_almost_equal(y,fftn(large_x1))
y = fftn(small_x,shape=(3,4))
assert_array_almost_equal(y,fftn(large_x1[:-1]))
def test_shape_axes_argument(self):
small_x = [[1,2,3],[4,5,6],[7,8,9]]
large_x1 = array([[1,2,3,0],
[4,5,6,0],
[7,8,9,0],
[0,0,0,0]])
# Disable tests with shape and axes of different lengths
# y = fftn(small_x,shape=(4,4),axes=(-1,))
# for i in range(4):
# assert_array_almost_equal (y[i],fft(large_x1[i]))
# y = fftn(small_x,shape=(4,4),axes=(-2,))
# for i in range(4):
# assert_array_almost_equal (y[:,i],fft(large_x1[:,i]))
y = fftn(small_x,shape=(4,4),axes=(-2,-1))
assert_array_almost_equal(y,fftn(large_x1))
y = fftn(small_x,shape=(4,4),axes=(-1,-2))
assert_array_almost_equal(y,swapaxes(
fftn(swapaxes(large_x1,-1,-2)),-1,-2))
def test_shape_axes_argument2(self):
# Change shape of the last axis
x = numpy.random.random((10, 5, 3, 7))
y = fftn(x, axes=(-1,), shape=(8,))
assert_array_almost_equal(y, fft(x, axis=-1, n=8))
# Change shape of an arbitrary axis which is not the last one
x = numpy.random.random((10, 5, 3, 7))
y = fftn(x, axes=(-2,), shape=(8,))
assert_array_almost_equal(y, fft(x, axis=-2, n=8))
# Change shape of axes: cf #244, where shape and axes were mixed up
x = numpy.random.random((4,4,2))
y = fftn(x, axes=(-3,-2), shape=(8,8))
assert_array_almost_equal(y, numpy.fft.fftn(x, axes=(-3, -2), s=(8, 8)))
def test_shape_argument_more(self):
"""Test that fftn raises ValueError when s.shape is longer than x.shape"""
x = zeros((4, 4, 2))
assert_raises(ValueError, fftn, x, shape=(8, 8, 2, 1))
def test_invalid_sizes(self):
assert_raises(ValueError, fftn, [[]])
assert_raises(ValueError, fftn, [[1,1],[2,2]], (4, -3))
class _TestIfftn(TestCase):
dtype = None
cdtype = None
def setUp(self):
np.random.seed(1234)
def test_definition(self):
x = np.array([[1,2,3],[4,5,6],[7,8,9]], dtype=self.dtype)
y = ifftn(x)
assert_equal(y.dtype, self.cdtype)
assert_array_almost_equal_nulp(y,direct_idftn(x),self.maxnlp)
x = random((20,26))
assert_array_almost_equal_nulp(ifftn(x),direct_idftn(x),self.maxnlp)
x = random((5,4,3,20))
assert_array_almost_equal_nulp(ifftn(x),direct_idftn(x),self.maxnlp)
def test_random_complex(self):
for size in [1,2,51,32,64,92]:
x = random([size,size]) + 1j*random([size,size])
assert_array_almost_equal_nulp(ifftn(fftn(x)),x,self.maxnlp)
assert_array_almost_equal_nulp(fftn(ifftn(x)),x,self.maxnlp)
def test_invalid_sizes(self):
assert_raises(ValueError, ifftn, [[]])
assert_raises(ValueError, ifftn, [[1,1],[2,2]], (4, -3))
class TestIfftnDouble(_TestIfftn):
dtype = np.float64
cdtype = np.complex128
maxnlp = 2000
class TestIfftnSingle(_TestIfftn):
dtype = np.float32
cdtype = np.complex64
maxnlp = 3500
class TestLongDoubleFailure(TestCase):
def setUp(self):
np.random.seed(1234)
def test_complex(self):
if np.dtype(np.longcomplex).itemsize == np.dtype(np.complex).itemsize:
# longdouble == double; so fft is supported
return
x = np.random.randn(10).astype(np.longdouble) + \
1j * np.random.randn(10).astype(np.longdouble)
for f in [fft, ifft]:
try:
f(x)
raise AssertionError("Type %r not supported but does not fail" %
np.longcomplex)
except ValueError:
pass
def test_real(self):
if np.dtype(np.longdouble).itemsize == np.dtype(np.double).itemsize:
# longdouble == double; so fft is supported
return
x = np.random.randn(10).astype(np.longcomplex)
for f in [fft, ifft]:
try:
f(x)
raise AssertionError("Type %r not supported but does not fail" %
np.longcomplex)
except ValueError:
pass
class FakeArray(object):
def __init__(self, data):
self._data = data
self.__array_interface__ = data.__array_interface__
class FakeArray2(object):
def __init__(self, data):
self._data = data
def __array__(self):
return self._data
class TestOverwrite(object):
"""Check input overwrite behavior of the FFT functions """
real_dtypes = [np.float32, np.float64]
dtypes = real_dtypes + [np.complex64, np.complex128]
def _check(self, x, routine, fftsize, axis, overwrite_x, should_overwrite):
x2 = x.copy()
for fake in [lambda x: x, FakeArray, FakeArray2]:
routine(fake(x2), fftsize, axis, overwrite_x=overwrite_x)
sig = "%s(%s%r, %r, axis=%r, overwrite_x=%r)" % (
routine.__name__, x.dtype, x.shape, fftsize, axis, overwrite_x)
if not should_overwrite:
assert_equal(x2, x, err_msg="spurious overwrite in %s" % sig)
def _check_1d(self, routine, dtype, shape, axis, overwritable_dtypes):
np.random.seed(1234)
if np.issubdtype(dtype, np.complexfloating):
data = np.random.randn(*shape) + 1j*np.random.randn(*shape)
else:
data = np.random.randn(*shape)
data = data.astype(dtype)
for fftsize in [8, 16, 32]:
for overwrite_x in [True, False]:
should_overwrite = (overwrite_x
and dtype in overwritable_dtypes
and fftsize <= shape[axis]
and (len(shape) == 1 or
(axis % len(shape) == len(shape)-1
and fftsize == shape[axis])))
self._check(data, routine, fftsize, axis,
overwrite_x=overwrite_x,
should_overwrite=should_overwrite)
def test_fft(self):
overwritable = (np.complex128, np.complex64)
for dtype in self.dtypes:
self._check_1d(fft, dtype, (16,), -1, overwritable)
self._check_1d(fft, dtype, (16, 2), 0, overwritable)
self._check_1d(fft, dtype, (2, 16), 1, overwritable)
def test_ifft(self):
overwritable = (np.complex128, np.complex64)
for dtype in self.dtypes:
self._check_1d(ifft, dtype, (16,), -1, overwritable)
self._check_1d(ifft, dtype, (16, 2), 0, overwritable)
self._check_1d(ifft, dtype, (2, 16), 1, overwritable)
def test_rfft(self):
overwritable = self.real_dtypes
for dtype in self.real_dtypes:
self._check_1d(rfft, dtype, (16,), -1, overwritable)
self._check_1d(rfft, dtype, (16, 2), 0, overwritable)
self._check_1d(rfft, dtype, (2, 16), 1, overwritable)
def test_irfft(self):
overwritable = self.real_dtypes
for dtype in self.real_dtypes:
self._check_1d(irfft, dtype, (16,), -1, overwritable)
self._check_1d(irfft, dtype, (16, 2), 0, overwritable)
self._check_1d(irfft, dtype, (2, 16), 1, overwritable)
def _check_nd_one(self, routine, dtype, shape, axes, overwritable_dtypes):
np.random.seed(1234)
if np.issubdtype(dtype, np.complexfloating):
data = np.random.randn(*shape) + 1j*np.random.randn(*shape)
else:
data = np.random.randn(*shape)
data = data.astype(dtype)
def fftshape_iter(shp):
if len(shp) <= 0:
yield ()
else:
for j in (shp[0]//2, shp[0], shp[0]*2):
for rest in fftshape_iter(shp[1:]):
yield (j,) + rest
if axes is None:
part_shape = shape
else:
part_shape = tuple(np.take(shape, axes))
for overwrite_x in [True, False]:
for fftshape in fftshape_iter(part_shape):
should_overwrite = (overwrite_x
and data.ndim == 1
and np.all([x < y for x, y in zip(fftshape, part_shape)])
and dtype in overwritable_dtypes)
self._check(data, routine, fftshape, axes,
overwrite_x=overwrite_x,
should_overwrite=should_overwrite)
if data.ndim > 1:
# check fortran order: it never overwrites
self._check(data.T, routine, fftshape, axes,
overwrite_x=overwrite_x,
should_overwrite=False)
def _check_nd(self, routine, dtype, overwritable):
self._check_nd_one(routine, dtype, (16,), None, overwritable)
self._check_nd_one(routine, dtype, (16,), (0,), overwritable)
self._check_nd_one(routine, dtype, (16, 2), (0,), overwritable)
self._check_nd_one(routine, dtype, (2, 16), (1,), overwritable)
self._check_nd_one(routine, dtype, (8, 16), None, overwritable)
self._check_nd_one(routine, dtype, (8, 16), (0, 1), overwritable)
self._check_nd_one(routine, dtype, (8, 16, 2), (0, 1), overwritable)
self._check_nd_one(routine, dtype, (8, 16, 2), (1, 2), overwritable)
self._check_nd_one(routine, dtype, (8, 16, 2), (0,), overwritable)
self._check_nd_one(routine, dtype, (8, 16, 2), (1,), overwritable)
self._check_nd_one(routine, dtype, (8, 16, 2), (2,), overwritable)
self._check_nd_one(routine, dtype, (8, 16, 2), None, overwritable)
self._check_nd_one(routine, dtype, (8, 16, 2), (0,1,2), overwritable)
def test_fftn(self):
overwritable = (np.complex128, np.complex64)
for dtype in self.dtypes:
self._check_nd(fftn, dtype, overwritable)
def test_ifftn(self):
overwritable = (np.complex128, np.complex64)
for dtype in self.dtypes:
self._check_nd(ifftn, dtype, overwritable)
if __name__ == "__main__":
run_module_suite()
| [
"[email protected]"
] | |
f1b816434823e5ff322719c6e792a034ea4f4c35 | 177bb6567b9564b1feb1d6e25ab1e0d61adf8770 | /ResidualLoss/CNN_l2_prob_far_dist.py | dc834cb205256111664a4feebdedd1accd470493 | [] | no_license | fzdy1914/NUS-FYP | 4ae9b299cf1cb72a01b371998781b9cec333d3f0 | cb7195a8b025eb8ab2becd26886551479796f930 | refs/heads/master | 2023-04-16T05:08:12.529777 | 2021-04-05T06:56:15 | 2021-04-05T06:56:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,487 | py | import random
import sys
from torch.autograd import Variable
from torch import optim
import numpy as np
from torch.backends import cudnn
import torch.nn.functional as F
import torch
from torch.utils.data import DataLoader, WeightedRandomSampler
from ResidualLoss.dataset import cifar10_data_loader_test, cifar10_data_loader_train, cifar10_dataset_train
from ResidualLoss.model import CIFAR_17
class Logger(object):
def __init__(self):
self.terminal = sys.stdout
log_loc = "./log/%s.txt" % sys.argv[0].split("/")[-1].split(".")[0]
self.log = open(log_loc, "a")
def write(self, message):
self.terminal.write(message)
self.log.write(message)
self.log.flush()
def flush(self):
pass
sys.stdout = Logger()
def setup_seed(seed):
torch.manual_seed(seed)
torch.cuda.manual_seed_all(seed)
torch.cuda.manual_seed(seed)
np.random.seed(seed)
random.seed(seed)
cudnn.deterministic = True
setup_seed(1914)
num_epochs = 200
batch_size = 100
evaluation_batch_size = 2500
learning_rate = 0.0001
ref_model = CIFAR_17().cuda()
model = CIFAR_17().cuda()
state_dict = torch.load('./CIFAR-17-1.pt')
ref_model.eval()
model.train()
# optimizer = optim.Adam([
# {'params': model.conv1.parameters()},
# {'params': model.conv2.parameters()},
# {'params': model.conv3.parameters()}
# ], lr=learning_rate, weight_decay=1e-5)
optimizer = optim.Adam(model.parameters(), lr=learning_rate, weight_decay=1e-5)
train_dataset = cifar10_dataset_train()
train_data_length = len(train_dataset)
sampler = WeightedRandomSampler([1] * train_data_length, num_samples=train_data_length, replacement=True)
train_data_loader = DataLoader(train_dataset, batch_size=batch_size, sampler=sampler)
evaluation_data_loader = cifar10_data_loader_train(batch_size=evaluation_batch_size, shuffle=False)
test_data_loader = cifar10_data_loader_test(batch_size)
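# Zero the sampling weight of every index in ignore_idx_lst so those examples are never drawn by the weighted sampler.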
prob = torch.ones(len(train_dataset), dtype=torch.float64)
ignore_idx_lst = torch.load('CD/ignore_idx_lst.pt')
for idx in ignore_idx_lst:
prob[idx] = 0
sampler.weights = prob
print(prob.sum())
def residual_train():
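    # Each epoch: measure accuracy over the full (unshuffled) training set, then train on batches drawn by the weighted sampler; running averages are reported at the end.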
total_correct_sum = 0
total_classification_loss = 0
for epoch in range(num_epochs):
total_correct = 0
model.eval()
with torch.no_grad():
for data, target in evaluation_data_loader:
data, target = data.cuda(), target.cuda()
output = model(data)
pred = output.argmax(dim=1, keepdim=True) # get the index of the max log-probability
total_correct += pred.eq(target.view_as(pred)).sum().item()
model.train()
total_train_loss = 0
for data, target in train_data_loader:
data, target = data.cuda(), target.cuda()
optimizer.zero_grad()
output, features = model.features(data)
loss = F.nll_loss(output, target)
loss.backward()
optimizer.step()
total_train_loss += F.nll_loss(output, target, reduction='sum').item() # sum up batch loss
total_train_loss /= train_data_length
total_correct_sum += total_correct
total_classification_loss += total_train_loss
print('epoch [{}/{}], loss:{:.4f} Accuracy: {}/{}'.format(epoch + 1, num_epochs, total_train_loss, total_correct, train_data_length))
print("average correct:", total_correct_sum / num_epochs)
print("average loss:", total_classification_loss / num_epochs)
def test():
model.eval()
test_loss = 0
correct = 0
with torch.no_grad():
for data, target in test_data_loader:
data, target = data.cuda(), target.cuda()
output = model(data)
test_loss += F.nll_loss(output, target, reduction='sum').item() # sum up batch loss
pred = output.argmax(dim=1) # get the index of the max log-probability
correct += pred.eq(target).sum().item()
test_loss /= len(test_data_loader.dataset)
print('Test set: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\n'.format(
test_loss, correct, len(test_data_loader.dataset),
100. * correct / len(test_data_loader.dataset)))
# 1000, 500, 200, 100, 75, 50, 25, 10, 5, 1, 0.5,
if __name__ == '__main__':
ref_model.load_state_dict(state_dict)
model.load_state_dict(state_dict)
residual_train()
loc = "./CNN-l2-far-dist/non-freeze.pt"
torch.save(model.state_dict(), loc)
| [
"[email protected]"
] | |
1d775b49368f6bf5bc1295ea51f84d6fcc27a0c9 | 7265af084e9a69b45ea8367f53fd91582d6c2d50 | /eelbrain/tests/test_data.py | d7da51188808c19dbe96345e478525a11c255c60 | [] | no_license | phoebegaston/Eelbrain | b2c6a51d4fb249b9252af3814683c659b9df2965 | b5b2ee7374a644cd9ea7b60bed86510ae3d5cc15 | refs/heads/master | 2020-12-31T05:41:30.784904 | 2015-04-26T11:38:01 | 2015-04-26T12:00:37 | 34,686,112 | 0 | 0 | null | 2015-04-27T19:17:59 | 2015-04-27T19:17:59 | null | UTF-8 | Python | false | false | 29,331 | py | # Author: Christian Brodbeck <[email protected]>
from itertools import izip, product
import os
import cPickle as pickle
import shutil
from string import ascii_lowercase
import tempfile
import mne
from nose.tools import (eq_, ok_, assert_almost_equal, assert_is_instance,
assert_raises)
import numpy as np
from numpy.testing import (assert_equal, assert_array_equal,
assert_array_almost_equal)
from eelbrain import (datasets, load, Var, Factor, NDVar, Dataset, Celltable,
align, align1, combine)
from eelbrain._data_obj import asvar, Categorial, SourceSpace, UTS
from eelbrain._stats.stats import rms
from eelbrain._utils.testing import (assert_dataobj_equal, assert_dataset_equal,
assert_source_space_equal)
def test_print():
"Run the string representation methods"
ds = datasets.get_uts()
print ds
print repr(ds)
A = ds['A']
print A
print repr(A)
Y = ds['Y']
print Y
print repr(Y)
Ynd = ds['uts']
print Ynd
print repr(Ynd)
def test_aggregate():
"Test aggregation methods"
ds = datasets.get_uts()
# don't handle inconsistencies silently
assert_raises(ValueError, ds.aggregate, 'A%B')
dsa = ds.aggregate('A%B', drop_bad=True)
assert_array_equal(dsa['n'], [15, 15, 15, 15])
idx1 = ds.eval("logical_and(A=='a0', B=='b0')")
eq_(dsa['Y', 0], ds['Y', idx1].mean())
# unequal cell counts
ds = ds[:-3]
dsa = ds.aggregate('A%B', drop_bad=True)
assert_array_equal(dsa['n'], [15, 15, 15, 12])
idx1 = ds.eval("logical_and(A=='a0', B=='b0')")
eq_(dsa['Y', 0], ds['Y', idx1].mean())
dsa = ds.aggregate('A%B', drop_bad=True, equal_count=True)
assert_array_equal(dsa['n'], [12, 12, 12, 12])
idx1_12 = np.logical_and(idx1, idx1.cumsum() <= 12)
eq_(dsa['Y', 0], ds['Y', idx1_12].mean())
def test_align():
"Testing align() and align1() functions"
ds = datasets.get_uv()
ds.index()
idx4 = np.arange(0, ds.n_cases, 4)
idx4i = idx4[::-1]
ds2 = ds.sub(np.arange(0, ds.n_cases, 2))
# align1: align Dataset to index
dsa = align1(ds2, idx4)
assert_array_equal(dsa['index'], idx4, "align1() failure")
dsa = align1(ds2, idx4i)
assert_array_equal(dsa['index'], idx4i, "align1() failure")
# d_idx as Var
dsa = align1(ds2[::2], idx4, idx4i)
assert_array_equal(dsa['index'], idx4i, "align1() failure")
assert_raises(ValueError, align1, ds2, idx4, idx4i)
# Factor index
assert_raises(ValueError, align1, ds, ds['rm', ::-1], 'rm')
fds = ds[:20]
dsa = align1(fds, fds['rm', ::-1], 'rm')
assert_array_equal(dsa['index'], np.arange(19, -1, -1), "align1 Factor")
# align two datasets
dsa1, dsa2 = align(ds, ds2)
assert_array_equal(dsa1['index'], dsa2['index'], "align() failure")
dsa1, dsa2 = align(ds, ds2[::-1])
assert_array_equal(dsa1['index'], dsa2['index'], "align() failure")
def test_celltable():
"Test the Celltable class."
ds = datasets.get_uts()
ds['cat'] = Factor('abcd', repeat=15)
ct = Celltable('Y', 'A', ds=ds)
eq_(ct.n_cases, 60)
eq_(ct.n_cells, 2)
ct = Celltable('Y', 'A', match='rm', ds=ds)
eq_(ct.n_cases, 30)
eq_(ct.n_cells, 2)
# cat argument
ct = Celltable('Y', 'cat', cat=('c', 'b'), ds=ds)
eq_(ct.n_cases, 30)
eq_(ct.X[0], 'c')
eq_(ct.X[-1], 'b')
assert_raises(ValueError, Celltable, 'Y', 'cat', cat=('c', 'e'), ds=ds)
ct = Celltable('Y', 'A', match='rm', ds=ds)
eq_(ct.n_cases, 30)
assert np.all(ct.groups['a0'] == ct.groups['a1'])
ct = Celltable('Y', 'cat', match='rm', cat=('c', 'b'), ds=ds)
eq_(ct.n_cases, 30)
eq_(ct.X[0], 'c')
eq_(ct.X[-1], 'b')
# catch unequal length
assert_raises(ValueError, Celltable, ds['Y', :-1], 'cat', ds=ds)
assert_raises(ValueError, Celltable, ds['Y', :-1], 'cat', match='rm', ds=ds)
# coercion of numerical X
X = ds.eval("A == 'a0'")
ct = Celltable('Y', X, cat=(None, None), ds=ds)
eq_(('False', 'True'), ct.cat)
assert_array_equal(ct.data['True'], ds['Y', X])
ct = Celltable('Y', X, cat=(True, False), ds=ds)
eq_(('True', 'False'), ct.cat)
assert_array_equal(ct.data['True'], ds['Y', X])
# test coercion of Y
ct = Celltable(ds['Y'].x, 'A', ds=ds)
assert_is_instance(ct.Y, np.ndarray)
ct = Celltable(ds['Y'].x, 'A', ds=ds, coercion=asvar)
assert_is_instance(ct.Y, Var)
# test sub
ds_sub = ds.sub("A == 'a0'")
ct_sub = Celltable('Y', 'B', ds=ds_sub)
ct = Celltable('Y', 'B', sub="A == 'a0'", ds=ds)
assert_dataobj_equal(ct_sub.Y, ct.Y)
# test sub with rm
ct_sub = Celltable('Y', 'B', match='rm', ds=ds_sub)
ct = Celltable('Y', 'B', match='rm', sub="A == 'a0'", ds=ds)
assert_dataobj_equal(ct_sub.Y, ct.Y)
# Interaction match
ct = Celltable('Y', 'A', match='B % rm', ds=ds)
ok_(ct.all_within)
assert_dataobj_equal(combine((ct.data['a0'], ct.data['a1'])), ds['Y'])
# test rm sorting
ds = Dataset()
ds['rm'] = Factor('abc', repeat=4)
ds['Y'] = Var(np.arange(3.).repeat(4))
ds['X'] = Factor('ab', repeat=2, tile=3)
idx = np.arange(12)
np.random.shuffle(idx)
ds = ds[idx]
ct = Celltable('Y', 'X', 'rm', ds=ds)
assert_array_equal(ct.match, Factor('abc', tile=2))
assert_array_equal(ct.Y, np.tile(np.arange(3.), 2))
assert_array_equal(ct.X, Factor('ab', repeat=3))
def test_combine():
"Test combine()"
ds1 = datasets.get_uts()
ds2 = datasets.get_uts()
ds = combine((ds1, ds2))
assert_array_equal(ds2['Y'].x, ds['Y'].x[ds1.n_cases:], "Basic combine")
del ds1['Y']
del ds2['YCat']
ds = combine((ds1, ds2))
assert_array_equal(ds2['Y'].x, ds['Y'].x[ds1.n_cases:], "Combine with "
"missing Var")
ok_(np.all(ds1['YCat'] == ds['YCat'][:ds1.n_cases]), "Combine with missing "
"Factor")
assert_raises(TypeError, combine, (ds2['A'], ds2['Y']))
    # combine NDVars with unequal dimensions
ds = datasets.get_uts(utsnd=True)
y = ds['utsnd']
y1 = y.sub(sensor=['0', '1', '2', '3'])
y2 = y.sub(sensor=['1', '2', '3', '4'])
ds1 = Dataset((y1,))
ds2 = Dataset((y2,))
dsc = combine((ds1, ds2))
y = dsc['utsnd']
eq_(y.sensor.names, ['1', '2', '3'], "Sensor dimension "
"intersection failed.")
dims = ('case', 'sensor', 'time')
ref = np.concatenate((y1.get_data(dims)[:, 1:], y2.get_data(dims)[:, :3]))
assert_array_equal(y.get_data(dims), ref, "combine utsnd")
def test_dataset_combining():
"Test Dataset combination methods"
ds = datasets.get_uv()
del ds['fltvar'], ds['intvar'], ds['A']
ds2 = datasets.get_uv()
del ds2['fltvar'], ds2['intvar']
ds.update(ds2)
assert_array_equal(ds['A'], ds2['A'])
ds2 = datasets.get_uv()
del ds2['fltvar'], ds2['intvar']
ds2['B'][5] = 'something_else'
del ds['A']
assert_raises(ValueError, ds.update, ds2)
def test_dataset_indexing():
"""Test Dataset indexing"""
ds = datasets.get_uv()
# indexing values
eq_(ds['A', 1], ds['A'][1])
eq_(ds[1, 'A'], ds['A'][1])
# indexing variables
assert_dataobj_equal(ds[:, 'A'], ds['A'])
assert_dataobj_equal(ds['A', :], ds['A'])
assert_dataobj_equal(ds[:10, 'A'], ds['A'][:10])
assert_dataobj_equal(ds['A', :10], ds['A'][:10])
# new Dataset through indexing
ds2 = Dataset()
ds2['A'] = ds['A']
assert_dataset_equal(ds[('A',)], ds2)
ds2['B'] = ds['B']
assert_dataset_equal(ds['A', 'B'], ds2)
assert_dataset_equal(ds[('A', 'B'), :10], ds2[:10])
assert_dataset_equal(ds[:10, ('A', 'B')], ds2[:10])
# assigning value
ds[2, 'A'] = 'hello'
eq_(ds[2, 'A'], 'hello')
ds['A', 2] = 'not_hello'
eq_(ds[2, 'A'], 'not_hello')
# assigning new factor
ds['C', :] = 'c'
ok_(np.all(ds.eval("C == 'c'")))
# assigning new Var
ds['D1', :] = 5.
ds[:, 'D2'] = 5.
assert_array_equal(ds['D1'], 5)
assert_array_equal(ds['D2'], 5)
# test illegal names
f = Factor('aaabbb')
assert_raises(ValueError, ds.__setitem__, '%dsa', f)
assert_raises(ValueError, ds.__setitem__, '432', f)
assert_raises(ValueError, ds.__setitem__, ('%dsa', slice(None)), 'value')
assert_raises(ValueError, ds.__setitem__, (slice(None), '%dsa'), 'value')
assert_raises(ValueError, ds.__setitem__, ('432', slice(None)), 4.)
assert_raises(ValueError, ds.__setitem__, (slice(None), '432'), 4.)
def test_dataset_sorting():
"Test Dataset sorting methods"
test_array = np.arange(10)
ds = Dataset()
ds['v'] = Var(test_array)
ds['f'] = Factor(test_array)
# shuffle the Dataset
rand_idx = test_array.copy()
np.random.shuffle(rand_idx)
ds_shuffled = ds[rand_idx]
# ascending, Var, copy
dsa = ds_shuffled.sorted('v')
assert_dataset_equal(dsa, ds, "Copy sorted by Var, ascending")
# descending, Factor, in-place
ds_shuffled.sort('f', descending=True)
assert_dataset_equal(ds_shuffled, ds[::-1], "In-place sorted by Factor, "
"descending")
def test_dim_categorial():
"Test Categorial Dimension"
values = ['a', 'b', 'c', 'abc']
name = 'cat'
dim = Categorial(name, values)
# basic properties
print dim
eq_(len(dim), len(values))
# persistence
s = pickle.dumps(dim, pickle.HIGHEST_PROTOCOL)
dim_ = pickle.loads(s)
eq_(dim_, dim)
# indexing
sub_values = values[:2]
idx = dim.dimindex(sub_values)
assert_array_equal(dim.dimindex(tuple(sub_values)), idx)
eq_(dim[idx], Categorial(name, sub_values))
eq_(dim.dimindex('a'), values.index('a'))
eq_(dim.dimindex('abc'), values.index('abc'))
# intersection
dim2 = Categorial(name, ['c', 'b', 'e'])
dim_i = dim.intersect(dim2)
eq_(dim_i, Categorial(name, ['b', 'c']))
# unicode
dimu = Categorial(name, [u'c', 'b', 'e'])
eq_(dimu.values.dtype.kind, 'U')
eq_(dim2.values.dtype.kind, 'S')
eq_(dimu, dim2)
def test_dim_uts():
"Test UTS Dimension"
uts = UTS(-0.1, 0.005, 301)
# make sure indexing rounds correctly for floats
for i, s in enumerate(np.arange(0, 1.4, 0.05)):
idx = uts.dimindex((-0.1 + s, s))
eq_(idx.start, 10 * i)
eq_(idx.stop, 20 + 10 * i)
# intersection
uts1 = UTS(-0.1, 0.01, 50)
uts2 = UTS(0, 0.01, 20)
intersection = uts1.intersect(uts2)
eq_(intersection, uts2)
idx = uts1.dimindex((0, 0.2))
eq_(uts1[idx], uts2)
def test_effect():
"Test _Effect class"
# .enumerate_cells()
f1 = Factor('aabbccaabbcc')
f2 = Factor('abababababab')
i = f1 % f2
n1 = np.concatenate((np.tile([0, 1], 3), np.tile([2, 3], 3)))
assert_array_equal(f1.enumerate_cells(), n1)
assert_array_equal(f2.enumerate_cells(), np.arange(6).repeat(2))
assert_array_equal(i.enumerate_cells(), np.arange(2).repeat(6))
def test_factor():
"Test basic Factor functionality"
# removing a cell
f = Factor('aabbcc')
eq_(f.cells, ('a', 'b', 'c'))
f[f == 'c'] = 'a'
eq_(f.cells, ('a', 'b'))
# cell order
a = np.tile(np.arange(3), 3)
# alphabetical
f = Factor(a, labels={0: 'c', 1: 'b', 2: 'a'})
eq_(f.cells, ('a', 'b', 'c'))
# ordered
f = Factor(a, labels=((0, 'c'), (1, 'b'), (2, 'a')))
eq_(f.cells, ('c', 'b', 'a'))
eq_(f[:2].cells, ('c', 'b'))
f[f == 'b'] = 'c'
eq_(f.cells, ('c', 'a'))
# label length
lens = [2, 5, 32, 2, 32, 524]
f = Factor(['a' * l for l in lens])
assert_array_equal(f.label_length(), lens)
def test_factor_relabel():
"Test Factor.relabel() method"
f = Factor('aaabbbccc')
f.relabel({'a': 'd'})
assert_array_equal(f, Factor('dddbbbccc'))
f.relabel({'d': 'c', 'c': 'd'})
assert_array_equal(f, Factor('cccbbbddd'))
f.relabel({'d': 'c'})
assert_array_equal(f, Factor('cccbbbccc'))
assert_raises(KeyError, f.relabel, {'a':'c'})
def test_interaction():
"Test Interaction"
ds = datasets.get_uv()
A = ds['A']
B = ds['B']
i = A % B
# eq for sequence
assert_array_equal(i == A % B, True)
assert_array_equal(i == B % A, False)
assert_array_equal(i == A, False)
assert_array_equal(i == ds['fltvar'], False)
assert_array_equal(ds.eval("A%B") == Factor(ds['A']) % B, True)
# eq for element
for a, b in product(A.cells, B.cells):
assert_array_equal(i == (a, b), np.logical_and(A == a, B == b))
def test_isin():
"Test .isin() methods"
values = np.array([ 6, -6, 6, -2, -1, 0, -10, -5, -10, -6])
v = values[0]
v2 = values[:2]
labels = {i: c for i, c in enumerate(ascii_lowercase, -10)}
vl = labels[v]
v2l = [labels[v_] for v_ in v2]
target = np.logical_or(values == v2[0], values == v2[1])
inv_target = np.invert(target)
index_target = np.flatnonzero(values == v)
empty = np.array([])
var = Var(values)
assert_array_equal(var.index(v), index_target)
assert_array_equal(var.isin(v2), target)
assert_array_equal(var.isany(*v2), target)
assert_array_equal(var.isnot(*v2), inv_target)
assert_array_equal(var.isnotin(v2), inv_target)
var0 = Var([])
assert_array_equal(var0.isin(v2), empty)
assert_array_equal(var0.isany(*v2), empty)
assert_array_equal(var0.isnot(*v2), empty)
assert_array_equal(var0.isnotin(v2), empty)
f = Factor(values, labels=labels)
assert_array_equal(f.index(vl), index_target)
assert_array_equal(f.isin(v2l), target)
assert_array_equal(f.isany(*v2l), target)
assert_array_equal(f.isnot(*v2l), inv_target)
assert_array_equal(f.isnotin(v2l), inv_target)
f0 = Factor([])
assert_array_equal(f0.isin(v2l), empty)
assert_array_equal(f0.isany(*v2l), empty)
assert_array_equal(f0.isnot(*v2l), empty)
assert_array_equal(f0.isnotin(v2l), empty)
def test_model():
"Test Model class"
# model repr
a = Factor('ab', repeat=2, name='a')
b = Factor('ab', tile=2, name='b')
m = a * b
eq_(repr(m), "a + b + a % b")
# model without explicit names
x1 = Factor('ab', repeat=2)
x2 = Factor('ab', tile=2)
m = x1 * x2
eq_(repr(m), "<?> + <?> + <?> % <?>")
# catch explicit intercept
intercept = Factor('i', repeat=4, name='intercept')
assert_raises(ValueError, a.__mul__, intercept)
def test_ndvar():
"Test the NDVar class"
ds = datasets.get_uts(utsnd=True)
x = ds['utsnd']
# meaningful slicing
assert_raises(KeyError, x.sub, sensor='5')
assert_equal(x.sub(sensor='4'), x.x[:, 4])
assert_equal(x.sub(sensor=['4', '3', '2']), x.x[:, [4, 3, 2]])
assert_equal(x.sub(sensor=['4']), x.x[:, [4]])
assert_equal(x.sub(case=1, sensor='4'), x.x[1, 4])
# setup indices
s_case = slice(10, 13)
s_sensor = slice(2, 4)
s_time = x.time._slice(0.1, 0.2)
b_case = np.zeros(ds.n_cases, dtype=bool)
b_case[s_case] = True
b_sensor = np.array([False, False, True, True, False])
b_time = np.arange(s_time.start, s_time.stop)
a_case = np.arange(10, 13)
a_sensor = np.arange(2, 4)
a_time = np.arange(x.time.dimindex(0.1), x.time.dimindex(0.2))
# slicing with different index kinds
tgt = x.x[s_case, s_sensor, s_time]
eq_(tgt.shape, (3, 2, 10))
# single
assert_equal(x.sub(case=s_case, sensor=s_sensor, time=s_time), tgt)
assert_equal(x.sub(case=a_case, sensor=a_sensor, time=a_time), tgt)
assert_equal(x.sub(case=b_case, sensor=b_sensor, time=b_time), tgt)
# bool & slice
assert_equal(x.sub(case=b_case, sensor=s_sensor, time=s_time), tgt)
assert_equal(x.sub(case=s_case, sensor=b_sensor, time=s_time), tgt)
assert_equal(x.sub(case=s_case, sensor=s_sensor, time=b_time), tgt)
assert_equal(x.sub(case=b_case, sensor=b_sensor, time=s_time), tgt)
assert_equal(x.sub(case=s_case, sensor=b_sensor, time=b_time), tgt)
assert_equal(x.sub(case=b_case, sensor=s_sensor, time=b_time), tgt)
# bool & array
assert_equal(x.sub(case=b_case, sensor=a_sensor, time=a_time), tgt)
assert_equal(x.sub(case=a_case, sensor=b_sensor, time=a_time), tgt)
assert_equal(x.sub(case=a_case, sensor=a_sensor, time=b_time), tgt)
assert_equal(x.sub(case=b_case, sensor=b_sensor, time=a_time), tgt)
assert_equal(x.sub(case=a_case, sensor=b_sensor, time=b_time), tgt)
assert_equal(x.sub(case=b_case, sensor=a_sensor, time=b_time), tgt)
# slice & array
assert_equal(x.sub(case=s_case, sensor=a_sensor, time=a_time), tgt)
assert_equal(x.sub(case=a_case, sensor=s_sensor, time=a_time), tgt)
assert_equal(x.sub(case=a_case, sensor=a_sensor, time=s_time), tgt)
assert_equal(x.sub(case=s_case, sensor=s_sensor, time=a_time), tgt)
assert_equal(x.sub(case=a_case, sensor=s_sensor, time=s_time), tgt)
assert_equal(x.sub(case=s_case, sensor=a_sensor, time=s_time), tgt)
# all three
assert_equal(x.sub(case=a_case, sensor=b_sensor, time=s_time), tgt)
assert_equal(x.sub(case=a_case, sensor=s_sensor, time=b_time), tgt)
assert_equal(x.sub(case=b_case, sensor=a_sensor, time=s_time), tgt)
assert_equal(x.sub(case=b_case, sensor=s_sensor, time=a_time), tgt)
assert_equal(x.sub(case=s_case, sensor=a_sensor, time=b_time), tgt)
assert_equal(x.sub(case=s_case, sensor=b_sensor, time=a_time), tgt)
# Var
v_case = Var(b_case)
assert_equal(x.sub(case=v_case, sensor=b_sensor, time=a_time), tgt)
# baseline correction
x_bl = x - x.summary(time=(None, 0))
# assert that the baseline is 0
bl = x_bl.summary('case', 'sensor', time=(None, 0))
ok_(abs(bl) < 1e-10, "Baseline correction")
# NDVar as index
sens_mean = x.mean(('case', 'time'))
idx = sens_mean > 0
pos = sens_mean[idx]
assert_array_equal(pos.x > 0, True)
def test_ndvar_binning():
"Test NDVar.bin()"
x = np.arange(10)
time = UTS(-0.1, 0.1, 10)
x_dst = x.reshape((5, 2)).mean(1)
time_dst = np.arange(0., 0.9, 0.2)
# 1-d
ndvar = NDVar(x, (time,))
b = ndvar.bin(0.2)
assert_array_equal(b.x, x_dst, "Binned data")
assert_array_equal(b.time.x, time_dst, "Bin times")
# 2-d
ndvar = NDVar(np.vstack((x, x, x)), ('case', time))
b = ndvar.bin(0.2)
assert_array_equal(b.x, np.vstack((x_dst, x_dst, x_dst)), "Binned data")
assert_array_equal(b.time.x, time_dst, "Bin times")
# time:
x = np.ones((5, 70))
ndvar = NDVar(x, ('case', UTS(0.45000000000000007, 0.005, 70)))
binned_ndvar = ndvar.bin(0.05)
assert_array_equal(binned_ndvar.x, 1.)
eq_(binned_ndvar.shape, (5, 7))
def test_ndvar_graph_dim():
    "Test NDVar dimensions with connectivity graph"
ds = datasets.get_uts(utsnd=True)
x = ds['utsnd']
# non-monotonic index
sub_mono = x.sub(sensor=['2', '3', '4'])
sub_nonmono = x.sub(sensor=['4', '3', '2'])
argsort = np.array([2,1,0])
conn = argsort[sub_mono.sensor.connectivity().ravel()].reshape((-1, 2))
assert_equal(sub_nonmono.sensor.connectivity(), conn)
def test_ndvar_summary_methods():
"Test NDVar methods for summarizing data over axes"
ds = datasets.get_uts(utsnd=True)
x = ds['utsnd']
dim = 'sensor'
axis = x.get_axis(dim)
dims = ('case', 'sensor')
axes = tuple(x.get_axis(d) for d in dims)
idx = x > 0
x0 = x[0]
idx0 = idx[0]
xsub = x.sub(time=(0, 0.5))
idxsub = xsub > 0
idx1d = x.mean(('case', 'time')) > 0
# info inheritance
eq_(x.any(('sensor', 'time')).info, x.info)
# numpy functions
eq_(x.any(), x.x.any())
assert_array_equal(x.any(dim), x.x.any(axis))
assert_array_equal(x.any(dims), x.x.any(axes))
assert_array_equal(x.any(idx0), [x_[idx0.x].any() for x_ in x.x])
assert_array_equal(x.any(idx), [x_[i].any() for x_, i in izip(x.x, idx.x)])
assert_array_equal(x0.any(idx0), x0.x[idx0.x].any())
assert_array_equal(x.any(idxsub), xsub.any(idxsub))
assert_array_equal(x.any(idx1d), x.x[:, idx1d.x].any(1))
eq_(x.max(), x.x.max())
assert_array_equal(x.max(dim), x.x.max(axis))
assert_array_equal(x.max(dims), x.x.max(axes))
assert_array_equal(x.max(idx0), [x_[idx0.x].max() for x_ in x.x])
assert_array_equal(x.max(idx), [x_[i].max() for x_, i in izip(x.x, idx.x)])
assert_array_equal(x0.max(idx0), x0.x[idx0.x].max())
assert_array_equal(x.max(idxsub), xsub.max(idxsub))
assert_array_equal(x.max(idx1d), x.x[:, idx1d.x].max(1))
eq_(x.mean(), x.x.mean())
assert_array_equal(x.mean(dim), x.x.mean(axis))
assert_array_equal(x.mean(dims), x.x.mean(axes))
assert_array_equal(x.mean(idx0), [x_[idx0.x].mean() for x_ in x.x])
assert_array_equal(x.mean(idx), [x_[i].mean() for x_, i in izip(x.x, idx.x)])
assert_array_equal(x0.mean(idx0), x0.x[idx0.x].mean())
assert_array_equal(x.mean(idxsub), xsub.mean(idxsub))
assert_array_equal(x.mean(idx1d), x.x[:, idx1d.x].mean(1))
eq_(x.min(), x.x.min())
assert_array_equal(x.min(dim), x.x.min(axis))
assert_array_equal(x.min(dims), x.x.min(axes))
assert_array_equal(x.min(idx0), [x_[idx0.x].min() for x_ in x.x])
assert_array_equal(x.min(idx), [x_[i].min() for x_, i in izip(x.x, idx.x)])
assert_array_equal(x0.min(idx0), x0.x[idx0.x].min())
assert_array_equal(x.min(idxsub), xsub.min(idxsub))
assert_array_equal(x.min(idx1d), x.x[:, idx1d.x].min(1))
eq_(x.std(), x.x.std())
assert_array_equal(x.std(dim), x.x.std(axis))
assert_array_equal(x.std(dims), x.x.std(axes))
assert_array_equal(x.std(idx0), [x_[idx0.x].std() for x_ in x.x])
assert_array_equal(x.std(idx), [x_[i].std() for x_, i in izip(x.x, idx.x)])
assert_array_equal(x0.std(idx0), x0.x[idx0.x].std())
assert_array_equal(x.std(idxsub), xsub.std(idxsub))
assert_array_equal(x.std(idx1d), x.x[:, idx1d.x].std(1))
# non-numpy
eq_(x.rms(), rms(x.x))
assert_array_equal(x.rms(dim), rms(x.x, axis))
assert_array_equal(x.rms(dims), rms(x.x, axes))
assert_array_equal(x.rms(idx0), [rms(x_[idx0.x]) for x_ in x.x])
assert_array_equal(x.rms(idx), [rms(x_[i]) for x_, i in izip(x.x, idx.x)])
assert_array_equal(x0.rms(idx0), rms(x0.x[idx0.x]))
assert_array_equal(x.rms(idxsub), xsub.rms(idxsub))
assert_array_equal(x.rms(idx1d), rms(x.x[:, idx1d.x], 1))
def test_ols():
"Test NDVar.ols() method"
from rpy2.robjects import r
# simulate data
ds = datasets.get_uts(True)
n_times = len(ds['uts'].time)
x = np.zeros(n_times)
x[20:40] = np.hanning(20)
utsc = ds.eval("uts.copy()")
utsc.x += ds['Y'].x[:, None] * x[None, :]
ds_ = Dataset()
ds_['x'] = Var(ds['Y'].x)
ds_['x2'] = ds_['x'] + np.random.normal(0, 1, ds.n_cases)
# ols regression
m1 = ds_['x']
b1 = utsc.ols(m1)
res1 = utsc.residuals(m1)
t1 = utsc.ols_t(m1)
m2 = ds_.eval("x + x2")
b2 = utsc.ols(m2)
res2 = utsc.residuals(m2)
t2 = utsc.ols_t(m2)
# compare with R
for i in xrange(n_times):
ds_['y'] = Var(utsc.x[:, i])
ds_.to_r('ds')
# 1 predictor
r('lm1 <- lm(y ~ x, ds)')
beta = r('coef(lm1)')[1]
assert_almost_equal(b1.x[0, i], beta)
res = r('residuals(lm1)')
assert_array_almost_equal(res1.x[:, i], res)
t = r('coef(summary(lm1))')[5]
assert_almost_equal(t1.x[0, i], t)
# 2 predictors
r('lm2 <- lm(y ~ x + x2, ds)')
beta = r('coef(lm2)')[1:]
assert_array_almost_equal(b2.x[:, i], beta)
res = r('residuals(lm2)')
assert_array_almost_equal(res2.x[:, i], res)
lm2_coefs = r('coef(summary(lm2))')
t = [lm2_coefs[7], lm2_coefs[8]]
assert_array_almost_equal(t2.x[:, i], t)
# 3d
utsnd = ds['utsnd']
ds_['utsnd'] = utsnd
b1 = ds_.eval("utsnd.ols(x)")
res1 = ds_.eval("utsnd.residuals(x)")
t1 = ds_.eval("utsnd.ols_t(x)")
for i in xrange(len(b1.time)):
ds_['y'] = Var(utsnd.x[:, 1, i])
ds_.to_r('ds')
# 1 predictor
r('lm1 <- lm(y ~ x, ds)')
beta = r('coef(lm1)')[1]
assert_almost_equal(b1.x[0, 1, i], beta)
res = r('residuals(lm1)')
assert_array_almost_equal(res1.x[:, 1, i], res)
t = r('coef(summary(lm1))')[5]
assert_almost_equal(t1.x[0, 1, i], t)
def test_io_pickle():
"Test io by pickling"
ds = datasets.get_uts()
ds.info['info'] = "Some very useful information about the Dataset"
tempdir = tempfile.mkdtemp()
try:
dest = os.path.join(tempdir, 'test.pickled')
with open(dest, 'wb') as fid:
pickle.dump(ds, fid, protocol=pickle.HIGHEST_PROTOCOL)
with open(dest, 'rb') as fid:
ds2 = pickle.load(fid)
finally:
shutil.rmtree(tempdir)
assert_dataset_equal(ds, ds2)
def test_io_txt():
"Test Dataset io as text"
ds = datasets.get_uv()
# Var that has integer values as float
ds['intflt'] = ds.eval('intvar * 1.')
ds['intflt'].name = 'intflt'
# io test
tempdir = tempfile.mkdtemp()
try:
dest = os.path.join(tempdir, 'test.txt')
ds.save_txt(dest)
ds2 = load.tsv(dest)
finally:
shutil.rmtree(tempdir)
assert_dataset_equal(ds, ds2, decimal=6)
def test_r():
    "Test interaction with R through rpy2"
from rpy2.robjects import r
r("data(sleep)")
ds = Dataset.from_r("sleep")
eq_(ds.name, 'sleep')
extra = (0.7, -1.6, -0.2, -1.2, -0.1, 3.4, 3.7, 0.8, 0.0, 2.0, 1.9, 0.8,
1.1, 0.1, -0.1, 4.4, 5.5, 1.6, 4.6, 3.4)
assert_array_equal(ds.eval('extra'), extra)
assert_array_equal(ds.eval('ID'), map(str, xrange(1, 11)) * 2)
assert_array_equal(ds.eval('group'), ['1'] * 10 + ['2'] * 10)
# test putting
ds.to_r('sleep_copy')
ds_copy = Dataset.from_r('sleep_copy')
assert_dataset_equal(ds_copy, ds)
def test_source_space():
"Test SourceSpace Dimension"
subject = 'fsaverage'
data_path = mne.datasets.sample.data_path()
mri_sdir = os.path.join(data_path, 'subjects')
mri_dir = os.path.join(mri_sdir, subject)
src_path = os.path.join(mri_dir, 'bem', subject + '-ico-5-src.fif')
label_dir = os.path.join(mri_dir, 'label')
label_ba1 = mne.read_label(os.path.join(label_dir, 'lh.BA1.label'))
label_v1 = mne.read_label(os.path.join(label_dir, 'lh.V1.label'))
label_mt = mne.read_label(os.path.join(label_dir, 'lh.MT.label'))
label_ba1_v1 = label_ba1 + label_v1
label_v1_mt = label_v1 + label_mt
src = mne.read_source_spaces(src_path)
source = SourceSpace((src[0]['vertno'], src[1]['vertno']), subject,
'ico-5', mri_sdir)
index = source.dimindex(label_v1)
source_v1 = source[index]
index = source.dimindex(label_ba1_v1)
source_ba1_v1 = source[index]
index = source.dimindex(label_v1_mt)
source_v1_mt = source[index]
index = source_ba1_v1.dimindex(source_v1_mt)
source_v1_intersection = source_ba1_v1[index]
assert_source_space_equal(source_v1, source_v1_intersection)
# index from label
index = source.index_for_label(label_v1)
assert_array_equal(index.source[index.x].vertno[0],
np.intersect1d(source.lh_vertno, label_v1.vertices, 1))
# parcellation and cluster localization
if mne.__version__ < '0.8':
return
parc = mne.read_labels_from_annot(subject, parc='aparc', subjects_dir=mri_sdir)
indexes = [source.index_for_label(label) for label in parc
if len(label) > 10]
x = np.vstack([index.x for index in indexes])
ds = source._cluster_properties(x)
for i in xrange(ds.n_cases):
eq_(ds[i, 'location'], parc[i].name)
def test_var():
"Test Var objects"
base = Factor('aabbcde')
y = Var.from_dict(base, {'a': 5, 'e': 8}, default=0)
assert_array_equal(y.x, [5, 5, 0, 0, 0, 0, 8])
# basic operations
info = {'a': 1}
v = Var(np.arange(4.), info=info)
eq_(v.info, info)
w = v - 1
eq_(w.info, info)
assert_array_equal(w.x, v.x - 1)
w = v + 1
eq_(w.info, info)
assert_array_equal(w.x, v.x + 1)
w = v * 2
eq_(w.info, info)
assert_array_equal(w.x, v.x * 2)
w = v / 2
eq_(w.info, info)
assert_array_equal(w.x, v.x / 2)
# assignment
tgt1 = np.arange(10)
tgt2 = np.tile(np.arange(5), 2)
v = Var(np.arange(10))
v[v > 4] = np.arange(5)
assert_array_equal(v, tgt2)
v[5:] = np.arange(5, 10)
assert_array_equal(v, tgt1)
v = Var(np.arange(10))
v[v > 4] = Var(np.arange(5))
assert_array_equal(v, tgt2)
v[5:] = Var(np.arange(5, 10))
assert_array_equal(v, tgt1)
# .split()
y = Var(np.arange(16))
for i in xrange(1, 9):
split = y.split(i)
eq_(len(split.cells), i)
# .as_factor()
v = Var(np.arange(4))
assert_array_equal(v.as_factor(), Factor('0123'))
assert_array_equal(v.as_factor({0: 'a'}), Factor('a123'))
assert_array_equal(v.as_factor({(0, 1): 'a', (2, 3): 'b'}), Factor('aabb'))
assert_array_equal(v.as_factor({(0, 1): 'a', 2: 'b', 'default': 'c'}),
Factor('aabc'))
assert_array_equal(v.as_factor({(0, 1): 'a', (2, 'default'): 'b'}),
Factor('aabb'))
| [
"[email protected]"
] | |
17f147002517ca6e9ce3f90605cfde55fb9f8c21 | 8f736b5cc28cc1d46506abf1b001eb41cc1f9423 | /apps/trade/migrations/0021_auto_20210322_2247.py | 9d43bf7c166f0d2b8fdeb3cf75abab92377b96c8 | [] | no_license | tang1323/MxShop | 6ac68502f59ae07b483b6145e1b557399192e3dd | 831b5bdd8abdf7d6e547b0bd3fff9341261e4afa | refs/heads/master | 2023-04-04T07:09:32.759476 | 2021-04-14T14:36:00 | 2021-04-14T14:36:00 | 357,937,706 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 629 | py | # Generated by Django 2.2 on 2021-03-22 22:47
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('trade', '0020_auto_20210322_1137'),
]
operations = [
migrations.AlterField(
model_name='orderinfo',
name='pay_status',
field=models.CharField(blank=True, choices=[('TRADE_SUCCESS', '成功'), ('paying', '待支付'), ('TRADE_FINISHED', '交易结束'), ('WAIT_BUYER_PAY', '交易创建'), ('TRADE_CLOSED', '超时关闭')], default='paying', max_length=30, null=True, verbose_name='订单状态'),
),
]
| [
"[email protected]"
] | |
8cc46188aaa9c714c01b161566a49af267641640 | b860530cc9f14d23fefada526f36fe3eae34e42c | /tracnghiem/admin.py | 5e86c22bc9bdfb2b7ae7920c83df3a7a07080aee | [] | no_license | anhpt204/hvhc | fa0386f624f9699b56570ec06fa724894d04b60a | 0b8d2f48bc8068413b6e53b989205fef19358a80 | refs/heads/master | 2022-12-06T01:00:45.232069 | 2016-08-16T15:50:52 | 2016-08-16T15:50:52 | 42,731,930 | 0 | 0 | null | 2022-11-22T00:27:27 | 2015-09-18T15:54:38 | Python | UTF-8 | Python | false | false | 9,173 | py | # -*- encoding: utf-8 -*-
from django.contrib.admin.options import TabularInline, ModelAdmin
from tracnghiem.models import Answer, QuestionGroup, MCQuestion, TFQuestion, SinhDeConf, LogSinhDe,\
NganHangDe, KHThi, BaiThi, ImportMCQuestion
from django.http.response import HttpResponseRedirect
from django.contrib import admin
import json
# from django.contrib.auth.decorators import permission_required
#from permission.decorators import permission_required
from hvhc import PERM_BOC_DE, PERM_XEM_IN_DE
from daotao.models import SinhVien
from common.models import MyModelAdmin
#Override modeladmin
# class MyModelAdmin(admin.ModelAdmin):
# def get_form(self, request, obj=None, **kwargs):
# if hasattr(self, 'field_permissions'):
# user = request.user
# for _field in self.opts.fields:
# perm = self.field_permissions.get(_field.name)
# if perm and not user.has_perm(perm):
# if self.exclude:
# self.exclude.append(_field.name)
# else:
# self.exclude=[_field.name]
# return super(MyModelAdmin, self).get_form(request, obj, **kwargs)
class AnswerInLine(TabularInline):
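    # inline editor for the four answer rows attached to each multiple-choice question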
model = Answer
extra=4
max_num=4
# class QuestionGroup_SettingInLine(TabularInline):
# model = QuestionGroup_Setting
# fields=('question_group', 'question_type', 'mark_per_question', 'num_of_questions')
class SinhDeConfInline(TabularInline):
model = SinhDeConf
fields = ('level', 'loaiCauHoi', 'soLuong')
# class Chapter_SettingInLine(TabularInline):
# model = Chapter_Setting
# fields=('chapter', 'num_of_questions')
# class CaThiAdmin(ModelAdmin):
# # form = CaThiAdminForm
# model = CaThi
# filter_horizontal =('ds_thisinh', 'ds_giamthi')
# # form = CaThiForm
# list_display = ('title', 'mon_thi', 'ngay_thi', 'description')
# fields=('title', 'mon_thi', 'ds_giamthi', 'ds_thisinh', 'ngay_thi',
# 'tg_bat_dau', 'tg_ket_thuc', 'pass_mark','tao_moi_de_thi',
# 'description')
# # exclude=('ds_sv_thi',)
#
# def add_view(self, request, form_url='', extra_context=None):
# self.inlines = []
# return ModelAdmin.add_view(self, request, form_url=form_url, extra_context=extra_context)
#
# def change_view(self, request, object_id, form_url='', extra_context=None):
# self.inlines = [QuestionGroup_SettingInLine, Chapter_SettingInLine]
# return ModelAdmin.change_view(self, request, object_id, form_url=form_url, extra_context=extra_context)
#
class QuestionGroupAdmin(ModelAdmin):
model = QuestionGroup
class MCQuestionAdmin(ModelAdmin):
model=MCQuestion
list_display = ('maCauHoi', 'monHoc', 'doiTuong', 'noiDung', 'thuocChuong', 'prior', 'diem')
list_filter = ('monHoc', 'doiTuong')
fields = ('maCauHoi', 'monHoc', 'doiTuong',
'prior', 'thuocChuong', #'taoBoi',
'noiDung', 'diem', 'figure', )#'audio', 'clip' )
search_fields = ('noiDung',)
# filter_horizontal = ('ca_thi',)
inlines = [AnswerInLine]
class LogSinhDeAdmin(ModelAdmin):
model = LogSinhDe
fields = ("monHoc", 'doiTuong', 'soLuong', 'ngayTao')
list_display=("monHoc", 'doiTuong', 'ngayTao', 'nguoiTao', 'soLuong', 'sinhDe')
inlines=[SinhDeConfInline]
def save_model(self, request, obj, form, change):
instance = form.save(commit=False)
instance.nguoiTao = request.user
instance.save()
def sinhDe(self, obj):
# ds_dethi = obj.sinhDe()
return u'<a href="%s">Sinh đề</a>' % ('/hvhc/tracnghiem/sinhde/'+str(obj.pk)+'/')
sinhDe.allow_tags=True
sinhDe.short_description="Sinh đề"
class TFQuestionAdmin(ModelAdmin):
model = TFQuestion
list_display = ('monHoc', 'doiTuong', 'noiDung')
fields = ('monHoc', 'doiTuong', 'prior', 'thuocChuong',
'noiDung', 'figure', 'audio', 'clip', 'isTrue' )
list_filter = ('monHoc',)
class NganHangDeAdmin(ModelAdmin):
model=NganHangDe
list_display=('maDeThi', 'get_monHoc', 'get_doiTuong', 'ngay_tao', 'daDuyet', 'export_pdf')
list_filter=('logSinhDe__doiTuong', 'logSinhDe__monHoc', 'ngay_tao', 'daDuyet')
actions=['duyet_deThi', 'boDuyet_deThi']
def get_monHoc(self, obj):
return obj.logSinhDe.monHoc
get_monHoc.short_description="Môn thi"
def get_doiTuong(self, obj):
return obj.logSinhDe.doiTuong
get_doiTuong.short_description="Đối tượng"
def duyet_deThi(self, request, queryset):
queryset.update(daDuyet=True)
duyet_deThi.short_description = "Duyệt các đề đã chọn"
def boDuyet_deThi(self, request, queryset):
queryset.update(daDuyet=False)
boDuyet_deThi.short_description = "Bỏ duyệt các đề đã chọn"
def export_pdf(self, obj):
return u'<a href="%s">PDF</a>' % ('/hvhc/tracnghiem/export/dethi/'+str(obj.pk)+'/')
export_pdf.allow_tags=True
export_pdf.short_description="Đề thi"
class KHThiAdmin(ModelAdmin):
model=KHThi
filter_horizontal =('ds_thisinh', 'ds_giamthi')
fields = ['ten', 'mon_thi', 'nam_hoc', 'hoc_ky', 'doi_tuong',
'ds_thisinh',
'ds_giamthi',
'ngay_thi', 'tg_bat_dau', 'tg_thi', 'trang_thai',
# for test
#'de_thi', 'dap_an'
]
# field_permissions = {'boc_tron_de':'tracnghiem.khthi.duoc_phep_boc_de',
# # 'in_de':'khthi.duoc_phep_xem_va_in_de'
# }
# @permission_required('tracnghiem.khthi.duoc_phep_boc-de')
def boc_tron_de(self, obj):
dethi = json.loads(obj.de_thi)
if len(dethi) == 0:
return u'<a href="%s">Bốc đề</a>' % ('/hvhc/tracnghiem/khthi/boctrondethi/'+str(obj.pk)+'/')
else:
return u'Đã có, <a href="%s">Bốc lại</a>' % ('/hvhc/tracnghiem/khthi/boctrondethi/'+str(obj.pk)+'/')
boc_tron_de.allow_tags=True
boc_tron_de.short_description="Thực hiện"
def xem_de(self, obj):
dethi = json.loads(obj.de_thi)
if len(dethi) == 0:
return u'Chưa có'
else:
return u'<a href="%s">Xem đề</a>' % ('/hvhc/tracnghiem/khthi/show/'+str(obj.pk)+'/')
xem_de.allow_tags=True
xem_de.short_description="Xem"
def get_list_display(self, request):
ld = ['ten', 'mon_thi', 'doi_tuong', 'nam_hoc', 'hoc_ky',
'ngay_thi',
'tg_bat_dau', 'tg_thi', 'trang_thai', 'nguoi_boc_de']
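        # append the boc_tron_de / xem_de action columns only when the user holds the
        # matching permission, granted either directly or through one of their groups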
allow_boc_de = False
allow_xem_de = False
perms = request.user.user_permissions.all()
        for perm in perms:
            if perm.codename == PERM_BOC_DE:
                allow_boc_de = True
            if perm.codename == PERM_XEM_IN_DE:
                allow_xem_de = True
        for group in request.user.groups.all():
            perms = group.permissions.all()
            for perm in perms:
                if perm.codename == PERM_BOC_DE:
                    allow_boc_de = True
                if perm.codename == PERM_XEM_IN_DE:
                    allow_xem_de = True
if allow_boc_de:
ld.append('boc_tron_de')
if allow_xem_de:
ld.append('xem_de')
return ld
# return ModelAdmin.get_list_display(self, request)
class DiemAdmin(ModelAdmin):
model = BaiThi
list_display = ['get_ma_sv', 'get_ho_ten', 'get_lop', 'get_mon_thi', 'diem']
list_filter = ['thi_sinh__lop', 'khthi']
actions=['export_pdf']
def get_ma_sv(self, obj):
return obj.thi_sinh.ma_sv
get_ma_sv.short_description = 'Mã SV'
def get_ho_ten(self, obj):
return '%s %s' %(obj.thi_sinh.ho_dem, obj.thi_sinh.ten)
get_ho_ten.short_description = 'Họ và tên'
def get_lop(self, obj):
return obj.thi_sinh.lop
get_lop.short_description = 'Lớp'
def get_mon_thi(self, obj):
return obj.khthi.mon_thi
get_mon_thi.short_description='Môn thi'
def export_pdf(self, request, queryset):
bts = '-'.join([str(obj.id) for obj in queryset])
return HttpResponseRedirect('/hvhc/tracnghiem/export_bd/' + bts + '/')
export_pdf.short_description = "Xuất bảng điểm"
class ImportMCQuestionAdmin(ModelAdmin):
model = ImportMCQuestion
list_display=['mon_thi', 'doi_tuong', 'import_file', 'import_data']
def import_data(self, obj):
# obj.import_data()
return u'<a href="%s">Import</a>' % ('/hvhc/tracnghiem/import/mc/'+str(obj.pk)+'/')
import_data.allow_tags=True
import_data.short_description="Import"
admin.site.register(LogSinhDe, LogSinhDeAdmin)
admin.site.register(NganHangDe, NganHangDeAdmin)
admin.site.register(QuestionGroup, QuestionGroupAdmin)
admin.site.register(MCQuestion, MCQuestionAdmin)
admin.site.register(TFQuestion, TFQuestionAdmin)
admin.site.register(KHThi, KHThiAdmin)
admin.site.register(BaiThi, DiemAdmin)
admin.site.register(ImportMCQuestion, ImportMCQuestionAdmin)
| [
"[email protected]"
] | |
ccec29fd6ea83bdc111cb217e95734492d2579ad | 42348c0ff9785bbab18d87f277df791331bbc121 | /tests/test_pitches.py | 2d1420380d1934aca34db60e8b9154860a4255cc | [
"MIT"
] | permissive | vincentouma/thinkout | 783449834bd856d17e5273d9d3a50ecb6d79f6ef | 85306ccec7924ad4fd6fe7ffb75aa537d9fe97c0 | refs/heads/master | 2020-06-29T11:48:09.283309 | 2019-08-06T05:59:14 | 2019-08-06T05:59:14 | 200,524,144 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 271 | py | import unittest
from app.models import User
def test_no_access_password(self):
with self.assertRaises(AttributeError):
self.new_user.password
def test_password_verification(self):
self.assertTrue(self.new_user.verify_password ('banana'))
| [
"[email protected]"
] | |
6aad1e54b8786ecb8e264520db3f9ee24f1bfb49 | 9ac99a99dc8f79f52fbbe3e8a5b311b518fe45d9 | /apps/hrm/models/employee_types.py | 35bc9fbf61826d7c5683608a6038c4e0d7ac01e7 | [] | no_license | nttlong/quicky-01 | eb61620e01f04909d564244c46a03ca2b69dfecc | 0f5610aa7027429bdd9ca9b45899a472c372c6cc | refs/heads/master | 2020-03-25T17:45:31.633347 | 2018-11-27T15:02:30 | 2018-11-27T15:02:30 | 143,994,145 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 186 | py | from qmongo import extends, extends_dict,define
from . commons import base
model_name = "employee_types"
extends(
model_name,
base.model_name,
[],
formular = ("text")
)
| [
"[email protected]"
] | |
78628bb42876d7fa9c9b9a641465f23d3409700b | 24fe1f54fee3a3df952ca26cce839cc18124357a | /servicegraph/lib/python2.7/site-packages/acimodel-4.0_3d-py2.7.egg/cobra/modelimpl/eqptcapacity/l3usageperhist1d.py | 6ce6ba4d67fb03c133a1fb3f10af92213f8034cd | [] | no_license | aperiyed/servicegraph-cloudcenter | 4b8dc9e776f6814cf07fe966fbd4a3481d0f45ff | 9eb7975f2f6835e1c0528563a771526896306392 | refs/heads/master | 2023-05-10T17:27:18.022381 | 2020-01-20T09:18:28 | 2020-01-20T09:18:28 | 235,065,676 | 0 | 0 | null | 2023-05-01T21:19:14 | 2020-01-20T09:36:37 | Python | UTF-8 | Python | false | false | 17,558 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2019 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class L3UsagePerHist1d(Mo):
"""
Mo doc not defined in techpub!!!
"""
meta = StatsClassMeta("cobra.model.eqptcapacity.L3UsagePerHist1d", "Layer3 entries usage percentage")
counter = CounterMeta("normalizedv6", CounterCategory.GAUGE, "percentage", "Local v6 L3 entries usage percentage")
counter._propRefs[PropCategory.IMPLICIT_MIN] = "normalizedv6Min"
counter._propRefs[PropCategory.IMPLICIT_MAX] = "normalizedv6Max"
counter._propRefs[PropCategory.IMPLICIT_AVG] = "normalizedv6Avg"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "normalizedv6Spct"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "normalizedv6Thr"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "normalizedv6Tr"
meta._counters.append(counter)
counter = CounterMeta("normalizedv4", CounterCategory.GAUGE, "percentage", "Local v4 L3 entries usage percentage")
counter._propRefs[PropCategory.IMPLICIT_MIN] = "normalizedv4Min"
counter._propRefs[PropCategory.IMPLICIT_MAX] = "normalizedv4Max"
counter._propRefs[PropCategory.IMPLICIT_AVG] = "normalizedv4Avg"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "normalizedv4Spct"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "normalizedv4Thr"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "normalizedv4Tr"
meta._counters.append(counter)
meta.moClassName = "eqptcapacityL3UsagePerHist1d"
meta.rnFormat = "HDeqptcapacityL3UsagePer1d-%(index)s"
meta.category = MoCategory.STATS_HISTORY
meta.label = "historical Layer3 entries usage percentage stats in 1 day"
meta.writeAccessMask = 0x1
meta.readAccessMask = 0x1
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = True
meta.parentClasses.add("cobra.model.eqptcapacity.Entity")
meta.superClasses.add("cobra.model.stats.Item")
meta.superClasses.add("cobra.model.stats.Hist")
meta.superClasses.add("cobra.model.eqptcapacity.L3UsagePerHist")
meta.rnPrefixes = [
('HDeqptcapacityL3UsagePer1d-', True),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "cnt", "cnt", 16212, PropCategory.REGULAR)
prop.label = "Number of Collections During this Interval"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("cnt", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "index", "index", 27165, PropCategory.REGULAR)
prop.label = "History Index"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.isNaming = True
meta.props.add("index", prop)
prop = PropMeta("str", "lastCollOffset", "lastCollOffset", 111, PropCategory.REGULAR)
prop.label = "Collection Length"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("lastCollOffset", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "normalizedv4Avg", "normalizedv4Avg", 27199, PropCategory.IMPLICIT_AVG)
prop.label = "Local v4 L3 entries usage percentage average value"
prop.isOper = True
prop.isStats = True
meta.props.add("normalizedv4Avg", prop)
prop = PropMeta("str", "normalizedv4Max", "normalizedv4Max", 27198, PropCategory.IMPLICIT_MAX)
prop.label = "Local v4 L3 entries usage percentage maximum value"
prop.isOper = True
prop.isStats = True
meta.props.add("normalizedv4Max", prop)
prop = PropMeta("str", "normalizedv4Min", "normalizedv4Min", 27197, PropCategory.IMPLICIT_MIN)
prop.label = "Local v4 L3 entries usage percentage minimum value"
prop.isOper = True
prop.isStats = True
meta.props.add("normalizedv4Min", prop)
prop = PropMeta("str", "normalizedv4Spct", "normalizedv4Spct", 27200, PropCategory.IMPLICIT_SUSPECT)
prop.label = "Local v4 L3 entries usage percentage suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("normalizedv4Spct", prop)
prop = PropMeta("str", "normalizedv4Thr", "normalizedv4Thr", 27201, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "Local v4 L3 entries usage percentage thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("normalizedv4Thr", prop)
prop = PropMeta("str", "normalizedv4Tr", "normalizedv4Tr", 27202, PropCategory.IMPLICIT_TREND)
prop.label = "Local v4 L3 entries usage percentage trend"
prop.isOper = True
prop.isStats = True
meta.props.add("normalizedv4Tr", prop)
prop = PropMeta("str", "normalizedv6Avg", "normalizedv6Avg", 27214, PropCategory.IMPLICIT_AVG)
prop.label = "Local v6 L3 entries usage percentage average value"
prop.isOper = True
prop.isStats = True
meta.props.add("normalizedv6Avg", prop)
prop = PropMeta("str", "normalizedv6Max", "normalizedv6Max", 27213, PropCategory.IMPLICIT_MAX)
prop.label = "Local v6 L3 entries usage percentage maximum value"
prop.isOper = True
prop.isStats = True
meta.props.add("normalizedv6Max", prop)
prop = PropMeta("str", "normalizedv6Min", "normalizedv6Min", 27212, PropCategory.IMPLICIT_MIN)
prop.label = "Local v6 L3 entries usage percentage minimum value"
prop.isOper = True
prop.isStats = True
meta.props.add("normalizedv6Min", prop)
prop = PropMeta("str", "normalizedv6Spct", "normalizedv6Spct", 27215, PropCategory.IMPLICIT_SUSPECT)
prop.label = "Local v6 L3 entries usage percentage suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("normalizedv6Spct", prop)
prop = PropMeta("str", "normalizedv6Thr", "normalizedv6Thr", 27216, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "Local v6 L3 entries usage percentage thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("normalizedv6Thr", prop)
prop = PropMeta("str", "normalizedv6Tr", "normalizedv6Tr", 27217, PropCategory.IMPLICIT_TREND)
prop.label = "Local v6 L3 entries usage percentage trend"
prop.isOper = True
prop.isStats = True
meta.props.add("normalizedv6Tr", prop)
prop = PropMeta("str", "repIntvEnd", "repIntvEnd", 110, PropCategory.REGULAR)
prop.label = "Reporting End Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvEnd", prop)
prop = PropMeta("str", "repIntvStart", "repIntvStart", 109, PropCategory.REGULAR)
prop.label = "Reporting Start Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvStart", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
meta.namingProps.append(getattr(meta.props, "index"))
def __init__(self, parentMoOrDn, index, markDirty=True, **creationProps):
namingVals = [index]
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"[email protected]"
] | |
9d20f3f16753593924c66dad24694eb7c72e00c0 | 795435350d2e4fe415acd1fb846abb1c0cf94911 | /client/code/do some researches on crawler/douban/douban.py | 8cac1d9078cbb947968dfe3d23aeff118fa4b940 | [] | no_license | gaoxinge/network | c3575c7f0d95f7458a4ec74880ca5b8a0bff773e | 68d307ec0756abff60914225fd38d69fa4b2a37c | refs/heads/master | 2021-06-06T05:49:36.521243 | 2021-06-02T15:42:39 | 2021-06-02T15:42:39 | 84,033,128 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,133 | py | import requests
from lxml import etree
from Item import Item
import time
def http(url):
response = requests.get(url, headers={'User-Agent': 'Mozilla/5.0'})
return response
def parse(response):
Movie = Item('Movie', ['title', 'rating', 'vote'])
root = etree.HTML(response.text)
results = root.xpath('//div[@class=\'pl2\']')
for result in results:
movie = Movie()
movie['title'] = result.xpath('a/text()')[0][:-2].strip()
movie['rating'] = float(result.xpath('.//span[@class=\'rating_nums\']/text()')[0])
movie['vote'] = int(result.xpath('.//span[@class=\'pl\']/text()')[0][1:][:-4])
yield movie
def store(item):
f.write(str(item) + '\n')
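# one-call helper that fetches, parses and stores a single listing page
# (the main loop below inlines the same steps)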
def http_parse_store(url):
response = http(url)
items = parse(response)
for item in items:
store(item)
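# crawl the first nine pages of the Douban "2016" movie tag and append every parsed movie to douban.txt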
urls = ['https://movie.douban.com/tag/2016?start=' + str((i-1)*20) for i in range(1, 10)]
f = open('douban.txt', 'w')
start = time.time()
while urls:
response = http(urls.pop(0))
items = parse(response)
for item in items:
store(item)
print time.time() - start
f.close()
| [
"[email protected]"
] | |
04ad4d19924cc49f42a7f6ac77847e9bb33362eb | 9f8fa29bb5a93f896862806157b10b55e9f26825 | /message_media_conversations/models/message_dto.py | f07f49fac18257112666287a40d5f2c106d9e2f8 | [
"Apache-2.0"
] | permissive | messagemedia/conversations-python-sdk | 1b245ca7f63ca0c6fdbcd17a9bd11565d421e2a0 | b53046540bd5c826de784228f838468c22b863cf | refs/heads/master | 2020-03-19T05:52:05.735297 | 2018-10-16T23:29:31 | 2018-10-16T23:29:31 | 135,969,313 | 0 | 0 | null | 2018-06-04T04:12:18 | 2018-06-04T04:09:34 | null | UTF-8 | Python | false | false | 2,057 | py | # -*- coding: utf-8 -*-
"""
message_media_conversations.models.message_dto
This file was automatically generated for MessageMedia by APIMATIC v2.0 ( https://apimatic.io )
"""
class MessageDto(object):
"""Implementation of the 'MessageDto' model.
TODO: type model description here.
Attributes:
channel (string): TODO: type description here.
id (string): TODO: type description here.
text (string): TODO: type description here.
timestamp (string): TODO: type description here.
"""
# Create a mapping from Model property names to API property names
_names = {
"channel":'channel',
"id":'id',
"text":'text',
"timestamp":'timestamp'
}
def __init__(self,
channel=None,
id=None,
text=None,
timestamp=None):
"""Constructor for the MessageDto class"""
# Initialize members of the class
self.channel = channel
self.id = id
self.text = text
self.timestamp = timestamp
@classmethod
def from_dictionary(cls,
dictionary):
"""Creates an instance of this model from a dictionary
Args:
dictionary (dictionary): A dictionary representation of the object as
obtained from the deserialization of the server's response. The keys
MUST match property names in the API description.
Returns:
object: An instance of this structure class.
"""
if dictionary is None:
return None
# Extract variables from the dictionary
channel = dictionary.get('channel')
id = dictionary.get('id')
text = dictionary.get('text')
timestamp = dictionary.get('timestamp')
# Return an object of this model
return cls(channel,
id,
text,
timestamp)
| [
"[email protected]"
] | |
1227e2067f8c114470a88b026b4a6e6c16ee45bd | 4ea43f3f79ad483d83238d88572feb822f451372 | /philo/models/fields/__init__.py | efd315f9c3c5e11afe2ba9802508200ca1a0905c | [
"ISC"
] | permissive | kgodey/philo | c8c433d44b2f31121f13bd0ee101605be11fe9da | c19bf577d44606d2b284e6058d633f4a174b61cc | refs/heads/master | 2020-12-29T02:54:11.746966 | 2011-05-24T21:57:47 | 2011-05-24T21:57:47 | 686,009 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,547 | py | from django import forms
from django.core.exceptions import ValidationError
from django.core.validators import validate_slug
from django.db import models
from django.utils import simplejson as json
from django.utils.text import capfirst
from django.utils.translation import ugettext_lazy as _
from philo.forms.fields import JSONFormField
from philo.validators import TemplateValidator, json_validator
#from philo.models.fields.entities import *
class TemplateField(models.TextField):
"""A :class:`TextField` which is validated with a :class:`.TemplateValidator`. ``allow``, ``disallow``, and ``secure`` will be passed into the validator's construction."""
def __init__(self, allow=None, disallow=None, secure=True, *args, **kwargs):
super(TemplateField, self).__init__(*args, **kwargs)
self.validators.append(TemplateValidator(allow, disallow, secure))
class JSONDescriptor(object):
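    """Descriptor that caches the decoded JSON value on the instance and keeps the raw ``*_json`` attribute in sync on writes."""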
def __init__(self, field):
self.field = field
def __get__(self, instance, owner):
if instance is None:
raise AttributeError # ?
if self.field.name not in instance.__dict__:
json_string = getattr(instance, self.field.attname)
instance.__dict__[self.field.name] = json.loads(json_string)
return instance.__dict__[self.field.name]
def __set__(self, instance, value):
instance.__dict__[self.field.name] = value
setattr(instance, self.field.attname, json.dumps(value))
def __delete__(self, instance):
del(instance.__dict__[self.field.name])
setattr(instance, self.field.attname, json.dumps(None))
class JSONField(models.TextField):
"""A :class:`TextField` which stores its value on the model instance as a python object and stores its value in the database as JSON. Validated with :func:`.json_validator`."""
default_validators = [json_validator]
def get_attname(self):
return "%s_json" % self.name
def contribute_to_class(self, cls, name):
super(JSONField, self).contribute_to_class(cls, name)
setattr(cls, name, JSONDescriptor(self))
models.signals.pre_init.connect(self.fix_init_kwarg, sender=cls)
def fix_init_kwarg(self, sender, args, kwargs, **signal_kwargs):
# Anything passed in as self.name is assumed to come from a serializer and
# will be treated as a json string.
if self.name in kwargs:
value = kwargs.pop(self.name)
# Hack to handle the xml serializer's handling of "null"
if value is None:
value = 'null'
kwargs[self.attname] = value
def formfield(self, *args, **kwargs):
kwargs["form_class"] = JSONFormField
return super(JSONField, self).formfield(*args, **kwargs)
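# Hypothetical usage sketch (added note, not part of the original module):
# contribute_to_class installs a JSONDescriptor, so the model attribute behaves as a
# Python object while the underlying "<name>_json" column holds the JSON text, e.g.
#     class Entry(models.Model):
#         data = JSONField(blank=True)
#     e = Entry()
#     e.data = {"tags": ["a"]}   # descriptor also sets e.data_json = '{"tags": ["a"]}'
# (passing data=... to the constructor is treated as an already-serialized JSON
# string, per fix_init_kwarg above)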
class SlugMultipleChoiceField(models.Field):
"""Stores a selection of multiple items with unique slugs in the form of a comma-separated list."""
__metaclass__ = models.SubfieldBase
description = _("Comma-separated slug field")
def get_internal_type(self):
return "TextField"
def to_python(self, value):
if not value:
return []
if isinstance(value, list):
return value
return value.split(',')
def get_prep_value(self, value):
return ','.join(value)
def formfield(self, **kwargs):
# This is necessary because django hard-codes TypedChoiceField for things with choices.
defaults = {
'widget': forms.CheckboxSelectMultiple,
'choices': self.get_choices(include_blank=False),
'label': capfirst(self.verbose_name),
'required': not self.blank,
'help_text': self.help_text
}
if self.has_default():
if callable(self.default):
defaults['initial'] = self.default
defaults['show_hidden_initial'] = True
else:
defaults['initial'] = self.get_default()
for k in kwargs.keys():
if k not in ('coerce', 'empty_value', 'choices', 'required',
'widget', 'label', 'initial', 'help_text',
'error_messages', 'show_hidden_initial'):
del kwargs[k]
defaults.update(kwargs)
form_class = forms.TypedMultipleChoiceField
return form_class(**defaults)
def validate(self, value, model_instance):
invalid_values = []
for val in value:
try:
validate_slug(val)
except ValidationError:
invalid_values.append(val)
if invalid_values:
# should really make a custom message.
raise ValidationError(self.error_messages['invalid_choice'] % invalid_values)
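# Illustrative round-trip (added note, not in the original source): with this field,
# to_python("news,blog") -> ["news", "blog"] and get_prep_value(["news", "blog"])
# -> "news,blog"; validate() slug-checks each item individually.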
try:
from south.modelsinspector import add_introspection_rules
except ImportError:
pass
else:
add_introspection_rules([], ["^philo\.models\.fields\.SlugMultipleChoiceField"])
add_introspection_rules([], ["^philo\.models\.fields\.TemplateField"])
add_introspection_rules([], ["^philo\.models\.fields\.JSONField"]) | [
"[email protected]"
] | |
b12c9631dbd2f75d27a5ac4754fee8e016fc58c0 | 60acb606318869410d7437bf6c1a16fd6762b6b4 | /app/__init__.py | 871f9dabd9b8272d79ccffa706296dcf48f3ee49 | [
"Apache-2.0"
] | permissive | heraclitusj/mgek_imgbed | 8fb0c69599fab3fce06684f659dfd5c0b4c5f866 | d8a77ba1401f42237adda1b3ea8611f6464a704e | refs/heads/master | 2022-07-28T01:48:51.314094 | 2020-05-20T05:35:52 | 2020-05-20T05:35:52 | 265,461,338 | 0 | 0 | null | 2020-05-20T05:31:37 | 2020-05-20T05:31:37 | null | UTF-8 | Python | false | false | 1,231 | py | # @Author: Landers1037
# @Github: github.com/landers1037
# @File: __init__.py.py
# @Date: 2020-05-12
from flask import Flask
from app.config import *
from flask_sqlalchemy import SQLAlchemy
from flask_pymongo import PyMongo
# The database connection is initialized by default at startup; which database is used depends on the engine setting
db = SQLAlchemy()
mongo = PyMongo()
global_config = None
def create_app(mode=None):
application = Flask(__name__, static_url_path='/images', static_folder='../images')
check_config()
global global_config
global_config = read_config()
if mode == 'dev' or global_config.debug:
application.debug = True
application.config.from_object(flask_config())
    # Add error checking for the database connection selection below
if global_config.engine == 'sqlite':
db.init_app(application)
elif global_config.engine == 'mongo':
mongo.init_app(application)
else:
db.init_app(application)
from .api.img import img
from .api.auth import auth
from .api.sys import sys
application.register_blueprint(img)
application.register_blueprint(auth)
application.register_blueprint(sys)
return application
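# Hypothetical usage sketch (added note, not part of the original file): a WSGI entry
# point would typically do
#     app = create_app()        # or create_app('dev') to force debug mode
#     app.run(host='0.0.0.0')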
| [
"[email protected]"
] | |
ca55231bed72276df46a7e9b1d23e67ae3171425 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/verbs/_fade.py | 48b2ce0cd12e383a309567bd8721b04aafb27dd9 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 337 | py |
#class header
class _FADE():
def __init__(self,):
self.name = "FADE"
self.definitions = [u'to (cause to) lose colour, brightness, or strength gradually: ']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'verbs'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
| [
"[email protected]"
] | |
fa0b0e8390377d9b563df54e6bfa61219bfd4b70 | ad0857eaba945c75e705594a53c40dbdd40467fe | /baekjoon/python/buying_cards_11052.py | b29c863cc46b83674f4b81cdf48a7cffc84bb63f | [
"MIT"
] | permissive | yskang/AlgorithmPractice | c9964d463fbd0d61edce5ba8b45767785b0b5e17 | 3efa96710e97c8740d6fef69e4afe7a23bfca05f | refs/heads/master | 2023-05-25T13:51:11.165687 | 2023-05-19T07:42:56 | 2023-05-19T07:42:56 | 67,045,852 | 0 | 0 | null | 2021-06-20T02:42:27 | 2016-08-31T14:40:10 | Python | UTF-8 | Python | false | false | 630 | py | # Title: 카드 구매하기
# Link: https://www.acmicpc.net/problem/11052
import sys
sys.setrecursionlimit(10 ** 6)
read_single_int = lambda: int(sys.stdin.readline().strip())
read_list_int = lambda: list(map(int, sys.stdin.readline().strip().split(' ')))
def solution(num_card: int, packs: list):
d = [0 for _ in range(num_card+1)]
for i in range(1, num_card+1):
d[i] = max([d[i-j] + packs[j-1] for j in range(1, i+1)])
return d[num_card]
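# Added note on the recurrence above: d[i] is the most one can pay for exactly i cards,
# and d[i] = max over j=1..i of d[i-j] + packs[j-1] (buy a j-card pack last).
# For example, with packs = [1, 5, 6, 7]: d[4] = max(d[3]+1, d[2]+5, d[1]+6, d[0]+7) = 10.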
def main():
N = read_single_int()
P = read_list_int()
print(solution(N, P))
if __name__ == '__main__':
main() | [
"[email protected]"
] | |
159e01e7c2fe4f3943abf29f49cebe1232f215b3 | 53784d3746eccb6d8fca540be9087a12f3713d1c | /res/packages/scripts/scripts/client/gui/Scaleform/daapi/view/meta/VehiclePreviewMeta.py | 3b324dcf456ad95c0dd3471a225ee52f30dbbf10 | [] | no_license | webiumsk/WOT-0.9.17.1-CT | 736666d53cbd0da6745b970e90a8bac6ea80813d | d7c3cf340ae40318933e7205bf9a17c7e53bac52 | refs/heads/master | 2021-01-09T06:00:33.898009 | 2017-02-03T21:40:17 | 2017-02-03T21:40:17 | 80,870,824 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 2,046 | py | # 2017.02.03 21:51:10 Střední Evropa (běžný čas)
# Embedded file name: scripts/client/gui/Scaleform/daapi/view/meta/VehiclePreviewMeta.py
from gui.Scaleform.framework.entities.View import View
class VehiclePreviewMeta(View):
"""
DO NOT MODIFY!
Generated with yaml.
__author__ = 'yaml_processor'
@extends View
"""
def closeView(self):
self._printOverrideError('closeView')
def onBackClick(self):
self._printOverrideError('onBackClick')
def onBuyOrResearchClick(self):
self._printOverrideError('onBuyOrResearchClick')
def onOpenInfoTab(self, index):
self._printOverrideError('onOpenInfoTab')
def onCompareClick(self):
self._printOverrideError('onCompareClick')
def as_setStaticDataS(self, data):
"""
:param data: Represented by VehPreviewStaticDataVO (AS)
"""
if self._isDAAPIInited():
return self.flashObject.as_setStaticData(data)
def as_updateInfoDataS(self, data):
"""
:param data: Represented by VehPreviewInfoPanelVO (AS)
"""
if self._isDAAPIInited():
return self.flashObject.as_updateInfoData(data)
def as_updateVehicleStatusS(self, status):
if self._isDAAPIInited():
return self.flashObject.as_updateVehicleStatus(status)
def as_updatePriceS(self, data):
"""
:param data: Represented by VehPreviewPriceDataVO (AS)
"""
if self._isDAAPIInited():
return self.flashObject.as_updatePrice(data)
def as_updateBuyButtonS(self, data):
"""
:param data: Represented by VehPreviewBuyButtonVO (AS)
"""
if self._isDAAPIInited():
return self.flashObject.as_updateBuyButton(data)
# okay decompyling c:\Users\PC\wotsources\files\originals\res\packages\scripts\scripts\client\gui\Scaleform\daapi\view\meta\VehiclePreviewMeta.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2017.02.03 21:51:10 Střední Evropa (běžný čas)
| [
"[email protected]"
] | |
0ac14a8d24cb5d63875837fa4d9df2a7b1f8b5c6 | 86741e9f531f2aa63af682cc974ebbcc3b202e90 | /allhub/users/ssh_keys.py | 5829881dbdcb9d0302a4e6deb02739cf3e27ca79 | [
"Apache-2.0"
] | permissive | srinivasreddy/allhub | ccebea96a106e266743d180410ab5b16d08946fe | ff20858c9984da5c4edd5043c39eed3b6d5d693d | refs/heads/master | 2022-12-27T01:24:30.759553 | 2021-06-04T11:38:16 | 2021-06-04T11:38:16 | 204,402,796 | 2 | 2 | Apache-2.0 | 2022-12-08T07:44:11 | 2019-08-26T05:33:37 | Python | UTF-8 | Python | false | false | 1,817 | py | from allhub.response import Response
class SSHKeysMixin:
def list_public_ssh_keys(self, username):
url = "/users/{username}/keys".format(username=username)
self.response = Response(
self.get(
url,
**{"Accept": "application/vnd.github.giant-sentry-fist-preview+json"},
),
"SSHKeys",
)
return self.response.transform()
def ssh_keys(self):
url = "/user/keys"
self.response = Response(
self.get(
url,
**{"Accept": "application/vnd.github.giant-sentry-fist-preview+json"},
),
"SSHKeys",
)
return self.response.transform()
def ssh_key(self, key_id):
url = "/user/keys/{key_id}".format(key_id=key_id)
self.response = Response(
self.get(
url,
**{"Accept": "application/vnd.github.giant-sentry-fist-preview+json"},
),
"SSHKey",
)
return self.response.transform()
def create_public_ssh_key(self, title, key):
url = "/user/keys"
self.response = Response(
self.post(
url,
params=[("title", title), ("key", key)],
**{"Accept": "application/vnd.github.giant-sentry-fist-preview+json"},
),
"SSHKey",
)
return self.response.transform()
def delete_public_ssh_key(self, key_id):
url = "/user/keys/{key_id}".format(key_id=key_id)
self.response = Response(
self.delete(
url,
**{"Accept": "application/vnd.github.giant-sentry-fist-preview+json"},
),
"",
)
return self.response.status_code == 204
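# Hypothetical usage sketch (added note, not part of the original module): the mixin
# expects to be combined with a client that provides get/post/delete, e.g.
#     client.create_public_ssh_key("laptop", "ssh-rsa AAAA...")
#     client.ssh_keys()                       # list keys for the authenticated user
#     client.delete_public_ssh_key(key_id)    # True when the API replies 204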
| [
"[email protected]"
] | |
e0159a0bc43cebe51ee88486e5e5cacadec5a5a7 | 9edaf93c833ba90ae9a903aa3c44c407a7e55198 | /netex/models/railway_link_ref_structure.py | 00ba98f8df1e4547deb46e2b051c3da101b80055 | [] | no_license | tefra/xsdata-samples | c50aab4828b8c7c4448dbdab9c67d1ebc519e292 | ef027fe02e6a075d8ed676c86a80e9647d944571 | refs/heads/main | 2023-08-14T10:31:12.152696 | 2023-07-25T18:01:22 | 2023-07-25T18:01:22 | 222,543,692 | 6 | 1 | null | 2023-06-25T07:21:04 | 2019-11-18T21:00:37 | Python | UTF-8 | Python | false | false | 246 | py | from dataclasses import dataclass
from .infrastructure_link_ref_structure import InfrastructureLinkRefStructure
__NAMESPACE__ = "http://www.netex.org.uk/netex"
@dataclass
class RailwayLinkRefStructure(InfrastructureLinkRefStructure):
pass
| [
"[email protected]"
] | |
6d608921210b60fa6848d362d756532953b5c228 | 2b770588db83bc2a61b52f430248414395cc1b1f | /django_/citysearch_project/cities/models.py | fa4d0dc5e09ad2f0c02245c2ca01b266a2024625 | [] | no_license | luka319/portfelio_chemodanio | 595afb1d2e1fb3564bf94b204aa8e63dddd4cf0c | dd37f8c4af9d043ace9b5438b5a7680cfab26ab2 | refs/heads/master | 2021-06-26T19:05:33.170977 | 2020-01-08T23:24:10 | 2020-01-08T23:24:10 | 231,444,932 | 0 | 0 | null | 2021-06-10T22:27:34 | 2020-01-02T19:19:49 | Python | UTF-8 | Python | false | false | 296 | py | from django.db import models
# Create your models here.
class City(models.Model):
name = models.CharField(max_length = 255)
state = models.CharField(max_length = 255)
class Meta:
verbose_name_plural = "cities_города"
def __str__(self):
return self.name
| [
"[email protected]"
] | |
802828ec5275a0ef9b1ef8d1db094dfc9f3a6d36 | a6e4a6f0a73d24a6ba957277899adbd9b84bd594 | /sdk/python/pulumi_azure_native/alertsmanagement/v20190505preview/outputs.py | 0da651342e6335cb50f421cf750b9e602f4b1fea | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | MisinformedDNA/pulumi-azure-native | 9cbd75306e9c8f92abc25be3f73c113cb93865e9 | de974fd984f7e98649951dbe80b4fc0603d03356 | refs/heads/master | 2023-03-24T22:02:03.842935 | 2021-03-08T21:16:19 | 2021-03-08T21:16:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 24,016 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
from ._enums import *
__all__ = [
'ActionGroupResponse',
'ConditionResponse',
'ConditionsResponse',
'DiagnosticsResponse',
'ScopeResponse',
'SuppressionConfigResponse',
'SuppressionResponse',
'SuppressionScheduleResponse',
]
@pulumi.output_type
class ActionGroupResponse(dict):
"""
Action rule with action group configuration
"""
def __init__(__self__, *,
action_group_id: str,
created_at: str,
created_by: str,
last_modified_at: str,
last_modified_by: str,
type: str,
conditions: Optional['outputs.ConditionsResponse'] = None,
description: Optional[str] = None,
scope: Optional['outputs.ScopeResponse'] = None,
status: Optional[str] = None):
"""
Action rule with action group configuration
:param str action_group_id: Action group to trigger if action rule matches
:param str created_at: Creation time of action rule. Date-Time in ISO-8601 format.
:param str created_by: Created by user name.
:param str last_modified_at: Last updated time of action rule. Date-Time in ISO-8601 format.
:param str last_modified_by: Last modified by user name.
:param str type: Indicates type of action rule
Expected value is 'ActionGroup'.
:param 'ConditionsResponseArgs' conditions: conditions on which alerts will be filtered
:param str description: Description of action rule
:param 'ScopeResponseArgs' scope: scope on which action rule will apply
:param str status: Indicates if the given action rule is enabled or disabled
"""
pulumi.set(__self__, "action_group_id", action_group_id)
pulumi.set(__self__, "created_at", created_at)
pulumi.set(__self__, "created_by", created_by)
pulumi.set(__self__, "last_modified_at", last_modified_at)
pulumi.set(__self__, "last_modified_by", last_modified_by)
pulumi.set(__self__, "type", 'ActionGroup')
if conditions is not None:
pulumi.set(__self__, "conditions", conditions)
if description is not None:
pulumi.set(__self__, "description", description)
if scope is not None:
pulumi.set(__self__, "scope", scope)
if status is not None:
pulumi.set(__self__, "status", status)
@property
@pulumi.getter(name="actionGroupId")
def action_group_id(self) -> str:
"""
Action group to trigger if action rule matches
"""
return pulumi.get(self, "action_group_id")
@property
@pulumi.getter(name="createdAt")
def created_at(self) -> str:
"""
Creation time of action rule. Date-Time in ISO-8601 format.
"""
return pulumi.get(self, "created_at")
@property
@pulumi.getter(name="createdBy")
def created_by(self) -> str:
"""
Created by user name.
"""
return pulumi.get(self, "created_by")
@property
@pulumi.getter(name="lastModifiedAt")
def last_modified_at(self) -> str:
"""
Last updated time of action rule. Date-Time in ISO-8601 format.
"""
return pulumi.get(self, "last_modified_at")
@property
@pulumi.getter(name="lastModifiedBy")
def last_modified_by(self) -> str:
"""
Last modified by user name.
"""
return pulumi.get(self, "last_modified_by")
@property
@pulumi.getter
def type(self) -> str:
"""
Indicates type of action rule
Expected value is 'ActionGroup'.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter
def conditions(self) -> Optional['outputs.ConditionsResponse']:
"""
conditions on which alerts will be filtered
"""
return pulumi.get(self, "conditions")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
Description of action rule
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def scope(self) -> Optional['outputs.ScopeResponse']:
"""
scope on which action rule will apply
"""
return pulumi.get(self, "scope")
@property
@pulumi.getter
def status(self) -> Optional[str]:
"""
Indicates if the given action rule is enabled or disabled
"""
return pulumi.get(self, "status")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ConditionResponse(dict):
"""
condition to trigger an action rule
"""
def __init__(__self__, *,
operator: Optional[str] = None,
values: Optional[Sequence[str]] = None):
"""
condition to trigger an action rule
:param str operator: operator for a given condition
:param Sequence[str] values: list of values to match for a given condition.
"""
if operator is not None:
pulumi.set(__self__, "operator", operator)
if values is not None:
pulumi.set(__self__, "values", values)
@property
@pulumi.getter
def operator(self) -> Optional[str]:
"""
operator for a given condition
"""
return pulumi.get(self, "operator")
@property
@pulumi.getter
def values(self) -> Optional[Sequence[str]]:
"""
list of values to match for a given condition.
"""
return pulumi.get(self, "values")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
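# Illustrative values (added note, not part of the generated SDK): a condition meant
# to match alerts of severity Sev3 or Sev4 would carry something like
#     ConditionResponse(operator='Equals', values=['Sev3', 'Sev4'])
# where operator and values are the plain string/list fields documented above.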
@pulumi.output_type
class ConditionsResponse(dict):
"""
Conditions in alert instance to be matched for a given action rule. Default value is all. Multiple values could be provided with comma separation.
"""
def __init__(__self__, *,
alert_context: Optional['outputs.ConditionResponse'] = None,
alert_rule_id: Optional['outputs.ConditionResponse'] = None,
description: Optional['outputs.ConditionResponse'] = None,
monitor_condition: Optional['outputs.ConditionResponse'] = None,
monitor_service: Optional['outputs.ConditionResponse'] = None,
severity: Optional['outputs.ConditionResponse'] = None,
target_resource_type: Optional['outputs.ConditionResponse'] = None):
"""
Conditions in alert instance to be matched for a given action rule. Default value is all. Multiple values could be provided with comma separation.
:param 'ConditionResponseArgs' alert_context: filter alerts by alert context (payload)
:param 'ConditionResponseArgs' alert_rule_id: filter alerts by alert rule id
:param 'ConditionResponseArgs' description: filter alerts by alert rule description
:param 'ConditionResponseArgs' monitor_condition: filter alerts by monitor condition
:param 'ConditionResponseArgs' monitor_service: filter alerts by monitor service
:param 'ConditionResponseArgs' severity: filter alerts by severity
:param 'ConditionResponseArgs' target_resource_type: filter alerts by target resource type
"""
if alert_context is not None:
pulumi.set(__self__, "alert_context", alert_context)
if alert_rule_id is not None:
pulumi.set(__self__, "alert_rule_id", alert_rule_id)
if description is not None:
pulumi.set(__self__, "description", description)
if monitor_condition is not None:
pulumi.set(__self__, "monitor_condition", monitor_condition)
if monitor_service is not None:
pulumi.set(__self__, "monitor_service", monitor_service)
if severity is not None:
pulumi.set(__self__, "severity", severity)
if target_resource_type is not None:
pulumi.set(__self__, "target_resource_type", target_resource_type)
@property
@pulumi.getter(name="alertContext")
def alert_context(self) -> Optional['outputs.ConditionResponse']:
"""
filter alerts by alert context (payload)
"""
return pulumi.get(self, "alert_context")
@property
@pulumi.getter(name="alertRuleId")
def alert_rule_id(self) -> Optional['outputs.ConditionResponse']:
"""
filter alerts by alert rule id
"""
return pulumi.get(self, "alert_rule_id")
@property
@pulumi.getter
def description(self) -> Optional['outputs.ConditionResponse']:
"""
filter alerts by alert rule description
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="monitorCondition")
def monitor_condition(self) -> Optional['outputs.ConditionResponse']:
"""
filter alerts by monitor condition
"""
return pulumi.get(self, "monitor_condition")
@property
@pulumi.getter(name="monitorService")
def monitor_service(self) -> Optional['outputs.ConditionResponse']:
"""
filter alerts by monitor service
"""
return pulumi.get(self, "monitor_service")
@property
@pulumi.getter
def severity(self) -> Optional['outputs.ConditionResponse']:
"""
filter alerts by severity
"""
return pulumi.get(self, "severity")
@property
@pulumi.getter(name="targetResourceType")
def target_resource_type(self) -> Optional['outputs.ConditionResponse']:
"""
filter alerts by target resource type
"""
return pulumi.get(self, "target_resource_type")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class DiagnosticsResponse(dict):
"""
Action rule with diagnostics configuration
"""
def __init__(__self__, *,
created_at: str,
created_by: str,
last_modified_at: str,
last_modified_by: str,
type: str,
conditions: Optional['outputs.ConditionsResponse'] = None,
description: Optional[str] = None,
scope: Optional['outputs.ScopeResponse'] = None,
status: Optional[str] = None):
"""
Action rule with diagnostics configuration
:param str created_at: Creation time of action rule. Date-Time in ISO-8601 format.
:param str created_by: Created by user name.
:param str last_modified_at: Last updated time of action rule. Date-Time in ISO-8601 format.
:param str last_modified_by: Last modified by user name.
:param str type: Indicates type of action rule
Expected value is 'Diagnostics'.
:param 'ConditionsResponseArgs' conditions: conditions on which alerts will be filtered
:param str description: Description of action rule
:param 'ScopeResponseArgs' scope: scope on which action rule will apply
:param str status: Indicates if the given action rule is enabled or disabled
"""
pulumi.set(__self__, "created_at", created_at)
pulumi.set(__self__, "created_by", created_by)
pulumi.set(__self__, "last_modified_at", last_modified_at)
pulumi.set(__self__, "last_modified_by", last_modified_by)
pulumi.set(__self__, "type", 'Diagnostics')
if conditions is not None:
pulumi.set(__self__, "conditions", conditions)
if description is not None:
pulumi.set(__self__, "description", description)
if scope is not None:
pulumi.set(__self__, "scope", scope)
if status is not None:
pulumi.set(__self__, "status", status)
@property
@pulumi.getter(name="createdAt")
def created_at(self) -> str:
"""
Creation time of action rule. Date-Time in ISO-8601 format.
"""
return pulumi.get(self, "created_at")
@property
@pulumi.getter(name="createdBy")
def created_by(self) -> str:
"""
Created by user name.
"""
return pulumi.get(self, "created_by")
@property
@pulumi.getter(name="lastModifiedAt")
def last_modified_at(self) -> str:
"""
Last updated time of action rule. Date-Time in ISO-8601 format.
"""
return pulumi.get(self, "last_modified_at")
@property
@pulumi.getter(name="lastModifiedBy")
def last_modified_by(self) -> str:
"""
Last modified by user name.
"""
return pulumi.get(self, "last_modified_by")
@property
@pulumi.getter
def type(self) -> str:
"""
Indicates type of action rule
Expected value is 'Diagnostics'.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter
def conditions(self) -> Optional['outputs.ConditionsResponse']:
"""
conditions on which alerts will be filtered
"""
return pulumi.get(self, "conditions")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
Description of action rule
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def scope(self) -> Optional['outputs.ScopeResponse']:
"""
scope on which action rule will apply
"""
return pulumi.get(self, "scope")
@property
@pulumi.getter
def status(self) -> Optional[str]:
"""
Indicates if the given action rule is enabled or disabled
"""
return pulumi.get(self, "status")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ScopeResponse(dict):
"""
Target scope for a given action rule. By default scope will be the subscription. User can also provide list of resource groups or list of resources from the scope subscription as well.
"""
def __init__(__self__, *,
scope_type: Optional[str] = None,
values: Optional[Sequence[str]] = None):
"""
Target scope for a given action rule. By default scope will be the subscription. User can also provide list of resource groups or list of resources from the scope subscription as well.
:param str scope_type: type of target scope
:param Sequence[str] values: list of ARM IDs of the given scope type which will be the target of the given action rule.
"""
if scope_type is not None:
pulumi.set(__self__, "scope_type", scope_type)
if values is not None:
pulumi.set(__self__, "values", values)
@property
@pulumi.getter(name="scopeType")
def scope_type(self) -> Optional[str]:
"""
type of target scope
"""
return pulumi.get(self, "scope_type")
@property
@pulumi.getter
def values(self) -> Optional[Sequence[str]]:
"""
list of ARM IDs of the given scope type which will be the target of the given action rule.
"""
return pulumi.get(self, "values")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SuppressionConfigResponse(dict):
"""
Suppression logic for a given action rule
"""
def __init__(__self__, *,
recurrence_type: str,
schedule: Optional['outputs.SuppressionScheduleResponse'] = None):
"""
Suppression logic for a given action rule
:param str recurrence_type: Specifies when the suppression should be applied
:param 'SuppressionScheduleResponseArgs' schedule: suppression schedule configuration
"""
pulumi.set(__self__, "recurrence_type", recurrence_type)
if schedule is not None:
pulumi.set(__self__, "schedule", schedule)
@property
@pulumi.getter(name="recurrenceType")
def recurrence_type(self) -> str:
"""
Specifies when the suppression should be applied
"""
return pulumi.get(self, "recurrence_type")
@property
@pulumi.getter
def schedule(self) -> Optional['outputs.SuppressionScheduleResponse']:
"""
suppression schedule configuration
"""
return pulumi.get(self, "schedule")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SuppressionResponse(dict):
"""
Action rule with suppression configuration
"""
def __init__(__self__, *,
created_at: str,
created_by: str,
last_modified_at: str,
last_modified_by: str,
suppression_config: 'outputs.SuppressionConfigResponse',
type: str,
conditions: Optional['outputs.ConditionsResponse'] = None,
description: Optional[str] = None,
scope: Optional['outputs.ScopeResponse'] = None,
status: Optional[str] = None):
"""
Action rule with suppression configuration
:param str created_at: Creation time of action rule. Date-Time in ISO-8601 format.
:param str created_by: Created by user name.
:param str last_modified_at: Last updated time of action rule. Date-Time in ISO-8601 format.
:param str last_modified_by: Last modified by user name.
:param 'SuppressionConfigResponseArgs' suppression_config: suppression configuration for the action rule
:param str type: Indicates type of action rule
Expected value is 'Suppression'.
:param 'ConditionsResponseArgs' conditions: conditions on which alerts will be filtered
:param str description: Description of action rule
:param 'ScopeResponseArgs' scope: scope on which action rule will apply
:param str status: Indicates if the given action rule is enabled or disabled
"""
pulumi.set(__self__, "created_at", created_at)
pulumi.set(__self__, "created_by", created_by)
pulumi.set(__self__, "last_modified_at", last_modified_at)
pulumi.set(__self__, "last_modified_by", last_modified_by)
pulumi.set(__self__, "suppression_config", suppression_config)
pulumi.set(__self__, "type", 'Suppression')
if conditions is not None:
pulumi.set(__self__, "conditions", conditions)
if description is not None:
pulumi.set(__self__, "description", description)
if scope is not None:
pulumi.set(__self__, "scope", scope)
if status is not None:
pulumi.set(__self__, "status", status)
@property
@pulumi.getter(name="createdAt")
def created_at(self) -> str:
"""
Creation time of action rule. Date-Time in ISO-8601 format.
"""
return pulumi.get(self, "created_at")
@property
@pulumi.getter(name="createdBy")
def created_by(self) -> str:
"""
Created by user name.
"""
return pulumi.get(self, "created_by")
@property
@pulumi.getter(name="lastModifiedAt")
def last_modified_at(self) -> str:
"""
Last updated time of action rule. Date-Time in ISO-8601 format.
"""
return pulumi.get(self, "last_modified_at")
@property
@pulumi.getter(name="lastModifiedBy")
def last_modified_by(self) -> str:
"""
Last modified by user name.
"""
return pulumi.get(self, "last_modified_by")
@property
@pulumi.getter(name="suppressionConfig")
def suppression_config(self) -> 'outputs.SuppressionConfigResponse':
"""
suppression configuration for the action rule
"""
return pulumi.get(self, "suppression_config")
@property
@pulumi.getter
def type(self) -> str:
"""
Indicates type of action rule
Expected value is 'Suppression'.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter
def conditions(self) -> Optional['outputs.ConditionsResponse']:
"""
conditions on which alerts will be filtered
"""
return pulumi.get(self, "conditions")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
Description of action rule
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def scope(self) -> Optional['outputs.ScopeResponse']:
"""
scope on which action rule will apply
"""
return pulumi.get(self, "scope")
@property
@pulumi.getter
def status(self) -> Optional[str]:
"""
Indicates if the given action rule is enabled or disabled
"""
return pulumi.get(self, "status")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SuppressionScheduleResponse(dict):
"""
Schedule for a given suppression configuration.
"""
def __init__(__self__, *,
end_date: Optional[str] = None,
end_time: Optional[str] = None,
recurrence_values: Optional[Sequence[int]] = None,
start_date: Optional[str] = None,
start_time: Optional[str] = None):
"""
Schedule for a given suppression configuration.
:param str end_date: End date for suppression
        :param str end_time: End time for suppression
:param Sequence[int] recurrence_values: Specifies the values for recurrence pattern
:param str start_date: Start date for suppression
:param str start_time: Start time for suppression
"""
if end_date is not None:
pulumi.set(__self__, "end_date", end_date)
if end_time is not None:
pulumi.set(__self__, "end_time", end_time)
if recurrence_values is not None:
pulumi.set(__self__, "recurrence_values", recurrence_values)
if start_date is not None:
pulumi.set(__self__, "start_date", start_date)
if start_time is not None:
pulumi.set(__self__, "start_time", start_time)
@property
@pulumi.getter(name="endDate")
def end_date(self) -> Optional[str]:
"""
End date for suppression
"""
return pulumi.get(self, "end_date")
@property
@pulumi.getter(name="endTime")
def end_time(self) -> Optional[str]:
"""
        End time for suppression
"""
return pulumi.get(self, "end_time")
@property
@pulumi.getter(name="recurrenceValues")
def recurrence_values(self) -> Optional[Sequence[int]]:
"""
Specifies the values for recurrence pattern
"""
return pulumi.get(self, "recurrence_values")
@property
@pulumi.getter(name="startDate")
def start_date(self) -> Optional[str]:
"""
Start date for suppression
"""
return pulumi.get(self, "start_date")
@property
@pulumi.getter(name="startTime")
def start_time(self) -> Optional[str]:
"""
Start time for suppression
"""
return pulumi.get(self, "start_time")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
| [
"[email protected]"
] | |
5644e5c0b6aa0dab0c7749c8574c9a70eebc075c | 400b0cb1f25cc2fbe80a3037c06102f40c4d2d89 | /string33.py | 575f57fac37b82880d7965f8c50047498875f63b | [] | no_license | Prithamprince/Python-programming | 4c747d306829de552e3b0c6af67cfe534a2dc2e1 | 79a0953084a01978e75d2be4db0d35ba1cf29259 | refs/heads/master | 2020-05-30T06:29:26.134906 | 2019-12-13T06:33:49 | 2019-12-13T06:33:49 | 189,580,341 | 0 | 4 | null | null | null | null | UTF-8 | Python | false | false | 226 | py | from itertools import combinations
p=input()
q=0
l=list(combinations(p,len(p)-1))
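# Added note: each length-(len(p)-1) combination is the input with exactly one
# character removed, so the loop below answers "can deleting one character make
# the string a palindrome?"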
for i in range(len(l)):
    if(l[i]==l[i][::-1]):
print("YES")
q=1
break
if(q==0):
print("NO")
| [
"[email protected]"
] | |
015fd34248887879e5b092b00ab71bd4a61b4d02 | 8a9ba5e9e8c9f3d8e05b6840f1c17d526344e6d2 | /src/reia.py | 7ada168207031f9a5c85d73b90325662fbde0a50 | [
"MIT"
] | permissive | maanavshah/remote-intelligent-assistant | fe1e2bcb6d43345553194c442d4676b3137e0348 | 65ea7287d0ca2dd98a376bbadc81a5093b9b6046 | refs/heads/master | 2021-06-13T20:20:05.622634 | 2019-12-20T12:54:00 | 2019-12-20T12:54:00 | 142,580,543 | 4 | 0 | MIT | 2021-03-25T21:58:30 | 2018-07-27T13:27:56 | Python | UTF-8 | Python | false | false | 5,369 | py | import yaml
import sys
import random
import nltk
import operator
import jellyfish as jf
import json
import requests
import os
import time
import signal
import subprocess
from nltk.tag import StanfordPOSTagger
from textblob.classifiers import NaiveBayesClassifier
from execute import construct_command
from feedback import get_user_feedback
import numpy as np
from sklearn.pipeline import Pipeline
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.svm import LinearSVC
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.multiclass import OneVsRestClassifier
from sklearn import preprocessing
def signal_handler(signal, frame):
print ('Thank You!')
sys.exit(0)
signal.signal(signal.SIGINT, signal_handler)
my_path = os.path.abspath(os.path.dirname(__file__))
CONFIG_PATH = os.path.join(my_path, "../config/config.yml")
MAPPING_PATH = os.path.join(my_path, "../data/mapping.json")
TRAINDATA_PATH = os.path.join(my_path, "../data/traindata.txt")
LABEL_PATH = os.path.join(my_path, "../data/")
sys.path.insert(0, LABEL_PATH)
import trainlabel
with open(CONFIG_PATH,"r") as config_file:
config = yaml.load(config_file)
os.environ['STANFORD_MODELS'] = config['tagger']['path_to_models']
exec_command = config['preferences']['execute']
def get_username(user_id):
payload = {'token': config['slack']['slack_token'], 'user': user_id}
r = requests.post(config['slack']['user_info'], params=payload)
return r.json()['user']['name']
def read_message():
payload = {'token': config['slack']['slack_token'], 'channel': config['slack']['channel'] , 'count': '1'}
r = requests.get(config['slack']['get_url'], params=payload)
message = r.json()['messages'][0]['text']
ts = r.json()['messages'][0]['ts']
data = r.json()['messages'][0]
if 'user' not in data:
user = r.json()['messages'][0]['username']
else:
user = r.json()['messages'][0]['user']
return(message,ts,user)
def post_message(message):
payload = {'token': config['slack']['slack_token'], 'channel': config['slack']['channel'] , 'text': message, 'username':config['slack']['username']}
r = requests.post(config['slack']['post_url'], params=payload)
return r
def classify(text):
X_train = np.array([line.rstrip('\n') for line in open(TRAINDATA_PATH)])
y_train_text = trainlabel.y_train_text
X_test = np.array([text])
target_names = ['file', 'folder', 'network', 'system', 'general']
lb = preprocessing.MultiLabelBinarizer()
Y = lb.fit_transform(y_train_text)
classifier = Pipeline([
('vectorizer', CountVectorizer()),
('tfidf', TfidfTransformer()),
('clf', OneVsRestClassifier(LinearSVC()))])
classifier.fit(X_train, Y)
predicted = classifier.predict(X_test)
all_labels = lb.inverse_transform(predicted)
for item, labels in zip(X_test, all_labels):
return (', '.join(labels))
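# Added note on classify(): the pipeline is bag-of-words counts -> TF-IDF weighting ->
# one-vs-rest LinearSVC, and MultiLabelBinarizer lets a single command map to several
# of the labels in trainlabel.y_train_text (e.g. both 'file' and 'system'), so the
# returned value is a comma-joined label string for the first (and only) test sample.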
def suggestions(suggest_list):
suggest = (sorted(suggest_list,reverse=True)[:5])
return suggest
def consume_message():
cmd = "sed -i -e \"1d\" /home/maanav/REIA/mqueue.txt"
proc = subprocess.Popen([cmd], stdout=subprocess.PIPE, shell=True)
(out, err) = proc.communicate()
def call_reia():
while(True):
max_score = 0.1
map_val = ""
with open('/home/maanav/REIA/mqueue.txt', 'r') as f:
first_line = f.readline()
while first_line == "":
time.sleep(1)
call_reia()
print('-----------------------')
user_input = first_line.split(' ', 1)[1]
user_name = get_username(first_line.split(' ', 1)[0])
suggest_list = []
suggest_message = ""
#prev_ts = ts
print("\nINPUT = ")
print(user_input)
label = classify(user_input)
if label == "":
# post_message("Sorry, I could not understand. Please rephrase and try again.")
with open("/home/maanav/REIA/src/user.txt", "a") as output_file:
output_file.write("Incorrectly mapped ::User-Input = " + user_input)
consume_message()
continue
print("Classified as : "+str(label))
tokens = nltk.word_tokenize(user_input)
print(tokens)
st = StanfordPOSTagger(config['tagger']['model'],path_to_jar=config['tagger']['path'])
stanford_tag = st.tag(user_input.split())
print("Tags")
print(stanford_tag)
with open(MAPPING_PATH,'r') as data_file:
data = json.load(data_file)
for i in data[label]:
dist = jf.jaro_distance(str(user_input),str(i))
suggest_list.append(tuple((dist,i)))
print(dist)
if(dist > max_score):
max_score = dist
map_val = i
if max_score < config['preferences']['similarity_threshold']:
# post_message("Sorry, I could not understand. Please rephrase and try again.")
with open("/home/maanav/REIA/src/user.txt", "a") as output_file:
output_file.write("Incorrectly mapped ::User-Input = " + user_input)
consume_message()
continue
if config['preferences']['suggestions'] == True:
suggest = suggestions(suggest_list)
post_message("Did you mean :")
for i in suggest:
suggest_message += (str(i[1])+"\n")
post_message(suggest_message)
continue
print("\nMapped to : "+map_val)
with open("/home/maanav/REIA/src/user.txt", "a") as output_file:
output_file.write("correctly mapped to : " + map_val + " User-Input = " + user_input)
#post_message(map_val)
construct_command(user_input,label,tokens,map_val,stanford_tag,exec_command,user_name)
#call('sed -i -e "1d " REIA/mqueue.txt')
consume_message()
#print(response)
print("Starting...")
call_reia()
| [
"[email protected]"
] | |
3a068e2a6864d85f641af5e0ebd662ca44331292 | 07a1088bcec25cdf7e4027abc5a8dc83eb37ffb4 | /fabrik/ext/nginx.py | ccc6e5e8f0774d418cd35226796dadcf056ebc96 | [
"MIT"
] | permissive | Frojd/Fabrik | 7e00bb66761c552da9d70cc36f3ff0108bf7a481 | 9f2edbba97a7fd236b72a9b3010f6e912ab5c001 | refs/heads/master | 2020-04-06T04:39:31.445843 | 2018-04-16T06:54:21 | 2018-04-16T06:54:21 | 25,035,502 | 12 | 0 | null | null | null | null | UTF-8 | Python | false | false | 231 | py | # -*- coding: utf-8 -*-
"""
fabrik.ext.nginx
----------------------
Methods for handling nginx
"""
from fabric.state import env
def restart():
env.run("service nginx restart")
def reload():
env.run("nginx -s reload")
| [
"[email protected]"
] | |
1c89e34f2a701a441c1be1d145087c705e02ff86 | f2171e2f2c78d616a381b3308d13a600d687587f | /x.Machine Learning Foundation/NumPy and Pandas Part 1/numpy_index_array.py | fce265f622df7db4d6f5e57be7428a2167fd3916 | [] | no_license | vinkrish/ml-jupyter-notebook | bda01343118869bd2bfb44f3c3122853834d314a | ef5d05512b8387d7a3e494f024416f6ca7336827 | refs/heads/master | 2021-06-09T00:53:51.638551 | 2021-05-08T15:13:51 | 2021-05-08T15:13:51 | 168,104,038 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,921 | py | import numpy as np
# Change False to True for each block of code to see what it does
# Using index arrays
if False:
a = np.array([1, 2, 3, 4])
b = np.array([True, True, False, False])
print a[b]
print a[np.array([True, False, True, False])]
# Creating the index array using vectorized operations
if False:
a = np.array([1, 2, 3, 2, 1])
b = (a >= 2)
print a[b]
print a[a >= 2]
# Creating the index array using vectorized operations on another array
if False:
a = np.array([1, 2, 3, 4, 5])
b = np.array([1, 2, 3, 2, 1])
print b == 2
print a[b == 2]
def mean_time_for_paid_students(time_spent, days_to_cancel):
'''
Fill in this function to calculate the mean time spent in the classroom
for students who stayed enrolled at least (greater than or equal to) 7 days.
Unlike in Lesson 1, you can assume that days_to_cancel will contain only
integers (there are no students who have not canceled yet).
The arguments are NumPy arrays. time_spent contains the amount of time spent
in the classroom for each student, and days_to_cancel contains the number
    of days until each student canceled. The data is given in the same order
in both arrays.
'''
is_continued = days_to_cancel >= 7
paid_time = time_spent[is_continued]
return paid_time.mean()
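# Added note: days_to_cancel >= 7 builds a boolean index array, exactly like the
# a[b] examples above, so time_spent[is_continued] keeps only the rows where the
# mask is True before taking the mean.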
# Time spent in the classroom in the first week for 20 students
time_spent = np.array([
12.89697233, 0. , 64.55043217, 0. ,
24.2315615 , 39.991625 , 0. , 0. ,
147.20683783, 0. , 0. , 0. ,
45.18261617, 157.60454283, 133.2434615 , 52.85000767,
0. , 54.9204785 , 26.78142417, 0.
])
# Days to cancel for 20 students
days_to_cancel = np.array([
4, 5, 37, 3, 12, 4, 35, 38, 5, 37, 3, 3, 68,
38, 98, 2, 249, 2, 127, 35
])
| [
"[email protected]"
] | |
1f4bd449aba35de17062609461614b820c3a18f9 | eddbf9518e7384f0e9a1d9e19cbe74855c3f24bd | /2017011066LiShaoFei/First.py | 7f3f5ef75bfa8561246cc72cba9cfb0ca45f5650 | [] | no_license | wanghan79/2019_Python | 9d2391d799efd9545b2afb3565bc5c6d542d1d86 | f856409af92af3990773966d937d58d9d1cade04 | refs/heads/master | 2020-05-05T12:54:30.921361 | 2019-07-20T09:50:03 | 2019-07-20T09:50:03 | 180,050,522 | 11 | 14 | null | 2019-07-15T15:00:03 | 2019-04-08T01:59:24 | Python | UTF-8 | Python | false | false | 1,119 | py | import numpy as np
import random
import string
def random_list( start, stop, length):
if length >= 0:
length = int(length)
start, stop = (int(start), int(stop)) if start <= stop else (int(stop), int(start))
random_list = []
for i in range(length):
random_list.append(random.randint(start, stop))
return random_list
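# Illustrative call (added note): random_list(0, 100, 10) returns 10 random ints
# drawn from [0, 100]; swapped bounds are re-ordered first, e.g. random_list(9, 3, 2)
# draws from [3, 9].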
class dataGenerate:
def dGen(self, size=100000):
for i in range(size):
keys = random_list(0, 100, 10)
values = random_list(0, 100, 10)
dictionary = dict(zip(keys, values))
numx = np.random.randint(0, 1000)
numy = np.random.randint(0, 1000)
salt = ''.join(random.sample(string.ascii_letters + string.digits, 8)) # Generate a random string
data = {'string': salt, 'intX': numx, 'intY': numy, 'float': np.random.uniform(0, 1000000), 'keys':keys, 'values':values}
yield data
if __name__ == '__main__':
f = open("output.txt", "w")
for i in dataGenerate().dGen():
s=str(i)
f.write(s+'\n')
f.close() | [
"[email protected]"
] | |
7d69b0a585408e145f7c50fc555cfe9dfb7cb57f | 35cb7a8a22fdd3932b63c89b17f587205bd00fec | /apps/excursao/migrations/0002_excursao_is_internacional.py | 3f9637243e6f3af5d03ec7553d613c3b439ba4a1 | [] | no_license | rcoutelo/viajecomdarcy-web | debc24ec44e733c12257f3e89f3424ab7b3ee1f4 | 2ab2db407523299a58423f058c1f74231b15d617 | refs/heads/master | 2021-03-27T14:41:34.303463 | 2017-06-19T15:14:54 | 2017-06-19T15:14:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 454 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.10 on 2017-05-19 19:33
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('excursao', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='excursao',
name='is_internacional',
field=models.BooleanField(default=False),
),
]
| [
"[email protected]"
] | |
a64f0f99c0ebcacedc4e8efb592d1f75480fcd7c | 0e25329bb101eb7280a34f650f9bd66ed002bfc8 | /tests/functional/test_misc.py | 5da0c776cf8bde4c5a1a3dc58331fff08885b9f3 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | enthought/enstaller | 2a2d433a3b83bcf9b4e3eaad59d952c531f36566 | 9c9f1a7ce58358b89352f4d82b15f51fbbdffe82 | refs/heads/master | 2023-08-08T02:30:26.990190 | 2016-01-22T17:51:35 | 2016-01-22T17:51:35 | 17,997,072 | 3 | 4 | null | 2017-01-13T19:22:10 | 2014-03-21T23:03:58 | Python | UTF-8 | Python | false | false | 7,200 | py | import json
import os.path
import platform
import shutil
import sys
import tempfile
import textwrap
import mock
import responses
from enstaller import __version__
from enstaller.config import Configuration
from enstaller.history import History
from enstaller.main import main_noexc
from enstaller.utils import PY_VER
from enstaller.tests.common import authenticated_config, mock_index, mock_print, R_JSON_AUTH_RESP
if sys.version_info[0] == 2:
import unittest2 as unittest
else:
import unittest
class TestMisc(unittest.TestCase):
@authenticated_config
@responses.activate
def test_print_config(self):
self.maxDiff = None
# Given
config = Configuration()
config.update(prefix=sys.prefix)
template = textwrap.dedent("""\
Python version: {pyver}
enstaller version: {version}
sys.prefix: {sys_prefix}
platform: {platform}
architecture: {arch}
use_webservice: True
settings:
prefix = {prefix}
repository_cache = {repository_cache}
noapp = False
proxy = None
You are logged in as 'dummy' (David Cournapeau).
Subscription level: Canopy / EPD Basic or above
""")
r_output = template.format(pyver=PY_VER,
sys_prefix=os.path.normpath(sys.prefix),
version=__version__,
platform=platform.platform(),
arch=platform.architecture()[0],
prefix=os.path.normpath(config.prefix),
repository_cache=config.repository_cache)
responses.add(responses.GET,
"https://api.enthought.com/accounts/user/info/",
body=json.dumps(R_JSON_AUTH_RESP))
# When
with self.assertRaises(SystemExit) as e:
with mock_print() as m:
main_noexc(["--config"])
# Then
self.assertEqual(e.exception.code, 0)
self.assertMultiLineEqual(m.value, r_output)
@authenticated_config
def test_list_bare(self):
# Given
sys_prefix = os.path.normpath(sys.prefix)
# When
with mock.patch("enstaller.cli.commands.print_installed"):
with self.assertRaises(SystemExit) as e:
with mock_print() as m:
main_noexc(["--list"])
# Then
self.assertEqual(e.exception.code, 0)
self.assertMultiLineEqual(m.value, "prefix: {0}\n\n".format(sys_prefix))
@authenticated_config
def test_log(self):
with mock.patch("enstaller.cli.commands.History",
spec=History) as mocked_history:
with self.assertRaises(SystemExit) as e:
with mock_print() as m:
main_noexc(["--log"])
self.assertEqual(e.exception.code, 0)
self.assertTrue(mocked_history.return_value.print_log.called)
self.assertMultiLineEqual(m.value, "")
@authenticated_config
def test_freeze(self):
installed_requirements = ["dummy 1.0.0-1", "another_dummy 1.0.1-1"]
with mock.patch("enstaller.cli.commands.get_freeze_list",
return_value=installed_requirements):
with self.assertRaises(SystemExit) as e:
with mock_print() as m:
main_noexc(["--freeze"])
self.assertEqual(e.exception.code, 0)
self.assertMultiLineEqual(m.value,
"dummy 1.0.0-1\nanother_dummy 1.0.1-1\n")
@mock_index({
"fubar-1.0.0-1.egg": {
"available": True,
"build": 1,
"md5": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"mtime": 0.0,
"name": "fubar",
"packages": [],
"product": "nono",
"python": PY_VER,
"size": 0,
"type": "egg",
"version": "1.0.0"
}}, "https://acme.com")
def test_insecure_flag(self):
# Given
responses.add(responses.GET,
"https://acme.com/accounts/user/info/",
body=json.dumps(R_JSON_AUTH_RESP))
config = Configuration()
config.update(store_url="https://acme.com")
config.update(auth=("nono", "le gros robot"))
# When
with self.assertRaises(SystemExit) as e:
with mock.patch("enstaller.main._ensure_config_or_die",
return_value=config):
with mock.patch(
"enstaller.main.ensure_authenticated_config"
):
main_noexc(["-s", "fubar"])
# Then
self.assertEqual(e.exception.code, 0)
# When
with self.assertRaises(SystemExit) as e:
with mock.patch("enstaller.main._ensure_config_or_die",
return_value=config):
with mock.patch(
"enstaller.main.ensure_authenticated_config"
):
main_noexc(["-ks", "fubar"])
# Then
self.assertEqual(e.exception.code, 0)
class TestPrefix(unittest.TestCase):
def setUp(self):
self.prefix = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.prefix)
@authenticated_config
@mock_index({
"fubar-1.0.0-1.egg": {
"available": True,
"build": 1,
"md5": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"mtime": 0.0,
"name": "fubar",
"packages": [],
"product": "nono",
"python": PY_VER,
"size": 0,
"type": "egg",
"version": "1.0.0"
}}, "https://api.enthought.com")
def test_simple(self):
self.maxDiff = None
# Given
responses.add(responses.GET,
"https://api.enthought.com/accounts/user/info/",
body=json.dumps(R_JSON_AUTH_RESP))
template = textwrap.dedent("""\
Python version: {pyver}
enstaller version: {version}
sys.prefix: {sys_prefix}
platform: {platform}
architecture: {arch}
use_webservice: True
settings:
prefix = {prefix}
repository_cache = {repository_cache}
noapp = False
proxy = None
You are logged in as 'dummy' (David Cournapeau).
Subscription level: Canopy / EPD Basic or above
""")
r_output = template.format(pyver=PY_VER,
sys_prefix=os.path.normpath(sys.prefix),
version=__version__,
platform=platform.platform(),
arch=platform.architecture()[0],
prefix=os.path.normpath(self.prefix),
repository_cache=os.path.join(self.prefix,
"LOCAL-REPO"))
# When
with self.assertRaises(SystemExit):
with mock_print() as m:
main_noexc(["--config", "--prefix={0}".format(self.prefix)])
# Then
self.assertEqual(m.value, r_output)
| [
"[email protected]"
] | |
6ffbc1fdd0bb94c69f961871e05b86e073a589d5 | e0ed932fc2e4edb953cc4e423362dabc19083008 | /python/sanic_learn/docs/learn_conf.py | 3b279281ff746c4e709cfdd6e544322a6b2da803 | [] | no_license | glfAdd/note | 90baee45003ac3998d898dcfbc618caa28f33b74 | 19a9aff61450be25904bff0fe672f660d49d90ff | refs/heads/main | 2023-05-27T13:28:36.092352 | 2023-05-24T03:35:58 | 2023-05-24T03:35:58 | 240,066,208 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 348 | py | """ ============================ config
The config object implements both __getattr__ and __setattr__
Method 1: set attributes directly
app = Sanic('myapp')
app.config.DB_NAME = 'appdb'
app.config.DB_USER = 'appuser'
Method 2: use update
db_settings = {
'DB_HOST': 'localhost',
'DB_NAME': 'appdb',
'DB_USER': 'appuser'
}
app.config.update(db_settings)
"""
| [
"[email protected]"
] | |
e27f61c97808942556f956f9422de14a5bd4a641 | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/5/lFp.py | af044a654924c11751ea171e241dc87980eeaa4e | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'lFP':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1]) | [
"[email protected]"
] | |
bc7e6918b6630b409153c0d84d6feefc8425c2b6 | fd67592b2338105e0cd0b3503552d188b814ad95 | /test/test_models/test_ping.py | 61773ff5dfcc00185b57a32a4230151250581dbc | [] | no_license | E-goi/sdk-python | 175575fcd50bd5ad426b33c78bdeb08d979485b7 | 5cba50a46e1d288b5038d18be12af119211e5b9f | refs/heads/master | 2023-04-29T20:36:02.314712 | 2023-04-18T07:42:46 | 2023-04-18T07:42:46 | 232,095,340 | 5 | 2 | null | null | null | null | UTF-8 | Python | false | false | 5,002 | py | # coding: utf-8
"""
APIv3 (New)
# Introduction This is our new version of API. We invite you to start using it and give us your feedback # Getting Started E-goi can be integrated with many environments and programming languages via our REST API. We've created a developer focused portal to give your organization a clear and quick overview of how to integrate with E-goi. The developer portal focuses on scenarios for integration and flow of events. We recommend familiarizing yourself with all of the content in the developer portal, before start using our rest API. The E-goi APIv3 is served over HTTPS. To ensure data privacy, unencrypted HTTP is not supported. Request data is passed to the API by POSTing JSON objects to the API endpoints with the appropriate parameters. BaseURL = api.egoiapp.com # RESTful Services This API supports 5 HTTP methods: * <b>GET</b>: The HTTP GET method is used to **read** (or retrieve) a representation of a resource. * <b>POST</b>: The POST verb is most-often utilized to **create** new resources. * <b>PATCH</b>: PATCH is used for **modify** capabilities. The PATCH request only needs to contain the changes to the resource, not the complete resource * <b>PUT</b>: PUT is most-often utilized for **update** capabilities, PUT-ing to a known resource URI with the request body containing the newly-updated representation of the original resource. * <b>DELETE</b>: DELETE is pretty easy to understand. It is used to **delete** a resource identified by a URI. # Authentication We use a custom authentication method, you will need a apikey that you can find in your account settings. Below you will see a curl example to get your account information: #!/bin/bash curl -X GET 'https://api.egoiapp.com/my-account' \\ -H 'accept: application/json' \\ -H 'Apikey: <YOUR_APY_KEY>' Here you can see a curl Post example with authentication: #!/bin/bash curl -X POST 'http://api.egoiapp.com/tags' \\ -H 'accept: application/json' \\ -H 'Apikey: <YOUR_APY_KEY>' \\ -H 'Content-Type: application/json' \\ -d '{`name`:`Your custom tag`,`color`:`#FFFFFF`}' # SDK Get started quickly with E-goi with our integration tools. Our SDK is a modern open source library that makes it easy to integrate your application with E-goi services. * <a href='https://github.com/E-goi/sdk-java'>Java</a> * <a href='https://github.com/E-goi/sdk-php'>PHP</a> * <a href='https://github.com/E-goi/sdk-python'>Python</a> * <a href='https://github.com/E-goi/sdk-ruby'>Ruby</a> * <a href='https://github.com/E-goi/sdk-javascript'>Javascript</a> * <a href='https://github.com/E-goi/sdk-csharp'>C#</a> # Stream Limits Stream limits are security mesures we have to make sure our API have a fair use policy, for this reason, any request that creates or modifies data (**POST**, **PATCH** and **PUT**) is limited to a maximum of **20MB** of content length. If you arrive to this limit in one of your request, you'll receive a HTTP code **413 (Request Entity Too Large)** and the request will be ignored. To avoid this error in importation's requests, it's advised the request's division in batches that have each one less than 20MB. # Timeouts Timeouts set a maximum waiting time on a request's response. Our API, sets a default timeout for each request and when breached, you'll receive an HTTP **408 (Request Timeout)** error code. You should take into consideration that response times can vary widely based on the complexity of the request, amount of data being analyzed, and the load on the system and workspace at the time of the query. 
When dealing with such errors, you should first attempt to reduce the complexity and amount of data under analysis, and only then, if problems are still occurring ask for support. For all these reasons, the default timeout for each request is **10 Seconds** and any request that creates or modifies data (**POST**, **PATCH** and **PUT**) will have a timeout of **60 Seconds**. Specific timeouts may exist for specific requests, these can be found in the request's documentation. # Callbacks A callback is an asynchronous API request that originates from the API server and is sent to the client in response to a previous request sent by that client. The API will make a **POST** request to the address defined in the URL with the information regarding the event of interest and share data related to that event. <a href='/usecases/callbacks/' target='_blank'>[Go to callbacks documentation]</a> ***Note:*** Only http or https protocols are supported in the Url parameter. <security-definitions/> # noqa: E501
The version of the OpenAPI document: 3.0.0
Generated by: https://openapi-generator.tech
"""
import unittest
import egoi_api
from egoi_api.model.ping import Ping
from egoi_api import configuration
class TestPing(unittest.TestCase):
"""Ping unit test stubs"""
_configuration = configuration.Configuration()
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
8969006cf82f736e5a60bd8c29710cd5a996c994 | c8f5d69d21ac4df40d79a811dea2e3ad82fb5e04 | /src/csv2plot.py | a731eff829805f1ba98f1ab6013855d44df4bc50 | [] | no_license | webclinic017/usstock | e71ab18534fd3afc05ab2452578821584750e2b9 | c724f00bc1c5d2a41ee58e037ba0b1b3f0904f70 | refs/heads/master | 2023-08-15T05:22:14.275202 | 2021-10-14T21:19:53 | 2021-10-14T21:19:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,608 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Description: plot time series from a csv file
Usage of:
csv2plot.py file --sep=DELIMITER
Example:
# FROM data file
python csv2plot.py csv2plot.dat
# OR (near realtime data)
iex_types_batch.py --types=chart --range=1d --no_database_save AAPL | csv2plot.py - --columns=close,epochs --xaxis=epochs --title=Apple
# OR (daily data)
iex_types_batch.py --types=chart --range=3m --no_database_save AAPL | csv2plot.py - --columns=open,close,pbdate --xaxis=pbdate --title=Apple
# OR (daily return since inception )
iex_types_batch.py --types=chart --range=3m --no_database_save AAPL | csv2plot.py - --columns=open,close,pbdate --xaxis=pbdate --title=Apple --return_since_inception
# OR (pivot data)
printf "select m.label as ticker,p.close as price,p.pbdate from prc_hist p,mapping_series_label m where p.name in ('^GSPC','^TWII','000001.SS','^SOX','^DJI') and p.pbdate>20170101 and p.name=m.series order by m.label,p.pbdate" | psql.sh -d ara | grep -v rows | python2 csv2plot.py --pivot_group=ticker --pivot_value=price --title='Market Overview 2018-05-25' --interpolate --return_since_inception -
# OR (pivot data and near realtime per minute)
iex_types_batch.py --types=chart --range=1d --no_database_save AAPL XLK SPY| csv2plot.py - --columns=ticker,close,epochs --xaxis=epochs --pivot_group=ticker --pivot_value=close --title='Market Closing Overview' --interpolate --return_since_inception --trendline
# OR (pivot data with minute data)
python csv2plot.py AAPL_XLK_SPY.dat --columns=ticker,close,epochs --xaxis=epochs --pivot_group=ticker --pivot_value=close --title='Market Closing Overview' --interpolate --return_since_inception --trendline
# OR (stock data with --src)
csv2plot.py IBM --src=iex --columns=close,open,pbdate --days=90
# OR (fred data with --src)
csv2plot.py DGS2 --src=fred --columns=close,pbdate
# OR (stock data with --src and candlestick graph)
csv2plot.py IBM --src=iex --columns=close,open,high,low,volume,pbdate --title="IBM OHLC" --days=90 --ohlc
# OR (minute data and candlestick graph)
iex_types_batch.py --types=chart --range=1d --no_database_save --output=csv AAPL| csv2plot.py - --columns=close,open,high,low,volume,epochs,ticker --ohlc --title="Intraday AAPL OHLC" --xaxis=epochs --trendline
# OR (minute data and candlestick Combo graph)
iex_types_batch.py --types=chart --range=1d --no_database_save --output=csv AAPL| csv2plot.py - --columns=ticker,close,open,high,low,volume,epochs --ohlc_combo --title="Intraday AAPL" --xaxis=epochs --trendline
Note: return_since_inception will use $1 as the initial investment if the initial is less than $1
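 For example (illustrative numbers only): a series whose first value is 0.5 is treated as starting from $1, so a later value of 1.25 is reported as a 25% return; a series starting at 20 keeps 20 as its base, so a later value of 25 is also reported as +25%.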
Last mod., Sat Oct 27 20:50:18 EDT 2018
"""
import sys
from optparse import OptionParser
from datetime import datetime
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import matplotlib.image as mimage
import matplotlib.ticker as mticker
import pandas as pd
from scipy.interpolate import interp1d
#font_name = "AR PL UKai CN"
#matplotlib.rcParams['font.family'] = font_name
#matplotlib.rcParams['axes.unicode_minus']=False # in case minus sign is shown as box
import matplotlib.font_manager as mfm
#font_path = "/usr/share/fonts/truetype/arphic/ukai.ttc"
font_path = "/usr/share/fonts/truetype/arphic/uming.ttc"
#font_path = "/usr/share/fonts/truetype/droid/DroidSansFallbackFull.ttf" #Droid Sans Fallback
prop = mfm.FontProperties(fname=font_path)
#prop = mfm.FontProperties()
plt.style.use('dark_background')
if sys.version_info.major == 2:
reload(sys)
sys.setdefaultencoding('utf8')
from cStringIO import StringIO
else:
from io import StringIO
#----------------------------------------------------------------#
def subDict(myDict,kyLst,reverseTF=False):
if reverseTF is True: # invert-match, select non-matching [kyLst] keys
return { ky:myDict[ky] for ky in myDict.keys() if ky not in kyLst }
else:
return { ky:myDict[ky] for ky in myDict.keys() if ky in kyLst }
def ymd_parser(x,fmt='%Y%m%d'): return datetime.strptime(str(x),fmt)
def epoch_parser(x,s=1000): return datetime.fromtimestamp(int(x/s))
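# Illustrative examples for the two parsers above (input values are hypothetical):
#   ymd_parser(20180525)        -> datetime(2018, 5, 25, 0, 0)
#   epoch_parser(1527206400000) -> local datetime for Unix epoch 1527206400 (input is in milliseconds)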
def extrapolate_series(yo):
yg=yo.dropna()
fn = interp1d(map(int,yg.index.values), yg.values, fill_value='extrapolate')
return fn(map(int,yo.index.values))
def get_csvdata(args,sep='|',src=None,days=730,start=None,end=None,columns=None,hdrLst=None):
"""
Get data in datafram with selected [columns]
"""
if isinstance(args,pd.DataFrame):
df = args
if columns is not None and df.size > 0:
df = df[ list(set(df.columns) & set(columns.split(','))) ]
if hdrLst is not None:
xLst,yLst = hdrLst.split('=')
xyD = dict(zip(xLst.split(','),yLst.split(',')))
df.rename(columns=xyD,inplace=True)
return df
if len(args)<1:
return None
filename=args[0]
if filename=='-':
df=pd.read_csv(sys.stdin,sep=sep)
elif src is not None:
from _alan_calc import pull_stock_data
df = pull_stock_data(filename,days=days,src=src,start=start,end=end)
else:
df = pd.read_csv(filename,sep=sep)
if df.size < 1:
print >> sys.stderr, "**ERROR: Data not found!"
return {}
if columns is not None:
df = df[ list(set(df.columns) & set(columns.split(','))) ]
df.dropna(inplace=True)
if hdrLst is not None:
xLst,yLst = hdrLst.split('=')
xyD = dict(zip(xLst.split(','),yLst.split(',')))
df.rename(columns=xyD,inplace=True)
return df
def dataj2ts(ts,df,opts=None):
from _alan_str import jj_fmt
import ast
dd = subDict(opts,['j2ts'],reverseTF=True)
if df.size>0 and ts is not None and len(ts)>1:
		dd.update(f=df)  # add the dataframe to the jinja2 template context
return jj_fmt(ts,dd)
else:
return ''
def run_csv2plot(args,opts=None,optx=None):
"""
plot time series data from csv file
"""
#- Set input parameters
if opts is None:
opts, _ = opt_csv2plot([])
if optx is not None:
opts.update(optx)
for ky,va in opts.items():
exec("{}=va".format(ky))
#- Get data in datafram with selected [columns]
df = get_csvdata(args,sep=sep,src=src,days=days,start=start,end=end,columns=columns,hdrLst=hdrLst)
if df is None or len(df)<1 or df.size<1:
return None
if debugTF is True:
print >> sys.stderr, df.head()
#- Use backend to 'tkAgg' for cronjob
if pngname is None or len(pngname)<=4:
plt.switch_backend(backend)
#- Create datetime index
idxname='date'
pbname=xaxis
if pbname in df.columns:
from _alan_date import ymd_parser,epoch_parser
sdate = str(df[pbname].iloc[0])
if sdate.isdigit() == True:
if int(sdate)>123456789:
idxpt=[epoch_parser(x) for x in df[pbname]]
else:
idxpt=[ymd_parser(x,fmt="%Y%m%d") for x in df[pbname]]
else:
idxpt=[ymd_parser(x,fmt=x_fmt) for x in df[pbname]]
df.set_index(pd.DatetimeIndex(idxpt),inplace=True)
df.index.rename(idxname,inplace=True)
df = df.drop(pbname,1)
elif idxname in df.columns:
df[idxname] = pd.to_datetime(df[idxname])
df.set_index(idxname,inplace=True)
else:
df = df.reset_index(drop=True)
#- Create a pivot table
trendName = None
if pivot_group in df.columns and pivot_value in df.columns:
trendName = df[pivot_group][0]
df=df.pivot_table(index='date',columns=pivot_group,values=pivot_value)
#- Create linear-interpolation for missing data
if interpolateYN is True:
df=df.apply(extrapolate_series,axis=0)
#- Create return since inception
if rsiYN is True:
de=[]
for j in range(df.shape[1]):
inix = df.iloc[0,j] if df.iloc[0,j]>1 else 1
de.append(df.iloc[:,j]/inix*100.-100)
#de = [df.iloc[:,j]/df.iloc[0,j]*100.-100 for j in range(df.shape[1])]
df = pd.concat(de,axis=1)
#- Create trend curve
if trendTF is True:
try:
from _alan_pppscf import vertex_locator
if trendName is None:
trendName = df._get_numeric_data().columns[0]
dg, dh = vertex_locator(df[trendName],npar=npar,debugTF=True)
#df['trend'] = dg['trend'].values
if debugTF is True:
print >> sys.stderr, "Trendline dg:\n",dg
except Exception, e:
print >> sys.stderr, "**ERROR: {} @ {}".format(str(e),'vertex_locator()')
if title is None:
title="/".join(df.columns).upper()
if rsiYN is True:
title += " Return Since Inception"
#- plot simple line plot
if tsTF is False:
df = df.reset_index(drop=True)
if debugTF is True:
print >> sys.stderr, df.head()
print >> sys.stderr, df.tail()
nobs=len(df.index)
nsp = (nobs/nbins) if nobs>nbins*2 else nobs
#ds=[y for j,y in enumerate(df.index) if j%nsp==0]
#ax=df.plot(xticks=ds,title=title)
colorUD = ['red','green'] if lang=='cn' else ['green','red']
if ohlcComboTF is True:
from alan_plot import plot_candlestickCombo
from _alan_calc import run_tech
chartType = 'minute' if pbname == 'epochs' else 'chart'
ma1=5;ma2=30
datax = run_tech(df, pcol='close',winLst=[ma1,ma2],nanTF=True)
fig, axes = plot_candlestickCombo(datax,title,ma1,ma2,block=False,chartType=chartType,trendTF=trendTF,npar=npar,debugTF=debugTF,colorUD=colorUD)
if pngname is not None and len(pngname)>4:
plt.savefig(pngname)#, bbox_inches='tight',dpi=1000)
else:
plt.show(axes)
return datax
fig, ax=plt.subplots(figsize=(11,6))
if ohlcTF is True:
from alan_plot import plot_candlestick
chartType = 'minute' if pbname == 'epochs' else 'chart'
ax = plot_candlestick(df,tsidx=df.index,chartType=chartType,title=title,block=False,debugTF=debugTF,ax=ax,trendTF=trendTF,npar=npar,colorUD=colorUD)
x_fmt = "%H:%M" if chartType == 'minute' else x_fmt
print >> sys.stderr, df.describe()
else:
df.plot(ax=ax,grid=True,color=['yellow','green','red','cyan','lightgray','salmon'])
#ax=df.plot(figsize=(11,6))
ax.set_ylabel(df.columns[0])
if trendTF is True:
dg.plot(ax=ax)
if rsiYN is True:
ax.set_ylabel("return %")
ax.grid(linestyle='dotted',linewidth=0.5)
if df.index._typ == "datetimeindex":
mddfmt=mdates.DateFormatter(x_fmt)
ax.xaxis.set_major_formatter(mddfmt)
xtinterval=(df.index[1]-df.index[0])
if xtinterval.days < 7 and xtinterval.days>=1 : # daily data
ax.set_xlim(df.index[0], df.index[-1])
#ax.xaxis.set_major_locator(mdates.MonthLocator(interval=int(nsp/30.+0.97)))
bymd = [1,5,10,15,20,25] if nobs<50 else [1,15] if nobs<120 else [1]
itv = 1 if nobs<160 else int(nsp/30.+0.97)
xlocator = mdates.MonthLocator(bymonthday=bymd,interval=itv)
ax.xaxis.set_major_locator(xlocator)
# check if min/max of xaxis should be included major ticks
if debugTF is True:
print >> sys.stderr, ax.get_xticks(),ax.get_xlim()
xtcks = list(ax.get_xticks())
x1,x2 = xtcks[:2]
xmin,xmax = ax.get_xlim()
if (x1-xmin)>(x2-x1)*0.6:
xtcks = [xmin] + xtcks
if (xmax-xtcks[-1])>(x2-x1)*0.6:
xtcks = xtcks + [xmax]
ax.set_xticks(xtcks)
ax.xaxis.set_minor_locator(mdates.MonthLocator(interval=1))
if debugTF is True:
print >> sys.stderr,ax.get_xticks()
print >> sys.stderr, "Daily data use MonthLocator"
elif xtinterval.seconds < 30: # second data
locator = mdates.AutoDateLocator()
locator.intervald[5] = [0,5,10,15,20,25,30,35,40,45,55]
mddfmt = mdates.AutoDateFormatter(locator)
mddfmt.scaled[1/(24.*60.)] = '%M:%S'
ax.xaxis.set_major_locator(locator)
ax.xaxis.set_major_formatter(mddfmt)
print >> sys.stderr, "Second data use AutoDateLocator",xtinterval.seconds
elif xtinterval.seconds < 100 : # minute data
bym = [0,15,30,45] if nobs<=120 else [0,30] if nobs<=360 else [0]
xlocator = mdates.MinuteLocator(byminute=bym, interval = 1)
ax.xaxis.set_major_locator(xlocator)
print >> sys.stderr, "Minute data use MinuteLocator",xtinterval.days
else: # periodic data
print >> sys.stderr, "Periodic data use DayLocator"
ax.xaxis.set_major_locator(mdates.DayLocator(interval=nsp))
ax.xaxis.label.set_visible(False)
plt.title(title,fontsize=30,fontproperties=prop)
plt.xticks(rotation='20',fontsize=12)
if len(df.columns)>1 and ohlcTF is False:
ax.legend(loc="upper left",prop=prop)
#logo = mimage.imread("aicaas_icon.png")
#plt.figimage(logo, xo=20,yo=420)
plt.subplots_adjust(left=0.1,bottom=0.30)
if pngname is not None and len(pngname)>4:
plt.savefig(pngname)#, bbox_inches='tight',dpi=1000)
else:
plt.show(ax)
return df
def opt_csv2plot(argv,retParser=False):
""" command-line options initial setup
Arguments:
argv: list arguments, usually passed from sys.argv
retParser: OptionParser class return flag, default to False
Return: (options, args) tuple if retParser is False else OptionParser class
"""
parser = OptionParser(usage="usage: %prog [option] FILENAME", version="%prog 1.0",
description="Time-series Plotting Utility via matplotlib")
parser.add_option("-s","--sep",action="store",dest="sep",default="|",
help="field separator (default: |)")
parser.add_option("","--xaxis",action="store",dest="xaxis",default="pbdate",
help="x-axis column name (default: pbdate in yyyymmdd)")
parser.add_option("","--columns",action="store",dest="columns",
help="selected columns (default: ALL)")
parser.add_option("","--ren_header",action="store",dest="hdrLst",
help="rename header columns")
parser.add_option("-t","--title",action="store",dest="title",
help="title (default: combo-colunms)")
parser.add_option("-n","--nbins",action="store",dest="nbins",default="6",type=int,
help="number of bins in x-axis (default: 6)")
parser.add_option("","--return_since_inception",action="store_true",dest="rsiYN",default=False,
help="use Return since Inception plot. Note: $1 will be used as the initial investment if the initial is less than $1")
parser.add_option("","--interpolate",action="store_true",dest="interpolateYN",default=False,
help="use linear-interplation for missing data")
parser.add_option("","--pivot_group",action="store",dest="pivot_group",
help="pivot table group by column, must pair with PIVOT_VALUE")
parser.add_option("","--pivot_value",action="store",dest="pivot_value",
help="pivot table display value column, must pair with PIVOT_GROUP")
parser.add_option("","--x_fmt",action="store",dest="x_fmt",default='%m-%d-%y',
help="graph x-axis format (default: %m-%d-%y)")
parser.add_option("","--png",action="store",dest="pngname",
help="graph name (default: None)")
parser.add_option("","--backend",action="store",dest="backend",default='tkAgg',
help="matplotlib new backend(default: tkAgg)")
parser.add_option("","--no_time_series",action="store_false",dest="tsTF",default=True,
help="Simple line plot no time-series")
parser.add_option("-l","--lang",action="store",dest="lang",default="en",
help="language mode [cn|en] (default: en), ohlc/ohlc_combo ONLY")
parser.add_option("","--ohlc",action="store_true",dest="ohlcTF",default=False,
help="plot stock OHLC Candlestick")
parser.add_option("","--ohlc_combo",action="store_true",dest="ohlcComboTF",default=False,
help="plot stock OHLC Candlestick + MA/RSI/MACD Combo")
parser.add_option("","--src",action="store",dest="src",
help="data source (FILENAME is treated as ticker/series if provided. default: None)")
parser.add_option("","--start",action="store",dest="start",
help="start YYYY-MM-DD, must pair with SRC (default: 2-years-ago)")
parser.add_option("","--end",action="store",dest="end",
help="end YYYY-MM-DD, must pair with SRC (default: today)")
parser.add_option("","--days",action="store",dest="days",default=730,type=int,
help="number of days from END date, must pair with SRC (default: 730)")
parser.add_option("","--trendline",action="store_true",dest="trendTF",default=False,
help="Draw trendline, apply to the 1st array ONLY")
parser.add_option("","--npar",action="store",dest="npar",default=15,type="int",
help="trendline fitting polynomial degree (default: 15)")
parser.add_option("","--j2ts",action="store",dest="j2ts",
help="jinja2 template script, (default: None).")
parser.add_option("","--extra_js",action="store",dest="extraJS",
help="extra JSON in DICT format.")
parser.add_option("","--extra_xs",action="store",dest="extraXS",
help="extra excutable string in k1=v1;k2=v2; format")
parser.add_option("","--debug",action="store_true",dest="debugTF",default=False,
help="debugging (default: False)")
(options, args) = parser.parse_args(argv[1:])
if retParser is True:
return parser
try:
opts = vars(options)
from _alan_str import extra_opts
extra_opts(opts,xkey='extraJS',method='JS',updTF=True)
extra_opts(opts,xkey='extraXS',method='XS',updTF=True)
except Exception as e:
print >> sys.stderr, str(e)
return (opts, args)
if __name__ == '__main__':
opts,args = opt_csv2plot(sys.argv)
try:
df=run_csv2plot(args,opts)
#print dataj2ts(opts['j2ts'],df,opts)
except Exception, e:
print >> sys.stderr, "**ERROR:",str(e)
| [
"[email protected]"
] | |
2d34fe0d4f1b224a9e161de674ff2f540eaf6f3f | d3f448d238b435b48d8f27f17a34b3e39a70dc29 | /python-client/test/test_kyc_user_validation_share_holder_list_item_response_natural.py | 5639c0032162e82c676318d5d1ff7f90707312d0 | [] | no_license | pedroguirao/swagger | 1fc29b6d9bcc193bf8ce85f6d8a6074f4c37150d | 5ffea6203b5fcd3f201c2ede76d354302a6fb0ee | refs/heads/master | 2020-06-07T16:15:08.659567 | 2019-06-21T07:51:49 | 2019-06-21T07:51:49 | 193,055,538 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,180 | py | # coding: utf-8
"""
MarketPay API
API for Smart Contracts and Payments # noqa: E501
OpenAPI spec version: v2.01
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.kyc_user_validation_share_holder_list_item_response_natural import KycUserValidationShareHolderListItemResponseNatural # noqa: E501
from swagger_client.rest import ApiException
class TestKycUserValidationShareHolderListItemResponseNatural(unittest.TestCase):
"""KycUserValidationShareHolderListItemResponseNatural unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testKycUserValidationShareHolderListItemResponseNatural(self):
"""Test KycUserValidationShareHolderListItemResponseNatural"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.kyc_user_validation_share_holder_list_item_response_natural.KycUserValidationShareHolderListItemResponseNatural() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
1de1b2caa5a46a524e310c70cb4922b59d81d69c | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03106/s261285927.py | acddd8336d17f956526b10a5358983a3ae205bef | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 152 | py | import sys
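# Find the K-th largest common divisor of A and B by scanning candidate divisors
# from 100 down to 1 (as in the original problem, K is assumed small enough that it exists).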
a,b,k=map(int,input().split())
for i in range(1,101):
if a%(101-i)==0 and b%(101-i)==0:
k-=1
if k==0:
print(101-i)
sys.exit() | [
"[email protected]"
] | |
71c821509417c94ee842caec376a6a4c2803b333 | d9a22d4dcdfc0c28176c0e8afd784b30d275597e | /test_suite/shared_data/dispersion/Fyn_SH3_R1rho/relax_results/solution_tp02.py | 6e2250c6b7fdf8ea287e0c2e8ad080017c2505a3 | [] | no_license | jlec/relax | fda1b3ff77be0afc21c2e6cc52348ae7635cd07a | c317326ddeacd1a1c608128769676899daeae531 | refs/heads/master | 2016-09-08T00:27:57.256090 | 2015-02-10T12:24:55 | 2015-02-10T12:24:55 | 30,596,131 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 8,039 | py | """Compare the synthetic cpmg_fit data to the relax solution.
To run this, type:
$ rm -f solution_tp02.log; ../../../../../relax --tee solution_tp02.log solution_tp02.py
"""
# Python module imports.
from os import remove
from shutil import move
# relax module imports.
from lib.dispersion.variables import EXP_TYPE_R1RHO
from lib.nmr import frequency_to_ppm
from specific_analyses.relax_disp.data import generate_r20_key
# Create a data pipe.
pipe.create('R2eff', 'relax_disp')
# Create the spin system.
spin.create(res_name='X', res_num=14, spin_name='N')
spin.element('N', spin_id='@N')
spin.isotope('15N', spin_id='@N')
# The spectral data - experiment ID, R2eff file name, experiment type, spin ID string, spectrometer frequency in Hertz, relaxation time.
data = [
['600_MHz_nu1_50_Hz', 'T14_600_50.dsp', ':14@N', 600e6, 50, 0.04],
['600_MHz_nu1_75_Hz', 'T14_600_75.dsp', ':14@N', 600e6, 75, 0.04],
['600_MHz_nu1_100_Hz', 'T14_600_100.dsp', ':14@N', 600e6, 100, 0.04],
['600_MHz_nu1_150_Hz', 'T14_600_150.dsp', ':14@N', 600e6, 150, 0.04],
['600_MHz_nu1_200_Hz', 'T14_600_200.dsp', ':14@N', 600e6, 200, 0.04],
['800_MHz_nu1_100_Hz', 'T14_800_100.dsp', ':14@N', 800e6, 100, 0.04],
['800_MHz_nu1_200_Hz', 'T14_800_200.dsp', ':14@N', 800e6, 200, 0.04],
['800_MHz_nu1_400_Hz', 'T14_800_400.dsp', ':14@N', 800e6, 400, 0.04]
]
spin_lock_offset = {}
spin_lock_offset['600_MHz_nu1_50_Hz'] = [ 340.0, 330.0, 320.0, 310.0, 300.0, 290.0, 280.0, 270.0, 260.0, 250.0, 240.0, 230.0, 220.0, 210.0, 200.0, 190.0, 180.0, 170.0, 160.0, 150.0, 140.0, 130.0, 120.0, 110.0, 100.0, 90.0, 80.0, 70.0, 60.0, 50.0, 40.0, 30.0, 20.0, 10.0, 0.0, -10.0, -20.0, -30.0, -40.0, -50.0, -60.0, -70.0, -80.0, -90.0]
spin_lock_offset['600_MHz_nu1_75_Hz'] = [ 340.0, 330.0, 320.0, 310.0, 300.0, 290.0, 280.0, 270.0, 260.0, 250.0, 240.0, 230.0, 220.0, 210.0, 200.0, 190.0, 180.0, 170.0, 160.0, 150.0, 140.0, 130.0, 120.0, 110.0, 100.0, 90.0, 80.0, 70.0, 60.0, 50.0, 40.0, 30.0, 20.0, 10.0, 0.0, -10.0, -20.0, -30.0, -40.0, -50.0, -60.0, -70.0, -80.0, -90.0]
spin_lock_offset['600_MHz_nu1_100_Hz'] = [ 340.0, 330.0, 320.0, 310.0, 300.0, 290.0, 280.0, 270.0, 260.0, 250.0, 240.0, 230.0, 220.0, 210.0, 200.0, 190.0, 180.0, 170.0, 160.0, 150.0, 140.0, 130.0, 120.0, 110.0, 100.0, 90.0, 80.0, 70.0, 60.0, 50.0, 40.0, 30.0, 20.0, 10.0, 0.0, -10.0, -20.0, -30.0, -40.0, -50.0, -60.0, -70.0, -80.0, -90.0]
spin_lock_offset['600_MHz_nu1_150_Hz'] = [ 385.0, 370.0, 355.0, 340.0, 325.0, 310.0, 295.0, 280.0, 265.0, 250.0, 235.0, 220.0, 205.0, 190.0, 175.0, 160.0, 145.0, 130.0, 115.0, 100.0, 85.0, 70.0, 55.0, 40.0, 25.0, 10.0, -5.0, -20.0, -35.0, -50.0, -65.0, -80.0, -95.0, -110.0, -125.0, -140.0, -155.0, -170.0, -185.0]
spin_lock_offset['600_MHz_nu1_200_Hz'] = [ 385.0, 370.0, 355.0, 340.0, 325.0, 310.0, 295.0, 280.0, 265.0, 250.0, 235.0, 220.0, 205.0, 190.0, 175.0, 160.0, 145.0, 130.0, 115.0, 100.0, 85.0, 70.0, 55.0, 40.0, 25.0, 10.0, -5.0, -20.0, -35.0, -50.0, -65.0, -80.0, -95.0, -110.0, -125.0, -140.0, -155.0, -170.0, -185.0]
spin_lock_offset['800_MHz_nu1_100_Hz'] = [ 780.0, 750.0, 720.0, 690.0, 660.0, 630.0, 600.0, 570.0, 540.0, 510.0, 480.0, 450.0, 420.0, 390.0, 360.0, 330.0, 300.0, 270.0, 240.0, 210.0, 180.0, 150.0, 120.0, 90.0, 60.0, 30.0, 0.0, -30.0, -60.0, -90.0, -120.0, -150.0, -180.0, -210.0, -240.0, -270.0, -300.0, -330.0, -360.0]
spin_lock_offset['800_MHz_nu1_200_Hz'] = [ 960.0, 920.0, 880.0, 840.0, 800.0, 760.0, 720.0, 680.0, 640.0, 600.0, 560.0, 520.0, 480.0, 440.0, 400.0, 360.0, 320.0, 280.0, 240.0, 200.0, 160.0, 120.0, 80.0, 40.0, 0.0, -40.0, -80.0, -120.0, -160.0, -200.0, -240.0, -280.0, -320.0, -360.0, -400.0, -440.0, -480.0, -520.0, -560.0]
spin_lock_offset['800_MHz_nu1_400_Hz'] = [ 1150.0, 1100.0, 1050.0, 1000.0, 950.0, 900.0, 850.0, 800.0, 750.0, 700.0, 650.0, 600.0, 550.0, 500.0, 450.0, 400.0, 350.0, 300.0, 250.0, 200.0, 150.0, 100.0, 50.0, 0.0, -50.0, -100.0, -150.0, -200.0, -250.0, -300.0, -350.0, -400.0, -450.0, -500.0, -550.0, -600.0, -650.0, -700.0, -750.0]
# Loop over the files, reading in the data.
for id, file, spin_id, H_frq, field, relax_time in data:
# Loop over each CPMG frequency.
for offset in spin_lock_offset[id]:
# The id.
new_id = "%s_%.3f" % (id, offset)
# Set the NMR field strength.
spectrometer.frequency(id=new_id, frq=H_frq)
# Set the relaxation dispersion experiment type.
relax_disp.exp_type(spectrum_id=new_id, exp_type=EXP_TYPE_R1RHO)
# Relaxation dispersion CPMG constant time delay T (in s).
relax_disp.relax_time(spectrum_id=new_id, time=relax_time)
# Set the relaxation dispersion spin-lock field strength (nu1).
relax_disp.spin_lock_field(spectrum_id=new_id, field=field)
# Set the spin-lock offset, converting back to ppm.
relax_disp.spin_lock_offset(spectrum_id=new_id, offset=-frequency_to_ppm(frq=offset, B0=H_frq, isotope='15N'))
# Read the R2eff data.
relax_disp.r2eff_read_spin(id=id, file=file, dir='..', spin_id=spin_id, offset_col=1, data_col=2, error_col=3)
# Load the R1 data.
relax_data.read(ri_id='600MHz', ri_type='R1', frq=600e6, file='R1_600MHz.out', dir='..', mol_name_col=1, res_num_col=2, res_name_col=3, spin_num_col=4, spin_name_col=5, data_col=6, error_col=7)
relax_data.read(ri_id='800MHz', ri_type='R1', frq=800e6, file='R1_800MHz.out', dir='..', mol_name_col=1, res_num_col=2, res_name_col=3, spin_num_col=4, spin_name_col=5, data_col=6, error_col=7)
# Change the model.
relax_disp.select_model('TP02')
# The R20 keys.
r20_600_key = generate_r20_key(exp_type=EXP_TYPE_R1RHO, frq=600e6)
r20_800_key = generate_r20_key(exp_type=EXP_TYPE_R1RHO, frq=800e6)
# Manually set the parameter values.
spin_N = cdp.mol[0].res[0].spin[0]
spin_N.r2 = {
r20_600_key: 9.108060397660111,
r20_800_key: 13.793213528551924,
}
spin_N.pA = 0.945912353996981
spin_N.pB = 0.054087646003019
spin_N.kex = 367.981715073974556
spin_N.dw = 4.305697497613982
spin_N.ri_data['600MHz'] = 3.179051390898238
spin_N.ri_data['800MHz'] = 4.452840879991469
# Calculate.
minimise.calculate()
print("%-40s %20.15f" % ("relax chi2:", spin_N.chi2))
print("%-40s %20.15f" % ("cpmg_fit chi2 (corrections turned off):", 472.400507470708874))
# Minimisation.
minimise.grid_search(inc=7)
minimise.execute('simplex', constraints=True)
# Plot the dispersion curves.
relax_disp.plot_disp_curves(dir='.', num_points=100, extend=0, force=True)
# Save the results.
state.save('solution_tp02', dir='.', compress_type=1, force=True)
# Cleanup.
print("\n\nMoving 'disp_14_N.agr' to 'solution_tp02.agr'.")
move('disp_14_N.agr', 'solution_tp02.agr')
print("Deleting 'grace2images.py'.")
remove('grace2images.py')
| [
"bugman@b7916896-f9f9-0310-9fe5-b3996d8957d5"
] | bugman@b7916896-f9f9-0310-9fe5-b3996d8957d5 |
400c4825be91859d206dbc84ac0bef043e1582b7 | 66a05459831aef06fc86316ecb782848c116b226 | /collective/etherpad/Extensions/Install.py | be56c049b8b686cc2c19c1f049088f75de18d462 | [] | no_license | toutpt/collective.etherpad | 8d01323b3e31ff0872afa1fd8e4bc85b14a3f123 | 590414ddd3ed7437cefea91c853d291feb9b328f | refs/heads/master | 2020-05-18T05:31:36.603712 | 2013-09-26T12:45:35 | 2013-09-26T12:45:35 | 8,142,351 | 2 | 0 | null | 2013-09-18T16:42:41 | 2013-02-11T17:13:59 | Python | UTF-8 | Python | false | false | 709 | py |
def uninstall(portal, reinstall=False):
"""We uninstall things that are not handles by quickinstaller"""
if not reinstall:
# lets remove action on content types
types = portal.portal_types
for _type in ('Document', 'News Item', 'Event', 'Topic'):
_typeinfo = getattr(types, _type, None)
if _typeinfo:
action_info = _typeinfo.getActionObject('object/etherpad')
if action_info:
actions = _typeinfo.listActions()
indexes = [(a.category, a.id) for a in actions]
index = indexes.index(('object', 'etherpad'))
_typeinfo.deleteActions((index, ))
| [
"[email protected]"
] | |
01e31b5def65ba66a0b5b8c58dd666c03742a49f | 00ed1eb9f4875be9c116eae90c850b4c5f0ebd4d | /tests/funcat/utils/test_yahoo.py | 8cf228d677cb84b693c54063b84d932589854b5c | [
"Apache-2.0"
] | permissive | pchaos/funcat2 | a64fbcfc5c1d7b6ed1356cd9558a2efabae90c0e | ff554cc134906a5a182fc31774488d62a839b314 | refs/heads/master | 2023-09-02T19:56:16.017728 | 2021-09-03T01:57:15 | 2021-09-03T01:57:15 | 356,155,099 | 12 | 5 | null | null | null | null | UTF-8 | Python | false | false | 904 | py | # -*- coding: utf-8 -*-
import unittest
import warnings
from funcat.utils import save_sp500_tickers, get_data_from_yahoo
__updated__ = "2021-08-10"
class TestYahoo(unittest.TestCase):
"""Test case docstring."""
@classmethod
def setUpClass(cls):
super(TestYahoo, cls).setUpClass()
        # Hide the warning "ResourceWarning: Enable tracemalloc to get the object
# allocation traceback
warnings.simplefilter('ignore', ResourceWarning)
def setUp(self):
pass
def tearDown(self):
pass
def test_save_sp500_tickers(self):
sp500 = save_sp500_tickers()
self.assertTrue(len(sp500) >= 500,
f"返回长度不够{len(sp500)=}\n: {sp500=}")
print(f"{len(sp500)=}, {sp500=}")
def test_get_data_from_yahoo(self):
get_data_from_yahoo()
if __name__ == "__main__":
unittest.main()
| [
"[email protected]"
] | |
4533e7f4106a08d6c60606ae85e081356575fbf3 | 0c40e97b69dcd00f0b0b05f249d0fce448320fd8 | /test/functional/feature_segwit.py | 2d82afd598a56df99062522ce243f7c32495d618 | [
"MIT"
] | permissive | Arhipovladimir/Earthcoin | 9908912df9b10b97512c545b855c3670767039d9 | bc5b5ee538c76e7232e93434aedd8688bae70792 | refs/heads/main | 2023-07-16T05:50:52.755250 | 2021-08-25T09:19:40 | 2021-08-25T09:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 40,664 | py | #!/usr/bin/env python3
# Copyright (c) 2016-2018 The Earthcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the SegWit changeover logic."""
from decimal import Decimal
from test_framework.address import (
key_to_p2pkh,
key_to_p2sh_p2wpkh,
key_to_p2wpkh,
program_to_witness,
script_to_p2sh,
script_to_p2sh_p2wsh,
script_to_p2wsh,
)
from test_framework.blocktools import witness_script, send_to_witness
from test_framework.messages import COIN, COutPoint, CTransaction, CTxIn, CTxOut, FromHex, sha256, ToHex
from test_framework.script import CScript, OP_HASH160, OP_CHECKSIG, OP_0, hash160, OP_EQUAL, OP_DUP, OP_EQUALVERIFY, OP_1, OP_2, OP_CHECKMULTISIG, OP_TRUE, OP_DROP
from test_framework.test_framework import EarthcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error, bytes_to_hex_str, connect_nodes, hex_str_to_bytes, sync_blocks, try_rpc
from io import BytesIO
NODE_0 = 0
NODE_2 = 2
WIT_V0 = 0
WIT_V1 = 1
def getutxo(txid):
utxo = {}
utxo["vout"] = 0
utxo["txid"] = txid
return utxo
def find_spendable_utxo(node, min_value):
for utxo in node.listunspent(query_options={'minimumAmount': min_value}):
if utxo['spendable']:
return utxo
raise AssertionError("Unspent output equal or higher than %s not found" % min_value)
class SegWitTest(EarthcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
# This test tests SegWit both pre and post-activation, so use the normal BIP9 activation.
self.extra_args = [
[
"-rpcserialversion=0",
"-vbparams=segwit:0:999999999999",
"-addresstype=legacy",
"-deprecatedrpc=addwitnessaddress",
],
[
"-blockversion=4",
"-rpcserialversion=1",
"-vbparams=segwit:0:999999999999",
"-addresstype=legacy",
"-deprecatedrpc=addwitnessaddress",
],
[
"-blockversion=536870915",
"-vbparams=segwit:0:999999999999",
"-addresstype=legacy",
"-deprecatedrpc=addwitnessaddress",
],
]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def setup_network(self):
super().setup_network()
connect_nodes(self.nodes[0], 2)
self.sync_all()
def success_mine(self, node, txid, sign, redeem_script=""):
send_to_witness(1, node, getutxo(txid), self.pubkey[0], False, Decimal("49.998"), sign, redeem_script)
block = node.generate(1)
assert_equal(len(node.getblock(block[0])["tx"]), 2)
sync_blocks(self.nodes)
def skip_mine(self, node, txid, sign, redeem_script=""):
send_to_witness(1, node, getutxo(txid), self.pubkey[0], False, Decimal("49.998"), sign, redeem_script)
block = node.generate(1)
assert_equal(len(node.getblock(block[0])["tx"]), 1)
sync_blocks(self.nodes)
def fail_accept(self, node, error_msg, txid, sign, redeem_script=""):
assert_raises_rpc_error(-26, error_msg, send_to_witness, use_p2wsh=1, node=node, utxo=getutxo(txid), pubkey=self.pubkey[0], encode_p2sh=False, amount=Decimal("49.998"), sign=sign, insert_redeem_script=redeem_script)
def run_test(self):
self.nodes[0].generate(161) #block 161
self.log.info("Verify sigops are counted in GBT with pre-BIP141 rules before the fork")
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
tmpl = self.nodes[0].getblocktemplate({})
assert(tmpl['sizelimit'] == 1000000)
assert('weightlimit' not in tmpl)
assert(tmpl['sigoplimit'] == 20000)
assert(tmpl['transactions'][0]['hash'] == txid)
assert(tmpl['transactions'][0]['sigops'] == 2)
tmpl = self.nodes[0].getblocktemplate({'rules':['segwit']})
assert(tmpl['sizelimit'] == 1000000)
assert('weightlimit' not in tmpl)
assert(tmpl['sigoplimit'] == 20000)
assert(tmpl['transactions'][0]['hash'] == txid)
assert(tmpl['transactions'][0]['sigops'] == 2)
self.nodes[0].generate(1) #block 162
balance_presetup = self.nodes[0].getbalance()
self.pubkey = []
p2sh_ids = [] # p2sh_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE embedded in p2sh
wit_ids = [] # wit_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE via bare witness
for i in range(3):
newaddress = self.nodes[i].getnewaddress()
self.pubkey.append(self.nodes[i].getaddressinfo(newaddress)["pubkey"])
multiscript = CScript([OP_1, hex_str_to_bytes(self.pubkey[-1]), OP_1, OP_CHECKMULTISIG])
p2sh_addr = self.nodes[i].addwitnessaddress(newaddress)
bip173_addr = self.nodes[i].addwitnessaddress(newaddress, False)
p2sh_ms_addr = self.nodes[i].addmultisigaddress(1, [self.pubkey[-1]], '', 'p2sh-segwit')['address']
bip173_ms_addr = self.nodes[i].addmultisigaddress(1, [self.pubkey[-1]], '', 'bech32')['address']
assert_equal(p2sh_addr, key_to_p2sh_p2wpkh(self.pubkey[-1]))
assert_equal(bip173_addr, key_to_p2wpkh(self.pubkey[-1]))
assert_equal(p2sh_ms_addr, script_to_p2sh_p2wsh(multiscript))
assert_equal(bip173_ms_addr, script_to_p2wsh(multiscript))
p2sh_ids.append([])
wit_ids.append([])
for v in range(2):
p2sh_ids[i].append([])
wit_ids[i].append([])
for i in range(5):
for n in range(3):
for v in range(2):
wit_ids[n][v].append(send_to_witness(v, self.nodes[0], find_spendable_utxo(self.nodes[0], 50), self.pubkey[n], False, Decimal("49.999")))
p2sh_ids[n][v].append(send_to_witness(v, self.nodes[0], find_spendable_utxo(self.nodes[0], 50), self.pubkey[n], True, Decimal("49.999")))
self.nodes[0].generate(1) #block 163
sync_blocks(self.nodes)
# Make sure all nodes recognize the transactions as theirs
assert_equal(self.nodes[0].getbalance(), balance_presetup - 60*50 + 20*Decimal("49.999") + 50)
assert_equal(self.nodes[1].getbalance(), 20*Decimal("49.999"))
assert_equal(self.nodes[2].getbalance(), 20*Decimal("49.999"))
self.nodes[0].generate(260) #block 423
sync_blocks(self.nodes)
self.log.info("Verify witness txs are skipped for mining before the fork")
self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][0], True) #block 424
self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V1][0], True) #block 425
self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][0], True) #block 426
self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][0], True) #block 427
self.log.info("Verify unsigned p2sh witness txs without a redeem script are invalid")
self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", p2sh_ids[NODE_2][WIT_V0][1], False)
self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", p2sh_ids[NODE_2][WIT_V1][1], False)
self.nodes[2].generate(4) # blocks 428-431
self.log.info("Verify previous witness txs skipped for mining can now be mined")
assert_equal(len(self.nodes[2].getrawmempool()), 4)
block = self.nodes[2].generate(1) #block 432 (first block with new rules; 432 = 144 * 3)
sync_blocks(self.nodes)
assert_equal(len(self.nodes[2].getrawmempool()), 0)
segwit_tx_list = self.nodes[2].getblock(block[0])["tx"]
assert_equal(len(segwit_tx_list), 5)
self.log.info("Verify default node can't accept txs with missing witness")
# unsigned, no scriptsig
self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", wit_ids[NODE_0][WIT_V0][0], False)
self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", wit_ids[NODE_0][WIT_V1][0], False)
self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V0][0], False)
self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V1][0], False)
# unsigned with redeem script
self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V0][0], False, witness_script(False, self.pubkey[0]))
self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V1][0], False, witness_script(True, self.pubkey[0]))
self.log.info("Verify block and transaction serialization rpcs return differing serializations depending on rpc serialization flag")
assert(self.nodes[2].getblock(block[0], False) != self.nodes[0].getblock(block[0], False))
assert(self.nodes[1].getblock(block[0], False) == self.nodes[2].getblock(block[0], False))
for i in range(len(segwit_tx_list)):
tx = FromHex(CTransaction(), self.nodes[2].gettransaction(segwit_tx_list[i])["hex"])
assert(self.nodes[2].getrawtransaction(segwit_tx_list[i]) != self.nodes[0].getrawtransaction(segwit_tx_list[i]))
assert(self.nodes[1].getrawtransaction(segwit_tx_list[i], 0) == self.nodes[2].getrawtransaction(segwit_tx_list[i]))
assert(self.nodes[0].getrawtransaction(segwit_tx_list[i]) != self.nodes[2].gettransaction(segwit_tx_list[i])["hex"])
assert(self.nodes[1].getrawtransaction(segwit_tx_list[i]) == self.nodes[2].gettransaction(segwit_tx_list[i])["hex"])
assert(self.nodes[0].getrawtransaction(segwit_tx_list[i]) == bytes_to_hex_str(tx.serialize_without_witness()))
self.log.info("Verify witness txs without witness data are invalid after the fork")
self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program hash mismatch) (code 64)', wit_ids[NODE_2][WIT_V0][2], sign=False)
self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program was passed an empty witness) (code 64)', wit_ids[NODE_2][WIT_V1][2], sign=False)
self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program hash mismatch) (code 64)', p2sh_ids[NODE_2][WIT_V0][2], sign=False, redeem_script=witness_script(False, self.pubkey[2]))
self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program was passed an empty witness) (code 64)', p2sh_ids[NODE_2][WIT_V1][2], sign=False, redeem_script=witness_script(True, self.pubkey[2]))
self.log.info("Verify default node can now use witness txs")
self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V0][0], True) #block 432
self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V1][0], True) #block 433
self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V0][0], True) #block 434
self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V1][0], True) #block 435
self.log.info("Verify sigops are counted in GBT with BIP141 rules after the fork")
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
tmpl = self.nodes[0].getblocktemplate({'rules':['segwit']})
assert(tmpl['sizelimit'] >= 3999577) # actual maximum size is lower due to minimum mandatory non-witness data
assert(tmpl['weightlimit'] == 4000000)
assert(tmpl['sigoplimit'] == 80000)
assert(tmpl['transactions'][0]['txid'] == txid)
assert(tmpl['transactions'][0]['sigops'] == 8)
self.nodes[0].generate(1) # Mine a block to clear the gbt cache
self.log.info("Non-segwit miners are able to use GBT response after activation.")
# Create a 3-tx chain: tx1 (non-segwit input, paying to a segwit output) ->
# tx2 (segwit input, paying to a non-segwit output) ->
# tx3 (non-segwit input, paying to a non-segwit output).
# tx1 is allowed to appear in the block, but no others.
txid1 = send_to_witness(1, self.nodes[0], find_spendable_utxo(self.nodes[0], 50), self.pubkey[0], False, Decimal("49.996"))
hex_tx = self.nodes[0].gettransaction(txid)['hex']
tx = FromHex(CTransaction(), hex_tx)
assert(tx.wit.is_null()) # This should not be a segwit input
assert(txid1 in self.nodes[0].getrawmempool())
# Now create tx2, which will spend from txid1.
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(int(txid1, 16), 0), b''))
tx.vout.append(CTxOut(int(49.99 * COIN), CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE])))
tx2_hex = self.nodes[0].signrawtransactionwithwallet(ToHex(tx))['hex']
txid2 = self.nodes[0].sendrawtransaction(tx2_hex)
tx = FromHex(CTransaction(), tx2_hex)
assert(not tx.wit.is_null())
# Now create tx3, which will spend from txid2
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(int(txid2, 16), 0), b""))
tx.vout.append(CTxOut(int(49.95 * COIN), CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE]))) # Huge fee
tx.calc_sha256()
txid3 = self.nodes[0].sendrawtransaction(ToHex(tx))
assert(tx.wit.is_null())
assert(txid3 in self.nodes[0].getrawmempool())
# Now try calling getblocktemplate() without segwit support.
template = self.nodes[0].getblocktemplate()
# Check that tx1 is the only transaction of the 3 in the template.
template_txids = [ t['txid'] for t in template['transactions'] ]
assert(txid2 not in template_txids and txid3 not in template_txids)
assert(txid1 in template_txids)
# Check that running with segwit support results in all 3 being included.
template = self.nodes[0].getblocktemplate({"rules": ["segwit"]})
template_txids = [ t['txid'] for t in template['transactions'] ]
assert(txid1 in template_txids)
assert(txid2 in template_txids)
assert(txid3 in template_txids)
# Check that wtxid is properly reported in mempool entry
assert_equal(int(self.nodes[0].getmempoolentry(txid3)["wtxid"], 16), tx.calc_sha256(True))
# Mine a block to clear the gbt cache again.
self.nodes[0].generate(1)
self.log.info("Verify behaviour of importaddress, addwitnessaddress and listunspent")
# Some public keys to be used later
pubkeys = [
"0363D44AABD0F1699138239DF2F042C3282C0671CC7A76826A55C8203D90E39242", # cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb
"02D3E626B3E616FC8662B489C123349FECBFC611E778E5BE739B257EAE4721E5BF", # cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97
"04A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538A62F5BD8EC85C2477F39650BD391EA6250207065B2A81DA8B009FC891E898F0E", # 91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV
"02A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538", # cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd
"036722F784214129FEB9E8129D626324F3F6716555B603FFE8300BBCB882151228", # cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66
"0266A8396EE936BF6D99D17920DB21C6C7B1AB14C639D5CD72B300297E416FD2EC", # cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K
"0450A38BD7F0AC212FEBA77354A9B036A32E0F7C81FC4E0C5ADCA7C549C4505D2522458C2D9AE3CEFD684E039194B72C8A10F9CB9D4764AB26FCC2718D421D3B84", # 92h2XPssjBpsJN5CqSP7v9a7cf2kgDunBC6PDFwJHMACM1rrVBJ
]
# Import a compressed key and an uncompressed key, generate some multisig addresses
self.nodes[0].importprivkey("92e6XLo5jVAVwrQKPNTs93oQco8f8sDNBcpv73Dsrs397fQtFQn")
uncompressed_spendable_address = ["mvozP4UwyGD2mGZU4D2eMvMLPB9WkMmMQu"]
self.nodes[0].importprivkey("cNC8eQ5dg3mFAVePDX4ddmPYpPbw41r9bm2jd1nLJT77e6RrzTRR")
compressed_spendable_address = ["mmWQubrDomqpgSYekvsU7HWEVjLFHAakLe"]
assert ((self.nodes[0].getaddressinfo(uncompressed_spendable_address[0])['iscompressed'] == False))
assert ((self.nodes[0].getaddressinfo(compressed_spendable_address[0])['iscompressed'] == True))
self.nodes[0].importpubkey(pubkeys[0])
compressed_solvable_address = [key_to_p2pkh(pubkeys[0])]
self.nodes[0].importpubkey(pubkeys[1])
compressed_solvable_address.append(key_to_p2pkh(pubkeys[1]))
self.nodes[0].importpubkey(pubkeys[2])
uncompressed_solvable_address = [key_to_p2pkh(pubkeys[2])]
spendable_anytime = [] # These outputs should be seen anytime after importprivkey and addmultisigaddress
spendable_after_importaddress = [] # These outputs should be seen after importaddress
solvable_after_importaddress = [] # These outputs should be seen after importaddress but not spendable
unsolvable_after_importaddress = [] # These outputs should be unsolvable after importaddress
solvable_anytime = [] # These outputs should be solvable after importpubkey
unseen_anytime = [] # These outputs should never be seen
uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], compressed_spendable_address[0]])['address'])
uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], uncompressed_spendable_address[0]])['address'])
compressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_spendable_address[0]])['address'])
uncompressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], uncompressed_solvable_address[0]])['address'])
compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_solvable_address[0]])['address'])
compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_solvable_address[0], compressed_solvable_address[1]])['address'])
unknown_address = ["mtKKyoHabkk6e4ppT7NaM7THqPUt7AzPrT", "2NDP3jLWAFT8NDAiUa9qiE6oBt2awmMq7Dx"]
# Test multisig_without_privkey
# We have 2 public keys without private keys, use addmultisigaddress to add to wallet.
# Money sent to P2SH of multisig of this should only be seen after importaddress with the BASE58 P2SH address.
multisig_without_privkey_address = self.nodes[0].addmultisigaddress(2, [pubkeys[3], pubkeys[4]])['address']
script = CScript([OP_2, hex_str_to_bytes(pubkeys[3]), hex_str_to_bytes(pubkeys[4]), OP_2, OP_CHECKMULTISIG])
solvable_after_importaddress.append(CScript([OP_HASH160, hash160(script), OP_EQUAL]))
for i in compressed_spendable_address:
v = self.nodes[0].getaddressinfo(i)
if (v['isscript']):
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
# p2sh multisig with compressed keys should always be spendable
spendable_anytime.extend([p2sh])
# bare multisig can be watched and signed, but is not treated as ours
solvable_after_importaddress.extend([bare])
# P2WSH and P2SH(P2WSH) multisig with compressed keys are spendable after direct importaddress
spendable_after_importaddress.extend([p2wsh, p2sh_p2wsh])
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# normal P2PKH and P2PK with compressed keys should always be spendable
spendable_anytime.extend([p2pkh, p2pk])
# P2SH_P2PK, P2SH_P2PKH with compressed keys are spendable after direct importaddress
spendable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])
# P2WPKH and P2SH_P2WPKH with compressed keys should always be spendable
spendable_anytime.extend([p2wpkh, p2sh_p2wpkh])
for i in uncompressed_spendable_address:
v = self.nodes[0].getaddressinfo(i)
if (v['isscript']):
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
# p2sh multisig with uncompressed keys should always be spendable
spendable_anytime.extend([p2sh])
# bare multisig can be watched and signed, but is not treated as ours
solvable_after_importaddress.extend([bare])
# P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen
unseen_anytime.extend([p2wsh, p2sh_p2wsh])
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# normal P2PKH and P2PK with uncompressed keys should always be spendable
spendable_anytime.extend([p2pkh, p2pk])
# P2SH_P2PK and P2SH_P2PKH are spendable after direct importaddress
spendable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh])
# Witness output types with uncompressed keys are never seen
unseen_anytime.extend([p2wpkh, p2sh_p2wpkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])
for i in compressed_solvable_address:
v = self.nodes[0].getaddressinfo(i)
if (v['isscript']):
# Multisig without private is not seen after addmultisigaddress, but seen after importaddress
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
solvable_after_importaddress.extend([bare, p2sh, p2wsh, p2sh_p2wsh])
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# normal P2PKH, P2PK, P2WPKH and P2SH_P2WPKH with compressed keys should always be seen
solvable_anytime.extend([p2pkh, p2pk, p2wpkh, p2sh_p2wpkh])
# P2SH_P2PK, P2SH_P2PKH with compressed keys are seen after direct importaddress
solvable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])
for i in uncompressed_solvable_address:
v = self.nodes[0].getaddressinfo(i)
if (v['isscript']):
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
# Base uncompressed multisig without private is not seen after addmultisigaddress, but seen after importaddress
solvable_after_importaddress.extend([bare, p2sh])
# P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen
unseen_anytime.extend([p2wsh, p2sh_p2wsh])
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# normal P2PKH and P2PK with uncompressed keys should always be seen
solvable_anytime.extend([p2pkh, p2pk])
# P2SH_P2PK, P2SH_P2PKH with uncompressed keys are seen after direct importaddress
solvable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh])
# Witness output types with uncompressed keys are never seen
unseen_anytime.extend([p2wpkh, p2sh_p2wpkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])
op1 = CScript([OP_1])
op0 = CScript([OP_0])
# 2N7MGY19ti4KDMSzRfPAssP6Pxyuxoi6jLe is the P2SH(P2PKH) version of mjoE3sSrb8ByYEvgnC3Aox86u1CHnfJA4V
unsolvable_address = ["mjoE3sSrb8ByYEvgnC3Aox86u1CHnfJA4V", "2N7MGY19ti4KDMSzRfPAssP6Pxyuxoi6jLe", script_to_p2sh(op1), script_to_p2sh(op0)]
unsolvable_address_key = hex_str_to_bytes("02341AEC7587A51CDE5279E0630A531AEA2615A9F80B17E8D9376327BAEAA59E3D")
unsolvablep2pkh = CScript([OP_DUP, OP_HASH160, hash160(unsolvable_address_key), OP_EQUALVERIFY, OP_CHECKSIG])
unsolvablep2wshp2pkh = CScript([OP_0, sha256(unsolvablep2pkh)])
p2shop0 = CScript([OP_HASH160, hash160(op0), OP_EQUAL])
p2wshop1 = CScript([OP_0, sha256(op1)])
unsolvable_after_importaddress.append(unsolvablep2pkh)
unsolvable_after_importaddress.append(unsolvablep2wshp2pkh)
unsolvable_after_importaddress.append(op1) # OP_1 will be imported as script
unsolvable_after_importaddress.append(p2wshop1)
unseen_anytime.append(op0) # OP_0 will be imported as P2SH address with no script provided
unsolvable_after_importaddress.append(p2shop0)
spendable_txid = []
solvable_txid = []
spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime, 2))
solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime, 1))
self.mine_and_test_listunspent(spendable_after_importaddress + solvable_after_importaddress + unseen_anytime + unsolvable_after_importaddress, 0)
importlist = []
for i in compressed_spendable_address + uncompressed_spendable_address + compressed_solvable_address + uncompressed_solvable_address:
v = self.nodes[0].getaddressinfo(i)
if (v['isscript']):
bare = hex_str_to_bytes(v['hex'])
importlist.append(bytes_to_hex_str(bare))
importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(bare)])))
else:
pubkey = hex_str_to_bytes(v['pubkey'])
p2pk = CScript([pubkey, OP_CHECKSIG])
p2pkh = CScript([OP_DUP, OP_HASH160, hash160(pubkey), OP_EQUALVERIFY, OP_CHECKSIG])
importlist.append(bytes_to_hex_str(p2pk))
importlist.append(bytes_to_hex_str(p2pkh))
importlist.append(bytes_to_hex_str(CScript([OP_0, hash160(pubkey)])))
importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(p2pk)])))
importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(p2pkh)])))
importlist.append(bytes_to_hex_str(unsolvablep2pkh))
importlist.append(bytes_to_hex_str(unsolvablep2wshp2pkh))
importlist.append(bytes_to_hex_str(op1))
importlist.append(bytes_to_hex_str(p2wshop1))
for i in importlist:
# import all generated addresses. The wallet already has the private keys for some of these, so catch JSON RPC
# exceptions and continue.
try_rpc(-4, "The wallet already contains the private key for this address or script", self.nodes[0].importaddress, i, "", False, True)
self.nodes[0].importaddress(script_to_p2sh(op0)) # import OP_0 as address only
self.nodes[0].importaddress(multisig_without_privkey_address) # Test multisig_without_privkey
spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime + spendable_after_importaddress, 2))
solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime + solvable_after_importaddress, 1))
self.mine_and_test_listunspent(unsolvable_after_importaddress, 1)
self.mine_and_test_listunspent(unseen_anytime, 0)
# addwitnessaddress should refuse to return a witness address if an uncompressed key is used
# note that no witness address should be returned by unsolvable addresses
for i in uncompressed_spendable_address + uncompressed_solvable_address + unknown_address + unsolvable_address:
assert_raises_rpc_error(-4, "Public key or redeemscript not known to wallet, or the key is uncompressed", self.nodes[0].addwitnessaddress, i)
# addwitnessaddress should return a witness addresses even if keys are not in the wallet
self.nodes[0].addwitnessaddress(multisig_without_privkey_address)
for i in compressed_spendable_address + compressed_solvable_address:
witaddress = self.nodes[0].addwitnessaddress(i)
# addwitnessaddress should return the same address if it is a known P2SH-witness address
assert_equal(witaddress, self.nodes[0].addwitnessaddress(witaddress))
spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime + spendable_after_importaddress, 2))
solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime + solvable_after_importaddress, 1))
self.mine_and_test_listunspent(unsolvable_after_importaddress, 1)
self.mine_and_test_listunspent(unseen_anytime, 0)
# Repeat some tests. This time we don't add witness scripts with importaddress
# Import a compressed key and an uncompressed key, generate some multisig addresses
self.nodes[0].importprivkey("927pw6RW8ZekycnXqBQ2JS5nPyo1yRfGNN8oq74HeddWSpafDJH")
uncompressed_spendable_address = ["mguN2vNSCEUh6rJaXoAVwY3YZwZvEmf5xi"]
self.nodes[0].importprivkey("cMcrXaaUC48ZKpcyydfFo8PxHAjpsYLhdsp6nmtB3E2ER9UUHWnw")
compressed_spendable_address = ["n1UNmpmbVUJ9ytXYXiurmGPQ3TRrXqPWKL"]
self.nodes[0].importpubkey(pubkeys[5])
compressed_solvable_address = [key_to_p2pkh(pubkeys[5])]
self.nodes[0].importpubkey(pubkeys[6])
uncompressed_solvable_address = [key_to_p2pkh(pubkeys[6])]
spendable_after_addwitnessaddress = [] # These outputs should be seen after importaddress
solvable_after_addwitnessaddress=[] # These outputs should be seen after importaddress but not spendable
unseen_anytime = [] # These outputs should never be seen
solvable_anytime = [] # These outputs should be solvable after importpubkey
unseen_anytime = [] # These outputs should never be seen
uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], compressed_spendable_address[0]])['address'])
uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], uncompressed_spendable_address[0]])['address'])
compressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_spendable_address[0]])['address'])
uncompressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_solvable_address[0], uncompressed_solvable_address[0]])['address'])
compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_solvable_address[0]])['address'])
premature_witaddress = []
for i in compressed_spendable_address:
v = self.nodes[0].getaddressinfo(i)
if (v['isscript']):
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
# P2WSH and P2SH(P2WSH) multisig with compressed keys are spendable after addwitnessaddress
spendable_after_addwitnessaddress.extend([p2wsh, p2sh_p2wsh])
premature_witaddress.append(script_to_p2sh(p2wsh))
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# P2WPKH, P2SH_P2WPKH are always spendable
spendable_anytime.extend([p2wpkh, p2sh_p2wpkh])
for i in uncompressed_spendable_address + uncompressed_solvable_address:
v = self.nodes[0].getaddressinfo(i)
if (v['isscript']):
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
# P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen
unseen_anytime.extend([p2wsh, p2sh_p2wsh])
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# P2WPKH, P2SH_P2WPKH with uncompressed keys are never seen
unseen_anytime.extend([p2wpkh, p2sh_p2wpkh])
for i in compressed_solvable_address:
v = self.nodes[0].getaddressinfo(i)
if (v['isscript']):
# P2WSH multisig without private key are seen after addwitnessaddress
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
solvable_after_addwitnessaddress.extend([p2wsh, p2sh_p2wsh])
premature_witaddress.append(script_to_p2sh(p2wsh))
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# P2SH_P2PK, P2SH_P2PKH with compressed keys are always solvable
solvable_anytime.extend([p2wpkh, p2sh_p2wpkh])
self.mine_and_test_listunspent(spendable_anytime, 2)
self.mine_and_test_listunspent(solvable_anytime, 1)
self.mine_and_test_listunspent(spendable_after_addwitnessaddress + solvable_after_addwitnessaddress + unseen_anytime, 0)
# addwitnessaddress should refuse to return a witness address if an uncompressed key is used
# note that a multisig address returned by addmultisigaddress is not solvable until it is added with importaddress
# premature_witaddress are not accepted until the script is added with addwitnessaddress first
for i in uncompressed_spendable_address + uncompressed_solvable_address + premature_witaddress:
# This will raise an exception
assert_raises_rpc_error(-4, "Public key or redeemscript not known to wallet, or the key is uncompressed", self.nodes[0].addwitnessaddress, i)
# after importaddress it should pass addwitnessaddress
v = self.nodes[0].getaddressinfo(compressed_solvable_address[1])
self.nodes[0].importaddress(v['hex'],"",False,True)
for i in compressed_spendable_address + compressed_solvable_address + premature_witaddress:
witaddress = self.nodes[0].addwitnessaddress(i)
assert_equal(witaddress, self.nodes[0].addwitnessaddress(witaddress))
spendable_txid.append(self.mine_and_test_listunspent(spendable_after_addwitnessaddress + spendable_anytime, 2))
solvable_txid.append(self.mine_and_test_listunspent(solvable_after_addwitnessaddress + solvable_anytime, 1))
self.mine_and_test_listunspent(unseen_anytime, 0)
# Check that createrawtransaction/decoderawtransaction with non-v0 Bech32 works
v1_addr = program_to_witness(1, [3,5])
v1_tx = self.nodes[0].createrawtransaction([getutxo(spendable_txid[0])],{v1_addr: 1})
v1_decoded = self.nodes[1].decoderawtransaction(v1_tx)
assert_equal(v1_decoded['vout'][0]['scriptPubKey']['addresses'][0], v1_addr)
assert_equal(v1_decoded['vout'][0]['scriptPubKey']['hex'], "51020305")
# Check that spendable outputs are really spendable
self.create_and_mine_tx_from_txids(spendable_txid)
# import all the private keys so solvable addresses become spendable
self.nodes[0].importprivkey("cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb")
self.nodes[0].importprivkey("cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97")
self.nodes[0].importprivkey("91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV")
self.nodes[0].importprivkey("cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd")
self.nodes[0].importprivkey("cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66")
self.nodes[0].importprivkey("cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K")
self.create_and_mine_tx_from_txids(solvable_txid)
# Test that importing native P2WPKH/P2WSH scripts works
for use_p2wsh in [False, True]:
if use_p2wsh:
scriptPubKey = "00203a59f3f56b713fdcf5d1a57357f02c44342cbf306ffe0c4741046837bf90561a"
transaction = "01000000000100e1f505000000002200203a59f3f56b713fdcf5d1a57357f02c44342cbf306ffe0c4741046837bf90561a00000000"
else:
scriptPubKey = "a9142f8c469c2f0084c48e11f998ffbe7efa7549f26d87"
transaction = "01000000000100e1f5050000000017a9142f8c469c2f0084c48e11f998ffbe7efa7549f26d8700000000"
self.nodes[1].importaddress(scriptPubKey, "", False)
rawtxfund = self.nodes[1].fundrawtransaction(transaction)['hex']
rawtxfund = self.nodes[1].signrawtransactionwithwallet(rawtxfund)["hex"]
txid = self.nodes[1].sendrawtransaction(rawtxfund)
assert_equal(self.nodes[1].gettransaction(txid, True)["txid"], txid)
assert_equal(self.nodes[1].listtransactions("*", 1, 0, True)[0]["txid"], txid)
# Assert it is properly saved
self.stop_node(1)
self.start_node(1)
assert_equal(self.nodes[1].gettransaction(txid, True)["txid"], txid)
assert_equal(self.nodes[1].listtransactions("*", 1, 0, True)[0]["txid"], txid)
def mine_and_test_listunspent(self, script_list, ismine):
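        # Pays 0.1 BTC (10,000,000 satoshi) to every script in script_list, mines a block,
        # and checks how node 0's wallet reports the resulting outputs in listunspent:
        # ismine=2 -> every output is listed and spendable,
        # ismine=1 -> every output is listed (watch-only) but not spendable,
        # ismine=0 -> none of the outputs appear.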
utxo = find_spendable_utxo(self.nodes[0], 50)
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(int('0x'+utxo['txid'],0), utxo['vout'])))
for i in script_list:
tx.vout.append(CTxOut(10000000, i))
tx.rehash()
signresults = self.nodes[0].signrawtransactionwithwallet(bytes_to_hex_str(tx.serialize_without_witness()))['hex']
txid = self.nodes[0].sendrawtransaction(signresults, True)
self.nodes[0].generate(1)
sync_blocks(self.nodes)
watchcount = 0
spendcount = 0
for i in self.nodes[0].listunspent():
if (i['txid'] == txid):
watchcount += 1
if (i['spendable'] == True):
spendcount += 1
if (ismine == 2):
assert_equal(spendcount, len(script_list))
elif (ismine == 1):
assert_equal(watchcount, len(script_list))
assert_equal(spendcount, 0)
else:
assert_equal(watchcount, 0)
return txid
def p2sh_address_to_script(self,v):
bare = CScript(hex_str_to_bytes(v['hex']))
p2sh = CScript(hex_str_to_bytes(v['scriptPubKey']))
p2wsh = CScript([OP_0, sha256(bare)])
p2sh_p2wsh = CScript([OP_HASH160, hash160(p2wsh), OP_EQUAL])
return([bare, p2sh, p2wsh, p2sh_p2wsh])
def p2pkh_address_to_script(self,v):
pubkey = hex_str_to_bytes(v['pubkey'])
p2wpkh = CScript([OP_0, hash160(pubkey)])
p2sh_p2wpkh = CScript([OP_HASH160, hash160(p2wpkh), OP_EQUAL])
p2pk = CScript([pubkey, OP_CHECKSIG])
p2pkh = CScript(hex_str_to_bytes(v['scriptPubKey']))
p2sh_p2pk = CScript([OP_HASH160, hash160(p2pk), OP_EQUAL])
p2sh_p2pkh = CScript([OP_HASH160, hash160(p2pkh), OP_EQUAL])
p2wsh_p2pk = CScript([OP_0, sha256(p2pk)])
p2wsh_p2pkh = CScript([OP_0, sha256(p2pkh)])
p2sh_p2wsh_p2pk = CScript([OP_HASH160, hash160(p2wsh_p2pk), OP_EQUAL])
p2sh_p2wsh_p2pkh = CScript([OP_HASH160, hash160(p2wsh_p2pkh), OP_EQUAL])
return [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh]
def create_and_mine_tx_from_txids(self, txids, success = True):
tx = CTransaction()
for i in txids:
txtmp = CTransaction()
txraw = self.nodes[0].getrawtransaction(i)
f = BytesIO(hex_str_to_bytes(txraw))
txtmp.deserialize(f)
for j in range(len(txtmp.vout)):
tx.vin.append(CTxIn(COutPoint(int('0x'+i,0), j)))
tx.vout.append(CTxOut(0, CScript()))
tx.rehash()
signresults = self.nodes[0].signrawtransactionwithwallet(bytes_to_hex_str(tx.serialize_without_witness()))['hex']
self.nodes[0].sendrawtransaction(signresults, True)
self.nodes[0].generate(1)
sync_blocks(self.nodes)
if __name__ == '__main__':
SegWitTest().main()
| [
"[email protected]"
] | |
110f03f1ca08186fa594f660cabd56e1c35ab2e9 | caaf1b0754db1e676c37a6f1e58f19183754e654 | /sdk/network/azure-mgmt-network/generated_samples/private_link_service_delete.py | 97258f9c138bda212fe46732ae74449e11fcba41 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | rdomenzain/azure-sdk-for-python | 45dfb39121a0abda048c22e7309733a56259f525 | 58984255aeb904346b6958c5ba742749a2cc7d1b | refs/heads/master | 2023-07-07T06:53:12.967120 | 2023-07-04T16:27:37 | 2023-07-04T16:27:37 | 258,050,134 | 0 | 0 | MIT | 2020-04-23T00:12:14 | 2020-04-23T00:12:13 | null | UTF-8 | Python | false | false | 1,518 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
from azure.mgmt.network import NetworkManagementClient
"""
# PREREQUISITES
pip install azure-identity
pip install azure-mgmt-network
# USAGE
python private_link_service_delete.py
    Before running the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""
def main():
client = NetworkManagementClient(
credential=DefaultAzureCredential(),
subscription_id="subId",
)
client.private_link_services.begin_delete(
resource_group_name="rg1",
service_name="testPls",
).result()
# x-ms-original-file: specification/network/resource-manager/Microsoft.Network/stable/2022-11-01/examples/PrivateLinkServiceDelete.json
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
815555a0b9ba8d3eef9e459b9d19cd9f6e6e9305 | 824f831ce0921b3e364060710c9e531f53e52227 | /Leetcode/Python_Basics/02_C_Collection_OrderedDict.py | 4c7578f24feadc8a52aabb12cbb8fd63c8f4f69d | [] | no_license | adityakverma/Interview_Prepration | e854ff92c10d05bc2c82566ea797d2ce088de00a | d08a7f728c53943e9a27c33f8e4249633a69d1a6 | refs/heads/master | 2020-04-19T19:36:06.527353 | 2019-06-15T23:02:30 | 2019-06-15T23:02:30 | 168,392,921 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 948 | py |
# 8.3.6.1. OrderedDict Examples and Recipes
# OrderedDict Examples and Recipes
#
# Since an ordered dictionary remembers its insertion order, it can be used in
# conjunction with sorting to make a sorted dictionary:
# >>>
#
# >>> # regular unsorted dictionary
# >>> d = {'banana': 3, 'apple': 4, 'pear': 1, 'orange': 2}
#
# >>> # dictionary sorted by key
# >>> OrderedDict(sorted(d.items(), key=lambda t: t[0]))
# OrderedDict([('apple', 4), ('banana', 3), ('orange', 2), ('pear', 1)])
#
# >>> # dictionary sorted by value
# >>> OrderedDict(sorted(d.items(), key=lambda t: t[1]))
# OrderedDict([('pear', 1), ('orange', 2), ('banana', 3), ('apple', 4)])
#
# >>> # dictionary sorted by length of the key string
# >>> OrderedDict(sorted(d.items(), key=lambda t: len(t[0])))
# OrderedDict([('pear', 1), ('apple', 4), ('orange', 2), ('banana', 3)])
# ------------------------------------------------------------------------------ | [
"[email protected]"
] | |
14a3acc0bab16e60abbf783682b7aa042165a154 | 20bb1ae805cd796a7c377e55966633441d1d9fd5 | /uva/10680 LCM/lcm.py | b168af4bd0eee42ae91ee9c4dc81041695a8fe27 | [] | no_license | nathantheinventor/solved-problems | 1791c9588aefe2ebdc9293eb3d58317346d88e83 | c738e203fa77ae931b0ec613e5a00f9a8f7ff845 | refs/heads/master | 2022-10-27T08:58:23.860159 | 2022-10-13T20:18:43 | 2022-10-13T20:18:43 | 122,110,149 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 559 | py | ans = [0, 1]
cur = 1
# factors[i] collects the distinct prime factors of i (a simple sieve).
factors = [[] for _ in range(1000001)]
for i in range(2, 1000001, 2):
    factors[i].append(2)
for i in range(3, 1000001, 2):
    if len(factors[i]) == 0:
        for k in range(i, 1000001, i):
            factors[k].append(i)
# lcm(1..i) grows (by the prime p) exactly when i is a prime power p^k, i.e. when
# i has a single distinct prime factor. Strip trailing zeros and keep the value
# mod 1000 so that ans[n] % 10 reports the last non-zero digit of lcm(1..n).
for i in range(2, 1000001):
if len(factors[i]) == 1:
for factor in factors[i]:
cur *= factor
while cur % 10 == 0:
cur //= 10
cur %= 1000
ans.append(cur)
n = int(input())
while n > 0:
# print(n)
print(ans[n] % 10)
n = int(input()) | [
"[email protected]"
] | |
b9dde9ef681c952721f0165bff9acbd9a024ba59 | 95740c67e49e1528919eb8f96ae8086e7386e558 | /project/reports/election_prediction/pattern/web/__init__.py | 67bf0c5ef521ac625225815154e73cffa6a8de32 | [
"MIT"
] | permissive | mdeff/ntds_2016 | 5449fd5b7a1e4aa8721d0ae33a1f8a097f73b265 | 2d597838cb2688471cc6122a5570441585393148 | refs/heads/master | 2021-01-17T17:47:01.434340 | 2019-12-16T17:53:04 | 2019-12-16T17:53:04 | 69,178,943 | 109 | 51 | null | null | null | null | UTF-8 | Python | false | false | 149,004 | py | #### PATTERN | WEB #################################################################################
# -*- coding: utf-8 -*-
# Copyright (c) 2010 University of Antwerp, Belgium
# Author: Tom De Smedt <[email protected]>
# License: BSD (see LICENSE.txt for details).
# http://www.clips.ua.ac.be/pages/pattern
####################################################################################################
# Python API interface for various web services (Google, Twitter, Wikipedia, ...)
# smgllib.py is removed from Python 3, a warning is issued in Python 2.6+. Ignore for now.
import warnings; warnings.filterwarnings(action='ignore', category=DeprecationWarning, module="sgmllib")
import threading
import time
import os
import socket, urlparse, urllib, urllib2
import base64
import htmlentitydefs
import httplib
import sgmllib
import re
import xml.dom.minidom
import StringIO
import bisect
import itertools
import new
import api
import feed
import oauth
import json
import locale
from feed import feedparser
from soup import BeautifulSoup
try:
# Import persistent Cache.
# If this module is used separately,
# a dict is used (i.e. this Python session only).
from cache import Cache, cache, TMP
except:
cache = {}
try:
from imap import Mail, MailFolder, Message, GMAIL
from imap import MailError, MailServiceError, MailLoginError, MailNotLoggedIn
from imap import FROM, SUBJECT, DATE, BODY, ATTACHMENTS
except:
pass
try:
MODULE = os.path.dirname(os.path.realpath(__file__))
except:
MODULE = ""
#### UNICODE #######################################################################################
# Latin-1 (ISO-8859-1) encoding is identical to Windows-1252 except for the code points 128-159:
# Latin-1 assigns control codes in this range, Windows-1252 has characters, punctuation, symbols
# assigned to these code points.
def decode_string(v, encoding="utf-8"):
""" Returns the given value as a Unicode string (if possible).
"""
if isinstance(encoding, basestring):
encoding = ((encoding,),) + (("windows-1252",), ("utf-8", "ignore"))
if isinstance(v, str):
for e in encoding:
try: return v.decode(*e)
except:
pass
return v
return unicode(v)
def encode_string(v, encoding="utf-8"):
""" Returns the given value as a Python byte string (if possible).
"""
if isinstance(encoding, basestring):
encoding = ((encoding,),) + (("windows-1252",), ("utf-8", "ignore"))
if isinstance(v, unicode):
for e in encoding:
try: return v.encode(*e)
except:
pass
return v
return str(v)
u = decode_utf8 = decode_string
s = encode_utf8 = encode_string
# For clearer source code:
bytestring = s
#### ASYNCHRONOUS REQUEST ##########################################################################
class AsynchronousRequest(object):
def __init__(self, function, *args, **kwargs):
""" Executes the function in the background.
AsynchronousRequest.done is False as long as it is busy, but the program will not halt in the meantime.
AsynchronousRequest.value contains the function's return value once done.
AsynchronousRequest.error contains the Exception raised by an erronous function.
For example, this is useful for running live web requests while keeping an animation running.
For good reasons, there is no way to interrupt a background process (i.e. Python thread).
You are responsible for ensuring that the given function doesn't hang.
"""
self._response = None # The return value of the given function.
self._error = None # The exception (if any) raised by the function.
self._time = time.time()
self._function = function
self._thread = threading.Thread(target=self._fetch, args=(function,)+args, kwargs=kwargs)
self._thread.start()
def _fetch(self, function, *args, **kwargs):
""" Executes the function and sets AsynchronousRequest.response.
"""
try:
self._response = function(*args, **kwargs)
except Exception, e:
self._error = e
def now(self):
""" Waits for the function to finish and yields its return value.
"""
self._thread.join(); return self._response
@property
def elapsed(self):
return time.time() - self._time
@property
def done(self):
return not self._thread.isAlive()
@property
def value(self):
return self._response
@property
def error(self):
return self._error
def __repr__(self):
return "AsynchronousRequest(function='%s')" % self._function.__name__
def asynchronous(function, *args, **kwargs):
""" Returns an AsynchronousRequest object for the given function.
"""
return AsynchronousRequest(function, *args, **kwargs)
send = asynchronous
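# Example (sketch): run a download in the background and poll AsynchronousRequest.done.
# The URL below is only a placeholder.
#request = asynchronous(URL("http://www.clips.ua.ac.be").download, timeout=10)
#while not request.done:
#    time.sleep(0.1) # Keep doing other work (e.g., drawing an animation frame).
#print request.error or request.value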
#### URL ###########################################################################################
# User agent and referrer.
# Used to identify the application accessing the web.
USER_AGENT = "Pattern/2.6 +http://www.clips.ua.ac.be/pattern"
REFERRER = "http://www.clips.ua.ac.be/pattern"
# Mozilla user agent.
# Websites can include code to block out any application except browsers.
MOZILLA = "Mozilla/5.0"
# HTTP request method.
GET = "get" # Data is encoded in the URL.
POST = "post" # Data is encoded in the message body.
# URL parts.
# protocol://username:password@domain:port/path/page?query_string#anchor
PROTOCOL, USERNAME, PASSWORD, DOMAIN, PORT, PATH, PAGE, QUERY, ANCHOR = \
"protocol", "username", "password", "domain", "port", "path", "page", "query", "anchor"
# MIME type.
MIMETYPE_WEBPAGE = ["text/html"]
MIMETYPE_STYLESHEET = ["text/css"]
MIMETYPE_PLAINTEXT = ["text/plain"]
MIMETYPE_PDF = ["application/pdf"]
MIMETYPE_NEWSFEED = ["application/rss+xml", "application/atom+xml"]
MIMETYPE_IMAGE = ["image/gif", "image/jpeg", "image/png", "image/tiff"]
MIMETYPE_AUDIO = ["audio/mpeg", "audio/mp4", "audio/x-aiff", "audio/x-wav"]
MIMETYPE_VIDEO = ["video/mpeg", "video/mp4", "video/avi", "video/quicktime", "video/x-flv"]
MIMETYPE_ARCHIVE = ["application/x-stuffit", "application/x-tar", "application/zip"]
MIMETYPE_SCRIPT = ["application/javascript", "application/ecmascript"]
def extension(filename):
""" Returns the extension in the given filename: "cat.jpg" => ".jpg".
"""
return os.path.splitext(filename)[1]
def urldecode(query):
""" Inverse operation of urllib.urlencode.
Returns a dictionary of (name, value)-items from a URL query string.
"""
def _format(s):
if s == "" or s == "None":
return None
if s.lstrip("-").isdigit():
return int(s)
try: return float(s)
except:
return s
if query:
query = query.lstrip("?").split("&")
query = ((kv.split("=") + [None])[:2] for kv in query)
query = ((u(urllib.unquote_plus(bytestring(k))),
_format(u(urllib.unquote_plus(bytestring(v))))) for k, v in query if k != "")
return dict(query)
return {}
url_decode = urldecode
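# Example (sketch):
#print urldecode("?species=seagull&page=2") # {u"species": u"seagull", u"page": 2}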
def proxy(host, protocol="https"):
""" Returns the value for the URL.open() proxy parameter.
- host: host address of the proxy server.
"""
return (host, protocol)
class Error(Exception):
""" Base class for pattern.web errors.
"""
def __init__(self, *args, **kwargs):
Exception.__init__(self, *args)
self.src = kwargs.pop("src", None)
self.url = kwargs.pop("url", None)
@property
def headers(self):
return dict(self.src.headers.items())
class URLError(Error):
pass # URL contains errors (e.g. a missing t in htp://).
class URLTimeout(URLError):
pass # URL takes to long to load.
class HTTPError(URLError):
pass # URL causes an error on the contacted server.
class HTTP301Redirect(HTTPError):
pass # Too many redirects.
# The site may be trying to set a cookie and waiting for you to return it,
# or taking other measures to discern a browser from a script.
# For specific purposes you should build your own urllib2.HTTPRedirectHandler
# and pass it to urllib2.build_opener() in URL.open()
class HTTP400BadRequest(HTTPError):
pass # URL contains an invalid request.
class HTTP401Authentication(HTTPError):
pass # URL requires a login and password.
class HTTP403Forbidden(HTTPError):
pass # URL is not accessible (user-agent?)
class HTTP404NotFound(HTTPError):
pass # URL doesn't exist on the internet.
class HTTP420Error(HTTPError):
pass # Used by Twitter for rate limiting.
class HTTP429TooMayRequests(HTTPError):
pass # Used by Twitter for rate limiting.
class HTTP500InternalServerError(HTTPError):
pass # Generic server error.
class HTTP503ServiceUnavailable(HTTPError):
pass # Used by Bing for rate limiting.
class URL(object):
def __init__(self, string=u"", method=GET, query={}, **kwargs):
""" URL object with the individual parts available as attributes:
For protocol://username:password@domain:port/path/page?query_string#anchor:
- URL.protocol: http, https, ftp, ...
- URL.username: username for restricted domains.
- URL.password: password for restricted domains.
- URL.domain : the domain name, e.g. nodebox.net.
- URL.port : the server port to connect to.
- URL.path : the server path of folders, as a list, e.g. ['news', '2010']
- URL.page : the page name, e.g. page.html.
- URL.query : the query string as a dictionary of (name, value)-items.
- URL.anchor : the page anchor.
If method is POST, the query string is sent with HTTP POST.
"""
self.__dict__["method"] = method # Use __dict__ directly since __setattr__ is overridden.
self.__dict__["_string"] = u(string)
self.__dict__["_parts"] = None
self.__dict__["_headers"] = None
self.__dict__["_redirect"] = None
if isinstance(string, URL):
self.__dict__["method"] = string.method
self.query.update(string.query)
if len(query) > 0:
# Requires that we parse the string first (see URL.__setattr__).
self.query.update(query)
if len(kwargs) > 0:
# Requires that we parse the string first (see URL.__setattr__).
self.parts.update(kwargs)
def _parse(self):
""" Parses all the parts of the URL string to a dictionary.
            URL format: protocol://username:password@domain:port/path/page?querystring#anchor
For example: http://user:[email protected]:992/animal/bird?species=seagull&q#wings
This is a cached method that is only invoked when necessary, and only once.
"""
p = urlparse.urlsplit(self._string)
P = {PROTOCOL: p[0], # http
USERNAME: u"", # user
PASSWORD: u"", # pass
DOMAIN: p[1], # example.com
PORT: u"", # 992
PATH: p[2], # [animal]
PAGE: u"", # bird
QUERY: urldecode(p[3]), # {"species": "seagull", "q": None}
ANCHOR: p[4] # wings
}
# Split the username and password from the domain.
if "@" in P[DOMAIN]:
P[USERNAME], \
P[PASSWORD] = (p[1].split("@")[0].split(":")+[u""])[:2]
P[DOMAIN] = p[1].split("@")[1]
# Split the port number from the domain.
if ":" in P[DOMAIN]:
P[DOMAIN], \
P[PORT] = P[DOMAIN].split(":")
P[PORT] = P[PORT].isdigit() and int(P[PORT]) or P[PORT]
# Split the base page from the path.
if "/" in P[PATH]:
P[PAGE] = p[2].split("/")[-1]
P[PATH] = p[2][:len(p[2]) - len(P[PAGE])].strip("/").split("/")
P[PATH] = filter(lambda v: v != "", P[PATH])
else:
P[PAGE] = p[2].strip("/")
P[PATH] = []
self.__dict__["_parts"] = P
# URL.string yields unicode(URL) by joining the different parts,
# if the URL parts have been modified.
def _get_string(self): return unicode(self)
def _set_string(self, v):
self.__dict__["_string"] = u(v)
self.__dict__["_parts"] = None
string = property(_get_string, _set_string)
@property
def parts(self):
""" Yields a dictionary with the URL parts.
"""
if not self._parts: self._parse()
return self._parts
@property
def querystring(self):
""" Yields the URL querystring: "www.example.com?page=1" => "page=1"
"""
s = self.parts[QUERY].items()
s = dict((bytestring(k), bytestring(v if v is not None else "")) for k, v in s)
s = urllib.urlencode(s)
return s
def __getattr__(self, k):
if k in self.__dict__ : return self.__dict__[k]
if k in self.parts : return self.__dict__["_parts"][k]
raise AttributeError, "'URL' object has no attribute '%s'" % k
def __setattr__(self, k, v):
if k in self.__dict__ : self.__dict__[k] = u(v); return
if k == "string" : self._set_string(v); return
if k == "query" : self.parts[k] = v; return
if k in self.parts : self.__dict__["_parts"][k] = u(v); return
raise AttributeError, "'URL' object has no attribute '%s'" % k
def open(self, timeout=10, proxy=None, user_agent=USER_AGENT, referrer=REFERRER, authentication=None):
""" Returns a connection to the url from which data can be retrieved with connection.read().
When the timeout amount of seconds is exceeded, raises a URLTimeout.
When an error occurs, raises a URLError (e.g. HTTP404NotFound).
"""
url = self.string
# Use basic urllib.urlopen() instead of urllib2.urlopen() for local files.
if os.path.exists(url):
return urllib.urlopen(url)
# Get the query string as a separate parameter if method=POST.
post = self.method == POST and self.querystring or None
socket.setdefaulttimeout(timeout)
if proxy:
proxy = urllib2.ProxyHandler({proxy[1]: proxy[0]})
proxy = urllib2.build_opener(proxy, urllib2.HTTPHandler)
urllib2.install_opener(proxy)
try:
request = urllib2.Request(bytestring(url), post, {
"User-Agent": user_agent,
"Referer": referrer
})
# Basic authentication is established with authentication=(username, password).
if authentication is not None:
request.add_header("Authorization", "Basic %s" %
base64.encodestring('%s:%s' % authentication))
return urllib2.urlopen(request)
except urllib2.HTTPError, e:
if e.code == 301: raise HTTP301Redirect(src=e, url=url)
if e.code == 400: raise HTTP400BadRequest(src=e, url=url)
if e.code == 401: raise HTTP401Authentication(src=e, url=url)
if e.code == 403: raise HTTP403Forbidden(src=e, url=url)
if e.code == 404: raise HTTP404NotFound(src=e, url=url)
if e.code == 420: raise HTTP420Error(src=e, url=url)
if e.code == 429: raise HTTP429TooMayRequests(src=e, url=url)
if e.code == 500: raise HTTP500InternalServerError(src=e, url=url)
if e.code == 503: raise HTTP503ServiceUnavailable(src=e, url=url)
raise HTTPError(str(e), src=e, url=url)
except httplib.BadStatusLine, e:
raise HTTPError(str(e), src=e, url=url)
except socket.timeout, e:
raise URLTimeout(src=e, url=url)
except socket.error, e:
if "timed out" in e.args[0]:
raise URLTimeout(src=e, url=url)
raise URLError(str(e), src=e, url=url)
except urllib2.URLError, e:
if "timed out" in e.args[0] \
or "timed out" in e.reason:
raise URLTimeout(src=e, url=url)
raise URLError(str(e), src=e, url=url)
except ValueError, e:
raise URLError(str(e), src=e, url=url)
def download(self, timeout=10, cached=True, throttle=0, proxy=None, user_agent=USER_AGENT, referrer=REFERRER, authentication=None, unicode=False, **kwargs):
""" Downloads the content at the given URL (by default it will be cached locally).
Unless unicode=False, the content is returned as a unicode string.
"""
# Filter OAuth parameters from cache id (they will be unique for each request).
if self._parts is None and self.method == GET and "oauth_" not in self._string:
id = self._string
else:
id = repr(self.parts)
id = re.sub("u{0,1}'oauth_.*?': u{0,1}'.*?', ", "", id)
# Keep a separate cache of unicode and raw download for same URL.
if unicode is True:
id = "u" + id
if cached and id in cache:
if isinstance(cache, dict): # Not a Cache object.
return cache[id]
if unicode is True:
return cache[id]
if unicode is False:
return cache.get(id, unicode=False)
t = time.time()
# Open a connection with the given settings, read it and (by default) cache the data.
try:
data = self.open(timeout, proxy, user_agent, referrer, authentication).read()
except socket.timeout, e:
raise URLTimeout(src=e, url=self.string)
if unicode is True:
data = u(data)
if cached:
cache[id] = data
if throttle:
time.sleep(max(throttle-(time.time()-t), 0))
return data
def read(self, *args, **kwargs):
return self.open(**kwargs).read(*args)
@property
def exists(self, timeout=10):
""" Yields False if the URL generates a HTTP404NotFound error.
"""
try: self.open(timeout)
except HTTP404NotFound:
return False
        except (HTTPError, URLTimeout):
return True
except URLError:
return False
except:
return True
return True
@property
def mimetype(self, timeout=10):
""" Yields the MIME-type of the document at the URL, or None.
MIME is more reliable than simply checking the document extension.
You can then do: URL.mimetype in MIMETYPE_IMAGE.
"""
try:
return self.headers["content-type"].split(";")[0]
except KeyError:
return None
@property
def headers(self, timeout=10):
""" Yields a dictionary with the HTTP response headers.
"""
if self.__dict__["_headers"] is None:
try:
h = dict(self.open(timeout).info())
except URLError:
h = {}
self.__dict__["_headers"] = h
return self.__dict__["_headers"]
@property
def redirect(self, timeout=10):
""" Yields the redirected URL, or None.
"""
if self.__dict__["_redirect"] is None:
try:
r = self.open(timeout).geturl()
except URLError:
r = None
self.__dict__["_redirect"] = r != self.string and r or ""
return self.__dict__["_redirect"] or None
def __str__(self):
return bytestring(self.string)
def __unicode__(self):
# The string representation includes the query attributes with HTTP GET.
P = self.parts
u = []
if P[PROTOCOL]:
u.append("%s://" % P[PROTOCOL])
if P[USERNAME]:
u.append("%s:%s@" % (P[USERNAME], P[PASSWORD]))
if P[DOMAIN]:
u.append(P[DOMAIN])
if P[PORT]:
u.append(":%s" % P[PORT])
if P[PORT] or P[DOMAIN] and not P[PATH] and not P[PAGE]:
u.append("/")
if P[PATH]:
u.append("/%s/" % "/".join(P[PATH]))
if P[PAGE] and len(u) > 0:
u[-1] = u[-1].rstrip("/")
if P[PAGE]:
u.append("/%s" % P[PAGE])
if P[QUERY] and self.method == GET:
u.append("?%s" % self.querystring)
if P[ANCHOR]:
u.append("#%s" % P[ANCHOR])
u = u"".join(u)
u = u.lstrip("/")
return u
def __repr__(self):
return "URL(%s, method=%s)" % (repr(self.string), repr(self.method))
def copy(self):
return URL(self.string, self.method, self.query)
def download(url=u"", method=GET, query={}, timeout=10, cached=True, throttle=0, proxy=None, user_agent=USER_AGENT, referrer=REFERRER, authentication=None, unicode=False):
""" Downloads the content at the given URL (by default it will be cached locally).
Unless unicode=False, the content is returned as a unicode string.
"""
return URL(url, method, query).download(timeout, cached, throttle, proxy, user_agent, referrer, authentication, unicode)
#url = URL("http://user:[email protected]:992/animal/bird?species#wings")
#print url.parts
#print url.query
#print url.string
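# Example (sketch): check that a URL exists and is a web page before downloading it.
# The URL is only a placeholder.
#url = URL("http://www.clips.ua.ac.be")
#if url.exists and url.mimetype in MIMETYPE_WEBPAGE:
#    html = url.download(timeout=10, cached=True, unicode=True)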
#--- STREAMING URL BUFFER --------------------------------------------------------------------------
def bind(object, method, function):
""" Attaches the function as a method with the given name to the given object.
"""
setattr(object, method, new.instancemethod(function, object))
class Stream(list):
def __init__(self, url, delimiter="\n", **kwargs):
""" Buffered stream of data from a given URL.
"""
self.socket = URL(url).open(**kwargs)
self.buffer = ""
self.delimiter = delimiter
def update(self, bytes=1024):
""" Reads a number of bytes from the stream.
If a delimiter is encountered, calls Stream.parse() on the packet.
"""
packets = []
self.buffer += self.socket.read(bytes)
self.buffer = self.buffer.split(self.delimiter, 1)
while len(self.buffer) > 1:
data = self.buffer[0]
data = self.parse(data)
if data is not None:
packets.append(data)
self.buffer = self.buffer[-1]
self.buffer = self.buffer.split(self.delimiter, 1)
self.buffer = self.buffer[-1]
self.extend(packets)
return packets
def parse(self, data):
""" Must be overridden in a subclass.
"""
return data
def clear(self):
list.__init__(self, [])
def stream(url, delimiter="\n", parse=lambda data: data, **kwargs):
""" Returns a new Stream with the given parse method.
"""
stream = Stream(url, delimiter, **kwargs)
bind(stream, "parse", lambda stream, data: parse(data))
return stream
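# Example (sketch): buffer a line-delimited stream and collect packets as they arrive.
# The URL is only a placeholder (e.g., Twitter's streaming API also requires OAuth parameters).
#s = stream("http://example.com/live", delimiter="\n", parse=lambda data: data.strip())
#while True:
#    time.sleep(1)
#    s.update(bytes=1024) # Completed packets are appended to the list (and returned).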
#--- FIND URLs -------------------------------------------------------------------------------------
# Functions for parsing URL's and e-mail adresses from strings.
RE_URL_PUNCTUATION = ("\"'{(>", "\"'.,;)}")
RE_URL_HEAD = r"[%s|\[|\s]" % "|".join(RE_URL_PUNCTUATION[0]) # Preceded by space, parenthesis or HTML tag.
RE_URL_TAIL = r"[%s|\]]*[\s|\<]" % "|".join(RE_URL_PUNCTUATION[1]) # Followed by space, punctuation or HTML tag.
RE_URL1 = r"(https?://.*?)" + RE_URL_TAIL # Starts with http:// or https://
RE_URL2 = RE_URL_HEAD + r"(www\..*?\..*?)" + RE_URL_TAIL # Starts with www.
RE_URL3 = RE_URL_HEAD + r"([\w|-]*?\.(com|net|org|edu|de|uk))" + RE_URL_TAIL
RE_URL1, RE_URL2, RE_URL3 = (
re.compile(RE_URL1, re.I),
re.compile(RE_URL2, re.I),
re.compile(RE_URL3, re.I))
def find_urls(string, unique=True):
""" Returns a list of URLs parsed from the string.
Works on http://, https://, www. links or domain names ending in .com, .org, .net.
Links can be preceded by leading punctuation (open parens)
and followed by trailing punctuation (period, comma, close parens).
"""
string = u(string)
string = string.replace(u"\u2024", ".")
    string = string.replace(" ", "  ")
matches = []
for p in (RE_URL1, RE_URL2, RE_URL3):
for m in p.finditer(" %s " % string):
s = m.group(1)
s = s.split("\">")[0].split("'>")[0] # google.com">Google => google.com
if not unique or s not in matches:
matches.append(s)
return matches
links = find_urls
RE_EMAIL = re.compile(r"[\w\-\.\+]+@(\w[\w\-]+\.)+[\w\-]+") # [email protected]
def find_email(string, unique=True):
""" Returns a list of e-mail addresses parsed from the string.
"""
string = u(string).replace(u"\u2024", ".")
matches = []
for m in RE_EMAIL.finditer(string):
s = m.group(0)
if not unique or s not in matches:
matches.append(s)
return matches
def find_between(a, b, string):
""" Returns a list of substrings between a and b in the given string.
"""
p = "%s(.*?)%s" % (a, b)
p = re.compile(p, re.DOTALL | re.I)
return [m for m in p.findall(string)]
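# Examples (sketch), with illustrative return values:
#print find_urls("Visit www.clips.ua.ac.be for details.") # ["www.clips.ua.ac.be"]
#print find_email("Mail [email protected] please.") # ["[email protected]"]
#print find_between("<b>", "</b>", "a <b>bold</b> word") # ["bold"]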
#### PLAIN TEXT ####################################################################################
# Functions for stripping HTML tags from strings.
BLOCK = [
"title", "h1", "h2", "h3", "h4", "h5", "h6", "p",
"center", "blockquote", "div", "table", "ul", "ol", "dl", "pre", "code", "form"
]
SELF_CLOSING = ["br", "hr", "img"]
# Element tag replacements for a stripped version of HTML source with strip_tags().
# Block-level elements are followed by linebreaks,
# list items are preceded by an asterisk ("*").
LIST_ITEM = "*"
blocks = dict.fromkeys(BLOCK+["br", "tr", "td"], ("", "\n\n"))
blocks.update({
"li": ("%s " % LIST_ITEM, "\n"),
"img": ("", ""),
"br": ("", "\n"),
"th": ("", "\n"),
"tr": ("", "\n"),
"td": ("", "\t"),
})
class HTMLParser(sgmllib.SGMLParser):
def __init__(self):
sgmllib.SGMLParser.__init__(self)
def handle_starttag(self, tag, attrs):
pass
def handle_endtag(self, tag):
pass
def unknown_starttag(self, tag, attrs):
self.handle_starttag(tag, attrs)
def unknown_endtag(self, tag):
self.handle_endtag(tag)
def clean(self, html):
html = decode_utf8(html)
html = html.replace("/>", " />")
        html = html.replace("  />", " />")
        html = html.replace("<!", "&lt;!")
        html = html.replace("&lt;!DOCTYPE", "<!DOCTYPE")
        html = html.replace("&lt;!doctype", "<!doctype")
        html = html.replace("&lt;!--", "<!--")
return html
def parse_declaration(self, i):
# We can live without sgmllib's parse_declaration().
try:
return sgmllib.SGMLParser.parse_declaration(self, i)
except sgmllib.SGMLParseError:
return i + 1
def convert_charref(self, name):
# This fixes a bug in older versions of sgmllib when working with Unicode.
# Fix: ASCII ends at 127, not 255
try:
n = int(name)
except ValueError:
return
if not 0 <= n <= 127:
return
return chr(n)
class HTMLTagstripper(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
def strip(self, html, exclude=[], replace=blocks):
""" Returns the HTML string with all element tags (e.g. <p>) removed.
- exclude : a list of tags to keep. Element attributes are stripped.
To preserve attributes a dict of (tag name, [attribute])-items can be given.
- replace : a dictionary of (tag name, (replace_before, replace_after))-items.
By default, block-level elements are separated with linebreaks.
"""
if html is None:
return None
self._exclude = isinstance(exclude, dict) and exclude or dict.fromkeys(exclude, [])
self._replace = replace
self._data = []
self.feed(self.clean(html))
self.close()
self.reset()
return "".join(self._data)
def clean(self, html):
# Escape all entities (just strip tags).
        return HTMLParser.clean(self, html).replace("&", "&amp;")
def handle_starttag(self, tag, attributes):
if tag in BLOCK and self._data and self._data[-1][-1:] != "\n":
# Block-level elements always break to a new line.
self._data.append("\n")
if tag in self._exclude:
# Create the tag attribute string,
# including attributes defined in the HTMLTagStripper._exclude dict.
a = len(self._exclude[tag]) > 0 and attributes or []
a = ["%s=\"%s\"" % (k,v) for k, v in a if k in self._exclude[tag]]
a = (" "+" ".join(a)).rstrip()
self._data.append("<%s%s>" % (tag, a))
if tag in self._replace:
self._data.append(self._replace[tag][0])
if tag in self._replace and tag in SELF_CLOSING:
self._data.append(self._replace[tag][1])
def handle_endtag(self, tag):
if tag in self._exclude and self._data and self._data[-1].startswith("<"+tag):
# Never keep empty elements (e.g. <a></a>).
self._data.pop(-1); return
if tag in self._exclude:
self._data.append("</%s>" % tag)
if tag in self._replace:
self._data.append(self._replace[tag][1])
def handle_data(self, data):
self._data.append(data.strip("\n\t"))
def handle_comment(self, comment):
if "comment" in self._exclude or \
"!--" in self._exclude:
self._data.append("<!--%s-->" % comment)
# As a function:
strip_tags = HTMLTagstripper().strip
def strip_element(string, tag, attributes=""):
""" Removes all elements with the given tagname and attributes from the string.
Open and close tags are kept in balance.
No HTML parser is used: strip_element(s, "a", 'class="x"') matches
'<a class="x">' or '<a href="x" class="x">' but not "<a class='x'>".
"""
s = string.lower() # Case-insensitive.
t = tag.strip("</>")
a = (" " + attributes.lower().strip()).rstrip()
i = 0
j = 0
while j >= 0:
#i = s.find("<%s%s" % (t, a), i)
m = re.search(r"<%s[^\>]*?%s" % (t, a), s[i:])
i = i + m.start() if m else -1
j = s.find("</%s>" % t, i+1)
opened, closed = s[i:j].count("<%s" % t), 1
while opened > closed and j >= 0:
k = s.find("</%s>" % t, j+1)
opened += s[j:k].count("<%s" % t)
closed += 1
j = k
if i < 0: return string
if j < 0: return string[:i]
string = string[:i] + string[j+len(t)+3:]; s=string.lower()
return string
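# Example (sketch): remove a <div class="x"> element and everything inside it.
#print strip_element('<p>keep</p><div class="x"><p>drop</p></div>', "div", 'class="x"')
# => '<p>keep</p>'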
def strip_between(a, b, string):
""" Removes anything between (and including) string a and b inside the given string.
"""
p = "%s.*?%s" % (a, b)
p = re.compile(p, re.DOTALL | re.I)
return re.sub(p, "", string)
def strip_javascript(html):
return strip_between("<script.*?>", "</script>", html)
def strip_inline_css(html):
return strip_between("<style.*?>", "</style>", html)
def strip_comments(html):
return strip_between("<!--", "-->", html)
def strip_forms(html):
return strip_between("<form.*?>", "</form>", html)
RE_AMPERSAND = re.compile("\&(?!\#)") # & not followed by #
RE_UNICODE = re.compile(r'&(#?)(x|X?)(\w+);') # &#201;
def encode_entities(string):
    """ Encodes HTML entities in the given string ("<" => "&lt;").
        For example, to display "<em>hello</em>" in a browser,
        we need to pass "&lt;em&gt;hello&lt;/em&gt;" (otherwise "hello" in italic is displayed).
    """
    if isinstance(string, basestring):
        string = RE_AMPERSAND.sub("&amp;", string)
        string = string.replace("<", "&lt;")
        string = string.replace(">", "&gt;")
        string = string.replace('"', "&quot;")
        string = string.replace("'", "&#39;")
return string
def decode_entities(string):
    """ Decodes HTML entities in the given string ("&lt;" => "<").
"""
# http://snippets.dzone.com/posts/show/4569
def replace_entity(match):
hash, hex, name = match.group(1), match.group(2), match.group(3)
if hash == "#" or name.isdigit():
if hex == "":
                return unichr(int(name))                  # "&#38;" => "&"
            if hex.lower() == "x":
                return unichr(int("0x" + name, 16))       # "&#x0026;" => "&"
        else:
            cp = htmlentitydefs.name2codepoint.get(name)  # "&amp;" => "&"
return unichr(cp) if cp else match.group() # "&foo;" => "&foo;"
if isinstance(string, basestring):
return RE_UNICODE.subn(replace_entity, string)[0]
return string
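# Examples (sketch), with illustrative return values:
#print encode_entities("<b>black & white</b>") # "&lt;b&gt;black &amp; white&lt;/b&gt;"
#print decode_entities("&eacute;chelle") # u"échelle"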
def encode_url(string):
return urllib.quote_plus(bytestring(string)) # "black/white" => "black%2Fwhite".
def decode_url(string):
return urllib.unquote_plus(string)
RE_SPACES = re.compile("( |\xa0)+", re.M) # Matches one or more spaces.
RE_TABS = re.compile(r"\t+", re.M) # Matches one or more tabs.
def collapse_spaces(string, indentation=False, replace=" "):
""" Returns a string with consecutive spaces collapsed to a single space.
Whitespace on empty lines and at the end of each line is removed.
With indentation=True, retains leading whitespace on each line.
"""
p = []
for x in string.splitlines():
n = indentation and len(x) - len(x.lstrip()) or 0
p.append(x[:n] + RE_SPACES.sub(replace, x[n:]).strip())
return "\n".join(p)
def collapse_tabs(string, indentation=False, replace=" "):
""" Returns a string with (consecutive) tabs replaced by a single space.
Whitespace on empty lines and at the end of each line is removed.
With indentation=True, retains leading whitespace on each line.
"""
p = []
for x in string.splitlines():
n = indentation and len(x) - len(x.lstrip()) or 0
p.append(x[:n] + RE_TABS.sub(replace, x[n:]).strip())
return "\n".join(p)
def collapse_linebreaks(string, threshold=1):
""" Returns a string with consecutive linebreaks collapsed to at most the given threshold.
Whitespace on empty lines and at the end of each line is removed.
"""
n = "\n" * threshold
p = [s.rstrip() for s in string.splitlines()]
string = "\n".join(p)
string = re.sub(n+r"+", n, string)
return string
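# Examples (sketch):
#print collapse_spaces("black   and   white") # "black and white"
#print collapse_linebreaks("black\n\n\n\nwhite", threshold=2) # "black\n\nwhite"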
def plaintext(html, keep=[], replace=blocks, linebreaks=2, indentation=False):
""" Returns a string with all HTML tags removed.
Content inside HTML comments, the <style> tag and the <script> tags is removed.
- keep : a list of tags to keep. Element attributes are stripped.
To preserve attributes a dict of (tag name, [attribute])-items can be given.
- replace : a dictionary of (tag name, (replace_before, replace_after))-items.
By default, block-level elements are followed by linebreaks.
- linebreaks : the maximum amount of consecutive linebreaks,
- indentation : keep left line indentation (tabs and spaces)?
"""
if not keep.__contains__("script"):
html = strip_javascript(html)
if not keep.__contains__("style"):
html = strip_inline_css(html)
if not keep.__contains__("form"):
html = strip_forms(html)
if not keep.__contains__("comment") and \
not keep.__contains__("!--"):
html = strip_comments(html)
html = html.replace("\r", "\n")
html = strip_tags(html, exclude=keep, replace=replace)
html = decode_entities(html)
html = collapse_spaces(html, indentation)
html = collapse_tabs(html, indentation)
html = collapse_linebreaks(html, linebreaks)
html = html.strip()
return html
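# Example (sketch): strip all tags but keep links (with their href attribute).
#html = '<div><p>Hello <a href="http://www.clips.ua.ac.be">world</a>!</p></div>'
#print plaintext(html, keep={"a": ["href"]})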
#### SEARCH ENGINE #################################################################################
SEARCH = "search" # Query for pages (i.e. links to websites).
IMAGE = "image" # Query for images.
NEWS = "news" # Query for news items.
TINY = "tiny" # Image size around 100x100.
SMALL = "small" # Image size around 200x200.
MEDIUM = "medium" # Image size around 500x500.
LARGE = "large" # Image size around 1000x1000.
RELEVANCY = "relevancy" # Sort results by most relevant.
LATEST = "latest" # Sort results by most recent.
class Result(dict):
def __init__(self, url):
""" An item in a list of results returned by SearchEngine.search().
All dictionary entries are available as unicode string attributes.
- url : the URL of the referred web content,
- title : the title of the content at the URL,
- text : the content text,
- language: the content language,
- author : for news items and images, the author,
- date : for news items, the publication date.
"""
dict.__init__(self)
self.url = url
@property
def txt(self):
return self.text
@property
def description(self):
return self.text # Backwards compatibility.
def download(self, *args, **kwargs):
""" Download the content at the given URL.
By default it will be cached - see URL.download().
"""
return URL(self.url).download(*args, **kwargs)
def __getattr__(self, k):
return self.get(k, u"")
def __getitem__(self, k):
return self.get(k, u"")
def __setattr__(self, k, v):
dict.__setitem__(self, u(k), v is not None and u(v) or u"") # Store strings as unicode.
def __setitem__(self, k, v):
dict.__setitem__(self, u(k), v is not None and u(v) or u"")
def setdefault(self, k, v):
dict.setdefault(self, u(k), u(v))
def update(self, *args, **kwargs):
map = dict()
map.update(*args, **kwargs)
dict.update(self, [(u(k), u(v)) for k, v in map.items()])
def __repr__(self):
return "Result(%s)" % dict.__repr__(self)
class Results(list):
def __init__(self, source=None, query=None, type=SEARCH, total=0):
""" A list of results returned from SearchEngine.search().
- source: the service that yields the results (e.g. GOOGLE, TWITTER).
- query : the query that yields the results.
- type : the query type (SEARCH, IMAGE, NEWS).
- total : the total result count.
This is not the length of the list, but the total number of matches for the given query.
"""
self.source = source
self.query = query
self.type = type
self.total = total
class SearchEngine(object):
def __init__(self, license=None, throttle=1.0, language=None):
""" A base class for a web service.
- license : license key for the API,
- throttle : delay between requests (avoid hammering the server).
Inherited by: Google, Bing, Wikipedia, Twitter, Facebook, Flickr, ...
"""
self.license = license
self.throttle = throttle # Amount of sleep time after executing a query.
self.language = language # Result.language restriction (e.g., "en").
self.format = lambda x: x # Formatter applied to each attribute of each Result.
def search(self, query, type=SEARCH, start=1, count=10, sort=RELEVANCY, size=None, cached=True, **kwargs):
return Results(source=None, query=query, type=type)
class SearchEngineError(HTTPError):
pass
class SearchEngineTypeError(SearchEngineError):
pass # Raised when an unknown type is passed to SearchEngine.search().
class SearchEngineLimitError(SearchEngineError):
pass # Raised when the query limit for a license is reached.
#--- GOOGLE ----------------------------------------------------------------------------------------
# Google Search is a web search engine owned by Google Inc.
# Google Custom Search is a paid service.
# https://code.google.com/apis/console/
# http://code.google.com/apis/customsearch/v1/overview.html
GOOGLE = "https://www.googleapis.com/customsearch/v1?"
GOOGLE_LICENSE = api.license["Google"]
GOOGLE_CUSTOM_SEARCH_ENGINE = "000579440470800426354:_4qo2s0ijsi"
# Search results can start with: "Jul 29, 2007 ...",
# which is the date of the page parsed by Google from the content.
RE_GOOGLE_DATE = re.compile("^([A-Z][a-z]{2} [0-9]{1,2}, [0-9]{4}) {0,1}...")
class Google(SearchEngine):
def __init__(self, license=None, throttle=0.5, language=None):
SearchEngine.__init__(self, license or GOOGLE_LICENSE, throttle, language)
def search(self, query, type=SEARCH, start=1, count=10, sort=RELEVANCY, size=None, cached=True, **kwargs):
""" Returns a list of results from Google for the given query.
- type : SEARCH,
- start: maximum 100 results => start 1-10 with count=10,
- count: maximum 10,
There is a daily limit of 10,000 queries. Google Custom Search is a paid service.
"""
if type != SEARCH:
raise SearchEngineTypeError
if not query or count < 1 or start < 1 or start > (100 / count):
return Results(GOOGLE, query, type)
# 1) Create request URL.
url = URL(GOOGLE, query={
"key": self.license or GOOGLE_LICENSE,
"cx": GOOGLE_CUSTOM_SEARCH_ENGINE,
"q": query,
"start": 1 + (start-1) * count,
"num": min(count, 10),
"alt": "json"
})
# 2) Restrict language.
if self.language is not None:
url.query["lr"] = "lang_" + self.language
# 3) Parse JSON response.
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
data = url.download(cached=cached, **kwargs)
data = json.loads(data)
if data.get("error", {}).get("code") == 403:
raise SearchEngineLimitError
results = Results(GOOGLE, query, type)
results.total = int(data.get("queries", {}).get("request", [{}])[0].get("totalResults") or 0)
for x in data.get("items", []):
r = Result(url=None)
r.url = self.format(x.get("link"))
r.title = self.format(x.get("title"))
r.text = self.format(x.get("htmlSnippet").replace("<br> ","").replace("<b>...</b>", "..."))
r.language = self.language or ""
r.date = ""
if not r.date:
# Google Search results can start with a date (parsed from the content):
m = RE_GOOGLE_DATE.match(r.text)
if m:
r.date = m.group(1)
r.text = "..." + r.text[len(m.group(0)):]
results.append(r)
return results
def translate(self, string, input="en", output="fr", **kwargs):
""" Returns the translation of the given string in the desired output language.
Google Translate is a paid service, license without billing raises HTTP401Authentication.
"""
url = URL("https://www.googleapis.com/language/translate/v2?", method=GET, query={
"key": self.license or GOOGLE_LICENSE,
"q": string, # 1000 characters maximum
"source": input,
"target": output
})
kwargs.setdefault("cached", False)
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
try:
data = url.download(**kwargs)
except HTTP403Forbidden, e:
raise HTTP401Authentication, "Google translate API is a paid service"
data = json.loads(data)
data = data.get("data", {}).get("translations", [{}])[0].get("translatedText", "")
data = decode_entities(data)
return u(data)
def identify(self, string, **kwargs):
""" Returns a (language, confidence)-tuple for the given string.
Google Translate is a paid service, license without billing raises HTTP401Authentication.
"""
url = URL("https://www.googleapis.com/language/translate/v2/detect?", method=GET, query={
"key": self.license or GOOGLE_LICENSE,
"q": string[:1000]
})
kwargs.setdefault("cached", False)
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
try:
data = url.download(**kwargs)
except HTTP403Forbidden:
raise HTTP401Authentication, "Google translate API is a paid service"
data = json.loads(data)
data = data.get("data", {}).get("detections", [[{}]])[0][0]
data = u(data.get("language")), float(data.get("confidence"))
return data
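# Example (sketch): querying Google Custom Search.
# A valid (paid) API key must be passed as license; "cats" is just a sample query.
#engine = Google(license=None, language="en")
#for result in engine.search("cats", start=1, count=10, cached=True):
#    print result.url
#    print result.title
#    print plaintext(result.text)
#    print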
#--- YAHOO -----------------------------------------------------------------------------------------
# Yahoo! Search is a web search engine owned by Yahoo! Inc.
# Yahoo! BOSS ("Build Your Own Search Service") is a paid service.
# http://developer.yahoo.com/search/
YAHOO = "http://yboss.yahooapis.com/ysearch/"
YAHOO_LICENSE = api.license["Yahoo"]
class Yahoo(SearchEngine):
def __init__(self, license=None, throttle=0.5, language=None):
SearchEngine.__init__(self, license or YAHOO_LICENSE, throttle, language)
def _authenticate(self, url):
url.query.update({
"oauth_version": "1.0",
"oauth_nonce": oauth.nonce(),
"oauth_timestamp": oauth.timestamp(),
"oauth_consumer_key": self.license[0],
"oauth_signature_method": "HMAC-SHA1"
})
url.query["oauth_signature"] = oauth.sign(url.string.split("?")[0], url.query,
method = GET,
secret = self.license[1]
)
return url
def search(self, query, type=SEARCH, start=1, count=10, sort=RELEVANCY, size=None, cached=True, **kwargs):
""" Returns a list of results from Yahoo for the given query.
- type : SEARCH, IMAGE or NEWS,
- start: maximum 1000 results => start 1-100 with count=10, 1000/count,
- count: maximum 50, or 35 for images.
There is no daily limit, however Yahoo BOSS is a paid service.
"""
if type not in (SEARCH, IMAGE, NEWS):
raise SearchEngineTypeError
if type == SEARCH:
url = YAHOO + "web"
if type == IMAGE:
url = YAHOO + "images"
if type == NEWS:
url = YAHOO + "news"
if not query or count < 1 or start < 1 or start > 1000 / count:
return Results(YAHOO, query, type)
# 1) Create request URL.
url = URL(url, method=GET, query={
"q": query.replace(" ", "+"),
"start": 1 + (start-1) * count,
"count": min(count, type==IMAGE and 35 or 50),
"format": "json"
})
# 2) Restrict language.
if self.language is not None:
market = locale.market(self.language)
if market:
url.query["market"] = market.lower()
# 3) Authenticate.
url = self._authenticate(url)
# 4) Parse JSON response.
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
try:
data = url.download(cached=cached, **kwargs)
except HTTP401Authentication:
raise HTTP401Authentication, "Yahoo %s API is a paid service" % type
except HTTP403Forbidden:
raise SearchEngineLimitError
data = json.loads(data)
data = data.get("bossresponse") or {}
data = data.get({SEARCH:"web", IMAGE:"images", NEWS:"news"}[type], {})
results = Results(YAHOO, query, type)
results.total = int(data.get("totalresults") or 0)
for x in data.get("results", []):
r = Result(url=None)
r.url = self.format(x.get("url", x.get("clickurl")))
r.title = self.format(x.get("title"))
r.text = self.format(x.get("abstract"))
r.date = self.format(x.get("date"))
r.author = self.format(x.get("source"))
r.language = self.format(x.get("language") and \
x.get("language").split(" ")[0] or self.language or "")
results.append(r)
return results
#--- BING ------------------------------------------------------------------------------------------
# Bing is a web search engine owned by Microsoft.
# Bing Search API is a paid service.
# https://datamarket.azure.com/dataset/5BA839F1-12CE-4CCE-BF57-A49D98D29A44
# https://datamarket.azure.com/account/info
BING = "https://api.datamarket.azure.com/Bing/Search/"
BING_LICENSE = api.license["Bing"]
class Bing(SearchEngine):
def __init__(self, license=None, throttle=0.5, language=None):
SearchEngine.__init__(self, license or BING_LICENSE, throttle, language)
def search(self, query, type=SEARCH, start=1, count=10, sort=RELEVANCY, size=None, cached=True, **kwargs):
"""" Returns a list of results from Bing for the given query.
- type : SEARCH, IMAGE or NEWS,
- start: maximum 1000 results => start 1-100 with count=10, 1000/count,
- count: maximum 50, or 15 for news,
- size : for images, either SMALL, MEDIUM or LARGE.
There is no daily query limit.
"""
if type not in (SEARCH, IMAGE, NEWS):
raise SearchEngineTypeError
if type == SEARCH:
src = "Web"
if type == IMAGE:
src = "Image"
if type == NEWS:
src = "News"
if not query or count < 1 or start < 1 or start > 1000 / count:
return Results(BING + src + "?", query, type)
# 1) Construct request URL.
url = URL(BING + "Composite", method=GET, query={
"Sources": "'" + src.lower() + "'",
"Query": "'" + query + "'",
"$skip": 1 + (start-1) * count,
"$top": min(count, type==NEWS and 15 or 50),
"$format": "json",
})
# 2) Restrict image size.
if size in (TINY, SMALL, MEDIUM, LARGE):
url.query["ImageFilters"] = {
TINY: "'Size:Small'",
SMALL: "'Size:Small'",
MEDIUM: "'Size:Medium'",
LARGE: "'Size:Large'" }[size]
# 3) Restrict language.
if type in (SEARCH, IMAGE) and self.language is not None:
url.query["Query"] = url.query["Query"][:-1] + " language: %s'" % self.language
#if self.language is not None:
# market = locale.market(self.language)
# if market:
# url.query["market"] = market
# 4) Parse JSON response.
kwargs["authentication"] = ("", self.license)
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
try:
data = url.download(cached=cached, **kwargs)
except HTTP401Authentication:
raise HTTP401Authentication, "Bing %s API is a paid service" % type
except HTTP503ServiceUnavailable:
raise SearchEngineLimitError
data = json.loads(data)
data = data.get("d", {})
data = data.get("results", [{}])[0]
results = Results(BING, query, type)
results.total = int(data.get(src+"Total", 0))
for x in data.get(src, []):
r = Result(url=None)
r.url = self.format(x.get("MediaUrl", x.get("Url")))
r.title = self.format(x.get("Title"))
r.text = self.format(x.get("Description", x.get("Snippet")))
r.language = self.language or ""
r.date = self.format(x.get("DateTime", x.get("Date")))
r.author = self.format(x.get("Source"))
results.append(r)
return results
#--- DUCKDUCKGO ------------------------------------------------------------------------------------
# DuckDuckGo is a privacy-respecting aggregate search engine,
# with information from Wikipedia, WikiHow, Wikia, GitHub, The Free Dictionary, etc.
# https://duckduckgo.com/api.html
# https://duckduckgo.com/params.html
DUCKDUCKGO = "http://api.duckduckgo.com/"
DUCKDUCKGO_LICENSE = api.license["DuckDuckGo"]
# Results from DuckDuckGo have a Result.type with semantic information,
# e.g., "apple" => "plant and plant parts". Known types:
REFERENCE, CATEGORY, DEFINITION = \
"reference", "category", "definition"
class DuckDuckGo(SearchEngine):
def __init__(self, license=None, throttle=0.5, language=None):
SearchEngine.__init__(self, license or DUCKDUCKGO_LICENSE, throttle, language)
def search(self, query, type=SEARCH, start=None, count=None, sort=RELEVANCY, size=None, cached=True, **kwargs):
"""" Returns a list of results from DuckDuckGo for the given query.
"""
if type != SEARCH:
raise SearchEngineTypeError
# 1) Construct request URL.
url = URL(DUCKDUCKGO, method=GET, query={
"q": query,
"o": "json"
})
# 2) Restrict language.
if type == SEARCH and self.language is not None:
url.query["kl"] = self.language
# 3) Parse JSON response.
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
data = url.download(cached=cached, **kwargs)
data = json.loads(data)
results = Results(DUCKDUCKGO, query, type)
results.total = None
for x in data.get("Results", []):
if x.get("FirstURL"):
r = Result(url=None)
# Parse official website link.
r.url = self.format(x.get("FirstURL"))
r.title = self.format(data.get("Heading"))
r.text = self.format(data.get("Abstract"))
r.author = self.format(data.get("AbstractSource"))
r.type = self.format(REFERENCE)
results.append(r)
for topic in data.get("RelatedTopics", []):
for x in topic.get("Topics", [topic]):
r = Result(url=None)
r.url = x.get("FirstURL")
# Parse title and type from URL (e.g., http://duckduckgo.com/d/Cats?kl=en).
m = re.match(r"^http://duckduckgo.com/([a-z]/)?(.*?)(\?|$)", r.url)
# Parse title: "Cats".
s1 = m and m.group(2) or "" # Title: "Cats"
s1 = u(decode_url(s1.encode("utf-8")))
s1 = s1.strip().replace("_", " ")
s1 = s1[:1].upper() + s1[1:]
# Parse description; the part before the first "-" or "," was the link.
s2 = x.get("Text", "").strip()
s2 = re.sub(r" +", " ", s2)
s2 = s2[:1].upper() + s2[1:] or ""
s2 = s2.startswith(s1) \
and "<a href=\"%s\">%s</a>%s" % (r.url, s1, s2[len(s1):]) \
or re.sub(r"^(.*?)( - | or |, )(.*?)", "<a href=\"%s\">\\1</a>\\2\\3" % r.url, s2)
# Parse type: "d/" => "definition".
s3 = m and m.group(1) or ""
s3 = {"c": CATEGORY, "d": DEFINITION}.get(s3.rstrip("/"), "")
s3 = topic.get("Name", "").lower() or s3
s3 = re.sub("^in ", "", s3)
# Format result.
r.url = self.format(r.url)
r.title = self.format(s1)
r.text = self.format(s2)
r.type = self.format(s3)
results.append(r)
return results
def answer(self, string, **kwargs):
""" Returns a DuckDuckGo answer for the given string (e.g., math, spelling, ...)
"""
url = URL(DUCKDUCKGO, method=GET, query={
"q": string,
"o": "json"
})
kwargs.setdefault("cached", False)
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
data = url.download(**kwargs)
data = json.loads(data)
data = data.get(kwargs.get("field", "Answer"))
return u(data)
def spelling(self, string):
""" Returns a list of spelling suggestions for the given string.
"""
s = self.answer("spell " + string, cached=True)
s = re.findall(r"<a.*?>(.*?)</a>", s)
return s
def definition(self, string):
""" Returns a dictionary definition for the given string.
"""
s = self.answer(string, field="Definition", cached=True)
s = re.sub(r"^.*? definition: ", "", s)
s = re.sub(r"(^'''.*?''' |^)(.)(.*?)$",
lambda m: m.group(1) + m.group(2).upper() + m.group(3), s)
return s
DDG = DuckDuckGo
#for r in DDG().search("cats"):
# print r.url
# print r.title # Can be used as a new query.
# print plaintext(r.text)
# print r.type # REFERENCE, CATEGORY, DEFINITION, "people", "sports" ...
# print
#print DDG().definition("cat")
#print DDG().spelling("catnpa")
#--- TWITTER ---------------------------------------------------------------------------------------
# Twitter is an online social networking and microblogging service
# that enables users to post and read text-based messages of up to 140 characters ("tweets").
# https://dev.twitter.com/docs/api/1.1
TWITTER = "https://api.twitter.com/1.1/"
TWITTER_STREAM = "https://stream.twitter.com/1.1/statuses/filter.json"
TWITTER_STATUS = "https://twitter.com/%s/status/%s"
TWITTER_LICENSE = api.license["Twitter"]
TWITTER_HASHTAG = re.compile(r"(\s|^)(#[a-z0-9_\-]+)", re.I) # Word starts with "#".
TWITTER_RETWEET = re.compile(r"(\s|^RT )(@[a-z0-9_\-]+)", re.I) # Word starts with "RT @".
class Twitter(SearchEngine):
def __init__(self, license=None, throttle=0.5, language=None):
SearchEngine.__init__(self, license or TWITTER_LICENSE, throttle, language)
self._pagination = {}
def _authenticate(self, url):
url.query.update({
"oauth_version": "1.0",
"oauth_nonce": oauth.nonce(),
"oauth_timestamp": oauth.timestamp(),
"oauth_consumer_key": self.license[0],
"oauth_token": self.license[2][0],
"oauth_signature_method": "HMAC-SHA1"
})
url.query["oauth_signature"] = oauth.sign(url.string.split("?")[0], url.query,
method = GET,
secret = self.license[1],
token = self.license[2][1]
)
return url
def search(self, query, type=SEARCH, start=1, count=10, sort=RELEVANCY, size=None, cached=False, **kwargs):
""" Returns a list of results from Twitter for the given query.
- type : SEARCH,
- start: Result.id or int,
- count: maximum 100.
There is a limit of 150+ queries per 15 minutes.
"""
if type != SEARCH:
raise SearchEngineTypeError
if not query or count < 1 or (isinstance(start, (int, long, float)) and start < 1):
return Results(TWITTER, query, type)
if not isinstance(start, (int, long, float)):
id = int(start) - 1 if start and start.isdigit() else ""
else:
if start == 1:
self._pagination = {}
if start <= 10000:
id = (query, kwargs.get("geo"), kwargs.get("date"), int(start)-1, count)
id = self._pagination.get(id, "")
else:
id = int(start) - 1
# 1) Construct request URL.
url = URL(TWITTER + "search/tweets.json?", method=GET)
url.query = {
"q": query,
"max_id": id,
"count": min(count, 100)
}
# 2) Restrict location with geo=(latitude, longitude, radius).
# It can also be a (latitude, longitude)-tuple with default radius "10km".
if "geo" in kwargs:
url.query["geocode"] = ",".join((map(str, kwargs.pop("geo")) + ["10km"])[:3])
# 3) Restrict most recent with date="YYYY-MM-DD".
# Only older tweets are returned.
if "date" in kwargs:
url.query["until"] = kwargs.pop("date")
# 4) Restrict language.
url.query["lang"] = self.language or ""
# 5) Authenticate.
url = self._authenticate(url)
# 6) Parse JSON response.
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
try:
data = url.download(cached=cached, **kwargs)
except HTTP420Error:
raise SearchEngineLimitError
except HTTP429TooMayRequests:
raise SearchEngineLimitError
data = json.loads(data)
results = Results(TWITTER, query, type)
results.total = None
for x in data.get("statuses", []):
r = Result(url=None)
r.id = self.format(x.get("id_str"))
r.url = self.format(TWITTER_STATUS % (x.get("user", {}).get("screen_name"), x.get("id_str")))
r.text = self.format(x.get("text"))
r.date = self.format(x.get("created_at"))
r.author = self.format(x.get("user", {}).get("screen_name"))
r.profile = self.format(x.get("user", {}).get("profile_image_url")) # Profile picture URL.
r.language = self.format(x.get("metadata", {}).get("iso_language_code"))
results.append(r)
# Twitter.search(start=id, count=10) takes a tweet.id,
# and returns 10 results that are older than this id.
# In the past, start took an int used for classic pagination.
# However, new tweets may arrive quickly,
# so that by the time Twitter.search(start=2) is called,
# it will yield results from page 1 (or even newer results).
# For backward compatibility, we keep a page cache
# that remembers the last id for each "page" of a given query,
# so that search() can be called in a loop.
#
# Store the last id retrieved.
# If search() is called again with start+1, start from this id.
if isinstance(start, (int, long, float)):
k = (query, kwargs.get("geo"), kwargs.get("date"), int(start), count)
if results:
self._pagination[k] = str(int(results[-1].id) - 1)
else:
self._pagination[k] = id
return results
def profile(self, query, start=1, count=10, **kwargs):
""" For the given author id, alias or search query,
returns a list of (id, handle, name, description, location, picture, tweets)-tuples.
"""
# 1) Construct request URL.
url = URL(TWITTER + "users/search.json?", method=GET, query={
"q": query,
"page": start,
"count": count
})
url = self._authenticate(url)
# 2) Parse JSON response.
kwargs.setdefault("cached", True)
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
try:
data = URL(url).download(**kwargs)
data = json.loads(data)
except HTTP400BadRequest:
return []
return [(
u(x.get("id_str", "")),
u(x.get("screen_name", "")),
u(x.get("name", "")),
u(x.get("description", "")),
u(x.get("location", "")),
u(x.get("profile_image_url", "")),
u(x.get("statuses_count", ""))) for x in data]
def trends(self, **kwargs):
""" Returns a list with 10 trending topics on Twitter.
"""
# 1) Construct request URL.
url = URL("https://api.twitter.com/1.1/trends/place.json?id=1")
url = self._authenticate(url)
# 2) Parse JSON response.
kwargs.setdefault("cached", False)
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
try:
data = url.download(**kwargs)
data = json.loads(data)
except HTTP400BadRequest:
return []
return [u(x.get("name")) for x in data[0].get("trends", [])]
def stream(self, query, **kwargs):
""" Returns a live stream of Result objects for the given query.
"""
url = URL(TWITTER_STREAM)
url.query["track"] = query
url = self._authenticate(url)
return TwitterStream(url, delimiter="\n", format=self.format, **kwargs)
class TwitterStream(Stream):
def __init__(self, socket, delimiter="\n", format=lambda s: s, **kwargs):
kwargs.setdefault("timeout", 30)
Stream.__init__(self, socket, delimiter, **kwargs)
self.format = format
def parse(self, data):
""" TwitterStream.queue will populate with Result objects as
TwitterStream.update() is called iteratively.
"""
if data.strip():
x = json.loads(data)
r = Result(url=None)
r.id = self.format(x.get("id_str"))
r.url = self.format(TWITTER_STATUS % (x.get("user", {}).get("screen_name"), x.get("id_str")))
r.text = self.format(x.get("text"))
r.date = self.format(x.get("created_at"))
r.author = self.format(x.get("user", {}).get("screen_name"))
r.profile = self.format(x.get("user", {}).get("profile_image_url"))
r.language = self.format(x.get("metadata", {}).get("iso_language_code"))
return r
def author(name):
""" Returns a Twitter query-by-author-name that can be passed to Twitter.search().
For example: Twitter().search(author("tom_de_smedt"))
"""
return "from:%s" % name
def hashtags(string):
""" Returns a list of hashtags (words starting with a #hash) from a tweet.
"""
return [b for a, b in TWITTER_HASHTAG.findall(string)]
def retweets(string):
""" Returns a list of retweets (words starting with a RT @author) from a tweet.
"""
return [b for a, b in TWITTER_RETWEET.findall(string)]
#engine = Twitter()
#for i in range(2):
# for tweet in engine.search("cat nap", cached=False, start=i+1, count=10):
# print
# print tweet.id
# print tweet.url
# print tweet.text
# print tweet.author
# print tweet.profile
# print tweet.language
# print tweet.date
# print hashtags(tweet.text)
# print retweets(tweet.text)
#stream = Twitter().stream("cat")
#for i in range(10):
# print i
# stream.update()
# for tweet in reversed(stream):
# print tweet.id
# print tweet.text
# print tweet.url
# print tweet.language
# print
#stream.clear()
#--- MEDIAWIKI -------------------------------------------------------------------------------------
# MediaWiki is a free wiki software application.
# MediaWiki powers popular websites such as Wikipedia, Wiktionary and Wikia.
# http://www.mediawiki.org/wiki/API:Main_page
# http://en.wikipedia.org/w/api.php
WIKIA = "http://wikia.com"
WIKIPEDIA = "http://wikipedia.com"
WIKIPEDIA_LICENSE = api.license["Wikipedia"]
MEDIAWIKI_LICENSE = None
MEDIAWIKI = "http://{SUBDOMAIN}.{DOMAIN}{API}"
# Pattern for meta links (e.g. Special:RecentChanges).
# http://en.wikipedia.org/wiki/Main_namespace
MEDIAWIKI_NAMESPACE = ["Main", "User", "Wikipedia", "File", "MediaWiki", "Template", "Help", "Category", "Portal", "Book"]
MEDIAWIKI_NAMESPACE += [s+" talk" for s in MEDIAWIKI_NAMESPACE] + ["Talk", "Special", "Media"]
MEDIAWIKI_NAMESPACE += ["WP", "WT", "MOS", "C", "CAT", "Cat", "P", "T", "H", "MP", "MoS", "Mos"]
_mediawiki_namespace = re.compile(r"^("+"|".join(MEDIAWIKI_NAMESPACE)+"):", re.I)
# Pattern to identify disambiguation pages.
MEDIAWIKI_DISAMBIGUATION = "<a href=\"/wiki/Help:Disambiguation\" title=\"Help:Disambiguation\">disambiguation</a> page"
# Pattern to identify references, e.g. [12]
MEDIAWIKI_REFERENCE = r"\s*\[[0-9]{1,3}\]"
# Mediawiki.search(type=ALL).
ALL = "all"
class MediaWiki(SearchEngine):
def __init__(self, license=None, throttle=5.0, language="en"):
SearchEngine.__init__(self, license or MEDIAWIKI_LICENSE, throttle, language)
@property
def _url(self):
# Must be overridden in a subclass; see Wikia and Wikipedia.
return None
@property
def MediaWikiArticle(self):
return MediaWikiArticle
@property
def MediaWikiSection(self):
return MediaWikiSection
@property
def MediaWikiTable(self):
return MediaWikiTable
def __iter__(self):
return self.articles()
def articles(self, **kwargs):
""" Returns an iterator over all MediaWikiArticle objects.
Optional parameters can include those passed to
MediaWiki.index(), MediaWiki.search() and URL.download().
"""
for title in self.index(**kwargs):
yield self.search(title, **kwargs)
# Backwards compatibility.
all = articles
def index(self, namespace=0, start=None, count=100, cached=True, **kwargs):
""" Returns an iterator over all article titles (for a given namespace id).
"""
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
# Fetch article titles (default) or a custom id.
id = kwargs.pop("_id", "title")
# Loop endlessly (= until the last request no longer yields an "apcontinue").
# See: http://www.mediawiki.org/wiki/API:Allpages
while start != -1:
url = URL(self._url, method=GET, query={
"action": "query",
"list": "allpages",
"apnamespace": namespace,
"apfrom": start or "",
"aplimit": min(count, 500),
"apfilterredir": "nonredirects",
"format": "json"
})
data = url.download(cached=cached, **kwargs)
data = json.loads(data)
for x in data.get("query", {}).get("allpages", {}):
if x.get(id):
yield x[id]
start = data.get("query-continue", {}).get("allpages", {})
start = start.get("apcontinue", start.get("apfrom", -1))
raise StopIteration
# Backwards compatibility.
list = index
def search(self, query, type=SEARCH, start=1, count=10, sort=RELEVANCY, size=None, cached=True, **kwargs):
""" With type=SEARCH, returns a MediaWikiArticle for the given query (case-sensitive).
With type=ALL, returns a list of results.
Each result.title is the title of an article that contains the given query.
"""
if type not in (SEARCH, ALL, "*"):
raise SearchEngineTypeError
if type == SEARCH: # Backwards compatibility.
return self.article(query, cached=cached, **kwargs)
if not query or start < 1 or count < 1:
return Results(self._url, query, type)
# 1) Construct request URL (e.g., Wikipedia for a given language).
url = URL(self._url, method=GET, query={
"action": "query",
"list": "search",
"srsearch": query,
"sroffset": (start - 1) * count,
"srlimit": min(count, 100),
"srprop": "snippet",
"format": "json"
})
# 2) Parse JSON response.
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
data = url.download(cached=cached, **kwargs)
data = json.loads(data)
data = data.get("query", {})
results = Results(self._url, query, type)
results.total = int(data.get("searchinfo", {}).get("totalhits", 0))
for x in data.get("search", []):
u = "http://%s/wiki/%s" % (URL(self._url).domain, x.get("title").replace(" ", "_"))
r = Result(url=u)
r.id = self.format(x.get("title"))
r.title = self.format(x.get("title"))
r.text = plaintext(self.format(x.get("snippet")))
results.append(r)
return results
def article(self, query, cached=True, **kwargs):
""" Returns a MediaWikiArticle for the given query.
The query is case-sensitive, for example on Wikipedia:
- "tiger" = Panthera tigris,
- "TIGER" = Topologically Integrated Geographic Encoding and Referencing.
"""
url = URL(self._url, method=GET, query={
"action": "parse",
"page": query.replace(" ", "_"),
"redirects": 1,
"format": "json"
})
kwargs.setdefault("unicode", True)
kwargs.setdefault("timeout", 30) # Parsing the article takes some time.
kwargs.setdefault("throttle", self.throttle)
data = url.download(cached=cached, **kwargs)
data = json.loads(data)
data = data.get("parse", {})
a = self._parse_article(data, query=query)
a = self._parse_article_sections(a, data)
a = self._parse_article_section_structure(a)
if not a.html or "id=\"noarticletext\"" in a.html:
return None
return a
def _parse_article(self, data, **kwargs):
return self.MediaWikiArticle(
title = plaintext(data.get("displaytitle", data.get("title", ""))),
source = data.get("text", {}).get("*", ""),
disambiguation = data.get("text", {}).get("*", "").find(MEDIAWIKI_DISAMBIGUATION) >= 0,
links = [x["*"] for x in data.get("links", []) if not _mediawiki_namespace.match(x["*"])],
categories = [x["*"] for x in data.get("categories", [])],
external = [x for x in data.get("externallinks", [])],
media = [x for x in data.get("images", [])],
redirects = [x for x in data.get("redirects", [])],
languages = dict([(x["lang"], x["*"]) for x in data.get("langlinks", [])]),
language = self.language,
parser = self, **kwargs)
def _parse_article_sections(self, article, data):
# If "References" is a section in the article,
# the HTML will contain a marker <h*><span class="mw-headline" id="References">.
# http://en.wikipedia.org/wiki/Section_editing
t = article.title
d = 0
i = 0
for x in data.get("sections", {}):
a = x.get("anchor")
if a:
p = r"<h.>\s*.*?\s*<span class=\"mw-headline\" id=\"%s\">" % a
p = re.compile(p)
m = p.search(article.source, i)
if m:
j = m.start()
t = plaintext(t)
article.sections.append(self.MediaWikiSection(article,
title = t,
start = i,
stop = j,
level = d))
t = plaintext(x.get("line", ""))
d = int(x.get("level", 2)) - 1
i = j
return article
def _parse_article_section_structure(self, article):
# Sections with higher level are children of previous sections with lower level.
for i, s2 in enumerate(article.sections):
for s1 in reversed(article.sections[:i]):
if s1.level < s2.level:
s2.parent = s1
s1.children.append(s2)
break
return article
class MediaWikiArticle(object):
def __init__(self, title=u"", source=u"", links=[], categories=[], languages={}, disambiguation=False, **kwargs):
""" A MediaWiki article returned from MediaWiki.search().
MediaWikiArticle.string contains the HTML content.
"""
self.title = title # Article title.
self.source = source # Article HTML content.
self.sections = [] # Article sections.
self.links = links # List of titles of linked articles.
self.categories = categories # List of categories. As links, prepend "Category:".
self.external = [] # List of external links.
self.media = [] # List of linked media (images, sounds, ...)
self.disambiguation = disambiguation # True when the article is a disambiguation page.
self.languages = languages # Dictionary of (language, article)-items, e.g. Cat => ("nl", "Kat")
self.language = kwargs.get("language", "en")
self.redirects = kwargs.get("redirects", [])
self.parser = kwargs.get("parser", MediaWiki())
for k, v in kwargs.items():
setattr(self, k, v)
def _plaintext(self, string, **kwargs):
""" Strips HTML tags, whitespace and wiki markup from the HTML source, including:
metadata, info box, table of contents, annotations, thumbnails, disambiguation link.
This is called internally from MediaWikiArticle.string.
"""
s = string
# Strip meta <table> elements.
s = strip_element(s, "table", "id=\"toc") # Table of contents.
s = strip_element(s, "table", "class=\"infobox") # Infobox.
s = strip_element(s, "table", "class=\"navbox") # Navbox.
s = strip_element(s, "table", "class=\"mbox") # Message.
s = strip_element(s, "table", "class=\"metadata") # Metadata.
s = strip_element(s, "table", "class=\".*?wikitable") # Table.
s = strip_element(s, "table", "class=\"toc") # Table (usually footer).
# Strip meta <div> elements.
s = strip_element(s, "div", "id=\"toc") # Table of contents.
s = strip_element(s, "div", "class=\"infobox") # Infobox.
s = strip_element(s, "div", "class=\"navbox") # Navbox.
s = strip_element(s, "div", "class=\"mbox") # Message.
s = strip_element(s, "div", "class=\"metadata") # Metadata.
s = strip_element(s, "div", "id=\"annotation") # Annotations.
s = strip_element(s, "div", "class=\"dablink") # Disambiguation message.
s = strip_element(s, "div", "class=\"magnify") # Thumbnails.
s = strip_element(s, "div", "class=\"thumb ") # Thumbnail captions.
s = strip_element(s, "div", "class=\"barbox") # Bar charts.
s = strip_element(s, "div", "class=\"noprint") # Hidden from print.
s = strip_element(s, "sup", "class=\"noprint")
# Strip absolute elements (don't know their position).
s = strip_element(s, "div", "style=\"position:absolute")
# Strip meta <span> elements.
s = strip_element(s, "span", "class=\"error")
# Strip math formulas, add [math] placeholder.
s = re.sub(r"<img class=\"tex\".*?/>", "[math]", s) # LaTex math images.
s = plaintext(s, **kwargs)
# Strip [edit] link (language dependent.)
s = re.sub(r"\[edit\]\s*", "", s)
s = re.sub(r"\[%s\]\s*" % {
"en": "edit",
"es": u"editar código",
"de": "Bearbeiten",
"fr": "modifier le code",
"it": "modifica sorgente",
"nl": "bewerken",
}.get(self.language, "edit"), "", s)
# Insert space before inline references.
s = s.replace("[", " [").replace(" [", " [")
# Strip inline references.
#s = re.sub(r" \[[0-9]+\]", "", s)
return s
def plaintext(self, **kwargs):
return self._plaintext(self.source, **kwargs)
@property
def html(self):
return self.source
@property
def string(self):
return self.plaintext()
def __repr__(self):
return "MediaWikiArticle(title=%s)" % repr(self.title)
class MediaWikiSection(object):
def __init__(self, article, title=u"", start=0, stop=0, level=1):
""" A (nested) section in the content of a MediaWikiArticle.
"""
self.article = article # MediaWikiArticle the section is part of.
self.parent = None # MediaWikiSection the section is part of.
self.children = [] # MediaWikiSections belonging to this section.
self.title = title # Section title.
self._start = start # Section start index in MediaWikiArticle.string.
self._stop = stop # Section stop index in MediaWikiArticle.string.
self._level = level # Section depth (main title + intro = level 0).
self._links = None
self._tables = None
def plaintext(self, **kwargs):
return self.article._plaintext(self.source, **kwargs)
@property
def source(self):
return self.article.source[self._start:self._stop]
@property
def html(self):
return self.source
@property
def string(self):
return self.plaintext()
@property
def content(self):
# ArticleSection.string, minus the title.
s = self.plaintext()
t = plaintext(self.title)
if s == t or (len(s) > len(t)) and s.startswith(t) and s[len(t)] not in (",", " "):
return s[len(t):].lstrip()
return s
@property
def links(self, path="/wiki/"):
""" Yields a list of Wikipedia links in this section. Similar
in functionality to MediaWikiArticle.links.
"""
if self._links is None:
a = HTMLLinkParser().parse(self.source)
a = (decode_url(a.url) for a in a)
a = (a[len(path):].replace("_", " ") for a in a if a.startswith(path))
a = (a for a in a if not _mediawiki_namespace.match(a))
self._links = sorted(set(a))
return self._links
@property
def tables(self):
""" Yields a list of MediaWikiTable objects in the section.
"""
if self._tables is None:
self._tables = []
for style in ("wikitable", "sortable wikitable"):
b = "<table class=\"%s\"" % style, "</table>"
p = self.article._plaintext
f = find_between
for s in f(b[0], b[1], self.source):
t = self.article.parser.MediaWikiTable(self,
title = p((f(r"<caption.*?>", "</caption>", s) + [""])[0]),
source = b[0] + s + b[1])
# 1) Parse <td> and <th> content and format it as plain text.
# 2) Parse <td colspan=""> attribute, duplicate spanning cells.
# 3) For <th> in the first row, update MediaWikiTable.headers.
for i, row in enumerate(f(r"<tr", "</tr>", s)):
r1 = f(r"<t[d|h]", r"</t[d|h]>", row)
r1 = (((f(r'colspan="', r'"', v)+[1])[0], v[v.find(">")+1:]) for v in r1)
r1 = ((int(n), v) for n, v in r1)
r2 = []; [[r2.append(p(v)) for j in range(n)] for n, v in r1]
if i == 0 and "</th>" in row:
t.headers = r2
else:
t.rows.append(r2)
self._tables.append(t)
return self._tables
@property
def level(self):
return self._level
depth = level
def __repr__(self):
return "MediaWikiSection(title=%s)" % repr(self.title)
class MediaWikiTable(object):
def __init__(self, section, title=u"", headers=[], rows=[], source=u""):
""" A <table class="wikitable> in a MediaWikiSection.
"""
self.section = section # MediaWikiSection the table is part of.
self.source = source # Table HTML.
self.title = title # Table title.
self.headers = headers # List of table headers.
self.rows = rows # List of table rows, each a list of cells.
def plaintext(self, **kwargs):
return self.section.article._plaintext(self.source, **kwargs)
@property
def html(self):
return self.source
@property
def string(self):
return self.plaintext()
def __repr__(self):
return "MediaWikiTable(title=%s)" % repr(self.title)
#--- MEDIAWIKI: WIKIPEDIA --------------------------------------------------------------------------
# Wikipedia is a collaboratively edited, multilingual, free Internet encyclopedia.
# Wikipedia depends on MediaWiki.
class Wikipedia(MediaWiki):
def __init__(self, license=None, throttle=5.0, language="en"):
""" Mediawiki search engine for http://[language].wikipedia.org.
"""
SearchEngine.__init__(self, license or WIKIPEDIA_LICENSE, throttle, language)
self._subdomain = language
@property
def _url(self):
s = MEDIAWIKI
s = s.replace("{SUBDOMAIN}", self._subdomain)
s = s.replace("{DOMAIN}", "wikipedia.org")
s = s.replace("{API}", "/w/api.php")
return s
@property
def MediaWikiArticle(self):
return WikipediaArticle
@property
def MediaWikiSection(self):
return WikipediaSection
@property
def MediaWikiTable(self):
return WikipediaTable
class WikipediaArticle(MediaWikiArticle):
def download(self, media, **kwargs):
""" Downloads an item from MediaWikiArticle.media and returns the content.
Note: images on Wikipedia can be quite large, and this method uses screen-scraping,
so Wikipedia may not appreciate media being downloaded this way.
To save the media in a file:
data = article.download(media)
open(filename+extension(media),"w").write(data)
"""
url = "http://%s.wikipedia.org/wiki/File:%s" % (self.__dict__.get("language", "en"), media)
if url not in cache:
time.sleep(1)
data = URL(url).download(**kwargs)
data = re.search(r"upload.wikimedia.org/.*?/%s" % media, data)
data = data and URL("http://" + data.group(0)).download(**kwargs) or None
return data
def __repr__(self):
return "WikipediaArticle(title=%s)" % repr(self.title)
class WikipediaSection(MediaWikiSection):
def __repr__(self):
return "WikipediaSection(title=%s)" % repr(self.title)
class WikipediaTable(MediaWikiTable):
def __repr__(self):
return "WikipediaTable(title=%s)" % repr(self.title)
#article = Wikipedia().search("cat")
#for section in article.sections:
# print " "*(section.level-1) + section.title
#if article.media:
# data = article.download(article.media[2])
# f = open(article.media[2], "w")
# f.write(data)
# f.close()
#
#article = Wikipedia(language="nl").search("borrelnootje")
#print article.string
#for result in Wikipedia().search("\"cat's\"", type="*"):
# print result.title
# print result.text
# print
#--- MEDIAWIKI: WIKTIONARY -------------------------------------------------------------------------
# Wiktionary is a collaborative project to produce a free-content multilingual dictionary.
class Wiktionary(MediaWiki):
def __init__(self, license=None, throttle=5.0, language="en"):
""" Mediawiki search engine for http://[language].wiktionary.com.
"""
SearchEngine.__init__(self, license or MEDIAWIKI_LICENSE, throttle, language)
self._subdomain = language
@property
def _url(self):
s = MEDIAWIKI
s = s.replace("{SUBDOMAIN}", self._subdomain)
s = s.replace("{DOMAIN}", "wiktionary.org")
s = s.replace("{API}", "/w/api.php")
return s
@property
def MediaWikiArticle(self):
return WiktionaryArticle
@property
def MediaWikiSection(self):
return WiktionarySection
@property
def MediaWikiTable(self):
return WiktionaryTable
class WiktionaryArticle(MediaWikiArticle):
def __repr__(self):
return "WiktionaryArticle(title=%s)" % repr(self.title)
class WiktionarySection(MediaWikiSection):
def __repr__(self):
return "WiktionarySection(title=%s)" % repr(self.title)
class WiktionaryTable(MediaWikiTable):
def __repr__(self):
return "WiktionaryTable(title=%s)" % repr(self.title)
#--- MEDIAWIKI: WIKIA ------------------------------------------------------------------------------
# Wikia (formerly Wikicities) is a free web hosting service and a wiki farm for wikis.
# Wikia hosts several hundred thousand wikis using MediaWiki.
# Author: Robert Elwell (2012)
class Wikia(MediaWiki):
def __init__(self, domain="www", license=None, throttle=5.0, language="en"):
""" Mediawiki search engine for http://[domain].wikia.com.
"""
SearchEngine.__init__(self, license or MEDIAWIKI_LICENSE, throttle, language)
self._subdomain = domain
@property
def _url(self):
s = MEDIAWIKI
s = s.replace("{SUBDOMAIN}", self._subdomain)
s = s.replace("{DOMAIN}", "wikia.com")
s = s.replace("{API}", '/api.php')
return s
@property
def MediaWikiArticle(self):
return WikiaArticle
@property
def MediaWikiSection(self):
return WikiaSection
@property
def MediaWikiTable(self):
return WikiaTable
def articles(self, **kwargs):
if kwargs.pop("batch", True):
# We can take advantage of Wikia's search API to reduce bandwidth.
# Instead of executing a query to retrieve each article,
# we query for a batch of (10) articles.
iterator = self.index(_id="pageid", **kwargs)
while True:
batch, done = [], False
try:
for i in range(10): batch.append(iterator.next())
except StopIteration:
done = True # No more articles, finish batch and raise StopIteration.
url = URL(self._url.replace("api.php", "wikia.php"), method=GET, query={
"controller": "WikiaSearch",
"method": "getPages",
"ids": '|'.join(str(id) for id in batch),
"format": "json"
})
kwargs.setdefault("unicode", True)
kwargs.setdefault("cached", True)
kwargs["timeout"] = 10 * (1 + len(batch))
data = url.download(**kwargs)
data = json.loads(data)
for x in (data or {}).get("pages", {}).values():
yield WikiaArticle(title=x.get("title", ""), source=x.get("html", ""))
if done:
raise StopIteration
for title in self.index(**kwargs):
yield self.search(title, **kwargs)
class WikiaArticle(MediaWikiArticle):
def __repr__(self):
return "WikiaArticle(title=%s)" % repr(self.title)
class WikiaSection(MediaWikiSection):
def __repr__(self):
return "WikiaSection(title=%s)" % repr(self.title)
class WikiaTable(MediaWikiTable):
def __repr__(self):
return "WikiaTable(title=%s)" % repr(self.title)
#--- DBPEDIA --------------------------------------------------------------------------------------------------
# DBPedia is a database of structured information mined from Wikipedia.
# DBPedia data is stored as RDF triples: (subject, predicate, object),
# e.g., X is-a Actor, Y is-a Country, Z has-birthplace Country, ...
# DBPedia can be queried using SPARQL:
# http://www.w3.org/TR/rdf-sparql-query/
# A SPARQL query yields rows that match all triples in the WHERE clause.
# A SPARQL query uses ?wildcards in triple subject/object to select fields.
# For example:
# > PREFIX dbo: <http://dbpedia.org/ontology/>
# > SELECT ?actor ?place
# > WHERE {
# > ?actor a dbo:Actor; dbo:birthPlace ?place.
# > ?place a dbo:Country.
# > }
#
# - Each row in the results has an "actor" and a "place" field.
# - The actor is of the class "Actor".
# - The place is of the class "Country".
# - Only actors for which a place of birth is known are retrieved.
#
# The fields are RDF resources, e.g.:
# http://dbpedia.org/resource/Australia
# Author: Kenneth Koch (2013) <[email protected]>
DBPEDIA = "http://dbpedia.org/sparql?"
SPARQL = "sparql"
class DBPediaQueryError(HTTP400BadRequest):
pass
class DBPediaResource(unicode):
@property
def name(self):
# http://dbpedia.org/resource/Australia => Australia
s = re.sub("^http://dbpedia.org/resource/", "", self)
s = s.replace("_", " ")
s = encode_utf8(s)
s = decode_url(s)
s = decode_utf8(s)
return s
class DBPedia(SearchEngine):
def __init__(self, license=None, throttle=1.0, language=None):
SearchEngine.__init__(self, license, throttle, language)
def search(self, query, type=SPARQL, start=1, count=10, sort=RELEVANCY, size=None, cached=False, **kwargs):
""" Returns a list of results from DBPedia for the given SPARQL query.
- type : SPARQL,
- start: no maximum,
- count: maximum 1000,
There is a limit of 10 requests/second.
Maximum query execution time is 120 seconds.
"""
if type not in (SPARQL,):
raise SearchEngineTypeError
if not query or count < 1 or start < 1:
return Results(DBPEDIA, query, type)
# 1) Construct request URL.
url = URL(DBPEDIA, method=GET)
url.query = {
"format": "json",
"query": "%s OFFSET %s LIMIT %s" % (query,
(start-1) * min(count, 1000),
(start-0) * min(count, 1000)
)
}
# 2) Parse JSON response.
try:
data = URL(url).download(cached=cached, timeout=30, **kwargs)
data = json.loads(data)
except HTTP400BadRequest, e:
raise DBPediaQueryError, e.src.read().splitlines()[0]
except HTTP403Forbidden:
raise SearchEngineLimitError
results = Results(DBPEDIA, url.query, type)
results.total = None
for x in data["results"]["bindings"]:
r = Result(url=None)
for k in data["head"]["vars"]:
t1 = x[k].get("type", "literal") # uri | literal | typed-literal
t2 = x[k].get("datatype", "?") # http://www.w3.org/2001/XMLSchema#float | int | date
v = x[k].get("value")
v = self.format(v)
if t1 == "uri":
v = DBPediaResource(v)
if t2.endswith("float"):
v = float(v)
if t2.endswith("int"):
v = int(v)
dict.__setitem__(r, k, v)
results.append(r)
return results
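# A minimal SPARQL sketch against the public endpoint; each row is stored on the
# Result under its ?variable name, and uri values become DBPediaResource objects:
#sparql = "\n".join((
#    "prefix dbo: <http://dbpedia.org/ontology/>",
#    "select ?person ?place where {",
#    "    ?person a dbo:Actor; dbo:birthPlace ?place.",
#    "    ?place a dbo:Country.",
#    "}"
#))
#for r in DBPedia().search(sparql, start=1, count=10):
#    person, place = r["person"], r["place"]
#    print "%s (%s)" % (person.name, place.name)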
#--- FLICKR ----------------------------------------------------------------------------------------
# Flickr is a popular image hosting and video hosting website.
# http://www.flickr.com/services/api/
FLICKR = "http://api.flickr.com/services/rest/"
FLICKR_LICENSE = api.license["Flickr"]
INTERESTING = "interesting"
class Flickr(SearchEngine):
def __init__(self, license=None, throttle=5.0, language=None):
SearchEngine.__init__(self, license or FLICKR_LICENSE, throttle, language)
def search(self, query, type=IMAGE, start=1, count=10, sort=RELEVANCY, size=None, cached=True, **kwargs):
""" Returns a list of results from Flickr for the given query.
Retrieving the URL of a result (i.e. image) requires an additional query.
- type : SEARCH, IMAGE,
- start: maximum undefined,
- count: maximum 500,
- sort : RELEVANCY, LATEST or INTERESTING.
There is no daily limit.
"""
if type not in (SEARCH, IMAGE):
raise SearchEngineTypeError
if not query or count < 1 or start < 1 or start > 500/count:
return Results(FLICKR, query, IMAGE)
# 1) Construct request URL.
url = FLICKR+"?"
url = URL(url, method=GET, query={
"api_key": self.license or "",
"method": "flickr.photos.search",
"text": query.replace(" ", "_"),
"page": start,
"per_page": min(count, 500),
"sort": { RELEVANCY: "relevance",
LATEST: "date-posted-desc",
INTERESTING: "interestingness-desc" }.get(sort)
})
if kwargs.get("copyright", True) is False:
# With copyright=False, only returns Public Domain and Creative Commons images.
# http://www.flickr.com/services/api/flickr.photos.licenses.getInfo.html
# 5: "Attribution-ShareAlike License"
# 7: "No known copyright restriction"
url.query["license"] = "5,7"
# 2) Parse XML response.
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
data = url.download(cached=cached, **kwargs)
data = xml.dom.minidom.parseString(bytestring(data))
results = Results(FLICKR, query, IMAGE)
results.total = int(data.getElementsByTagName("photos")[0].getAttribute("total"))
for x in data.getElementsByTagName("photo"):
r = FlickrResult(url=None)
r.__dict__["_id"] = x.getAttribute("id")
r.__dict__["_size"] = size
r.__dict__["_license"] = self.license
r.__dict__["_throttle"] = self.throttle
r.text = self.format(x.getAttribute("title"))
r.author = self.format(x.getAttribute("owner"))
results.append(r)
return results
class FlickrResult(Result):
@property
def url(self):
# Retrieving the url of a FlickrResult (i.e. image location) requires another query.
# Note: the "Original" size no longer appears in the response,
# so Flickr might not like it if we download it.
url = FLICKR + "?method=flickr.photos.getSizes&photo_id=%s&api_key=%s" % (self._id, self._license)
data = URL(url).download(throttle=self._throttle, unicode=True)
data = xml.dom.minidom.parseString(bytestring(data))
size = { TINY: "Thumbnail",
SMALL: "Small",
MEDIUM: "Medium",
LARGE: "Original" }.get(self._size, "Medium")
for x in data.getElementsByTagName("size"):
if size == x.getAttribute("label"):
return x.getAttribute("source")
if size == "Original":
url = x.getAttribute("source")
url = url[:-len(extension(url))-2] + "_o" + extension(url)
return u(url)
#images = Flickr().search("kitten", count=10, size=SMALL)
#for img in images:
# print bytestring(img.description)
# print img.url
#
#data = img.download()
#f = open("kitten"+extension(img.url), "wb")
#f.write(data)
#f.close()
#--- FACEBOOK --------------------------------------------------------------------------------------
# Facebook is a popular online social networking service.
# https://developers.facebook.com/docs/reference/api/
FACEBOOK = "https://graph.facebook.com/"
FACEBOOK_LICENSE = api.license["Facebook"]
FEED = "feed" # Facebook timeline.
COMMENTS = "comments" # Facebook comments (for a given news feed post).
LIKES = "likes" # Facebook likes (for a given post or comment).
FRIENDS = "friends" # Facebook friends (for a given profile id).
class FacebookResult(Result):
def __repr__(self):
return "Result(id=%s)" % repr(self.id)
class Facebook(SearchEngine):
def __init__(self, license=None, throttle=1.0, language=None):
SearchEngine.__init__(self, license or FACEBOOK_LICENSE, throttle, language)
@property
def _token(self):
# Yields the "application access token" (stored in api.license["Facebook"]).
# With this license, we can view public content.
# To view more information, we need a "user access token" as license key.
# This token can be retrieved manually from:
# http://www.clips.ua.ac.be/pattern-facebook
# Or parsed from this URL:
# https://graph.facebook.com/oauth/authorize?type=user_agent
# &client_id=332061826907464
# &redirect_uri=http://www.clips.ua.ac.be/pattern-facebook
# &scope=read_stream,user_birthday,user_likes,user_photos,friends_birthday,friends_likes
# The token is valid for a limited duration.
return URL(FACEBOOK + "oauth/access_token?", query={
"grant_type": "client_credentials",
"client_id": "332061826907464",
"client_secret": "81ff4204e73ecafcd87635a3a3683fbe"
}).download().split("=")[1]
def search(self, query, type=SEARCH, start=1, count=10, cached=False, **kwargs):
""" Returns a list of results from Facebook public status updates for the given query.
- query: string, or Result.id for NEWS and COMMENTS,
- type : SEARCH,
- start: 1,
- count: maximum 100 for SEARCH and NEWS, 1000 for COMMENTS and LIKES.
There is an hourly limit of +-600 queries (actual amount undisclosed).
"""
# Facebook.search(type=SEARCH) returns public posts + author.
# Facebook.search(type=NEWS) returns posts for the given author (id | alias | "me").
# Facebook.search(type=COMMENTS) returns comments for the given post id.
# Facebook.search(type=LIKES) returns authors for the given author, post or comments.
# Facebook.search(type=FRIENDS) returns authors for the given author.
# An author is a Facebook user or other entity (e.g., a product page).
if type not in (SEARCH, NEWS, COMMENTS, LIKES, FRIENDS):
raise SearchEngineTypeError
if type in (SEARCH, NEWS):
max = 100
if type in (COMMENTS, LIKES):
max = 1000
if type in (FRIENDS,):
max = 10000
if not query or start < 1 or count < 1:
return Results(FACEBOOK, query, SEARCH)
if isinstance(query, FacebookResult):
query = query.id
# 1) Construct request URL.
if type == SEARCH:
url = FACEBOOK + type
url = URL(url, method=GET, query={
"q": query,
"type": "post",
"access_token": self.license,
"offset": (start-1) * min(count, max),
"limit": (start-0) * min(count, max)
})
if type in (NEWS, FEED, COMMENTS, LIKES, FRIENDS):
url = FACEBOOK + (u(query) or "me").replace(FACEBOOK, "") + "/" + type.replace("news", "feed")
url = URL(url, method=GET, query={
"access_token": self.license,
"offset": (start-1) * min(count, max),
"limit": (start-0) * min(count, max),
})
if type in (SEARCH, NEWS, FEED):
url.query["fields"] = ",".join((
"id", "from", "name", "story", "message", "link", "picture", "created_time",
"comments.limit(1).summary(true)",
"likes.limit(1).summary(true)"
))
# 2) Parse JSON response.
kwargs.setdefault("cached", cached)
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
try:
data = URL(url).download(**kwargs)
except HTTP400BadRequest:
raise HTTP401Authentication
data = json.loads(data)
results = Results(FACEBOOK, query, SEARCH)
results.total = None
for x in data.get("data", []):
r = FacebookResult(url=None)
r.id = self.format(x.get("id"))
r.url = self.format(x.get("link"))
r.text = self.format(x.get("story", x.get("message", x.get("name"))))
r.date = self.format(x.get("created_time"))
# Store likes & comments count as int, author as (id, name)-tuple
# (by default Result will store everything as Unicode strings).
s = lambda r, k, v: dict.__setitem__(r, k, v)
s(r, "likes", \
self.format(x.get("like_count", x.get("likes", {}).get("summary", {}).get("total_count", 0))) + 0)
s(r, "comments", \
self.format(x.get("comments", {}).get("summary", {}).get("total_count", 0)) + 0)
s(r, "author", (
u(self.format(x.get("from", {}).get("id", ""))),
u(self.format(x.get("from", {}).get("name", "")))))
# Set Result.text to author name for likes.
if type in (LIKES, FRIENDS):
s(r, "author", (
u(self.format(x.get("id", ""))),
u(self.format(x.get("name", "")))))
r.text = \
self.format(x.get("name"))
# Set Result.url to full-size image.
if re.match(r"^http(s?)://www\.facebook\.com/photo", r.url) is not None:
r.url = x.get("picture", "").replace("_s", "_b") or r.url
# Set Result.title to object id.
if re.match(r"^http(s?)://www\.facebook\.com/", r.url) is not None:
r.title = r.url.split("/")[-1].split("?")[0]
results.append(r)
return results
def profile(self, id=None, **kwargs):
""" For the given author id or alias,
returns an (id, name, date of birth, gender, locale, likes)-tuple.
"""
# 1) Construct request URL.
url = FACEBOOK + (u(id or "me")).replace(FACEBOOK, "")
url = URL(url, method=GET, query={"access_token": self.license})
kwargs.setdefault("cached", True)
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
# 2) Parse JSON response.
try:
data = URL(url).download(**kwargs)
data = json.loads(data)
except HTTP400BadRequest:
raise HTTP401Authentication
return (
u(data.get("id", "")),
u(data.get("name", "")),
u(data.get("birthday", "")),
u(data.get("gender", "")[:1]),
u(data.get("locale", "")),
int(data.get("likes", 0)) # For pages.
)
page = profile
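# A usage sketch (assumes api.license["Facebook"] holds a valid access token):
#fb = Facebook()
#for post in fb.search("pattern", type=SEARCH, count=10):
#    print post.id
#    print repr(post.text)
#    print post["author"] # (id, name)-tuple.
#    print post["likes"]
#    print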
#--- PRODUCT REVIEWS -------------------------------------------------------------------------------
# ProductWiki is an open web-based product information resource.
# http://connect.productwiki.com/connect-api/
PRODUCTWIKI = "http://api.productwiki.com/connect/api.aspx"
PRODUCTWIKI_LICENSE = api.license["ProductWiki"]
class ProductWiki(SearchEngine):
def __init__(self, license=None, throttle=5.0, language=None):
SearchEngine.__init__(self, license or PRODUCTWIKI_LICENSE, throttle, language)
def search(self, query, type=SEARCH, start=1, count=10, sort=RELEVANCY, size=None, cached=True, **kwargs):
""" Returns a list of results from Productwiki for the given query.
Each Result.reviews is a list of (review, score)-items.
- type : SEARCH,
- start: maximum undefined,
- count: 20,
- sort : RELEVANCY.
There is no daily limit.
"""
if type != SEARCH:
raise SearchEngineTypeError
if not query or start < 1 or count < 1:
return Results(PRODUCTWIKI, query, type)
# 1) Construct request URL.
url = PRODUCTWIKI+"?"
url = URL(url, method=GET, query={
"key": self.license or "",
"q": query,
"page": start,
"op": "search",
"fields": "proscons", # "description,proscons" is heavy.
"format": "json"
})
# 2) Parse JSON response.
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
data = URL(url).download(cached=cached, **kwargs)
data = json.loads(data)
results = Results(PRODUCTWIKI, query, type)
results.total = None
for x in data.get("products", [])[:count]:
r = Result(url=None)
r.__dict__["title"] = u(x.get("title"))
r.__dict__["text"] = u(x.get("text"))
r.__dict__["reviews"] = []
reviews = x.get("community_review") or {}
for p in reviews.get("pros", []):
r.reviews.append((p.get("text", ""), int(p.get("score")) or +1))
for p in reviews.get("cons", []):
r.reviews.append((p.get("text", ""), int(p.get("score")) or -1))
r.__dict__["score"] = int(sum(score for review, score in r.reviews))
results.append(r)
# Highest score first.
results.sort(key=lambda r: r.score, reverse=True)
return results
# Backwards compatibility.
Products = ProductWiki
#for r in ProductWiki().search("tablet"):
# print r.title
# print r.score
# print r.reviews
# print
#--- NEWS FEED -------------------------------------------------------------------------------------
# Based on the Universal Feed Parser by Mark Pilgrim:
# http://www.feedparser.org/
class Newsfeed(SearchEngine):
def __init__(self, license=None, throttle=1.0, language=None):
SearchEngine.__init__(self, license, throttle, language)
def search(self, query, type=NEWS, start=1, count=10, sort=LATEST, size=SMALL, cached=True, **kwargs):
""" Returns a list of results from the given RSS or Atom newsfeed URL.
"""
if type != NEWS:
raise SearchEngineTypeError
if not query or start < 1 or count < 1:
return Results(query, query, NEWS)
# 1) Construct request URL.
# 2) Parse RSS/Atom response.
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
tags = kwargs.pop("tags", [])
data = URL(query).download(cached=cached, **kwargs)
data = feedparser.parse(bytestring(data))
results = Results(query, query, NEWS)
results.total = None
for x in data["entries"][:count]:
s = "\n\n".join([v.get("value") for v in x.get("content", [])]) or x.get("summary")
r = Result(url=None)
r.id = self.format(x.get("id"))
r.url = self.format(x.get("link"))
r.title = self.format(x.get("title"))
r.text = self.format(s)
r.date = self.format(x.get("updated"))
r.author = self.format(x.get("author"))
r.language = self.format(x.get("content") and \
x.get("content")[0].get("language") or \
data.get("language"))
for tag in tags:
# Parse custom tags.
# Newsfeed.search(tags=["dc:identifier"]) => Result.dc_identifier.
tag = tag.replace(":", "_")
r[tag] = self.format(x.get(tag))
results.append(r)
return results
feeds = {
"Nature": "http://feeds.nature.com/nature/rss/current",
"Science": "http://www.sciencemag.org/rss/podcast.xml",
"Herald Tribune": "http://www.iht.com/rss/frontpage.xml",
"TIME": "http://feeds.feedburner.com/time/topstories",
"CNN": "http://rss.cnn.com/rss/edition.rss",
}
#for r in Newsfeed().search(feeds["Nature"]):
# print r.title
# print r.author
# print r.url
# print plaintext(r.text)
# print
#--- QUERY -----------------------------------------------------------------------------------------
def query(string, service=GOOGLE, **kwargs):
""" Returns the list of search query results from the given service.
For service=WIKIPEDIA, this is a single WikipediaArticle or None.
"""
service = service.lower()
if service in (GOOGLE, "google", "g"):
engine = Google
if service in (YAHOO, "yahoo", "y!"):
engine = Yahoo
if service in (BING, "bing"):
engine = Bing
if service in (DUCKDUCKGO, "duckduckgo", "ddg"):
engine = DuckDuckGo
if service in (TWITTER, "twitter", "tw"):
engine = Twitter
if service in (FACEBOOK, "facebook", "fb"):
engine = Facebook
if service in (WIKIPEDIA, "wikipedia", "wp"):
engine = Wikipedia
if service in (WIKIA, "wikia"):
engine = Wikia
if service in (DBPEDIA, "dbpedia", "dbp"):
engine = DBPedia
if service in (FLICKR, "flickr"):
engine = Flickr
try:
kw = {}
for a in ("license", "throttle", "language"):
if a in kwargs:
kw[a] = kwargs.pop(a)
return engine(**kw).search(string, **kwargs)
except UnboundLocalError:
raise SearchEngineError, "unknown search engine '%s'" % service
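# A small sketch of the query() wrapper; "ddg" is one of the service aliases handled
# above, and DuckDuckGo requires no license key:
#for r in query("cats", service="ddg"):
#    print r.title
#    print r.url
#    print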
#--- WEB SORT --------------------------------------------------------------------------------------
SERVICES = {
GOOGLE : Google,
YAHOO : Yahoo,
BING : Bing,
TWITTER : Twitter,
WIKIPEDIA : Wikipedia,
WIKIA : Wikia,
FLICKR : Flickr,
FACEBOOK : Facebook
}
def sort(terms=[], context="", service=GOOGLE, license=None, strict=True, prefix=False, **kwargs):
""" Returns a list of (percentage, term)-tuples for the given list of terms.
Sorts the terms in the list according to search result count.
When a context is defined, sorts according to relevancy to the context, e.g.:
sort(terms=["black", "green", "red"], context="Darth Vader") =>
yields "black" as the best candidate, because "black Darth Vader" is more common in search results.
- terms : list of search terms,
- context : term used for sorting,
- service : web service name (GOOGLE, YAHOO, BING),
- license : web service license id,
- strict : when True the query constructed from term + context is wrapped in quotes.
"""
service = SERVICES.get(service, SearchEngine)(license, language=kwargs.pop("language", None))
R = []
for word in terms:
q = prefix and (context + " " + word) or (word + " " + context)
q = q.strip()
q = strict and "\"%s\"" % q or q
t = service in (WIKIPEDIA, WIKIA) and "*" or SEARCH
r = service.search(q, type=t, count=1, **kwargs)
R.append(r)
s = float(sum([r.total or 1 for r in R])) or 1.0
R = [((r.total or 1)/s, r.query) for r in R]
R = sorted(R, reverse=kwargs.pop("reverse", True))
return R
#print sort(["black", "happy"], "darth vader", GOOGLE)
#### DOCUMENT OBJECT MODEL #########################################################################
# The Document Object Model (DOM) is a cross-platform and language-independent convention
# for representing and interacting with objects in HTML, XHTML and XML documents.
# The pattern.web DOM can be used to traverse HTML source code as a tree of nested elements.
# The pattern.web DOM is based on Beautiful Soup.
# Beautiful Soup is wrapped in DOM, Element and Text classes, resembling the Javascript DOM.
# Beautiful Soup can also be used directly, since it is imported here.
# L. Richardson (2004), http://www.crummy.com/software/BeautifulSoup/
SOUP = (
BeautifulSoup.BeautifulSoup,
BeautifulSoup.Tag,
BeautifulSoup.NavigableString,
BeautifulSoup.Comment
)
NODE, TEXT, COMMENT, ELEMENT, DOCUMENT = \
"node", "text", "comment", "element", "document"
#--- NODE ------------------------------------------------------------------------------------------
class Node(object):
def __init__(self, html, type=NODE, **kwargs):
""" The base class for Text, Comment and Element.
All DOM nodes can be navigated in the same way (e.g. Node.parent, Node.children, ...)
"""
self.type = type
self._p = not isinstance(html, SOUP) and BeautifulSoup.BeautifulSoup(u(html), **kwargs) or html
@property
def _beautifulSoup(self):
# If you must, access the BeautifulSoup object with Node._beautifulSoup.
return self._p
def __eq__(self, other):
# Two Node objects containing the same BeautifulSoup object are equal.
return isinstance(other, Node) and hash(self._p) == hash(other._p)
def _wrap(self, x):
# Navigating to other nodes yields either Text, Element or None.
if isinstance(x, BeautifulSoup.Comment):
return Comment(x)
if isinstance(x, BeautifulSoup.Declaration):
return Text(x)
if isinstance(x, BeautifulSoup.NavigableString):
return Text(x)
if isinstance(x, BeautifulSoup.Tag):
return Element(x)
@property
def parent(self):
return self._wrap(self._p.parent)
@property
def children(self):
return hasattr(self._p, "contents") and [self._wrap(x) for x in self._p.contents] or []
@property
def html(self):
return self.__unicode__()
@property
def source(self):
return self.__unicode__()
@property
def next_sibling(self):
return self._wrap(self._p.nextSibling)
@property
def previous_sibling(self):
return self._wrap(self._p.previousSibling)
next, previous = next_sibling, previous_sibling
def traverse(self, visit=lambda node: None):
""" Executes the visit function on this node and each of its child nodes.
"""
visit(self); [node.traverse(visit) for node in self.children]
def __nonzero__(self):
return True
def __len__(self):
return len(self.children)
def __iter__(self):
return iter(self.children)
def __getitem__(self, index):
return self.children[index]
def __repr__(self):
return "Node(type=%s)" % repr(self.type)
def __str__(self):
return bytestring(self.__unicode__())
def __unicode__(self):
return u(self._p)
#--- TEXT ------------------------------------------------------------------------------------------
class Text(Node):
""" Text represents a chunk of text without formatting in a HTML document.
For example: "the <b>cat</b>" is parsed to [Text("the"), Element("cat")].
"""
def __init__(self, string):
Node.__init__(self, string, type=TEXT)
def __repr__(self):
return "Text(%s)" % repr(self._p)
class Comment(Text):
""" Comment represents a comment in the HTML source code.
For example: "<!-- comment -->".
"""
def __init__(self, string):
Node.__init__(self, string, type=COMMENT)
def __repr__(self):
return "Comment(%s)" % repr(self._p)
#--- ELEMENT ---------------------------------------------------------------------------------------
class Element(Node):
def __init__(self, html):
""" Element represents an element or tag in the HTML source code.
For example: "<b>hello</b>" is a "b"-Element containing a child Text("hello").
"""
Node.__init__(self, html, type=ELEMENT)
@property
def tagname(self):
return self._p.name
tag = tagName = tagname
@property
def attributes(self):
if "_attributes" not in self.__dict__:
self._attributes = self._p._getAttrMap()
return self._attributes
attr = attrs = attributes
@property
def id(self):
return self.attributes.get("id")
@property
def content(self):
""" Yields the element content as a unicode string.
"""
return u"".join([u(x) for x in self._p.contents])
string = content
@property
def source(self):
""" Yields the HTML source as a unicode string (tag + content).
"""
return u(self._p)
html = source
def get_elements_by_tagname(self, v):
""" Returns a list of nested Elements with the given tag name.
The tag name can include a class (e.g. div.header) or an id (e.g. div#content).
"""
if isinstance(v, basestring) and "#" in v:
v1, v2 = v.split("#")
v1 = v1 in ("*","") or v1.lower()
return [Element(x) for x in self._p.findAll(v1, id=v2)]
if isinstance(v, basestring) and "." in v:
v1, v2 = v.split(".")
v1 = v1 in ("*","") or v1.lower()
return [Element(x) for x in self._p.findAll(v1, v2)]
return [Element(x) for x in self._p.findAll(v in ("*","") or v.lower())]
by_tag = getElementsByTagname = get_elements_by_tagname
def get_element_by_id(self, v):
""" Returns the first nested Element with the given id attribute value.
"""
return ([Element(x) for x in self._p.findAll(id=v, limit=1) or []]+[None])[0]
by_id = getElementById = get_element_by_id
def get_elements_by_classname(self, v):
""" Returns a list of nested Elements with the given class attribute value.
"""
return [Element(x) for x in (self._p.findAll(True, v))]
by_class = getElementsByClassname = get_elements_by_classname
def get_elements_by_attribute(self, **kwargs):
""" Returns a list of nested Elements with the given attribute value.
"""
return [Element(x) for x in (self._p.findAll(True, attrs=kwargs))]
by_attribute = by_attr = getElementsByAttribute = get_elements_by_attribute
def __call__(self, selector):
""" Returns a list of nested Elements that match the given CSS selector.
For example: Element("div#main p.comment a:first-child") matches:
"""
return SelectorChain(selector).search(self)
def __getattr__(self, k):
if k in self.__dict__:
return self.__dict__[k]
if k in self.attributes:
return self.attributes[k]
raise AttributeError, "'Element' object has no attribute '%s'" % k
def __contains__(self, v):
if isinstance(v, Element):
v = v.content
return v in self.content
def __repr__(self):
return "Element(tag=%s)" % repr(self.tagname)
#--- DOCUMENT --------------------------------------------------------------------------------------
class Document(Element):
def __init__(self, html, **kwargs):
""" Document is the top-level element in the Document Object Model.
It contains nested Element, Text and Comment nodes.
"""
# Aliases for BeautifulSoup optional parameters:
kwargs["selfClosingTags"] = kwargs.pop("self_closing", kwargs.get("selfClosingTags"))
Node.__init__(self, u(html).strip(), type=DOCUMENT, **kwargs)
@property
def declaration(self):
""" Yields the <!doctype> declaration, as a TEXT Node or None.
"""
for child in self.children:
if isinstance(child._p, BeautifulSoup.Declaration):
return child
@property
def head(self):
return self._wrap(self._p.head)
@property
def body(self):
return self._wrap(self._p.body)
@property
def tagname(self):
return None
tag = tagname
def __repr__(self):
return "Document()"
DOM = Document
#article = Wikipedia().search("Document Object Model")
#dom = DOM(article.html)
#print dom.get_element_by_id("References").source
#print [element.attributes["href"] for element in dom.get_elements_by_tagname("a")]
#print dom.get_elements_by_tagname("p")[0].next.previous.children[0].parent.__class__
#print
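# A small sketch of Node.traverse(), which calls a visit function on every nested node:
#def visit(node):
#    if node.type == ELEMENT:
#        print node.tagname
#dom = DOM("<div><p>the <b>cat</b></p></div>")
#dom.traverse(visit)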
#--- DOM CSS SELECTORS -----------------------------------------------------------------------------
# CSS selectors are pattern matching rules (or selectors) to select elements in the DOM.
# CSS selectors may range from simple element tag names to rich contextual patterns.
# http://www.w3.org/TR/CSS2/selector.html
# "*" = <div>, <p>, ... (all elements)
# "*#x" = <div id="x">, <p id="x">, ... (all elements with id="x")
# "div#x" = <div id="x"> (<div> elements with id="x")
# "div.x" = <div class="x"> (<div> elements with class="x")
# "div[class='x']" = <div class="x"> (<div> elements with attribute "class"="x")
# "div:first-child" = <div><a>1st<a><a></a></div> (first child inside a <div>)
# "div a" = <div><p><a></a></p></div> (all <a>'s inside a <div>)
# "div, a" = <div>, <a> (all <a>'s and <div> elements)
# "div + a" = <div></div><a></a> (all <a>'s directly preceded by <div>)
# "div > a" = <div><a></a></div> (all <a>'s directly inside a <div>)
# "div < a" (all <div>'s directly containing an <a>)
# Selectors are case-insensitive.
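# A usage sketch of CSS selection through Element.__call__ (defined above), which
# delegates to the SelectorChain class below; the selectors are illustrative assumptions:
#article = Wikipedia().search("Document Object Model")
#dom = DOM(article.html)
#for e in dom("span.mw-headline"): # Section headings.
#    print e.content
#for e in dom("div p a:first-child"): # First link inside a paragraph in a <div>.
#    print e.attributes.get("href")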
class Selector(object):
def __init__(self, s):
""" A simple CSS selector is a type (e.g., "p") or universal ("*") selector
followed by id selectors, attribute selectors, or pseudo-elements.
"""
self.string = s
s = s.strip()
s = s.lower()
s = s.startswith(("#", ".", ":")) and "*" + s or s
s = s.replace("#", " #") + " #" # #id
s = s.replace(".", " .") # .class
s = s.replace(":", " :") # :pseudo-element
s = s.replace("[", " [") # [attribute="value"]
s = s.split(" ")
self.tag, self.id, self.classes, self.pseudo, self.attributes = (
s[0],
[x[1:] for x in s if x[0] == "#"][0],
set([x[1:] for x in s if x[0] == "."]),
set([x[1:] for x in s if x[0] == ":"]),
dict(self._parse_attribute(x) for x in s if x[0] == "[")
)
def _parse_attribute(self, s):
""" Returns an (attribute, value)-tuple for the given attribute selector.
"""
s = s.strip("[]")
s = s.replace("'", "")
s = s.replace('"', "")
s = s.replace("<!space>", " ")
s = re.sub(r"(\~|\||\^|\$|\*)\=", "=\\1", s)
s = s.split("=") + [True]
s = s[:2]
if s[1] is not True and s[1].startswith(("~", "|", "^", "$", "*")):
p, s[1] = s[1][0], s[1][1:]
if p == "~": r = r"(^|\s)%s(\s|$)"
if p == "|": r = r"^%s(-|$)" # XXX doesn't work with spaces.
if p == "^": r = r"^%s"
if p == "$": r = r"%s$"
if p == "*": r = r"%s"
s[1] = re.compile(r % s[1], re.I)
return s[:2]
def _first_child(self, e):
""" Returns the first child Element of the given element.
"""
if isinstance(e, Node):
for e in e.children:
if isinstance(e, Element):
return e
def _first_sibling(self, e):
""" Returns the first next sibling Element of the given element.
"""
while isinstance(e, Node):
e = e.next
if isinstance(e, Element):
return e
def _contains(self, e, s):
""" Returns True if string s occurs in the given element (case-insensitive).
"""
s = re.sub(r"^contains\((.*?)\)$", "\\1", s)
s = re.sub(r"^[\"']|[\"']$", "", s)
return re.search(s.lower(), e.content.lower()) is not None
def match(self, e):
""" Returns True if the given element matches the simple CSS selector.
"""
if not isinstance(e, Element):
return False
if self.tag not in (e.tag, "*"):
return False
if self.id not in ((e.id or "").lower(), "", None):
return False
if self.classes.issubset(set(map(lambda s: s.lower(), e.attr.get("class", "").split()))) is False:
return False
if "first-child" in self.pseudo and self._first_child(e.parent) != e:
return False
if any(x.startswith("contains") and not self._contains(e, x) for x in self.pseudo):
return False # jQuery :contains("...") selector.
        for k, v in self.attributes.items():
if k not in e.attrs or v not in (e.attrs[k].lower(), True):
return False
return True
def search(self, e):
""" Returns the nested elements that match the simple CSS selector.
"""
# Map tag to True if it is "*".
tag = self.tag == "*" or self.tag
# Map id into a case-insensitive **kwargs dict.
i = lambda s: re.compile(r"\b%s\b" % s, re.I)
a = {"id": i(self.id)} if self.id else {}
a.update(map(lambda (k, v): (k, i(v)), self.attributes.iteritems()))
# Match tag + id + all classes + relevant pseudo-elements.
if not isinstance(e, Element):
return []
if len(self.classes) == 0 or len(self.classes) >= 2:
e = map(Element, e._p.findAll(tag, **a))
if len(self.classes) == 1:
e = map(Element, e._p.findAll(tag, **dict(a, **{"class": i(list(self.classes)[0])})))
if len(self.classes) >= 2:
e = filter(lambda e: self.classes.issubset(set(e.attr.get("class", "").lower().split())), e)
if "first-child" in self.pseudo:
e = filter(lambda e: e == self._first_child(e.parent), e)
if any(x.startswith("contains") for x in self.pseudo):
e = filter(lambda e: all(not x.startswith("contains") or self._contains(e, x) for x in self.pseudo), e)
return e
def __repr__(self):
return "Selector(%s)" % repr(self.string)
class SelectorChain(list):
def __init__(self, s):
""" A selector is a chain of one or more simple selectors,
separated by combinators (e.g., ">").
"""
self.string = s
for s in s.split(","):
s = s.lower()
s = s.strip()
s = re.sub(r" +", " ", s)
s = re.sub(r" *\> *", " >", s)
s = re.sub(r" *\< *", " <", s)
s = re.sub(r" *\+ *", " +", s)
s = re.sub(r"\[.*?\]", lambda m: m.group(0).replace(" ", "<!space>"), s)
self.append([])
for s in s.split(" "):
if not s.startswith((">", "<", "+")):
self[-1].append((" ", Selector(s)))
elif s.startswith(">"):
self[-1].append((">", Selector(s[1:])))
elif s.startswith("<"):
self[-1].append(("<", Selector(s[1:])))
elif s.startswith("+"):
self[-1].append(("+", Selector(s[1:])))
def search(self, e):
""" Returns the nested elements that match the CSS selector chain.
"""
m, root = [], e
for chain in self:
e = [root]
for combinator, s in chain:
# Search Y, where:
if combinator == " ":
# X Y => X is ancestor of Y
e = map(s.search, e)
e = list(itertools.chain(*e))
if combinator == ">":
# X > Y => X is parent of Y
e = map(lambda e: filter(s.match, e.children), e)
e = list(itertools.chain(*e))
if combinator == "<":
# X < Y => X is child of Y
e = map(lambda e: e.parent, e)
e = filter(s.match, e)
if combinator == "+":
# X + Y => X directly precedes Y
e = map(s._first_sibling, e)
e = filter(s.match, e)
m.extend(e)
return m
#dom = DOM("""
#<html>
#<head></head>
#<body>
# <div id="#main">
# <span class="11 22 33">x</span>
# </div>
#</body>
#</html>
#""")
#
#print dom("*[class='11']")
#print dom("*[class^='11']")
#print dom("*[class~='22']")
#print dom("*[class$='33']")
#print dom("*[class*='3']")
#### WEB CRAWLER ###################################################################################
# Tested with a crawl across 1,000 domains so far.
class Link(object):
def __init__(self, url, text="", relation="", referrer=""):
""" A hyperlink parsed from a HTML document, in the form:
            <a href="url" title="text" rel="relation">xxx</a>.
"""
self.url, self.text, self.relation, self.referrer = \
u(url), u(text), u(relation), u(referrer),
@property
def description(self):
return self.text
def __repr__(self):
return "Link(url=%s)" % repr(self.url)
# Used for sorting in Crawler.links:
def __eq__(self, link):
return self.url == link.url
def __ne__(self, link):
return self.url != link.url
def __lt__(self, link):
return self.url < link.url
def __gt__(self, link):
return self.url > link.url
class HTMLLinkParser(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
def parse(self, html, url=""):
""" Returns a list of Links parsed from the given HTML string.
"""
if html is None:
return None
self._url = url
self._data = []
self.feed(self.clean(html))
self.close()
self.reset()
return self._data
def handle_starttag(self, tag, attributes):
if tag == "a":
attributes = dict(attributes)
if "href" in attributes:
link = Link(url = attributes.get("href"),
text = attributes.get("title"),
relation = attributes.get("rel", ""),
referrer = self._url)
self._data.append(link)
def base(url):
""" Returns the URL domain name:
http://en.wikipedia.org/wiki/Web_crawler => en.wikipedia.org
"""
return urlparse.urlparse(url).netloc
def abs(url, base=None):
""" Returns the absolute URL:
../media + http://en.wikipedia.org/wiki/ => http://en.wikipedia.org/media
"""
if url.startswith("#") and not base is None and not base.endswith("/"):
if not re.search("[^/]/[^/]", base):
base += "/"
return urlparse.urljoin(base, url)
DEPTH = "depth"
BREADTH = "breadth"
FIFO = "fifo" # First In, First Out.
FILO = "filo" # First In, Last Out.
LIFO = "lifo" # Last In, First Out (= FILO).
class Crawler(object):
def __init__(self, links=[], domains=[], delay=20.0, parse=HTMLLinkParser().parse, sort=FIFO):
""" A crawler can be used to browse the web in an automated manner.
It visits the list of starting URLs, parses links from their content, visits those, etc.
- Links can be prioritized by overriding Crawler.priority().
- Links can be ignored by overriding Crawler.follow().
- Each visited link is passed to Crawler.visit(), which can be overridden.
"""
self.parse = parse
self.delay = delay # Delay between visits to the same (sub)domain.
self.domains = domains # Domains the crawler is allowed to visit.
self.history = {} # Domain name => time last visited.
self.visited = {} # URLs visited.
self._queue = [] # URLs scheduled for a visit: (priority, time, Link).
self._queued = {} # URLs scheduled so far, lookup dictionary.
self.QUEUE = 10000 # Increase or decrease according to available memory.
self.sort = sort
# Queue given links in given order:
for link in (isinstance(links, basestring) and [links] or links):
self.push(link, priority=1.0, sort=FIFO)
@property
def done(self):
""" Yields True if no further links are scheduled to visit.
"""
return len(self._queue) == 0
def push(self, link, priority=1.0, sort=FILO):
""" Pushes the given link to the queue.
Position in the queue is determined by priority.
Equal ranks are sorted FIFO or FILO.
With priority=1.0 and FILO, the link is inserted to the queue.
With priority=0.0 and FIFO, the link is appended to the queue.
"""
if not isinstance(link, Link):
link = Link(url=link)
dt = time.time()
dt = sort == FIFO and dt or 1 / dt
bisect.insort(self._queue, (1 - priority, dt, link))
self._queued[link.url] = True
def pop(self, remove=True):
""" Returns the next Link queued to visit and removes it from the queue.
Links on a recently visited (sub)domain are skipped until Crawler.delay has elapsed.
"""
now = time.time()
for i, (priority, dt, link) in enumerate(self._queue):
if self.delay <= now - self.history.get(base(link.url), 0):
if remove is True:
self._queue.pop(i)
self._queued.pop(link.url, None)
return link
@property
def next(self):
""" Returns the next Link queued to visit (without removing it).
"""
return self.pop(remove=False)
def crawl(self, method=DEPTH, **kwargs):
""" Visits the next link in Crawler._queue.
If the link is on a domain recently visited (< Crawler.delay) it is skipped.
Parses the content at the link for new links and adds them to the queue,
according to their Crawler.priority().
Visited links (and content) are passed to Crawler.visit().
"""
link = self.pop()
if link is None:
return False
if link.url not in self.visited:
t = time.time()
url = URL(link.url)
if url.mimetype == "text/html":
try:
kwargs.setdefault("unicode", True)
html = url.download(**kwargs)
for new in self.parse(html, url=link.url):
new.url = abs(new.url, base=url.redirect or link.url)
new.url = self.normalize(new.url)
# 1) Parse new links from HTML web pages.
# 2) Schedule unknown links for a visit.
# 3) Only links that are not already queued are queued.
# 4) Only links for which Crawler.follow() is True are queued.
# 5) Only links on Crawler.domains are queued.
if new.url == link.url:
continue
if new.url in self.visited:
continue
if new.url in self._queued:
continue
if self.follow(new) is False:
continue
if self.domains and not base(new.url).endswith(tuple(self.domains)):
continue
# 6) Limit the queue (remove tail), unless you are Google.
if self.QUEUE is not None and \
self.QUEUE * 1.25 < len(self._queue):
self._queue = self._queue[:self.QUEUE]
self._queued.clear()
self._queued.update(dict((q[2].url, True) for q in self._queue))
# 7) Position in the queue is determined by Crawler.priority().
# 8) Equal ranks are sorted FIFO or FILO.
self.push(new, priority=self.priority(new, method=method), sort=self.sort)
self.visit(link, source=html)
except URLError:
# URL can not be reached (HTTP404NotFound, URLTimeout).
self.fail(link)
else:
# URL MIME-type is not HTML, don't know how to handle.
self.fail(link)
# Log the current time visited for the domain (see Crawler.pop()).
# Log the URL as visited.
self.history[base(link.url)] = t
self.visited[link.url] = True
return True
# Nothing happened, we already visited this link.
return False
def normalize(self, url):
""" Called from Crawler.crawl() to normalize URLs.
For example: return url.split("?")[0]
"""
# All links pass through here (visited or not).
# This can be a place to count backlinks.
return url
def follow(self, link):
""" Called from Crawler.crawl() to determine if it should follow this link.
For example: return "nofollow" not in link.relation
"""
return True
def priority(self, link, method=DEPTH):
""" Called from Crawler.crawl() to determine the priority of this link,
as a number between 0.0-1.0. Links with higher priority are visited first.
"""
# Depth-first search dislikes external links to other (sub)domains.
external = base(link.url) != base(link.referrer)
if external is True:
if method == DEPTH:
return 0.75
if method == BREADTH:
return 0.85
return 0.80
def visit(self, link, source=None):
""" Called from Crawler.crawl() when the link is crawled.
When source=None, the link is not a web page (and was not parsed),
            or possibly a URLTimeout occurred (content size too big).
"""
pass
def fail(self, link):
""" Called from Crawler.crawl() for link whose MIME-type could not be determined,
or which raised a URLError on download.
"""
pass
Spider = Crawler
#class Polly(Crawler):
# def visit(self, link, source=None):
# print "visited:", link.url, "from:", link.referrer
# def fail(self, link):
# print "failed:", link.url
#
#p = Polly(links=["http://nodebox.net/"], domains=["nodebox.net"], delay=5)
#while not p.done:
# p.crawl(method=DEPTH, cached=True, throttle=5)
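# A further hypothetical sketch (class name, rules and URL are illustrative only),
# showing how Crawler.follow() and Crawler.priority() can be overridden in the same way:
#class Focused(Crawler):
#    def follow(self, link):
#        return "nofollow" not in link.relation # Skip rel="nofollow" links.
#    def priority(self, link, method=DEPTH):
#        if link.url.endswith((".jpg", ".png")):
#            return 0.0 # Visit image links last.
#        return Crawler.priority(self, link, method)
#
#f = Focused(links=["http://nodebox.net/"], domains=["nodebox.net"], delay=5)
#while not f.done:
#    f.crawl(method=BREADTH, cached=True)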
#--- CRAWL FUNCTION --------------------------------------------------------------------------------
# Functional approach to crawling.
def crawl(links=[], domains=[], delay=20.0, parse=HTMLLinkParser().parse, sort=FIFO, method=DEPTH, **kwargs):
""" Returns a generator that yields (Link, source)-tuples of visited pages.
When the crawler is idle, it yields (None, None).
"""
# The scenarios below defines "idle":
# - crawl(delay=10, throttle=0)
# The crawler will wait 10 seconds before visiting the same subdomain.
# The crawler will not throttle downloads, so the next link is visited instantly.
# So sometimes (None, None) is returned while it waits for an available subdomain.
# - crawl(delay=0, throttle=10)
# The crawler will wait 10 seconds after each and any visit.
# The crawler will not delay before visiting the same subdomain.
# So usually a result is returned each crawl.next(), but each call takes 10 seconds.
# - asynchronous(crawl().next)
# AsynchronousRequest.value is set to (Link, source) once AsynchronousRequest.done=True.
# The program will not halt in the meantime (i.e., the next crawl is threaded).
crawler = Crawler(links, domains, delay, parse, sort)
bind(crawler, "visit", \
lambda crawler, link, source=None: \
setattr(crawler, "crawled", (link, source))) # Define Crawler.visit() on-the-fly.
while not crawler.done:
crawler.crawled = (None, None)
crawler.crawl(method, **kwargs)
yield crawler.crawled
#for link, source in crawl("http://www.clips.ua.ac.be/", delay=0, throttle=1, cached=False):
# print link
#g = crawl("http://www.clips.ua.ac.be/")
#for i in range(10):
# p = asynchronous(g.next)
# while not p.done:
# print "zzz..."
# time.sleep(0.1)
# link, source = p.value
# print link
#### DOCUMENT PARSER ###############################################################################
# Not to be confused with Document, which is the top-level element in the HTML DOM.
class DocumentParserError(Exception):
pass
class DocumentParser(object):
def __init__(self, path, *args, **kwargs):
""" Parses a text document (e.g., .pdf or .docx),
given as a file path or a string.
"""
self.content = self._parse(path, *args, **kwargs)
def _open(self, path):
""" Returns a file-like object with a read() method,
from the given file path or string.
"""
if isinstance(path, basestring) and os.path.exists(path):
return open(path, "rb")
if hasattr(path, "read"):
return path
return StringIO.StringIO(path)
def _parse(self, path, *args, **kwargs):
""" Returns a plaintext Unicode string parsed from the given document.
"""
        return plaintext(decode_utf8(self._open(path).read()))
@property
def string(self):
return self.content
def __unicode__(self):
return self.content
#--- PDF PARSER ------------------------------------------------------------------------------------
# Yusuke Shinyama, PDFMiner, http://www.unixuser.org/~euske/python/pdfminer/
class PDFError(DocumentParserError):
pass
class PDF(DocumentParser):
def __init__(self, path, output="txt"):
self.content = self._parse(path, format=output)
def _parse(self, path, *args, **kwargs):
# The output is useful for mining but not for display.
# Alternatively, PDF(format="html") preserves some layout.
from pdf.pdfinterp import PDFResourceManager, process_pdf
from pdf.converter import TextConverter, HTMLConverter
from pdf.layout import LAParams
try:
m = PDFResourceManager()
s = StringIO.StringIO()
p = kwargs.get("format", "txt").endswith("html") and HTMLConverter or TextConverter
p = p(m, s, codec="utf-8", laparams=LAParams())
process_pdf(m, p, self._open(path), set(), maxpages=0, password="")
except Exception, e:
raise PDFError, str(e)
s = s.getvalue()
s = decode_utf8(s)
s = s.strip()
s = re.sub(r"([a-z])\-\n", "\\1", s) # Hyphenation.
s = s.replace("\n\n", "<!-- #p -->") # Paragraphs.
s = s.replace("\n", " ")
s = s.replace("<!-- #p -->", "\n\n")
s = collapse_spaces(s)
return s
#--- OOXML PARSER ----------------------------------------------------------------------------------
# Mike Maccana, Python docx, https://github.com/mikemaccana/python-docx
class DOCXError(DocumentParserError):
pass
class DOCX(DocumentParser):
def _parse(self, path, *args, **kwargs):
from docx.docx import opendocx
from docx.docx import getdocumenttext
try:
s = opendocx(self._open(path))
s = getdocumenttext(s)
except Exception, e:
raise DOCXError, str(e)
s = "\n\n".join(p for p in s)
s = decode_utf8(s)
s = collapse_spaces(s)
return s
#---------------------------------------------------------------------------------------------------
def parsepdf(path, *args, **kwargs):
""" Returns the content as a Unicode string from the given .pdf file.
"""
return PDF(path, *args, **kwargs).content
def parsedocx(path, *args, **kwargs):
""" Returns the content as a Unicode string from the given .docx file.
"""
return DOCX(path, *args, **kwargs).content
def parsehtml(path, *args, **kwargs):
""" Returns the content as a Unicode string from the given .html file.
"""
return plaintext(DOM(path, *args, **kwargs).body)
def parsedoc(path, format=None):
""" Returns the content as a Unicode string from the given document (.html., .pdf, .docx).
"""
if isinstance(path, basestring):
if format == "pdf" or path.endswith(".pdf"):
return parsepdf(path)
if format == "docx" or path.endswith(".docx"):
return parsedocx(path)
if format == "html" or path.endswith((".htm", ".html", ".xhtml")):
return parsehtml(path)
# Brute-force approach if the format is unknown.
for f in (parsepdf, parsedocx, parsehtml):
try:
return f(path)
except:
pass
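# Hypothetical usage of the parsers above (the file names are placeholders, assuming
# the pdfminer / python-docx dependencies mentioned earlier are installed):
#print parsedoc("paper.pdf")    # routed to PDF / parsepdf()
#print parsedoc("report.docx")  # routed to DOCX / parsedocx()
#print parsedoc("page.html")    # routed to parsehtml(), i.e. plaintext(DOM(...).body)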
| [
"[email protected]"
] | |
4cd078afb5311f126095acf7b92ad0506caea81c | 87796f8b79b325cdfca7a98b120d690a13ebd267 | /capture/noworkflow/tests/prov_definition/__init__.py | b9e742c0dad1702ab87430c5fbb2e8279693e496 | [
"MIT"
] | permissive | raffaelfoidl/noworkflow | c53c6be45960a93aa546fd8f6c967af2b7c31c93 | aa4ca189df24fec6c7abd32bcca6a097b21fdf31 | refs/heads/master | 2021-01-06T16:32:57.036864 | 2020-03-23T14:12:37 | 2020-03-23T14:12:37 | 241,398,493 | 0 | 0 | MIT | 2020-02-18T15:36:16 | 2020-02-18T15:36:15 | null | UTF-8 | Python | false | false | 426 | py | # Copyright (c) 2016 Universidade Federal Fluminense (UFF)
# Copyright (c) 2016 Polytechnic Institute of New York University.
# This file is part of noWorkflow.
# Please, consult the license terms in the LICENSE file.
from __future__ import (absolute_import, print_function,
division, unicode_literals)
from .slicing_test import TestSlicingDependencies
__all__ = [
b"TestSlicingDependencies",
]
| [
"[email protected]"
] | |
8614ee5eeee6d74b19ddc1e1113d47b06dddb8bd | 4e5141121d8b4015db233cbc71946ec3cfbe5fe6 | /samples/basic/crud/gnmi/models/cisco-ios-xr/Cisco-IOS-XR-ethernet-lldp-cfg/gn-create-xr-ethernet-lldp-cfg-20-ydk.py | 3d8398de12dc3fa89df78ddc852a3fa57177f1dc | [
"Apache-2.0"
] | permissive | itbj/ydk-py-samples | 898c6c9bad9d6f8072892300d42633d82ec38368 | c5834091da0ebedbb11af7bbf780f268aad7040b | refs/heads/master | 2022-11-20T17:44:58.844428 | 2020-07-25T06:18:02 | 2020-07-25T06:18:02 | 282,382,442 | 1 | 0 | null | 2020-07-25T06:04:51 | 2020-07-25T06:04:50 | null | UTF-8 | Python | false | false | 2,824 | py | #!/usr/bin/env python
#
# Copyright 2016 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Create configuration for model Cisco-IOS-XR-ethernet-lldp-cfg.
usage: gn-create-xr-ethernet-lldp-cfg-20-ydk.py [-h] [-v] device
positional arguments:
device gNMI device (http://user:password@host:port)
optional arguments:
-h, --help show this help message and exit
-v, --verbose print debugging messages
"""
from argparse import ArgumentParser
from urlparse import urlparse
from ydk.path import Repository
from ydk.services import CRUDService
from ydk.gnmi.providers import gNMIServiceProvider
from ydk.models.cisco_ios_xr import Cisco_IOS_XR_ethernet_lldp_cfg \
as xr_ethernet_lldp_cfg
import os
import logging
YDK_REPO_DIR = os.path.expanduser("~/.ydk/")
def config_lldp(lldp):
"""Add config data to lldp object."""
lldp.enable = True
if __name__ == "__main__":
"""Execute main program."""
parser = ArgumentParser()
parser.add_argument("-v", "--verbose", help="print debugging messages",
action="store_true")
parser.add_argument("device",
help="gNMI device (http://user:password@host:port)")
args = parser.parse_args()
device = urlparse(args.device)
# log debug messages if verbose argument specified
if args.verbose:
logger = logging.getLogger("ydk")
logger.setLevel(logging.INFO)
handler = logging.StreamHandler()
formatter = logging.Formatter(("%(asctime)s - %(name)s - "
"%(levelname)s - %(message)s"))
handler.setFormatter(formatter)
logger.addHandler(handler)
# create gNMI provider
repository = Repository(YDK_REPO_DIR+device.hostname)
provider = gNMIServiceProvider(repo=repository,
address=device.hostname,
port=device.port,
username=device.username,
password=device.password)
# create CRUD service
crud = CRUDService()
lldp = xr_ethernet_lldp_cfg.Lldp() # create object
config_lldp(lldp) # add object configuration
# create configuration on gNMI device
crud.create(provider, lldp)
exit()
# End of script
| [
"[email protected]"
] | |
174b23a0701205e2da87894ca11e6fddd5a246d5 | 38a263d52c52a8834b175e867330d8415dad7384 | /Regression_suite_bigc/api/test_shipping_method_api.py | 39dd52af582926da309c4c1bc141b4d413de60a9 | [] | no_license | testing-sravan/tests-scripts-worked | 4e57c47c4ea9a9bc22e85a8b6d628615907537bd | 33c579918356f6ff1cdfd5635d6eba1d85eba0b7 | refs/heads/master | 2021-01-10T20:39:12.805680 | 2014-09-15T04:54:02 | 2014-09-15T04:54:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,893 | py | from lib.api_lib import *
from lib.shipping_class import *
AUSPOST_AUTH_KEY = "bzfac4efaf7e7e51a4b1dbd7cc76cb31"
@pytest.mark.skipif("True")
def ttest_disable_shipping_reboot(browser, url, email, password):
shipping = ShippingClass(browser)
shipping.go_to_admin(browser, url, email, password)
shipping.set_feature_flag(browser, 'disable', 'ShippingReboot')
def test_create_australian_post_in_control_panel(browser, url, email, password):
shipping = ShippingClass(browser)
shipping.go_to_admin(browser, url, email, password)
shipping.navigate_to_shipping()
shipping.skip_shipping_intro()
if not shipping.is_new_ui(browser):
shipping.setup_store_location_new(shipping.au_store_location)
shipping.add_country_zone(shipping.us_country_zone)
shipping.open_country_zone("United States")
shipping.setup_australia_post()
return
pytest.skip("Not new UI")
shipping.setup_store_location(browser, "Australia","New South Wales","2000")
browser.find_element_by_id('tab1').click()
browser.execute_script("$('#div1 .dropdown-trigger:first').click()")
WebDriverWait(browser, 30).until(lambda s: s.find_element_by_link_text('Edit Methods').is_displayed() and s.find_element_by_link_text('Edit Methods'))
browser.find_element_by_link_text('Edit Methods').click()
shipping.disable_the_shipping_method(browser)
WebDriverWait(browser, 30).until(lambda s: s.find_element_by_xpath('//input[@value="Add a Shipping Method..."]').is_displayed() and s.find_element_by_xpath('//input[@value="Add a Shipping Method..."]'))
browser.find_element_by_xpath('//input[@value="Add a Shipping Method..."]').click()
browser.find_element_by_xpath('//span[text()="Australia Post"]').click()
element= shipping.wait_until_element_present('shipping_australiapost_auth_key', "ID")
element.send_keys(AUSPOST_AUTH_KEY)
element = shipping.wait_until_element_present('Select All', 'LINK')
element.click()
browser.find_element_by_name('SubmitButton1').click()
shipping.verify_and_assert_success_message(browser, "The shipping method has been created successfully.", ".alert-success")
#JSON Payload
def test_get_shipping_methods(auth_token, url, username, state):
api = urlparse.urljoin(url, 'api/v2/shipping/methods')
result = basic_auth_get(api, username, auth_token)
newdata = json.loads(result.text)
state['shipping_method_id'] = newdata[0]['id']
assert newdata[0]['id'] > 0
for item in newdata:
try:
assert newdata[item].find('name').text == "Australia Post"
assert newdata[item].find('method_name').text == "shipping_australiapost"
return
except:
pass
return False
def test_get_shipping_methods_by_id(auth_token, url, username, state):
api = urlparse.urljoin(url, 'api/v2/shipping/methods/' + str(state['shipping_method_id']) + '')
result = basic_auth_get(api, username, auth_token)
newdata = json.loads(result.text)
assert newdata[0]['id'] > 0
for item in newdata:
try:
assert newdata[item].find('name').text == "Australia Post"
assert newdata[item].find('method_name').text == "shipping_australiapost"
return
except:
pass
return False
# XML Payload
def test_get_shipping_methods_xml_payload(auth_token, url, username, state):
api = urlparse.urljoin(url, 'api/v2/shipping/methods')
result = basic_auth_get(api, username, auth_token, payload_format = 'xml')
newdata = etree.fromstring(result.text)
state['shipping_method_id_xml'] = newdata[0].find('id').text
assert newdata[0].find('id').text > 0
for item in newdata:
try:
assert newdata[item].find('name').text == "Australia Post"
assert newdata[item].find('method_name').text == "shipping_australiapost"
return
except:
pass
return False
def test_get_shipping_methods_by_id_xml_payload(auth_token, url, username, state):
api = urlparse.urljoin(url, 'api/v2/shipping/methods/' + str(state['shipping_method_id_xml']) + '')
result = basic_auth_get(api, username, auth_token, payload_format = 'xml')
newdata = etree.fromstring(result.text)
assert newdata[0].find('id').text > 0
for item in newdata:
try:
assert newdata[item].find('name').text == "Australia Post"
assert newdata[item].find('method_name').text == "shipping_australiapost"
return
except:
pass
return False
@pytest.mark.skipif("True")
def ttest_delete_australian_post_in_control_panel(browser, url, email, password):
shipping = ShippingClass(browser)
shipping.go_to_admin(browser, url, email, password)
browser.find_element_by_link_text("Setup & Tools").click()
browser.find_element_by_link_text('Shipping').click()
browser.find_element_by_id('tab1').click()
browser.execute_script("$('#div1 .dropdown-trigger:first').click()")
WebDriverWait(browser, 30).until(lambda s: s.find_element_by_link_text('Edit Methods').is_displayed() and s.find_element_by_link_text('Edit Methods'))
browser.find_element_by_link_text('Edit Methods').click()
browser.execute_script("$('.GridRow').find('td:contains(Australia Post)').parent('tr').children('td:eq(0)').find('input').attr('checked','checked')")
browser.find_element_by_xpath('//input[@value="Delete Selected"]').click()
try:
alert = browser.switch_to_alert()
alert.accept()
except WebDriverException:
browser.execute_script("window.confirm = function(){return true;}");
browser.find_element_by_xpath('//input[@value="Delete Selected"]').click()
shipping.verify_and_assert_success_message(browser, "The selected shipping methods have been deleted successfully.", ".alert-success")
| [
"jenkins@localhost"
] | jenkins@localhost |
ae79db95820afa78fc580aa49376922c0c238952 | ad13583673551857615498b9605d9dcab63bb2c3 | /output/models/ms_data/datatypes/facets/float_pkg/float_min_inclusive005_xsd/__init__.py | 2aa44ebe2b8660a5c90d5e944cdea45e6804713b | [
"MIT"
] | permissive | tefra/xsdata-w3c-tests | 397180205a735b06170aa188f1f39451d2089815 | 081d0908382a0e0b29c8ee9caca6f1c0e36dd6db | refs/heads/main | 2023-08-03T04:25:37.841917 | 2023-07-29T17:10:13 | 2023-07-30T12:11:13 | 239,622,251 | 2 | 0 | MIT | 2023-07-25T14:19:04 | 2020-02-10T21:59:47 | Python | UTF-8 | Python | false | false | 180 | py | from output.models.ms_data.datatypes.facets.float_pkg.float_min_inclusive005_xsd.float_min_inclusive005 import (
FooType,
Test,
)
__all__ = [
"FooType",
"Test",
]
| [
"[email protected]"
] | |
19278d8ef7d38d9fef53807d8b5f43c6599c1860 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_310/ch24_2019_02_28_22_51_17_076072.py | a696f9213b91ef86ad0fdb8ed69e705024d7c53c | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 204 | py | def classifica_triangulo(l1, l2, l3):
if l1==l2 and l1==l3:
return "equilátero"
    elif l1==l2 or l1==l3 or l2==l3:
return "isóceles"
else:
return "escaleno" | [
"[email protected]"
] | |
8b315feae897c34875ac54d7346c9f3fa36a7ae9 | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_28421.py | e88056b6d7429d3371507942c15ea0699c845088 | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 64 | py | # Python yagmail attachment not working
pip3 install -U yagmail
| [
"[email protected]"
] | |
12b57b06ca8399fdc27fa38e011de43de67a8a30 | 94f4bb0f6e43b2eb2f1bdb284a580b76121fa9af | /559.py | 36b10d1ab3ede26c95a47dba50b8ef0aa9c74592 | [] | no_license | huosan0123/leetcode-py | f1ec8226bae732369d4e1989b99ab0ba4b4061c4 | 22794e5e80f534c41ff81eb40072acaa1346a75c | refs/heads/master | 2021-01-25T11:48:17.365118 | 2019-09-12T15:45:34 | 2019-09-12T15:45:34 | 93,934,297 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 547 | py | """
# Definition for a Node.
class Node(object):
def __init__(self, val, children):
self.val = val
self.children = children
"""
class Solution(object):
def maxDepth(self, root):
"""
:type root: Node
:rtype: int
"""
if not root:
return 0
depths = [self.maxDepth(ch) for ch in root.children]
        # children is stored as a list and may be empty;
if not depths:
depth = 0
else:
depth = max(depths)
return 1 + depth
| [
"[email protected]"
] | |
abc71170bebc5eeabbc454115b8a9dcc7b9a858e | 7db0883137d119565540f2d071638c4016f39213 | /Note/Spider/2.28-selenium/selenium+phantomjs学习/selenium_study3.py | 452662361c9123151650e6ef605f1db84e6b3d4e | [] | no_license | PhilHuang-d/python--- | cf22a4cc00d4beaaf75ef7ca87a4c5d31a9d5efe | 152c18f51838ce652b79a0cd24765b1a1c237eee | refs/heads/master | 2021-09-13T05:32:53.754865 | 2018-04-25T13:36:40 | 2018-04-25T13:36:40 | 108,812,447 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,975 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import random
__author__ = 'Terry'
USER_AGENTS = [
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.89 Safari/537.36',
'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0)',
'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:16.0) Gecko/20121026 Firefox/16.0'
]
phantomjs_driver_path = 'D:/phantomjs/bin/phantomjs.exe'
from selenium import webdriver
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
import time
# Import the DesiredCapabilities configuration object
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
def save_vcode(driver, element):
    # Take a full-page screenshot
driver.get_screenshot_as_file('screenshot.png')
left = int(element.location['x'])
top = int(element.location['y'])
right = int(element.location['x'] + element.size['width'])
bottom = int(element.location['y'] + element.size['height'])
    # Crop the captcha element out of the screenshot with PIL
from PIL import Image
im = Image.open('screenshot.png')
im = im.crop((left, top, right, bottom))
im.save('vcode.png')
dcap = dict(DesiredCapabilities.PHANTOMJS)
# Pick a random User-Agent from the USER_AGENTS list to disguise the browser
# dcap["phantomjs.page.settings.userAgent"] = (random.choice(USER_AGENTS))
dcap["phantomjs.page.settings.userAgent"] = 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:16.0) Gecko/20121026 Firefox/16.0'
# Not loading images makes page crawling much faster
# dcap["phantomjs.page.settings.loadImages"] = False
# Launch the PhantomJS browser with the configured capabilities
driver = webdriver.PhantomJS(phantomjs_driver_path, desired_capabilities=dcap)
# Set a 10-second page-load timeout, similar to the timeout option of requests.get();
# driver.get() has no timeout parameter of its own.
# driver.get(url) has occasionally hung without returning or raising an error, which
# stalls the program; setting this timeout avoids that.
driver.set_page_load_timeout(10)
# Set a 10-second script timeout
driver.set_script_timeout(10)
# Set the window size
driver.set_window_size(1366, 768)
# Visit Baidu
driver.get('https://www.baidu.com')
WebDriverWait(driver, 30, 1).until(EC.presence_of_element_located((By.XPATH, '//*[@id="u1"]/a[7]')))
print(driver.title)
time.sleep(1)
# Click to open the login dialog
login_index = driver.find_element_by_xpath('//*[@id="u1"]/a[7]')
login_index.click()
time.sleep(.5)
# Switch to username/password login
login_user_and_pwd = driver.find_element_by_xpath('//*[@id="TANGRAM__PSP_10__footerULoginBtn"]')
login_user_and_pwd.click()
time.sleep(.5)
# Username input element
user = driver.find_element_by_xpath('//*[@id="TANGRAM__PSP_10__userName"]')
user.clear()
user.send_keys('username')  # placeholder credential
# Password input element
pwd = driver.find_element_by_xpath('//*[@id="TANGRAM__PSP_10__password"]')
pwd.clear()
pwd.send_keys('password')  # placeholder credential
while True:
    # Switch to the next captcha image
next_vcode = driver.find_element_by_xpath('//*[@id="TANGRAM__PSP_10__verifyCodeChange"]')
next_vcode.click()
    # Captcha image element
vcode_img = driver.find_element_by_xpath('//*[@id="TANGRAM__PSP_10__verifyCodeImg"]')
save_vcode(driver, vcode_img)
    # Ask the user to type in the captcha
    vcode_input = input('Please enter the captcha: ')
    vcode = driver.find_element_by_xpath('//*[@id="TANGRAM__PSP_10__verifyCode"]')
    # Fill the captcha into the form field on the page
vcode.send_keys(vcode_input)
    # Log in
login = driver.find_element_by_xpath('//*[@id="TANGRAM__PSP_10__submit"]')
login.click()
time.sleep(1)
try:
        # Check whether the login succeeded
        user_name = driver.find_element_by_xpath('//*[@id="s_username_top"]/span')
        print("Logged in as: " + user_name.text)
        print("Login successful")
break
except:
time.sleep(.3)
driver.get('http://index.baidu.com/?tpl=trend&word=%BB%C6%BD%F0')
# The driver has to be quit manually
driver.quit() | [
"[email protected]"
] | |
bfc54acf5cfe74cdca27af81c9e898c7ad2005c3 | 37a67a9531f8c32739ae70fc5be55b4c6acae60d | /multinomial.py | 61b5f29f47a1620d45a703bb42a374da45ab4e3d | [
"MIT"
] | permissive | gongkecun/distribution-is-all-you-need | da1b1b363df3343e0753e55564311e323cd6c890 | 93ae5ed7fa63607474d61723d2d28d1a4b3c653a | refs/heads/master | 2020-08-27T10:03:21.144561 | 2019-10-24T15:04:43 | 2019-10-24T15:04:43 | 217,326,807 | 0 | 0 | MIT | 2019-10-24T15:02:48 | 2019-10-24T15:02:48 | null | UTF-8 | Python | false | false | 1,217 | py | """
Code by Tae-Hwan Hung(@graykode)
https://en.wikipedia.org/wiki/Multinomial_distribution
3-Class Example
"""
import numpy as np
from matplotlib import pyplot as plt
import operator as op
from functools import reduce
def factorial(n):
return reduce(op.mul, range(1, n + 1), 1)
def const(n, a, b, c):
"""
return n! / a! b! c!, where a+b+c == n
"""
assert a + b + c == n
numer = factorial(n)
denom = factorial(a) * factorial(b) * factorial(c)
return numer / denom
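# Quick worked check of the helper above (the numbers are arbitrary, chosen only to
# illustrate the formula): const(4, 1, 1, 2) == 4! / (1! * 1! * 2!) == 24 / 2 == 12.0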
def multinomial(n):
"""
    :param n: number of trials; enumerates every (a, b, c) with a <= b <= c and a + b + c == n
"""
# get all a,b,c where a+b+c == n, a<b<c
ls = []
for i in range(1, n + 1):
for j in range(i, n + 1):
for k in range(j, n + 1):
if i + j + k == n:
ls.append([i, j, k])
y = [const(n, l[0], l[1], l[2]) for l in ls]
x = np.arange(len(y))
return x, y, np.mean(y), np.std(y)
for n_experiment in [20, 21, 22]:
x, y, u, s = multinomial(n_experiment)
plt.scatter(x, y, label=r'$trial=%d$' % (n_experiment))
plt.legend()
plt.savefig('graph/multinomial.png')
plt.show()
| [
"[email protected]"
] | |
245925707f4f6c6c98786d629ccf351760017361 | 80ae9b5cfb45b6e9cf7873ef7c46e17e117e4019 | /data/HackerRank-Python/Matrix Script.py | 114fa5ab8380753bbc2fe298d07e8f760aeb97f0 | [] | no_license | Ritvik19/CodeBook | ef7764d89b790e902ede5802f36d5ca910d8a50e | 2b4ed7938bbf156553d6ba5cba6216449528f0fc | refs/heads/master | 2021-07-04T08:25:52.478719 | 2020-08-08T06:54:14 | 2020-08-08T06:54:14 | 138,744,302 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 326 | py | #!/bin/python3
import math
import os
import random
import re
import sys
n, m = map(int, input().rstrip().split())
matrix = []
for _ in range(n):
matrix_item = input()
matrix.append(matrix_item)
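# Decoding idea: zip(*matrix) walks the grid column by column (top to bottom), which is
# the order the script was encoded in; the regex then collapses every run of symbols or
# spaces that sits *between* two alphanumeric characters into a single space.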
print(re.sub(r'(?<=[A-Za-z0-9])([^A-Za-z0-9]+)(?=[A-Za-z0-9])',' ',"".join("".join(decode) for decode in zip(*matrix))))
| [
"[email protected]"
] | |
e4076d56d19e0b60f79ef0b63139cbc98873367c | e92d752737f3e90a47355d5502a364a9e2d0c08b | /tests/test_reverseproxy.py | c93c3521ab514877f5527c50f379fbd88223bb84 | [
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | miri64/aiocoap | c7815758371f0a83c51b058fb0c24e8024feae9f | 93e699280b644465213fc8ba29bae556475fb6fc | refs/heads/master | 2023-08-15T16:46:27.985718 | 2021-10-05T17:15:00 | 2021-10-05T17:15:00 | 404,324,558 | 0 | 0 | NOASSERTION | 2021-09-08T11:39:12 | 2021-09-08T11:39:12 | null | UTF-8 | Python | false | false | 4,526 | py | # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak <http://sixpinetrees.blogspot.com/>,
# 2013-2014 Christian Amsüss <[email protected]>
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
import asyncio
import unittest
from . import common
from .test_server import WithAsyncLoop, Destructing, WithClient, WithTestServer, CLEANUPTIME
import aiocoap.proxy.client
import aiocoap.cli.proxy
from aiocoap.util import hostportjoin
class WithReverseProxy(WithAsyncLoop, Destructing):
def setUp(self):
super(WithReverseProxy, self).setUp()
self.reverseproxy = aiocoap.cli.proxy.Main(
["--reverse", "--bind", hostportjoin(self.proxyhost, self.proxyport), "--namebased", "%s:%s"%(self.name_for_real_server, self.servernetloc), "--pathbased", "%s:%s"%("/".join(self.path_for_real_server), self.servernetloc)],
loop=self.loop,
)
self.loop.run_until_complete(self.reverseproxy.initializing)
def tearDown(self):
super(WithReverseProxy, self).tearDown()
self.loop.run_until_complete(self.reverseproxy.shutdown())
# creating a reference loop between the cli instance and its contexts,
# so that the cli instance's gc-ing is linked o the contexts'.
# TODO how can we handle this more smoothly?
self.reverseproxy.outgoing_context._cli = self.reverseproxy
self.reverseproxy.proxy_context._cli = self.reverseproxy
self._del_to_be_sure('reverseproxy')
self.loop.run_until_complete(asyncio.sleep(CLEANUPTIME))
proxyport = 56839
proxyhost = common.loopbackname_v6 or common.loopbackname_v46
proxyaddress = '%s:%d'%(proxyhost, proxyport)
name_for_real_server = 'aliasedname'
path_for_real_server = ('aliased', 'name')
class TestReverseProxy(WithReverseProxy, WithClient, WithTestServer):
@unittest.skipIf(common.using_simple6, "Some proxy tests fail with simple6 (https://github.com/chrysn/aiocoap/issues/88)")
def test_routing(self):
yieldfrom = lambda f: self.loop.run_until_complete(f)
def req():
request = aiocoap.Message(code=aiocoap.GET)
request.unresolved_remote = self.proxyaddress
request.opt.uri_path = ('big',)
return request
request = req()
response = yieldfrom(self.client.request(request).response)
self.assertEqual(response.code, aiocoap.BAD_REQUEST, "GET without hostname gave resource (something like BAD_REQUEST expected)")
request = req()
request.opt.uri_host = self.name_for_real_server
response = yieldfrom(self.client.request(request).response)
self.assertEqual(response.code, aiocoap.CONTENT, "GET with hostname based proxying was not successful)")
request = req()
request.opt.uri_path = self.path_for_real_server + request.opt.uri_path
response = yieldfrom(self.client.request(request).response)
self.assertEqual(response.code, aiocoap.CONTENT, "GET with path based proxying was not successful)")
@unittest.skipIf(common.using_simple6, "Some proxy tests fail with simple6 (https://github.com/chrysn/aiocoap/issues/88)")
def test_options(self):
yieldfrom = lambda f: self.loop.run_until_complete(f)
def req():
request = aiocoap.Message(code=aiocoap.GET)
request.unresolved_remote = self.proxyaddress
request.opt.uri_path = ('big',)
request.opt.uri_host = self.name_for_real_server
return request
request = req()
request.opt.proxy_scheme = 'coap'
response = yieldfrom(self.client.request(request).response)
self.assertEqual(response.code, aiocoap.BAD_OPTION, "Reverse proxy supports proxying even though it shouldn't.")
request = req()
request.opt.add_option(aiocoap.optiontypes.StringOption(2**10 + 2, "can't proxy this"))
response = yieldfrom(self.client.request(request).response)
self.assertEqual(response.code, aiocoap.BAD_OPTION, "Proxy did not react to unsafe option.")
request = req()
request.opt.add_option(aiocoap.optiontypes.StringOption(2**10, "nothing to see here"))
response = yieldfrom(self.client.request(request).response)
self.assertEqual(response.code, aiocoap.CONTENT, "Proxy did not ignore to safe-to-forward option.")
| [
"[email protected]"
] |