| column | dtype | range / values |
|---|---|---|
| blob_id | string | length 40..40 |
| directory_id | string | length 40..40 |
| path | string | length 3..616 |
| content_id | string | length 40..40 |
| detected_licenses | list | length 0..112 |
| license_type | string | 2 classes |
| repo_name | string | length 5..115 |
| snapshot_id | string | length 40..40 |
| revision_id | string | length 40..40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 .. 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 .. 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 .. 2023-09-06 01:08:06 |
| github_id | int64 | 4.92k..681M, nullable (⌀) |
| star_events_count | int64 | 0..209k |
| fork_events_count | int64 | 0..110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] | 2012-06-04 01:52:49 .. 2023-09-14 21:59:50, nullable (⌀) |
| gha_created_at | timestamp[us] | 2008-05-22 07:58:19 .. 2023-08-21 12:35:19, nullable (⌀) |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3..10.2M |
| extension | string | 188 classes |
| content | string | length 3..10.2M |
| authors | list | length 1..1 |
| author_id | string | length 1..132 |
66f614fc294e9d8c94babbbce4963368e0136402 | 35b460a5e72e3cb40681861c38dc6d5df1ae9b92 | /CodeFights/Arcade/Intro/islandOfKnowledge/minesweeper.py | 7a778a54e8fe37740efbc970e191ddc6ef1ca2ae | []
| no_license | robgoyal/CodingChallenges | 9c5f3457a213cf54193a78058f74fcf085ef25bc | 0aa99d1aa7b566a754471501945de26644558d7c | refs/heads/master | 2021-06-23T09:09:17.085873 | 2019-03-04T04:04:59 | 2019-03-04T04:04:59 | 94,391,412 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,653 | py | # Name: minesweeper.py
# Author: Robin Goyal
# Last-Modified: July 12, 2017
# Purpose: Given an array of true and false values, with true indicating a mine,
#          return an array of the same length indicating the number of surrounding
#          mines at each position
# Note: Could be optimized further; this is a pure brute-force implementation
def minesweeper(matrix):
grid = []
for row in range(len(matrix)):
gridRow = []
for col in range(len(matrix[0])):
count = 0
# Top Row
if (row == 0):
if (col == 0): # Top-Left corner
count = [matrix[row][col+1], matrix[row+1][col], matrix[row+1][col+1]].count(True)
elif (col == len(matrix[0]) - 1): # Top-Right corner
count = [matrix[row][col-1], matrix[row+1][col], matrix[row+1][col-1]].count(True)
else: # Middle Columns in top Row
count = [matrix[row][col-1], matrix[row][col+1]].count(True) \
+ matrix[row+1][col-1:col+2].count(True)
# Bottom Row
            elif (row == len(matrix) - 1):
if (col == 0): # Bottom-Left corner
count = [matrix[row][col+1], matrix[row-1][col], matrix[row-1][col+1]].count(True)
elif (col == len(matrix[0]) - 1): # Bottom-Right corner
count = [matrix[row][col-1], matrix[row-1][col], matrix[row-1][col-1]].count(True)
else: # Middle Columns in bottom Row
count = [matrix[row][col-1], matrix[row][col+1]].count(True) \
+ matrix[row-1][col-1:col+2].count(True)
# Middle Rows
else:
if (col == 0): # Left most column
count = matrix[row-1][col:col+2].count(True) + [matrix[row][col+1]].count(True) \
+ matrix[row+1][col:col+2].count(True)
                elif (col == len(matrix[0]) - 1):  # Right most column
count = matrix[row-1][col-1:col+1].count(True) + [matrix[row][col-1]].count(True) \
+ matrix[row+1][col-1:col+1].count(True)
else: # Middle columns
count = matrix[row-1][col-1:col+2].count(True) + matrix[row+1][col-1:col+2].count(True) + \
[matrix[row][col-1], matrix[row][col+1]].count(True)
gridRow.append(count)
        grid.append(gridRow)
    return grid
| [
"[email protected]"
]
| |
f9e1ca44905679e39f7b725bab3e049bd3cf44d3 | 10ddfb2d43a8ec5d47ce35dc0b8acf4fd58dea94 | /Python/number-of-senior-citizens.py | 50b65c0c4bd9f9324ebc57219dbfd33cea759e81 | [
"MIT"
]
| permissive | kamyu104/LeetCode-Solutions | f54822059405ef4df737d2e9898b024f051fd525 | 4dc4e6642dc92f1983c13564cc0fd99917cab358 | refs/heads/master | 2023-09-02T13:48:26.830566 | 2023-08-28T10:11:12 | 2023-08-28T10:11:12 | 152,631,182 | 4,549 | 1,651 | MIT | 2023-05-31T06:10:33 | 2018-10-11T17:38:35 | C++ | UTF-8 | Python | false | false | 229 | py | # Time: O(n)
# Space: O(1)
# string
class Solution(object):
def countSeniors(self, details):
"""
:type details: List[str]
:rtype: int
"""
return sum(x[-4:-2] > "60" for x in details)
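# Illustrative usage (hypothetical test data mirroring the problem statement;
# the age sits at positions [-4:-2] of each 15-character detail string):
# Solution().countSeniors(["7868190130M7522", "5303914400F9211",
#                          "1313579440F2036", "2921522980M5644"])  # -> 2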
| [
"[email protected]"
]
| |
cee24ad2b9015a0358c23faf46c7db3e63048385 | b40a661aa78c10ea8413b349f1efe288149f4ab0 | /App/migrations/0004_address.py | 20a5f2bea93a0a8e0e15352f1439fbf6e1dd1c5b | []
| no_license | 0helloword/DjangoSum | daed4ab9488c5d53518623eb5d35c3a32a826129 | 72b528415edd2a76a7a19da708d4046de2a014ac | refs/heads/master | 2022-11-25T15:15:30.843401 | 2020-08-02T03:18:07 | 2020-08-02T03:18:07 | 275,606,302 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 712 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.28 on 2020-06-27 13:58
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('App', '0003_cart'),
]
operations = [
migrations.CreateModel(
name='Address',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('a_add', models.CharField(max_length=128)),
('a_customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='App.Customer')),
],
),
]
| [
"[email protected]"
]
| |
3aefc1186a88845c16d658de39ccb722a760a83f | e922f5dac332fbf4de910ade55f07cb75d900d1b | /templates/influxdb/actions.py | 22fc5f7c27e2e8b057d5a9a71db43c9800bbaa34 | [
"Apache-2.0"
]
| permissive | hossnys/0-orchestrator | 441970f0bd784b72c40f6da4fa44ca2c70b9ea8c | cce7cc1e1f957e0eb691b863502fa6c3f4620d52 | refs/heads/master | 2021-01-01T18:46:27.123614 | 2017-07-26T13:59:30 | 2017-07-26T13:59:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,070 | py | def get_container(service, force=True):
containers = service.producers.get('container')
if not containers:
if force:
raise RuntimeError('Service didn\'t consume any containers')
else:
return
return containers[0]
def init(job):
from zeroos.orchestrator.configuration import get_configuration
service = job.service
container_actor = service.aysrepo.actorGet('container')
config = get_configuration(service.aysrepo)
args = {
'node': service.model.data.node,
'flist': config.get(
'influxdb-flist', 'https://hub.gig.tech/gig-official-apps/influxdb.flist'),
'hostNetworking': True
}
cont_service = container_actor.serviceCreate(instance='{}_influxdb'.format(service.name), args=args)
service.consume(cont_service)
def install(job):
j.tools.async.wrappers.sync(job.service.executeAction('start', context=job.context))
def start(job):
from zeroos.orchestrator.sal.Container import Container
from zeroos.orchestrator.sal.influxdb.influxdb import InfluxDB
service = job.service
container = get_container(service)
j.tools.async.wrappers.sync(container.executeAction('start', context=job.context))
container_ays = Container.from_ays(container, job.context['token'])
influx = InfluxDB(
container_ays, service.parent.model.data.redisAddr, service.model.data.port)
influx.start()
service.model.data.status = 'running'
influx.create_databases(service.model.data.databases)
service.saveAll()
def stop(job):
from zeroos.orchestrator.sal.Container import Container
from zeroos.orchestrator.sal.influxdb.influxdb import InfluxDB
service = job.service
container = get_container(service)
container_ays = Container.from_ays(container, job.context['token'])
if container_ays.is_running():
influx = InfluxDB(
container_ays, service.parent.model.data.redisAddr, service.model.data.port)
influx.stop()
j.tools.async.wrappers.sync(container.executeAction('stop', context=job.context))
service.model.data.status = 'halted'
service.saveAll()
def uninstall(job):
service = job.service
container = get_container(service, False)
if container:
j.tools.async.wrappers.sync(service.executeAction('stop', context=job.context))
j.tools.async.wrappers.sync(container.delete())
j.tools.async.wrappers.sync(service.delete())
def processChange(job):
from zeroos.orchestrator.sal.Container import Container
from zeroos.orchestrator.sal.influxdb.influxdb import InfluxDB
from zeroos.orchestrator.configuration import get_jwt_token_from_job
service = job.service
args = job.model.args
if args.pop('changeCategory') != 'dataschema' or service.model.actionsState['install'] in ['new', 'scheduled']:
return
container_service = get_container(service)
container = Container.from_ays(container_service, get_jwt_token_from_job(job))
influx = InfluxDB(
container, service.parent.model.data.redisAddr, service.model.data.port)
if args.get('port'):
if container.is_running() and influx.is_running()[0]:
influx.stop()
service.model.data.status = 'halted'
influx.port = args['port']
influx.start()
service.model.data.status = 'running'
service.model.data.port = args['port']
if args.get('databases'):
if container.is_running() and influx.is_running()[0]:
create_dbs = set(args['databases']) - set(service.model.data.databases)
drop_dbs = set(service.model.data.databases) - set(args['databases'])
influx.create_databases(create_dbs)
influx.drop_databases(drop_dbs)
service.model.data.databases = args['databases']
service.saveAll()
def init_actions_(service, args):
return {
'init': [],
'install': ['init'],
'monitor': ['start'],
'delete': ['uninstall'],
'uninstall': [],
} | [
"[email protected]"
]
| |
21439bcac6cdd546eeab5d2c26363fe72b79eb43 | ea549f5974db822d0733e5417d313997de9ca2bb | /craig_list_site/migrations/0001_initial.py | 57e111b3bffaa4b9a0892c8fca78c81caa2d727c | []
| no_license | Bibin22/craiglist | fe5a641cf4b8c03557c1775605a5e8b4da9b43de | 853b377f4951ee3ac9072bc22d486f520e18b1bc | refs/heads/master | 2023-02-05T11:32:24.911491 | 2021-01-02T05:30:34 | 2021-01-02T05:30:34 | 326,116,319 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 554 | py | # Generated by Django 3.1.4 on 2020-12-24 10:56
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Search',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('search', models.CharField(max_length=500)),
('created', models.DateTimeField(auto_now=True)),
],
),
]
| [
"[email protected]"
]
| |
e33bf7188bb39a15eab44ec863cb21e1daa47b3e | acf15961c47fb947a407a4318214110b9597d9e6 | /env/bin/jupyter-kernel | 667c26308122816ab476f0645e256bfc37e040c0 | []
| no_license | qu4ku/dshub-website | 43e378352246357db83da9b9b0acd760aebbc83a | 792d94e41fa666093eda2b5511bbcab27e0bb287 | refs/heads/master | 2021-06-02T19:27:39.708138 | 2020-05-19T09:18:42 | 2020-05-19T09:18:42 | 135,715,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 286 | #!/Users/kamilwroniewicz/_code/_github/180601-datahub-website/env/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from jupyter_client.kernelapp import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
]
| ||
b10f1d015e39167aece2ad6df9958a447a5a7f45 | ff73cf9a9b1c924e46e9e088243991a184a8a668 | /UMass/2019Research/synthesis/minimal_box/softbox_model.py | 071de3ad937c9345961ca5481b953dce6441bbc0 | []
| no_license | Lorraine333/minimal_box | c844b14ca2b5a6a898be9dec48ce89473887a325 | 472e1de6087c4183fe706edc0efc4e917b7518a3 | refs/heads/master | 2020-06-11T14:31:39.612547 | 2019-06-27T01:11:26 | 2019-06-27T01:11:26 | 193,998,999 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,992 | py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import utils
from box import MyBox
import tensorflow as tf
my_seed = 20180112
tf.set_random_seed(my_seed)
def model_fn(features, labels, mode, params):
"""
Creates model_fn for Tensorflow estimator. This function takes features and input, and
is responsible for the creation and processing of the Tensorflow graph for training, prediction and evaluation.
Expected feature: {'image': image tensor }
:param features: dictionary of input features
:param labels: dictionary of ground truth labels
:param mode: graph mode
:param params: params to configure model
:return: Estimator spec dependent on mode
"""
learning_rate = params['learning_rate']
"""Initiate box embeddings"""
mybox = softbox_model_fn(features, labels, mode, params)
log_prob = mybox.log_prob
if mode == tf.estimator.ModeKeys.PREDICT:
return get_prediction_spec(log_prob)
total_loss = mybox.get_loss(log_prob, labels, params)
if mode == tf.estimator.ModeKeys.TRAIN:
return get_training_spec(learning_rate, total_loss)
else:
return get_eval_spec(log_prob, labels, total_loss)
def get_prediction_spec(log_cond_prob):
"""
Creates estimator spec for prediction
:param log_cond_prob: log prob for conditionals
:param log_marg_prob: log prob for marginals
:return: Estimator spec
"""
predictions = {
"probability": tf.exp(log_cond_prob)
}
return tf.estimator.EstimatorSpec(mode=tf.estimator.ModeKeys.PREDICT, predictions=predictions)
def get_training_spec(learning_rate, loss):
"""
Creates training estimator spec
:param learning rate for optimizer
:param joint_loss: loss op
:return: Training estimator spec
"""
    optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate)
    train_op = optimizer.minimize(
        loss=loss,
        global_step=tf.train.get_global_step())
    return tf.estimator.EstimatorSpec(mode=tf.estimator.ModeKeys.TRAIN, loss=loss, train_op=train_op)
def get_eval_spec(log_cond_prob, labels, loss):
"""
Creates eval spec for tensorflow estimator
:param log_cond_prob: log prob for conditionals
:param log_marg_prob: log prob for marginals
:param labels: ground truth labels for conditional and marginal
:param loss: loss op
:return: Eval estimator spec
"""
eval_metric_ops = {
"pearson_correlation":tf.contrib.metrics.streaming_pearson_correlation(
predictions=tf.exp(log_cond_prob), labels=labels['prob'])
}
return tf.estimator.EstimatorSpec(
mode=tf.estimator.ModeKeys.EVAL, loss=loss, eval_metric_ops=eval_metric_ops)
class softbox_model_fn(object):
def __init__(self, features, labels, mode, params):
self.label_size = params['label_size']
self.embed_dim = params['embed_dim']
self.prob_label = labels['prob']
self.cond_weight = params['cond_weight']
self.marg_weight = params['marg_weight']
self.reg_weight = params['reg_weight']
self.regularization_method = params['regularization_method']
self.temperature = 1.0
self.min_embed, self.delta_embed = init_word_embedding(self.label_size, self.embed_dim)
self.log_prob = tf.cond(tf.equal(tf.shape(self.prob_label)[1], tf.constant(self.label_size)),
true_fn=lambda: self.softbox_marg(features, params, mode),
false_fn=lambda: self.softbox_cond(features, params, mode))
self.log_prob = tf.Print(self.log_prob, [tf.equal(tf.shape(self.prob_label)[1], tf.constant(self.label_size))], '0 for marginal, 1 for conditional')
def softbox_cond(self, features, params, mode):
embed_dim = params['embed_dim']
t1x = features['term1']
t2x = features['term2']
# t1x = tf.Print(t1x, [t1x, t2x], 't1x shape')
"""cond log probability"""
t1_box = get_word_embedding(t1x, self.min_embed, self.delta_embed)
t2_box = get_word_embedding(t2x, self.min_embed, self.delta_embed)
evaluation_logits = get_conditional_probability(t1_box, t2_box, embed_dim, self.temperature)
return evaluation_logits
def softbox_marg(self, features, params, mode):
"""marg log probability"""
max_embed = self.min_embed + tf.exp(self.delta_embed)
universe_min = tf.reduce_min(self.min_embed, axis=0, keepdims=True)
universe_max = tf.reduce_max(max_embed, axis=0, keepdims=True)
universe_volume = volume_calculation(MyBox(universe_min, universe_max), self.temperature)
box_volume = volume_calculation(MyBox(self.min_embed, max_embed), self.temperature)
predicted_marginal_logits = tf.log(box_volume) - tf.log(universe_volume)
return predicted_marginal_logits
def get_cond_loss(self, cond_log_prob):
"""get conditional probability loss"""
cond_pos_loss = tf.multiply(cond_log_prob, self.prob_label)
cond_neg_loss = tf.multiply(tf.log(1-tf.exp(cond_log_prob)+1e-10), 1-self.prob_label)
cond_loss = -tf.reduce_mean(cond_pos_loss+ cond_neg_loss)
cond_loss = self.cond_weight * cond_loss
return cond_loss
def get_marg_loss(self, marg_log_prob):
"""get marginal probability loss"""
marg_pos_loss = tf.multiply(marg_log_prob, self.prob_label)
marg_neg_loss = tf.multiply(tf.log(1-tf.exp(marg_log_prob)+1e-10), 1-self.prob_label)
marg_loss = -tf.reduce_mean(marg_pos_loss+marg_neg_loss)
marg_loss = self.marg_weight * marg_loss
return marg_loss
def get_loss(self, log_prob, labels, params):
prob_loss = tf.cond(tf.equal(tf.shape(self.prob_label)[0], tf.constant(self.label_size)),
true_fn=lambda: self.get_marg_loss(log_prob),
false_fn=lambda: self.get_cond_loss(log_prob))
"""get regularization loss"""
if self.regularization_method == 'universe_edge':
max_embed = self.min_embed + tf.exp(self.delta_embed)
universe_min = tf.reduce_min(self.min_embed, axis=0, keepdims=True)
universe_max = tf.reduce_max(max_embed, axis=0, keepdims=True)
regularization = tf.reduce_mean(
tf.nn.softplus(universe_max - universe_min))
elif self.regularization_method == 'delta':
regularization = tf.reduce_mean(
tf.square(tf.exp(self.delta_embed)))
else:
raise ValueError('Wrong regularization method')
total_loss = prob_loss + self.reg_weight * regularization
total_loss = tf.Print(total_loss, [prob_loss, self.reg_weight * regularization], 'loss')
return total_loss
# def softbox(features, params, mode):
# label_size = params['label_size']
# embed_dim = params['embed_dim']
# temperature = 1.0
#
# t1x = features['term1']
# t2x = features['term2']
#
# t1x = tf.Print(t1x, [tf.shape(t1x)], 't1x shape')
#
# """Initiate box embeddings"""
# min_embed, delta_embed = init_word_embedding(label_size, embed_dim)
# """cond log probability"""
# t1_box = get_word_embedding(t1x, min_embed, delta_embed)
# t2_box = get_word_embedding(t2x, min_embed, delta_embed)
# evaluation_logits = get_conditional_probability(t1_box, t2_box, embed_dim, temperature)
#
# """marg log probability"""
# max_embed = min_embed + tf.exp(delta_embed)
# universe_min = tf.reduce_min(min_embed, axis=0, keepdims=True)
# universe_max = tf.reduce_max(max_embed, axis=0, keepdims=True)
# universe_volume = volume_calculation(MyBox(universe_min, universe_max), temperature)
# box_volume = volume_calculation(MyBox(min_embed, max_embed), temperature)
# predicted_marginal_logits = tf.log(box_volume) - tf.log(universe_volume)
#
# return evaluation_logits, predicted_marginal_logits, min_embed, delta_embed
def volume_calculation(mybox, temperature):
return tf.reduce_prod(tf.nn.softplus((mybox.max_embed - mybox.min_embed)/
temperature)*temperature, axis=-1)
def init_embedding_scale():
# softbox delta log init
# min_lower_scale, min_higher_scale = 1e-4, 0.9
# delta_lower_scale, delta_higher_scale = -1.0, -0.1
min_lower_scale, min_higher_scale = 1e-4, 0.9
delta_lower_scale, delta_higher_scale = -0.1, 0
return min_lower_scale, min_higher_scale, delta_lower_scale, delta_higher_scale
def init_word_embedding(label_size, embed_dim):
min_lower_scale, min_higher_scale, delta_lower_scale, delta_higher_scale = init_embedding_scale()
min_embed = tf.Variable(
tf.random_uniform([label_size, embed_dim],
min_lower_scale, min_higher_scale, seed=my_seed), trainable=True, name='word_embed')
delta_embed = tf.Variable(
tf.random_uniform([label_size, embed_dim],
delta_lower_scale, delta_higher_scale, seed=my_seed), trainable=True, name='delta_embed')
return min_embed, delta_embed
def get_word_embedding(idx, min_embed, delta_embed):
"""read word embedding from embedding table, get unit cube embeddings"""
min_embed = tf.nn.embedding_lookup(min_embed, idx)
delta_embed = tf.nn.embedding_lookup(delta_embed, idx) # [batch_size, embed_size]
max_embed = min_embed + tf.exp(delta_embed)
t1_box = MyBox(min_embed, max_embed)
return t1_box
def get_conditional_probability(t1_box, t2_box, embed_dim, temperature):
_, meet_box, disjoint = utils.calc_join_and_meet(t1_box, t2_box)
nested = utils.calc_nested(t1_box, t2_box, embed_dim)
"""get conditional probabilities"""
overlap_volume = volume_calculation(meet_box, temperature)
rhs_volume = volume_calculation(t1_box, temperature)
conditional_logits = tf.log(overlap_volume+1e-10) - tf.log(rhs_volume+1e-10)
return conditional_logits
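# Hedged usage sketch (illustration only; the hyperparameter values and the
# input function are assumptions, not part of the original training setup):
# params = {'label_size': 100, 'embed_dim': 50, 'learning_rate': 1e-3,
#           'cond_weight': 1.0, 'marg_weight': 1.0, 'reg_weight': 0.1,
#           'regularization_method': 'delta'}
# estimator = tf.estimator.Estimator(model_fn=model_fn, params=params)
# estimator.train(input_fn=train_input_fn)  # train_input_fn assumed defined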
| [
"[email protected]"
]
| |
a3fac0df2496aea555fb951e1641e2c5d9b07391 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_233/ch45_2020_03_09_13_17_35_961786.py | 7cf83bf5b09f45069fdb4e7c49f9a6d644c64307 | []
| no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 202 | py | lista = []
while True:
num = int(input())
if num <= 0: break
lista.append(num)
lista_inv = [0] * len(lista)  # range() is immutable; use a mutable list instead
for i in range(len(lista)):
    lista_inv[-(i + 1)] = lista[i]  # place element i at the mirrored position
print(lista_inv) | [
"[email protected]"
]
| |
ddc161b7e46516dd3785e6dba80385cf69326f1e | f3f01d98f2f924b7f2ce9c682b63ef68a0b943d7 | /Type_conversion.py | 67e8274a805561624b385bea780d5a3d1ffc4e07 | []
| no_license | nihalgaurav/pythonprep | 0d935244f4c20b2ba660a1bc192352654d4a9366 | d3023e1b58d9d5333e909f71d9c3fa7c54c420f5 | refs/heads/master | 2023-03-27T06:09:38.757433 | 2021-03-16T05:22:07 | 2021-03-16T05:22:07 | 344,804,996 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 694 | py | number = 17
width = len(str(bin(number)[2:])) + 2
print("INT".rjust(width) + "OCT".rjust(width) + "HEX".rjust(width) + "BIN".rjust(width))
for x in range(1, number+1):
print(str(int(x)).rjust(width, " ") + str(oct(x))[2:].rjust(width, " ") + str(hex(x))[2:].upper().rjust(width, " ")
+ str(bin(x)[2:]).rjust(width, " "))
num = 5
n = 97 + num
for i in range(num):
p = ''
for j in range(i):
p = p + "-" + chr(n-i+j)
print(p[::-1].rjust(num*2-2, "-") + chr(n-i-1) + p.ljust(num*2-2, "-"))
for i in range(num-2,-1, -1):
p = ''
for j in range(i):
p = p + "-" + chr(n-i+j)
print(p[::-1].rjust(num*2-2, "-") + chr(n-i-1) + p.ljust(num*2-2, "-")) | [
"[email protected]"
]
| |
2eb9a26bdde17a586ad5280059024d4004382a91 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /ayQTiQAcFJhtauhe3_17.py | 1dca03f7cb44ef5e8ee9f421dc82cdd88d7fd01c | []
| no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 521 | py | """
Given a list of integers, determine whether the sum of its elements is even or
odd.
The output should be a string (`"odd"` or `"even"`).
If the input list is empty, consider it as a list with a zero (`[0]`).
### Examples
even_or_odd([0]) ➞ "even"
even_or_odd([1]) ➞ "odd"
even_or_odd([]) ➞ "even"
even_or_odd([0, 1, 5]) ➞ "even"
### Notes
N/A
"""
def even_or_odd(lst):
    summ = int(sum(lst))
    if summ % 2 == 0:
        return "even"
    return "odd"
| [
"[email protected]"
]
| |
12a9b9befcf7af332c3ea172149268070aea9c7c | deb740e5086386a68d155b2482f9a9ec2095012c | /jdcloud_sdk/services/live/apis/DescribeLivePublishStreamNumRequest.py | 212277a2947473efb924dd9775e2df6ca9c01142 | [
"Apache-2.0"
]
| permissive | aluode99/jdcloud-sdk-python | 843afdd2855a55ecd7cd90fe255df213a8f56e28 | 3da9ae9c0f08e2c20a73dde04f6453d3eb9db16a | refs/heads/master | 2020-05-26T09:26:24.307434 | 2019-05-29T02:35:23 | 2019-05-29T02:35:23 | 188,186,313 | 0 | 0 | null | 2019-05-23T07:46:01 | 2019-05-23T07:46:00 | null | UTF-8 | Python | false | false | 2,692 | py | # coding=utf8
# Copyright 2018 JDCLOUD.COM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the jdcloud code generator program.
from jdcloud_sdk.core.jdcloudrequest import JDCloudRequest
class DescribeLivePublishStreamNumRequest(JDCloudRequest):
"""
    Query the number of live publish streams
"""
def __init__(self, parameters, header=None, version="v1"):
super(DescribeLivePublishStreamNumRequest, self).__init__(
'/describeLivePublishStreamNum', 'GET', header, version)
self.parameters = parameters
class DescribeLivePublishStreamNumParameters(object):
def __init__(self, startTime, ):
"""
        :param startTime: Start time
        - UTC time
        Format: yyyy-MM-dd'T'HH:mm:ss'Z'
        Example: 2018-10-21T10:00:00Z
"""
self.domainName = None
self.appName = None
self.protocolType = None
self.period = None
self.startTime = startTime
self.endTime = None
def setDomainName(self, domainName):
"""
        :param domainName: (Optional) Playback domain name
"""
self.domainName = domainName
def setAppName(self, appName):
"""
        :param appName: (Optional) Application name
"""
self.appName = appName
def setProtocolType(self, protocolType):
"""
        :param protocolType: (Optional) Stream protocol types to query; allowed values: "rtmp,hdl,hls", with multiple values separated by commas
"""
self.protocolType = protocolType
def setPeriod(self, period):
"""
        :param period: (Optional) Query period; current allowed values: "oneMin,fiveMin,halfHour,hour,twoHour,sixHour,day,followTime", meaning 1 minute, 5 minutes, half an hour, 1 hour, 2 hours, 6 hours, 1 day, and follow-time respectively. Defaults to empty, which means fiveMin. When followTime is passed, the period is EndTime - StartTime and only a single data point is returned
"""
self.period = period
def setEndTime(self, endTime):
"""
        :param endTime: (Optional) End time:
        - UTC time
        Format: yyyy-MM-dd'T'HH:mm:ss'Z'
        Example: 2018-10-21T10:00:00Z
        - If empty, defaults to the current time
"""
self.endTime = endTime
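# Hedged usage sketch (illustration only; the client object and its send()
# call are assumptions about the surrounding SDK, and the domain name is
# hypothetical):
# parameters = DescribeLivePublishStreamNumParameters(startTime='2018-10-21T10:00:00Z')
# parameters.setDomainName('live.example.com')
# request = DescribeLivePublishStreamNumRequest(parameters)
# response = live_client.send(request)  # live_client: a configured Live client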
| [
"[email protected]"
]
| |
579a8846030030a1b4f846da2163172703055c1e | 3592ef6ceb0e7654dc68fa9879b8c6fe31bcf6d1 | /reveries/tools/modeldiffer/lib.py | aa794920283f2358703cbb6ef0aad11ced9d157f | [
"MIT"
]
| permissive | all-in-one-of/reveries-config | a83a8208680d857a155e0a05297bde111d8c6845 | b47a5a6ce05376dffcb893e0823fecbcf1d08e67 | refs/heads/master | 2021-01-04T07:44:45.383431 | 2020-02-13T09:00:51 | 2020-02-13T09:00:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,245 | py |
import logging
from avalon import io
main_logger = logging.getLogger("modeldiffer")
def profile_from_database(version_id):
"""
"""
representation = io.find_one({"type": "representation",
"name": "mayaBinary",
"parent": version_id})
if representation is None:
main_logger.critical("Representation not found. This is a bug.")
return
model_profile = representation["data"].get("modelProfile")
if model_profile is None:
main_logger.critical("'data.modelProfile' not found."
"This is a bug.")
return
profile = dict()
for id, meshes_data in model_profile.items():
for data in meshes_data:
name = data.pop("hierarchy")
# No need to compare normals
data.pop("normals")
data["avalonId"] = id
profile[name] = data
return profile
profile_from_host = NotImplemented
select_from_host = NotImplemented
def is_supported_loader(name):
return name in ("ModelLoader",) # "RigLoader")
def is_supported_subset(name):
return any(name.startswith(family)
for family in ("model",)) # "rig"))
| [
"[email protected]"
]
| |
66c48db3d472e9cbef6459a534d94dd8fe60f1ce | 94f156b362fbce8f89c8e15cd7687f8af267ef08 | /week3/main/models.py | 267cfd064ac83b9e6a1feed9dae6e559d5dabd77 | []
| no_license | DastanB/AdvancedDjango | 6eee5477cd5a00423972c9cc3d2b5f1e4a501841 | 2b5d4c22b278c6d0e08ab7e84161163fe42e9a3f | refs/heads/master | 2020-07-17T19:21:16.271964 | 2019-12-03T21:58:51 | 2019-12-03T21:58:51 | 206,081,522 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,424 | py | from django.db import models
from users.models import MainUser
from main.constants import PROJECT_STATUSES, PROJECT_IN_PROCESS, PROJECT_FROZEN, PROJECT_DONE, BLOCK_STATUSES, TASKS_DONE, TASKS_FROZEN, TASKS_IN_PROCESS
import datetime
# Create your models here.
class Project(models.Model):
name = models.CharField(max_length=255)
description = models.CharField(max_length=1000)
    status = models.PositiveSmallIntegerField(choices=PROJECT_STATUSES, default=PROJECT_IN_PROCESS)
creator = models.ForeignKey(MainUser, on_delete=models.CASCADE, related_name='projects')
def is_owner(self, request):
return self.creator.id == request.user.id
def __str__(self):
return self.name
class Block(models.Model):
name = models.CharField(max_length=255)
    type_of = models.PositiveSmallIntegerField(choices=BLOCK_STATUSES, default=TASKS_IN_PROCESS)
project = models.ForeignKey(Project, on_delete=models.CASCADE, related_name='blocks')
def __str__(self):
return self.name
class Task(models.Model):
name = models.CharField(max_length=255)
description = models.CharField(max_length=1000)
priority = models.IntegerField()
creator = models.ForeignKey(MainUser, on_delete=models.CASCADE, related_name='created_tasks')
executor = models.ForeignKey(MainUser, on_delete=models.CASCADE, related_name='tasks', null=True)
block = models.ForeignKey(Block, on_delete=models.CASCADE, related_name='tasks')
order = models.IntegerField()
def is_owner(self, request):
return self.creator.id == request.user.id
def __str__(self):
return self.name
class TaskDocument(models.Model):
document = models.FileField()
creator = models.ForeignKey(MainUser, on_delete=models.CASCADE, related_name='docs')
task = models.ForeignKey(Task, on_delete=models.CASCADE, related_name='docs')
def is_owner(self, request):
return self.creator.id == request.user.id
class TaskComment(models.Model):
body = models.CharField(max_length=10000)
task = models.ForeignKey(Task, on_delete=models.CASCADE, related_name='comments')
creator = models.ForeignKey(MainUser, on_delete=models.CASCADE, related_name='comments')
created_at = models.DateTimeField(default=datetime.datetime.now)
def is_owner(self, request):
return self.creator.id == request.user.id
def __str__(self):
return self.body | [
"[email protected]"
]
| |
01f2c3c84e5a212093e01c4e4dbbf82b5026e90e | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02621/s553012004.py | 34870c7adb3722196b85d445063e4fd1201d1d96 | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 46 | py | a = int(input())
res = a+a**2+a**3
print(res)
| [
"[email protected]"
]
| |
039edd18fd3e878624c2de8607511b5b9ad8a545 | d554b1aa8b70fddf81da8988b4aaa43788fede88 | /5 - Notebooks e Data/1 - Análises numéricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/223/users/4170/codes/1594_1800.py | 4bfac1cb471a1d30c906e35552843d6922186bbd | []
| no_license | JosephLevinthal/Research-projects | a3bc3ca3b09faad16f5cce5949a2279cf14742ba | 60d5fd6eb864a5181f4321e7a992812f3c2139f9 | refs/heads/master | 2022-07-31T06:43:02.686109 | 2020-05-23T00:24:26 | 2020-05-23T00:24:26 | 266,199,309 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 209 | py | a = int(input("Insira o valor da variavel a: "))
b = int(input("Insira o valor da variavel b: "))
c = int(input("Insira o valor da variavel c: "))
x = ((a**2) + (b**2) + (c**2)) / (a + b + c)
print(round(x,7)) | [
"[email protected]"
]
| |
87279f6dae5afa6e6c26657b9c58b42e66be2d5f | 2940f5416082dadd9c646cd9a46d2d0a99883efb | /venv/Lib/site-packages/networkx/algorithms/community/quality.py | 2ffe40786d0df3fd548db7595029d83819f2e47a | [
"MIT"
]
| permissive | tpike3/SugarScape | 4813e4fefbfb0a701f5913d74f045fd0eaed1942 | 39efe4007fba2b12b75c72f7795827a1f74d640b | refs/heads/main | 2021-06-20T03:55:46.288721 | 2021-01-20T17:06:35 | 2021-01-20T17:06:35 | 168,583,530 | 11 | 3 | MIT | 2021-01-20T17:19:53 | 2019-01-31T19:29:40 | Jupyter Notebook | UTF-8 | Python | false | false | 10,608 | py | """Functions for measuring the quality of a partition (into
communities).
"""
from functools import wraps
from itertools import product
import networkx as nx
from networkx import NetworkXError
from networkx.utils import not_implemented_for
from networkx.algorithms.community.community_utils import is_partition
__all__ = ["coverage", "modularity", "performance"]
class NotAPartition(NetworkXError):
"""Raised if a given collection is not a partition.
"""
def __init__(self, G, collection):
msg = f"{G} is not a valid partition of the graph {collection}"
super().__init__(msg)
def require_partition(func):
"""Decorator to check that a valid partition is input to a function
Raises :exc:`networkx.NetworkXError` if the partition is not valid.
This decorator should be used on functions whose first two arguments
are a graph and a partition of the nodes of that graph (in that
order)::
>>> @require_partition
... def foo(G, partition):
... print("partition is valid!")
...
>>> G = nx.complete_graph(5)
>>> partition = [{0, 1}, {2, 3}, {4}]
>>> foo(G, partition)
partition is valid!
>>> partition = [{0}, {2, 3}, {4}]
>>> foo(G, partition)
Traceback (most recent call last):
...
networkx.exception.NetworkXError: `partition` is not a valid partition of the nodes of G
>>> partition = [{0, 1}, {1, 2, 3}, {4}]
>>> foo(G, partition)
Traceback (most recent call last):
...
networkx.exception.NetworkXError: `partition` is not a valid partition of the nodes of G
"""
@wraps(func)
def new_func(*args, **kw):
# Here we assume that the first two arguments are (G, partition).
if not is_partition(*args[:2]):
raise nx.NetworkXError(
"`partition` is not a valid partition of" " the nodes of G"
)
return func(*args, **kw)
return new_func
def intra_community_edges(G, partition):
"""Returns the number of intra-community edges for a partition of `G`.
Parameters
----------
G : NetworkX graph.
partition : iterable of sets of nodes
This must be a partition of the nodes of `G`.
The "intra-community edges" are those edges joining a pair of nodes
in the same block of the partition.
"""
return sum(G.subgraph(block).size() for block in partition)
def inter_community_edges(G, partition):
"""Returns the number of inter-community edges for a prtition of `G`.
according to the given
partition of the nodes of `G`.
Parameters
----------
G : NetworkX graph.
partition : iterable of sets of nodes
This must be a partition of the nodes of `G`.
The *inter-community edges* are those edges joining a pair of nodes
in different blocks of the partition.
Implementation note: this function creates an intermediate graph
that may require the same amount of memory as that of `G`.
"""
# Alternate implementation that does not require constructing a new
# graph object (but does require constructing an affiliation
# dictionary):
#
# aff = dict(chain.from_iterable(((v, block) for v in block)
# for block in partition))
# return sum(1 for u, v in G.edges() if aff[u] != aff[v])
#
MG = nx.MultiDiGraph if G.is_directed() else nx.MultiGraph
return nx.quotient_graph(G, partition, create_using=MG).size()
def inter_community_non_edges(G, partition):
"""Returns the number of inter-community non-edges according to the
given partition of the nodes of `G`.
`G` must be a NetworkX graph.
`partition` must be a partition of the nodes of `G`.
A *non-edge* is a pair of nodes (undirected if `G` is undirected)
that are not adjacent in `G`. The *inter-community non-edges* are
those non-edges on a pair of nodes in different blocks of the
partition.
Implementation note: this function creates two intermediate graphs,
which may require up to twice the amount of memory as required to
store `G`.
"""
# Alternate implementation that does not require constructing two
# new graph objects (but does require constructing an affiliation
# dictionary):
#
# aff = dict(chain.from_iterable(((v, block) for v in block)
# for block in partition))
# return sum(1 for u, v in nx.non_edges(G) if aff[u] != aff[v])
#
return inter_community_edges(nx.complement(G), partition)
@not_implemented_for("multigraph")
@require_partition
def performance(G, partition):
"""Returns the performance of a partition.
The *performance* of a partition is the ratio of the number of
intra-community edges plus inter-community non-edges with the total
number of potential edges.
Parameters
----------
G : NetworkX graph
A simple graph (directed or undirected).
partition : sequence
Partition of the nodes of `G`, represented as a sequence of
sets of nodes. Each block of the partition represents a
community.
Returns
-------
float
The performance of the partition, as defined above.
Raises
------
NetworkXError
If `partition` is not a valid partition of the nodes of `G`.
References
----------
.. [1] Santo Fortunato.
"Community Detection in Graphs".
*Physical Reports*, Volume 486, Issue 3--5 pp. 75--174
<https://arxiv.org/abs/0906.0612>
"""
# Compute the number of intra-community edges and inter-community
# edges.
intra_edges = intra_community_edges(G, partition)
inter_edges = inter_community_non_edges(G, partition)
# Compute the number of edges in the complete graph (directed or
# undirected, as it depends on `G`) on `n` nodes.
#
# (If `G` is an undirected graph, we divide by two since we have
# double-counted each potential edge. We use integer division since
# `total_pairs` is guaranteed to be even.)
n = len(G)
total_pairs = n * (n - 1)
if not G.is_directed():
total_pairs //= 2
return (intra_edges + inter_edges) / total_pairs
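# Illustrative example (hand-checked on a small graph; values are easy to
# verify from the definition above):
# >>> G = nx.barbell_graph(3, 0)              # two triangles joined by one edge
# >>> performance(G, [{0, 1, 2}, {3, 4, 5}])
# 0.9333333333333333                          # (6 intra edges + 8 inter non-edges) / 15 pairs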
@require_partition
def coverage(G, partition):
"""Returns the coverage of a partition.
The *coverage* of a partition is the ratio of the number of
intra-community edges to the total number of edges in the graph.
Parameters
----------
G : NetworkX graph
partition : sequence
Partition of the nodes of `G`, represented as a sequence of
sets of nodes. Each block of the partition represents a
community.
Returns
-------
float
The coverage of the partition, as defined above.
Raises
------
NetworkXError
If `partition` is not a valid partition of the nodes of `G`.
Notes
-----
If `G` is a multigraph, the multiplicity of edges is counted.
References
----------
.. [1] Santo Fortunato.
"Community Detection in Graphs".
*Physical Reports*, Volume 486, Issue 3--5 pp. 75--174
<https://arxiv.org/abs/0906.0612>
"""
intra_edges = intra_community_edges(G, partition)
total_edges = G.number_of_edges()
return intra_edges / total_edges
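# Illustrative example (hand-checked on the same graph as above):
# >>> G = nx.barbell_graph(3, 0)
# >>> coverage(G, [{0, 1, 2}, {3, 4, 5}])
# 0.8571428571428571                          # 6 intra-community edges / 7 edges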
def modularity(G, communities, weight="weight"):
r"""Returns the modularity of the given partition of the graph.
Modularity is defined in [1]_ as
.. math::
Q = \frac{1}{2m} \sum_{ij} \left( A_{ij} - \frac{k_ik_j}{2m}\right)
\delta(c_i,c_j)
where $m$ is the number of edges, $A$ is the adjacency matrix of
`G`, $k_i$ is the degree of $i$ and $\delta(c_i, c_j)$
is 1 if $i$ and $j$ are in the same community and 0 otherwise.
According to [2]_ (and verified by some algebra) this can be reduced to
.. math::
Q = \sum_{c=1}^{n}
\left[ \frac{L_c}{m} - \left( \frac{k_c}{2m} \right) ^2 \right]
where the sum iterates over all communities $c$, $m$ is the number of edges,
$L_c$ is the number of intra-community links for community $c$,
$k_c$ is the sum of degrees of the nodes in community $c$.
The second formula is the one actually used in calculation of the modularity.
Parameters
----------
G : NetworkX Graph
communities : list or iterable of set of nodes
These node sets must represent a partition of G's nodes.
weight : string or None, optional (default="weight")
The edge attribute that holds the numerical value used
as a weight. If None or an edge does not have that attribute,
then that edge has weight 1.
Returns
-------
Q : float
The modularity of the paritition.
Raises
------
NotAPartition
If `communities` is not a partition of the nodes of `G`.
Examples
--------
>>> import networkx.algorithms.community as nx_comm
>>> G = nx.barbell_graph(3, 0)
>>> nx_comm.modularity(G, [{0, 1, 2}, {3, 4, 5}])
0.35714285714285715
>>> nx_comm.modularity(G, nx_comm.label_propagation_communities(G))
0.35714285714285715
References
----------
.. [1] M. E. J. Newman *Networks: An Introduction*, page 224.
Oxford University Press, 2011.
.. [2] Clauset, Aaron, Mark EJ Newman, and Cristopher Moore.
"Finding community structure in very large networks."
Physical review E 70.6 (2004). <https://arxiv.org/abs/cond-mat/0408187>
"""
if not isinstance(communities, list):
communities = list(communities)
if not is_partition(G, communities):
raise NotAPartition(G, communities)
directed = G.is_directed()
if directed:
out_degree = dict(G.out_degree(weight=weight))
in_degree = dict(G.in_degree(weight=weight))
m = sum(out_degree.values())
norm = 1 / m ** 2
else:
out_degree = in_degree = dict(G.degree(weight=weight))
deg_sum = sum(out_degree.values())
m = deg_sum / 2
norm = 1 / deg_sum ** 2
def community_contribution(community):
comm = set(community)
L_c = sum(wt for u, v, wt in G.edges(comm, data=weight, default=1) if v in comm)
out_degree_sum = sum(out_degree[u] for u in comm)
in_degree_sum = sum(in_degree[u] for u in comm) if directed else out_degree_sum
return L_c / m - out_degree_sum * in_degree_sum * norm
return sum(map(community_contribution, communities))
| [
"[email protected]"
]
| |
6d816df5012606bc69d35c03b4aac39b3a25c6dd | 0ec4defa6f83ec044b9e1235cc45964a8145b4d1 | /venv/lib/python3.6/site-packages/pybrain3/rl/experiments/continuous.py | 72df9483cfa96feab6da58c6c9be10525203864b | []
| no_license | nnarziev/MyWeek_Server | e6f6c10ce813cf3dc3aa644958c31a4d01567b4d | 7c51e79224ba48cd1a230536c27f3bd8cec73a21 | refs/heads/master | 2021-08-19T13:46:56.450003 | 2017-11-25T16:48:07 | 2017-11-25T16:48:07 | 112,080,782 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 583 | py | __author__ = 'Thomas Rueckstiess, [email protected]'
from .experiment import Experiment
class ContinuousExperiment(Experiment):
""" The extension of Experiment to handle continuous tasks. """
def doInteractionsAndLearn(self, number = 1):
""" Execute a number of steps while learning continuously.
no reset is performed, such that consecutive calls to
this function can be made.
"""
for _ in range(number):
self._oneInteraction()
self.agent.learn()
return self.stepid
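# Hedged usage sketch (illustrative names; constructing a concrete task and
# agent is assumed to happen elsewhere):
# experiment = ContinuousExperiment(task, agent)
# experiment.doInteractionsAndLearn(100)  # one call = 100 steps, learning after each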
| [
"[email protected]"
]
| |
751c4f954046428b61efaafd22a8356d4489ddcf | 7e9c0243c48bbf0ddca9779ef03fc13bb9ac0496 | /t20.py | 12d2b72ba0d2fda0af04cbc0ed30cab0ad37b4ce | []
| no_license | suchismitarout/tt | c47f1f59659d2678392e2f0c3aaee8cfaa147ff4 | 54a5b625a82dab854b679050d67e340e74d71edd | refs/heads/master | 2020-09-16T20:25:34.146741 | 2019-11-25T06:52:07 | 2019-11-25T06:52:07 | 223,880,569 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 93 | py |
with open("foo.txt", "r") as fr:
print(fr.name)
print(fr.closed)
print(fr.mode)
| [
"[email protected]"
]
| |
47a1085793c09d8ff86cf8e73980e0bcd9595eeb | 43461f999228079c9bfee03f0e4043f08426051f | /python爬虫开发与项目实战笔记/通用爬虫/day10/code/SNBook/items.py | cc4533585eccbe86d3f6186bcea51a5c1d717dbc | []
| no_license | MapleStoryBoy/spider | f9af844ae9812fe21141060213ac2677e719ac73 | b014d81d52805f9317e85b66024d047e73d59053 | refs/heads/master | 2020-05-21T18:27:50.585790 | 2019-07-12T10:11:58 | 2019-07-12T10:11:58 | 186,132,575 | 6 | 2 | null | null | null | null | UTF-8 | Python | false | false | 603 | py | # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class SnbookItem(scrapy.Item):
# define the fields for your item here like:
parent_type = scrapy.Field()
parent_href = scrapy.Field()
pagecount = scrapy.Field()
son_type = scrapy.Field()
son_href = scrapy.Field()
belong_son_tyoe = scrapy.Field()
book_href = scrapy.Field()
book_name = scrapy.Field()
book_img = scrapy.Field()
book_author = scrapy.Field()
book_descrip = scrapy.Field()
| [
"[email protected]"
]
| |
2224c4722a23ff2f4f9c86984146a37d9ca3749e | e76ea38dbe5774fccaf14e1a0090d9275cdaee08 | /src/media/cast/rtp_receiver/rtp_parser/rtp_parser.gyp | ade15eebff42e9f6af9baf7ca1709eba30e3b3e3 | [
"BSD-3-Clause"
]
| permissive | eurogiciel-oss/Tizen_Crosswalk | efc424807a5434df1d5c9e8ed51364974643707d | a68aed6e29bd157c95564e7af2e3a26191813e51 | refs/heads/master | 2021-01-18T19:19:04.527505 | 2014-02-06T13:43:21 | 2014-02-06T13:43:21 | 16,070,101 | 1 | 3 | null | null | null | null | UTF-8 | Python | false | false | 569 | gyp | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
{
'target_name': 'cast_rtp_parser',
'type': 'static_library',
'include_dirs': [
'<(DEPTH)/',
'<(DEPTH)/third_party/',
],
'sources': [
'rtp_parser.cc',
'rtp_parser.h',
], # source
'dependencies': [
'<(DEPTH)/base/base.gyp:base',
'<(DEPTH)/base/base.gyp:test_support_base',
],
},
],
}
| [
"[email protected]"
]
| |
e244afe21842d52ced891cd2c82f5a5dc61e1701 | 658e2e3cb8a4d5343a125f7deed19c9ebf06fa68 | /course_DE/Udacity-Data-Engineering-master/Data Pipeline with Airflow/Production Data Pipelines - Exercise 1.py | 2189c509168783ee7e6770e7df5d77f68ffca7c2 | []
| no_license | yennanliu/analysis | 3f0018809cdc2403f4fbfe4b245df1ad73fa08a5 | 643ad3fed41961cddd006fadceb0e927f1db1f23 | refs/heads/master | 2021-01-23T21:48:58.572269 | 2020-10-13T22:47:12 | 2020-10-13T22:47:12 | 57,648,676 | 11 | 9 | null | null | null | null | UTF-8 | Python | false | false | 2,591 | py | #Instructions
#In this exercise, we’ll consolidate repeated code into Operator Plugins
#1 - Move the data quality check logic into a custom operator
#2 - Replace the data quality check PythonOperators with our new custom operator
#3 - Consolidate both the S3 to RedShift functions into a custom operator
#4 - Replace the S3 to RedShift PythonOperators with our new custom operator
#5 - Execute the DAG
import datetime
import logging
from airflow import DAG
from airflow.contrib.hooks.aws_hook import AwsHook
from airflow.hooks.postgres_hook import PostgresHook
from airflow.operators import (
HasRowsOperator,
PostgresOperator,
PythonOperator,
S3ToRedshiftOperator
)
import sql_statements
#
# TODO: Replace the data quality checks with the HasRowsOperator
#
dag = DAG(
"lesson3.exercise1",
start_date=datetime.datetime(2018, 1, 1, 0, 0, 0, 0),
end_date=datetime.datetime(2018, 12, 1, 0, 0, 0, 0),
schedule_interval="@monthly",
max_active_runs=1
)
create_trips_table = PostgresOperator(
task_id="create_trips_table",
dag=dag,
postgres_conn_id="redshift",
sql=sql_statements.CREATE_TRIPS_TABLE_SQL
)
copy_trips_task = S3ToRedshiftOperator(
task_id="load_trips_from_s3_to_redshift",
dag=dag,
table="trips",
redshift_conn_id="redshift",
aws_credentials_id="aws_credentials",
s3_bucket="udac-data-pipelines",
s3_key="divvy/partitioned/{execution_date.year}/{execution_date.month}/divvy_trips.csv"
)
#
# TODO: Replace this data quality check with the HasRowsOperator
#
check_trips = HasRowsOperator(
task_id='check_trips_data',
dag=dag,
redshift_conn_id="redshift",
table="trips"
)
create_stations_table = PostgresOperator(
task_id="create_stations_table",
dag=dag,
postgres_conn_id="redshift",
sql=sql_statements.CREATE_STATIONS_TABLE_SQL,
)
copy_stations_task = S3ToRedshiftOperator(
task_id="load_stations_from_s3_to_redshift",
dag=dag,
redshift_conn_id="redshift",
aws_credentials_id="aws_credentials",
s3_bucket="udac-data-pipelines",
s3_key="divvy/unpartitioned/divvy_stations_2017.csv",
table="stations"
)
#
# TODO: Replace this data quality check with the HasRowsOperator
#
check_stations = HasRowsOperator(
task_id='check_stations_data',
dag=dag,
redshift_conn_id="redshift",
table="stations"
)
create_trips_table >> copy_trips_task
create_stations_table >> copy_stations_task
copy_stations_task >> check_stations
copy_trips_task >> check_trips | [
"[email protected]"
]
| |
0fe346359edc276de2c737c0eb967f27d570aafe | 6ac77834909c485686638d27c0bf41e6d1765cf7 | /src/mapping/writer/mysql_hbase_hawq_writer.py | 79911c5ab6d7c61ed2a50ecbcdabf8ecb5943d18 | []
| no_license | YangXinNewlife/gears | 4144e451861efb0f3ae1d738eb5fcd6cec46a833 | 486b1ce5a7b8d8682bb1394be8f5dd6ae0fca837 | refs/heads/master | 2021-01-20T01:41:30.074696 | 2017-05-26T08:17:45 | 2017-05-26T08:17:45 | 89,316,766 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 221 | py | # -*- coding:utf-8 -*-
__author__ = 'yx'
from src.mapping.writer.writer import Writer
class MysqlHBaseHawqWriter(Writer):
def __init__(self):
pass
def convert_data_type(self, data_type):
pass
| [
"[email protected]"
]
| |
febfe65ae8c61e9e2ee00a30f5a65ef5d45eb9df | 6b8c3974d3ce5f7841e51dcb406666c0c5d92155 | /heat/heat/tests/test_sahara_templates.py | 4a887b85c2db0dce48627fc26ea234c8235c9a1b | [
"Apache-2.0"
]
| permissive | swjang/cloudexchange | bbbf78a2e7444c1070a55378092c17e8ecb27059 | c06ed54f38daeff23166fb0940b27df74c70fc3e | refs/heads/master | 2020-12-29T03:18:43.076887 | 2015-09-21T07:13:22 | 2015-09-21T07:13:22 | 42,845,532 | 1 | 1 | null | 2015-09-21T07:13:22 | 2015-09-21T05:19:35 | C++ | UTF-8 | Python | false | false | 13,120 | py | # Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import six
from heat.common import exception
from heat.common import template_format
from heat.engine.clients.os import neutron
from heat.engine.clients.os import nova
from heat.engine.clients.os import sahara
from heat.engine.resources.openstack.sahara import sahara_templates as st
from heat.engine import scheduler
from heat.tests import common
from heat.tests import utils
node_group_template = """
heat_template_version: 2013-05-23
description: Sahara Node Group Template
resources:
node-group:
type: OS::Sahara::NodeGroupTemplate
properties:
name: node-group-template
plugin_name: vanilla
hadoop_version: 2.3.0
flavor: m1.large
volume_type: lvm
floating_ip_pool: some_pool_name
node_processes:
- namenode
- jobtracker
"""
cluster_template = """
heat_template_version: 2013-05-23
description: Sahara Cluster Template
resources:
cluster-template:
type: OS::Sahara::ClusterTemplate
properties:
name: test-cluster-template
plugin_name: vanilla
hadoop_version: 2.3.0
neutron_management_network: some_network
"""
cluster_template_without_name = """
heat_template_version: 2013-05-23
resources:
cluster_template!:
type: OS::Sahara::ClusterTemplate
properties:
plugin_name: vanilla
hadoop_version: 2.3.0
neutron_management_network: some_network
"""
node_group_template_without_name = """
heat_template_version: 2013-05-23
resources:
node_group!:
type: OS::Sahara::NodeGroupTemplate
properties:
plugin_name: vanilla
hadoop_version: 2.3.0
flavor: m1.large
floating_ip_pool: some_pool_name
node_processes:
- namenode
- jobtracker
"""
class FakeNodeGroupTemplate(object):
def __init__(self):
self.id = "some_ng_id"
self.name = "test-cluster-template"
class FakeClusterTemplate(object):
def __init__(self):
self.id = "some_ct_id"
self.name = "node-group-template"
class SaharaNodeGroupTemplateTest(common.HeatTestCase):
def setUp(self):
super(SaharaNodeGroupTemplateTest, self).setUp()
self.patchobject(st.constraints.CustomConstraint,
'_is_valid').return_value = True
self.patchobject(nova.NovaClientPlugin, 'get_flavor_id'
).return_value = 'someflavorid'
self.patchobject(neutron.NeutronClientPlugin, '_create')
self.patchobject(neutron.NeutronClientPlugin, 'find_neutron_resource'
).return_value = 'some_pool_id'
sahara_mock = mock.MagicMock()
self.ngt_mgr = sahara_mock.node_group_templates
self.patchobject(sahara.SaharaClientPlugin,
'_create').return_value = sahara_mock
self.fake_ngt = FakeNodeGroupTemplate()
self.t = template_format.parse(node_group_template)
def _init_ngt(self, template):
self.stack = utils.parse_stack(template)
return self.stack['node-group']
def test_ngt_resource_mapping(self):
ngt = self._init_ngt(self.t)
mapping = st.resource_mapping()
self.assertEqual(st.SaharaNodeGroupTemplate,
mapping['OS::Sahara::NodeGroupTemplate'])
self.assertIsInstance(ngt,
st.SaharaNodeGroupTemplate)
def _create_ngt(self, template):
ngt = self._init_ngt(template)
self.ngt_mgr.create.return_value = self.fake_ngt
scheduler.TaskRunner(ngt.create)()
self.assertEqual((ngt.CREATE, ngt.COMPLETE), ngt.state)
self.assertEqual(self.fake_ngt.id, ngt.resource_id)
return ngt
def test_ngt_create(self):
self._create_ngt(self.t)
expected_args = ('node-group-template', 'vanilla',
'2.3.0', 'someflavorid')
expected_kwargs = {'description': "",
'volumes_per_node': None,
'volumes_size': None,
'volume_type': 'lvm',
'security_groups': None,
'auto_security_group': None,
'availability_zone': None,
'volumes_availability_zone': None,
'node_processes': ['namenode', 'jobtracker'],
'floating_ip_pool': 'some_pool_id',
'node_configs': None,
'image_id': None,
}
self.ngt_mgr.create.assert_called_once_with(*expected_args,
**expected_kwargs)
def test_ngt_delete(self):
ngt = self._create_ngt(self.t)
scheduler.TaskRunner(ngt.delete)()
self.ngt_mgr.delete.assert_called_once_with(self.fake_ngt.id)
self.assertEqual((ngt.DELETE, ngt.COMPLETE), ngt.state)
def test_ngt_delete_ignores_not_found(self):
ngt = self._create_ngt(self.t)
self.ngt_mgr.delete.side_effect = sahara.sahara_base.APIException(
error_code=404)
scheduler.TaskRunner(ngt.delete)()
self.ngt_mgr.delete.assert_called_once_with(self.fake_ngt.id)
def test_ngt_delete_fails(self):
ngt = self._create_ngt(self.t)
self.ngt_mgr.delete.side_effect = sahara.sahara_base.APIException()
delete_task = scheduler.TaskRunner(ngt.delete)
ex = self.assertRaises(exception.ResourceFailure, delete_task)
expected = "APIException: resources.node-group: None"
self.assertEqual(expected, six.text_type(ex))
self.ngt_mgr.delete.assert_called_once_with(self.fake_ngt.id)
def test_validate_floatingippool_on_neutron_fails(self):
ngt = self._init_ngt(self.t)
self.patchobject(ngt, 'is_using_neutron').return_value = True
self.patchobject(
neutron.NeutronClientPlugin, 'find_neutron_resource'
).side_effect = [
neutron.exceptions.NeutronClientNoUniqueMatch(message='Too many'),
neutron.exceptions.NeutronClientException(message='Not found',
status_code=404)
]
ex = self.assertRaises(exception.StackValidationFailed, ngt.validate)
self.assertEqual('Too many',
six.text_type(ex))
ex = self.assertRaises(exception.StackValidationFailed, ngt.validate)
self.assertEqual('Not found',
six.text_type(ex))
def test_validate_floatingippool_on_novanetwork_fails(self):
ngt = self._init_ngt(self.t)
self.patchobject(ngt, 'is_using_neutron').return_value = False
nova_mock = mock.MagicMock()
nova_mock.floating_ip_pools.find.side_effect = (
nova.exceptions.NotFound(404, message='Not found'))
self.patchobject(nova.NovaClientPlugin,
'_create').return_value = nova_mock
ex = self.assertRaises(exception.StackValidationFailed, ngt.validate)
self.assertEqual('Not found', six.text_type(ex))
def test_validate_flavor_constraint_return_false(self):
self.t['resources']['node-group']['properties'].pop('floating_ip_pool')
self.t['resources']['node-group']['properties'].pop('volume_type')
ngt = self._init_ngt(self.t)
self.patchobject(st.constraints.CustomConstraint, '_is_valid'
).return_value = False
self.patchobject(ngt, 'is_using_neutron').return_value = False
ex = self.assertRaises(exception.StackValidationFailed, ngt.validate)
self.assertEqual(u"Property error: "
u"resources.node-group.properties.flavor: "
u"Error validating value 'm1.large'",
six.text_type(ex))
def test_template_invalid_name(self):
tmpl = template_format.parse(node_group_template_without_name)
stack = utils.parse_stack(tmpl)
ngt = stack['node_group!']
self.ngt_mgr.create.return_value = self.fake_ngt
scheduler.TaskRunner(ngt.create)()
self.assertEqual((ngt.CREATE, ngt.COMPLETE), ngt.state)
self.assertEqual(self.fake_ngt.id, ngt.resource_id)
name = self.ngt_mgr.create.call_args[0][0]
self.assertIn('-nodegroup-', name)
class SaharaClusterTemplateTest(common.HeatTestCase):
def setUp(self):
super(SaharaClusterTemplateTest, self).setUp()
self.patchobject(st.constraints.CustomConstraint, '_is_valid'
).return_value = True
self.patchobject(neutron.NeutronClientPlugin, '_create')
self.patchobject(neutron.NeutronClientPlugin, 'find_neutron_resource'
).return_value = 'some_network_id'
sahara_mock = mock.MagicMock()
self.ct_mgr = sahara_mock.cluster_templates
self.patchobject(sahara.SaharaClientPlugin,
'_create').return_value = sahara_mock
self.fake_ct = FakeClusterTemplate()
self.t = template_format.parse(cluster_template)
def _init_ct(self, template):
self.stack = utils.parse_stack(template)
return self.stack['cluster-template']
def test_ct_resource_mapping(self):
ct = self._init_ct(self.t)
mapping = st.resource_mapping()
self.assertEqual(st.SaharaClusterTemplate,
mapping['OS::Sahara::ClusterTemplate'])
self.assertIsInstance(ct,
st.SaharaClusterTemplate)
def _create_ct(self, template):
ct = self._init_ct(template)
self.ct_mgr.create.return_value = self.fake_ct
scheduler.TaskRunner(ct.create)()
self.assertEqual((ct.CREATE, ct.COMPLETE), ct.state)
self.assertEqual(self.fake_ct.id, ct.resource_id)
return ct
def test_ct_create(self):
self._create_ct(self.t)
expected_args = ('test-cluster-template', 'vanilla',
'2.3.0')
expected_kwargs = {'description': '',
'default_image_id': None,
'net_id': 'some_network_id',
'anti_affinity': None,
'node_groups': None,
'cluster_configs': None
}
self.ct_mgr.create.assert_called_once_with(*expected_args,
**expected_kwargs)
def test_ct_delete(self):
ct = self._create_ct(self.t)
scheduler.TaskRunner(ct.delete)()
self.ct_mgr.delete.assert_called_once_with(self.fake_ct.id)
self.assertEqual((ct.DELETE, ct.COMPLETE), ct.state)
    def test_ct_delete_ignores_not_found(self):
ct = self._create_ct(self.t)
self.ct_mgr.delete.side_effect = sahara.sahara_base.APIException(
error_code=404)
scheduler.TaskRunner(ct.delete)()
self.ct_mgr.delete.assert_called_once_with(self.fake_ct.id)
    def test_ct_delete_fails(self):
ct = self._create_ct(self.t)
self.ct_mgr.delete.side_effect = sahara.sahara_base.APIException()
delete_task = scheduler.TaskRunner(ct.delete)
ex = self.assertRaises(exception.ResourceFailure, delete_task)
expected = "APIException: resources.cluster-template: None"
self.assertEqual(expected, six.text_type(ex))
self.ct_mgr.delete.assert_called_once_with(self.fake_ct.id)
def test_ct_validate_no_network_on_neutron_fails(self):
self.t['resources']['cluster-template']['properties'].pop(
'neutron_management_network')
ct = self._init_ct(self.t)
self.patchobject(ct, 'is_using_neutron', return_value=True)
ex = self.assertRaises(exception.StackValidationFailed,
ct.validate)
self.assertEqual("neutron_management_network must be provided",
six.text_type(ex))
def test_template_invalid_name(self):
tmpl = template_format.parse(cluster_template_without_name)
stack = utils.parse_stack(tmpl)
ct = stack['cluster_template!']
self.ct_mgr.create.return_value = self.fake_ct
scheduler.TaskRunner(ct.create)()
self.assertEqual((ct.CREATE, ct.COMPLETE), ct.state)
self.assertEqual(self.fake_ct.id, ct.resource_id)
name = self.ct_mgr.create.call_args[0][0]
self.assertIn('-clustertemplate-', name)
| [
"[email protected]"
]
| |
e9f8df1e669df7bb971e196bef4e8f0b517d633e | ca17bd80ac1d02c711423ac4093330172002a513 | /goodyhandy/FirstMissingPositive.py | 9988bcba209286d3584cc6e41ed5e95b6469f9f4 | []
| no_license | Omega094/lc_practice | 64046dea8bbdaee99d767b70002a2b5b56313112 | e61776bcfd5d93c663b247d71e00f1b298683714 | refs/heads/master | 2020-03-12T13:45:13.988645 | 2018-04-23T06:28:32 | 2018-04-23T06:28:32 | 130,649,699 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 531 | py | class Solution(object):
def firstMissingPositive(self, A):
"""
        :type A: List[int]
:rtype: int
"""
length = len(A)
        # cyclic sort: try to place each value v in 1..length at index v - 1
        for i, num in enumerate(A):
            while A[i] != i + 1:
                # stop when the value is out of range or its target slot
                # already holds the right value
                if A[i] <= 0 or A[i] > length or A[A[i] - 1] == A[i]:
                    break
                # swap A[i] into its target slot via a temp variable
                t = A[A[i] - 1]
                A[A[i] - 1] = A[i]
                A[i] = t
for i, num in enumerate(A):
if num != i + 1:
return i + 1
return length + 1
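# Usage sketch (added for illustration; the input list is an arbitrary example):
if __name__ == '__main__':
    print(Solution().firstMissingPositive([3, 4, -1, 1]))  # expected: 2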
| [
"[email protected]"
]
| |
b84c3ca4482e26c4f3ab2a79107e873f9b1656c5 | b3879bc761ac38dab903da57c4061ad79fd70c6d | /курсы пайтон модуль 3/задание 23.py | 14e81b84571b42392d7be1c1572eee18530c2954 | []
| no_license | Ruslan5252/all-of-my-projects-byPyCharm | 4df70cc3a31c4a5d97560fa858a706edcc856299 | 817d5f711408590ea141590ae52c6d888dfa2015 | refs/heads/master | 2023-05-03T01:06:30.156731 | 2021-05-29T13:51:16 | 2021-05-29T13:51:16 | 371,970,160 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 156 | py | a=1
max=0
while a != 0:
a = int(input("введите число"))
if a>max:
max=a
print("максимальное значение",max)
| [
"[email protected]"
]
| |
7bf8d2a366551d6774730e60de1d62b78af16d52 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_108/125.py | c96c54037ce532c493eb7d9d77e0a2a5ad1f93b3 | []
| no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,787 | py | #!/usr/bin/env python
import bisect
import sys
def main(args):
finname = '%s.in' % args[1]
foutname = '%s.out' % args[1]
with open(finname, 'r') as fin, open(foutname, 'w') as fout:
T = int(fin.readline().strip())
for i in xrange(1, T+1):
num_vines = int(fin.readline().strip())
vinestats = []
for j in xrange(num_vines):
d, l = [int(_) for _ in fin.readline().strip().split()]
vinestats.append((d, l))
D = int(fin.readline().strip())
memo = dict()
def ok(start_vine, swing_length):
if (start_vine, swing_length) in memo:
return memo[(start_vine, swing_length)]
vine_d, vine_l = vinestats[start_vine]
if vine_l < swing_length:
swing_length = vine_l
if vine_d + swing_length >= D:
memo[(start_vine, swing_length)] = True
return True
last_vine = bisect.bisect(vinestats, (vine_d+swing_length+1, 0), start_vine)
i = start_vine+1
result = False
while i < last_vine:
if ok(i, vinestats[i][0]-vine_d):
memo[(start_vine, swing_length)] = True
return True
i+=1
memo[(start_vine, swing_length)] = False
return False
result = 'YES' if ok(0, vinestats[0][0]) else 'NO'
result_str = 'Case #%s: %s\n' % (i, result)
# print result_str,
fout.write(result_str)
if __name__ == '__main__':
status = main(sys.argv)
sys.exit(status)
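# Example invocation (added; the file name is an arbitrary example):
#   python this_script.py A-small    # reads A-small.in, writes A-small.out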
| [
"[email protected]"
]
| |
016d2f4b0007f8a40384dcd7a57e8d67f5a5f01f | 7708c2526947a86d064fc8b07a579baa332c5575 | /Database/build_db_datasets.py | b0b7c3d3ff443564267cc2ad0962d02df56a6c71 | []
| no_license | shunsunsun/Cell_BLAST-notebooks | d622aea190015e8b76207866889dddbd4dd333a8 | 9baebb4311eaf71670f4852238db7b91157e71b1 | refs/heads/master | 2022-01-19T05:05:30.269257 | 2019-04-21T13:30:42 | 2019-04-21T13:30:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,024 | py | #!/usr/bin/env python
import os
import numpy as np
import pandas as pd
import mysql.connector
from utils import nan_safe
def generate_datasets_meta():
dataset_dict = {
item: [
file for file in os.listdir(item)
if file.endswith(".pdf") and file != "peek.pdf"
] for item in os.listdir(".") if item not in (
"__pycache__", ".ipynb_checkpoints"
) and os.path.isdir(item)
}
used_columns = (
"dataset_name", "organism", "organ", "platform",
"cell_number", "publication", "pmid", "remark"
)
single = pd.read_csv(
"../../Datasets/ACA_datasets.csv",
comment="#", skip_blank_lines=True
).loc[:, used_columns]
additional = pd.read_csv(
"../../Datasets/additional_datasets.csv",
comment="#", skip_blank_lines=True
).loc[:, used_columns]
single = pd.concat([single, additional], axis=0, ignore_index=True)
aligned = pd.read_csv(
"../../Datasets/aligned_datasets.csv",
comment="#", skip_blank_lines=True
).loc[:, used_columns]
for idx, row in aligned.iterrows():
aligned.loc[idx, "cell_number"] = single.loc[np.in1d(
single["dataset_name"], row["remark"].split(", ")
), "cell_number"].sum()
combined = pd.concat([single, aligned], axis=0, ignore_index=True)
combined["display"] = np.in1d(
combined["dataset_name"], list(dataset_dict.keys()))
# combined = combined.loc[np.in1d(
# combined["dataset_name"], list(dataset_dict.keys())
# ), :]
# combined["cell_number"] = combined["cell_number"].astype(np.int)
combined["self-projection coverage"] = np.nan
combined["self-projection accuracy"] = np.nan
for idx, row in combined.iterrows():
spf_path = os.path.join(row["dataset_name"], "self_projection.txt")
if not os.path.exists(spf_path):
if row["dataset_name"] in dataset_dict:
print("Missing: " + spf_path)
else:
with open(spf_path, "r") as spf:
lines = spf.readlines()
k1, v1 = lines[0].split()
k2, v2 = lines[1].split()
assert k1 == "coverage" and k2 == "accuracy"
v1, v2 = float(v1.strip()), float(v2.strip())
combined.loc[idx, "self-projection coverage"] = v1
combined.loc[idx, "self-projection accuracy"] = v2
combined["visualization"] = [
(", ".join(dataset_dict[item]) if item in dataset_dict else np.nan)
for item in combined["dataset_name"]
]
# combined.to_csv("./datasets_meta.csv", index=False)
# combined.to_json("./datasets_meta.json", orient="records", double_precision=3)
return combined
def create_table(cnx, cursor):
cursor.execute("DROP TABLE IF EXISTS `datasets`;")
cursor.execute(
"CREATE TABLE `datasets` ("
" `dataset_name` CHAR(50) NOT NULL UNIQUE,"
" `organism` char(50) NOT NULL,"
" `organ` char(100) NOT NULL,"
" `platform` char(50),"
" `cell_number` INT CHECK(`cell_number` > 0),"
" `publication` VARCHAR(300),"
" `pmid` CHAR(8),"
" `remark` VARCHAR(200),"
" `self-projection coverage` FLOAT CHECK(`self-projection coverage` BETWEEN 0 AND 1),"
" `self-projection accuracy` FLOAT CHECK(`self-projection accuracy` BETWEEN 0 AND 1),"
" `visualization` VARCHAR(200),"
" `display` BOOL NOT NULL,"
" PRIMARY KEY USING HASH(`dataset_name`)"
");"
)
def insert_data(cnx, cursor, data):
insert_sql = (
"INSERT INTO `datasets` ("
" `dataset_name`, `organism`, `organ`, `platform`,"
" `cell_number`, `publication`, `pmid`, `remark`,"
" `self-projection coverage`, `self-projection accuracy`,"
" `visualization`, `display`"
") VALUES ("
" %s, %s, %s, %s,"
" %s, %s, %s, %s,"
" %s, %s, %s, %s"
");"
)
for idx, row in data.iterrows():
cursor.execute(insert_sql, (
nan_safe(row["dataset_name"]), nan_safe(row["organism"]),
nan_safe(row["organ"]), nan_safe(row["platform"]),
nan_safe(row["cell_number"], int), nan_safe(row["publication"]),
nan_safe(row["pmid"], lambda x: str(int(x))), nan_safe(row["remark"]),
nan_safe(row["self-projection coverage"], lambda x: float(np.round(x, 3))),
nan_safe(row["self-projection accuracy"], lambda x: float(np.round(x, 3))),
nan_safe(row["visualization"]), nan_safe(row["display"])
))
def main():
cnx = mysql.connector.connect(
user=input("Please enter username: "), password=input("Please enter password: "),
host="127.0.0.1", database="aca"
)
cursor = cnx.cursor()
create_table(cnx, cursor)
insert_data(cnx, cursor, generate_datasets_meta())
cnx.commit()
cursor.close()
cnx.close()
if __name__ == "__main__":
main()
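# Added note: this assumes a MySQL server reachable at 127.0.0.1 with an
# existing `aca` database; credentials are prompted for interactively.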
| [
"[email protected]"
]
| |
a7db53021d314e8a8940afd0b9d509d6c3431464 | eb64b799ff1d7ef3a244bf8e6f9f4e9118d5cfcd | /homeassistant/components/wilight/light.py | 3236b3b3851a234fc1d369afef91f7753338940f | [
"Apache-2.0"
]
| permissive | JeffLIrion/home-assistant | 53966b81b5d5816679f12fc761f79e8777c738d6 | 8f4ec89be6c2505d8a59eee44de335abe308ac9f | refs/heads/dev | 2023-08-22T09:42:02.399277 | 2022-02-16T01:26:13 | 2022-02-16T01:26:13 | 136,679,169 | 5 | 2 | Apache-2.0 | 2023-09-13T06:59:25 | 2018-06-09T00:58:35 | Python | UTF-8 | Python | false | false | 5,995 | py | """Support for WiLight lights."""
from pywilight.const import (
ITEM_LIGHT,
LIGHT_COLOR,
LIGHT_DIMMER,
LIGHT_ON_OFF,
SUPPORT_NONE,
)
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_HS_COLOR,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
LightEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import DOMAIN, WiLightDevice
def entities_from_discovered_wilight(hass, api_device):
"""Parse configuration and add WiLight light entities."""
entities = []
for item in api_device.items:
if item["type"] != ITEM_LIGHT:
continue
index = item["index"]
item_name = item["name"]
if item["sub_type"] == LIGHT_ON_OFF:
entity = WiLightLightOnOff(api_device, index, item_name)
elif item["sub_type"] == LIGHT_DIMMER:
entity = WiLightLightDimmer(api_device, index, item_name)
elif item["sub_type"] == LIGHT_COLOR:
entity = WiLightLightColor(api_device, index, item_name)
else:
continue
entities.append(entity)
return entities
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Set up WiLight lights from a config entry."""
parent = hass.data[DOMAIN][entry.entry_id]
# Handle a discovered WiLight device.
entities = entities_from_discovered_wilight(hass, parent.api)
async_add_entities(entities)
class WiLightLightOnOff(WiLightDevice, LightEntity):
"""Representation of a WiLights light on-off."""
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_NONE
@property
def is_on(self):
"""Return true if device is on."""
return self._status.get("on")
async def async_turn_on(self, **kwargs):
"""Turn the device on."""
await self._client.turn_on(self._index)
async def async_turn_off(self, **kwargs):
"""Turn the device off."""
await self._client.turn_off(self._index)
class WiLightLightDimmer(WiLightDevice, LightEntity):
"""Representation of a WiLights light dimmer."""
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_BRIGHTNESS
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return int(self._status.get("brightness", 0))
@property
def is_on(self):
"""Return true if device is on."""
return self._status.get("on")
async def async_turn_on(self, **kwargs):
"""Turn the device on,set brightness if needed."""
# Dimmer switches use a range of [0, 255] to control
# brightness. Level 255 might mean to set it to previous value
if ATTR_BRIGHTNESS in kwargs:
brightness = kwargs[ATTR_BRIGHTNESS]
await self._client.set_brightness(self._index, brightness)
else:
await self._client.turn_on(self._index)
async def async_turn_off(self, **kwargs):
"""Turn the device off."""
await self._client.turn_off(self._index)
def wilight_to_hass_hue(value):
"""Convert wilight hue 1..255 to hass 0..360 scale."""
return min(360, round((value * 360) / 255, 3))
def hass_to_wilight_hue(value):
"""Convert hass hue 0..360 to wilight 1..255 scale."""
return min(255, round((value * 255) / 360))
def wilight_to_hass_saturation(value):
"""Convert wilight saturation 1..255 to hass 0..100 scale."""
return min(100, round((value * 100) / 255, 3))
def hass_to_wilight_saturation(value):
"""Convert hass saturation 0..100 to wilight 1..255 scale."""
return min(255, round((value * 255) / 100))
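# Worked examples (added for illustration):
#   wilight_to_hass_hue(255) -> 360.0
#   hass_to_wilight_saturation(50) -> 128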
class WiLightLightColor(WiLightDevice, LightEntity):
"""Representation of a WiLights light rgb."""
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_BRIGHTNESS | SUPPORT_COLOR
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return int(self._status.get("brightness", 0))
@property
def hs_color(self):
"""Return the hue and saturation color value [float, float]."""
return [
wilight_to_hass_hue(int(self._status.get("hue", 0))),
wilight_to_hass_saturation(int(self._status.get("saturation", 0))),
]
@property
def is_on(self):
"""Return true if device is on."""
return self._status.get("on")
async def async_turn_on(self, **kwargs):
"""Turn the device on,set brightness if needed."""
# Brightness use a range of [0, 255] to control
# Hue use a range of [0, 360] to control
# Saturation use a range of [0, 100] to control
if ATTR_BRIGHTNESS in kwargs and ATTR_HS_COLOR in kwargs:
brightness = kwargs[ATTR_BRIGHTNESS]
hue = hass_to_wilight_hue(kwargs[ATTR_HS_COLOR][0])
saturation = hass_to_wilight_saturation(kwargs[ATTR_HS_COLOR][1])
await self._client.set_hsb_color(self._index, hue, saturation, brightness)
elif ATTR_BRIGHTNESS in kwargs and ATTR_HS_COLOR not in kwargs:
brightness = kwargs[ATTR_BRIGHTNESS]
await self._client.set_brightness(self._index, brightness)
elif ATTR_BRIGHTNESS not in kwargs and ATTR_HS_COLOR in kwargs:
hue = hass_to_wilight_hue(kwargs[ATTR_HS_COLOR][0])
saturation = hass_to_wilight_saturation(kwargs[ATTR_HS_COLOR][1])
await self._client.set_hs_color(self._index, hue, saturation)
else:
await self._client.turn_on(self._index)
async def async_turn_off(self, **kwargs):
"""Turn the device off."""
await self._client.turn_off(self._index)
| [
"[email protected]"
]
| |
1bb80b25bf87d695dd5433efee4ab2a9b1aa572c | 483508a4e002bcd734b8729459d3e5d5e02aae70 | /number_frequency.py | 27b6776f20516181ec134ca21ebb9c493c09bc5c | []
| no_license | jdavid54/benford_law | 9d54cd539130bc3665080ca801d1bb4db96a18a9 | 3ff9d8358f59fef60f401c290ceb94701613e1b2 | refs/heads/main | 2023-07-18T03:56:18.685081 | 2021-08-25T10:44:37 | 2021-08-25T10:44:37 | 399,751,073 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,069 | py | import numpy as np
import random
import matplotlib.pyplot as plt
# benford's law
# value
l1 = 10000
# size
l2 = 100
freq=[0]*10
x = np.arange(1,10)
'''
a = np.random.randint(1,l1,(1,l2))
print(a)
for i in np.array(*a):
n = int(str(i)[0])
#print(n)
freq[n] = freq[n]+1
print(freq)
plt.bar(x,freq[1:])
#plt.show()
for i in range(100):
n = int(str(a[0][np.random.randint(0,l2)])[0])
#print(n)
freq[n] = freq[n]+1
print(freq)
plt.bar(x,freq[1:])
#plt.show()
'''
# Benford's law: P(d) = log10(1 + 1/d) for leading digit d
log_array=[]
for k in x:
print((1+1/k, np.log10(1+1/k)))
log_array.append(np.log10(1+1/k))
#print('sum',sum(log_array)) # sum=1
#plt.bar(x, np.log10(1+1/x)*100)
#plt.title("Benford's law")
#plt.show()
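# Added note: under Benford's law the leading digit 1 occurs with
# probability log10(2) ~= 0.301, i.e. about 30.1% of the time.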
# https://fr.wikipedia.org/wiki/Loi_de_Benford
# From the Wikipedia article above (translated): in a list of 100 numbers
# obtained as products of two or more numbers drawn at random between
# 1 and 10,000, the frequencies of the digits 1 to 9 in the leading
# position roughly follow the values of Benford's law.
val = 10000
numbers=[]
m = 5
kmin = 2
kmax = 5
klist = []
benford=[np.log10(1+1/x) for x in range(1,10)]
print(benford)
benford_cumsum = np.cumsum(benford)
print(benford_cumsum)
# get 100 numbers as a product of k random numbers between 1 and val=10000
for i in range(m*100):
p = 1
k = random.randint(kmin,kmax)
if k not in klist:
klist.append(k)
for i in range(k):
p *= np.random.randint(1,val)
p0 = int(str(p)[0])
numbers.append((k,p0,p))
freq[p0] = freq[p0]+1
freq=[f/m for f in freq]
freq_cumul = np.cumsum(freq)
print(freq[1:])
print(klist)
print(numbers)
plt.bar(x-0.2,np.log10(1+1/x)*100,0.4, label='Benford\'s law')
plt.bar(x+0.2,freq[1:],0.4, label='Product of k random numbers')
plt.title(', '.join([str(round(s,1)) for s in freq[1:]]))
plt.legend()
plt.show()
plt.bar(x-0.2, benford_cumsum*100,0.4, label='Benford\'s cumul sum')
plt.bar(x+0.2,freq_cumul[1:],0.4, label='Product of k random numbers frequence cumul sum')
#plt.bar(x,freq_cumul[1:])
plt.title('Cumulative frequencies')
plt.legend()
plt.show() | [
"[email protected]"
]
| |
a44f361047b27f3505d603357681d2fca47f37b6 | bad686ba27539a3d3286418cc3ebf2aa80ae4958 | /src/pong/full-game.py | 383a097d39786a83f75f9eefa942508b67aa3626 | []
| no_license | AaryaBatchu/micropython | f0a31b579b3a998586f26b92036875c93588eca7 | aef7d33937352e9ab6f9615bfc5bf9aa1a9bee57 | refs/heads/main | 2023-08-19T13:33:15.006432 | 2021-10-23T19:06:26 | 2021-10-23T19:06:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,452 | py | # Pong game on Raspberry Pi Pico with a OLED and two Potentimeters
import machine  # needed: machine.Pin / machine.ADC / machine.I2C are used below
from machine import Pin, PWM, SPI
import ssd1306
from utime import sleep
import random # random direction for new ball
sda=machine.Pin(0)
scl=machine.Pin(1)
pot_pin = machine.ADC(26)
WIDTH = 128
HEIGHT = 64
i2c=machine.I2C(0,sda=sda, scl=scl)
oled = ssd1306.SSD1306_I2C(WIDTH, HEIGHT, i2c)
# connect the center tops of the potentiometers to ADC0 and ADC1
pot_pin_1 = machine.ADC(27)
pot_pin_2 = machine.ADC(26) # make them the same for testing
# lower right corner with USB connector on top
SPEAKER_PIN = 16
# create a Pulse Width Modulation Object on this pin
speaker = PWM(Pin(SPEAKER_PIN))
# globals variables
# static variables are constants are uppercase variable names
HALF_WIDTH = int(WIDTH / 2)
HALF_HEIGHT = int(HEIGHT / 2)
BALL_SIZE = 3  # 3x3 pixels
PAD_WIDTH = 2
PAD_HEIGHT = 8
HALF_PAD_WIDTH = int(PAD_WIDTH / 2)
HALF_PAD_HEIGHT = int(PAD_HEIGHT / 2)
POT_MIN = 3000
POT_MAX = 65534
MAX_ADC_VALUE = 65534  # just under the ADC ceiling of 2^16 - 1 = 65535
# dynamic global variables use lowercase
paddle1_vel = 0
paddle2_vel = 0
l_score = 0
r_score = 0
# continuous update of the paddle and ball
# play_startup_sound()
# start with the ball in the center
ball_x = int(WIDTH / 2)
ball_y = int(HEIGHT / 2)
# set the initial directinon to down to the right
ball_x_dir = 1
ball_y_dir = 1
def play_startup_sound():
speaker.duty_u16(1000)
speaker.freq(600)
sleep(.25)
speaker.freq(800)
sleep(.25)
speaker.freq(1200)
sleep(.25)
speaker.duty_u16(0)
def play_bounce_sound():
speaker.duty_u16(1000)
speaker.freq(900)
sleep(.25)
speaker.duty_u16(0)
def play_score_sound():
speaker.duty_u16(1000)
speaker.freq(600)
sleep(.25)
speaker.freq(800)
sleep(.25)
speaker.duty_u16(0)
# note that OLEDs have problems with screen burn-in - don't leave this on too long!
def border(WIDTH, HEIGHT):
oled.rect(0, 0, WIDTH, HEIGHT, 1)
# Takes an input number vale and a range between high-and-low and returns it scaled to the new range
# This is similar to the Arduino map() function
def valmap(value, istart, istop, ostart, ostop):
return int(ostart + (ostop - ostart) * ((value - istart) / (istop - istart)))
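# Illustrative example (added): valmap(32768, 0, 65535, 0, 63) returns 31 -
# a mid-scale ADC reading maps to roughly the middle of the screen.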
# draw a vertical bar
def draw_paddle(paddle_no, paddle_center):
if paddle_no == 1:
x = 0
else:
x = WIDTH - 2
y = paddle_center - HALF_PAD_HEIGHT
oled.fill_rect(x, y, PAD_WIDTH, PAD_HEIGHT, 1) # fill with 1s
def draw_ball():
oled.fill_rect(ball_x, ball_y, BALL_SIZE, BALL_SIZE, 1) # square balls for now
# The main event loop
while True:
oled.fill(0) # clear screen
oled.vline(int(WIDTH / 2), 0, HEIGHT, 1)
# border(WIDTH, HEIGHT)
# read both the pot values
pot_val_1 = pot_pin_1.read_u16()
pot_val_2 = pot_pin_2.read_u16()
# print(pot_val_1)
    # scale the readings: the raw ADC range tops out near 2^16 - 1 (65535); map it onto the paddle's vertical travel
# ideally, it should range from 5 to 58
pot_val_1 = valmap(pot_val_1, POT_MIN, POT_MAX, HALF_PAD_HEIGHT, HEIGHT - HALF_PAD_HEIGHT - 2)
pot_val_2 = valmap(pot_val_2, POT_MIN, POT_MAX, HALF_PAD_HEIGHT, HEIGHT - HALF_PAD_HEIGHT - 2)
# print(pot_val, pot_scaled)
draw_paddle(1, pot_val_1 + HALF_PAD_HEIGHT)
draw_paddle(2, pot_val_2 + HALF_PAD_HEIGHT)
draw_ball()
#update ball position with the current directions
ball_x = ball_x + ball_x_dir
ball_y = ball_y + ball_y_dir
# update the ball direction if we are at the top or bottom edge
if ball_y < 0:
ball_y_dir = 1
#play_bounce_sound()
if ball_y > HEIGHT - 3:
ball_y_dir = -1
#play_bounce_sound()
# if it hits the paddle bounce else score
if ball_x < 1:
top_paddle = pot_val_1 - HALF_PAD_HEIGHT
bottom_paddle = pot_val_1 + HALF_PAD_HEIGHT
if ball_y > top_paddle and ball_y < bottom_paddle:
# we have a hit
ball_x_dir = 1
ball_x = 2
play_bounce_sound()
print('paddle hit on left edge', pot_val_1, top_paddle, bottom_paddle)
else:
# we have a score for the right player
play_score_sound()
r_score += 1
ball_x = int(WIDTH / 2)
ball_y = int(HEIGHT / 2)
ball_x_dir = random.randint(-1, 2)
if ball_x_dir == 0:
ball_x_dir = 1
ball_y_dir = random.randint(-1, 2)
print('score on left edge', pot_val_1, top_paddle, bottom_paddle)
sleep(.25)
if ball_x > WIDTH - 3:
ball_x = WIDTH - 4
top_paddle = pot_val_2 - HALF_PAD_HEIGHT
bottom_paddle = pot_val_2 + HALF_PAD_HEIGHT
if ball_y > top_paddle and ball_y < bottom_paddle:
ball_x_dir = -1
print('bounce on right paddle', pot_val_1, top_paddle, bottom_paddle)
else:
l_score += 1
play_score_sound()
ball_x = int(WIDTH / 2)
ball_y = int(HEIGHT / 2)
ball_x_dir = random.randint(-1, 2)
if ball_x_dir == 0:
ball_x_dir = 1
ball_y_dir = random.randint(-1, 2)
play_bounce_sound()
print('score on right edge', pot_val_1, top_paddle, bottom_paddle)
sleep(.25)
oled.text(str(l_score), HALF_WIDTH - 20, 5, 1)
oled.text(str(r_score), HALF_WIDTH + 5, 5, 1)
oled.show() | [
"[email protected]"
]
| |
79a4bb8bec0d2d35bfcfb2c239be6aee46b0fd66 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_373/ch4_2020_04_12_18_58_48_907546.py | cde9dac5e0e2b7c03893f3ea611cee967836abd9 | []
| no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 212 | py | def classifica_idade (idade):
input(int( 'idade: '))
if idade <=11:
print ( 'crinca')
elif idade >= 12 and idade <= 17:
print ('adolecente')
else:
print ('adulto')
| [
"[email protected]"
]
| |
6405b2626aba482937b14dfeafe8be7ddfd5657d | 6392354e74cce4a303a544c53e13d0a7b87978ee | /m4/socket_correlation/company_review/lock_test.py | 154a5366cb5434bb78837c326d9e8b9c99355720 | []
| no_license | music51555/wxPythonCode | dc35e42e55d11850d7714a413da3dde51ccdd37e | f77b71ed67d926fbafd1cfec89de8987d9832016 | refs/heads/master | 2020-04-11T20:20:38.136446 | 2019-04-01T09:17:34 | 2019-04-01T09:17:34 | 162,067,449 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 937 | py | import time
from threading import Thread,RLock
mutexA = mutexB = RLock()
class MyThread(Thread):
def __init__(self,name):
super(MyThread,self).__init__()
self.name = name
def run(self):
self.f1()
self.f2()
def f1(self):
        mutexA.acquire()
        print('%s acquired lock A' % self.name)
        mutexB.acquire()
        print('%s acquired lock B' % self.name)
        mutexA.release()
        print('%s released lock A' % self.name)
        mutexB.release()
        print('%s released lock B' % self.name)
def f2(self):
        mutexB.acquire()
        print('%s acquired lock B' % self.name)
        time.sleep(0.1)
        mutexA.acquire()
        print('%s acquired lock A' % self.name)
        mutexB.release()
        print('%s released lock B' % self.name)
        mutexA.release()
        print('%s released lock A' % self.name)
if __name__ == '__main__':
for i in range(3):
        m = MyThread('child thread %s' % i)
m.start() | [
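# Expected behavior (added note): because mutexA and mutexB are bound to the
# same RLock, a thread holding "A" can re-acquire "B" without blocking, so
# f1/f2 never deadlock; with two separate Lock objects this interleaving
# could deadlock between threads.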
"[email protected]"
]
| |
4bbf48067b37dfa9b7a43b74bd31d72cf471611d | 8f8e378c0ce4224244582c506c268edda3cc3b30 | /Common/OpenCV/Day1/open2.py | 6683662896f77c68b9e3b75157a97e725953ee7e | []
| no_license | srsapireddy/Diploma-in-AI_NIELIT_Files | 223318319b2d4b8647d77b99d1ba03f0d6e15cf6 | 9e2ed78fbe03369ebef1aa81f3417fc21bdd4107 | refs/heads/master | 2021-05-17T14:28:00.059617 | 2020-03-29T09:28:04 | 2020-03-29T09:28:04 | 250,820,401 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 114 | py | import cv2 as cv
img = cv.imread('walking.jpg', 0)  # flag 0 loads the image in grayscale
cv.imshow('img1', img)
print(img)         # the raw pixel values (a numpy array)
print(img.shape)   # (height, width) for a grayscale image
cv.waitKey(0)
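# Added note: cv.waitKey(0) blocks until a key press; cv.destroyAllWindows()
# would normally follow to close the display window.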
| [
"[email protected]"
]
| |
e63b88ed084aef3af607dfd3983492929682e249 | bebacae90aa17ad2ab4c9111a2e5cfa0f8cf13a6 | /Python-3/basic_examples/raw_input_example.py | 98bdf626e06b78a68c079f81f91a901b1a33be39 | [
"MIT"
]
| permissive | ayanakshi/journaldev | 5b0d73c53bc9a5292a8629c6c0320196abeab76e | a61cba22232e8cc9c40264c31aaba0bd17ff2522 | refs/heads/master | 2020-03-27T21:52:15.081736 | 2018-08-31T11:51:28 | 2018-08-31T11:51:28 | 147,182,378 | 1 | 0 | MIT | 2018-09-03T09:28:38 | 2018-09-03T09:28:38 | null | UTF-8 | Python | false | false | 61 | py | a = raw_input('What\'s your name : ')
print 'Username : ', a
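# Python 3 note (added): raw_input() was renamed to input(), and print is a
# function, e.g.:
#   a = input("What's your name : ")
#   print('Username : ', a)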
| [
"[email protected]"
]
| |
ebf5338c9d16d52fb1f01ccc605998b512d9edf6 | c6ff2a4484c371efd97ce610832cd9772dd406e0 | /app10_udemy/app10_udemy/wsgi.py | bb40d92d717d10e2eaaa247e3e39c58b6fc183fe | []
| no_license | inderdevkumar/Upload-and-display | 66bbb808be27d47f3ff8d57e663b58b71f62ef71 | 668beb97392f12d4b545937c18f2723919264987 | refs/heads/master | 2022-10-10T01:19:02.044549 | 2020-06-09T12:56:22 | 2020-06-09T12:56:22 | 271,003,802 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 399 | py | """
WSGI config for app10_udemy project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'app10_udemy.settings')
application = get_wsgi_application()
| [
"[email protected]"
]
| |
be38d3ebadb2460af99adbcd9d5e38e954fef300 | 89220198e6869bf13ff99f1d07b5aa0f49f23b2a | /modules/tools/perception/empty_prediction.py | 33dd04981b1164fe3cb0fdef209d432d879386cd | [
"Apache-2.0"
]
| permissive | maohaihua/apollo | 2cd073a0844a9028756582e6db4c6b66fd4f8a0a | a30d7a6c65a58ca82681df81211176f98eeffde2 | refs/heads/master | 2020-04-30T20:55:14.018814 | 2019-03-22T00:29:56 | 2019-03-22T04:58:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,225 | py | #!/usr/bin/env python
###############################################################################
# Copyright 2019 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
"""
this module creates a node and fake prediction data based
on json configurations
"""
import argparse
import time
from cyber_py import cyber
from modules.prediction.proto.prediction_obstacle_pb2 import PredictionObstacles
def prediction_publisher(prediction_channel, rate):
"""publisher"""
cyber.init()
node = cyber.Node("prediction")
writer = node.create_writer(prediction_channel, PredictionObstacles)
sleep_time = 1.0 / rate
seq_num = 1
while not cyber.is_shutdown():
prediction = PredictionObstacles()
prediction.header.sequence_num = seq_num
prediction.header.timestamp_sec = time.time()
prediction.header.module_name = "prediction"
print(str(prediction))
writer.write(prediction)
seq_num += 1
time.sleep(sleep_time)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="create empty prediction message",
prog="replay_prediction.py")
parser.add_argument("-c", "--channel", action="store", type=str, default="/apollo/prediction",
help="set the prediction channel")
parser.add_argument("-r", "--rate", action="store", type=int, default=10,
help="set the prediction channel publish time duration")
args = parser.parse_args()
prediction_publisher(args.channel, args.rate)
| [
"[email protected]"
]
| |
97ed4b7b177f9bfd4dd65cf0fe4e612cec5f5ca7 | c68580258e9fbe64bbf232e781d75584691de4c4 | /tests/django_settings.py | 2af62bb352df44c8386b6fd77435541a4214c8d9 | [
"MIT"
]
| permissive | KyleAMathews/graphene | 7e092e6e7d9575c1f736d834a2913a63bc753006 | 5738b69271fd245339f35640d375d6bc13092358 | refs/heads/master | 2023-08-31T21:12:22.927712 | 2015-11-30T18:08:12 | 2015-11-30T18:08:12 | 47,149,828 | 2 | 0 | null | 2015-11-30T22:24:27 | 2015-11-30T22:24:27 | null | UTF-8 | Python | false | false | 196 | py | SECRET_KEY = 1
INSTALLED_APPS = [
'examples.starwars_django',
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'tests/django.sqlite',
}
}
| [
"[email protected]"
]
| |
07dd881dfa838563a5ef9d22778cd9993402dd4c | 22ebcc842dbc933bfa8fdad89b8b8ef48ecc91c7 | /load/load_aes_hd.py | f70ad4aa349abe0942c49ece8386f2b88e6237e6 | []
| no_license | klikooo/thesis-src | 192651c18f243c59cfa588e7052dc1a96ab0a146 | 64f2ee824afdc2d3fd0f98c6d9fcfda597b9ad9f | refs/heads/master | 2020-04-16T18:16:20.638147 | 2019-08-20T14:59:52 | 2019-08-20T14:59:52 | 161,623,404 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,364 | py | from decimal import Decimal
import torch
import numpy as np
import matplotlib.pyplot as plt
from models.load_model import load_model
from test import test_with_key_guess
import util
path = '/media/rico/Data/TU/thesis'
#####################################################################################
# Parameters
use_hw = False
n_classes = 9 if use_hw else 256
spread_factor = 1
runs = [x for x in range(5)]
train_size = 20000
epochs = 140
batch_size = 100
lr = 0.00075
sub_key_index = 2
attack_size = 100
rank_step = 1
type_network = 'HW' if use_hw else 'ID'
unmask = False if sub_key_index < 2 else True
# network_names = ['SpreadV2', 'SpreadNet']
network_names = ['ConvNetKernel']
kernel_sizes = [3, 5, 7, 9, 11, 13, 15]
# network_names = ['ConvNet', 'ConvNetDK']
plt_titles = ['$Spread_{V2}$', '$Spread_{PH}$', '$Dense_{RT}$', '$MLP_{best}$']
only_accuracy = False
data_set = util.DataSet.RANDOM_DELAY
raw_traces = True
validation_size = 1000
#####################################################################################
data_set_name = str(data_set)
if len(plt_titles) != len(network_names):
plt_titles = network_names
device = torch.device("cuda")
# Load Data
loader = util.load_data_set(data_set)
print('Loading data set')
total_x_attack, total_y_attack, plain = loader({'use_hw': use_hw,
'traces_path': '/media/rico/Data/TU/thesis/data',
'raw_traces': raw_traces,
'start': train_size + validation_size,
'size': attack_size,
'domain_knowledge': True})
print('Loading key guesses')
key_guesses = util.load_csv('/media/rico/Data/TU/thesis/data/{}/Value/key_guesses_ALL_transposed.csv'.format(
data_set_name),
delimiter=' ',
dtype=np.int,
start=train_size + validation_size,
size=attack_size)
real_key = util.load_csv('/media/rico/Data/TU/thesis/data/{}/secret_key.csv'.format(data_set_name), dtype=np.int)
x_attack = total_x_attack
y_attack = total_y_attack
def get_ranks(x_attack, y_attack, key_guesses, runs, train_size,
epochs, lr, sub_key_index, attack_size, rank_step, unmask, network_name, kernel_size_string=""):
ranks_x = []
ranks_y = []
for run in runs:
model_path = '/media/rico/Data/TU/thesis/runs2/' \
'{}/subkey_{}/{}_SF{}_E{}_BZ{}_LR{}/train{}/model_r{}_{}{}.pt'.format(
data_set_name,
sub_key_index,
type_network,
spread_factor,
epochs,
batch_size,
'%.2E' % Decimal(lr),
train_size,
run,
network_name,
kernel_size_string)
print('path={}'.format(model_path))
# Load the model
model = load_model(network_name=network_name, model_path=model_path)
model.eval()
print("Using {}".format(model))
model.to(device)
# Number of times we test a single model + shuffle the test traces
num_exps = 100
x, y = [], []
for exp_i in range(num_exps):
permutation = np.random.permutation(x_attack.shape[0])
# permutation = np.arange(0, x_attack.shape[0])
x_attack_shuffled = util.shuffle_permutation(permutation, np.array(x_attack))
y_attack_shuffled = util.shuffle_permutation(permutation, np.array(y_attack))
key_guesses_shuffled = util.shuffle_permutation(permutation, key_guesses)
# Check if we need domain knowledge
dk_plain = None
if network_name in util.req_dk:
dk_plain = plain
dk_plain = util.shuffle_permutation(permutation, dk_plain)
x_exp, y_exp = test_with_key_guess(x_attack_shuffled, y_attack_shuffled, key_guesses_shuffled, model,
attack_size=attack_size,
real_key=real_key,
use_hw=use_hw,
plain=dk_plain)
x = x_exp
y.append(y_exp)
# Take the mean of the different experiments
y = np.mean(y, axis=0)
# Add the ranks
ranks_x.append(x)
ranks_y.append(y)
return ranks_x, ranks_y
# Test the networks that were specified
ranks_x = []
ranks_y = []
rank_mean_y = []
name_models = []
for network_name in network_names:
if network_name in util.req_kernel_size:
for kernel_size in kernel_sizes:
kernel_string = "_k{}".format(kernel_size)
x, y = get_ranks(x_attack, y_attack, key_guesses, runs, train_size, epochs, lr, sub_key_index,
attack_size, rank_step, unmask, network_name, kernel_string)
mean_y = np.mean(y, axis=0)
ranks_x.append(x)
ranks_y.append(y)
rank_mean_y.append(mean_y)
name_models.append("{} K{}".format(network_name, kernel_size))
else:
x, y = get_ranks(x_attack, y_attack, key_guesses, runs, train_size, epochs, lr, sub_key_index,
attack_size, rank_step, unmask, network_name)
mean_y = np.mean(y, axis=0)
ranks_x.append(x)
ranks_y.append(y)
rank_mean_y.append(mean_y)
name_models.append(network_name)
for i in range(len(rank_mean_y)):
plt.title('Performance of {}'.format(name_models[i]))
plt.xlabel('number of traces')
plt.ylabel('rank')
plt.grid(True)
# Plot the results
for x, y in zip(ranks_x[i], ranks_y[i]):
plt.plot(x, y)
figure = plt.gcf()
plt.figure()
figure.savefig('/home/rico/Pictures/{}.png'.format(name_models[i]), dpi=100)
# plt.title('Comparison of networks')
plt.xlabel('Number of traces')
plt.ylabel('Mean rank')
plt.grid(True)
for i in range(len(rank_mean_y)):
plt.plot(ranks_x[i][0], rank_mean_y[i], label=name_models[i])
plt.legend()
# plt.figure()
figure = plt.gcf()
figure.savefig('/home/rico/Pictures/{}.png'.format('mean'), dpi=100)
plt.show()
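# Added note: the /media/rico/... paths are machine-specific; pre-trained
# models and the attack traces are expected at those locations.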
| [
"[email protected]"
]
| |
11e1b75a57e9bc7732942119e5dbf9bfc029fa0b | f62fd455e593a7ad203a5c268e23129473d968b6 | /senlin-3.0.1/senlin/tests/unit/engine/actions/test_action_base.py | 058e15a75749ccf0a3430225113222d3bedc14a0 | [
"Apache-2.0"
]
| permissive | MinbinGong/OpenStack-Ocata | 5d17bcd47a46d48ff9e71e2055f667836174242f | 8b7650128cfd2fdf5d6c8bc4613ac2e396fb2fb3 | refs/heads/master | 2021-06-23T05:24:37.799927 | 2017-08-14T04:33:05 | 2017-08-14T04:33:05 | 99,709,985 | 0 | 2 | null | 2020-07-22T22:06:22 | 2017-08-08T15:48:44 | Python | UTF-8 | Python | false | false | 35,544 | py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import mock
from oslo_config import cfg
from oslo_utils import timeutils
from oslo_utils import uuidutils
import six
from senlin.common import consts
from senlin.common import exception
from senlin.common import utils as common_utils
from senlin.engine.actions import base as ab
from senlin.engine import cluster as cluster_mod
from senlin.engine import environment
from senlin.engine import event as EVENT
from senlin.engine import node as node_mod
from senlin.objects import action as ao
from senlin.objects import cluster_policy as cpo
from senlin.objects import dependency as dobj
from senlin.policies import base as policy_mod
from senlin.tests.unit.common import base
from senlin.tests.unit.common import utils
from senlin.tests.unit import fakes
CLUSTER_ID = 'e1cfd82b-dc95-46ad-86e8-37864d7be1cd'
OBJID = '571fffb8-f41c-4cbc-945c-cb2937d76f19'
OWNER_ID = 'c7114713-ee68-409d-ba5d-0560a72a386c'
ACTION_ID = '4c2cead2-fd74-418a-9d12-bd2d9bd7a812'
USER_ID = '3c4d64baadcd437d8dd49054899e73dd'
PROJECT_ID = 'cf7a6ae28dde4f46aa8fe55d318a608f'
class DummyAction(ab.Action):
def __init__(self, target, action, context, **kwargs):
super(DummyAction, self).__init__(target, action, context, **kwargs)
class ActionBaseTest(base.SenlinTestCase):
def setUp(self):
super(ActionBaseTest, self).setUp()
self.ctx = utils.dummy_context(project=PROJECT_ID, user_id=USER_ID)
self.action_values = {
'name': 'FAKE_NAME',
'cause': 'FAKE_CAUSE',
'owner': OWNER_ID,
'interval': 60,
'start_time': 0,
'end_time': 0,
'timeout': 120,
'status': 'FAKE_STATUS',
'status_reason': 'FAKE_STATUS_REASON',
'inputs': {'param': 'value'},
'outputs': {'key': 'output_value'},
'created_at': timeutils.utcnow(True),
'updated_at': None,
'data': {'data_key': 'data_value'},
'user': USER_ID,
'project': PROJECT_ID,
}
def _verify_new_action(self, obj, target, action):
self.assertIsNone(obj.id)
self.assertEqual('', obj.name)
self.assertEqual(target, obj.target)
self.assertEqual(action, obj.action)
self.assertEqual('', obj.cause)
self.assertIsNone(obj.owner)
self.assertEqual(-1, obj.interval)
self.assertIsNone(obj.start_time)
self.assertIsNone(obj.end_time)
self.assertEqual(cfg.CONF.default_action_timeout, obj.timeout)
self.assertEqual('INIT', obj.status)
self.assertEqual('', obj.status_reason)
self.assertEqual({}, obj.inputs)
self.assertEqual({}, obj.outputs)
self.assertIsNone(obj.created_at)
self.assertIsNone(obj.updated_at)
self.assertEqual({}, obj.data)
@mock.patch.object(node_mod.Node, 'load')
@mock.patch.object(cluster_mod.Cluster, 'load')
def test_action_new(self, mock_n_load, mock_c_load):
for action in ['CLUSTER_CREATE', 'NODE_CREATE', 'WHAT_EVER']:
obj = ab.Action(OBJID, action, self.ctx)
self._verify_new_action(obj, OBJID, action)
def test_action_init_with_values(self):
values = copy.deepcopy(self.action_values)
values['id'] = 'FAKE_ID'
values['created_at'] = 'FAKE_CREATED_TIME'
values['updated_at'] = 'FAKE_UPDATED_TIME'
obj = ab.Action(OBJID, 'OBJECT_ACTION', self.ctx, **values)
self.assertEqual('FAKE_ID', obj.id)
self.assertEqual('FAKE_NAME', obj.name)
self.assertEqual(OBJID, obj.target)
self.assertEqual('FAKE_CAUSE', obj.cause)
self.assertEqual(OWNER_ID, obj.owner)
self.assertEqual(60, obj.interval)
self.assertEqual(0, obj.start_time)
self.assertEqual(0, obj.end_time)
self.assertEqual(120, obj.timeout)
self.assertEqual('FAKE_STATUS', obj.status)
self.assertEqual('FAKE_STATUS_REASON', obj.status_reason)
self.assertEqual({'param': 'value'}, obj.inputs)
self.assertEqual({'key': 'output_value'}, obj.outputs)
self.assertEqual('FAKE_CREATED_TIME', obj.created_at)
self.assertEqual('FAKE_UPDATED_TIME', obj.updated_at)
self.assertEqual({'data_key': 'data_value'}, obj.data)
def test_action_store_for_create(self):
values = copy.deepcopy(self.action_values)
obj = ab.Action(OBJID, 'OBJECT_ACTION', self.ctx, **values)
self.assertEqual(common_utils.isotime(values['created_at']),
common_utils.isotime(obj.created_at))
self.assertIsNone(obj.updated_at)
# store for creation
res = obj.store(self.ctx)
self.assertIsNotNone(res)
self.assertEqual(obj.id, res)
self.assertIsNotNone(obj.created_at)
self.assertIsNone(obj.updated_at)
def test_action_store_for_update(self):
values = copy.deepcopy(self.action_values)
obj = ab.Action(OBJID, 'OBJECT_ACTION', self.ctx, **values)
obj_id = obj.store(self.ctx)
self.assertIsNotNone(obj_id)
self.assertIsNotNone(obj.created_at)
self.assertIsNone(obj.updated_at)
# store for creation
res = obj.store(self.ctx)
self.assertIsNotNone(res)
self.assertEqual(obj_id, res)
self.assertEqual(obj.id, res)
self.assertIsNotNone(obj.created_at)
self.assertIsNotNone(obj.updated_at)
def test_from_db_record(self):
values = copy.deepcopy(self.action_values)
obj = ab.Action(OBJID, 'OBJECT_ACTION', self.ctx, **values)
obj.store(self.ctx)
record = ao.Action.get(self.ctx, obj.id)
action_obj = ab.Action._from_object(record)
self.assertIsInstance(action_obj, ab.Action)
self.assertEqual(obj.id, action_obj.id)
self.assertEqual(obj.action, action_obj.action)
self.assertEqual(obj.name, action_obj.name)
self.assertEqual(obj.target, action_obj.target)
self.assertEqual(obj.cause, action_obj.cause)
self.assertEqual(obj.owner, action_obj.owner)
self.assertEqual(obj.interval, action_obj.interval)
self.assertEqual(obj.start_time, action_obj.start_time)
self.assertEqual(obj.end_time, action_obj.end_time)
self.assertEqual(obj.timeout, action_obj.timeout)
self.assertEqual(obj.status, action_obj.status)
self.assertEqual(obj.status_reason, action_obj.status_reason)
self.assertEqual(obj.inputs, action_obj.inputs)
self.assertEqual(obj.outputs, action_obj.outputs)
self.assertEqual(common_utils.isotime(obj.created_at),
common_utils.isotime(action_obj.created_at))
self.assertEqual(obj.updated_at, action_obj.updated_at)
self.assertEqual(obj.data, action_obj.data)
self.assertEqual(obj.user, action_obj.user)
self.assertEqual(obj.project, action_obj.project)
self.assertEqual(obj.domain, action_obj.domain)
def test_from_db_record_with_empty_fields(self):
values = copy.deepcopy(self.action_values)
del values['inputs']
del values['outputs']
obj = ab.Action(OBJID, 'OBJECT_ACTION', self.ctx, **values)
obj.store(self.ctx)
record = ao.Action.get(self.ctx, obj.id)
action_obj = ab.Action._from_object(record)
self.assertEqual({}, action_obj.inputs)
self.assertEqual({}, action_obj.outputs)
def test_load(self):
values = copy.deepcopy(self.action_values)
obj = ab.Action(OBJID, 'OBJECT_ACTION', self.ctx, **values)
obj.store(self.ctx)
result = ab.Action.load(self.ctx, obj.id, None)
# no need to do a thorough test here
self.assertEqual(obj.id, result.id)
self.assertEqual(obj.action, result.action)
db_action = ao.Action.get(self.ctx, obj.id)
result = ab.Action.load(self.ctx, None, db_action)
# no need to do a thorough test here
self.assertEqual(obj.id, result.id)
self.assertEqual(obj.action, result.action)
def test_load_not_found(self):
# not found due to bad identity
ex = self.assertRaises(exception.ResourceNotFound,
ab.Action.load,
self.ctx, 'non-existent', None)
self.assertEqual("The action 'non-existent' could not be "
"found.", six.text_type(ex))
# not found due to no object
self.patchobject(ao.Action, 'get', return_value=None)
ex = self.assertRaises(exception.ResourceNotFound,
ab.Action.load,
self.ctx, 'whatever', None)
self.assertEqual("The action 'whatever' could not be found.",
six.text_type(ex))
@mock.patch.object(ab.Action, 'store')
def test_action_create(self, mock_store):
mock_store.return_value = 'FAKE_ID'
result = ab.Action.create(self.ctx, OBJID, 'CLUSTER_DANCE',
name='test')
self.assertEqual('FAKE_ID', result)
mock_store.assert_called_once_with(self.ctx)
def test_action_delete(self):
result = ab.Action.delete(self.ctx, 'non-existent')
self.assertIsNone(result)
values = copy.deepcopy(self.action_values)
action1 = ab.Action(OBJID, 'OBJECT_ACTION', self.ctx, **values)
action1.store(self.ctx)
result = ab.Action.delete(self.ctx, action1.id)
self.assertIsNone(result)
@mock.patch.object(ao.Action, 'delete')
def test_action_delete_db_call(self, mock_call):
# test db api call
ab.Action.delete(self.ctx, 'FAKE_ID')
mock_call.assert_called_once_with(self.ctx, 'FAKE_ID')
@mock.patch.object(ao.Action, 'signal')
def test_action_signal_bad_command(self, mock_call):
values = copy.deepcopy(self.action_values)
action1 = ab.Action(OBJID, 'OBJECT_ACTION', self.ctx, **values)
action1.store(self.ctx)
result = action1.signal('BOGUS')
self.assertIsNone(result)
self.assertEqual(0, mock_call.call_count)
@mock.patch.object(ao.Action, 'signal')
def test_action_signal_cancel(self, mock_call):
values = copy.deepcopy(self.action_values)
action = ab.Action(OBJID, 'OBJECT_ACTION', self.ctx, **values)
action.store(self.ctx)
expected = [action.INIT, action.WAITING, action.READY, action.RUNNING]
for status in expected:
action.status = status
result = action.signal(action.SIG_CANCEL)
self.assertIsNone(result)
self.assertEqual(1, mock_call.call_count)
mock_call.reset_mock()
invalid = [action.SUSPENDED, action.SUCCEEDED, action.CANCELLED,
action.FAILED]
for status in invalid:
action.status = status
result = action.signal(action.SIG_CANCEL)
self.assertIsNone(result)
self.assertEqual(0, mock_call.call_count)
mock_call.reset_mock()
@mock.patch.object(ao.Action, 'signal')
def test_action_signal_suspend(self, mock_call):
action = ab.Action(OBJID, 'OBJECT_ACTION', self.ctx, id=ACTION_ID)
expected = [action.RUNNING]
for status in expected:
action.status = status
result = action.signal(action.SIG_SUSPEND)
self.assertIsNone(result)
self.assertEqual(1, mock_call.call_count)
mock_call.reset_mock()
invalid = [action.INIT, action.WAITING, action.READY, action.SUSPENDED,
action.SUCCEEDED, action.CANCELLED, action.FAILED]
for status in invalid:
action.status = status
result = action.signal(action.SIG_SUSPEND)
self.assertIsNone(result)
self.assertEqual(0, mock_call.call_count)
mock_call.reset_mock()
@mock.patch.object(ao.Action, 'signal')
def test_action_signal_resume(self, mock_call):
action = ab.Action(OBJID, 'OBJECT_ACTION', self.ctx, id=ACTION_ID)
expected = [action.SUSPENDED]
for status in expected:
action.status = status
result = action.signal(action.SIG_RESUME)
self.assertIsNone(result)
self.assertEqual(1, mock_call.call_count)
mock_call.reset_mock()
invalid = [action.INIT, action.WAITING, action.READY, action.RUNNING,
action.SUCCEEDED, action.CANCELLED, action.FAILED]
for status in invalid:
action.status = status
result = action.signal(action.SIG_RESUME)
self.assertIsNone(result)
self.assertEqual(0, mock_call.call_count)
mock_call.reset_mock()
def test_execute_default(self):
action = ab.Action.__new__(DummyAction, OBJID, 'BOOM', self.ctx)
self.assertRaises(NotImplementedError,
action.execute)
@mock.patch.object(EVENT, 'info')
@mock.patch.object(EVENT, 'error')
@mock.patch.object(EVENT, 'warning')
@mock.patch.object(ao.Action, 'mark_succeeded')
@mock.patch.object(ao.Action, 'mark_failed')
@mock.patch.object(ao.Action, 'mark_cancelled')
@mock.patch.object(ao.Action, 'abandon')
def test_set_status(self, mock_abandon, mark_cancel, mark_fail,
mark_succeed, mock_event, mock_error, mock_info):
action = ab.Action(OBJID, 'OBJECT_ACTION', self.ctx, id='FAKE_ID')
action.entity = mock.Mock()
action.set_status(action.RES_OK, 'FAKE_REASON')
self.assertEqual(action.SUCCEEDED, action.status)
self.assertEqual('FAKE_REASON', action.status_reason)
mark_succeed.assert_called_once_with(action.context, 'FAKE_ID',
mock.ANY)
action.set_status(action.RES_ERROR, 'FAKE_ERROR')
self.assertEqual(action.FAILED, action.status)
self.assertEqual('FAKE_ERROR', action.status_reason)
mark_fail.assert_called_once_with(action.context, 'FAKE_ID', mock.ANY,
'FAKE_ERROR')
mark_fail.reset_mock()
action.set_status(action.RES_TIMEOUT, 'TIMEOUT_ERROR')
self.assertEqual(action.FAILED, action.status)
self.assertEqual('TIMEOUT_ERROR', action.status_reason)
mark_fail.assert_called_once_with(action.context, 'FAKE_ID', mock.ANY,
'TIMEOUT_ERROR')
mark_fail.reset_mock()
action.set_status(action.RES_CANCEL, 'CANCELLED')
self.assertEqual(action.CANCELLED, action.status)
self.assertEqual('CANCELLED', action.status_reason)
mark_cancel.assert_called_once_with(action.context, 'FAKE_ID',
mock.ANY)
mark_fail.reset_mock()
action.set_status(action.RES_RETRY, 'BUSY')
self.assertEqual(action.READY, action.status)
self.assertEqual('BUSY', action.status_reason)
mock_abandon.assert_called_once_with(action.context, 'FAKE_ID')
@mock.patch.object(EVENT, 'info')
@mock.patch.object(EVENT, 'error')
@mock.patch.object(EVENT, 'warning')
@mock.patch.object(ao.Action, 'mark_succeeded')
@mock.patch.object(ao.Action, 'mark_failed')
@mock.patch.object(ao.Action, 'abandon')
def test_set_status_dump_event(self, mock_abandon, mark_fail,
mark_succeed, mock_warning, mock_error,
mock_info):
action = ab.Action(OBJID, 'OBJECT_ACTION', self.ctx, id='FAKE_ID')
action.entity = mock.Mock()
action.set_status(action.RES_OK, 'FAKE_SUCCEEDED')
mock_info.assert_called_once_with(action, consts.PHASE_END,
'FAKE_SUCCEEDED')
action.set_status(action.RES_ERROR, 'FAKE_ERROR')
mock_error.assert_called_once_with(action, consts.PHASE_ERROR,
'FAKE_ERROR')
action.set_status(action.RES_RETRY, 'FAKE_RETRY')
mock_warning.assert_called_once_with(action, consts.PHASE_ERROR,
'FAKE_RETRY')
@mock.patch.object(EVENT, 'info')
@mock.patch.object(EVENT, 'error')
@mock.patch.object(EVENT, 'warning')
@mock.patch.object(ao.Action, 'mark_succeeded')
@mock.patch.object(ao.Action, 'mark_failed')
@mock.patch.object(ao.Action, 'abandon')
def test_set_status_reason_is_none(self, mock_abandon, mark_fail,
mark_succeed, mock_warning, mock_error,
mock_info):
action = ab.Action(OBJID, 'OBJECT_ACTION', self.ctx, id='FAKE_ID')
action.entity = mock.Mock()
action.set_status(action.RES_OK)
mock_info.assert_called_once_with(action, consts.PHASE_END,
'SUCCEEDED')
action.set_status(action.RES_ERROR)
mock_error.assert_called_once_with(action, consts.PHASE_ERROR,
'ERROR')
action.set_status(action.RES_RETRY)
mock_warning.assert_called_once_with(action, consts.PHASE_ERROR,
'RETRY')
@mock.patch.object(ao.Action, 'check_status')
def test_get_status(self, mock_get):
mock_get.return_value = 'FAKE_STATUS'
action = ab.Action(OBJID, 'OBJECT_ACTION', self.ctx)
action.id = 'FAKE_ID'
res = action.get_status()
self.assertEqual('FAKE_STATUS', res)
self.assertEqual('FAKE_STATUS', action.status)
mock_get.assert_called_once_with(action.context, 'FAKE_ID', mock.ANY)
@mock.patch.object(ab, 'wallclock')
def test_is_timeout(self, mock_time):
action = ab.Action.__new__(DummyAction, 'OBJ', 'BOOM', self.ctx)
action.start_time = 1
action.timeout = 10
mock_time.return_value = 9
self.assertFalse(action.is_timeout())
mock_time.return_value = 10
self.assertFalse(action.is_timeout())
mock_time.return_value = 11
self.assertFalse(action.is_timeout())
mock_time.return_value = 12
self.assertTrue(action.is_timeout())
@mock.patch.object(EVENT, 'debug')
def test_check_signal_timeout(self, mock_debug):
action = ab.Action(OBJID, 'OBJECT_ACTION', self.ctx, id='FAKE_ID',
timeout=10)
action.entity = mock.Mock()
self.patchobject(action, 'is_timeout', return_value=True)
res = action._check_signal()
self.assertEqual(action.RES_TIMEOUT, res)
@mock.patch.object(ao.Action, 'signal_query')
def test_check_signal_signals_caught(self, mock_query):
action = ab.Action(OBJID, 'OBJECT_ACTION', self.ctx)
action.id = 'FAKE_ID'
action.timeout = 100
self.patchobject(action, 'is_timeout', return_value=False)
sig_cmd = mock.Mock()
mock_query.return_value = sig_cmd
res = action._check_signal()
self.assertEqual(sig_cmd, res)
mock_query.assert_called_once_with(action.context, 'FAKE_ID')
@mock.patch.object(ao.Action, 'signal_query')
def test_is_cancelled(self, mock_query):
action = ab.Action(OBJID, 'OBJECT_ACTION', self.ctx)
action.id = 'FAKE_ID'
action.timeout = 100
self.patchobject(action, 'is_timeout', return_value=False)
mock_query.return_value = action.SIG_CANCEL
res = action.is_cancelled()
self.assertTrue(res)
mock_query.assert_called_once_with(action.context, 'FAKE_ID')
mock_query.reset_mock()
mock_query.return_value = None
res = action.is_cancelled()
self.assertFalse(res)
mock_query.assert_called_once_with(action.context, 'FAKE_ID')
@mock.patch.object(ao.Action, 'signal_query')
def test_is_suspended(self, mock_query):
action = ab.Action(OBJID, 'OBJECT_ACTION', self.ctx)
action.id = 'FAKE_ID'
action.timeout = 100
self.patchobject(action, 'is_timeout', return_value=False)
mock_query.return_value = action.SIG_SUSPEND
res = action.is_suspended()
self.assertTrue(res)
mock_query.assert_called_once_with(action.context, 'FAKE_ID')
mock_query.reset_mock()
mock_query.return_value = 'OTHERS'
res = action.is_suspended()
self.assertFalse(res)
mock_query.assert_called_once_with(action.context, 'FAKE_ID')
@mock.patch.object(ao.Action, 'signal_query')
def test_is_resumed(self, mock_query):
action = ab.Action(OBJID, 'OBJECT_ACTION', self.ctx)
action.id = 'FAKE_ID'
action.timeout = 100
self.patchobject(action, 'is_timeout', return_value=False)
mock_query.return_value = action.SIG_RESUME
res = action.is_resumed()
self.assertTrue(res)
mock_query.assert_called_once_with(action.context, 'FAKE_ID')
mock_query.reset_mock()
mock_query.return_value = 'OTHERS'
res = action.is_resumed()
self.assertFalse(res)
mock_query.assert_called_once_with(action.context, 'FAKE_ID')
@mock.patch.object(cpo.ClusterPolicy, 'get_all')
def test_policy_check_target_invalid(self, mock_load):
action = ab.Action(OBJID, 'OBJECT_ACTION', self.ctx)
res = action.policy_check('FAKE_CLUSTER', 'WHEN')
self.assertIsNone(res)
self.assertEqual(0, mock_load.call_count)
@mock.patch.object(cpo.ClusterPolicy, 'get_all')
def test_policy_check_no_bindings(self, mock_load):
action = ab.Action(OBJID, 'OBJECT_ACTION', self.ctx)
mock_load.return_value = []
res = action.policy_check('FAKE_CLUSTER', 'BEFORE')
self.assertIsNone(res)
self.assertEqual(policy_mod.CHECK_OK, action.data['status'])
mock_load.assert_called_once_with(action.context, 'FAKE_CLUSTER',
sort='priority',
filters={'enabled': True})
@mock.patch.object(dobj.Dependency, 'get_depended')
@mock.patch.object(dobj.Dependency, 'get_dependents')
def test_action_to_dict(self, mock_dep_by, mock_dep_on):
mock_dep_on.return_value = ['ACTION_1']
mock_dep_by.return_value = ['ACTION_2']
action = ab.Action(OBJID, 'OBJECT_ACTION', self.ctx,
**self.action_values)
action.id = 'FAKE_ID'
ts = common_utils.isotime(self.action_values['created_at'])
expected = {
'id': 'FAKE_ID',
'name': 'FAKE_NAME',
'action': 'OBJECT_ACTION',
'target': OBJID,
'cause': 'FAKE_CAUSE',
'owner': OWNER_ID,
'interval': 60,
'start_time': 0,
'end_time': 0,
'timeout': 120,
'status': 'FAKE_STATUS',
'status_reason': 'FAKE_STATUS_REASON',
'inputs': {'param': 'value'},
'outputs': {'key': 'output_value'},
'depends_on': ['ACTION_1'],
'depended_by': ['ACTION_2'],
'created_at': ts,
'updated_at': None,
'data': {'data_key': 'data_value'},
'user': USER_ID,
'project': PROJECT_ID,
}
res = action.to_dict()
self.assertEqual(expected, res)
mock_dep_on.assert_called_once_with(action.context, 'FAKE_ID')
mock_dep_by.assert_called_once_with(action.context, 'FAKE_ID')
class ActionPolicyCheckTest(base.SenlinTestCase):
def setUp(self):
super(ActionPolicyCheckTest, self).setUp()
self.ctx = utils.dummy_context()
environment.global_env().register_policy('DummyPolicy',
fakes.TestPolicy)
def _create_policy(self):
values = {
'user': self.ctx.user,
'project': self.ctx.project,
}
policy = fakes.TestPolicy('DummyPolicy', 'test-policy', **values)
policy.store(self.ctx)
return policy
def _create_cp_binding(self, cluster_id, policy_id):
return cpo.ClusterPolicy(cluster_id=cluster_id, policy_id=policy_id,
enabled=True, id=uuidutils.generate_uuid(),
last_op=None)
@mock.patch.object(policy_mod.Policy, 'post_op')
@mock.patch.object(policy_mod.Policy, 'pre_op')
@mock.patch.object(cpo.ClusterPolicy, 'get_all')
@mock.patch.object(policy_mod.Policy, 'load')
def test_policy_check_missing_target(self, mock_load, mock_load_all,
mock_pre_op, mock_post_op):
cluster_id = CLUSTER_ID
# Note: policy is mocked
spec = {
'type': 'TestPolicy',
'version': '1.0',
'properties': {'KEY2': 5},
}
policy = fakes.TestPolicy('test-policy', spec)
policy.id = uuidutils.generate_uuid()
policy.TARGET = [('BEFORE', 'OBJECT_ACTION')]
# Note: policy binding is created but not stored
pb = self._create_cp_binding(cluster_id, policy.id)
self.assertIsNone(pb.last_op)
mock_load_all.return_value = [pb]
mock_load.return_value = policy
mock_pre_op.return_value = None
mock_post_op.return_value = None
action = ab.Action(cluster_id, 'OBJECT_ACTION_1', self.ctx)
res = action.policy_check(cluster_id, 'AFTER')
self.assertIsNone(res)
self.assertEqual(policy_mod.CHECK_OK, action.data['status'])
mock_load_all.assert_called_once_with(
action.context, cluster_id, sort='priority',
filters={'enabled': True})
mock_load.assert_called_once_with(action.context, policy.id)
# last_op was updated anyway
self.assertIsNotNone(pb.last_op)
        # neither pre_op nor post_op was called, because the target did not match
self.assertEqual(0, mock_pre_op.call_count)
self.assertEqual(0, mock_post_op.call_count)
def test__check_result_true(self):
cluster_id = CLUSTER_ID
action = ab.Action(cluster_id, 'OBJECT_ACTION', self.ctx)
action.data['status'] = policy_mod.CHECK_OK
action.data['reason'] = "Completed policy checking."
res = action._check_result('FAKE_POLICY_NAME')
self.assertTrue(res)
def test__check_result_false(self):
cluster_id = CLUSTER_ID
action = ab.Action(cluster_id, 'OBJECT_ACTION', self.ctx)
action.data['status'] = policy_mod.CHECK_ERROR
reason = ("Policy '%s' cooldown is still in progress." %
'FAKE_POLICY_2')
action.data['reason'] = reason
res = action._check_result('FAKE_POLICY_NAME')
reason = ("Failed policy '%(name)s': %(reason)s"
) % {'name': 'FAKE_POLICY_NAME', 'reason': reason}
self.assertFalse(res)
@mock.patch.object(cpo.ClusterPolicy, 'get_all')
@mock.patch.object(policy_mod.Policy, 'load')
def test_policy_check_pre_op(self, mock_load, mock_load_all):
cluster_id = CLUSTER_ID
# Note: policy is mocked
spec = {
'type': 'TestPolicy',
'version': '1.0',
'properties': {'KEY2': 5},
}
policy = fakes.TestPolicy('test-policy', spec)
policy.id = uuidutils.generate_uuid()
policy.TARGET = [('BEFORE', 'OBJECT_ACTION')]
# Note: policy binding is created but not stored
pb = self._create_cp_binding(cluster_id, policy.id)
self.assertIsNone(pb.last_op)
mock_load_all.return_value = [pb]
mock_load.return_value = policy
entity = mock.Mock()
action = ab.Action(cluster_id, 'OBJECT_ACTION', self.ctx)
action.entity = entity
res = action.policy_check(cluster_id, 'BEFORE')
self.assertIsNone(res)
self.assertEqual(policy_mod.CHECK_OK, action.data['status'])
mock_load_all.assert_called_once_with(
action.context, cluster_id, sort='priority',
filters={'enabled': True})
mock_load.assert_called_once_with(action.context, policy.id)
# last_op was not updated
self.assertIsNone(pb.last_op)
@mock.patch.object(cpo.ClusterPolicy, 'get_all')
@mock.patch.object(policy_mod.Policy, 'load')
def test_policy_check_post_op(self, mock_load, mock_load_all):
cluster_id = CLUSTER_ID
# Note: policy is mocked
policy = mock.Mock(id=uuidutils.generate_uuid(), cooldown=0,
TARGET=[('AFTER', 'OBJECT_ACTION')])
# Note: policy binding is created but not stored
pb = self._create_cp_binding(cluster_id, policy.id)
self.assertIsNone(pb.last_op)
mock_load_all.return_value = [pb]
mock_load.return_value = policy
entity = mock.Mock()
action = ab.Action(cluster_id, 'OBJECT_ACTION', self.ctx)
action.entity = entity
res = action.policy_check(CLUSTER_ID, 'AFTER')
self.assertIsNone(res)
self.assertEqual(policy_mod.CHECK_OK, action.data['status'])
mock_load_all.assert_called_once_with(
action.context, cluster_id, sort='priority',
filters={'enabled': True})
mock_load.assert_called_once_with(action.context, policy.id)
# last_op was updated for POST check
self.assertIsNotNone(pb.last_op)
        # post_op was called, but pre_op was not called
self.assertEqual(0, policy.pre_op.call_count)
policy.post_op.assert_called_once_with(cluster_id, action)
@mock.patch.object(cpo.ClusterPolicy, 'cooldown_inprogress')
@mock.patch.object(cpo.ClusterPolicy, 'get_all')
@mock.patch.object(policy_mod.Policy, 'load')
def test_policy_check_cooldown_inprogress(self, mock_load, mock_load_all,
mock_inprogress):
cluster_id = CLUSTER_ID
# Note: policy is mocked
policy_id = uuidutils.generate_uuid()
policy = mock.Mock(id=policy_id, TARGET=[('AFTER', 'OBJECT_ACTION')])
# Note: policy binding is created but not stored
pb = self._create_cp_binding(cluster_id, policy.id)
mock_inprogress.return_value = True
mock_load_all.return_value = [pb]
mock_load.return_value = policy
action = ab.Action(cluster_id, 'OBJECT_ACTION', self.ctx)
# Do it
res = action.policy_check(CLUSTER_ID, 'AFTER')
self.assertIsNone(res)
self.assertEqual(policy_mod.CHECK_ERROR, action.data['status'])
self.assertEqual(
'Policy %s cooldown is still in progress.' % policy_id,
six.text_type(action.data['reason']))
mock_load_all.assert_called_once_with(
action.context, cluster_id, sort='priority',
filters={'enabled': True})
mock_load.assert_called_once_with(action.context, policy.id)
# last_op was updated for POST check
self.assertIsNotNone(pb.last_op)
        # neither pre_op nor post_op was called, due to cooldown
self.assertEqual(0, policy.pre_op.call_count)
self.assertEqual(0, policy.post_op.call_count)
@mock.patch.object(cpo.ClusterPolicy, 'get_all')
@mock.patch.object(policy_mod.Policy, 'load')
@mock.patch.object(ab.Action, '_check_result')
def test_policy_check_abort_in_middle(self, mock_check, mock_load,
mock_load_all):
cluster_id = CLUSTER_ID
# Note: both policies are mocked
policy1 = mock.Mock(id=uuidutils.generate_uuid(), cooldown=0,
TARGET=[('AFTER', 'OBJECT_ACTION')])
policy1.name = 'P1'
policy2 = mock.Mock(id=uuidutils.generate_uuid(), cooldown=0,
TARGET=[('AFTER', 'OBJECT_ACTION')])
policy2.name = 'P2'
action = ab.Action(cluster_id, 'OBJECT_ACTION', self.ctx)
# Note: policy binding is created but not stored
pb1 = self._create_cp_binding(cluster_id, policy1.id)
pb2 = self._create_cp_binding(cluster_id, policy2.id)
mock_load_all.return_value = [pb1, pb2]
# mock return value for two calls
mock_load.side_effect = [policy1, policy2]
mock_check.side_effect = [False, True]
res = action.policy_check(cluster_id, 'AFTER')
self.assertIsNone(res)
# post_op from policy1 was called, but post_op from policy2 was not
policy1.post_op.assert_called_once_with(cluster_id, action)
self.assertEqual(0, policy2.post_op.call_count)
mock_load_all.assert_called_once_with(
action.context, cluster_id, sort='priority',
filters={'enabled': True})
calls = [mock.call(action.context, policy1.id)]
mock_load.assert_has_calls(calls)
class ActionProcTest(base.SenlinTestCase):
def setUp(self):
super(ActionProcTest, self).setUp()
self.ctx = utils.dummy_context()
@mock.patch.object(EVENT, 'info')
@mock.patch.object(ab.Action, 'load')
@mock.patch.object(ao.Action, 'mark_succeeded')
def test_action_proc_successful(self, mock_mark, mock_load,
mock_event_info):
action = ab.Action(OBJID, 'OBJECT_ACTION', self.ctx)
mock_obj = mock.Mock()
action.entity = mock_obj
self.patchobject(action, 'execute',
return_value=(action.RES_OK, 'BIG SUCCESS'))
mock_status = self.patchobject(action, 'set_status')
mock_load.return_value = action
res = ab.ActionProc(self.ctx, 'ACTION_ID')
self.assertTrue(res)
mock_load.assert_called_once_with(self.ctx, action_id='ACTION_ID',
project_safe=False)
mock_event_info.assert_called_once_with(action, 'start')
mock_status.assert_called_once_with(action.RES_OK, 'BIG SUCCESS')
@mock.patch.object(EVENT, 'info')
@mock.patch.object(ab.Action, 'load')
@mock.patch.object(ao.Action, 'mark_failed')
def test_action_proc_failed_error(self, mock_mark, mock_load, mock_info):
action = ab.Action(OBJID, 'CLUSTER_ACTION', self.ctx, id=ACTION_ID)
action.entity = mock.Mock(id=CLUSTER_ID, name='fake-cluster')
self.patchobject(action, 'execute', side_effect=Exception('Boom!'))
mock_status = self.patchobject(action, 'set_status')
mock_load.return_value = action
res = ab.ActionProc(self.ctx, 'ACTION')
self.assertFalse(res)
mock_load.assert_called_once_with(self.ctx, action_id='ACTION',
project_safe=False)
mock_info.assert_called_once_with(action, 'start')
mock_status.assert_called_once_with(action.RES_ERROR, 'Boom!')
/maskrcnn_benchmark/data/transforms/transforms.py | kevincao91/maskrcnn | MIT | Python
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import random
import torch
import torchvision
from torchvision.transforms import functional as F
class Compose(object):
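    """Compose several transforms; each one receives and returns both the
    image and its detection target so annotations stay in sync."""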
def __init__(self, transforms):
self.transforms = transforms
def __call__(self, image, target):
for t in self.transforms:
image, target = t(image, target)
return image, target
def __repr__(self):
format_string = self.__class__.__name__ + "("
for t in self.transforms:
format_string += "\n"
format_string += " {0}".format(t)
format_string += "\n)"
return format_string
class Resize(object):
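    """Resize the image (and target) so the shorter side matches a randomly
    chosen min_size while the longer side never exceeds max_size."""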
def __init__(self, min_size, max_size):
if not isinstance(min_size, (list, tuple)):
min_size = (min_size,)
self.min_size = min_size
self.max_size = max_size
# modified from torchvision to add support for max size
def get_size(self, image_size):
w, h = image_size
size = random.choice(self.min_size)
max_size = self.max_size
if max_size is not None:
min_original_size = float(min((w, h)))
max_original_size = float(max((w, h)))
if max_original_size / min_original_size * size > max_size:
size = int(round(max_size * min_original_size / max_original_size))
if (w <= h and w == size) or (h <= w and h == size):
return (h, w)
if w < h:
ow = size
oh = int(size * h / w)
else:
oh = size
ow = int(size * w / h)
return (oh, ow)
def __call__(self, image, target=None):
size = self.get_size(image.size)
image = F.resize(image, size)
if target is None:
return image
target = target.resize(image.size)
return image, target
class RandomHorizontalFlip(object):
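    """Randomly mirror the image left-right; target.transpose(0) is assumed
    to flip the boxes/masks along the same horizontal axis."""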
def __init__(self, prob=0.5):
self.prob = prob
def __call__(self, image, target):
if random.random() < self.prob:
image = F.hflip(image)
target = target.transpose(0)
return image, target
class RandomVerticalFlip(object):
def __init__(self, prob=0.5):
self.prob = prob
def __call__(self, image, target):
if random.random() < self.prob:
image = F.vflip(image)
target = target.transpose(1)
return image, target
class ColorJitter(object):
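    """Apply torchvision's ColorJitter to the image only; the target passes
    through unchanged."""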
def __init__(self,
brightness=None,
contrast=None,
saturation=None,
hue=None,
):
self.color_jitter = torchvision.transforms.ColorJitter(
brightness=brightness,
contrast=contrast,
saturation=saturation,
hue=hue,)
def __call__(self, image, target):
image = self.color_jitter(image)
return image, target
class ToTensor(object):
def __call__(self, image, target):
return F.to_tensor(image), target
class Normalize(object):
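    """Normalize a tensor image with mean/std; when to_bgr255 is set, first
    reorder channels RGB->BGR and rescale to 0-255 (a convention inherited
    from Caffe2-trained detection weights)."""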
def __init__(self, mean, std, to_bgr255=True):
self.mean = mean
self.std = std
self.to_bgr255 = to_bgr255
def __call__(self, image, target=None):
if self.to_bgr255:
image = image[[2, 1, 0]] * 255
image = F.normalize(image, mean=self.mean, std=self.std)
if target is None:
return image
return image, target
/ncbly/spiders/spider.py | hristo-grudev/ncbly | no_license | Python
import scrapy
from scrapy import FormRequest
from scrapy.loader import ItemLoader
from ..items import NcblyItem
from itemloaders.processors import TakeFirst
class NcblySpider(scrapy.Spider):
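    """Crawl the ncb.ly media-center news listing and yield one item per
    article (title, description, date)."""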
name = 'ncbly'
start_urls = ['https://www.ncb.ly/en/media-center/news/']
def parse(self, response):
post_links = response.xpath('//h4/a/@href').getall()
yield from response.follow_all(post_links, self.parse_post)
next_page = response.xpath('//a[text()="Next"]/@href').getall()
if next_page:
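            # The listing paginates via an ASP.NET postback, so "clicking"
            # Next means re-posting the form with the pager's __EVENTTARGET.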
yield FormRequest.from_response(response, formdata={
'__EVENTTARGET': 'ctl00$cph_body$pgrCustomRepeater$ctl02$ctl00'}, callback=self.parse)
def parse_post(self, response):
title = response.xpath('//h1[@class="new-mc-big-title"]/text()').get()
description = response.xpath('//div[@class="col col_8_of_12 mc-body"]//text()[normalize-space()]').getall()
description = [p.strip() for p in description if '{' not in p]
description = ' '.join(description).strip()
date = response.xpath('//div[@class="new-mc-big-date"]/text()').get()
item = ItemLoader(item=NcblyItem(), response=response)
item.default_output_processor = TakeFirst()
item.add_value('title', title)
item.add_value('description', description)
item.add_value('date', date)
return item.load_item()
/tests/components/modbus/test_init.py | whtsky/home-assistant | Apache-2.0 | Python
"""The tests for the Modbus init.
This file is responsible for testing:
- pymodbus API
- Functionality of class ModbusHub
- Coverage 100%:
__init__.py
const.py
modbus.py
validators.py
baseplatform.py (only BasePlatform)
It uses binary_sensors/sensors to do black box testing of the read calls.
"""
from datetime import timedelta
import logging
from unittest import mock
from freezegun.api import FrozenDateTimeFactory
from pymodbus.exceptions import ModbusException
from pymodbus.pdu import ExceptionResponse, IllegalFunctionRequest
import pytest
import voluptuous as vol
from homeassistant import config as hass_config
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN
from homeassistant.components.modbus.const import (
ATTR_ADDRESS,
ATTR_HUB,
ATTR_SLAVE,
ATTR_UNIT,
ATTR_VALUE,
CALL_TYPE_COIL,
CALL_TYPE_DISCRETE,
CALL_TYPE_REGISTER_HOLDING,
CALL_TYPE_REGISTER_INPUT,
CALL_TYPE_WRITE_COIL,
CALL_TYPE_WRITE_COILS,
CALL_TYPE_WRITE_REGISTER,
CALL_TYPE_WRITE_REGISTERS,
CONF_BAUDRATE,
CONF_BYTESIZE,
CONF_DATA_TYPE,
CONF_INPUT_TYPE,
CONF_MSG_WAIT,
CONF_PARITY,
CONF_SLAVE_COUNT,
CONF_STOPBITS,
CONF_SWAP,
CONF_SWAP_BYTE,
CONF_SWAP_WORD,
DEFAULT_SCAN_INTERVAL,
MODBUS_DOMAIN as DOMAIN,
RTUOVERTCP,
SERIAL,
SERVICE_RESTART,
SERVICE_STOP,
SERVICE_WRITE_COIL,
SERVICE_WRITE_REGISTER,
TCP,
UDP,
DataType,
)
from homeassistant.components.modbus.validators import (
duplicate_entity_validator,
duplicate_modbus_validator,
number_validator,
struct_validator,
)
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.const import (
ATTR_STATE,
CONF_ADDRESS,
CONF_BINARY_SENSORS,
CONF_COUNT,
CONF_DELAY,
CONF_HOST,
CONF_METHOD,
CONF_NAME,
CONF_PORT,
CONF_SCAN_INTERVAL,
CONF_SENSORS,
CONF_SLAVE,
CONF_STRUCTURE,
CONF_TIMEOUT,
CONF_TYPE,
EVENT_HOMEASSISTANT_STOP,
SERVICE_RELOAD,
STATE_ON,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from .conftest import (
TEST_ENTITY_NAME,
TEST_MODBUS_HOST,
TEST_MODBUS_NAME,
TEST_PORT_SERIAL,
TEST_PORT_TCP,
ReadResult,
)
from tests.common import async_fire_time_changed, get_fixture_path
@pytest.fixture(name="mock_modbus_with_pymodbus")
async def mock_modbus_with_pymodbus_fixture(hass, caplog, do_config, mock_pymodbus):
"""Load integration modbus using mocked pymodbus."""
caplog.clear()
caplog.set_level(logging.ERROR)
config = {DOMAIN: do_config}
assert await async_setup_component(hass, DOMAIN, config) is True
await hass.async_block_till_done()
assert DOMAIN in hass.config.components
assert caplog.text == ""
return mock_pymodbus
async def test_number_validator() -> None:
"""Test number validator."""
for value, value_type in (
(15, int),
(15.1, float),
("15", int),
("15.1", float),
(-15, int),
(-15.1, float),
("-15", int),
("-15.1", float),
):
assert isinstance(number_validator(value), value_type)
try:
number_validator("x15.1")
except vol.Invalid:
return
pytest.fail("Number_validator not throwing exception")
@pytest.mark.parametrize(
"do_config",
[
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_COUNT: 2,
CONF_DATA_TYPE: DataType.STRING,
},
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_DATA_TYPE: DataType.INT32,
},
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_DATA_TYPE: DataType.INT32,
CONF_SWAP: CONF_SWAP_BYTE,
},
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_COUNT: 2,
CONF_DATA_TYPE: DataType.CUSTOM,
CONF_STRUCTURE: ">i",
CONF_SWAP: CONF_SWAP_BYTE,
},
],
)
async def test_ok_struct_validator(do_config) -> None:
"""Test struct validator."""
try:
struct_validator(do_config)
except vol.Invalid:
pytest.fail("struct_validator unexpected exception")
@pytest.mark.parametrize(
"do_config",
[
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_COUNT: 8,
CONF_DATA_TYPE: "int",
},
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_COUNT: 8,
CONF_DATA_TYPE: DataType.CUSTOM,
},
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_COUNT: 8,
CONF_DATA_TYPE: DataType.CUSTOM,
CONF_STRUCTURE: "no good",
},
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_COUNT: 20,
CONF_DATA_TYPE: DataType.CUSTOM,
CONF_STRUCTURE: ">f",
},
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_COUNT: 1,
CONF_DATA_TYPE: DataType.CUSTOM,
CONF_STRUCTURE: ">f",
CONF_SWAP: CONF_SWAP_WORD,
},
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_COUNT: 1,
CONF_DATA_TYPE: DataType.STRING,
CONF_STRUCTURE: ">f",
CONF_SWAP: CONF_SWAP_WORD,
},
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_COUNT: 2,
CONF_DATA_TYPE: DataType.CUSTOM,
CONF_STRUCTURE: ">f",
CONF_SLAVE_COUNT: 5,
},
],
)
async def test_exception_struct_validator(do_config) -> None:
"""Test struct validator."""
try:
struct_validator(do_config)
except vol.Invalid:
return
pytest.fail("struct_validator missing exception")
@pytest.mark.parametrize(
"do_config",
[
[
{
CONF_NAME: TEST_MODBUS_NAME,
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
},
{
CONF_NAME: TEST_MODBUS_NAME,
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST + " 2",
CONF_PORT: TEST_PORT_TCP,
},
],
[
{
CONF_NAME: TEST_MODBUS_NAME,
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
},
{
CONF_NAME: TEST_MODBUS_NAME + " 2",
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
},
],
],
)
async def test_duplicate_modbus_validator(do_config) -> None:
"""Test duplicate modbus validator."""
duplicate_modbus_validator(do_config)
assert len(do_config) == 1
@pytest.mark.parametrize(
"do_config",
[
[
{
CONF_NAME: TEST_MODBUS_NAME,
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
CONF_SENSORS: [
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_ADDRESS: 117,
CONF_SLAVE: 0,
},
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_ADDRESS: 119,
CONF_SLAVE: 0,
},
],
}
],
[
{
CONF_NAME: TEST_MODBUS_NAME,
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
CONF_SENSORS: [
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_ADDRESS: 117,
CONF_SLAVE: 0,
},
{
CONF_NAME: TEST_ENTITY_NAME + " 2",
CONF_ADDRESS: 117,
CONF_SLAVE: 0,
},
],
}
],
],
)
async def test_duplicate_entity_validator(do_config) -> None:
"""Test duplicate entity validator."""
duplicate_entity_validator(do_config)
assert len(do_config[0][CONF_SENSORS]) == 1
@pytest.mark.parametrize(
"do_config",
[
{
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
},
{
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
CONF_NAME: TEST_MODBUS_NAME,
CONF_TIMEOUT: 30,
CONF_DELAY: 10,
},
{
CONF_TYPE: UDP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
},
{
CONF_TYPE: UDP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
CONF_NAME: TEST_MODBUS_NAME,
CONF_TIMEOUT: 30,
CONF_DELAY: 10,
},
{
CONF_TYPE: RTUOVERTCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
},
{
CONF_TYPE: RTUOVERTCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
CONF_NAME: TEST_MODBUS_NAME,
CONF_TIMEOUT: 30,
CONF_DELAY: 10,
},
{
CONF_TYPE: SERIAL,
CONF_BAUDRATE: 9600,
CONF_BYTESIZE: 8,
CONF_METHOD: "rtu",
CONF_PORT: TEST_PORT_SERIAL,
CONF_PARITY: "E",
CONF_STOPBITS: 1,
CONF_MSG_WAIT: 100,
},
{
CONF_TYPE: SERIAL,
CONF_BAUDRATE: 9600,
CONF_BYTESIZE: 8,
CONF_METHOD: "rtu",
CONF_PORT: TEST_PORT_SERIAL,
CONF_PARITY: "E",
CONF_STOPBITS: 1,
CONF_NAME: TEST_MODBUS_NAME,
CONF_TIMEOUT: 30,
CONF_DELAY: 10,
},
{
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
CONF_DELAY: 5,
},
[
{
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
CONF_NAME: TEST_MODBUS_NAME,
},
{
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
CONF_NAME: f"{TEST_MODBUS_NAME} 2",
},
{
CONF_TYPE: SERIAL,
CONF_BAUDRATE: 9600,
CONF_BYTESIZE: 8,
CONF_METHOD: "rtu",
CONF_PORT: TEST_PORT_SERIAL,
CONF_PARITY: "E",
CONF_STOPBITS: 1,
CONF_NAME: f"{TEST_MODBUS_NAME} 3",
},
],
{
# Special test for scan_interval validator with scan_interval: 0
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
CONF_SENSORS: [
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_ADDRESS: 117,
CONF_SLAVE: 0,
CONF_SCAN_INTERVAL: 0,
}
],
},
],
)
async def test_config_modbus(
hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_modbus_with_pymodbus
) -> None:
"""Run configuration test for modbus."""
VALUE = "value"
FUNC = "func"
DATA = "data"
SERVICE = "service"
@pytest.mark.parametrize(
"do_config",
[
{
CONF_NAME: TEST_MODBUS_NAME,
CONF_TYPE: SERIAL,
CONF_BAUDRATE: 9600,
CONF_BYTESIZE: 8,
CONF_METHOD: "rtu",
CONF_PORT: TEST_PORT_SERIAL,
CONF_PARITY: "E",
CONF_STOPBITS: 1,
},
],
)
@pytest.mark.parametrize(
"do_write",
[
{
DATA: ATTR_VALUE,
VALUE: 15,
SERVICE: SERVICE_WRITE_REGISTER,
FUNC: CALL_TYPE_WRITE_REGISTER,
},
{
DATA: ATTR_VALUE,
VALUE: [1, 2, 3],
SERVICE: SERVICE_WRITE_REGISTER,
FUNC: CALL_TYPE_WRITE_REGISTERS,
},
{
DATA: ATTR_STATE,
VALUE: False,
SERVICE: SERVICE_WRITE_COIL,
FUNC: CALL_TYPE_WRITE_COIL,
},
{
DATA: ATTR_STATE,
VALUE: [True, False, True],
SERVICE: SERVICE_WRITE_COIL,
FUNC: CALL_TYPE_WRITE_COILS,
},
],
)
@pytest.mark.parametrize(
"do_return",
[
{VALUE: ReadResult([0x0001]), DATA: ""},
{VALUE: ExceptionResponse(0x06), DATA: "Pymodbus:"},
{VALUE: IllegalFunctionRequest(0x06), DATA: "Pymodbus:"},
{VALUE: ModbusException("fail write_"), DATA: "Pymodbus:"},
],
)
@pytest.mark.parametrize(
"do_unit",
[
ATTR_UNIT,
ATTR_SLAVE,
],
)
async def test_pb_service_write(
hass: HomeAssistant,
do_write,
do_return,
do_unit,
caplog: pytest.LogCaptureFixture,
mock_modbus_with_pymodbus,
) -> None:
"""Run test for service write_register."""
func_name = {
CALL_TYPE_WRITE_COIL: mock_modbus_with_pymodbus.write_coil,
CALL_TYPE_WRITE_COILS: mock_modbus_with_pymodbus.write_coils,
CALL_TYPE_WRITE_REGISTER: mock_modbus_with_pymodbus.write_register,
CALL_TYPE_WRITE_REGISTERS: mock_modbus_with_pymodbus.write_registers,
}
data = {
ATTR_HUB: TEST_MODBUS_NAME,
do_unit: 17,
ATTR_ADDRESS: 16,
do_write[DATA]: do_write[VALUE],
}
mock_modbus_with_pymodbus.reset_mock()
caplog.clear()
caplog.set_level(logging.DEBUG)
func_name[do_write[FUNC]].return_value = do_return[VALUE]
await hass.services.async_call(DOMAIN, do_write[SERVICE], data, blocking=True)
assert func_name[do_write[FUNC]].called
assert func_name[do_write[FUNC]].call_args[0] == (
data[ATTR_ADDRESS],
data[do_write[DATA]],
)
if do_return[DATA]:
assert any(message.startswith("Pymodbus:") for message in caplog.messages)
@pytest.fixture(name="mock_modbus_read_pymodbus")
async def mock_modbus_read_pymodbus_fixture(
hass,
do_group,
do_type,
do_scan_interval,
do_return,
do_exception,
caplog,
mock_pymodbus,
freezer: FrozenDateTimeFactory,
):
"""Load integration modbus using mocked pymodbus."""
caplog.clear()
caplog.set_level(logging.ERROR)
mock_pymodbus.read_coils.side_effect = do_exception
mock_pymodbus.read_discrete_inputs.side_effect = do_exception
mock_pymodbus.read_input_registers.side_effect = do_exception
mock_pymodbus.read_holding_registers.side_effect = do_exception
mock_pymodbus.read_coils.return_value = do_return
mock_pymodbus.read_discrete_inputs.return_value = do_return
mock_pymodbus.read_input_registers.return_value = do_return
mock_pymodbus.read_holding_registers.return_value = do_return
config = {
DOMAIN: [
{
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
CONF_NAME: TEST_MODBUS_NAME,
do_group: [
{
CONF_INPUT_TYPE: do_type,
CONF_NAME: TEST_ENTITY_NAME,
CONF_ADDRESS: 51,
CONF_SLAVE: 0,
CONF_SCAN_INTERVAL: do_scan_interval,
}
],
}
],
}
assert await async_setup_component(hass, DOMAIN, config) is True
await hass.async_block_till_done()
assert DOMAIN in hass.config.components
assert caplog.text == ""
freezer.tick(timedelta(seconds=DEFAULT_SCAN_INTERVAL + 60))
async_fire_time_changed(hass)
await hass.async_block_till_done()
return mock_pymodbus
@pytest.mark.parametrize(
("do_domain", "do_group", "do_type", "do_scan_interval"),
[
[SENSOR_DOMAIN, CONF_SENSORS, CALL_TYPE_REGISTER_HOLDING, 10],
[SENSOR_DOMAIN, CONF_SENSORS, CALL_TYPE_REGISTER_INPUT, 10],
[BINARY_SENSOR_DOMAIN, CONF_BINARY_SENSORS, CALL_TYPE_DISCRETE, 10],
[BINARY_SENSOR_DOMAIN, CONF_BINARY_SENSORS, CALL_TYPE_COIL, 1],
],
)
@pytest.mark.parametrize(
("do_return", "do_exception", "do_expect_state", "do_expect_value"),
[
[ReadResult([1]), None, STATE_ON, "1"],
[IllegalFunctionRequest(0x99), None, STATE_UNAVAILABLE, STATE_UNAVAILABLE],
[ExceptionResponse(0x99), None, STATE_UNAVAILABLE, STATE_UNAVAILABLE],
[
ReadResult([1]),
ModbusException("fail read_"),
STATE_UNAVAILABLE,
STATE_UNAVAILABLE,
],
],
)
async def test_pb_read(
hass: HomeAssistant,
do_domain,
do_expect_state,
do_expect_value,
caplog: pytest.LogCaptureFixture,
mock_modbus_read_pymodbus,
) -> None:
"""Run test for different read."""
# Check state
entity_id = f"{do_domain}.{TEST_ENTITY_NAME}".replace(" ", "_")
state = hass.states.get(entity_id).state
assert hass.states.get(entity_id).state
    # Sensors report the register value while binary sensors report an
    # on/off state, so pick the expectation matching the domain under test.
if do_domain == SENSOR_DOMAIN:
do_expect = do_expect_value
else:
do_expect = do_expect_state
assert state == do_expect
async def test_pymodbus_constructor_fail(
hass: HomeAssistant, caplog: pytest.LogCaptureFixture
) -> None:
"""Run test for failing pymodbus constructor."""
config = {
DOMAIN: [
{
CONF_NAME: TEST_MODBUS_NAME,
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
}
]
}
with mock.patch(
"homeassistant.components.modbus.modbus.ModbusTcpClient", autospec=True
) as mock_pb:
caplog.set_level(logging.ERROR)
mock_pb.side_effect = ModbusException("test no class")
assert await async_setup_component(hass, DOMAIN, config) is False
await hass.async_block_till_done()
message = f"Pymodbus: {TEST_MODBUS_NAME}: Modbus Error: test"
assert caplog.messages[0].startswith(message)
assert caplog.records[0].levelname == "ERROR"
assert mock_pb.called
async def test_pymodbus_close_fail(
hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_pymodbus
) -> None:
"""Run test for failing pymodbus close."""
config = {
DOMAIN: [
{
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
}
]
}
caplog.set_level(logging.ERROR)
mock_pymodbus.connect.return_value = True
mock_pymodbus.close.side_effect = ModbusException("close fail")
assert await async_setup_component(hass, DOMAIN, config) is True
await hass.async_block_till_done()
# Close() is called as part of teardown
async def test_pymodbus_connect_fail(
hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_pymodbus
) -> None:
"""Run test for failing pymodbus constructor."""
config = {
DOMAIN: [
{
CONF_NAME: TEST_MODBUS_NAME,
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
}
]
}
caplog.set_level(logging.WARNING)
ExceptionMessage = "test connect exception"
mock_pymodbus.connect.side_effect = ModbusException(ExceptionMessage)
assert await async_setup_component(hass, DOMAIN, config) is False
assert ExceptionMessage in caplog.text
async def test_delay(
hass: HomeAssistant, mock_pymodbus, freezer: FrozenDateTimeFactory
) -> None:
"""Run test for startup delay."""
    # The purpose of this test is to verify the startup delay.
    # We "hijack" a binary_sensor to make a proper black-box test.
set_delay = 15
set_scan_interval = 5
entity_id = f"{BINARY_SENSOR_DOMAIN}.{TEST_ENTITY_NAME}".replace(" ", "_")
config = {
DOMAIN: [
{
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
CONF_NAME: TEST_MODBUS_NAME,
CONF_DELAY: set_delay,
CONF_BINARY_SENSORS: [
{
CONF_INPUT_TYPE: CALL_TYPE_COIL,
CONF_NAME: TEST_ENTITY_NAME,
CONF_ADDRESS: 52,
CONF_SLAVE: 0,
CONF_SCAN_INTERVAL: set_scan_interval,
},
],
}
]
}
mock_pymodbus.read_coils.return_value = ReadResult([0x01])
start_time = dt_util.utcnow()
assert await async_setup_component(hass, DOMAIN, config) is True
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == STATE_UNKNOWN
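    # Build the timeline: reads are suppressed until set_delay expires, and
    # the first coil read lands within one scan interval after that.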
time_sensor_active = start_time + timedelta(seconds=2)
time_after_delay = start_time + timedelta(seconds=(set_delay))
time_after_scan = start_time + timedelta(seconds=(set_delay + set_scan_interval))
time_stop = time_after_scan + timedelta(seconds=10)
now = start_time
while now < time_stop:
        # This test assumes listeners always fire at 0 microseconds, which is
        # impossible in production, so we use 999999 microseconds to simulate
        # the real world.
freezer.tick(timedelta(seconds=1, microseconds=999999))
now = dt_util.utcnow()
async_fire_time_changed(hass, now)
await hass.async_block_till_done()
if now > time_sensor_active:
if now <= time_after_delay:
assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
elif now > time_after_scan:
assert hass.states.get(entity_id).state == STATE_ON
@pytest.mark.parametrize(
"do_config",
[
{
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
CONF_SENSORS: [
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_ADDRESS: 117,
CONF_SLAVE: 0,
CONF_SCAN_INTERVAL: 0,
}
],
},
],
)
async def test_shutdown(
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
mock_pymodbus,
mock_modbus_with_pymodbus,
) -> None:
"""Run test for shutdown."""
hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP)
await hass.async_block_till_done()
await hass.async_block_till_done()
assert mock_pymodbus.close.called
assert caplog.text == ""
@pytest.mark.parametrize(
"do_config",
[
{
CONF_SENSORS: [
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_ADDRESS: 51,
CONF_SLAVE: 0,
}
]
},
],
)
async def test_stop_restart(
hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_modbus
) -> None:
"""Run test for service stop."""
caplog.set_level(logging.INFO)
entity_id = f"{SENSOR_DOMAIN}.{TEST_ENTITY_NAME}".replace(" ", "_")
assert hass.states.get(entity_id).state == STATE_UNKNOWN
hass.states.async_set(entity_id, 17)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == "17"
mock_modbus.reset_mock()
caplog.clear()
data = {
ATTR_HUB: TEST_MODBUS_NAME,
}
await hass.services.async_call(DOMAIN, SERVICE_STOP, data, blocking=True)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
assert mock_modbus.close.called
assert f"modbus {TEST_MODBUS_NAME} communication closed" in caplog.text
mock_modbus.reset_mock()
caplog.clear()
await hass.services.async_call(DOMAIN, SERVICE_RESTART, data, blocking=True)
await hass.async_block_till_done()
assert not mock_modbus.close.called
assert mock_modbus.connect.called
assert f"modbus {TEST_MODBUS_NAME} communication open" in caplog.text
mock_modbus.reset_mock()
caplog.clear()
await hass.services.async_call(DOMAIN, SERVICE_RESTART, data, blocking=True)
await hass.async_block_till_done()
assert mock_modbus.close.called
assert mock_modbus.connect.called
assert f"modbus {TEST_MODBUS_NAME} communication closed" in caplog.text
assert f"modbus {TEST_MODBUS_NAME} communication open" in caplog.text
@pytest.mark.parametrize("do_config", [{}])
async def test_write_no_client(hass: HomeAssistant, mock_modbus) -> None:
"""Run test for service stop and write without client."""
mock_modbus.reset()
data = {
ATTR_HUB: TEST_MODBUS_NAME,
}
await hass.services.async_call(DOMAIN, SERVICE_STOP, data, blocking=True)
await hass.async_block_till_done()
assert mock_modbus.close.called
data = {
ATTR_HUB: TEST_MODBUS_NAME,
ATTR_UNIT: 17,
ATTR_ADDRESS: 16,
ATTR_STATE: True,
}
await hass.services.async_call(DOMAIN, SERVICE_WRITE_COIL, data, blocking=True)
@pytest.mark.parametrize("do_config", [{}])
async def test_integration_reload(
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
mock_modbus,
freezer: FrozenDateTimeFactory,
) -> None:
"""Run test for integration reload."""
caplog.set_level(logging.INFO)
caplog.clear()
yaml_path = get_fixture_path("configuration.yaml", "modbus")
with mock.patch.object(hass_config, "YAML_CONFIG_FILE", yaml_path):
await hass.services.async_call(DOMAIN, SERVICE_RELOAD, blocking=True)
await hass.async_block_till_done()
for i in range(4):
freezer.tick(timedelta(seconds=1))
async_fire_time_changed(hass)
await hass.async_block_till_done()
assert "Modbus reloading" in caplog.text
@pytest.mark.parametrize("do_config", [{}])
async def test_integration_reload_failed(
hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_modbus
) -> None:
"""Run test for integration connect failure on reload."""
caplog.set_level(logging.INFO)
caplog.clear()
yaml_path = get_fixture_path("configuration.yaml", "modbus")
with mock.patch.object(
hass_config, "YAML_CONFIG_FILE", yaml_path
), mock.patch.object(mock_modbus, "connect", side_effect=ModbusException("error")):
await hass.services.async_call(DOMAIN, SERVICE_RELOAD, blocking=True)
await hass.async_block_till_done()
assert "Modbus reloading" in caplog.text
assert "connect failed, retry in pymodbus" in caplog.text
/codegate2017/pngparser/urllib2.py | cExplr/ctf | no_license | Python
"""An extensible library for opening URLs using a variety of protocols
The simplest way to use this module is to call the urlopen function,
which accepts a string containing a URL or a Request object (described
below). It opens the URL and returns the results as file-like
object; the returned object has some extra methods described below.
The OpenerDirector manages a collection of Handler objects that do
all the actual work. Each Handler implements a particular protocol or
option. The OpenerDirector is a composite object that invokes the
Handlers needed to open the requested URL. For example, the
HTTPHandler performs HTTP GET and POST requests and deals with
non-error returns. The HTTPRedirectHandler automatically deals with
HTTP 301, 302, 303 and 307 redirect errors, and the HTTPDigestAuthHandler
deals with digest authentication.
urlopen(url, data=None) -- Basic usage is the same as original
urllib. pass the url and optionally data to post to an HTTP URL, and
get a file-like object back. One difference is that you can also pass
a Request instance instead of URL. Raises a URLError (subclass of
IOError); for HTTP errors, raises an HTTPError, which can also be
treated as a valid response.
build_opener -- Function that creates a new OpenerDirector instance.
Will install the default handlers. Accepts one or more Handlers as
arguments, either instances or Handler classes that it will
instantiate. If one of the argument is a subclass of the default
handler, the argument will be installed instead of the default.
install_opener -- Installs a new opener as the default opener.
objects of interest:
OpenerDirector -- Sets up the User Agent as the Python-urllib client and manages
the Handler classes, while dealing with requests and responses.
Request -- An object that encapsulates the state of a request. The
state can be as simple as the URL. It can also include extra HTTP
headers, e.g. a User-Agent.
BaseHandler --
exceptions:
URLError -- A subclass of IOError, individual protocols have their own
specific subclass.
HTTPError -- Also a valid HTTP response, so you can treat an HTTP error
as an exceptional event or valid response.
internals:
BaseHandler and parent
_call_chain conventions
Example usage:
import urllib2
# set up authentication info
authinfo = urllib2.HTTPBasicAuthHandler()
authinfo.add_password(realm='PDQ Application',
uri='https://mahler:8092/site-updates.py',
user='klem',
passwd='geheim$parole')
proxy_support = urllib2.ProxyHandler({"http" : "http://ahad-haam:3128"})
# build a new opener that adds authentication and caching FTP handlers
opener = urllib2.build_opener(proxy_support, authinfo, urllib2.CacheFTPHandler)
# install it
urllib2.install_opener(opener)
f = urllib2.urlopen('http://www.python.org/')
"""
# XXX issues:
# If an authentication error handler that tries to perform
# authentication for some reason but fails, how should the error be
# signalled? The client needs to know the HTTP error code. But if
# the handler knows that the problem was, e.g., that it didn't know
# that hash algo that requested in the challenge, it would be good to
# pass that information along to the client, too.
# ftp errors aren't handled cleanly
# check digest against correct (i.e. non-apache) implementation
# Possible extensions:
# complex proxies XXX not sure what exactly was meant by this
# abstract factory for opener
import base64
import hashlib
import httplib
import mimetools
import os
import posixpath
import random
import re
import socket
import sys
import time
import urlparse
import bisect
import warnings
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
# check for SSL
try:
import ssl
except ImportError:
_have_ssl = False
else:
_have_ssl = True
from urllib import (unwrap, unquote, splittype, splithost, quote,
addinfourl, splitport, splittag, toBytes,
splitattr, ftpwrapper, splituser, splitpasswd, splitvalue)
# support for FileHandler, proxies via environment variables
from urllib import localhost, url2pathname, getproxies, proxy_bypass
# used in User-Agent header sent
__version__ = sys.version[:3]
_opener = None
def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
cafile=None, capath=None, cadefault=False, context=None):
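    """Open a URL (a string or a Request object) and return a file-like
    response.

    cafile, capath and cadefault build a default SSL context for HTTPS
    certificate verification; they are mutually exclusive with an explicit
    context, as validated below.
    """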
global _opener
if cafile or capath or cadefault:
if context is not None:
raise ValueError(
"You can't pass both context and any of cafile, capath, and "
"cadefault"
)
if not _have_ssl:
raise ValueError('SSL support not available')
context = ssl.create_default_context(purpose=ssl.Purpose.SERVER_AUTH,
cafile=cafile,
capath=capath)
https_handler = HTTPSHandler(context=context)
opener = build_opener(https_handler)
elif context:
https_handler = HTTPSHandler(context=context)
opener = build_opener(https_handler)
elif _opener is None:
_opener = opener = build_opener()
else:
opener = _opener
return opener.open(url, data, timeout)
def install_opener(opener):
global _opener
_opener = opener
# do these error classes make sense?
# make sure all of the IOError stuff is overridden. we just want to be
# subtypes.
class URLError(IOError):
# URLError is a sub-type of IOError, but it doesn't share any of
# the implementation. need to override __init__ and __str__.
# It sets self.args for compatibility with other EnvironmentError
# subclasses, but args doesn't have the typical format with errno in
# slot 0 and strerror in slot 1. This may be better than nothing.
def __init__(self, reason):
self.args = reason,
self.reason = reason
def __str__(self):
return '<urlopen error %s>' % self.reason
class HTTPError(URLError, addinfourl):
"""Raised when HTTP error occurs, but also acts like non-error return"""
__super_init = addinfourl.__init__
def __init__(self, url, code, msg, hdrs, fp):
self.code = code
self.msg = msg
self.hdrs = hdrs
self.fp = fp
self.filename = url
# The addinfourl classes depend on fp being a valid file
# object. In some cases, the HTTPError may not have a valid
# file object. If this happens, the simplest workaround is to
# not initialize the base classes.
if fp is not None:
self.__super_init(fp, hdrs, url, code)
def __str__(self):
return 'HTTP Error %s: %s' % (self.code, self.msg)
# since URLError specifies a .reason attribute, HTTPError should also
    # provide this attribute. See issue13211 for discussion.
@property
def reason(self):
return self.msg
def info(self):
return self.hdrs
# copied from cookielib.py
_cut_port_re = re.compile(r":\d+$")
def request_host(request):
"""Return request-host, as defined by RFC 2965.
Variation from RFC: returned value is lowercased, for convenient
comparison.
"""
url = request.get_full_url()
host = urlparse.urlparse(url)[1]
if host == "":
host = request.get_header("Host", "")
# remove port, if present
host = _cut_port_re.sub("", host, 1)
return host.lower()
class Request:
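    """Encapsulate the state of a single request: the URL, optional POST
    data, headers, and proxy/redirect bookkeeping for the handler chain."""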
def __init__(self, url, data=None, headers={},
origin_req_host=None, unverifiable=False):
# unwrap('<URL:type://host/path>') --> 'type://host/path'
self.__original = unwrap(url)
self.__original, self.__fragment = splittag(self.__original)
self.type = None
# self.__r_type is what's left after doing the splittype
self.host = None
self.port = None
self._tunnel_host = None
self.data = data
self.headers = {}
for key, value in headers.items():
self.add_header(key, value)
self.unredirected_hdrs = {}
if origin_req_host is None:
origin_req_host = request_host(self)
self.origin_req_host = origin_req_host
self.unverifiable = unverifiable
def __getattr__(self, attr):
# XXX this is a fallback mechanism to guard against these
# methods getting called in a non-standard order. this may be
# too complicated and/or unnecessary.
# XXX should the __r_XXX attributes be public?
if attr in ('_Request__r_type', '_Request__r_host'):
getattr(self, 'get_' + attr[12:])()
return self.__dict__[attr]
raise AttributeError, attr
def get_method(self):
if self.has_data():
return "POST"
else:
return "GET"
# XXX these helper methods are lame
def add_data(self, data):
self.data = data
def has_data(self):
return self.data is not None
def get_data(self):
return self.data
def get_full_url(self):
if self.__fragment:
return '%s#%s' % (self.__original, self.__fragment)
else:
return self.__original
def get_type(self):
if self.type is None:
self.type, self.__r_type = splittype(self.__original)
if self.type is None:
raise ValueError, "unknown url type: %s" % self.__original
return self.type
def get_host(self):
if self.host is None:
self.host, self.__r_host = splithost(self.__r_type)
if self.host:
self.host = unquote(self.host)
return self.host
def get_selector(self):
return self.__r_host
def set_proxy(self, host, type):
if self.type == 'https' and not self._tunnel_host:
self._tunnel_host = self.host
else:
self.type = type
self.__r_host = self.__original
self.host = host
def has_proxy(self):
return self.__r_host == self.__original
def get_origin_req_host(self):
return self.origin_req_host
def is_unverifiable(self):
return self.unverifiable
def add_header(self, key, val):
# useful for something like authentication
self.headers[key.capitalize()] = val
def add_unredirected_header(self, key, val):
# will not be added to a redirected request
self.unredirected_hdrs[key.capitalize()] = val
def has_header(self, header_name):
return (header_name in self.headers or
header_name in self.unredirected_hdrs)
def get_header(self, header_name, default=None):
return self.headers.get(
header_name,
self.unredirected_hdrs.get(header_name, default))
def header_items(self):
hdrs = self.unredirected_hdrs.copy()
hdrs.update(self.headers)
return hdrs.items()
class OpenerDirector:
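    """Dispatch requests through a chain of registered Handler objects.

    add_handler() sorts each handler's *_open, *_request, *_response and
    *_error_* methods into lookup tables that open() and error() consult.
    """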
def __init__(self):
client_version = "Python-urllib/%s" % __version__
self.addheaders = [('User-agent', client_version)]
# self.handlers is retained only for backward compatibility
self.handlers = []
# manage the individual handlers
self.handle_open = {}
self.handle_error = {}
self.process_response = {}
self.process_request = {}
def add_handler(self, handler):
if not hasattr(handler, "add_parent"):
raise TypeError("expected BaseHandler instance, got %r" %
type(handler))
added = False
for meth in dir(handler):
if meth in ["redirect_request", "do_open", "proxy_open"]:
# oops, coincidental match
continue
i = meth.find("_")
protocol = meth[:i]
condition = meth[i+1:]
if condition.startswith("error"):
j = condition.find("_") + i + 1
kind = meth[j+1:]
try:
kind = int(kind)
except ValueError:
pass
lookup = self.handle_error.get(protocol, {})
self.handle_error[protocol] = lookup
elif condition == "open":
kind = protocol
lookup = self.handle_open
elif condition == "response":
kind = protocol
lookup = self.process_response
elif condition == "request":
kind = protocol
lookup = self.process_request
else:
continue
handlers = lookup.setdefault(kind, [])
if handlers:
bisect.insort(handlers, handler)
else:
handlers.append(handler)
added = True
if added:
bisect.insort(self.handlers, handler)
handler.add_parent(self)
def close(self):
# Only exists for backwards compatibility.
pass
def _call_chain(self, chain, kind, meth_name, *args):
# Handlers raise an exception if no one else should try to handle
# the request, or return None if they can't but another handler
# could. Otherwise, they return the response.
handlers = chain.get(kind, ())
for handler in handlers:
func = getattr(handler, meth_name)
result = func(*args)
if result is not None:
return result
def open(self, fullurl, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
# accept a URL or a Request object
if isinstance(fullurl, basestring):
req = Request(fullurl, data)
else:
req = fullurl
if data is not None:
req.add_data(data)
req.timeout = timeout
protocol = req.get_type()
# pre-process request
meth_name = protocol+"_request"
for processor in self.process_request.get(protocol, []):
meth = getattr(processor, meth_name)
req = meth(req)
response = self._open(req, data)
# post-process response
meth_name = protocol+"_response"
for processor in self.process_response.get(protocol, []):
meth = getattr(processor, meth_name)
response = meth(req, response)
return response
def _open(self, req, data=None):
result = self._call_chain(self.handle_open, 'default',
'default_open', req)
if result:
return result
protocol = req.get_type()
result = self._call_chain(self.handle_open, protocol, protocol +
'_open', req)
if result:
return result
return self._call_chain(self.handle_open, 'unknown',
'unknown_open', req)
def error(self, proto, *args):
if proto in ('http', 'https'):
# XXX http[s] protocols are special-cased
dict = self.handle_error['http'] # https is not different than http
proto = args[2] # YUCK!
meth_name = 'http_error_%s' % proto
http_err = 1
orig_args = args
else:
dict = self.handle_error
meth_name = proto + '_error'
http_err = 0
args = (dict, proto, meth_name) + args
result = self._call_chain(*args)
if result:
return result
if http_err:
args = (dict, 'default', 'http_error_default') + orig_args
return self._call_chain(*args)
# XXX probably also want an abstract factory that knows when it makes
# sense to skip a superclass in favor of a subclass and when it might
# make sense to include both
def build_opener(*handlers):
"""Create an opener object from a list of handlers.
The opener will use several default handlers, including support
for HTTP, FTP and when applicable, HTTPS.
If any of the handlers passed as arguments are subclasses of the
default handlers, the default handlers will not be used.
"""
import types
def isclass(obj):
return isinstance(obj, (types.ClassType, type))
opener = OpenerDirector()
default_classes = [ProxyHandler, UnknownHandler, HTTPHandler,
HTTPDefaultErrorHandler, HTTPRedirectHandler,
FTPHandler, FileHandler, HTTPErrorProcessor]
if hasattr(httplib, 'HTTPS'):
default_classes.append(HTTPSHandler)
skip = set()
for klass in default_classes:
for check in handlers:
if isclass(check):
if issubclass(check, klass):
skip.add(klass)
elif isinstance(check, klass):
skip.add(klass)
for klass in skip:
default_classes.remove(klass)
for klass in default_classes:
opener.add_handler(klass())
for h in handlers:
if isclass(h):
h = h()
opener.add_handler(h)
return opener
class BaseHandler:
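    """Base class for protocol handlers; handler_order decides the position
    in the handler chain (lower values run earlier)."""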
handler_order = 500
def add_parent(self, parent):
self.parent = parent
def close(self):
# Only exists for backwards compatibility
pass
def __lt__(self, other):
if not hasattr(other, "handler_order"):
# Try to preserve the old behavior of having custom classes
# inserted after default ones (works only for custom user
# classes which are not aware of handler_order).
return True
return self.handler_order < other.handler_order
class HTTPErrorProcessor(BaseHandler):
"""Process HTTP error responses."""
handler_order = 1000 # after all other processing
def http_response(self, request, response):
code, msg, hdrs = response.code, response.msg, response.info()
# According to RFC 2616, "2xx" code indicates that the client's
# request was successfully received, understood, and accepted.
if not (200 <= code < 300):
response = self.parent.error(
'http', request, response, code, msg, hdrs)
return response
https_response = http_response
class HTTPDefaultErrorHandler(BaseHandler):
def http_error_default(self, req, fp, code, msg, hdrs):
raise HTTPError(req.get_full_url(), code, msg, hdrs, fp)
class HTTPRedirectHandler(BaseHandler):
# maximum number of redirections to any single URL
# this is needed because of the state that cookies introduce
max_repeats = 4
# maximum total number of redirections (regardless of URL) before
# assuming we're in a loop
max_redirections = 10
def redirect_request(self, req, fp, code, msg, headers, newurl):
"""Return a Request or None in response to a redirect.
This is called by the http_error_30x methods when a
redirection response is received. If a redirection should
take place, return a new Request to allow http_error_30x to
perform the redirect. Otherwise, raise HTTPError if no-one
else should try to handle this url. Return None if you can't
but another Handler might.
"""
m = req.get_method()
if (code in (301, 302, 303, 307) and m in ("GET", "HEAD")
or code in (301, 302, 303) and m == "POST"):
# Strictly (according to RFC 2616), 301 or 302 in response
# to a POST MUST NOT cause a redirection without confirmation
# from the user (of urllib2, in this case). In practice,
# essentially all clients do redirect in this case, so we
# do the same.
# be conciliant with URIs containing a space
newurl = newurl.replace(' ', '%20')
newheaders = dict((k,v) for k,v in req.headers.items()
if k.lower() not in ("content-length", "content-type")
)
return Request(newurl,
headers=newheaders,
origin_req_host=req.get_origin_req_host(),
unverifiable=True)
else:
raise HTTPError(req.get_full_url(), code, msg, headers, fp)
# Implementation note: To avoid the server sending us into an
# infinite loop, the request object needs to track what URLs we
# have already seen. Do this by adding a handler-specific
# attribute to the Request object.
def http_error_302(self, req, fp, code, msg, headers):
# Some servers (incorrectly) return multiple Location headers
# (so probably same goes for URI). Use first header.
if 'location' in headers:
newurl = headers.getheaders('location')[0]
elif 'uri' in headers:
newurl = headers.getheaders('uri')[0]
else:
return
# fix a possible malformed URL
urlparts = urlparse.urlparse(newurl)
if not urlparts.path and urlparts.netloc:
urlparts = list(urlparts)
urlparts[2] = "/"
newurl = urlparse.urlunparse(urlparts)
newurl = urlparse.urljoin(req.get_full_url(), newurl)
# For security reasons we do not allow redirects to protocols
# other than HTTP, HTTPS or FTP.
newurl_lower = newurl.lower()
if not (newurl_lower.startswith('http://') or
newurl_lower.startswith('https://') or
newurl_lower.startswith('ftp://')):
raise HTTPError(newurl, code,
msg + " - Redirection to url '%s' is not allowed" %
newurl,
headers, fp)
# XXX Probably want to forget about the state of the current
# request, although that might interact poorly with other
# handlers that also use handler-specific request attributes
new = self.redirect_request(req, fp, code, msg, headers, newurl)
if new is None:
return
# loop detection
# .redirect_dict has a key url if url was previously visited.
if hasattr(req, 'redirect_dict'):
visited = new.redirect_dict = req.redirect_dict
if (visited.get(newurl, 0) >= self.max_repeats or
len(visited) >= self.max_redirections):
raise HTTPError(req.get_full_url(), code,
self.inf_msg + msg, headers, fp)
else:
visited = new.redirect_dict = req.redirect_dict = {}
visited[newurl] = visited.get(newurl, 0) + 1
# Don't close the fp until we are sure that we won't use it
# with HTTPError.
fp.read()
fp.close()
return self.parent.open(new, timeout=req.timeout)
http_error_301 = http_error_303 = http_error_307 = http_error_302
inf_msg = "The HTTP server returned a redirect error that would " \
"lead to an infinite loop.\n" \
"The last 30x error message was:\n"
def _parse_proxy(proxy):
"""Return (scheme, user, password, host/port) given a URL or an authority.
If a URL is supplied, it must have an authority (host:port) component.
According to RFC 3986, having an authority component means the URL must
have two slashes after the scheme:
>>> _parse_proxy('file:/ftp.example.com/')
Traceback (most recent call last):
ValueError: proxy URL with no authority: 'file:/ftp.example.com/'
The first three items of the returned tuple may be None.
Examples of authority parsing:
>>> _parse_proxy('proxy.example.com')
(None, None, None, 'proxy.example.com')
>>> _parse_proxy('proxy.example.com:3128')
(None, None, None, 'proxy.example.com:3128')
The authority component may optionally include userinfo (assumed to be
username:password):
>>> _parse_proxy('joe:[email protected]')
(None, 'joe', 'password', 'proxy.example.com')
>>> _parse_proxy('joe:[email protected]:3128')
(None, 'joe', 'password', 'proxy.example.com:3128')
Same examples, but with URLs instead:
>>> _parse_proxy('http://proxy.example.com/')
('http', None, None, 'proxy.example.com')
>>> _parse_proxy('http://proxy.example.com:3128/')
('http', None, None, 'proxy.example.com:3128')
>>> _parse_proxy('http://joe:[email protected]/')
('http', 'joe', 'password', 'proxy.example.com')
>>> _parse_proxy('http://joe:[email protected]:3128')
('http', 'joe', 'password', 'proxy.example.com:3128')
Everything after the authority is ignored:
>>> _parse_proxy('ftp://joe:[email protected]/rubbish:3128')
('ftp', 'joe', 'password', 'proxy.example.com')
Test for no trailing '/' case:
>>> _parse_proxy('http://joe:[email protected]')
('http', 'joe', 'password', 'proxy.example.com')
"""
scheme, r_scheme = splittype(proxy)
if not r_scheme.startswith("/"):
# authority
scheme = None
authority = proxy
else:
# URL
if not r_scheme.startswith("//"):
raise ValueError("proxy URL with no authority: %r" % proxy)
        # We have an authority, so for RFC 3986-compliant URLs (by ss 3.2.2
        # and 3.3), path is empty or starts with '/'
end = r_scheme.find("/", 2)
if end == -1:
end = None
authority = r_scheme[2:end]
userinfo, hostport = splituser(authority)
if userinfo is not None:
user, password = splitpasswd(userinfo)
else:
user = password = None
return scheme, user, password, hostport
class ProxyHandler(BaseHandler):
# Proxies must be in front
handler_order = 100
def __init__(self, proxies=None):
if proxies is None:
proxies = getproxies()
assert hasattr(proxies, 'has_key'), "proxies must be a mapping"
self.proxies = proxies
for type, url in proxies.items():
setattr(self, '%s_open' % type,
lambda r, proxy=url, type=type, meth=self.proxy_open: \
meth(r, proxy, type))
def proxy_open(self, req, proxy, type):
orig_type = req.get_type()
proxy_type, user, password, hostport = _parse_proxy(proxy)
if proxy_type is None:
proxy_type = orig_type
if req.host and proxy_bypass(req.host):
return None
if user and password:
user_pass = '%s:%s' % (unquote(user), unquote(password))
creds = base64.b64encode(user_pass).strip()
req.add_header('Proxy-authorization', 'Basic ' + creds)
hostport = unquote(hostport)
req.set_proxy(hostport, proxy_type)
if orig_type == proxy_type or orig_type == 'https':
# let other handlers take care of it
return None
else:
# need to start over, because the other handlers don't
# grok the proxy's URL type
# e.g. if we have a constructor arg proxies like so:
# {'http': 'ftp://proxy.example.com'}, we may end up turning
# a request for http://acme.example.com/a into one for
# ftp://proxy.example.com/a
return self.parent.open(req, timeout=req.timeout)
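# Illustrative sketch (editorial addition): building an opener around an
# explicit proxy mapping instead of the environment variables consulted by
# getproxies().  The proxy host and credentials below are hypothetical.
def _demo_proxy_opener():
    # Credentials embedded in the proxy URL are split out by _parse_proxy()
    # and sent as a Proxy-authorization header in proxy_open() above.
    handler = ProxyHandler({'http': 'http://joe:[email protected]:3128'})
    return build_opener(handler)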
class HTTPPasswordMgr:
def __init__(self):
self.passwd = {}
def add_password(self, realm, uri, user, passwd):
# uri could be a single URI or a sequence
if isinstance(uri, basestring):
uri = [uri]
if not realm in self.passwd:
self.passwd[realm] = {}
for default_port in True, False:
reduced_uri = tuple(
[self.reduce_uri(u, default_port) for u in uri])
self.passwd[realm][reduced_uri] = (user, passwd)
def find_user_password(self, realm, authuri):
domains = self.passwd.get(realm, {})
for default_port in True, False:
reduced_authuri = self.reduce_uri(authuri, default_port)
for uris, authinfo in domains.iteritems():
for uri in uris:
if self.is_suburi(uri, reduced_authuri):
return authinfo
return None, None
def reduce_uri(self, uri, default_port=True):
"""Accept authority or URI and extract only the authority and path."""
# note HTTP URLs do not have a userinfo component
parts = urlparse.urlsplit(uri)
if parts[1]:
# URI
scheme = parts[0]
authority = parts[1]
path = parts[2] or '/'
else:
# host or host:port
scheme = None
authority = uri
path = '/'
host, port = splitport(authority)
if default_port and port is None and scheme is not None:
dport = {"http": 80,
"https": 443,
}.get(scheme)
if dport is not None:
authority = "%s:%d" % (host, dport)
return authority, path
def is_suburi(self, base, test):
"""Check if test is below base in a URI tree
Both args must be URIs in reduced form.
"""
if base == test:
return True
if base[0] != test[0]:
return False
common = posixpath.commonprefix((base[1], test[1]))
if len(common) == len(base[1]):
return True
return False
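# Illustrative sketch (editorial addition): how the reduced-URI matching in
# HTTPPasswordMgr behaves.  Because add_password() stores both the bare and
# default-port forms of the authority, either spelling finds the entry.
def _demo_password_mgr():
    mgr = HTTPPasswordMgr()
    mgr.add_password('Example Realm', 'http://example.com/', 'joe', 'secret')
    # Both lookups reduce to the same (authority, path) key.
    assert mgr.find_user_password('Example Realm',
                                  'http://example.com/') == ('joe', 'secret')
    assert mgr.find_user_password('Example Realm',
                                  'http://example.com:80/x') == ('joe', 'secret')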
class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr):
def find_user_password(self, realm, authuri):
user, password = HTTPPasswordMgr.find_user_password(self, realm,
authuri)
if user is not None:
return user, password
return HTTPPasswordMgr.find_user_password(self, None, authuri)
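# Illustrative sketch (editorial addition): with the default-realm manager a
# password registered under the realm None acts as a fallback for whatever
# realm the server actually sends.
def _demo_default_realm():
    mgr = HTTPPasswordMgrWithDefaultRealm()
    mgr.add_password(None, 'http://example.com/', 'joe', 'secret')
    assert mgr.find_user_password('Any Realm',
                                  'http://example.com/') == ('joe', 'secret')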
class AbstractBasicAuthHandler:
# XXX this allows for multiple auth-schemes, but will stupidly pick
# the last one with a realm specified.
# allow for double- and single-quoted realm values
# (single quotes are a violation of the RFC, but appear in the wild)
rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+'
'realm=(["\']?)([^"\']*)\\2', re.I)
# XXX could pre-emptively send auth info already accepted (RFC 2617,
# end of section 2, and section 1.2 immediately after "credentials"
# production).
def __init__(self, password_mgr=None):
if password_mgr is None:
password_mgr = HTTPPasswordMgr()
self.passwd = password_mgr
self.add_password = self.passwd.add_password
def http_error_auth_reqed(self, authreq, host, req, headers):
# host may be an authority (without userinfo) or a URL with an
# authority
# XXX could be multiple headers
authreq = headers.get(authreq, None)
if authreq:
mo = AbstractBasicAuthHandler.rx.search(authreq)
if mo:
scheme, quote, realm = mo.groups()
if quote not in ['"', "'"]:
warnings.warn("Basic Auth Realm was unquoted",
UserWarning, 2)
if scheme.lower() == 'basic':
return self.retry_http_basic_auth(host, req, realm)
def retry_http_basic_auth(self, host, req, realm):
user, pw = self.passwd.find_user_password(realm, host)
if pw is not None:
raw = "%s:%s" % (user, pw)
auth = 'Basic %s' % base64.b64encode(raw).strip()
if req.get_header(self.auth_header, None) == auth:
return None
req.add_unredirected_header(self.auth_header, auth)
return self.parent.open(req, timeout=req.timeout)
else:
return None
class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
auth_header = 'Authorization'
def http_error_401(self, req, fp, code, msg, headers):
url = req.get_full_url()
response = self.http_error_auth_reqed('www-authenticate',
url, req, headers)
return response
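# Illustrative sketch (editorial addition): wiring the Basic auth handler
# into an opener.  The realm, URL and credentials are hypothetical.
def _demo_basic_auth_opener():
    auth_handler = HTTPBasicAuthHandler()
    auth_handler.add_password(realm='Protected Area',
                              uri='http://www.example.com/protected/',
                              user='joe', passwd='secret')
    return build_opener(auth_handler)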
class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
auth_header = 'Proxy-authorization'
def http_error_407(self, req, fp, code, msg, headers):
# http_error_auth_reqed requires that there is no userinfo component in
# authority. Assume there isn't one, since urllib2 does not (and
# should not, RFC 3986 s. 3.2.1) support requests for URLs containing
# userinfo.
authority = req.get_host()
response = self.http_error_auth_reqed('proxy-authenticate',
authority, req, headers)
return response
def randombytes(n):
"""Return n random bytes."""
# Use /dev/urandom if it is available. Fall back to random module
# if not. It might be worthwhile to extend this function to use
# other platform-specific mechanisms for getting random bytes.
if os.path.exists("/dev/urandom"):
f = open("/dev/urandom")
s = f.read(n)
f.close()
return s
else:
L = [chr(random.randrange(0, 256)) for i in range(n)]
return "".join(L)
class AbstractDigestAuthHandler:
# Digest authentication is specified in RFC 2617.
# XXX The client does not inspect the Authentication-Info header
# in a successful response.
# XXX It should be possible to test this implementation against
# a mock server that just generates a static set of challenges.
    # XXX qop="auth-int" support is shaky
def __init__(self, passwd=None):
if passwd is None:
passwd = HTTPPasswordMgr()
self.passwd = passwd
self.add_password = self.passwd.add_password
self.retried = 0
self.nonce_count = 0
self.last_nonce = None
def reset_retry_count(self):
self.retried = 0
def http_error_auth_reqed(self, auth_header, host, req, headers):
authreq = headers.get(auth_header, None)
if self.retried > 5:
# Don't fail endlessly - if we failed once, we'll probably
# fail a second time. Hm. Unless the Password Manager is
# prompting for the information. Crap. This isn't great
# but it's better than the current 'repeat until recursion
# depth exceeded' approach <wink>
raise HTTPError(req.get_full_url(), 401, "digest auth failed",
headers, None)
else:
self.retried += 1
if authreq:
scheme = authreq.split()[0]
if scheme.lower() == 'digest':
return self.retry_http_digest_auth(req, authreq)
def retry_http_digest_auth(self, req, auth):
token, challenge = auth.split(' ', 1)
chal = parse_keqv_list(parse_http_list(challenge))
auth = self.get_authorization(req, chal)
if auth:
auth_val = 'Digest %s' % auth
if req.headers.get(self.auth_header, None) == auth_val:
return None
req.add_unredirected_header(self.auth_header, auth_val)
resp = self.parent.open(req, timeout=req.timeout)
return resp
def get_cnonce(self, nonce):
# The cnonce-value is an opaque
# quoted string value provided by the client and used by both client
# and server to avoid chosen plaintext attacks, to provide mutual
# authentication, and to provide some message integrity protection.
# This isn't a fabulous effort, but it's probably Good Enough.
dig = hashlib.sha1("%s:%s:%s:%s" % (self.nonce_count, nonce, time.ctime(),
randombytes(8))).hexdigest()
return dig[:16]
def get_authorization(self, req, chal):
try:
realm = chal['realm']
nonce = chal['nonce']
qop = chal.get('qop')
algorithm = chal.get('algorithm', 'MD5')
# mod_digest doesn't send an opaque, even though it isn't
# supposed to be optional
opaque = chal.get('opaque', None)
except KeyError:
return None
H, KD = self.get_algorithm_impls(algorithm)
if H is None:
return None
user, pw = self.passwd.find_user_password(realm, req.get_full_url())
if user is None:
return None
# XXX not implemented yet
if req.has_data():
entdig = self.get_entity_digest(req.get_data(), chal)
else:
entdig = None
A1 = "%s:%s:%s" % (user, realm, pw)
A2 = "%s:%s" % (req.get_method(),
# XXX selector: what about proxies and full urls
req.get_selector())
if qop == 'auth':
if nonce == self.last_nonce:
self.nonce_count += 1
else:
self.nonce_count = 1
self.last_nonce = nonce
ncvalue = '%08x' % self.nonce_count
cnonce = self.get_cnonce(nonce)
noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2))
respdig = KD(H(A1), noncebit)
elif qop is None:
respdig = KD(H(A1), "%s:%s" % (nonce, H(A2)))
else:
# XXX handle auth-int.
raise URLError("qop '%s' is not supported." % qop)
# XXX should the partial digests be encoded too?
base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
'response="%s"' % (user, realm, nonce, req.get_selector(),
respdig)
if opaque:
base += ', opaque="%s"' % opaque
if entdig:
base += ', digest="%s"' % entdig
base += ', algorithm="%s"' % algorithm
if qop:
base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce)
return base
def get_algorithm_impls(self, algorithm):
        # algorithm should be case-insensitive according to RFC 2617
algorithm = algorithm.upper()
# lambdas assume digest modules are imported at the top level
if algorithm == 'MD5':
H = lambda x: hashlib.md5(x).hexdigest()
elif algorithm == 'SHA':
H = lambda x: hashlib.sha1(x).hexdigest()
# XXX MD5-sess
else:
raise ValueError("Unsupported digest authentication "
"algorithm %r" % algorithm.lower())
KD = lambda s, d: H("%s:%s" % (s, d))
return H, KD
def get_entity_digest(self, data, chal):
# XXX not implemented yet
return None
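# Illustrative sketch (editorial addition): the arithmetic performed by
# get_authorization() above for qop="auth" with the default MD5 algorithm,
# spelled out with hashlib directly.  Every input value is hypothetical.
def _demo_digest_response():
    H = lambda x: hashlib.md5(x).hexdigest()
    KD = lambda s, d: H("%s:%s" % (s, d))
    A1 = "joe:Example Realm:secret"        # user:realm:password
    A2 = "GET:/index.html"                 # method:selector
    nonce, ncvalue, cnonce = "abc123", "00000001", "0a4f113b"
    # response = KD(H(A1), nonce:nc:cnonce:qop:H(A2)), as in RFC 2617
    return KD(H(A1), "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce,
                                         "auth", H(A2)))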
class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
"""An authentication protocol defined by RFC 2069
Digest authentication improves on basic authentication because it
does not transmit passwords in the clear.
"""
auth_header = 'Authorization'
handler_order = 490 # before Basic auth
def http_error_401(self, req, fp, code, msg, headers):
host = urlparse.urlparse(req.get_full_url())[1]
retry = self.http_error_auth_reqed('www-authenticate',
host, req, headers)
self.reset_retry_count()
return retry
class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
auth_header = 'Proxy-Authorization'
handler_order = 490 # before Basic auth
def http_error_407(self, req, fp, code, msg, headers):
host = req.get_host()
retry = self.http_error_auth_reqed('proxy-authenticate',
host, req, headers)
self.reset_retry_count()
return retry
class AbstractHTTPHandler(BaseHandler):
def __init__(self, debuglevel=0):
self._debuglevel = debuglevel
def set_http_debuglevel(self, level):
self._debuglevel = level
def do_request_(self, request):
host = request.get_host()
if not host:
raise URLError('no host given')
if request.has_data(): # POST
data = request.get_data()
if not request.has_header('Content-type'):
request.add_unredirected_header(
'Content-type',
'application/x-www-form-urlencoded')
if not request.has_header('Content-length'):
request.add_unredirected_header(
'Content-length', '%d' % len(data))
sel_host = host
if request.has_proxy():
scheme, sel = splittype(request.get_selector())
sel_host, sel_path = splithost(sel)
if not request.has_header('Host'):
request.add_unredirected_header('Host', sel_host)
for name, value in self.parent.addheaders:
name = name.capitalize()
if not request.has_header(name):
request.add_unredirected_header(name, value)
return request
def do_open(self, http_class, req, **http_conn_args):
"""Return an addinfourl object for the request, using http_class.
http_class must implement the HTTPConnection API from httplib.
The addinfourl return value is a file-like object. It also
has methods and attributes including:
- info(): return a mimetools.Message object for the headers
- geturl(): return the original request URL
- code: HTTP status code
"""
host = req.get_host()
if not host:
raise URLError('no host given')
# will parse host:port
h = http_class(host, timeout=req.timeout, **http_conn_args)
h.set_debuglevel(self._debuglevel)
headers = dict(req.unredirected_hdrs)
headers.update(dict((k, v) for k, v in req.headers.items()
if k not in headers))
# We want to make an HTTP/1.1 request, but the addinfourl
# class isn't prepared to deal with a persistent connection.
# It will try to read all remaining data from the socket,
# which will block while the server waits for the next request.
# So make sure the connection gets closed after the (only)
# request.
headers["Connection"] = "close"
headers = dict(
(name.title(), val) for name, val in headers.items())
if req._tunnel_host:
tunnel_headers = {}
proxy_auth_hdr = "Proxy-Authorization"
if proxy_auth_hdr in headers:
tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
# Proxy-Authorization should not be sent to origin
# server.
del headers[proxy_auth_hdr]
h.set_tunnel(req._tunnel_host, headers=tunnel_headers)
try:
h.request(req.get_method(), req.get_selector(), req.data, headers)
except socket.error, err: # XXX what error?
h.close()
raise URLError(err)
else:
try:
r = h.getresponse(buffering=True)
except TypeError: # buffering kw not supported
r = h.getresponse()
# Pick apart the HTTPResponse object to get the addinfourl
# object initialized properly.
# Wrap the HTTPResponse object in socket's file object adapter
# for Windows. That adapter calls recv(), so delegate recv()
# to read(). This weird wrapping allows the returned object to
# have readline() and readlines() methods.
# XXX It might be better to extract the read buffering code
# out of socket._fileobject() and into a base class.
r.recv = r.read
fp = socket._fileobject(r, close=True)
resp = addinfourl(fp, r.msg, req.get_full_url())
resp.code = r.status
resp.msg = r.reason
return resp
class HTTPHandler(AbstractHTTPHandler):
def http_open(self, req):
return self.do_open(httplib.HTTPConnection, req)
http_request = AbstractHTTPHandler.do_request_
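# Illustrative sketch (editorial addition): HTTPHandler is installed by
# default, so a plain opener already routes http URLs through do_open()
# above.  The URL is hypothetical.
def _demo_http_open():
    opener = build_opener()
    return opener.open('http://www.example.com/')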
if hasattr(httplib, 'HTTPS'):
class HTTPSHandler(AbstractHTTPHandler):
def __init__(self, debuglevel=0, context=None):
AbstractHTTPHandler.__init__(self, debuglevel)
self._context = context
def https_open(self, req):
return self.do_open(httplib.HTTPSConnection, req,
context=self._context)
https_request = AbstractHTTPHandler.do_request_
class HTTPCookieProcessor(BaseHandler):
def __init__(self, cookiejar=None):
import cookielib
if cookiejar is None:
cookiejar = cookielib.CookieJar()
self.cookiejar = cookiejar
def http_request(self, request):
self.cookiejar.add_cookie_header(request)
return request
def http_response(self, request, response):
self.cookiejar.extract_cookies(response, request)
return response
https_request = http_request
https_response = http_response
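# Illustrative sketch (editorial addition): sharing one CookieJar between
# the processor above and later inspection by the caller.
def _demo_cookie_opener():
    import cookielib
    jar = cookielib.CookieJar()
    opener = build_opener(HTTPCookieProcessor(jar))
    # After opener.open(...) the jar holds whatever cookies were set.
    return opener, jar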
class UnknownHandler(BaseHandler):
def unknown_open(self, req):
type = req.get_type()
raise URLError('unknown url type: %s' % type)
def parse_keqv_list(l):
"""Parse list of key=value strings where keys are not duplicated."""
parsed = {}
for elt in l:
k, v = elt.split('=', 1)
if v[0] == '"' and v[-1] == '"':
v = v[1:-1]
parsed[k] = v
return parsed
def parse_http_list(s):
"""Parse lists as described by RFC 2068 Section 2.
In particular, parse comma-separated lists where the elements of
the list may include quoted-strings. A quoted-string could
contain a comma. A non-quoted string could have quotes in the
middle. Neither commas nor quotes count if they are escaped.
Only double-quotes count, not single-quotes.
"""
res = []
part = ''
escape = quote = False
for cur in s:
if escape:
part += cur
escape = False
continue
if quote:
if cur == '\\':
escape = True
continue
elif cur == '"':
quote = False
part += cur
continue
if cur == ',':
res.append(part)
part = ''
continue
if cur == '"':
quote = True
part += cur
# append last part
if part:
res.append(part)
return [part.strip() for part in res]
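# Illustrative sketch (editorial addition): the two parsers above working
# together on a Digest challenge, the same way retry_http_digest_auth()
# uses them.
def _demo_parse_challenge():
    challenge = 'realm="test", nonce="abc", qop="auth"'
    items = parse_http_list(challenge)
    # items == ['realm="test"', 'nonce="abc"', 'qop="auth"']
    return parse_keqv_list(items)
    # == {'realm': 'test', 'nonce': 'abc', 'qop': 'auth'}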
def _safe_gethostbyname(host):
try:
return socket.gethostbyname(host)
except socket.gaierror:
return None
class FileHandler(BaseHandler):
# Use local file or FTP depending on form of URL
def file_open(self, req):
url = req.get_selector()
if url[:2] == '//' and url[2:3] != '/' and (req.host and
req.host != 'localhost'):
req.type = 'ftp'
return self.parent.open(req)
else:
return self.open_local_file(req)
# names for the localhost
names = None
def get_names(self):
if FileHandler.names is None:
try:
FileHandler.names = tuple(
socket.gethostbyname_ex('localhost')[2] +
socket.gethostbyname_ex(socket.gethostname())[2])
except socket.gaierror:
FileHandler.names = (socket.gethostbyname('localhost'),)
return FileHandler.names
# not entirely sure what the rules are here
def open_local_file(self, req):
import email.utils
import mimetypes
host = req.get_host()
filename = req.get_selector()
localfile = url2pathname(filename)
try:
stats = os.stat(localfile)
size = stats.st_size
modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
mtype = mimetypes.guess_type(filename)[0]
headers = mimetools.Message(StringIO(
'Content-type: %s\nContent-length: %d\nLast-modified: %s\n' %
(mtype or 'text/plain', size, modified)))
if host:
host, port = splitport(host)
if not host or \
(not port and _safe_gethostbyname(host) in self.get_names()):
if host:
origurl = 'file://' + host + filename
else:
origurl = 'file://' + filename
return addinfourl(open(localfile, 'rb'), headers, origurl)
except OSError, msg:
# urllib2 users shouldn't expect OSErrors coming from urlopen()
raise URLError(msg)
raise URLError('file not on local host')
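# Illustrative sketch (editorial addition): FileHandler is a default
# handler, so file: URLs open directly.  The path below is hypothetical.
def _demo_file_open():
    opener = build_opener()
    f = opener.open('file:///tmp/example.txt')
    try:
        return f.read()
    finally:
        f.close()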
class FTPHandler(BaseHandler):
def ftp_open(self, req):
import ftplib
import mimetypes
host = req.get_host()
if not host:
raise URLError('ftp error: no host given')
host, port = splitport(host)
if port is None:
port = ftplib.FTP_PORT
else:
port = int(port)
# username/password handling
user, host = splituser(host)
if user:
user, passwd = splitpasswd(user)
else:
passwd = None
host = unquote(host)
user = user or ''
passwd = passwd or ''
try:
host = socket.gethostbyname(host)
except socket.error, msg:
raise URLError(msg)
path, attrs = splitattr(req.get_selector())
dirs = path.split('/')
dirs = map(unquote, dirs)
dirs, file = dirs[:-1], dirs[-1]
if dirs and not dirs[0]:
dirs = dirs[1:]
try:
fw = self.connect_ftp(user, passwd, host, port, dirs, req.timeout)
type = file and 'I' or 'D'
for attr in attrs:
attr, value = splitvalue(attr)
if attr.lower() == 'type' and \
value in ('a', 'A', 'i', 'I', 'd', 'D'):
type = value.upper()
fp, retrlen = fw.retrfile(file, type)
headers = ""
mtype = mimetypes.guess_type(req.get_full_url())[0]
if mtype:
headers += "Content-type: %s\n" % mtype
if retrlen is not None and retrlen >= 0:
headers += "Content-length: %d\n" % retrlen
sf = StringIO(headers)
headers = mimetools.Message(sf)
return addinfourl(fp, headers, req.get_full_url())
except ftplib.all_errors, msg:
raise URLError, ('ftp error: %s' % msg), sys.exc_info()[2]
def connect_ftp(self, user, passwd, host, port, dirs, timeout):
fw = ftpwrapper(user, passwd, host, port, dirs, timeout,
persistent=False)
## fw.ftp.set_debuglevel(1)
return fw
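# Illustrative sketch (editorial addition): the ';type=' attribute parsed
# in ftp_open() above selects the transfer mode.  Host and path are
# hypothetical.
def _demo_ftp_open():
    opener = build_opener()
    # ';type=a' forces an ASCII retrieval instead of the binary default.
    return opener.open('ftp://ftp.example.com/pub/README;type=a')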
class CacheFTPHandler(FTPHandler):
# XXX would be nice to have pluggable cache strategies
# XXX this stuff is definitely not thread safe
def __init__(self):
self.cache = {}
self.timeout = {}
self.soonest = 0
self.delay = 60
self.max_conns = 16
def setTimeout(self, t):
self.delay = t
def setMaxConns(self, m):
self.max_conns = m
def connect_ftp(self, user, passwd, host, port, dirs, timeout):
key = user, host, port, '/'.join(dirs), timeout
if key in self.cache:
self.timeout[key] = time.time() + self.delay
else:
self.cache[key] = ftpwrapper(user, passwd, host, port, dirs, timeout)
self.timeout[key] = time.time() + self.delay
self.check_cache()
return self.cache[key]
def check_cache(self):
# first check for old ones
t = time.time()
if self.soonest <= t:
for k, v in self.timeout.items():
if v < t:
self.cache[k].close()
del self.cache[k]
del self.timeout[k]
self.soonest = min(self.timeout.values())
# then check the size
if len(self.cache) == self.max_conns:
for k, v in self.timeout.items():
if v == self.soonest:
del self.cache[k]
del self.timeout[k]
break
self.soonest = min(self.timeout.values())
def clear_cache(self):
for conn in self.cache.values():
conn.close()
self.cache.clear()
self.timeout.clear()
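# Illustrative sketch (editorial addition): swapping in the caching handler
# above so repeated FTP requests reuse live connections.
def _demo_cached_ftp_opener():
    handler = CacheFTPHandler()
    handler.setTimeout(30)      # drop connections idle for 30 seconds
    handler.setMaxConns(4)      # keep at most four cached connections
    return build_opener(handler)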
"""An extensible library for opening URLs using a variety of protocols
The simplest way to use this module is to call the urlopen function,
which accepts a string containing a URL or a Request object (described
below). It opens the URL and returns the results as file-like
object; the returned object has some extra methods described below.
The OpenerDirector manages a collection of Handler objects that do
all the actual work. Each Handler implements a particular protocol or
option. The OpenerDirector is a composite object that invokes the
Handlers needed to open the requested URL. For example, the
HTTPHandler performs HTTP GET and POST requests and deals with
non-error returns. The HTTPRedirectHandler automatically deals with
HTTP 301, 302, 303 and 307 redirect errors, and the HTTPDigestAuthHandler
deals with digest authentication.
urlopen(url, data=None) -- Basic usage is the same as original
urllib. pass the url and optionally data to post to an HTTP URL, and
get a file-like object back. One difference is that you can also pass
a Request instance instead of URL. Raises a URLError (subclass of
IOError); for HTTP errors, raises an HTTPError, which can also be
treated as a valid response.
build_opener -- Function that creates a new OpenerDirector instance.
Will install the default handlers. Accepts one or more Handlers as
arguments, either instances or Handler classes that it will
instantiate. If one of the argument is a subclass of the default
handler, the argument will be installed instead of the default.
install_opener -- Installs a new opener as the default opener.
objects of interest:
OpenerDirector -- Sets up the User Agent as the Python-urllib client and manages
the Handler classes, while dealing with requests and responses.
Request -- An object that encapsulates the state of a request. The
state can be as simple as the URL. It can also include extra HTTP
headers, e.g. a User-Agent.
BaseHandler --
exceptions:
URLError -- A subclass of IOError, individual protocols have their own
specific subclass.
HTTPError -- Also a valid HTTP response, so you can treat an HTTP error
as an exceptional event or valid response.
internals:
BaseHandler and parent
_call_chain conventions
Example usage:
import urllib2
# set up authentication info
authinfo = urllib2.HTTPBasicAuthHandler()
authinfo.add_password(realm='PDQ Application',
uri='https://mahler:8092/site-updates.py',
user='klem',
passwd='geheim$parole')
proxy_support = urllib2.ProxyHandler({"http" : "http://ahad-haam:3128"})
# build a new opener that adds authentication and caching FTP handlers
opener = urllib2.build_opener(proxy_support, authinfo, urllib2.CacheFTPHandler)
# install it
urllib2.install_opener(opener)
f = urllib2.urlopen('http://www.python.org/')
"""
# XXX issues:
# If an authentication error handler that tries to perform
# authentication for some reason but fails, how should the error be
# signalled? The client needs to know the HTTP error code. But if
# the handler knows that the problem was, e.g., that it didn't know
# that hash algo that requested in the challenge, it would be good to
# pass that information along to the client, too.
# ftp errors aren't handled cleanly
# check digest against correct (i.e. non-apache) implementation
# Possible extensions:
# complex proxies XXX not sure what exactly was meant by this
# abstract factory for opener
import base64
import hashlib
import httplib
import mimetools
import os
import posixpath
import random
import re
import socket
import sys
import time
import urlparse
import bisect
import warnings
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
# check for SSL
try:
import ssl
except ImportError:
_have_ssl = False
else:
_have_ssl = True
from urllib import (unwrap, unquote, splittype, splithost, quote,
addinfourl, splitport, splittag, toBytes,
splitattr, ftpwrapper, splituser, splitpasswd, splitvalue)
# support for FileHandler, proxies via environment variables
from urllib import localhost, url2pathname, getproxies, proxy_bypass
# used in User-Agent header sent
__version__ = sys.version[:3]
_opener = None
def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
cafile=None, capath=None, cadefault=False, context=None):
global _opener
if cafile or capath or cadefault:
if context is not None:
raise ValueError(
"You can't pass both context and any of cafile, capath, and "
"cadefault"
)
if not _have_ssl:
raise ValueError('SSL support not available')
context = ssl.create_default_context(purpose=ssl.Purpose.SERVER_AUTH,
cafile=cafile,
capath=capath)
https_handler = HTTPSHandler(context=context)
opener = build_opener(https_handler)
elif context:
https_handler = HTTPSHandler(context=context)
opener = build_opener(https_handler)
elif _opener is None:
_opener = opener = build_opener()
else:
opener = _opener
return opener.open(url, data, timeout)
def install_opener(opener):
global _opener
_opener = opener
# do these error classes make sense?
# make sure all of the IOError stuff is overridden. we just want to be
# subtypes.
class URLError(IOError):
# URLError is a sub-type of IOError, but it doesn't share any of
# the implementation. need to override __init__ and __str__.
# It sets self.args for compatibility with other EnvironmentError
# subclasses, but args doesn't have the typical format with errno in
# slot 0 and strerror in slot 1. This may be better than nothing.
def __init__(self, reason):
self.args = reason,
self.reason = reason
def __str__(self):
return '<urlopen error %s>' % self.reason
class HTTPError(URLError, addinfourl):
"""Raised when HTTP error occurs, but also acts like non-error return"""
__super_init = addinfourl.__init__
def __init__(self, url, code, msg, hdrs, fp):
self.code = code
self.msg = msg
self.hdrs = hdrs
self.fp = fp
self.filename = url
# The addinfourl classes depend on fp being a valid file
# object. In some cases, the HTTPError may not have a valid
# file object. If this happens, the simplest workaround is to
# not initialize the base classes.
if fp is not None:
self.__super_init(fp, hdrs, url, code)
def __str__(self):
return 'HTTP Error %s: %s' % (self.code, self.msg)
# since URLError specifies a .reason attribute, HTTPError should also
# provide this attribute. See issue13211 fo discussion.
@property
def reason(self):
return self.msg
def info(self):
return self.hdrs
# copied from cookielib.py
_cut_port_re = re.compile(r":\d+$")
def request_host(request):
"""Return request-host, as defined by RFC 2965.
Variation from RFC: returned value is lowercased, for convenient
comparison.
"""
url = request.get_full_url()
host = urlparse.urlparse(url)[1]
if host == "":
host = request.get_header("Host", "")
# remove port, if present
host = _cut_port_re.sub("", host, 1)
return host.lower()
class Request:
def __init__(self, url, data=None, headers={},
origin_req_host=None, unverifiable=False):
# unwrap('<URL:type://host/path>') --> 'type://host/path'
self.__original = unwrap(url)
self.__original, self.__fragment = splittag(self.__original)
self.type = None
# self.__r_type is what's left after doing the splittype
self.host = None
self.port = None
self._tunnel_host = None
self.data = data
self.headers = {}
for key, value in headers.items():
self.add_header(key, value)
self.unredirected_hdrs = {}
if origin_req_host is None:
origin_req_host = request_host(self)
self.origin_req_host = origin_req_host
self.unverifiable = unverifiable
def __getattr__(self, attr):
# XXX this is a fallback mechanism to guard against these
# methods getting called in a non-standard order. this may be
# too complicated and/or unnecessary.
# XXX should the __r_XXX attributes be public?
if attr in ('_Request__r_type', '_Request__r_host'):
getattr(self, 'get_' + attr[12:])()
return self.__dict__[attr]
raise AttributeError, attr
def get_method(self):
if self.has_data():
return "POST"
else:
return "GET"
# XXX these helper methods are lame
def add_data(self, data):
self.data = data
def has_data(self):
return self.data is not None
def get_data(self):
return self.data
def get_full_url(self):
if self.__fragment:
return '%s#%s' % (self.__original, self.__fragment)
else:
return self.__original
def get_type(self):
if self.type is None:
self.type, self.__r_type = splittype(self.__original)
if self.type is None:
raise ValueError, "unknown url type: %s" % self.__original
return self.type
def get_host(self):
if self.host is None:
self.host, self.__r_host = splithost(self.__r_type)
if self.host:
self.host = unquote(self.host)
return self.host
def get_selector(self):
return self.__r_host
def set_proxy(self, host, type):
if self.type == 'https' and not self._tunnel_host:
self._tunnel_host = self.host
else:
self.type = type
self.__r_host = self.__original
self.host = host
def has_proxy(self):
return self.__r_host == self.__original
def get_origin_req_host(self):
return self.origin_req_host
def is_unverifiable(self):
return self.unverifiable
def add_header(self, key, val):
# useful for something like authentication
self.headers[key.capitalize()] = val
def add_unredirected_header(self, key, val):
# will not be added to a redirected request
self.unredirected_hdrs[key.capitalize()] = val
def has_header(self, header_name):
return (header_name in self.headers or
header_name in self.unredirected_hdrs)
def get_header(self, header_name, default=None):
return self.headers.get(
header_name,
self.unredirected_hdrs.get(header_name, default))
def header_items(self):
hdrs = self.unredirected_hdrs.copy()
hdrs.update(self.headers)
return hdrs.items()
class OpenerDirector:
def __init__(self):
client_version = "Python-urllib/%s" % __version__
self.addheaders = [('User-agent', client_version)]
# self.handlers is retained only for backward compatibility
self.handlers = []
# manage the individual handlers
self.handle_open = {}
self.handle_error = {}
self.process_response = {}
self.process_request = {}
def add_handler(self, handler):
if not hasattr(handler, "add_parent"):
raise TypeError("expected BaseHandler instance, got %r" %
type(handler))
added = False
for meth in dir(handler):
if meth in ["redirect_request", "do_open", "proxy_open"]:
# oops, coincidental match
continue
i = meth.find("_")
protocol = meth[:i]
condition = meth[i+1:]
if condition.startswith("error"):
j = condition.find("_") + i + 1
kind = meth[j+1:]
try:
kind = int(kind)
except ValueError:
pass
lookup = self.handle_error.get(protocol, {})
self.handle_error[protocol] = lookup
elif condition == "open":
kind = protocol
lookup = self.handle_open
elif condition == "response":
kind = protocol
lookup = self.process_response
elif condition == "request":
kind = protocol
lookup = self.process_request
else:
continue
handlers = lookup.setdefault(kind, [])
if handlers:
bisect.insort(handlers, handler)
else:
handlers.append(handler)
added = True
if added:
bisect.insort(self.handlers, handler)
handler.add_parent(self)
def close(self):
# Only exists for backwards compatibility.
pass
def _call_chain(self, chain, kind, meth_name, *args):
# Handlers raise an exception if no one else should try to handle
# the request, or return None if they can't but another handler
# could. Otherwise, they return the response.
handlers = chain.get(kind, ())
for handler in handlers:
func = getattr(handler, meth_name)
result = func(*args)
if result is not None:
return result
def open(self, fullurl, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
# accept a URL or a Request object
if isinstance(fullurl, basestring):
req = Request(fullurl, data)
else:
req = fullurl
if data is not None:
req.add_data(data)
req.timeout = timeout
protocol = req.get_type()
# pre-process request
meth_name = protocol+"_request"
for processor in self.process_request.get(protocol, []):
meth = getattr(processor, meth_name)
req = meth(req)
response = self._open(req, data)
# post-process response
meth_name = protocol+"_response"
for processor in self.process_response.get(protocol, []):
meth = getattr(processor, meth_name)
response = meth(req, response)
return response
def _open(self, req, data=None):
result = self._call_chain(self.handle_open, 'default',
'default_open', req)
if result:
return result
protocol = req.get_type()
result = self._call_chain(self.handle_open, protocol, protocol +
'_open', req)
if result:
return result
return self._call_chain(self.handle_open, 'unknown',
'unknown_open', req)
def error(self, proto, *args):
if proto in ('http', 'https'):
# XXX http[s] protocols are special-cased
dict = self.handle_error['http'] # https is not different than http
proto = args[2] # YUCK!
meth_name = 'http_error_%s' % proto
http_err = 1
orig_args = args
else:
dict = self.handle_error
meth_name = proto + '_error'
http_err = 0
args = (dict, proto, meth_name) + args
result = self._call_chain(*args)
if result:
return result
if http_err:
args = (dict, 'default', 'http_error_default') + orig_args
return self._call_chain(*args)
# XXX probably also want an abstract factory that knows when it makes
# sense to skip a superclass in favor of a subclass and when it might
# make sense to include both
def build_opener(*handlers):
"""Create an opener object from a list of handlers.
The opener will use several default handlers, including support
for HTTP, FTP and when applicable, HTTPS.
If any of the handlers passed as arguments are subclasses of the
default handlers, the default handlers will not be used.
"""
import types
def isclass(obj):
return isinstance(obj, (types.ClassType, type))
opener = OpenerDirector()
default_classes = [ProxyHandler, UnknownHandler, HTTPHandler,
HTTPDefaultErrorHandler, HTTPRedirectHandler,
FTPHandler, FileHandler, HTTPErrorProcessor]
if hasattr(httplib, 'HTTPS'):
default_classes.append(HTTPSHandler)
skip = set()
for klass in default_classes:
for check in handlers:
if isclass(check):
if issubclass(check, klass):
skip.add(klass)
elif isinstance(check, klass):
skip.add(klass)
for klass in skip:
default_classes.remove(klass)
for klass in default_classes:
opener.add_handler(klass())
for h in handlers:
if isclass(h):
h = h()
opener.add_handler(h)
return opener
class BaseHandler:
handler_order = 500
def add_parent(self, parent):
self.parent = parent
def close(self):
# Only exists for backwards compatibility
pass
def __lt__(self, other):
if not hasattr(other, "handler_order"):
# Try to preserve the old behavior of having custom classes
# inserted after default ones (works only for custom user
# classes which are not aware of handler_order).
return True
return self.handler_order < other.handler_order
class HTTPErrorProcessor(BaseHandler):
"""Process HTTP error responses."""
handler_order = 1000 # after all other processing
def http_response(self, request, response):
code, msg, hdrs = response.code, response.msg, response.info()
# According to RFC 2616, "2xx" code indicates that the client's
# request was successfully received, understood, and accepted.
if not (200 <= code < 300):
response = self.parent.error(
'http', request, response, code, msg, hdrs)
return response
https_response = http_response
class HTTPDefaultErrorHandler(BaseHandler):
def http_error_default(self, req, fp, code, msg, hdrs):
raise HTTPError(req.get_full_url(), code, msg, hdrs, fp)
class HTTPRedirectHandler(BaseHandler):
# maximum number of redirections to any single URL
# this is needed because of the state that cookies introduce
max_repeats = 4
# maximum total number of redirections (regardless of URL) before
# assuming we're in a loop
max_redirections = 10
def redirect_request(self, req, fp, code, msg, headers, newurl):
"""Return a Request or None in response to a redirect.
This is called by the http_error_30x methods when a
redirection response is received. If a redirection should
take place, return a new Request to allow http_error_30x to
perform the redirect. Otherwise, raise HTTPError if no-one
else should try to handle this url. Return None if you can't
but another Handler might.
"""
m = req.get_method()
if (code in (301, 302, 303, 307) and m in ("GET", "HEAD")
or code in (301, 302, 303) and m == "POST"):
# Strictly (according to RFC 2616), 301 or 302 in response
# to a POST MUST NOT cause a redirection without confirmation
# from the user (of urllib2, in this case). In practice,
# essentially all clients do redirect in this case, so we
# do the same.
# be conciliant with URIs containing a space
newurl = newurl.replace(' ', '%20')
newheaders = dict((k,v) for k,v in req.headers.items()
if k.lower() not in ("content-length", "content-type")
)
return Request(newurl,
headers=newheaders,
origin_req_host=req.get_origin_req_host(),
unverifiable=True)
else:
raise HTTPError(req.get_full_url(), code, msg, headers, fp)
# Implementation note: To avoid the server sending us into an
# infinite loop, the request object needs to track what URLs we
# have already seen. Do this by adding a handler-specific
# attribute to the Request object.
def http_error_302(self, req, fp, code, msg, headers):
# Some servers (incorrectly) return multiple Location headers
# (so probably same goes for URI). Use first header.
if 'location' in headers:
newurl = headers.getheaders('location')[0]
elif 'uri' in headers:
newurl = headers.getheaders('uri')[0]
else:
return
# fix a possible malformed URL
urlparts = urlparse.urlparse(newurl)
if not urlparts.path and urlparts.netloc:
urlparts = list(urlparts)
urlparts[2] = "/"
newurl = urlparse.urlunparse(urlparts)
newurl = urlparse.urljoin(req.get_full_url(), newurl)
# For security reasons we do not allow redirects to protocols
# other than HTTP, HTTPS or FTP.
newurl_lower = newurl.lower()
if not (newurl_lower.startswith('http://') or
newurl_lower.startswith('https://') or
newurl_lower.startswith('ftp://')):
raise HTTPError(newurl, code,
msg + " - Redirection to url '%s' is not allowed" %
newurl,
headers, fp)
# XXX Probably want to forget about the state of the current
# request, although that might interact poorly with other
# handlers that also use handler-specific request attributes
new = self.redirect_request(req, fp, code, msg, headers, newurl)
if new is None:
return
# loop detection
# .redirect_dict has a key url if url was previously visited.
if hasattr(req, 'redirect_dict'):
visited = new.redirect_dict = req.redirect_dict
if (visited.get(newurl, 0) >= self.max_repeats or
len(visited) >= self.max_redirections):
raise HTTPError(req.get_full_url(), code,
self.inf_msg + msg, headers, fp)
else:
visited = new.redirect_dict = req.redirect_dict = {}
visited[newurl] = visited.get(newurl, 0) + 1
# Don't close the fp until we are sure that we won't use it
# with HTTPError.
fp.read()
fp.close()
return self.parent.open(new, timeout=req.timeout)
http_error_301 = http_error_303 = http_error_307 = http_error_302
inf_msg = "The HTTP server returned a redirect error that would " \
"lead to an infinite loop.\n" \
"The last 30x error message was:\n"
def _parse_proxy(proxy):
"""Return (scheme, user, password, host/port) given a URL or an authority.
If a URL is supplied, it must have an authority (host:port) component.
According to RFC 3986, having an authority component means the URL must
have two slashes after the scheme:
>>> _parse_proxy('file:/ftp.example.com/')
Traceback (most recent call last):
ValueError: proxy URL with no authority: 'file:/ftp.example.com/'
The first three items of the returned tuple may be None.
Examples of authority parsing:
>>> _parse_proxy('proxy.example.com')
(None, None, None, 'proxy.example.com')
>>> _parse_proxy('proxy.example.com:3128')
(None, None, None, 'proxy.example.com:3128')
The authority component may optionally include userinfo (assumed to be
username:password):
>>> _parse_proxy('joe:[email protected]')
(None, 'joe', 'password', 'proxy.example.com')
>>> _parse_proxy('joe:[email protected]:3128')
(None, 'joe', 'password', 'proxy.example.com:3128')
Same examples, but with URLs instead:
>>> _parse_proxy('http://proxy.example.com/')
('http', None, None, 'proxy.example.com')
>>> _parse_proxy('http://proxy.example.com:3128/')
('http', None, None, 'proxy.example.com:3128')
>>> _parse_proxy('http://joe:[email protected]/')
('http', 'joe', 'password', 'proxy.example.com')
>>> _parse_proxy('http://joe:[email protected]:3128')
('http', 'joe', 'password', 'proxy.example.com:3128')
Everything after the authority is ignored:
>>> _parse_proxy('ftp://joe:[email protected]/rubbish:3128')
('ftp', 'joe', 'password', 'proxy.example.com')
Test for no trailing '/' case:
>>> _parse_proxy('http://joe:[email protected]')
('http', 'joe', 'password', 'proxy.example.com')
"""
scheme, r_scheme = splittype(proxy)
if not r_scheme.startswith("/"):
# authority
scheme = None
authority = proxy
else:
# URL
if not r_scheme.startswith("//"):
raise ValueError("proxy URL with no authority: %r" % proxy)
# We have an authority, so for RFC 3986-compliant URLs (by ss 3.
# and 3.3.), path is empty or starts with '/'
end = r_scheme.find("/", 2)
if end == -1:
end = None
authority = r_scheme[2:end]
userinfo, hostport = splituser(authority)
if userinfo is not None:
user, password = splitpasswd(userinfo)
else:
user = password = None
return scheme, user, password, hostport
class ProxyHandler(BaseHandler):
# Proxies must be in front
handler_order = 100
def __init__(self, proxies=None):
if proxies is None:
proxies = getproxies()
assert hasattr(proxies, 'has_key'), "proxies must be a mapping"
self.proxies = proxies
for type, url in proxies.items():
setattr(self, '%s_open' % type,
lambda r, proxy=url, type=type, meth=self.proxy_open: \
meth(r, proxy, type))
def proxy_open(self, req, proxy, type):
orig_type = req.get_type()
proxy_type, user, password, hostport = _parse_proxy(proxy)
if proxy_type is None:
proxy_type = orig_type
if req.host and proxy_bypass(req.host):
return None
if user and password:
user_pass = '%s:%s' % (unquote(user), unquote(password))
creds = base64.b64encode(user_pass).strip()
req.add_header('Proxy-authorization', 'Basic ' + creds)
hostport = unquote(hostport)
req.set_proxy(hostport, proxy_type)
if orig_type == proxy_type or orig_type == 'https':
# let other handlers take care of it
return None
else:
# need to start over, because the other handlers don't
# grok the proxy's URL type
# e.g. if we have a constructor arg proxies like so:
# {'http': 'ftp://proxy.example.com'}, we may end up turning
# a request for http://acme.example.com/a into one for
# ftp://proxy.example.com/a
return self.parent.open(req, timeout=req.timeout)
class HTTPPasswordMgr:
def __init__(self):
self.passwd = {}
def add_password(self, realm, uri, user, passwd):
# uri could be a single URI or a sequence
if isinstance(uri, basestring):
uri = [uri]
if not realm in self.passwd:
self.passwd[realm] = {}
for default_port in True, False:
reduced_uri = tuple(
[self.reduce_uri(u, default_port) for u in uri])
self.passwd[realm][reduced_uri] = (user, passwd)
def find_user_password(self, realm, authuri):
domains = self.passwd.get(realm, {})
for default_port in True, False:
reduced_authuri = self.reduce_uri(authuri, default_port)
for uris, authinfo in domains.iteritems():
for uri in uris:
if self.is_suburi(uri, reduced_authuri):
return authinfo
return None, None
def reduce_uri(self, uri, default_port=True):
"""Accept authority or URI and extract only the authority and path."""
# note HTTP URLs do not have a userinfo component
parts = urlparse.urlsplit(uri)
if parts[1]:
# URI
scheme = parts[0]
authority = parts[1]
path = parts[2] or '/'
else:
# host or host:port
scheme = None
authority = uri
path = '/'
host, port = splitport(authority)
if default_port and port is None and scheme is not None:
dport = {"http": 80,
"https": 443,
}.get(scheme)
if dport is not None:
authority = "%s:%d" % (host, dport)
return authority, path
def is_suburi(self, base, test):
"""Check if test is below base in a URI tree
Both args must be URIs in reduced form.
"""
if base == test:
return True
if base[0] != test[0]:
return False
common = posixpath.commonprefix((base[1], test[1]))
if len(common) == len(base[1]):
return True
return False
class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr):
def find_user_password(self, realm, authuri):
user, password = HTTPPasswordMgr.find_user_password(self, realm,
authuri)
if user is not None:
return user, password
return HTTPPasswordMgr.find_user_password(self, None, authuri)
class AbstractBasicAuthHandler:
# XXX this allows for multiple auth-schemes, but will stupidly pick
# the last one with a realm specified.
# allow for double- and single-quoted realm values
# (single quotes are a violation of the RFC, but appear in the wild)
rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+'
'realm=(["\']?)([^"\']*)\\2', re.I)
# XXX could pre-emptively send auth info already accepted (RFC 2617,
# end of section 2, and section 1.2 immediately after "credentials"
# production).
def __init__(self, password_mgr=None):
if password_mgr is None:
password_mgr = HTTPPasswordMgr()
self.passwd = password_mgr
self.add_password = self.passwd.add_password
def http_error_auth_reqed(self, authreq, host, req, headers):
# host may be an authority (without userinfo) or a URL with an
# authority
# XXX could be multiple headers
authreq = headers.get(authreq, None)
if authreq:
mo = AbstractBasicAuthHandler.rx.search(authreq)
if mo:
scheme, quote, realm = mo.groups()
if quote not in ['"', "'"]:
warnings.warn("Basic Auth Realm was unquoted",
UserWarning, 2)
if scheme.lower() == 'basic':
return self.retry_http_basic_auth(host, req, realm)
def retry_http_basic_auth(self, host, req, realm):
user, pw = self.passwd.find_user_password(realm, host)
if pw is not None:
raw = "%s:%s" % (user, pw)
auth = 'Basic %s' % base64.b64encode(raw).strip()
if req.get_header(self.auth_header, None) == auth:
return None
req.add_unredirected_header(self.auth_header, auth)
return self.parent.open(req, timeout=req.timeout)
else:
return None
class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
auth_header = 'Authorization'
def http_error_401(self, req, fp, code, msg, headers):
url = req.get_full_url()
response = self.http_error_auth_reqed('www-authenticate',
url, req, headers)
return response
class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
auth_header = 'Proxy-authorization'
def http_error_407(self, req, fp, code, msg, headers):
# http_error_auth_reqed requires that there is no userinfo component in
# authority. Assume there isn't one, since urllib2 does not (and
# should not, RFC 3986 s. 3.2.1) support requests for URLs containing
# userinfo.
authority = req.get_host()
response = self.http_error_auth_reqed('proxy-authenticate',
authority, req, headers)
return response
def randombytes(n):
"""Return n random bytes."""
# Use /dev/urandom if it is available. Fall back to random module
# if not. It might be worthwhile to extend this function to use
# other platform-specific mechanisms for getting random bytes.
if os.path.exists("/dev/urandom"):
f = open("/dev/urandom")
s = f.read(n)
f.close()
return s
else:
L = [chr(random.randrange(0, 256)) for i in range(n)]
return "".join(L)
class AbstractDigestAuthHandler:
# Digest authentication is specified in RFC 2617.
# XXX The client does not inspect the Authentication-Info header
# in a successful response.
# XXX It should be possible to test this implementation against
# a mock server that just generates a static set of challenges.
# XXX qop="auth-int" supports is shaky
def __init__(self, passwd=None):
if passwd is None:
passwd = HTTPPasswordMgr()
self.passwd = passwd
self.add_password = self.passwd.add_password
self.retried = 0
self.nonce_count = 0
self.last_nonce = None
def reset_retry_count(self):
self.retried = 0
def http_error_auth_reqed(self, auth_header, host, req, headers):
authreq = headers.get(auth_header, None)
if self.retried > 5:
# Don't fail endlessly - if we failed once, we'll probably
# fail a second time. Hm. Unless the Password Manager is
# prompting for the information. Crap. This isn't great
# but it's better than the current 'repeat until recursion
# depth exceeded' approach <wink>
raise HTTPError(req.get_full_url(), 401, "digest auth failed",
headers, None)
else:
self.retried += 1
if authreq:
scheme = authreq.split()[0]
if scheme.lower() == 'digest':
return self.retry_http_digest_auth(req, authreq)
def retry_http_digest_auth(self, req, auth):
token, challenge = auth.split(' ', 1)
chal = parse_keqv_list(parse_http_list(challenge))
auth = self.get_authorization(req, chal)
if auth:
auth_val = 'Digest %s' % auth
if req.headers.get(self.auth_header, None) == auth_val:
return None
req.add_unredirected_header(self.auth_header, auth_val)
resp = self.parent.open(req, timeout=req.timeout)
return resp
def get_cnonce(self, nonce):
# The cnonce-value is an opaque
# quoted string value provided by the client and used by both client
# and server to avoid chosen plaintext attacks, to provide mutual
# authentication, and to provide some message integrity protection.
# This isn't a fabulous effort, but it's probably Good Enough.
dig = hashlib.sha1("%s:%s:%s:%s" % (self.nonce_count, nonce, time.ctime(),
randombytes(8))).hexdigest()
return dig[:16]
def get_authorization(self, req, chal):
try:
realm = chal['realm']
nonce = chal['nonce']
qop = chal.get('qop')
algorithm = chal.get('algorithm', 'MD5')
# mod_digest doesn't send an opaque, even though it isn't
# supposed to be optional
opaque = chal.get('opaque', None)
except KeyError:
return None
H, KD = self.get_algorithm_impls(algorithm)
if H is None:
return None
user, pw = self.passwd.find_user_password(realm, req.get_full_url())
if user is None:
return None
# XXX not implemented yet
if req.has_data():
entdig = self.get_entity_digest(req.get_data(), chal)
else:
entdig = None
A1 = "%s:%s:%s" % (user, realm, pw)
A2 = "%s:%s" % (req.get_method(),
# XXX selector: what about proxies and full urls
req.get_selector())
if qop == 'auth':
if nonce == self.last_nonce:
self.nonce_count += 1
else:
self.nonce_count = 1
self.last_nonce = nonce
ncvalue = '%08x' % self.nonce_count
cnonce = self.get_cnonce(nonce)
noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2))
respdig = KD(H(A1), noncebit)
elif qop is None:
respdig = KD(H(A1), "%s:%s" % (nonce, H(A2)))
else:
# XXX handle auth-int.
raise URLError("qop '%s' is not supported." % qop)
# XXX should the partial digests be encoded too?
base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
'response="%s"' % (user, realm, nonce, req.get_selector(),
respdig)
if opaque:
base += ', opaque="%s"' % opaque
if entdig:
base += ', digest="%s"' % entdig
base += ', algorithm="%s"' % algorithm
if qop:
base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce)
return base
def get_algorithm_impls(self, algorithm):
# algorithm should be case-insensitive according to RFC2617
algorithm = algorithm.upper()
# lambdas assume digest modules are imported at the top level
if algorithm == 'MD5':
H = lambda x: hashlib.md5(x).hexdigest()
elif algorithm == 'SHA':
H = lambda x: hashlib.sha1(x).hexdigest()
# XXX MD5-sess
else:
raise ValueError("Unsupported digest authentication "
"algorithm %r" % algorithm.lower())
KD = lambda s, d: H("%s:%s" % (s, d))
return H, KD
def get_entity_digest(self, data, chal):
# XXX not implemented yet
return None
class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
"""An authentication protocol defined by RFC 2069
Digest authentication improves on basic authentication because it
does not transmit passwords in the clear.
"""
auth_header = 'Authorization'
handler_order = 490 # before Basic auth
def http_error_401(self, req, fp, code, msg, headers):
host = urlparse.urlparse(req.get_full_url())[1]
retry = self.http_error_auth_reqed('www-authenticate',
host, req, headers)
self.reset_retry_count()
return retry
class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
auth_header = 'Proxy-Authorization'
handler_order = 490 # before Basic auth
def http_error_407(self, req, fp, code, msg, headers):
host = req.get_host()
retry = self.http_error_auth_reqed('proxy-authenticate',
host, req, headers)
self.reset_retry_count()
return retry
class AbstractHTTPHandler(BaseHandler):
def __init__(self, debuglevel=0):
self._debuglevel = debuglevel
def set_http_debuglevel(self, level):
self._debuglevel = level
def do_request_(self, request):
host = request.get_host()
if not host:
raise URLError('no host given')
if request.has_data(): # POST
data = request.get_data()
if not request.has_header('Content-type'):
request.add_unredirected_header(
'Content-type',
'application/x-www-form-urlencoded')
if not request.has_header('Content-length'):
request.add_unredirected_header(
'Content-length', '%d' % len(data))
sel_host = host
if request.has_proxy():
scheme, sel = splittype(request.get_selector())
sel_host, sel_path = splithost(sel)
if not request.has_header('Host'):
request.add_unredirected_header('Host', sel_host)
for name, value in self.parent.addheaders:
name = name.capitalize()
if not request.has_header(name):
request.add_unredirected_header(name, value)
return request
def do_open(self, http_class, req, **http_conn_args):
"""Return an addinfourl object for the request, using http_class.
http_class must implement the HTTPConnection API from httplib.
The addinfourl return value is a file-like object. It also
has methods and attributes including:
- info(): return a mimetools.Message object for the headers
- geturl(): return the original request URL
- code: HTTP status code
"""
host = req.get_host()
if not host:
raise URLError('no host given')
# will parse host:port
h = http_class(host, timeout=req.timeout, **http_conn_args)
h.set_debuglevel(self._debuglevel)
headers = dict(req.unredirected_hdrs)
headers.update(dict((k, v) for k, v in req.headers.items()
if k not in headers))
# We want to make an HTTP/1.1 request, but the addinfourl
# class isn't prepared to deal with a persistent connection.
# It will try to read all remaining data from the socket,
# which will block while the server waits for the next request.
# So make sure the connection gets closed after the (only)
# request.
headers["Connection"] = "close"
headers = dict(
(name.title(), val) for name, val in headers.items())
if req._tunnel_host:
tunnel_headers = {}
proxy_auth_hdr = "Proxy-Authorization"
if proxy_auth_hdr in headers:
tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
# Proxy-Authorization should not be sent to origin
# server.
del headers[proxy_auth_hdr]
h.set_tunnel(req._tunnel_host, headers=tunnel_headers)
try:
h.request(req.get_method(), req.get_selector(), req.data, headers)
except socket.error, err: # XXX what error?
h.close()
raise URLError(err)
else:
try:
r = h.getresponse(buffering=True)
except TypeError: # buffering kw not supported
r = h.getresponse()
# Pick apart the HTTPResponse object to get the addinfourl
# object initialized properly.
# Wrap the HTTPResponse object in socket's file object adapter
# for Windows. That adapter calls recv(), so delegate recv()
# to read(). This weird wrapping allows the returned object to
# have readline() and readlines() methods.
# XXX It might be better to extract the read buffering code
# out of socket._fileobject() and into a base class.
r.recv = r.read
fp = socket._fileobject(r, close=True)
resp = addinfourl(fp, r.msg, req.get_full_url())
resp.code = r.status
resp.msg = r.reason
return resp
class HTTPHandler(AbstractHTTPHandler):
def http_open(self, req):
return self.do_open(httplib.HTTPConnection, req)
http_request = AbstractHTTPHandler.do_request_
if hasattr(httplib, 'HTTPS'):
class HTTPSHandler(AbstractHTTPHandler):
def __init__(self, debuglevel=0, context=None):
AbstractHTTPHandler.__init__(self, debuglevel)
self._context = context
def https_open(self, req):
return self.do_open(httplib.HTTPSConnection, req,
context=self._context)
https_request = AbstractHTTPHandler.do_request_
class HTTPCookieProcessor(BaseHandler):
def __init__(self, cookiejar=None):
import cookielib
if cookiejar is None:
cookiejar = cookielib.CookieJar()
self.cookiejar = cookiejar
def http_request(self, request):
self.cookiejar.add_cookie_header(request)
return request
def http_response(self, request, response):
self.cookiejar.extract_cookies(response, request)
return response
https_request = http_request
https_response = http_response
class UnknownHandler(BaseHandler):
def unknown_open(self, req):
type = req.get_type()
raise URLError('unknown url type: %s' % type)
def parse_keqv_list(l):
"""Parse list of key=value strings where keys are not duplicated."""
parsed = {}
for elt in l:
k, v = elt.split('=', 1)
if v[0] == '"' and v[-1] == '"':
v = v[1:-1]
parsed[k] = v
return parsed
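# Illustrative example (not part of the original source):
#   parse_keqv_list(['realm="example"', 'nonce=abc'])
#   -> {'realm': 'example', 'nonce': 'abc'}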
def parse_http_list(s):
"""Parse lists as described by RFC 2068 Section 2.
In particular, parse comma-separated lists where the elements of
the list may include quoted-strings. A quoted-string could
contain a comma. A non-quoted string could have quotes in the
middle. Neither commas nor quotes count if they are escaped.
Only double-quotes count, not single-quotes.
"""
res = []
part = ''
escape = quote = False
for cur in s:
if escape:
part += cur
escape = False
continue
if quote:
if cur == '\\':
escape = True
continue
elif cur == '"':
quote = False
part += cur
continue
if cur == ',':
res.append(part)
part = ''
continue
if cur == '"':
quote = True
part += cur
# append last part
if part:
res.append(part)
return [part.strip() for part in res]
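# Illustrative example (not part of the original source) -- a comma inside a
# quoted-string does not split the list:
#   parse_http_list('a, "b, c", d')  ->  ['a', '"b, c"', 'd']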
def _safe_gethostbyname(host):
try:
return socket.gethostbyname(host)
except socket.gaierror:
return None
class FileHandler(BaseHandler):
# Use local file or FTP depending on form of URL
def file_open(self, req):
url = req.get_selector()
if url[:2] == '//' and url[2:3] != '/' and (req.host and
req.host != 'localhost'):
req.type = 'ftp'
return self.parent.open(req)
else:
return self.open_local_file(req)
# names for the localhost
names = None
def get_names(self):
if FileHandler.names is None:
try:
FileHandler.names = tuple(
socket.gethostbyname_ex('localhost')[2] +
socket.gethostbyname_ex(socket.gethostname())[2])
except socket.gaierror:
FileHandler.names = (socket.gethostbyname('localhost'),)
return FileHandler.names
# not entirely sure what the rules are here
def open_local_file(self, req):
import email.utils
import mimetypes
host = req.get_host()
filename = req.get_selector()
localfile = url2pathname(filename)
try:
stats = os.stat(localfile)
size = stats.st_size
modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
mtype = mimetypes.guess_type(filename)[0]
headers = mimetools.Message(StringIO(
'Content-type: %s\nContent-length: %d\nLast-modified: %s\n' %
(mtype or 'text/plain', size, modified)))
if host:
host, port = splitport(host)
if not host or \
(not port and _safe_gethostbyname(host) in self.get_names()):
if host:
origurl = 'file://' + host + filename
else:
origurl = 'file://' + filename
return addinfourl(open(localfile, 'rb'), headers, origurl)
except OSError, msg:
# urllib2 users shouldn't expect OSErrors coming from urlopen()
raise URLError(msg)
raise URLError('file not on local host')
class FTPHandler(BaseHandler):
def ftp_open(self, req):
import ftplib
import mimetypes
host = req.get_host()
if not host:
raise URLError('ftp error: no host given')
host, port = splitport(host)
if port is None:
port = ftplib.FTP_PORT
else:
port = int(port)
# username/password handling
user, host = splituser(host)
if user:
user, passwd = splitpasswd(user)
else:
passwd = None
host = unquote(host)
user = user or ''
passwd = passwd or ''
try:
host = socket.gethostbyname(host)
except socket.error, msg:
raise URLError(msg)
path, attrs = splitattr(req.get_selector())
dirs = path.split('/')
dirs = map(unquote, dirs)
dirs, file = dirs[:-1], dirs[-1]
if dirs and not dirs[0]:
dirs = dirs[1:]
try:
fw = self.connect_ftp(user, passwd, host, port, dirs, req.timeout)
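            # FTP transfer type: 'I' (binary/image) when a file is requested,
            # 'D' (directory listing) otherwise; a ";type=" URL attribute
            # (RFC 1738) may override this below.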
type = file and 'I' or 'D'
for attr in attrs:
attr, value = splitvalue(attr)
if attr.lower() == 'type' and \
value in ('a', 'A', 'i', 'I', 'd', 'D'):
type = value.upper()
fp, retrlen = fw.retrfile(file, type)
headers = ""
mtype = mimetypes.guess_type(req.get_full_url())[0]
if mtype:
headers += "Content-type: %s\n" % mtype
if retrlen is not None and retrlen >= 0:
headers += "Content-length: %d\n" % retrlen
sf = StringIO(headers)
headers = mimetools.Message(sf)
return addinfourl(fp, headers, req.get_full_url())
except ftplib.all_errors, msg:
raise URLError, ('ftp error: %s' % msg), sys.exc_info()[2]
def connect_ftp(self, user, passwd, host, port, dirs, timeout):
fw = ftpwrapper(user, passwd, host, port, dirs, timeout,
persistent=False)
## fw.ftp.set_debuglevel(1)
return fw
class CacheFTPHandler(FTPHandler):
# XXX would be nice to have pluggable cache strategies
# XXX this stuff is definitely not thread safe
def __init__(self):
self.cache = {}
self.timeout = {}
self.soonest = 0
self.delay = 60
self.max_conns = 16
def setTimeout(self, t):
self.delay = t
def setMaxConns(self, m):
self.max_conns = m
def connect_ftp(self, user, passwd, host, port, dirs, timeout):
key = user, host, port, '/'.join(dirs), timeout
if key in self.cache:
self.timeout[key] = time.time() + self.delay
else:
self.cache[key] = ftpwrapper(user, passwd, host, port, dirs, timeout)
self.timeout[key] = time.time() + self.delay
self.check_cache()
return self.cache[key]
def check_cache(self):
# first check for old ones
t = time.time()
if self.soonest <= t:
for k, v in self.timeout.items():
if v < t:
self.cache[k].close()
del self.cache[k]
del self.timeout[k]
self.soonest = min(self.timeout.values())
# then check the size
if len(self.cache) == self.max_conns:
for k, v in self.timeout.items():
if v == self.soonest:
del self.cache[k]
del self.timeout[k]
break
self.soonest = min(self.timeout.values())
def clear_cache(self):
for conn in self.cache.values():
conn.close()
self.cache.clear()
self.timeout.clear()
| [
"[email protected]"
]
| |
09638a0316277ba90691152d2ee5fcaa722b2305 | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/coverage-big-955.py | a413f3a44075f48e40c63ede11ed563101fe0e36 | []
| no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,351 | py | count:int = 0
count2:int = 0
count3:int = 0
count4:int = 0
count5:int = 0
def foo(s: str) -> int:
return len(s)
def foo2(s: str, s2: str) -> int:
return len(s)
def foo3(s: str, s2: str, s3: str) -> int:
return len(s)
def foo4(s: str, s2: str, s3: str, s4: str) -> int:
return len(s)
def foo5(s: str, s2: str, s3: str, s4: str, s5: str) -> int:
return len(s)
class bar(object):
p: bool = True
def baz(self:"bar", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar2(object):
p: bool = True
p2: bool = True
def baz(self:"bar2", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar2", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == $Exp:
self.p = self is None
return "Nope"
class bar3(object):
p: bool = True
p2: bool = True
p3: bool = True
def baz(self:"bar3", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar3", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz3(self:"bar3", xx: [int], xx2: [int], xx3: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
y:int = 1
y2:int = 1
y3:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar4(object):
p: bool = True
p2: bool = True
p3: bool = True
p4: bool = True
def baz(self:"bar4", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar4", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz3(self:"bar4", xx: [int], xx2: [int], xx3: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
y:int = 1
y2:int = 1
y3:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz4(self:"bar4", xx: [int], xx2: [int], xx3: [int], xx4: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
x4:int = 0
y:int = 1
y2:int = 1
y3:int = 1
y4:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
def qux4(y: int, y2: int, y3: int, y4: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar5(object):
p: bool = True
p2: bool = True
p3: bool = True
p4: bool = True
p5: bool = True
def baz(self:"bar5", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar5", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz3(self:"bar5", xx: [int], xx2: [int], xx3: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
y:int = 1
y2:int = 1
y3:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz4(self:"bar5", xx: [int], xx2: [int], xx3: [int], xx4: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
x4:int = 0
y:int = 1
y2:int = 1
y3:int = 1
y4:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
def qux4(y: int, y2: int, y3: int, y4: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz5(self:"bar5", xx: [int], xx2: [int], xx3: [int], xx4: [int], xx5: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
x4:int = 0
x5:int = 0
y:int = 1
y2:int = 1
y3:int = 1
y4:int = 1
y5:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
def qux4(y: int, y2: int, y3: int, y4: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
if x > y:
x = -1
def qux5(y: int, y2: int, y3: int, y4: int, y5: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
nonlocal x5
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
print(bar().baz([1,2]))
| [
"[email protected]"
]
| |
24e05aac27a7eee6799ab5ec26fcb11af42151c3 | 147389cf28e0c92f0b2ef8000b0562a71d766880 | /pp.py | 764a1b7b187b101da8e094613efc9e68f4b889cc | []
| no_license | dohyekim/hello | 3821ca97079818c9938df33fc6d8d6ea9ca763a5 | 84f5704fe6cb6e5b63fb7903e311b650d65a394a | refs/heads/master | 2022-02-25T11:12:31.077335 | 2019-09-22T06:23:00 | 2019-09-22T06:23:00 | 156,352,382 | 1 | 0 | null | 2022-02-12T11:48:53 | 2018-11-06T08:39:18 | JavaScript | UTF-8 | Python | false | false | 707 | py | import requests
from bs4 import BeautifulSoup
import json
url = "https://www.melon.com/chart/index.htm"
headers = {
'Referer': 'https://www.melon.com/',
'User-Agent' : 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36'
}
html = requests.get(url, headers=headers).text
soup = BeautifulSoup(html, 'html.parser')
parameter = []
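# Melon marks chart rows 1-50 with id "lst50" and rows 51-100 with id "lst100";
# each row carries its song id in the data-song-no attribute.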
rank_50 = soup.select("table > tbody #lst50")
rank_100 = soup.select("table > tbody #lst100")
for i in rank_50:
a = i.attrs['data-song-no']
parameter.append(a)
for j in rank_100:
b = j.attrs['data-song-no']
parameter.append(b)
print(parameter)
param_ = ",".join(parameter)
print(param_) | [
"[email protected]"
]
| |
dc5410da4cfff303d0f5fbc6d93fce02dc7cad1f | 79ed3f72555aad8548634f523f775f34cfe166e7 | /catch/datasets/guaroa.py | 5acd22fecfb7fdabe5e8b72c0eb5fa30d32a8df1 | [
"MIT"
]
| permissive | John-Bioinfo/catch | a2ab188ed598767e7759f74227f24af2b284b379 | fe63b86bc41396c1da0b449ac440c6ae9e52b2c5 | refs/heads/master | 2020-03-18T09:29:10.315733 | 2018-04-17T18:36:47 | 2018-04-17T18:36:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,225 | py | """Dataset with 'Guaroa orthobunyavirus' sequences.
A dataset with 25 'Guaroa orthobunyavirus' sequences. The virus is
segmented and has 3 segments. Based on their strain and/or isolate,
these sequences were able to be grouped into 16 genomes. Many genomes
may have fewer than 3 segments.
THIS PYTHON FILE WAS GENERATED BY A COMPUTER PROGRAM! DO NOT EDIT!
"""
from os.path import dirname
from os.path import join
from os import listdir
import sys
from catch.datasets import GenomesDatasetMultiChrom
__author__ = 'Hayden Metsky <[email protected]>'
chrs = ["segment_" + seg for seg in ['L', 'M', 'S']]
def seq_header_to_chr(header):
import re
c = re.compile(r'\[segment (L|M|S)\]')
m = c.search(header)
if not m:
raise ValueError("Unknown segment in header %s" % header)
seg = m.group(1)
valid_segs = ['L', 'M', 'S']
if seg not in valid_segs:
raise ValueError("Unknown segment %s" % seg)
return "segment_" + seg
ds = GenomesDatasetMultiChrom(__name__, __file__, __spec__,
chrs, seq_header_to_chr)
for f in listdir(join(dirname(__file__), "data/guaroa/")):
ds.add_fasta_path("data/guaroa/" + f, relative=True)
sys.modules[__name__] = ds
| [
"[email protected]"
]
| |
bf5657467f3bc562c237bba7906c9b1146e9b92a | 18d7876af265ec974aa5ecf9142093d845b59020 | /module/Favourite.py | d373b337f2caccf5bd99c89382bc95c872888048 | []
| no_license | xiaojieluo/savemylink | b6b2b806b8a760369860e2ec83fd85dece9bfd9d | 7eb64a4742516486bebe8498374b94552c682cfe | refs/heads/master | 2021-06-13T16:49:13.795891 | 2017-03-23T03:48:39 | 2017-03-23T03:48:39 | 77,986,335 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,113 | py | #!/usr/bin/env python
# coding=utf-8
from module.DB import db
from module.define import *
class Favourite(object):
db = db
link_list = []
value_dict = dict()
favourite_public = FAVOURITE_PUBLIC
favourite_count = FAVOURITE_COUNT
def __init__(self, fid = 0):
if not isinstance(fid, int):
raise TypeError('Bad operand type')
self.db = db
self.fid = fid
self.favourite_info = FAVOURITE_INFO.format(fid=self.fid)
self.favourite_count = FAVOURITE_COUNT
self.favourite = FAVOURITE.format(fid=self.fid)
        self.favourite_public = FAVOURITE_PUBLIC
        # Use per-instance buffers so instances do not share the mutable
        # class-level defaults above.
        self.link_list = []
        self.value_dict = {}
@classmethod
    def create(cls, info):
#info = dict(
# name='name',
# created_at = 'created_at'
#)
fid = cls.db.r.incr(cls.favourite_count)
favourite_info = FAVOURITE_INFO.format(fid=fid)
cls.db.r.hmset(favourite_info, info)
if info['public']:
cls.db.r.sadd(cls.favourite_public, fid)
        # Only the fid is returned.  To attach the favourite to an account's
        # favourite set as well, run the code below:
# user = Account(id)
# account_favourite = ACCOUNT_FAVOURITE.format(uid=uid)
# cls.db.r.sadd(account_favourite, fid)
return fid
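    # Illustrative usage (assumes `db` is the configured redis wrapper):
    #   fid = Favourite.create({'name': 'reading list',
    #                           'created_at': '2017-03-23', 'public': True})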
@classmethod
def public(cls):
"""
        Return all public favourites.
        """
        # Pagination could be added here.
pub = cls.db.smembers(cls.favourite_public)
result = []
if pub:
for k in pub:
result.append(Favourite(k))
return result
else:
return []
@property
def isPublic(self):
        public = self.db.r.sismember(self.favourite_public, self.fid)
return public
@property
def name(self):
#favourite_info = FAVOURITE_INFO.format(fid=self.fid)
result = self.db.r.hget(self.favourite_info, 'name')
return result
@property
def author(self):
        raw = self.db.r.hget(self.favourite_info, 'author')
        # print(self.db.r.hgetall(self.favourite_info))
        if raw:
            from lib.Account import Account
            return Account(int(raw))
@name.setter
def name(self, value):
self.value_dict['name'] = value
@property
def created_at(self):
#favourite_info = FAVOURITE_INFO.format(fid=self.fid)
return self.db.r.hget(self.favourite_info, 'created_at')
@created_at.setter
def created_at(self, value):
self.value_dict['created_at'] = value
    # Add link id(s) to this favourite; nothing is written to redis until
    # save() is called.
def addlink(self, lid):
if isinstance(lid, list):
for k in lid:
if k not in self.link_list:
                    self.link_list.append(k)
else:
lid = int(lid)
if lid not in self.link_list:
#self.linkid = []
self.link_list.append(lid)
return True
#print(self.link_list)
def save(self):
# save Favourite information
if len(self.value_dict) > 0:
self.db.r.hmset(self.favourite_info, self.value_dict)
# save link id into the favourite
if len(self.link_list) > 0:
for k in self.link_list:
self.db.r.sadd(self.favourite, k)
#del self.link_list[:]
self.link_list = []
self.value_dict = {}
return True
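    # Typical buffered workflow (illustrative):
    #   f = Favourite(fid); f.name = 'new name'; f.addlink(42); f.save()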
def links(self):
        # Return the ids of all links stored in this favourite.
favourite_links = FAVOURITE.format(fid=self.fid)
tmp = self.db.smembers(favourite_links)
        # Only link ids are returned; callers build Link objects themselves
        # (see the commented-out variant below).
return tmp
#print(tmp)
#if len(tmp) > 0:
# result = []
# from lib.Link import Link
# for k in tmp:
# result.append(Link(k))
# return result
#else:
# return None
| [
"[email protected]"
]
| |
88af1e1775d6df77735796278e0cad9c7dbd9207 | 00af09f4ac6f98203910d86c3791c152184ace9a | /Lib/tkinter/test/test_tkinter/test_geometry_managers.py | 83e658877ea3b79863275502589704f07f9cc287 | []
| no_license | orf53975/CarnosOS | 621d641df02d742a2452fde2f28a28c74b32695a | d06849064e4e9f30ef901ad8cf90960e1bec0805 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 122,987 | py | import unittest
import re
import tkinter
from tkinter import TclError
from test.support import requires
from tkinter.test.support import pixels_conv, tcl_version, requires_tcl
from tkinter.test.widget_tests import AbstractWidgetTest
requires('gui')
class PackTest(AbstractWidgetTest, unittest.TestCase):
def create2(self):
pack = tkinter.Toplevel(self.root, name='pack')
pack.wm_geometry('300x200+0+0')
pack.wm_minsize(1, 1)
a = tkinter.Frame(pack, name='a', width=20, height=40, bg='red')
b = tkinter.Frame(pack, name='b', width=50, height=30, bg='blue')
c = tkinter.Frame(pack, name='c', width=80, height=80, bg='green')
d = tkinter.Frame(pack, name='d', width=40, height=30, bg='yellow')
return pack, a, b, c, d
def test_pack_configure_after(self):
pack, a, b, c, d = self.create2()
with self.assertRaisesRegex(TclError, 'window "%s" isn\'t packed' % b):
a.pack_configure(after=b)
with self.assertRaisesRegex(TclError, 'bad window path name ".foo"'):
a.pack_configure(after='.foo')
a.pack_configure(side='top')
b.pack_configure(side='top')
c.pack_configure(side='top')
d.pack_configure(side='top')
self.assertEqual(pack.pack_slaves(), [a, b, c, d])
a.pack_configure(after=b)
self.assertEqual(pack.pack_slaves(), [b, a, c, d])
a.pack_configure(after=a)
self.assertEqual(pack.pack_slaves(), [b, a, c, d])
def test_pack_configure_anchor(self):
pack, a, b, c, d = self.create2()
def check(anchor, geom):
a.pack_configure(side='top', ipadx=5, padx=10, ipady=15, pady=20,
expand=True, anchor=anchor)
self.root.update()
self.assertEqual(a.winfo_geometry(), geom)
check('n', '30x70+135+20')
check('ne', '30x70+260+20')
check('e', '30x70+260+65')
check('se', '30x70+260+110')
check('s', '30x70+135+110')
check('sw', '30x70+10+110')
check('w', '30x70+10+65')
check('nw', '30x70+10+20')
check('center', '30x70+135+65')
def test_pack_configure_before(self):
pack, a, b, c, d = self.create2()
with self.assertRaisesRegex(TclError, 'window "%s" isn\'t packed' % b):
a.pack_configure(before=b)
with self.assertRaisesRegex(TclError, 'bad window path name ".foo"'):
a.pack_configure(before='.foo')
a.pack_configure(side='top')
b.pack_configure(side='top')
c.pack_configure(side='top')
d.pack_configure(side='top')
self.assertEqual(pack.pack_slaves(), [a, b, c, d])
a.pack_configure(before=d)
self.assertEqual(pack.pack_slaves(), [b, c, a, d])
a.pack_configure(before=a)
self.assertEqual(pack.pack_slaves(), [b, c, a, d])
def test_pack_configure_expand(self):
pack, a, b, c, d = self.create2()
def check(*geoms):
self.root.update()
self.assertEqual(a.winfo_geometry(), geoms[0])
self.assertEqual(b.winfo_geometry(), geoms[1])
self.assertEqual(c.winfo_geometry(), geoms[2])
self.assertEqual(d.winfo_geometry(), geoms[3])
a.pack_configure(side='left')
b.pack_configure(side='top')
c.pack_configure(side='right')
d.pack_configure(side='bottom')
check('20x40+0+80', '50x30+135+0', '80x80+220+75', '40x30+100+170')
a.pack_configure(side='left', expand='yes')
b.pack_configure(side='top', expand='on')
c.pack_configure(side='right', expand=True)
d.pack_configure(side='bottom', expand=1)
check('20x40+40+80', '50x30+175+35', '80x80+180+110', '40x30+100+135')
a.pack_configure(side='left', expand='yes', fill='both')
b.pack_configure(side='top', expand='on', fill='both')
c.pack_configure(side='right', expand=True, fill='both')
d.pack_configure(side='bottom', expand=1, fill='both')
check('100x200+0+0', '200x100+100+0', '160x100+140+100', '40x100+100+100')
def test_pack_configure_in(self):
pack, a, b, c, d = self.create2()
a.pack_configure(side='top')
b.pack_configure(side='top')
c.pack_configure(side='top')
d.pack_configure(side='top')
a.pack_configure(in_=pack)
self.assertEqual(pack.pack_slaves(), [b, c, d, a])
a.pack_configure(in_=c)
self.assertEqual(pack.pack_slaves(), [b, c, d])
self.assertEqual(c.pack_slaves(), [a])
with self.assertRaisesRegex(TclError,
'can\'t pack %s inside itself' % (a,)):
a.pack_configure(in_=a)
with self.assertRaisesRegex(TclError, 'bad window path name ".foo"'):
a.pack_configure(in_='.foo')
def test_pack_configure_padx_ipadx_fill(self):
pack, a, b, c, d = self.create2()
def check(geom1, geom2, **kwargs):
a.pack_forget()
b.pack_forget()
a.pack_configure(**kwargs)
b.pack_configure(expand=True, fill='both')
self.root.update()
self.assertEqual(a.winfo_geometry(), geom1)
self.assertEqual(b.winfo_geometry(), geom2)
check('20x40+260+80', '240x200+0+0', side='right', padx=20)
check('20x40+250+80', '240x200+0+0', side='right', padx=(10, 30))
check('60x40+240+80', '240x200+0+0', side='right', ipadx=20)
check('30x40+260+80', '250x200+0+0', side='right', ipadx=5, padx=10)
check('20x40+260+80', '240x200+0+0', side='right', padx=20, fill='x')
check('20x40+249+80', '240x200+0+0',
side='right', padx=(9, 31), fill='x')
check('60x40+240+80', '240x200+0+0', side='right', ipadx=20, fill='x')
check('30x40+260+80', '250x200+0+0',
side='right', ipadx=5, padx=10, fill='x')
check('30x40+255+80', '250x200+0+0',
side='right', ipadx=5, padx=(5, 15), fill='x')
check('20x40+140+0', '300x160+0+40', side='top', padx=20)
check('20x40+120+0', '300x160+0+40', side='top', padx=(0, 40))
check('60x40+120+0', '300x160+0+40', side='top', ipadx=20)
check('30x40+135+0', '300x160+0+40', side='top', ipadx=5, padx=10)
check('30x40+130+0', '300x160+0+40', side='top', ipadx=5, padx=(5, 15))
check('260x40+20+0', '300x160+0+40', side='top', padx=20, fill='x')
check('260x40+25+0', '300x160+0+40',
side='top', padx=(25, 15), fill='x')
check('300x40+0+0', '300x160+0+40', side='top', ipadx=20, fill='x')
check('280x40+10+0', '300x160+0+40',
side='top', ipadx=5, padx=10, fill='x')
check('280x40+5+0', '300x160+0+40',
side='top', ipadx=5, padx=(5, 15), fill='x')
a.pack_configure(padx='1c')
self.assertEqual(a.pack_info()['padx'],
self._str(pack.winfo_pixels('1c')))
a.pack_configure(ipadx='1c')
self.assertEqual(a.pack_info()['ipadx'],
self._str(pack.winfo_pixels('1c')))
def test_pack_configure_pady_ipady_fill(self):
pack, a, b, c, d = self.create2()
def check(geom1, geom2, **kwargs):
a.pack_forget()
b.pack_forget()
a.pack_configure(**kwargs)
b.pack_configure(expand=True, fill='both')
self.root.update()
self.assertEqual(a.winfo_geometry(), geom1)
self.assertEqual(b.winfo_geometry(), geom2)
check('20x40+280+80', '280x200+0+0', side='right', pady=20)
check('20x40+280+70', '280x200+0+0', side='right', pady=(10, 30))
check('20x80+280+60', '280x200+0+0', side='right', ipady=20)
check('20x50+280+75', '280x200+0+0', side='right', ipady=5, pady=10)
check('20x40+280+80', '280x200+0+0', side='right', pady=20, fill='x')
check('20x40+280+69', '280x200+0+0',
side='right', pady=(9, 31), fill='x')
check('20x80+280+60', '280x200+0+0', side='right', ipady=20, fill='x')
check('20x50+280+75', '280x200+0+0',
side='right', ipady=5, pady=10, fill='x')
check('20x50+280+70', '280x200+0+0',
side='right', ipady=5, pady=(5, 15), fill='x')
check('20x40+140+20', '300x120+0+80', side='top', pady=20)
check('20x40+140+0', '300x120+0+80', side='top', pady=(0, 40))
check('20x80+140+0', '300x120+0+80', side='top', ipady=20)
check('20x50+140+10', '300x130+0+70', side='top', ipady=5, pady=10)
check('20x50+140+5', '300x130+0+70', side='top', ipady=5, pady=(5, 15))
check('300x40+0+20', '300x120+0+80', side='top', pady=20, fill='x')
check('300x40+0+25', '300x120+0+80',
side='top', pady=(25, 15), fill='x')
check('300x80+0+0', '300x120+0+80', side='top', ipady=20, fill='x')
check('300x50+0+10', '300x130+0+70',
side='top', ipady=5, pady=10, fill='x')
check('300x50+0+5', '300x130+0+70',
side='top', ipady=5, pady=(5, 15), fill='x')
a.pack_configure(pady='1c')
self.assertEqual(a.pack_info()['pady'],
self._str(pack.winfo_pixels('1c')))
a.pack_configure(ipady='1c')
self.assertEqual(a.pack_info()['ipady'],
self._str(pack.winfo_pixels('1c')))
def test_pack_configure_side(self):
pack, a, b, c, d = self.create2()
def check(side, geom1, geom2):
a.pack_configure(side=side)
self.assertEqual(a.pack_info()['side'], side)
b.pack_configure(expand=True, fill='both')
self.root.update()
self.assertEqual(a.winfo_geometry(), geom1)
self.assertEqual(b.winfo_geometry(), geom2)
check('top', '20x40+140+0', '300x160+0+40')
check('bottom', '20x40+140+160', '300x160+0+0')
check('left', '20x40+0+80', '280x200+20+0')
check('right', '20x40+280+80', '280x200+0+0')
def test_pack_forget(self):
pack, a, b, c, d = self.create2()
a.pack_configure()
b.pack_configure()
c.pack_configure()
self.assertEqual(pack.pack_slaves(), [a, b, c])
b.pack_forget()
self.assertEqual(pack.pack_slaves(), [a, c])
b.pack_forget()
self.assertEqual(pack.pack_slaves(), [a, c])
d.pack_forget()
def test_pack_info(self):
pack, a, b, c, d = self.create2()
with self.assertRaisesRegex(TclError, 'window "%s" isn\'t packed' % a):
a.pack_info()
a.pack_configure()
b.pack_configure(side='right', in_=a, anchor='s', expand=True, fill='x',
ipadx=5, padx=10, ipady=2, pady=(5, 15))
info = a.pack_info()
self.assertIsInstance(info, dict)
self.assertEqual(info['anchor'], 'center')
self.assertEqual(info['expand'], self._str(0))
self.assertEqual(info['fill'], 'none')
self.assertEqual(info['in'], pack)
self.assertEqual(info['ipadx'], self._str(0))
self.assertEqual(info['ipady'], self._str(0))
self.assertEqual(info['padx'], self._str(0))
self.assertEqual(info['pady'], self._str(0))
self.assertEqual(info['side'], 'top')
info = b.pack_info()
self.assertIsInstance(info, dict)
self.assertEqual(info['anchor'], 's')
self.assertEqual(info['expand'], self._str(1))
self.assertEqual(info['fill'], 'x')
self.assertEqual(info['in'], a)
self.assertEqual(info['ipadx'], self._str(5))
self.assertEqual(info['ipady'], self._str(2))
self.assertEqual(info['padx'], self._str(10))
self.assertEqual(info['pady'], self._str((5, 15)))
self.assertEqual(info['side'], 'right')
def test_pack_propagate(self):
pack, a, b, c, d = self.create2()
pack.configure(width=300, height=200)
a.pack_configure()
pack.pack_propagate(False)
self.root.update()
self.assertEqual(pack.winfo_reqwidth(), 300)
self.assertEqual(pack.winfo_reqheight(), 200)
pack.pack_propagate(True)
self.root.update()
self.assertEqual(pack.winfo_reqwidth(), 20)
self.assertEqual(pack.winfo_reqheight(), 40)
def test_pack_slaves(self):
pack, a, b, c, d = self.create2()
self.assertEqual(pack.pack_slaves(), [])
a.pack_configure()
self.assertEqual(pack.pack_slaves(), [a])
b.pack_configure()
self.assertEqual(pack.pack_slaves(), [a, b])
class PlaceTest(AbstractWidgetTest, unittest.TestCase):
def create2(self):
t = tkinter.Toplevel(self.root, width=300, height=200, bd=0)
t.wm_geometry('300x200+0+0')
f = tkinter.Frame(t, width=154, height=84, bd=2, relief='raised')
f.place_configure(x=48, y=38)
f2 = tkinter.Frame(t, width=30, height=60, bd=2, relief='raised')
self.root.update()
return t, f, f2
def test_place_configure_in(self):
t, f, f2 = self.create2()
self.assertEqual(f2.winfo_manager(), '')
with self.assertRaisesRegex(TclError, "can't place %s relative to "
"itself" % re.escape(str(f2))):
f2.place_configure(in_=f2)
if tcl_version >= (8, 5):
self.assertEqual(f2.winfo_manager(), '')
with self.assertRaisesRegex(TclError, 'bad window path name'):
f2.place_configure(in_='spam')
f2.place_configure(in_=f)
self.assertEqual(f2.winfo_manager(), 'place')
def test_place_configure_x(self):
t, f, f2 = self.create2()
f2.place_configure(in_=f)
self.assertEqual(f2.place_info()['x'], '0')
self.root.update()
self.assertEqual(f2.winfo_x(), 50)
f2.place_configure(x=100)
self.assertEqual(f2.place_info()['x'], '100')
self.root.update()
self.assertEqual(f2.winfo_x(), 150)
f2.place_configure(x=-10, relx=1)
self.assertEqual(f2.place_info()['x'], '-10')
self.root.update()
self.assertEqual(f2.winfo_x(), 190)
with self.assertRaisesRegex(TclError, 'bad screen distance "spam"'):
f2.place_configure(in_=f, x='spam')
def test_place_configure_y(self):
t, f, f2 = self.create2()
f2.place_configure(in_=f)
self.assertEqual(f2.place_info()['y'], '0')
self.root.update()
self.assertEqual(f2.winfo_y(), 40)
f2.place_configure(y=50)
self.assertEqual(f2.place_info()['y'], '50')
self.root.update()
self.assertEqual(f2.winfo_y(), 90)
f2.place_configure(y=-10, rely=1)
self.assertEqual(f2.place_info()['y'], '-10')
self.root.update()
self.assertEqual(f2.winfo_y(), 110)
with self.assertRaisesRegex(TclError, 'bad screen distance "spam"'):
f2.place_configure(in_=f, y='spam')
def test_place_configure_relx(self):
t, f, f2 = self.create2()
f2.place_configure(in_=f)
self.assertEqual(f2.place_info()['relx'], '0')
self.root.update()
self.assertEqual(f2.winfo_x(), 50)
f2.place_configure(relx=0.5)
self.assertEqual(f2.place_info()['relx'], '0.5')
self.root.update()
self.assertEqual(f2.winfo_x(), 125)
f2.place_configure(relx=1)
self.assertEqual(f2.place_info()['relx'], '1')
self.root.update()
self.assertEqual(f2.winfo_x(), 200)
with self.assertRaisesRegex(TclError, 'expected floating-point number '
'but got "spam"'):
f2.place_configure(in_=f, relx='spam')
def test_place_configure_rely(self):
t, f, f2 = self.create2()
f2.place_configure(in_=f)
self.assertEqual(f2.place_info()['rely'], '0')
self.root.update()
self.assertEqual(f2.winfo_y(), 40)
f2.place_configure(rely=0.5)
self.assertEqual(f2.place_info()['rely'], '0.5')
self.root.update()
self.assertEqual(f2.winfo_y(), 80)
f2.place_configure(rely=1)
self.assertEqual(f2.place_info()['rely'], '1')
self.root.update()
self.assertEqual(f2.winfo_y(), 120)
with self.assertRaisesRegex(TclError, 'expected floating-point number '
'but got "spam"'):
f2.place_configure(in_=f, rely='spam')
def test_place_configure_anchor(self):
f = tkinter.Frame(self.root)
with self.assertRaisesRegex(TclError, 'bad anchor "j"'):
f.place_configure(anchor='j')
with self.assertRaisesRegex(TclError, 'ambiguous anchor ""'):
f.place_configure(anchor='')
for value in 'n', 'ne', 'e', 'se', 's', 'sw', 'w', 'nw', 'center':
f.place_configure(anchor=value)
self.assertEqual(f.place_info()['anchor'], value)
def test_place_configure_width(self):
t, f, f2 = self.create2()
f2.place_configure(in_=f, width=120)
self.root.update()
self.assertEqual(f2.winfo_width(), 120)
f2.place_configure(width='')
self.root.update()
self.assertEqual(f2.winfo_width(), 30)
with self.assertRaisesRegex(TclError, 'bad screen distance "abcd"'):
f2.place_configure(width='abcd')
def test_place_configure_height(self):
t, f, f2 = self.create2()
f2.place_configure(in_=f, height=120)
self.root.update()
self.assertEqual(f2.winfo_height(), 120)
f2.place_configure(height='')
self.root.update()
self.assertEqual(f2.winfo_height(), 60)
with self.assertRaisesRegex(TclError, 'bad screen distance "abcd"'):
f2.place_configure(height='abcd')
def test_place_configure_relwidth(self):
t, f, f2 = self.create2()
f2.place_configure(in_=f, relwidth=0.5)
self.root.update()
self.assertEqual(f2.winfo_width(), 75)
f2.place_configure(relwidth='')
self.root.update()
self.assertEqual(f2.winfo_width(), 30)
with self.assertRaisesRegex(TclError, 'expected floating-point number '
'but got "abcd"'):
f2.place_configure(relwidth='abcd')
def test_place_configure_relheight(self):
t, f, f2 = self.create2()
f2.place_configure(in_=f, relheight=0.5)
self.root.update()
self.assertEqual(f2.winfo_height(), 40)
f2.place_configure(relheight='')
self.root.update()
self.assertEqual(f2.winfo_height(), 60)
with self.assertRaisesRegex(TclError, 'expected floating-point number '
'but got "abcd"'):
f2.place_configure(relheight='abcd')
def test_place_configure_bordermode(self):
f = tkinter.Frame(self.root)
with self.assertRaisesRegex(TclError, 'bad bordermode "j"'):
f.place_configure(bordermode='j')
with self.assertRaisesRegex(TclError, 'ambiguous bordermode ""'):
f.place_configure(bordermode='')
for value in 'inside', 'outside', 'ignore':
f.place_configure(bordermode=value)
self.assertEqual(f.place_info()['bordermode'], value)
def test_place_forget(self):
foo = tkinter.Frame(self.root)
foo.place_configure(width=50, height=50)
self.root.update()
foo.place_forget()
self.root.update()
self.assertFalse(foo.winfo_ismapped())
with self.assertRaises(TypeError):
foo.place_forget(0)
def test_place_info(self):
t, f, f2 = self.create2()
f2.place_configure(in_=f, x=1, y=2, width=3, height=4,
relx=0.1, rely=0.2, relwidth=0.3, relheight=0.4,
anchor='se', bordermode='outside')
info = f2.place_info()
self.assertIsInstance(info, dict)
self.assertEqual(info['x'], '1')
self.assertEqual(info['y'], '2')
self.assertEqual(info['width'], '3')
self.assertEqual(info['height'], '4')
self.assertEqual(info['relx'], '0.1')
self.assertEqual(info['rely'], '0.2')
self.assertEqual(info['relwidth'], '0.3')
self.assertEqual(info['relheight'], '0.4')
self.assertEqual(info['anchor'], 'se')
self.assertEqual(info['bordermode'], 'outside')
self.assertEqual(info['x'], '1')
self.assertEqual(info['x'], '1')
with self.assertRaises(TypeError):
f2.place_info(0)
def test_place_slaves(self):
foo = tkinter.Frame(self.root)
bar = tkinter.Frame(self.root)
self.assertEqual(foo.place_slaves(), [])
bar.place_configure(in_=foo)
self.assertEqual(foo.place_slaves(), [bar])
with self.assertRaises(TypeError):
foo.place_slaves(0)
class GridTest(AbstractWidgetTest, unittest.TestCase):
def tearDown(self):
cols, rows = self.root.grid_size()
for i in range(cols + 1):
self.root.grid_columnconfigure(i, weight=0, minsize=0, pad=0, uniform='')
for i in range(rows + 1):
self.root.grid_rowconfigure(i, weight=0, minsize=0, pad=0, uniform='')
self.root.grid_propagate(1)
if tcl_version >= (8, 5):
self.root.grid_anchor('nw')
super().tearDown()
def test_grid_configure(self):
b = tkinter.Button(self.root)
self.assertEqual(b.grid_info(), {})
b.grid_configure()
self.assertEqual(b.grid_info()['in'], self.root)
self.assertEqual(b.grid_info()['column'], self._str(0))
self.assertEqual(b.grid_info()['row'], self._str(0))
b.grid_configure({'column': 1}, row=2)
self.assertEqual(b.grid_info()['column'], self._str(1))
self.assertEqual(b.grid_info()['row'], self._str(2))
def test_grid_configure_column(self):
b = tkinter.Button(self.root)
with self.assertRaisesRegex(TclError, 'bad column value "-1": '
'must be a non-negative integer'):
b.grid_configure(column=-1)
b.grid_configure(column=2)
self.assertEqual(b.grid_info()['column'], self._str(2))
def test_grid_configure_columnspan(self):
b = tkinter.Button(self.root)
with self.assertRaisesRegex(TclError, 'bad columnspan value "0": '
'must be a positive integer'):
b.grid_configure(columnspan=0)
b.grid_configure(columnspan=2)
self.assertEqual(b.grid_info()['columnspan'], self._str(2))
def test_grid_configure_in(self):
f = tkinter.Frame(self.root)
b = tkinter.Button(self.root)
self.assertEqual(b.grid_info(), {})
b.grid_configure()
self.assertEqual(b.grid_info()['in'], self.root)
b.grid_configure(in_=f)
self.assertEqual(b.grid_info()['in'], f)
b.grid_configure({'in': self.root})
self.assertEqual(b.grid_info()['in'], self.root)
def test_grid_configure_ipadx(self):
b = tkinter.Button(self.root)
with self.assertRaisesRegex(TclError, 'bad ipadx value "-1": '
'must be positive screen distance'):
b.grid_configure(ipadx=-1)
b.grid_configure(ipadx=1)
self.assertEqual(b.grid_info()['ipadx'], self._str(1))
b.grid_configure(ipadx='.5c')
self.assertEqual(b.grid_info()['ipadx'],
self._str(round(pixels_conv('.5c') * self.scaling)))
def test_grid_configure_ipady(self):
b = tkinter.Button(self.root)
with self.assertRaisesRegex(TclError, 'bad ipady value "-1": '
'must be positive screen distance'):
b.grid_configure(ipady=-1)
b.grid_configure(ipady=1)
self.assertEqual(b.grid_info()['ipady'], self._str(1))
b.grid_configure(ipady='.5c')
self.assertEqual(b.grid_info()['ipady'],
self._str(round(pixels_conv('.5c') * self.scaling)))
def test_grid_configure_padx(self):
b = tkinter.Button(self.root)
with self.assertRaisesRegex(TclError, 'bad pad value "-1": '
'must be positive screen distance'):
b.grid_configure(padx=-1)
b.grid_configure(padx=1)
self.assertEqual(b.grid_info()['padx'], self._str(1))
b.grid_configure(padx=(10, 5))
self.assertEqual(b.grid_info()['padx'], self._str((10, 5)))
b.grid_configure(padx='.5c')
self.assertEqual(b.grid_info()['padx'],
self._str(round(pixels_conv('.5c') * self.scaling)))
def test_grid_configure_pady(self):
b = tkinter.Button(self.root)
with self.assertRaisesRegex(TclError, 'bad pad value "-1": '
'must be positive screen distance'):
b.grid_configure(pady=-1)
b.grid_configure(pady=1)
self.assertEqual(b.grid_info()['pady'], self._str(1))
b.grid_configure(pady=(10, 5))
self.assertEqual(b.grid_info()['pady'], self._str((10, 5)))
b.grid_configure(pady='.5c')
self.assertEqual(b.grid_info()['pady'],
self._str(round(pixels_conv('.5c') * self.scaling)))
def test_grid_configure_row(self):
b = tkinter.Button(self.root)
with self.assertRaisesRegex(TclError, 'bad (row|grid) value "-1": '
'must be a non-negative integer'):
b.grid_configure(row=-1)
b.grid_configure(row=2)
self.assertEqual(b.grid_info()['row'], self._str(2))
def test_grid_configure_rownspan(self):
b = tkinter.Button(self.root)
with self.assertRaisesRegex(TclError, 'bad rowspan value "0": '
'must be a positive integer'):
b.grid_configure(rowspan=0)
b.grid_configure(rowspan=2)
self.assertEqual(b.grid_info()['rowspan'], self._str(2))
def test_grid_configure_sticky(self):
f = tkinter.Frame(self.root, bg='red')
with self.assertRaisesRegex(TclError, 'bad stickyness value "glue"'):
f.grid_configure(sticky='glue')
f.grid_configure(sticky='ne')
self.assertEqual(f.grid_info()['sticky'], 'ne')
f.grid_configure(sticky='n,s,e,w')
self.assertEqual(f.grid_info()['sticky'], 'nesw')
def test_grid_columnconfigure(self):
with self.assertRaises(TypeError):
self.root.grid_columnconfigure()
self.assertEqual(self.root.grid_columnconfigure(0),
{'minsize': 0, 'pad': 0, 'uniform': None, 'weight': 0})
with self.assertRaisesRegex(TclError, 'bad option "-foo"'):
self.root.grid_columnconfigure(0, 'foo')
self.root.grid_columnconfigure((0, 3), weight=2)
with self.assertRaisesRegex(TclError,
'must specify a single element on retrieval'):
self.root.grid_columnconfigure((0, 3))
b = tkinter.Button(self.root)
b.grid_configure(column=0, row=0)
if tcl_version >= (8, 5):
self.root.grid_columnconfigure('all', weight=3)
with self.assertRaisesRegex(TclError, 'expected integer but got "all"'):
self.root.grid_columnconfigure('all')
self.assertEqual(self.root.grid_columnconfigure(0, 'weight'), 3)
self.assertEqual(self.root.grid_columnconfigure(3, 'weight'), 2)
self.assertEqual(self.root.grid_columnconfigure(265, 'weight'), 0)
if tcl_version >= (8, 5):
self.root.grid_columnconfigure(b, weight=4)
self.assertEqual(self.root.grid_columnconfigure(0, 'weight'), 4)
def test_grid_columnconfigure_minsize(self):
with self.assertRaisesRegex(TclError, 'bad screen distance "foo"'):
self.root.grid_columnconfigure(0, minsize='foo')
self.root.grid_columnconfigure(0, minsize=10)
self.assertEqual(self.root.grid_columnconfigure(0, 'minsize'), 10)
self.assertEqual(self.root.grid_columnconfigure(0)['minsize'], 10)
def test_grid_columnconfigure_weight(self):
with self.assertRaisesRegex(TclError, 'expected integer but got "bad"'):
self.root.grid_columnconfigure(0, weight='bad')
with self.assertRaisesRegex(TclError, 'invalid arg "-weight": '
'should be non-negative'):
self.root.grid_columnconfigure(0, weight=-3)
self.root.grid_columnconfigure(0, weight=3)
self.assertEqual(self.root.grid_columnconfigure(0, 'weight'), 3)
self.assertEqual(self.root.grid_columnconfigure(0)['weight'], 3)
def test_grid_columnconfigure_pad(self):
with self.assertRaisesRegex(TclError, 'bad screen distance "foo"'):
self.root.grid_columnconfigure(0, pad='foo')
with self.assertRaisesRegex(TclError, 'invalid arg "-pad": '
'should be non-negative'):
self.root.grid_columnconfigure(0, pad=-3)
self.root.grid_columnconfigure(0, pad=3)
self.assertEqual(self.root.grid_columnconfigure(0, 'pad'), 3)
self.assertEqual(self.root.grid_columnconfigure(0)['pad'], 3)
def test_grid_columnconfigure_uniform(self):
self.root.grid_columnconfigure(0, uniform='foo')
self.assertEqual(self.root.grid_columnconfigure(0, 'uniform'), 'foo')
self.assertEqual(self.root.grid_columnconfigure(0)['uniform'], 'foo')
def test_grid_rowconfigure(self):
with self.assertRaises(TypeError):
self.root.grid_rowconfigure()
self.assertEqual(self.root.grid_rowconfigure(0),
{'minsize': 0, 'pad': 0, 'uniform': None, 'weight': 0})
with self.assertRaisesRegex(TclError, 'bad option "-foo"'):
self.root.grid_rowconfigure(0, 'foo')
self.root.grid_rowconfigure((0, 3), weight=2)
with self.assertRaisesRegex(TclError,
'must specify a single element on retrieval'):
self.root.grid_rowconfigure((0, 3))
b = tkinter.Button(self.root)
b.grid_configure(column=0, row=0)
if tcl_version >= (8, 5):
self.root.grid_rowconfigure('all', weight=3)
with self.assertRaisesRegex(TclError, 'expected integer but got "all"'):
self.root.grid_rowconfigure('all')
self.assertEqual(self.root.grid_rowconfigure(0, 'weight'), 3)
self.assertEqual(self.root.grid_rowconfigure(3, 'weight'), 2)
self.assertEqual(self.root.grid_rowconfigure(265, 'weight'), 0)
if tcl_version >= (8, 5):
self.root.grid_rowconfigure(b, weight=4)
self.assertEqual(self.root.grid_rowconfigure(0, 'weight'), 4)
def test_grid_rowconfigure_minsize(self):
with self.assertRaisesRegex(TclError, 'bad screen distance "foo"'):
self.root.grid_rowconfigure(0, minsize='foo')
self.root.grid_rowconfigure(0, minsize=10)
self.assertEqual(self.root.grid_rowconfigure(0, 'minsize'), 10)
self.assertEqual(self.root.grid_rowconfigure(0)['minsize'], 10)
def test_grid_rowconfigure_weight(self):
with self.assertRaisesRegex(TclError, 'expected integer but got "bad"'):
self.root.grid_rowconfigure(0, weight='bad')
with self.assertRaisesRegex(TclError, 'invalid arg "-weight": '
'should be non-negative'):
self.root.grid_rowconfigure(0, weight=-3)
self.root.grid_rowconfigure(0, weight=3)
self.assertEqual(self.root.grid_rowconfigure(0, 'weight'), 3)
self.assertEqual(self.root.grid_rowconfigure(0)['weight'], 3)
def test_grid_rowconfigure_pad(self):
with self.assertRaisesRegex(TclError, 'bad screen distance "foo"'):
self.root.grid_rowconfigure(0, pad='foo')
with self.assertRaisesRegex(TclError, 'invalid arg "-pad": '
'should be non-negative'):
self.root.grid_rowconfigure(0, pad=-3)
self.root.grid_rowconfigure(0, pad=3)
self.assertEqual(self.root.grid_rowconfigure(0, 'pad'), 3)
self.assertEqual(self.root.grid_rowconfigure(0)['pad'], 3)
def test_grid_rowconfigure_uniform(self):
self.root.grid_rowconfigure(0, uniform='foo')
self.assertEqual(self.root.grid_rowconfigure(0, 'uniform'), 'foo')
self.assertEqual(self.root.grid_rowconfigure(0)['uniform'], 'foo')
def test_grid_forget(self):
b = tkinter.Button(self.root)
c = tkinter.Button(self.root)
b.grid_configure(row=2, column=2, rowspan=2, columnspan=2,
padx=3, pady=4, sticky='ns')
self.assertEqual(self.root.grid_slaves(), [b])
b.grid_forget()
c.grid_forget()
self.assertEqual(self.root.grid_slaves(), [])
self.assertEqual(b.grid_info(), {})
b.grid_configure(row=0, column=0)
info = b.grid_info()
self.assertEqual(info['row'], self._str(0))
self.assertEqual(info['column'], self._str(0))
self.assertEqual(info['rowspan'], self._str(1))
self.assertEqual(info['columnspan'], self._str(1))
self.assertEqual(info['padx'], self._str(0))
self.assertEqual(info['pady'], self._str(0))
self.assertEqual(info['sticky'], '')
def test_grid_remove(self):
b = tkinter.Button(self.root)
c = tkinter.Button(self.root)
b.grid_configure(row=2, column=2, rowspan=2, columnspan=2,
padx=3, pady=4, sticky='ns')
self.assertEqual(self.root.grid_slaves(), [b])
b.grid_remove()
c.grid_remove()
self.assertEqual(self.root.grid_slaves(), [])
self.assertEqual(b.grid_info(), {})
b.grid_configure(row=0, column=0)
info = b.grid_info()
self.assertEqual(info['row'], self._str(0))
self.assertEqual(info['column'], self._str(0))
self.assertEqual(info['rowspan'], self._str(2))
self.assertEqual(info['columnspan'], self._str(2))
self.assertEqual(info['padx'], self._str(3))
self.assertEqual(info['pady'], self._str(4))
self.assertEqual(info['sticky'], 'ns')
def test_grid_info(self):
b = tkinter.Button(self.root)
self.assertEqual(b.grid_info(), {})
b.grid_configure(row=2, column=2, rowspan=2, columnspan=2,
padx=3, pady=4, sticky='ns')
info = b.grid_info()
self.assertIsInstance(info, dict)
self.assertEqual(info['in'], self.root)
self.assertEqual(info['row'], self._str(2))
self.assertEqual(info['column'], self._str(2))
self.assertEqual(info['rowspan'], self._str(2))
self.assertEqual(info['columnspan'], self._str(2))
self.assertEqual(info['padx'], self._str(3))
self.assertEqual(info['pady'], self._str(4))
self.assertEqual(info['sticky'], 'ns')
@requires_tcl(8, 5)
def test_grid_anchor(self):
with self.assertRaisesRegex(TclError, 'bad anchor "x"'):
self.root.grid_anchor('x')
with self.assertRaisesRegex(TclError, 'ambiguous anchor ""'):
self.root.grid_anchor('')
with self.assertRaises(TypeError):
self.root.grid_anchor('se', 'nw')
self.root.grid_anchor('se')
self.assertEqual(self.root.tk.call('grid', 'anchor', self.root), 'se')
def test_grid_bbox(self):
self.assertEqual(self.root.grid_bbox(), (0, 0, 0, 0))
self.assertEqual(self.root.grid_bbox(0, 0), (0, 0, 0, 0))
self.assertEqual(self.root.grid_bbox(0, 0, 1, 1), (0, 0, 0, 0))
with self.assertRaisesRegex(TclError, 'expected integer but got "x"'):
self.root.grid_bbox('x', 0)
with self.assertRaisesRegex(TclError, 'expected integer but got "x"'):
self.root.grid_bbox(0, 'x')
with self.assertRaisesRegex(TclError, 'expected integer but got "x"'):
self.root.grid_bbox(0, 0, 'x', 0)
with self.assertRaisesRegex(TclError, 'expected integer but got "x"'):
self.root.grid_bbox(0, 0, 0, 'x')
with self.assertRaises(TypeError):
self.root.grid_bbox(0, 0, 0, 0, 0)
t = self.root
# de-maximize
t.wm_geometry('1x1+0+0')
t.wm_geometry('')
f1 = tkinter.Frame(t, width=75, height=75, bg='red')
f2 = tkinter.Frame(t, width=90, height=90, bg='blue')
f1.grid_configure(row=0, column=0)
f2.grid_configure(row=1, column=1)
self.root.update()
self.assertEqual(t.grid_bbox(), (0, 0, 165, 165))
self.assertEqual(t.grid_bbox(0, 0), (0, 0, 75, 75))
self.assertEqual(t.grid_bbox(0, 0, 1, 1), (0, 0, 165, 165))
self.assertEqual(t.grid_bbox(1, 1), (75, 75, 90, 90))
self.assertEqual(t.grid_bbox(10, 10, 0, 0), (0, 0, 165, 165))
self.assertEqual(t.grid_bbox(-2, -2, -1, -1), (0, 0, 0, 0))
self.assertEqual(t.grid_bbox(10, 10, 12, 12), (165, 165, 0, 0))
def test_grid_location(self):
with self.assertRaises(TypeError):
self.root.grid_location()
with self.assertRaises(TypeError):
self.root.grid_location(0)
with self.assertRaises(TypeError):
self.root.grid_location(0, 0, 0)
with self.assertRaisesRegex(TclError, 'bad screen distance "x"'):
self.root.grid_location('x', 'y')
with self.assertRaisesRegex(TclError, 'bad screen distance "y"'):
self.root.grid_location('1c', 'y')
t = self.root
# de-maximize
t.wm_geometry('1x1+0+0')
t.wm_geometry('')
f = tkinter.Frame(t, width=200, height=100,
highlightthickness=0, bg='red')
self.assertEqual(f.grid_location(10, 10), (-1, -1))
f.grid_configure()
self.root.update()
self.assertEqual(t.grid_location(-10, -10), (-1, -1))
self.assertEqual(t.grid_location(-10, 0), (-1, 0))
self.assertEqual(t.grid_location(-1, 0), (-1, 0))
self.assertEqual(t.grid_location(0, -10), (0, -1))
self.assertEqual(t.grid_location(0, -1), (0, -1))
self.assertEqual(t.grid_location(0, 0), (0, 0))
self.assertEqual(t.grid_location(200, 0), (0, 0))
self.assertEqual(t.grid_location(201, 0), (1, 0))
self.assertEqual(t.grid_location(0, 100), (0, 0))
self.assertEqual(t.grid_location(0, 101), (0, 1))
self.assertEqual(t.grid_location(201, 101), (1, 1))
def test_grid_propagate(self):
self.assertEqual(self.root.grid_propagate(), True)
with self.assertRaises(TypeError):
self.root.grid_propagate(False, False)
self.root.grid_propagate(False)
self.assertFalse(self.root.grid_propagate())
f = tkinter.Frame(self.root, width=100, height=100, bg='red')
f.grid_configure(row=0, column=0)
self.root.update()
self.assertEqual(f.winfo_width(), 100)
self.assertEqual(f.winfo_height(), 100)
f.grid_propagate(False)
g = tkinter.Frame(self.root, width=75, height=85, bg='green')
g.grid_configure(in_=f, row=0, column=0)
self.root.update()
self.assertEqual(f.winfo_width(), 100)
self.assertEqual(f.winfo_height(), 100)
f.grid_propagate(True)
self.root.update()
self.assertEqual(f.winfo_width(), 75)
self.assertEqual(f.winfo_height(), 85)
def test_grid_size(self):
with self.assertRaises(TypeError):
self.root.grid_size(0)
self.assertEqual(self.root.grid_size(), (0, 0))
f = tkinter.Scale(self.root)
f.grid_configure(row=0, column=0)
self.assertEqual(self.root.grid_size(), (1, 1))
f.grid_configure(row=4, column=5)
self.assertEqual(self.root.grid_size(), (6, 5))
def test_grid_slaves(self):
self.assertEqual(self.root.grid_slaves(), [])
a = tkinter.Label(self.root)
a.grid_configure(row=0, column=1)
b = tkinter.Label(self.root)
b.grid_configure(row=1, column=0)
c = tkinter.Label(self.root)
c.grid_configure(row=1, column=1)
d = tkinter.Label(self.root)
d.grid_configure(row=1, column=1)
self.assertEqual(self.root.grid_slaves(), [d, c, b, a])
self.assertEqual(self.root.grid_slaves(row=0), [a])
self.assertEqual(self.root.grid_slaves(row=1), [d, c, b])
self.assertEqual(self.root.grid_slaves(column=0), [b])
self.assertEqual(self.root.grid_slaves(column=1), [d, c, a])
self.assertEqual(self.root.grid_slaves(row=1, column=1), [d, c])
tests_gui = (
PackTest, PlaceTest, GridTest,
)
if __name__ == '__main__':
unittest.main()
check('20x40+280+80', '280x200+0+0', side='right', pady=20, fill='x')
check('20x40+280+69', '280x200+0+0',
side='right', pady=(9, 31), fill='x')
check('20x80+280+60', '280x200+0+0', side='right', ipady=20, fill='x')
check('20x50+280+75', '280x200+0+0',
side='right', ipady=5, pady=10, fill='x')
check('20x50+280+70', '280x200+0+0',
side='right', ipady=5, pady=(5, 15), fill='x')
check('20x40+140+20', '300x120+0+80', side='top', pady=20)
check('20x40+140+0', '300x120+0+80', side='top', pady=(0, 40))
check('20x80+140+0', '300x120+0+80', side='top', ipady=20)
check('20x50+140+10', '300x130+0+70', side='top', ipady=5, pady=10)
check('20x50+140+5', '300x130+0+70', side='top', ipady=5, pady=(5, 15))
check('300x40+0+20', '300x120+0+80', side='top', pady=20, fill='x')
check('300x40+0+25', '300x120+0+80',
side='top', pady=(25, 15), fill='x')
check('300x80+0+0', '300x120+0+80', side='top', ipady=20, fill='x')
check('300x50+0+10', '300x130+0+70',
side='top', ipady=5, pady=10, fill='x')
check('300x50+0+5', '300x130+0+70',
side='top', ipady=5, pady=(5, 15), fill='x')
a.pack_configure(pady='1c')
self.assertEqual(a.pack_info()['pady'],
self._str(pack.winfo_pixels('1c')))
a.pack_configure(ipady='1c')
self.assertEqual(a.pack_info()['ipady'],
self._str(pack.winfo_pixels('1c')))
def test_pack_configure_side(self):
pack, a, b, c, d = self.create2()
def check(side, geom1, geom2):
a.pack_configure(side=side)
self.assertEqual(a.pack_info()['side'], side)
b.pack_configure(expand=True, fill='both')
self.root.update()
self.assertEqual(a.winfo_geometry(), geom1)
self.assertEqual(b.winfo_geometry(), geom2)
check('top', '20x40+140+0', '300x160+0+40')
check('bottom', '20x40+140+160', '300x160+0+0')
check('left', '20x40+0+80', '280x200+20+0')
check('right', '20x40+280+80', '280x200+0+0')
def test_pack_forget(self):
pack, a, b, c, d = self.create2()
a.pack_configure()
b.pack_configure()
c.pack_configure()
self.assertEqual(pack.pack_slaves(), [a, b, c])
b.pack_forget()
self.assertEqual(pack.pack_slaves(), [a, c])
b.pack_forget()
self.assertEqual(pack.pack_slaves(), [a, c])
d.pack_forget()
def test_pack_info(self):
pack, a, b, c, d = self.create2()
with self.assertRaisesRegex(TclError, 'window "%s" isn\'t packed' % a):
a.pack_info()
a.pack_configure()
b.pack_configure(side='right', in_=a, anchor='s', expand=True, fill='x',
ipadx=5, padx=10, ipady=2, pady=(5, 15))
info = a.pack_info()
self.assertIsInstance(info, dict)
self.assertEqual(info['anchor'], 'center')
self.assertEqual(info['expand'], self._str(0))
self.assertEqual(info['fill'], 'none')
self.assertEqual(info['in'], pack)
self.assertEqual(info['ipadx'], self._str(0))
self.assertEqual(info['ipady'], self._str(0))
self.assertEqual(info['padx'], self._str(0))
self.assertEqual(info['pady'], self._str(0))
self.assertEqual(info['side'], 'top')
info = b.pack_info()
self.assertIsInstance(info, dict)
self.assertEqual(info['anchor'], 's')
self.assertEqual(info['expand'], self._str(1))
self.assertEqual(info['fill'], 'x')
self.assertEqual(info['in'], a)
self.assertEqual(info['ipadx'], self._str(5))
self.assertEqual(info['ipady'], self._str(2))
self.assertEqual(info['padx'], self._str(10))
self.assertEqual(info['pady'], self._str((5, 15)))
self.assertEqual(info['side'], 'right')
def test_pack_propagate(self):
pack, a, b, c, d = self.create2()
pack.configure(width=300, height=200)
a.pack_configure()
pack.pack_propagate(False)
self.root.update()
self.assertEqual(pack.winfo_reqwidth(), 300)
self.assertEqual(pack.winfo_reqheight(), 200)
pack.pack_propagate(True)
self.root.update()
self.assertEqual(pack.winfo_reqwidth(), 20)
self.assertEqual(pack.winfo_reqheight(), 40)
def test_pack_slaves(self):
pack, a, b, c, d = self.create2()
self.assertEqual(pack.pack_slaves(), [])
a.pack_configure()
self.assertEqual(pack.pack_slaves(), [a])
b.pack_configure()
self.assertEqual(pack.pack_slaves(), [a, b])
class PlaceTest(AbstractWidgetTest, unittest.TestCase):
def create2(self):
t = tkinter.Toplevel(self.root, width=300, height=200, bd=0)
t.wm_geometry('300x200+0+0')
f = tkinter.Frame(t, width=154, height=84, bd=2, relief='raised')
f.place_configure(x=48, y=38)
f2 = tkinter.Frame(t, width=30, height=60, bd=2, relief='raised')
self.root.update()
return t, f, f2
def test_place_configure_in(self):
t, f, f2 = self.create2()
self.assertEqual(f2.winfo_manager(), '')
with self.assertRaisesRegex(TclError, "can't place %s relative to "
"itself" % re.escape(str(f2))):
f2.place_configure(in_=f2)
if tcl_version >= (8, 5):
self.assertEqual(f2.winfo_manager(), '')
with self.assertRaisesRegex(TclError, 'bad window path name'):
f2.place_configure(in_='spam')
f2.place_configure(in_=f)
self.assertEqual(f2.winfo_manager(), 'place')
def test_place_configure_x(self):
t, f, f2 = self.create2()
f2.place_configure(in_=f)
self.assertEqual(f2.place_info()['x'], '0')
self.root.update()
self.assertEqual(f2.winfo_x(), 50)
f2.place_configure(x=100)
self.assertEqual(f2.place_info()['x'], '100')
self.root.update()
self.assertEqual(f2.winfo_x(), 150)
f2.place_configure(x=-10, relx=1)
self.assertEqual(f2.place_info()['x'], '-10')
self.root.update()
self.assertEqual(f2.winfo_x(), 190)
with self.assertRaisesRegex(TclError, 'bad screen distance "spam"'):
f2.place_configure(in_=f, x='spam')
def test_place_configure_y(self):
t, f, f2 = self.create2()
f2.place_configure(in_=f)
self.assertEqual(f2.place_info()['y'], '0')
self.root.update()
self.assertEqual(f2.winfo_y(), 40)
f2.place_configure(y=50)
self.assertEqual(f2.place_info()['y'], '50')
self.root.update()
self.assertEqual(f2.winfo_y(), 90)
f2.place_configure(y=-10, rely=1)
self.assertEqual(f2.place_info()['y'], '-10')
self.root.update()
self.assertEqual(f2.winfo_y(), 110)
with self.assertRaisesRegex(TclError, 'bad screen distance "spam"'):
f2.place_configure(in_=f, y='spam')
def test_place_configure_relx(self):
t, f, f2 = self.create2()
f2.place_configure(in_=f)
self.assertEqual(f2.place_info()['relx'], '0')
self.root.update()
self.assertEqual(f2.winfo_x(), 50)
f2.place_configure(relx=0.5)
self.assertEqual(f2.place_info()['relx'], '0.5')
self.root.update()
self.assertEqual(f2.winfo_x(), 125)
f2.place_configure(relx=1)
self.assertEqual(f2.place_info()['relx'], '1')
self.root.update()
self.assertEqual(f2.winfo_x(), 200)
with self.assertRaisesRegex(TclError, 'expected floating-point number '
'but got "spam"'):
f2.place_configure(in_=f, relx='spam')
def test_place_configure_rely(self):
t, f, f2 = self.create2()
f2.place_configure(in_=f)
self.assertEqual(f2.place_info()['rely'], '0')
self.root.update()
self.assertEqual(f2.winfo_y(), 40)
f2.place_configure(rely=0.5)
self.assertEqual(f2.place_info()['rely'], '0.5')
self.root.update()
self.assertEqual(f2.winfo_y(), 80)
f2.place_configure(rely=1)
self.assertEqual(f2.place_info()['rely'], '1')
self.root.update()
self.assertEqual(f2.winfo_y(), 120)
with self.assertRaisesRegex(TclError, 'expected floating-point number '
'but got "spam"'):
f2.place_configure(in_=f, rely='spam')
def test_place_configure_anchor(self):
f = tkinter.Frame(self.root)
with self.assertRaisesRegex(TclError, 'bad anchor "j"'):
f.place_configure(anchor='j')
with self.assertRaisesRegex(TclError, 'ambiguous anchor ""'):
f.place_configure(anchor='')
for value in 'n', 'ne', 'e', 'se', 's', 'sw', 'w', 'nw', 'center':
f.place_configure(anchor=value)
self.assertEqual(f.place_info()['anchor'], value)
def test_place_configure_width(self):
t, f, f2 = self.create2()
f2.place_configure(in_=f, width=120)
self.root.update()
self.assertEqual(f2.winfo_width(), 120)
f2.place_configure(width='')
self.root.update()
self.assertEqual(f2.winfo_width(), 30)
with self.assertRaisesRegex(TclError, 'bad screen distance "abcd"'):
f2.place_configure(width='abcd')
def test_place_configure_height(self):
t, f, f2 = self.create2()
f2.place_configure(in_=f, height=120)
self.root.update()
self.assertEqual(f2.winfo_height(), 120)
f2.place_configure(height='')
self.root.update()
self.assertEqual(f2.winfo_height(), 60)
with self.assertRaisesRegex(TclError, 'bad screen distance "abcd"'):
f2.place_configure(height='abcd')
def test_place_configure_relwidth(self):
t, f, f2 = self.create2()
f2.place_configure(in_=f, relwidth=0.5)
self.root.update()
self.assertEqual(f2.winfo_width(), 75)
f2.place_configure(relwidth='')
self.root.update()
self.assertEqual(f2.winfo_width(), 30)
with self.assertRaisesRegex(TclError, 'expected floating-point number '
'but got "abcd"'):
f2.place_configure(relwidth='abcd')
def test_place_configure_relheight(self):
t, f, f2 = self.create2()
f2.place_configure(in_=f, relheight=0.5)
self.root.update()
self.assertEqual(f2.winfo_height(), 40)
f2.place_configure(relheight='')
self.root.update()
self.assertEqual(f2.winfo_height(), 60)
with self.assertRaisesRegex(TclError, 'expected floating-point number '
'but got "abcd"'):
f2.place_configure(relheight='abcd')
def test_place_configure_bordermode(self):
f = tkinter.Frame(self.root)
with self.assertRaisesRegex(TclError, 'bad bordermode "j"'):
f.place_configure(bordermode='j')
with self.assertRaisesRegex(TclError, 'ambiguous bordermode ""'):
f.place_configure(bordermode='')
for value in 'inside', 'outside', 'ignore':
f.place_configure(bordermode=value)
self.assertEqual(f.place_info()['bordermode'], value)
def test_place_forget(self):
foo = tkinter.Frame(self.root)
foo.place_configure(width=50, height=50)
self.root.update()
foo.place_forget()
self.root.update()
self.assertFalse(foo.winfo_ismapped())
with self.assertRaises(TypeError):
foo.place_forget(0)
def test_place_info(self):
t, f, f2 = self.create2()
f2.place_configure(in_=f, x=1, y=2, width=3, height=4,
relx=0.1, rely=0.2, relwidth=0.3, relheight=0.4,
anchor='se', bordermode='outside')
info = f2.place_info()
self.assertIsInstance(info, dict)
self.assertEqual(info['x'], '1')
self.assertEqual(info['y'], '2')
self.assertEqual(info['width'], '3')
self.assertEqual(info['height'], '4')
self.assertEqual(info['relx'], '0.1')
self.assertEqual(info['rely'], '0.2')
self.assertEqual(info['relwidth'], '0.3')
self.assertEqual(info['relheight'], '0.4')
self.assertEqual(info['anchor'], 'se')
self.assertEqual(info['bordermode'], 'outside')
self.assertEqual(info['x'], '1')
self.assertEqual(info['x'], '1')
with self.assertRaises(TypeError):
f2.place_info(0)
def test_place_slaves(self):
foo = tkinter.Frame(self.root)
bar = tkinter.Frame(self.root)
self.assertEqual(foo.place_slaves(), [])
bar.place_configure(in_=foo)
self.assertEqual(foo.place_slaves(), [bar])
with self.assertRaises(TypeError):
foo.place_slaves(0)
class GridTest(AbstractWidgetTest, unittest.TestCase):
def tearDown(self):
cols, rows = self.root.grid_size()
for i in range(cols + 1):
self.root.grid_columnconfigure(i, weight=0, minsize=0, pad=0, uniform='')
for i in range(rows + 1):
self.root.grid_rowconfigure(i, weight=0, minsize=0, pad=0, uniform='')
self.root.grid_propagate(1)
if tcl_version >= (8, 5):
self.root.grid_anchor('nw')
super().tearDown()
def test_grid_configure(self):
b = tkinter.Button(self.root)
self.assertEqual(b.grid_info(), {})
b.grid_configure()
self.assertEqual(b.grid_info()['in'], self.root)
self.assertEqual(b.grid_info()['column'], self._str(0))
self.assertEqual(b.grid_info()['row'], self._str(0))
b.grid_configure({'column': 1}, row=2)
self.assertEqual(b.grid_info()['column'], self._str(1))
self.assertEqual(b.grid_info()['row'], self._str(2))
def test_grid_configure_column(self):
b = tkinter.Button(self.root)
with self.assertRaisesRegex(TclError, 'bad column value "-1": '
'must be a non-negative integer'):
b.grid_configure(column=-1)
b.grid_configure(column=2)
self.assertEqual(b.grid_info()['column'], self._str(2))
def test_grid_configure_columnspan(self):
b = tkinter.Button(self.root)
with self.assertRaisesRegex(TclError, 'bad columnspan value "0": '
'must be a positive integer'):
b.grid_configure(columnspan=0)
b.grid_configure(columnspan=2)
self.assertEqual(b.grid_info()['columnspan'], self._str(2))
def test_grid_configure_in(self):
f = tkinter.Frame(self.root)
b = tkinter.Button(self.root)
self.assertEqual(b.grid_info(), {})
b.grid_configure()
self.assertEqual(b.grid_info()['in'], self.root)
b.grid_configure(in_=f)
self.assertEqual(b.grid_info()['in'], f)
b.grid_configure({'in': self.root})
self.assertEqual(b.grid_info()['in'], self.root)
def test_grid_configure_ipadx(self):
b = tkinter.Button(self.root)
with self.assertRaisesRegex(TclError, 'bad ipadx value "-1": '
'must be positive screen distance'):
b.grid_configure(ipadx=-1)
b.grid_configure(ipadx=1)
self.assertEqual(b.grid_info()['ipadx'], self._str(1))
b.grid_configure(ipadx='.5c')
self.assertEqual(b.grid_info()['ipadx'],
self._str(round(pixels_conv('.5c') * self.scaling)))
def test_grid_configure_ipady(self):
b = tkinter.Button(self.root)
with self.assertRaisesRegex(TclError, 'bad ipady value "-1": '
'must be positive screen distance'):
b.grid_configure(ipady=-1)
b.grid_configure(ipady=1)
self.assertEqual(b.grid_info()['ipady'], self._str(1))
b.grid_configure(ipady='.5c')
self.assertEqual(b.grid_info()['ipady'],
self._str(round(pixels_conv('.5c') * self.scaling)))
def test_grid_configure_padx(self):
b = tkinter.Button(self.root)
with self.assertRaisesRegex(TclError, 'bad pad value "-1": '
'must be positive screen distance'):
b.grid_configure(padx=-1)
b.grid_configure(padx=1)
self.assertEqual(b.grid_info()['padx'], self._str(1))
b.grid_configure(padx=(10, 5))
self.assertEqual(b.grid_info()['padx'], self._str((10, 5)))
b.grid_configure(padx='.5c')
self.assertEqual(b.grid_info()['padx'],
self._str(round(pixels_conv('.5c') * self.scaling)))
def test_grid_configure_pady(self):
b = tkinter.Button(self.root)
with self.assertRaisesRegex(TclError, 'bad pad value "-1": '
'must be positive screen distance'):
b.grid_configure(pady=-1)
b.grid_configure(pady=1)
self.assertEqual(b.grid_info()['pady'], self._str(1))
b.grid_configure(pady=(10, 5))
self.assertEqual(b.grid_info()['pady'], self._str((10, 5)))
b.grid_configure(pady='.5c')
self.assertEqual(b.grid_info()['pady'],
self._str(round(pixels_conv('.5c') * self.scaling)))
def test_grid_configure_row(self):
b = tkinter.Button(self.root)
with self.assertRaisesRegex(TclError, 'bad (row|grid) value "-1": '
'must be a non-negative integer'):
b.grid_configure(row=-1)
b.grid_configure(row=2)
self.assertEqual(b.grid_info()['row'], self._str(2))
def test_grid_configure_rownspan(self):
b = tkinter.Button(self.root)
with self.assertRaisesRegex(TclError, 'bad rowspan value "0": '
'must be a positive integer'):
b.grid_configure(rowspan=0)
b.grid_configure(rowspan=2)
self.assertEqual(b.grid_info()['rowspan'], self._str(2))
def test_grid_configure_sticky(self):
f = tkinter.Frame(self.root, bg='red')
with self.assertRaisesRegex(TclError, 'bad stickyness value "glue"'):
f.grid_configure(sticky='glue')
f.grid_configure(sticky='ne')
self.assertEqual(f.grid_info()['sticky'], 'ne')
f.grid_configure(sticky='n,s,e,w')
self.assertEqual(f.grid_info()['sticky'], 'nesw')
def test_grid_columnconfigure(self):
with self.assertRaises(TypeError):
self.root.grid_columnconfigure()
self.assertEqual(self.root.grid_columnconfigure(0),
{'minsize': 0, 'pad': 0, 'uniform': None, 'weight': 0})
with self.assertRaisesRegex(TclError, 'bad option "-foo"'):
self.root.grid_columnconfigure(0, 'foo')
self.root.grid_columnconfigure((0, 3), weight=2)
with self.assertRaisesRegex(TclError,
'must specify a single element on retrieval'):
self.root.grid_columnconfigure((0, 3))
b = tkinter.Button(self.root)
b.grid_configure(column=0, row=0)
if tcl_version >= (8, 5):
self.root.grid_columnconfigure('all', weight=3)
with self.assertRaisesRegex(TclError, 'expected integer but got "all"'):
self.root.grid_columnconfigure('all')
self.assertEqual(self.root.grid_columnconfigure(0, 'weight'), 3)
self.assertEqual(self.root.grid_columnconfigure(3, 'weight'), 2)
self.assertEqual(self.root.grid_columnconfigure(265, 'weight'), 0)
if tcl_version >= (8, 5):
self.root.grid_columnconfigure(b, weight=4)
self.assertEqual(self.root.grid_columnconfigure(0, 'weight'), 4)
def test_grid_columnconfigure_minsize(self):
with self.assertRaisesRegex(TclError, 'bad screen distance "foo"'):
self.root.grid_columnconfigure(0, minsize='foo')
self.root.grid_columnconfigure(0, minsize=10)
self.assertEqual(self.root.grid_columnconfigure(0, 'minsize'), 10)
self.assertEqual(self.root.grid_columnconfigure(0)['minsize'], 10)
def test_grid_columnconfigure_weight(self):
with self.assertRaisesRegex(TclError, 'expected integer but got "bad"'):
self.root.grid_columnconfigure(0, weight='bad')
with self.assertRaisesRegex(TclError, 'invalid arg "-weight": '
'should be non-negative'):
self.root.grid_columnconfigure(0, weight=-3)
self.root.grid_columnconfigure(0, weight=3)
self.assertEqual(self.root.grid_columnconfigure(0, 'weight'), 3)
self.assertEqual(self.root.grid_columnconfigure(0)['weight'], 3)
def test_grid_columnconfigure_pad(self):
with self.assertRaisesRegex(TclError, 'bad screen distance "foo"'):
self.root.grid_columnconfigure(0, pad='foo')
with self.assertRaisesRegex(TclError, 'invalid arg "-pad": '
'should be non-negative'):
self.root.grid_columnconfigure(0, pad=-3)
self.root.grid_columnconfigure(0, pad=3)
self.assertEqual(self.root.grid_columnconfigure(0, 'pad'), 3)
self.assertEqual(self.root.grid_columnconfigure(0)['pad'], 3)
def test_grid_columnconfigure_uniform(self):
self.root.grid_columnconfigure(0, uniform='foo')
self.assertEqual(self.root.grid_columnconfigure(0, 'uniform'), 'foo')
self.assertEqual(self.root.grid_columnconfigure(0)['uniform'], 'foo')
def test_grid_rowconfigure(self):
with self.assertRaises(TypeError):
self.root.grid_rowconfigure()
self.assertEqual(self.root.grid_rowconfigure(0),
{'minsize': 0, 'pad': 0, 'uniform': None, 'weight': 0})
with self.assertRaisesRegex(TclError, 'bad option "-foo"'):
self.root.grid_rowconfigure(0, 'foo')
self.root.grid_rowconfigure((0, 3), weight=2)
with self.assertRaisesRegex(TclError,
'must specify a single element on retrieval'):
self.root.grid_rowconfigure((0, 3))
b = tkinter.Button(self.root)
b.grid_configure(column=0, row=0)
if tcl_version >= (8, 5):
self.root.grid_rowconfigure('all', weight=3)
with self.assertRaisesRegex(TclError, 'expected integer but got "all"'):
self.root.grid_rowconfigure('all')
self.assertEqual(self.root.grid_rowconfigure(0, 'weight'), 3)
self.assertEqual(self.root.grid_rowconfigure(3, 'weight'), 2)
self.assertEqual(self.root.grid_rowconfigure(265, 'weight'), 0)
if tcl_version >= (8, 5):
self.root.grid_rowconfigure(b, weight=4)
self.assertEqual(self.root.grid_rowconfigure(0, 'weight'), 4)
def test_grid_rowconfigure_minsize(self):
with self.assertRaisesRegex(TclError, 'bad screen distance "foo"'):
self.root.grid_rowconfigure(0, minsize='foo')
self.root.grid_rowconfigure(0, minsize=10)
self.assertEqual(self.root.grid_rowconfigure(0, 'minsize'), 10)
self.assertEqual(self.root.grid_rowconfigure(0)['minsize'], 10)
def test_grid_rowconfigure_weight(self):
with self.assertRaisesRegex(TclError, 'expected integer but got "bad"'):
self.root.grid_rowconfigure(0, weight='bad')
with self.assertRaisesRegex(TclError, 'invalid arg "-weight": '
'should be non-negative'):
self.root.grid_rowconfigure(0, weight=-3)
self.root.grid_rowconfigure(0, weight=3)
self.assertEqual(self.root.grid_rowconfigure(0, 'weight'), 3)
self.assertEqual(self.root.grid_rowconfigure(0)['weight'], 3)
def test_grid_rowconfigure_pad(self):
with self.assertRaisesRegex(TclError, 'bad screen distance "foo"'):
self.root.grid_rowconfigure(0, pad='foo')
with self.assertRaisesRegex(TclError, 'invalid arg "-pad": '
'should be non-negative'):
self.root.grid_rowconfigure(0, pad=-3)
self.root.grid_rowconfigure(0, pad=3)
self.assertEqual(self.root.grid_rowconfigure(0, 'pad'), 3)
self.assertEqual(self.root.grid_rowconfigure(0)['pad'], 3)
def test_grid_rowconfigure_uniform(self):
self.root.grid_rowconfigure(0, uniform='foo')
self.assertEqual(self.root.grid_rowconfigure(0, 'uniform'), 'foo')
self.assertEqual(self.root.grid_rowconfigure(0)['uniform'], 'foo')
def test_grid_forget(self):
b = tkinter.Button(self.root)
c = tkinter.Button(self.root)
b.grid_configure(row=2, column=2, rowspan=2, columnspan=2,
padx=3, pady=4, sticky='ns')
self.assertEqual(self.root.grid_slaves(), [b])
b.grid_forget()
c.grid_forget()
self.assertEqual(self.root.grid_slaves(), [])
self.assertEqual(b.grid_info(), {})
b.grid_configure(row=0, column=0)
info = b.grid_info()
self.assertEqual(info['row'], self._str(0))
self.assertEqual(info['column'], self._str(0))
self.assertEqual(info['rowspan'], self._str(1))
self.assertEqual(info['columnspan'], self._str(1))
self.assertEqual(info['padx'], self._str(0))
self.assertEqual(info['pady'], self._str(0))
self.assertEqual(info['sticky'], '')
def test_grid_remove(self):
b = tkinter.Button(self.root)
c = tkinter.Button(self.root)
b.grid_configure(row=2, column=2, rowspan=2, columnspan=2,
padx=3, pady=4, sticky='ns')
self.assertEqual(self.root.grid_slaves(), [b])
b.grid_remove()
c.grid_remove()
self.assertEqual(self.root.grid_slaves(), [])
self.assertEqual(b.grid_info(), {})
b.grid_configure(row=0, column=0)
info = b.grid_info()
self.assertEqual(info['row'], self._str(0))
self.assertEqual(info['column'], self._str(0))
self.assertEqual(info['rowspan'], self._str(2))
self.assertEqual(info['columnspan'], self._str(2))
self.assertEqual(info['padx'], self._str(3))
self.assertEqual(info['pady'], self._str(4))
self.assertEqual(info['sticky'], 'ns')
def test_grid_info(self):
b = tkinter.Button(self.root)
self.assertEqual(b.grid_info(), {})
b.grid_configure(row=2, column=2, rowspan=2, columnspan=2,
padx=3, pady=4, sticky='ns')
info = b.grid_info()
self.assertIsInstance(info, dict)
self.assertEqual(info['in'], self.root)
self.assertEqual(info['row'], self._str(2))
self.assertEqual(info['column'], self._str(2))
self.assertEqual(info['rowspan'], self._str(2))
self.assertEqual(info['columnspan'], self._str(2))
self.assertEqual(info['padx'], self._str(3))
self.assertEqual(info['pady'], self._str(4))
self.assertEqual(info['sticky'], 'ns')
@requires_tcl(8, 5)
def test_grid_anchor(self):
with self.assertRaisesRegex(TclError, 'bad anchor "x"'):
self.root.grid_anchor('x')
with self.assertRaisesRegex(TclError, 'ambiguous anchor ""'):
self.root.grid_anchor('')
with self.assertRaises(TypeError):
self.root.grid_anchor('se', 'nw')
self.root.grid_anchor('se')
self.assertEqual(self.root.tk.call('grid', 'anchor', self.root), 'se')
def test_grid_bbox(self):
self.assertEqual(self.root.grid_bbox(), (0, 0, 0, 0))
self.assertEqual(self.root.grid_bbox(0, 0), (0, 0, 0, 0))
self.assertEqual(self.root.grid_bbox(0, 0, 1, 1), (0, 0, 0, 0))
with self.assertRaisesRegex(TclError, 'expected integer but got "x"'):
self.root.grid_bbox('x', 0)
with self.assertRaisesRegex(TclError, 'expected integer but got "x"'):
self.root.grid_bbox(0, 'x')
with self.assertRaisesRegex(TclError, 'expected integer but got "x"'):
self.root.grid_bbox(0, 0, 'x', 0)
with self.assertRaisesRegex(TclError, 'expected integer but got "x"'):
self.root.grid_bbox(0, 0, 0, 'x')
with self.assertRaises(TypeError):
self.root.grid_bbox(0, 0, 0, 0, 0)
t = self.root
# de-maximize
t.wm_geometry('1x1+0+0')
t.wm_geometry('')
f1 = tkinter.Frame(t, width=75, height=75, bg='red')
f2 = tkinter.Frame(t, width=90, height=90, bg='blue')
f1.grid_configure(row=0, column=0)
f2.grid_configure(row=1, column=1)
self.root.update()
self.assertEqual(t.grid_bbox(), (0, 0, 165, 165))
self.assertEqual(t.grid_bbox(0, 0), (0, 0, 75, 75))
self.assertEqual(t.grid_bbox(0, 0, 1, 1), (0, 0, 165, 165))
self.assertEqual(t.grid_bbox(1, 1), (75, 75, 90, 90))
self.assertEqual(t.grid_bbox(10, 10, 0, 0), (0, 0, 165, 165))
self.assertEqual(t.grid_bbox(-2, -2, -1, -1), (0, 0, 0, 0))
self.assertEqual(t.grid_bbox(10, 10, 12, 12), (165, 165, 0, 0))
def test_grid_location(self):
with self.assertRaises(TypeError):
self.root.grid_location()
with self.assertRaises(TypeError):
self.root.grid_location(0)
with self.assertRaises(TypeError):
self.root.grid_location(0, 0, 0)
with self.assertRaisesRegex(TclError, 'bad screen distance "x"'):
self.root.grid_location('x', 'y')
with self.assertRaisesRegex(TclError, 'bad screen distance "y"'):
self.root.grid_location('1c', 'y')
t = self.root
# de-maximize
t.wm_geometry('1x1+0+0')
t.wm_geometry('')
f = tkinter.Frame(t, width=200, height=100,
highlightthickness=0, bg='red')
self.assertEqual(f.grid_location(10, 10), (-1, -1))
f.grid_configure()
self.root.update()
self.assertEqual(t.grid_location(-10, -10), (-1, -1))
self.assertEqual(t.grid_location(-10, 0), (-1, 0))
self.assertEqual(t.grid_location(-1, 0), (-1, 0))
self.assertEqual(t.grid_location(0, -10), (0, -1))
self.assertEqual(t.grid_location(0, -1), (0, -1))
self.assertEqual(t.grid_location(0, 0), (0, 0))
self.assertEqual(t.grid_location(200, 0), (0, 0))
self.assertEqual(t.grid_location(201, 0), (1, 0))
self.assertEqual(t.grid_location(0, 100), (0, 0))
self.assertEqual(t.grid_location(0, 101), (0, 1))
self.assertEqual(t.grid_location(201, 101), (1, 1))
def test_grid_propagate(self):
self.assertEqual(self.root.grid_propagate(), True)
with self.assertRaises(TypeError):
self.root.grid_propagate(False, False)
self.root.grid_propagate(False)
self.assertFalse(self.root.grid_propagate())
f = tkinter.Frame(self.root, width=100, height=100, bg='red')
f.grid_configure(row=0, column=0)
self.root.update()
self.assertEqual(f.winfo_width(), 100)
self.assertEqual(f.winfo_height(), 100)
f.grid_propagate(False)
g = tkinter.Frame(self.root, width=75, height=85, bg='green')
g.grid_configure(in_=f, row=0, column=0)
self.root.update()
self.assertEqual(f.winfo_width(), 100)
self.assertEqual(f.winfo_height(), 100)
f.grid_propagate(True)
self.root.update()
self.assertEqual(f.winfo_width(), 75)
self.assertEqual(f.winfo_height(), 85)
def test_grid_size(self):
with self.assertRaises(TypeError):
self.root.grid_size(0)
self.assertEqual(self.root.grid_size(), (0, 0))
f = tkinter.Scale(self.root)
f.grid_configure(row=0, column=0)
self.assertEqual(self.root.grid_size(), (1, 1))
f.grid_configure(row=4, column=5)
self.assertEqual(self.root.grid_size(), (6, 5))
def test_grid_slaves(self):
self.assertEqual(self.root.grid_slaves(), [])
a = tkinter.Label(self.root)
a.grid_configure(row=0, column=1)
b = tkinter.Label(self.root)
b.grid_configure(row=1, column=0)
c = tkinter.Label(self.root)
c.grid_configure(row=1, column=1)
d = tkinter.Label(self.root)
d.grid_configure(row=1, column=1)
self.assertEqual(self.root.grid_slaves(), [d, c, b, a])
self.assertEqual(self.root.grid_slaves(row=0), [a])
self.assertEqual(self.root.grid_slaves(row=1), [d, c, b])
self.assertEqual(self.root.grid_slaves(column=0), [b])
self.assertEqual(self.root.grid_slaves(column=1), [d, c, a])
self.assertEqual(self.root.grid_slaves(row=1, column=1), [d, c])
tests_gui = (
PackTest, PlaceTest, GridTest,
)
if __name__ == '__main__':
unittest.main()
>>>>>>> b875702c9c06ab5012e52ff4337439b03918f453
=======
import unittest
import re
import tkinter
from tkinter import TclError
from test.support import requires
from tkinter.test.support import pixels_conv, tcl_version, requires_tcl
from tkinter.test.widget_tests import AbstractWidgetTest
requires('gui')
class PackTest(AbstractWidgetTest, unittest.TestCase):
def create2(self):
pack = tkinter.Toplevel(self.root, name='pack')
pack.wm_geometry('300x200+0+0')
pack.wm_minsize(1, 1)
a = tkinter.Frame(pack, name='a', width=20, height=40, bg='red')
b = tkinter.Frame(pack, name='b', width=50, height=30, bg='blue')
c = tkinter.Frame(pack, name='c', width=80, height=80, bg='green')
d = tkinter.Frame(pack, name='d', width=40, height=30, bg='yellow')
return pack, a, b, c, d
def test_pack_configure_after(self):
pack, a, b, c, d = self.create2()
with self.assertRaisesRegex(TclError, 'window "%s" isn\'t packed' % b):
a.pack_configure(after=b)
with self.assertRaisesRegex(TclError, 'bad window path name ".foo"'):
a.pack_configure(after='.foo')
a.pack_configure(side='top')
b.pack_configure(side='top')
c.pack_configure(side='top')
d.pack_configure(side='top')
self.assertEqual(pack.pack_slaves(), [a, b, c, d])
a.pack_configure(after=b)
self.assertEqual(pack.pack_slaves(), [b, a, c, d])
a.pack_configure(after=a)
self.assertEqual(pack.pack_slaves(), [b, a, c, d])
def test_pack_configure_anchor(self):
pack, a, b, c, d = self.create2()
def check(anchor, geom):
a.pack_configure(side='top', ipadx=5, padx=10, ipady=15, pady=20,
expand=True, anchor=anchor)
self.root.update()
self.assertEqual(a.winfo_geometry(), geom)
check('n', '30x70+135+20')
check('ne', '30x70+260+20')
check('e', '30x70+260+65')
check('se', '30x70+260+110')
check('s', '30x70+135+110')
check('sw', '30x70+10+110')
check('w', '30x70+10+65')
check('nw', '30x70+10+20')
check('center', '30x70+135+65')
def test_pack_configure_before(self):
pack, a, b, c, d = self.create2()
with self.assertRaisesRegex(TclError, 'window "%s" isn\'t packed' % b):
a.pack_configure(before=b)
with self.assertRaisesRegex(TclError, 'bad window path name ".foo"'):
a.pack_configure(before='.foo')
a.pack_configure(side='top')
b.pack_configure(side='top')
c.pack_configure(side='top')
d.pack_configure(side='top')
self.assertEqual(pack.pack_slaves(), [a, b, c, d])
a.pack_configure(before=d)
self.assertEqual(pack.pack_slaves(), [b, c, a, d])
a.pack_configure(before=a)
self.assertEqual(pack.pack_slaves(), [b, c, a, d])
def test_pack_configure_expand(self):
pack, a, b, c, d = self.create2()
def check(*geoms):
self.root.update()
self.assertEqual(a.winfo_geometry(), geoms[0])
self.assertEqual(b.winfo_geometry(), geoms[1])
self.assertEqual(c.winfo_geometry(), geoms[2])
self.assertEqual(d.winfo_geometry(), geoms[3])
a.pack_configure(side='left')
b.pack_configure(side='top')
c.pack_configure(side='right')
d.pack_configure(side='bottom')
check('20x40+0+80', '50x30+135+0', '80x80+220+75', '40x30+100+170')
a.pack_configure(side='left', expand='yes')
b.pack_configure(side='top', expand='on')
c.pack_configure(side='right', expand=True)
d.pack_configure(side='bottom', expand=1)
check('20x40+40+80', '50x30+175+35', '80x80+180+110', '40x30+100+135')
a.pack_configure(side='left', expand='yes', fill='both')
b.pack_configure(side='top', expand='on', fill='both')
c.pack_configure(side='right', expand=True, fill='both')
d.pack_configure(side='bottom', expand=1, fill='both')
check('100x200+0+0', '200x100+100+0', '160x100+140+100', '40x100+100+100')
def test_pack_configure_in(self):
pack, a, b, c, d = self.create2()
a.pack_configure(side='top')
b.pack_configure(side='top')
c.pack_configure(side='top')
d.pack_configure(side='top')
a.pack_configure(in_=pack)
self.assertEqual(pack.pack_slaves(), [b, c, d, a])
a.pack_configure(in_=c)
self.assertEqual(pack.pack_slaves(), [b, c, d])
self.assertEqual(c.pack_slaves(), [a])
with self.assertRaisesRegex(TclError,
'can\'t pack %s inside itself' % (a,)):
a.pack_configure(in_=a)
with self.assertRaisesRegex(TclError, 'bad window path name ".foo"'):
a.pack_configure(in_='.foo')
def test_pack_configure_padx_ipadx_fill(self):
pack, a, b, c, d = self.create2()
def check(geom1, geom2, **kwargs):
a.pack_forget()
b.pack_forget()
a.pack_configure(**kwargs)
b.pack_configure(expand=True, fill='both')
self.root.update()
self.assertEqual(a.winfo_geometry(), geom1)
self.assertEqual(b.winfo_geometry(), geom2)
check('20x40+260+80', '240x200+0+0', side='right', padx=20)
check('20x40+250+80', '240x200+0+0', side='right', padx=(10, 30))
check('60x40+240+80', '240x200+0+0', side='right', ipadx=20)
check('30x40+260+80', '250x200+0+0', side='right', ipadx=5, padx=10)
check('20x40+260+80', '240x200+0+0', side='right', padx=20, fill='x')
check('20x40+249+80', '240x200+0+0',
side='right', padx=(9, 31), fill='x')
check('60x40+240+80', '240x200+0+0', side='right', ipadx=20, fill='x')
check('30x40+260+80', '250x200+0+0',
side='right', ipadx=5, padx=10, fill='x')
check('30x40+255+80', '250x200+0+0',
side='right', ipadx=5, padx=(5, 15), fill='x')
check('20x40+140+0', '300x160+0+40', side='top', padx=20)
check('20x40+120+0', '300x160+0+40', side='top', padx=(0, 40))
check('60x40+120+0', '300x160+0+40', side='top', ipadx=20)
check('30x40+135+0', '300x160+0+40', side='top', ipadx=5, padx=10)
check('30x40+130+0', '300x160+0+40', side='top', ipadx=5, padx=(5, 15))
check('260x40+20+0', '300x160+0+40', side='top', padx=20, fill='x')
check('260x40+25+0', '300x160+0+40',
side='top', padx=(25, 15), fill='x')
check('300x40+0+0', '300x160+0+40', side='top', ipadx=20, fill='x')
check('280x40+10+0', '300x160+0+40',
side='top', ipadx=5, padx=10, fill='x')
check('280x40+5+0', '300x160+0+40',
side='top', ipadx=5, padx=(5, 15), fill='x')
a.pack_configure(padx='1c')
self.assertEqual(a.pack_info()['padx'],
self._str(pack.winfo_pixels('1c')))
a.pack_configure(ipadx='1c')
self.assertEqual(a.pack_info()['ipadx'],
self._str(pack.winfo_pixels('1c')))
def test_pack_configure_pady_ipady_fill(self):
pack, a, b, c, d = self.create2()
def check(geom1, geom2, **kwargs):
a.pack_forget()
b.pack_forget()
a.pack_configure(**kwargs)
b.pack_configure(expand=True, fill='both')
self.root.update()
self.assertEqual(a.winfo_geometry(), geom1)
self.assertEqual(b.winfo_geometry(), geom2)
check('20x40+280+80', '280x200+0+0', side='right', pady=20)
check('20x40+280+70', '280x200+0+0', side='right', pady=(10, 30))
check('20x80+280+60', '280x200+0+0', side='right', ipady=20)
check('20x50+280+75', '280x200+0+0', side='right', ipady=5, pady=10)
check('20x40+280+80', '280x200+0+0', side='right', pady=20, fill='x')
check('20x40+280+69', '280x200+0+0',
side='right', pady=(9, 31), fill='x')
check('20x80+280+60', '280x200+0+0', side='right', ipady=20, fill='x')
check('20x50+280+75', '280x200+0+0',
side='right', ipady=5, pady=10, fill='x')
check('20x50+280+70', '280x200+0+0',
side='right', ipady=5, pady=(5, 15), fill='x')
check('20x40+140+20', '300x120+0+80', side='top', pady=20)
check('20x40+140+0', '300x120+0+80', side='top', pady=(0, 40))
check('20x80+140+0', '300x120+0+80', side='top', ipady=20)
check('20x50+140+10', '300x130+0+70', side='top', ipady=5, pady=10)
check('20x50+140+5', '300x130+0+70', side='top', ipady=5, pady=(5, 15))
check('300x40+0+20', '300x120+0+80', side='top', pady=20, fill='x')
check('300x40+0+25', '300x120+0+80',
side='top', pady=(25, 15), fill='x')
check('300x80+0+0', '300x120+0+80', side='top', ipady=20, fill='x')
check('300x50+0+10', '300x130+0+70',
side='top', ipady=5, pady=10, fill='x')
check('300x50+0+5', '300x130+0+70',
side='top', ipady=5, pady=(5, 15), fill='x')
a.pack_configure(pady='1c')
self.assertEqual(a.pack_info()['pady'],
self._str(pack.winfo_pixels('1c')))
a.pack_configure(ipady='1c')
self.assertEqual(a.pack_info()['ipady'],
self._str(pack.winfo_pixels('1c')))
def test_pack_configure_side(self):
pack, a, b, c, d = self.create2()
def check(side, geom1, geom2):
a.pack_configure(side=side)
self.assertEqual(a.pack_info()['side'], side)
b.pack_configure(expand=True, fill='both')
self.root.update()
self.assertEqual(a.winfo_geometry(), geom1)
self.assertEqual(b.winfo_geometry(), geom2)
check('top', '20x40+140+0', '300x160+0+40')
check('bottom', '20x40+140+160', '300x160+0+0')
check('left', '20x40+0+80', '280x200+20+0')
check('right', '20x40+280+80', '280x200+0+0')
def test_pack_forget(self):
pack, a, b, c, d = self.create2()
a.pack_configure()
b.pack_configure()
c.pack_configure()
self.assertEqual(pack.pack_slaves(), [a, b, c])
b.pack_forget()
self.assertEqual(pack.pack_slaves(), [a, c])
b.pack_forget()
self.assertEqual(pack.pack_slaves(), [a, c])
d.pack_forget()
def test_pack_info(self):
pack, a, b, c, d = self.create2()
with self.assertRaisesRegex(TclError, 'window "%s" isn\'t packed' % a):
a.pack_info()
a.pack_configure()
b.pack_configure(side='right', in_=a, anchor='s', expand=True, fill='x',
ipadx=5, padx=10, ipady=2, pady=(5, 15))
info = a.pack_info()
self.assertIsInstance(info, dict)
self.assertEqual(info['anchor'], 'center')
self.assertEqual(info['expand'], self._str(0))
self.assertEqual(info['fill'], 'none')
self.assertEqual(info['in'], pack)
self.assertEqual(info['ipadx'], self._str(0))
self.assertEqual(info['ipady'], self._str(0))
self.assertEqual(info['padx'], self._str(0))
self.assertEqual(info['pady'], self._str(0))
self.assertEqual(info['side'], 'top')
info = b.pack_info()
self.assertIsInstance(info, dict)
self.assertEqual(info['anchor'], 's')
self.assertEqual(info['expand'], self._str(1))
self.assertEqual(info['fill'], 'x')
self.assertEqual(info['in'], a)
self.assertEqual(info['ipadx'], self._str(5))
self.assertEqual(info['ipady'], self._str(2))
self.assertEqual(info['padx'], self._str(10))
self.assertEqual(info['pady'], self._str((5, 15)))
self.assertEqual(info['side'], 'right')
def test_pack_propagate(self):
pack, a, b, c, d = self.create2()
pack.configure(width=300, height=200)
a.pack_configure()
pack.pack_propagate(False)
self.root.update()
self.assertEqual(pack.winfo_reqwidth(), 300)
self.assertEqual(pack.winfo_reqheight(), 200)
pack.pack_propagate(True)
self.root.update()
self.assertEqual(pack.winfo_reqwidth(), 20)
self.assertEqual(pack.winfo_reqheight(), 40)
def test_pack_slaves(self):
pack, a, b, c, d = self.create2()
self.assertEqual(pack.pack_slaves(), [])
a.pack_configure()
self.assertEqual(pack.pack_slaves(), [a])
b.pack_configure()
self.assertEqual(pack.pack_slaves(), [a, b])
class PlaceTest(AbstractWidgetTest, unittest.TestCase):
def create2(self):
t = tkinter.Toplevel(self.root, width=300, height=200, bd=0)
t.wm_geometry('300x200+0+0')
f = tkinter.Frame(t, width=154, height=84, bd=2, relief='raised')
f.place_configure(x=48, y=38)
f2 = tkinter.Frame(t, width=30, height=60, bd=2, relief='raised')
self.root.update()
return t, f, f2
def test_place_configure_in(self):
t, f, f2 = self.create2()
self.assertEqual(f2.winfo_manager(), '')
with self.assertRaisesRegex(TclError, "can't place %s relative to "
"itself" % re.escape(str(f2))):
f2.place_configure(in_=f2)
if tcl_version >= (8, 5):
self.assertEqual(f2.winfo_manager(), '')
with self.assertRaisesRegex(TclError, 'bad window path name'):
f2.place_configure(in_='spam')
f2.place_configure(in_=f)
self.assertEqual(f2.winfo_manager(), 'place')
def test_place_configure_x(self):
t, f, f2 = self.create2()
f2.place_configure(in_=f)
self.assertEqual(f2.place_info()['x'], '0')
self.root.update()
self.assertEqual(f2.winfo_x(), 50)
f2.place_configure(x=100)
self.assertEqual(f2.place_info()['x'], '100')
self.root.update()
self.assertEqual(f2.winfo_x(), 150)
f2.place_configure(x=-10, relx=1)
self.assertEqual(f2.place_info()['x'], '-10')
self.root.update()
self.assertEqual(f2.winfo_x(), 190)
with self.assertRaisesRegex(TclError, 'bad screen distance "spam"'):
f2.place_configure(in_=f, x='spam')
def test_place_configure_y(self):
t, f, f2 = self.create2()
f2.place_configure(in_=f)
self.assertEqual(f2.place_info()['y'], '0')
self.root.update()
self.assertEqual(f2.winfo_y(), 40)
f2.place_configure(y=50)
self.assertEqual(f2.place_info()['y'], '50')
self.root.update()
self.assertEqual(f2.winfo_y(), 90)
f2.place_configure(y=-10, rely=1)
self.assertEqual(f2.place_info()['y'], '-10')
self.root.update()
self.assertEqual(f2.winfo_y(), 110)
with self.assertRaisesRegex(TclError, 'bad screen distance "spam"'):
f2.place_configure(in_=f, y='spam')
def test_place_configure_relx(self):
t, f, f2 = self.create2()
f2.place_configure(in_=f)
self.assertEqual(f2.place_info()['relx'], '0')
self.root.update()
self.assertEqual(f2.winfo_x(), 50)
f2.place_configure(relx=0.5)
self.assertEqual(f2.place_info()['relx'], '0.5')
self.root.update()
self.assertEqual(f2.winfo_x(), 125)
f2.place_configure(relx=1)
self.assertEqual(f2.place_info()['relx'], '1')
self.root.update()
self.assertEqual(f2.winfo_x(), 200)
with self.assertRaisesRegex(TclError, 'expected floating-point number '
'but got "spam"'):
f2.place_configure(in_=f, relx='spam')
def test_place_configure_rely(self):
t, f, f2 = self.create2()
f2.place_configure(in_=f)
self.assertEqual(f2.place_info()['rely'], '0')
self.root.update()
self.assertEqual(f2.winfo_y(), 40)
f2.place_configure(rely=0.5)
self.assertEqual(f2.place_info()['rely'], '0.5')
self.root.update()
self.assertEqual(f2.winfo_y(), 80)
f2.place_configure(rely=1)
self.assertEqual(f2.place_info()['rely'], '1')
self.root.update()
self.assertEqual(f2.winfo_y(), 120)
with self.assertRaisesRegex(TclError, 'expected floating-point number '
'but got "spam"'):
f2.place_configure(in_=f, rely='spam')
def test_place_configure_anchor(self):
f = tkinter.Frame(self.root)
with self.assertRaisesRegex(TclError, 'bad anchor "j"'):
f.place_configure(anchor='j')
with self.assertRaisesRegex(TclError, 'ambiguous anchor ""'):
f.place_configure(anchor='')
for value in 'n', 'ne', 'e', 'se', 's', 'sw', 'w', 'nw', 'center':
f.place_configure(anchor=value)
self.assertEqual(f.place_info()['anchor'], value)
def test_place_configure_width(self):
t, f, f2 = self.create2()
f2.place_configure(in_=f, width=120)
self.root.update()
self.assertEqual(f2.winfo_width(), 120)
f2.place_configure(width='')
self.root.update()
self.assertEqual(f2.winfo_width(), 30)
with self.assertRaisesRegex(TclError, 'bad screen distance "abcd"'):
f2.place_configure(width='abcd')
def test_place_configure_height(self):
t, f, f2 = self.create2()
f2.place_configure(in_=f, height=120)
self.root.update()
self.assertEqual(f2.winfo_height(), 120)
f2.place_configure(height='')
self.root.update()
self.assertEqual(f2.winfo_height(), 60)
with self.assertRaisesRegex(TclError, 'bad screen distance "abcd"'):
f2.place_configure(height='abcd')
def test_place_configure_relwidth(self):
t, f, f2 = self.create2()
f2.place_configure(in_=f, relwidth=0.5)
self.root.update()
self.assertEqual(f2.winfo_width(), 75)
f2.place_configure(relwidth='')
self.root.update()
self.assertEqual(f2.winfo_width(), 30)
with self.assertRaisesRegex(TclError, 'expected floating-point number '
'but got "abcd"'):
f2.place_configure(relwidth='abcd')
def test_place_configure_relheight(self):
t, f, f2 = self.create2()
f2.place_configure(in_=f, relheight=0.5)
self.root.update()
self.assertEqual(f2.winfo_height(), 40)
f2.place_configure(relheight='')
self.root.update()
self.assertEqual(f2.winfo_height(), 60)
with self.assertRaisesRegex(TclError, 'expected floating-point number '
'but got "abcd"'):
f2.place_configure(relheight='abcd')
def test_place_configure_bordermode(self):
f = tkinter.Frame(self.root)
with self.assertRaisesRegex(TclError, 'bad bordermode "j"'):
f.place_configure(bordermode='j')
with self.assertRaisesRegex(TclError, 'ambiguous bordermode ""'):
f.place_configure(bordermode='')
for value in 'inside', 'outside', 'ignore':
f.place_configure(bordermode=value)
self.assertEqual(f.place_info()['bordermode'], value)
def test_place_forget(self):
foo = tkinter.Frame(self.root)
foo.place_configure(width=50, height=50)
self.root.update()
foo.place_forget()
self.root.update()
self.assertFalse(foo.winfo_ismapped())
with self.assertRaises(TypeError):
foo.place_forget(0)
def test_place_info(self):
t, f, f2 = self.create2()
f2.place_configure(in_=f, x=1, y=2, width=3, height=4,
relx=0.1, rely=0.2, relwidth=0.3, relheight=0.4,
anchor='se', bordermode='outside')
info = f2.place_info()
self.assertIsInstance(info, dict)
self.assertEqual(info['x'], '1')
self.assertEqual(info['y'], '2')
self.assertEqual(info['width'], '3')
self.assertEqual(info['height'], '4')
self.assertEqual(info['relx'], '0.1')
self.assertEqual(info['rely'], '0.2')
self.assertEqual(info['relwidth'], '0.3')
self.assertEqual(info['relheight'], '0.4')
self.assertEqual(info['anchor'], 'se')
self.assertEqual(info['bordermode'], 'outside')
self.assertEqual(info['x'], '1')
self.assertEqual(info['x'], '1')
with self.assertRaises(TypeError):
f2.place_info(0)
def test_place_slaves(self):
foo = tkinter.Frame(self.root)
bar = tkinter.Frame(self.root)
self.assertEqual(foo.place_slaves(), [])
bar.place_configure(in_=foo)
self.assertEqual(foo.place_slaves(), [bar])
with self.assertRaises(TypeError):
foo.place_slaves(0)
class GridTest(AbstractWidgetTest, unittest.TestCase):
def tearDown(self):
cols, rows = self.root.grid_size()
for i in range(cols + 1):
self.root.grid_columnconfigure(i, weight=0, minsize=0, pad=0, uniform='')
for i in range(rows + 1):
self.root.grid_rowconfigure(i, weight=0, minsize=0, pad=0, uniform='')
self.root.grid_propagate(1)
if tcl_version >= (8, 5):
self.root.grid_anchor('nw')
super().tearDown()
def test_grid_configure(self):
b = tkinter.Button(self.root)
self.assertEqual(b.grid_info(), {})
b.grid_configure()
self.assertEqual(b.grid_info()['in'], self.root)
self.assertEqual(b.grid_info()['column'], self._str(0))
self.assertEqual(b.grid_info()['row'], self._str(0))
b.grid_configure({'column': 1}, row=2)
self.assertEqual(b.grid_info()['column'], self._str(1))
self.assertEqual(b.grid_info()['row'], self._str(2))
def test_grid_configure_column(self):
b = tkinter.Button(self.root)
with self.assertRaisesRegex(TclError, 'bad column value "-1": '
'must be a non-negative integer'):
b.grid_configure(column=-1)
b.grid_configure(column=2)
self.assertEqual(b.grid_info()['column'], self._str(2))
def test_grid_configure_columnspan(self):
b = tkinter.Button(self.root)
with self.assertRaisesRegex(TclError, 'bad columnspan value "0": '
'must be a positive integer'):
b.grid_configure(columnspan=0)
b.grid_configure(columnspan=2)
self.assertEqual(b.grid_info()['columnspan'], self._str(2))
def test_grid_configure_in(self):
f = tkinter.Frame(self.root)
b = tkinter.Button(self.root)
self.assertEqual(b.grid_info(), {})
b.grid_configure()
self.assertEqual(b.grid_info()['in'], self.root)
b.grid_configure(in_=f)
self.assertEqual(b.grid_info()['in'], f)
b.grid_configure({'in': self.root})
self.assertEqual(b.grid_info()['in'], self.root)
def test_grid_configure_ipadx(self):
b = tkinter.Button(self.root)
with self.assertRaisesRegex(TclError, 'bad ipadx value "-1": '
'must be positive screen distance'):
b.grid_configure(ipadx=-1)
b.grid_configure(ipadx=1)
self.assertEqual(b.grid_info()['ipadx'], self._str(1))
b.grid_configure(ipadx='.5c')
self.assertEqual(b.grid_info()['ipadx'],
self._str(round(pixels_conv('.5c') * self.scaling)))
def test_grid_configure_ipady(self):
b = tkinter.Button(self.root)
with self.assertRaisesRegex(TclError, 'bad ipady value "-1": '
'must be positive screen distance'):
b.grid_configure(ipady=-1)
b.grid_configure(ipady=1)
self.assertEqual(b.grid_info()['ipady'], self._str(1))
b.grid_configure(ipady='.5c')
self.assertEqual(b.grid_info()['ipady'],
self._str(round(pixels_conv('.5c') * self.scaling)))
def test_grid_configure_padx(self):
b = tkinter.Button(self.root)
with self.assertRaisesRegex(TclError, 'bad pad value "-1": '
'must be positive screen distance'):
b.grid_configure(padx=-1)
b.grid_configure(padx=1)
self.assertEqual(b.grid_info()['padx'], self._str(1))
b.grid_configure(padx=(10, 5))
self.assertEqual(b.grid_info()['padx'], self._str((10, 5)))
b.grid_configure(padx='.5c')
self.assertEqual(b.grid_info()['padx'],
self._str(round(pixels_conv('.5c') * self.scaling)))
def test_grid_configure_pady(self):
b = tkinter.Button(self.root)
with self.assertRaisesRegex(TclError, 'bad pad value "-1": '
'must be positive screen distance'):
b.grid_configure(pady=-1)
b.grid_configure(pady=1)
self.assertEqual(b.grid_info()['pady'], self._str(1))
b.grid_configure(pady=(10, 5))
self.assertEqual(b.grid_info()['pady'], self._str((10, 5)))
b.grid_configure(pady='.5c')
self.assertEqual(b.grid_info()['pady'],
self._str(round(pixels_conv('.5c') * self.scaling)))
def test_grid_configure_row(self):
b = tkinter.Button(self.root)
with self.assertRaisesRegex(TclError, 'bad (row|grid) value "-1": '
'must be a non-negative integer'):
b.grid_configure(row=-1)
b.grid_configure(row=2)
self.assertEqual(b.grid_info()['row'], self._str(2))
def test_grid_configure_rownspan(self):
b = tkinter.Button(self.root)
with self.assertRaisesRegex(TclError, 'bad rowspan value "0": '
'must be a positive integer'):
b.grid_configure(rowspan=0)
b.grid_configure(rowspan=2)
self.assertEqual(b.grid_info()['rowspan'], self._str(2))
def test_grid_configure_sticky(self):
f = tkinter.Frame(self.root, bg='red')
with self.assertRaisesRegex(TclError, 'bad stickyness value "glue"'):
f.grid_configure(sticky='glue')
f.grid_configure(sticky='ne')
self.assertEqual(f.grid_info()['sticky'], 'ne')
f.grid_configure(sticky='n,s,e,w')
self.assertEqual(f.grid_info()['sticky'], 'nesw')
def test_grid_columnconfigure(self):
with self.assertRaises(TypeError):
self.root.grid_columnconfigure()
self.assertEqual(self.root.grid_columnconfigure(0),
{'minsize': 0, 'pad': 0, 'uniform': None, 'weight': 0})
with self.assertRaisesRegex(TclError, 'bad option "-foo"'):
self.root.grid_columnconfigure(0, 'foo')
self.root.grid_columnconfigure((0, 3), weight=2)
with self.assertRaisesRegex(TclError,
'must specify a single element on retrieval'):
self.root.grid_columnconfigure((0, 3))
b = tkinter.Button(self.root)
b.grid_configure(column=0, row=0)
if tcl_version >= (8, 5):
self.root.grid_columnconfigure('all', weight=3)
with self.assertRaisesRegex(TclError, 'expected integer but got "all"'):
self.root.grid_columnconfigure('all')
self.assertEqual(self.root.grid_columnconfigure(0, 'weight'), 3)
self.assertEqual(self.root.grid_columnconfigure(3, 'weight'), 2)
self.assertEqual(self.root.grid_columnconfigure(265, 'weight'), 0)
if tcl_version >= (8, 5):
self.root.grid_columnconfigure(b, weight=4)
self.assertEqual(self.root.grid_columnconfigure(0, 'weight'), 4)
def test_grid_columnconfigure_minsize(self):
with self.assertRaisesRegex(TclError, 'bad screen distance "foo"'):
self.root.grid_columnconfigure(0, minsize='foo')
self.root.grid_columnconfigure(0, minsize=10)
self.assertEqual(self.root.grid_columnconfigure(0, 'minsize'), 10)
self.assertEqual(self.root.grid_columnconfigure(0)['minsize'], 10)
def test_grid_columnconfigure_weight(self):
with self.assertRaisesRegex(TclError, 'expected integer but got "bad"'):
self.root.grid_columnconfigure(0, weight='bad')
with self.assertRaisesRegex(TclError, 'invalid arg "-weight": '
'should be non-negative'):
self.root.grid_columnconfigure(0, weight=-3)
self.root.grid_columnconfigure(0, weight=3)
self.assertEqual(self.root.grid_columnconfigure(0, 'weight'), 3)
self.assertEqual(self.root.grid_columnconfigure(0)['weight'], 3)
def test_grid_columnconfigure_pad(self):
with self.assertRaisesRegex(TclError, 'bad screen distance "foo"'):
self.root.grid_columnconfigure(0, pad='foo')
with self.assertRaisesRegex(TclError, 'invalid arg "-pad": '
'should be non-negative'):
self.root.grid_columnconfigure(0, pad=-3)
self.root.grid_columnconfigure(0, pad=3)
self.assertEqual(self.root.grid_columnconfigure(0, 'pad'), 3)
self.assertEqual(self.root.grid_columnconfigure(0)['pad'], 3)
def test_grid_columnconfigure_uniform(self):
self.root.grid_columnconfigure(0, uniform='foo')
self.assertEqual(self.root.grid_columnconfigure(0, 'uniform'), 'foo')
self.assertEqual(self.root.grid_columnconfigure(0)['uniform'], 'foo')
def test_grid_rowconfigure(self):
with self.assertRaises(TypeError):
self.root.grid_rowconfigure()
self.assertEqual(self.root.grid_rowconfigure(0),
{'minsize': 0, 'pad': 0, 'uniform': None, 'weight': 0})
with self.assertRaisesRegex(TclError, 'bad option "-foo"'):
self.root.grid_rowconfigure(0, 'foo')
self.root.grid_rowconfigure((0, 3), weight=2)
with self.assertRaisesRegex(TclError,
'must specify a single element on retrieval'):
self.root.grid_rowconfigure((0, 3))
b = tkinter.Button(self.root)
b.grid_configure(column=0, row=0)
if tcl_version >= (8, 5):
self.root.grid_rowconfigure('all', weight=3)
with self.assertRaisesRegex(TclError, 'expected integer but got "all"'):
self.root.grid_rowconfigure('all')
self.assertEqual(self.root.grid_rowconfigure(0, 'weight'), 3)
self.assertEqual(self.root.grid_rowconfigure(3, 'weight'), 2)
self.assertEqual(self.root.grid_rowconfigure(265, 'weight'), 0)
if tcl_version >= (8, 5):
self.root.grid_rowconfigure(b, weight=4)
self.assertEqual(self.root.grid_rowconfigure(0, 'weight'), 4)
def test_grid_rowconfigure_minsize(self):
with self.assertRaisesRegex(TclError, 'bad screen distance "foo"'):
self.root.grid_rowconfigure(0, minsize='foo')
self.root.grid_rowconfigure(0, minsize=10)
self.assertEqual(self.root.grid_rowconfigure(0, 'minsize'), 10)
self.assertEqual(self.root.grid_rowconfigure(0)['minsize'], 10)
def test_grid_rowconfigure_weight(self):
with self.assertRaisesRegex(TclError, 'expected integer but got "bad"'):
self.root.grid_rowconfigure(0, weight='bad')
with self.assertRaisesRegex(TclError, 'invalid arg "-weight": '
'should be non-negative'):
self.root.grid_rowconfigure(0, weight=-3)
self.root.grid_rowconfigure(0, weight=3)
self.assertEqual(self.root.grid_rowconfigure(0, 'weight'), 3)
self.assertEqual(self.root.grid_rowconfigure(0)['weight'], 3)
def test_grid_rowconfigure_pad(self):
with self.assertRaisesRegex(TclError, 'bad screen distance "foo"'):
self.root.grid_rowconfigure(0, pad='foo')
with self.assertRaisesRegex(TclError, 'invalid arg "-pad": '
'should be non-negative'):
self.root.grid_rowconfigure(0, pad=-3)
self.root.grid_rowconfigure(0, pad=3)
self.assertEqual(self.root.grid_rowconfigure(0, 'pad'), 3)
self.assertEqual(self.root.grid_rowconfigure(0)['pad'], 3)
def test_grid_rowconfigure_uniform(self):
self.root.grid_rowconfigure(0, uniform='foo')
self.assertEqual(self.root.grid_rowconfigure(0, 'uniform'), 'foo')
self.assertEqual(self.root.grid_rowconfigure(0)['uniform'], 'foo')
def test_grid_forget(self):
b = tkinter.Button(self.root)
c = tkinter.Button(self.root)
b.grid_configure(row=2, column=2, rowspan=2, columnspan=2,
padx=3, pady=4, sticky='ns')
self.assertEqual(self.root.grid_slaves(), [b])
b.grid_forget()
c.grid_forget()
self.assertEqual(self.root.grid_slaves(), [])
self.assertEqual(b.grid_info(), {})
b.grid_configure(row=0, column=0)
info = b.grid_info()
self.assertEqual(info['row'], self._str(0))
self.assertEqual(info['column'], self._str(0))
self.assertEqual(info['rowspan'], self._str(1))
self.assertEqual(info['columnspan'], self._str(1))
self.assertEqual(info['padx'], self._str(0))
self.assertEqual(info['pady'], self._str(0))
self.assertEqual(info['sticky'], '')
def test_grid_remove(self):
b = tkinter.Button(self.root)
c = tkinter.Button(self.root)
b.grid_configure(row=2, column=2, rowspan=2, columnspan=2,
padx=3, pady=4, sticky='ns')
self.assertEqual(self.root.grid_slaves(), [b])
b.grid_remove()
c.grid_remove()
self.assertEqual(self.root.grid_slaves(), [])
self.assertEqual(b.grid_info(), {})
b.grid_configure(row=0, column=0)
info = b.grid_info()
self.assertEqual(info['row'], self._str(0))
self.assertEqual(info['column'], self._str(0))
self.assertEqual(info['rowspan'], self._str(2))
self.assertEqual(info['columnspan'], self._str(2))
self.assertEqual(info['padx'], self._str(3))
self.assertEqual(info['pady'], self._str(4))
self.assertEqual(info['sticky'], 'ns')
def test_grid_info(self):
b = tkinter.Button(self.root)
self.assertEqual(b.grid_info(), {})
b.grid_configure(row=2, column=2, rowspan=2, columnspan=2,
padx=3, pady=4, sticky='ns')
info = b.grid_info()
self.assertIsInstance(info, dict)
self.assertEqual(info['in'], self.root)
self.assertEqual(info['row'], self._str(2))
self.assertEqual(info['column'], self._str(2))
self.assertEqual(info['rowspan'], self._str(2))
self.assertEqual(info['columnspan'], self._str(2))
self.assertEqual(info['padx'], self._str(3))
self.assertEqual(info['pady'], self._str(4))
self.assertEqual(info['sticky'], 'ns')
@requires_tcl(8, 5)
def test_grid_anchor(self):
with self.assertRaisesRegex(TclError, 'bad anchor "x"'):
self.root.grid_anchor('x')
with self.assertRaisesRegex(TclError, 'ambiguous anchor ""'):
self.root.grid_anchor('')
with self.assertRaises(TypeError):
self.root.grid_anchor('se', 'nw')
self.root.grid_anchor('se')
self.assertEqual(self.root.tk.call('grid', 'anchor', self.root), 'se')
def test_grid_bbox(self):
self.assertEqual(self.root.grid_bbox(), (0, 0, 0, 0))
self.assertEqual(self.root.grid_bbox(0, 0), (0, 0, 0, 0))
self.assertEqual(self.root.grid_bbox(0, 0, 1, 1), (0, 0, 0, 0))
with self.assertRaisesRegex(TclError, 'expected integer but got "x"'):
self.root.grid_bbox('x', 0)
with self.assertRaisesRegex(TclError, 'expected integer but got "x"'):
self.root.grid_bbox(0, 'x')
with self.assertRaisesRegex(TclError, 'expected integer but got "x"'):
self.root.grid_bbox(0, 0, 'x', 0)
with self.assertRaisesRegex(TclError, 'expected integer but got "x"'):
self.root.grid_bbox(0, 0, 0, 'x')
with self.assertRaises(TypeError):
self.root.grid_bbox(0, 0, 0, 0, 0)
t = self.root
# de-maximize
t.wm_geometry('1x1+0+0')
t.wm_geometry('')
f1 = tkinter.Frame(t, width=75, height=75, bg='red')
f2 = tkinter.Frame(t, width=90, height=90, bg='blue')
f1.grid_configure(row=0, column=0)
f2.grid_configure(row=1, column=1)
self.root.update()
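        # grid_bbox returns (x, y, width, height) in pixels; column/row 0 is
        # 75 px (f1) and column/row 1 is 90 px (f2), so the whole grid
        # spans 165 x 165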
self.assertEqual(t.grid_bbox(), (0, 0, 165, 165))
self.assertEqual(t.grid_bbox(0, 0), (0, 0, 75, 75))
self.assertEqual(t.grid_bbox(0, 0, 1, 1), (0, 0, 165, 165))
self.assertEqual(t.grid_bbox(1, 1), (75, 75, 90, 90))
self.assertEqual(t.grid_bbox(10, 10, 0, 0), (0, 0, 165, 165))
self.assertEqual(t.grid_bbox(-2, -2, -1, -1), (0, 0, 0, 0))
self.assertEqual(t.grid_bbox(10, 10, 12, 12), (165, 165, 0, 0))
def test_grid_location(self):
with self.assertRaises(TypeError):
self.root.grid_location()
with self.assertRaises(TypeError):
self.root.grid_location(0)
with self.assertRaises(TypeError):
self.root.grid_location(0, 0, 0)
with self.assertRaisesRegex(TclError, 'bad screen distance "x"'):
self.root.grid_location('x', 'y')
with self.assertRaisesRegex(TclError, 'bad screen distance "y"'):
self.root.grid_location('1c', 'y')
t = self.root
# de-maximize
t.wm_geometry('1x1+0+0')
t.wm_geometry('')
f = tkinter.Frame(t, width=200, height=100,
highlightthickness=0, bg='red')
self.assertEqual(f.grid_location(10, 10), (-1, -1))
f.grid_configure()
self.root.update()
self.assertEqual(t.grid_location(-10, -10), (-1, -1))
self.assertEqual(t.grid_location(-10, 0), (-1, 0))
self.assertEqual(t.grid_location(-1, 0), (-1, 0))
self.assertEqual(t.grid_location(0, -10), (0, -1))
self.assertEqual(t.grid_location(0, -1), (0, -1))
self.assertEqual(t.grid_location(0, 0), (0, 0))
self.assertEqual(t.grid_location(200, 0), (0, 0))
self.assertEqual(t.grid_location(201, 0), (1, 0))
self.assertEqual(t.grid_location(0, 100), (0, 0))
self.assertEqual(t.grid_location(0, 101), (0, 1))
self.assertEqual(t.grid_location(201, 101), (1, 1))
def test_grid_propagate(self):
self.assertEqual(self.root.grid_propagate(), True)
with self.assertRaises(TypeError):
self.root.grid_propagate(False, False)
self.root.grid_propagate(False)
self.assertFalse(self.root.grid_propagate())
f = tkinter.Frame(self.root, width=100, height=100, bg='red')
f.grid_configure(row=0, column=0)
self.root.update()
self.assertEqual(f.winfo_width(), 100)
self.assertEqual(f.winfo_height(), 100)
f.grid_propagate(False)
g = tkinter.Frame(self.root, width=75, height=85, bg='green')
g.grid_configure(in_=f, row=0, column=0)
self.root.update()
self.assertEqual(f.winfo_width(), 100)
self.assertEqual(f.winfo_height(), 100)
f.grid_propagate(True)
self.root.update()
self.assertEqual(f.winfo_width(), 75)
self.assertEqual(f.winfo_height(), 85)
def test_grid_size(self):
with self.assertRaises(TypeError):
self.root.grid_size(0)
self.assertEqual(self.root.grid_size(), (0, 0))
f = tkinter.Scale(self.root)
f.grid_configure(row=0, column=0)
self.assertEqual(self.root.grid_size(), (1, 1))
f.grid_configure(row=4, column=5)
self.assertEqual(self.root.grid_size(), (6, 5))
def test_grid_slaves(self):
self.assertEqual(self.root.grid_slaves(), [])
a = tkinter.Label(self.root)
a.grid_configure(row=0, column=1)
b = tkinter.Label(self.root)
b.grid_configure(row=1, column=0)
c = tkinter.Label(self.root)
c.grid_configure(row=1, column=1)
d = tkinter.Label(self.root)
d.grid_configure(row=1, column=1)
self.assertEqual(self.root.grid_slaves(), [d, c, b, a])
self.assertEqual(self.root.grid_slaves(row=0), [a])
self.assertEqual(self.root.grid_slaves(row=1), [d, c, b])
self.assertEqual(self.root.grid_slaves(column=0), [b])
self.assertEqual(self.root.grid_slaves(column=1), [d, c, a])
self.assertEqual(self.root.grid_slaves(row=1, column=1), [d, c])
tests_gui = (
PackTest, PlaceTest, GridTest,
)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
788d74a0541595cac3c54e408e4d3d2a423cdc26 | 4b157ab5270ba430a6d7c1594ea41ceea89b7ab2 | /dataview/items/management/commands/dbimport.py | 202892882682d5ee3d0dae54641cf1e52de4ef88 | [
"MIT"
]
| permissive | estin/pomp-craigslist-example | 6a06e0671b189b45d7f688583c052b3e23efd010 | c019686776ff2235f92ece9cea19874631a561b9 | refs/heads/master | 2021-01-10T12:07:30.035957 | 2017-11-21T19:55:40 | 2017-11-21T19:55:40 | 52,002,919 | 38 | 8 | null | null | null | null | UTF-8 | Python | false | false | 2,019 | py | import logging
from django.db import transaction
from django.core.management.base import BaseCommand
from django.core.exceptions import ValidationError
from dataview.items.models import CraigsListItem
from craigslist.pipeline import KafkaPipeline
from craigslist.utils import get_statsd_client, METRIC_ITEMS_IMPORTED_KEY
log = logging.getLogger('dataview.dbimport')
class Command(BaseCommand):
help = 'import data from kafka to db'
def handle(self, *args, **options):
try:
self._handle(*args, **options)
except Exception:
log.exception("Exception")
def _handle(self, *args, **options):
statsd = get_statsd_client(sync=True)
def _items_factory(items):
for item in items:
instance = CraigsListItem(**dict(
# convert dict byte keys to string keys and use it as
# keywords
(k.decode(), v) for k, v in item.items()
))
# validate data before insert
try:
instance.full_clean()
except ValidationError as e:
log.debug('Invalid data(%s): %s', e, dict(item))
else:
yield instance
@transaction.atomic()
def do_bulk_insert(items):
cleaned_items = list(_items_factory(items))
if cleaned_items:
CraigsListItem.objects.bulk_create(cleaned_items)
return cleaned_items
log.debug(
'Start import data from kafka',
)
for items in KafkaPipeline.dump_data(
timeout=500, poll_timeout=5000, enable_auto_commit=True):
if items:
imported = do_bulk_insert(items)
log.debug(
'Successfully imported %s from %s',
len(imported), len(items),
)
statsd.incr(METRIC_ITEMS_IMPORTED_KEY, value=len(imported))
| [
"[email protected]"
]
| |
b4729aeb7d1d1ca7fc8785b02f4a3190a9ebce0b | 600df3590cce1fe49b9a96e9ca5b5242884a2a70 | /build/android/pylib/chrome_test_server_spawner.py | e1eb6b384f6e6a1e0bc2403273b1358a03e5acb7 | [
"BSD-3-Clause"
]
| permissive | metux/chromium-suckless | efd087ba4f4070a6caac5bfbfb0f7a4e2f3c438a | 72a05af97787001756bae2511b7985e61498c965 | refs/heads/orig | 2022-12-04T23:53:58.681218 | 2017-04-30T10:59:06 | 2017-04-30T23:35:58 | 89,884,931 | 5 | 3 | BSD-3-Clause | 2022-11-23T20:52:53 | 2017-05-01T00:09:08 | null | UTF-8 | Python | false | false | 16,082 | py | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A "Test Server Spawner" that handles killing/stopping per-test test servers.
It's used to accept requests from the device to spawn and kill instances of the
chrome test server on the host.
"""
# pylint: disable=W0702
import BaseHTTPServer
import json
import logging
import os
import select
import struct
import subprocess
import sys
import threading
import time
import urlparse
from devil.android import forwarder
from devil.android import ports
from pylib import constants
from pylib.constants import host_paths
# Paths that are needed to import the necessary modules when launching a testserver.
os.environ['PYTHONPATH'] = os.environ.get('PYTHONPATH', '') + (':%s:%s:%s:%s:%s'
% (os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party'),
os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party', 'tlslite'),
os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party', 'pyftpdlib',
'src'),
os.path.join(host_paths.DIR_SOURCE_ROOT, 'net', 'tools', 'testserver'),
os.path.join(host_paths.DIR_SOURCE_ROOT, 'components', 'sync', 'tools',
'testserver')))
SERVER_TYPES = {
'http': '',
'ftp': '-f',
'sync': '', # Sync uses its own script, and doesn't take a server type arg.
'tcpecho': '--tcp-echo',
'udpecho': '--udp-echo',
}
# The timeout (in seconds) of starting up the Python test server.
TEST_SERVER_STARTUP_TIMEOUT = 10
def _WaitUntil(predicate, max_attempts=5):
"""Blocks until the provided predicate (function) is true.
Returns:
Whether the provided predicate was satisfied once (before the timeout).
"""
sleep_time_sec = 0.025
for _ in xrange(1, max_attempts):
if predicate():
return True
time.sleep(sleep_time_sec)
sleep_time_sec = min(1, sleep_time_sec * 2) # Don't wait more than 1 sec.
return False
def _CheckPortAvailable(port):
"""Returns True if |port| is available."""
return _WaitUntil(lambda: ports.IsHostPortAvailable(port))
def _CheckPortNotAvailable(port):
"""Returns True if |port| is not available."""
return _WaitUntil(lambda: not ports.IsHostPortAvailable(port))
def _CheckDevicePortStatus(device, port):
"""Returns whether the provided port is used."""
return _WaitUntil(lambda: ports.IsDevicePortUsed(device, port))
def _GetServerTypeCommandLine(server_type):
"""Returns the command-line by the given server type.
Args:
server_type: the server type to be used (e.g. 'http').
Returns:
A string containing the command-line argument.
"""
if server_type not in SERVER_TYPES:
raise NotImplementedError('Unknown server type: %s' % server_type)
if server_type == 'udpecho':
raise Exception('Please do not run UDP echo tests because we do not have '
'a UDP forwarder tool.')
return SERVER_TYPES[server_type]
class TestServerThread(threading.Thread):
"""A thread to run the test server in a separate process."""
def __init__(self, ready_event, arguments, device, tool):
"""Initialize TestServerThread with the following argument.
Args:
ready_event: event which will be set when the test server is ready.
arguments: dictionary of arguments to run the test server.
device: An instance of DeviceUtils.
tool: instance of runtime error detection tool.
"""
threading.Thread.__init__(self)
self.wait_event = threading.Event()
self.stop_flag = False
self.ready_event = ready_event
self.ready_event.clear()
self.arguments = arguments
self.device = device
self.tool = tool
self.test_server_process = None
self.is_ready = False
self.host_port = self.arguments['port']
assert isinstance(self.host_port, int)
# The forwarder device port now is dynamically allocated.
self.forwarder_device_port = 0
# Anonymous pipe in order to get port info from test server.
self.pipe_in = None
self.pipe_out = None
self.process = None
self.command_line = []
def _WaitToStartAndGetPortFromTestServer(self):
"""Waits for the Python test server to start and gets the port it is using.
The port information is passed by the Python test server with a pipe given
by self.pipe_out. It is written as a result to |self.host_port|.
Returns:
Whether the port used by the test server was successfully fetched.
"""
assert self.host_port == 0 and self.pipe_out and self.pipe_in
(in_fds, _, _) = select.select([self.pipe_in, ], [], [],
TEST_SERVER_STARTUP_TIMEOUT)
if len(in_fds) == 0:
logging.error('Failed to wait to the Python test server to be started.')
return False
# First read the data length as an unsigned 4-byte value. This
# is _not_ using network byte ordering since the Python test server packs
# size as native byte order and all Chromium platforms so far are
# configured to use little-endian.
# TODO(jnd): Change the Python test server and local_test_server_*.cc to
# use a unified byte order (either big-endian or little-endian).
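    # '=L' selects native byte order with the standard 4-byte unsigned long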
data_length = os.read(self.pipe_in, struct.calcsize('=L'))
if data_length:
(data_length,) = struct.unpack('=L', data_length)
assert data_length
if not data_length:
logging.error('Failed to get length of server data.')
return False
port_json = os.read(self.pipe_in, data_length)
if not port_json:
logging.error('Failed to get server data.')
return False
logging.info('Got port json data: %s', port_json)
port_json = json.loads(port_json)
if port_json.has_key('port') and isinstance(port_json['port'], int):
self.host_port = port_json['port']
return _CheckPortNotAvailable(self.host_port)
logging.error('Failed to get port information from the server data.')
return False
def _GenerateCommandLineArguments(self):
"""Generates the command line to run the test server.
Note that all options are processed by following the definitions in
testserver.py.
"""
if self.command_line:
return
args_copy = dict(self.arguments)
# Translate the server type.
type_cmd = _GetServerTypeCommandLine(args_copy.pop('server-type'))
if type_cmd:
self.command_line.append(type_cmd)
# Use a pipe to get the port given by the instance of Python test server
# if the test does not specify the port.
assert self.host_port == args_copy['port']
if self.host_port == 0:
(self.pipe_in, self.pipe_out) = os.pipe()
self.command_line.append('--startup-pipe=%d' % self.pipe_out)
# Pass the remaining arguments as-is.
for key, values in args_copy.iteritems():
if not isinstance(values, list):
values = [values]
for value in values:
if value is None:
self.command_line.append('--%s' % key)
else:
self.command_line.append('--%s=%s' % (key, value))
def _CloseUnnecessaryFDsForTestServerProcess(self):
# This is required to avoid subtle deadlocks that could be caused by the
# test server child process inheriting undesirable file descriptors such as
# file lock file descriptors.
for fd in xrange(0, 1024):
if fd != self.pipe_out:
try:
os.close(fd)
except:
pass
def run(self):
logging.info('Start running the thread!')
self.wait_event.clear()
self._GenerateCommandLineArguments()
command = host_paths.DIR_SOURCE_ROOT
if self.arguments['server-type'] == 'sync':
command = [os.path.join(command, 'components', 'sync', 'tools',
'testserver',
'sync_testserver.py')] + self.command_line
else:
command = [os.path.join(command, 'net', 'tools', 'testserver',
'testserver.py')] + self.command_line
logging.info('Running: %s', command)
# Disable PYTHONUNBUFFERED because it has a bad interaction with the
# testserver. Remove once this interaction is fixed.
unbuf = os.environ.pop('PYTHONUNBUFFERED', None)
# Pass DIR_SOURCE_ROOT as the child's working directory so that relative
# paths in the arguments are resolved correctly.
self.process = subprocess.Popen(
command, preexec_fn=self._CloseUnnecessaryFDsForTestServerProcess,
cwd=host_paths.DIR_SOURCE_ROOT)
if unbuf:
os.environ['PYTHONUNBUFFERED'] = unbuf
if self.process:
if self.pipe_out:
self.is_ready = self._WaitToStartAndGetPortFromTestServer()
else:
self.is_ready = _CheckPortNotAvailable(self.host_port)
if self.is_ready:
forwarder.Forwarder.Map([(0, self.host_port)], self.device, self.tool)
# Check whether the forwarder is ready on the device.
self.is_ready = False
device_port = forwarder.Forwarder.DevicePortForHostPort(self.host_port)
if device_port and _CheckDevicePortStatus(self.device, device_port):
self.is_ready = True
self.forwarder_device_port = device_port
# Wake up the request handler thread.
self.ready_event.set()
# Keep thread running until Stop() gets called.
_WaitUntil(lambda: self.stop_flag, max_attempts=sys.maxint)
if self.process.poll() is None:
self.process.kill()
forwarder.Forwarder.UnmapDevicePort(self.forwarder_device_port, self.device)
self.process = None
self.is_ready = False
if self.pipe_out:
os.close(self.pipe_in)
os.close(self.pipe_out)
self.pipe_in = None
self.pipe_out = None
logging.info('Test-server has died.')
self.wait_event.set()
def Stop(self):
"""Blocks until the loop has finished.
Note that this must be called in another thread.
"""
if not self.process:
return
self.stop_flag = True
self.wait_event.wait()
class SpawningServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
"""A handler used to process http GET/POST request."""
def _SendResponse(self, response_code, response_reason, additional_headers,
contents):
"""Generates a response sent to the client from the provided parameters.
Args:
response_code: number of the response status.
response_reason: string of reason description of the response.
additional_headers: dict of additional headers. Each key is the name of
the header, each value is the content of the header.
contents: string of the contents we want to send to client.
"""
self.send_response(response_code, response_reason)
self.send_header('Content-Type', 'text/html')
# Specify the content-length as without it the http(s) response will not
# be completed properly (and the browser keeps expecting data).
self.send_header('Content-Length', len(contents))
for header_name in additional_headers:
self.send_header(header_name, additional_headers[header_name])
self.end_headers()
self.wfile.write(contents)
self.wfile.flush()
def _StartTestServer(self):
"""Starts the test server thread."""
logging.info('Handling request to spawn a test server.')
content_type = self.headers.getheader('content-type')
if content_type != 'application/json':
raise Exception('Bad content-type for start request.')
content_length = self.headers.getheader('content-length')
if not content_length:
content_length = 0
try:
content_length = int(content_length)
except:
raise Exception('Bad content-length for start request.')
logging.info(content_length)
test_server_argument_json = self.rfile.read(content_length)
logging.info(test_server_argument_json)
assert not self.server.test_server_instance
ready_event = threading.Event()
self.server.test_server_instance = TestServerThread(
ready_event,
json.loads(test_server_argument_json),
self.server.device,
self.server.tool)
self.server.test_server_instance.setDaemon(True)
self.server.test_server_instance.start()
ready_event.wait()
if self.server.test_server_instance.is_ready:
self._SendResponse(200, 'OK', {}, json.dumps(
{'port': self.server.test_server_instance.forwarder_device_port,
'message': 'started'}))
logging.info('Test server is running on port: %d.',
self.server.test_server_instance.host_port)
else:
self.server.test_server_instance.Stop()
self.server.test_server_instance = None
self._SendResponse(500, 'Test Server Error.', {}, '')
      logging.info('Encountered a problem while starting the test server.')
def _KillTestServer(self):
"""Stops the test server instance."""
# There should only ever be one test server at a time. This may do the
# wrong thing if we try and start multiple test servers.
if not self.server.test_server_instance:
return
port = self.server.test_server_instance.host_port
logging.info('Handling request to kill a test server on port: %d.', port)
self.server.test_server_instance.Stop()
# Make sure the status of test server is correct before sending response.
if _CheckPortAvailable(port):
self._SendResponse(200, 'OK', {}, 'killed')
logging.info('Test server on port %d is killed', port)
else:
self._SendResponse(500, 'Test Server Error.', {}, '')
      logging.info('Encountered a problem while killing the test server.')
self.server.test_server_instance = None
def do_POST(self):
parsed_path = urlparse.urlparse(self.path)
action = parsed_path.path
logging.info('Action for POST method is: %s.', action)
if action == '/start':
self._StartTestServer()
else:
self._SendResponse(400, 'Unknown request.', {}, '')
      logging.info('Encountered unknown request: %s.', action)
def do_GET(self):
parsed_path = urlparse.urlparse(self.path)
action = parsed_path.path
params = urlparse.parse_qs(parsed_path.query, keep_blank_values=1)
logging.info('Action for GET method is: %s.', action)
for param in params:
logging.info('%s=%s', param, params[param][0])
if action == '/kill':
self._KillTestServer()
elif action == '/ping':
# The ping handler is used to check whether the spawner server is ready
# to serve the requests. We don't need to test the status of the test
# server when handling ping request.
self._SendResponse(200, 'OK', {}, 'ready')
logging.info('Handled ping request and sent response.')
else:
self._SendResponse(400, 'Unknown request', {}, '')
      logging.info('Encountered unknown request: %s.', action)
class SpawningServer(object):
"""The class used to start/stop a http server."""
def __init__(self, test_server_spawner_port, device, tool):
logging.info('Creating new spawner on port: %d.', test_server_spawner_port)
self.server = BaseHTTPServer.HTTPServer(('', test_server_spawner_port),
SpawningServerRequestHandler)
self.server.device = device
self.server.tool = tool
self.server.test_server_instance = None
self.server.build_type = constants.GetBuildType()
def _Listen(self):
logging.info('Starting test server spawner')
self.server.serve_forever()
def Start(self):
"""Starts the test server spawner."""
listener_thread = threading.Thread(target=self._Listen)
listener_thread.setDaemon(True)
listener_thread.start()
def Stop(self):
"""Stops the test server spawner.
Also cleans the server state.
"""
self.CleanupState()
self.server.shutdown()
def CleanupState(self):
"""Cleans up the spawning server state.
This should be called if the test server spawner is reused,
to avoid sharing the test server instance.
"""
if self.server.test_server_instance:
self.server.test_server_instance.Stop()
self.server.test_server_instance = None
| [
"[email protected]"
]
| |
842b53f556e40e7ee2ce73b314af3c48d09ff59a | 44b87d9faad99d542914c35410ba7d354d5ba9cd | /1/examples/srearch_a_letter.py | db0f0ae9e3b4cbc2f8eb95912e5afe20241d5f02 | []
| no_license | append-knowledge/pythondjango | 586292d1c7d0ddace3630f0d77ca53f442667e54 | 0e5dab580e8cc48e9940fb93a71bcd36e8e6a84e | refs/heads/master | 2023-06-24T07:24:53.374998 | 2021-07-13T05:55:25 | 2021-07-13T05:55:25 | 385,247,677 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 196 | py | x=input("enter the word ")
y=input("enter the letter you want to find ")
flag=0
for i in x:
    if i == y:
        flag=1
        break
if flag==1:
    print("entered letter found")
else:
    print("not found") | [
"[email protected]"
]
| |
26c4e08d795fe5047e6277af93086c7796f3774d | f152d89efeebc5c00c54cf7819f539aec920aa2d | /reviewboard/webapi/decorators.py | 02674e04c176a61adb67121de06093f144b15995 | [
"MIT"
]
| permissive | yang/reviewboard | c1c0cee37133004c2857ed6daac136697baa92dd | b893e0f28bc5d561124aaf09bc8b0e164f42c7d5 | refs/heads/master | 2021-01-18T11:04:37.694088 | 2010-11-27T00:09:27 | 2010-11-30T00:48:14 | 1,115,897 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,188 | py | from django.http import HttpRequest
from djblets.siteconfig.models import SiteConfiguration
from djblets.util.decorators import simple_decorator
from djblets.webapi.core import WebAPIResponse, WebAPIResponseError
from djblets.webapi.decorators import webapi_login_required, \
webapi_response_errors
from djblets.webapi.encoders import BasicAPIEncoder
from djblets.webapi.errors import NOT_LOGGED_IN
@webapi_response_errors(NOT_LOGGED_IN)
@simple_decorator
def webapi_check_login_required(view_func):
"""
A decorator that checks whether login is required on this installation
and, if so, checks if the user is logged in. If login is required and
the user is not logged in, they'll get a NOT_LOGGED_IN error.
"""
def _check(*args, **kwargs):
siteconfig = SiteConfiguration.objects.get_current()
if siteconfig.get("auth_require_sitewide_login"):
return webapi_login_required(view_func)(*args, **kwargs)
else:
return view_func(*args, **kwargs)
view_func.checks_login_required = True
return _check
def webapi_deprecated(deprecated_in, force_error_http_status=None,
default_api_format=None, encoders=[]):
"""Marks an API handler as deprecated.
``deprecated_in`` specifies the version that first deprecates this call.
``force_error_http_status`` forces errors to use the specified HTTP
status code.
``default_api_format`` specifies the default api format (json or xml)
if one isn't provided.
"""
def _dec(view_func):
def _view(*args, **kwargs):
if default_api_format:
request = args[0]
assert isinstance(request, HttpRequest)
method_args = getattr(request, request.method, None)
if method_args and 'api_format' not in method_args:
method_args = method_args.copy()
method_args['api_format'] = default_api_format
setattr(request, request.method, method_args)
response = view_func(*args, **kwargs)
if isinstance(response, WebAPIResponse):
response.encoders = encoders
if isinstance(response, WebAPIResponseError):
response.api_data['deprecated'] = {
'in_version': deprecated_in,
}
if (force_error_http_status and
isinstance(response, WebAPIResponseError)):
response.status_code = force_error_http_status
return response
return _view
return _dec
_deprecated_api_encoders = []
def webapi_deprecated_in_1_5(view_func):
from reviewboard.webapi.encoder import DeprecatedReviewBoardAPIEncoder
global _deprecated_api_encoders
if not _deprecated_api_encoders:
_deprecated_api_encoders = [
DeprecatedReviewBoardAPIEncoder(),
BasicAPIEncoder(),
]
return webapi_deprecated(
deprecated_in='1.5',
force_error_http_status=200,
default_api_format='json',
encoders=_deprecated_api_encoders)(view_func)
| [
"[email protected]"
]
| |
d0f2925daf7db34217f561cf6b3a564ef62b0e35 | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /93o6y6WKFpQKoDg4T_11.py | c80cfdfb6ad9743783da2aa0233e0db5d7d02133 | []
| no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 60 | py |
def sort_by_length(lst):
    return sorted(lst, key=len)
| [
"[email protected]"
]
| |
23c32a446cf61a05dcb2d470239b1d3c0c4f49da | 794f225c248e84b29f03e5ae472bd995b8dd86a4 | /doppler/all_data/4-18/20130418-13_0_10_peak_combine.py | 48b82d42269036337f95766e2c30e0ba539b80ea | []
| no_license | yuyichao/jlab2s13 | 1113a537bf9f1d44ff96324f290a16abf265fb20 | 9b09c3af9f4d3311633996635ccf75f04a97c117 | refs/heads/master | 2023-02-21T18:08:12.739055 | 2013-09-14T03:52:57 | 2013-09-14T03:52:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,082 | py | fit2_s = [3.9008432047451302, 3.8773342085960469, 3.9957833609585491]
fit_a = [-1.2819999999999998, -1.2679999999999993, -1.2711428571428618]
fit2_a = [500.51428571428562, 500.68571428571403, 500.61428571428576]
fit_s = [0.012207605867885846, 0.0094353400058313219, 0.010353561336037447]
peaks = [{'name': ['f85_3_4'], 'pos': -7.9996981822594853, 'pos_s': 0.011013846947679092, 'pos_i_s': 0.9490858455586706, 'pos_i': 1500.5435641014496}, {'name': ['f85_3_3', 'f85_3_4'], 'pos': -8.2967573220130149, 'pos_s': 0.012724357233702065, 'pos_i_s': 1.4310526680215732, 'pos_i': 1614.4619747504282}, {'name': ['f85_3_2', 'f85_3_4'], 'pos': -8.4343291746150335, 'pos_s': 0.011786948704679416, 'pos_i_s': 1.2032517401363374, 'pos_i': 1668.7193509592228}, {'name': ['f85_3_2', 'f85_3_3'], 'pos': -8.720160977519825, 'pos_s': 0.012858045194454387, 'pos_i_s': 1.636463811009885, 'pos_i': 1782.4988462413753}, {'name': ['f85_3_4'], 'pos': -8.0039884835099357, 'pos_s': 0.011799133475740702, 'pos_i_s': 1.2268992252757378, 'pos_i': 3979.13796792023}, {'name': ['f85_3_3', 'f85_3_4'], 'pos': -8.2977243140836912, 'pos_s': 0.012888055408478675, 'pos_i_s': 1.448285143908029, 'pos_i': 4090.4422261406667}, {'name': ['f85_3_2', 'f85_3_4'], 'pos': -8.4342855941752237, 'pos_s': 0.011899552853659508, 'pos_i_s': 1.389826247547356, 'pos_i': 4143.611888534533}, {'name': ['f85_3_2', 'f85_3_3'], 'pos': -8.718160279442678, 'pos_s': 0.012775187457901651, 'pos_i_s': 1.631549880675381, 'pos_i': 4256.522759082934}, {'name': ['f85_3_4'], 'pos': -8.006015880564151, 'pos_s': 0.012275941194589106, 'pos_i_s': 1.3872586407085559, 'pos_i': 6457.781447953739}, {'name': ['f85_3_3', 'f85_3_4'], 'pos': -8.2955682322274473, 'pos_s': 0.012460840014492304, 'pos_i_s': 1.374847594661195, 'pos_i': 6568.516429897438}, {'name': ['f85_3_2', 'f85_3_4'], 'pos': -8.4359818820726122, 'pos_s': 0.011658198624886231, 'pos_i_s': 1.206166415622737, 'pos_i': 6622.468062521684}, {'name': ['f85_3_2', 'f85_3_3'], 'pos': -8.7168896916836989, 'pos_s': 0.012560068171244025, 'pos_i_s': 1.562662238805832, 'pos_i': 6734.273836322585}]
| [
"[email protected]"
]
| |
f238d04a62268f719a0026d5246ae6552ad08c38 | bf99b1b14e9ca1ad40645a7423f23ef32f4a62e6 | /AtCoder/arc/025a.py | eca2c9978b657cb946922fb4f5f37b40b06e0566 | []
| no_license | y-oksaku/Competitive-Programming | 3f9c1953956d1d1dfbf46d5a87b56550ff3ab3db | a3ff52f538329bed034d3008e051f30442aaadae | refs/heads/master | 2021-06-11T16:14:12.635947 | 2021-05-04T08:18:35 | 2021-05-04T08:18:35 | 188,639,647 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 135 | py | D = list(map(int, input().split()))
L = list(map(int, input().split()))
ans = 0
for d, l in zip(D, L):
ans += max(d, l)
print(ans) | [
"[email protected]"
]
| |
d130c48d189ce6a8f79f9a900a9f651c67482890 | 37fd103f6b0de68512e3cb6098d0abb9220f5a7d | /Python from scratch/027_inclass_reg_ip.py | 65e07f0162c6062580b2d1a4687b6a61fbc22782 | []
| no_license | FlyingMedusa/PythonELTIT | 720d48089738b7e629cad888f0032df3a4ccea2c | 36ab01fc9d42337e3c76c59c383d7b1a6142f9b9 | refs/heads/master | 2020-09-11T18:17:17.825390 | 2020-04-21T16:38:03 | 2020-04-21T16:38:03 | 222,150,066 | 0 | 0 | null | 2020-04-21T16:38:04 | 2019-11-16T19:37:33 | Python | UTF-8 | Python | false | false | 471 | py | import re
words = ["eloelo320", "blah@", "192.168.0.1", "asd.asd.20"]
pattern = "^\w+$" # or (longer): "^([A-Z]|[a-z]|(\d))*$"
id_pattern = "^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$"
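# note: \d{1,3} also matches octets above 255, so id_pattern only checks the
# general shape of an IPv4 address, not that each octet is in range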
for word in words:
match = re.search(pattern, word)
if match:
print("matched")
else:
print("not matched")
print("*"*80)
for word in words:
match = re.search(id_pattern, word)
if match:
print("matched")
else:
print("not matched")
| [
"[email protected]"
]
| |
acb51fce28782b1e64bb7fd83ce39d45260ae110 | 175d6cff12514da71aafef6b9ff48dd56a87db2d | /alveus/widgets/customized_menu.py | 76d6dbee02fa885e376a161e4dfb7dd9543930bf | [
"MIT"
]
| permissive | FrederikLehn/alveus | d309eea98bd36f06709c55a18f0855f38b5420a9 | 71a858d0cdd8a4bbd06a28eb35fa7a8a7bd4814b | refs/heads/main | 2023-06-26T02:29:59.236579 | 2021-07-30T11:07:17 | 2021-07-30T11:07:17 | 391,029,935 | 4 | 3 | null | null | null | null | UTF-8 | Python | false | false | 6,342 | py | import wx
from wx.lib.agw.flatmenu import FMRendererMgr, FMRenderer, FlatMenu, FlatMenuItem
from wx.lib.agw.flatmenu import FMRendererXP, FMRendererMSOffice2007, FMRendererVista
from wx.lib.agw.artmanager import ArtManager, DCSaver
import _icons as ico
class CustomFMRendererMgr(FMRendererMgr):
def __init__(self):
super().__init__()
#if hasattr(self, '_alreadyInitialized'):
# return
#self._alreadyInitialized = True
#self._currentTheme = StyleDefault
self._currentTheme = 0
self._renderers = []
self._renderers.append(CustomFMRenderer())
#self._renderers.append(FMRendererXP())
#self._renderers.append(FMRendererMSOffice2007())
#self._renderers.append(FMRendererVista())
class CustomFMRenderer(FMRendererVista):
def __init__(self):
super().__init__()
# self.menuBarFaceColour = wx.SystemSettings_GetColour(wx.SYS_COLOUR_3DFACE)
#
# self.buttonBorderColour = wx.SystemSettings_GetColour(wx.SYS_COLOUR_ACTIVECAPTION)
# self.buttonFaceColour = ArtManager.Get().LightColour(self.buttonBorderColour, 75)
# self.buttonFocusBorderColour = wx.SystemSettings_GetColour(wx.SYS_COLOUR_ACTIVECAPTION)
# self.buttonFocusFaceColour = ArtManager.Get().LightColour(self.buttonFocusBorderColour, 75)
# self.buttonPressedBorderColour = wx.SystemSettings_GetColour(wx.SYS_COLOUR_ACTIVECAPTION)
# self.buttonPressedFaceColour = ArtManager.Get().LightColour(self.buttonPressedBorderColour, 60)
#
# self.menuFocusBorderColour = wx.RED #wx.SystemSettings_GetColour(wx.SYS_COLOUR_ACTIVECAPTION)
# self.menuFocusFaceColour = ArtManager.Get().LightColour(self.buttonFocusBorderColour, 75)
# self.menuPressedBorderColour = wx.SystemSettings_GetColour(wx.SYS_COLOUR_ACTIVECAPTION)
# self.menuPressedFaceColour = ArtManager.Get().LightColour(self.buttonPressedBorderColour, 60)
#
# self.menuBarFocusBorderColour = wx.SystemSettings_GetColour(wx.SYS_COLOUR_ACTIVECAPTION)
# self.menuBarFocusFaceColour = ArtManager.Get().LightColour(self.buttonFocusBorderColour, 75)
# self.menuBarPressedBorderColour = wx.SystemSettings_GetColour(wx.SYS_COLOUR_ACTIVECAPTION)
# self.menuBarPressedFaceColour = ArtManager.Get().LightColour(self.buttonPressedBorderColour, 60)
def DrawButtonColour(self, dc, rect, state, colour):
"""
Draws a button using the Vista theme.
:param `dc`: an instance of :class:`DC`;
:param `rect`: the an instance of :class:`Rect`, representing the button client rectangle;
:param integer `state`: the button state;
:param `colour`: a valid :class:`Colour` instance.
"""
artMgr = ArtManager.Get()
# Keep old pen and brush
dcsaver = DCSaver(dc)
# same colours as used on ribbon
outer = wx.Colour(242, 201, 88)
inner = wx.WHITE
top = wx.Colour(255, 227, 125)
bottom = wx.Colour(253, 243, 204)
bdrRect = wx.Rect(*rect)
filRect = wx.Rect(*rect)
filRect.Deflate(1, 1)
r1, g1, b1 = int(top.Red()), int(top.Green()), int(top.Blue())
r2, g2, b2 = int(bottom.Red()), int(bottom.Green()), int(bottom.Blue())
dc.GradientFillLinear(filRect, top, bottom, wx.SOUTH)
dc.SetBrush(wx.TRANSPARENT_BRUSH)
dc.SetPen(wx.Pen(outer))
dc.DrawRoundedRectangle(bdrRect, 3)
bdrRect.Deflate(1, 1)
dc.SetPen(wx.Pen(inner))
dc.DrawRoundedRectangle(bdrRect, 2)
class CustomMenu(FlatMenu):
def __init__(self, parent=None):
super().__init__(parent=parent)
self._rendererMgr = CustomFMRendererMgr()
def CustomPopup(self):
if self.GetMenuItems():
pos = wx.GetMousePosition()
self.Popup(wx.Point(pos.x, pos.y), self.GetParent())
# common item implementations for ease of use ----------------------------------------------------------------------
def AppendCollapseItem(self, method, bind_to=None):
return self.AppendGenericItem('Collapse all', method, bitmap=ico.collapse_16x16.GetBitmap(), bind_to=bind_to)
def AppendCopyItem(self, method, bind_to=None):
return self.AppendGenericItem('Copy', method, bitmap=ico.copy_16x16.GetBitmap(), bind_to=bind_to)
def AppendCutItem(self, method, bind_to=None):
return self.AppendGenericItem('Cut', method, bitmap=ico.cut_16x16.GetBitmap(), bind_to=bind_to)
def AppendDeleteItem(self, method, bind_to=None):
return self.AppendGenericItem('Delete', method, bitmap=ico.delete_16x16.GetBitmap(), bind_to=bind_to)
def AppendExpandItem(self, method, bind_to=None):
return self.AppendGenericItem('Expand all', method, bitmap=ico.expand_16x16.GetBitmap(), bind_to=bind_to)
def AppendExportExcel(self, method, bind_to=None):
return self.AppendGenericItem('Export to Excel', method, bitmap=ico.export_spreadsheet_16x16.GetBitmap(), bind_to=bind_to)
def AppendGenericItem(self, text, method, bitmap=wx.NullBitmap, bind_to=None):
if bind_to is None:
bind_to = self.GetParent()
item = CustomMenuItem(self, wx.ID_ANY, text, normalBmp=bitmap)
self.AppendItem(item)
bind_to.Bind(wx.EVT_MENU, method, item)
return item
def AppendOpenItem(self, method, bind_to=None):
return self.AppendGenericItem('Open', method, bitmap=ico.settings_page_16x16.GetBitmap(), bind_to=bind_to)
def AppendPasteItem(self, method, bind_to=None):
return self.AppendGenericItem('Paste', method, bitmap=ico.paste_16x16.GetBitmap(), bind_to=bind_to)
class CustomMenuItem(FlatMenuItem):
def __init__(self, parent, id=wx.ID_SEPARATOR, label="", helpString="", kind=wx.ITEM_NORMAL, subMenu=None,
normalBmp=wx.NullBitmap, disabledBmp=wx.NullBitmap, hotBmp=wx.NullBitmap):
super().__init__(parent, id=id, label=label, helpString=helpString, kind=kind, subMenu=subMenu,
normalBmp=normalBmp, disabledBmp=disabledBmp, hotBmp=hotBmp)
def SetBitmap(self, bmp):
self._normalBmp = bmp
| [
"[email protected]"
]
| |
2fdec4c0f0f3dab907001d6f75807c4de79d3ff9 | 6f1cadc49bc86ea49fd32c64397bfecfd9666f19 | /C2/pulsar/implant/migrations/0002_auto_20150827_1851.py | ca655a540d156e556deace4637d8d630fee4b98d | [
"BSD-3-Clause"
]
| permissive | killvxk/Pulsar-1 | f073c2273e9d4040acc3842963b018d920e78aa4 | d290c524674eabb0444ac8c0b1ee65ea1ad44f1f | refs/heads/master | 2020-06-24T22:38:25.551118 | 2019-07-27T03:45:25 | 2019-07-27T03:45:25 | 199,111,787 | 0 | 0 | null | 2019-07-27T03:44:52 | 2019-07-27T03:44:51 | null | UTF-8 | Python | false | false | 390 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('implant', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='implant',
name='uuid',
field=models.CharField(max_length=36),
),
]
| [
"[email protected]"
]
| |
fa8beb3f3c45d810e244afa3a207660de72aae1e | c829a8654d4adcba7944f1aa48c2643c2a2a2803 | /sony_utils/split.py | 64caf82d8c17086980dca4436a62a0b48901e234 | []
| no_license | muma378/Utils | d85390f84226b63474c815285acb6ce351ac0c22 | a6ae14f86de360bdabd9fa7f39cd8b05bbd505fb | refs/heads/master | 2020-05-21T13:35:51.908847 | 2017-02-05T06:11:45 | 2017-02-05T06:11:45 | 48,424,512 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,083 | py | import os
import sys
import subprocess
import datetime
CMD_TEMPLATE = "cut.exe {src_wav} {dst_wav} {start} {end}"
NAME = "emotion_F_"
DECODING = 'gb2312' if os.name=='nt' else 'utf-8'
# split the wav into clips according to the timing information provided in a columns file
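# each line of the columns file is expected to be "<start>\t<end>\t<text>",
# with start/end given as "mm:ss" offsets into the source wav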
def split_by_cols(cols_file, src_wav, dst_dir='.', name_prefix=NAME):
with open(cols_file, 'r') as f:
counter = 0
for timeline in f:
start, end, text = map(lambda x: x.strip(), timeline.split("\t"))
to_sec = lambda x: str(float(x.split(":")[0])*60 + float(x.split(":")[1]))
start, end = to_sec(start), to_sec(end)
counter += 1
dst_file = os.path.join(dst_dir, unicode(name_prefix+str(counter))).encode(DECODING)
# to generate the wave
dst_wav = dst_file + '.wav'
cmd = CMD_TEMPLATE.format(**locals())
if not os.path.exists(dst_dir):
os.makedirs(dst_dir)
subprocess.check_call(cmd, shell=True)
# to generate the text
with open(dst_file+".txt", "w") as t:
t.write(text)
if __name__ == '__main__':
split_by_cols(sys.argv[1], sys.argv[2])
| [
"[email protected]"
]
| |
d032794e6b78ff7d03d03deda884cfbf3e772619 | caa175a933aca08a475c6277e22cdde1654aca7b | /acondbs/db/__init__.py | 5656bd250d55b0a328a26007e1eeb74511f46e9f | [
"MIT"
]
| permissive | simonsobs/acondbs | 01d68ae40866461b85a6c9fcabdfbea46ef5f920 | d18c7b06474b0dacb1dcf1c6dbd1e743407645e2 | refs/heads/main | 2023-07-07T04:33:40.561273 | 2023-06-28T22:08:00 | 2023-06-28T22:08:00 | 239,022,783 | 0 | 1 | MIT | 2023-06-26T20:36:39 | 2020-02-07T21:07:46 | Python | UTF-8 | Python | false | false | 1,054 | py | """SQLAlchemy and DB related
This package contains functions, classes, and other objects that are
related to SQLAlchemy and the DB except ORM model declarations.
"""
from pathlib import Path
from flask import Flask
from flask_migrate import Migrate
from .cmds import (
backup_db_command,
dump_db_command,
export_csv_command,
import_csv_command,
init_db_command,
)
from .sa import sa
migrate = Migrate()
_MIGRATIONS_DIR = str(Path(__file__).resolve().parent.parent / 'migrations')
def init_app(app: Flask) -> None:
"""Initialize the Flask application object
This function is called by `create_app()` of Flask
Parameters
----------
app : Flask
The Flask application object, an instance of `Flask`
"""
sa.init_app(app)
migrate.init_app(app, sa, directory=_MIGRATIONS_DIR)
app.cli.add_command(init_db_command)
app.cli.add_command(dump_db_command)
app.cli.add_command(import_csv_command)
app.cli.add_command(export_csv_command)
app.cli.add_command(backup_db_command)
| [
"[email protected]"
]
| |
fd78af3570754694ae18160dcad79b077bc0eeb9 | 242086b8c6a39cbc7af3bd7f2fd9b78a66567024 | /python/PP4E-Examples-1.4/Examples/PP4E/Dbase/TableBrowser/dbview.py | 9975899912c220e9ca0a023de57601b57da0cc5b | []
| no_license | chuzui/algorithm | 7537d0aa051ac4cbe9f6a7ca9a3037204803a650 | c3006b24c4896c1242d3ceab43ace995c94f10c8 | refs/heads/master | 2021-01-10T13:05:30.902020 | 2015-09-27T14:39:02 | 2015-09-27T14:39:02 | 8,404,397 | 4 | 4 | null | null | null | null | UTF-8 | Python | false | false | 981 | py | ##################################################################
# view any existing shelve directly; this is more general than a
# "formtable.py shelve 1 filename" cmdline--only works for Actor;
# pass in a filename (and mode) to use this to browse any shelve:
# formtable auto picks up class from the first instance fetched;
# run dbinit1 to (re)initialize dbase shelve with a template.
##################################################################
from sys import argv
from formtable import *
from formgui import FormGui
mode = 'class'
file = '../data/mydbase-' + mode
if len(argv) > 1: file = argv[1] # dbview.py file? mode??
if len(argv) > 2: mode = argv[2]
if mode == 'dict':
table = ShelveOfDictionary(file) # view dictionaries
else:
table = ShelveOfInstance(file) # view class objects
FormGui(table).mainloop()
table.close() # close needed for some dbm
| [
"zui"
]
| zui |
468d0a666c8113677278318e8707cab353abf5b9 | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/nninsid.py | 467654f6a73883390386eb7cdecf2765d99eb6ea | []
| no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 1,302 | py | ii = [('BentJDO2.py', 1), ('MarrFDI.py', 1), ('RogePAV2.py', 4), ('KembFFF.py', 1), ('RogePAV.py', 3), ('RennJIT.py', 5), ('LeakWTI2.py', 2), ('UnitAI.py', 3), ('KembFJ1.py', 1), ('WilkJMC3.py', 1), ('LeakWTI3.py', 2), ('PettTHE.py', 18), ('PeckJNG.py', 1), ('BailJD2.py', 1), ('AdamWEP.py', 33), ('FitzRNS3.py', 16), ('ClarGE2.py', 2), ('GellWPT2.py', 4), ('WilkJMC2.py', 8), ('CarlTFR.py', 5), ('RoscTTI3.py', 2), ('AinsWRR3.py', 2), ('BailJD1.py', 1), ('RoscTTI2.py', 1), ('CoolWHM.py', 3), ('CrokTPS.py', 2), ('ClarGE.py', 1), ('BuckWGM.py', 3), ('LyelCPG.py', 1), ('WestJIT2.py', 25), ('CrocDNL.py', 4), ('MedwTAI.py', 6), ('WadeJEB.py', 1), ('FerrSDO2.py', 1), ('KirbWPW2.py', 3), ('SoutRD2.py', 1), ('BackGNE.py', 7), ('LeakWTI.py', 1), ('SoutRD.py', 5), ('DickCSG.py', 2), ('BuckWGM2.py', 2), ('WheeJPT.py', 3), ('MereHHB3.py', 2), ('HowiWRL2.py', 1), ('BailJD3.py', 2), ('WilkJMC.py', 6), ('MackCNH.py', 1), ('WestJIT.py', 20), ('BabbCEM.py', 6), ('FitzRNS4.py', 7), ('DequTKM.py', 1), ('FitzRNS.py', 15), ('EdgeMHT.py', 1), ('BowrJMM.py', 2), ('FerrSDO.py', 1), ('RoscTTI.py', 1), ('KembFJ2.py', 1), ('LewiMJW.py', 4), ('BellCHM.py', 1), ('AinsWRR2.py', 2), ('BrewDTO.py', 4), ('JacoWHI.py', 1), ('ClarGE3.py', 4), ('FitzRNS2.py', 10), ('NortSTC.py', 1), ('KeigTSS.py', 1), ('KirbWPW.py', 5)] | [
"[email protected]"
]
| |
45d7bb9e577d90e6669bedad91fe02a0067a2061 | 41cd1bcff0166ed3aab28a183a2837adaa2d9a07 | /allauth/account/decorators.py | eb906aad176d794c9e8a3407a9d1495c7ae1d76d | [
"MIT"
]
| permissive | thomaspurchas/django-allauth | 694dde8615b90cd4768e7f9eda79fdcf6fe3cdb6 | d7a8b9e13456180648450431057a206afa689373 | refs/heads/master | 2022-02-04T03:18:25.851391 | 2013-05-20T11:26:55 | 2013-05-20T11:26:55 | 7,754,028 | 1 | 0 | MIT | 2022-02-01T23:04:02 | 2013-01-22T14:44:56 | Python | UTF-8 | Python | false | false | 1,627 | py | from django.contrib.auth.decorators import login_required
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.shortcuts import render
from .models import EmailAddress
from .utils import send_email_confirmation
def verified_email_required(function=None,
login_url=None,
redirect_field_name=REDIRECT_FIELD_NAME):
"""
Even when email verification is not mandatory during signup, there
may be circumstances during which you really want to prevent
unverified users to proceed. This decorator ensures the user is
authenticated and has a verified email address. If the former is
not the case then the behavior is identical to that of the
standard `login_required` decorator. If the latter does not hold,
email verification mails are automatically resend and the user is
presented with a page informing him he needs to verify his email
address.
"""
def decorator(view_func):
@login_required(redirect_field_name=redirect_field_name,
login_url=login_url)
def _wrapped_view(request, *args, **kwargs):
if not EmailAddress.objects.filter(user=request.user,
verified=True).exists():
send_email_confirmation(request, request.user)
return render(request,
'account/verified_email_required.html')
return view_func(request, *args, **kwargs)
return _wrapped_view
if function:
return decorator(function)
return decorator
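# Illustrative usage (hypothetical views, not part of this module): the
# decorator works both bare and with arguments.
#
#     @verified_email_required
#     def dashboard(request):
#         ...
#
#     @verified_email_required(login_url='/accounts/login/')
#     def settings_view(request):
#         ...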
| [
"[email protected]"
]
| |
3397fdf03555cbfe28cc3fed54c3f4f02c8e6c2b | 091155389673325cfe8b0da3dc64c113f1ded707 | /playground/segmentation/coco/solo/solo.res50.fpn.coco.800size.1x/config.py | 66f251fa6baf96372bfaf789658e15cbd0595e82 | [
"Apache-2.0"
]
| permissive | Megvii-BaseDetection/cvpods | 7b7c808257b757d7f94d520ea03b370105fb05eb | 2deea5dc659371318c8a570c644201d913a83027 | refs/heads/master | 2023-03-22T00:26:06.248877 | 2023-03-10T10:05:26 | 2023-03-10T10:05:26 | 318,124,806 | 659 | 91 | Apache-2.0 | 2023-03-10T10:05:28 | 2020-12-03T08:26:57 | Python | UTF-8 | Python | false | false | 1,606 | py | import os.path as osp
from cvpods.configs.solo_config import SOLOConfig
_config_dict = dict(
MODEL=dict(
WEIGHTS="detectron2://ImageNetPretrained/MSRA/R-50.pkl",
),
DATASETS=dict(
TRAIN=("coco_2017_train",),
TEST=("coco_2017_val",),
),
SOLVER=dict(
LR_SCHEDULER=dict(
NAME="WarmupMultiStepLR",
MAX_ITER=90000,
STEPS=(60000, 80000),
WARMUP_FACTOR=1.0 / 1000,
WARMUP_ITERS=500,
WARMUP_METHOD="linear",
GAMMA=0.1,
),
OPTIMIZER=dict(
NAME="SGD",
BASE_LR=0.01,
WEIGHT_DECAY=0.0001,
MOMENTUM=0.9,
),
CHECKPOINT_PERIOD=5000,
IMS_PER_BATCH=16,
IMS_PER_DEVICE=2,
BATCH_SUBDIVISIONS=1,
),
INPUT=dict(
AUG=dict(
TRAIN_PIPELINES=[
("ResizeShortestEdge",
dict(short_edge_length=(800,), max_size=1333, sample_style="choice")),
("RandomFlip", dict()),
],
TEST_PIPELINES=[
("ResizeShortestEdge",
dict(short_edge_length=800, max_size=1333, sample_style="choice")),
],
)
),
OUTPUT_DIR=osp.join(
'/data/Outputs/model_logs/cvpods_playground',
osp.split(osp.realpath(__file__))[0].split("playground/")[-1]
),
)
class CustomSOLOConfig(SOLOConfig):
def __init__(self):
super(CustomSOLOConfig, self).__init__()
self._register_configuration(_config_dict)
config = CustomSOLOConfig()
| [
"[email protected]"
]
| |
73199b52b898b470c3bb8e2c68de555ebab6a237 | 354ff630d5eed81ffe67be28dd82b990a733a1cd | /pysim/information/histogram.py | b5c2e51802a07e918c9766dcc879d029036a221c | [
"MIT"
]
| permissive | superpig99/pysim | 22ba1521c0002f815f5d074114109461e0cc35fc | 4cd5f0987d3cbdeba1c932ca845df1b0bd9d46bf | refs/heads/master | 2023-05-15T05:30:01.272708 | 2020-04-02T14:25:35 | 2020-04-02T14:25:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,655 | py | from typing import Union, Optional, Dict
import numpy as np
from scipy import stats
def hist_entropy(
X: np.ndarray,
bins: Union[str, int] = "auto",
correction: bool = True,
hist_kwargs: Optional[Dict] = {},
) -> float:
"""Calculates the entropy using the histogram of a univariate dataset.
Option to do a Miller Maddow correction.
Parameters
----------
X : np.ndarray, (n_samples)
the univariate input dataset
bins : {str, int}, default='auto'
the number of bins to use for the histogram estimation
correction : bool, default=True
implements the Miller-Maddow correction for the histogram
entropy estimation.
hist_kwargs: Optional[Dict], default={}
the histogram kwargs to be used when constructing the histogram
See documention for more details:
https://docs.scipy.org/doc/numpy/reference/generated/numpy.histogram.html
Returns
-------
H_hist_entropy : float
the entropy for this univariate histogram
Example
-------
>> from scipy import stats
>> from pysim.information import histogram_entropy
>> X = stats.gamma(a=10).rvs(1_000, random_state=123)
>> histogram_entropy(X)
array(2.52771628)
"""
# get histogram
hist_counts = np.histogram(X, bins=bins, **hist_kwargs)
# create random variable
hist_dist = stats.rv_histogram(hist_counts)
# calculate entropy
H = hist_dist.entropy()
# MLE Estimator with Miller-Maddow Correction
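    # the correction adds (m - 1) / (2 * N), where m is the number of
    # occupied histogram bins and N is the total sample count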
if correction == True:
H += 0.5 * (np.sum(hist_counts[0] > 0) - 1) / hist_counts[0].sum()
return H
| [
"[email protected]"
]
| |
6bd7da921ee4e5f2c38d8dd8832742960949e196 | caac09a412ed9783e31e6254ba937d2ff1495dc8 | /test/calculator_tests.py | 974b8a61d0acd5e288c2f2b26d39039e3047ccc2 | [
"MIT"
]
| permissive | ace-racer/lint-ut-circleci | c01095e9e41137a80499a03a81075ec86b4a9862 | f1d6b43f97b5146c4a168636d8517a8d02a3b21e | refs/heads/master | 2020-08-29T07:15:51.532944 | 2019-10-28T05:30:34 | 2019-10-28T05:30:34 | 217,963,717 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 873 | py | import unittest
from calculator import Calculator
class CalculatorTests(unittest.TestCase):
def test_add(self):
calculator = Calculator()
self.assertEqual(calculator.add(10, 20), 30)
def test_subtract(self):
calculator = Calculator()
self.assertEqual(calculator.subtract(10, 20), -10)
def test_multiply(self):
calculator = Calculator()
        self.assertEqual(calculator.multiply(10, 20), 200)
def test_divide(self):
calculator = Calculator()
self.assertEqual(calculator.divide(10, 20), 0.5)
def suite():
"""
Test suite
:return: The test suite
"""
suite = unittest.TestSuite()
suite.addTests(
unittest.TestLoader().loadTestsFromTestCase(CalculatorTests)
)
return suite
if __name__ == '__main__':
unittest.TextTestRunner(verbosity=2).run(suite()) | [
"[email protected]"
]
| |
6a5cafcf6f8b670c1c3a830f0502074d89470102 | 0dfc473870552ac9384a8b24e96046728a42f6ed | /utest/model/test_control.py | 1a17f6adb5d948f24ea2250696f2a05d093168e7 | [
"Apache-2.0",
"CC-BY-3.0"
]
| permissive | rmf/robotframework | fecb4821fd308d107ae94ee3077a2d968ad9163d | a26cd326d1a397edc56993c453380dcd9b49e407 | refs/heads/master | 2023-09-03T07:04:30.300003 | 2021-11-16T11:01:32 | 2021-11-16T11:01:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,125 | py | import unittest
from robot.model import For, If, IfBranch, TestCase
from robot.utils.asserts import assert_equal
IF = If.IF
ELSE_IF = If.ELSE_IF
ELSE = If.ELSE
class TestFor(unittest.TestCase):
def test_string_reprs(self):
for for_, exp_str, exp_repr in [
(For(),
'FOR IN ',
"For(variables=(), flavor='IN', values=())"),
(For(('${x}',), 'IN RANGE', ('10',)),
'FOR ${x} IN RANGE 10',
"For(variables=('${x}',), flavor='IN RANGE', values=('10',))"),
(For(('${x}', '${y}'), 'IN ENUMERATE', ('a', 'b')),
'FOR ${x} ${y} IN ENUMERATE a b',
"For(variables=('${x}', '${y}'), flavor='IN ENUMERATE', values=('a', 'b'))"),
(For([u'${\xfc}'], 'IN', [u'f\xf6\xf6']),
u'FOR ${\xfc} IN f\xf6\xf6',
u"For(variables=[%r], flavor='IN', values=[%r])" % (u'${\xfc}', u'f\xf6\xf6'))
]:
assert_equal(str(for_), exp_str)
assert_equal(repr(for_), 'robot.model.' + exp_repr)
class TestIf(unittest.TestCase):
def test_type(self):
assert_equal(IfBranch().type, IF)
assert_equal(IfBranch(type=ELSE).type, ELSE)
assert_equal(IfBranch(type=ELSE_IF).type, ELSE_IF)
def test_type_with_nested_if(self):
branch = IfBranch()
branch.body.create_if()
assert_equal(branch.body[0].body.create_branch().type, IF)
assert_equal(branch.body[0].body.create_branch(ELSE_IF).type, ELSE_IF)
assert_equal(branch.body[0].body.create_branch(ELSE).type, ELSE)
def test_root_id(self):
assert_equal(If().id, None)
assert_equal(TestCase().body.create_if().id, None)
def test_branch_id_without_parent(self):
assert_equal(IfBranch().id, 'k1')
def test_branch_id_with_only_root(self):
root = If()
assert_equal(root.body.create_branch().id, 'k1')
assert_equal(root.body.create_branch().id, 'k2')
def test_branch_id_with_real_parent(self):
root = TestCase().body.create_if()
assert_equal(root.body.create_branch().id, 't1-k1')
assert_equal(root.body.create_branch().id, 't1-k2')
def test_string_reprs(self):
for if_, exp_str, exp_repr in [
(IfBranch(),
'IF None',
"IfBranch(type='IF', condition=None)"),
(IfBranch(condition='$x > 1'),
'IF $x > 1',
"IfBranch(type='IF', condition='$x > 1')"),
(IfBranch(ELSE_IF, condition='$x > 2'),
'ELSE IF $x > 2',
"IfBranch(type='ELSE IF', condition='$x > 2')"),
(IfBranch(ELSE),
'ELSE',
"IfBranch(type='ELSE', condition=None)"),
(IfBranch(condition=u'$x == "\xe4iti"'),
u'IF $x == "\xe4iti"',
u"IfBranch(type='IF', condition=%r)" % u'$x == "\xe4iti"'),
]:
assert_equal(str(if_), exp_str)
assert_equal(repr(if_), 'robot.model.' + exp_repr)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
29c48053784a6f6b40a6b5ba0c848c3ad67b2000 | 420eecae12598477a4005026a250a94bb872ef81 | /DAGMan/setup.py | 6c6ab5acc87dbf616290a6a869c1212b3cdc414c | []
| no_license | chadfreer/submit-examples | c65da1ebf7b6aee9b20a30a4d6b48a30bd02e1c1 | cc416b30c7ff7f133e7d3cd69854886a99e3fc91 | refs/heads/main | 2023-07-08T12:34:36.267389 | 2021-08-18T13:56:04 | 2021-08-18T13:56:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 738 | py | #!/usr/bin/env python
import os
cwd = os.getcwd()
condor_script = cwd+'/submit.condor'
retries = 2
njobs = 3
submit_script = cwd+'/scratch/dag.submit'
f_out = open(submit_script,'w')
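# each DAG node below gets a JOB line pointing at the shared condor submit
# file, VARS lines filling in its per-job macros, and a RETRY line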
for job_num in range(njobs):
outfile_name = 'outfile_'+str(job_num)+'A.txt'
outfile_loc = cwd+'/output/'
f_out.write("JOB\tjob" + str(job_num) +'\t' + condor_script+'\n')
f_out.write("VARS\tjob" + str(job_num) +'\t' + 'input_float = "'+str(job_num) +'"\n')
f_out.write("VARS\tjob" + str(job_num) +'\t' + 'outfile_loc = "'+str(outfile_loc) +'"\n')
f_out.write("VARS\tjob" + str(job_num) +'\t' + 'outfile_name = "'+str(outfile_name) +'"\n')
f_out.write("RETRY\tjob" + str(job_num) +'\t' + str(retries)+'\n')
f_out.close()
print('Output: '+submit_script)
| [
"[email protected]"
]
| |
7d8fb50e7ee7527432b24d8fb50d44b1c35dfd89 | 74482894c61156c13902044b4d39917df8ed9551 | /test/test_address_coins_transaction_confirmed_data_item_mined_in_block.py | fe17c1e565968234e57b107948c613cf49feb8da | [
"MIT"
]
| permissive | xan187/Crypto_APIs_2.0_SDK_Python | bb8898556ba014cc7a4dd31b10e24bec23b74a19 | a56c75df54ef037b39be1315ed6e54de35bed55b | refs/heads/main | 2023-06-22T15:45:08.273635 | 2021-07-21T03:41:05 | 2021-07-21T03:41:05 | 387,982,780 | 1 | 0 | NOASSERTION | 2021-07-21T03:35:29 | 2021-07-21T03:35:29 | null | UTF-8 | Python | false | false | 1,446 | py | """
CryptoAPIs
Crypto APIs 2.0 is a complex and innovative infrastructure layer that radically simplifies the development of any Blockchain and Crypto related applications. Organized around REST, Crypto APIs 2.0 can assist both novice Bitcoin/Ethereum enthusiasts and crypto experts with the development of their blockchain applications. Crypto APIs 2.0 provides unified endpoints and data, raw data, automatic tokens and coins forwardings, callback functionalities, and much more. # noqa: E501
The version of the OpenAPI document: 2.0.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import cryptoapis
from cryptoapis.model.address_coins_transaction_confirmed_data_item_mined_in_block import AddressCoinsTransactionConfirmedDataItemMinedInBlock
class TestAddressCoinsTransactionConfirmedDataItemMinedInBlock(unittest.TestCase):
"""AddressCoinsTransactionConfirmedDataItemMinedInBlock unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testAddressCoinsTransactionConfirmedDataItemMinedInBlock(self):
"""Test AddressCoinsTransactionConfirmedDataItemMinedInBlock"""
# FIXME: construct object with mandatory attributes with example values
# model = AddressCoinsTransactionConfirmedDataItemMinedInBlock() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
aac20397a75eddaa76c1781124bc4879759427c2 | b222a5b5a84ce5d4fa0ddb084cffd1619a84a17c | /sequence_equation/sequence_equation.py | 7a5ca7223035ab0aec3fa4aea0a2b337cc528cbd | []
| no_license | unabl4/HR | a51a5d461b3d126e1021646b9f210e099b8627b3 | 1aaf96734b8845c911d20a4955d3ffd64a2d16b9 | refs/heads/master | 2021-04-05T23:55:27.202440 | 2018-11-04T22:44:46 | 2018-11-04T22:44:46 | 125,117,758 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 409 | py | # https://www.hackerrank.com/challenges/permutation-equation/problem
#!/bin/python3
# Complete the permutationEquation function below.
def permutationEquation(p):
    # m is the inverse permutation: it maps each value to its 1-based position
    m = {}
for a,b in enumerate(p):
m[b] = a+1
return [m[m[x+1]] for x in range(len(p))]
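# Worked example: for p = [2, 3, 1] the inverse map m is {2: 1, 3: 2, 1: 3},
# so the result is [m[m[1]], m[m[2]], m[m[3]]] = [2, 3, 1].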
n = int(input())
p = list(map(int, input().rstrip().split()))
result = permutationEquation(p)
print('\n'.join(map(str, result)))
| [
"[email protected]"
]
| |
b0654c2a2d79501a23167110aa08c91d2f74bc55 | ff99c677aba11e27c252f773b52cd54f5de79279 | /ctt-server/openapi_server/models/test_artifact.py | eb26e77d966f8b9e136f61f7fd8c85e4776ebb27 | [
"Apache-2.0"
]
| permissive | radon-h2020/radon-ctt | b7eeb82f59e36e2a258d0a2ba9cd9483eb3dd247 | 97fcf5e800a0129d24e119b430d94f07ca248ba9 | refs/heads/master | 2023-01-04T23:44:49.611599 | 2021-09-15T15:34:41 | 2021-09-15T15:34:41 | 235,379,642 | 0 | 7 | Apache-2.0 | 2022-12-27T15:56:38 | 2020-01-21T15:48:45 | Python | UTF-8 | Python | false | false | 5,920 | py | # coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from openapi_server.models.base_model_ import Model
from openapi_server import util
class TestArtifact(Model):
"""NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
Do not edit the class manually.
"""
def __init__(self, uuid=None, project_uuid=None, sut_tosca_path=None, sut_inputs_path=None, ti_tosca_path=None, ti_inputs_path=None, commit_hash=None): # noqa: E501
"""TestArtifact - a model defined in OpenAPI
:param uuid: The uuid of this TestArtifact. # noqa: E501
:type uuid: str
:param project_uuid: The project_uuid of this TestArtifact. # noqa: E501
:type project_uuid: str
:param sut_tosca_path: The sut_tosca_path of this TestArtifact. # noqa: E501
        :type sut_tosca_path: str
        :param sut_inputs_path: The sut_inputs_path of this TestArtifact.  # noqa: E501
        :type sut_inputs_path: str
        :param ti_tosca_path: The ti_tosca_path of this TestArtifact.  # noqa: E501
        :type ti_tosca_path: str
        :param ti_inputs_path: The ti_inputs_path of this TestArtifact.  # noqa: E501
        :type ti_inputs_path: str
:param commit_hash: The commit_hash of this TestArtifact. # noqa: E501
:type commit_hash: str
"""
self.openapi_types = {
'uuid': str,
'project_uuid': str,
'sut_tosca_path': str,
'sut_inputs_path': str,
'ti_tosca_path': str,
'ti_inputs_path': str,
'commit_hash': str
}
self.attribute_map = {
'uuid': 'uuid',
'project_uuid': 'project_uuid',
'sut_tosca_path': 'sut_tosca_path',
'sut_inputs_path': 'sut_inputs_path',
'ti_tosca_path': 'ti_tosca_path',
'ti_inputs_path': 'ti_inputs_path',
'commit_hash': 'commit_hash'
}
self._uuid = uuid
self._project_uuid = project_uuid
self._sut_tosca_path = sut_tosca_path
self._sut_inputs_path = sut_inputs_path
self._ti_tosca_path = ti_tosca_path
self._ti_inputs_path = ti_inputs_path
self._commit_hash = commit_hash
@classmethod
def from_dict(cls, dikt) -> 'TestArtifact':
"""Returns the dict as a model
:param dikt: A dict.
:type: dict
:return: The TestArtifact of this TestArtifact. # noqa: E501
:rtype: TestArtifact
"""
return util.deserialize_model(dikt, cls)
@property
def uuid(self):
"""Gets the uuid of this TestArtifact.
:return: The uuid of this TestArtifact.
:rtype: str
"""
return self._uuid
@uuid.setter
def uuid(self, uuid):
"""Sets the uuid of this TestArtifact.
:param uuid: The uuid of this TestArtifact.
:type uuid: str
"""
self._uuid = uuid
@property
def project_uuid(self):
"""Gets the project_uuid of this TestArtifact.
:return: The project_uuid of this TestArtifact.
:rtype: str
"""
return self._project_uuid
@project_uuid.setter
def project_uuid(self, project_uuid):
"""Sets the project_uuid of this TestArtifact.
:param project_uuid: The project_uuid of this TestArtifact.
:type project_uuid: str
"""
self._project_uuid = project_uuid
@property
def sut_tosca_path(self):
"""Gets the sut_tosca_path of this TestArtifact.
:return: The sut_tosca_path of this TestArtifact.
:rtype: str
"""
return self._sut_tosca_path
@sut_tosca_path.setter
def sut_tosca_path(self, sut_tosca_path):
"""Sets the sut_tosca_path of this TestArtifact.
:param sut_tosca_path: The sut_tosca_path of this TestArtifact.
:type sut_tosca_path: str
"""
self._sut_tosca_path = sut_tosca_path
@property
def sut_inputs_path(self):
"""Gets the sut_inputs_path of this TestArtifact.
:return: The sut_inputs_path of this TestArtifact.
:rtype: str
"""
return self._sut_inputs_path
@sut_inputs_path.setter
def sut_inputs_path(self, sut_inputs_path):
"""Sets the sut_inputs_path of this TestArtifact.
        :param sut_inputs_path: The sut_inputs_path of this TestArtifact.
:type sut_inputs_path: str
"""
self._sut_inputs_path = sut_inputs_path
@property
def ti_tosca_path(self):
"""Gets the ti_tosca_path of this TestArtifact.
:return: The ti_tosca_path of this TestArtifact.
:rtype: str
"""
return self._ti_tosca_path
@ti_tosca_path.setter
def ti_tosca_path(self, ti_tosca_path):
"""Sets the ti_tosca_path of this TestArtifact.
:param ti_tosca_path: The ti_tosca_path of this TestArtifact.
:type ti_tosca_path: str
"""
self._ti_tosca_path = ti_tosca_path
@property
def ti_inputs_path(self):
"""Gets the ti_inputs_path of this TestArtifact.
:return: The ti_inputs_path of this TestArtifact.
:rtype: str
"""
return self._ti_inputs_path
@ti_inputs_path.setter
def ti_inputs_path(self, ti_inputs_path):
"""Sets the ti_inputs_path of this TestArtifact.
        :param ti_inputs_path: The ti_inputs_path of this TestArtifact.
:type ti_inputs_path: str
"""
self._ti_inputs_path = ti_inputs_path
@property
def commit_hash(self):
"""Gets the commit_hash of this TestArtifact.
:return: The commit_hash of this TestArtifact.
:rtype: str
"""
return self._commit_hash
@commit_hash.setter
def commit_hash(self, commit_hash):
"""Sets the commit_hash of this TestArtifact.
:param commit_hash: The commit_hash of this TestArtifact.
:type commit_hash: str
"""
self._commit_hash = commit_hash
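# Instances are normally built from API payloads via TestArtifact.from_dict(),
# which delegates deserialization to openapi_server.util.deserialize_model.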
| [
"[email protected]"
]
| |
149ff803cf2e12675ab01b204bcf549300d50aea | 0e1a0329e1b96405d3ba8426fd4f935aa4d8b04b | /base/tests/test_create_free_client.py | 33cbc04a108ef50da4ffb8fda7a8f0709f6032c5 | []
| no_license | ugik/Blitz | 6e3623a4a03309e33dcc0b312800e8cadc26d28c | 740f65ecaab86567df31d6a0055867be193afc3d | refs/heads/master | 2021-05-03T20:15:20.516014 | 2015-03-11T12:33:34 | 2015-03-11T12:33:34 | 25,015,963 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,535 | py | # -*- coding: utf-8 -*-
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import Select
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import NoAlertPresentException
import unittest, time, re
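# These tests drive a locally served instance of the app (base_url below points
# at http://127.0.0.1:8000) through a real Firefox session via Selenium WebDriver.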
class TestCreateFreeClient(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Firefox()
self.driver.implicitly_wait(30)
self.driver.set_window_size(1300, 1000)
self.base_url = "http://127.0.0.1:8000"
self.verificationErrors = []
self.accept_next_alert = True
def test_create_free_client(self):
driver = self.driver
driver.get(self.base_url + "/client-signup?signup_key=TEST2")
driver.find_element_by_name("password1").clear()
driver.find_element_by_name("password1").send_keys("asdf")
driver.find_element_by_name("password2").clear()
driver.find_element_by_name("password2").send_keys("asdf")
driver.find_element_by_xpath("//button").click()
driver.find_element_by_link_text(u"Set up your profile →").click()
driver.find_element_by_css_selector("label.radio").click()
driver.find_element_by_name("age").clear()
driver.find_element_by_name("age").send_keys("30")
driver.find_element_by_xpath("//form[@id='setupForm']/div[3]/label[2]").click()
# Warning: assertTextPresent may require manual changes
self.assertRegexpMatches(driver.find_element_by_css_selector("BODY").text, r"^[\s\S]*$")
driver.find_element_by_name("weight").clear()
driver.find_element_by_name("weight").send_keys("100")
driver.find_element_by_name("height_feet").clear()
driver.find_element_by_name("height_feet").send_keys("1")
driver.find_element_by_name("height_inches").clear()
driver.find_element_by_name("height_inches").send_keys("80")
driver.find_element_by_css_selector("button.obtn.full-width").click()
driver.find_element_by_id("skip-headshot").click()
driver.find_element_by_link_text(u"Finish Signup →").click()
# Warning: assertTextPresent may require manual changes
self.assertRegexpMatches(driver.find_element_by_css_selector("BODY").text, r"^[\s\S]*$")
driver.get(self.base_url + "/")
# driver.find_element_by_link_text("Log Workout").click()
# import pdb; pdb.set_trace()
# driver.find_element_by_xpath("//div[2]/input").clear()
# driver.find_element_by_xpath("//div[2]/input").send_keys("90")
# driver.find_element_by_xpath("//div[3]/div[2]/input").clear()
# driver.find_element_by_xpath("//div[3]/div[2]/input").send_keys("95")
# driver.find_element_by_xpath("//div[3]/div[3]/input").clear()
# driver.find_element_by_xpath("//div[3]/div[3]/input").send_keys("7")
# driver.find_element_by_xpath("//div[4]/div[2]/input").clear()
# driver.find_element_by_xpath("//div[4]/div[2]/input").send_keys("100")
# driver.find_element_by_xpath("//div[4]/div[3]/input").clear()
# driver.find_element_by_xpath("//div[4]/div[3]/input").send_keys("8")
# driver.find_element_by_css_selector("span.small").click()
# time.sleep(1)
# driver.find_element_by_link_text("Save These Sets").click()
# driver.find_element_by_css_selector("button.obtn.log-workout-submit").click()
# Warning: assertTextPresent may require manual changes
# self.assertRegexpMatches(driver.find_element_by_css_selector("BODY").text, r"^[\s\S]*$")
driver.get(self.base_url + "/logout")
def is_element_present(self, how, what):
try: self.driver.find_element(by=how, value=what)
except NoSuchElementException, e: return False
return True
def is_alert_present(self):
try: self.driver.switch_to_alert()
except NoAlertPresentException, e: return False
return True
def close_alert_and_get_its_text(self):
try:
alert = self.driver.switch_to_alert()
alert_text = alert.text
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert_text
finally: self.accept_next_alert = True
def tearDown(self):
self.driver.quit()
self.assertEqual([], self.verificationErrors)
if __name__ == "__main__":
unittest.main()
| [
"[email protected]"
]
| |
29dc5b8d43fb5302a1441222a19a7d9099bcf929 | 8ce0fd5e5c5b858fa24e388f2114885160421c03 | /python/netuse/net_use.py | 1fd08a9bcb6cb202b36976078d0b840d73d473a4 | []
| no_license | kong-ling/scripts | 266e9975ae0156d6fdddf43b8f1d7ee20469b388 | 3c41c49646358d46871c8fd8ebe1ba52bdea046c | refs/heads/master | 2021-01-10T08:29:34.772634 | 2020-01-03T09:04:57 | 2020-01-03T09:04:57 | 43,275,002 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 235 | py | import os
import sys
import subprocess
p = subprocess.Popen('net use',
stdout = subprocess.PIPE,
stdin = subprocess.PIPE)
print(type(p))
for drv in p.stdout.readlines():
print(drv.strip())
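# Note: on Python 3, p.stdout yields bytes; decode before printing if needed, e.g.
#   print(drv.decode(errors='replace').strip())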
| [
"[email protected]"
]
| |
bfc4a81a2576286e533d2b117dd711bc3d73d013 | 3c27b86f0165ab24e6b04d505e8471e032594f0b | /pythonAnimations/pyOpenGLChess/engineDirectory/oglc-env/lib/python2.7/site-packages/OpenGL/GLES2/EXT/shadow_samplers.py | 119ce82880ccfe3b97741cc729ccd3611e990b3f | [
"LicenseRef-scancode-warranty-disclaimer",
"GPL-1.0-or-later",
"LicenseRef-scancode-other-copyleft",
"LGPL-2.1-or-later",
"GPL-3.0-only",
"LGPL-2.0-or-later",
"GPL-3.0-or-later",
"MIT"
]
| permissive | alexus37/AugmentedRealityChess | 8b9ccdfffc8aee93a86a44b8ef53c034ec6a10d1 | 7f600ad153270feff12aa7aa86d7ed0a49ebc71c | refs/heads/master | 2020-12-24T13:29:21.967833 | 2020-02-27T09:38:50 | 2020-02-27T09:38:50 | 31,264,034 | 1 | 1 | MIT | 2020-02-27T09:38:52 | 2015-02-24T14:36:34 | Python | UTF-8 | Python | false | false | 774 | py | '''OpenGL extension EXT.shadow_samplers
This module customises the behaviour of the
OpenGL.raw.GLES2.EXT.shadow_samplers to provide a more
Python-friendly API
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/EXT/shadow_samplers.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GLES2 import _types, _glgets
from OpenGL.raw.GLES2.EXT.shadow_samplers import *
from OpenGL.raw.GLES2.EXT.shadow_samplers import _EXTENSION_NAME
def glInitShadowSamplersEXT():
'''Return boolean indicating whether this extension is available'''
from OpenGL import extensions
return extensions.hasGLExtension( _EXTENSION_NAME )
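# Usage sketch (assumes a current OpenGL ES context is already bound):
#   if glInitShadowSamplersEXT():
#       pass  # shadow-sampler functionality from this extension is available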
### END AUTOGENERATED SECTION | [
"[email protected]"
]
| |
fa7e40b0bb754bc7b775b514c47ad6387e9aded8 | 1ecb394b10e9622a5a5d8845b44e4585f464d42e | /nncp-rpc/lib/logic/Ticket/jl.py | 6eba7a4aba23fda0584343d0701709d8cb297dec | []
| no_license | dragonflylxp/lottory | 7ec28d196f58692d9d417aa5d6963c182afe260a | b04f115df325a58148dc19d7cdfc21b28892a6a1 | refs/heads/master | 2020-04-28T08:53:09.007092 | 2020-04-17T10:50:41 | 2020-04-17T10:50:41 | 175,145,951 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,125 | py | #encoding=utf-8
import MySQLdb
import traceback
import define
from dbpool import db_pool
from util.tools import Log
from common.dbs import BaseModel, access
from baseticket import BaseTicket
logger = Log().getLog()
class JlTicket(BaseTicket):
def __init__(self):
super(JlTicket, self).__init__(47)
@access("w")
def save_tickets(self, params):
project = params.get("project")
tickets = params.get("tickets")
mid = params.get("mid", None)
uid = project.get("f_uid")
pid = project.get("f_pid")
lotid = project.get("f_lotid")
try:
if mid is not None:
                # Update the msg record status (mark the message as processed)
sql = "UPDATE t_msg_record SET f_msgstatus=%s WHERE f_mid=%s AND f_msgstatus=%s"
ret = self.cursor.execute(sql, (define.MSG_STATUS_DONE, mid, define.MSG_STATUS_NEW))
if ret < 1:
                    logger.warning("Tickets already saved! lotid=%s|pid=%s|mid=%s", lotid, pid, mid)
raise Exception("Tickets already saved!")
sql = """
INSERT INTO t_ticket_jl(
f_uid,
f_pid,
f_lotid,
f_wtype,
f_ggtype,
f_beishu,
f_zhushu,
f_allmoney,
f_fileorcode,
f_firstprocessid,
f_lastprocessid,
f_ticketstatus)
VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
"""
args = []
for tkt in tickets:
tpl = (uid, pid, lotid, tkt["wtype"], tkt["ggtype"], tkt["beishu"],tkt["zhushu"], tkt["allmoney"],
tkt["fileorcode"], tkt["firstprocessid"], tkt["lastprocessid"], define.TICKET_STATUS_SAVED)
args.append(tpl)
self.cursor.executemany(sql, args)
self.conn.commit()
except Exception as ex:
logger.error(traceback.format_exc())
self.conn.rollback()
raise
return pid
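# Expected shape of `params`, inferred from the reads above (illustrative only):
#   {"project": {"f_uid": ..., "f_pid": ..., "f_lotid": ...},
#    "tickets": [{"wtype": ..., "ggtype": ..., "beishu": ..., "zhushu": ...,
#                 "allmoney": ..., "fileorcode": ..., "firstprocessid": ...,
#                 "lastprocessid": ...}, ...],
#    "mid": <optional message-record id>}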
| [
"[email protected]"
]
| |
8882a3047b104ab1b3c17264e0c530a6d08c6907 | 9610621437f025aa97f99b67f0a5d8e13bbb715c | /com/vmware/vcenter/inventory_client.py | 4a5e81930923bd8759f59470aacabd3026107b08 | [
"MIT"
]
| permissive | adammillerio/vsphere-automation-sdk-python | 2b3b730db7da99f1313c26dc738b82966ecea6ce | c07e1be98615201139b26c28db3aa584c4254b66 | refs/heads/master | 2022-11-20T03:09:59.895841 | 2020-07-17T19:32:37 | 2020-07-17T19:32:37 | 280,499,136 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,717 | py | # -*- coding: utf-8 -*-
#---------------------------------------------------------------------------
# Copyright 2020 VMware, Inc. All rights reserved.
# AUTO GENERATED FILE -- DO NOT MODIFY!
#
# vAPI stub file for package com.vmware.vcenter.inventory.
#---------------------------------------------------------------------------
"""
The ``com.vmware.vcenter.inventory_client`` component provides methods and
classes for retrieving vCenter datastore and network information for a given
:class:`list` of identifiers.
"""
__author__ = 'VMware, Inc.'
__docformat__ = 'restructuredtext en'
import sys
from vmware.vapi.bindings import type
from vmware.vapi.bindings.converter import TypeConverter
from vmware.vapi.bindings.enum import Enum
from vmware.vapi.bindings.error import VapiError
from vmware.vapi.bindings.struct import VapiStruct
from vmware.vapi.bindings.stub import (
ApiInterfaceStub, StubFactoryBase, VapiInterface)
from vmware.vapi.bindings.common import raise_core_exception
from vmware.vapi.data.validator import (UnionValidator, HasFieldsOfValidator)
from vmware.vapi.exception import CoreException
from vmware.vapi.lib.constants import TaskType
from vmware.vapi.lib.rest import OperationRestMetadata
class Datastore(VapiInterface):
"""
The ``Datastore`` class provides methods to retrieve information about
datastores.
"""
_VAPI_SERVICE_ID = 'com.vmware.vcenter.inventory.datastore'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _DatastoreStub)
self._VAPI_OPERATION_IDS = {}
class Info(VapiStruct):
"""
The ``Datastore.Info`` class contains information about a datastore.
.. tip::
The arguments are used to initialize data attributes with the same
names.
"""
def __init__(self,
type=None,
):
"""
:type type: :class:`str`
:param type: Type of the datastore.
When clients pass a value of this class as a parameter, the
attribute must be one of ``Datastore`` or ``StoragePod``. When
methods return a value of this class as a return value, the
attribute will be one of ``Datastore`` or ``StoragePod``.
"""
self.type = type
VapiStruct.__init__(self)
Info._set_binding_type(type.StructType(
'com.vmware.vcenter.inventory.datastore.info', {
'type': type.StringType(),
},
Info,
False,
None))
def find(self,
datastores,
):
"""
Returns datastore information for the specified datastores. The key in
the return value :class:`dict` is the datastore identifier and the
value in the :class:`dict` is the datastore information.
:type datastores: :class:`list` of :class:`str`
:param datastores: Identifiers of the datastores for which information will be
returned.
The parameter must contain identifiers for the resource type:
``Datastore``.
:rtype: :class:`dict` of :class:`str` and (:class:`Datastore.Info` or ``None``)
:return: Datastore information for the specified datastores. The key in the
return value :class:`dict` is the datastore identifier and the
value in the :class:`dict` is the datastore information.
The key in the return value :class:`dict` will be an identifier for
the resource type: ``Datastore``.
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
if no datastore can be found for one or more of the datastore
identifiers in ``datastores``
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
if you do not have all of the privileges described as follows:
* Method execution requires ``System.Read``.
* The resource ``Datastore`` referenced by the parameter
``datastores`` requires ``System.Read``.
"""
return self._invoke('find',
{
'datastores': datastores,
})
class Network(VapiInterface):
"""
The ``Network`` class provides methods to retrieve information about
vCenter Server networks.
"""
_VAPI_SERVICE_ID = 'com.vmware.vcenter.inventory.network'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _NetworkStub)
self._VAPI_OPERATION_IDS = {}
class Info(VapiStruct):
"""
The ``Network.Info`` class contains information about a vCenter Server
network.
.. tip::
The arguments are used to initialize data attributes with the same
names.
"""
def __init__(self,
type=None,
):
"""
:type type: :class:`str`
:param type: Type of the vCenter Server network.
When clients pass a value of this class as a parameter, the
attribute must be one of ``Network``,
``DistributedVirtualPortgroup``, or ``OpaqueNetwork``. When methods
return a value of this class as a return value, the attribute will
be one of ``Network``, ``DistributedVirtualPortgroup``, or
``OpaqueNetwork``.
"""
self.type = type
VapiStruct.__init__(self)
Info._set_binding_type(type.StructType(
'com.vmware.vcenter.inventory.network.info', {
'type': type.StringType(),
},
Info,
False,
None))
def find(self,
networks,
):
"""
Returns network information for the specified vCenter Server networks.
The key in the return value :class:`dict` is the network identifier and
the value in the :class:`dict` is the network information.
:type networks: :class:`list` of :class:`str`
:param networks: Identifiers of the vCenter Server networks for which information
will be returned.
The parameter must contain identifiers for the resource type:
``Network``.
:rtype: :class:`dict` of :class:`str` and (:class:`Network.Info` or ``None``)
:return: Network information for the specified vCenter Server networks. The
key in the return value :class:`dict` is the network identifier and
the value in the :class:`dict` is the network information.
The key in the return value :class:`dict` will be an identifier for
the resource type: ``Network``.
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
if no datastore can be found for one or more of the vCenter Server
network identifiers in ``networks``
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
if you do not have all of the privileges described as follows:
* Method execution requires ``System.Read``.
* The resource ``Network`` referenced by the parameter ``networks``
requires ``System.Read``.
"""
return self._invoke('find',
{
'networks': networks,
})
class _DatastoreStub(ApiInterfaceStub):
def __init__(self, config):
# properties for find operation
find_input_type = type.StructType('operation-input', {
'datastores': type.ListType(type.IdType()),
})
find_error_dict = {
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
find_input_value_validator_list = [
]
find_output_validator_list = [
]
find_rest_metadata = None
operations = {
'find': {
'input_type': find_input_type,
'output_type': type.MapType(type.IdType(), type.OptionalType(type.ReferenceType(__name__, 'Datastore.Info'))),
'errors': find_error_dict,
'input_value_validator_list': find_input_value_validator_list,
'output_validator_list': find_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'find': find_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.vcenter.inventory.datastore',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=True)
class _NetworkStub(ApiInterfaceStub):
def __init__(self, config):
# properties for find operation
find_input_type = type.StructType('operation-input', {
'networks': type.ListType(type.IdType()),
})
find_error_dict = {
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
find_input_value_validator_list = [
]
find_output_validator_list = [
]
find_rest_metadata = None
operations = {
'find': {
'input_type': find_input_type,
'output_type': type.MapType(type.IdType(), type.OptionalType(type.ReferenceType(__name__, 'Network.Info'))),
'errors': find_error_dict,
'input_value_validator_list': find_input_value_validator_list,
'output_validator_list': find_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'find': find_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.vcenter.inventory.network',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=True)
class StubFactory(StubFactoryBase):
_attrs = {
'Datastore': Datastore,
'Network': Network,
}
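# Usage sketch (assumes an already prepared vAPI StubConfiguration `config`;
# 'datastore-11' is an example identifier):
#   datastore_svc = Datastore(config)
#   info_by_id = datastore_svc.find(['datastore-11'])  # {id: Datastore.Info or None}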
| [
"[email protected]"
]
| |
54eef6b92d0dea189cce79be2163407619b9dcff | f85cc3fb482f1b71e7a749e1bcdbe90ba78fd059 | /swap_every_two_linked_list.py | 382cd105924360af622ba95190ca1d4012b07495 | []
| no_license | shan-mathi/InterviewBit | c94e091f728b9d18d55e86130756824a3637a744 | 6688e4ff54d56cf75297bb72ce67926b40e45127 | refs/heads/main | 2023-06-29T10:43:29.712472 | 2021-08-05T19:06:53 | 2021-08-05T19:06:53 | 364,321,855 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 728 | py | # Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
# @param A : head node of linked list
# @return the head node in the linked list
def swapPairs(self, A):
if A is None or A.next is None:
return A
temp = ListNode(-1)
temp.next = A
current = temp
while current.next is not None and current.next.next is not None:
first = current.next
second = current.next.next
first.next = second.next
current.next = second
current.next.next = first
current = current.next.next
return temp.next
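# Example: 1 -> 2 -> 3 -> 4 becomes 2 -> 1 -> 4 -> 3; with an odd node count
# (e.g. 1..5) the trailing node is left in place.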
| [
"[email protected]"
]
| |
b5fca79b1608f0797b4b9d9f43d800951d1a52d8 | d4c024cc1330aa86582e0e3f25d5c0f76a9ccbe0 | /align/predict.py | 2f6ebbfb812250d45681908e7d093c5c0b37572c | []
| no_license | jeehyun100/insta_crawling | 464d5a90a614ed4aab1ca28566ad87cbda279447 | 39ada39513bc3655adc2e624c786cc6fd8473a7e | refs/heads/master | 2021-09-07T15:41:57.013861 | 2018-02-25T09:32:36 | 2018-02-25T09:32:36 | 118,861,846 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,797 | py | import os
import cv2
import numpy as np
import tensorflow as tf
from scipy import misc
import align.detect_face as detect_face
#from facenet_tf.src.common import facenet
from PIL import Image
from PIL import ImageFont
from PIL import ImageDraw
import datetime
import dlib
from imutils.face_utils import rect_to_bb
import face_recognition
import matplotlib.pyplot as plt
class face_detect_crawling(object):
def get_boxes_frame( minsize, pnet, rnet,onet, threshold, factor, frame, detect_type, margin):
boxes = []
img_size = np.asarray(frame.shape)[0:2]
if len(img_size) == 0:
return frame, boxes
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
bounding_boxes, _ = detect_face.detect_face(frame, minsize, pnet, rnet, onet,
threshold, factor)
for bounding_box in bounding_boxes:
det = np.squeeze(bounding_box[0:4])
bb = np.zeros(4, dtype=np.int32)
bb[0] = np.maximum(det[0] - margin / 2, 0)
bb[1] = np.maximum(det[1] - margin / 2, 0)
bb[2] = np.minimum(det[2] + margin / 2, img_size[1])
bb[3] = np.minimum(det[3] + margin / 2, img_size[0])
if detect_type == 'dlib':
bb[2] += bb[0]
bb[3] += bb[1]
elif detect_type == 'hog' or detect_type == 'cnn':
bb[1], bb[2], bb[3], bb[0] = bounding_box
if len(boxes) == 0:
boxes.append(bb)
else:
if boxes[0][2] - boxes[0][0] < bb[2] - bb[0]:
boxes[0] = bb
if len(boxes) > 0:
cropped = frame[boxes[0][1]:boxes[0][3], boxes[0][0]:boxes[0][2], :]
else:
cropped = None
return cropped, boxes
def main():
# Arguments #
_detecter = face_detect_crawling()
filename = '/home/dev/insta_crawling/data/2pmhouse/10_20180221064634.jpg'
image = cv2.imread(filename, flags=cv2.IMREAD_COLOR)
config = tf.ConfigProto(device_count={'GPU': 0})
with tf.Session(config=config) as sess:
pnet, rnet, onet = detect_face.create_mtcnn(sess, None)
#frame, self.minsize, self.pnet, self.rnet, self.onet,self.threshold, self.factor
minsize = 20
threshold = [0.6, 0.7, 0.7]
factor = 0.709
margin = 90
#image_size = 300
#cropped_size = 30 # rotation use
detect_type = 'mtcnn' # dlib, mtcnn, hog, cnn
rotation = False
aligned, boxes = face_detect_crawling.get_boxes_frame(minsize, pnet, rnet,onet, threshold, factor, image, detect_type, margin)
        # `aligned` is either None or a numpy array; `aligned != None` would be an
        # ambiguous element-wise comparison, so test identity instead.
        if aligned is not None:
            cv2.imshow("Window", aligned)
            cv2.waitKey(0)  # pump the HighGUI event loop so the window actually renders
            print("success")
if __name__ == "__main__":
main()
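# Note: detect_face.create_mtcnn(sess, None) follows the facenet convention of
# loading the bundled MTCNN weight files from the align module's own directory;
# the hard-coded image path in main() is machine-specific.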
| [
"[email protected]"
]
| |
0723e9d3c2f3ed3348f8962f73db031393fd5949 | c59738ddfb08134af01d75255c4469071a1e135e | /002_Python_data_analysis_from_entry_to_master/ch10_Numpy科学计算库/02_数组的操作_分片_索引_拼接/005_二维数组_水平_竖直分割split.py | 54eb1c3acdf6b7d1c6d0d1128504a00f3cc4eed3 | []
| no_license | FelixZFB/Python_data_analysis | 371a8460da79e8fdb30b10c02b662419b62a5998 | 62f018d88d8454afe65980efd8d771ac8691956a | refs/heads/master | 2020-05-20T14:46:00.606684 | 2020-02-04T14:25:20 | 2020-02-04T14:25:20 | 185,629,174 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 552 | py | # -*- coding:utf-8 -*-
import numpy as np
# Create a 4x4 two-dimensional array
a = np.arange(16).reshape(4, 4)
print(a)
print("*" * 50)
# Vertical/horizontal splitting is the inverse operation of concatenation.
# Vertical split (vsplit): splits along rows.
# Horizontal split (hsplit): splits along columns.
# Vertical split; the number of sections must evenly divide the row count.
b = np.vsplit(a, 2)
print(b)
print("*" * 50)
# Horizontal split
c = np.hsplit(a, 2)
print(c)
print("*" * 50)
# split can also be used directly; axis=0 splits along axis 0 (rows), i.e. a vertical split
e = np.split(a, 2, axis=0)
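# For this 4x4 array, vsplit and split(axis=0) each return two 2x4 arrays,
# while hsplit returns two 4x2 arrays.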
print(e) | [
"[email protected]"
]
| |
b0000f65f8955a9141b9c9455ff591324ae8ec6d | 6b183b67944b169048a930e34608925fb9abdc3e | /xicam/plugins/imagemixinplugin.py | 66f951c285c0e0940ac64a45eb590950abeb7fcb | [
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause-LBNL"
]
| permissive | ihumphrey/Xi-cam | cef31bba712ebf6d330402b9a7cc24d3d096e2b8 | a033a97c4dac55221167d9c4e914c65e835f015a | refs/heads/master | 2022-05-12T22:10:24.970713 | 2021-05-12T22:29:08 | 2021-05-12T22:29:08 | 190,625,609 | 0 | 0 | NOASSERTION | 2019-06-06T17:52:35 | 2019-06-06T17:52:34 | null | UTF-8 | Python | false | false | 509 | py | """
Nothing useful here!
Why?
Because with the PluginType Plugin, we need to register the ImageMixinPlugin as an entrypoint for the manager to
collect them. In this case, the only meaningful part is the name of the entrypoint, not what it points to. Of course,
it has to point to something, so...
"""
from .plugin import PluginType
class ImageMixinPlugin():
"""
    This is just to direct Xi-cam for how to load these plugins; it's not intended to be instantiated or subclassed.
"""
needs_qt = True | [
"[email protected]"
]
| |
bc7c9459c0f70e88e0dde36873b792973860a896 | 1a24def8879972f21d846ffb3813632070e1cf12 | /Chapter06/0602fib-func.py | 79885098128099a23319588b0f10e18295f92798 | []
| no_license | mushahiroyuki/beginning-python | 03bb78c8d3f678ce39662a44046a308c99f29916 | 4d761d165203dbbe3604173c404f70a3eb791fd8 | refs/heads/master | 2023-08-16T12:44:01.336731 | 2023-07-26T03:41:22 | 2023-07-26T03:41:22 | 238,684,870 | 5 | 4 | null | 2023-09-06T18:34:01 | 2020-02-06T12:33:26 | Python | UTF-8 | Python | false | false | 659 | py | #@@range_begin(list1) # ←この行は無視してください。本文に引用するためのものです。
# Filename: Chapter06/0602fib-func.py
def fibs(num):
result = [0, 1]
for i in range(num-2):
result.append(result[-2] + result[-1])
return result
#@@range_end(list1) # ← Ignore this line; it only marks a range quoted in the book.
# Run
#@@range_begin(list2) # ← Ignore this line; it only marks a range quoted in the book.
print(fibs(10))
print(fibs(15))
#@@range_end(list2) # ← Ignore this line; it only marks a range quoted in the book.
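# Expected output:
#   [0, 1, 1, 2, 3, 5, 8, 13, 21, 34]
#   [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377]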
| [
"[email protected]"
]
| |
548eab73bdde0f861d5c66edaeff558f9c6362e0 | 475d1b83b77e2730b53722f0d8d11b070f97018a | /authapp/migrations/backup/0015_auto_20210226_2036.py | f7748376c6cb26aa40cc60e4db0e3f89b135edda | [
"MIT"
]
| permissive | Gwellir/my-region | b651284ee4d4ec7ec892bb78a7ce3444c833d035 | baacb7f54a19c55854fd068d6e38b3048a03d13d | refs/heads/main | 2023-04-20T17:31:33.040419 | 2021-05-17T13:35:38 | 2021-05-17T13:35:38 | 336,533,029 | 0 | 1 | MIT | 2021-05-17T13:35:39 | 2021-02-06T12:31:08 | Python | UTF-8 | Python | false | false | 591 | py | # Generated by Django 3.1.6 on 2021-02-26 17:36
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('authapp', '0014_auto_20210226_2033'),
]
operations = [
migrations.AlterField(
model_name='appuser',
name='activation_key_expiry',
field=models.DateTimeField(default=datetime.datetime(2021, 2, 27, 17, 36, 39, 366149, tzinfo=utc), verbose_name='Крайний срок текущей активации'),
),
]
| [
"[email protected]"
]
| |
dccb5669c5b88153b3e54fa816eb2c14f67647eb | aa88548d729211428b3d5d7cfb9c3ba5881e168a | /resilient-sdk/tests/unit/test_cmds/test_dev.py | e757157a1b67107f6abd07b6898790070841f922 | [
"MIT"
]
| permissive | svetterIO/resilient-python-api | 784cb83aaff353e8aa6ce0000b241a693977b5b9 | d89440ccee621cb4268ee8ebb350e47e7c9ee26b | refs/heads/master | 2023-08-31T22:15:27.588822 | 2021-10-13T13:15:12 | 2021-10-13T13:15:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,733 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) Copyright IBM Corp. 2010, 2020. All Rights Reserved.
import sys
import os
import shutil
import pytest
from resilient_sdk.util import package_file_helpers as package_helpers
from resilient_sdk.util.sdk_exception import SDKException
from resilient_sdk.cmds import base_cmd, CmdDev
from tests.shared_mock_data import mock_paths
def test_cmd_dev(fx_get_sub_parser, fx_cmd_line_args_dev_set_version):
cmd_dev = CmdDev(fx_get_sub_parser)
assert isinstance(cmd_dev, base_cmd.BaseCmd)
assert cmd_dev.CMD_NAME == "dev"
assert cmd_dev.CMD_HELP == "Unsupported functionality used to help develop an app"
assert cmd_dev.CMD_USAGE == """
$ resilient-sdk dev -p <path_to_package> --set-version 36.0.0"""
assert cmd_dev.CMD_DESCRIPTION == "WARNING: Use the functionality of 'dev' at your own risk"
args = cmd_dev.parser.parse_known_args()[0]
assert args.package == "fn_main_mock_integration"
def test_set_version_bad_version(fx_get_sub_parser, fx_cmd_line_args_dev_set_bad_version):
cmd_dev = CmdDev(fx_get_sub_parser)
args = cmd_dev.parser.parse_known_args()[0]
with pytest.raises(SDKException, match=r"is not a valid version"):
CmdDev._set_version(args)
def test_set_version(fx_copy_fn_main_mock_integration, fx_get_sub_parser, fx_cmd_line_args_dev_set_version):
mock_integration_name = fx_copy_fn_main_mock_integration[0]
path_fn_main_mock_integration = fx_copy_fn_main_mock_integration[1]
# Replace cmd line arg "fn_main_mock_integration" with path to temp dir location
sys.argv[sys.argv.index(mock_integration_name)] = path_fn_main_mock_integration
# Parse the setup.py file
path_setup_py_file = os.path.join(path_fn_main_mock_integration, package_helpers.BASE_NAME_SETUP_PY)
setup_py_attributes = package_helpers.parse_setup_py(path_setup_py_file, package_helpers.SUPPORTED_SETUP_PY_ATTRIBUTE_NAMES)
# Get customize.py ImportDefinition
path_customize_py = package_helpers.get_configuration_py_file_path("customize", setup_py_attributes)
customize_py_import_definition = package_helpers.get_import_definition_from_customize_py(path_customize_py)
# Get the old_version
old_version = customize_py_import_definition["server_version"]["version"]
assert old_version == "36.0.0"
# Run _set_version
cmd_dev = CmdDev(fx_get_sub_parser)
args = cmd_dev.parser.parse_known_args()[0]
cmd_dev._set_version(args)
# Get the new_version
customize_py_import_definition = package_helpers.get_import_definition_from_customize_py(path_customize_py)
new_version = customize_py_import_definition["server_version"]["version"]
assert new_version == "35.0.0"
| [
"[email protected]"
]
| |
57ec71f8f366f169baa43555a895ff8842a42839 | c3a3beda6fe3a9bbd5b240477f542a46dd92823a | /functions/TH/08_keyword_args.py | c76f034292ded2cad08e665202843d5d48be93cc | []
| no_license | nfarnan/cs001X_examples | 2e64955b705c8ac9c4319becf6344d36b9560e78 | 80b612249fa97ff685f345582f184d57f94bff8e | refs/heads/master | 2020-12-11T14:06:00.890074 | 2020-04-14T19:41:02 | 2020-04-14T19:41:02 | 209,681,009 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | def kw_test(a=1, b=2):
print(a, b)
kw_test()
kw_test(5, 10)
kw_test(5)
kw_test(b=10)
# this will error
#kw_test(5, 10, 20)
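# Expected output, in order: "1 2", "5 10", "5 2", "1 10".
# The commented-out call fails because kw_test accepts at most two arguments.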
| [
"[email protected]"
]
| |
c232565a21f5fd047d7623a39c6b072c1a8a23e5 | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp-with-texts/HUAWEI-MUSA-MA5100-MIB.py | 4fef2a9becd67f938eaf9afaece57bb5779b3134 | [
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
]
| permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 102,545 | py | #
# PySNMP MIB module HUAWEI-MUSA-MA5100-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/HUAWEI-MUSA-MA5100-MIB
# Produced by pysmi-0.3.4 at Wed May 1 13:44:44 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, SingleValueConstraint, ValueSizeConstraint, ValueRangeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "SingleValueConstraint", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsIntersection")
musa, = mibBuilder.importSymbols("HUAWEI-MIB", "musa")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Counter64, NotificationType, ObjectIdentity, Gauge32, Counter32, ModuleIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress, TimeTicks, iso, MibIdentifier, Unsigned32, Integer32, Bits = mibBuilder.importSymbols("SNMPv2-SMI", "Counter64", "NotificationType", "ObjectIdentity", "Gauge32", "Counter32", "ModuleIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress", "TimeTicks", "iso", "MibIdentifier", "Unsigned32", "Integer32", "Bits")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
class DisplayString(OctetString):
pass
hwMa5100Mib = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5))
hwMusaSysMib = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1))
hwMusaDevice = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1))
hwMusaEndOfMib = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 100))
hwMusaSysDate = MibScalar((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 9), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaSysDate.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSysDate.setDescription('the system date,include year,month,day.')
hwMusaSysTime = MibScalar((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 10), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaSysTime.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSysTime.setDescription('the system time,include hours,minutes,seconds.')
hwMusaSysCpuRatio = MibScalar((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaSysCpuRatio.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSysCpuRatio.setDescription('the musa system cpu ratio.')
hwMusaHostVersion = MibScalar((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 12), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaHostVersion.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaHostVersion.setDescription('the musa system host software version.')
hwMusaResetSys = MibScalar((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 13), Integer32()).setMaxAccess("writeonly")
if mibBuilder.loadTexts: hwMusaResetSys.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaResetSys.setDescription('Reset Musa device.')
hwMusaIpAddr = MibScalar((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 14), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaIpAddr.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaIpAddr.setDescription("The Musa device's Ethernet IP address.")
hwMusaIpMask = MibScalar((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 15), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaIpMask.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaIpMask.setDescription('The Musa IP SubNet Mask.')
hwMusaGatewayIpAddr = MibScalar((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 16), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaGatewayIpAddr.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaGatewayIpAddr.setDescription("The Musa gateway's IP address.")
hwMusaMacAddr = MibScalar((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 17), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaMacAddr.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaMacAddr.setDescription('The Musa Mac address.')
hwMusaIpAddrPermitTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 18), )
if mibBuilder.loadTexts: hwMusaIpAddrPermitTable.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaIpAddrPermitTable.setDescription('This table contains IP Addr scope that Musa device permit access.')
hwMusaIpAddrPermitEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 18, 1), ).setIndexNames((0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaIpPermitTableId"))
if mibBuilder.loadTexts: hwMusaIpAddrPermitEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaIpAddrPermitEntry.setDescription('This list contains IP addr scope.')
hwMusaIpPermitTableId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 18, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaIpPermitTableId.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaIpPermitTableId.setDescription('The IP Address table ID that permit access the Musa device.')
hwMusaIpAddrPermitOper = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 18, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("add", 0), ("del", 1), ("modify", 2), ("query", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaIpAddrPermitOper.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaIpAddrPermitOper.setDescription('The operation that permit access the Musa device.')
hwMusaPermitBeginIp = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 18, 1, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaPermitBeginIp.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaPermitBeginIp.setDescription('The begin IP Address that permit access the Musa device.')
hwMusaPermitEndIp = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 18, 1, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaPermitEndIp.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaPermitEndIp.setDescription('The end Ip address that permit access the Musa device.')
hwMusaPermitIpMask = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 18, 1, 5), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaPermitIpMask.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaPermitIpMask.setDescription('The Ip address subnet mask that permit access the Musa device.')
hwMusaIpAddrRejectTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 19), )
if mibBuilder.loadTexts: hwMusaIpAddrRejectTable.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaIpAddrRejectTable.setDescription('This table contains IP Addr scope that Musa device reject access.')
hwMusaIpAddrRejectEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 19, 1), ).setIndexNames((0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaIpRejectTableId"))
if mibBuilder.loadTexts: hwMusaIpAddrRejectEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaIpAddrRejectEntry.setDescription('This list contains IP addr scope.')
hwMusaIpRejectTableId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 19, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaIpRejectTableId.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaIpRejectTableId.setDescription('The IP Address table ID that reject access the Musa device.')
hwMusaIpAddrRejectOper = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 19, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("add", 0), ("del", 1), ("modify", 2), ("query", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaIpAddrRejectOper.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaIpAddrRejectOper.setDescription('The operation that reject access the Musa device.')
hwMusaRejectBeginIp = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 19, 1, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaRejectBeginIp.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaRejectBeginIp.setDescription('The begin IP Address that reject access the Musa device.')
hwMusaRejectEndIp = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 19, 1, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaRejectEndIp.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaRejectEndIp.setDescription('The end Ip address that reject access the Musa device.')
hwMusaRejectIpMask = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 19, 1, 5), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaRejectIpMask.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaRejectIpMask.setDescription('The Ip address subnet mask that reject access the Musa device.')
hwMusaAtmIpAddr = MibScalar((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 20), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaAtmIpAddr.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaAtmIpAddr.setDescription("The Musa device's IP address.")
hwMusaAtmIpMask = MibScalar((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 21), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaAtmIpMask.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaAtmIpMask.setDescription('The Musa IP SubNet Mask.')
hwMusaMtu = MibScalar((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 22), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaMtu.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaMtu.setDescription('The Musa Mtu value.')
hwMusaOpticConvergentRate = MibScalar((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 23), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaOpticConvergentRate.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaOpticConvergentRate.setDescription('The bandwidth convergentrate.')
hwMusaCellbusID = MibScalar((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 24), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("ma5100", 1), ("ma5103", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaCellbusID.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaCellbusID.setDescription('The Cellbus ID.')
hwMusaResetSlaveMMX = MibScalar((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 25), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("loaddata", 1), ("noloaddata", 2)))).setMaxAccess("writeonly")
if mibBuilder.loadTexts: hwMusaResetSlaveMMX.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaResetSlaveMMX.setDescription('Reset SLAVE MMX.')
hwMusaBiosVersion = MibScalar((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 26), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaBiosVersion.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaBiosVersion.setDescription('MMX Bios Version.')
hwMusaEthernetFirewall = MibScalar((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 1, 27), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaEthernetFirewall.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaEthernetFirewall.setDescription('MMX ethernet firewall switch.')
hwMusaNmsPvcConfTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 3), )
if mibBuilder.loadTexts: hwMusaNmsPvcConfTable.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaNmsPvcConfTable.setDescription('Musa Nms PVC configuration table.')
hwMusaNmsPvcConfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 3, 1), ).setIndexNames((0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaNmsPvcIndex"))
if mibBuilder.loadTexts: hwMusaNmsPvcConfEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaNmsPvcConfEntry.setDescription('This list contains Musa Nms Pvc Configuration parameters and variables.')
hwMusaNmsPvcIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 3, 1, 1), Integer32())
if mibBuilder.loadTexts: hwMusaNmsPvcIndex.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaNmsPvcIndex.setDescription('Nms Pvc index.')
hwMusaNmsRelayVpi = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 3, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaNmsRelayVpi.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaNmsRelayVpi.setDescription('Nms Relay Pvc Vpi index.')
hwMusaNmsRelayVci = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 3, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaNmsRelayVci.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaNmsRelayVci.setDescription('Nms Relay Pvc Vci index.')
hwMusaNmsIp = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 3, 1, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaNmsIp.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaNmsIp.setDescription('Nms IP address.')
hwMusaNmsPvcOper = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 3, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("add", 0), ("del", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaNmsPvcOper.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaNmsPvcOper.setDescription('Nms operate state,include:add/del.')
hwMusaNmsRxTraffic = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 3, 1, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaNmsRxTraffic.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaNmsRxTraffic.setDescription("Nms PVC's receive traffic index.")
hwMusaNmsTxTraffic = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 3, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaNmsTxTraffic.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaNmsTxTraffic.setDescription("Nms PVC's transmit traffic index.")
hwMusaNmsSarVci = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 3, 1, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaNmsSarVci.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaNmsSarVci.setDescription("Nms PVC's SAR VCI")
hwMusaNmsLLCVC = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 3, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("llc", 1), ("vc", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaNmsLLCVC.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaNmsLLCVC.setDescription('1483B encapsulation state,include:llc/vc.')
hwMusaNmsENCAP = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 3, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("eipoa", 0), ("e1483B", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaNmsENCAP.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaNmsENCAP.setDescription('Nms encapsulation type,include:ipoa/1483B.')
hwMusaNmsFrameId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 3, 1, 14), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaNmsFrameId.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaNmsFrameId.setDescription("Nms PVC's DstFrameId.")
hwMusaNmsSlotId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 3, 1, 15), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaNmsSlotId.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaNmsSlotId.setDescription("Nms PVC's DstSlotId.")
hwMusaNmsPortVlanId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 3, 1, 16), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaNmsPortVlanId.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaNmsPortVlanId.setDescription("Nms PVC's DstPortVlanId.")
hwMusaNmsParaConfTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 5), )
if mibBuilder.loadTexts: hwMusaNmsParaConfTable.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaNmsParaConfTable.setDescription('Musa Nms PVC configuration table.')
hwMusaNmsParaConfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 5, 1), ).setIndexNames((0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaNmsID"))
if mibBuilder.loadTexts: hwMusaNmsParaConfEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaNmsParaConfEntry.setDescription('This list contains Musa Nms Para Configuration parameters and variables.')
hwMusaNmsID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 5, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7)))
if mibBuilder.loadTexts: hwMusaNmsID.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaNmsID.setDescription('Nms ID,from 0 to 7.')
hwMusaNmsOperState = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 5, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 4, 5))).clone(namedValues=NamedValues(("add", 0), ("del", 1), ("modify", 2), ("active", 4), ("deactive", 5)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaNmsOperState.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaNmsOperState.setDescription('Nms operate state,include:add/del/modify/act/deact.')
hwMusaNmsName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 5, 1, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaNmsName.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaNmsName.setDescription('Nms name.')
hwMusaNmsIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 5, 1, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaNmsIpAddr.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaNmsIpAddr.setDescription('Nms IP address.')
hwMusaGetCommunity = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 5, 1, 5), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaGetCommunity.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaGetCommunity.setDescription('Snmp get community name.')
hwMusaSetCommunity = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 5, 1, 6), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaSetCommunity.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSetCommunity.setDescription('Snmp set community name.')
hwMusaTrapPort = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 5, 1, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaTrapPort.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaTrapPort.setDescription('Snmp trap port.')
hwMusaGetSetPort = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 5, 1, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaGetSetPort.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaGetSetPort.setDescription('Snmp get/set port.')
hwMusaNmsStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 5, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("active", 1), ("deactive", 2), ("commfail", 3), ("uninstall", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaNmsStatus.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaNmsStatus.setDescription('Nms status.')
hwMusaNmsStyle = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 5, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("bandin", 0), ("bandout", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaNmsStyle.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaNmsStyle.setDescription('Nms admin style in-band or out-band.')
hwMusaSlotGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6))
hwMusaShelf = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 1))
hwMusaFrame = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 2))
hwMusaSlot = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 3))
hwMusaShelfNumber = MibScalar((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaShelfNumber.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaShelfNumber.setDescription('Musa shelf numbers.')
hwMusaShelfConfTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 1, 2), )
if mibBuilder.loadTexts: hwMusaShelfConfTable.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaShelfConfTable.setDescription('This table contains Musa shelf configuration parameters, one entry per Musa shelf.')
hwMusaShelfConfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 1, 2, 1), ).setIndexNames((0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaShelfIndex"))
if mibBuilder.loadTexts: hwMusaShelfConfEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaShelfConfEntry.setDescription('This list contains Musa shelf configuration parameters and status variables.')
hwMusaShelfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 1, 2, 1, 1), Integer32())
if mibBuilder.loadTexts: hwMusaShelfIndex.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaShelfIndex.setDescription('The Musa shelf number.')
hwMusaShelfType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("other", 1), ("empty", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaShelfType.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaShelfType.setDescription('The Musa shelf type.')
hwMusaShelfName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 1, 2, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaShelfName.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaShelfName.setDescription('The Musa shelf name.')
hwMusaFrameNumbers = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 1, 2, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaFrameNumbers.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaFrameNumbers.setDescription('The number of frames belonging to this shelf.')
hwMusaFrameNumber = MibScalar((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 2, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaFrameNumber.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaFrameNumber.setDescription('Musa frame numbers.')
hwMusaFrameConfTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 2, 2), )
if mibBuilder.loadTexts: hwMusaFrameConfTable.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaFrameConfTable.setDescription('This table contains Musa Frame configuration parameters.')
hwMusaFrameConfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 2, 2, 1), ).setIndexNames((0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaShelfIndex"), (0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaFrameIndex"))
if mibBuilder.loadTexts: hwMusaFrameConfEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaFrameConfEntry.setDescription('This list contains Musa Frame configuration parameters and status variables.')
hwMusaFrameIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 2, 2, 1, 1), Integer32())
if mibBuilder.loadTexts: hwMusaFrameIndex.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaFrameIndex.setDescription('The Musa Frame number.')
hwMusaFrameType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 2, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("other", 1), ("empty", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaFrameType.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaFrameType.setDescription('The Musa Frame type.')
hwMusaFrameName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 2, 2, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 20))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaFrameName.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaFrameName.setDescription('The Musa Frame name.')
hwMusaSlotNumbers = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 2, 2, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaSlotNumbers.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSlotNumbers.setDescription('The number of slots belonging to this frame.')
hwMusaFrameBandWidth = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 2, 2, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaFrameBandWidth.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaFrameBandWidth.setDescription("The bandwidth (kbps) of the frame's backplane.")
hwMusaFrameUsedBandWidth = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 2, 2, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaFrameUsedBandWidth.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaFrameUsedBandWidth.setDescription("The currently used bandwidth (kbps) of the frame's backplane.")
hwMusaSlotNumber = MibScalar((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 3, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaSlotNumber.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSlotNumber.setDescription('Musa slot numbers.')
hwMusaSlotConfTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 3, 2), )
if mibBuilder.loadTexts: hwMusaSlotConfTable.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSlotConfTable.setDescription('This table contains Musa slot configuration parameters, one entry per Musa slot.')
hwMusaSlotConfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 3, 2, 1), ).setIndexNames((0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaFrameIndex"), (0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaSlotIndex"))
if mibBuilder.loadTexts: hwMusaSlotConfEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSlotConfEntry.setDescription('This list contains Musa slot configuration parameters and status variables.')
hwMusaSlotIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 3, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 15)))
if mibBuilder.loadTexts: hwMusaSlotIndex.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSlotIndex.setDescription('The Musa slot number.')
hwMusaSlotCardType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 3, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 18, 19, 21, 25))).clone(namedValues=NamedValues(("null", 0), ("mmx", 1), ("smx", 2), ("adl", 3), ("lanb", 4), ("lana", 5), ("cesa", 6), ("cesb", 7), ("spl", 8), ("fra", 9), ("adlb", 10), ("unknown", 11), ("splb", 12), ("sep", 13), ("smxa", 14), ("smxb", 15), ("pots", 16), ("splc", 18), ("lan", 19), ("adlc", 21), ("adld", 25)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaSlotCardType.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSlotCardType.setDescription('The Musa slot card type. The 16 bits are split into two parts: the low 8 bits hold the board type; in the high 8 bits, the high 4 bits hold the type of the first subboard and the low 4 bits the type of the second subboard. Subboard types per board kind: mmx: 0x00 MMX_NO_SUBBOARD, 0x01 MMX_OIM_S155SM, 0x02 MMX_OIM_S155MM, 0x03 MMX_OIM_D155SM, 0x04 MMX_OIM_D155MM, 0x05 MMX_APON_R, 0x06 MMX_APON_C, 0x07 MMX_IMA_TYPE, 0x08 MMX_IMA_TYPE_120. frc: 0x00 FRC_NON_SUBBOARD, 0x01 FRC_E1, 0x02 FRC_T1, 0x03 FRC_V35, 0x04 FRC_E1_120. cesc: 0x00 CESC_NON_SUBBOARD, 0x01 CESC_E1, 0x02 CESC_T1, 0x03 CESC_V35, 0x04 CESC_E1_120. aiua: 0x00 AIU_NO_SUBBOARD, 0x01 AIU_155M_SINGLE_PATH, 0x02 AIU_155M_SINGLE_PATH_M, 0x03 AIU_155M_DOUBLE_PATH, 0x04 AIU_155M_DOUBLE_PATH_M, 0x05 AIU_622M, 0x06 AIU_APON_C, 0x07 AIU_APON_R, 0x08 AIU_E3, 0x09 AIU_IMA, 0x0a AIU_IMA_120.')
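# --- Example (not part of the generated MIB) ---------------------------------
# A minimal sketch of decoding a 16-bit hwMusaSlotCardType value per the
# description above: low 8 bits = board type; in the high byte, the high
# nibble is the first subboard and the low nibble the second subboard.
# The helper name is ours, not a symbol from this MIB.
def _decode_slot_card_type(raw):
    board = raw & 0xFF           # low 8 bits: board type
    sub1 = (raw >> 12) & 0x0F    # high 4 bits of the high byte: first subboard
    sub2 = (raw >> 8) & 0x0F     # low 4 bits of the high byte: second subboard
    return board, sub1, sub2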
hwMusaSlotCardSerial = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 3, 2, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaSlotCardSerial.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSlotCardSerial.setDescription('The Musa slot card serial.')
hwMusaSlotCardVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 3, 2, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaSlotCardVersion.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSlotCardVersion.setDescription('The Musa slot card version.')
hwMusaSlotIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 3, 2, 1, 5), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaSlotIpAddress.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSlotIpAddress.setDescription('The Musa slot card Ip address.')
hwMusaSlotCardAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 3, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13))).clone(namedValues=NamedValues(("noinstall", 0), ("normal", 1), ("fault", 2), ("mainnormal", 3), ("mainfault", 4), ("baknormal", 5), ("bakfault", 6), ("forbid", 7), ("config", 8), ("online", 10), ("none", 11), ("commok", 12), ("commfail", 13)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaSlotCardAdminStatus.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSlotCardAdminStatus.setDescription('The Musa slot card State.')
hwMusaSlotCardOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 3, 2, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=NamedValues(("del", 0), ("add", 1), ("reset", 2), ("use", 3), ("nouse", 4), ("inverse", 5), ("mmxswitchover", 6), ("delmmxsubboard", 7), ("addaiusubboard", 8), ("delaiusubboard", 9)))).setMaxAccess("writeonly")
if mibBuilder.loadTexts: hwMusaSlotCardOperStatus.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSlotCardOperStatus.setDescription('The slot card operation status.')
hwMusaSlotDescript = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 3, 2, 1, 8), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaSlotDescript.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSlotDescript.setDescription('The slot card description.')
hwMusaBoardCellLossPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 3, 2, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("low", 0), ("high", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaBoardCellLossPriority.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaBoardCellLossPriority.setDescription('The board service priority.')
hwMusaBoardMaxBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 3, 2, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("b-155M", 0), ("b-80M", 1), ("b-20M", 2), ("b-4M", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaBoardMaxBandwidth.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaBoardMaxBandwidth.setDescription('The board maximum burst cell rate.')
hwMusaCpuOccupyRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 3, 2, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaCpuOccupyRate.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaCpuOccupyRate.setDescription('The board CPU occupancy rate.')
hwMusaQueryMemory = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 3, 2, 1, 12), DisplayString()).setMaxAccess("writeonly")
if mibBuilder.loadTexts: hwMusaQueryMemory.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaQueryMemory.setDescription('Query the board memory.')
hwMusaLoadProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 3, 2, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0))).clone(namedValues=NamedValues(("tftp", 0)))).setMaxAccess("writeonly")
if mibBuilder.loadTexts: hwMusaLoadProtocol.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaLoadProtocol.setDescription('The protocol of the load file: tftp.')
hwMusaLoadContent = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 3, 2, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(6, 8, 7))).clone(namedValues=NamedValues(("program", 6), ("data", 8), ("fpga", 7)))).setMaxAccess("writeonly")
if mibBuilder.loadTexts: hwMusaLoadContent.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaLoadContent.setDescription('The type of the load file: program, data, or fpga.')
hwMusaLoadTftpServerIp = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 3, 2, 1, 15), IpAddress()).setMaxAccess("writeonly")
if mibBuilder.loadTexts: hwMusaLoadTftpServerIp.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaLoadTftpServerIp.setDescription('The TFTP server (NMS) IP address.')
hwMusaLoadFileName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 3, 2, 1, 16), DisplayString()).setMaxAccess("writeonly")
if mibBuilder.loadTexts: hwMusaLoadFileName.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaLoadFileName.setDescription('The load file name.')
hwMusaLoadOperType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 3, 2, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("load", 0), ("upback", 1), ("downback", 2), ("rollback", 3), ("clearflash", 4)))).setMaxAccess("writeonly")
if mibBuilder.loadTexts: hwMusaLoadOperType.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaLoadOperType.setDescription('The type of load/backup operation.')
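# --- Example (not part of the generated MIB) ---------------------------------
# A minimal sketch of starting a TFTP program load to one board by writing the
# write-only load columns above (indexed by frame and slot) in a single SET
# PDU. Host, community, server IP and file name are placeholder assumptions.
def _example_start_load(host, frame, slot, server_ip='192.0.2.1',
                        file_name='board.bin'):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              Integer32, IpAddress, OctetString, setCmd)
    mib = 'HUAWEI-MUSA-MA5100-MIB'
    err_ind, err_stat, _, _ = next(setCmd(
        SnmpEngine(), CommunityData('private'),
        UdpTransportTarget((host, 161)), ContextData(),
        ObjectType(ObjectIdentity(mib, 'hwMusaLoadProtocol', frame, slot),
                   Integer32(0)),                      # tftp
        ObjectType(ObjectIdentity(mib, 'hwMusaLoadContent', frame, slot),
                   Integer32(6)),                      # program
        ObjectType(ObjectIdentity(mib, 'hwMusaLoadTftpServerIp', frame, slot),
                   IpAddress(server_ip)),
        ObjectType(ObjectIdentity(mib, 'hwMusaLoadFileName', frame, slot),
                   OctetString(file_name)),
        ObjectType(ObjectIdentity(mib, 'hwMusaLoadOperType', frame, slot),
                   Integer32(0))))                     # load
    return err_ind is None and int(err_stat) == 0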
hwMusaSlotUpBandWidth = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 3, 2, 1, 18), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaSlotUpBandWidth.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSlotUpBandWidth.setDescription('The upstream bandwidth (kbps) of the slot.')
hwMusaSlotDownBandWidth = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 3, 2, 1, 19), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaSlotDownBandWidth.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSlotDownBandWidth.setDescription('The main downstream bandwidth (kbps) of the slot.')
hwMusaSlotUsedUpBandWidth = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 3, 2, 1, 20), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaSlotUsedUpBandWidth.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSlotUsedUpBandWidth.setDescription('The currently used upstream bandwidth (kbps) of the slot.')
hwMusaSlotUsedDownBandWidth = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 3, 2, 1, 21), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaSlotUsedDownBandWidth.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSlotUsedDownBandWidth.setDescription('The currently used downstream bandwidth (kbps) of the slot.')
hwMusaSlotUpPracticalBandWidth = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 3, 2, 1, 22), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaSlotUpPracticalBandWidth.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSlotUpPracticalBandWidth.setDescription('The practical upstream bandwidth (kbps) of the slot.')
hwMusaSlotDownPracticalBandWidth = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 6, 3, 2, 1, 23), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaSlotDownPracticalBandWidth.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSlotDownPracticalBandWidth.setDescription('The practical downstream bandwidth (kbps) of the slot.')
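# --- Example (not part of the generated MIB) ---------------------------------
# A minimal sketch of walking the hwMusaSlotConfTable card-type column
# (indexed by frame and slot); host and community are placeholder assumptions.
def _example_walk_slot_card_types(host='agent.example'):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, nextCmd)
    for err_ind, err_stat, err_idx, var_binds in nextCmd(
            SnmpEngine(), CommunityData('public'),
            UdpTransportTarget((host, 161)), ContextData(),
            ObjectType(ObjectIdentity('HUAWEI-MUSA-MA5100-MIB',
                                      'hwMusaSlotCardType')),
            lexicographicMode=False):  # stop at the end of this column
        if err_ind or err_stat:
            break
        for name, value in var_binds:
            print(name.prettyPrint(), '=', value.prettyPrint())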
hwMusaOamGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7))
hwMusaOimPhyTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 1), )
if mibBuilder.loadTexts: hwMusaOimPhyTable.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaOimPhyTable.setDescription('Musa OIM physical table.')
hwMusaOimPhyEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 1, 1), ).setIndexNames((0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaFrameIndex"), (0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaSlotIndex"), (0, "HUAWEI-MUSA-MA5100-MIB", "hwOIMPortIndex"))
if mibBuilder.loadTexts: hwMusaOimPhyEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaOimPhyEntry.setDescription('This list contains Musa OIM physical variables.')
hwOIMPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 1, 1, 1), Integer32())
if mibBuilder.loadTexts: hwOIMPortIndex.setStatus('mandatory')
if mibBuilder.loadTexts: hwOIMPortIndex.setDescription('The Musa OIM port number (0-1).')
hwMusaSetSrcLoop = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("notloop", 0), ("loop", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaSetSrcLoop.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSetSrcLoop.setDescription('Set source loop.')
hwMusaSetLineLoop = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("notloop", 0), ("loop", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaSetLineLoop.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSetLineLoop.setDescription('Set line loop.')
hwMusaSetUtopiaLoop = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("notloop", 0), ("loop", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaSetUtopiaLoop.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSetUtopiaLoop.setDescription('Set Utopia loop.')
hwMusaInsertLOF = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("notinsert", 0), ("insert", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaInsertLOF.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaInsertLOF.setDescription('Insert LOF warning at the optic port.')
hwMusaInsertLOS = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("notinsert", 0), ("insert", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaInsertLOS.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaInsertLOS.setDescription('Insert LOS warning at the optic port.')
hwMusaInsertBIP1 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 1, 1, 7), Integer32()).setMaxAccess("writeonly")
if mibBuilder.loadTexts: hwMusaInsertBIP1.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaInsertBIP1.setDescription('Insert BIP1 (SBIP) error code at the optic port.')
hwMusaInsertBIP2 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 1, 1, 8), Integer32()).setMaxAccess("writeonly")
if mibBuilder.loadTexts: hwMusaInsertBIP2.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaInsertBIP2.setDescription('Insert BIP2 (LBIP) error code at the optic port.')
hwMusaInsertBIP3 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 1, 1, 9), Integer32()).setMaxAccess("writeonly")
if mibBuilder.loadTexts: hwMusaInsertBIP3.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaInsertBIP3.setDescription('Insert BIP3 (PBIP) error code at the optic port.')
hwMusaInsertLAIS = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("notinsert", 0), ("insert", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaInsertLAIS.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaInsertLAIS.setDescription('Insert LAIS warning at the optic port.')
hwMusaInsertPAIS = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("notinsert", 0), ("insert", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaInsertPAIS.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaInsertPAIS.setDescription('Insert PAIS warning at the optic port.')
hwMusaInsertLRDI = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("notinsert", 0), ("insert", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaInsertLRDI.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaInsertLRDI.setDescription('Insert LRDI warning at the optic port.')
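# --- Example (not part of the generated MIB) ---------------------------------
# A minimal sketch of enabling a line loopback on one OIM port via SNMP SET.
# hwMusaSetLineLoop is indexed by (frame, slot, port); host and the 'private'
# community are placeholder assumptions.
def _example_set_line_loop(host, frame, slot, port, loop=1):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              Integer32, setCmd)
    err_ind, err_stat, _, _ = next(setCmd(
        SnmpEngine(), CommunityData('private'),
        UdpTransportTarget((host, 161)), ContextData(),
        ObjectType(ObjectIdentity('HUAWEI-MUSA-MA5100-MIB',
                                  'hwMusaSetLineLoop', frame, slot, port),
                   Integer32(loop))))  # 1 = loop, 0 = notloop
    return err_ind is None and int(err_stat) == 0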
hwMusaOimOpticTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 2), )
if mibBuilder.loadTexts: hwMusaOimOpticTable.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaOimOpticTable.setDescription('Musa OIM optical performance table.')
hwMusaOimOpticEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 2, 1), ).setIndexNames((0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaFrameIndex"), (0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaSlotIndex"), (0, "HUAWEI-MUSA-MA5100-MIB", "hwOIMPortIndex"))
if mibBuilder.loadTexts: hwMusaOimOpticEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaOimOpticEntry.setDescription('This list contains Musa OIM optical performance variables.')
hwMusaQueryCurBIP1 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 2, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaQueryCurBIP1.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaQueryCurBIP1.setDescription('Query the current BIP1 error code count since the last query.')
hwMusaQueryCurBIP2 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 2, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaQueryCurBIP2.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaQueryCurBIP2.setDescription('Query the current BIP2 error code count since the last query.')
hwMusaQueryCurBIP3 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 2, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaQueryCurBIP3.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaQueryCurBIP3.setDescription('Query the current BIP3 error code count since the last query.')
hwMusaQueryCurLFEBE = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 2, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaQueryCurLFEBE.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaQueryCurLFEBE.setDescription('Query the current LFEBE error code count since the last query.')
hwMusaQueryCurPFEBE = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 2, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaQueryCurPFEBE.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaQueryCurPFEBE.setDescription('Query the current PFEBE error code count since the last query.')
hwMusaQueryCurSendCellNum = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 2, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaQueryCurSendCellNum.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaQueryCurSendCellNum.setDescription('Query the current count of sent cells since the last query.')
hwMusaQueryCurReceiveCellNum = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 2, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaQueryCurReceiveCellNum.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaQueryCurReceiveCellNum.setDescription('Query the current count of received cells since the last query.')
hwMusaQueryCurCorrectHECNum = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 2, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaQueryCurCorrectHECNum.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaQueryCurCorrectHECNum.setDescription('Query the current count of correctable HEC errors since the last query.')
hwMusaQueryCurNonCorrectHECNum = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 2, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaQueryCurNonCorrectHECNum.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaQueryCurNonCorrectHECNum.setDescription('Query the current count of uncorrectable HEC errors since the last query.')
hwMusaQueryCurLOCDNum = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 2, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaQueryCurLOCDNum.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaQueryCurLOCDNum.setDescription('Query the current count of LOCD events since the last query.')
hwMusaQueryCurUnmatchCellNum = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 2, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaQueryCurUnmatchCellNum.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaQueryCurUnmatchCellNum.setDescription('Query the current count of unmatched cells since the last query.')
hwMusaQueryCurOOFNum = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 2, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaQueryCurOOFNum.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaQueryCurOOFNum.setDescription('Query the current count of OOF events since the last query.')
hwMusaClearAllAlarmStat = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 2, 1, 13), Integer32()).setMaxAccess("writeonly")
if mibBuilder.loadTexts: hwMusaClearAllAlarmStat.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaClearAllAlarmStat.setDescription('Clear port cell statistics.')
hwMusaClearOIMErrEventStat = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 7, 2, 1, 14), Integer32()).setMaxAccess("writeonly")
if mibBuilder.loadTexts: hwMusaClearOIMErrEventStat.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaClearOIMErrEventStat.setDescription('Clear all OIM alarm statistics.')
hwMusaWarningCtrlTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 9), )
if mibBuilder.loadTexts: hwMusaWarningCtrlTable.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaWarningCtrlTable.setDescription('Musa warning control table.')
hwMusaWarningCtrlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 9, 1), ).setIndexNames((0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaWarningID"))
if mibBuilder.loadTexts: hwMusaWarningCtrlEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaWarningCtrlEntry.setDescription('This list contains Musa warning Control parameters.')
hwMusaWarningID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 9, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaWarningID.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaWarningID.setDescription('The MUSA Warning ID.')
hwMusaWarningLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 9, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("notify", 0), ("normal", 1), ("serious", 2), ("fatal", 3), ("default", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaWarningLevel.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaWarningLevel.setDescription('The MUSA Warning level.')
hwMusaWarningNmsCtrl = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 9, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaWarningNmsCtrl.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaWarningNmsCtrl.setDescription('The MUSA Warning NMS filter.')
hwMusaWarningTerminalCtrl = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 9, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaWarningTerminalCtrl.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaWarningTerminalCtrl.setDescription('The MUSA Warning terminal filter.')
hwMusaWarningIsCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 9, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaWarningIsCount.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaWarningIsCount.setDescription('Whether the warning is counted in statistics or not.')
hwMusaWarn15MinThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 9, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaWarn15MinThreshold.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaWarn15MinThreshold.setDescription('The Warning 15 Minute threshold.')
hwMusaWarn24HourThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 9, 1, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaWarn24HourThreshold.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaWarn24HourThreshold.setDescription('The Warning 24 Hour threshold.')
hwMusaWarningDesc = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 9, 1, 8), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaWarningDesc.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaWarningDesc.setDescription('The MUSA Warning Chinese description.')
hwMusaWarningEngDesc = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 9, 1, 9), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaWarningEngDesc.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaWarningEngDesc.setDescription('The MUSA Warning English description.')
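# --- Example (not part of the generated MIB) ---------------------------------
# A minimal sketch of raising one alarm's severity via SNMP SET.
# hwMusaWarningLevel is indexed by hwMusaWarningID, and 2 means 'serious' per
# the named values above; host and community are placeholder assumptions.
def _example_set_warning_level(host, warning_id, level=2):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              Integer32, setCmd)
    err_ind, err_stat, _, _ = next(setCmd(
        SnmpEngine(), CommunityData('private'),
        UdpTransportTarget((host, 161)), ContextData(),
        ObjectType(ObjectIdentity('HUAWEI-MUSA-MA5100-MIB',
                                  'hwMusaWarningLevel', warning_id),
                   Integer32(level))))
    return err_ind is None and int(err_stat) == 0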
hwMusaSysRouteTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 10), )
if mibBuilder.loadTexts: hwMusaSysRouteTable.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSysRouteTable.setDescription('Musa sys route table.')
hwMusaSysRouteEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 10, 1), ).setIndexNames((0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaSysRouteIndex"))
if mibBuilder.loadTexts: hwMusaSysRouteEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSysRouteEntry.setDescription('This list contains Musa Sys Route Configuration parameters and variables.')
hwMusaSysRouteIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 10, 1, 1), Integer32())
if mibBuilder.loadTexts: hwMusaSysRouteIndex.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSysRouteIndex.setDescription('Sys Route index.')
hwMusaDstIp = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 10, 1, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaDstIp.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaDstIp.setDescription('Destination IP address.')
hwMusaDstIpMask = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 10, 1, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaDstIpMask.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaDstIpMask.setDescription('Destination IP address Mask.')
hwMusaGateIp = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 10, 1, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaGateIp.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaGateIp.setDescription('Gateway IP address.')
hwMusaSysRouteOper = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 10, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("add", 0), ("del", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaSysRouteOper.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSysRouteOper.setDescription('Sys route operation state, one of: add/del.')
hwMusaLoadRateTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 11), )
if mibBuilder.loadTexts: hwMusaLoadRateTable.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaLoadRateTable.setDescription('This table contains Musa slot load rate.')
hwMusaLoadRateEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 11, 1), ).setIndexNames((0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaFrameIndex"), (0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaSlotIndex"))
if mibBuilder.loadTexts: hwMusaLoadRateEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaLoadRateEntry.setDescription('This list contains Musa slot Load rate variables.')
hwMusaLoadRate = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 11, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaLoadRate.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaLoadRate.setDescription('Load progress.')
hwMusaLoadType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 11, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("noOper", 0), ("backData", 1), ("dumpData", 2), ("loadData", 3), ("loadProc", 4), ("loadFpga", 5), ("program", 6), ("fpga", 7), ("data", 8)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaLoadType.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaLoadType.setDescription('Load type.')
hwMusaTrafficTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 13), )
if mibBuilder.loadTexts: hwMusaTrafficTable.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaTrafficTable.setDescription('Musa Pvc Traffic table.')
hwMusaTrafficEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 13, 1), ).setIndexNames((0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaTrafficIndex"))
if mibBuilder.loadTexts: hwMusaTrafficEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaTrafficEntry.setDescription('This list contains Musa Pvc Traffic Configuration parameters and variables.')
hwMusaTrafficIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 13, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 5000)))
if mibBuilder.loadTexts: hwMusaTrafficIndex.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaTrafficIndex.setDescription('Musa Traffic Table Index.')
hwMusaTrafficType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 13, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))).clone(namedValues=NamedValues(("noclpnoscr", 1), ("clpnotaggingnoscr", 2), ("clptaggingnoscr", 3), ("noclpscr", 4), ("clpnotaggingscr", 5), ("clptaggingscr", 6), ("clpnotaggingmcr", 7), ("clptransparentnoscr", 8), ("clptransparentscr", 9), ("noclptaggingnoscr", 10), ("noclpnoscrcdvt", 11), ("noclpscrcdvt", 12), ("clpnotaggingscrcdvt", 13), ("clptaggingscrcdvt", 14)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaTrafficType.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaTrafficType.setDescription('Musa Traffic table type (1:NOCLPNOSCR, 2:CLPNOTAGGINGNOSCR, 3:CLPTAGGINGNOSCR, 4:NOCLPSCR, 5:CLPNOTAGGINGSCR, 6:CLPTAGGINGSCR, 7:CLPNOTAGGINGMCR, 8:CLPTRANSPARENTNOSCR, 9:CLPTRANSPARENTSCR, 10:NOCLPTAGGINGNOSCR, 11:NOCLPNOSCRCDVT, 12:NOCLPSCRCDVT, 13:CLPNOTAGGINGSCRCDVT, 14:CLPTAGGINGSCRCDVT).')
hwMusaServiceClass = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 13, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4, 6))).clone(namedValues=NamedValues(("cbr", 2), ("rtVBR", 3), ("nrtVBR", 4), ("ubr", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaServiceClass.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaServiceClass.setDescription('Musa Traffic service class (1:other, 2:CBR, 3:rtVBR, 4:nrtVBR, 5:ABR, 6:UBR).')
hwMusaRefCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 13, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaRefCount.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaRefCount.setDescription('Musa Traffic record reference count.')
hwMusaRecordState = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 13, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1), ("module", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaRecordState.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaRecordState.setDescription('Musa Traffic table record state: disable(0), enable(1), module(2).')
hwMusaClp01pcr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 13, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaClp01pcr.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaClp01pcr.setDescription('Musa Traffic table parameter Clp01pcr.')
hwMusaClp0pcr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 13, 1, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaClp0pcr.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaClp0pcr.setDescription('Musa Traffic table parameter Clp0pcr.')
hwMusaClp01scr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 13, 1, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaClp01scr.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaClp01scr.setDescription('Musa Traffic table parameter Clp01scr.')
hwMusaClp0scr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 13, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaClp0scr.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaClp0scr.setDescription('Musa Traffic table parameter Clp0scr.')
hwMusaMbs = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 13, 1, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaMbs.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaMbs.setDescription('Musa Traffic table parameter Mbs.')
hwMusaMcr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 13, 1, 11), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaMcr.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaMcr.setDescription('Musa Traffic table parameter Mcr.')
hwMusaCDVT = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 13, 1, 12), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaCDVT.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaCDVT.setDescription('Musa Traffic table parameter CDVT.')
hwMusaOperat = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 13, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("add", 0), ("del", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaOperat.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaOperat.setDescription('Musa Traffic table operation (0:add, 1:del).')
hwMusaNextTrafficIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 13, 1, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaNextTrafficIndex.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaNextTrafficIndex.setDescription('Musa Traffic table next traffic index.')
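# --- Example (not part of the generated MIB) ---------------------------------
# A minimal sketch of fetching hwMusaNextTrafficIndex, the read-only column
# that presumably reports the next free hwMusaTrafficIndex to use when adding
# a traffic record. Host, community and the probe row index 1 are placeholder
# assumptions.
def _example_next_traffic_index(host='agent.example'):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, getCmd)
    err_ind, err_stat, _, var_binds = next(getCmd(
        SnmpEngine(), CommunityData('public'),
        UdpTransportTarget((host, 161)), ContextData(),
        ObjectType(ObjectIdentity('HUAWEI-MUSA-MA5100-MIB',
                                  'hwMusaNextTrafficIndex', 1))))
    if err_ind or err_stat:
        raise RuntimeError(err_ind or err_stat.prettyPrint())
    return int(var_binds[0][1])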
hwMusaCampusPvcConfTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 15), )
if mibBuilder.loadTexts: hwMusaCampusPvcConfTable.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaCampusPvcConfTable.setDescription('Musa campus pvc configuration table.')
hwMusaCampusPvcConfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 15, 1), ).setIndexNames((0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaFrameIndex"), (0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaSlotIndex"), (0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaVlanId"), (0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaVlanIciIndex"))
if mibBuilder.loadTexts: hwMusaCampusPvcConfEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaCampusPvcConfEntry.setDescription('This list contains Musa campus pvc Configuration parameters and variables.')
hwMusaVlanId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 15, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 63))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaVlanId.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaVlanId.setDescription('Musa campus VLan Id.')
hwMusaVlanIciIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 15, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaVlanIciIndex.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaVlanIciIndex.setDescription('Musa campus VLanIciIndex.')
hwMusaAdlPortCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 15, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaAdlPortCount.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaAdlPortCount.setDescription('Musa campus ADSL port count.')
hwMusaAdlFrameId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 15, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaAdlFrameId.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaAdlFrameId.setDescription('Musa campus ADL frame ID.')
hwMusaAdlSlotId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 15, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 15))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaAdlSlotId.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaAdlSlotId.setDescription('Musa campus ADL slot ID.')
hwMusaAdlPortId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 15, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 15))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaAdlPortId.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaAdlPortId.setDescription('Musa campus ADL port ID.')
hwMusaAdlVpi = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 15, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaAdlVpi.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaAdlVpi.setDescription('Musa campus VPI.')
hwMusaAdlVci = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 15, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(32, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaAdlVci.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaAdlVci.setDescription('Musa campus VCI.')
hwMusaToLanTrafficId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 15, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaToLanTrafficId.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaToLanTrafficId.setDescription('Musa campus to lan board traffic index.')
hwMusaFromLanTrafficId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 15, 1, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaFromLanTrafficId.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaFromLanTrafficId.setDescription('Musa campus from lan board traffic index.')
hwMusaAdlPortOperat = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 15, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("add", 0), ("del", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaAdlPortOperat.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaAdlPortOperat.setDescription('Musa campus PVC operation (0:add, 1:del).')
hwMusaOpticBandwidthTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 17), )
if mibBuilder.loadTexts: hwMusaOpticBandwidthTable.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaOpticBandwidthTable.setDescription('Musa Optic port bandwidth configuration table.')
hwMusaOpticBandwidthEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 17, 1), ).setIndexNames((0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaFrameIndex"), (0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaSlotIndex"), (0, "HUAWEI-MUSA-MA5100-MIB", "hwOIMPortIndex"))
if mibBuilder.loadTexts: hwMusaOpticBandwidthEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaOpticBandwidthEntry.setDescription('This list contains Musa optic port bandwidth Configuration.')
hwMusaUpOpticMainBandWidth = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 17, 1, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaUpOpticMainBandWidth.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaUpOpticMainBandWidth.setDescription('The main bandwidth (kbps) of the up optic port.')
hwMusaDnOpticMainBandWidth = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 17, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaDnOpticMainBandWidth.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaDnOpticMainBandWidth.setDescription('The main bandwidth (kbps) of the down optic port.')
hwMusaCurUsedUpBandWidth = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 17, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaCurUsedUpBandWidth.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaCurUsedUpBandWidth.setDescription('The currently used up bandwidth (kbps) of the optic port.')
hwMusaCurUsedDownBandWidth = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 17, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaCurUsedDownBandWidth.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaCurUsedDownBandWidth.setDescription('The currently used down bandwidth (kbps) of the optic port.')
hwMusaUpReservedBandWidth = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 17, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaUpReservedBandWidth.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaUpReservedBandWidth.setDescription('The up reserved bandwidth of the optic port.')
hwMusaDownReservedBandWidth = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 17, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaDownReservedBandWidth.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaDownReservedBandWidth.setDescription('The down reserved bandwidth of the optic port.')
hwMusaUpPracticalBandWidth = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 17, 1, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaUpPracticalBandWidth.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaUpPracticalBandWidth.setDescription('The up practical bandwidth of the optic port.')
hwMusaDownPracticalBandWidth = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 17, 1, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaDownPracticalBandWidth.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaDownPracticalBandWidth.setDescription('The down practical bandwidth of the optic port.')
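# --- Example (not part of the generated MIB) ---------------------------------
# A minimal sketch of deriving port utilization from the optic bandwidth
# columns above: currently used bandwidth against the configured main
# bandwidth, both in kbps. Purely illustrative arithmetic.
def _bandwidth_utilization_pct(used_kbps, main_kbps):
    if main_kbps <= 0:
        return 0.0  # unconfigured or invalid main bandwidth
    return 100.0 * used_kbps / main_kbps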
hwMusaTrafficCbrPcrTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 18), )
if mibBuilder.loadTexts: hwMusaTrafficCbrPcrTable.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaTrafficCbrPcrTable.setDescription('CBR traffic PCR parameter table.')
hwMusaTrafficCbrPcrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 18, 1), ).setIndexNames((0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaCbrPcrIndex"))
if mibBuilder.loadTexts: hwMusaTrafficCbrPcrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaTrafficCbrPcrEntry.setDescription("This list contains CBR traffic's PCR parameter.")
hwMusaCbrPcrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 18, 1, 1), Integer32())
if mibBuilder.loadTexts: hwMusaCbrPcrIndex.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaCbrPcrIndex.setDescription('The index of CBR traffic PCR parameter table.')
hwMusaCbrPcrValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 18, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaCbrPcrValue.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaCbrPcrValue.setDescription('The CBR traffic PCR parameter value.')
hwMusaCbrPcrRefCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 18, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaCbrPcrRefCount.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaCbrPcrRefCount.setDescription('The CBR traffic PCR parameter reference count.')
hwMusaTrafficRtvbrScrTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 19), )
if mibBuilder.loadTexts: hwMusaTrafficRtvbrScrTable.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaTrafficRtvbrScrTable.setDescription('rtVbr traffic SCR parameter table.')
hwMusaTrafficRtvbrScrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 19, 1), ).setIndexNames((0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaRtvbrScrIndex"))
if mibBuilder.loadTexts: hwMusaTrafficRtvbrScrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaTrafficRtvbrScrEntry.setDescription("This list contains rtVbr traffic's SCR parameter.")
hwMusaRtvbrScrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 19, 1, 1), Integer32())
if mibBuilder.loadTexts: hwMusaRtvbrScrIndex.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaRtvbrScrIndex.setDescription('The index of rtVbr traffic SCR parameter table.')
hwMusaRtvbrScrValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 19, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaRtvbrScrValue.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaRtvbrScrValue.setDescription('The rtVbr traffic SCR parameter value.')
hwMusaRtvbrScrRefCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 19, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaRtvbrScrRefCount.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaRtvbrScrRefCount.setDescription('The rtVbr traffic SCR parameter reference count.')
hwMusaPvcTrafficStatisTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 21), )
if mibBuilder.loadTexts: hwMusaPvcTrafficStatisTable.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaPvcTrafficStatisTable.setDescription('PVC traffic statistics table.')
hwMusaPvcTrafficStatisEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 21, 1), ).setIndexNames((0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaSPvcIndex"))
if mibBuilder.loadTexts: hwMusaPvcTrafficStatisEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaPvcTrafficStatisEntry.setDescription('PVC traffic statistics table entry.')
hwMusaUpStreamTrafficRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 21, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaUpStreamTrafficRx.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaUpStreamTrafficRx.setDescription('Upstream traffic Rx (cells).')
hwMusaUpStreamTrafficTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 21, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaUpStreamTrafficTx.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaUpStreamTrafficTx.setDescription('Upstream traffic Tx (cells).')
hwMusaDownStreamTrafficRx = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 21, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaDownStreamTrafficRx.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaDownStreamTrafficRx.setDescription('Downstream traffic Rx (cells).')
hwMusaDownStreamTrafficTx = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 21, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaDownStreamTrafficTx.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaDownStreamTrafficTx.setDescription('Downstream traffic Tx (cells).')
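# --- Example (not part of the generated MIB) ---------------------------------
# A minimal sketch of turning two successive reads of a Counter32 column
# (e.g. hwMusaUpStreamTrafficRx) into a per-interval cell count, allowing for
# a single 2**32 wrap between polls.
def _counter32_delta(previous, current):
    if current >= previous:
        return current - previous
    return current + (1 << 32) - previous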
hwMusaAllPvcConfTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22), )
if mibBuilder.loadTexts: hwMusaAllPvcConfTable.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaAllPvcConfTable.setDescription('The configuration table of all kinds of PVC.')
hwMusaAllPvcConfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1), ).setIndexNames((0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaTypeOfPvcPvp"), (0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaCidIndex"))
if mibBuilder.loadTexts: hwMusaAllPvcConfEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaAllPvcConfEntry.setDescription('This list contains configuration variables for all kinds of PVC.')
hwMusaCidIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaCidIndex.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaCidIndex.setDescription('The CID index.')
hwMusaSrcFrameId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaSrcFrameId.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSrcFrameId.setDescription('The frame ID of the source board.')
hwMuasSrcSlotId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMuasSrcSlotId.setStatus('mandatory')
if mibBuilder.loadTexts: hwMuasSrcSlotId.setDescription('The slot ID of the source board.')
hwMusaSrcPortVlanVccId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaSrcPortVlanVccId.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSrcPortVlanVccId.setDescription('The Port/VlanID/VCC Index of source board.')
hwMusaSrcOnuId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaSrcOnuId.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSrcOnuId.setDescription('The ONU ID of the source board.')
hwMusaSrcBoardVpi = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaSrcBoardVpi.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSrcBoardVpi.setDescription('Source board port VPI index.')
hwMusaSrcBoardVci = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaSrcBoardVci.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSrcBoardVci.setDescription('Source board VCI index.')
hwMusaSrcPortType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("uni", 0), ("sdt", 1), ("udt", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaSrcPortType.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSrcPortType.setDescription('The type of CESC port.')
hwMusaSrcCescChannelId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaSrcCescChannelId.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSrcCescChannelId.setDescription('Cesc channel Id.')
hwMusaSrcCescChannelBitmap = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaSrcCescChannelBitmap.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSrcCescChannelBitmap.setDescription('Cesc channel bitmap.')
hwMusaSrcCescFillDegree = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(20, 47))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaSrcCescFillDegree.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSrcCescFillDegree.setDescription('Cesc fill degree (20-47).')
hwMusaSrcFrcDlciType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 5))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaSrcFrcDlciType.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSrcFrcDlciType.setDescription('The ATM Logical Port DLCI.')
hwMusaSrcFrcIwfType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("network11", 0), ("service", 1), ("hdlc", 2), ("networkN1", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaSrcFrcIwfType.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSrcFrcIwfType.setDescription('The IWF (interworking function) type of the ATM logical port.')
hwMusaSrcFrcActiveStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("block", 0), ("unblock", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaSrcFrcActiveStatus.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSrcFrcActiveStatus.setDescription('The FRC ATM logical port BLOCK/UNBLOCK.')
hwMusaSrcFrcFreeBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 15), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaSrcFrcFreeBandwidth.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSrcFrcFreeBandwidth.setDescription('The ATM logical port free bandwidth.')
hwMusaSrcApcConnectAttribute = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 16), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaSrcApcConnectAttribute.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSrcApcConnectAttribute.setDescription('The APC connection attribute of the MMX (source) board.')
hwMusaSrcCescV35N = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 17), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaSrcCescV35N.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSrcCescV35N.setDescription('The N value of V35 subboard of CESC.')
hwMusaDestFrameId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 20), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaDestFrameId.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaDestFrameId.setDescription('Destination frame Index.')
hwMusaDestSlotId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 21), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaDestSlotId.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaDestSlotId.setDescription('Destination slot Index.')
hwMusaDestPortVlanVccId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 22), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaDestPortVlanVccId.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaDestPortVlanVccId.setDescription('Destination port Index.')
hwMusaDestOnuId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 23), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaDestOnuId.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaDestOnuId.setDescription('Destination ONU Index.')
hwMusaDestBoardVpi = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 24), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaDestBoardVpi.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaDestBoardVpi.setDescription('Destination Port vpi Index.')
hwMusaDestBoardVci = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 25), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaDestBoardVci.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaDestBoardVci.setDescription('Destination VCI Index.')
hwMusaDestPortType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 26), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("uni", 0), ("sdt", 1), ("udt", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaDestPortType.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaDestPortType.setDescription('The type of CESC port.')
hwMusaDestCescChannelId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 27), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaDestCescChannelId.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaDestCescChannelId.setDescription('Cesc channel Id')
hwMusaDestCescChannelBitmap = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 28), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaDestCescChannelBitmap.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaDestCescChannelBitmap.setDescription('Cesc channel Bitmap')
hwMusaDestCescFillDegree = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 29), Integer32().subtype(subtypeSpec=ValueRangeConstraint(20, 47))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaDestCescFillDegree.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaDestCescFillDegree.setDescription('Cesc fill degree (20-47).')
hwMusaDestFrcDlciType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 30), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 5))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaDestFrcDlciType.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaDestFrcDlciType.setDescription('The ATM Logical Port DLCI.')
hwMusaDestFrcIwfType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 31), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("network11", 0), ("service", 1), ("hdlc", 2), ("networkN1", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaDestFrcIwfType.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaDestFrcIwfType.setDescription('The IWF type of the ATM logical port.')
hwMusaDestFrcActiveStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 32), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("block", 0), ("unblock", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaDestFrcActiveStatus.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaDestFrcActiveStatus.setDescription('The FRC ATM logical port BLOCK/UNBLOCK.')
hwMusaDestFrcFreeBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 33), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaDestFrcFreeBandwidth.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaDestFrcFreeBandwidth.setDescription('The ATM logical port free bandwidth.')
hwMusaDestApcConnectAttribute = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 34), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaDestApcConnectAttribute.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaDestApcConnectAttribute.setDescription('The connect attribute of AIU to the source board.')
hwMusaDestCescV35N = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 35), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaDestCescV35N.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaDestCescV35N.setDescription('The N value of V35 subboard of CESC.')
hwMusaSrcToDestTraffic = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 38), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaSrcToDestTraffic.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaSrcToDestTraffic.setDescription('The source board to destination traffic.')
hwMusaDestToSrcTraffic = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 39), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaDestToSrcTraffic.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaDestToSrcTraffic.setDescription('The destination to source board traffic.')
hwMusaAllPvcOperater = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 40), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("add", 0), ("del", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaAllPvcOperater.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaAllPvcOperater.setDescription('Add or delete PVC.')
hwMusaTypeOfPvcPvp = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 41), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("pvc", 0), ("pvp", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaTypeOfPvcPvp.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaTypeOfPvcPvp.setDescription('Identifies the type of Pvc or Pvp.')
hwMusaPvcPvpState = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 22, 1, 42), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("normal", 1), ("invalid", 2), ("delete", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaPvcPvpState.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaPvcPvpState.setDescription('Pvc or Pvp state.')
hwMusaPvcCidTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 23), )
if mibBuilder.loadTexts: hwMusaPvcCidTable.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaPvcCidTable.setDescription('The CID table of all kinds of PVC.')
hwMusaPvcCidEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 23, 1), ).setIndexNames((0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaFrameIndex"), (0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaSlotIndex"), (0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaSrcPortVlanVccId"), (0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaSrcOnuId"), (0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaSrcBoardVpi"), (0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaSrcBoardVci"))
if mibBuilder.loadTexts: hwMusaPvcCidEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaPvcCidEntry.setDescription('This list contains the CID corresponding to the PVC.')
hwMusaPvcCid = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 23, 1, 1), Counter32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMusaPvcCid.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaPvcCid.setDescription('The CID corresponding to the PVC.')
hwMusaPatchOperateTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 24), )
if mibBuilder.loadTexts: hwMusaPatchOperateTable.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaPatchOperateTable.setDescription('Musa Mmx patch table.')
hwMusaPatchOperateEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 24, 1), ).setIndexNames((0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaPatchIdIndex"))
if mibBuilder.loadTexts: hwMusaPatchOperateEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaPatchOperateEntry.setDescription('This list contains Musa patch description variables.')
hwMusaPatchIdIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 24, 1, 1), Integer32())
if mibBuilder.loadTexts: hwMusaPatchIdIndex.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaPatchIdIndex.setDescription('The PatchId index.')
hwMusaPatchLoadProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 24, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("tftp", 1)))).setMaxAccess("writeonly")
if mibBuilder.loadTexts: hwMusaPatchLoadProtocol.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaPatchLoadProtocol.setDescription('The patch load protocol.')
hwMusaPatchLoadFilename = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 24, 1, 3), DisplayString()).setMaxAccess("writeonly")
if mibBuilder.loadTexts: hwMusaPatchLoadFilename.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaPatchLoadFilename.setDescription('The patch load filename.')
hwMusaPatchLoadSerIp = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 24, 1, 4), IpAddress()).setMaxAccess("writeonly")
if mibBuilder.loadTexts: hwMusaPatchLoadSerIp.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaPatchLoadSerIp.setDescription('The patch load server IP.')
hwMusaPatchOper = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 24, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("activate", 1), ("deactivate", 2), ("load", 3), ("remove", 4), ("run", 5)))).setMaxAccess("writeonly")
if mibBuilder.loadTexts: hwMusaPatchOper.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaPatchOper.setDescription('The patch load operation type.')
hwMusaPatchTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 25), )
if mibBuilder.loadTexts: hwMusaPatchTable.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaPatchTable.setDescription('Musa Mmx patch table.')
hwMusaPatchEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 25, 1), ).setIndexNames((0, "HUAWEI-MUSA-MA5100-MIB", "hwMusaPatchIdIndex"))
if mibBuilder.loadTexts: hwMusaPatchEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaPatchEntry.setDescription('This list contains Musa patch description variables.')
hwMusaPatchShowIdIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 25, 1, 1), Integer32())
if mibBuilder.loadTexts: hwMusaPatchShowIdIndex.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaPatchShowIdIndex.setDescription('The PatchId index.')
hwMusaPatchCRC = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 25, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaPatchCRC.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaPatchCRC.setDescription('The Patch CRC.')
hwMusaPatchType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 25, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("c-Commonpatch", 1), ("t-Temporarypatch", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaPatchType.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaPatchType.setDescription('The Patch type.')
hwMusaPatchState = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 25, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("run", 1), ("activate", 2), ("deactivate", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaPatchState.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaPatchState.setDescription('The Patch state.')
hwMusaPatchCodeAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 25, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaPatchCodeAddress.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaPatchCodeAddress.setDescription('The Patch code address.')
hwMusaPatchCodeLength = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 25, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaPatchCodeLength.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaPatchCodeLength.setDescription('The Patch code length.')
hwMusaPatchDataAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 25, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaPatchDataAddress.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaPatchDataAddress.setDescription('The Patch data address.')
hwMusaPatchDataLength = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 25, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaPatchDataLength.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaPatchDataLength.setDescription('The Patch data length.')
hwMusaPatchFunctionNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 1, 25, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMusaPatchFunctionNumber.setStatus('mandatory')
if mibBuilder.loadTexts: hwMusaPatchFunctionNumber.setDescription('The Patch function number.')
hwMa5100EndOfMib = MibScalar((1, 3, 6, 1, 4, 1, 2011, 2, 6, 5, 100, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwMa5100EndOfMib.setStatus('mandatory')
if mibBuilder.loadTexts: hwMa5100EndOfMib.setDescription('End of HW MA5100 mib')
mibBuilder.exportSymbols("HUAWEI-MUSA-MA5100-MIB", hwMusaSetUtopiaLoop=hwMusaSetUtopiaLoop, hwMusaIpAddr=hwMusaIpAddr, hwMusaLoadType=hwMusaLoadType, hwMusaAllPvcOperater=hwMusaAllPvcOperater, hwMusaMcr=hwMusaMcr, hwMusaFrameNumber=hwMusaFrameNumber, hwMusaSysTime=hwMusaSysTime, hwMusaWarningDesc=hwMusaWarningDesc, hwMusaDownStreamTrafficTx=hwMusaDownStreamTrafficTx, hwMusaAdlPortId=hwMusaAdlPortId, hwMusaQueryCurOOFNum=hwMusaQueryCurOOFNum, hwMusaFrameConfTable=hwMusaFrameConfTable, hwMusaOpticBandwidthTable=hwMusaOpticBandwidthTable, hwMusaRecordState=hwMusaRecordState, hwMusaNmsPvcIndex=hwMusaNmsPvcIndex, hwMusaNmsRxTraffic=hwMusaNmsRxTraffic, hwMusaVlanIciIndex=hwMusaVlanIciIndex, hwMusaDestToSrcTraffic=hwMusaDestToSrcTraffic, hwMusaOimOpticEntry=hwMusaOimOpticEntry, hwMusaIpAddrPermitTable=hwMusaIpAddrPermitTable, hwMusaDestFrameId=hwMusaDestFrameId, hwMusaIpAddrRejectEntry=hwMusaIpAddrRejectEntry, hwMusaUpPracticalBandWidth=hwMusaUpPracticalBandWidth, hwMusaIpPermitTableId=hwMusaIpPermitTableId, hwOIMPortIndex=hwOIMPortIndex, hwMusaSrcFrameId=hwMusaSrcFrameId, hwMusaShelfNumber=hwMusaShelfNumber, hwMusaNmsIp=hwMusaNmsIp, hwMusaSlotNumbers=hwMusaSlotNumbers, hwMusaWarningNmsCtrl=hwMusaWarningNmsCtrl, hwMusaDestFrcDlciType=hwMusaDestFrcDlciType, hwMusaQueryCurCorrectHECNum=hwMusaQueryCurCorrectHECNum, hwMusaQueryCurLFEBE=hwMusaQueryCurLFEBE, hwMusaDestSlotId=hwMusaDestSlotId, hwMusaSrcFrcActiveStatus=hwMusaSrcFrcActiveStatus, hwMusaWarningLevel=hwMusaWarningLevel, hwMusaEthernetFirewall=hwMusaEthernetFirewall, hwMusaInsertLOF=hwMusaInsertLOF, hwMusaVlanId=hwMusaVlanId, hwMusaSysRouteEntry=hwMusaSysRouteEntry, hwMusaRtvbrScrRefCount=hwMusaRtvbrScrRefCount, hwMusaSlotGroup=hwMusaSlotGroup, hwMusaSrcPortVlanVccId=hwMusaSrcPortVlanVccId, hwMusaPatchLoadProtocol=hwMusaPatchLoadProtocol, hwMusaAtmIpMask=hwMusaAtmIpMask, hwMusaOpticConvergentRate=hwMusaOpticConvergentRate, hwMusaFrameIndex=hwMusaFrameIndex, hwMusaWarningID=hwMusaWarningID, hwMusaNmsIpAddr=hwMusaNmsIpAddr, hwMusaSlotDownPracticalBandWidth=hwMusaSlotDownPracticalBandWidth, hwMusaIpAddrPermitOper=hwMusaIpAddrPermitOper, hwMusaLoadTftpServerIp=hwMusaLoadTftpServerIp, hwMusaWarningCtrlTable=hwMusaWarningCtrlTable, hwMusaInsertLRDI=hwMusaInsertLRDI, hwMusaSrcPortType=hwMusaSrcPortType, hwMusaNmsTxTraffic=hwMusaNmsTxTraffic, hwMusaNmsPvcConfTable=hwMusaNmsPvcConfTable, hwMusaFrameType=hwMusaFrameType, hwMusaSlotCardAdminStatus=hwMusaSlotCardAdminStatus, hwMusaShelf=hwMusaShelf, hwMusaOamGroup=hwMusaOamGroup, hwMusaNmsID=hwMusaNmsID, hwMusaWarn15MinThreshold=hwMusaWarn15MinThreshold, hwMusaNmsParaConfTable=hwMusaNmsParaConfTable, hwMusaAdlVpi=hwMusaAdlVpi, hwMusaIpAddrRejectOper=hwMusaIpAddrRejectOper, hwMusaSlotNumber=hwMusaSlotNumber, hwMusaBoardMaxBandwidth=hwMusaBoardMaxBandwidth, hwMusaGateIp=hwMusaGateIp, hwMusaSrcFrcIwfType=hwMusaSrcFrcIwfType, hwMusaSlotUpPracticalBandWidth=hwMusaSlotUpPracticalBandWidth, hwMa5100Mib=hwMa5100Mib, hwMusaQueryCurBIP1=hwMusaQueryCurBIP1, hwMusaClearAllAlarmStat=hwMusaClearAllAlarmStat, hwMusaSysRouteIndex=hwMusaSysRouteIndex, hwMusaPvcCidTable=hwMusaPvcCidTable, hwMusaAllPvcConfTable=hwMusaAllPvcConfTable, hwMusaFromLanTrafficId=hwMusaFromLanTrafficId, hwMusaSrcCescChannelId=hwMusaSrcCescChannelId, hwMusaLoadProtocol=hwMusaLoadProtocol, hwMusaInsertBIP1=hwMusaInsertBIP1, hwMusaPatchLoadSerIp=hwMusaPatchLoadSerIp, hwMusaPatchShowIdIndex=hwMusaPatchShowIdIndex, hwMusaTrafficCbrPcrEntry=hwMusaTrafficCbrPcrEntry, hwMusaPatchOper=hwMusaPatchOper, hwMusaPatchFunctionNumber=hwMusaPatchFunctionNumber, 
hwMusaRejectBeginIp=hwMusaRejectBeginIp, hwMusaSlotCardVersion=hwMusaSlotCardVersion, hwMusaPvcTrafficStatisEntry=hwMusaPvcTrafficStatisEntry, hwMusaBoardCellLossPriority=hwMusaBoardCellLossPriority, hwMusaClp01pcr=hwMusaClp01pcr, hwMusaEndOfMib=hwMusaEndOfMib, hwMusaNmsParaConfEntry=hwMusaNmsParaConfEntry, hwMusaResetSlaveMMX=hwMusaResetSlaveMMX, hwMusaPatchCRC=hwMusaPatchCRC, hwMusaShelfConfEntry=hwMusaShelfConfEntry, hwMusaTrafficIndex=hwMusaTrafficIndex, hwMusaUpStreamTrafficTx=hwMusaUpStreamTrafficTx, hwMusaSlotIpAddress=hwMusaSlotIpAddress, hwMusaShelfName=hwMusaShelfName, hwMusaPvcTrafficStatisTable=hwMusaPvcTrafficStatisTable, hwMusaSrcBoardVpi=hwMusaSrcBoardVpi, hwMusaSlotConfEntry=hwMusaSlotConfEntry, hwMusaTrafficEntry=hwMusaTrafficEntry, hwMusaToLanTrafficId=hwMusaToLanTrafficId, hwMusaUpStreamTrafficRx=hwMusaUpStreamTrafficRx, hwMusaTrafficType=hwMusaTrafficType, hwMusaDestFrcActiveStatus=hwMusaDestFrcActiveStatus, hwMusaMbs=hwMusaMbs, hwMusaNmsName=hwMusaNmsName, hwMusaQueryCurReceiveCellNum=hwMusaQueryCurReceiveCellNum, DisplayString=DisplayString, hwMusaFrameUsedBandWidth=hwMusaFrameUsedBandWidth, hwMusaResetSys=hwMusaResetSys, hwMusaFrameName=hwMusaFrameName, hwMusaQueryCurBIP3=hwMusaQueryCurBIP3, hwMusaPatchOperateTable=hwMusaPatchOperateTable, hwMusaFrameBandWidth=hwMusaFrameBandWidth, hwMusaRejectEndIp=hwMusaRejectEndIp, hwMusaTrafficRtvbrScrTable=hwMusaTrafficRtvbrScrTable, hwMusaOimPhyTable=hwMusaOimPhyTable, hwMusaGetCommunity=hwMusaGetCommunity, hwMusaNmsSarVci=hwMusaNmsSarVci, hwMusaIpRejectTableId=hwMusaIpRejectTableId, hwMusaAtmIpAddr=hwMusaAtmIpAddr, hwMusaAdlVci=hwMusaAdlVci, hwMusaPatchCodeLength=hwMusaPatchCodeLength, hwMusaLoadRateTable=hwMusaLoadRateTable, hwMusaNmsOperState=hwMusaNmsOperState, hwMusaSlotCardOperStatus=hwMusaSlotCardOperStatus, hwMusaSlotIndex=hwMusaSlotIndex, hwMusaDestFrcIwfType=hwMusaDestFrcIwfType, hwMusaPatchCodeAddress=hwMusaPatchCodeAddress, hwMusaLoadRate=hwMusaLoadRate, hwMusaShelfIndex=hwMusaShelfIndex, hwMusaTrafficRtvbrScrEntry=hwMusaTrafficRtvbrScrEntry, hwMusaSrcBoardVci=hwMusaSrcBoardVci, hwMusaCurUsedUpBandWidth=hwMusaCurUsedUpBandWidth, hwMusaSysCpuRatio=hwMusaSysCpuRatio, hwMusaSrcCescV35N=hwMusaSrcCescV35N, hwMusaPatchType=hwMusaPatchType, hwMusaDstIpMask=hwMusaDstIpMask, hwMusaCbrPcrIndex=hwMusaCbrPcrIndex, hwMusaDestBoardVci=hwMusaDestBoardVci, hwMusaIpMask=hwMusaIpMask, hwMusaDnOpticMainBandWidth=hwMusaDnOpticMainBandWidth, hwMusaPvcCidEntry=hwMusaPvcCidEntry, hwMusaLoadFileName=hwMusaLoadFileName, hwMusaOpticBandwidthEntry=hwMusaOpticBandwidthEntry, hwMusaSetLineLoop=hwMusaSetLineLoop, hwMusaUpOpticMainBandWidth=hwMusaUpOpticMainBandWidth, hwMusaTrafficTable=hwMusaTrafficTable, hwMusaWarningCtrlEntry=hwMusaWarningCtrlEntry, hwMusaNmsRelayVci=hwMusaNmsRelayVci, hwMusaDownPracticalBandWidth=hwMusaDownPracticalBandWidth, hwMusaMtu=hwMusaMtu, hwMusaDestBoardVpi=hwMusaDestBoardVpi, hwMusaPatchOperateEntry=hwMusaPatchOperateEntry, hwMusaShelfType=hwMusaShelfType, hwMusaDownReservedBandWidth=hwMusaDownReservedBandWidth, hwMusaInsertLOS=hwMusaInsertLOS, hwMusaQueryCurLOCDNum=hwMusaQueryCurLOCDNum, hwMusaTypeOfPvcPvp=hwMusaTypeOfPvcPvp, hwMusaNmsPvcConfEntry=hwMusaNmsPvcConfEntry, hwMusaPatchState=hwMusaPatchState, hwMusaPvcPvpState=hwMusaPvcPvpState, hwMusaDevice=hwMusaDevice, hwMusaPermitIpMask=hwMusaPermitIpMask, hwMusaClp01scr=hwMusaClp01scr, hwMusaClp0scr=hwMusaClp0scr, hwMusaRejectIpMask=hwMusaRejectIpMask, hwMusaIpAddrPermitEntry=hwMusaIpAddrPermitEntry, hwMusaSlot=hwMusaSlot, 
hwMusaCampusPvcConfEntry=hwMusaCampusPvcConfEntry, hwMusaRtvbrScrIndex=hwMusaRtvbrScrIndex, hwMusaWarn24HourThreshold=hwMusaWarn24HourThreshold, hwMusaOperat=hwMusaOperat, hwMusaCampusPvcConfTable=hwMusaCampusPvcConfTable, hwMusaQueryCurNonCorrectHECNum=hwMusaQueryCurNonCorrectHECNum, hwMusaUpReservedBandWidth=hwMusaUpReservedBandWidth, hwMusaAdlPortOperat=hwMusaAdlPortOperat, hwMusaDownStreamTrafficRx=hwMusaDownStreamTrafficRx, hwMusaSysMib=hwMusaSysMib, hwMusaDstIp=hwMusaDstIp, hwMusaSetCommunity=hwMusaSetCommunity, hwMusaQueryMemory=hwMusaQueryMemory, hwMusaCDVT=hwMusaCDVT, hwMusaInsertBIP3=hwMusaInsertBIP3, hwMusaSlotUsedDownBandWidth=hwMusaSlotUsedDownBandWidth, hwMusaAdlSlotId=hwMusaAdlSlotId, hwMusaSlotCardSerial=hwMusaSlotCardSerial, hwMusaHostVersion=hwMusaHostVersion, hwMusaSlotConfTable=hwMusaSlotConfTable, hwMusaSlotUpBandWidth=hwMusaSlotUpBandWidth, hwMusaSlotUsedUpBandWidth=hwMusaSlotUsedUpBandWidth, hwMusaFrameNumbers=hwMusaFrameNumbers, hwMusaOimOpticTable=hwMusaOimOpticTable, hwMusaSysRouteTable=hwMusaSysRouteTable, hwMusaDestOnuId=hwMusaDestOnuId, hwMusaCbrPcrValue=hwMusaCbrPcrValue, hwMusaPatchLoadFilename=hwMusaPatchLoadFilename, hwMusaSrcCescFillDegree=hwMusaSrcCescFillDegree, hwMusaSrcApcConnectAttribute=hwMusaSrcApcConnectAttribute, hwMusaCidIndex=hwMusaCidIndex, hwMuasSrcSlotId=hwMuasSrcSlotId, hwMusaMacAddr=hwMusaMacAddr, hwMusaRtvbrScrValue=hwMusaRtvbrScrValue, hwMusaNmsStyle=hwMusaNmsStyle, hwMusaSlotDescript=hwMusaSlotDescript, hwMusaClearOIMErrEventStat=hwMusaClearOIMErrEventStat, hwMusaSlotCardType=hwMusaSlotCardType, hwMusaServiceClass=hwMusaServiceClass, hwMusaCbrPcrRefCount=hwMusaCbrPcrRefCount, hwMusaSrcFrcDlciType=hwMusaSrcFrcDlciType, hwMusaDestCescChannelBitmap=hwMusaDestCescChannelBitmap, hwMusaCpuOccupyRate=hwMusaCpuOccupyRate, hwMusaPermitEndIp=hwMusaPermitEndIp, hwMusaNmsPortVlanId=hwMusaNmsPortVlanId, hwMusaWarningIsCount=hwMusaWarningIsCount, hwMusaSysRouteOper=hwMusaSysRouteOper, hwMusaDestPortType=hwMusaDestPortType, hwMusaDestCescV35N=hwMusaDestCescV35N, hwMusaDestApcConnectAttribute=hwMusaDestApcConnectAttribute, hwMusaPermitBeginIp=hwMusaPermitBeginIp, hwMusaNmsENCAP=hwMusaNmsENCAP, hwMusaWarningEngDesc=hwMusaWarningEngDesc, hwMusaPatchIdIndex=hwMusaPatchIdIndex, hwMusaAllPvcConfEntry=hwMusaAllPvcConfEntry, hwMusaPvcCid=hwMusaPvcCid, hwMusaCurUsedDownBandWidth=hwMusaCurUsedDownBandWidth, hwMusaNextTrafficIndex=hwMusaNextTrafficIndex, hwMusaInsertPAIS=hwMusaInsertPAIS, hwMusaSrcOnuId=hwMusaSrcOnuId, hwMusaPatchEntry=hwMusaPatchEntry, hwMusaPatchDataLength=hwMusaPatchDataLength, hwMusaTrafficCbrPcrTable=hwMusaTrafficCbrPcrTable, hwMusaInsertLAIS=hwMusaInsertLAIS, hwMusaPatchDataAddress=hwMusaPatchDataAddress, hwMa5100EndOfMib=hwMa5100EndOfMib, hwMusaRefCount=hwMusaRefCount, hwMusaPatchTable=hwMusaPatchTable, hwMusaLoadOperType=hwMusaLoadOperType, hwMusaCellbusID=hwMusaCellbusID, hwMusaNmsFrameId=hwMusaNmsFrameId, hwMusaNmsLLCVC=hwMusaNmsLLCVC, hwMusaNmsStatus=hwMusaNmsStatus, hwMusaFrameConfEntry=hwMusaFrameConfEntry, hwMusaSrcCescChannelBitmap=hwMusaSrcCescChannelBitmap, hwMusaSysDate=hwMusaSysDate, hwMusaQueryCurUnmatchCellNum=hwMusaQueryCurUnmatchCellNum, hwMusaNmsRelayVpi=hwMusaNmsRelayVpi, hwMusaDestPortVlanVccId=hwMusaDestPortVlanVccId, hwMusaGetSetPort=hwMusaGetSetPort, hwMusaSrcToDestTraffic=hwMusaSrcToDestTraffic)
mibBuilder.exportSymbols("HUAWEI-MUSA-MA5100-MIB", hwMusaIpAddrRejectTable=hwMusaIpAddrRejectTable, hwMusaShelfConfTable=hwMusaShelfConfTable, hwMusaGatewayIpAddr=hwMusaGatewayIpAddr, hwMusaSetSrcLoop=hwMusaSetSrcLoop, hwMusaAdlPortCount=hwMusaAdlPortCount, hwMusaWarningTerminalCtrl=hwMusaWarningTerminalCtrl, hwMusaOimPhyEntry=hwMusaOimPhyEntry, hwMusaDestFrcFreeBandwidth=hwMusaDestFrcFreeBandwidth, hwMusaQueryCurPFEBE=hwMusaQueryCurPFEBE, hwMusaClp0pcr=hwMusaClp0pcr, hwMusaNmsSlotId=hwMusaNmsSlotId, hwMusaLoadContent=hwMusaLoadContent, hwMusaDestCescChannelId=hwMusaDestCescChannelId, hwMusaNmsPvcOper=hwMusaNmsPvcOper, hwMusaFrame=hwMusaFrame, hwMusaQueryCurBIP2=hwMusaQueryCurBIP2, hwMusaAdlFrameId=hwMusaAdlFrameId, hwMusaSlotDownBandWidth=hwMusaSlotDownBandWidth, hwMusaSrcFrcFreeBandwidth=hwMusaSrcFrcFreeBandwidth, hwMusaInsertBIP2=hwMusaInsertBIP2, hwMusaDestCescFillDegree=hwMusaDestCescFillDegree, hwMusaBiosVersion=hwMusaBiosVersion, hwMusaLoadRateEntry=hwMusaLoadRateEntry, hwMusaTrapPort=hwMusaTrapPort, hwMusaQueryCurSendCellNum=hwMusaQueryCurSendCellNum)
| [
"[email protected]"
]
| |
a098ada26a3eadfefcb12e2b1491533b9979db93 | 49e72df481bec1501202d7411a55b765c33355ba | /luminar project/functional_programming/list_comprension.py | c1b8cb686c667cb27c47cdcc0d73f8fa7a8b1deb | []
| no_license | JEENUMINI/pythonpgmsupdated | ae6d62bc58e1d44ba81a21637335140119c76869 | 4816ec24693034af36d4b76887d34c9a499f4cc8 | refs/heads/main | 2023-01-23T13:36:28.478938 | 2020-12-15T18:18:07 | 2020-12-15T18:18:07 | 321,749,979 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 334 | py | lst=[1,2,3,4,5,6]
#squares
squares=[i*i for i in lst]
print(squares)
square2=[i**2 for i in lst]
print(square2)
# fetch even numbers from the list
even=[i for i in lst if i%2==0]
print(even)
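# illustrative: the explicit loop equivalent of the comprehension above
even_loop = []
for i in lst:
    if i % 2 == 0:
        even_loop.append(i)
print(even_loop)  # same output as even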
# where to place the conditional in a comprehension
# task=[i+1 if i>5 else i-1 for i in lst]
# print(task)
task=[i+1 if i>5 else (i-1 if i<5 else i) for i in lst]
print(task) | [
"[email protected]"
]
| |
ec1776e8742cedfafef2050a4f6d43076bd74404 | 7b3b859dd633eb2240d987b37e487ea8388e2f8d | /core/main.py | a9b413f579a53f5bd8f1dcc138a83420fc140c2b | []
| no_license | yszhuang/assetPricing2 | 96956638f6c26e4e7d33e0abffe5c5c14460000a | 10af01a66bcd13cb516920e9cb1b46d8cfa6b598 | refs/heads/master | 2022-01-13T02:00:09.070100 | 2018-09-01T02:28:21 | 2018-09-01T02:28:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 30,113 | py | # -*-coding: utf-8 -*-
# Python 3.6
# Author:Zhang Haitao
# Email:[email protected]
# TIME:2018-04-28 14:58
# NAME:assetPricing2-main.py
import os
import shutil
import time
import pandas as pd
import numpy as np
from config import WINSORIZE_LIMITS
from data.dataApi import Database, Benchmark
from tool import monitor, summary_statistics, cal_corr, cal_persistence, \
cal_breakPoints, count_groups, my_average, \
assign_port_id, famaMacBeth, apply_col_by_col, newey_west, correlation_mixed
from zht.utils.mathu import winsorize, get_outer_frame
DATA=Database(sample_control=True) #TODO: use controlled data
# In the fm function,independent variables are winsorized,so we do not need to filter the raw data.
def combine_with_datalagged(indicators,sample_control=True):
datalagged=Database(sample_control).by_indicators(indicators + ['weight'])
datat = Database(sample_control).by_indicators(['stockEretM'])
'''
    Sort the lagged characteristics to construct portfolios.
    Notice:
        before shift(1), we must groupby('sid').
    '''
    # trick: shift(1) costs us the first month's sample; upgrade this
    # function to take the shift into consideration.
comb = pd.concat([datalagged.groupby('sid').shift(1), datat],axis=1)
return comb
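# Hedged usage sketch for combine_with_datalagged (the indicator names below
# are assumptions, not necessarily present in this Database):
#   comb = combine_with_datalagged(['size', 'bm'])
# Each row of comb then pairs month-t characteristics (plus 'weight') with the
# month t+1 excess return 'stockEretM', so portfolio sorts use lagged data.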
@apply_col_by_col
def adjust_with_riskModel(x, riskmodel=None):
'''
use risk model to adjust the the alpha,
the risk model can be None (unadjusted) or one of [capm,ff3,ffc,ff5,hxz4]
:param x:
:param riskmodel:one of ['capmM', 'ff3M', 'ffcM', 'ff5M', 'hxz4M']
:return:
'''
lags=5
d={'capm':'capmM',
'ff3':'ff3M',
'ffc':'ffcM',
'ff5':'ff5M',
'hxz4':'hxz4M'}
df = pd.DataFrame(x)
df.columns = ['y']
if riskmodel in d.keys():
'''
we do not need to shift the time index,the index in df denotes time t+1 (the indicators
have been shifted forward),so,here the time for Stock excess return is consistent with
the time for benchmarks.Both of them are from time t+1.
'''
bench=Benchmark().by_benchmark(riskmodel)
df=pd.concat([df,bench],axis=1)
formula='y ~ '+' + '.join(bench.columns.tolist())
nw = newey_west(formula, df, lags)
return nw['Intercept'].rename(index={'coef':'alpha_'+riskmodel,
't': 't_alpha_'+riskmodel})
else:
formula='y ~ 1'
nw = newey_west(formula, df, lags)
return nw['Intercept'].rename(index={'coef': 'excess return',
't': 't excess return'})
def risk_adjust(panel,riskmodels=None):
'''
risk adjusted alpha
:param panel:
:return:
'''
if riskmodels is None:
riskmodels=[None,'capm','ff3','ffc','ff5','hxz4']
return pd.concat([adjust_with_riskModel(panel,rm)
for rm in riskmodels],axis=0)
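# Hedged example: for a DataFrame `panel` of monthly portfolio excess returns
# (one column per portfolio), the call below stacks the raw mean-return row
# and the alpha/t-stat rows of each selected risk model into one table:
#   tbl = risk_adjust(panel, riskmodels=[None, 'capm', 'ff3'])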
class OneFactor:
q=10
def __init__(self, factor,path):
self.factor=factor
self.path=path #project path
self.indicators=DATA.info[factor]
self.df=DATA.by_indicators(self.indicators)
self.groupnames=[self.factor+str(i) for i in range(1,self.q+1)]
self._build_environment()
self.results={}
def _build_environment(self):
if os.path.exists(self.path):
shutil.rmtree(self.path)
time.sleep(0.1)
os.makedirs(self.path)
@monitor
def summary(self):
series=[]
for indicator in self.indicators:
s=summary_statistics(self.df[indicator].unstack())
series.append(s.mean())
# pd.concat(series,keys=self.indicators,axis=1).to_csv(os.path.join(self.path,'summary.csv'))
self.results['summary']=pd.concat(series,keys=self.indicators,axis=1)
@monitor
def correlation(self,indicators=None):
if indicators is None:
comb=self.df
else:
comb=DATA.by_indicators(indicators)
corr=correlation_mixed(comb)
# corr.to_csv(os.path.join(self.path, 'corr.csv'))
self.results['corr']=corr
@monitor
def persistence(self):
# TODO: Table II of Asness, Clifford S., Andrea Frazzini, and Lasse Heje Pedersen. “Quality Minus Junk.” SSRN Scholarly Paper. Rochester, NY: Social Science Research Network, June 5, 2017. https://papers.ssrn.com/abstract=2312432.
perdf = pd.DataFrame()
for indicator in self.indicators:
per = cal_persistence(self.df[indicator].unstack(),
offsets=[1, 3, 6, 12, 24, 36, 48, 60, 120])
perdf[indicator] = per
# perdf.to_csv(os.path.join(self.path, 'persistence.csv'))
self.results['persistence']=perdf
@monitor
def breakPoints_and_countGroups(self):
dfs_bp = []
dfs_count = []
for indicator in self.indicators:
d = self.df[indicator].unstack()
# there is no samples for some months due to festival
# TODO: how to set the thresh?
d = d.dropna(axis=0,thresh=self.q * 10)
bps = cal_breakPoints(d, self.q)
dfs_bp.append(bps)
count = count_groups(d, self.q)
dfs_count.append(count)
result_bp = pd.concat(dfs_bp, keys=self.indicators, axis=0)
result_count = pd.concat(dfs_count, keys=self.indicators, axis=0)
# result_bp.to_csv(os.path.join(self.path, 'breakPoints.csv'))
# result_count.to_csv(os.path.join(self.path, 'count.csv'))
self.results['breakPoints']=result_bp
self.results['count']=result_count
# TODO:In fact,the count is not exactly the number of stocks to calculate the weighted return
# TODO:as some stocks will be deleted due to the missing of weights.
def _get_port_data(self, indicator):
groupid = DATA.by_indicators([indicator])
#trick: pd.qcut will just ignore NaNs,but if all the values are NaNs it will throw an error
try:
groupid['g'] = groupid.groupby('t', group_keys=False).apply(
lambda df: pd.qcut(df[indicator], self.q,
labels=[indicator + str(i) for i in range(1, self.q + 1)],
duplicates='raise')#trick: drop the duplicated bins
)
except ValueError:#trick:qcut with non unique values https://stackoverflow.com/questions/20158597/how-to-qcut-with-non-unique-bin-edges
groupid['g'] = groupid.groupby('t', group_keys=False).apply(
lambda df: pd.qcut(df[indicator].rank(method='first'), self.q,
labels=[indicator + str(i) for i in range(1, self.q + 1)]) # trick: drop the duplicated bins
)
return groupid
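    # Illustrative mini-example of the qcut rank trick used above: with many
    # tied values pd.qcut(s, q) can raise on duplicate bin edges, whereas
    # pd.qcut(s.rank(method='first'), q) always yields q usable bins, e.g.
    #   s = pd.Series([1, 1, 1, 1, 2, 3])
    #   pd.qcut(s.rank(method='first'), 3, labels=['g1', 'g2', 'g3'])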
@monitor
def portfolio_characteristics(self, sortedIndicator, otherIndicators):
'''
as table 12.3 panel A
:param sortedIndicator:
:param otherIndicators:
:return:
'''
groupid = self._get_port_data(sortedIndicator)
comb = DATA.by_indicators(otherIndicators)
comb = pd.concat([groupid, comb], axis=1)
characteristics_avg = comb.groupby(['t', 'g']).mean().groupby('g').mean()
# characteristics_avg.to_csv(os.path.join(self.path, 'portfolio characteristics.csv'))
self.results['portfolio characteristics']=characteristics_avg
# TODO: upgrade this function
def _get_panel_stk_avg(self, comb, indicator, gcol):
panel_stk_eavg = comb.groupby(['t', gcol])['stockEretM'].mean() #equal weighted
if self.factor == 'size':
'''
when the factor is size,we also use the indicator (sort variable) as weight
Refer to page 159.
'''
panel_stk_wavg = comb.groupby(['t', gcol]).apply(
lambda df: my_average(df, 'stockEretM', wname=indicator)
)
else:
'''
the index denotes t+1,and the weight is from time t,
since we have shift weight forward in dataset.
'''
# def func(df):
# return my_average(df,'stockEretM',wname='weight')
#
# panel_stk_wavg=comb.groupby(['t',gcol]).apply(func)
panel_stk_wavg = comb.groupby(['t', gcol]).apply(
lambda df: my_average(df, 'stockEretM', wname='weight')
)
return panel_stk_eavg, panel_stk_wavg
@monitor
def portfolio_analysis(self):
'''
table 8.4
:return:
'''
comb=combine_with_datalagged(self.indicators)
# all_indicators = list(set(self.indicators + ['weight', 'stockEretM']))
# comb = DATA.by_indicators(all_indicators)
result_eavg = []
result_wavg = []
for indicator in self.indicators:
gcol = 'g_%s' % indicator
# comb[gcol]=comb.groupby('t').apply(
# lambda df:grouping(df[indicator].reset_index(level='t'),self.q,labels=self.groupnames))
comb[gcol] = comb.groupby('t', group_keys=False).apply(
lambda df: assign_port_id(df[indicator], self.q, self.groupnames))
# TODO:Add an alternative sorting method,that is,updating yearly as page 9 of Chen et al., “On the Predictability of Chinese Stock Returns.”
panel_stk_eavg, panel_stk_wavg = self._get_panel_stk_avg(comb, indicator, gcol)
for panel_stk in [panel_stk_eavg, panel_stk_wavg]:
panel = panel_stk.unstack(level=[gcol])
panel.columns = panel.columns.astype(str)
panel['_'.join([self.groupnames[-1], self.groupnames[0]])] = panel[self.groupnames[-1]] - panel[
self.groupnames[0]]
panel['avg'] = panel.mean(axis=1)
# TODO: use the risk models declared above
# part A
a_data = comb.groupby(['t', gcol])[indicator].mean()
a_data = a_data.unstack()
a_data.columns = a_data.columns.astype(str)
a_data.index = a_data.index.astype(str)
a_data['_'.join([self.groupnames[-1], self.groupnames[0]])] = a_data[self.groupnames[-1]] - a_data[
self.groupnames[0]]
a_data['avg'] = a_data.mean(axis=1)
a = a_data.mean()
a.name = 'avg'
a = a.to_frame().T
riskAdjusted = risk_adjust(panel)
            # TODO: something must be wrong with size or portfolio_analysis.
if panel_stk is panel_stk_eavg:
result_eavg.append(pd.concat([a, riskAdjusted], axis=0))
else:
result_wavg.append(pd.concat([a, riskAdjusted], axis=0))
table_e = pd.concat(result_eavg, axis=0, keys=self.indicators)
table_w = pd.concat(result_wavg, axis=0, keys=self.indicators)
# reorder the columns
initialOrder = table_e.columns.tolist()
h=self.groupnames+['avg']
newOrder=h+[col for col in initialOrder if col not in h]
# newOrder = self.groupnames + [col for col in initialOrder if col not in self.groupnames]
table_e = table_e.reindex(columns=newOrder)
table_w = table_w.reindex(columns=newOrder)
#mark the t values to facilitate the following analysis
table_e['significant_positive']=table_e.iloc[:,-1].map(lambda v:1 if v>2 else np.nan)
table_e['significant_negative']=table_e.iloc[:,-2].map(lambda v:-1 if v<-2 else np.nan)
table_w['significant_positive']=table_w.iloc[:,-1].map(lambda v:1 if v>2 else np.nan)
table_w['significant_negative']=table_w.iloc[:,-2].map(lambda v:-1 if v<-2 else np.nan)
# table_e.to_csv(os.path.join(self.path, 'univariate portfolio analysis-equal weighted.csv'))
# table_w.to_csv(os.path.join(self.path, 'univariate portfolio analysis-value weighted.csv'))
self.results['uni_port_analysis_eq']=table_e
self.results['uni_port_analysis_vw']=table_w
def _one_indicator(self, indicator):
ns = range(1, 13)
all_indicators=[indicator,'weight','stockEretM']
comb = DATA.by_indicators(all_indicators)
comb = comb.dropna()
try:
comb['g'] = comb.groupby('t', group_keys=False).apply(
lambda df: pd.qcut(df[indicator], self.q,
labels=[indicator + str(i) for i in range(1, self.q + 1)],
duplicates='raise')
)
except ValueError:#trick:qcut with non unique values https://stackoverflow.com/questions/20158597/how-to-qcut-with-non-unique-bin-edges
comb['g'] = comb.groupby('t', group_keys=False).apply(
lambda df: pd.qcut(df[indicator].rank(method='first'), self.q,
labels=[indicator + str(i) for i in range(1, self.q + 1)])
)
def _one_indicator_one_weight_type(group_ts, indicator):
def _big_minus_small(s, ind):
time = s.index.get_level_values('t')[0]
return s[(time, ind + str(self.q))] - s[(time, ind + '1')]
spread_data = group_ts.groupby('t').apply(lambda series: _big_minus_small(series, indicator))
s = risk_adjust(spread_data)
return s
        eret = comb['stockEretM'].unstack()  # bug fix: the excess-return column is named 'stockEretM', not 'eret'
s_es = []
s_ws = []
eret_names = []
for n in ns:
eret_name = 'eret_ahead%s' % (n + 1)
comb[eret_name] = eret.shift(-n).stack()
group_eavg_ts = comb.groupby(['t', 'g'])[eret_name].mean()
group_wavg_ts=comb.groupby(['t','g']).apply(
lambda df:my_average(df,eret_name,'weight')
)
# group_wavg_ts = comb.groupby(['t', 'g']).apply(
# lambda df: np.average(df[eret_name], weights=df['weight']))#fixme: what if there is nan values?
#TODO: If we are analyzing size,the weights should be the indicator
#we are analyzing,rather than weight
s_e = _one_indicator_one_weight_type(group_eavg_ts, indicator)
s_w = _one_indicator_one_weight_type(group_wavg_ts, indicator)
s_es.append(s_e)
s_ws.append(s_w)
eret_names.append(eret_name)
eq_table = pd.concat(s_es, axis=1, keys=eret_names)
vw_table = pd.concat(s_ws, axis=1, keys=eret_names)
return eq_table, vw_table
@monitor
def portfolio_anlayse_with_k_month_ahead_returns(self):
'''table 11.4'''
eq_tables = []
vw_tables = []
for indicator in self.indicators:
eq_table, vw_table = self._one_indicator(indicator)
eq_tables.append(eq_table)
vw_tables.append(vw_table)
print(indicator)
eq = pd.concat(eq_tables, axis=0, keys=self.indicators)
vw = pd.concat(vw_tables, axis=0, keys=self.indicators)
# eq.to_csv(os.path.join(self.path, 'univariate portfolio analysis_k-month-ahead-returns-eq.csv'))
# vw.to_csv(os.path.join(self.path, 'univariate portfolio analysis_k-month-ahead-returns-vw.csv'))
self.results['uni_port_analysis_ahead_k_eq']=eq
self.results['uni_port_analysis_ahead_k_vw']=vw
@monitor
def fm(self):
comb=combine_with_datalagged(self.indicators)
data = []
ps = []
for indicator in self.indicators:
subdf = comb[[indicator, 'stockEretM']]
subdf = subdf.dropna()
subdf.columns = ['y', 'x']
'''
(page 141)The independent variable is winsorized at a given level on a monthly basis.
(page 90)The independent variables are usually winsorized to ensure that a small number of extreme
independent variable values do not have a large effect on the results of the regression.
In some cases the dependent variable is also winsorized.When the dependent variable is a
security return or excess return,this variable is usually not winsorized.In most other
cases,it is common to winsorized the dependent variable.
'''
subdf['x'] = subdf.groupby('t')['x'].apply(lambda s: winsorize(s, limits=WINSORIZE_LIMITS))
subdf = subdf.reset_index()
formula = 'y ~ x'
            r, adj_r2, n, p = famaMacBeth(formula, 't', subdf, lags=5)
# TODO: why intercept tvalue is so large?
# TODO: why some fm regression do not have a adj_r2 ?
data.append([r.loc['x', 'coef'], r.loc['x', 'tvalue'],
r.loc['Intercept', 'coef'], r.loc['Intercept', 'tvalue'],
adj_r2, n])
ps.append(p['x'])
print(indicator)
result = pd.DataFrame(data, index=self.indicators,
columns=['slope', 't', 'Intercept', 'Intercept_t', 'adj_r2', 'n']).T
# result.to_csv(os.path.join(self.path, 'fama macbeth regression analysis.csv'))
parameters = pd.concat(ps, axis=1, keys=self.indicators)
# parameters.to_csv(os.path.join(self.path, 'fama macbeth regression parameters in first stage.csv'))
self.results['fm']=result
self.results['fm_first_stage']=parameters
@monitor
def parameter_ts_fig(self):
'''
        Tian Lihui (田利辉) and Wang Guanying (王冠英), "我国股票定价五因素模型" (A Five-Factor Pricing Model for China's Stock Market).
:return:
'''
#TODO: Why not plot rolling parameters?
parameters=self.results['fm_first_stage']
# parameters = pd.read_csv(os.path.join(self.path, 'fama macbeth regression parameters in first stage.csv'),
# index_col=[0], parse_dates=True)
parameters['zero'] = 0.0
for indicator in self.indicators:
s=parameters[indicator].dropna()
positive_ratio=(s>0).sum()/s.shape[0]
fig = parameters[[indicator, 'zero']].plot(title='positive ratio: {:.3f}'.format(positive_ratio)).get_figure()
fig.savefig(os.path.join(self.path, 'fm parameter ts fig-{}.png'.format(indicator)))
def save_results(self):
excelWriter=pd.ExcelWriter(os.path.join(self.path,self.factor+'.xlsx'))
for k in self.results:
self.results[k].to_excel(excelWriter,k)
def run(self):
self.summary()
self.correlation()
self.persistence()
self.breakPoints_and_countGroups()
# self.portfolio_characteristics()
self.portfolio_analysis()
self.fm()
self.parameter_ts_fig()
self.save_results()
def __call__(self):
self.run()
class Bivariate:
q=5
def __init__(self,indicator1,indicator2,proj_path):
self.indicator1=indicator1
self.indicator2=indicator2
self.path=proj_path
self._build_environment()
self.results={}
def _build_environment(self):
if os.path.exists(self.path):
shutil.rmtree(self.path)
os.makedirs(self.path)
def _get_independent_data(self):
# TODO: add the method of ratios such as [0.3,0.7]
comb=combine_with_datalagged([self.indicator1,self.indicator2])
comb=comb.dropna()
comb['g1']=comb.groupby('t',group_keys=False).apply(
lambda df:assign_port_id(df[self.indicator1], self.q,
[self.indicator1 + str(i) for i in range(1, self.q + 1)]))
comb['g2']=comb.groupby('t',group_keys=False).apply(
lambda df:assign_port_id(df[self.indicator2], self.q,
[self.indicator2 + str(i) for i in range(1,self.q + 1)]))
return comb
def _get_dependent_data(self,control,target):
'''
:param indicators:list with two elements,the first is the controlling variable
:return:
'''
comb=combine_with_datalagged([control,target])
comb=comb.dropna()
comb['g1']=comb.groupby('t',group_keys=False).apply(
lambda df:assign_port_id(df[control], self.q,
[control + str(i) for i in range(1,self.q + 1)]))
comb['g2']=comb.groupby(['t','g1'],group_keys=False).apply(
lambda df:assign_port_id(df[target], self.q,
[target + str(i) for i in range(1,self.q + 1)]))
return comb
def _get_eret(self,comb):
group_eavg_ts = comb.groupby(['g1', 'g2', 't'])['stockEretM'].mean()
group_wavg_ts = comb.groupby(['g1', 'g2', 't']).apply(
lambda df:my_average(df,'stockEretM','weight'))
return group_eavg_ts,group_wavg_ts
def _cal_portfolio_return(self, group_ts, controlGroup='g1', targetGroup='g2'):
# Table 9.6
controlIndicator = group_ts.index.get_level_values(controlGroup)[0][:-1]
targetName = group_ts.index.get_level_values(targetGroup)[0][:-1]
# A
a_data = group_ts.groupby(['t', controlGroup, targetGroup]).mean().unstack(level=[controlGroup])
a_data.columns = a_data.columns.astype(str)
# A1
a1_data = group_ts.groupby(['t', controlGroup, targetGroup]).mean().groupby(['t', targetGroup]).mean()
a_data[controlIndicator + ' avg'] = a1_data
_a = a_data.groupby(targetGroup).mean()
def _get_spread(df):
time = df.index.get_level_values('t')[0]
return df.loc[(time, targetName + str(self.q))] - df.loc[(time, targetName + '1')]
# B
b_data = a_data.groupby('t').apply(_get_spread)
_b1=adjust_with_riskModel(b_data)
_b2=adjust_with_riskModel(b_data,'capm')
_b1.index = [targetName + str(self.q) + '-1', targetName + str(self.q) + '-1 t']
_b2.index = [targetName + str(self.q) + '-1 capm alpha', targetName + str(self.q) + '-1 capm alpha t']
_a.index = _a.index.astype(str)
_a.columns = _a.columns.astype(str)
return pd.concat([_a, _b1, _b2], axis=0)
def _average_control_variable_portfolios(self, group_ts, controlGroup='g2', targetGroup='g1'):
# table 10.5 panel B
targetIndicator = group_ts.index.get_level_values(targetGroup)[0][:-1] # targetGroup
# controlIndicator = group_ts.index.get_level_values(controlGroup)[0][:-1] # controlGroup
a1_data = group_ts.groupby(['t', targetGroup, controlGroup]).mean().groupby(['t', targetGroup]).mean()
stk = a1_data.unstack()
stk.index = stk.index.astype(str)
stk.columns = stk.columns.astype(str)
stk[targetIndicator + str(self.q) + '-1'] = stk[targetIndicator + str(self.q)] - stk[targetIndicator + '1']
_a=adjust_with_riskModel(stk)
_b=adjust_with_riskModel(stk,'capm')
table = pd.concat([_a, _b], axis=0)
return table
def _independent_portfolio_analysis(self, group_ts):
# table 9.8
table1 = self._cal_portfolio_return(group_ts, controlGroup='g1', targetGroup='g2')
table2 = self._cal_portfolio_return(group_ts, controlGroup='g2', targetGroup='g1').T
table1, table2 = get_outer_frame([table1, table2])
table = table1.fillna(table2)
return table
@monitor
def independent_portfolio_analysis(self):
comb = self._get_independent_data()
group_eavg_ts, group_wavg_ts = self._get_eret(comb)
table_eavg = self._independent_portfolio_analysis(group_eavg_ts)
table_wavg = self._independent_portfolio_analysis(group_wavg_ts)
self.results['ind_eq_{}_{}'.format(self.indicator1,self.indicator2)]=table_eavg
self.results['ind_vw_{}_{}'.format(self.indicator1,self.indicator2)]=table_wavg
# table_eavg.to_csv(os.path.join(self.path,
# 'bivariate independent-sort portfolio analysis_equal weighted_%s_%s.csv' % (
# self.indicator1, self.indicator2)))
# table_wavg.to_csv(os.path.join(self.path,
# 'bivariate independent-sort portfolio analysis_value weighted_%s_%s.csv' % (
# self.indicator1, self.indicator2)))
@monitor
def dependent_portfolio_analysis(self):
def _f(control,target):
comb = self._get_dependent_data(control,target)
group_eavg_ts, group_wavg_ts = self._get_eret(comb)
table_eavg = self._cal_portfolio_return(group_eavg_ts)
table_wavg = self._cal_portfolio_return(group_wavg_ts)
self.results['de_eq_{}_{}'.format(control,target)]=table_eavg
self.results['de_vw_{}_{}'.format(control,target)]=table_wavg
# table_eavg.to_csv(os.path.join(self.path,
# 'bivariate dependent-sort portfolio analysis_equal weighted_%s_%s.csv' % (
# indicators[0], indicators[1])))
# table_wavg.to_csv(os.path.join(self.path,
# 'bivariate dependent-sort portfolio analysis_value weighted_%s_%s.csv' % (
# indicators[0], indicators[1])))
_f(self.indicator1,self.indicator2)
_f(self.indicator2,self.indicator1)
@monitor
def dependent_portfolio_analysis_twin(self):
'''table 10.5 panel B'''
def _f(control,target):
comb = self._get_dependent_data(control,target)
group_eavg_ts, group_wavg_ts = self._get_eret(comb)
table_eavg = self._average_control_variable_portfolios(group_eavg_ts)
table_wavg = self._average_control_variable_portfolios(group_wavg_ts)
self.results['de1_eq_{}_{}'.format(control,target)]=table_eavg
self.results['de1_vw_{}_{}'.format(control,target)]=table_wavg
# table_eavg.to_csv(os.path.join(self.path,
# 'bivariate dependent-sort portfolio analysis_twin_equal weighted_%s_%s.csv' % (
# indicators[0], indicators[1])))
# table_wavg.to_csv(os.path.join(self.path,
# 'bivariate dependent-sort portfolio analysis_twin_weighted_%s_%s.csv' % (
# indicators[0], indicators[1])))
_f(self.indicator1, self.indicator2)
_f(self.indicator2, self.indicator1)
@staticmethod
def famaMacbeth_reg(indeVars):
#TODO: upgrade fm in Univariate by calling this function
'''
(page 141)The independent variable is winsorized at a given level on a monthly basis.
(page 90)The independent variables are usually winsorized to ensure that a small number of extreme
independent variable values do not have a large effect on the results of the regression.
In some cases the dependent variable is also winsorized.When the dependent variable is a
security return or excess return,this variable is usually not winsorized.In most other
cases,it is common to winsorized the dependent variable.
'''
comb=combine_with_datalagged(indeVars)
comb=comb.dropna()
#trick: the data is already winsorized before calling dataApi
# winsorize
# comb[indeVars]=comb.groupby('t')[indeVars].apply(
# lambda x:winsorize(x,limits=WINSORIZE_LIMITS,axis=0))
namedict={inde:'name{}'.format(i) for i,inde in enumerate(indeVars)}
comb=comb.rename(columns=namedict)
formula = 'stockEretM ~ ' + ' + '.join(namedict.values())
# TODO:lags?
r, adj_r2, n, firstStage_params = famaMacBeth(formula, 't', comb, lags=5)
r = r.rename(index={v:k for k,v in namedict.items()})
# save the first stage regression parameters
firstStage_params = firstStage_params.rename(
columns={v:k for k,v in namedict.items()})
params = r[['coef', 'tvalue']].stack()
params.index = params.index.map('{0[0]} {0[1]}'.format)
params['adj_r2'] = adj_r2
params['n'] = n
return params,firstStage_params
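    # Hedged usage sketch (the indicator names are assumptions):
    #   params, first_stage = Bivariate.famaMacbeth_reg(['size', 'bm'])
    # `params` stacks coef/tvalue per regressor plus adj_r2 and n from the
    # monthly cross-sectional regressions; `first_stage` holds the
    # month-by-month slope estimates.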
def _fm(self,x):
'''
:param x: a list of list,or just a list contains the name of independent variables
:return:
'''
if isinstance(x[0],str):
p, firstStage_params = self.famaMacbeth_reg(x)
self.results['fm_para']=firstStage_params
self.results['fm']=p
# firstStage_params.to_csv(os.path.join(self.path, 'first stage parameters ' + '_'.join(x) + '.csv'))
# p.to_csv(os.path.join(os.path.join(self.path,'fama macbeth regression analysis.csv')))
if isinstance(x[0],list):
ps=[]
for indeVars in x:
p,firstStage_params=self.famaMacbeth_reg(indeVars)
self.results['fm_para']=firstStage_params
# firstStage_params.to_csv(os.path.join(self.path, 'first stage parameters ' + '_'.join(indeVars) + '.csv'))
ps.append(p)
table = pd.concat(ps, axis=1, keys=range(1, len(x) + 1),sort=True)
all_indeVars = list(set(var for l_indeVars in x for var in l_indeVars))
newIndex = [var + ' ' + suffix for var in all_indeVars for suffix in ['coef', 'tvalue']] + \
['Intercept coef', 'Intercept tvalue', 'adj_r2', 'n']
table = table.reindex(index=newIndex)
# table.to_csv(os.path.join(os.path.join(self.path, 'fama macbeth regression analysis.csv')))
self.results['fm']=table
def save_results(self):
excelWriter=pd.ExcelWriter(os.path.join(self.path,'{}_{}.xlsx'.format(self.indicator1,self.indicator2)))
for k in self.results:
self.results[k].to_excel(excelWriter,k)
#TODO: wrong!!!! For predictors with accounting data updated annually
| [
"[email protected]"
]
| |
c1ca9fea4aec41dcab2df0653fc3476363d164e9 | ecf6fe6aa87b2c3f041acc30fab11b0cafe3dd46 | /architecture_py/archi_v3_9.py | 096d9099c7efed8b00206453651eecc348653e9d | []
| no_license | antgratia/Memoire_code | 73c7806c4576c2e73e00d9a84b1063a2c8f6b559 | 2cdc1339ea24896a6628238f6467edff80f98166 | refs/heads/main | 2023-06-20T16:19:07.041464 | 2021-07-13T11:53:48 | 2021-07-13T11:53:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,178 | py |
import numpy as np
import os
from keras import backend as K
from tensorflow import keras
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras.models import Sequential, Model,load_model
from tensorflow.keras.layers import Input, Add, Dense, Activation, ZeroPadding2D, BatchNormalization, Flatten, Conv2D, AveragePooling2D, MaxPooling2D, GlobalMaxPooling2D, GlobalAveragePooling2D, MaxPool2D, Concatenate, Dropout
from tensorflow.keras.initializers import glorot_uniform
from tensorflow.keras.utils import plot_model
import tensorflow as tf
import sys
import traceback
import csv
from time import time
type_archi = 'ALL'
epsilon = 0.001
dropout_rate = 0.5
axis = 3
compress_factor = 0.5
# load dataset
(train_x, train_y), (test_x, test_y) = keras.datasets.cifar10.load_data()
# normalize to range 0-1
train_x = train_x / 255.0
test_x = test_x / 255.0
val_x = train_x[:5000]
val_y = train_y[:5000]
# init training time
training_time = 0
# init result test/train
test_result_loss = ""
test_result_acc = ""
train_result_loss = ""
train_result_acc = ""
nb_layers = "not build"
def id_block(X, f, filters, activation):
X_shortcut = X
X = Conv2D(filters=filters, kernel_size=(1, 1), strides=(1, 1), padding='same', kernel_initializer=glorot_uniform(seed=0))(X)
if epsilon != 0:
X = BatchNormalization(epsilon = epsilon, axis=axis)(X)
X = Activation(activation)(X)
X = Conv2D(filters=filters, kernel_size=(f, f), strides=(1, 1), padding='same', kernel_initializer=glorot_uniform(seed=0))(X)
if epsilon != 0:
X = BatchNormalization(epsilon = epsilon, axis=axis)(X)
X = Add()([X, X_shortcut])# SKIP Connection
X = Activation(activation)(X)
return X
def conv_block(X, f, filters, activation, s=2):
X_shortcut = X
X = Conv2D(filters=filters, kernel_size=(1, 1), strides=(s, s), padding='valid', kernel_initializer=glorot_uniform(seed=0))(X)
if epsilon != 0:
X = BatchNormalization(epsilon = epsilon, axis=axis)(X)
X = Activation(activation)(X)
X = Conv2D(filters=filters, kernel_size=(f, f), strides=(1, 1), padding='same', kernel_initializer=glorot_uniform(seed=0))(X)
if epsilon != 0:
X = BatchNormalization(epsilon = epsilon, axis=axis)(X)
X_shortcut = Conv2D(filters=filters, kernel_size=(1, 1), strides=(s, s), padding='valid', kernel_initializer=glorot_uniform(seed=0))(X_shortcut)
if epsilon != 0:
X_shortcut = BatchNormalization(epsilon = epsilon, axis=axis)(X_shortcut)
X = Add()([X, X_shortcut])
X = Activation(activation)(X)
return X
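# Illustrative shape note: id_block preserves the input's spatial dimensions
# (identity shortcut), while conv_block with s=2 halves them, applying the
# strided 1x1 convolution on both branches so that the Add() shapes match.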
def denseBlock(X, f, nb_filter, nb_layer, padding, activation):
x_input = X
for _ in range(0,nb_layer):
if epsilon != 0:
X = BatchNormalization(epsilon = epsilon, axis=axis)(X)
X = Activation(activation)(X)
X = Conv2D(filters=nb_filter, kernel_size=(f, f), strides=(1, 1), padding=padding)(X)
if dropout_rate != 0:
X = Dropout(dropout_rate)(X)
X = Concatenate()([X, x_input])
return X
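# Descriptive note: x_input above is never updated, so each layer concatenates
# with the block's original input only; the width stays nb_filter plus the
# input channels instead of growing layer by layer as in a standard DenseNet.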
def transition_block(X, f, nb_filter, padding, activation, op, stride):
if epsilon != 0:
X = BatchNormalization(epsilon = epsilon, axis=axis)(X)
X = Activation(activation)(X)
X = Conv2D(filters=nb_filter, kernel_size=(f, f), strides=(1, 1), padding=padding)(X)
if dropout_rate != 0:
X = Dropout(dropout_rate)(X)
if (op == 'avg'):
X = AveragePooling2D(pool_size = f, strides=stride, padding=padding)(X)
else :
X = MaxPooling2D(pool_size=f, strides=stride, padding=padding)(X)
return X
try:
def getModel():
X_input = X = Input([32, 32, 3])
X = denseBlock(X, 4, 3, 2, 'same', 'tanh')
X = denseBlock(X, 4, 3, 2, 'same', 'tanh')
X = denseBlock(X, 4, 3, 2, 'same', 'tanh')
X = denseBlock(X, 4, 3, 2, 'same', 'tanh')
X = transition_block(X, 4, 3, 'same', 'tanh', 'avg', 1)
X = id_block(X, 5, 3, 'tanh')
X = Conv2D(18, kernel_size=2, strides=1, activation='relu', padding='same')(X)
X = Conv2D(36, kernel_size=3, strides=3, activation='tanh', padding='same')(X)
X = MaxPooling2D(pool_size=5, strides=4, padding='same')(X)
X = denseBlock(X, 7, 36, 1, 'same', 'tanh')
X = denseBlock(X, 7, 36, 1, 'same', 'tanh')
X = transition_block(X, 7, 36, 'same', 'tanh', 'avg', 5)
X = GlobalAveragePooling2D()(X)
X = Dense(10, activation='softmax')(X)
model = Model(inputs=X_input, outputs=X)
return model
model = getModel()
#plot_model(model, show_shapes=True, to_file="../architecture_img/archi_v3_9.png")
model.compile(optimizer='adam', loss=keras.losses.sparse_categorical_crossentropy, metrics=['accuracy'])
start = time()
es = tf.keras.callbacks.EarlyStopping(monitor='loss', verbose=1, restore_best_weights=True, patience=1)
list_cb = [es]
    history = model.fit(train_x, train_y, epochs=50, batch_size=64, validation_split=0.3, callbacks=list_cb)
    training_time = time()-start
    nb_epochs = len(history.history['loss'])
    # evaluate once per dataset and reuse the values instead of re-running evaluation
    test_result_loss, test_result_acc = model.evaluate(test_x, test_y)
    print([test_result_loss, test_result_acc])
    log_file = open("../architecture_log/archi_v3_9.log" , "w")
    # save test result
    log_file.write('test result : ' + str([test_result_loss, test_result_acc]))
    # save train result (bug fix: evaluate on the training set, not the test set)
    train_result_loss, train_result_acc = model.evaluate(train_x, train_y)
    log_file.write('train result : ' + str([train_result_loss, train_result_acc]))
    log_file.write('History train result : ' + str(history.history))
    print('OK: file ../architecture_log/archi_v3_9.log has been created')
nb_layers = len(model.layers)
log_file.close()
except:
    print('error: file ../architecture_log/archi_v3_9_error.log has been created')
    error_file = open("../architecture_log/archi_v3_9_error.log" , "w")
    traceback.print_exc(file=error_file)
    # bug fix: set the variables that are actually written to the CSV below
    test_result_loss = test_result_acc = "Error"
    train_result_loss = train_result_acc = "Error"
error_file.close()
finally:
with open('../architecture_results_v3.csv', 'a', newline='') as file:
# identifying header
header = ['file_name', 'training_time(s)', 'test_result_loss', 'test_result_acc', 'train_result_acc', 'train_result_loss', 'nb_layers', 'epochs', 'type_archi']
writer = csv.DictWriter(file, fieldnames=header)
# writing data row-wise into the csv file
# writer.writeheader()
writer.writerow({'file_name' : 'archi_v3_9',
'training_time(s)': training_time,
'test_result_loss': test_result_loss,
'test_result_acc': test_result_acc,
'train_result_acc': train_result_acc,
'train_result_loss': train_result_loss,
'nb_layers': nb_layers,
'epochs': nb_epochs,
'type_archi': type_archi})
print('added a line to architecture_results_v3.csv')
# ---- algorithms/kaprekar-numbers.py (repo: Seungju182/Hackerrank) ----
#!/bin/python3
import math
import os
import random
import re
import sys
#
# Complete the 'kaprekarNumbers' function below.
#
# The function accepts following parameters:
# 1. INTEGER p
# 2. INTEGER q
#
def kaprekarNumbers(p, q):
# Write your code here
lst = []
for num in range(p, q+1):
squared = num ** 2
d = 10 ** len(str(num))
if squared // d + squared % d == num:
lst.append(num)
if lst:
print(*lst)
else:
print("INVALID RANGE")
if __name__ == '__main__':
p = int(input().strip())
q = int(input().strip())
kaprekarNumbers(p, q)
# ---- altair/vegalite/v4/api.py (repo: chris1610/altair, BSD-3-Clause license) ----
# -*- coding: utf-8 -*-
import warnings
import hashlib
import io
import json
import jsonschema
import pandas as pd
from .schema import core, channels, mixins, Undefined, SCHEMA_URL
from .data import data_transformers, pipe
from ... import utils, expr
from .display import renderers, VEGALITE_VERSION, VEGAEMBED_VERSION, VEGA_VERSION
from .theme import themes
# ------------------------------------------------------------------------
# Data Utilities
def _dataset_name(values):
"""Generate a unique hash of the data
Parameters
----------
values : list or dict
A list/dict representation of data values.
Returns
-------
name : string
A unique name generated from the hash of the values.
"""
if isinstance(values, core.InlineDataset):
values = values.to_dict()
values_json = json.dumps(values, sort_keys=True)
hsh = hashlib.md5(values_json.encode()).hexdigest()
return 'data-' + hsh
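# For example, with hypothetical inline values:
#     _dataset_name([{'x': 1}])  # -> 'data-<md5 hexdigest>'
# Equal values always produce the same name, which is what allows duplicate
# inline datasets to be consolidated into a single top-level entry.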
def _consolidate_data(data, context):
"""If data is specified inline, then move it to context['datasets']
This function will modify context in-place, and return a new version of data
"""
values = Undefined
kwds = {}
if isinstance(data, core.InlineData):
if data.name is Undefined and data.values is not Undefined:
values = data.values
kwds = {'format': data.format}
elif isinstance(data, dict):
if 'name' not in data and 'values' in data:
values = data['values']
kwds = {k: v for k, v in data.items() if k != 'values'}
if values is not Undefined:
name = _dataset_name(values)
data = core.NamedData(name=name, **kwds)
context.setdefault('datasets', {})[name] = values
return data
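# Sketch of the effect (hypothetical context dict): given data with inline
# values,
#     data = _consolidate_data(data, context)
# moves the values into context['datasets'] under their hash-derived name and
# returns a NamedData reference pointing at that entry.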
def _prepare_data(data, context=None):
"""Convert input data to data for use within schema
Parameters
----------
data :
The input dataset in the form of a DataFrame, dictionary, altair data
object, or other type that is recognized by the data transformers.
context : dict (optional)
The to_dict context in which the data is being prepared. This is used
to keep track of information that needs to be passed up and down the
recursive serialization routine, such as global named datasets.
"""
if data is Undefined:
return data
# convert dataframes or objects with __geo_interface__ to dict
if isinstance(data, pd.DataFrame) or hasattr(data, '__geo_interface__'):
data = pipe(data, data_transformers.get())
# convert string input to a URLData
if isinstance(data, str):
data = core.UrlData(data)
# consolidate inline data to top-level datasets
if context is not None and data_transformers.consolidate_datasets:
data = _consolidate_data(data, context)
# if data is still not a recognized type, then return
if not isinstance(data, (dict, core.Data)):
warnings.warn("data of type {} not recognized".format(type(data)))
return data
# ------------------------------------------------------------------------
# Aliases & specializations
Bin = core.BinParams
@utils.use_signature(core.LookupData)
class LookupData(core.LookupData):
def to_dict(self, *args, **kwargs):
"""Convert the chart to a dictionary suitable for JSON export"""
copy = self.copy(deep=False)
copy.data = _prepare_data(copy.data, kwargs.get('context'))
return super(LookupData, copy).to_dict(*args, **kwargs)
@utils.use_signature(core.FacetMapping)
class FacetMapping(core.FacetMapping):
_class_is_valid_at_instantiation = False
def to_dict(self, *args, **kwargs):
copy = self.copy(deep=False)
context = kwargs.get('context', {})
data = context.get('data', None)
if isinstance(self.row, str):
copy.row = core.FacetFieldDef(**utils.parse_shorthand(self.row, data))
if isinstance(self.column, str):
copy.column = core.FacetFieldDef(**utils.parse_shorthand(self.column, data))
return super(FacetMapping, copy).to_dict(*args, **kwargs)
# ------------------------------------------------------------------------
# Encoding will contain channel objects that aren't valid at instantiation
core.FacetedEncoding._class_is_valid_at_instantiation = False
# ------------------------------------------------------------------------
# These are parameters that are valid at the top level, but are not valid
# for specs that are within a composite chart
# (layer, hconcat, vconcat, facet, repeat)
TOPLEVEL_ONLY_KEYS = {'background', 'config', 'autosize', 'padding', '$schema'}
def _get_channels_mapping():
mapping = {}
for attr in dir(channels):
cls = getattr(channels, attr)
if isinstance(cls, type) and issubclass(cls, core.SchemaBase):
mapping[cls] = attr.replace('Value', '').lower()
return mapping
# -------------------------------------------------------------------------
# Tools for working with selections
class Selection(object):
"""A Selection object"""
_counter = 0
@classmethod
def _get_name(cls):
cls._counter += 1
return "selector{:03d}".format(cls._counter)
def __init__(self, name, selection):
if name is None:
name = self._get_name()
self.name = name
self.selection = selection
def __repr__(self):
return "Selection({0!r}, {1})".format(self.name, self.selection)
def ref(self):
return {'selection': self.name}
def to_dict(self):
return {'selection': self.name}
def __invert__(self):
return Selection(core.SelectionNot(**{'not': self.name}), self.selection)
def __and__(self, other):
if isinstance(other, Selection):
other = other.name
return Selection(core.SelectionAnd(**{'and': [self.name, other]}), self.selection)
def __or__(self, other):
if isinstance(other, Selection):
other = other.name
return Selection(core.SelectionOr(**{'or': [self.name, other]}), self.selection)
def __getattr__(self, field_name):
return expr.core.GetAttrExpression(self.name, field_name)
def __getitem__(self, field_name):
return expr.core.GetItemExpression(self.name, field_name)
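# Usage sketch (hypothetical selections): Selection objects compose with
# logical operators, e.g.
#     big, small = selection_interval(), selection_single()
#     combined = big & ~small  # SelectionAnd wrapping a SelectionNot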
# ------------------------------------------------------------------------
# Top-Level Functions
def value(value, **kwargs):
"""Specify a value for use in an encoding"""
return dict(value=value, **kwargs)
def selection(name=None, type=Undefined, **kwds):
"""Create a named selection.
Parameters
----------
name : string (optional)
The name of the selection. If not specified, a unique name will be
created.
type : string
The type of the selection: one of ["interval", "single", or "multi"]
**kwds :
additional keywords will be used to construct a SelectionDef instance
that controls the selection.
Returns
-------
selection: Selection
The selection object that can be used in chart creation.
"""
return Selection(name, core.SelectionDef(type=type, **kwds))
@utils.use_signature(core.IntervalSelection)
def selection_interval(**kwargs):
"""Create a selection with type='interval'"""
return selection(type='interval', **kwargs)
@utils.use_signature(core.MultiSelection)
def selection_multi(**kwargs):
"""Create a selection with type='multi'"""
return selection(type='multi', **kwargs)
@utils.use_signature(core.SingleSelection)
def selection_single(**kwargs):
"""Create a selection with type='single'"""
return selection(type='single', **kwargs)
@utils.use_signature(core.Binding)
def binding(input, **kwargs):
"""A generic binding"""
return core.Binding(input=input, **kwargs)
@utils.use_signature(core.BindCheckbox)
def binding_checkbox(**kwargs):
"""A checkbox binding"""
return core.BindCheckbox(input='checkbox', **kwargs)
@utils.use_signature(core.BindRadioSelect)
def binding_radio(**kwargs):
"""A radio button binding"""
return core.BindRadioSelect(input='radio', **kwargs)
@utils.use_signature(core.BindRadioSelect)
def binding_select(**kwargs):
"""A select binding"""
return core.BindRadioSelect(input='select', **kwargs)
@utils.use_signature(core.BindRange)
def binding_range(**kwargs):
"""A range binding"""
return core.BindRange(input='range', **kwargs)
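# Usage sketch (hypothetical field and options): bind a selection to a widget,
# e.g. a single selection driven by a dropdown over an 'Origin' column:
#     sel = selection_single(fields=['Origin'],
#                            bind=binding_select(options=['Europe', 'Japan', 'USA']))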
def condition(predicate, if_true, if_false, **kwargs):
"""A conditional attribute or encoding
Parameters
----------
predicate: Selection, LogicalOperandPredicate, expr.Expression, dict, or string
the selection predicate or test predicate for the condition.
if a string is passed, it will be treated as a test operand.
if_true:
the spec or object to use if the selection predicate is true
if_false:
the spec or object to use if the selection predicate is false
**kwargs:
additional keyword args are added to the resulting dict
Returns
-------
spec: dict or VegaLiteSchema
the spec that describes the condition
"""
test_predicates = (str, expr.Expression, core.LogicalOperandPredicate)
if isinstance(predicate, Selection):
condition = {'selection': predicate.name}
elif isinstance(predicate, core.SelectionOperand):
condition = {'selection': predicate}
elif isinstance(predicate, test_predicates):
condition = {'test': predicate}
elif isinstance(predicate, dict):
condition = predicate
else:
raise NotImplementedError("condition predicate of type {}"
"".format(type(predicate)))
if isinstance(if_true, core.SchemaBase):
# convert to dict for now; the from_dict call below will wrap this
# dict in the appropriate schema
if_true = if_true.to_dict()
elif isinstance(if_true, str):
if_true = {'shorthand': if_true}
if_true.update(kwargs)
condition.update(if_true)
if isinstance(if_false, core.SchemaBase):
# For the selection, the channel definitions all allow selections
# already. So use this SchemaBase wrapper if possible.
selection = if_false.copy()
selection.condition = condition
elif isinstance(if_false, str):
selection = {'condition': condition, 'shorthand': if_false}
selection.update(kwargs)
else:
selection = dict(condition=condition, **if_false)
return selection
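# Usage sketch (assumes a selection `brush` and a chart defined elsewhere):
# condition() is typically used inside an encoding, e.g.
#     color = condition(brush, value('steelblue'), value('lightgray'))
#     chart = chart.encode(color=color).add_selection(brush)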
# --------------------------------------------------------------------
# Top-level objects
class TopLevelMixin(mixins.ConfigMethodMixin):
"""Mixin for top-level chart objects such as Chart, LayeredChart, etc."""
_class_is_valid_at_instantiation = False
def to_dict(self, *args, **kwargs):
"""Convert the chart to a dictionary suitable for JSON export"""
# We make use of three context markers:
# - 'data' points to the data that should be referenced for column type
# inference.
# - 'top_level' is a boolean flag that is assumed to be true; if it's
# true then a "$schema" arg is added to the dict.
# - 'datasets' is a dict of named datasets that should be inserted
# in the top-level object
# note: not a deep copy because we want datasets and data arguments to
# be passed by reference
context = kwargs.get('context', {}).copy()
context.setdefault('datasets', {})
is_top_level = context.get('top_level', True)
copy = self.copy(deep=False)
original_data = getattr(copy, 'data', Undefined)
copy.data = _prepare_data(original_data, context)
if original_data is not Undefined:
context['data'] = original_data
# remaining to_dict calls are not at top level
context['top_level'] = False
kwargs['context'] = context
try:
dct = super(TopLevelMixin, copy).to_dict(*args, **kwargs)
except jsonschema.ValidationError:
dct = None
# If we hit an error, then re-convert with validate='deep' to get
# a more useful traceback. We don't do this by default because it's
# much slower in the case that there are no errors.
if dct is None:
kwargs['validate'] = 'deep'
dct = super(TopLevelMixin, copy).to_dict(*args, **kwargs)
# TODO: following entries are added after validation. Should they be validated?
if is_top_level:
# since this is top-level we add $schema if it's missing
if '$schema' not in dct:
dct['$schema'] = SCHEMA_URL
# apply theme from theme registry
the_theme = themes.get()
dct = utils.update_nested(the_theme(), dct, copy=True)
# update datasets
if context['datasets']:
dct.setdefault('datasets', {}).update(context['datasets'])
return dct
def to_html(self, base_url="https://cdn.jsdelivr.net/npm/",
output_div='vis', embed_options=None, json_kwds=None,
fullhtml=True, requirejs=False):
return utils.spec_to_html(self.to_dict(), mode='vega-lite',
vegalite_version=VEGALITE_VERSION,
vegaembed_version=VEGAEMBED_VERSION,
vega_version=VEGA_VERSION,
base_url=base_url, output_div=output_div,
embed_options=embed_options, json_kwds=json_kwds,
fullhtml=fullhtml, requirejs=requirejs)
def save(self, fp, format=None, override_data_transformer=True,
scale_factor=1.0,
vegalite_version=VEGALITE_VERSION,
vega_version=VEGA_VERSION,
vegaembed_version=VEGAEMBED_VERSION,
**kwargs):
"""Save a chart to file in a variety of formats
Supported formats are json, html, png, svg
Parameters
----------
fp : string filename or file-like object
file in which to write the chart.
format : string (optional)
the format to write: one of ['json', 'html', 'png', 'svg'].
If not specified, the format will be determined from the filename.
override_data_transformer : boolean (optional)
If True (default), then the save action will be done with
the MaxRowsError disabled. If False, then do not change the data
transformer.
scale_factor : float
For svg or png formats, scale the image by this factor when saving.
This can be used to control the size or resolution of the output.
Default is 1.0
**kwargs :
Additional keyword arguments are passed to the output method
associated with the specified format.
"""
from ...utils.save import save
kwds = dict(chart=self, fp=fp, format=format,
scale_factor=scale_factor,
vegalite_version=vegalite_version,
vega_version=vega_version,
vegaembed_version=vegaembed_version,
**kwargs)
# By default we override the data transformer. This makes it so
# that save() will succeed even for large datasets that would
# normally trigger a MaxRowsError
if override_data_transformer:
with data_transformers.disable_max_rows():
result = save(**kwds)
else:
result = save(**kwds)
return result
# Fallback for when rendering fails; the full repr is too long to be
# useful in nearly all cases.
def __repr__(self):
return "alt.{}(...)".format(self.__class__.__name__)
# Layering and stacking
def __add__(self, other):
if not isinstance(other, TopLevelMixin):
raise ValueError("Only Chart objects can be layered.")
return layer(self, other)
def __and__(self, other):
if not isinstance(other, TopLevelMixin):
raise ValueError("Only Chart objects can be concatenated.")
return vconcat(self, other)
def __or__(self, other):
if not isinstance(other, TopLevelMixin):
raise ValueError("Only Chart objects can be concatenated.")
return hconcat(self, other)
def repeat(self, repeat=Undefined, row=Undefined, column=Undefined, columns=Undefined, **kwargs):
"""Return a RepeatChart built from the chart
Fields within the chart can be set to correspond to the row or
column using `alt.repeat('row')` and `alt.repeat('column')`.
Parameters
----------
repeat : list
a list of data column names to be repeated. This cannot be
used along with the ``row`` or ``column`` argument.
row : list
a list of data column names to be mapped to the row facet
column : list
a list of data column names to be mapped to the column facet
columns : int
the maximum number of columns before wrapping. Only referenced
if ``repeat`` is specified.
**kwargs :
additional keywords passed to RepeatChart.
Returns
-------
chart : RepeatChart
a repeated chart.
"""
repeat_specified = (repeat is not Undefined)
rowcol_specified = (row is not Undefined or column is not Undefined)
if repeat_specified and rowcol_specified:
raise ValueError("repeat argument cannot be combined with row/column argument.")
if not repeat_specified:
repeat = core.RepeatMapping(row=row, column=column)
return RepeatChart(spec=self, repeat=repeat, columns=columns, **kwargs)
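# Usage sketch (hypothetical column names): pair repeat() with alt.repeat()
# references in the encoding, e.g.
#     chart.encode(x=alt.X(alt.repeat('column'), type='quantitative')) \
#          .repeat(column=['sepalLength', 'sepalWidth'])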
def properties(self, **kwargs):
"""Set top-level properties of the Chart.
Argument names and types are the same as class initialization.
"""
copy = self.copy(deep=False)
for key, val in kwargs.items():
if key == 'selection' and isinstance(val, Selection):
# For backward compatibility with old selection interface.
setattr(copy, key, {val.name: val.selection})
else:
# Don't validate data, because it hasn't been processed.
if key != 'data':
self.validate_property(key, val)
setattr(copy, key, val)
return copy
def project(self, type='mercator', center=Undefined, clipAngle=Undefined, clipExtent=Undefined,
coefficient=Undefined, distance=Undefined, fraction=Undefined, lobes=Undefined,
parallel=Undefined, precision=Undefined, radius=Undefined, ratio=Undefined,
reflectX=Undefined, reflectY=Undefined, rotate=Undefined, scale=Undefined,
spacing=Undefined, tilt=Undefined, translate=Undefined, **kwds):
"""Add a geographic projection to the chart.
This is generally used either with ``mark_geoshape`` or with the
``latitude``/``longitude`` encodings.
Available projection types are
['albers', 'albersUsa', 'azimuthalEqualArea', 'azimuthalEquidistant',
'conicConformal', 'conicEqualArea', 'conicEquidistant', 'equalEarth', 'equirectangular',
'gnomonic', 'identity', 'mercator', 'orthographic', 'stereographic', 'transverseMercator']
Attributes
----------
type : ProjectionType
The cartographic projection to use. This value is case-insensitive, for example
`"albers"` and `"Albers"` indicate the same projection type. You can find all valid
projection types [in the
documentation](https://vega.github.io/vega-lite/docs/projection.html#projection-types).
**Default value:** `mercator`
center : List(float)
Sets the projection’s center to the specified center, a two-element array of
longitude and latitude in degrees.
**Default value:** `[0, 0]`
clipAngle : float
Sets the projection’s clipping circle radius to the specified angle in degrees. If
`null`, switches to [antimeridian](http://bl.ocks.org/mbostock/3788999) cutting
rather than small-circle clipping.
clipExtent : List(List(float))
Sets the projection’s viewport clip extent to the specified bounds in pixels. The
extent bounds are specified as an array `[[x0, y0], [x1, y1]]`, where `x0` is the
left-side of the viewport, `y0` is the top, `x1` is the right and `y1` is the
bottom. If `null`, no viewport clipping is performed.
coefficient : float
distance : float
fraction : float
lobes : float
parallel : float
precision : Mapping(required=[length])
Sets the threshold for the projection’s [adaptive
resampling](http://bl.ocks.org/mbostock/3795544) to the specified value in pixels.
This value corresponds to the [Douglas–Peucker
distance](http://en.wikipedia.org/wiki/Ramer%E2%80%93Douglas%E2%80%93Peucker_algorithm).
If precision is not specified, returns the projection’s current resampling
precision which defaults to `√0.5 ≅ 0.70710…`.
radius : float
ratio : float
reflectX : boolean
reflectY : boolean
rotate : List(float)
Sets the projection’s three-axis rotation to the specified angles, which must be a
two- or three-element array of numbers [`lambda`, `phi`, `gamma`] specifying the
rotation angles in degrees about each spherical axis. (These correspond to yaw,
pitch and roll.)
**Default value:** `[0, 0, 0]`
scale : float
Sets the projection's scale (zoom) value, overriding automatic fitting.
spacing : float
tilt : float
translate : List(float)
Sets the projection's translation (pan) value, overriding automatic fitting.
"""
projection = core.Projection(center=center, clipAngle=clipAngle, clipExtent=clipExtent,
coefficient=coefficient, distance=distance, fraction=fraction,
lobes=lobes, parallel=parallel, precision=precision,
radius=radius, ratio=ratio, reflectX=reflectX,
reflectY=reflectY, rotate=rotate, scale=scale, spacing=spacing,
tilt=tilt, translate=translate, type=type, **kwds)
return self.properties(projection=projection)
def _add_transform(self, *transforms):
"""Copy the chart and add specified transforms to chart.transform"""
copy = self.copy(deep=['transform'])
if copy.transform is Undefined:
copy.transform = []
copy.transform.extend(transforms)
return copy
def transform_aggregate(self, aggregate=Undefined, groupby=Undefined, **kwds):
"""
Add an AggregateTransform to the schema.
Parameters
----------
aggregate : List(:class:`AggregatedFieldDef`)
Array of objects that define fields to aggregate.
groupby : List(string)
The data fields to group by. If not specified, a single group containing all data
objects will be used.
**kwds :
additional keywords are converted to aggregates using standard
shorthand parsing.
Returns
-------
self : Chart object
returns chart to allow for chaining
Examples
--------
The aggregate transform allows you to specify transforms directly using
the same shorthand syntax as used in encodings:
>>> import altair as alt
>>> chart1 = alt.Chart().transform_aggregate(
... mean_acc='mean(Acceleration)',
... groupby=['Origin']
... )
>>> print(chart1.transform[0].to_json()) # doctest: +NORMALIZE_WHITESPACE
{
"aggregate": [
{
"as": "mean_acc",
"field": "Acceleration",
"op": "mean"
}
],
"groupby": [
"Origin"
]
}
It also supports including AggregatedFieldDef instances or dicts directly,
so you can create the above transform like this:
>>> chart2 = alt.Chart().transform_aggregate(
... [alt.AggregatedFieldDef(field='Acceleration', op='mean',
... **{'as': 'mean_acc'})],
... groupby=['Origin']
... )
>>> chart2.transform == chart1.transform
True
See Also
--------
alt.AggregateTransform : underlying transform object
"""
if aggregate is Undefined:
aggregate = []
for key, val in kwds.items():
parsed = utils.parse_shorthand(val)
dct = {'as': key,
'field': parsed.get('field', Undefined),
'op': parsed.get('aggregate', Undefined)}
aggregate.append(core.AggregatedFieldDef(**dct))
return self._add_transform(core.AggregateTransform(aggregate=aggregate,
groupby=groupby))
def transform_bin(self, as_=Undefined, field=Undefined, bin=True, **kwargs):
"""
Add a BinTransform to the schema.
Parameters
----------
as_ : anyOf(string, List(string))
The output fields at which to write the start and end bin values.
bin : anyOf(boolean, :class:`BinParams`)
An object indicating bin properties, or simply ``true`` for using default bin
parameters.
field : string
The data field to bin.
Returns
-------
self : Chart object
returns chart to allow for chaining
Examples
--------
>>> import altair as alt
>>> chart = alt.Chart().transform_bin("x_binned", "x")
>>> chart.transform[0]
BinTransform({
as: 'x_binned',
bin: True,
field: 'x'
})
>>> chart = alt.Chart().transform_bin("x_binned", "x",
... bin=alt.Bin(maxbins=10))
>>> chart.transform[0]
BinTransform({
as: 'x_binned',
bin: BinParams({
maxbins: 10
}),
field: 'x'
})
See Also
--------
alt.BinTransform : underlying transform object
"""
if as_ is not Undefined:
if 'as' in kwargs:
raise ValueError("transform_bin: both 'as_' and 'as' passed as arguments.")
kwargs['as'] = as_
kwargs['bin'] = bin
kwargs['field'] = field
return self._add_transform(core.BinTransform(**kwargs))
def transform_calculate(self, as_=Undefined, calculate=Undefined, **kwargs):
"""
Add a CalculateTransform to the schema.
Parameters
----------
as_ : string
The field for storing the computed formula value.
calculate : string or alt.expr expression
A `expression <https://vega.github.io/vega-lite/docs/types.html#expression>`__
string. Use the variable ``datum`` to refer to the current data object.
**kwargs
transforms can also be passed by keyword argument; see Examples
Returns
-------
self : Chart object
returns chart to allow for chaining
Examples
--------
>>> import altair as alt
>>> from altair import datum, expr
>>> chart = alt.Chart().transform_calculate(y = 2 * expr.sin(datum.x))
>>> chart.transform[0]
CalculateTransform({
as: 'y',
calculate: (2 * sin(datum.x))
})
It's also possible to pass the ``CalculateTransform`` arguments directly:
>>> kwds = {'as': 'y', 'calculate': '2 * sin(datum.x)'}
>>> chart = alt.Chart().transform_calculate(**kwds)
>>> chart.transform[0]
CalculateTransform({
as: 'y',
calculate: '2 * sin(datum.x)'
})
As the first form is easier to write and understand, that is the
recommended method.
See Also
--------
alt.CalculateTransform : underlying transform object
"""
if as_ is Undefined:
as_ = kwargs.pop('as', Undefined)
elif 'as' in kwargs:
raise ValueError("transform_calculate: both 'as_' and 'as' passed as arguments.")
if as_ is not Undefined or calculate is not Undefined:
dct = {'as': as_, 'calculate': calculate}
self = self._add_transform(core.CalculateTransform(**dct))
for as_, calculate in kwargs.items():
dct = {'as': as_, 'calculate': calculate}
self = self._add_transform(core.CalculateTransform(**dct))
return self
def transform_density(self, density, as_=Undefined, bandwidth=Undefined, counts=Undefined,
cumulative=Undefined, extent=Undefined, groupby=Undefined,
maxsteps=Undefined, minsteps=Undefined, steps=Undefined):
"""Add a DensityTransform to the spec.
Attributes
----------
density : str
The data field for which to perform density estimation.
as_ : [str, str]
The output fields for the sample value and corresponding density estimate.
**Default value:** ``["value", "density"]``
bandwidth : float
The bandwidth (standard deviation) of the Gaussian kernel. If unspecified or set to
zero, the bandwidth value is automatically estimated from the input data using
Scott’s rule.
counts : boolean
A boolean flag indicating if the output values should be probability estimates
(false) or smoothed counts (true).
**Default value:** ``false``
cumulative : boolean
A boolean flag indicating whether to produce density estimates (false) or cumulative
density estimates (true).
**Default value:** ``false``
extent : List([float, float])
A [min, max] domain from which to sample the distribution. If unspecified, the
extent will be determined by the observed minimum and maximum values of the density
value field.
groupby : List(str)
The data fields to group by. If not specified, a single group containing all data
objects will be used.
maxsteps : float
The maximum number of samples to take along the extent domain for plotting the
density. **Default value:** ``200``
minsteps : float
The minimum number of samples to take along the extent domain for plotting the
density. **Default value:** ``25``
steps : float
The exact number of samples to take along the extent domain for plotting the
density. If specified, overrides both minsteps and maxsteps to set an exact number
of uniform samples. Potentially useful in conjunction with a fixed extent to ensure
consistent sample points for stacked densities.
"""
return self._add_transform(core.DensityTransform(
density=density, bandwidth=bandwidth, counts=counts, cumulative=cumulative,
extent=extent, groupby=groupby, maxsteps=maxsteps, minsteps=minsteps, steps=steps,
**{'as': as_}
))
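# Usage sketch (assumes `import altair as alt` and a DataFrame `df` with a
# numeric column 'x'):
#     alt.Chart(df).transform_density('x', as_=['x', 'density']) \
#         .mark_area().encode(x='x:Q', y='density:Q')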
def transform_impute(self, impute, key, frame=Undefined, groupby=Undefined,
keyvals=Undefined, method=Undefined, value=Undefined):
"""
Add an ImputeTransform to the schema.
Parameters
----------
impute : string
The data field for which the missing values should be imputed.
key : string
A key field that uniquely identifies data objects within a group.
Missing key values (those occurring in the data but not in the current group) will
be imputed.
frame : List(anyOf(None, float))
A frame specification as a two-element array used to control the window over which
the specified method is applied. The array entries should either be a number
indicating the offset from the current data object, or null to indicate unbounded
rows preceding or following the current data object. For example, the value ``[-5,
5]`` indicates that the window should include five objects preceding and five
objects following the current object.
**Default value:** ``[null, null]``, indicating that the window includes all
objects.
groupby : List(string)
An optional array of fields by which to group the values.
Imputation will then be performed on a per-group basis.
keyvals : anyOf(List(Mapping(required=[])), :class:`ImputeSequence`)
Defines the key values that should be considered for imputation.
An array of key values or an object defining a `number sequence
<https://vega.github.io/vega-lite/docs/impute.html#sequence-def>`__.
If provided, this will be used in addition to the key values observed within the
input data. If not provided, the values will be derived from all unique values of
the ``key`` field. For ``impute`` in ``encoding``, the key field is the x-field if
the y-field is imputed, or vice versa.
If there is no impute grouping, this property *must* be specified.
method : :class:`ImputeMethod`
The imputation method to use for the field value of imputed data objects.
One of ``value``, ``mean``, ``median``, ``max`` or ``min``.
**Default value:** ``"value"``
value : Mapping(required=[])
The field value to use when the imputation ``method`` is ``"value"``.
Returns
-------
self : Chart object
returns chart to allow for chaining
See Also
--------
alt.ImputeTransform : underlying transform object
"""
return self._add_transform(core.ImputeTransform(
impute=impute, key=key, frame=frame, groupby=groupby,
keyvals=keyvals, method=method, value=value
))
def transform_joinaggregate(self, joinaggregate=Undefined, groupby=Undefined, **kwargs):
"""
Add a JoinAggregateTransform to the schema.
Parameters
----------
joinaggregate : List(:class:`JoinAggregateFieldDef`)
The definition of the fields in the join aggregate, and what calculations to use.
groupby : List(string)
The data fields for partitioning the data objects into separate groups. If
unspecified, all data points will be in a single group.
**kwargs
joinaggregates can also be passed by keyword argument; see Examples.
Returns
-------
self : Chart object
returns chart to allow for chaining
Examples
--------
>>> import altair as alt
>>> chart = alt.Chart().transform_joinaggregate(x='sum(y)')
>>> chart.transform[0]
JoinAggregateTransform({
joinaggregate: [JoinAggregateFieldDef({
as: 'x',
field: 'y',
op: 'sum'
})]
})
See Also
--------
alt.JoinAggregateTransform : underlying transform object
"""
if joinaggregate is Undefined:
joinaggregate = []
for key, val in kwargs.items():
parsed = utils.parse_shorthand(val)
dct = {'as': key,
'field': parsed.get('field', Undefined),
'op': parsed.get('aggregate', Undefined)}
joinaggregate.append(core.JoinAggregateFieldDef(**dct))
return self._add_transform(core.JoinAggregateTransform(
joinaggregate=joinaggregate, groupby=groupby
))
def transform_filter(self, filter, **kwargs):
"""
Add a FilterTransform to the schema.
Parameters
----------
filter : a filter expression or :class:`LogicalOperandPredicate`
The `filter` property must be one of the predicate definitions:
(1) a string or alt.expr expression
(2) a range predicate
(3) a selection predicate
(4) a logical operand combining (1)-(3)
(5) a Selection object
Returns
-------
self : Chart object
returns chart to allow for chaining
See Also
--------
alt.FilterTransform : underlying transform object
"""
if isinstance(filter, Selection):
filter = {'selection': filter.name}
elif isinstance(filter, core.SelectionOperand):
filter = {'selection': filter}
return self._add_transform(core.FilterTransform(filter=filter, **kwargs))
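# Usage sketch (assumes `import altair as alt`, a chart over data with a
# numeric 'year' field, and a selection `brush`):
#     chart.transform_filter(alt.datum.year >= 2000)  # expression predicate
#     chart.transform_filter(brush)                   # selection predicate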
def transform_flatten(self, flatten, as_=Undefined):
"""Add a FlattenTransform to the schema.
Parameters
----------
flatten : List(string)
An array of one or more data fields containing arrays to flatten.
If multiple fields are specified, their array values should have a parallel
structure, ideally with the same length.
If the lengths of parallel arrays do not match,
the longest array will be used with ``null`` values added for missing entries.
as : List(string)
The output field names for extracted array values.
**Default value:** The field name of the corresponding array field
Returns
-------
self : Chart object
returns chart to allow for chaining
See Also
--------
alt.FlattenTransform : underlying transform object
"""
return self._add_transform(core.FlattenTransform(flatten=flatten, **{'as': as_}))
def transform_fold(self, fold, as_=Undefined):
"""Add a FoldTransform to the spec.
Parameters
----------
fold : List(string)
An array of data fields indicating the properties to fold.
as : [string, string]
The output field names for the key and value properties produced by the fold
transform. Default: ``["key", "value"]``
Returns
-------
self : Chart object
returns chart to allow for chaining
See Also
--------
Chart.transform_pivot : pivot transform - opposite of fold.
alt.FoldTransform : underlying transform object
"""
return self._add_transform(core.FoldTransform(fold=fold, **{'as': as_}))
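# Usage sketch (hypothetical wide-form columns 'gold' and 'silver'):
#     chart.transform_fold(['gold', 'silver'], as_=['medal', 'count'])
# turns one row per country into one row per (country, medal) pair.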
def transform_loess(self, on, loess, as_=Undefined, bandwidth=Undefined, groupby=Undefined):
"""Add a LoessTransform to the spec.
Parameters
----------
on : str
The data field of the independent variable to use as a predictor.
loess : str
The data field of the dependent variable to smooth.
as_ : [str, str]
The output field names for the smoothed points generated by the loess transform.
**Default value:** The field names of the input x and y values.
bandwidth : float
A bandwidth parameter in the range ``[0, 1]`` that determines the amount of
smoothing. **Default value:** ``0.3``
groupby : List(str)
The data fields to group by. If not specified, a single group containing all data
objects will be used.
Returns
-------
self : Chart object
returns chart to allow for chaining
See Also
--------
Chart.transform_regression: regression transform
alt.LoessTransform : underlying transform object
"""
return self._add_transform(core.LoessTransform(
loess=loess, on=on, bandwidth=bandwidth, groupby=groupby, **{'as': as_}
))
def transform_lookup(self, lookup=Undefined, from_=Undefined, as_=Undefined, default=Undefined, **kwargs):
"""Add a DataLookupTransform or SelectionLookupTransform to the chart
Attributes
----------
lookup : string
Key in primary data source.
from_ : anyOf(:class:`LookupData`, :class:`LookupSelection`)
Secondary data reference.
as_ : anyOf(string, List(string))
The output fields on which to store the looked up data values.
For data lookups, this property may be left blank if ``from_.fields``
has been specified (those field names will be used); if ``from_.fields``
has not been specified, ``as_`` must be a string.
For selection lookups, this property is optional: if unspecified,
looked up values will be stored under a property named for the selection;
and if specified, it must correspond to ``from_.fields``.
default : string
The default value to use if lookup fails. **Default value:** ``null``
Returns
-------
self : Chart object
returns chart to allow for chaining
See Also
--------
alt.DataLookupTransform : underlying transform object
alt.SelectionLookupTransform : underlying transform object
"""
if as_ is not Undefined:
if 'as' in kwargs:
raise ValueError("transform_lookup: both 'as_' and 'as' passed as arguments.")
kwargs['as'] = as_
if from_ is not Undefined:
if 'from' in kwargs:
raise ValueError("transform_lookup: both 'from_' and 'from' passed as arguments.")
kwargs['from'] = from_
kwargs['lookup'] = lookup
kwargs['default'] = default
return self._add_transform(core.LookupTransform(**kwargs))
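# Usage sketch (assumes `import altair as alt` and a hypothetical secondary
# DataFrame `lookup_df` keyed by an 'id' column):
#     chart.transform_lookup(
#         lookup='id',
#         from_=alt.LookupData(data=lookup_df, key='id', fields=['name']),
#     )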
def transform_pivot(self, pivot, value, groupby=Undefined, limit=Undefined, op=Undefined):
"""Add a pivot transform to the chart.
Parameters
----------
pivot : str
The data field to pivot on. The unique values of this field become new field names
in the output stream.
value : str
The data field to populate pivoted fields. The aggregate values of this field become
the values of the new pivoted fields.
groupby : List(str)
The optional data fields to group by. If not specified, a single group containing
all data objects will be used.
limit : float
An optional parameter indicating the maximum number of pivoted fields to generate.
The default ( ``0`` ) applies no limit. The pivoted ``pivot`` names are sorted in
ascending order prior to enforcing the limit.
**Default value:** ``0``
op : string
The aggregation operation to apply to grouped ``value`` field values.
**Default value:** ``sum``
Returns
-------
self : Chart object
returns chart to allow for chaining
See Also
--------
Chart.transform_fold : fold transform - opposite of pivot.
alt.PivotTransform : underlying transform object
"""
return self._add_transform(core.PivotTransform(
pivot=pivot, value=value, groupby=groupby, limit=limit, op=op
))
def transform_quantile(self, quantile, as_=Undefined, groupby=Undefined,
probs=Undefined, step=Undefined):
"""Add a quantile transform to the chart
Parameters
----------
quantile : str
The data field for which to perform quantile estimation.
as : [str, str]
The output field names for the probability and quantile values.
**Default value:** ``["prob", "value"]``
groupby : List(str)
The data fields to group by. If not specified, a single group containing all data
objects will be used.
probs : List(float)
An array of probabilities in the range (0, 1) for which to compute quantile values.
If not specified, the *step* parameter will be used.
step : float
A probability step size (default 0.01) for sampling quantile values. All values from
one-half the step size up to 1 (exclusive) will be sampled. This parameter is only
used if the *probs* parameter is not provided.
Returns
-------
self : Chart object
returns chart to allow for chaining
See Also
--------
alt.QuantileTransform : underlying transform object
"""
return self._add_transform(core.QuantileTransform(
quantile=quantile, groupby=groupby, probs=probs, step=step, **{'as': as_}
))
def transform_regression(self, on, regression, as_=Undefined, extent=Undefined, groupby=Undefined,
method=Undefined, order=Undefined, params=Undefined):
"""Add a RegressionTransform to the chart.
Parameters
----------
on : str
The data field of the independent variable to use as a predictor.
regression : str
The data field of the dependent variable to predict.
as_ : [str, str]
The output field names for the smoothed points generated by the regression
transform. **Default value:** The field names of the input x and y values.
extent : [float, float]
A [min, max] domain over the independent (x) field for the starting and ending
points of the generated trend line.
groupby : List(str)
The data fields to group by. If not specified, a single group containing all data
objects will be used.
method : enum('linear', 'log', 'exp', 'pow', 'quad', 'poly')
The functional form of the regression model. One of ``"linear"``, ``"log"``,
``"exp"``, ``"pow"``, ``"quad"``, or ``"poly"``. **Default value:** ``"linear"``
order : float
The polynomial order (number of coefficients) for the 'poly' method.
**Default value:** ``3``
params : boolean
A boolean flag indicating if the transform should return the regression model
parameters (one object per group), rather than trend line points.
The resulting objects include a ``coef`` array of fitted coefficient values
(starting with the intercept term and then including terms of increasing order)
and an ``rSquared`` value (indicating the total variance explained by the model).
**Default value:** ``false``
Returns
-------
self : Chart object
returns chart to allow for chaining
See Also
--------
Chart.transform_loess : LOESS transform
alt.RegressionTransform : underlying transform object
"""
return self._add_transform(core.RegressionTransform(
regression=regression, on=on, extent=extent, groupby=groupby,
method=method, order=order, params=params, **{'as': as_}
))
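# Usage sketch (hypothetical fields 'x' and 'y'): overlay a fitted trend line
# on a scatter chart `points` built over the same data:
#     points + points.transform_regression('x', 'y').mark_line()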
def transform_sample(self, sample=1000):
"""
Add a SampleTransform to the schema.
Parameters
----------
sample : float
The maximum number of data objects to include in the sample. Default: 1000.
Returns
-------
self : Chart object
returns chart to allow for chaining
See Also
--------
alt.SampleTransform : underlying transform object
"""
return self._add_transform(core.SampleTransform(sample))
def transform_stack(self, as_, stack, groupby, offset=Undefined, sort=Undefined):
"""
Add a StackTransform to the schema.
Parameters
----------
as_ : anyOf(string, List(string))
Output field names. This can be either a string or an array of strings with
two elements denoting the name for the fields for stack start and stack end
respectively.
If a single string (e.g. "val") is provided, the end field will be "val_end".
stack : string
The field which is stacked.
groupby : List(string)
The data fields to group by.
offset : enum('zero', 'center', 'normalize')
Mode for stacking marks. Default: 'zero'.
sort : List(:class:`SortField`)
Field that determines the order of leaves in the stacked charts.
Returns
-------
self : Chart object
returns chart to allow for chaining
See Also
--------
alt.StackTransform : underlying transform object
"""
return self._add_transform(core.StackTransform(
stack=stack, groupby=groupby, offset=offset, sort=sort, **{'as': as_}
))
def transform_timeunit(self, as_=Undefined, field=Undefined, timeUnit=Undefined, **kwargs):
"""
Add a TimeUnitTransform to the schema.
Parameters
----------
as_ : string
The output field to write the timeUnit value.
field : string
The data field to apply time unit.
timeUnit : :class:`TimeUnit`
The timeUnit.
**kwargs
transforms can also be passed by keyword argument; see Examples
Returns
-------
self : Chart object
returns chart to allow for chaining
Examples
--------
>>> import altair as alt
>>> from altair import datum, expr
>>> chart = alt.Chart().transform_timeunit(month='month(date)')
>>> chart.transform[0]
TimeUnitTransform({
as: 'month',
field: 'date',
timeUnit: 'month'
})
It's also possible to pass the ``TimeUnitTransform`` arguments directly;
this is most useful in cases where the desired field name is not a
valid python identifier:
>>> kwds = {'as': 'month', 'timeUnit': 'month', 'field': 'The Month'}
>>> chart = alt.Chart().transform_timeunit(**kwds)
>>> chart.transform[0]
TimeUnitTransform({
as: 'month',
field: 'The Month',
timeUnit: 'month'
})
As the first form is easier to write and understand, that is the
recommended method.
See Also
--------
alt.TimeUnitTransform : underlying transform object
"""
if as_ is Undefined:
as_ = kwargs.pop('as', Undefined)
else:
if 'as' in kwargs:
raise ValueError("transform_timeunit: both 'as_' and 'as' passed as arguments.")
if as_ is not Undefined:
dct = {'as': as_, 'timeUnit': timeUnit, 'field': field}
self = self._add_transform(core.TimeUnitTransform(**dct))
for as_, shorthand in kwargs.items():
dct = utils.parse_shorthand(shorthand,
parse_timeunits=True,
parse_aggregates=False,
parse_types=False)
dct.pop('type', None)
dct['as'] = as_
if 'timeUnit' not in dct:
raise ValueError("'{}' must include a valid timeUnit".format(shorthand))
self = self._add_transform(core.TimeUnitTransform(**dct))
return self
def transform_window(self, window=Undefined, frame=Undefined, groupby=Undefined,
ignorePeers=Undefined, sort=Undefined, **kwargs):
"""Add a WindowTransform to the schema
Parameters
----------
window : List(:class:`WindowFieldDef`)
The definition of the fields in the window, and what calculations to use.
frame : List(anyOf(None, float))
A frame specification as a two-element array indicating how the sliding window
should proceed. The array entries should either be a number indicating the offset
from the current data object, or null to indicate unbounded rows preceding or
following the current data object. The default value is ``[null, 0]``, indicating
that the sliding window includes the current object and all preceding objects. The
value ``[-5, 5]`` indicates that the window should include five objects preceding
and five objects following the current object. Finally, ``[null, null]`` indicates
that the window frame should always include all data objects. The only operators
affected are the aggregation operations and the ``first_value``, ``last_value``, and
``nth_value`` window operations. The other window operations are not affected by
this.
**Default value:** ``[null, 0]`` (includes the current object and all preceding
objects)
groupby : List(string)
The data fields for partitioning the data objects into separate windows. If
unspecified, all data points will be in a single group.
ignorePeers : boolean
Indicates if the sliding window frame should ignore peer values. (Peer values are
those considered identical by the sort criteria). The default is false, causing the
window frame to expand to include all peer values. If set to true, the window frame
will be defined by offset values only. This setting only affects those operations
that depend on the window frame, namely aggregation operations and the first_value,
last_value, and nth_value window operations.
**Default value:** ``false``
sort : List(:class:`SortField`)
A sort field definition for sorting data objects within a window. If two data
objects are considered equal by the comparator, they are considered “peer” values of
equal rank. If sort is not specified, the order is undefined: data objects are
processed in the order they are observed and none are considered peers (the
ignorePeers parameter is ignored and treated as if set to ``true`` ).
**kwargs
transforms can also be passed by keyword argument; see Examples
Examples
--------
A cumulative line chart
>>> import altair as alt
>>> import numpy as np
>>> import pandas as pd
>>> data = pd.DataFrame({'x': np.arange(100),
... 'y': np.random.randn(100)})
>>> chart = alt.Chart(data).mark_line().encode(
... x='x:Q',
... y='ycuml:Q'
... ).transform_window(
... ycuml='sum(y)'
... )
>>> chart.transform[0]
WindowTransform({
window: [WindowFieldDef({
as: 'ycuml',
field: 'y',
op: 'sum'
})]
})
"""
if kwargs:
if window is Undefined:
window = []
for as_, shorthand in kwargs.items():
kwds = {'as': as_}
kwds.update(utils.parse_shorthand(shorthand,
parse_aggregates=False,
parse_window_ops=True,
parse_timeunits=False,
parse_types=False))
window.append(core.WindowFieldDef(**kwds))
return self._add_transform(core.WindowTransform(window=window, frame=frame, groupby=groupby,
ignorePeers=ignorePeers, sort=sort))
# Display-related methods
def _repr_mimebundle_(self, include, exclude):
"""Return a MIME bundle for display in Jupyter frontends."""
# Catch errors explicitly to get around issues in Jupyter frontend
# see https://github.com/ipython/ipython/issues/11038
try:
dct = self.to_dict()
except Exception:
utils.display_traceback(in_ipython=True)
return {}
else:
return renderers.get()(dct)
def display(self, renderer=Undefined, theme=Undefined, actions=Undefined,
**kwargs):
"""Display chart in Jupyter notebook or JupyterLab
Parameters are passed as options to vega-embed within supported frontends.
See https://github.com/vega/vega-embed#options for details.
Parameters
----------
renderer : string ('canvas' or 'svg')
The renderer to use
theme : string
The Vega theme name to use; see https://github.com/vega/vega-themes
actions : bool or dict
Specify whether action links ("Open In Vega Editor", etc.) are
included in the view.
**kwargs :
Additional parameters are also passed to vega-embed as options.
"""
from IPython.display import display
if renderer is not Undefined:
kwargs['renderer'] = renderer
if theme is not Undefined:
kwargs['theme'] = theme
if actions is not Undefined:
kwargs['actions'] = actions
if kwargs:
options = renderers.options.copy()
options['embed_options'] = options.get('embed_options', {}).copy()
options['embed_options'].update(kwargs)
with renderers.enable(**options):
display(self)
else:
display(self)
def serve(self, ip='127.0.0.1', port=8888, n_retries=50, files=None,
jupyter_warning=True, open_browser=True, http_server=None,
**kwargs):
"""Open a browser window and display a rendering of the chart
Parameters
----------
ip : string (default = '127.0.0.1')
ip address at which the HTML will be served.
port : int (default = 8888)
the port at which to serve the HTML
n_retries : int (default = 50)
the number of nearby ports to search if the specified port
is already in use.
files : dictionary (optional)
dictionary of extra content to serve
jupyter_warning : bool (optional)
if True (default), then print a warning if this is used
within the Jupyter notebook
open_browser : bool (optional)
if True (default), then open a web browser to the given HTML
http_server : class (optional)
optionally specify an HTTPServer class to use for showing the
figure. The default is Python's basic HTTPServer.
**kwargs :
additional keyword arguments passed to the save() method
"""
from ...utils.server import serve
html = io.StringIO()
self.save(html, format='html', **kwargs)
html.seek(0)
serve(html.read(), ip=ip, port=port, n_retries=n_retries,
files=files, jupyter_warning=jupyter_warning,
open_browser=open_browser, http_server=http_server)
@utils.use_signature(core.Resolve)
def _set_resolve(self, **kwargs):
"""Copy the chart and update the resolve property with kwargs"""
if not hasattr(self, 'resolve'):
raise ValueError("{} object has no attribute "
"'resolve'".format(self.__class__))
copy = self.copy(deep=['resolve'])
if copy.resolve is Undefined:
copy.resolve = core.Resolve()
for key, val in kwargs.items():
copy.resolve[key] = val
return copy
@utils.use_signature(core.AxisResolveMap)
def resolve_axis(self, *args, **kwargs):
return self._set_resolve(axis=core.AxisResolveMap(*args, **kwargs))
@utils.use_signature(core.LegendResolveMap)
def resolve_legend(self, *args, **kwargs):
return self._set_resolve(legend=core.LegendResolveMap(*args, **kwargs))
@utils.use_signature(core.ScaleResolveMap)
def resolve_scale(self, *args, **kwargs):
return self._set_resolve(scale=core.ScaleResolveMap(*args, **kwargs))
class _EncodingMixin(object):
@utils.use_signature(core.FacetedEncoding)
def encode(self, *args, **kwargs):
# Convert args to kwargs based on their types.
kwargs = utils.infer_encoding_types(args, kwargs, channels)
# get a copy of the dict representation of the previous encoding
copy = self.copy(deep=['encoding'])
encoding = copy._get('encoding', {})
if isinstance(encoding, core.VegaLiteSchema):
encoding = {k: v for k, v in encoding._kwds.items()
if v is not Undefined}
# update with the new encodings, and apply them to the copy
encoding.update(kwargs)
copy.encoding = core.FacetedEncoding(**encoding)
return copy
def facet(self, facet=Undefined, row=Undefined, column=Undefined, data=Undefined,
columns=Undefined, **kwargs):
"""Create a facet chart from the current chart.
Faceted charts require data to be specified at the top level; if data
is not specified, the data from the current chart will be used at the
top level.
Parameters
----------
facet : string or alt.Facet (optional)
The data column to use as an encoding for a wrapped facet.
If specified, then neither row nor column may be specified.
column : string or alt.Column (optional)
The data column to use as an encoding for a column facet.
May be combined with row argument, but not with facet argument.
row : string or alt.Row (optional)
The data column to use as an encoding for a row facet.
May be combined with column argument, but not with facet argument.
data : string or dataframe (optional)
The dataset to use for faceting. If not supplied, then data must
be specified in the top-level chart that calls this method.
columns : integer
the maximum number of columns for a wrapped facet.
Returns
-------
self :
for chaining
"""
facet_specified = (facet is not Undefined)
rowcol_specified = (row is not Undefined or column is not Undefined)
if facet_specified and rowcol_specified:
raise ValueError("facet argument cannot be combined with row/column argument.")
if data is Undefined:
if self.data is Undefined:
raise ValueError("Facet charts require data to be specified at the top level.")
self = self.copy(deep=False)
data, self.data = self.data, Undefined
if facet_specified:
if isinstance(facet, str):
facet = channels.Facet(facet)
else:
facet = FacetMapping(row=row, column=column)
return FacetChart(spec=self, facet=facet, data=data, columns=columns, **kwargs)
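# Usage sketch (hypothetical 'Origin' column): facet a unit chart per category,
#     chart.facet(column='Origin:N')
# or use a wrapped facet with a column limit:
#     chart.facet('Origin:N', columns=3)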
class Chart(TopLevelMixin, _EncodingMixin, mixins.MarkMethodMixin,
core.TopLevelUnitSpec):
"""Create a basic Altair/Vega-Lite chart.
Although it is possible to set all Chart properties as constructor attributes,
it is more idiomatic to use methods such as ``mark_point()``, ``encode()``,
``transform_filter()``, ``properties()``, etc. See Altair's documentation
for details and examples: http://altair-viz.github.io/.
Attributes
----------
data : Data
An object describing the data source
mark : AnyMark
A string describing the mark type (one of `"bar"`, `"circle"`, `"square"`, `"tick"`,
`"line"`, * `"area"`, `"point"`, `"rule"`, `"geoshape"`, and `"text"`) or a
MarkDef object.
encoding : FacetedEncoding
A key-value mapping between encoding channels and definition of fields.
autosize : anyOf(AutosizeType, AutoSizeParams)
Sets how the visualization size should be determined. If a string, should be one of
`"pad"`, `"fit"` or `"none"`. Object values can additionally specify parameters for
content sizing and automatic resizing. `"fit"` is only supported for single and
layered views that don't use `rangeStep`. __Default value__: `pad`
background : string
CSS color property to use as the background of visualization.
**Default value:** none (transparent)
config : Config
Vega-Lite configuration object. This property can only be defined at the top-level
of a specification.
description : string
Description of this mark for commenting purpose.
height : float
The height of a visualization.
name : string
Name of the visualization for later reference.
padding : Padding
The default visualization padding, in pixels, from the edge of the visualization
canvas to the data rectangle. If a number, specifies padding for all sides. If an
object, the value should have the format `{"left": 5, "top": 5, "right": 5,
"bottom": 5}` to specify padding for each side of the visualization. __Default
value__: `5`
projection : Projection
An object defining properties of geographic projection. Works with `"geoshape"`
marks and `"point"` or `"line"` marks that have a channel (one or more of `"X"`,
`"X2"`, `"Y"`, `"Y2"`) with type `"latitude"`, or `"longitude"`.
selection : Mapping(required=[])
A key-value mapping between selection names and definitions.
title : anyOf(string, TitleParams)
Title for the plot.
transform : List(Transform)
An array of data transformations such as filter and new field calculation.
width : float
The width of a visualization.
"""
def __init__(self, data=Undefined, encoding=Undefined, mark=Undefined,
width=Undefined, height=Undefined, **kwargs):
super(Chart, self).__init__(data=data, encoding=encoding, mark=mark,
width=width, height=height, **kwargs)
@classmethod
def from_dict(cls, dct, validate=True):
"""Construct class from a dictionary representation
Parameters
----------
dct : dictionary
The dict from which to construct the class
validate : boolean
If True (default), then validate the input against the schema.
Returns
-------
obj : Chart object
The wrapped schema
Raises
------
jsonschema.ValidationError :
if validate=True and dct does not conform to the schema
"""
for class_ in TopLevelMixin.__subclasses__():
if class_ is Chart:
class_ = super(Chart, cls)
try:
return class_.from_dict(dct, validate=validate)
except jsonschema.ValidationError:
pass
# As a last resort, try using the Root vegalite object
return core.Root.from_dict(dct, validate)
def add_selection(self, *selections):
"""Add one or more selections to the chart."""
if not selections:
return self
copy = self.copy(deep=['selection'])
if copy.selection is Undefined:
copy.selection = {}
for s in selections:
copy.selection[s.name] = s.selection
return copy
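    # Hypothetical sketch: attach a named interval selection so that later
    # transforms can reference it (``brush`` is an assumed name):
    #
    #     brush = selection_interval()
    #     chart = chart.add_selection(brush)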
def interactive(self, name=None, bind_x=True, bind_y=True):
"""Make chart axes scales interactive
Parameters
----------
name : string
The selection name to use for the axes scales. This name should be
unique among all selections within the chart.
bind_x : boolean, default True
If true, then bind the interactive scales to the x-axis
bind_y : boolean, default True
If true, then bind the interactive scales to the y-axis
Returns
-------
chart :
copy of self, with interactive axes added
"""
encodings = []
if bind_x:
encodings.append('x')
if bind_y:
encodings.append('y')
return self.add_selection(selection_interval(bind='scales',
encodings=encodings))
def _check_if_valid_subspec(spec, classname):
"""Check if the spec is a valid sub-spec.
If it is not, then raise a ValueError
"""
err = ('Objects with "{0}" attribute cannot be used within {1}. '
'Consider defining the {0} attribute in the {1} object instead.')
if not isinstance(spec, (core.SchemaBase, dict)):
raise ValueError("Only chart objects can be used in {0}.".format(classname))
for attr in TOPLEVEL_ONLY_KEYS:
if isinstance(spec, core.SchemaBase):
val = getattr(spec, attr, Undefined)
else:
val = spec.get(attr, Undefined)
if val is not Undefined:
raise ValueError(err.format(attr, classname))
def _check_if_can_be_layered(spec):
"""Check if the spec can be layered."""
def _get(spec, attr):
if isinstance(spec, core.SchemaBase):
return spec._get(attr)
else:
return spec.get(attr, Undefined)
encoding = _get(spec, 'encoding')
if encoding is not Undefined:
for channel in ['row', 'column', 'facet']:
if _get(encoding, channel) is not Undefined:
raise ValueError("Faceted charts cannot be layered.")
if isinstance(spec, (Chart, LayerChart)):
return
if not isinstance(spec, (core.SchemaBase, dict)):
raise ValueError("Only chart objects can be layered.")
    if isinstance(spec, FacetChart) or _get(spec, 'facet') is not Undefined:
        raise ValueError("Faceted charts cannot be layered.")
if isinstance(spec, RepeatChart) or _get(spec, 'repeat') is not Undefined:
raise ValueError("Repeat charts cannot be layered.")
if isinstance(spec, ConcatChart) or _get(spec, 'concat') is not Undefined:
raise ValueError("Concatenated charts cannot be layered.")
if isinstance(spec, HConcatChart) or _get(spec, 'hconcat') is not Undefined:
raise ValueError("Concatenated charts cannot be layered.")
if isinstance(spec, VConcatChart) or _get(spec, 'vconcat') is not Undefined:
raise ValueError("Concatenated charts cannot be layered.")
@utils.use_signature(core.TopLevelRepeatSpec)
class RepeatChart(TopLevelMixin, core.TopLevelRepeatSpec):
"""A chart repeated across rows and columns with small changes"""
def __init__(self, data=Undefined, spec=Undefined, repeat=Undefined, **kwargs):
_check_if_valid_subspec(spec, 'RepeatChart')
super(RepeatChart, self).__init__(data=data, spec=spec, repeat=repeat, **kwargs)
def interactive(self, name=None, bind_x=True, bind_y=True):
"""Make chart axes scales interactive
Parameters
----------
name : string
The selection name to use for the axes scales. This name should be
unique among all selections within the chart.
bind_x : boolean, default True
If true, then bind the interactive scales to the x-axis
bind_y : boolean, default True
If true, then bind the interactive scales to the y-axis
Returns
-------
chart :
copy of self, with interactive axes added
"""
copy = self.copy(deep=False)
copy.spec = copy.spec.interactive(name=name, bind_x=bind_x, bind_y=bind_y)
return copy
def add_selection(self, *selections):
"""Add one or more selections to the chart."""
if not selections or self.spec is Undefined:
return self
copy = self.copy()
copy.spec = copy.spec.add_selection(*selections)
return copy
def repeat(repeater='repeat'):
"""Tie a channel to the row or column within a repeated chart
The output of this should be passed to the ``field`` attribute of
a channel.
Parameters
----------
repeater : {'row'|'column'|'repeat'}
The repeater to tie the field to. Default is 'repeat'.
Returns
-------
repeat : RepeatRef object
"""
if repeater not in ['row', 'column', 'repeat']:
raise ValueError("repeater must be one of ['row', 'column', 'repeat']")
return core.RepeatRef(repeat=repeater)
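# Hypothetical sketch (``base`` is an assumed Chart and ``X`` the x-channel
# class; field names are illustrative): pass the RepeatRef as a channel's
# ``field`` and wrap the chart with .repeat():
#
#     base.encode(x=X(field=repeat('column'), type='quantitative')).repeat(
#         column=['sepal_length', 'petal_length'])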
@utils.use_signature(core.TopLevelConcatSpec)
class ConcatChart(TopLevelMixin, core.TopLevelConcatSpec):
"""A chart with horizontally-concatenated facets"""
def __init__(self, data=Undefined, concat=(), columns=Undefined, **kwargs):
# TODO: move common data to top level?
for spec in concat:
_check_if_valid_subspec(spec, 'ConcatChart')
super(ConcatChart, self).__init__(data=data, concat=list(concat),
columns=columns, **kwargs)
self.data, self.concat = _combine_subchart_data(self.data, self.concat)
def __ior__(self, other):
_check_if_valid_subspec(other, 'ConcatChart')
self.concat.append(other)
self.data, self.concat = _combine_subchart_data(self.data, self.concat)
return self
def __or__(self, other):
copy = self.copy(deep=['concat'])
copy |= other
return copy
def add_selection(self, *selections):
"""Add one or more selections to all subcharts."""
if not selections or not self.concat:
return self
copy = self.copy()
copy.concat = [chart.add_selection(*selections)
for chart in copy.concat]
return copy
def concat(*charts, **kwargs):
"""Concatenate charts horizontally"""
return ConcatChart(concat=charts, **kwargs)
@utils.use_signature(core.TopLevelHConcatSpec)
class HConcatChart(TopLevelMixin, core.TopLevelHConcatSpec):
"""A chart with horizontally-concatenated facets"""
def __init__(self, data=Undefined, hconcat=(), **kwargs):
# TODO: move common data to top level?
for spec in hconcat:
_check_if_valid_subspec(spec, 'HConcatChart')
super(HConcatChart, self).__init__(data=data, hconcat=list(hconcat), **kwargs)
self.data, self.hconcat = _combine_subchart_data(self.data, self.hconcat)
def __ior__(self, other):
_check_if_valid_subspec(other, 'HConcatChart')
self.hconcat.append(other)
self.data, self.hconcat = _combine_subchart_data(self.data, self.hconcat)
return self
def __or__(self, other):
copy = self.copy(deep=['hconcat'])
copy |= other
return copy
def add_selection(self, *selections):
"""Add one or more selections to all subcharts."""
if not selections or not self.hconcat:
return self
copy = self.copy()
copy.hconcat = [chart.add_selection(*selections)
for chart in copy.hconcat]
return copy
def hconcat(*charts, **kwargs):
"""Concatenate charts horizontally"""
return HConcatChart(hconcat=charts, **kwargs)
@utils.use_signature(core.TopLevelVConcatSpec)
class VConcatChart(TopLevelMixin, core.TopLevelVConcatSpec):
"""A chart with vertically-concatenated facets"""
def __init__(self, data=Undefined, vconcat=(), **kwargs):
# TODO: move common data to top level?
for spec in vconcat:
_check_if_valid_subspec(spec, 'VConcatChart')
super(VConcatChart, self).__init__(data=data, vconcat=list(vconcat), **kwargs)
self.data, self.vconcat = _combine_subchart_data(self.data, self.vconcat)
def __iand__(self, other):
_check_if_valid_subspec(other, 'VConcatChart')
self.vconcat.append(other)
self.data, self.vconcat = _combine_subchart_data(self.data, self.vconcat)
return self
def __and__(self, other):
copy = self.copy(deep=['vconcat'])
copy &= other
return copy
def add_selection(self, *selections):
"""Add one or more selections to all subcharts."""
if not selections or not self.vconcat:
return self
copy = self.copy()
copy.vconcat = [chart.add_selection(*selections)
for chart in copy.vconcat]
return copy
def vconcat(*charts, **kwargs):
"""Concatenate charts vertically"""
return VConcatChart(vconcat=charts, **kwargs)
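# Hypothetical composition sketch (``a`` and ``b`` are assumed Chart
# objects); these helpers mirror the operators defined above:
#
#     hconcat(a, b)   # equivalent to a | b
#     vconcat(a, b)   # equivalent to a & b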
@utils.use_signature(core.TopLevelLayerSpec)
class LayerChart(TopLevelMixin, _EncodingMixin, core.TopLevelLayerSpec):
"""A Chart with layers within a single panel"""
def __init__(self, data=Undefined, layer=(), **kwargs):
# TODO: move common data to top level?
# TODO: check for conflicting interaction
for spec in layer:
_check_if_valid_subspec(spec, 'LayerChart')
_check_if_can_be_layered(spec)
super(LayerChart, self).__init__(data=data, layer=list(layer), **kwargs)
self.data, self.layer = _combine_subchart_data(self.data, self.layer)
def __iadd__(self, other):
_check_if_valid_subspec(other, 'LayerChart')
_check_if_can_be_layered(other)
self.layer.append(other)
self.data, self.layer = _combine_subchart_data(self.data, self.layer)
return self
def __add__(self, other):
copy = self.copy(deep=['layer'])
copy += other
return copy
def add_layers(self, *layers):
copy = self.copy(deep=['layer'])
for layer in layers:
copy += layer
return copy
def interactive(self, name=None, bind_x=True, bind_y=True):
"""Make chart axes scales interactive
Parameters
----------
name : string
The selection name to use for the axes scales. This name should be
unique among all selections within the chart.
bind_x : boolean, default True
If true, then bind the interactive scales to the x-axis
bind_y : boolean, default True
If true, then bind the interactive scales to the y-axis
Returns
-------
chart :
copy of self, with interactive axes added
"""
if not self.layer:
raise ValueError("LayerChart: cannot call interactive() until a "
"layer is defined")
copy = self.copy(deep=['layer'])
copy.layer[0] = copy.layer[0].interactive(name=name, bind_x=bind_x, bind_y=bind_y)
return copy
def add_selection(self, *selections):
"""Add one or more selections to all subcharts."""
if not selections or not self.layer:
return self
copy = self.copy()
copy.layer[0] = copy.layer[0].add_selection(*selections)
return copy
def layer(*charts, **kwargs):
"""layer multiple charts"""
return LayerChart(layer=charts, **kwargs)
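# Hypothetical sketch: overlay points on a line within one panel
# (``base`` is an assumed Chart):
#
#     combined = layer(base.mark_line(), base.mark_point())
#     # equivalent to: base.mark_line() + base.mark_point()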
@utils.use_signature(core.TopLevelFacetSpec)
class FacetChart(TopLevelMixin, core.TopLevelFacetSpec):
"""A Chart with layers within a single panel"""
def __init__(self, data=Undefined, spec=Undefined, facet=Undefined, **kwargs):
_check_if_valid_subspec(spec, 'FacetChart')
super(FacetChart, self).__init__(data=data, spec=spec, facet=facet, **kwargs)
def interactive(self, name=None, bind_x=True, bind_y=True):
"""Make chart axes scales interactive
Parameters
----------
name : string
The selection name to use for the axes scales. This name should be
unique among all selections within the chart.
bind_x : boolean, default True
If true, then bind the interactive scales to the x-axis
bind_y : boolean, default True
If true, then bind the interactive scales to the y-axis
Returns
-------
chart :
copy of self, with interactive axes added
"""
copy = self.copy(deep=False)
copy.spec = copy.spec.interactive(name=name, bind_x=bind_x, bind_y=bind_y)
return copy
def add_selection(self, *selections):
"""Add one or more selections to the chart."""
if not selections or self.spec is Undefined:
return self
copy = self.copy()
copy.spec = copy.spec.add_selection(*selections)
return copy
def topo_feature(url, feature, **kwargs):
"""A convenience function for extracting features from a topojson url
Parameters
----------
url : string
        A URL from which to load the data set.
feature : string
The name of the TopoJSON object set to convert to a GeoJSON feature collection. For
example, in a map of the world, there may be an object set named `"countries"`.
Using the feature property, we can extract this set and generate a GeoJSON feature
object for each country.
**kwargs :
        additional keyword arguments passed to TopoDataFormat
"""
return core.UrlData(url=url, format=core.TopoDataFormat(type='topojson',
feature=feature, **kwargs))
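# Hypothetical usage sketch (URL and object-set name are illustrative):
#
#     counties = topo_feature('https://example.com/us-10m.json', 'counties')
#     Chart(counties).mark_geoshape()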
def _combine_subchart_data(data, subcharts):
def remove_data(subchart):
if subchart.data is not Undefined:
subchart = subchart.copy()
subchart.data = Undefined
return subchart
if not subcharts:
# No subcharts = nothing to do.
pass
elif data is Undefined:
# Top level has no data; all subchart data must
# be identical to proceed.
subdata = subcharts[0].data
if subdata is not Undefined and all(c.data is subdata for c in subcharts):
data = subdata
subcharts = [remove_data(c) for c in subcharts]
else:
# Top level has data; subchart data must be either
# undefined or identical to proceed.
if all(c.data is Undefined or c.data is data for c in subcharts):
subcharts = [remove_data(c) for c in subcharts]
return data, subcharts
@utils.use_signature(core.SequenceParams)
def sequence(start, stop=None, step=Undefined, as_=Undefined, **kwds):
"""Sequence generator."""
if stop is None:
start, stop = 0, start
params = core.SequenceParams(
start=start, stop=stop, step=step, **{'as': as_}
)
return core.SequenceGenerator(sequence=params, **kwds)
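# Hypothetical sketch, intended to generate the values 0, 2, 4, 6, 8 into a
# field named "t" (the stop value is exclusive):
#
#     data = sequence(0, 10, step=2, as_='t')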
@utils.use_signature(core.GraticuleParams)
def graticule(**kwds):
"""Graticule generator."""
if not kwds:
# graticule: True indicates default parameters
graticule = True
else:
graticule = core.GraticuleParams(**kwds)
return core.GraticuleGenerator(graticule=graticule)
def sphere():
"""Sphere generator."""
return core.SphereGenerator(sphere=True)
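# Hypothetical map-background sketch: a filled sphere layered under
# graticule lines (colors and step values are illustrative):
#
#     layer(Chart(sphere()).mark_geoshape(fill='lightblue'),
#           Chart(graticule(step=[15, 15])).mark_geoshape(stroke='gray'))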
| [
"[email protected]"
]
| |
9ca5ac9b0309eeb6b7ae197443b0c2be0b04ea69 | 59ac1d0f09ebfb527701031f3ab2cfbfb8055f51 | /soapsales/customers/signals.py | fc93f81a6adc77f22799cb456aa27326ae4c6f21 | []
| no_license | DUMBALINYOLO/erpmanu | d4eb61b66cfa3704bd514b58580bdfec5639e3b0 | db979bafcc7481f60af467d1f48d0a81bbbfc1aa | refs/heads/master | 2023-04-28T13:07:45.593051 | 2021-05-12T09:30:23 | 2021-05-12T09:30:23 | 288,446,097 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 536 | py | from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
import uuid
from customers.models import Customer
@receiver(post_save, sender=Customer)
def post_save_create_customer_number_and_customer_number(sender, instance, created, **kwargs):
if created:
instance.create_customer_account()
if instance.customer_number == '':
instance.customer_number = str(uuid.uuid4()).replace("-", '').upper()[:20]
instance.save()
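# Note (hypothetical hardening): passing a dispatch_uid to @receiver, e.g.
# @receiver(post_save, sender=Customer, dispatch_uid="customer_post_save"),
# would guard against the handler being connected twice on double import.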
| [
"[email protected]"
]
| |
33013989259884ab0ed306b1a8ffd64725df92f6 | 7c009d77bc0124b69abdd5bbf4d00ee00a6de881 | /process/migrations/0020_auto_20210606_1321.py | 23a2cb9944ae79b25e63e50e2bb315ad1da36180 | []
| no_license | Rajeshwari33/POProcess | 85598b3bb78c1bcc3bea583fcd106fd32eb97c99 | dde399029b01554f97988709688e14193a96cb1a | refs/heads/master | 2023-05-25T18:33:45.589819 | 2021-06-15T16:27:37 | 2021-06-15T16:27:37 | 367,557,930 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 866 | py | # Generated by Django 3.2 on 2021-06-06 07:51
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('process', '0019_mailcredentials'),
]
operations = [
migrations.AddField(
model_name='mailcredentials',
name='created_by',
field=models.PositiveSmallIntegerField(null=True, verbose_name='User Id'),
),
migrations.AddField(
model_name='mailcredentials',
name='created_date',
field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='Created Date'),
),
migrations.AddField(
model_name='mailcredentials',
name='is_active',
field=models.BooleanField(default=True, verbose_name='Active ?'),
),
]
| [
"[email protected]"
]
| |
58574ad0dad5911c3bfcbe11a05877544c674def | 3f327d2654b85b922909925b9f475315d78f4652 | /Backend/lib/python3.6/site-packages/twilio/rest/api/v2010/account/message/media.py | 17f63437924a2838c56dc60c9b02c116a651dff3 | [
"MIT"
]
| permissive | brianwang1217/SelfImprovementWebApp | 8db45914027537aee9614f9d218c93cc08dc90f8 | 7892fc4ee5434307b74b14257b29a5f05a0a0dd7 | refs/heads/master | 2022-12-13T15:01:08.595735 | 2018-06-23T04:46:06 | 2018-06-23T04:46:06 | 137,548,289 | 1 | 1 | MIT | 2022-05-25T01:28:29 | 2018-06-16T02:48:52 | Python | UTF-8 | Python | false | false | 14,742 | py | # coding=utf-8
"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import serialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
class MediaList(ListResource):
""" """
def __init__(self, version, account_sid, message_sid):
"""
Initialize the MediaList
:param Version version: Version that contains the resource
:param account_sid: The unique sid that identifies this account
:param message_sid: A string that uniquely identifies this message
:returns: twilio.rest.api.v2010.account.message.media.MediaList
:rtype: twilio.rest.api.v2010.account.message.media.MediaList
"""
super(MediaList, self).__init__(version)
# Path Solution
self._solution = {
'account_sid': account_sid,
'message_sid': message_sid,
}
self._uri = '/Accounts/{account_sid}/Messages/{message_sid}/Media.json'.format(**self._solution)
def stream(self, date_created_before=values.unset, date_created=values.unset,
date_created_after=values.unset, limit=None, page_size=None):
"""
Streams MediaInstance records from the API as a generator stream.
This operation lazily loads records as efficiently as possible until the limit
is reached.
The results are returned as a generator, so this operation is memory efficient.
:param datetime date_created_before: Filter by date created
:param datetime date_created: Filter by date created
:param datetime date_created_after: Filter by date created
:param int limit: Upper limit for the number of records to return. stream()
guarantees to never return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, stream() will attempt to read the
limit with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.api.v2010.account.message.media.MediaInstance]
"""
limits = self._version.read_limits(limit, page_size)
page = self.page(
date_created_before=date_created_before,
date_created=date_created,
date_created_after=date_created_after,
page_size=limits['page_size'],
)
return self._version.stream(page, limits['limit'], limits['page_limit'])
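    # Hypothetical usage sketch (``client`` is an assumed twilio.rest.Client
    # instance; the sids are placeholders):
    #
    #     for media in client.messages('MMxxxxxxxx').media.stream(limit=20):
    #         print(media.sid, media.content_type)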
def list(self, date_created_before=values.unset, date_created=values.unset,
date_created_after=values.unset, limit=None, page_size=None):
"""
Lists MediaInstance records from the API as a list.
Unlike stream(), this operation is eager and will load `limit` records into
memory before returning.
:param datetime date_created_before: Filter by date created
:param datetime date_created: Filter by date created
:param datetime date_created_after: Filter by date created
:param int limit: Upper limit for the number of records to return. list() guarantees
never to return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, list() will attempt to read the limit
with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.api.v2010.account.message.media.MediaInstance]
"""
return list(self.stream(
date_created_before=date_created_before,
date_created=date_created,
date_created_after=date_created_after,
limit=limit,
page_size=page_size,
))
def page(self, date_created_before=values.unset, date_created=values.unset,
date_created_after=values.unset, page_token=values.unset,
page_number=values.unset, page_size=values.unset):
"""
Retrieve a single page of MediaInstance records from the API.
Request is executed immediately
:param datetime date_created_before: Filter by date created
:param datetime date_created: Filter by date created
:param datetime date_created_after: Filter by date created
:param str page_token: PageToken provided by the API
:param int page_number: Page Number, this value is simply for client state
:param int page_size: Number of records to return, defaults to 50
:returns: Page of MediaInstance
:rtype: twilio.rest.api.v2010.account.message.media.MediaPage
"""
params = values.of({
'DateCreated<': serialize.iso8601_datetime(date_created_before),
'DateCreated': serialize.iso8601_datetime(date_created),
'DateCreated>': serialize.iso8601_datetime(date_created_after),
'PageToken': page_token,
'Page': page_number,
'PageSize': page_size,
})
response = self._version.page(
'GET',
self._uri,
params=params,
)
return MediaPage(self._version, response, self._solution)
def get_page(self, target_url):
"""
Retrieve a specific page of MediaInstance records from the API.
Request is executed immediately
:param str target_url: API-generated URL for the requested results page
:returns: Page of MediaInstance
:rtype: twilio.rest.api.v2010.account.message.media.MediaPage
"""
response = self._version.domain.twilio.request(
'GET',
target_url,
)
return MediaPage(self._version, response, self._solution)
def get(self, sid):
"""
Constructs a MediaContext
:param sid: Fetch by unique media Sid
:returns: twilio.rest.api.v2010.account.message.media.MediaContext
:rtype: twilio.rest.api.v2010.account.message.media.MediaContext
"""
return MediaContext(
self._version,
account_sid=self._solution['account_sid'],
message_sid=self._solution['message_sid'],
sid=sid,
)
def __call__(self, sid):
"""
Constructs a MediaContext
:param sid: Fetch by unique media Sid
:returns: twilio.rest.api.v2010.account.message.media.MediaContext
:rtype: twilio.rest.api.v2010.account.message.media.MediaContext
"""
return MediaContext(
self._version,
account_sid=self._solution['account_sid'],
message_sid=self._solution['message_sid'],
sid=sid,
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Api.V2010.MediaList>'
class MediaPage(Page):
""" """
def __init__(self, version, response, solution):
"""
Initialize the MediaPage
:param Version version: Version that contains the resource
:param Response response: Response from the API
:param account_sid: The unique sid that identifies this account
:param message_sid: A string that uniquely identifies this message
:returns: twilio.rest.api.v2010.account.message.media.MediaPage
:rtype: twilio.rest.api.v2010.account.message.media.MediaPage
"""
super(MediaPage, self).__init__(version, response)
# Path Solution
self._solution = solution
def get_instance(self, payload):
"""
Build an instance of MediaInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.api.v2010.account.message.media.MediaInstance
:rtype: twilio.rest.api.v2010.account.message.media.MediaInstance
"""
return MediaInstance(
self._version,
payload,
account_sid=self._solution['account_sid'],
message_sid=self._solution['message_sid'],
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Api.V2010.MediaPage>'
class MediaContext(InstanceContext):
""" """
def __init__(self, version, account_sid, message_sid, sid):
"""
Initialize the MediaContext
:param Version version: Version that contains the resource
:param account_sid: The account_sid
:param message_sid: The message_sid
:param sid: Fetch by unique media Sid
:returns: twilio.rest.api.v2010.account.message.media.MediaContext
:rtype: twilio.rest.api.v2010.account.message.media.MediaContext
"""
super(MediaContext, self).__init__(version)
# Path Solution
self._solution = {
'account_sid': account_sid,
'message_sid': message_sid,
'sid': sid,
}
self._uri = '/Accounts/{account_sid}/Messages/{message_sid}/Media/{sid}.json'.format(**self._solution)
def delete(self):
"""
Deletes the MediaInstance
:returns: True if delete succeeds, False otherwise
:rtype: bool
"""
return self._version.delete('delete', self._uri)
def fetch(self):
"""
Fetch a MediaInstance
:returns: Fetched MediaInstance
:rtype: twilio.rest.api.v2010.account.message.media.MediaInstance
"""
params = values.of({})
payload = self._version.fetch(
'GET',
self._uri,
params=params,
)
return MediaInstance(
self._version,
payload,
account_sid=self._solution['account_sid'],
message_sid=self._solution['message_sid'],
sid=self._solution['sid'],
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Api.V2010.MediaContext {}>'.format(context)
class MediaInstance(InstanceResource):
""" """
def __init__(self, version, payload, account_sid, message_sid, sid=None):
"""
Initialize the MediaInstance
:returns: twilio.rest.api.v2010.account.message.media.MediaInstance
:rtype: twilio.rest.api.v2010.account.message.media.MediaInstance
"""
super(MediaInstance, self).__init__(version)
# Marshaled Properties
self._properties = {
'account_sid': payload['account_sid'],
'content_type': payload['content_type'],
'date_created': deserialize.rfc2822_datetime(payload['date_created']),
'date_updated': deserialize.rfc2822_datetime(payload['date_updated']),
'parent_sid': payload['parent_sid'],
'sid': payload['sid'],
'uri': payload['uri'],
}
# Context
self._context = None
self._solution = {
'account_sid': account_sid,
'message_sid': message_sid,
'sid': sid or self._properties['sid'],
}
@property
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: MediaContext for this MediaInstance
:rtype: twilio.rest.api.v2010.account.message.media.MediaContext
"""
if self._context is None:
self._context = MediaContext(
self._version,
account_sid=self._solution['account_sid'],
message_sid=self._solution['message_sid'],
sid=self._solution['sid'],
)
return self._context
@property
def account_sid(self):
"""
:returns: The unique sid that identifies this account
:rtype: unicode
"""
return self._properties['account_sid']
@property
def content_type(self):
"""
:returns: The default mime-type of the media
:rtype: unicode
"""
return self._properties['content_type']
@property
def date_created(self):
"""
:returns: The date this resource was created
:rtype: datetime
"""
return self._properties['date_created']
@property
def date_updated(self):
"""
:returns: The date this resource was last updated
:rtype: datetime
"""
return self._properties['date_updated']
@property
def parent_sid(self):
"""
:returns: The unique id of the resource that created the media.
:rtype: unicode
"""
return self._properties['parent_sid']
@property
def sid(self):
"""
:returns: A string that uniquely identifies this media
:rtype: unicode
"""
return self._properties['sid']
@property
def uri(self):
"""
:returns: The URI for this resource
:rtype: unicode
"""
return self._properties['uri']
def delete(self):
"""
Deletes the MediaInstance
:returns: True if delete succeeds, False otherwise
:rtype: bool
"""
return self._proxy.delete()
def fetch(self):
"""
Fetch a MediaInstance
:returns: Fetched MediaInstance
:rtype: twilio.rest.api.v2010.account.message.media.MediaInstance
"""
return self._proxy.fetch()
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Api.V2010.MediaInstance {}>'.format(context)
| [
"[email protected]"
]
| |
e077000339f79423ee4da9c789b77491ab87ac5f | fbe3a52d2dd02bec18f7f52b31e357aed192a308 | /misc/begin/exercises/def1.py | 4cbd865b23fdbc03446b3cc80ad9e332cc9a1e7d | []
| no_license | lherrada/python | 8fc5bd5ceb6038479fa6347dd6c0bd6e17f92e98 | d8260f35ba91b89590ef8e489188fb80ca1aed4e | refs/heads/master | 2022-10-29T06:23:17.297554 | 2022-09-24T15:45:59 | 2022-09-24T15:45:59 | 23,411,904 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 347 | py | #!/usr/bin/python
# apply() is the Python 2 built-in equivalent of calling func(*args).
def general(func, arg):
    apply(func, (arg,))
def name(x):
    print "Your name is " + x
def age(n):
    print "Your age is %d" % n
print "=" * 30
# Pairs of (function, argument) to dispatch dynamically.
datain = [(name, "Erikita"), (age, 38)]
# Call each function via apply()...
for i, j in datain:
    apply(i, (j,))
# ...and again by calling the function object directly (the modern form).
for i, j in datain:
    i(j)
#general(name,"Erikita")
#general(age,38)
#name("Erika")
#x=name
#x("Luis")
#age(37)
| [
"[email protected]"
]
| |
346a1d1871be3ea9c34e4439423a76c4f242e810 | 669e9241b02bdaa303fbc2fd4023b90d4d179a59 | /Randomized Pulled String/challenge3.py | 2d68218a4951ecd60943d4a45d32bde6066f8181 | []
| no_license | benjaminpotter/HatchProjects | 0854cf46ae7c3781468116a5d63b703dd54ae68c | 7f6a948d3474c755d071751b725c059e6c7f3553 | refs/heads/master | 2022-01-28T16:58:03.449073 | 2019-08-16T13:47:30 | 2019-08-16T13:47:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 175 | py | def mouseClicked():
    # Redraw on every click: black background, thick translucent strokes.
    background(0)
    strokeWeight(10)
    # Draw one line per pixel column, pulled toward a random x at the bottom.
    for i in range(0, width):
        r = random(0, 255)
        x = random(0, width)
        stroke(r, r, r, 100)
        line(i, 0, x, height)
| [
"[email protected]"
]
| |
83f325539952c23909157086bbb01b3725047fbd | d60f13e52d385fd7f839ee441d8df05b34f8c75b | /wirecell/test/__main__.py | ebc11006d8d4b16dfbd3ebe5983fdcf478ad5421 | []
| no_license | wenqiang-gu/wire-cell-python | 07fe7ac420fedf747e97ba424052e85222316234 | 981541f5618b94d55ee5f07c6eeff6fbbfa5fa93 | refs/heads/master | 2022-10-02T08:51:27.193403 | 2022-07-29T16:23:42 | 2022-07-29T16:23:42 | 228,528,245 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 711 | py | import math
import click
from wirecell.util import ario, plottools
@click.group("test")
@click.pass_context
def cli(ctx):
'''
Wire Cell Test Commands
'''
@cli.command("plot")
@click.option("-n", "--name", default="noise",
help="The test name")
@click.argument("datafile")
@click.argument("output")
@click.pass_context
def plot(ctx, name, datafile, output):
'''
Make plots from file made by test_<test>.
'''
from importlib import import_module
mod = import_module(f'wirecell.test.{name}')
fp = ario.load(datafile)
with plottools.pages(output) as out:
mod.plot(fp, out)
def main():
cli(obj=dict())
if __name__ == '__main__':
main()
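# Hypothetical invocation sketch (file names are illustrative); given the
# __main__.py layout, the command group can be run as a module:
#
#     python -m wirecell.test plot -n noise sim-output.npz plots.pdf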
| [
"[email protected]"
]
| |
850ade5eeed22d497b51edf7a8f5ccd3b3049007 | 61efd764ae4586b6b2ee5e6e2c255079e2b01cfc | /azure-mgmt-network/azure/mgmt/network/v2017_10_01/models/metric_specification.py | a86668fcff8d8d4acf56738739afb3ab74282378 | [
"MIT"
]
| permissive | AutorestCI/azure-sdk-for-python | a3642f53b5bf79d1dbb77851ec56f4cc0c5b3b61 | 60b0726619ce9d7baca41f6cd38f741d74c4e54a | refs/heads/master | 2021-01-21T02:23:59.207091 | 2018-01-31T21:31:27 | 2018-01-31T21:31:27 | 55,251,306 | 4 | 3 | null | 2017-11-13T17:57:46 | 2016-04-01T17:48:48 | Python | UTF-8 | Python | false | false | 4,163 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class MetricSpecification(Model):
"""Description of metrics specification.
:param name: The name of the metric.
:type name: str
:param display_name: The display name of the metric.
:type display_name: str
:param display_description: The description of the metric.
:type display_description: str
:param unit: Units the metric to be displayed in.
:type unit: str
:param aggregation_type: The aggregation type.
:type aggregation_type: str
:param availabilities: List of availability.
:type availabilities:
list[~azure.mgmt.network.v2017_10_01.models.Availability]
:param enable_regional_mdm_account: Whether regional MDM account enabled.
:type enable_regional_mdm_account: bool
:param fill_gap_with_zero: Whether gaps would be filled with zeros.
:type fill_gap_with_zero: bool
:param metric_filter_pattern: Pattern for the filter of the metric.
:type metric_filter_pattern: str
:param dimensions: List of dimensions.
:type dimensions: list[~azure.mgmt.network.v2017_10_01.models.Dimension]
:param is_internal: Whether the metric is internal.
:type is_internal: bool
:param source_mdm_account: The source MDM account.
:type source_mdm_account: str
:param source_mdm_namespace: The source MDM namespace.
:type source_mdm_namespace: str
:param resource_id_dimension_name_override: The resource Id dimension name
override.
:type resource_id_dimension_name_override: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'display_description': {'key': 'displayDescription', 'type': 'str'},
'unit': {'key': 'unit', 'type': 'str'},
'aggregation_type': {'key': 'aggregationType', 'type': 'str'},
'availabilities': {'key': 'availabilities', 'type': '[Availability]'},
'enable_regional_mdm_account': {'key': 'enableRegionalMdmAccount', 'type': 'bool'},
'fill_gap_with_zero': {'key': 'fillGapWithZero', 'type': 'bool'},
'metric_filter_pattern': {'key': 'metricFilterPattern', 'type': 'str'},
'dimensions': {'key': 'dimensions', 'type': '[Dimension]'},
'is_internal': {'key': 'isInternal', 'type': 'bool'},
'source_mdm_account': {'key': 'sourceMdmAccount', 'type': 'str'},
'source_mdm_namespace': {'key': 'sourceMdmNamespace', 'type': 'str'},
'resource_id_dimension_name_override': {'key': 'resourceIdDimensionNameOverride', 'type': 'str'},
}
def __init__(self, name=None, display_name=None, display_description=None, unit=None, aggregation_type=None, availabilities=None, enable_regional_mdm_account=None, fill_gap_with_zero=None, metric_filter_pattern=None, dimensions=None, is_internal=None, source_mdm_account=None, source_mdm_namespace=None, resource_id_dimension_name_override=None):
super(MetricSpecification, self).__init__()
self.name = name
self.display_name = display_name
self.display_description = display_description
self.unit = unit
self.aggregation_type = aggregation_type
self.availabilities = availabilities
self.enable_regional_mdm_account = enable_regional_mdm_account
self.fill_gap_with_zero = fill_gap_with_zero
self.metric_filter_pattern = metric_filter_pattern
self.dimensions = dimensions
self.is_internal = is_internal
self.source_mdm_account = source_mdm_account
self.source_mdm_namespace = source_mdm_namespace
self.resource_id_dimension_name_override = resource_id_dimension_name_override
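# Hypothetical construction sketch (field values are illustrative, not taken
# from any real metric definition):
#
#     spec = MetricSpecification(
#         name='BytesInPerSecond',
#         display_name='Bytes in per second',
#         unit='BytesPerSecond',
#         aggregation_type='Average',
#     )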
| [
"[email protected]"
]
| |
c9ce85723629a707758ea22deedc74f0c563ea12 | 4b89a7de426fb53b999b5f3834404215a90817df | /pyobjc-framework-GameCenter/setup.py | 21ba4c119f399fb8e08f6ccbc52b420a124e686f | []
| no_license | peeyush-tm/pyobjc | a1f3ec167482566ddc7c895cfa2aca436109cf66 | da488946f6cc67a83dcc26c04484ca4f10fabc82 | refs/heads/master | 2021-01-20T19:26:06.015044 | 2016-05-22T14:53:37 | 2016-05-22T14:53:37 | 60,502,688 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,318 | py | '''
Wrappers for the "GameCenter" framework on MacOS X. The Address Book is
a centralized database for contact and other information for people. Appliations
that make use of the GameCenter framework all use the same database.
These wrappers don't include documentation, please check Apple's documention
for information on how to use this framework and PyObjC's documentation
for general tips and tricks regarding the translation between Python
and (Objective-)C frameworks
'''
from pyobjc_setup import setup, Extension
import os
VERSION="3.2a1"
setup(
name='pyobjc-framework-GameCenter',
version=VERSION,
description = "Wrappers for the framework GameCenter on Mac OS X",
long_description=__doc__,
packages = [ "GameCenter" ],
setup_requires = [
'pyobjc-core>=' + VERSION,
],
install_requires = [
'pyobjc-core>=' + VERSION,
'pyobjc-framework-Cocoa>=' + VERSION,
],
ext_modules = [
Extension("GameCenter._GameCenter",
[ "Modules/_GameCenter.m" ],
extra_link_args=["-framework", "GameKit"],
depends=[
os.path.join('Modules', fn)
for fn in os.listdir('Modules')
if fn.startswith('_GameCenter')
]
),
],
min_os_level='10.8',
)
| [
"[email protected]"
]
| |
53092dfd2bd0fa00448c9e96ce8c9b25bf3e34ce | 70fec09ceb625608d561937955c285c0c39f6d95 | /examples/basic_examples/http_middleware_service.py | c93260f707026e51e7be79c6dc733285377333fb | [
"MIT"
]
| permissive | kalaspuff/tomodachi | b285e2c73696d14e3c84a479745e00824fba7190 | deca849ec2b4cdc3d27f06e9ce0056fac0146a1a | refs/heads/master | 2023-08-31T00:32:12.042486 | 2023-08-21T13:02:24 | 2023-08-21T13:02:24 | 62,165,703 | 191 | 28 | MIT | 2023-09-11T23:32:51 | 2016-06-28T18:43:51 | Python | UTF-8 | Python | false | false | 1,707 | py | import asyncio
from typing import Any, Callable, Dict
from aiohttp import web
import tomodachi
from tomodachi import Options, http, http_error
async def middleware_function(
func: Callable, service: Any, request: web.Request, context: Dict, *args: Any, **kwargs: Any
) -> Any:
# Functionality before function is called
tomodachi.get_logger().info("middleware before")
return_value = await func(*args, **kwargs)
    # It is also possible to pass in extra arguments or keyword arguments, for example:
# return_value = await func(*args, id='overridden', **kwargs)
# Functionality after function is called
tomodachi.get_logger().info("middleware after")
return return_value
class ExampleHttpMiddlewareService(tomodachi.Service):
name = "example-http-service"
# Adds a middleware function that is run on every HTTP call. Several middlewares can be chained.
http_middleware = [middleware_function]
# Some options can be specified to define credentials, used ports, hostnames, access log, etc.
options = Options(
http=Options.HTTP(
port=4711,
content_type="text/plain; charset=utf-8",
access_log=True,
),
)
@http("GET", r"/example/?")
async def example(self, request: web.Request, **kwargs: Any) -> str:
await asyncio.sleep(1)
return "友達" # tomodachi
@http("GET", r"/example/(?P<id>[^/]+?)/?")
async def example_with_id(self, request: web.Request, id: str) -> str:
return "友達 (id: {})".format(id)
@http_error(status_code=404)
async def error_404(self, request: web.Request, **kwargs: Any) -> str:
return "error 404"
| [
"[email protected]"
]
| |
625a77678dafad3f72ea2f4629bed9b901e7f7cd | 2919484ba494fdb9ce60005392286d293d98c325 | /deep_autoviml/models/big_deep.py | 14b1e0dc911254d0e247500c32d6d37fae9f5323 | [
"Apache-2.0"
]
| permissive | Arunava98/deep_autoviml | d6c8d7bb701967d671eae6a8329018e32589d09d | 9902bb230f90d9da367445656fcefad2e2d5aea3 | refs/heads/master | 2023-07-20T03:31:38.705198 | 2021-08-26T14:19:38 | 2021-08-26T14:19:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,506 | py | ############################################################################################
#Copyright 2021 Google LLC
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
############################################################################################
import tensorflow as tf
from tensorflow import keras
#### Make sure it is Tensorflow 2.4 or greater!
from tensorflow.keras.optimizers import SGD, Adam, RMSprop
from tensorflow.keras import layers
from tensorflow.keras import optimizers
from tensorflow.keras import models
from tensorflow.keras import callbacks
from tensorflow.keras import backend as K
from tensorflow.keras import utils
from tensorflow.keras.layers import BatchNormalization
from tensorflow.keras import regularizers
from tensorflow.keras.layers import Reshape, MaxPooling1D, MaxPooling2D
from tensorflow.keras.layers import AveragePooling2D, AveragePooling1D
from tensorflow.keras import Model, Sequential
from tensorflow.keras.layers import Embedding, Reshape, Dropout, Dense
from tensorflow.keras.layers import Activation, Dense, Embedding, GlobalAveragePooling1D
from tensorflow.keras.layers import GlobalMaxPooling1D, Dropout, Conv1D
from tensorflow.keras.layers.experimental.preprocessing import TextVectorization
############################################################################################
model = models.Sequential([
BatchNormalization(),
Dropout(0.5),
layers.Dense(128, activation='relu', kernel_initializer='he_normal'),
BatchNormalization(),
Dropout(0.5),
layers.Dense(64, activation='relu', kernel_initializer='he_normal'),
BatchNormalization(),
Dropout(0.2),
layers.Dense(32, activation='relu', kernel_initializer='he_normal'),
BatchNormalization(),
Dropout(0.2),
])
| [
"[email protected]"
]
|