Dataset schema (reconstructed from the garbled column header; one record per row below, fields separated by `|`):

| column | dtype | observed range / values |
|---|---|---|
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 3 to 616 |
| content_id | string | length 40 |
| detected_licenses | sequence | 0 to 112 entries |
| license_type | string | 2 classes |
| repo_name | string | length 5 to 115 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 to 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 to 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 to 2023-09-06 01:08:06 |
| github_id | int64, nullable | 4.92k to 681M |
| star_events_count | int64 | 0 to 209k |
| fork_events_count | int64 | 0 to 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us], nullable | 2012-06-04 01:52:49 to 2023-09-14 21:59:50 |
| gha_created_at | timestamp[us], nullable | 2008-05-22 07:58:19 to 2023-08-21 12:35:19 |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 to 10.2M |
| extension | string | 188 classes |
| content | string | length 3 to 10.2M |
| authors | sequence | 1 entry |
| author_id | string | length 1 to 132 |
33c8c7e6cc382a9dbcd9a3eb49171fbcf67e4e72 | bedf68a6e2bb337d2848a4a55a24c71fd62484c7 | /tests/test_NMT_architectures/bidir_deep_LSTM_ConditionalGRU.py | 0151b7f437481dcaae0d6cdc7546fecde3951030 | [
"MIT"
] | permissive | 19ai/nmt-keras | 941d5bbffe1889d72e4d58ae77fd92d8db3b0df7 | ec56acb619b0c4be0558f737d5d848971fa282db | refs/heads/master | 2020-03-11T16:37:06.633273 | 2018-04-18T11:46:03 | 2018-04-18T11:46:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,746 | py | import argparse
import pytest
from keras import backend as K
from config import load_parameters
from data_engine.prepare_data import build_dataset
from main import train_model, apply_NMT_model
from sample_ensemble import sample_ensemble
from score import score_corpus
def load_tests_params():
params = load_parameters()
params['BATCH_SIZE'] = 10
params['WEIGHT_DECAY'] = 1e-4
params['RECURRENT_WEIGHT_DECAY'] = 1e-4
params['DROPOUT_P'] = 0.01
params['RECURRENT_INPUT_DROPOUT_P'] = 0.01
params['RECURRENT_DROPOUT_P'] = 0.01
params['USE_NOISE'] = True
params['NOISE_AMOUNT'] = 0.01
params['USE_BATCH_NORMALIZATION'] = True
params['BATCH_NORMALIZATION_MODE'] = 1
params['SOURCE_TEXT_EMBEDDING_SIZE'] = 8
params['TARGET_TEXT_EMBEDDING_SIZE'] = 8
params['DECODER_HIDDEN_SIZE'] = 4
params['ENCODER_HIDDEN_SIZE'] = 4
params['ATTENTION_SIZE'] = params['DECODER_HIDDEN_SIZE']
params['SKIP_VECTORS_HIDDEN_SIZE'] = params['DECODER_HIDDEN_SIZE']
params['DOUBLE_STOCHASTIC_ATTENTION_REG'] = 0.7
params['RELOAD'] = 0
params['MAX_EPOCH'] = 2
return params
def test_NMT_Bidir_deep_LSTM_ConditionalGRU():
params = load_tests_params()
# Current test params: Two-layered LSTM - ConditionalGRU
params['BIDIRECTIONAL_ENCODER'] = True
params['N_LAYERS_ENCODER'] = 2
params['BIDIRECTIONAL_DEEP_ENCODER'] = False
params['ENCODER_RNN_TYPE'] = 'LSTM'
params['DECODER_RNN_TYPE'] = 'ConditionalGRU'
params['N_LAYERS_DECODER'] = 2
params['REBUILD_DATASET'] = True
dataset = build_dataset(params)
params['INPUT_VOCABULARY_SIZE'] = dataset.vocabulary_len[params['INPUTS_IDS_DATASET'][0]]
params['OUTPUT_VOCABULARY_SIZE'] = dataset.vocabulary_len[params['OUTPUTS_IDS_DATASET'][0]]
params['MODEL_NAME'] = \
params['TASK_NAME'] + '_' + params['SRC_LAN'] + params['TRG_LAN'] + '_' + params['MODEL_TYPE'] + \
'_src_emb_' + str(params['SOURCE_TEXT_EMBEDDING_SIZE']) + \
'_bidir_' + str(params['BIDIRECTIONAL_ENCODER']) + \
'_enc_' + params['ENCODER_RNN_TYPE'] + '_*' + str(params['N_LAYERS_ENCODER']) + '_' + str(
params['ENCODER_HIDDEN_SIZE']) + \
'_dec_' + params['DECODER_RNN_TYPE'] + '_*' + str(params['N_LAYERS_DECODER']) + '_' + str(
params['DECODER_HIDDEN_SIZE']) + \
'_deepout_' + '_'.join([layer[0] for layer in params['DEEP_OUTPUT_LAYERS']]) + \
'_trg_emb_' + str(params['TARGET_TEXT_EMBEDDING_SIZE']) + \
'_' + params['OPTIMIZER'] + '_' + str(params['LR'])
params['STORE_PATH'] = K.backend() + '_test_train_models/' + params['MODEL_NAME'] + '/'
# Test several NMT-Keras utilities: train, sample, sample_ensemble, score_corpus...
train_model(params)
params['RELOAD'] = 2
apply_NMT_model(params)
parser = argparse.ArgumentParser('Parser for unit testing')
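    # Note: the ArgumentParser instance is used here as a plain attribute
    # container (a stand-in for parsed args); nothing is actually parsed.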
parser.dataset = params['DATASET_STORE_PATH'] + '/Dataset_' + params['DATASET_NAME'] + '_' + params['SRC_LAN'] + params['TRG_LAN'] + '.pkl'
parser.text = params['DATA_ROOT_PATH'] + '/' + params['TEXT_FILES']['val'] + params['SRC_LAN']
parser.splits = ['val']
parser.config = params['STORE_PATH'] + '/config.pkl'
parser.models = [params['STORE_PATH'] + '/epoch_' + str(2)]
parser.verbose = 0
parser.dest = None
parser.source = params['DATA_ROOT_PATH'] + '/' + params['TEXT_FILES']['val'] + params['SRC_LAN']
parser.target = params['DATA_ROOT_PATH'] + '/' + params['TEXT_FILES']['val'] + params['TRG_LAN']
parser.weights = []
for n_best in [True, False]:
parser.n_best = n_best
sample_ensemble(parser, params)
score_corpus(parser, params)
if __name__ == '__main__':
pytest.main([__file__])
| [
"[email protected]"
] | |
fe8974fa7e751cfea487290d10694d7ad661d211 | 491f29501fa7d484a5860f64aef3fa89fb18ca3d | /examples/mechanics/GeometricPrimitives/disk_on_box.py | 275e8e9cb3d623f8b232906ba95792f7316f040e | [
"Apache-2.0"
] | permissive | siconos/siconos-tutorials | e7e6ffbaaea49add49eddd317c46760393e3ef9a | 0472c74e27090c76361d0b59283625ea88f80f4b | refs/heads/master | 2023-06-10T16:43:13.060120 | 2023-06-01T07:21:25 | 2023-06-01T07:21:25 | 152,255,663 | 7 | 2 | Apache-2.0 | 2021-04-08T12:00:39 | 2018-10-09T13:26:39 | Jupyter Notebook | UTF-8 | Python | false | false | 4,596 | py | #!/usr/bin/env python
#
# Example of one object under gravity with one contactor and a ground
# using the Siconos proposed mechanics API
#
from siconos.mechanics.collision.tools import Contactor
from siconos.io.mechanics_run import MechanicsHdf5Runner, MechanicsHdf5Runner_run_options
from siconos.mechanics.collision.bullet import SiconosBulletOptions, SICONOS_BULLET_2D
import siconos.numerics as sn
import siconos.kernel as sk
import math
restart=False
if not restart:
# Creation of the hdf5 file for input/output
with MechanicsHdf5Runner() as io:
# Definition of a sphere
io.add_primitive_shape('Disk', 'Disk', (2,),
insideMargin=0.0, outsideMargin=0.0)
# Definition of the ground shape
io.add_primitive_shape('Ground', 'Box2d', (20, 1),
insideMargin=0.0, outsideMargin=0.0)
# Definition of a non smooth law. As no group ids are specified it
# is between contactors of group id 0.
io.add_Newton_impact_friction_nsl('contact', mu=0.1, e=0.5)
# The sphere object made with an unique Contactor : the sphere shape.
# As a mass is given, it is a dynamic system involved in contact
# detection and in the simulation. With no group id specified the
# Contactor belongs to group 0
io.add_object('disk', [Contactor('Disk')],
translation=[-1, 2.],
orientation = [math.pi/4.0],
velocity=[0, 0, 0.0],
mass=1., inertia =2.0)
# io.add_object('disk2', [Contactor('Disk')],
# translation=[0, 6.],
# velocity=[0, 0, -10.0],
# mass=1., inertia =2.0)
io.add_object('disk2', [Contactor('Disk')],
translation=[4*math.sqrt(2)/2., 2+4*math.sqrt(2)/2.],
orientation = [math.pi/4.0],
velocity=[0, 0, 0.0],
mass=1., inertia =2.0)
io.add_object('disk3', [Contactor('Disk')],
translation=[4*math.sqrt(2), 2.],
orientation = [math.pi/4.0],
velocity=[0, 0, -1.0],
mass=1., inertia =2.0)
# the ground object made with the ground shape. As the mass is
# not given, it is a static object only involved in contact
# detection.
io.add_object('ground', [Contactor('Ground')],
translation=[0, -.5])
# Run the simulation from the inputs previously defined and add
# results to the hdf5 file. The visualisation of the output may be done
# with the vview command.
bullet_options = SiconosBulletOptions()
bullet_options.worldScale = 1.0
bullet_options.contactBreakingThreshold = 0.04
bullet_options.dimension = SICONOS_BULLET_2D
bullet_options.perturbationIterations = 0
bullet_options.minimumPointsPerturbationThreshold = 0
options = sk.solver_options_create(sn.SICONOS_FRICTION_2D_NSGS)
options.iparam[sn.SICONOS_IPARAM_MAX_ITER] = 100000
options.dparam[sn.SICONOS_DPARAM_TOL] = 1e-8
T=2.0
if restart:
T=2.0
#T=1*0.001
hstep=0.01
run_options=MechanicsHdf5Runner_run_options()
run_options['t0']=0
run_options['T']=T
run_options['h']=hstep
run_options['bullet_options']=bullet_options
run_options['solver_options']=options
run_options['constraint_activation_threshold']=1e-05
run_options['Newton_options']=sk.SICONOS_TS_LINEAR
run_options['osns_assembly_type']= sk.GLOBAL_REDUCED
run_options['osi']= sk.MoreauJeanGOSI
run_options['verbose']=True
run_options['with_timer']=True
run_options['explode_Newton_solve']=True
run_options['explode_computeOneStep']=True
#run_options['output_frequency']=output_frequency
with MechanicsHdf5Runner(mode='r+') as io:
# By default earth gravity is applied and the units are those
# of the International System of Units.
# io.run(verbose=True,
# with_timer=False,
# bullet_options=bullet_options,
# face_class=None,
# edge_class=None,
# t0=0,
# T=T,
# h=0.001,
# theta=0.50001,
# Newton_max_iter=1,
# set_external_forces=None,
# solver_options=options,
# numerics_verbose=True,
# output_frequency=None,
# Newton_options= sk.SICONOS_TS_LINEAR_IMPLICIT,
# constraint_activation_threshold=1e-5,
# osi=sk.MoreauJeanGOSI,
# osns_assembly_type= sk.GLOBAL_REDUCED
# )
io.run(run_options)
| [
"[email protected]"
] | |
128763e94d58774059e8218f401b3f0fd84cad73 | bc9ebb347af6804c1bce6e960148ece1fbb34a47 | /1_python/bronze/10870.py | d22572f825e791fd8b0ffbb53df9d5bdaad4045a | [] | no_license | mooncs/BOJ | 48416fec1a059197a72de61c8d6e72f7fc8b542b | 45d2d5a8a6bf0f10d026f3846b70009914aa90d3 | refs/heads/main | 2023-08-13T15:30:47.395359 | 2021-10-11T13:37:04 | 2021-10-11T13:37:04 | 392,885,432 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 277 | py | # 피보나치 수 5
def fibo(x):
if x <= 1:
return x
return fibo(x-2) + fibo(x-1)
n = int(input())
print(fibo(n))
# # iterative version (for loop)
# def fibo(x):
# a, b = 0, 1
# for _ in range(x):
# a, b = b, a+b
# return(a)
# n = int(input())
# print(fibo(n))
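# A memoized variant for larger n (illustrative sketch, not in the original):
# from functools import lru_cache
# @lru_cache(maxsize=None)
# def fibo(x):
#     return x if x <= 1 else fibo(x-2) + fibo(x-1)
# Caching turns the exponential recursion above into a linear number of calls.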
| [
"[email protected]"
] | |
40aa8ad79278c7537cdc7550405b8ad12b72d6e7 | d554b1aa8b70fddf81da8988b4aaa43788fede88 | /5 - Notebooks e Data/1 - Análises numéricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/222/users/4081/codes/1643_1055.py | 5bb3ece083af6c50c3772003c55246d4aea20a12 | [] | no_license | JosephLevinthal/Research-projects | a3bc3ca3b09faad16f5cce5949a2279cf14742ba | 60d5fd6eb864a5181f4321e7a992812f3c2139f9 | refs/heads/master | 2022-07-31T06:43:02.686109 | 2020-05-23T00:24:26 | 2020-05-23T00:24:26 | 266,199,309 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 384 | py | # Test your code in small steps.
# Don't test everything only at the end, since that makes errors harder to pinpoint.
# Use the error messages to fix your code.
from math import radians, sin
v=float(input("velocidade inicial:"))  # initial velocity (prompt strings kept in Portuguese for the grader)
a=radians(float(input("angulos de tiro:")))  # launch angle, degrees converted to radians
d=float(input("valor da distancia: "))  # target distance
r=((v**2)*sin(2*a))/9.8  # projectile range: R = v**2 * sin(2a) / g
p=d-r
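# Worked check with assumed inputs (not from the original): v=10, angle=45,
# d=10.2 gives r = 100*sin(pi/2)/9.8 = 10.2040..., so abs(p) < 0.1 and the
# program prints "sim" ("yes": the shot lands within 0.1 of the target).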
if(abs(p)<0.1):
print("sim")
else:
print("nao") | [
"[email protected]"
] | |
6c2b98a894099f068a128d68de56fc0ff0dcdde7 | 2b11e7aa28b84af2e2a7fd8719af89f5fffd8a5b | /tests/test_models/test_user.py | 4b734786dc7c17a6ae7e51cd396963dfe334a4dd | [] | no_license | nikolasribeiro/AirBnB_clone | 6a3e3d65314a0131252461757943468628394ced | 4529c56a706f0d956a238522d912cf6260f2fa28 | refs/heads/main | 2023-03-10T22:10:10.665939 | 2021-02-27T19:33:11 | 2021-02-27T19:33:11 | 338,063,410 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,112 | py | #!/usr/bin/python3
""" Module tests/test_models/test_user"""
import models
from models.base_model import BaseModel
import os
import unittest
class TestBase_Model(unittest.TestCase):
""" class TestBase_Model """
def test_docstring(self):
""" function test_docstring """
msj = "Módulo does not has docstring"
self.assertIsNotNone(models.base_model.__doc__, msj)
msj = "Clase does not has docstring"
self.assertIsNotNone(BaseModel.__doc__, msj)
def test_executable_file(self):
""" function test_executable_file """
is_read_true = os.access("models/base_model.py", os.R_OK)
self.assertTrue(is_read_true)
is_write_true = os.access("models/base_model.py", os.W_OK)
self.assertTrue(is_write_true)
is_exec_true = os.access("models/base_model.py", os.X_OK)
self.assertTrue(is_exec_true)
def test_is_an_instance(self):
""" function test_is_an_instance """
my_model = BaseModel()
self.assertIsInstance(my_model, BaseModel)
def test_id(self):
""" function test_id """
my_model = BaseModel()
my_model1 = BaseModel()
self.assertNotEqual(my_model.id, my_model1.id)
def test_save(self):
""" function test_save """
my_model2 = BaseModel()
first_updated = my_model2.updated_at
my_model2.save()
second_updated = my_model2.updated_at
self.assertNotEqual(first_updated, second_updated)
def test_to_dict(self):
""" function test_to_dict """
my_model3 = BaseModel()
my_dict_model3 = my_model3.to_dict()
self.assertIsInstance(my_dict_model3, dict)
for key, value in my_dict_model3.items():
flag = 0
if my_dict_model3["__class__"] == "BaseModel":
flag += 1
self.assertTrue(flag == 1)
for key, value in my_dict_model3.items():
if key == "created_at":
self.assertIsInstance(value, str)
if key == "updated_at":
self.assertIsInstance(value, str)
| [
"[email protected]"
] | |
9315cc8bf5f6132cf366ce7e7d880acd7293cd3f | 88eeba6df8382687f36a4765bb298f76465c8e81 | /general/chainerrl/chainerrl/tests/links_tests/test_noisy_linear.py | 49b094838cec68f4f40aa91df7f9371a2755ba50 | [
"MIT"
] | permissive | daniellawson9999/quick_start | db0b6e382efd640754ca1e7800753c94e668423a | 947d61f118433dcd4cb845f27649ebfbc8062ecc | refs/heads/master | 2022-02-23T21:54:16.273530 | 2019-09-27T01:46:41 | 2019-09-27T01:46:41 | 197,873,032 | 0 | 0 | null | 2019-07-20T03:12:34 | 2019-07-20T03:12:31 | null | UTF-8 | Python | false | false | 2,798 | py | import unittest
import chainer
from chainer import cuda
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
import numpy
from chainerrl.links import noisy_linear
@testing.parameterize(*testing.product({
'size_args': [
(5,), # uninitialized from Chainer v2
(None, 5), # uninitialized
(6, 5), # initialized
],
'nobias': [False, True],
}))
class TestFactorizedNoisyLinear(unittest.TestCase):
def setUp(self):
mu = chainer.links.Linear(*self.size_args, nobias=self.nobias)
self.linear = noisy_linear.FactorizedNoisyLinear(mu)
def _test_calls(self, xp):
x_data = xp.arange(12).astype(numpy.float32).reshape((2, 6))
x = chainer.Variable(x_data)
self.linear(x)
self.linear(x_data + 1)
self.linear(x_data.reshape((2, 3, 2)))
def test_calls_cpu(self):
self._test_calls(numpy)
@attr.gpu
def test_calls_gpu(self):
self.linear.to_gpu(0)
self._test_calls(cuda.cupy)
@attr.gpu
def test_calls_gpu_after_to_gpu(self):
mu = self.linear.mu
mu.to_gpu(0)
self.linear = noisy_linear.FactorizedNoisyLinear(mu)
self._test_calls(cuda.cupy)
def _test_randomness(self, xp):
x = xp.random.standard_normal((10, 6)).astype(numpy.float32)
y1 = self.linear(x).array
y2 = self.linear(x).array
d = float(xp.mean(xp.square(y1 - y2)))
        # The parameter name suggests that xp.sqrt(d / 2) is approximately
        # sigma_scale = 0.4.  In fact, for each element [i, j] it holds:
        # E[(y2 - y1) ** 2] = 2 * Var(y) = (4 / pi) * sigma_scale ** 2
target = (0.4 ** 2) * 2
if self.nobias:
target *= 2 / numpy.pi
else:
target *= 2 / numpy.pi + numpy.sqrt(2 / numpy.pi) / y1.shape[1]
self.assertGreater(d, target / 3.)
self.assertLess(d, target * 3.)
@condition.retry(3)
def test_randomness_cpu(self):
self._test_randomness(numpy)
@attr.gpu
@condition.retry(3)
def test_randomness_gpu(self):
self.linear.to_gpu(0)
self._test_randomness(cuda.cupy)
def _test_non_randomness(self, xp):
# Noises should be the same in a batch
x0 = xp.random.standard_normal((1, 6)).astype(numpy.float32)
x = xp.broadcast_to(x0, (2, 6))
y = self.linear(x).array
xp.testing.assert_allclose(y[0], y[1], rtol=1e-4)
def test_non_randomness_cpu(self):
self._test_non_randomness(numpy)
@attr.gpu
def test_non_randomness_gpu(self):
self.linear.to_gpu(0)
self._test_non_randomness(cuda.cupy)
| [
"[email protected]"
] | |
e68cd4e6bd9c58300783c58ef5af7d4a342b9a02 | a5fe2130ea434f958f6151cd4d8c92d43f1c1ca1 | /src/tests/test_foo.py | 58530c34370ed3e758e56350196a2c76cf087398 | [] | no_license | DavidArmendariz/django-movies-app | 44da33cc200773ef473ea21f67a1dfff57ea0e96 | b77f1f538bae4a906d0b00597fef8fef97ea409b | refs/heads/master | 2023-03-11T16:43:02.956765 | 2021-02-23T04:28:17 | 2021-02-23T04:28:17 | 338,206,453 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 91 | py | def test_hello_world():
assert "hello_world" == "hello_world"
assert "foo" != "bar" | [
"[email protected]"
] | |
a7d4d5bf7c36dad18109efd3495f3312e958580c | 931515a9fdd4404cb548fb6b80c91590f5d5e3c9 | /presalytics/client/presalytics_ooxml_automation/models/chart_column_collections.py | 91c556a70437a9633b3b183127aef59d065963d3 | [
"MIT"
] | permissive | presalytics/python-client | 2e2fbd617b493ed8be90b844e23b736f294065e3 | 5d80b78562126feeeb49af4738e2c1aed12dce3a | refs/heads/master | 2021-08-18T02:41:06.938468 | 2020-12-07T15:04:18 | 2020-12-07T15:04:18 | 203,414,411 | 4 | 1 | MIT | 2020-03-31T19:27:47 | 2019-08-20T16:31:57 | Python | UTF-8 | Python | false | false | 3,705 | py | # coding: utf-8
"""
OOXML Automation
This API helps users convert Excel and Powerpoint documents into rich, live dashboards and stories. # noqa: E501
The version of the OpenAPI document: 0.1.0-no-tags
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class ChartColumnCollections(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'chart_data_id': 'str',
'id': 'str'
}
attribute_map = {
'chart_data_id': 'chartDataId',
'id': 'id'
}
def __init__(self, chart_data_id=None, id=None): # noqa: E501
"""ChartColumnCollections - a model defined in OpenAPI""" # noqa: E501
self._chart_data_id = None
self._id = None
self.discriminator = None
self.chart_data_id = chart_data_id
if id is not None:
self.id = id
@property
def chart_data_id(self):
"""Gets the chart_data_id of this ChartColumnCollections. # noqa: E501
:return: The chart_data_id of this ChartColumnCollections. # noqa: E501
:rtype: str
"""
return self._chart_data_id
@chart_data_id.setter
def chart_data_id(self, chart_data_id):
"""Sets the chart_data_id of this ChartColumnCollections.
:param chart_data_id: The chart_data_id of this ChartColumnCollections. # noqa: E501
:type: str
"""
self._chart_data_id = chart_data_id
@property
def id(self):
"""Gets the id of this ChartColumnCollections. # noqa: E501
:return: The id of this ChartColumnCollections. # noqa: E501
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this ChartColumnCollections.
:param id: The id of this ChartColumnCollections. # noqa: E501
:type: str
"""
self._id = id
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ChartColumnCollections):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
] | |
5a0826ad9f7cbc75cb16320948b0a920328fccb2 | a0d6cbae196c24254fb6f1411d756da0029e092a | /trunk/src/appserver/apps/user_srv_d/main.py | e626d55800d34accfb379fe41899e7ed973f72ca | [] | no_license | newguangzhou/haha-cluster | 8101ee1cb5b5ddbf916268029a33336c6fa0b06d | 4cee4172f3bd7939e0369d46603a62087e206277 | refs/heads/master | 2021-05-16T10:21:38.245881 | 2017-09-25T03:36:07 | 2017-09-25T03:36:07 | 104,700,121 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,348 | py | # -*- coding: utf-8 -*-
import sys
sys.path.append("../../")
sys.path.append("../terminal_srv_d/")
reload(sys)
sys.setdefaultencoding('utf-8')
#import setproctitle
from tornado import ioloop, gen
from tornado.web import Application, url
import tornado.options
from tornado.options import define, options
from lib.console import Console
from lib.pyloader import PyLoader
from lib.auth_dao import AuthDAO
from lib.user_dao import UserDAO
from lib.pet_dao import PetDAO
from lib.global_dao import GlobalDAO
#from lib.device_dao import DeivceDAO
from lib.sys_config import SysConfig
from lib.new_device_dao import NewDeviceDAO
from lib.gid_rpc import GIDRPC
from lib.msg_rpc import MsgRPC
from lib.boradcast_rpc import BroadcastRPC
from lib import sys_config, discover_config
from lib.service_discovery import server_discoverer_worker
from lib.mongo_dao_base import GetMongoClientAndAuth
from concurrent.futures import ThreadPoolExecutor
from lib.service_discovery import server_discoverer_worker
from lib import discover_config
import logging
logger = logging.getLogger(__name__)
support_setptitle = True
try:
import setproctitle
except:
support_setptitle = False
import handlers
define("debug_mode", 0, int,
"Enable debug mode, 1 is local debug, 2 is test, 0 is disable")
define("port", 9100, int, "Listen port, default is 9100")
define("address", "0.0.0.0", str, "Bind address, default is 127.0.0.1")
define("console_port", 9110, int, "Console listen port, default is 9110")
# Parse commandline
tornado.options.parse_command_line()
max_thread_count = 30
# Init pyloader
pyloader = PyLoader("config")
conf = pyloader.ReloadInst("Config")
mongo_pyloader = PyLoader("configs.mongo_config")
mongo_conf = mongo_pyloader.ReloadInst("MongoConfig",
debug_mode=options.debug_mode)
# Set process title
if support_setptitle:
setproctitle.setproctitle(conf.proctitle)
#
worker = server_discoverer_worker.ServerDiscovererWorker()
msg_rpc = MsgRPC(worker.get_discover())
broadcast_rpc = BroadcastRPC(worker.get_discover())
#
thread_pool = ThreadPoolExecutor(max_thread_count)
mongo_client = GetMongoClientAndAuth(mongo_conf.default_meta)
# Init web application
webapp = Application(
[
(r"/user/get_verify_code", handlers.GetVerifyCode),
(r"/user/push_message_cmd", handlers.PushMessageCmd),
(r"/user/login", handlers.Login),
(r"/user/register", handlers.Register),
(r"/user/logout", handlers.Logout),
(r"/user/regen_token", handlers.RegenToken),
(r"/user/set_home_wifi", handlers.SetHomeWifi),
(r"/user/set_home_location", handlers.SetHomeLocation),
(r"/user/get_base_infomation", handlers.GetBaseInfo),
(r"/user/suggest", handlers.Suggest),
(r"/pet/location", handlers.PetLocation),
(r"/pet/location_test", handlers.PetLocation2),
(r"/pet/walk", handlers.PetWalk),
(r"/pet/find", handlers.PetFind),
(r"/pet/get_pet_type_info", handlers.PetTypeInfo),
(r"/pet/get_pet_info", handlers.GetPetInfo),
(r"/pet/get_pet_status", handlers.GetPetStatusInfo),
(r"/pet/add_pet_info", handlers.AddPetInfo),
(r"/pet/update_pet_info", handlers.UpdatePetInfo),
(r"/pet/healthy/get_activity_info", handlers.GetActivityInfo),
(r"/pet/healthy/get_sleep_info", handlers.GetSleepInfo),
(r"/pet/healthy/summary", handlers.Summary),
(r"/pet/healthy/set_sport_info", handlers.SetTargetStep),
(r"/pet/activity", handlers.PetActivity),
(r"/device/add_device_info", handlers.AddDeviceInfo),
(r"/device/get_info", handlers.GetDeviceInfo),
(r"/device/remove_device_info", handlers.RemoveDeviceInfo),
(r"/device/set_sim_info", handlers.SetSimInfo),
(r"/device/switch_light", handlers.SwitchLight),
(r"/device/get_light_status", handlers.GetDeviceSwitchLightStatus),
(r"/device/send_get_wifi_list_cmd", handlers.SendGetWifiListCmd),
(r"/device/get_wifi_list", handlers.GetWifiList),
(r"/device/reboot_device_cmd", handlers.RebootDeviceCmd),
(r"/user/agree_policy", handlers.AgreePolicy),
(r"/device/get_device_status", handlers.GetPetStatusInfo),
(r"/app/get_config", handlers.AppConfig),
(r"/user/set_outdoor_on_off", handlers.OutdoorOnOff),
(r"/user/set_outdoor_wifi", handlers.SetOutdoorWifi),
],
debug=True,
autoreload=True,
pyloader=pyloader,
user_dao=UserDAO.new(mongo_client, thread_pool),
global_dao=GlobalDAO.new(mongo_client, thread_pool),
auth_dao=AuthDAO.new(mongo_client, thread_pool),
pet_dao=PetDAO.new(mongo_client, thread_pool),
device_dao=NewDeviceDAO.new(mongo_client, thread_pool),
broadcast_rpc = broadcast_rpc,
msg_rpc=msg_rpc,
appconfig=conf, )
class _UserSrvConsole(Console):
def handle_cmd(self, stream, address, cmd):
if len(cmd) == 1 and cmd[0] == "quit":
self.send_response(stream, "Byte!")
return False
elif len(cmd) == 0:
pass
elif len(cmd) == 1 and cmd[0] == "reload-config":
newconf = pyloader.ReloadInst("Config")
webapp.settings["appconfig"] = newconf
webapp.settings["gid_rpc"] = GIDRPC(newconf.gid_rpc_url)
self.send_response(stream, "done")
elif len(cmd) == 1 and cmd[0] == "reload-sysconfig":
webapp.settings["sysconfig"].reload()
self.send_response(stream, "done")
else:
self.send_response(stream, "Invalid command!")
return True
# Init console
console = _UserSrvConsole()
console.bind(options.console_port, "127.0.0.1")
console.start()
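# Example console session for the handler above (sketch; the transport is
# assumed to be a plain TCP connection to 127.0.0.1:9110):
#   > reload-config
#   done
#   > quit
#   Byte!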
# Init async
@gen.coroutine
def _async_init():
SysConfig.new(sys_config.DEFAULT_CATEGORY,mongo_client, thread_pool)
yield SysConfig.current().open()
webapp.settings["gid_rpc"] = GIDRPC(SysConfig.current().get(sys_config.SC_GID_RPC_URL))
try:
worker.register(discover_config.USER_SRV_D, options.port, 0, None)
worker.work()
except Exception, e:
print "worker register error exception:", e
logger.exception(e)
exit(0)
ioloop.IOLoop.current().run_sync(_async_init)
# Run web app loop
webapp.listen(options.port, options.address, xheaders=True)
ioloop.IOLoop.current().start()
| [
"[email protected]"
] | |
828855bc5a1f6617ef25c47b606649d873810864 | 1d49dcfe7a725ed9c21d5e614b7e61c81aae1c88 | /modules/critics/CentralV.py | 62f3043f15838fbfacbcde6b6d31b5066599a20e | [
"Apache-2.0"
] | permissive | xiaojize/SMAC-1 | c405aa22d30a7f176b4b2a29669ae82ea7f0b3c7 | 7aaf4673b0eecafc4ab25f381eea20fc762af56a | refs/heads/master | 2023-06-30T14:37:44.870652 | 2021-07-23T15:15:49 | 2021-07-23T15:15:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 482 | py | import torch.nn as nn
import torch.nn.functional as F
class CentralV_Critic(nn.Module):
def __init__(self, input_shape, args):
super(CentralV_Critic, self).__init__()
self.args = args
self.fc1 = nn.Linear(input_shape, 128)
self.fc2 = nn.Linear(128, 128)
self.fc3 = nn.Linear(128, 1)
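    # Shape sketch (assumed): forward() below maps a batch of centralized
    # states [batch, input_shape] -> hidden [batch, 128] -> values [batch, 1].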
def forward(self, state):
x = F.relu(self.fc1(state))
x = F.relu(self.fc2(x))
q = self.fc3(x)
return q | [
"[email protected]"
] | |
6cc03fb54250c0b2f6556012d2bf83b75474b3f2 | 9d278285f2bc899ac93ec887b1c31880ed39bf56 | /ondoc/cart/migrations/0006_merge_20190326_1307.py | 67a383ce2f25bfc403a88150e01f3f911f341528 | [] | no_license | ronit29/docprime | 945c21f8787387b99e4916cb3ba1618bc2a85034 | 60d4caf6c52a8b70174a1f654bc792d825ba1054 | refs/heads/master | 2023-04-01T14:54:10.811765 | 2020-04-07T18:57:34 | 2020-04-07T18:57:34 | 353,953,576 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 267 | py | # Generated by Django 2.0.5 on 2019-03-26 07:37
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('cart', '0005_auto_20190315_1612'),
('cart', '0004_auto_20190318_1424'),
]
operations = [
]
| [
"[email protected]"
] | |
bbc9346e361617f40137e996c9caee2f66e94355 | 032a0c939d96d0e5307dbce86e11faf7060f4ed9 | /lte/gateway/python/magma/pipelined/tests/test_ipv6_prefix_mapper.py | d33410b7b423133760753874c76ffd7d50ae75a6 | [
"BSD-3-Clause"
] | permissive | radha0018/magma | cac9ff3491dd2661e5dc0aa1f9a304a5428e2d2a | 8436966a4bb3cf7fdc3f567704062b6f9568db25 | refs/heads/master | 2023-05-05T08:26:07.132969 | 2021-05-27T18:44:44 | 2021-05-27T18:44:44 | 371,097,174 | 0 | 2 | NOASSERTION | 2021-05-26T16:26:21 | 2021-05-26T16:15:53 | Go | UTF-8 | Python | false | false | 2,280 | py | """
Copyright 2020 The Magma Authors.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from magma.pipelined.ipv6_prefix_store import (
InterfaceIDToPrefixMapper,
get_ipv6_interface_id,
get_ipv6_prefix,
)
class InterfaceMappersTest(unittest.TestCase):
def setUp(self):
self._interface_to_prefix_mapper = InterfaceIDToPrefixMapper()
self._interface_to_prefix_mapper._prefix_by_interface = {}
def test_prefix_mapper_test(self):
ipv6_addrs = ['ba10:5:6c:9:9d21:4407:d337:1928',
'321b:534:6c:9:999:0:d337:1928',
'222b:5334:111c:111::d337:1928']
prefixes = [get_ipv6_prefix(ipv6_addrs[0]),
get_ipv6_prefix(ipv6_addrs[1])]
interfaces = [get_ipv6_interface_id(ipv6_addrs[0]),
get_ipv6_interface_id(ipv6_addrs[1]),
get_ipv6_interface_id(ipv6_addrs[2])]
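        # The helpers split an address at the /64 boundary; e.g. for
        # ipv6_addrs[0] the prefix is 'ba10:5:6c:9::' and the interface id
        # is '::9d21:4407:d337:1928' (inferred from the assertions below).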
self._interface_to_prefix_mapper.save_prefix(
interfaces[0], prefixes[0])
self.assertEqual(
self._interface_to_prefix_mapper.get_prefix(
interfaces[0]),
'ba10:5:6c:9::')
self._interface_to_prefix_mapper.save_prefix(
interfaces[1], prefixes[1])
self.assertEqual(interfaces[1], '::999:0:d337:1928')
self.assertEqual(
self._interface_to_prefix_mapper.get_prefix(
interfaces[1]),
prefixes[1])
self._interface_to_prefix_mapper.save_prefix(
interfaces[0], prefixes[1])
self.assertEqual(
self._interface_to_prefix_mapper.get_prefix(
interfaces[0]),
'321b:534:6c:9::')
self.assertEqual(
self._interface_to_prefix_mapper.get_prefix(
interfaces[2]),
None)
if __name__ == "__main__":
unittest.main()
| [
"[email protected]"
] | |
17261e523784bf2f34722edf40c070552af6fe36 | 3e77a86429ba0f6968f709e77e204cdfe920a041 | /python/python/src/python_problems/webAutomationTestScripts/testSuites/glbRestTests/getMasterBillingAccount.py | 5169d96ece8bece21ea979551aafa018b45765e0 | [] | no_license | ramyamango123/test | a2d9bb6cafe8d7406b76eba526ddded2acf2a3b2 | 85420c9406109a72e1b1d455ea29a5cae9def5a3 | refs/heads/master | 2020-06-07T23:34:08.344051 | 2014-03-12T05:19:05 | 2014-03-12T05:19:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,701 | py | #Get Master Plan Billing Account
#Includes both positive and negative test cases.
#Created by Tarja Rechsteiner on 12.01.09.
import sys
from testSuiteBase import TestSuiteBase
from selenium import selenium
import time
CLIENTIPADDRESS = '127.0.0.1'
FIRSTNAME = 'Tester'
LASTNAME = 'Dummy'
ADDRESS1 = '123 Fake Street'
CITY = 'San Mateo'
STATE = 'CA'
COUNTRY = 'US'
ZIPCODE = '94403'
PHONENUMBER = '555-555-5555'
CREDITCARDNO='378282246310005'
SECURENO='123'
CCMONTH='10'
CCYEAR='2011'
GAMEURL='http://gazillion.com'
PLANID='10003936'
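# Fixture note: CREDITCARDNO (378282246310005) and the 4111111111111111 number
# typed into ariaHostedPage below are the standard Amex/Visa test card numbers
# commonly used in payment sandboxes.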
class GetMasterBillingAccount(TestSuiteBase):
def setUp(self):
self.toolBox = self.getGlbToolbox()
self.selenium = selenium("localhost", 4444, "*firefox", "https://stage.ariasystems.net/webclients/dreamworksPay/Handler.php")
self.selenium.start()
self.selenium.window_maximize()
def tearDown(self):
self.selenium.close()
self.selenium.stop()
def test_validInfo(self):
'''Valid information -- TC1'''
userid, billingId = self.validAccountCreation()
result = self.toolBox.getMasterBillingAccount(userid)
self.assertTrue(result.httpStatus() == 200,\
"http status code: " + str(result.httpStatus()))
#structure check
self.assertTrue('masterAccount' in result, "No masterAccount found")
self.assertTrue('accountId' in result['masterAccount'], "No accountId found")
self.assertFalse('errors' in result, "Errors in success XML")
#values check
self.assertEqual(billingId, result['masterAccount']['accountId'], "values don't match")
self.toolBox.scriptOutput("getMasterBillingAccount valid info account", {"userid": userid, "billingId": billingId})
def test_validParentInfo(self):
'''Valid parent information -- TC2'''
userid, billingId = self.validAccountCreationParent()
result = self.toolBox.getMasterBillingAccount(userid)
self.assertTrue(result.httpStatus() == 200,\
"http status code: " + str(result.httpStatus()))
#structure check
self.assertTrue('masterAccount' in result, "No masterAccount found")
self.assertTrue('accountId' in result['masterAccount'], "No accountId found")
self.assertFalse('errors' in result, "Errors in success XML")
#values check
self.assertEqual(billingId, result['masterAccount']['accountId'], "values don't match")
self.toolBox.scriptOutput("getMasterBillingAccount valid parent account", {"userid": userid, "billingId": billingId})
def test_validPaypalInfo(self):
'''Valid Paypal information -- TC3'''
#Failing this testcase since Paypal flow is still inactive
self.fail()
userid, billingId = self.validPaypalAccountCreation()
result = self.toolBox.getMasterBillingAccount(userid)
self.assertTrue(result.httpStatus() == 200,\
"http status code: " + str(result.httpStatus()))
#structure check
self.assertTrue('masterAccount' in result, "No masterAccount found")
self.assertTrue('accountId' in result['masterAccount'], "No accountId found")
self.assertFalse('errors' in result, "Errors in success XML")
#values check
self.assertEqual(billingId, result['masterAccount']['accountId'], "values don't match")
self.toolBox.scriptOutput("getMasterBillingAccount valid paypal account", {"userid": userid, "billingId": billingId})
def test_validParentPaypalInfo(self):
'''Valid Paypal parent information -- TC4'''
#Failing this testcase since Paypal flow is still inactive
self.fail()
userid, billingId = self.validPaypalAccountCreationParent()
result = self.toolBox.getMasterBillingAccount(userid)
self.assertTrue(result.httpStatus() == 200,\
"http status code: " + str(result.httpStatus()))
#structure check
self.assertTrue('masterAccount' in result, "No masterAccount found")
self.assertTrue('accountId' in result['masterAccount'], "No accountId found")
self.assertFalse('errors' in result, "Errors in success XML")
#values check
self.assertEqual(billingId, result['masterAccount']['accountId'], "values don't match")
self.toolBox.scriptOutput("getMasterBillingAccount valid parent paypal account", {"userid": userid, "billingId": billingId})
def test_validInfoNoBilling(self):
'''Valid information with no billing account attached -- TC5'''
_, result = self.toolBox.registerNewUsername()
self.assertTrue('user' in result, "XML from register does not contain user")
userid = result['user']['id']
result = self.toolBox.getMasterBillingAccount(userid)
self.assertTrue(result.httpStatus() == 499,\
"http status code: " + str(result.httpStatus()))
self.failureCheck(result, ['No billing account exists for this user', '16032'])
self.infoFailCheck(result, userid)
self.toolBox.scriptOutput("getMasterBillingAccount no billing account", {"userid": userid})
def test_unvalidatedInfo(self):
'''Unvalidated CC user -- TC6'''
userid, billingId = self.invalidAccountCreation()
result = self.toolBox.getMasterBillingAccount(userid)
self.assertTrue(result.httpStatus() == 499,\
"http status code: " + str(result.httpStatus()))
self.failureCheck(result, ['No billing account exists for this user', '16032'])
self.infoFailCheck(result, userid)
self.toolBox.scriptOutput("getMasterBillingAccount invalid CC account", {"userid": userid, "billingId": billingId})
def test_unvalidatedPaypalInfo(self):
'''Unvalidated paypal user -- TC7'''
userid, billingId = self.invalidPaypalAccountCreation()
result = self.toolBox.getMasterBillingAccount(userid)
self.assertTrue(result.httpStatus() == 499,\
"http status code: " + str(result.httpStatus()))
self.failureCheck(result, ['No billing account exists for this user', '16032'])
self.infoFailCheck(result, userid)
self.toolBox.scriptOutput("getMasterBillingAccount invalid paypal account", {"userid": userid, "billingId": billingId})
def test_missingParams(self):
'''Missing information -- TC8'''
result = self.toolBox.blankGet('getMasterBillingAccount')
self.assertTrue(result.httpStatus() == 400,\
"http status code: " + str(result.httpStatus()))
self.failureCheck(result, ['Not enough parameters to satisfy request', '4000'])
def test_unexpectedValues(self):
'''Empty values -- TC9'''
result = self.toolBox.getMasterBillingAccount('')
self.assertTrue(result.httpStatus() == 499,\
"http status code: " + str(result.httpStatus()))
self.failureCheck(result, ['Parameter values are empty for the request', '4003'])
self.infoFailCheck(result, '')
def test_invalidInfo(self):
'''Invalid account id -- TC10'''
result = self.toolBox.getMasterBillingAccount('00000000000000000')
self.assertTrue(result.httpStatus() == 499,\
"http status code: " + str(result.httpStatus()))
self.failureCheck(result, ['Id does not match any records', '17000'])
self.infoFailCheck(result, '00000000000000000')
def test_invalidTitleCode(self):
'''Invalid title code -- TC11'''
userid, billingId = self.validAccountCreation()
self.toolBox.setTitleCodeParam('somejunk')
result = self.toolBox.getMasterBillingAccount(userid)
self.assertTrue(result.httpStatus() == 499,\
"http status code: " + str(result.httpStatus()))
self.failureCheck(result, ["Title code does not match any records", '17002'])
self.infoFailCheck(result, userid, 'somejunk')
self.toolBox.setTitleCodeParam('KFPW')
def test_emptyTitleCode(self):
'''Blank Title Code -- TC12'''
userid, billingId = self.validAccountCreation()
self.toolBox.setTitleCodeParam('')
result = self.toolBox.getMasterBillingAccount(userid)
self.assertTrue(result.httpStatus() == 499,\
"http status code: " + str(result.httpStatus()))
self.failureCheck(result, ["Parameter values are empty for the request", '4003'])
self.infoFailCheck(result, userid, '')
self.toolBox.setTitleCodeParam('KFPW')
def test_missingTitleCode(self):
'''No Title Code -- TC13'''
userid, billingId = self.validAccountCreation()
self.toolBox.setTitleCodeParam(None)
result = self.toolBox.getMasterBillingAccount(userid)
self.assertTrue(result.httpStatus() == 400,\
"http status code: " + str(result.httpStatus()))
self.failureCheck(result, ['Not enough parameters to satisfy request', '4000'])
self.infoFailCheck(result, userid, None)
self.toolBox.setTitleCodeParam('KFPW')
# Helper Methods #
def validAccountCreation(self):
'''Registers an account for the valid info test'''
username, result = self.toolBox.registerNewUsername()
self.assertTrue('user' in result, "XML from register does not contain user")
gameAcctId = self.toolBox.getGameIdFromUser(username)
id = result['user']['id']
billingType = '1'
result = self.toolBox.createBillingAcct(id,gameAcctId,billingType,CLIENTIPADDRESS,PLANID,firstName=FIRSTNAME,lastName=LASTNAME,
address1=ADDRESS1,city=CITY,state=STATE,country=COUNTRY,zipCode=ZIPCODE,gameUrl=GAMEURL)
self.assertTrue('account' in result, result)
sessionId = result['account']['inSessionID']
flowId = result['account']['flowID']
self.ariaHostedPage(sessionId, flowId)
return id, result['account']['accountId']
def validAccountCreationParent(self):
'''Registers an account for the valid parent info test'''
username, result = self.toolBox.registerNewParent()
self.assertTrue('user' in result, "XML from register does not contain user")
gameAcctId = self.toolBox.getGameIdFromUser(username)
id = result['user']['id']
billingType = '1'
result = self.toolBox.createBillingAcct(id,gameAcctId,billingType,CLIENTIPADDRESS,PLANID,firstName=FIRSTNAME,lastName=LASTNAME,
address1=ADDRESS1,city=CITY,state=STATE,country=COUNTRY,zipCode=ZIPCODE,gameUrl=GAMEURL)
self.assertTrue('account' in result, result)
sessionId = result['account']['inSessionID']
flowId = result['account']['flowID']
self.ariaHostedPage(sessionId, flowId)
return id, result['account']['accountId']
def validPaypalAccountCreation(self):
'''Registers a paypal account for the valid info test'''
username, result = self.toolBox.registerNewUsername()
self.assertTrue('user' in result, "XML from register does not contain user")
gameAcctId = self.toolBox.getGameIdFromUser(username)
id = result['user']['id']
billingType = '11'
result = self.toolBox.createBillingAcct(id,gameAcctId,billingType,CLIENTIPADDRESS,PLANID)
self.assertTrue('account' in result, result)
masterBillingAcctId = result['account']['accountId']
paypalResult = self.toolBox.startPaypalPlan(masterBillingAcctId)
paypalToken = paypalResult['paypal']['paypalToken']
paypalURL = paypalResult['paypal']['returnUrl']
URL = paypalURL + paypalToken
self.acceptPaypalAgreementUsingSelenium(URL)
paypalResult2 = self.toolBox.finishPaypalPlan(masterBillingAcctId, paypalToken)
return id, masterBillingAcctId
def validPaypalAccountCreationParent(self):
'''Registers a paypal account for the valid parent info test'''
username, result = self.toolBox.registerNewParent()
self.assertTrue('user' in result, "XML from register does not contain user")
gameAcctId = self.toolBox.getGameIdFromUser(username)
id = result['user']['id']
billingType = '11'
result = self.toolBox.createBillingAcct(id,gameAcctId,billingType,CLIENTIPADDRESS,PLANID)
self.assertTrue('account' in result, result)
masterBillingAcctId = result['account']['accountId']
paypalResult = self.toolBox.startPaypalPlan(masterBillingAcctId)
paypalToken = paypalResult['paypal']['paypalToken']
paypalURL = paypalResult['paypal']['returnUrl']
URL = paypalURL + paypalToken
self.acceptPaypalAgreementUsingSelenium(URL)
paypalResult2 = self.toolBox.finishPaypalPlan(masterBillingAcctId, paypalToken)
return id, masterBillingAcctId
def invalidAccountCreation(self):
'''Registers an invalid account for the valid info test'''
username, result = self.toolBox.registerNewUsername()
self.assertTrue('user' in result, "XML from register does not contain user")
gameAcctId = self.toolBox.getGameIdFromUser(username)
id = result['user']['id']
billingType = '1'
result = self.toolBox.createBillingAcct(id,gameAcctId,billingType,CLIENTIPADDRESS,PLANID,firstName=FIRSTNAME,lastName=LASTNAME,
address1=ADDRESS1,city=CITY,state=STATE,country=COUNTRY,zipCode=ZIPCODE,gameUrl=GAMEURL)
self.assertTrue('account' in result, result)
return id, result['account']['accountId']
def invalidPaypalAccountCreation(self):
'''Registers an invalid paypal account for the valid info test'''
username, result = self.toolBox.registerNewUsername()
self.assertTrue('user' in result, "XML from register does not contain user")
gameAcctId = self.toolBox.getGameIdFromUser(username)
id = result['user']['id']
billingType = '11'
result = self.toolBox.createBillingAcct(id,gameAcctId,billingType,CLIENTIPADDRESS,PLANID)
self.assertTrue('account' in result, result)
masterBillingAcctId = result['account']['accountId']
return id, masterBillingAcctId
def ariaHostedPage(self, sessionId, flowId):
'''Entering credit card information through selenium'''
sel = self.selenium
sel.open(r"file://///hq-fs01/dept/Dev/QA/Web/KungFuPandaWorld/Web_Services/DB/Web%20Services%20Test.html")
sel.select("wsUrl", "label=" + str(self.toolBox.webHost))
sel.click("//input[@value='set environment']")
sel.wait_for_page_to_load("30000")
sel.is_text_present("Current Environment: " + str(self.toolBox.webHost))
sel.type("ahp_inSessionID", sessionId)
sel.type("ahp_flowID", flowId)
sel.click("ahp_submit")
sel.wait_for_page_to_load("30000")
time.sleep(2)
sel.type("cc_number", "4111111111111111")
sel.click("cc_expire_mm")
sel.select("cc_expire_mm", "label=January")
sel.click("//option[@value='1']")
sel.click("cc_expire_yyyy")
sel.select("cc_expire_yyyy", "label=2012")
sel.click("//option[@value='2012']")
sel.click("cvv")
sel.type("cvv", "123")
sel.click("submitButton")
sel.wait_for_page_to_load("30000")
def acceptPaypalAgreementUsingSelenium(self, URL):
'''Entering paypal information through selenium'''
sel = self.selenium
sel.open("https://www.sandbox.paypal.com/cgi-bin/webscr?cmd=_express-checkout&token=/")
sel.click("link=PayPal Sandbox")
sel.wait_for_page_to_load("30000")
#login to paypal
sel.type("login_email", "[email protected]")
sel.type("login_password", "password")
sel.click("submit")
sel.wait_for_page_to_load("30000")
time.sleep(6)
sel.open(URL)
sel.wait_for_page_to_load("30000")
time.sleep(2)
#login to sandbox test account
sel.type("login_email", "[email protected]")
sel.type("login_password", "gazillion")
sel.click("login.x")
sel.wait_for_page_to_load("30000")
sel.click("continue")
sel.wait_for_page_to_load("30000")
self.assertEqual("Paypal Callback", sel.get_title())
time.sleep(1)
def failureCheck(self, result, expected) :
'''Determines whether there are multiple error messages or not and calls appropriate helper method'''
#checking for XML structure
self.assertFalse('user' in result, "XML structure returned success XML")
self.assertTrue('errors' in result, "XML structure failed, no errors")
self.assertTrue('error' in result['errors'], "XML structure failed, no error")
self.assertTrue('code' in result['errors']['error'], "XML structure failed, no code")
self.assertTrue('message' in result['errors']['error'], "XML structure failed, no message")
self.assertTrue('parameters' in result['errors']['error'], "XML structure failed, parameters")
self.assertFalse('masterAccount' in result, "XML structure failed, masterAccount present")
# Checks for messages
self.assertEqual(result['errors']['error']['message'], expected[0], "Expected error message not found. Found: " + str(result['errors']['error']['message']) + " " + expected[0])
self.assertEqual(result['errors']['error']['code'], expected[1], "Expected error code not found. Found: " + str(result['errors']['error']['code']))
def infoFailCheck(self, result, userId, titleCode='KFPW') :
'''Checks that the information passed is equal to the information given for one error message'''
parameters = self.toolBox.httpParamToDict(result['errors']['error']['parameters'])
self.assertTrue(len(parameters) != 0, "Parameters string did not resolve to pairs" + str(result))
self.assertTrue(parameters['accountId'] == userId, "UserId returned not equal to userId given: " + userId + " " + str(parameters))
self.assertTrue(parameters['service'] == "getMasterBillingAccount", "Service returned not equal to service called: getMasterBillingAccount" + str(parameters))
if titleCode == None :
self.assertFalse('titleCode' in parameters, "titleCode not passed, but included in return XML: " + str(parameters))
else :
self.assertTrue(parameters['titleCode'] == titleCode, "Title code returned not equal to title code called: " + titleCode + " " + str(parameters)) | [
"[email protected]"
] | |
285c58cff6bab461a2a96c005792abe48c41134e | a713542969e64a251266e215f55ef7f753d83874 | /src/base/nets/vgg.py | 90c96d662c8a34a3a982255da29f7d2a93f35b27 | [] | no_license | czczup/MusesStyle | 874aeae17d1eb017c6f7de012839867371d0e0a6 | 0f6f767c5e58e16101fa63e4b524da6dffd2381c | refs/heads/master | 2022-04-16T12:29:28.051394 | 2020-04-08T13:45:42 | 2020-04-08T13:45:42 | 166,505,943 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 11,258 | py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains model definitions for versions of the Oxford VGG network.
These model definitions were introduced in the following technical report:
Very Deep Convolutional Networks For Large-Scale Image Recognition
Karen Simonyan and Andrew Zisserman
arXiv technical report, 2015
PDF: http://arxiv.org/pdf/1409.1556.pdf
ILSVRC 2014 Slides: http://www.robots.ox.ac.uk/~karen/pdf/ILSVRC_2014.pdf
CC-BY-4.0
More information can be obtained from the VGG website:
www.robots.ox.ac.uk/~vgg/research/very_deep/
Usage:
with slim.arg_scope(vgg.vgg_arg_scope()):
outputs, end_points = vgg.vgg_a(inputs)
with slim.arg_scope(vgg.vgg_arg_scope()):
outputs, end_points = vgg.vgg_16(inputs)
@@vgg_a
@@vgg_16
@@vgg_19
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
slim = tf.contrib.slim
def vgg_arg_scope(weight_decay=0.0005):
"""Defines the VGG arg scope.
Args:
weight_decay: The l2 regularization coefficient.
Returns:
An arg_scope.
"""
with slim.arg_scope([slim.conv2d, slim.fully_connected],
activation_fn=tf.nn.relu,
weights_regularizer=slim.l2_regularizer(weight_decay),
biases_initializer=tf.zeros_initializer()):
with slim.arg_scope([slim.conv2d], padding='SAME') as arg_sc:
return arg_sc
def vgg_a(inputs,
num_classes=1000,
is_training=True,
dropout_keep_prob=0.5,
spatial_squeeze=True,
scope='vgg_a'):
"""Oxford Net VGG 11-Layers version A Example.
Note: All the fully_connected layers have been transformed to conv2d layers.
To use in classification mode, resize input to 224x224.
Args:
inputs: a tensor of size [batch_size, height, width, channels].
num_classes: number of predicted classes.
is_training: whether or not the model is being trained.
dropout_keep_prob: the probability that activations are kept in the dropout
layers during training.
spatial_squeeze: whether or not should squeeze the spatial dimensions of the
outputs. Useful to remove unnecessary dimensions for classification.
scope: Optional scope for the variables.
Returns:
the last op containing the log predictions and end_points dict.
"""
with tf.variable_scope(scope, 'vgg_a', [inputs]) as sc:
end_points_collection = sc.name+'_end_points'
# Collect outputs for conv2d, fully_connected and max_pool2d.
with slim.arg_scope([slim.conv2d, slim.max_pool2d],
outputs_collections=end_points_collection):
net = slim.repeat(inputs, 1, slim.conv2d, 64, [3, 3], scope='conv1')
net = slim.max_pool2d(net, [2, 2], scope='pool1')
net = slim.repeat(net, 1, slim.conv2d, 128, [3, 3], scope='conv2')
net = slim.max_pool2d(net, [2, 2], scope='pool2')
net = slim.repeat(net, 2, slim.conv2d, 256, [3, 3], scope='conv3')
net = slim.max_pool2d(net, [2, 2], scope='pool3')
net = slim.repeat(net, 2, slim.conv2d, 512, [3, 3], scope='conv4')
net = slim.max_pool2d(net, [2, 2], scope='pool4')
net = slim.repeat(net, 2, slim.conv2d, 512, [3, 3], scope='conv5')
net = slim.max_pool2d(net, [2, 2], scope='pool5')
# Use conv2d instead of fully_connected layers.
net = slim.conv2d(net, 4096, [7, 7], padding='VALID', scope='fc6')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout6')
net = slim.conv2d(net, 4096, [1, 1], scope='fc7')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout7')
net = slim.conv2d(net, num_classes, [1, 1],
activation_fn=None,
normalizer_fn=None,
scope='fc8')
# Convert end_points_collection into a end_point dict.
end_points = slim.utils.convert_collection_to_dict(end_points_collection)
if spatial_squeeze:
net = tf.squeeze(net, [1, 2], name='fc8/squeezed')
end_points[sc.name+'/fc8'] = net
return net, end_points
vgg_a.default_image_size = 224
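# Minimal usage sketch for the function above (shapes assumed, TF1-style),
# mirroring the module docstring:
# inputs = tf.placeholder(tf.float32, [None, 224, 224, 3])
# with slim.arg_scope(vgg_arg_scope()):
#     logits, end_points = vgg_a(inputs, num_classes=1000, is_training=False)
# With spatial_squeeze=True (the default), logits has shape [batch_size, 1000].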
def vgg_16(inputs,
num_classes=1000,
is_training=True,
dropout_keep_prob=0.5,
spatial_squeeze=True,
scope='vgg_16'):
"""Oxford Net VGG 16-Layers version D Example.
Note: All the fully_connected layers have been transformed to conv2d layers.
To use in classification mode, resize input to 224x224.
Args:
inputs: a tensor of size [batch_size, height, width, channels].
num_classes: number of predicted classes.
is_training: whether or not the model is being trained.
dropout_keep_prob: the probability that activations are kept in the dropout
layers during training.
spatial_squeeze: whether or not should squeeze the spatial dimensions of the
outputs. Useful to remove unnecessary dimensions for classification.
scope: Optional scope for the variables.
Returns:
the last op containing the log predictions and end_points dict.
"""
with tf.variable_scope(scope, 'vgg_16', [inputs]) as sc:
end_points_collection = sc.name+'_end_points'
# Collect outputs for conv2d, fully_connected and max_pool2d.
with slim.arg_scope([slim.conv2d, slim.fully_connected, slim.max_pool2d],
outputs_collections=end_points_collection):
net = slim.repeat(inputs, 2, slim.conv2d, 64, [3, 3], scope='conv1')
net = slim.max_pool2d(net, [2, 2], scope='pool1')
net = slim.repeat(net, 2, slim.conv2d, 128, [3, 3], scope='conv2')
net = slim.max_pool2d(net, [2, 2], scope='pool2')
net = slim.repeat(net, 3, slim.conv2d, 256, [3, 3], scope='conv3')
net = slim.max_pool2d(net, [2, 2], scope='pool3')
net = slim.repeat(net, 3, slim.conv2d, 512, [3, 3], scope='conv4')
net = slim.max_pool2d(net, [2, 2], scope='pool4')
net = slim.repeat(net, 3, slim.conv2d, 512, [3, 3], scope='conv5')
net = slim.max_pool2d(net, [2, 2], scope='pool5')
# Use conv2d instead of fully_connected layers.
net = slim.conv2d(net, 4096, [7, 7], padding='VALID', scope='fc6')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout6')
net = slim.conv2d(net, 4096, [1, 1], scope='fc7')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout7')
net = slim.conv2d(net, num_classes, [1, 1],
activation_fn=None,
normalizer_fn=None,
scope='fc8')
# Convert end_points_collection into a end_point dict.
end_points = slim.utils.convert_collection_to_dict(end_points_collection)
if spatial_squeeze:
net = tf.squeeze(net, [1, 2], name='fc8/squeezed')
end_points[sc.name+'/fc8'] = net
return net, end_points
vgg_16.default_image_size = 224
def vgg_19(inputs,
num_classes=1000,
is_training=True,
dropout_keep_prob=0.5,
spatial_squeeze=True,
scope='vgg_19'):
"""Oxford Net VGG 19-Layers version E Example.
Note: All the fully_connected layers have been transformed to conv2d layers.
To use in classification mode, resize input to 224x224.
Args:
inputs: a tensor of size [batch_size, height, width, channels].
num_classes: number of predicted classes.
is_training: whether or not the model is being trained.
dropout_keep_prob: the probability that activations are kept in the dropout
layers during training.
spatial_squeeze: whether or not should squeeze the spatial dimensions of the
outputs. Useful to remove unnecessary dimensions for classification.
scope: Optional scope for the variables.
Returns:
the last op containing the log predictions and end_points dict.
"""
with tf.variable_scope(scope, 'vgg_19', [inputs]) as sc:
end_points_collection = sc.name+'_end_points'
# Collect outputs for conv2d, fully_connected and max_pool2d.
with slim.arg_scope([slim.conv2d, slim.fully_connected, slim.max_pool2d],
outputs_collections=end_points_collection):
net = slim.repeat(inputs, 2, slim.conv2d, 64, [3, 3], scope='conv1')
net = slim.max_pool2d(net, [2, 2], scope='pool1')
net = slim.repeat(net, 2, slim.conv2d, 128, [3, 3], scope='conv2')
net = slim.max_pool2d(net, [2, 2], scope='pool2')
net = slim.repeat(net, 4, slim.conv2d, 256, [3, 3], scope='conv3')
net = slim.max_pool2d(net, [2, 2], scope='pool3')
net = slim.repeat(net, 4, slim.conv2d, 512, [3, 3], scope='conv4')
net = slim.max_pool2d(net, [2, 2], scope='pool4')
net = slim.repeat(net, 4, slim.conv2d, 512, [3, 3], scope='conv5')
net = slim.max_pool2d(net, [2, 2], scope='pool5')
# Use conv2d instead of fully_connected layers.
net = slim.conv2d(net, 4096, [7, 7], padding='VALID', scope='fc6')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout6')
net = slim.conv2d(net, 4096, [1, 1], scope='fc7')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout7')
net = slim.conv2d(net, num_classes, [1, 1],
activation_fn=None,
normalizer_fn=None,
scope='fc8')
# Convert end_points_collection into a end_point dict.
end_points = slim.utils.convert_collection_to_dict(end_points_collection)
if spatial_squeeze:
net = tf.squeeze(net, [1, 2], name='fc8/squeezed')
end_points[sc.name+'/fc8'] = net
return net, end_points
vgg_19.default_image_size = 224
# Alias
vgg_d = vgg_16
vgg_e = vgg_19
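# Minimal usage sketch (illustrative only; assumes the module's usual TF1/slim
# preamble, i.e. `import tensorflow as tf` and `slim = tf.contrib.slim`):
#
#     images = tf.placeholder(tf.float32, [None, 224, 224, 3])
#     logits, end_points = vgg_16(images, num_classes=1000, is_training=False)
#     probabilities = tf.nn.softmax(logits)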
| [
"[email protected]"
] | |
d581e305ac079f2af1725f50e4bd33b9987b30cf | 79b1d3d8ffbda5297fff6fefe2528e303bf2110a | /RSGGenFragment/RSToQQ/RSGravitonToQuarkQuark_W-0p1_M_3250_TuneCUETP8M1_13TeV_pythia8_cfi.py | 4743fb46d70ff29d63a01653fe65bceda8571ccf | [] | no_license | yguler/MCFragments-1 | 25745a043653d02be3a4c242c1a85af221fc34b3 | 7c4d10ee59e00f997221109bf006819fd645b92f | refs/heads/master | 2021-01-13T14:09:12.811554 | 2016-12-11T15:57:37 | 2016-12-11T15:57:37 | 76,184,433 | 0 | 0 | null | 2016-12-11T15:59:22 | 2016-12-11T15:59:22 | null | UTF-8 | Python | false | false | 1,323 | py | import FWCore.ParameterSet.Config as cms
from Configuration.Generator.Pythia8CommonSettings_cfi import *
from Configuration.Generator.Pythia8CUEP8M1Settings_cfi import *
generator = cms.EDFilter("Pythia8GeneratorFilter",
comEnergy = cms.double(13000.0),
crossSection = cms.untracked.double(0.00000782),
filterEfficiency = cms.untracked.double(1),
maxEventsToPrint = cms.untracked.int32(0),
pythiaHepMCVerbosity = cms.untracked.bool(False),
pythiaPylistVerbosity = cms.untracked.int32(1),
PythiaParameters = cms.PSet(
pythia8CommonSettingsBlock,
pythia8CUEP8M1SettingsBlock,
processParameters = cms.vstring(
'ExtraDimensionsG*:ffbar2G* = on',
'ExtraDimensionsG*:kappaMG = 1.439532822',
'5100039:m0 = 3250',
'5100039:onMode = off',
'5100039:onIfAny = 1 2 3 4 5'
),
parameterSets = cms.vstring('pythia8CommonSettings',
'pythia8CUEP8M1Settings',
'processParameters',
)
)
)
ProductionFilterSequence = cms.Sequence(generator)
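# Reading of the process parameters above (standard Pythia8 conventions):
# 'ExtraDimensionsG*:ffbar2G*' switches on fermion-pair production of the RS
# graviton, 5100039 is its Pythia8 particle id, 'm0 = 3250' sets the resonance
# mass in GeV, and 'onMode = off' plus 'onIfAny = 1 2 3 4 5' restricts decays
# to quark pairs (d, u, s, c, b), matching the QuarkQuark label of the sample.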
| [
"[email protected]"
] | |
651829bd3adb0e1664849bd3a398cbaf87f47495 | 81f7d86117e5701673f2c302544c4081bcd66067 | /pytorch_forecasting/utils.py | 92c1bbbf8ede65c4045eb66799d10202a39f32cd | [
"MIT"
] | permissive | kashif/pytorch-forecasting | 3bc46cf4bdf7248201d35a17483927188cbce122 | a60367a5014c972cd648f901b7d1a7caa95e8cd1 | refs/heads/master | 2023-06-05T07:27:26.390368 | 2021-06-16T06:50:41 | 2021-06-16T07:21:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,877 | py | """
Helper functions for PyTorch forecasting
"""
from contextlib import redirect_stdout
import os
from typing import Any, Callable, Dict, List, Tuple, Union
import torch
from torch.fft import irfft, rfft
import torch.nn.functional as F
from torch.nn.utils import rnn
def integer_histogram(
data: torch.LongTensor, min: Union[None, int] = None, max: Union[None, int] = None
) -> torch.Tensor:
"""
Create histogram of integers in predefined range
Args:
data: data for which to create histogram
min: minimum of histogram, is inferred from data by default
max: maximum of histogram, is inferred from data by default
Returns:
histogram
"""
uniques, counts = torch.unique(data, return_counts=True)
if min is None:
min = uniques.min()
if max is None:
max = uniques.max()
hist = torch.zeros(max - min + 1, dtype=torch.long, device=data.device).scatter(
dim=0, index=uniques - min, src=counts
)
return hist
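# Usage sketch (worked by hand): counts per integer over the inferred range.
#
#     integer_histogram(torch.tensor([0, 2, 2, 3]))
#     # -> tensor([1, 0, 2, 1])  # counts for the values 0, 1, 2, 3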
def groupby_apply(
keys: torch.Tensor, values: torch.Tensor, bins: int = 95, reduction: str = "mean", return_histogram: bool = False
) -> Union[torch.Tensor, Tuple[torch.Tensor, torch.Tensor]]:
"""
Groupby apply for torch tensors
Args:
        keys: tensor of group indices (``0`` to ``bins - 1``); groups are split
            contiguously, so keys are expected to be sorted
values: values to aggregate - same size as keys
bins: total number of groups
reduction: either "mean" or "sum"
return_histogram: if to return histogram on top
Returns:
tensor of size ``bins`` with aggregated values and optionally with counts of values
"""
if reduction == "mean":
reduce = torch.mean
elif reduction == "sum":
reduce = torch.sum
else:
raise ValueError(f"Unknown reduction '{reduction}'")
uniques, counts = keys.unique(return_counts=True)
groups = torch.stack([reduce(item) for item in torch.split_with_sizes(values, tuple(counts))])
reduced = torch.zeros(bins, dtype=values.dtype, device=values.device).scatter(dim=0, index=uniques, src=groups)
if return_histogram:
hist = torch.zeros(bins, dtype=torch.long, device=values.device).scatter(dim=0, index=uniques, src=counts)
return reduced, hist
else:
return reduced
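# Usage sketch (worked by hand; note that ``split_with_sizes`` above splits the
# values contiguously, which is why sorted keys are expected):
#
#     keys = torch.tensor([0, 0, 1])
#     values = torch.tensor([1.0, 3.0, 5.0])
#     groupby_apply(keys, values, bins=3)
#     # -> tensor([2., 5., 0.])  # group 2 is empty and stays zero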
def profile(function: Callable, profile_fname: str, filter: str = "", period=0.0001, **kwargs):
"""
Profile a given function with ``vmprof``.
Args:
function (Callable): function to profile
profile_fname (str): path where to save profile (`.txt` file will be saved with line profile)
filter (str, optional): filter name (e.g. module name) to filter profile. Defaults to "".
period (float, optional): frequency of calling profiler in seconds. Defaults to 0.0001.
"""
import vmprof
from vmprof.show import LinesPrinter
# profiler config
with open(profile_fname, "wb+") as fd:
# start profiler
vmprof.enable(fd.fileno(), lines=True, period=period)
# run function
function(**kwargs)
# stop profiler
vmprof.disable()
# write report to disk
if kwargs.get("lines", True):
with open(f"{os.path.splitext(profile_fname)[0]}.txt", "w") as f:
with redirect_stdout(f):
LinesPrinter(filter=filter).show(profile_fname)
def get_embedding_size(n: int, max_size: int = 100) -> int:
"""
Determine empirically good embedding sizes (formula taken from fastai).
Args:
n (int): number of classes
max_size (int, optional): maximum embedding size. Defaults to 100.
Returns:
int: embedding size
"""
if n > 2:
return min(round(1.6 * n ** 0.56), max_size)
else:
return 1
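# Example values of the heuristic (worked by hand):
#
#     get_embedding_size(2)     # -> 1
#     get_embedding_size(10)    # -> 6, i.e. round(1.6 * 10 ** 0.56)
#     get_embedding_size(1000)  # -> 77, still below the default max_size of 100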
def create_mask(size: int, lengths: torch.LongTensor, inverse: bool = False) -> torch.BoolTensor:
"""
    Create boolean masks of shape len(lengths) x size.
    By default (``inverse=False``) an entry at (i, j) is True if j >= lengths[i],
    i.e. it marks the positions past each sequence's length; with ``inverse=True``
    an entry is True if j < lengths[i] (the positions where values are).
Args:
size (int): size of second dimension
lengths (torch.LongTensor): tensor of lengths
inverse (bool, optional): If true, boolean mask is inverted. Defaults to False.
Returns:
torch.BoolTensor: mask
"""
if inverse: # return where values are
return torch.arange(size, device=lengths.device).unsqueeze(0) < lengths.unsqueeze(-1)
else: # return where no values are
return torch.arange(size, device=lengths.device).unsqueeze(0) >= lengths.unsqueeze(-1)
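# Usage sketch (worked by hand; the default ``inverse=False`` marks padding):
#
#     create_mask(3, torch.tensor([1, 3]))
#     # -> tensor([[False,  True,  True],
#     #            [False, False, False]])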
_NEXT_FAST_LEN = {}
def next_fast_len(size):
"""
Returns the next largest number ``n >= size`` whose prime factors are all
    2, 3, or 5. These sizes are efficient for fast Fourier transforms.
Equivalent to :func:`scipy.fftpack.next_fast_len`.
Implementation from pyro
:param int size: A positive number.
:returns: A possibly larger number.
:rtype int:
"""
try:
return _NEXT_FAST_LEN[size]
except KeyError:
pass
assert isinstance(size, int) and size > 0
next_size = size
while True:
remaining = next_size
for n in (2, 3, 5):
while remaining % n == 0:
remaining //= n
if remaining == 1:
_NEXT_FAST_LEN[size] = next_size
return next_size
next_size += 1
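# Usage sketch (worked by hand): 97 -> 100, since 100 = 2^2 * 5^2 is the next
# size whose prime factors are all 2, 3 or 5, while 97 (prime), 98 (2 * 7^2)
# and 99 (3^2 * 11) are not.
#
#     next_fast_len(97)  # -> 100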
def autocorrelation(input, dim=0):
"""
Computes the autocorrelation of samples at dimension ``dim``.
Reference: https://en.wikipedia.org/wiki/Autocorrelation#Efficient_computation
Implementation copied form `pyro <https://github.com/pyro-ppl/pyro/blob/dev/pyro/ops/stats.py>`_.
:param torch.Tensor input: the input tensor.
:param int dim: the dimension to calculate autocorrelation.
:returns torch.Tensor: autocorrelation of ``input``.
"""
# Adapted from Stan implementation
# https://github.com/stan-dev/math/blob/develop/stan/math/prim/mat/fun/autocorrelation.hpp
N = input.size(dim)
M = next_fast_len(N)
M2 = 2 * M
# transpose dim with -1 for Fourier transform
input = input.transpose(dim, -1)
# centering and padding x
centered_signal = input - input.mean(dim=-1, keepdim=True)
# Fourier transform
freqvec = torch.view_as_real(rfft(centered_signal, n=M2))
# take square of magnitude of freqvec (or freqvec x freqvec*)
freqvec_gram = freqvec.pow(2).sum(-1)
# inverse Fourier transform
autocorr = irfft(freqvec_gram, n=M2)
# truncate and normalize the result, then transpose back to original shape
autocorr = autocorr[..., :N]
autocorr = autocorr / torch.tensor(range(N, 0, -1), dtype=input.dtype, device=input.device)
autocorr = autocorr / autocorr[..., :1]
return autocorr.transpose(dim, -1)
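# Sanity-check sketch: lag 0 is normalized to exactly 1, and for white noise
# the remaining lags should hover near zero:
#
#     x = torch.randn(1000)
#     ac = autocorrelation(x)
#     ac[0]                  # -> tensor(1.)
#     ac[1:20].abs().max()   # small for white noise, typically well below 0.1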
def unpack_sequence(sequence: Union[torch.Tensor, rnn.PackedSequence]) -> Tuple[torch.Tensor, torch.Tensor]:
"""
Unpack RNN sequence.
Args:
sequence (Union[torch.Tensor, rnn.PackedSequence]): RNN packed sequence or tensor of which
first index are samples and second are timesteps
Returns:
Tuple[torch.Tensor, torch.Tensor]: tuple of unpacked sequence and length of samples
"""
if isinstance(sequence, rnn.PackedSequence):
sequence, lengths = rnn.pad_packed_sequence(sequence, batch_first=True)
        # pad_packed_sequence returns lengths on the CPU by default -> move them to the sequence's device
lengths = lengths.to(sequence.device)
else:
lengths = torch.ones(sequence.size(0), device=sequence.device, dtype=torch.long) * sequence.size(1)
return sequence, lengths
def padded_stack(
tensors: List[torch.Tensor], side: str = "right", mode: str = "constant", value: Union[int, float] = 0
) -> torch.Tensor:
"""
Stack tensors along first dimension and pad them along last dimension to ensure their size is equal.
Args:
tensors (List[torch.Tensor]): list of tensors to stack
side (str): side on which to pad - "left" or "right". Defaults to "right".
mode (str): 'constant', 'reflect', 'replicate' or 'circular'. Default: 'constant'
value (Union[int, float]): value to use for constant padding
Returns:
torch.Tensor: stacked tensor
"""
full_size = max([x.size(-1) for x in tensors])
def make_padding(pad):
if side == "left":
return (pad, 0)
elif side == "right":
return (0, pad)
else:
raise ValueError(f"side for padding '{side}' is unknown")
out = torch.stack(
[
F.pad(x, make_padding(full_size - x.size(-1)), mode=mode, value=value) if full_size - x.size(-1) > 0 else x
for x in tensors
],
dim=0,
)
return out
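# Usage sketch (worked by hand): shorter tensors are right-padded to match.
#
#     padded_stack([torch.tensor([1, 2, 3]), torch.tensor([4])], value=-1)
#     # -> tensor([[ 1,  2,  3],
#     #            [ 4, -1, -1]])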
def to_list(value: Any) -> List[Any]:
"""
Convert value or list to list of values.
If already list, return object directly
Args:
value (Any): value to convert
Returns:
List[Any]: list of values
"""
if isinstance(value, (tuple, list)) and not isinstance(value, rnn.PackedSequence):
return value
else:
return [value]
def unsqueeze_like(tensor: torch.Tensor, like: torch.Tensor):
"""
Unsqueeze last dimensions of tensor to match another tensor's number of dimensions.
Args:
tensor (torch.Tensor): tensor to unsqueeze
like (torch.Tensor): tensor whose dimensions to match
"""
n_unsqueezes = like.ndim - tensor.ndim
if n_unsqueezes < 0:
raise ValueError(f"tensor.ndim={tensor.ndim} > like.ndim={like.ndim}")
elif n_unsqueezes == 0:
return tensor
else:
return tensor[(...,) + (None,) * n_unsqueezes]
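# Usage sketch: trailing singleton dimensions are added until the number of
# dimensions matches, so the result broadcasts against ``like``.
#
#     a = torch.zeros(4, 3)
#     b = torch.zeros(4, 3, 2, 1)
#     unsqueeze_like(a, b).shape  # -> torch.Size([4, 3, 1, 1])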
def apply_to_list(obj: Union[List[Any], Any], func: Callable) -> Union[List[Any], Any]:
"""
Apply function to a list of objects or directly if passed value is not a list.
This is useful if the passed object could be either a list to whose elements
    a function needs to be applied or just an object to which to apply the function.
Args:
obj (Union[List[Any], Any]): list/tuple on whose elements to apply function,
otherwise object to whom to apply function
func (Callable): function to apply
Returns:
Union[List[Any], Any]: list of objects or object depending on function output and
if input ``obj`` is of type list/tuple
"""
if isinstance(obj, (list, tuple)) and not isinstance(obj, rnn.PackedSequence):
return [func(o) for o in obj]
else:
return func(obj)
class OutputMixIn:
"""
    MixIn to give a namedtuple some of the access capabilities of a dictionary
"""
def __getitem__(self, k):
if isinstance(k, str):
return getattr(self, k)
else:
return super().__getitem__(k)
def get(self, k, default=None):
return getattr(self, k, default)
def items(self):
return zip(self._fields, self)
def keys(self):
return self._fields
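# Usage sketch: mix into a namedtuple to get dict-style access on top of
# attribute access (the ``Output`` class below is hypothetical, for illustration):
#
#     from collections import namedtuple
#
#     class Output(OutputMixIn, namedtuple("Output", ["prediction", "target"])):
#         pass
#
#     out = Output(prediction=1, target=2)
#     out["prediction"]      # -> 1
#     out.get("missing", 0)  # -> 0
#     list(out.keys())       # -> ['prediction', 'target']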
def move_to_device(
x: Union[
Dict[str, Union[torch.Tensor, List[torch.Tensor], Tuple[torch.Tensor]]],
torch.Tensor,
List[torch.Tensor],
Tuple[torch.Tensor],
],
device: Union[str, torch.DeviceObjType],
) -> Union[
Dict[str, Union[torch.Tensor, List[torch.Tensor], Tuple[torch.Tensor]]],
torch.Tensor,
List[torch.Tensor],
Tuple[torch.Tensor],
]:
"""
Move object to device.
Args:
        x (dictionary, list/tuple of tensors, or tensor): object (e.g. dictionary) of tensors to move to device
device (Union[str, torch.DeviceObjType]): device, e.g. "cpu"
Returns:
x on targeted device
"""
if isinstance(device, str):
device = torch.device(device)
if isinstance(x, dict):
for name in x.keys():
x[name] = move_to_device(x[name], device=device)
elif isinstance(x, OutputMixIn):
for xi in x:
move_to_device(xi, device=device)
return x
elif isinstance(x, torch.Tensor) and x.device != device:
x = x.to(device)
elif isinstance(x, (list, tuple)) and x[0].device != device:
x = [move_to_device(xi, device=device) for xi in x]
return x
def detach(
x: Union[
Dict[str, Union[torch.Tensor, List[torch.Tensor], Tuple[torch.Tensor]]],
torch.Tensor,
List[torch.Tensor],
Tuple[torch.Tensor],
],
) -> Union[
Dict[str, Union[torch.Tensor, List[torch.Tensor], Tuple[torch.Tensor]]],
torch.Tensor,
List[torch.Tensor],
Tuple[torch.Tensor],
]:
"""
Detach object
Args:
x: object to detach
Returns:
detached object
"""
if isinstance(x, torch.Tensor):
return x.detach()
elif isinstance(x, (OutputMixIn, dict)):
return {name: detach(xi) for name, xi in x.items()}
elif isinstance(x, (list, tuple)):
return [detach(xi) for xi in x]
else:
return x
| [
"[email protected]"
] | |
0997db820df5512beb330aedeb592bcd7ec5f840 | cb7c3673ad937c282a39be74d0aee8628e75928d | /tests/test_utils/output/uriandcurie.py | 2c0bb6edc92ee3846661835fdd4a574c30b2da97 | [
"CC0-1.0"
] | permissive | bpow/linkml | 649d6d48f39a8c51efa92fba7eb25c1d8854b472 | ab83c0caee9c02457ea5a748e284dee6b547fcd6 | refs/heads/main | 2023-05-05T18:46:04.501897 | 2021-05-13T21:17:03 | 2021-05-13T21:17:03 | 371,163,928 | 0 | 0 | CC0-1.0 | 2021-05-26T20:42:13 | 2021-05-26T20:42:12 | null | UTF-8 | Python | false | false | 4,918 | py | # Auto generated from uriandcurie.yaml by pythongen.py version: 0.9.0
# Generation date: 2021-03-26 14:22
# Schema: uriandcurie
#
# id: http://example.org/test/uriandcurie
# description:
# license:
import dataclasses
import sys
import re
from typing import Optional, List, Union, Dict, ClassVar, Any
from dataclasses import dataclass
from linkml_model.meta import EnumDefinition, PermissibleValue, PvFormulaOptions
from linkml.utils.slot import Slot
from linkml.utils.metamodelcore import empty_list, empty_dict, bnode
from linkml.utils.yamlutils import YAMLRoot, extended_str, extended_float, extended_int
from linkml.utils.dataclass_extensions_376 import dataclasses_init_fn_with_kwargs
from linkml.utils.formatutils import camelcase, underscore, sfx
from linkml.utils.enumerations import EnumDefinitionImpl
from rdflib import Namespace, URIRef
from linkml.utils.curienamespace import CurieNamespace
from linkml.utils.metamodelcore import Curie, ElementIdentifier, NCName, NodeIdentifier, URI, URIorCURIE
metamodel_version = "1.7.0"
# Overwrite dataclasses _init_fn to add **kwargs in __init__
dataclasses._init_fn = dataclasses_init_fn_with_kwargs
# Namespaces
M = CurieNamespace('m', 'http://example.org/test/uriandcurie')
SHEX = CurieNamespace('shex', 'http://www.w3.org/ns/shex#')
XSD = CurieNamespace('xsd', 'http://www.w3.org/2001/XMLSchema#')
DEFAULT_ = M
# Types
class String(str):
""" A character string """
type_class_uri = XSD.string
type_class_curie = "xsd:string"
type_name = "string"
type_model_uri = M.String
class Uriorcurie(URIorCURIE):
""" a URI or a CURIE """
type_class_uri = XSD.anyURI
type_class_curie = "xsd:anyURI"
type_name = "uriorcurie"
type_model_uri = M.Uriorcurie
class Uri(URI):
""" a complete URI """
type_class_uri = XSD.anyURI
type_class_curie = "xsd:anyURI"
type_name = "uri"
type_model_uri = M.Uri
class Curie(Curie):
""" a CURIE """
type_class_uri = XSD.anyURI
type_class_curie = "xsd:anyURI"
type_name = "curie"
type_model_uri = M.Curie
class Ncname(NCName):
""" Prefix part of CURIE """
type_class_uri = XSD.string
type_class_curie = "xsd:string"
type_name = "ncname"
type_model_uri = M.Ncname
class Objectidentifier(ElementIdentifier):
""" A URI or CURIE that represents an object in the model. """
type_class_uri = SHEX.iri
type_class_curie = "shex:iri"
type_name = "objectidentifier"
type_model_uri = M.Objectidentifier
class Nodeidentifier(NodeIdentifier):
""" A URI, CURIE or BNODE that represents a node in a model. """
type_class_uri = SHEX.nonliteral
type_class_curie = "shex:nonliteral"
type_name = "nodeidentifier"
type_model_uri = M.Nodeidentifier
# Class references
class C1Id(ElementIdentifier):
pass
@dataclass
class C1(YAMLRoot):
_inherited_slots: ClassVar[List[str]] = []
class_class_uri: ClassVar[URIRef] = M.C1
class_class_curie: ClassVar[str] = "m:C1"
class_name: ClassVar[str] = "c1"
class_model_uri: ClassVar[URIRef] = M.C1
id: Union[str, C1Id] = None
hasCurie: Optional[Union[str, Curie]] = None
hasURI: Optional[Union[str, URI]] = None
hasNcName: Optional[Union[str, NCName]] = None
id2: Optional[Union[str, NodeIdentifier]] = None
def __post_init__(self, *_: List[str], **kwargs: Dict[str, Any]):
if self.id is None:
raise ValueError("id must be supplied")
if not isinstance(self.id, C1Id):
self.id = C1Id(self.id)
if self.hasCurie is not None and not isinstance(self.hasCurie, Curie):
self.hasCurie = Curie(self.hasCurie)
if self.hasURI is not None and not isinstance(self.hasURI, URI):
self.hasURI = URI(self.hasURI)
if self.hasNcName is not None and not isinstance(self.hasNcName, NCName):
self.hasNcName = NCName(self.hasNcName)
if self.id2 is not None and not isinstance(self.id2, NodeIdentifier):
self.id2 = NodeIdentifier(self.id2)
super().__post_init__(**kwargs)
# Enumerations
# Slots
class slots:
pass
slots.id = Slot(uri=M.id, name="id", curie=M.curie('id'),
model_uri=M.id, domain=None, range=URIRef)
slots.hasCurie = Slot(uri=M.hasCurie, name="hasCurie", curie=M.curie('hasCurie'),
model_uri=M.hasCurie, domain=None, range=Optional[Union[str, Curie]])
slots.hasURI = Slot(uri=M.hasURI, name="hasURI", curie=M.curie('hasURI'),
model_uri=M.hasURI, domain=None, range=Optional[Union[str, URI]])
slots.hasNcName = Slot(uri=M.hasNcName, name="hasNcName", curie=M.curie('hasNcName'),
model_uri=M.hasNcName, domain=None, range=Optional[Union[str, NCName]])
slots.id2 = Slot(uri=M.id2, name="id2", curie=M.curie('id2'),
model_uri=M.id2, domain=None, range=Optional[Union[str, NodeIdentifier]]) | [
"[email protected]"
] | |
b6e2ce22fb67076c267ba2e1fd71f0b24c1d2878 | 20dba145fd988d5901cfd335efe238c0dce8ac5b | /analytics/decorators/cache_dec.py | df12f6fbc3b61d39bd1710094aebd7c6bc2533c3 | [
"BSD-3-Clause"
] | permissive | ModelDBRepository/228604 | 10be01bf0eeea3ea07ef4c38ebb3b4c771000923 | 8f641f73bcac2700b476663fe656fcad7d63470d | refs/heads/master | 2020-05-29T18:25:57.095212 | 2019-05-31T03:47:54 | 2019-05-31T03:47:54 | 189,299,677 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,489 | py | """ a simple caching of function return values
using the decorator "cached", e.g.
@cached
def foo(a, b, c):
return a*b-c
will cache the result of the calculation foo does, which of course better not be this trivial.
works also for numpy arrays in the parameters.
should of course only be used on functions that do not depend on global parameters (as their state would not be cashed)
"""
import hashlib
import numpy as np
from functools import wraps
cache = {}
hits = 0
misses = 0
no_caching = False
def cached(func):
global cache
def hashit(a):
# builtin hash does weird things with complex number with integer real (or imag?) part : hash(1.5j-1) == hash(1.5j-2)
return (a.__hash__() if not isinstance(a,np.ndarray) else hashlib.sha1(a).hexdigest())
@wraps(func)
    def wrapper(*args, **kwargs):  # kwargs are folded into the cache key below
global misses, hits
key = tuple([func.__name__]) + tuple(("",hashit(a)) for a in args) + tuple((k,hashit(v)) for k, v in sorted(kwargs.items()))
if no_caching:
return func(*args, **kwargs)
        elif key not in cache:
#print func.__name__ + " missed " + str(key)
cache[key] = func(*args, **kwargs)
misses += 1
else:
hits += 1
#print func.__name__ + " hit"
return cache[key]
return wrapper
def clear_cache():
global cache, misses, hits
cache = {}
hits = 0
misses = 0
| [
"[email protected]"
] | |
52a9cfb92f3c0dad0659e22d6a6cb0ad3a802dd1 | e37a8fbfad0172f5e952219d77f9cac4525ded5f | /doctr/__init__.py | 20b78857e757acf3aefefd7d7fa5d8ff77f9275e | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | kapitsa2811/doctr | 4c4cce4b6c5852d2fe811e8b8484ecf99d36d9d2 | 63b1ceec7a1532c9218351234c23eda6f210d5fb | refs/heads/main | 2023-04-22T14:55:56.568309 | 2021-04-28T16:51:40 | 2021-04-28T16:51:40 | 362,700,910 | 1 | 0 | Apache-2.0 | 2021-04-29T05:31:07 | 2021-04-29T05:31:06 | null | UTF-8 | Python | false | false | 98 | py | from .version import __version__ # noqa: F401
from . import documents, models, transforms, utils
| [
"[email protected]"
] | |
119a8220aff09fff6e4a24259634f20681f8b04d | 238e46a903cf7fac4f83fa8681094bf3c417d22d | /output/python37/Lib/test/test_smtplib.py | 06168e1cb07685c76cf317c32142c808ca20aef4 | [
"BSD-3-Clause",
"bzip2-1.0.6",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-newlib-historical",
"OpenSSL",
"Python-2.0",
"TCL"
] | permissive | baojunli/FastCAE | da1277f90e584084d461590a3699b941d8c4030b | a3f99f6402da564df87fcef30674ce5f44379962 | refs/heads/master | 2023-02-25T20:25:31.815729 | 2021-02-01T03:17:33 | 2021-02-01T03:17:33 | 268,390,180 | 1 | 0 | BSD-3-Clause | 2020-06-01T00:39:31 | 2020-06-01T00:39:31 | null | UTF-8 | Python | false | false | 52,178 | py | import asyncore
import base64
import email.mime.text
from email.message import EmailMessage
from email.base64mime import body_encode as encode_base64
import email.utils
import hmac
import socket
import smtpd
import smtplib
import io
import re
import sys
import time
import select
import errno
import textwrap
import threading
import unittest
from test import support, mock_socket
from test.support import HOST, HOSTv4, HOSTv6
if sys.platform == 'darwin':
# select.poll returns a select.POLLHUP at the end of the tests
# on darwin, so just ignore it
def handle_expt(self):
pass
smtpd.SMTPChannel.handle_expt = handle_expt
def server(evt, buf, serv):
serv.listen()
evt.set()
try:
conn, addr = serv.accept()
except socket.timeout:
pass
else:
n = 500
while buf and n > 0:
r, w, e = select.select([], [conn], [])
if w:
sent = conn.send(buf)
buf = buf[sent:]
n -= 1
conn.close()
finally:
serv.close()
evt.set()
class GeneralTests(unittest.TestCase):
def setUp(self):
smtplib.socket = mock_socket
self.port = 25
def tearDown(self):
smtplib.socket = socket
# This method is no longer used but is retained for backward compatibility,
# so test to make sure it still works.
def testQuoteData(self):
teststr = "abc\n.jkl\rfoo\r\n..blue"
expected = "abc\r\n..jkl\r\nfoo\r\n...blue"
self.assertEqual(expected, smtplib.quotedata(teststr))
def testBasic1(self):
mock_socket.reply_with(b"220 Hola mundo")
# connects
smtp = smtplib.SMTP(HOST, self.port)
smtp.close()
def testSourceAddress(self):
mock_socket.reply_with(b"220 Hola mundo")
# connects
smtp = smtplib.SMTP(HOST, self.port,
source_address=('127.0.0.1',19876))
self.assertEqual(smtp.source_address, ('127.0.0.1', 19876))
smtp.close()
def testBasic2(self):
mock_socket.reply_with(b"220 Hola mundo")
# connects, include port in host name
smtp = smtplib.SMTP("%s:%s" % (HOST, self.port))
smtp.close()
def testLocalHostName(self):
mock_socket.reply_with(b"220 Hola mundo")
# check that supplied local_hostname is used
smtp = smtplib.SMTP(HOST, self.port, local_hostname="testhost")
self.assertEqual(smtp.local_hostname, "testhost")
smtp.close()
def testTimeoutDefault(self):
mock_socket.reply_with(b"220 Hola mundo")
self.assertIsNone(mock_socket.getdefaulttimeout())
mock_socket.setdefaulttimeout(30)
self.assertEqual(mock_socket.getdefaulttimeout(), 30)
try:
smtp = smtplib.SMTP(HOST, self.port)
finally:
mock_socket.setdefaulttimeout(None)
self.assertEqual(smtp.sock.gettimeout(), 30)
smtp.close()
def testTimeoutNone(self):
mock_socket.reply_with(b"220 Hola mundo")
self.assertIsNone(socket.getdefaulttimeout())
socket.setdefaulttimeout(30)
try:
smtp = smtplib.SMTP(HOST, self.port, timeout=None)
finally:
socket.setdefaulttimeout(None)
self.assertIsNone(smtp.sock.gettimeout())
smtp.close()
def testTimeoutValue(self):
mock_socket.reply_with(b"220 Hola mundo")
smtp = smtplib.SMTP(HOST, self.port, timeout=30)
self.assertEqual(smtp.sock.gettimeout(), 30)
smtp.close()
def test_debuglevel(self):
mock_socket.reply_with(b"220 Hello world")
smtp = smtplib.SMTP()
smtp.set_debuglevel(1)
with support.captured_stderr() as stderr:
smtp.connect(HOST, self.port)
smtp.close()
expected = re.compile(r"^connect:", re.MULTILINE)
self.assertRegex(stderr.getvalue(), expected)
def test_debuglevel_2(self):
mock_socket.reply_with(b"220 Hello world")
smtp = smtplib.SMTP()
smtp.set_debuglevel(2)
with support.captured_stderr() as stderr:
smtp.connect(HOST, self.port)
smtp.close()
expected = re.compile(r"^\d{2}:\d{2}:\d{2}\.\d{6} connect: ",
re.MULTILINE)
self.assertRegex(stderr.getvalue(), expected)
# Test server thread using the specified SMTP server class
def debugging_server(serv, serv_evt, client_evt):
serv_evt.set()
try:
if hasattr(select, 'poll'):
poll_fun = asyncore.poll2
else:
poll_fun = asyncore.poll
n = 1000
while asyncore.socket_map and n > 0:
poll_fun(0.01, asyncore.socket_map)
# when the client conversation is finished, it will
# set client_evt, and it's then ok to kill the server
if client_evt.is_set():
serv.close()
break
n -= 1
except socket.timeout:
pass
finally:
if not client_evt.is_set():
# allow some time for the client to read the result
time.sleep(0.5)
serv.close()
asyncore.close_all()
serv_evt.set()
MSG_BEGIN = '---------- MESSAGE FOLLOWS ----------\n'
MSG_END = '------------ END MESSAGE ------------\n'
# NOTE: Some SMTP objects in the tests below are created with a non-default
# local_hostname argument to the constructor, since (on some systems) the FQDN
# lookup caused by the default local_hostname sometimes takes so long that the
# test server times out, causing the test to fail.
# Test behavior of smtpd.DebuggingServer
class DebuggingServerTests(unittest.TestCase):
maxDiff = None
def setUp(self):
self.real_getfqdn = socket.getfqdn
socket.getfqdn = mock_socket.getfqdn
# temporarily replace sys.stdout to capture DebuggingServer output
self.old_stdout = sys.stdout
self.output = io.StringIO()
sys.stdout = self.output
self.serv_evt = threading.Event()
self.client_evt = threading.Event()
# Capture SMTPChannel debug output
self.old_DEBUGSTREAM = smtpd.DEBUGSTREAM
smtpd.DEBUGSTREAM = io.StringIO()
# Pick a random unused port by passing 0 for the port number
self.serv = smtpd.DebuggingServer((HOST, 0), ('nowhere', -1),
decode_data=True)
# Keep a note of what server host and port were assigned
self.host, self.port = self.serv.socket.getsockname()[:2]
serv_args = (self.serv, self.serv_evt, self.client_evt)
self.thread = threading.Thread(target=debugging_server, args=serv_args)
self.thread.start()
# wait until server thread has assigned a port number
self.serv_evt.wait()
self.serv_evt.clear()
def tearDown(self):
socket.getfqdn = self.real_getfqdn
# indicate that the client is finished
self.client_evt.set()
# wait for the server thread to terminate
self.serv_evt.wait()
self.thread.join()
# restore sys.stdout
sys.stdout = self.old_stdout
# restore DEBUGSTREAM
smtpd.DEBUGSTREAM.close()
smtpd.DEBUGSTREAM = self.old_DEBUGSTREAM
def get_output_without_xpeer(self):
test_output = self.output.getvalue()
return re.sub(r'(.*?)^X-Peer:\s*\S+\n(.*)', r'\1\2',
test_output, flags=re.MULTILINE|re.DOTALL)
def testBasic(self):
# connect
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
smtp.quit()
def testSourceAddress(self):
# connect
src_port = support.find_unused_port()
try:
smtp = smtplib.SMTP(self.host, self.port, local_hostname='localhost',
timeout=3, source_address=(self.host, src_port))
self.assertEqual(smtp.source_address, (self.host, src_port))
self.assertEqual(smtp.local_hostname, 'localhost')
smtp.quit()
except OSError as e:
if e.errno == errno.EADDRINUSE:
self.skipTest("couldn't bind to source port %d" % src_port)
raise
def testNOOP(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
expected = (250, b'OK')
self.assertEqual(smtp.noop(), expected)
smtp.quit()
def testRSET(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
expected = (250, b'OK')
self.assertEqual(smtp.rset(), expected)
smtp.quit()
    def testEHLO(self):
        # smtpd.SMTPChannel (used by DebuggingServer) answers EHLO itself
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
expected = (250, b'\nSIZE 33554432\nHELP')
self.assertEqual(smtp.ehlo(), expected)
smtp.quit()
def testEXPNNotImplemented(self):
# EXPN isn't implemented in DebuggingServer
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
expected = (502, b'EXPN not implemented')
smtp.putcmd('EXPN')
self.assertEqual(smtp.getreply(), expected)
smtp.quit()
def testVRFY(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
expected = (252, b'Cannot VRFY user, but will accept message ' + \
b'and attempt delivery')
self.assertEqual(smtp.vrfy('[email protected]'), expected)
self.assertEqual(smtp.verify('[email protected]'), expected)
smtp.quit()
def testSecondHELO(self):
# check that a second HELO returns a message that it's a duplicate
# (this behavior is specific to smtpd.SMTPChannel)
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
smtp.helo()
expected = (503, b'Duplicate HELO/EHLO')
self.assertEqual(smtp.helo(), expected)
smtp.quit()
def testHELP(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
self.assertEqual(smtp.help(), b'Supported commands: EHLO HELO MAIL ' + \
b'RCPT DATA RSET NOOP QUIT VRFY')
smtp.quit()
def testSend(self):
# connect and send mail
m = 'A test message'
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
smtp.sendmail('John', 'Sally', m)
# XXX(nnorwitz): this test is flaky and dies with a bad file descriptor
# in asyncore. This sleep might help, but should really be fixed
# properly by using an Event variable.
time.sleep(0.01)
smtp.quit()
self.client_evt.set()
self.serv_evt.wait()
self.output.flush()
mexpect = '%s%s\n%s' % (MSG_BEGIN, m, MSG_END)
self.assertEqual(self.output.getvalue(), mexpect)
def testSendBinary(self):
m = b'A test message'
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
smtp.sendmail('John', 'Sally', m)
# XXX (see comment in testSend)
time.sleep(0.01)
smtp.quit()
self.client_evt.set()
self.serv_evt.wait()
self.output.flush()
mexpect = '%s%s\n%s' % (MSG_BEGIN, m.decode('ascii'), MSG_END)
self.assertEqual(self.output.getvalue(), mexpect)
def testSendNeedingDotQuote(self):
# Issue 12283
m = '.A test\n.mes.sage.'
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
smtp.sendmail('John', 'Sally', m)
# XXX (see comment in testSend)
time.sleep(0.01)
smtp.quit()
self.client_evt.set()
self.serv_evt.wait()
self.output.flush()
mexpect = '%s%s\n%s' % (MSG_BEGIN, m, MSG_END)
self.assertEqual(self.output.getvalue(), mexpect)
def testSendNullSender(self):
m = 'A test message'
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
smtp.sendmail('<>', 'Sally', m)
# XXX (see comment in testSend)
time.sleep(0.01)
smtp.quit()
self.client_evt.set()
self.serv_evt.wait()
self.output.flush()
mexpect = '%s%s\n%s' % (MSG_BEGIN, m, MSG_END)
self.assertEqual(self.output.getvalue(), mexpect)
debugout = smtpd.DEBUGSTREAM.getvalue()
sender = re.compile("^sender: <>$", re.MULTILINE)
self.assertRegex(debugout, sender)
def testSendMessage(self):
m = email.mime.text.MIMEText('A test message')
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
smtp.send_message(m, from_addr='John', to_addrs='Sally')
# XXX (see comment in testSend)
time.sleep(0.01)
smtp.quit()
self.client_evt.set()
self.serv_evt.wait()
self.output.flush()
# Remove the X-Peer header that DebuggingServer adds as figuring out
# exactly what IP address format is put there is not easy (and
# irrelevant to our test). Typically 127.0.0.1 or ::1, but it is
# not always the same as socket.gethostbyname(HOST). :(
test_output = self.get_output_without_xpeer()
del m['X-Peer']
mexpect = '%s%s\n%s' % (MSG_BEGIN, m.as_string(), MSG_END)
self.assertEqual(test_output, mexpect)
def testSendMessageWithAddresses(self):
m = email.mime.text.MIMEText('A test message')
m['From'] = '[email protected]'
m['To'] = 'John'
m['CC'] = 'Sally, Fred'
m['Bcc'] = 'John Root <root@localhost>, "Dinsdale" <[email protected]>'
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
smtp.send_message(m)
# XXX (see comment in testSend)
time.sleep(0.01)
smtp.quit()
# make sure the Bcc header is still in the message.
self.assertEqual(m['Bcc'], 'John Root <root@localhost>, "Dinsdale" '
'<[email protected]>')
self.client_evt.set()
self.serv_evt.wait()
self.output.flush()
# Remove the X-Peer header that DebuggingServer adds.
test_output = self.get_output_without_xpeer()
del m['X-Peer']
# The Bcc header should not be transmitted.
del m['Bcc']
mexpect = '%s%s\n%s' % (MSG_BEGIN, m.as_string(), MSG_END)
self.assertEqual(test_output, mexpect)
debugout = smtpd.DEBUGSTREAM.getvalue()
sender = re.compile("^sender: [email protected]$", re.MULTILINE)
self.assertRegex(debugout, sender)
for addr in ('John', 'Sally', 'Fred', 'root@localhost',
'[email protected]'):
to_addr = re.compile(r"^recips: .*'{}'.*$".format(addr),
re.MULTILINE)
self.assertRegex(debugout, to_addr)
def testSendMessageWithSomeAddresses(self):
# Make sure nothing breaks if not all of the three 'to' headers exist
m = email.mime.text.MIMEText('A test message')
m['From'] = '[email protected]'
m['To'] = 'John, Dinsdale'
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
smtp.send_message(m)
# XXX (see comment in testSend)
time.sleep(0.01)
smtp.quit()
self.client_evt.set()
self.serv_evt.wait()
self.output.flush()
# Remove the X-Peer header that DebuggingServer adds.
test_output = self.get_output_without_xpeer()
del m['X-Peer']
mexpect = '%s%s\n%s' % (MSG_BEGIN, m.as_string(), MSG_END)
self.assertEqual(test_output, mexpect)
debugout = smtpd.DEBUGSTREAM.getvalue()
sender = re.compile("^sender: [email protected]$", re.MULTILINE)
self.assertRegex(debugout, sender)
for addr in ('John', 'Dinsdale'):
to_addr = re.compile(r"^recips: .*'{}'.*$".format(addr),
re.MULTILINE)
self.assertRegex(debugout, to_addr)
def testSendMessageWithSpecifiedAddresses(self):
# Make sure addresses specified in call override those in message.
m = email.mime.text.MIMEText('A test message')
m['From'] = '[email protected]'
m['To'] = 'John, Dinsdale'
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
smtp.send_message(m, from_addr='[email protected]', to_addrs='[email protected]')
# XXX (see comment in testSend)
time.sleep(0.01)
smtp.quit()
self.client_evt.set()
self.serv_evt.wait()
self.output.flush()
# Remove the X-Peer header that DebuggingServer adds.
test_output = self.get_output_without_xpeer()
del m['X-Peer']
mexpect = '%s%s\n%s' % (MSG_BEGIN, m.as_string(), MSG_END)
self.assertEqual(test_output, mexpect)
debugout = smtpd.DEBUGSTREAM.getvalue()
sender = re.compile("^sender: [email protected]$", re.MULTILINE)
self.assertRegex(debugout, sender)
for addr in ('John', 'Dinsdale'):
to_addr = re.compile(r"^recips: .*'{}'.*$".format(addr),
re.MULTILINE)
self.assertNotRegex(debugout, to_addr)
recip = re.compile(r"^recips: .*'[email protected]'.*$", re.MULTILINE)
self.assertRegex(debugout, recip)
def testSendMessageWithMultipleFrom(self):
# Sender overrides To
m = email.mime.text.MIMEText('A test message')
m['From'] = 'Bernard, Bianca'
m['Sender'] = '[email protected]'
m['To'] = 'John, Dinsdale'
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
smtp.send_message(m)
# XXX (see comment in testSend)
time.sleep(0.01)
smtp.quit()
self.client_evt.set()
self.serv_evt.wait()
self.output.flush()
# Remove the X-Peer header that DebuggingServer adds.
test_output = self.get_output_without_xpeer()
del m['X-Peer']
mexpect = '%s%s\n%s' % (MSG_BEGIN, m.as_string(), MSG_END)
self.assertEqual(test_output, mexpect)
debugout = smtpd.DEBUGSTREAM.getvalue()
sender = re.compile("^sender: [email protected]$", re.MULTILINE)
self.assertRegex(debugout, sender)
for addr in ('John', 'Dinsdale'):
to_addr = re.compile(r"^recips: .*'{}'.*$".format(addr),
re.MULTILINE)
self.assertRegex(debugout, to_addr)
def testSendMessageResent(self):
m = email.mime.text.MIMEText('A test message')
m['From'] = '[email protected]'
m['To'] = 'John'
m['CC'] = 'Sally, Fred'
m['Bcc'] = 'John Root <root@localhost>, "Dinsdale" <[email protected]>'
m['Resent-Date'] = 'Thu, 1 Jan 1970 17:42:00 +0000'
m['Resent-From'] = '[email protected]'
m['Resent-To'] = 'Martha <[email protected]>, Jeff'
m['Resent-Bcc'] = '[email protected]'
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
smtp.send_message(m)
# XXX (see comment in testSend)
time.sleep(0.01)
smtp.quit()
self.client_evt.set()
self.serv_evt.wait()
self.output.flush()
# The Resent-Bcc headers are deleted before serialization.
del m['Bcc']
del m['Resent-Bcc']
# Remove the X-Peer header that DebuggingServer adds.
test_output = self.get_output_without_xpeer()
del m['X-Peer']
mexpect = '%s%s\n%s' % (MSG_BEGIN, m.as_string(), MSG_END)
self.assertEqual(test_output, mexpect)
debugout = smtpd.DEBUGSTREAM.getvalue()
sender = re.compile("^sender: [email protected]$", re.MULTILINE)
self.assertRegex(debugout, sender)
for addr in ('[email protected]', 'Jeff', '[email protected]'):
to_addr = re.compile(r"^recips: .*'{}'.*$".format(addr),
re.MULTILINE)
self.assertRegex(debugout, to_addr)
def testSendMessageMultipleResentRaises(self):
m = email.mime.text.MIMEText('A test message')
m['From'] = '[email protected]'
m['To'] = 'John'
m['CC'] = 'Sally, Fred'
m['Bcc'] = 'John Root <root@localhost>, "Dinsdale" <[email protected]>'
m['Resent-Date'] = 'Thu, 1 Jan 1970 17:42:00 +0000'
m['Resent-From'] = '[email protected]'
m['Resent-To'] = 'Martha <[email protected]>, Jeff'
m['Resent-Bcc'] = '[email protected]'
m['Resent-Date'] = 'Thu, 2 Jan 1970 17:42:00 +0000'
m['Resent-To'] = '[email protected]'
m['Resent-From'] = 'Martha <[email protected]>, Jeff'
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
with self.assertRaises(ValueError):
smtp.send_message(m)
smtp.close()
class NonConnectingTests(unittest.TestCase):
def testNotConnected(self):
# Test various operations on an unconnected SMTP object that
# should raise exceptions (at present the attempt in SMTP.send
# to reference the nonexistent 'sock' attribute of the SMTP object
# causes an AttributeError)
smtp = smtplib.SMTP()
self.assertRaises(smtplib.SMTPServerDisconnected, smtp.ehlo)
self.assertRaises(smtplib.SMTPServerDisconnected,
smtp.send, 'test msg')
def testNonnumericPort(self):
# check that non-numeric port raises OSError
self.assertRaises(OSError, smtplib.SMTP,
"localhost", "bogus")
self.assertRaises(OSError, smtplib.SMTP,
"localhost:bogus")
# test response of client to a non-successful HELO message
class BadHELOServerTests(unittest.TestCase):
def setUp(self):
smtplib.socket = mock_socket
mock_socket.reply_with(b"199 no hello for you!")
self.old_stdout = sys.stdout
self.output = io.StringIO()
sys.stdout = self.output
self.port = 25
def tearDown(self):
smtplib.socket = socket
sys.stdout = self.old_stdout
def testFailingHELO(self):
self.assertRaises(smtplib.SMTPConnectError, smtplib.SMTP,
HOST, self.port, 'localhost', 3)
class TooLongLineTests(unittest.TestCase):
respdata = b'250 OK' + (b'.' * smtplib._MAXLINE * 2) + b'\n'
def setUp(self):
self.old_stdout = sys.stdout
self.output = io.StringIO()
sys.stdout = self.output
self.evt = threading.Event()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(15)
self.port = support.bind_port(self.sock)
servargs = (self.evt, self.respdata, self.sock)
thread = threading.Thread(target=server, args=servargs)
thread.start()
self.addCleanup(thread.join)
self.evt.wait()
self.evt.clear()
def tearDown(self):
self.evt.wait()
sys.stdout = self.old_stdout
def testLineTooLong(self):
self.assertRaises(smtplib.SMTPResponseException, smtplib.SMTP,
HOST, self.port, 'localhost', 3)
sim_users = {'[email protected]':'John A',
'[email protected]':'Sally B',
'[email protected]':'Ruth C',
}
sim_auth = ('[email protected]', 'somepassword')
sim_cram_md5_challenge = ('PENCeUxFREJoU0NnbmhNWitOMjNGNn'
'dAZWx3b29kLmlubm9zb2Z0LmNvbT4=')
sim_lists = {'list-1':['[email protected]','[email protected]'],
'list-2':['[email protected]',],
}
# Simulated SMTP channel & server
class ResponseException(Exception): pass
class SimSMTPChannel(smtpd.SMTPChannel):
quit_response = None
mail_response = None
rcpt_response = None
data_response = None
rcpt_count = 0
rset_count = 0
disconnect = 0
AUTH = 99 # Add protocol state to enable auth testing.
authenticated_user = None
def __init__(self, extra_features, *args, **kw):
self._extrafeatures = ''.join(
[ "250-{0}\r\n".format(x) for x in extra_features ])
super(SimSMTPChannel, self).__init__(*args, **kw)
# AUTH related stuff. It would be nice if support for this were in smtpd.
def found_terminator(self):
if self.smtp_state == self.AUTH:
line = self._emptystring.join(self.received_lines)
print('Data:', repr(line), file=smtpd.DEBUGSTREAM)
self.received_lines = []
try:
self.auth_object(line)
except ResponseException as e:
self.smtp_state = self.COMMAND
self.push('%s %s' % (e.smtp_code, e.smtp_error))
return
super().found_terminator()
def smtp_AUTH(self, arg):
if not self.seen_greeting:
self.push('503 Error: send EHLO first')
return
if not self.extended_smtp or 'AUTH' not in self._extrafeatures:
self.push('500 Error: command "AUTH" not recognized')
return
if self.authenticated_user is not None:
self.push(
'503 Bad sequence of commands: already authenticated')
return
args = arg.split()
if len(args) not in [1, 2]:
self.push('501 Syntax: AUTH <mechanism> [initial-response]')
return
auth_object_name = '_auth_%s' % args[0].lower().replace('-', '_')
try:
self.auth_object = getattr(self, auth_object_name)
except AttributeError:
self.push('504 Command parameter not implemented: unsupported '
' authentication mechanism {!r}'.format(auth_object_name))
return
self.smtp_state = self.AUTH
self.auth_object(args[1] if len(args) == 2 else None)
def _authenticated(self, user, valid):
if valid:
self.authenticated_user = user
self.push('235 Authentication Succeeded')
else:
self.push('535 Authentication credentials invalid')
self.smtp_state = self.COMMAND
def _decode_base64(self, string):
return base64.decodebytes(string.encode('ascii')).decode('utf-8')
def _auth_plain(self, arg=None):
if arg is None:
self.push('334 ')
else:
logpass = self._decode_base64(arg)
try:
*_, user, password = logpass.split('\0')
except ValueError as e:
self.push('535 Splitting response {!r} into user and password'
' failed: {}'.format(logpass, e))
return
self._authenticated(user, password == sim_auth[1])
def _auth_login(self, arg=None):
if arg is None:
# base64 encoded 'Username:'
self.push('334 VXNlcm5hbWU6')
elif not hasattr(self, '_auth_login_user'):
self._auth_login_user = self._decode_base64(arg)
# base64 encoded 'Password:'
self.push('334 UGFzc3dvcmQ6')
else:
password = self._decode_base64(arg)
self._authenticated(self._auth_login_user, password == sim_auth[1])
del self._auth_login_user
def _auth_cram_md5(self, arg=None):
if arg is None:
self.push('334 {}'.format(sim_cram_md5_challenge))
else:
logpass = self._decode_base64(arg)
try:
user, hashed_pass = logpass.split()
except ValueError as e:
                self.push('535 Splitting response {!r} into user and password'
                          ' failed: {}'.format(logpass, e))
return False
valid_hashed_pass = hmac.HMAC(
sim_auth[1].encode('ascii'),
self._decode_base64(sim_cram_md5_challenge).encode('ascii'),
'md5').hexdigest()
self._authenticated(user, hashed_pass == valid_hashed_pass)
# end AUTH related stuff.
def smtp_EHLO(self, arg):
resp = ('250-testhost\r\n'
'250-EXPN\r\n'
'250-SIZE 20000000\r\n'
'250-STARTTLS\r\n'
'250-DELIVERBY\r\n')
resp = resp + self._extrafeatures + '250 HELP'
self.push(resp)
self.seen_greeting = arg
self.extended_smtp = True
def smtp_VRFY(self, arg):
# For max compatibility smtplib should be sending the raw address.
if arg in sim_users:
self.push('250 %s %s' % (sim_users[arg], smtplib.quoteaddr(arg)))
else:
self.push('550 No such user: %s' % arg)
def smtp_EXPN(self, arg):
list_name = arg.lower()
if list_name in sim_lists:
user_list = sim_lists[list_name]
for n, user_email in enumerate(user_list):
quoted_addr = smtplib.quoteaddr(user_email)
if n < len(user_list) - 1:
self.push('250-%s %s' % (sim_users[user_email], quoted_addr))
else:
self.push('250 %s %s' % (sim_users[user_email], quoted_addr))
else:
self.push('550 No access for you!')
def smtp_QUIT(self, arg):
if self.quit_response is None:
super(SimSMTPChannel, self).smtp_QUIT(arg)
else:
self.push(self.quit_response)
self.close_when_done()
def smtp_MAIL(self, arg):
if self.mail_response is None:
super().smtp_MAIL(arg)
else:
self.push(self.mail_response)
if self.disconnect:
self.close_when_done()
def smtp_RCPT(self, arg):
if self.rcpt_response is None:
super().smtp_RCPT(arg)
return
self.rcpt_count += 1
self.push(self.rcpt_response[self.rcpt_count-1])
def smtp_RSET(self, arg):
self.rset_count += 1
super().smtp_RSET(arg)
def smtp_DATA(self, arg):
if self.data_response is None:
super().smtp_DATA(arg)
else:
self.push(self.data_response)
def handle_error(self):
raise
class SimSMTPServer(smtpd.SMTPServer):
channel_class = SimSMTPChannel
def __init__(self, *args, **kw):
self._extra_features = []
self._addresses = {}
smtpd.SMTPServer.__init__(self, *args, **kw)
def handle_accepted(self, conn, addr):
self._SMTPchannel = self.channel_class(
self._extra_features, self, conn, addr,
decode_data=self._decode_data)
def process_message(self, peer, mailfrom, rcpttos, data):
self._addresses['from'] = mailfrom
self._addresses['tos'] = rcpttos
def add_feature(self, feature):
self._extra_features.append(feature)
def handle_error(self):
raise
# Test various SMTP & ESMTP commands/behaviors that require a simulated server
# (i.e., something with more features than DebuggingServer)
class SMTPSimTests(unittest.TestCase):
def setUp(self):
self.real_getfqdn = socket.getfqdn
socket.getfqdn = mock_socket.getfqdn
self.serv_evt = threading.Event()
self.client_evt = threading.Event()
# Pick a random unused port by passing 0 for the port number
self.serv = SimSMTPServer((HOST, 0), ('nowhere', -1), decode_data=True)
# Keep a note of what port was assigned
self.port = self.serv.socket.getsockname()[1]
serv_args = (self.serv, self.serv_evt, self.client_evt)
self.thread = threading.Thread(target=debugging_server, args=serv_args)
self.thread.start()
# wait until server thread has assigned a port number
self.serv_evt.wait()
self.serv_evt.clear()
def tearDown(self):
socket.getfqdn = self.real_getfqdn
# indicate that the client is finished
self.client_evt.set()
# wait for the server thread to terminate
self.serv_evt.wait()
self.thread.join()
def testBasic(self):
# smoke test
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
smtp.quit()
def testEHLO(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
# no features should be present before the EHLO
self.assertEqual(smtp.esmtp_features, {})
# features expected from the test server
expected_features = {'expn':'',
'size': '20000000',
'starttls': '',
'deliverby': '',
'help': '',
}
smtp.ehlo()
self.assertEqual(smtp.esmtp_features, expected_features)
for k in expected_features:
self.assertTrue(smtp.has_extn(k))
self.assertFalse(smtp.has_extn('unsupported-feature'))
smtp.quit()
def testVRFY(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
for addr_spec, name in sim_users.items():
expected_known = (250, bytes('%s %s' %
(name, smtplib.quoteaddr(addr_spec)),
"ascii"))
self.assertEqual(smtp.vrfy(addr_spec), expected_known)
u = '[email protected]'
expected_unknown = (550, ('No such user: %s' % u).encode('ascii'))
self.assertEqual(smtp.vrfy(u), expected_unknown)
smtp.quit()
def testEXPN(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
for listname, members in sim_lists.items():
users = []
for m in members:
users.append('%s %s' % (sim_users[m], smtplib.quoteaddr(m)))
expected_known = (250, bytes('\n'.join(users), "ascii"))
self.assertEqual(smtp.expn(listname), expected_known)
u = 'PSU-Members-List'
expected_unknown = (550, b'No access for you!')
self.assertEqual(smtp.expn(u), expected_unknown)
smtp.quit()
def testAUTH_PLAIN(self):
self.serv.add_feature("AUTH PLAIN")
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
resp = smtp.login(sim_auth[0], sim_auth[1])
self.assertEqual(resp, (235, b'Authentication Succeeded'))
smtp.close()
def testAUTH_LOGIN(self):
self.serv.add_feature("AUTH LOGIN")
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
resp = smtp.login(sim_auth[0], sim_auth[1])
self.assertEqual(resp, (235, b'Authentication Succeeded'))
smtp.close()
def testAUTH_CRAM_MD5(self):
self.serv.add_feature("AUTH CRAM-MD5")
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
resp = smtp.login(sim_auth[0], sim_auth[1])
self.assertEqual(resp, (235, b'Authentication Succeeded'))
smtp.close()
def testAUTH_multiple(self):
# Test that multiple authentication methods are tried.
self.serv.add_feature("AUTH BOGUS PLAIN LOGIN CRAM-MD5")
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
resp = smtp.login(sim_auth[0], sim_auth[1])
self.assertEqual(resp, (235, b'Authentication Succeeded'))
smtp.close()
def test_auth_function(self):
supported = {'CRAM-MD5', 'PLAIN', 'LOGIN'}
for mechanism in supported:
self.serv.add_feature("AUTH {}".format(mechanism))
for mechanism in supported:
with self.subTest(mechanism=mechanism):
smtp = smtplib.SMTP(HOST, self.port,
local_hostname='localhost', timeout=15)
smtp.ehlo('foo')
smtp.user, smtp.password = sim_auth[0], sim_auth[1]
method = 'auth_' + mechanism.lower().replace('-', '_')
resp = smtp.auth(mechanism, getattr(smtp, method))
self.assertEqual(resp, (235, b'Authentication Succeeded'))
smtp.close()
def test_quit_resets_greeting(self):
smtp = smtplib.SMTP(HOST, self.port,
local_hostname='localhost',
timeout=15)
code, message = smtp.ehlo()
self.assertEqual(code, 250)
self.assertIn('size', smtp.esmtp_features)
smtp.quit()
self.assertNotIn('size', smtp.esmtp_features)
smtp.connect(HOST, self.port)
self.assertNotIn('size', smtp.esmtp_features)
smtp.ehlo_or_helo_if_needed()
self.assertIn('size', smtp.esmtp_features)
smtp.quit()
def test_with_statement(self):
with smtplib.SMTP(HOST, self.port) as smtp:
code, message = smtp.noop()
self.assertEqual(code, 250)
self.assertRaises(smtplib.SMTPServerDisconnected, smtp.send, b'foo')
with smtplib.SMTP(HOST, self.port) as smtp:
smtp.close()
self.assertRaises(smtplib.SMTPServerDisconnected, smtp.send, b'foo')
def test_with_statement_QUIT_failure(self):
with self.assertRaises(smtplib.SMTPResponseException) as error:
with smtplib.SMTP(HOST, self.port) as smtp:
smtp.noop()
self.serv._SMTPchannel.quit_response = '421 QUIT FAILED'
self.assertEqual(error.exception.smtp_code, 421)
self.assertEqual(error.exception.smtp_error, b'QUIT FAILED')
#TODO: add tests for correct AUTH method fallback now that the
#test infrastructure can support it.
# Issue 17498: make sure _rset does not raise SMTPServerDisconnected exception
    def test__rset_from_mail_cmd(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
smtp.noop()
self.serv._SMTPchannel.mail_response = '451 Requested action aborted'
self.serv._SMTPchannel.disconnect = True
with self.assertRaises(smtplib.SMTPSenderRefused):
smtp.sendmail('John', 'Sally', 'test message')
self.assertIsNone(smtp.sock)
# Issue 5713: make sure close, not rset, is called if we get a 421 error
def test_421_from_mail_cmd(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
smtp.noop()
self.serv._SMTPchannel.mail_response = '421 closing connection'
with self.assertRaises(smtplib.SMTPSenderRefused):
smtp.sendmail('John', 'Sally', 'test message')
self.assertIsNone(smtp.sock)
self.assertEqual(self.serv._SMTPchannel.rset_count, 0)
def test_421_from_rcpt_cmd(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
smtp.noop()
self.serv._SMTPchannel.rcpt_response = ['250 accepted', '421 closing']
with self.assertRaises(smtplib.SMTPRecipientsRefused) as r:
smtp.sendmail('John', ['Sally', 'Frank', 'George'], 'test message')
self.assertIsNone(smtp.sock)
self.assertEqual(self.serv._SMTPchannel.rset_count, 0)
self.assertDictEqual(r.exception.args[0], {'Frank': (421, b'closing')})
def test_421_from_data_cmd(self):
class MySimSMTPChannel(SimSMTPChannel):
def found_terminator(self):
if self.smtp_state == self.DATA:
self.push('421 closing')
else:
super().found_terminator()
self.serv.channel_class = MySimSMTPChannel
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
smtp.noop()
with self.assertRaises(smtplib.SMTPDataError):
smtp.sendmail('[email protected]', ['[email protected]'], 'test message')
self.assertIsNone(smtp.sock)
self.assertEqual(self.serv._SMTPchannel.rcpt_count, 0)
def test_smtputf8_NotSupportedError_if_no_server_support(self):
smtp = smtplib.SMTP(
HOST, self.port, local_hostname='localhost', timeout=3)
self.addCleanup(smtp.close)
smtp.ehlo()
self.assertTrue(smtp.does_esmtp)
self.assertFalse(smtp.has_extn('smtputf8'))
self.assertRaises(
smtplib.SMTPNotSupportedError,
smtp.sendmail,
'John', 'Sally', '', mail_options=['BODY=8BITMIME', 'SMTPUTF8'])
self.assertRaises(
smtplib.SMTPNotSupportedError,
smtp.mail, 'John', options=['BODY=8BITMIME', 'SMTPUTF8'])
def test_send_unicode_without_SMTPUTF8(self):
smtp = smtplib.SMTP(
HOST, self.port, local_hostname='localhost', timeout=3)
self.addCleanup(smtp.close)
self.assertRaises(UnicodeEncodeError, smtp.sendmail, 'Alice', 'Böb', '')
self.assertRaises(UnicodeEncodeError, smtp.mail, 'Älice')
    def test_name_field_not_included_in_envelope_addresses(self):
smtp = smtplib.SMTP(
HOST, self.port, local_hostname='localhost', timeout=3
)
self.addCleanup(smtp.close)
message = EmailMessage()
message['From'] = email.utils.formataddr(('Michaël', '[email protected]'))
message['To'] = email.utils.formataddr(('René', '[email protected]'))
self.assertDictEqual(smtp.send_message(message), {})
self.assertEqual(self.serv._addresses['from'], '[email protected]')
self.assertEqual(self.serv._addresses['tos'], ['[email protected]'])
class SimSMTPUTF8Server(SimSMTPServer):
def __init__(self, *args, **kw):
# The base SMTP server turns these on automatically, but our test
# server is set up to munge the EHLO response, so we need to provide
# them as well. And yes, the call is to SMTPServer not SimSMTPServer.
self._extra_features = ['SMTPUTF8', '8BITMIME']
smtpd.SMTPServer.__init__(self, *args, **kw)
def handle_accepted(self, conn, addr):
self._SMTPchannel = self.channel_class(
self._extra_features, self, conn, addr,
decode_data=self._decode_data,
enable_SMTPUTF8=self.enable_SMTPUTF8,
)
def process_message(self, peer, mailfrom, rcpttos, data, mail_options=None,
rcpt_options=None):
self.last_peer = peer
self.last_mailfrom = mailfrom
self.last_rcpttos = rcpttos
self.last_message = data
self.last_mail_options = mail_options
self.last_rcpt_options = rcpt_options
class SMTPUTF8SimTests(unittest.TestCase):
maxDiff = None
def setUp(self):
self.real_getfqdn = socket.getfqdn
socket.getfqdn = mock_socket.getfqdn
self.serv_evt = threading.Event()
self.client_evt = threading.Event()
# Pick a random unused port by passing 0 for the port number
self.serv = SimSMTPUTF8Server((HOST, 0), ('nowhere', -1),
decode_data=False,
enable_SMTPUTF8=True)
# Keep a note of what port was assigned
self.port = self.serv.socket.getsockname()[1]
serv_args = (self.serv, self.serv_evt, self.client_evt)
self.thread = threading.Thread(target=debugging_server, args=serv_args)
self.thread.start()
# wait until server thread has assigned a port number
self.serv_evt.wait()
self.serv_evt.clear()
def tearDown(self):
socket.getfqdn = self.real_getfqdn
# indicate that the client is finished
self.client_evt.set()
# wait for the server thread to terminate
self.serv_evt.wait()
self.thread.join()
def test_test_server_supports_extensions(self):
smtp = smtplib.SMTP(
HOST, self.port, local_hostname='localhost', timeout=3)
self.addCleanup(smtp.close)
smtp.ehlo()
self.assertTrue(smtp.does_esmtp)
self.assertTrue(smtp.has_extn('smtputf8'))
def test_send_unicode_with_SMTPUTF8_via_sendmail(self):
m = '¡a test message containing unicode!'.encode('utf-8')
smtp = smtplib.SMTP(
HOST, self.port, local_hostname='localhost', timeout=3)
self.addCleanup(smtp.close)
smtp.sendmail('Jőhn', 'Sálly', m,
mail_options=['BODY=8BITMIME', 'SMTPUTF8'])
self.assertEqual(self.serv.last_mailfrom, 'Jőhn')
self.assertEqual(self.serv.last_rcpttos, ['Sálly'])
self.assertEqual(self.serv.last_message, m)
self.assertIn('BODY=8BITMIME', self.serv.last_mail_options)
self.assertIn('SMTPUTF8', self.serv.last_mail_options)
self.assertEqual(self.serv.last_rcpt_options, [])
def test_send_unicode_with_SMTPUTF8_via_low_level_API(self):
m = '¡a test message containing unicode!'.encode('utf-8')
smtp = smtplib.SMTP(
HOST, self.port, local_hostname='localhost', timeout=3)
self.addCleanup(smtp.close)
smtp.ehlo()
self.assertEqual(
smtp.mail('Jő', options=['BODY=8BITMIME', 'SMTPUTF8']),
(250, b'OK'))
self.assertEqual(smtp.rcpt('János'), (250, b'OK'))
self.assertEqual(smtp.data(m), (250, b'OK'))
self.assertEqual(self.serv.last_mailfrom, 'Jő')
self.assertEqual(self.serv.last_rcpttos, ['János'])
self.assertEqual(self.serv.last_message, m)
self.assertIn('BODY=8BITMIME', self.serv.last_mail_options)
self.assertIn('SMTPUTF8', self.serv.last_mail_options)
self.assertEqual(self.serv.last_rcpt_options, [])
def test_send_message_uses_smtputf8_if_addrs_non_ascii(self):
msg = EmailMessage()
msg['From'] = "Páolo <fő[email protected]>"
msg['To'] = 'Dinsdale'
msg['Subject'] = 'Nudge nudge, wink, wink \u1F609'
# XXX I don't know why I need two \n's here, but this is an existing
# bug (if it is one) and not a problem with the new functionality.
msg.set_content("oh là là, know what I mean, know what I mean?\n\n")
        # XXX smtpd converts received \r\n to \n, so we can't easily test that
        # we are successfully sending \r\n :(.
expected = textwrap.dedent("""\
From: Páolo <fő[email protected]>
To: Dinsdale
Subject: Nudge nudge, wink, wink \u1F609
Content-Type: text/plain; charset="utf-8"
Content-Transfer-Encoding: 8bit
MIME-Version: 1.0
oh là là, know what I mean, know what I mean?
""")
smtp = smtplib.SMTP(
HOST, self.port, local_hostname='localhost', timeout=3)
self.addCleanup(smtp.close)
self.assertEqual(smtp.send_message(msg), {})
self.assertEqual(self.serv.last_mailfrom, 'fő[email protected]')
self.assertEqual(self.serv.last_rcpttos, ['Dinsdale'])
self.assertEqual(self.serv.last_message.decode(), expected)
self.assertIn('BODY=8BITMIME', self.serv.last_mail_options)
self.assertIn('SMTPUTF8', self.serv.last_mail_options)
self.assertEqual(self.serv.last_rcpt_options, [])
def test_send_message_error_on_non_ascii_addrs_if_no_smtputf8(self):
msg = EmailMessage()
msg['From'] = "Páolo <fő[email protected]>"
msg['To'] = 'Dinsdale'
msg['Subject'] = 'Nudge nudge, wink, wink \u1F609'
smtp = smtplib.SMTP(
HOST, self.port, local_hostname='localhost', timeout=3)
self.addCleanup(smtp.close)
        # Pass the callable and its argument separately; calling
        # smtp.send_message(msg) inline would raise before assertRaises could
        # catch the exception.
        self.assertRaises(smtplib.SMTPNotSupportedError,
                          smtp.send_message, msg)
EXPECTED_RESPONSE = encode_base64(b'\0psu\0doesnotexist', eol='')
class SimSMTPAUTHInitialResponseChannel(SimSMTPChannel):
def smtp_AUTH(self, arg):
# RFC 4954's AUTH command allows for an optional initial-response.
        # Not all AUTH methods support this; some require a challenge.  AUTH
        # PLAIN supports an initial-response, so test that here.  See issue #15014.
args = arg.split()
if args[0].lower() == 'plain':
if len(args) == 2:
# AUTH PLAIN <initial-response> with the response base 64
# encoded. Hard code the expected response for the test.
if args[1] == EXPECTED_RESPONSE:
self.push('235 Ok')
return
self.push('571 Bad authentication')
class SimSMTPAUTHInitialResponseServer(SimSMTPServer):
channel_class = SimSMTPAUTHInitialResponseChannel
class SMTPAUTHInitialResponseSimTests(unittest.TestCase):
def setUp(self):
self.real_getfqdn = socket.getfqdn
socket.getfqdn = mock_socket.getfqdn
self.serv_evt = threading.Event()
self.client_evt = threading.Event()
# Pick a random unused port by passing 0 for the port number
self.serv = SimSMTPAUTHInitialResponseServer(
(HOST, 0), ('nowhere', -1), decode_data=True)
# Keep a note of what port was assigned
self.port = self.serv.socket.getsockname()[1]
serv_args = (self.serv, self.serv_evt, self.client_evt)
self.thread = threading.Thread(target=debugging_server, args=serv_args)
self.thread.start()
# wait until server thread has assigned a port number
self.serv_evt.wait()
self.serv_evt.clear()
def tearDown(self):
socket.getfqdn = self.real_getfqdn
# indicate that the client is finished
self.client_evt.set()
# wait for the server thread to terminate
self.serv_evt.wait()
self.thread.join()
def testAUTH_PLAIN_initial_response_login(self):
self.serv.add_feature('AUTH PLAIN')
smtp = smtplib.SMTP(HOST, self.port,
local_hostname='localhost', timeout=15)
smtp.login('psu', 'doesnotexist')
smtp.close()
def testAUTH_PLAIN_initial_response_auth(self):
self.serv.add_feature('AUTH PLAIN')
smtp = smtplib.SMTP(HOST, self.port,
local_hostname='localhost', timeout=15)
smtp.user = 'psu'
smtp.password = 'doesnotexist'
code, response = smtp.auth('plain', smtp.auth_plain)
smtp.close()
self.assertEqual(code, 235)
@support.reap_threads
def test_main(verbose=None):
    support.run_unittest(
        BadHELOServerTests,
        DebuggingServerTests,
        GeneralTests,
        NonConnectingTests,
        SMTPAUTHInitialResponseSimTests,
        SMTPSimTests,
        SMTPUTF8SimTests,
        TooLongLineTests,
        )
if __name__ == '__main__':
test_main()
| [
"[email protected]"
] | |
a108d8f0631873f4b65550ed4b7d482f12e3e8a6 | 02422812b5e93225f6c842ec57aae601cb939a8d | /tests/client/internal_messaging/test_producer.py | fc80a258c21a90b32dbe40386e709df21e14b6aa | [
"Apache-2.0"
] | permissive | gcollard/lightbus | 1af20564bb05df76ed7302f6eb93487c5b17592d | d04deeda8ccef5a582b79255725ca2025a085c02 | refs/heads/master | 2022-12-27T01:02:45.505846 | 2020-10-02T02:18:05 | 2020-10-02T02:18:05 | 300,042,306 | 0 | 0 | Apache-2.0 | 2020-10-02T02:18:06 | 2020-09-30T19:44:52 | Python | UTF-8 | Python | false | false | 2,372 | py | import asyncio
import logging
import pytest
from _pytest.logging import LogCaptureFixture
from lightbus.client.internal_messaging.producer import InternalProducer
pytestmark = pytest.mark.unit
@pytest.mark.asyncio
async def test_queue_monitor(producer: InternalProducer, caplog: LogCaptureFixture, fake_coroutine):
"""Ensure the queue monitor logs as we expect
Note that something we implicitly test for here is that the monitor
does not log lots of duplicate lines. Rather it only logs when
something changes.
"""
producer.size_warning = 3
producer.monitor_interval = 0.01
caplog.set_level(logging.WARNING)
# Start the producer running
producer.start()
# No logging yet
assert not caplog.records
# Add a couple of items to the queue (still under size_warning)
producer.queue.put_nowait(None)
producer.queue.put_nowait(None)
await asyncio.sleep(0.05)
# Still no logging yet
assert not caplog.records
# One more gets us up to the warning level
producer.queue.put_nowait(None)
await asyncio.sleep(0.05)
# Now we have logging
assert len(caplog.records) == 1
assert caplog.records[0].getMessage() == "Queue in InternalProducer now has 3 commands."
caplog.clear() # Clear the log messages
# Let's check we get another messages when the queue gets bigger again
producer.queue.put_nowait(None)
await asyncio.sleep(0.05)
assert len(caplog.records) == 1
assert caplog.records[0].getMessage() == "Queue in InternalProducer now has 4 commands."
caplog.clear() # Clear the log messages
# Now check we get logging when the queue shrinks, but is still above the warning level
producer.queue.get_nowait()
await asyncio.sleep(0.05)
assert len(caplog.records) == 1
assert caplog.records[0].getMessage() == (
"Queue in InternalProducer has shrunk back down to 3 commands."
)
caplog.clear() # Clear the log messages
# Now check we get logging when the queue shrinks to BELOW the warning level
producer.queue.get_nowait()
await asyncio.sleep(0.05)
assert len(caplog.records) == 1
assert caplog.records[0].getMessage() == (
"Queue in InternalProducer has shrunk back down to 2 commands. "
"Queue is now at an OK size again."
)
caplog.clear() # Clear the log messages
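# Note (added): the `producer` and `fake_coroutine` fixtures used above are
# assumed to come from the project's conftest.py; `size_warning` and
# `monitor_interval` are deliberately set small so the monitor thresholds can
# be crossed within the short asyncio.sleep(0.05) pauses.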
| [
"[email protected]"
] | |
3b5adac9dca8f817319ada3b9e7cefa9ca0912f5 | 8f9ea3f14bdf2187de759939b2bbc87fe68ccfc0 | /tensorflow/python/training/optimizer.py | a9287a0f0d0391cc6e0b297cce18eebaf9f64291 | [
"Apache-2.0"
] | permissive | davidstanke/bazel-mvn-demo | 4ea43f0ba293a28b916a27eab5f0812e9b753c2c | cff14dddce15ea7152988da576673bd15bab6c6e | refs/heads/master | 2022-10-20T07:52:29.651851 | 2018-11-22T13:17:51 | 2018-11-22T13:17:51 | 157,782,756 | 2 | 0 | Apache-2.0 | 2022-10-04T23:47:05 | 2018-11-15T22:54:09 | C++ | UTF-8 | Python | false | false | 48,470 | py | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Base class for optimizers."""
# pylint: disable=g-bad-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gradients
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.training import distribute as distribute_lib
from tensorflow.python.training import slot_creator
from tensorflow.python.training.checkpointable import base as checkpointable
from tensorflow.python.util import nest
from tensorflow.python.util.tf_export import tf_export
def get_filtered_grad_fn(grad_fn):
# `distributed_context.join()` requires that its arguments are parallel
# across threads, and in particular that `grads_and_vars` has the same
# variables in the same order.
# When computing gradients in eager mode with multiple threads, you
# can get extra variables with a gradient of `None`. This happens when
# those variables are accessed in another thread during the gradient
# computation. To get a consistent set of variables, we filter out
# those with `None` gradients.
def filtered_grad_fn(x=None):
return [(g, v) for g, v in grad_fn(x) if g is not None]
return filtered_grad_fn
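# Illustrative example (added; not part of the TensorFlow API): a plain-Python
# stand-in for grad_fn shows the filtering contract described above.
if __name__ == "__main__":  # documentation example only
  _demo_fn = get_filtered_grad_fn(lambda x=None: [("g0", "v0"), (None, "v1")])
  assert _demo_fn() == [("g0", "v0")]  # the pair with a None gradient is dropped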
def _deduplicate_indexed_slices(values, indices):
"""Sums `values` associated with any non-unique `indices`.
Args:
values: A `Tensor` with rank >= 1.
indices: A one-dimensional integer `Tensor`, indexing into the first
dimension of `values` (as in an IndexedSlices object).
Returns:
A tuple of (`summed_values`, `unique_indices`) where `unique_indices` is a
de-duplicated version of `indices` and `summed_values` contains the sum of
`values` slices associated with each unique index.
"""
unique_indices, new_index_positions = array_ops.unique(indices)
summed_values = math_ops.unsorted_segment_sum(
values, new_index_positions,
array_ops.shape(unique_indices)[0])
return (summed_values, unique_indices)
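# Worked example (added for exposition): _deduplicate_indexed_slices(
#     values=[1, 1], indices=[0, 0]) evaluates to summed_values=[2] and
# unique_indices=[0]; duplicate indices are merged by summing their values,
# matching the IndexedSlices example in _apply_sparse_duplicate_indices below.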
def _var_key(var):
if context.executing_eagerly():
return var._unique_id # pylint: disable=protected-access
return (var.op.graph, var.op.name)
class _OptimizableVariable(object):
"""Interface for abstracting over variables in the optimizers."""
@abc.abstractmethod
def target(self):
"""Returns the optimization target for this variable."""
raise NotImplementedError("Calling an abstract method.")
@abc.abstractmethod
def update_op(self, optimizer, g):
"""Returns the update ops for updating the variable."""
raise NotImplementedError("Calling an abstract method.")
class _RefVariableProcessor(_OptimizableVariable):
"""Processor for Variable."""
def __init__(self, v):
self._v = v
def __str__(self):
return "<_RefVariableProcessor(%s)>" % self._v
def target(self):
return self._v._ref() # pylint: disable=protected-access
def update_op(self, optimizer, g):
if isinstance(g, ops.Tensor):
update_op = optimizer._apply_dense(g, self._v) # pylint: disable=protected-access
if self._v.constraint is not None:
with ops.control_dependencies([update_op]):
return self._v.assign(self._v.constraint(self._v))
else:
return update_op
else:
assert isinstance(g, ops.IndexedSlices), ("Gradient ", g, " is neither a "
"tensor nor IndexedSlices.")
if self._v.constraint is not None:
raise RuntimeError(
"Cannot use a constraint function on a sparse variable.")
# pylint: disable=protected-access
return optimizer._apply_sparse_duplicate_indices(g, self._v)
class _DenseReadResourceVariableProcessor(_OptimizableVariable):
"""Processor for dense ResourceVariables."""
def __init__(self, v):
self._v = v
def target(self):
return self._v
def update_op(self, optimizer, g):
# pylint: disable=protected-access
update_op = optimizer._resource_apply_dense(g, self._v.op.inputs[0])
if self._v.constraint is not None:
with ops.control_dependencies([update_op]):
return self._v.assign(self._v.constraint(self._v))
else:
return update_op
class _DenseResourceVariableProcessor(_OptimizableVariable):
"""Processor for dense ResourceVariables."""
def __init__(self, v):
self._v = v
def target(self):
return self._v
def update_op(self, optimizer, g):
# pylint: disable=protected-access
if isinstance(g, ops.IndexedSlices):
if self._v.constraint is not None:
raise RuntimeError(
"Cannot use a constraint function on a sparse variable.")
return optimizer._resource_apply_sparse_duplicate_indices(
g.values, self._v, g.indices)
update_op = optimizer._resource_apply_dense(g, self._v)
if self._v.constraint is not None:
with ops.control_dependencies([update_op]):
return self._v.assign(self._v.constraint(self._v))
else:
return update_op
class _TensorProcessor(_OptimizableVariable):
"""Processor for ordinary Tensors.
Even though a Tensor can't really be updated, sometimes it is useful to
compute the gradients with respect to a Tensor using the optimizer. Updating
the Tensor is, of course, unsupported.
"""
def __init__(self, v):
self._v = v
def target(self):
return self._v
def update_op(self, optimizer, g):
raise NotImplementedError("Trying to update a Tensor ", self._v)
def _get_processor(v):
"""The processor of v."""
if context.executing_eagerly():
if isinstance(v, ops.Tensor):
return _TensorProcessor(v)
else:
return _DenseResourceVariableProcessor(v)
if isinstance(
v, resource_variable_ops.ResourceVariable) and not v._in_graph_mode: # pylint: disable=protected-access
# True if and only if `v` was initialized eagerly.
return _DenseResourceVariableProcessor(v)
if v.op.type == "VarHandleOp":
return _DenseResourceVariableProcessor(v)
if isinstance(v, variables.Variable):
return _RefVariableProcessor(v)
if isinstance(v, ops.Tensor):
return _TensorProcessor(v)
raise NotImplementedError("Trying to optimize unsupported type ", v)
@tf_export("train.Optimizer")
class Optimizer(
# Optimizers inherit from CheckpointableBase rather than Checkpointable
# since they do most of their dependency management themselves (slot
# variables are special-cased, and non-slot variables are keyed to graphs).
checkpointable.CheckpointableBase):
"""Base class for optimizers.
This class defines the API to add Ops to train a model. You never use this
class directly, but instead instantiate one of its subclasses such as
`GradientDescentOptimizer`, `AdagradOptimizer`, or `MomentumOptimizer`.
### Usage
```python
# Create an optimizer with the desired parameters.
opt = GradientDescentOptimizer(learning_rate=0.1)
# Add Ops to the graph to minimize a cost by updating a list of variables.
# "cost" is a Tensor, and the list of variables contains tf.Variable
# objects.
opt_op = opt.minimize(cost, var_list=<list of variables>)
```
In the training program you will just have to run the returned Op.
```python
# Execute opt_op to do one step of training:
opt_op.run()
```
### Processing gradients before applying them.
Calling `minimize()` takes care of both computing the gradients and
applying them to the variables. If you want to process the gradients
before applying them you can instead use the optimizer in three steps:
1. Compute the gradients with `compute_gradients()`.
2. Process the gradients as you wish.
3. Apply the processed gradients with `apply_gradients()`.
Example:
```python
# Create an optimizer.
opt = GradientDescentOptimizer(learning_rate=0.1)
# Compute the gradients for a list of variables.
grads_and_vars = opt.compute_gradients(loss, <list of variables>)
# grads_and_vars is a list of tuples (gradient, variable). Do whatever you
# need to the 'gradient' part, for example cap them, etc.
capped_grads_and_vars = [(MyCapper(gv[0]), gv[1]) for gv in grads_and_vars]
# Ask the optimizer to apply the capped gradients.
opt.apply_gradients(capped_grads_and_vars)
```
### Gating Gradients
Both `minimize()` and `compute_gradients()` accept a `gate_gradients`
argument that controls the degree of parallelism during the application of
the gradients.
The possible values are: `GATE_NONE`, `GATE_OP`, and `GATE_GRAPH`.
<b>`GATE_NONE`</b>: Compute and apply gradients in parallel. This provides
the maximum parallelism in execution, at the cost of some non-reproducibility
in the results. For example the two gradients of `matmul` depend on the input
values: With `GATE_NONE` one of the gradients could be applied to one of the
inputs _before_ the other gradient is computed resulting in non-reproducible
results.
<b>`GATE_OP`</b>: For each Op, make sure all gradients are computed before
they are used. This prevents race conditions for Ops that generate gradients
for multiple inputs where the gradients depend on the inputs.
<b>`GATE_GRAPH`</b>: Make sure all gradients for all variables are computed
before any one of them is used. This provides the least parallelism but can
be useful if you want to process all gradients before applying any of them.
### Slots
Some optimizer subclasses, such as `MomentumOptimizer` and `AdagradOptimizer`
allocate and manage additional variables associated with the variables to
train. These are called <i>Slots</i>. Slots have names and you can ask the
optimizer for the names of the slots that it uses. Once you have a slot name
you can ask the optimizer for the variable it created to hold the slot value.
This can be useful if you want to log debug a training algorithm, report stats
about the slots, etc.
"""
# Values for gate_gradients.
GATE_NONE = 0
GATE_OP = 1
GATE_GRAPH = 2
def __init__(self, use_locking, name):
"""Create a new Optimizer.
This must be called by the constructors of subclasses.
Args:
use_locking: Bool. If True apply use locks to prevent concurrent updates
to variables.
name: A non-empty string. The name to use for accumulators created
for the optimizer.
Raises:
ValueError: If name is malformed.
"""
if not name:
raise ValueError("Must specify the optimizer name")
self._use_locking = use_locking
self._name = name
# Dictionary of slots.
# {slot_name :
# {_var_key(variable_to_train): slot_for_the_variable, ... },
# ... }
self._slots = {}
self._non_slot_dict = {}
# For implementing Checkpointable. Stores information about how to restore
# slot variables which have not yet been created
# (checkpointable._CheckpointPosition objects).
# {slot_name :
# {_var_key(variable_to_train): [checkpoint_position, ... ], ... },
# ... }
self._deferred_slot_restorations = {}
# TODO(isaprykin): When using a DistributionStrategy, and when an
# optimizer is created in each tower, it might be dangerous to
    # rely on some Optimizer methods. When such methods are called on a
    # per-tower optimizer, an exception needs to be thrown. We do
    # allow creating per-tower optimizers however, because the
# compute_gradients()->apply_gradients() sequence is safe.
def get_name(self):
return self._name
def minimize(self, loss, global_step=None, var_list=None,
gate_gradients=GATE_OP, aggregation_method=None,
colocate_gradients_with_ops=False, name=None,
grad_loss=None):
"""Add operations to minimize `loss` by updating `var_list`.
This method simply combines calls `compute_gradients()` and
`apply_gradients()`. If you want to process the gradient before applying
them call `compute_gradients()` and `apply_gradients()` explicitly instead
of using this function.
Args:
loss: A `Tensor` containing the value to minimize.
global_step: Optional `Variable` to increment by one after the
variables have been updated.
var_list: Optional list or tuple of `Variable` objects to update to
minimize `loss`. Defaults to the list of variables collected in
the graph under the key `GraphKeys.TRAINABLE_VARIABLES`.
gate_gradients: How to gate the computation of gradients. Can be
`GATE_NONE`, `GATE_OP`, or `GATE_GRAPH`.
aggregation_method: Specifies the method used to combine gradient terms.
Valid values are defined in the class `AggregationMethod`.
colocate_gradients_with_ops: If True, try colocating gradients with
the corresponding op.
name: Optional name for the returned operation.
grad_loss: Optional. A `Tensor` holding the gradient computed for `loss`.
Returns:
An Operation that updates the variables in `var_list`. If `global_step`
was not `None`, that operation also increments `global_step`.
Raises:
ValueError: If some of the variables are not `Variable` objects.
@compatibility(eager)
When eager execution is enabled, `loss` should be a Python function that
takes elements of `var_list` as arguments and computes the value to be
minimized. If `var_list` is None, `loss` should take no arguments.
Minimization (and gradient computation) is done with respect to the
elements of `var_list` if not None, else with respect to any trainable
variables created during the execution of the `loss` function.
`gate_gradients`, `aggregation_method`, `colocate_gradients_with_ops` and
`grad_loss` are ignored when eager execution is enabled.
@end_compatibility
"""
grads_and_vars = self.compute_gradients(
loss, var_list=var_list, gate_gradients=gate_gradients,
aggregation_method=aggregation_method,
colocate_gradients_with_ops=colocate_gradients_with_ops,
grad_loss=grad_loss)
vars_with_grad = [v for g, v in grads_and_vars if g is not None]
if not vars_with_grad:
raise ValueError(
"No gradients provided for any variable, check your graph for ops"
" that do not support gradients, between variables %s and loss %s." %
([str(v) for _, v in grads_and_vars], loss))
return self.apply_gradients(grads_and_vars, global_step=global_step,
name=name)
def compute_gradients(self, loss, var_list=None,
gate_gradients=GATE_OP,
aggregation_method=None,
colocate_gradients_with_ops=False,
grad_loss=None):
"""Compute gradients of `loss` for the variables in `var_list`.
This is the first part of `minimize()`. It returns a list
of (gradient, variable) pairs where "gradient" is the gradient
for "variable". Note that "gradient" can be a `Tensor`, an
`IndexedSlices`, or `None` if there is no gradient for the
given variable.
Args:
loss: A Tensor containing the value to minimize or a callable taking
no arguments which returns the value to minimize. When eager execution
is enabled it must be a callable.
var_list: Optional list or tuple of `tf.Variable` to update to minimize
`loss`. Defaults to the list of variables collected in the graph
under the key `GraphKeys.TRAINABLE_VARIABLES`.
gate_gradients: How to gate the computation of gradients. Can be
`GATE_NONE`, `GATE_OP`, or `GATE_GRAPH`.
aggregation_method: Specifies the method used to combine gradient terms.
Valid values are defined in the class `AggregationMethod`.
colocate_gradients_with_ops: If True, try colocating gradients with
the corresponding op.
grad_loss: Optional. A `Tensor` holding the gradient computed for `loss`.
Returns:
A list of (gradient, variable) pairs. Variable is always present, but
gradient can be `None`.
Raises:
TypeError: If `var_list` contains anything else than `Variable` objects.
ValueError: If some arguments are invalid.
RuntimeError: If called with eager execution enabled and `loss` is
not callable.
@compatibility(eager)
When eager execution is enabled, `gate_gradients`, `aggregation_method`,
and `colocate_gradients_with_ops` are ignored.
@end_compatibility
"""
if callable(loss):
with backprop.GradientTape() as tape:
if var_list is not None:
tape.watch(var_list)
loss_value = loss()
# Scale loss if using a "mean" loss reduction and multiple towers.
# Have to be careful to call distribute_lib.get_loss_reduction()
# *after* loss() is evaluated, so we know what loss reduction it uses.
# TODO(josh11b): Test that we handle weight decay in a reasonable way.
if distribute_lib.get_loss_reduction() == "mean":
num_towers = distribute_lib.get_distribution_strategy().num_towers
if num_towers > 1:
loss_value *= (1. / num_towers)
if var_list is None:
var_list = tape.watched_variables()
grads = tape.gradient(loss_value, var_list, grad_loss)
return list(zip(grads, var_list))
# Non-callable/Tensor loss case
if context.executing_eagerly():
raise RuntimeError(
"`loss` passed to Optimizer.compute_gradients should "
"be a function when eager execution is enabled.")
# Scale loss if using a "mean" loss reduction and multiple towers.
if distribute_lib.get_loss_reduction() == "mean":
num_towers = distribute_lib.get_distribution_strategy().num_towers
if num_towers > 1:
loss *= (1. / num_towers)
if gate_gradients not in [Optimizer.GATE_NONE, Optimizer.GATE_OP,
Optimizer.GATE_GRAPH]:
raise ValueError("gate_gradients must be one of: Optimizer.GATE_NONE, "
"Optimizer.GATE_OP, Optimizer.GATE_GRAPH. Not %s" %
gate_gradients)
self._assert_valid_dtypes([loss])
if grad_loss is not None:
self._assert_valid_dtypes([grad_loss])
if var_list is None:
var_list = (
variables.trainable_variables() +
ops.get_collection(ops.GraphKeys.TRAINABLE_RESOURCE_VARIABLES))
else:
var_list = nest.flatten(var_list)
# pylint: disable=protected-access
var_list += ops.get_collection(ops.GraphKeys._STREAMING_MODEL_PORTS)
# pylint: enable=protected-access
processors = [_get_processor(v) for v in var_list]
if not var_list:
raise ValueError("No variables to optimize.")
var_refs = [p.target() for p in processors]
grads = gradients.gradients(
loss, var_refs, grad_ys=grad_loss,
gate_gradients=(gate_gradients == Optimizer.GATE_OP),
aggregation_method=aggregation_method,
colocate_gradients_with_ops=colocate_gradients_with_ops)
if gate_gradients == Optimizer.GATE_GRAPH:
grads = control_flow_ops.tuple(grads)
grads_and_vars = list(zip(grads, var_list))
self._assert_valid_dtypes(
[v for g, v in grads_and_vars
if g is not None and v.dtype != dtypes.resource])
return grads_and_vars
def apply_gradients(self, grads_and_vars, global_step=None, name=None):
"""Apply gradients to variables.
This is the second part of `minimize()`. It returns an `Operation` that
applies gradients.
Args:
grads_and_vars: List of (gradient, variable) pairs as returned by
`compute_gradients()`.
global_step: Optional `Variable` to increment by one after the
variables have been updated.
name: Optional name for the returned operation. Default to the
name passed to the `Optimizer` constructor.
Returns:
An `Operation` that applies the specified gradients. If `global_step`
was not None, that operation also increments `global_step`.
Raises:
TypeError: If `grads_and_vars` is malformed.
ValueError: If none of the variables have gradients.
RuntimeError: If you should use `_distributed_apply()` instead.
"""
# This is a default implementation of apply_gradients() that can be shared
# by most optimizers. It relies on the subclass implementing the following
# methods: _create_slots(), _prepare(), _apply_dense(), and _apply_sparse().
# Handle DistributionStrategy case.
if distribute_lib.get_cross_tower_context():
raise RuntimeError("Use `_distributed_apply()` instead of "
"`apply_gradients()` in a cross-tower context.")
# TODO(isaprykin): Get rid of `has_distribution_strategy()` check by
# always calling _distributed_apply(), using the default distribution
# as needed.
if distribute_lib.has_distribution_strategy():
grads_and_vars = get_filtered_grad_fn(lambda _: grads_and_vars)()
return distribute_lib.get_tower_context().merge_call(
self._distributed_apply, grads_and_vars, global_step, name)
# No DistributionStrategy case.
grads_and_vars = tuple(grads_and_vars) # Make sure repeat iteration works.
if not grads_and_vars:
raise ValueError("No variables provided.")
converted_grads_and_vars = []
for g, v in grads_and_vars:
if g is not None:
try:
# Convert the grad to Tensor or IndexedSlices if necessary.
g = ops.convert_to_tensor_or_indexed_slices(g)
except TypeError:
raise TypeError(
"Gradient must be convertible to a Tensor"
" or IndexedSlices, or None: %s" % g)
if not isinstance(g, (ops.Tensor, ops.IndexedSlices)):
raise TypeError(
"Gradient must be a Tensor, IndexedSlices, or None: %s" % g)
p = _get_processor(v)
converted_grads_and_vars.append((g, v, p))
converted_grads_and_vars = tuple(converted_grads_and_vars)
var_list = [v for g, v, _ in converted_grads_and_vars if g is not None]
if not var_list:
raise ValueError("No gradients provided for any variable: %s." %
([str(v) for _, _, v in converted_grads_and_vars],))
with ops.init_scope():
self._create_slots(var_list)
update_ops = []
with ops.name_scope(name, self._name) as name:
self._prepare()
for grad, var, processor in converted_grads_and_vars:
if grad is None:
continue
# We colocate all ops created in _apply_dense or _apply_sparse
# on the same device as the variable.
# TODO(apassos): figure out how to get the variable name here.
if context.executing_eagerly() or isinstance(
var,
resource_variable_ops.ResourceVariable) and not var._in_graph_mode: # pylint: disable=protected-access
scope_name = ""
else:
scope_name = var.op.name
with ops.name_scope("update_" + scope_name), ops.colocate_with(var):
update_ops.append(processor.update_op(self, grad))
if global_step is None:
apply_updates = self._finish(update_ops, name)
else:
with ops.control_dependencies([self._finish(update_ops, "update")]):
with ops.colocate_with(global_step):
if isinstance(global_step, resource_variable_ops.ResourceVariable):
# TODO(apassos): the implicit read in assign_add is slow; consider
# making it less so.
apply_updates = resource_variable_ops.assign_add_variable_op(
global_step.handle,
ops.convert_to_tensor(1, dtype=global_step.dtype),
name=name)
else:
apply_updates = state_ops.assign_add(global_step, 1, name=name)
if not context.executing_eagerly():
if isinstance(apply_updates, ops.Tensor):
apply_updates = apply_updates.op
train_op = ops.get_collection_ref(ops.GraphKeys.TRAIN_OP)
if apply_updates not in train_op:
train_op.append(apply_updates)
return apply_updates
def _distributed_apply(self,
distribution,
grads_and_vars,
global_step=None,
name=None):
"""A version of `apply_gradients` for cross-tower context.
This is a version of `apply_gradients()` for when you are using a
`DistributionStrategy` and are in a cross-tower context. If in a
tower context, use `apply_gradients()` as normal.
Args:
distribution: A `DistributionStrategy` object.
grads_and_vars: List of (gradient, variable) pairs as returned by
`compute_gradients()`, and then aggregated across towers.
global_step: Optional (mirrored) `Variable` to increment by one
after the variables have been updated.
name: Optional name for the returned operation. Default to the
name passed to the `Optimizer` constructor.
Returns:
An `Operation` that applies the specified gradients across all
towers. If `global_step` was not None, that operation also
increments `global_step`.
"""
reduced_grads = distribution.batch_reduce("sum", grads_and_vars)
var_list = [v for _, v in grads_and_vars]
grads_and_vars = zip(reduced_grads, var_list)
# Note that this is called in a cross-tower context.
self._create_slots(var_list)
def update(v, g):
"""Apply gradients to a replica variable."""
assert v is not None
try:
# Convert the grad to Tensor or IndexedSlices if necessary.
g = ops.convert_to_tensor_or_indexed_slices(g)
except TypeError:
raise TypeError("Gradient must be convertible to a Tensor"
" or IndexedSlices, or None: %s" % g)
if not isinstance(g, (ops.Tensor, ops.IndexedSlices)):
raise TypeError(
"Gradient must be a Tensor, IndexedSlices, or None: %s" % g)
p = _get_processor(v)
scope_name = "" if context.executing_eagerly() else v.op.name
# device_policy is set because non-mirrored tensors will be read in
# `update_op`. `_resource_apply_dense`, `lr_t`, `beta1_t` and `beta2_t`
# is an example.
with ops.name_scope("update_" + scope_name):
return p.update_op(self, g)
with ops.name_scope(name, self._name) as name:
self._prepare()
update_ops = [
op
for grad, var in grads_and_vars
for op in distribution.unwrap(distribution.update(var, update, grad))
]
def finish(self, update_ops):
return self._finish(update_ops, "update")
non_slot_devices = distribution.non_slot_devices(var_list)
finish_updates = distribution.update_non_slot(
non_slot_devices, finish, self, update_ops)
if global_step is None:
apply_updates = distribution.group(finish_updates, name=name)
else:
with ops.control_dependencies(distribution.unwrap(finish_updates)):
apply_updates = distribution.group(distribution.update(
global_step, state_ops.assign_add, 1, name=name))
if not context.executing_eagerly():
if isinstance(apply_updates, ops.Tensor):
apply_updates = apply_updates.op
train_op = ops.get_collection_ref(ops.GraphKeys.TRAIN_OP)
if apply_updates not in train_op:
train_op.append(apply_updates)
return apply_updates
def get_slot(self, var, name):
"""Return a slot named `name` created for `var` by the Optimizer.
Some `Optimizer` subclasses use additional variables. For example
`Momentum` and `Adagrad` use variables to accumulate updates. This method
gives access to these `Variable` objects if for some reason you need them.
Use `get_slot_names()` to get the list of slot names created by the
`Optimizer`.
Args:
var: A variable passed to `minimize()` or `apply_gradients()`.
name: A string.
Returns:
The `Variable` for the slot if it was created, `None` otherwise.
"""
# pylint: disable=protected-access
named_slots = self._slots.get(name, None)
if not named_slots:
return None
if hasattr(var, "_mirrored_container"):
# NOTE: If this isn't patched, then there is no `handle` in
# `_resource_apply_dense`.
mirrored_container = var._mirrored_container()
assert mirrored_container is not None
if context.executing_eagerly():
key = mirrored_container._unique_id
else:
key = (mirrored_container.graph, mirrored_container._shared_name)
# pylint: enable=protected-access
mirrored_slot = named_slots.get(key, None)
if mirrored_slot is None: return None
return mirrored_slot.get(device=var.device)
return named_slots.get(_var_key(var), None)
def get_slot_names(self):
"""Return a list of the names of slots created by the `Optimizer`.
See `get_slot()`.
Returns:
A list of strings.
"""
return sorted(self._slots.keys())
def variables(self):
"""A list of variables which encode the current state of `Optimizer`.
Includes slot variables and additional global variables created by the
optimizer in the current default graph.
Returns:
A list of variables.
"""
executing_eagerly = context.executing_eagerly()
current_graph = ops.get_default_graph()
def _from_current_graph(variable):
if executing_eagerly:
# No variable.op in eager mode. We don't expect lots of eager graphs,
# but behavior should be consistent with graph mode.
return variable._graph_key == current_graph._graph_key # pylint: disable=protected-access
else:
return variable.op.graph is current_graph
optimizer_variables = [v for v in self._non_slot_variables()
if _from_current_graph(v)]
for _, variable_dict in self._slots.items():
for _, slot_for_variable in variable_dict.items():
if _from_current_graph(slot_for_variable):
optimizer_variables.append(slot_for_variable)
# Sort variables by name so that the return is deterministic.
return sorted(optimizer_variables, key=lambda v: v.name)
def _create_non_slot_variable(self, initial_value, name, colocate_with):
"""Add an extra variable, not associated with a slot."""
# Recommendation: Use OptimizerV2 if your optimizer uses non-slot variables.
eager = context.executing_eagerly()
graph = None if eager else colocate_with.graph
key = (name, graph)
v = self._non_slot_dict.get(key, None)
if v is None:
self._maybe_initialize_checkpointable()
distribution_strategy = distribute_lib.get_distribution_strategy()
with distribution_strategy.colocate_vars_with(colocate_with):
if eager:
restored_initial_value = self._preload_simple_restoration(
name=name, shape=None)
if restored_initial_value is not None:
initial_value = restored_initial_value
v = variable_scope.variable(initial_value, name=name, trainable=False)
# Restore this variable by name if necessary, but don't add a
# Checkpointable dependency. Optimizers return the current graph's
# non-slot variables from _checkpoint_dependencies explicitly rather
# than unconditionally adding dependencies (since there may be multiple
# non-slot variables with the same name in different graphs, trying to
# save all of them would result in errors).
self._handle_deferred_dependencies(name=name, checkpointable=v)
self._non_slot_dict[key] = v
return v
@property
def _checkpoint_dependencies(self):
"""From Checkpointable. Gather graph-specific non-slot variables to save."""
current_graph_non_slot_variables = []
current_graph_key = ops.get_default_graph()._graph_key # pylint: disable=protected-access
for (name, _), variable_object in sorted(self._non_slot_dict.items(),
# Avoid comparing graphs
key=lambda item: item[0][0]):
if variable_object._graph_key == current_graph_key: # pylint: disable=protected-access
current_graph_non_slot_variables.append(
checkpointable.CheckpointableReference(
name=name, ref=variable_object))
return (super(Optimizer, self)._checkpoint_dependencies
+ current_graph_non_slot_variables)
def _lookup_dependency(self, name):
"""From Checkpointable. Find a non-slot variable in the current graph."""
unconditional = super(Optimizer, self)._lookup_dependency(name)
if unconditional is not None:
return unconditional
graph = None if context.executing_eagerly() else ops.get_default_graph()
return self._get_non_slot_variable(name, graph=graph)
def _get_non_slot_variable(self, name, graph=None):
non_slot = self._non_slot_dict.get((name, graph), None)
if hasattr(non_slot, "_mirrored_container"):
# This is a mirrored non-slot. In order to enable code like `_finish`
# to assign to a non-slot, return the current context replica.
return non_slot.get()
else:
return non_slot
def _non_slot_variables(self):
"""Additional variables created by the `Optimizer`.
Returns:
A list or tuple of variables.
"""
return self._non_slot_dict.values()
def _assert_valid_dtypes(self, tensors):
"""Asserts tensors are all valid types (see `_valid_dtypes`).
Args:
tensors: Tensors to check.
Raises:
ValueError: If any tensor is not a valid type.
"""
valid_dtypes = self._valid_dtypes()
for t in tensors:
dtype = t.dtype.base_dtype
if dtype not in valid_dtypes:
raise ValueError(
"Invalid type %r for %s, expected: %s." % (
dtype, t.name, [v for v in valid_dtypes]))
# --------------
# Methods to be implemented by subclasses if they want to use the
# inherited implementation of apply_gradients() or compute_gradients().
# --------------
def _valid_dtypes(self):
"""Valid types for loss, variables and gradients.
Subclasses should override to allow other float types.
Returns:
Valid types for loss, variables and gradients.
"""
return set(
[dtypes.float16, dtypes.bfloat16, dtypes.float32, dtypes.float64])
def _create_slots(self, var_list):
"""Create all slots needed by the variables.
Args:
var_list: A list of `Variable` objects.
"""
# No slots needed by default
pass
def _prepare(self):
"""Create all needed tensors before applying gradients.
This is called with the name_scope using the "name" that
users have chosen for the application of gradients.
"""
pass
def _apply_dense(self, grad, var):
"""Add ops to apply dense gradients to `var`.
Args:
grad: A `Tensor`.
var: A `Variable` object.
Returns:
An `Operation`.
"""
raise NotImplementedError()
def _resource_apply_dense(self, grad, handle):
"""Add ops to apply dense gradients to the variable `handle`.
Args:
grad: a `Tensor` representing the gradient.
handle: a `Tensor` of dtype `resource` which points to the variable
to be updated.
Returns:
An `Operation` which updates the value of the variable.
"""
raise NotImplementedError()
def _resource_apply_sparse_duplicate_indices(self, grad, handle, indices):
"""Add ops to apply sparse gradients to `handle`, with repeated indices.
Optimizers which override this method must deal with repeated indices. See
the docstring of `_apply_sparse_duplicate_indices` for details. By default
the correct behavior, to sum non-unique indices and their associated
gradients, is enforced by first pre-processing `grad` and `indices` and
passing them on to `_resource_apply_sparse`. Optimizers which deal correctly
with duplicate indices may instead override this method to avoid the
overhead of summing.
Args:
grad: a `Tensor` representing the gradient for the affected indices.
handle: a `Tensor` of dtype `resource` which points to the variable
to be updated.
indices: a `Tensor` of integral type representing the indices for
which the gradient is nonzero. Indices may be repeated.
Returns:
An `Operation` which updates the value of the variable.
"""
summed_grad, unique_indices = _deduplicate_indexed_slices(
values=grad, indices=indices)
return self._resource_apply_sparse(summed_grad, handle, unique_indices)
def _resource_apply_sparse(self, grad, handle, indices):
"""Add ops to apply sparse gradients to the variable `handle`.
Similar to `_apply_sparse`, the `indices` argument to this method has been
de-duplicated. Optimizers which deal correctly with non-unique indices may
instead override `_resource_apply_sparse_duplicate_indices` to avoid this
overhead.
Args:
grad: a `Tensor` representing the gradient for the affected indices.
handle: a `Tensor` of dtype `resource` which points to the variable
to be updated.
indices: a `Tensor` of integral type representing the indices for
which the gradient is nonzero. Indices are unique.
Returns:
An `Operation` which updates the value of the variable.
"""
raise NotImplementedError()
def _apply_sparse_duplicate_indices(self, grad, var):
"""Add ops to apply sparse gradients to `var`, with repeated sparse indices.
Optimizers which override this method must deal with IndexedSlices objects
such as the following:
IndexedSlicesValue(values=[1, 1], indices=[0, 0], dense_shape=[1])
The correct interpretation is:
IndexedSlicesValue(values=[2], indices=[0], dense_shape=[1])
Many optimizers deal incorrectly with repeated indices when updating based
on sparse gradients (e.g. summing squares rather than squaring the sum, or
applying momentum terms multiple times). Adding first is always the correct
behavior, so this is enforced here by reconstructing the IndexedSlices to
have only unique indices, then calling _apply_sparse.
Optimizers which deal correctly with repeated indices may instead override
this method to avoid the overhead of summing indices.
Args:
grad: `IndexedSlices`.
var: A `Variable` object.
Returns:
An `Operation`.
"""
summed_values, unique_indices = _deduplicate_indexed_slices(
values=grad.values, indices=grad.indices)
gradient_no_duplicate_indices = ops.IndexedSlices(
indices=unique_indices,
values=summed_values,
dense_shape=grad.dense_shape)
return self._apply_sparse(gradient_no_duplicate_indices, var)
def _apply_sparse(self, grad, var):
"""Add ops to apply sparse gradients to `var`.
The IndexedSlices object passed to `grad` in this function is by default
pre-processed in `_apply_sparse_duplicate_indices` to remove duplicate
indices (see its docstring for details). Optimizers which can tolerate or
have correct special cases for duplicate sparse indices may override
`_apply_sparse_duplicate_indices` instead of this function, avoiding that
overhead.
Args:
grad: `IndexedSlices`, with no repeated indices.
var: A `Variable` object.
Returns:
An `Operation`.
"""
raise NotImplementedError()
def _finish(self, update_ops, name_scope):
"""Do what is needed to finish the update.
This is called with the `name_scope` using the "name" that
users have chosen for the application of gradients.
Args:
update_ops: List of `Operation` objects to update variables. This list
contains the values returned by the `_apply_dense()` and
`_apply_sparse()` calls.
name_scope: String. Name to use for the returned operation.
Returns:
The operation to apply updates.
"""
return control_flow_ops.group(*update_ops, name=name_scope)
# --------------
# Utility methods for subclasses.
# --------------
def _slot_dict(self, slot_name):
"""Returns a dict for caching slots created under the given name.
Args:
slot_name: Name for the slot.
Returns:
A dict that maps primary `Variable` objects to the slot created
for that variable, under the given slot name.
"""
named_slots = self._slots.get(slot_name, None)
if named_slots is None:
named_slots = {}
self._slots[slot_name] = named_slots
return named_slots
def _get_or_make_slot(self, var, val, slot_name, op_name):
"""Find or create a slot for a variable.
Args:
var: A `Variable` object.
val: A `Tensor`. The initial value of the slot.
slot_name: Name for the slot.
op_name: Name to use when scoping the Variable that
needs to be created for the slot.
Returns:
A `Variable` object.
"""
named_slots = self._slot_dict(slot_name)
if _var_key(var) not in named_slots:
new_slot_variable = slot_creator.create_slot(var, val, op_name)
self._restore_slot_variable(
slot_name=slot_name, variable=var,
slot_variable=new_slot_variable)
named_slots[_var_key(var)] = new_slot_variable
return named_slots[_var_key(var)]
def _get_or_make_slot_with_initializer(self, var, initializer, shape, dtype,
slot_name, op_name):
"""Find or create a slot for a variable, using an Initializer.
Args:
var: A `Variable` object.
initializer: An `Initializer`. The initial value of the slot.
shape: Shape of the initial value of the slot.
dtype: Type of the value of the slot.
slot_name: Name for the slot.
op_name: Name to use when scoping the Variable that
needs to be created for the slot.
Returns:
A `Variable` object.
"""
named_slots = self._slot_dict(slot_name)
if _var_key(var) not in named_slots:
new_slot_variable = slot_creator.create_slot_with_initializer(
var, initializer, shape, dtype, op_name)
self._restore_slot_variable(
slot_name=slot_name, variable=var,
slot_variable=new_slot_variable)
named_slots[_var_key(var)] = new_slot_variable
return named_slots[_var_key(var)]
def _zeros_slot(self, var, slot_name, op_name):
"""Find or create a slot initialized with 0.0.
Args:
var: A `Variable` object.
slot_name: Name for the slot.
op_name: Name to use when scoping the Variable that
needs to be created for the slot.
Returns:
A `Variable` object.
"""
named_slots = self._slot_dict(slot_name)
if _var_key(var) not in named_slots:
new_slot_variable = slot_creator.create_zeros_slot(var, op_name)
self._restore_slot_variable(
slot_name=slot_name, variable=var,
slot_variable=new_slot_variable)
named_slots[_var_key(var)] = new_slot_variable
return named_slots[_var_key(var)]
# --------------
# For implementing the Checkpointable interface.
# --------------
def _restore_slot_variable(self, slot_name, variable, slot_variable):
"""Restore a newly created slot variable's value."""
variable_key = _var_key(variable)
deferred_restorations = self._deferred_slot_restorations.get(
slot_name, {}).pop(variable_key, [])
# Iterate over restores, highest restore UID first to minimize the number
# of assignments.
deferred_restorations.sort(key=lambda position: position.restore_uid,
reverse=True)
for checkpoint_position in deferred_restorations:
checkpoint_position.restore(slot_variable)
def _create_or_restore_slot_variable(
self, slot_variable_position, slot_name, variable):
"""Restore a slot variable's value, possibly creating it.
Called when a variable which has an associated slot variable is created or
restored. When executing eagerly, we create the slot variable with a
restoring initializer.
No new variables are created when graph building. Instead,
_restore_slot_variable catches these after normal creation and adds restore
ops to the graph. This method is nonetheless important when graph building
for the case when a slot variable has already been created but `variable`
has just been added to a dependency graph (causing us to realize that the
slot variable needs to be restored).
Args:
slot_variable_position: A `checkpointable._CheckpointPosition` object
indicating the slot variable `Checkpointable` object to be restored.
slot_name: The name of this `Optimizer`'s slot to restore into.
variable: The variable object this slot is being created for.
"""
named_slots = self._slot_dict(slot_name)
variable_key = _var_key(variable)
slot_variable = named_slots.get(variable_key, None)
if (slot_variable is None and context.executing_eagerly() and
slot_variable_position.is_simple_variable()
# Defer slot variable creation if there is an active variable creator
# scope. Generally we'd like to eagerly create/restore slot variables
# when possible, but this may mean that scopes intended to catch
# `variable` also catch its eagerly created slot variable
# unintentionally (specifically make_template would add a dependency on
# a slot variable if not for this case). Deferring is mostly harmless
# (aside from double initialization), and makes variable creator scopes
# behave the same way they do when graph building.
and not ops.get_default_graph()._variable_creator_stack): # pylint: disable=protected-access
initializer = checkpointable.CheckpointInitialValue(
checkpoint_position=slot_variable_position)
slot_variable = self._get_or_make_slot(
var=variable,
val=initializer,
slot_name=slot_name,
op_name=self._name)
# Slot variables are not owned by any one object (because we don't want to
# save the slot variable if the optimizer is saved without the non-slot
# variable, or if the non-slot variable is saved without the optimizer;
# it's a dependency hypergraph with edges of the form (optimizer, non-slot
# variable, variable)). So we don't _track_ slot variables anywhere, and
# instead special-case this dependency and otherwise pretend it's a normal
# graph.
if slot_variable is not None:
# If we've either made this slot variable, or if we've pulled out an
# existing slot variable, we should restore it.
slot_variable_position.restore(slot_variable)
else:
# We didn't make the slot variable. Defer restoring until it gets created
# normally. We keep a list rather than the one with the highest restore
# UID in case slot variables have their own dependencies, in which case
# those could differ between restores.
self._deferred_slot_restorations.setdefault(
slot_name, {}).setdefault(variable_key, []).append(
slot_variable_position)
| [
"[email protected]"
] | |
cb2c66246218d18c73711d4760222ad0c1230cb8 | 571a89f94f3ebd9ec8e6b618cddb7d05811e0d62 | /chokudai_S001/h/main.py | dee5983b58febe63c07ef1f8bf5b7db686e13a53 | [] | no_license | ryu19-1/atcoder_python | 57de9e1db8ff13a107b5861f8f6a231e40366313 | cc24b3c2895aad71d40cefbb8e2893dc397b8f4f | refs/heads/master | 2023-05-10T05:32:16.507207 | 2021-05-19T17:48:10 | 2021-05-19T17:48:10 | 368,954,430 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 511 | py | #!/usr/bin/env python3
import sys
from collections import deque, Counter
from heapq import heappop, heappush
from bisect import bisect_left
from itertools import accumulate
sys.setrecursionlimit(10**6)
INF = 10**12
m = 10**9 + 7
def main():
N = int(input())
a = list(map(int, input().split()))
    dp = [INF] * N
    for i in range(N):
        # dp[k] holds the smallest possible tail value of a strictly
        # increasing subsequence of length k + 1; replacing the leftmost
        # entry >= a[i] keeps dp sorted.
        d = bisect_left(dp, a[i])
        dp[d] = a[i]
        # print(i, dp)
    ans = bisect_left(dp, INF)  # number of filled entries = LIS length
print(ans)
if __name__ == "__main__":
main()
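# Worked example (added for illustration): for N=8 and
# a = [3, 1, 4, 1, 5, 9, 2, 6], the filled prefix of dp evolves as
# [3] -> [1] -> [1, 4] -> [1, 4] -> [1, 4, 5] -> [1, 4, 5, 9]
# -> [1, 2, 5, 9] -> [1, 2, 5, 6], so the program prints 4: the length of a
# longest strictly increasing subsequence (e.g. 1 4 5 9).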
| [
"[email protected]"
] | |
eac93448f682961cac9392c005e6e93abf7cac29 | e5664b40c9d0a828c009b30ed8fe62666d04bf62 | /falcon_marshmallow/_version.py | ceaa700e54e94982b6e19e2fb7dede45e5f07725 | [
"MIT"
] | permissive | evilr00t/falcon-marshmallow | 9eb348fd68e1b0c85927e77f62bc02fc093ad28e | 97f169c78f11a638b1f21b3a977bb5df8d071be5 | refs/heads/master | 2022-02-23T05:04:37.315682 | 2019-10-12T19:37:11 | 2019-10-12T19:37:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 499 | py | # -*- coding: utf-8 -*-
"""
version.py module
The version set here will be automatically incorporated into setup.py
and also set as the __version__ attribute for the package.
"dev", "rc", and other verison tags should be added using the
``setup.py egg_info`` command when creating distributions.
"""
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
__version_info__ = (0, 4, 0)
__version__ = ".".join([str(ver) for ver in __version_info__])
| [
"[email protected]"
] | |
93c094ec3ff67c2547a4273d6b6d7dd5b2d36e17 | 528c811306faa4a34bf51fca7955b7a24ac2e30c | /Python/Number of Islands II.py | ea9b85418e2cf1f4baca66002f08cbad1d4cd15e | [] | no_license | ganjingcatherine/LeetCode-1 | 1addbd7e4d9254a146601f9d5e28b8becb8235a6 | 488782d3f1e759da2d32b4e82dbf55b96c431244 | refs/heads/master | 2021-05-11T03:15:16.810035 | 2016-02-06T06:19:18 | 2016-02-06T06:19:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,429 | py | """
A 2d grid map of m rows and n columns is initially filled with water. We may perform an addLand operation which turns the water at position (row, col) into a land. Given a list of positions to operate, count the number of islands after each addLand operation. An island is surrounded by water and is formed by connecting adjacent lands horizontally or vertically. You may assume all four edges of the grid are all surrounded by water.
Example:
Given m = 3, n = 3, positions = [[0,0], [0,1], [1,2], [2,1]].
Initially, the 2d grid grid is filled with water. (Assume 0 represents water and 1 represents land).
0 0 0
0 0 0
0 0 0
Operation #1: addLand(0, 0) turns the water at grid[0][0] into a land.
1 0 0
0 0 0 Number of islands = 1
0 0 0
Operation #2: addLand(0, 1) turns the water at grid[0][1] into a land.
1 1 0
0 0 0 Number of islands = 1
0 0 0
Operation #3: addLand(1, 2) turns the water at grid[1][2] into a land.
1 1 0
0 0 1 Number of islands = 2
0 0 0
Operation #4: addLand(2, 1) turns the water at grid[2][1] into a land.
1 1 0
0 0 1 Number of islands = 3
0 1 0
We return the result as an array: [1, 1, 2, 3]
"""
class union_find:
def __init__(self, m, n):
self.father = {}
self.m = m
self.n = n
for i in range(m):
for j in range(n):
id = self.convert_to_id(i, j)
self.father[id] = id
def find(self, x, y):
parent = self.father[self.convert_to_id(x, y)]
while parent != self.father[parent]:
parent = self.father[parent]
return parent
def compressed_find(self, x, y):
parent = self.father[self.convert_to_id(x, y)]
while parent != self.father[parent]:
parent = self.father[parent]
# set all father to be parent we just get
prev_father = self.father[self.convert_to_id(x, y)]
while prev_father != self.father[prev_father]:
prev_father, self.father[prev_father] = self.father[prev_father], parent
return parent
def union(self, x1, y1, x2, y2):
f1 = self.find(x1, y1)
f2 = self.find(x2, y2)
if f1 != f2:
self.father[f1] = f2
def convert_to_id(self, x, y):
return x * self.n + y
class Solution(object):
def numIslands2(self, m, n, positions):
"""
:type m: int
:type n: int
:type positions: List[List[int]]
:rtype: List[int]
"""
if m == 0 or n == 0:
return []
if not positions or len(positions) == 0:
return []
island = [[False for _ in range(n)] for _ in range(m)]
directions = [[0, -1], [0, 1], [1, 0], [-1, 0]]
count, uf, result = 0, union_find(m, n), []
for position in positions:
x, y = position[0], position[1]
if not island[x][y]:
count += 1
island[x][y] = True
for i in range(4):
nx, ny = x + directions[i][0], y + directions[i][1]
if 0 <= nx < m and 0 <= ny < n and island[nx][ny]:
position_father = uf.find(x, y)
now_father = uf.find(nx, ny)
if position_father != now_father:
count -= 1
uf.union(x, y, nx, ny)
result.append(count)
return result
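# Quick self-check (added; mirrors the worked example in the problem
# statement above):
if __name__ == "__main__":
    print(Solution().numIslands2(3, 3, [[0, 0], [0, 1], [1, 2], [2, 1]]))
    # expected output: [1, 1, 2, 3]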
| [
"[email protected]"
] | |
8b5353bb413efa3cbabe1730e3767936265568a8 | 0d0efed91a1e320509a7625bd72ebea1b64fc95b | /numpy_learn/5_numpy_function.py | 0fc81c094aa3704f8098fde5e9b67f07f7783576 | [] | no_license | starryrbs/python_ai | ed74a3c2d53378b47b2be910d97255f2706fd25e | 80f8fd361d7b366ba0607417f0272bbaa3672e51 | refs/heads/master | 2020-04-24T03:48:52.260392 | 2019-02-20T13:56:42 | 2019-02-20T13:56:42 | 171,681,894 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,783 | py | import numpy as np
# numpy random-number functions
# the np.random submodule (np.random.*): mainly np.random.rand(), np.random.randn() and np.random.randint()
# rand(d0, d1, ..., dn): array of shape (d0, ..., dn) filled with floats drawn uniformly from [0, 1)
a = np.random.rand(2, 3, 4)
print(a)
"""
[[[0.4506612 0.5296636 0.9747625 0.90105177]
[0.25850117 0.90704491 0.87144252 0.00418912]
[0.69423447 0.690204 0.4432447 0.37734196]]
[[0.41056822 0.4220897 0.80819521 0.99022746]
[0.61803924 0.93554027 0.3742707 0.94081985]
[0.15283965 0.09844152 0.25726209 0.24488101]]]
"""
# randn(d0, d1, ..., dn): array of shape (d0, ..., dn) drawn from the standard normal distribution
a = np.random.randn(2, 3, 4)
print(a)
# randint(low, high, shape): shape-sized array of random integers from the range [low, high)
a = np.random.randint(5, 10, (2, 3, 4))
print(a)
"""
[[[5 6 5 7]
[7 5 9 5]
[6 7 6 5]]
[[8 6 7 5]
[6 8 5 6]
[8 8 7 9]]]
"""
# seed(s): seed the random-number generator; s is the given seed value
# np.random.seed(5)
# a = np.random.randint(5, 10, (2, 3, 4))
# print(a)
"""
[[[8 5 6 5]
[9 8 5 5]
[9 6 5 8]]
[[9 8 6 9]
[7 6 6 7]
[6 6 6 7]]]
"""
# as the commented run above shows, a fixed seed makes the generated values reproducible
# shuffle(a): randomly permutes a along its first axis (rows), in place -- a is modified
a = np.random.randint(5, 10, (3, 4))
print(a)
"""
[[8 7 8 7]
 [5 6 5 8]
 [7 9 5 5]]
"""
np.random.shuffle(a)
print(a)
"""
[[8 7 8 7]
[5 6 5 8]
[7 9 5 5]]
[[5 6 5 8]
[8 7 8 7]
[7 9 5 5]]
"""
# permutation(a): returns a randomly permuted copy along the first axis; a itself is unchanged
a = np.random.randint(5, 10, (3, 4))
print(a)
"""
[[9 5 7 9]
 [5 9 5 7]
 [6 8 6 7]]
"""
b = np.random.permutation(a)
print(a)
"""
[[9 5 7 9]
[5 9 5 7]
[6 8 6 7]]
"""
print(b)
"""
[[5 9 5 7]
[6 8 6 7]
[9 5 7 9]]
"""
# choice(a, size, replace, p): draw elements from the 1-D array a with probabilities p into a size-shaped array; replace controls whether an element may be drawn again (default True)
a = np.arange(6)
print(np.random.choice(a, 2, replace=False, p=a / np.sum(a)))
# note: choice requires its input array to be one-dimensional
"""
uniform(low, high, size) : array with a uniform distribution; low start, high end, size shape
normal(loc, scale, size) : array with a normal distribution; loc mean, scale standard deviation, size shape
poisson(lam, size) : array with a Poisson distribution; lam event rate, size shape
"""
# numpy statistics functions:
# np.sum(a, axis=None): sum of a's elements along the given axis; axis may be an int or a tuple
a = np.arange(15).reshape((3, 5))
print(a)
"""
[[ 0 1 2 3 4]
[ 5 6 7 8 9]
[10 11 12 13 14]]
"""
print(np.sum(a, axis=0))
# [15 18 21 24 27]
print(np.sum(a, axis=1))
# [10 35 60]
"""
With axis=None, np.sum(a) is the sum over every element of a
With axis=0, it sums down each column of a
With axis=1, it sums across each row of a
mean(a, axis=None): expected value (mean) of a's elements along the given axis; axis may be an int or a tuple
"""
# mean computes the average
print(1, np.mean(a))
print(np.mean(a, axis=0))
# average(a, axis=None, weights=None): weighted average of a's elements along the given axis
print(np.average(a, axis=0, weights=[2, 3, 4]))
# [ 6.11111111 7.11111111 8.11111111 9.11111111 10.11111111]
# 6.111111111111111 is computed as (0*2 + 5*3 + 10*4) / (2 + 3 + 4)
"""
std(a, axis=None) : standard deviation of a's elements along the given axis
var(a, axis=None) : variance of a's elements along the given axis
min(a) max(a) : smallest / largest element of a
argmin(a) argmax(a) : index of the smallest / largest element, as an index into the flattened array
unravel_index(index, shape) : convert a flat index back into a multi-dimensional index for the given shape
ptp(a) : difference between the largest and smallest element of a ("peak to peak")
median(a) : median of a's elements
"""
print("----------梯度函数------------")
"""
np.gradient(a) : gradient of a's elements; for a multi-dimensional a, one gradient is returned per axis
gradient: the rate of change between consecutive values, i.e. the slope
for three consecutive Y values a, b, c at unit X spacing, the gradient at b is (c - a) / 2
"""
a = np.random.randint(0, 20, (5))
print(a)
# [ 5 5 13 6 10]
print(np.gradient(a))
# [ 0. 4. 0.5 -1.5 4. ]
# edge values use one-sided differences, interior ones centered differences:
# 0.  : (5-5)/1    (first)
# 4.  : (13-5)/2   (second)
# 0.5 : (6-5)/2    (third)
# -1.5: (10-13)/2  (fourth)
# 4.  : (10-6)/1   (last)
# when a is a multi-dimensional array:
a = np.arange(12).reshape(2,6)
print(a)
"""
[[ 0 1 2 3 4 5]
[ 6 7 8 9 10 11]]
"""
print(np.gradient(a))
"""
[array([[6., 6., 6., 6., 6., 6.],
[6., 6., 6., 6., 6., 6.]]), array([[1., 1., 1., 1., 1., 1.],
[1., 1., 1., 1., 1., 1.]])]
"""
# In the pair printed by np.gradient(a) above, the first array is the gradient along the outermost axis (axis=0), the second along axis=1. | [
"[email protected]"
] | |
6aec87a1fbe7be776d760cf637c53614801b725b | 35286efd76814a1f3bc05da07f2968d05737c238 | /esim/test.py | b013aa69540a306acd2cfacf63915c5ba49b3226 | [
"Apache-2.0"
] | permissive | jiniaoxu/text_matching | ac41c7de8f66f61a6958a35dfd4584539cd97c51 | 154de91000e8677703192cf5eae49fc6c3c09eea | refs/heads/master | 2020-06-04T05:45:09.320991 | 2019-06-13T02:50:54 | 2019-06-13T02:50:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 782 | py | import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '../'))
from esim.graph import Graph
import tensorflow as tf
from utils.load_data import load_data
os.environ["KMP_DUPLICATE_LIB_OK"] = "TRUE"
os.environ['CUDA_VISIBLE_DEVICES'] = '2'
p, h, y = load_data('ccb/test.csv', data_size=1000)
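# p and h are the two tokenized sentence batches of each pair and y the labels --
# an assumption based on the ESIM text-matching setup, not on load_data's docs.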
model = Graph()
saver = tf.train.Saver()
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
saver.restore(sess, '../output/esim/esim_12.ckpt')
loss, acc = sess.run([model.loss, model.acc],
feed_dict={model.p: p,
model.h: h,
model.y: y,
model.keep_prob: 1})
print('loss: ', loss, ' acc:', acc)
| [
"[email protected]"
] | |
561107764d55ee75983f3adc71f5cf85b27d5ea0 | 5a45981c89d0d9c0f2e9453abdefc333deb53e80 | /nanodet/model/fpn/fpn.py | b031c6c81b0d7eacf7b045c53975dc5b07aa5c94 | [
"Apache-2.0"
] | permissive | zhiqwang/nanodet | fd0b2e9c4badf492649aef7c3b397394c3110d1d | dd94177c0cb411ee21f4fc4ebc2ef01647e64823 | refs/heads/main | 2023-03-17T12:23:12.788037 | 2021-03-15T12:00:19 | 2021-03-15T12:00:19 | 348,642,567 | 2 | 0 | Apache-2.0 | 2021-03-17T09:01:43 | 2021-03-17T09:01:43 | null | UTF-8 | Python | false | false | 3,241 | py | # Modification 2020 RangiLyu
# Copyright 2018-2019 Open-MMLab.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch.nn as nn
import torch.nn.functional as F
from ..module.conv import ConvModule
from ..module.init_weights import xavier_init
class FPN(nn.Module):
def __init__(self,
in_channels,
out_channels,
num_outs,
start_level=0,
end_level=-1,
conv_cfg=None,
norm_cfg=None,
activation=None
):
super(FPN, self).__init__()
assert isinstance(in_channels, list)
self.in_channels = in_channels
self.out_channels = out_channels
self.num_ins = len(in_channels)
self.num_outs = num_outs
self.fp16_enabled = False
if end_level == -1:
self.backbone_end_level = self.num_ins
assert num_outs >= self.num_ins - start_level
else:
# if end_level < inputs, no extra level is allowed
self.backbone_end_level = end_level
assert end_level <= len(in_channels)
assert num_outs == end_level - start_level
self.start_level = start_level
self.end_level = end_level
self.lateral_convs = nn.ModuleList()
for i in range(self.start_level, self.backbone_end_level):
l_conv = ConvModule(
in_channels[i],
out_channels,
1,
conv_cfg=conv_cfg,
norm_cfg=norm_cfg,
activation=activation,
inplace=False)
self.lateral_convs.append(l_conv)
self.init_weights()
# default init_weights for conv(msra) and norm in ConvModule
def init_weights(self):
for m in self.modules():
if isinstance(m, nn.Conv2d):
xavier_init(m, distribution='uniform')
def forward(self, inputs):
assert len(inputs) == len(self.in_channels)
# build laterals
laterals = [
lateral_conv(inputs[i + self.start_level])
for i, lateral_conv in enumerate(self.lateral_convs)
]
# build top-down path
used_backbone_levels = len(laterals)
for i in range(used_backbone_levels - 1, 0, -1):
prev_shape = laterals[i - 1].shape[2:]
laterals[i - 1] += F.interpolate(
laterals[i], size=prev_shape, mode='bilinear')
# build outputs
outs = [
# self.fpn_convs[i](laterals[i]) for i in range(used_backbone_levels)
laterals[i] for i in range(used_backbone_levels)
]
return tuple(outs)
# if __name__ == '__main__':
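# A minimal smoke test (sketch): the channel/size numbers below are made up, and
# running it requires the surrounding nanodet package so ConvModule resolves.
#     import torch
#     fpn = FPN(in_channels=[116, 232, 464], out_channels=96, num_outs=3)
#     feats = [torch.rand(1, c, s, s) for c, s in zip([116, 232, 464], [40, 20, 10])]
#     outs = fpn(feats)   # tuple of 3 tensors, each with 96 channels (40/20/10 spatial)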
| [
"[email protected]"
] | |
f1b6f23525382617a5501166f87ecca57e0d62c3 | 938a496fe78d5538af94017c78a11615a8498682 | /algorithms/401-500/442.find-all-duplicates-in-an-array.py | 6a7dfa1426ec528b0bb7cf1b4da44bb4ceb85ca5 | [] | no_license | huilizhou/Leetcode-pyhton | 261280044d15d0baeb227248ade675177efdb297 | 6ae85bf79c5a21735e3c245c0c256f29c1c60926 | refs/heads/master | 2020-03-28T15:57:52.762162 | 2019-11-26T06:14:13 | 2019-11-26T06:14:13 | 148,644,059 | 8 | 1 | null | null | null | null | UTF-8 | Python | false | false | 752 | py | # 数组中重复的数据
class Solution(object):
def findDuplicates(self, nums):
"""
:type nums: List[int]
:rtype: List[int]
"""
        # My first attempt -- rejected: the follow-up asks for O(n) time without
        # extra space, and this dictionary version uses extra memory.
        # dic = {}
        # res = []
        # for i in nums:
        #     dic[i] = dic.get(i, 0) + 1
        #     if dic[i] > 1:
        #         res.append(i)
        # return res
        # The accepted trick: treat each value as an index (the problem guarantees
        # 1 <= a[i] <= n) and mark a visit by negating nums[value - 1]; finding it
        # already negative means the value was seen before, i.e. it is a duplicate.
res = []
for n in nums:
if nums[abs(n) - 1] > 0:
nums[abs(n) - 1] *= -1
else:
res.append(abs(n))
return res
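# Quick check -- expected output: [2, 3]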
print(Solution().findDuplicates([4, 3, 2, 7, 8, 2, 3, 1]))
| [
"[email protected]"
] | |
f2b3256c22467e1b32dda229247fffda1cde9b95 | e3bb63f93e36aab4a78356ba9d0e82f935325906 | /bitmovin/resources/models/manifests/hls/vtt_media.py | 78827f4ae4353e744ea3c2459772045c4d003fa8 | [
"Unlicense"
] | permissive | camberbridge/bitmovin-python | 1668367980df49f9088b93e4b6764563cbdb8bcf | 3af4c6e79b0291fda05fd1ceeb5bed1bba9f3c95 | refs/heads/master | 2020-04-09T17:51:46.786389 | 2018-11-30T14:46:34 | 2018-11-30T14:46:34 | 160,493,890 | 0 | 0 | Unlicense | 2018-12-05T09:31:18 | 2018-12-05T09:31:17 | null | UTF-8 | Python | false | false | 1,259 | py | from .abstract_media import AbstractMedia
class VttMedia(AbstractMedia):
def __init__(self, name, group_id, vtt_url, language=None, assoc_language=None, is_default=None, autoselect=None,
characteristics=None, id_=None):
super().__init__(id_=id_, name=name, group_id=group_id, language=language, assoc_language=assoc_language,
is_default=is_default, autoselect=autoselect, characteristics=characteristics)
self.vttUrl = vtt_url
@classmethod
def parse_from_json_object(cls, json_object):
media = super().parse_from_json_object(json_object=json_object)
id_ = media.id
name = media.name
group_id = media.groupId
language = media.language
assoc_language = media.assocLanguage
is_default = media.isDefault
autoselect = media.autoselect
characteristics = media.characteristics
vtt_url = json_object.get('vttUrl')
vtt_media = VttMedia(id_=id_, name=name, group_id=group_id, language=language, assoc_language=assoc_language,
is_default=is_default, autoselect=autoselect, characteristics=characteristics,
vtt_url=vtt_url)
return vtt_media
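# Rough usage sketch (illustrative only -- the full JSON field set comes from
# AbstractMedia, so the parse input below is abbreviated):
#     media = VttMedia(name='subs', group_id='subs-en',
#                      vtt_url='https://example.com/media/en.vtt')
#     parsed = VttMedia.parse_from_json_object(manifest_json_for_one_media_entry)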
| [
"[email protected]"
] | |
3c32af0c8c3dd971d0aaa4bddbac2f32bc78ea47 | 93d361d1cfaf5065aada52ff53833b67302c2b1c | /project/urls.py | 9cef54d5038b91c04d21c889fda0d9087dcbd3ed | [] | no_license | append-knowledge/restapi-with-jwt-token | 0fe573cd45633829645544447f66e6d6b43458ad | fbd276fb38cbd687253176b1dd96f07e16707dfd | refs/heads/master | 2023-08-27T02:55:20.826945 | 2021-10-09T18:33:52 | 2021-10-09T18:33:52 | 415,391,422 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 507 | py | from django.urls import path
from project import views
urlpatterns = [
    path('accounts/signup', views.SignUpview.as_view(), name='signup'),
    path('accounts/signin', views.SignInView.as_view(), name='signin'),
    path('accounts/signout', views.SignOutView.as_view(), name='logout'),
    path('accounts/home', views.HomeView.as_view(), name='home'),
    path('accounts/change/<int:id>', views.ChangeDetailsView.as_view(), name='editdetails'),
    path('accounts/remove/<int:id>', views.delete, name='removeitem')
] | [
"[email protected]"
] | |
ff867b19969fb12f7c9a4f8cd4865f82f49e6c70 | 9c8857d980cc53bc4f69eee3355226fcd0b42746 | /app/main.py | 0712ce2ce6088a1f2105ac717532b0ac3b048a3f | [] | no_license | moh-hosseini98/FastAPI-crud-tortoise-orm-and-postgres | bbaf9a305504d45e0db549edf11fe00b625404f9 | 75f2c5d4eeee38113f5bb0d19956f721c0836db1 | refs/heads/main | 2023-08-17T22:16:54.912308 | 2021-10-20T14:41:08 | 2021-10-20T14:41:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 111 | py | import uvicorn
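# Dev entry point. Passing the app as the import string "server.app:app" (rather
# than an object) is what allows reload=True to re-import it on code changes;
# this assumes the FastAPI instance `app` lives in server/app.py.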
if __name__ == "__main__":
    uvicorn.run("server.app:app", host="0.0.0.0", port=8000, reload=True) | [
"[email protected]"
] | |
94792a1bda13eac1d3f97a44481616c63e24d376 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_135/1642.py | c31871167442ed89cb0e8fb17031677d335e0e83 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,477 | py | #-------------------------------------------------------------------------------
# Name: module1
# Purpose:
#
# Author: Nishant
#
# Created: 12-04-2014
# Copyright: (c) Nishant 2014
# Licence: <your licence>
#-------------------------------------------------------------------------------
def main():
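    # Input format (Code Jam 2014 qualification round, "Magic Trick"): each case
    # spans 10 lines -- the row picked in the first arrangement, 4 rows of 4
    # cards, the row picked in the second arrangement, then 4 more rows of cards.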
input_file = "E:\Dropbox\CodeBase\Python\GoogleCodeJam_2014\A-small-attempt0.in"
output_file = "E:\Dropbox\CodeBase\Python\GoogleCodeJam_2014\A-output.txt"
f = open(input_file, 'r')
o = open(output_file, 'w')
cases = int(f.readline())
lst = list(f)
i = 0
j = 1
while i < (cases * 10):
first = int(lst[i])
# print (first)
arr1 = [lst[i+1], lst[i+2], lst[i+3], lst[i+4]][first-1]
# print (arr1)
i += 5
sec = int(lst[i])
# print (sec)
arr2 = [lst[i+1], lst[i+2], lst[i+3], lst[i+4]][sec-1]
# print (arr2)
i += 5
set1 = set(arr1.split())
set2 = set(arr2.split())
# print (set1)
# print (set2)
res = set1 & set2
        if len(res) == 0:
            o.write("Case #%s: Volunteer cheated!\n" % j)
        elif len(res) > 1:
            o.write("Case #%s: Bad magician!\n" % j)
        else:
            o.write("Case #%s: %s\n" % (j, next(iter(res))))
j += 1
f.close()
o.close()
if __name__ == '__main__':
main() | [
"[email protected]"
] | |
b05399a6ff94cb5efa0799a162c6431e21c5440a | e68a40e90c782edae9d8f89b827038cdc69933c4 | /res_bw/scripts/common/lib/plat-mac/carbon/carbonevt.py | a403e280d89920be14e1e2e9b2990efb37dd6195 | [] | no_license | webiumsk/WOT-0.9.16 | 2486f8b632206b992232b59d1a50c770c137ad7d | 71813222818d33e73e414e66daa743bd7701492e | refs/heads/master | 2021-01-10T23:12:33.539240 | 2016-10-11T21:00:57 | 2016-10-11T21:00:57 | 70,634,922 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 372 | py | # 2016.10.11 22:21:54 Střední Evropa (letní čas)
# Embedded file name: scripts/common/Lib/plat-mac/Carbon/CarbonEvt.py
from _CarbonEvt import *
# okay decompyling c:\Users\PC\wotsources\files\originals\res_bw\scripts\common\lib\plat-mac\carbon\carbonevt.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2016.10.11 22:21:54 Central Europe (summer time)
| [
"[email protected]"
] | |
7331db6bbc26b8c2088cca46dffdc7622db5ffc5 | aa15002c5316b4c7e0a9563a40826057729e0b13 | /tensorflow/python/keras/layers/preprocessing/table_utils.py | f5397da1f3eb482547e40b4ab293d3051753f429 | [
"Apache-2.0"
] | permissive | kkimdev/tensorflow | 8238c5594ae44f084725ddf9b34d6d41645d4072 | 2fb75db6ad4f4a7f01ef4755b96b49f8eb6108db | refs/heads/master | 2020-07-07T18:09:40.662883 | 2020-05-14T18:59:11 | 2020-05-14T19:05:05 | 203,429,154 | 0 | 0 | Apache-2.0 | 2019-08-20T18:07:46 | 2019-08-20T18:07:46 | null | UTF-8 | Python | false | false | 7,427 | py | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for working with tf.lookup tables in Keras."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.keras import backend as K
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import string_ops
from tensorflow.python.ops.ragged import ragged_functional_ops
from tensorflow.python.ops.ragged import ragged_tensor
from tensorflow.python.platform import gfile
class TableHandler(object):
"""Wrapper object that holds a lookup table and provides accessors."""
def __init__(self, table, oov_tokens=None, use_v1_apis=False):
self.table = table
self.use_v1_apis = use_v1_apis
if oov_tokens is None:
self.oov_tokens = oov_tokens
else:
if not isinstance(oov_tokens, (list, tuple, np.ndarray)):
oov_tokens = [oov_tokens]
self.oov_tokens = math_ops.cast(oov_tokens, table._value_dtype) # pylint: disable=protected-access
def data(self):
keys, values = self.table.export()
return (self._eval(keys), self._eval(values))
def vocab_size(self):
return self._eval(self.table.size())
def clear(self):
keys, _ = self.table.export()
self._run(self.table.remove(keys))
def insert(self, keys, values):
if len(values) != len(keys):
raise RuntimeError("Size mismatch between values and key arrays. "
"Keys had size %s, values had size %s." %
(len(keys), len(values)))
self._run(self.table.insert(keys, values))
def _replace_oov_buckets(self, inputs, lookups):
"""Replace the default OOV value with one of the OOV bucket values."""
if self.oov_tokens is None:
return lookups
num_oov_elements = self.oov_tokens.shape.num_elements()
if inputs.dtype.is_integer:
oov_indices = math_ops.floormod(inputs, num_oov_elements)
else:
oov_indices = string_ops.string_to_hash_bucket_fast(
inputs, num_buckets=num_oov_elements)
oov_values = array_ops.gather(self.oov_tokens, oov_indices)
oov_locations = math_ops.equal(lookups, self.table._default_value) # pylint: disable=protected-access
return array_ops.where(oov_locations, oov_values, lookups)
def _ragged_lookup(self, inputs):
"""Perform a table lookup on a ragged tensor."""
# The table lookup ops don't natively support ragged tensors, so if we have
# a RT we need to use map_flat_values to look up every element.
indexed_data = ragged_functional_ops.map_flat_values(
self.table.lookup, inputs)
indexed_data = ragged_functional_ops.map_flat_values(
self._replace_oov_buckets, inputs, indexed_data)
# Composite tensors can pass tensor values through, which will cause
# errors if all operations in the TF graph do so. We can break this chain
# with an identity here.
return array_ops.identity(indexed_data)
def _sparse_lookup(self, inputs):
"""Perform a table lookup on a sparse tensor."""
values = self.table.lookup(inputs.values)
values = self._replace_oov_buckets(inputs.values, values)
indexed_data = sparse_tensor.SparseTensor(inputs.indices, values,
inputs.dense_shape)
# Composite tensors can pass tensor values through, which will cause
# errors if all operations in the TF graph do so. We can break this chain
# with an identity here.
return array_ops.identity(indexed_data)
def _tensor_lookup(self, inputs):
"""Perform a table lookup on a tf.tensor."""
values = self.table.lookup(inputs)
indexed_data = self._replace_oov_buckets(inputs, values)
# (b/149446477): output does not preserve input shape.
indexed_data.set_shape(inputs.shape)
return indexed_data
def lookup(self, inputs):
"""Perform a table lookup."""
# Sparse tensors don't play nicely with tensor conversion, so we handle
# them before attempting to convert lists or arrays to tensors.
if isinstance(
inputs, (sparse_tensor.SparseTensor, sparse_tensor.SparseTensorValue)):
return self._sparse_lookup(inputs)
# Try to convert lists/arrays to tensors or RaggedTensors.
inputs = ragged_tensor.convert_to_tensor_or_ragged_tensor(inputs)
# Run the lookup operation on the converted tensor.
if ragged_tensor.is_ragged(inputs):
return self._ragged_lookup(inputs)
else:
return self._tensor_lookup(inputs)
def _eval(self, tensor):
if self.use_v1_apis:
return K.get_session().run(tensor)
else:
return tensor.numpy()
def _run(self, op):
if self.use_v1_apis:
K.get_session().run(op)
def get_vocabulary_from_file(vocabulary_path, encoding="utf-8"):
"""Read a vocabulary in from a file."""
vocab = []
with gfile.GFile(vocabulary_path, "r") as reader:
while True:
# Get the next line (incl. \n), and break if nothing is left to read.
text = reader.readline()
if not text:
break
# Convert the raw text and strip whitespace.
if isinstance(text, str):
token = text
elif isinstance(text, bytes):
token = text.decode(encoding, "ignore")
token = token.strip()
vocab.append(token)
return vocab
def validate_vocabulary_is_unique(vocabulary):
"""Validate that a vocabulary contains no repeated tokens."""
vocabulary_set = set(vocabulary)
if len(vocabulary) != len(vocabulary_set):
repeated_items = [
item for item, count in collections.Counter(vocabulary).items()
if count > 1
]
raise ValueError("The passed vocabulary has at least one repeated "
"term. Please uniquify your dataset. The repeated terms "
"are %s" % repeated_items)
def assert_same_type(expected_type, values, value_name):
"""Assert that 'values' is of type 'expected_type'."""
if dtypes.as_dtype(expected_type) != dtypes.as_dtype(values.dtype):
raise RuntimeError("Expected %s type %s, got %s" %
(value_name, expected_type, values.dtype))
def convert_to_ndarray(x, dtype=None):
"""Convert 'x' to a numpy array."""
array = np.array(x) if isinstance(x, (list, tuple)) else x
if dtype not in (None, dtypes.string):
# If the dtype is an integer, we do permissive casting. This allows
# users to examine int32 data if the dtype is int64 without trouble.
np_dtype = dtypes.as_dtype(dtype).as_numpy_dtype
if np.can_cast(array.dtype, np_dtype):
array = array.astype(np_dtype, casting="safe")
return array
| [
"[email protected]"
] | |
ae2f3de1b7eacdc7cfaca05fea27de5ee8f08410 | da1d21bb8d0760bfba61cd5d9800400f928868aa | /misc/scripts/category_transformation_001.py | 0226537d27ec40ac6726d5b97eb9d427f608ba0e | [] | no_license | biznixcn/WR | 28e6a5d10f53a0bfe70abc3a081c0bf5a5457596 | 5650fbe59f8dfef836503b8092080f06dd214c2c | refs/heads/master | 2021-01-20T23:53:52.887225 | 2014-05-13T02:00:33 | 2014-05-13T02:00:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 724 | py | # -*- coding: utf-8 -*-
from circuits.models import Circuit
from circuits.utils import CircuitCategory
"""
Transformaciones
Literary + Culture + Music + Art + Academic & Education => Arts & Culture
3 4 8 10 19 => 4
Lifestyle + Green + Fashion + Design + Technology + Business + Geek + Spiritual + Entertainment => Lifestyle
18 7 6 11 16 14 17 21 25 18
"""
for circuit in Circuits.objects.filter(category__in=[3, 8, 10, 19]):
circuit.category = 4
circuit.save()
for circuit in Circuits.objects.filter(category__in=[7, 6, 11, 16, 14, 17, 21, 25]):
circuit.category = 18
circuit.save()
| [
"[email protected]"
] | |
8c5db1946658ab443d7300038473c82702e1de90 | 04125b74273ad8b648343691565ab0cd6e25fa50 | /image_comp_test.py | b32b911b4decb827e8360a480808f031846c8d3a | [] | no_license | alpha0080/spineToolAdv | 32918fa10b47ec9f19586b8878b243afd9dae945 | c394e382502c11fb2b19f86f1e6352dee76444b5 | refs/heads/master | 2021-07-25T09:10:38.883564 | 2018-12-11T00:56:12 | 2018-12-11T00:56:12 | 142,319,584 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 954 | py | import os, math,time
try:
sys.path.append("C:/Program Files/Pixar/RenderManProServer-22.1/lib/python2.7/Libs/ite-packages")
#sys.path.append("C:/Program Files/Pixar/RenderManProServer-21.7/lib/python2.7/Lib/site-packages")
import ice
except:
pass
import ice
max_frames_row = 10.0
frames = []
tile_width = 0
tile_height = 0
spritesheet_width = 0
spritesheet_height = 0
folder = "C:/Temp/testImage/1"
files = os.listdir(folder)
files.sort()
#print(files)
for i in files:
filename = folder +'/' +i
image = ice.Load(filename)
imageMetaData = image.GetMetaData()
frames.append(imageMetaData)
print frames
# imageSize = imageMetaData['Original Size']
#imageWidth = int(imageMetaData['Original Size'].split(" ")[0].split("(")[1])
#imageHeight = int(imageMetaData['Original Size'].split(" ")[1].split(")")[0])
###ref https://minzkraut.com/2016/11/23/making-a-simple-spritesheet-generator-in-python/ | [
"[email protected]"
] | |
2d51f24f75bb3b6b21fb1210c3409e1c3063acde | f7778bf3b8173915c97193f51ff8a1ac2260a68a | /Section 3 code files/Code/webdirbuster.py | 8645d7b32c1044ce087596ec1ac46444c4785168 | [
"MIT"
] | permissive | PacktPublishing/Python-for-Automating-Information-Security | 35f5ab480c430788e881017ec8c919be1524cc30 | d6d1eaa053c3a5f5b103e17fefe8b4d9b33c0858 | refs/heads/master | 2023-05-25T12:34:43.912975 | 2023-01-30T09:16:51 | 2023-01-30T09:16:51 | 245,961,846 | 26 | 24 | MIT | 2023-05-22T22:44:20 | 2020-03-09T06:39:43 | Python | UTF-8 | Python | false | false | 3,798 | py | import argparse
import json
import re
import requests
from typing import List
from urllib3.exceptions import InsecureRequestWarning
quiet = False
def print_message(message: str):
"""
Print message to STDOUT if the quiet option is set to False (this is the default).
:param message: message to print
:return: None
"""
global quiet
if not quiet:
print(message)
def enumerate(base_url: str, dirs_file: str, recurse=False) -> List:
"""
Enumerate valid directories reachable via HTTP/HTTPS.
:param base_url: base URL to search
:param dirs_file: file containing names of commonly hosted directories
:param recurse: whether or not to recursively enumerate discovered directories
:return: list containing valid, reachable URLs
"""
# suppress insecure HTTPS warning
requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning)
valid_urls = []
with open(dirs_file, 'r') as f:
while True:
tmp_dir = f.readline()
if not tmp_dir:
break
tmp_dir = tmp_dir.strip()
if tmp_dir == '':
test_url = base_url
else:
if re.search(r'/$', base_url):
test_url = '{}{}'.format(base_url, tmp_dir)
else:
test_url = '{}/{}'.format(base_url, tmp_dir)
print_message('Checking {}'.format(test_url))
result = requests.get('{}'.format(test_url), verify=False)
if result.status_code == 200:
url = result.url
print_message('Found URL: {}'.format(url))
valid_urls.append(url)
if recurse and tmp_dir != '':
recurse_results = enumerate(url, dirs_file, recurse)
valid_urls.extend(recurse_results)
return valid_urls
def main():
"""
Main logic.
:return: None
"""
global quiet
parser = argparse.ArgumentParser(description='A smart-ish web directory enumeration tool.')
parser.add_argument('url', help='Base URL to search (must start with http:// or https://)')
parser.add_argument('dirs_file', help='File containing directory names to enumerate')
parser.add_argument('-r', '--recurse', help='Recursively enumerate subdirectories of discovered directories',
action='store_true')
parser.add_argument('-o', '--output', help='Output file to write to')
parser.add_argument('-f', '--format', help='Output format (default is json)', default='json',
choices=['json', 'plain'])
parser.add_argument('-q', '--quiet', help='Do not print informative messages', action='store_true')
args = parser.parse_args()
base_url = args.url
if not re.search(r'^https?://', base_url):
print('Error, url parameter must begin with either http:// or https://')
return
dirs_file = args.dirs_file
recurse = args.recurse
output = args.output
output_format = args.format
quiet = args.quiet
print_message('Enumerating web directories.')
valid_urls = list(set(enumerate(base_url, dirs_file, recurse)))
# print results
if output:
print_message('Writing output to {}.'.format(output))
with open(output, 'w') as of:
if output_format == 'json':
json.dump(valid_urls, of, indent=2)
else:
for line in valid_urls:
of.write('{}\n'.format(line))
else:
print_message('Writing output to STDOUT.')
if output_format == 'json':
print(json.dumps(valid_urls, indent=2))
else:
for line in valid_urls:
print(line)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
34fc5d7be9fdfc130eb473c15b4b7bdb80a10ee2 | 463c053bcf3f4a7337b634890720ea9467f14c87 | /python/ray/workflow/tests/test_lifetime.py | ece91c8445d32ee09fe290f16ece2e35641d73c0 | [
"BSD-3-Clause",
"MIT",
"Apache-2.0"
] | permissive | pdames/ray | e8faddc4440976211a6bcead8f8b6e62c1dcda01 | 918d3601c6519d333f10910dc75eb549cbb82afa | refs/heads/master | 2023-01-23T06:11:11.723212 | 2022-05-06T22:55:59 | 2022-05-06T22:55:59 | 245,515,407 | 1 | 1 | Apache-2.0 | 2023-01-14T08:02:21 | 2020-03-06T20:59:04 | Python | UTF-8 | Python | false | false | 1,596 | py | import os
import ray
import time
import pytest
from ray._private.test_utils import (
run_string_as_driver_nonblocking,
run_string_as_driver,
)
from ray.tests.conftest import * # noqa
from ray import workflow
from unittest.mock import patch
driver_script = """
import time
import ray
from ray import workflow
@ray.remote
def foo(x):
time.sleep(1)
if x < 20:
return workflow.continuation(foo.bind(x + 1))
else:
return 20
if __name__ == "__main__":
ray.init(storage="{}")
output = workflow.create(foo.bind(0)).run_async(workflow_id="driver_terminated")
time.sleep({})
"""
def test_workflow_lifetime_1(workflow_start_cluster):
# Case 1: driver exits normally
address, storage_uri = workflow_start_cluster
with patch.dict(os.environ, {"RAY_ADDRESS": address}):
ray.init(storage=storage_uri)
run_string_as_driver(driver_script.format(storage_uri, 5))
output = workflow.get_output("driver_terminated")
assert ray.get(output) == 20
def test_workflow_lifetime_2(workflow_start_cluster):
# Case 2: driver terminated
address, storage_uri = workflow_start_cluster
with patch.dict(os.environ, {"RAY_ADDRESS": address}):
ray.init(storage=storage_uri)
proc = run_string_as_driver_nonblocking(driver_script.format(storage_uri, 100))
time.sleep(10)
proc.kill()
time.sleep(1)
output = workflow.get_output("driver_terminated")
assert ray.get(output) == 20
if __name__ == "__main__":
import sys
sys.exit(pytest.main(["-v", __file__]))
| [
"[email protected]"
] | |
56da124f05b01e70233a87435baf0156aca9e476 | ce76b3ef70b885d7c354b6ddb8447d111548e0f1 | /own_year/new_thing.py | 49f9081472a90473e35618be695869cba50090c3 | [] | no_license | JingkaiTang/github-play | 9bdca4115eee94a7b5e4ae9d3d6052514729ff21 | 51b550425a91a97480714fe9bc63cb5112f6f729 | refs/heads/master | 2021-01-20T20:18:21.249162 | 2016-08-19T07:20:12 | 2016-08-19T07:20:12 | 60,834,519 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 240 | py |
#! /usr/bin/env python
def way_or_great_work(str_arg):
same_way_or_group(str_arg)
print('year')
def same_way_or_group(str_arg):
print(str_arg)
if __name__ == '__main__':
way_or_great_work('few_point_or_different_thing')
| [
"[email protected]"
] | |
6b7f77fe2120e75b0a3c0d682447587076ab6d0d | f93f03dac8e7340f35ddb8ac75e9fdbb19b935a8 | /toontown/golf/DistributedGolfHole.py | 0b98a83f3d638d695be462c2a35497eeb5967066 | [] | no_license | toontown-classic/toontown-otp-ai | 5d07f26658ca23e52c65254f23a70cbc5936ae6d | b0be971b4689f811f6abacb7af33242b06d8f9be | refs/heads/develop | 2022-03-10T10:42:37.203938 | 2022-03-08T05:56:52 | 2022-03-08T05:56:52 | 158,168,681 | 5 | 5 | null | 2022-02-26T19:32:31 | 2018-11-19T05:50:36 | Python | UTF-8 | Python | false | false | 71,952 | py | import math
import random
import time
from pandac.PandaModules import TextNode, BitMask32, Point3, Vec3, Vec4, deg2Rad, Mat3, NodePath, VBase4, OdeTriMeshData, OdeTriMeshGeom, OdeRayGeom, CollisionTraverser, CollisionSegment, CollisionNode, CollisionHandlerQueue
from direct.distributed import DistributedObject
from direct.directnotify import DirectNotifyGlobal
from otp.otpbase import OTPGlobals
from toontown.toonbase import ToontownGlobals
from toontown.toonbase import TTLocalizer
from toontown.toonbase import ToontownTimer
from direct.gui.DirectGui import DirectWaitBar, DGG, DirectLabel
from direct.task import Task
from direct.fsm.FSM import FSM
from toontown.minigame import ArrowKeys
from direct.showbase import PythonUtil
from toontown.golf import BuildGeometry
from toontown.golf import DistributedPhysicsWorld
from toontown.golf import GolfGlobals
from direct.interval.IntervalGlobal import Sequence, Parallel, LerpScaleInterval, LerpFunctionInterval, Func, Wait, SoundInterval, ParallelEndTogether, LerpPosInterval, ActorInterval, LerpPosHprInterval, LerpColorScaleInterval, WaitInterval
from direct.actor import Actor
from toontown.golf import GolfHoleBase
from toontown.distributed import DelayDelete
class DistributedGolfHole(DistributedPhysicsWorld.DistributedPhysicsWorld, FSM, GolfHoleBase.GolfHoleBase):
defaultTransitions = {'Off': ['Cleanup', 'ChooseTee', 'WatchTee'],
'ChooseTee': ['Aim', 'Cleanup'],
'WatchTee': ['WatchAim',
'Cleanup',
'WatchTee',
'ChooseTee',
'Aim'],
'Wait': ['Aim',
'WatchAim',
'Playback',
'Cleanup',
'ChooseTee',
'WatchTee'],
'Aim': ['Shoot',
'Playback',
'Cleanup',
'Aim',
'WatchAim'],
'WatchAim': ['WatchAim',
'WatchShoot',
'Playback',
'Cleanup',
'Aim',
'ChooseTee',
'WatchTee'],
'Playback': ['Wait',
'Aim',
'WatchAim',
'Cleanup',
'ChooseTee',
'WatchTee'],
'Cleanup': ['Off']}
id = 0
    notify = DirectNotifyGlobal.directNotify.newCategory('DistributedGolfHole')
unlimitedAimTime = base.config.GetBool('unlimited-aim-time', 0)
unlimitedTeeTime = base.config.GetBool('unlimited-tee-time', 0)
golfPowerSpeed = base.config.GetDouble('golf-power-speed', 3)
golfPowerExponent = base.config.GetDouble('golf-power-exponent', 0.75)
DefaultCamP = -16
MaxCamP = -90
def __init__(self, cr):
self.notify.debug('Hole Init')
DistributedPhysicsWorld.DistributedPhysicsWorld.__init__(self, base.cr)
GolfHoleBase.GolfHoleBase.__init__(self, 1)
FSM.__init__(self, 'Golf_%s_FSM' % self.id)
self.currentGolfer = 0
self.ballDict = {}
self.ballShadowDict = {}
self.holeNodes = []
self.golfCourse = None
self.golfCourseRequest = None
self.holePositions = []
self.timer = None
self.teeTimer = None
self.aimStart = None
self.titleLabel = None
self.teeInstructions = None
self.aimInstructions = None
self.powerReminder = None
self.lastTimeHeadingSent = 0
self.lastTempHeadingSent = 0
self.holdCycleTime = 0.0
self.inPlayBack = 0
self.swingInterval = None
self.sfxInterval = None
self.isLookingAtPutt = False
self.clubs = {}
self.camInterval = None
self.flyOverInterval = None
self.needToDoFlyOver = True
self.translucentLastFrame = []
self.translucentCurFrame = []
self.localMissedSwings = 0
self.localToonHitControl = False
self.warningInterval = None
self.playBackDelayDelete = None
self.aimMomentum = 0.0
self.lastBumpSfxPos = Point3(0, 0, 0)
self.__textGen = TextNode('golfHoleText')
self.__textGen.setFont(ToontownGlobals.getSignFont())
self.__textGen.setAlign(TextNode.ACenter)
if TTLocalizer.getLanguage() in ['castillian',
'japanese',
'german',
'portuguese',
'french']:
self.__textGen.setGlyphScale(0.7)
self.avIdList = []
self.enterAimStart = 0
return
def generate(self):
self.notify.debug('Hole Generate')
DistributedPhysicsWorld.DistributedPhysicsWorld.generate(self)
self.golfPowerTaskName = self.uniqueName('updateGolfPower')
def announceGenerate(self):
DistributedPhysicsWorld.DistributedPhysicsWorld.announceGenerate(self)
self.setup()
self.sendReady()
self.request('Off')
index = 1
for avId in self.avIdList:
self.createBall(avId, index)
self.createClub(avId)
index += 1
if self.avIdList:
avId = self.avIdList[0]
self.currentGolfer = avId
self.currentGolferActive = False
def delete(self):
self.removePlayBackDelayDelete()
self.request('Cleanup')
taskMgr.remove(self.golfPowerTaskName)
DistributedPhysicsWorld.DistributedPhysicsWorld.delete(self)
GolfHoleBase.GolfHoleBase.delete(self)
if hasattr(self, 'perfectIval'):
self.perfectIval.pause()
del self.perfectIval
self.golfCourse = None
if self.teeInstructions:
self.teeInstructions.destroy()
self.teeInstructions = None
if self.aimInstructions:
            self.aimInstructions.destroy()
self.aimInstructions = None
if self.powerReminder:
self.powerReminder.destroy()
self.powerReminder = None
if self.swingInterval:
self.swingInterval.pause()
self.swingInterval = None
if self.sfxInterval:
self.sfxInterval.pause()
self.sfxInterval = None
if self.camInterval:
self.camInterval.pause()
self.camInterval = None
for club in self.clubs:
self.clubs[club].removeNode()
del self.clubs
if hasattr(self, 'scoreBoard'):
if hasattr(self.scoreBoard, 'maximizeB'):
if self.scoreBoard.maximizeB:
self.scoreBoard.maximizeB.hide()
        if self.titleLabel is not None:
self.titleLabel.destroy()
self.notify.debug('Deleted title label')
self.notify.debug('Delete function')
if self.flyOverInterval:
self.flyOverInterval.pause()
self.flyOverInterval = None
for key in self.ballShadowDict:
self.ballShadowDict[key].removeNode()
self.dropShadowModel.removeNode()
return
def sendReady(self):
self.sendUpdate('setAvatarReadyHole', [])
def createClub(self, avId):
club = NodePath('club-%s' % avId)
clubModel = loader.loadModel('phase_6/models/golf/putter')
clubModel.reparentTo(club)
clubModel.setR(clubModel, 45)
self.clubs[avId] = club
def attachClub(self, avId, pointToBall = False):
club = self.clubs[avId]
if club:
av = base.cr.doId2do.get(avId)
if av:
av.useLOD(1000)
lHand = av.getLeftHands()[0]
club.setPos(0, 0, 0)
club.reparentTo(lHand)
netScale = club.getNetTransform().getScale()[1]
counterActToonScale = lHand.find('**/counteractToonScale')
if counterActToonScale.isEmpty():
counterActToonScale = lHand.attachNewNode('counteractToonScale')
counterActToonScale.setScale(1 / netScale)
self.notify.debug('creating counterActToonScale for %s' % av.getName())
club.reparentTo(counterActToonScale)
club.setX(-0.25 * netScale)
if pointToBall:
club.lookAt(self.clubLookatSpot)
def createToonRay(self):
self.toonRay = OdeRayGeom(self.space, 10.0)
self.toonRay.setCollideBits(BitMask32(16777215))
self.toonRay.setCategoryBits(BitMask32(0))
self.toonRay.setRotation(Mat3(1, 0, 0, 0, -1, 0, 0, 0, -1))
self.space.setCollideId(self.toonRay, GolfGlobals.TOON_RAY_COLLIDE_ID)
self.rayList.append(self.toonRay)
def createSkyRay(self):
self.skyRay = OdeRayGeom(self.space, 100.0)
self.skyRay.setCollideBits(BitMask32(240))
self.skyRay.setCategoryBits(BitMask32(0))
self.skyRay.setRotation(Mat3(1, 0, 0, 0, -1, 0, 0, 0, -1))
self.space.setCollideId(self.skyRay, 78)
self.rayList.append(self.skyRay)
def createCameraRay(self):
self.cameraRay = OdeRayGeom(self.space, 30.0)
self.cameraRay.setCollideBits(BitMask32(8388608))
self.cameraRay.setCategoryBits(BitMask32(0))
self.space.setCollideId(self.cameraRay, GolfGlobals.CAMERA_RAY_COLLIDE_ID)
self.cameraRayNodePath = self.terrainModel.attachNewNode('cameraRayNodePath')
self.rayList.append(self.cameraRay)
def loadLevel(self):
GolfHoleBase.GolfHoleBase.loadLevel(self)
self.teeNodePath = self.terrainModel.find('**/tee0')
if self.teeNodePath.isEmpty():
teePos = Vec3(0, 0, 10)
else:
teePos = self.teeNodePath.getPos()
teePos.setZ(teePos.getZ() + GolfGlobals.GOLF_BALL_RADIUS)
self.notify.debug('teeNodePath heading = %s' % self.teeNodePath.getH())
self.teePositions = [teePos]
teeIndex = 1
teeNode = self.terrainModel.find('**/tee%d' % teeIndex)
while not teeNode.isEmpty():
teePos = teeNode.getPos()
teePos.setZ(teePos.getZ() + GolfGlobals.GOLF_BALL_RADIUS)
self.teePositions.append(teePos)
self.notify.debug('teeNodeP heading = %s' % teeNode.getH())
teeIndex += 1
teeNode = self.terrainModel.find('**/tee%d' % teeIndex)
self.holeBottomNodePath = self.terrainModel.find('**/holebottom0')
if self.holeBottomNodePath.isEmpty():
self.holeBottomPos = Vec3(*self.holeInfo['holePos'][0])
else:
self.holeBottomPos = self.holeBottomNodePath.getPos()
self.holePositions.append(self.holeBottomPos)
minHard = Point3(0, 0, 0)
maxHard = Point3(0, 0, 0)
self.hardSurfaceNodePath.calcTightBounds(minHard, maxHard)
centerX = (minHard[0] + maxHard[0]) / 2.0
centerY = (minHard[1] + maxHard[1]) / 2.0
heightX = (centerX - minHard[0]) / math.tan(deg2Rad(23))
heightY = (centerY - minHard[1]) / math.tan(deg2Rad(18))
height = max(heightX, heightY)
self.camTopViewPos = Point3(centerX, centerY, height)
self.camTopViewHpr = Point3(0, -90, 0)
self.createRays()
self.createToonRay()
self.createCameraRay()
def createLocatorDict(self):
self.locDict = {}
locatorNum = 1
curNodePath = self.hardSurfaceNodePath.find('**/locator%d' % locatorNum)
while not curNodePath.isEmpty():
self.locDict[locatorNum] = curNodePath
locatorNum += 1
curNodePath = self.hardSurfaceNodePath.find('**/locator%d' % locatorNum)
def loadBlockers(self):
loadAll = base.config.GetBool('golf-all-blockers', 0)
self.createLocatorDict()
self.blockerNums = self.holeInfo['blockers']
for locatorNum in self.locDict:
if locatorNum in self.blockerNums or loadAll:
locator = self.locDict[locatorNum]
locatorParent = locator.getParent()
locator.getChildren().wrtReparentTo(locatorParent)
else:
self.locDict[locatorNum].removeNode()
self.hardSurfaceNodePath.flattenStrong()
def loadSounds(self):
self.hitBallSfx = loader.loadSfx('phase_6/audio/sfx/Golf_Hit_Ball.ogg')
self.holeInOneSfx = loader.loadSfx('phase_6/audio/sfx/Golf_Hole_In_One.ogg')
self.holeInTwoPlusSfx = loader.loadSfx('phase_4/audio/sfx/MG_sfx_vine_game_fall.ogg')
self.ballGoesInStartSfx = loader.loadSfx('phase_6/audio/sfx/Golf_Ball_Goes_In_Start.ogg')
self.ballGoesInLoopSfx = loader.loadSfx('phase_6/audio/sfx/Golf_Ball_Goes_In_Loop.ogg')
self.ballGoesToRestSfx = loader.loadSfx('phase_6/audio/sfx/Golf_Ball_Rest_In_Cup.ogg')
self.kickedOutSfx = loader.loadSfx('phase_6/audio/sfx/Golf_Sad_Noise_Kicked_Off_Hole.ogg')
self.crowdBuildupSfx = []
self.crowdApplauseSfx = []
self.crowdMissSfx = []
for i in xrange(4):
self.crowdBuildupSfx.append(loader.loadSfx('phase_6/audio/sfx/Golf_Crowd_Buildup.ogg'))
self.crowdApplauseSfx.append(loader.loadSfx('phase_6/audio/sfx/Golf_Crowd_Applause.ogg'))
self.crowdMissSfx.append(loader.loadSfx('phase_6/audio/sfx/Golf_Crowd_Miss.ogg'))
self.bumpHardSfx = loader.loadSfx('phase_6/audio/sfx/Golf_Hit_Barrier_3.ogg')
self.bumpMoverSfx = loader.loadSfx('phase_4/audio/sfx/Golf_Hit_Barrier_2.ogg')
self.bumpWindmillSfx = loader.loadSfx('phase_4/audio/sfx/Golf_Hit_Barrier_1.ogg')
def setup(self):
self.notify.debug('setup golf hole')
self.loadLevel()
self.loadSounds()
self.camMove = 0
self.arrowKeys = ArrowKeys.ArrowKeys()
self.arrowKeys.setPressHandlers([None,
None,
self.__leftArrowPressed,
self.__rightArrowPressed,
self.__beginTossGolf])
self.arrowKeys.setReleaseHandlers([None,
None,
None,
None,
self.__endTossGolf])
self.targets = render.attachNewNode('targetGameTargets')
self.ballFollow = render.attachNewNode('nodeAtBall')
self.startingTeeHeading = self.teeNodePath.getH()
self.ballFollow.setH(self.startingTeeHeading)
self.ballFollowToonSpot = self.ballFollow.attachNewNode('toonAimSpot')
self.ballFollowToonSpot.setX(-2.0)
self.ballFollowToonSpot.setY(0)
self.ballFollowToonSpot.setH(-90)
self.clubLookatSpot = self.ballFollow.attachNewNode('clubLookat')
self.clubLookatSpot.setY(-(GolfGlobals.GOLF_BALL_RADIUS + 0.1))
camera.reparentTo(self.ballFollow)
self.camPosBallFollow = Point3(0.0, -23.0, 12.0)
self.camHprBallFollow = Point3(0, -16.0, 0)
camera.setPos(self.camPosBallFollow)
camera.setHpr(self.camHprBallFollow)
if self.holeBottomNodePath.isEmpty():
holePositions = self.holePositions
for index in xrange(len(holePositions)):
holePos = holePositions[index]
targetNodePathGeom, t1, t2 = BuildGeometry.addCircleGeom(self.targets, 16, 1)
targetNodePathGeom.setPos(holePos)
targetNodePathGeom.setBin('ground', 0)
targetNodePathGeom.setDepthWrite(False)
targetNodePathGeom.setDepthTest(False)
targetNodePathGeom.setTransparency(TransparencyAttrib.MAlpha)
targetNodePathGeom.setColorScale(0.0, 0.0, 0.0, 1.0)
self.holeNodes.append(targetNodePathGeom)
holeSphere = CollisionSphere(0, 0, 0, 1)
holeSphere.setTangible(1)
holeCNode = CollisionNode('Hole')
holeCNode.addSolid(holeSphere)
holeC = targetNodePathGeom.attachNewNode(holeCNode)
holeC.show()
holeC.setCollideMask(ToontownGlobals.PieBitmask)
toon = base.localAvatar
toon.setPos(0.0, 0.0, -100.0)
toon.b_setAnimState('neutral', 1.0)
self.pollingCtrl = 0
self.timeLastCtrl = 0.0
self.powerBar = DirectWaitBar(guiId='launch power bar', pos=(0.0, 0, -0.65), relief=DGG.SUNKEN, frameSize=(-2.0,
2.0,
-0.2,
0.2), borderWidth=(0.02, 0.02), scale=0.25, range=100, sortOrder=50, frameColor=(0.5, 0.5, 0.5, 0.5), barColor=(1.0, 0.0, 0.0, 1.0), text='', text_scale=0.26, text_fg=(1, 1, 1, 1), text_align=TextNode.ACenter, text_pos=(0, -0.05))
self.power = 0
self.powerBar['value'] = self.power
self.powerBar.hide()
self.accept('tab', self.tabKeyPressed)
self.putAwayAllToons()
base.transitions.irisOut(t=0)
self.dropShadowModel = loader.loadModel('phase_3/models/props/drop_shadow')
self.dropShadowModel.setColor(0, 0, 0, 0.5)
self.dropShadowModel.flattenMedium()
self.dropShadowModel.hide()
return
def switchToAnimState(self, animStateName, forced = False):
curAnimState = base.localAvatar.animFSM.getCurrentState()
curAnimStateName = ''
if curAnimState:
curAnimStateName = curAnimState.getName()
if curAnimStateName != animStateName or forced:
base.localAvatar.b_setAnimState(animStateName)
def __aimTask(self, task):
self.attachClub(self.currentGolfer, True)
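        # Unit aim vector derived from the follow node's heading
        # (0.0174532925 is pi/180, i.e. degrees -> radians).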
x = -math.sin(self.ballFollow.getH() * 0.0174532925)
y = math.cos(self.ballFollow.getH() * 0.0174532925)
dt = globalClock.getDt()
b = self.curGolfBall()
forceMove = 500
forceMoveDt = forceMove * dt
posUpdate = False
momentumChange = dt * 60.0
if (self.arrowKeys.upPressed() or self.arrowKeys.downPressed()) and not self.golfCourse.canDrive(self.currentGolfer):
posUpdate = True
self.aimMomentum = 0.0
self.ballFollow.headsUp(self.holeBottomNodePath)
elif self.arrowKeys.rightPressed() and not self.arrowKeys.leftPressed():
self.aimMomentum -= momentumChange
if self.aimMomentum > 0:
self.aimMomentum = 0.0
elif self.aimMomentum < -30.0:
self.aimMomentum = -30.0
posUpdate = True
self.switchToAnimState('GolfRotateLeft')
self.scoreBoard.hide()
elif self.arrowKeys.leftPressed() and not self.arrowKeys.rightPressed():
self.aimMomentum += momentumChange
if self.aimMomentum < 0.0:
self.aimMomentum = 0.0
elif self.aimMomentum > 30.0:
self.aimMomentum = 30.0
posUpdate = True
self.switchToAnimState('GolfRotateRight')
self.scoreBoard.hide()
else:
self.aimMomentum = 0.0
self.switchToAnimState('GolfPuttLoop')
self.ballFollow.setH(self.ballFollow.getH() + self.aimMomentum * dt)
if self.arrowKeys.upPressed() and self.golfCourse.canDrive(self.currentGolfer):
b.enable()
b.addForce(Vec3(x * forceMoveDt, y * forceMoveDt, 0))
if self.arrowKeys.downPressed() and self.golfCourse.canDrive(self.currentGolfer):
b.enable()
b.addForce(Vec3(-x * forceMoveDt, -y * forceMoveDt, 0))
if self.arrowKeys.leftPressed() and self.arrowKeys.rightPressed() and self.golfCourse.canDrive(self.currentGolfer):
b.enable()
b.addForce(Vec3(0, 0, 3000 * dt))
if posUpdate:
if globalClock.getFrameTime() - self.lastTimeHeadingSent > 0.2:
self.sendUpdate('setTempAimHeading', [localAvatar.doId, self.ballFollow.getH()])
self.lastTimeHeadingSent = globalClock.getFrameTime()
self.lastTempHeadingSent = self.ballFollow.getH()
elif self.lastTempHeadingSent != self.ballFollow.getH():
self.sendUpdate('setTempAimHeading', [localAvatar.doId, self.ballFollow.getH()])
self.lastTimeHeadingSent = globalClock.getFrameTime()
self.lastTempHeadingSent = self.ballFollow.getH()
self.setCamera2Ball()
self.fixCurrentGolferFeet()
self.adjustClub()
self.orientCameraRay()
return task.cont
def fixCurrentGolferFeet(self):
golfer = base.cr.doId2do.get(self.currentGolfer)
if not golfer:
return
golferPos = golfer.getPos(render)
newPos = Vec3(golferPos[0], golferPos[1], golferPos[2] + 5)
self.toonRay.setPosition(newPos)
def adjustClub(self):
club = self.clubs[self.currentGolfer]
if club:
distance = club.getDistance(self.clubLookatSpot)
scaleFactor = distance / 2.058
club.setScale(1, scaleFactor, 1)
def resetPowerBar(self):
self.power = 0
self.powerBar['value'] = self.power
self.powerBar['text'] = ''
def sendSwingInfo(self):
kickHimOut = self.updateWarning()
if kickHimOut:
return
curAimTime = globalClock.getRealTime() - self.enterAimStart
if curAimTime < 0:
curAimTime = 0
if curAimTime > GolfGlobals.AIM_DURATION:
curAimTime = GolfGlobals.AIM_DURATION
self.notify.debug('curAimTime = %f' % curAimTime)
x = -math.sin(self.ballFollow.getH() * 0.0174532925)
y = math.cos(self.ballFollow.getH() * 0.0174532925)
b = self.curGolfBall()
if hasattr(base, 'golfPower') and base.golfPower != None:
self.power = float(base.golfPower)
if not self.swingInfoSent:
self.sendUpdate('postSwingState', [self.getCycleTime(),
self.power,
b.getPosition()[0],
b.getPosition()[1],
b.getPosition()[2],
x,
y,
curAimTime,
self.getCommonObjectData()])
self.swingInfoSent = True
if self.power < 15 and self.golfCourse.scores[localAvatar.doId][self.golfCourse.curHoleIndex] == 0:
self.powerReminder = DirectLabel(text=TTLocalizer.GolfPowerReminder, text_shadow=(0, 0, 0, 1), text_fg=VBase4(1, 1, 0.0, 1), text_align=TextNode.ACenter, relief=None, pos=(0, 0, 0.8), scale=0.12)
return
def updateWarning(self):
retval = False
if not self.localToonHitControl:
self.localMissedSwings += 1
else:
self.localMissedSwings = 0
if self.localMissedSwings == GolfGlobals.KICKOUT_SWINGS - 1:
self.warningLabel = DirectLabel(parent=aspect2d, relief=None, pos=(0, 0, 0), text_align=TextNode.ACenter, text=TTLocalizer.GolfWarningMustSwing, text_scale=0.12, text_font=ToontownGlobals.getSignFont(), text_fg=(1, 0.1, 0.1, 1), text_wordwrap=20)
self.warningInterval = Sequence(LerpColorScaleInterval(self.warningLabel, 10, Vec4(1, 1, 1, 0), startColorScale=Vec4(1, 1, 1, 1), blendType='easeIn'), Func(self.warningLabel.destroy))
self.warningInterval.start()
elif self.localMissedSwings >= GolfGlobals.KICKOUT_SWINGS:
self.golfCourse.handleFallingAsleepGolf(None)
retval = True
return retval
def assignRecordSwing(self, avId, cycleTime, power, x, y, z, dirX, dirY, commonObjectData):
ball = self.ballDict[avId]['golfBall']
holdBallPos = ball.getPosition()
self.useCommonObjectData(commonObjectData)
self.trackRecordBodyFlight(ball, cycleTime, power, Vec3(x, y, z), dirX, dirY)
ball.setPosition(holdBallPos)
self.sendUpdate('ballMovie2AI', [cycleTime,
avId,
self.recording,
self.aVRecording,
self.ballInHoleFrame,
self.ballTouchedHoleFrame,
self.ballFirstTouchedHoleFrame,
commonObjectData])
self.ballMovie2Client(cycleTime, avId, self.recording, self.aVRecording, self.ballInHoleFrame, self.ballTouchedHoleFrame, self.ballFirstTouchedHoleFrame, commonObjectData)
def __watchAimTask(self, task):
self.setCamera2Ball()
self.attachClub(self.currentGolfer, True)
self.adjustClub()
self.fixCurrentGolferFeet()
self.orientCameraRay()
return task.cont
def __watchTeeTask(self, task):
self.setCamera2Ball()
return task.cont
def curGolfBall(self):
return self.ballDict[self.currentGolfer]['golfBall']
def curGolfBallGeom(self):
return self.ballDict[self.currentGolfer]['golfBallGeom']
def curBallShadow(self):
return self.ballShadowDict[self.currentGolfer]
def cleanupGeom(self):
self.targets.remove()
self.terrainModel.remove()
self.powerBar.destroy()
def cleanupPowerBar(self):
self.powerBar.hide()
def cleanupPhysics(self):
pass
def curBall(self):
return self.ballDict[self.currentGolfer]['ball']
def curBallANP(self):
return self.ballDict[self.currentGolfer]['ballActorNodePath']
def curBallActor(self):
return self.ballDict[self.currentGolfer]['ballActor']
def enterAim(self):
self.notify.debug('Aim')
self.notify.debug('currentGolfer = %s' % self.currentGolfer)
self.switchToAnimState('GolfPuttLoop', forced=True)
self.swingInfoSent = False
self.lastState = self.state
self.aimMomentum = 0.0
self.enterAimStart = globalClock.getRealTime()
taskMgr.add(self.__aimTask, 'Aim Task')
self.showOnlyCurGolfer()
strokes = self.golfCourse.getStrokesForCurHole(self.currentGolfer)
self.camPivot = self.ballFollow.attachNewNode('golf-camPivot')
self.targetCamPivot = self.ballFollow.attachNewNode('golf-targetCamPivot')
self.targetCamPivot.setP(self.DefaultCamP)
self.curCamPivot = self.ballFollow.attachNewNode('golf-curCamPivot')
self.curCamPivot.setP(self.DefaultCamP)
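        # Collision segment from just outside the ball toward the camera; the
        # traverser below feeds _adjustCamera, which raises the camera pitch
        # until this line of sight is clear of course barriers.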
self.ccTrav = CollisionTraverser('golf.ccTrav')
self.ccLine = CollisionSegment(0.0, 0.0, 0.0, 1.0, 0.0, 0.0)
self.ccLineNode = CollisionNode('golf.ccLineNode')
self.ccLineNode.addSolid(self.ccLine)
self.ccLineNodePath = self.camPivot.attachNewNode(self.ccLineNode)
self.ccLineBitMask = BitMask32(1048576)
self.ccLineNode.setFromCollideMask(self.ccLineBitMask)
self.ccLineNode.setIntoCollideMask(BitMask32.allOff())
self.camCollisionQueue = CollisionHandlerQueue()
self.ccTrav.addCollider(self.ccLineNodePath, self.camCollisionQueue)
if strokes:
self.ballFollow.headsUp(self.holeBottomNodePath)
self.camPivot.setP(self.DefaultCamP)
self._golfBarrierCollection = self.terrainModel.findAllMatches('**/collision?')
self._camAdjust = ScratchPad()
self._camAdjust.iters = 0
self._camAdjust.lower = self.DefaultCamP
self._camAdjust.upper = self.MaxCamP
base.camera.setPos(self.camPosBallFollow)
base.camera.setHpr(self.camHprBallFollow)
self.camPivot.setP(self.DefaultCamP)
base.camera.wrtReparentTo(self.camPivot)
A = Point3(0, 0, 0)
B = base.camera.getPos()
AtoB = B - A
AtoBnorm = Point3(AtoB)
AtoBnorm.normalize()
A += AtoBnorm * 0.4
self.ccLine.setPointA(A)
self.ccLine.setPointB(B)
self.camPivot.setP(self.DefaultCamP)
self._camAdjust.task = taskMgr.add(self._adjustCamera, 'adjustCamera')
self.resetPowerBar()
self.powerBar.show()
self.aimDuration = GolfGlobals.AIM_DURATION
if not self.unlimitedAimTime:
self.timer = ToontownTimer.ToontownTimer()
self.timer.posInTopRightCorner()
self.timer.setTime(self.aimDuration)
self.timer.countdown(self.aimDuration, self.timerExpired)
self.aimInstructions = DirectLabel(text=TTLocalizer.GolfAimInstructions, text_shadow=(0, 0, 0, 1), text_fg=VBase4(1, 1, 1, 1), text_align=TextNode.ACenter, relief=None, pos=(0, 0, -0.8), scale=TTLocalizer.DGHaimInstructions)
self.skyContact = 1
self.localToonHitControl = False
self._adjustCamera()
return
def exitAim(self):
localAvatar.wrtReparentTo(render)
taskMgr.remove(self._camAdjust.task)
taskMgr.remove('Aim Task')
taskMgr.remove(self.golfPowerTaskName)
if self.timer:
self.timer.stop()
self.timer.destroy()
self.timer = None
self.powerBar.hide()
self.ccLineNodePath.detachNode()
self.targetCamPivot.detachNode()
self.curCamPivot.detachNode()
self.camPivot.detachNode()
if self.aimInstructions:
self.aimInstructions.destroy()
self.aimInstructions = None
return
def timerExpired(self):
taskMgr.remove(self.golfPowerTaskName)
self.aimStart = None
self.sendSwingInfo()
self.resetPowerBar()
return
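    # _adjustCamera bisects the camera pitch between DefaultCamP and MaxCamP,
    # using the ccLine collision segment to find an unobstructed pitch; when
    # running as a per-frame task it eases the current pivot toward that
    # target instead of snapping to it.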
def _adjustCamera(self, task=None, first=True):
if task is None and first:
while 1:
self._adjustCamera(first=False)
if self._camAdjust.iters == 0:
return Task.cont
MaxIters = 5
finalP = self._camAdjust.lower
localAvatar.stash()
for barrier in self._golfBarrierCollection:
barrier.stash()
self.ccTrav.traverse(render)
for barrier in self._golfBarrierCollection:
barrier.unstash()
localAvatar.unstash()
midP = (self._camAdjust.lower + self._camAdjust.upper)/2
if self.camCollisionQueue.getNumEntries() > 0:
self.camCollisionQueue.sortEntries()
entry = self.camCollisionQueue.getEntry(0)
sPoint = entry.getSurfacePoint(self.camPivot)
self._camAdjust.lower = self.camPivot.getP()
finalP = midP
self.camPivot.setP(finalP)
else:
self._camAdjust.upper = self.camPivot.getP()
finalP = self._camAdjust.upper
self.camPivot.setP(midP)
if abs(self._camAdjust.lower - self._camAdjust.upper) < 1.0:
self._camAdjust.iters = MaxIters
self._camAdjust.iters += 1
if self._camAdjust.iters >= MaxIters:
self.targetCamPivot.setP(self._camAdjust.upper)
if task is None:
self.curCamPivot.setP(finalP)
self._camAdjust.iters = 0
self._camAdjust.lower = self.DefaultCamP
self._camAdjust.upper = self.MaxCamP
self.camPivot.setP(self.DefaultCamP)
if task is not None:
self.curCamPivot.setP(self.curCamPivot,
self.targetCamPivot.getP(self.curCamPivot)*min(1.0, 1.0*globalClock.getDt()))
curP = self.curCamPivot.getP()
self.curCamPivot.setP(self.DefaultCamP)
base.camera.reparentTo(self.ballFollow)
base.camera.setPos(self.camPosBallFollow)
base.camera.setHpr(self.camHprBallFollow)
base.camera.wrtReparentTo(self.curCamPivot)
self.curCamPivot.setP(curP)
base.camera.wrtReparentTo(self.ballFollow)
return Task.cont
def enterChooseTee(self):
self.notify.debug('ChooseTee')
self.curGolfBallGeom().show()
self.curBallShadow().show()
self.lastState = self.state
taskMgr.add(self.__chooseTeeTask, 'ChooseTee Task')
self.ballFollow.setH(self.startingTeeHeading)
self.localAvatarChosenTee = False
self.localTempTee = 0
if len(self.teePositions) > 1:
self.localTempTee = 1
self.chooseTeeDuration = GolfGlobals.TEE_DURATION
if not self.unlimitedTeeTime:
self.teeTimer = ToontownTimer.ToontownTimer()
self.teeTimer.posInTopRightCorner()
self.teeTimer.setTime(self.chooseTeeDuration)
self.teeTimer.countdown(self.chooseTeeDuration, self.teeTimerExpired)
self.teeInstructions = DirectLabel(text=TTLocalizer.GolfChooseTeeInstructions, text_fg=VBase4(1, 1, 1, 1), text_align=TextNode.ACenter, text_shadow=(0, 0, 0, 1), relief=None, pos=(0, 0, -0.75), scale=TTLocalizer.DGHteeInstructions)
self.powerBar.hide()
return
def exitChooseTee(self):
localAvatar.wrtReparentTo(render)
if hasattr(self, 'teeInstructions') and self.teeInstructions:
self.teeInstructions.destroy()
self.teeInstructions = None
taskMgr.remove('ChooseTee Task')
taskMgr.remove(self.golfPowerTaskName)
if self.teeTimer:
self.teeTimer.stop()
self.teeTimer.destroy()
self.teeTimer = None
self.powerBar.show()
return
def sendTeeInfo(self):
self.sendUpdate('setAvatarTee', [self.localTempTee])
self.localAvatarChosenTee = True
def __chooseTeeTask(self, task):
if self.localAvatarChosenTee:
return task.done
if self.arrowKeys.jumpPressed():
if self.flyOverInterval and self.flyOverInterval.isPlaying():
pass
else:
self.sendTeeInfo()
return task.cont
def changeTee(self, newTee):
ball = self.curGolfBall()
ball.setPosition(self.teePositions[newTee])
self.setCamera2Ball()
self.fixCurrentGolferFeet()
self.adjustClub()
def changeLocalTee(self, newTee):
self.changeTee(newTee)
self.sendUpdate('setAvatarTempTee', [localAvatar.doId, newTee])
self.fixCurrentGolferFeet()
self.adjustClub()
def __leftArrowPressed(self):
if self.state != 'ChooseTee':
return
self.localTempTee -= 1
if self.localTempTee < 0:
self.localTempTee = len(self.teePositions) - 1
self.changeLocalTee(self.localTempTee)
def __rightArrowPressed(self):
if self.state != 'ChooseTee':
return
self.localTempTee += 1
self.localTempTee %= len(self.teePositions)
self.changeLocalTee(self.localTempTee)
def teeTimerExpired(self):
self.sendTeeInfo()
def enterWatchAim(self):
self.notify.debug('Watch Aim')
self.notify.debugStateCall(self)
self.notify.debug('currentGolfer = %s' % self.currentGolfer)
strokes = self.golfCourse.getStrokesForCurHole(self.currentGolfer)
if strokes:
self.ballFollow.lookAt(self.holeBottomNodePath)
self.ballFollow.setP(0)
self.showOnlyCurGolfer()
taskMgr.add(self.__watchAimTask, 'Watch Aim Task')
def exitWatchAim(self):
self.notify.debugStateCall(self)
av = base.cr.doId2do.get(self.currentGolfer)
if av:
heading = av.getH(render)
toonPos = av.getPos(render)
av.reparentTo(render)
av.setH(heading)
av.setPos(toonPos)
self.notify.debug('av %s now at position %s' % (av.getName(), av.getPos()))
else:
self.notify.debug('could not get avId %d' % self.currentGolfer)
taskMgr.remove('Watch Aim Task')
def enterWatchTee(self):
self.notify.debug('Watch Tee')
self.notify.debugStateCall(self)
self.curGolfBallGeom().show()
self.ballFollow.setH(self.startingTeeHeading)
self.ballShadowDict[self.currentGolfer].show()
def exitWatchTee(self):
self.notify.debugStateCall(self)
av = base.cr.doId2do.get(self.currentGolfer)
taskMgr.remove('Watch Tee Task')
def enterWait(self):
self.notify.debug('Wait')
self.notify.debugStateCall(self)
def exitWait(self):
self.notify.debugStateCall(self)
def removePlayBackDelayDelete(self):
if self.playBackDelayDelete:
self.playBackDelayDelete.destroy()
self.playBackDelayDelete = None
return
def enterPlayback(self):
def shiftClubToRightHand():
club = self.clubs[self.currentGolfer]
av = base.cr.doId2do.get(self.currentGolfer)
if av and club:
club.wrtReparentTo(av.getRightHands()[0])
av = base.cr.doId2do.get(self.currentGolfer)
if not av:
return
else:
self.removePlayBackDelayDelete()
self.playBackDelayDelete = DelayDelete.DelayDelete(av, 'GolfHole.enterPlayback')
self.accept('clientCleanup', self._handleClientCleanup)
self.inPlayBack = 1
self.setLookingAtPutt(False)
self.swingInterval = Sequence(ActorInterval(av, 'swing-putt', startFrame=0, endFrame=GolfGlobals.BALL_CONTACT_FRAME), Func(self.startBallPlayback), ActorInterval(av, 'swing-putt', startFrame=GolfGlobals.BALL_CONTACT_FRAME, endFrame=23), Func(shiftClubToRightHand), Func(self.setLookingAtPutt, True), Func(self.removePlayBackDelayDelete))
adjustedBallTouchedHoleTime = self.ballTouchedHoleTime + GolfGlobals.BALL_CONTACT_TIME
adjustedBallFirstTouchedHoleTime = self.ballFirstTouchedHoleTime + GolfGlobals.BALL_CONTACT_TIME
adjustedBallDropTime = self.ballDropTime + GolfGlobals.BALL_CONTACT_TIME
adjustedPlaybackEndTime = self.playbackMovieDuration + GolfGlobals.BALL_CONTACT_TIME
        self.notify.debug('adjustedTimes ballTouched=%.2f ballFirstTouched=%.2f ballDrop=%.2f playbackEnd=%.2f' % (adjustedBallTouchedHoleTime,
adjustedBallFirstTouchedHoleTime,
adjustedBallDropTime,
adjustedPlaybackEndTime))
if self.ballWillGoInHole:
curDuration = self.swingInterval.getDuration()
lookPuttInterval = ActorInterval(av, 'look-putt')
if curDuration < adjustedBallDropTime:
self.swingInterval.append(lookPuttInterval)
curDuration = self.swingInterval.getDuration()
diffTime = adjustedBallDropTime - curDuration
if diffTime > 0:
self.swingInterval.append(ActorInterval(av, 'lookloop-putt', endTime=diffTime))
self.swingInterval.append(ActorInterval(av, 'good-putt', endTime=self.playbackMovieDuration, loop=1))
elif self.ballTouchedHoleTime:
self.notify.debug('doing self.ballTouchedHoleTime')
curDuration = self.swingInterval.getDuration()
lookPuttInterval = ActorInterval(av, 'look-putt')
if curDuration < adjustedBallTouchedHoleTime:
self.swingInterval.append(lookPuttInterval)
curDuration = self.swingInterval.getDuration()
diffTime = adjustedBallTouchedHoleTime - curDuration
if diffTime > 0:
self.swingInterval.append(ActorInterval(av, 'lookloop-putt', endTime=diffTime))
self.swingInterval.append(ActorInterval(av, 'bad-putt', endFrame=32))
self.swingInterval.append(ActorInterval(av, 'badloop-putt', endTime=self.playbackMovieDuration, loop=1))
else:
self.swingInterval.append(ActorInterval(av, 'look-putt'))
self.swingInterval.append(ActorInterval(av, 'lookloop-putt', endTime=self.playbackMovieDuration, loop=1))
sfxInterval = Parallel()
ballHitInterval = Sequence(Wait(GolfGlobals.BALL_CONTACT_TIME), SoundInterval(self.hitBallSfx))
sfxInterval.append(ballHitInterval)
if self.ballWillGoInHole:
ballRattle = Sequence()
timeToPlayBallRest = adjustedPlaybackEndTime - self.ballGoesToRestSfx.length()
if adjustedBallFirstTouchedHoleTime < timeToPlayBallRest:
diffTime = timeToPlayBallRest - adjustedBallFirstTouchedHoleTime
if self.ballGoesInStartSfx.length() < diffTime:
ballRattle.append(Wait(adjustedBallFirstTouchedHoleTime))
ballRattle.append(SoundInterval(self.ballGoesInStartSfx))
timeToPlayLoop = adjustedBallFirstTouchedHoleTime + self.ballGoesInStartSfx.length()
loopTime = timeToPlayBallRest - timeToPlayLoop
if self.ballGoesInLoopSfx.length() == 0.0:
numLoops = 0
else:
numLoops = int(loopTime / self.ballGoesInLoopSfx.length())
self.notify.debug('numLoops=%d loopTime=%f' % (numLoops, loopTime))
if loopTime > 0:
ballRattle.append(SoundInterval(self.ballGoesInLoopSfx, loop=1, duration=loopTime, seamlessLoop=True))
ballRattle.append(SoundInterval(self.ballGoesToRestSfx))
self.notify.debug('playing full rattling')
else:
self.notify.debug('playing abbreviated rattling')
timeToPlayBallGoesIn = adjustedBallFirstTouchedHoleTime
ballRattle.append(Wait(timeToPlayBallGoesIn))
startTime = self.ballGoesInStartSfx.length() - diffTime
                    self.notify.debug('adjustedBallDropTime=%s diffTime=%s startTime=%s' % (adjustedBallDropTime, diffTime, startTime))
ballRattle.append(SoundInterval(self.ballGoesInStartSfx, startTime=startTime))
ballRattle.append(SoundInterval(self.ballGoesToRestSfx))
else:
self.notify.debug('playing abbreviated ball goes to rest')
ballRattle.append(Wait(adjustedBallFirstTouchedHoleTime))
diffTime = adjustedPlaybackEndTime - adjustedBallFirstTouchedHoleTime
startTime = self.ballGoesToRestSfx.length() - diffTime
                self.notify.debug('adjustedBallDropTime=%s diffTime=%s startTime=%s' % (adjustedBallDropTime, diffTime, startTime))
ballRattle.append(SoundInterval(self.ballGoesToRestSfx, startTime=startTime))
sfxInterval.append(ballRattle)
crowdBuildupSfx = self.crowdBuildupSfx[self.avIdList.index(self.currentGolfer)]
crowdApplauseSfx = self.crowdApplauseSfx[self.avIdList.index(self.currentGolfer)]
crowdMissSfx = self.crowdMissSfx[self.avIdList.index(self.currentGolfer)]
if self.ballWillGoInHole:
crowdIval = Sequence()
buildupLength = crowdBuildupSfx.length()
self.notify.debug('buildupLength=%s' % buildupLength)
diffTime = adjustedBallFirstTouchedHoleTime - buildupLength
if diffTime > 0:
crowdIval.append(Wait(diffTime))
crowdIval.append(SoundInterval(crowdBuildupSfx))
crowdIval.append(SoundInterval(crowdApplauseSfx))
else:
startTime = buildupLength - adjustedBallFirstTouchedHoleTime
self.notify.debug('playing abbreviated crowd build and applause diffTime=%s startTime=%s' % (diffTime, startTime))
crowdIval.append(SoundInterval(crowdBuildupSfx, startTime=startTime))
crowdIval.append(SoundInterval(crowdApplauseSfx))
sfxInterval.append(crowdIval)
elif self.ballFirstTouchedHoleTime:
crowdIval = Sequence()
buildupLength = crowdBuildupSfx.length()
self.notify.debug('touched but not going in buildupLength=%s' % buildupLength)
diffTime = adjustedBallFirstTouchedHoleTime - buildupLength
if diffTime > 0:
self.notify.debug('waiting %.2f to play crowd buildup' % diffTime)
crowdIval.append(Wait(diffTime))
crowdIval.append(SoundInterval(crowdBuildupSfx))
crowdIval.append(SoundInterval(crowdMissSfx))
else:
startTime = buildupLength - adjustedBallFirstTouchedHoleTime
self.notify.debug('playing abbreviated crowd build and miss diffTime=%s startTime=%s' % (diffTime, startTime))
crowdIval.append(SoundInterval(crowdBuildupSfx, startTime=startTime))
crowdIval.append(SoundInterval(crowdMissSfx))
sfxInterval.append(crowdIval)
if self.sfxInterval:
sfxInterval.finish()
self.sfxInterval = sfxInterval
self.sfxInterval.start()
self.swingInterval.start()
def exitPlayback(self):
self.notify.debug('Exiting Playback')
if self.swingInterval:
self.swingInterval.pause()
av = base.cr.doId2do.get(self.currentGolfer)
if av:
if self.ballWillGoInHole:
av.loop('good-putt', restart=0)
elif self.ballTouchedHoleTime:
pass
else:
av.loop('neutral')
self.setLookingAtPutt(False)
if av == base.localAvatar:
if self.ballWillGoInHole:
av.b_setAnimState('GolfGoodPutt')
elif self.ballTouchedHoleTime:
av.b_setAnimState('GolfBadPutt')
else:
av.b_setAnimState('neutral')
taskMgr.remove('playback task')
self.curGolfBall().disable()
self.readyCurrentGolfer(None)
self.inPlayBack = 0
if self.powerReminder:
self.powerReminder.destroy()
self.powerReminder = None
return
def setLookingAtPutt(self, newVal):
self.isLookingAtPutt = newVal
def getLookingAtPutt(self):
return self.isLookingAtPutt
def startBallPlayback(self):
self.playbackFrameNum = 0
self.sourceFrame = self.recording[0]
self.destFrameNum = 1
self.destFrame = self.recording[self.destFrameNum]
self.aVSourceFrame = self.aVRecording[0]
self.aVDestFrameNum = 1
self.aVDestFrame = self.aVRecording[self.aVDestFrameNum]
self.inPlayBack = 2
def isCurBallInHole(self):
retval = False
ball = self.curGolfBall()
ballPos = ball.getPosition()
for holePos in self.holePositions:
displacement = ballPos - holePos
length = displacement.length()
self.notify.debug('hole %s length=%s' % (holePos, length))
if length <= GolfGlobals.DistanceToBeInHole:
retval = True
break
return retval
def handleBallGoingInHole(self):
par = GolfGlobals.HoleInfo[self.holeId]['par']
unlimitedSwing = False
av = base.cr.doId2do.get(self.currentGolfer)
if av:
unlimitedSwing = av.getUnlimitedSwing()
if not unlimitedSwing:
self.curGolfBall().setPosition(0, 0, -100)
self.ballShadowDict[self.currentGolfer].setPos(0, 0, -100)
self.ballShadowDict[self.currentGolfer].hide()
strokes = 3
if self.golfCourse:
strokes = self.golfCourse.getStrokesForCurHole(self.currentGolfer)
else:
self.notify.warning('self.golfCourse is None')
diff = strokes - par
if diff > 0:
textStr = '+' + str(diff)
else:
            textStr = str(diff)  # str(), not int: __genText() feeds this to TextNode.setText()
if strokes == 1:
textStr = TTLocalizer.GolfHoleInOne
elif diff in TTLocalizer.GolfShotDesc:
if self.ballWillGoInHole:
textStr = TTLocalizer.GolfShotDesc[diff]
perfectTextSubnode = hidden.attachNewNode(self.__genText(textStr))
perfectText = hidden.attachNewNode('perfectText')
perfectTextSubnode.reparentTo(perfectText)
frame = self.__textGen.getCardActual()
offsetY = -abs(frame[2] + frame[3]) / 2.0 - 1.35
perfectTextSubnode.setPos(0, 0, offsetY)
perfectText.setColor(1, 0.1, 0.1, 1)
def fadeFunc(t, text = perfectText):
text.setColorScale(1, 1, 1, t)
def destroyText(text = perfectText):
text.removeNode()
animTrack = Sequence()
av = base.cr.doId2do.get(self.currentGolfer)
animTrack.append(Func(self.golfCourse.updateScoreBoard))
textTrack = Sequence(Func(perfectText.reparentTo, aspect2d), Parallel(LerpScaleInterval(perfectText, duration=0.5, scale=0.3, startScale=0.0), LerpFunctionInterval(fadeFunc, fromData=0.0, toData=1.0, duration=0.5)), Wait(2.0), Parallel(LerpScaleInterval(perfectText, duration=0.5, scale=1.0), LerpFunctionInterval(fadeFunc, fromData=1.0, toData=0.0, duration=0.5, blendType='easeIn')), Func(destroyText), WaitInterval(0.5), Func(self.sendUpdate, 'turnDone', []))
soundTrack = Sequence()
if strokes == 1:
soundTrack.append(SoundInterval(self.holeInOneSfx))
elif self.hasCurGolferReachedMaxSwing and not self.ballWillGoInHole:
soundTrack.append(SoundInterval(self.kickedOutSfx))
self.perfectIval = Parallel(textTrack, soundTrack, animTrack)
self.perfectIval.start()
def __playbackTask(self, task):
return self.playBackFrame(task)
def toonRayCollisionCallback(self, x, y, z):
if self.state not in ('Aim', 'WatchAim', 'ChooseTee', 'WatchTee'):
return
tempPath = render.attachNewNode('temp')
tempPath.setPos(x, y, z)
relPos = tempPath.getPos(self.ballFollowToonSpot)
av = base.cr.doId2do.get(self.currentGolfer)
if av:
zToUse = relPos[2]
if zToUse < 0 - GolfGlobals.GOLF_BALL_RADIUS:
zToUse = 0 - GolfGlobals.GOLF_BALL_RADIUS
av.setPos(0, 0, zToUse)
tempPath.removeNode()
def preStep(self):
if self.currentGolferActive:
GolfHoleBase.GolfHoleBase.preStep(self)
def postStep(self):
if self.currentGolferActive:
GolfHoleBase.GolfHoleBase.postStep(self)
DistributedPhysicsWorld.DistributedPhysicsWorld.postStep(self)
if self.inPlayBack == 2:
self.playBackFrame()
self.makeCurGolferLookAtBall()
elif self.state == 'Playback' and self.inPlayBack == 0:
self.request('Wait')
self.updateTranslucentObjects()
def updateTranslucentObjects(self):
for translucentNodePathLastFrame in self.translucentLastFrame:
if translucentNodePathLastFrame not in self.translucentCurFrame:
translucentNodePathLastFrame.setColorScale(1, 1, 1, 1)
for transNpCurFrame in self.translucentCurFrame:
if transNpCurFrame not in self.translucentLastFrame:
self.notify.debug('making translucent %s' % transNpCurFrame)
transNpCurFrame.setColorScale(1, 1, 1, 0.25)
transNpCurFrame.setTransparency(1)
def makeCurGolferLookAtBall(self):
if self.getLookingAtPutt():
av = base.cr.doId2do.get(self.currentGolfer)
if av:
ballPos = self.curGolfBall().getPosition()
av.headsUp(ballPos[0], ballPos[1], ballPos[2])
av.setH(av.getH() - 90)
def playBackFrame(self):
doPrint = 0
doAVPrint = 0
lastFrame = self.recording[len(self.recording) - 1][0]
if self.playbackFrameNum >= self.destFrame[0]:
self.sourceFrame = self.destFrame
self.destFrameNum += 1
doPrint = 1
if self.destFrameNum < len(self.recording):
self.destFrame = self.recording[self.destFrameNum]
else:
self.notify.debug('recording length %s' % len(self.recording))
if self.isCurBallInHole() or self.hasCurGolferReachedMaxSwing():
self.handleBallGoingInHole()
self.request('Wait')
else:
self.golfCourse.updateScoreBoard()
self.request('Wait')
self.sendUpdate('turnDone', [])
return
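        # Linearly interpolate the ball position between the two recorded
        # frames that bracket playbackFrameNum, so playback stays smooth even
        # when the physics recording is sparser than the render frame rate.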
self.projLength = self.destFrame[0] - self.sourceFrame[0]
self.projPen = self.destFrame[0] - self.playbackFrameNum
propSource = float(self.projPen) / float(self.projLength)
propDest = 1.0 - propSource
projX = self.sourceFrame[1] * propSource + self.destFrame[1] * propDest
projY = self.sourceFrame[2] * propSource + self.destFrame[2] * propDest
projZ = self.sourceFrame[3] * propSource + self.destFrame[3] * propDest
newPos = Vec3(projX, projY, projZ)
ball = self.curGolfBall()
ball.setPosition(newPos)
if self.playbackFrameNum >= self.aVDestFrame[0]:
self.aVSourceFrame = self.aVDestFrame
self.aVDestFrameNum += 1
doAVPrint = 1
if self.aVDestFrameNum < len(self.aVRecording):
self.aVDestFrame = self.aVRecording[self.aVDestFrameNum]
newAV = Vec3(self.aVSourceFrame[1], self.aVSourceFrame[2], self.aVSourceFrame[3])
self.projLength = self.aVDestFrame[0] - self.aVSourceFrame[0]
self.projPen = self.aVDestFrame[0] - self.playbackFrameNum
propSource = float(self.projPen) / float(self.projLength)
propDest = 1.0 - propSource
projX = self.aVSourceFrame[1] * propSource + self.aVDestFrame[1] * propDest
projY = self.aVSourceFrame[2] * propSource + self.aVDestFrame[2] * propDest
projZ = self.aVSourceFrame[3] * propSource + self.aVDestFrame[3] * propDest
newAV = Vec3(projX, projY, projZ)
ball = self.curGolfBall()
ball.setAngularVel(newAV)
if self.playbackFrameNum < lastFrame - 1:
ball.enable()
else:
ball.disable()
self.setCamera2Ball()
self.placeBodies()
if doAVPrint:
pass
if doPrint:
self.notify.debug('. %s %s %s %s %s' % (self.playbackFrameNum,
self.sourceFrame[0],
self.destFrame[0],
self.destFrameNum,
newPos))
self.playbackFrameNum += 1
def enterCleanup(self):
taskMgr.remove('update task')
if hasattr(self, 'arrowKeys'):
self.arrowKeys.destroy()
self.arrowKeys = None
self.ignoreAll()
if self.swingInterval:
self.swingInterval.pause()
self.swingInterval = None
if self.sfxInterval:
self.sfxInterval.pause()
self.sfxInterval = None
self.cleanupGeom()
return
def exitCleanup(self):
pass
def setCamera2Ball(self):
b = self.curGolfBall()
ballPos = Point3(b.getPosition()[0], b.getPosition()[1], b.getPosition()[2])
self.ballFollow.setPos(ballPos)
def hitBall(self, ball, power, x, y):
        self.performSwing(ball, power, x, y)
def ballMovie2Client(self, cycleTime, avId, movie, spinMovie, ballInFrame, ballTouchedHoleFrame, ballFirstTouchedHoleFrame, commonObjectData):
self.notify.debug('received Movie, number of frames %s %s ballInFrame=%d ballTouchedHoleFrame=%d ballFirstTouchedHoleFrame=%d' % (len(movie),
len(spinMovie),
ballInFrame,
ballTouchedHoleFrame,
ballFirstTouchedHoleFrame))
if self.state == 'Playback':
self.notify.debug('SMASHED PLAYBACK')
return
self.ballShadowDict[avId].show()
self.holdCycleTime = cycleTime
self.holdCommonObjectData = commonObjectData
self.useCommonObjectData(self.holdCommonObjectData)
self.recording = movie
self.aVRecording = spinMovie
endingBallPos = Vec3(movie[-1][1], movie[-1][2], movie[-1][3])
endingFrame = movie[-1][0]
self.playbackMovieDuration = endingFrame * self.DTAStep
self.notify.debug('playback movie duration=%s' % self.playbackMovieDuration)
displacement = self.holePositions[0] - endingBallPos
self.ballWillGoInHole = False
if displacement.length() <= GolfGlobals.DistanceToBeInHole:
self.ballWillGoInHole = True
self.notify.debug('endingBallPos=%s, distanceToHole=%s, ballWillGoInHole=%s' % (endingBallPos, displacement.length(), self.ballWillGoInHole))
self.ballDropTime = ballInFrame * self.DTAStep
self.ballTouchedHoleTime = ballTouchedHoleFrame * self.DTAStep
self.ballFirstTouchedHoleTime = ballFirstTouchedHoleFrame * self.DTAStep
if self.state == 'WatchTee':
self.request('WatchAim')
self.request('Playback')
def golfersTurn(self, avId):
self.readyCurrentGolfer(avId)
if avId == localAvatar.doId:
self.setCamera2Ball()
self.request('Aim')
else:
self.setCamera2Ball()
self.request('WatchAim')
def readyCurrentGolfer(self, avId):
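        # Zero every ball's collide/category bits first so that only the
        # active golfer's ball (re-enabled below) takes part in collisions.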
for index in self.ballDict:
self.ballDict[index]['golfBallOdeGeom'].setCollideBits(BitMask32(0))
self.ballDict[index]['golfBallOdeGeom'].setCategoryBits(BitMask32(0))
self.ballDict[index]['golfBall'].disable()
if avId:
self.currentGolfer = avId
self.currentGolferActive = True
if avId in self.ballDict:
self.ballDict[avId]['golfBallOdeGeom'].setCollideBits(BitMask32(16777215))
self.ballDict[avId]['golfBallOdeGeom'].setCategoryBits(BitMask32(4278190080L))
else:
self.currentGolferActive = False
def setGolferIds(self, avIds):
self.avIdList = avIds
self.numPlayers = len(self.avIdList)
self.teeChosen = {}
for avId in self.avIdList:
self.teeChosen[avId] = -1
def setHoleId(self, holeId):
self.holeId = holeId
self.holeInfo = GolfGlobals.HoleInfo[holeId]
def createBall(self, avId, index = None):
golfBallGeom, golfBall, odeGeom = self.createSphere(self.world, self.space, GolfGlobals.GOLF_BALL_DENSITY, GolfGlobals.GOLF_BALL_RADIUS, index)
startPos = self.teePositions[0]
if len(self.teePositions) > 1:
startPos = self.teePositions[1]
golfBall.setPosition(startPos)
golfBallGeom.hide()
if self.notify.getDebug():
self.notify.debug('golf ball body id')
golfBall.write()
self.notify.debug(' -')
golfBallGeom.setName('golfBallGeom%s' % avId)
self.ballDict[avId] = {'golfBall': golfBall,
'golfBallGeom': golfBallGeom,
'golfBallOdeGeom': odeGeom}
golfBall.disable()
shadow = self.dropShadowModel.copyTo(render)
shadow.setBin('shadow', 100)
shadow.setScale(0.09)
shadow.setDepthWrite(False)
shadow.setDepthTest(True)
self.ballShadowDict[avId] = shadow
shadow.hide()
def setGolfCourseDoId(self, golfCourseDoId):
self.golfCourseDoId = golfCourseDoId
self.golfCourse = base.cr.doId2do.get(self.golfCourseDoId)
if not self.golfCourse:
self.cr.relatedObjectMgr.abortRequest(self.golfCourseRequest)
self.golfCourseRequest = self.cr.relatedObjectMgr.requestObjects([self.golfCourseDoId], eachCallback=self.__gotGolfCourse)
else:
self.scoreBoard = self.golfCourse.scoreBoard
self.scoreBoard.hide()
def __gotGolfCourse(self, golfCourse):
self.golfCourseRequest = None
self.golfCourse = golfCourse
return
def __genText(self, text):
self.__textGen.setText(text)
return self.__textGen.generate()
def sendBox(self, pos0, pos1, pos2, quat0, quat1, quat2, quat3, anV0, anV1, anV2, lnV0, lnV1, lnV2):
self.swingBox.setPosition(pos0, pos1, pos2)
self.swingBox.setQuaternion(Quat(quat0, quat1, quat2, quat3))
self.swingBox.setAngularVel(anV0, anV1, anV2)
self.swingBox.setLinearVel(lnV0, lnV1, lnV2)
def hasCurGolferReachedMaxSwing(self):
strokes = self.golfCourse.getStrokesForCurHole(self.currentGolfer)
maxSwing = self.holeInfo['maxSwing']
retval = strokes >= maxSwing
if retval:
pass
return retval
def __getGolfPower(self, time):
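        # Normalize the elapsed aiming time, shape it with an exponent, then
        # fold it into a triangle wave so the power meter sweeps 0 -> 100 -> 0
        # repeatedly while the swing button is held.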
elapsed = max(time - self.aimStart, 0.0)
t = elapsed / self.golfPowerSpeed
t = math.pow(t, self.golfPowerExponent)
power = int(t * 100) % 200
if power > 100:
power = 200 - power
return power
def __beginTossGolf(self):
        if self.aimStart is not None:
return
if not self.state == 'Aim':
return
if self.swingInfoSent:
return
self.localToonHitControl = True
time = globalClock.getFrameTime()
self.aimStart = time
messenger.send('wakeup')
self.scoreBoard.hide()
taskMgr.add(self.__updateGolfPower, self.golfPowerTaskName)
return
def __endTossGolf(self):
        if self.aimStart is None:
return
if not self.state == 'Aim':
return
messenger.send('wakeup')
taskMgr.remove(self.golfPowerTaskName)
self.aimStart = None
self.sendSwingInfo()
self.resetPowerBar()
return
def __updateGolfPower(self, task):
        if not self.powerBar:
            self.notify.warning('__updateGolfPower: no power bar!')
            return Task.done
newPower = self.__getGolfPower(globalClock.getFrameTime())
self.power = newPower
self.powerBar['value'] = newPower
self.powerBar['text'] = TTLocalizer.GolfPowerBarText % {'power': newPower}
return Task.cont
def golferChooseTee(self, avId):
self.readyCurrentGolfer(avId)
self.putAwayAllToons()
if self.needToDoFlyOver and self.doFlyOverMovie(avId):
pass
else:
if avId == localAvatar.doId:
self.setCamera2Ball()
if not self.state == 'ChooseTee':
self.request('ChooseTee')
else:
self.setCamera2Ball()
self.request('WatchTee')
self.takeOutToon(self.currentGolfer)
def setAvatarTempTee(self, avId, tempTee):
if self.state != 'WatchTee':
return
if avId != self.currentGolfer:
self.notify.warning('setAvatarTempTee avId=%s not equal to self.currentGolfer=%s' % (avId, self.currentGolfer))
return
self.changeTee(tempTee)
def setAvatarFinalTee(self, avId, finalTee):
if avId != self.currentGolfer:
            self.notify.warning('setAvatarFinalTee avId=%s not equal to self.currentGolfer=%s' % (avId, self.currentGolfer))
return
self.changeTee(finalTee)
def setTempAimHeading(self, avId, heading):
if avId != self.currentGolfer:
            self.notify.warning('setTempAimHeading avId=%s not equal to self.currentGolfer=%s' % (avId, self.currentGolfer))
return
if self.state != 'WatchAim':
return
if avId != localAvatar.doId:
self.ballFollow.setH(heading)
def stickToonToBall(self, avId):
av = base.cr.doId2do.get(avId)
if av:
av.reparentTo(self.ballFollowToonSpot)
av.setPos(0, 0, 0)
av.setH(0)
def putAwayToon(self, avId):
av = base.cr.doId2do.get(avId)
if av:
av.reparentTo(render)
av.setPos(0, 0, -1000)
av.setH(0)
def putAwayAllToons(self):
for avId in self.avIdList:
self.putAwayToon(avId)
def takeOutToon(self, avId):
self.stickToonToBall(avId)
self.fixCurrentGolferFeet()
self.attachClub(avId)
def showOnlyCurGolfer(self):
self.notify.debug('curGolfer = %s' % self.currentGolfer)
self.stickToonToBall(self.currentGolfer)
self.fixCurrentGolferFeet()
self.attachClub(self.currentGolfer)
for avId in self.avIdList:
if avId != self.currentGolfer:
self.putAwayToon(avId)
def tabKeyPressed(self):
doInterval = True
self.notify.debug('tab key pressed')
if not hasattr(self, 'ballFollow'):
return
if self.flyOverInterval and self.flyOverInterval.isPlaying():
return
if self.camInterval and self.camInterval.isPlaying():
self.camInterval.pause()
if base.camera.getParent() == self.ballFollow:
if doInterval:
curHpr = camera.getHpr(render)
angle = PythonUtil.closestDestAngle2(curHpr[0], 0)
self.camInterval = Sequence(Func(base.camera.wrtReparentTo, render), LerpPosHprInterval(base.camera, 2, self.camTopViewPos, self.camTopViewHpr))
self.camInterval.start()
else:
base.camera.reparentTo(render)
base.camera.setPos(self.camTopViewPos)
base.camera.setHpr(self.camTopViewHpr)
elif doInterval:
curHpr = camera.getHpr(self.ballFollow)
angle = PythonUtil.closestDestAngle2(curHpr[0], 0)
self.camInterval = Sequence(Func(base.camera.wrtReparentTo, self.ballFollow), LerpPosHprInterval(base.camera, 2, self.camPosBallFollow, self.camHprBallFollow))
self.camInterval.start()
else:
base.camera.reparentTo(self.ballFollow)
base.camera.setPos(self.camPosBallFollow)
base.camera.setHpr(self.camHprBallFollow)
def doFlyOverMovie(self, avId):
title = GolfGlobals.getCourseName(self.golfCourse.courseId) + ' :\n ' + GolfGlobals.getHoleName(self.holeId) + '\n' + TTLocalizer.GolfPar + ' : ' + '%s' % self.holeInfo['par']
self.titleLabel = DirectLabel(parent=aspect2d, relief=None, pos=(0, 0, 0.8), text_align=TextNode.ACenter, text=title, text_scale=0.12, text_font=ToontownGlobals.getSignFont(), text_fg=(1, 0.8, 0.4, 1))
self.titleLabel.setBin('opaque', 19)
self.titleLabel.hide()
self.needToDoFlyOver = False
bamFile = self.holeInfo['terrainModel']
fileName = bamFile.split('/')[-1]
dotIndex = fileName.find('.')
baseName = fileName[0:dotIndex]
camModelName = baseName + '_cammodel.bam'
cameraName = baseName + '_camera.bam'
path = bamFile[0:bamFile.find(fileName)]
camModelFullPath = path + camModelName
cameraAnimFullPath = path + cameraName
try:
self.flyOverActor = Actor.Actor(camModelFullPath, {'camera': cameraAnimFullPath})
except StandardError:
self.notify.debug("Couldn't find flyover %s" % camModelFullPath)
return False
base.transitions.noIris()
self.flyOverActor.reparentTo(render)
self.flyOverActor.setBlend(frameBlend=True)
flyOverJoint = self.flyOverActor.find('**/camera1')
children = flyOverJoint.getChildren()
numChild = children.getNumPaths()
for i in xrange(numChild):
childNodePath = children.getPath(i)
childNodePath.removeNode()
self.flyOverJoint = flyOverJoint
self.flyOverInterval = Sequence(Func(base.camera.reparentTo, flyOverJoint), Func(base.camera.clearTransform), Func(self.titleLabel.show), ActorInterval(self.flyOverActor, 'camera'), Func(base.camera.reparentTo, self.ballFollow), Func(base.camera.setPos, self.camPosBallFollow), Func(base.camera.setHpr, self.camHprBallFollow))
if avId == localAvatar.doId:
self.flyOverInterval.append(Func(self.setCamera2Ball))
self.flyOverInterval.append(Func(self.safeRequestToState, 'ChooseTee'))
else:
self.flyOverInterval.append(Func(self.setCamera2Ball))
self.flyOverInterval.append(Func(self.safeRequestToState, 'WatchTee'))
self.flyOverInterval.append(Func(self.titleLabel.hide))
self.flyOverInterval.append(Func(self.takeOutToon, avId))
self.flyOverInterval.start()
return True
def avExited(self, avId):
if self.state == 'Playback' and self.currentGolfer == avId:
pass
else:
self.ballDict[avId]['golfBallGeom'].hide()
def orientCameraRay(self):
pos = base.camera.getPos(self.terrainModel)
self.cameraRayNodePath.setPos(pos)
self.cameraRayNodePath.lookAt(self.ballFollow)
renderPos = self.cameraRayNodePath.getPos(render)
if renderPos != pos:
            self.notify.debug('orientCameraRay: this should not happen')
ballPos = self.ballFollow.getPos(self.terrainModel)
dirCam = Vec3(ballPos - pos)
dirCam.normalize()
self.cameraRay.set(pos, dirCam)
def performSwing(self, ball, power, dirX, dirY):
startTime = globalClock.getRealTime()
avId = base.localAvatar.doId
position = ball.getPosition()
x = position[0]
y = position[1]
z = position[2]
if avId not in self.golfCourse.drivingToons:
x = position[0]
y = position[1]
z = position[2]
        # 'cycleTime' was used here (and passed to ballMovie2Client below)
        # without ever being assigned in this method; deriving it from
        # getSimCycleTime() is an assumption -- the original code may have
        # received it differently.
        cycleTime = self.getSimCycleTime()
        self.swingTime = cycleTime
        lift = 0
        ball = self.ball  # note: shadows the 'ball' parameter with the hole's own ball
forceMove = 2500
if power > 50:
lift = 0
ball.enable()
ball.setPosition(x, y, z)
ball.setLinearVel(0.0, 0.0, 0.0)
ball.setAngularVel(0.0, 0.0, 0.0)
ball.addForce(Vec3(dirX * forceMove * power / 100.0, dirY * forceMove * power / 100.0, lift))
self.initRecord()
safety = 0
self.llv = None
self.record(ball)
while ball.isEnabled() and len(self.recording) < 2000:
self.preStep()
self.simulate()
self.postStep()
self.record(ball)
safety += 1
self.record(ball)
midTime = globalClock.getRealTime()
self.processRecording()
self.processAVRecording()
self.notify.debug('Recording End time %s cycle %s len %s avLen %s' % (self.timingSimTime,
self.getSimCycleTime(),
len(self.recording),
len(self.aVRecording)))
self.request('WaitPlayback')
length = len(self.recording) - 1
x = self.recording[length][1]
y = self.recording[length][2]
z = self.recording[length][3]
self.ballPos[avId] = Vec3(x, y, z)
endTime = globalClock.getRealTime()
diffTime = endTime - startTime
fpsTime = self.frame / diffTime
self.notify.debug('Time Start %s Mid %s End %s Diff %s Fps %s frames %s' % (startTime,
midTime,
endTime,
diffTime,
fpsTime,
self.frame))
        # ballMovie2Client() expects a trailing commonObjectData argument that
        # this call was missing; getCommonObjectData() is assumed to exist on
        # the physics-world base class (its output is consumed above via
        # useCommonObjectData()).
        self.ballMovie2Client(cycleTime, avId, self.recording, self.aVRecording, self.ballInHoleFrame, self.ballTouchedHoleFrame, self.ballFirstTouchedHoleFrame, self.getCommonObjectData())
return
def handleBallHitNonGrass(self, c0, c1):
if not self.inPlayBack:
return
golfBallPos = self.curGolfBall().getPosition()
if self.lastBumpSfxPos == golfBallPos:
return
if GolfGlobals.HARD_COLLIDE_ID in [c0, c1]:
if not self.bumpHardSfx.status() == self.bumpHardSfx.PLAYING:
distance = (golfBallPos - self.lastBumpSfxPos).length()
if distance > 2.0:
base.playSfx(self.bumpHardSfx)
self.lastBumpSfxPos = golfBallPos
elif GolfGlobals.MOVER_COLLIDE_ID in [c0, c1]:
if not self.bumpMoverSfx.status() == self.bumpMoverSfx.PLAYING:
base.playSfx(self.bumpMoverSfx)
self.lastBumpSfxPos = golfBallPos
elif GolfGlobals.WINDMILL_BASE_COLLIDE_ID in [c0, c1]:
if not self.bumpWindmillSfx.status() == self.bumpWindmillSfx.PLAYING:
base.playSfx(self.bumpWindmillSfx)
self.lastBumpSfxPos = golfBallPos
def safeRequestToState(self, newState):
doingRequest = False
if self.state in self.defaultTransitions:
if newState in self.defaultTransitions[self.state]:
self.request(newState)
doingRequest = True
if not doingRequest:
self.notify.warning('ignoring transition from %s to %s' % (self.state, newState))
def doMagicWordHeading(self, heading):
if self.state == 'Aim':
self.aimMomentum = 0.0
self.ballFollow.setH(float(heading))
def _handleClientCleanup(self):
self.removePlayBackDelayDelete()
self.ignore('clientCleanup')

# File: /Python/remove-invalid-parentheses.py (black-shadows/LeetCode-Topicwise-Solutions)
# Time: O(C(n, c)), try out all possible substrings with the minimum c deletion.
# Space: O(c), the depth is at most c, and it costs n at each depth
class Solution(object):
def removeInvalidParentheses(self, s):
"""
:type s: str
:rtype: List[str]
"""
# Calculate the minimum left and right parantheses to remove
def findMinRemove(s):
left_removed, right_removed = 0, 0
for c in s:
if c == '(':
left_removed += 1
elif c == ')':
if not left_removed:
right_removed += 1
else:
left_removed -= 1
return (left_removed, right_removed)
# Check whether s is valid or not.
def isValid(s):
sum = 0
for c in s:
if c == '(':
sum += 1
elif c == ')':
sum -= 1
if sum < 0:
return False
return sum == 0
def removeInvalidParenthesesHelper(start, left_removed, right_removed):
if left_removed == 0 and right_removed == 0:
tmp = ""
for i, c in enumerate(s):
if i not in removed:
tmp += c
if isValid(tmp):
res.append(tmp)
return
for i in xrange(start, len(s)):
if right_removed == 0 and left_removed > 0 and s[i] == '(':
if i == start or s[i] != s[i - 1]: # Skip duplicated.
removed[i] = True
removeInvalidParenthesesHelper(i + 1, left_removed - 1, right_removed)
del removed[i]
elif right_removed > 0 and s[i] == ')':
if i == start or s[i] != s[i - 1]: # Skip duplicated.
removed[i] = True
removeInvalidParenthesesHelper(i + 1, left_removed, right_removed - 1)
del removed[i]
res, removed = [], {}
(left_removed, right_removed) = findMinRemove(s)
removeInvalidParenthesesHelper(0, left_removed, right_removed)
return res
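
# Illustrative usage (added; not part of the original solution). '()())()'
# needs exactly one deletion and has two valid results:
if __name__ == '__main__':
    assert sorted(Solution().removeInvalidParentheses('()())()')) == ['(())()', '()()()']
    assert Solution().removeInvalidParentheses(')(') == ['']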

# File: /fetchr/packages/amplify.py (yejianye/fetchr)
from fetchr.packages.base import package, SimplePackage
@package
class Amplify(SimplePackage):
"""A set of tools solve problems of request, store, pub/sub"""
version = '1.1.0'
@property
def cdn_urls(self):
return ['//cdnjs.cloudflare.com/ajax/libs/amplifyjs/$version/amplify.min.js']
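
# Illustrative (added): assuming the SimplePackage base class substitutes
# self.version for the '$version' placeholder, cdn_urls for version 1.1.0
# would evaluate to
#   ['//cdnjs.cloudflare.com/ajax/libs/amplifyjs/1.1.0/amplify.min.js']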

# File: /aws.cloudwatch.EventPermission.organization-access-python/__main__.py (ehubbard/templates-aws)
import pulumi
import pulumi_aws as aws
organization_access = aws.cloudwatch.EventPermission("organizationAccess",
condition={
"key": "aws:PrincipalOrgID",
"type": "StringEquals",
"value": aws_organizations_organization["example"]["id"],
},
principal="*",
statement_id="OrganizationAccess")

# File: /backend/users/migrations/0002_auto_20201113_0020.py (crowdbotics-apps/jd-searcher-22572)
# Generated by Django 2.2.17 on 2020-11-13 00:20
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='user',
name='last_updated',
field=models.DateTimeField(auto_now=True, null=True),
),
migrations.AddField(
model_name='user',
name='timestamp_created',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AlterField(
model_name='user',
name='email',
field=models.EmailField(blank=True, max_length=255, null=True),
),
migrations.AlterField(
model_name='user',
name='first_name',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AlterField(
model_name='user',
name='last_name',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AlterField(
model_name='user',
name='name',
field=models.CharField(blank=True, max_length=255, null=True),
),
]

# File: /dotviewer/drawgraph.py (mesalock-linux/mesapy, BSD-3-Clause)
"""
A custom graphic renderer for the '.plain' files produced by dot.
"""
from __future__ import generators
import re, os, math
import pygame
from pygame.locals import *
from strunicode import forceunicode
this_dir = os.path.dirname(os.path.abspath(__file__))
FONT = os.path.join(this_dir, 'font', 'DroidSans.ttf')
FIXEDFONT = os.path.join(this_dir, 'font', 'DroidSansMono.ttf')
COLOR = {
'aliceblue': (240, 248, 255),
'antiquewhite': (250, 235, 215),
'antiquewhite1': (255, 239, 219),
'antiquewhite2': (238, 223, 204),
'antiquewhite3': (205, 192, 176),
'antiquewhite4': (139, 131, 120),
'aquamarine': (127, 255, 212),
'aquamarine1': (127, 255, 212),
'aquamarine2': (118, 238, 198),
'aquamarine3': (102, 205, 170),
'aquamarine4': (69, 139, 116),
'azure': (240, 255, 255),
'azure1': (240, 255, 255),
'azure2': (224, 238, 238),
'azure3': (193, 205, 205),
'azure4': (131, 139, 139),
'beige': (245, 245, 220),
'bisque': (255, 228, 196),
'bisque1': (255, 228, 196),
'bisque2': (238, 213, 183),
'bisque3': (205, 183, 158),
'bisque4': (139, 125, 107),
'black': (0, 0, 0),
'blanchedalmond': (255, 235, 205),
'blue': (0, 0, 255),
'blue1': (0, 0, 255),
'blue2': (0, 0, 238),
'blue3': (0, 0, 205),
'blue4': (0, 0, 139),
'blueviolet': (138, 43, 226),
'brown': (165, 42, 42),
'brown1': (255, 64, 64),
'brown2': (238, 59, 59),
'brown3': (205, 51, 51),
'brown4': (139, 35, 35),
'burlywood': (222, 184, 135),
'burlywood1': (255, 211, 155),
'burlywood2': (238, 197, 145),
'burlywood3': (205, 170, 125),
'burlywood4': (139, 115, 85),
'cadetblue': (95, 158, 160),
'cadetblue1': (152, 245, 255),
'cadetblue2': (142, 229, 238),
'cadetblue3': (122, 197, 205),
'cadetblue4': (83, 134, 139),
'chartreuse': (127, 255, 0),
'chartreuse1': (127, 255, 0),
'chartreuse2': (118, 238, 0),
'chartreuse3': (102, 205, 0),
'chartreuse4': (69, 139, 0),
'chocolate': (210, 105, 30),
'chocolate1': (255, 127, 36),
'chocolate2': (238, 118, 33),
'chocolate3': (205, 102, 29),
'chocolate4': (139, 69, 19),
'coral': (255, 127, 80),
'coral1': (255, 114, 86),
'coral2': (238, 106, 80),
'coral3': (205, 91, 69),
'coral4': (139, 62, 47),
'cornflowerblue': (100, 149, 237),
'cornsilk': (255, 248, 220),
'cornsilk1': (255, 248, 220),
'cornsilk2': (238, 232, 205),
'cornsilk3': (205, 200, 177),
'cornsilk4': (139, 136, 120),
'crimson': (220, 20, 60),
'cyan': (0, 255, 255),
'cyan1': (0, 255, 255),
'cyan2': (0, 238, 238),
'cyan3': (0, 205, 205),
'cyan4': (0, 139, 139),
'darkgoldenrod': (184, 134, 11),
'darkgoldenrod1': (255, 185, 15),
'darkgoldenrod2': (238, 173, 14),
'darkgoldenrod3': (205, 149, 12),
'darkgoldenrod4': (139, 101, 8),
'darkgreen': (0, 100, 0),
'darkkhaki': (189, 183, 107),
'darkolivegreen': (85, 107, 47),
'darkolivegreen1': (202, 255, 112),
'darkolivegreen2': (188, 238, 104),
'darkolivegreen3': (162, 205, 90),
'darkolivegreen4': (110, 139, 61),
'darkorange': (255, 140, 0),
'darkorange1': (255, 127, 0),
'darkorange2': (238, 118, 0),
'darkorange3': (205, 102, 0),
'darkorange4': (139, 69, 0),
'darkorchid': (153, 50, 204),
'darkorchid1': (191, 62, 255),
'darkorchid2': (178, 58, 238),
'darkorchid3': (154, 50, 205),
'darkorchid4': (104, 34, 139),
'darksalmon': (233, 150, 122),
'darkseagreen': (143, 188, 143),
'darkseagreen1': (193, 255, 193),
'darkseagreen2': (180, 238, 180),
'darkseagreen3': (155, 205, 155),
'darkseagreen4': (105, 139, 105),
'darkslateblue': (72, 61, 139),
'darkslategray': (47, 79, 79),
'darkslategray1': (151, 255, 255),
'darkslategray2': (141, 238, 238),
'darkslategray3': (121, 205, 205),
'darkslategray4': (82, 139, 139),
'darkslategrey': (47, 79, 79),
'darkturquoise': (0, 206, 209),
'darkviolet': (148, 0, 211),
'deeppink': (255, 20, 147),
'deeppink1': (255, 20, 147),
'deeppink2': (238, 18, 137),
'deeppink3': (205, 16, 118),
'deeppink4': (139, 10, 80),
'deepskyblue': (0, 191, 255),
'deepskyblue1': (0, 191, 255),
'deepskyblue2': (0, 178, 238),
'deepskyblue3': (0, 154, 205),
'deepskyblue4': (0, 104, 139),
'dimgray': (105, 105, 105),
'dimgrey': (105, 105, 105),
'dodgerblue': (30, 144, 255),
'dodgerblue1': (30, 144, 255),
'dodgerblue2': (28, 134, 238),
'dodgerblue3': (24, 116, 205),
'dodgerblue4': (16, 78, 139),
'firebrick': (178, 34, 34),
'firebrick1': (255, 48, 48),
'firebrick2': (238, 44, 44),
'firebrick3': (205, 38, 38),
'firebrick4': (139, 26, 26),
'floralwhite': (255, 250, 240),
'forestgreen': (34, 139, 34),
'gainsboro': (220, 220, 220),
'ghostwhite': (248, 248, 255),
'gold': (255, 215, 0),
'gold1': (255, 215, 0),
'gold2': (238, 201, 0),
'gold3': (205, 173, 0),
'gold4': (139, 117, 0),
'goldenrod': (218, 165, 32),
'goldenrod1': (255, 193, 37),
'goldenrod2': (238, 180, 34),
'goldenrod3': (205, 155, 29),
'goldenrod4': (139, 105, 20),
'gray': (192, 192, 192),
'gray0': (0, 0, 0),
'gray1': (3, 3, 3),
'gray10': (26, 26, 26),
'gray100': (255, 255, 255),
'gray11': (28, 28, 28),
'gray12': (31, 31, 31),
'gray13': (33, 33, 33),
'gray14': (36, 36, 36),
'gray15': (38, 38, 38),
'gray16': (41, 41, 41),
'gray17': (43, 43, 43),
'gray18': (46, 46, 46),
'gray19': (48, 48, 48),
'gray2': (5, 5, 5),
'gray20': (51, 51, 51),
'gray21': (54, 54, 54),
'gray22': (56, 56, 56),
'gray23': (59, 59, 59),
'gray24': (61, 61, 61),
'gray25': (64, 64, 64),
'gray26': (66, 66, 66),
'gray27': (69, 69, 69),
'gray28': (71, 71, 71),
'gray29': (74, 74, 74),
'gray3': (8, 8, 8),
'gray30': (77, 77, 77),
'gray31': (79, 79, 79),
'gray32': (82, 82, 82),
'gray33': (84, 84, 84),
'gray34': (87, 87, 87),
'gray35': (89, 89, 89),
'gray36': (92, 92, 92),
'gray37': (94, 94, 94),
'gray38': (97, 97, 97),
'gray39': (99, 99, 99),
'gray4': (10, 10, 10),
'gray40': (102, 102, 102),
'gray41': (105, 105, 105),
'gray42': (107, 107, 107),
'gray43': (110, 110, 110),
'gray44': (112, 112, 112),
'gray45': (115, 115, 115),
'gray46': (117, 117, 117),
'gray47': (120, 120, 120),
'gray48': (122, 122, 122),
'gray49': (125, 125, 125),
'gray5': (13, 13, 13),
'gray50': (127, 127, 127),
'gray51': (130, 130, 130),
'gray52': (133, 133, 133),
'gray53': (135, 135, 135),
'gray54': (138, 138, 138),
'gray55': (140, 140, 140),
'gray56': (143, 143, 143),
'gray57': (145, 145, 145),
'gray58': (148, 148, 148),
'gray59': (150, 150, 150),
'gray6': (15, 15, 15),
'gray60': (153, 153, 153),
'gray61': (156, 156, 156),
'gray62': (158, 158, 158),
'gray63': (161, 161, 161),
'gray64': (163, 163, 163),
'gray65': (166, 166, 166),
'gray66': (168, 168, 168),
'gray67': (171, 171, 171),
'gray68': (173, 173, 173),
'gray69': (176, 176, 176),
'gray7': (18, 18, 18),
'gray70': (179, 179, 179),
'gray71': (181, 181, 181),
'gray72': (184, 184, 184),
'gray73': (186, 186, 186),
'gray74': (189, 189, 189),
'gray75': (191, 191, 191),
'gray76': (194, 194, 194),
'gray77': (196, 196, 196),
'gray78': (199, 199, 199),
'gray79': (201, 201, 201),
'gray8': (20, 20, 20),
'gray80': (204, 204, 204),
'gray81': (207, 207, 207),
'gray82': (209, 209, 209),
'gray83': (212, 212, 212),
'gray84': (214, 214, 214),
'gray85': (217, 217, 217),
'gray86': (219, 219, 219),
'gray87': (222, 222, 222),
'gray88': (224, 224, 224),
'gray89': (227, 227, 227),
'gray9': (23, 23, 23),
'gray90': (229, 229, 229),
'gray91': (232, 232, 232),
'gray92': (235, 235, 235),
'gray93': (237, 237, 237),
'gray94': (240, 240, 240),
'gray95': (242, 242, 242),
'gray96': (245, 245, 245),
'gray97': (247, 247, 247),
'gray98': (250, 250, 250),
'gray99': (252, 252, 252),
'green': (0, 255, 0),
'green1': (0, 255, 0),
'green2': (0, 238, 0),
'green3': (0, 205, 0),
'green4': (0, 139, 0),
'greenyellow': (173, 255, 47),
'grey': (192, 192, 192),
'grey0': (0, 0, 0),
'grey1': (3, 3, 3),
'grey10': (26, 26, 26),
'grey100': (255, 255, 255),
'grey11': (28, 28, 28),
'grey12': (31, 31, 31),
'grey13': (33, 33, 33),
'grey14': (36, 36, 36),
'grey15': (38, 38, 38),
'grey16': (41, 41, 41),
'grey17': (43, 43, 43),
'grey18': (46, 46, 46),
'grey19': (48, 48, 48),
'grey2': (5, 5, 5),
'grey20': (51, 51, 51),
'grey21': (54, 54, 54),
'grey22': (56, 56, 56),
'grey23': (59, 59, 59),
'grey24': (61, 61, 61),
'grey25': (64, 64, 64),
'grey26': (66, 66, 66),
'grey27': (69, 69, 69),
'grey28': (71, 71, 71),
'grey29': (74, 74, 74),
'grey3': (8, 8, 8),
'grey30': (77, 77, 77),
'grey31': (79, 79, 79),
'grey32': (82, 82, 82),
'grey33': (84, 84, 84),
'grey34': (87, 87, 87),
'grey35': (89, 89, 89),
'grey36': (92, 92, 92),
'grey37': (94, 94, 94),
'grey38': (97, 97, 97),
'grey39': (99, 99, 99),
'grey4': (10, 10, 10),
'grey40': (102, 102, 102),
'grey41': (105, 105, 105),
'grey42': (107, 107, 107),
'grey43': (110, 110, 110),
'grey44': (112, 112, 112),
'grey45': (115, 115, 115),
'grey46': (117, 117, 117),
'grey47': (120, 120, 120),
'grey48': (122, 122, 122),
'grey49': (125, 125, 125),
'grey5': (13, 13, 13),
'grey50': (127, 127, 127),
'grey51': (130, 130, 130),
'grey52': (133, 133, 133),
'grey53': (135, 135, 135),
'grey54': (138, 138, 138),
'grey55': (140, 140, 140),
'grey56': (143, 143, 143),
'grey57': (145, 145, 145),
'grey58': (148, 148, 148),
'grey59': (150, 150, 150),
'grey6': (15, 15, 15),
'grey60': (153, 153, 153),
'grey61': (156, 156, 156),
'grey62': (158, 158, 158),
'grey63': (161, 161, 161),
'grey64': (163, 163, 163),
'grey65': (166, 166, 166),
'grey66': (168, 168, 168),
'grey67': (171, 171, 171),
'grey68': (173, 173, 173),
'grey69': (176, 176, 176),
'grey7': (18, 18, 18),
'grey70': (179, 179, 179),
'grey71': (181, 181, 181),
'grey72': (184, 184, 184),
'grey73': (186, 186, 186),
'grey74': (189, 189, 189),
'grey75': (191, 191, 191),
'grey76': (194, 194, 194),
'grey77': (196, 196, 196),
'grey78': (199, 199, 199),
'grey79': (201, 201, 201),
'grey8': (20, 20, 20),
'grey80': (204, 204, 204),
'grey81': (207, 207, 207),
'grey82': (209, 209, 209),
'grey83': (212, 212, 212),
'grey84': (214, 214, 214),
'grey85': (217, 217, 217),
'grey86': (219, 219, 219),
'grey87': (222, 222, 222),
'grey88': (224, 224, 224),
'grey89': (227, 227, 227),
'grey9': (23, 23, 23),
'grey90': (229, 229, 229),
'grey91': (232, 232, 232),
'grey92': (235, 235, 235),
'grey93': (237, 237, 237),
'grey94': (240, 240, 240),
'grey95': (242, 242, 242),
'grey96': (245, 245, 245),
'grey97': (247, 247, 247),
'grey98': (250, 250, 250),
'grey99': (252, 252, 252),
'honeydew': (240, 255, 240),
'honeydew1': (240, 255, 240),
'honeydew2': (224, 238, 224),
'honeydew3': (193, 205, 193),
'honeydew4': (131, 139, 131),
'hotpink': (255, 105, 180),
'hotpink1': (255, 110, 180),
'hotpink2': (238, 106, 167),
'hotpink3': (205, 96, 144),
'hotpink4': (139, 58, 98),
'indianred': (205, 92, 92),
'indianred1': (255, 106, 106),
'indianred2': (238, 99, 99),
'indianred3': (205, 85, 85),
'indianred4': (139, 58, 58),
'indigo': (75, 0, 130),
'invis': (255, 255, 254),
'ivory': (255, 255, 240),
'ivory1': (255, 255, 240),
'ivory2': (238, 238, 224),
'ivory3': (205, 205, 193),
'ivory4': (139, 139, 131),
'khaki': (240, 230, 140),
'khaki1': (255, 246, 143),
'khaki2': (238, 230, 133),
'khaki3': (205, 198, 115),
'khaki4': (139, 134, 78),
'lavender': (230, 230, 250),
'lavenderblush': (255, 240, 245),
'lavenderblush1': (255, 240, 245),
'lavenderblush2': (238, 224, 229),
'lavenderblush3': (205, 193, 197),
'lavenderblush4': (139, 131, 134),
'lawngreen': (124, 252, 0),
'lemonchiffon': (255, 250, 205),
'lemonchiffon1': (255, 250, 205),
'lemonchiffon2': (238, 233, 191),
'lemonchiffon3': (205, 201, 165),
'lemonchiffon4': (139, 137, 112),
'lightblue': (173, 216, 230),
'lightblue1': (191, 239, 255),
'lightblue2': (178, 223, 238),
'lightblue3': (154, 192, 205),
'lightblue4': (104, 131, 139),
'lightcoral': (240, 128, 128),
'lightcyan': (224, 255, 255),
'lightcyan1': (224, 255, 255),
'lightcyan2': (209, 238, 238),
'lightcyan3': (180, 205, 205),
'lightcyan4': (122, 139, 139),
'lightgoldenrod': (238, 221, 130),
'lightgoldenrod1': (255, 236, 139),
'lightgoldenrod2': (238, 220, 130),
'lightgoldenrod3': (205, 190, 112),
'lightgoldenrod4': (139, 129, 76),
'lightgoldenrodyellow': (250, 250, 210),
'lightgray': (211, 211, 211),
'lightgrey': (211, 211, 211),
'lightpink': (255, 182, 193),
'lightpink1': (255, 174, 185),
'lightpink2': (238, 162, 173),
'lightpink3': (205, 140, 149),
'lightpink4': (139, 95, 101),
'lightsalmon': (255, 160, 122),
'lightsalmon1': (255, 160, 122),
'lightsalmon2': (238, 149, 114),
'lightsalmon3': (205, 129, 98),
'lightsalmon4': (139, 87, 66),
'lightseagreen': (32, 178, 170),
'lightskyblue': (135, 206, 250),
'lightskyblue1': (176, 226, 255),
'lightskyblue2': (164, 211, 238),
'lightskyblue3': (141, 182, 205),
'lightskyblue4': (96, 123, 139),
'lightslateblue': (132, 112, 255),
'lightslategray': (119, 136, 153),
'lightslategrey': (119, 136, 153),
'lightsteelblue': (176, 196, 222),
'lightsteelblue1': (202, 225, 255),
'lightsteelblue2': (188, 210, 238),
'lightsteelblue3': (162, 181, 205),
'lightsteelblue4': (110, 123, 139),
'lightyellow': (255, 255, 224),
'lightyellow1': (255, 255, 224),
'lightyellow2': (238, 238, 209),
'lightyellow3': (205, 205, 180),
'lightyellow4': (139, 139, 122),
'limegreen': (50, 205, 50),
'linen': (250, 240, 230),
'magenta': (255, 0, 255),
'magenta1': (255, 0, 255),
'magenta2': (238, 0, 238),
'magenta3': (205, 0, 205),
'magenta4': (139, 0, 139),
'maroon': (176, 48, 96),
'maroon1': (255, 52, 179),
'maroon2': (238, 48, 167),
'maroon3': (205, 41, 144),
'maroon4': (139, 28, 98),
'mediumaquamarine': (102, 205, 170),
'mediumblue': (0, 0, 205),
'mediumorchid': (186, 85, 211),
'mediumorchid1': (224, 102, 255),
'mediumorchid2': (209, 95, 238),
'mediumorchid3': (180, 82, 205),
'mediumorchid4': (122, 55, 139),
'mediumpurple': (147, 112, 219),
'mediumpurple1': (171, 130, 255),
'mediumpurple2': (159, 121, 238),
'mediumpurple3': (137, 104, 205),
'mediumpurple4': (93, 71, 139),
'mediumseagreen': (60, 179, 113),
'mediumslateblue': (123, 104, 238),
'mediumspringgreen': (0, 250, 154),
'mediumturquoise': (72, 209, 204),
'mediumvioletred': (199, 21, 133),
'midnightblue': (25, 25, 112),
'mintcream': (245, 255, 250),
'mistyrose': (255, 228, 225),
'mistyrose1': (255, 228, 225),
'mistyrose2': (238, 213, 210),
'mistyrose3': (205, 183, 181),
'mistyrose4': (139, 125, 123),
'moccasin': (255, 228, 181),
'navajowhite': (255, 222, 173),
'navajowhite1': (255, 222, 173),
'navajowhite2': (238, 207, 161),
'navajowhite3': (205, 179, 139),
'navajowhite4': (139, 121, 94),
'navy': (0, 0, 128),
'navyblue': (0, 0, 128),
'none': (255, 255, 254),
'oldlace': (253, 245, 230),
'olivedrab': (107, 142, 35),
'olivedrab1': (192, 255, 62),
'olivedrab2': (179, 238, 58),
'olivedrab3': (154, 205, 50),
'olivedrab4': (105, 139, 34),
'orange': (255, 165, 0),
'orange1': (255, 165, 0),
'orange2': (238, 154, 0),
'orange3': (205, 133, 0),
'orange4': (139, 90, 0),
'orangered': (255, 69, 0),
'orangered1': (255, 69, 0),
'orangered2': (238, 64, 0),
'orangered3': (205, 55, 0),
'orangered4': (139, 37, 0),
'orchid': (218, 112, 214),
'orchid1': (255, 131, 250),
'orchid2': (238, 122, 233),
'orchid3': (205, 105, 201),
'orchid4': (139, 71, 137),
'palegoldenrod': (238, 232, 170),
'palegreen': (152, 251, 152),
'palegreen1': (154, 255, 154),
'palegreen2': (144, 238, 144),
'palegreen3': (124, 205, 124),
'palegreen4': (84, 139, 84),
'paleturquoise': (175, 238, 238),
'paleturquoise1': (187, 255, 255),
'paleturquoise2': (174, 238, 238),
'paleturquoise3': (150, 205, 205),
'paleturquoise4': (102, 139, 139),
'palevioletred': (219, 112, 147),
'palevioletred1': (255, 130, 171),
'palevioletred2': (238, 121, 159),
'palevioletred3': (205, 104, 137),
'palevioletred4': (139, 71, 93),
'papayawhip': (255, 239, 213),
'peachpuff': (255, 218, 185),
'peachpuff1': (255, 218, 185),
'peachpuff2': (238, 203, 173),
'peachpuff3': (205, 175, 149),
'peachpuff4': (139, 119, 101),
'peru': (205, 133, 63),
'pink': (255, 192, 203),
'pink1': (255, 181, 197),
'pink2': (238, 169, 184),
'pink3': (205, 145, 158),
'pink4': (139, 99, 108),
'plum': (221, 160, 221),
'plum1': (255, 187, 255),
'plum2': (238, 174, 238),
'plum3': (205, 150, 205),
'plum4': (139, 102, 139),
'powderblue': (176, 224, 230),
'purple': (160, 32, 240),
'purple1': (155, 48, 255),
'purple2': (145, 44, 238),
'purple3': (125, 38, 205),
'purple4': (85, 26, 139),
'red': (255, 0, 0),
'red1': (255, 0, 0),
'red2': (238, 0, 0),
'red3': (205, 0, 0),
'red4': (139, 0, 0),
'rosybrown': (188, 143, 143),
'rosybrown1': (255, 193, 193),
'rosybrown2': (238, 180, 180),
'rosybrown3': (205, 155, 155),
'rosybrown4': (139, 105, 105),
'royalblue': (65, 105, 225),
'royalblue1': (72, 118, 255),
'royalblue2': (67, 110, 238),
'royalblue3': (58, 95, 205),
'royalblue4': (39, 64, 139),
'saddlebrown': (139, 69, 19),
'salmon': (250, 128, 114),
'salmon1': (255, 140, 105),
'salmon2': (238, 130, 98),
'salmon3': (205, 112, 84),
'salmon4': (139, 76, 57),
'sandybrown': (244, 164, 96),
'seagreen': (46, 139, 87),
'seagreen1': (84, 255, 159),
'seagreen2': (78, 238, 148),
'seagreen3': (67, 205, 128),
'seagreen4': (46, 139, 87),
'seashell': (255, 245, 238),
'seashell1': (255, 245, 238),
'seashell2': (238, 229, 222),
'seashell3': (205, 197, 191),
'seashell4': (139, 134, 130),
'sienna': (160, 82, 45),
'sienna1': (255, 130, 71),
'sienna2': (238, 121, 66),
'sienna3': (205, 104, 57),
'sienna4': (139, 71, 38),
'skyblue': (135, 206, 235),
'skyblue1': (135, 206, 255),
'skyblue2': (126, 192, 238),
'skyblue3': (108, 166, 205),
'skyblue4': (74, 112, 139),
'slateblue': (106, 90, 205),
'slateblue1': (131, 111, 255),
'slateblue2': (122, 103, 238),
'slateblue3': (105, 89, 205),
'slateblue4': (71, 60, 139),
'slategray': (112, 128, 144),
'slategray1': (198, 226, 255),
'slategray2': (185, 211, 238),
'slategray3': (159, 182, 205),
'slategray4': (108, 123, 139),
'slategrey': (112, 128, 144),
'snow': (255, 250, 250),
'snow1': (255, 250, 250),
'snow2': (238, 233, 233),
'snow3': (205, 201, 201),
'snow4': (139, 137, 137),
'springgreen': (0, 255, 127),
'springgreen1': (0, 255, 127),
'springgreen2': (0, 238, 118),
'springgreen3': (0, 205, 102),
'springgreen4': (0, 139, 69),
'steelblue': (70, 130, 180),
'steelblue1': (99, 184, 255),
'steelblue2': (92, 172, 238),
'steelblue3': (79, 148, 205),
'steelblue4': (54, 100, 139),
'tan': (210, 180, 140),
'tan1': (255, 165, 79),
'tan2': (238, 154, 73),
'tan3': (205, 133, 63),
'tan4': (139, 90, 43),
'thistle': (216, 191, 216),
'thistle1': (255, 225, 255),
'thistle2': (238, 210, 238),
'thistle3': (205, 181, 205),
'thistle4': (139, 123, 139),
'tomato': (255, 99, 71),
'tomato1': (255, 99, 71),
'tomato2': (238, 92, 66),
'tomato3': (205, 79, 57),
'tomato4': (139, 54, 38),
'transparent': (255, 255, 254),
'turquoise': (64, 224, 208),
'turquoise1': (0, 245, 255),
'turquoise2': (0, 229, 238),
'turquoise3': (0, 197, 205),
'turquoise4': (0, 134, 139),
'violet': (238, 130, 238),
'violetred': (208, 32, 144),
'violetred1': (255, 62, 150),
'violetred2': (238, 58, 140),
'violetred3': (205, 50, 120),
'violetred4': (139, 34, 82),
'wheat': (245, 222, 179),
'wheat1': (255, 231, 186),
'wheat2': (238, 216, 174),
'wheat3': (205, 186, 150),
'wheat4': (139, 126, 102),
'white': (255, 255, 255),
'whitesmoke': (245, 245, 245),
'yellow': (255, 255, 0),
'yellow1': (255, 255, 0),
'yellow2': (238, 238, 0),
'yellow3': (205, 205, 0),
'yellow4': (139, 139, 0),
'yellowgreen': (154, 205, 50),
}
re_nonword=re.compile(r'([^0-9a-zA-Z_.]+)')
re_linewidth=re.compile(r'setlinewidth\((\d+(\.\d*)?|\.\d+)\)')
def combine(color1, color2, alpha):
r1, g1, b1 = color1
r2, g2, b2 = color2
beta = 1.0 - alpha
return (int(r1 * alpha + r2 * beta),
int(g1 * alpha + g2 * beta),
int(b1 * alpha + b2 * beta))
def highlight_color(color):
if color == (0, 0, 0): # black becomes magenta
return (255, 0, 255)
elif color == (255, 255, 255): # white becomes yellow
return (255, 255, 0)
intensity = sum(color)
if intensity > 191 * 3:
return combine(color, (128, 192, 0), 0.2)
else:
return combine(color, (255, 255, 0), 0.2)
def getcolor(name, default):
if name in COLOR:
return COLOR[name]
elif name.startswith('#') and len(name) == 7:
rval = COLOR[name] = (int(name[1:3],16), int(name[3:5],16), int(name[5:7],16))
return rval
else:
return default
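
# Worked examples (added, illustrative):
#   combine((255, 0, 0), (0, 0, 255), 0.5)   -> (127, 0, 127)
#   getcolor('#ff8000', (0, 0, 0))           -> (255, 128, 0)
#   getcolor('no-such-color', (0, 0, 0))     -> (0, 0, 0)  (the default)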
class GraphLayout:
fixedfont = False
def __init__(self, scale, width, height):
self.scale = scale
self.boundingbox = width, height
self.nodes = {}
self.edges = []
self.links = {}
def add_node(self, *args):
n = Node(*args)
self.nodes[n.name] = n
def add_edge(self, *args):
self.edges.append(Edge(self.nodes, *args))
def get_display(self):
from graphdisplay import GraphDisplay
return GraphDisplay(self)
def display(self):
self.get_display().run()
def reload(self):
return self
# async interaction helpers
def display_async_quit():
pygame.event.post(pygame.event.Event(QUIT))
def display_async_cmd(**kwds):
pygame.event.post(pygame.event.Event(USEREVENT, **kwds))
EventQueue = []
def wait_for_events():
if not EventQueue:
EventQueue.append(pygame.event.wait())
EventQueue.extend(pygame.event.get())
def wait_for_async_cmd():
# wait until another thread pushes a USEREVENT in the queue
while True:
wait_for_events()
e = EventQueue.pop(0)
if e.type in (USEREVENT, QUIT): # discard all other events
break
EventQueue.insert(0, e) # re-insert the event for further processing
class Node:
def __init__(self, name, x, y, w, h, label, style, shape, color, fillcolor):
self.name = forceunicode(name)
self.x = float(x)
self.y = float(y)
self.w = float(w)
self.h = float(h)
self.label = forceunicode(label)
self.style = style
self.shape = shape
self.color = color
self.fillcolor = fillcolor
self.highlight = False
def sethighlight(self, which):
self.highlight = bool(which)
class Edge:
label = None
def __init__(self, nodes, tail, head, cnt, *rest):
self.tail = nodes[forceunicode(tail)]
self.head = nodes[forceunicode(head)]
cnt = int(cnt)
self.points = [(float(rest[i]), float(rest[i+1]))
for i in range(0, cnt*2, 2)]
rest = rest[cnt*2:]
if len(rest) > 2:
self.label, xl, yl = rest[:3]
self.xl = float(xl)
self.yl = float(yl)
rest = rest[3:]
self.style, self.color = rest
linematch = re_linewidth.match(self.style)
if linematch:
num = linematch.group(1)
self.linewidth = int(round(float(num)))
self.style = self.style[linematch.end(0):]
else:
self.linewidth = 1
self.highlight = False
self.cachedbezierpoints = None
self.cachedarrowhead = None
self.cachedlimits = None
def sethighlight(self, which):
self.highlight = bool(which)
def limits(self):
result = self.cachedlimits
if result is None:
points = self.bezierpoints()
xs = [point[0] for point in points]
ys = [point[1] for point in points]
self.cachedlimits = result = (min(xs), max(ys), max(xs), min(ys))
return result
def bezierpoints(self):
result = self.cachedbezierpoints
if result is None:
result = []
pts = self.points
for i in range(0, len(pts)-3, 3):
result += beziercurve(pts[i], pts[i+1], pts[i+2], pts[i+3])
self.cachedbezierpoints = result
return result
def arrowhead(self):
result = self.cachedarrowhead
if result is None:
# we don't know if the list of points is in the right order
# or not :-( try to guess...
def dist(node, pt):
return abs(node.x - pt[0]) + abs(node.y - pt[1])
error_if_direct = (dist(self.head, self.points[-1]) +
dist(self.tail, self.points[0]))
error_if_reversed = (dist(self.tail, self.points[-1]) +
dist(self.head, self.points[0]))
if error_if_direct > error_if_reversed: # reversed edge
head = 0
dir = 1
else:
head = -1
dir = -1
n = 1
while True:
try:
x0, y0 = self.points[head]
x1, y1 = self.points[head+n*dir]
except IndexError:
result = []
break
vx = x0-x1
vy = y0-y1
try:
f = 0.12 / math.sqrt(vx*vx + vy*vy)
vx *= f
vy *= f
result = [(x0 + 0.9*vx, y0 + 0.9*vy),
(x0 + 0.4*vy, y0 - 0.4*vx),
(x0 - 0.4*vy, y0 + 0.4*vx)]
break
except (ZeroDivisionError, ValueError):
n += 1
self.cachedarrowhead = result
return result
def beziercurve((x0,y0), (x1,y1), (x2,y2), (x3,y3), resolution=8):
result = []
f = 1.0/(resolution-1)
append = result.append
for i in range(resolution):
t = f*i
t0 = (1-t)*(1-t)*(1-t)
t1 = t *(1-t)*(1-t) * 3.0
t2 = t * t *(1-t) * 3.0
t3 = t * t * t
append((x0*t0 + x1*t1 + x2*t2 + x3*t3,
y0*t0 + y1*t1 + y2*t2 + y3*t3))
return result
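# Sanity check of the cubic Bernstein weights above (t0+t1+t2+t3 == 1 for any t),
# shown as an illustrative example rather than part of the original module:
# with collinear control points the curve degenerates to the straight line, e.g.
#   beziercurve((0,0), (1,1), (2,2), (3,3), resolution=3)
#   => [(0.0, 0.0), (1.5, 1.5), (3.0, 3.0)]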
def segmentdistance((x0,y0), (x1,y1), (x,y)):
"Distance between the point (x,y) and the segment (x0,y0)-(x1,y1)."
vx = x1-x0
vy = y1-y0
try:
l = math.hypot(vx, vy)
vx /= l
vy /= l
dlong = vx*(x-x0) + vy*(y-y0)
except (ZeroDivisionError, ValueError):
dlong = -1
if dlong < 0.0:
return math.hypot(x-x0, y-y0)
elif dlong > l:
return math.hypot(x-x1, y-y1)
else:
return abs(vy*(x-x0) - vx*(y-y0))
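# Illustrative examples (not in the original source): for the horizontal
# segment (0,0)-(10,0), a point over its interior is measured perpendicularly,
#   segmentdistance((0,0), (10,0), (5,3))  => 3.0
# while a point past an endpoint falls back to the point-to-endpoint distance,
#   segmentdistance((0,0), (10,0), (13,4)) => 5.0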
class GraphRenderer:
MARGIN = 0.6
SCALEMIN = 3
SCALEMAX = 100
FONTCACHE = {}
def __init__(self, screen, graphlayout, scale=75):
self.graphlayout = graphlayout
self.setscale(scale)
self.setoffset(0, 0)
self.screen = screen
self.textzones = []
self.highlightwords = graphlayout.links
self.highlight_word = None
self.visiblenodes = []
self.visibleedges = []
def wordcolor(self, word):
info = self.highlightwords[word]
if isinstance(info, tuple) and len(info) >= 2:
color = info[1]
else:
color = None
if color is None:
color = (128,0,0)
if word == self.highlight_word:
return ((255,255,80), color)
else:
return (color, None)
def setscale(self, scale):
scale = max(min(scale, self.SCALEMAX), self.SCALEMIN)
self.scale = float(scale)
w, h = self.graphlayout.boundingbox
self.margin = int(self.MARGIN * scale)
self.width = int(w * scale) + (2 * self.margin)
self.height = int(h * scale) + (2 * self.margin)
self.bboxh = h
size = int(15 * (scale-10) / 75)
self.font = self.getfont(size)
def getfont(self, size):
if size in self.FONTCACHE:
return self.FONTCACHE[size]
elif size < 5:
self.FONTCACHE[size] = None
return None
else:
if self.graphlayout.fixedfont:
filename = FIXEDFONT
else:
filename = FONT
font = self.FONTCACHE[size] = pygame.font.Font(filename, size)
return font
def setoffset(self, offsetx, offsety):
"Set the (x,y) origin of the rectangle where the graph will be rendered."
self.ofsx = offsetx - self.margin
self.ofsy = offsety - self.margin
def shiftoffset(self, dx, dy):
self.ofsx += dx
self.ofsy += dy
def getcenter(self):
w, h = self.screen.get_size()
return self.revmap(w//2, h//2)
def setcenter(self, x, y):
w, h = self.screen.get_size()
x, y = self.map(x, y)
self.shiftoffset(x-w//2, y-h//2)
def shiftscale(self, factor, fix=None):
if fix is None:
fixx, fixy = self.screen.get_size()
fixx //= 2
fixy //= 2
else:
fixx, fixy = fix
x, y = self.revmap(fixx, fixy)
self.setscale(self.scale * factor)
newx, newy = self.map(x, y)
self.shiftoffset(newx - fixx, newy - fixy)
def reoffset(self, swidth, sheight):
offsetx = noffsetx = self.ofsx
offsety = noffsety = self.ofsy
width = self.width
height = self.height
# if it fits, center it, otherwise clamp
if width <= swidth:
noffsetx = (width - swidth) // 2
else:
noffsetx = min(max(0, offsetx), width - swidth)
if height <= sheight:
noffsety = (height - sheight) // 2
else:
noffsety = min(max(0, offsety), height - sheight)
self.ofsx = noffsetx
self.ofsy = noffsety
def getboundingbox(self):
"Get the rectangle where the graph will be rendered."
return (-self.ofsx, -self.ofsy, self.width, self.height)
def visible(self, x1, y1, x2, y2):
"""Is any part of the box visible (i.e. within the bounding box)?
We have to perform clipping ourselves because with big graphs the
coordinates may sometimes become longs and cause OverflowErrors
within pygame.
"""
w, h = self.screen.get_size()
return x1 < w and x2 > 0 and y1 < h and y2 > 0
def computevisible(self):
del self.visiblenodes[:]
del self.visibleedges[:]
w, h = self.screen.get_size()
for node in self.graphlayout.nodes.values():
x, y = self.map(node.x, node.y)
nw2 = int(node.w * self.scale)//2
nh2 = int(node.h * self.scale)//2
if x-nw2 < w and x+nw2 > 0 and y-nh2 < h and y+nh2 > 0:
self.visiblenodes.append(node)
for edge in self.graphlayout.edges:
x1, y1, x2, y2 = edge.limits()
x1, y1 = self.map(x1, y1)
if x1 < w and y1 < h:
x2, y2 = self.map(x2, y2)
if x2 > 0 and y2 > 0:
self.visibleedges.append(edge)
def map(self, x, y):
return (int(x*self.scale) - (self.ofsx - self.margin),
int((self.bboxh-y)*self.scale) - (self.ofsy - self.margin))
def revmap(self, px, py):
return ((px + (self.ofsx - self.margin)) / self.scale,
self.bboxh - (py + (self.ofsy - self.margin)) / self.scale)
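# Added note: map()/revmap() convert between graph coordinates and screen
# pixels; the (self.bboxh - y) term flips the vertical axis, since graph y
# grows upwards while screen y grows downwards.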
def draw_node_commands(self, node):
xcenter, ycenter = self.map(node.x, node.y)
boxwidth = int(node.w * self.scale)
boxheight = int(node.h * self.scale)
fgcolor = getcolor(node.color, (0,0,0))
bgcolor = getcolor(node.fillcolor, (255,255,255))
if node.highlight:
fgcolor = highlight_color(fgcolor)
bgcolor = highlight_color(bgcolor)
text = node.label
lines = text.replace('\\l','\\l\n').replace('\r','\r\n').split('\n')
# ignore a final newline
if not lines[-1]:
del lines[-1]
wmax = 0
hmax = 0
commands = []
bkgndcommands = []
if self.font is None:
if lines:
raw_line = lines[0].replace('\\l','').replace('\r','')
if raw_line:
for size in (12, 10, 8, 6, 4):
font = self.getfont(size)
img = TextSnippet(self, raw_line, (0, 0, 0), bgcolor, font=font)
w, h = img.get_size()
if (w >= boxwidth or h >= boxheight):
continue
else:
if w>wmax: wmax = w
def cmd(img=img, y=hmax, w=w):
img.draw(xcenter-w//2, ytop+y)
commands.append(cmd)
hmax += h
break
else:
for line in lines:
raw_line = line.replace('\\l','').replace('\r','') or ' '
if '\f' in raw_line: # grayed out parts of the line
imgs = []
graytext = True
h = 16
w_total = 0
for linepart in raw_line.split('\f'):
graytext = not graytext
if not linepart.strip():
continue
if graytext:
fgcolor = (128, 160, 160)
else:
fgcolor = (0, 0, 0)
img = TextSnippet(self, linepart, fgcolor, bgcolor)
imgs.append((w_total, img))
w, h = img.get_size()
w_total += w
if w_total > wmax: wmax = w_total
def cmd(imgs=imgs, y=hmax):
for x, img in imgs:
img.draw(xleft+x, ytop+y)
commands.append(cmd)
else:
img = TextSnippet(self, raw_line, (0, 0, 0), bgcolor)
w, h = img.get_size()
if w>wmax: wmax = w
if raw_line.strip():
if line.endswith('\\l'):
def cmd(img=img, y=hmax):
img.draw(xleft, ytop+y)
elif line.endswith('\r'):
def cmd(img=img, y=hmax, w=w):
img.draw(xright-w, ytop+y)
else:
def cmd(img=img, y=hmax, w=w):
img.draw(xcenter-w//2, ytop+y)
commands.append(cmd)
hmax += h
#hmax += 8
# we know the bounding box only now; setting these variables will
# have an effect on the values seen inside the cmd() functions above
xleft = xcenter - wmax//2
xright = xcenter + wmax//2
ytop = ycenter - hmax//2
x = xcenter-boxwidth//2
y = ycenter-boxheight//2
if node.shape == 'box':
rect = (x-1, y-1, boxwidth+2, boxheight+2)
def cmd():
self.screen.fill(bgcolor, rect)
bkgndcommands.append(cmd)
def cmd():
pygame.draw.rect(self.screen, fgcolor, rect, 1)
commands.append(cmd)
elif node.shape == 'ellipse':
rect = (x-1, y-1, boxwidth+2, boxheight+2)
def cmd():
pygame.draw.ellipse(self.screen, bgcolor, rect, 0)
bkgndcommands.append(cmd)
def cmd():
pygame.draw.ellipse(self.screen, fgcolor, rect, 1)
commands.append(cmd)
elif node.shape == 'octagon':
step = 1-math.sqrt(2)/2
points = [(int(x+boxwidth*fx), int(y+boxheight*fy))
for fx, fy in [(step,0), (1-step,0),
(1,step), (1,1-step),
(1-step,1), (step,1),
(0,1-step), (0,step)]]
def cmd():
pygame.draw.polygon(self.screen, bgcolor, points, 0)
bkgndcommands.append(cmd)
def cmd():
pygame.draw.polygon(self.screen, fgcolor, points, 1)
commands.append(cmd)
return bkgndcommands, commands
def draw_commands(self):
nodebkgndcmd = []
nodecmd = []
for node in self.visiblenodes:
cmd1, cmd2 = self.draw_node_commands(node)
nodebkgndcmd += cmd1
nodecmd += cmd2
edgebodycmd = []
edgeheadcmd = []
for edge in self.visibleedges:
fgcolor = getcolor(edge.color, (0,0,0))
if edge.highlight:
fgcolor = highlight_color(fgcolor)
points = [self.map(*xy) for xy in edge.bezierpoints()]
def drawedgebody(points=points, fgcolor=fgcolor, width=edge.linewidth):
pygame.draw.lines(self.screen, fgcolor, False, points, width)
edgebodycmd.append(drawedgebody)
points = [self.map(*xy) for xy in edge.arrowhead()]
if points:
def drawedgehead(points=points, fgcolor=fgcolor):
pygame.draw.polygon(self.screen, fgcolor, points, 0)
edgeheadcmd.append(drawedgehead)
if edge.label:
x, y = self.map(edge.xl, edge.yl)
img = TextSnippet(self, edge.label, (0, 0, 0))
w, h = img.get_size()
if self.visible(x-w//2, y-h//2, x+w//2, y+h//2):
def drawedgelabel(img=img, x1=x-w//2, y1=y-h//2):
img.draw(x1, y1)
edgeheadcmd.append(drawedgelabel)
return edgebodycmd + nodebkgndcmd + edgeheadcmd + nodecmd
def render(self):
self.computevisible()
bbox = self.getboundingbox()
ox, oy, width, height = bbox
dpy_width, dpy_height = self.screen.get_size()
# some versions of the SDL misinterpret widely out-of-range values,
# so clamp them
if ox < 0:
width += ox
ox = 0
if oy < 0:
height += oy
oy = 0
if width > dpy_width:
width = dpy_width
if height > dpy_height:
height = dpy_height
self.screen.fill((224, 255, 224), (ox, oy, width, height))
# gray off-bkgnd areas
gray = (128, 128, 128)
if ox > 0:
self.screen.fill(gray, (0, 0, ox, dpy_height))
if oy > 0:
self.screen.fill(gray, (0, 0, dpy_width, oy))
w = dpy_width - (ox + width)
if w > 0:
self.screen.fill(gray, (dpy_width-w, 0, w, dpy_height))
h = dpy_height - (oy + height)
if h > 0:
self.screen.fill(gray, (0, dpy_height-h, dpy_width, h))
# draw the graph and record the position of texts
del self.textzones[:]
for cmd in self.draw_commands():
cmd()
def findall(self, searchstr):
"""Return an iterator for all nodes and edges that contain a searchstr.
"""
for item in self.graphlayout.nodes.itervalues():
if item.label and searchstr in item.label:
yield item
for item in self.graphlayout.edges:
if item.label and searchstr in item.label:
yield item
def at_position(self, (x, y)):
"""Figure out the word under the cursor."""
for rx, ry, rw, rh, word in self.textzones:
if rx <= x < rx+rw and ry <= y < ry+rh:
return word
return None
def node_at_position(self, (x, y)):
"""Return the Node under the cursor."""
x, y = self.revmap(x, y)
for node in self.visiblenodes:
if 2.0*abs(x-node.x) <= node.w and 2.0*abs(y-node.y) <= node.h:
return node
return None
def edge_at_position(self, (x, y), distmax=14):
"""Return the Edge near the cursor."""
# XXX this function is very CPU-intensive and makes the display kinda sluggish
distmax /= self.scale
xy = self.revmap(x, y)
closest_edge = None
for edge in self.visibleedges:
pts = edge.bezierpoints()
for i in range(1, len(pts)):
d = segmentdistance(pts[i-1], pts[i], xy)
if d < distmax:
distmax = d
closest_edge = edge
return closest_edge
class TextSnippet:
def __init__(self, renderer, text, fgcolor, bgcolor=None, font=None):
self.renderer = renderer
self.imgs = []
self.parts = []
if font is None:
font = renderer.font
if font is None:
return
parts = self.parts
for word in re_nonword.split(text):
if not word:
continue
if word in renderer.highlightwords:
fg, bg = renderer.wordcolor(word)
bg = bg or bgcolor
else:
fg, bg = fgcolor, bgcolor
parts.append((word, fg, bg))
# consolidate sequences of words with the same color
for i in range(len(parts)-2, -1, -1):
if parts[i][1:] == parts[i+1][1:]:
word, fg, bg = parts[i]
parts[i] = word + parts[i+1][0], fg, bg
del parts[i+1]
# delete None backgrounds
for i in range(len(parts)):
if parts[i][2] is None:
parts[i] = parts[i][:2]
# render parts
i = 0
while i < len(parts):
part = parts[i]
word = part[0]
try:
img = font.render(word, True, *part[1:])
except pygame.error:
del parts[i] # Text has zero width
else:
self.imgs.append(img)
i += 1
def get_size(self):
if self.imgs:
sizes = [img.get_size() for img in self.imgs]
return sum([w for w,h in sizes]), max([h for w,h in sizes])
else:
return 0, 0
def draw(self, x, y):
for part, img in zip(self.parts, self.imgs):
word = part[0]
self.renderer.screen.blit(img, (x, y))
w, h = img.get_size()
self.renderer.textzones.append((x, y, w, h, word))
x += w
| [
"[email protected]"
] | |
5ea079a246153c51efaee0a478cb091326370e2d | 82c50f82f9e743b93dcb581d7ec95dd5e3a44380 | /browseapp/browse_main.py | 3dabd002cc8ff02326dfbfaac5cdff7b3bd4642e | [] | no_license | ivanerill/collecTF | 4f1c7981e616989d1044db9dd308377b3e3450c6 | 541f8f3852fdc740798f4f892d8ff1ef8b2225df | refs/heads/master | 2021-01-18T12:34:56.368336 | 2013-07-15T18:07:35 | 2013-07-15T18:07:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 83 | py | from browse_site import browse_by_site
from browse_curation import browse_curation
| [
"[email protected]"
] | |
1a95d366947058c89f9419baffce0086c13280a6 | 36978086cf5f34e16ceac7c2649b49ccb4c5ac90 | /config/munin/mongodb_replset_lag | 0c2f3ed4bdbabde170d68abc6b6e9b74d14b19de | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | aragilar/NewsBlur | 04e754093cd52bc2d9957ea767747d6d604dfbba | 64ecd83bf4cea175f1bdeeb6e475fd5cadb679c9 | refs/heads/master | 2021-08-28T17:39:50.734396 | 2013-06-06T01:52:20 | 2013-06-06T01:52:37 | 10,520,281 | 0 | 0 | MIT | 2021-08-13T05:35:33 | 2013-06-06T06:26:24 | Objective-C | UTF-8 | Python | false | false | 1,790 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from munin.mongodb import MuninMongoDBPlugin
PRIMARY_STATE = 1
SECONDARY_STATE = 2
class MongoReplicaSetLag(MuninMongoDBPlugin):
vlabel = "seconds"
title = "MongoDB Replica Set Lag"
fields = [("optimeLag", {'label': "Oldest secondary lag"}), ("oplogLength", {"label": "Primary oplog length" })]
def _get_oplog_length(self):
oplog = self.connection['local'].oplog.rs
last_op = oplog.find({}, {'ts': 1}).sort([('$natural', -1)]).limit(1)[0]['ts'].time
first_op = oplog.find({}, {'ts': 1}).sort([('$natural', 1)]).limit(1)[0]['ts'].time
oplog_length = last_op - first_op
return oplog_length
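# Illustrative reading (not part of the plugin): if the oldest oplog entry is
# from 09:00:00 and the newest from 10:30:00, _get_oplog_length() returns
# 5400 seconds -- the window a secondary can lag before it can no longer
# catch up from the oplog alone and needs a full resync.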
def _get_max_replication_lag(self):
status = self.connection.admin.command('replSetGetStatus')
members = status['members']
primary_optime = None
oldest_secondary_optime = None
for member in members:
member_state = member['state']
optime = member['optime']
if member_state == PRIMARY_STATE:
primary_optime = optime.time
elif member_state == SECONDARY_STATE:
if not oldest_secondary_optime or optime.time < oldest_secondary_optime.time:
oldest_secondary_optime = optime.time
if not primary_optime or not oldest_secondary_optime:
raise Exception("Replica set is not healthy")
return primary_optime - oldest_secondary_optime
def execute(self):
oplog_length = self._get_oplog_length()
replication_lag = self._get_max_replication_lag()
return {
"optimeLag": replication_lag,
"oplogLength": oplog_length
}
if __name__ == "__main__":
MongoReplicaSetLag().run()
| [
"[email protected]"
] | ||
4680394e14442b9e016dc3834172a4f40eede73b | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/63/usersdata/239/32114/submittedfiles/swamee.py | aac7bfcabaa9fa811d2304d01539530e593c0d46 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 506 | py | # -*- coding: utf-8 -*-
import math
# START YOUR CODE HERE
f = float(input("Digite aqui o valor de f: "))
l = float(input("Digite aqui o valor de l: "))
q = float(input("Digite aqui o valor de q: "))
DH = float(input("Digite aqui o valor de Delta H: "))
v = float(input("Digite aqui o valor de v: "))
g = 9.81
e = 0.000002
pi = 3.14
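# Assuming the standard Darcy-Weisbach rearrangement and the Swamee-Jain
# friction factor (the usual reading of this exercise):
#   D   = (8*f*L*Q**2 / (pi**2 * g * DH)) ** (1/5)
#   Rey = 4*Q / (pi * D * v)
#   k   = 0.25 / (log10(e/(3.7*D) + 5.74/Rey**0.9)) ** 2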
D = ((8*f*l*q**2)/(pi**2*g*DH))**0.2
print("D=%.4f"%D)
Rey = (4*q)/(pi*D*v)
print("Rey=%.4f"%Rey)
k = 0.25/((math.log10(e/(3.7*D) + 5.74/(Rey**0.9)))**2)
print("k=%.4f"%k)
| [
"[email protected]"
] | |
dbf52a834f34fa4f3c3318bcab831ea4e23f15a0 | 2748d523c4ced916b61e8f2a0ebd6c7237705f69 | /core/forms.py | 8d56e99d1aec9c278839b44ba66ef4cdee9daa37 | [] | no_license | Titowisk/meubaz | 52d5101bc107081c7175f27bb538efc6fecf5b24 | 1af586195123ffd13818695cff8cc286018a1c7b | refs/heads/master | 2021-08-23T16:47:54.960522 | 2017-12-05T19:02:09 | 2017-12-05T19:02:09 | 106,558,018 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,337 | py | from django import forms
from django.conf import settings
from django.core.mail import send_mail
class ContactForm(forms.Form):
name = forms.CharField(label="Nome", max_length=100)
email = forms.EmailField(label="E-mail")
message = forms.CharField(label="Mensagem", widget=forms.Textarea)
def send_mail(self):
name = self.cleaned_data['name']
email = self.cleaned_data['email']
message = self.cleaned_data['message']
message = "Nome: {0}\nEmail: {1}\nMensagem: {2}".format(name, email, message)
send_mail(
subject='Contato do MeuBaz',
message=message,
from_email=settings.DEFAULT_FROM_EMAIL,
recipient_list=[settings.DEFAULT_FROM_EMAIL]
)
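# Illustrative usage from a view (names are assumptions, not project code):
#   form = ContactForm(request.POST)
#   if form.is_valid():
#       form.send_mail()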
"""
def __init__(self, *args, **kwargs):
super(ContactForm, self).__init__(*args, **kwargs)
self.fields['name'].widget.attrs['class'] = 'form-control'
self.fields['email'].widget.attrs['class'] = 'form-control'
self.fields['message'].widget.attrs['class'] = 'form-control'
self.fields['message'].widget.attrs['rows'] = '4'
"""
# self.fields['name'] accesses the field, which renders as
#<input type="text" name="name" maxlength="100" required="" id="id_name">
# .widget.attrs['...'] = '...' sets HTML attributes on that widget
"[email protected]"
] | |
6fd67b6b693b6c301e3654e90e09871256f29eb2 | 7b09d131ba09c3ef5c7658eeea9075e0b4a7ec5a | /updateBlynk.py | 5d9c051f86d7db79d4f3dad8d5b0e5c7ba8a9dfd | [] | no_license | deepcore2/SDL_Pi_SkyWeather | 38afe43466fbf078629966504e010c5fe2fafb5e | 492d1df40b49a2896280cc1ddfb64e98d38e2045 | refs/heads/master | 2022-02-20T13:41:59.418699 | 2019-09-30T01:30:03 | 2019-09-30T01:30:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,691 | py |
# provides routine to update SGS Blynk Display
import time
import requests
import json
import util
import state
import traceback
# Check for user imports
try:
import conflocal as config
except ImportError:
import config
DEBUGBLYNK = False
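# Blynk HTTP REST endpoints used throughout this module (as exercised by the
# requests calls below):
#   GET {BLYNK_URL}{AUTH}/update/V0?value=42        write 42 to virtual pin V0
#   PUT {BLYNK_URL}{AUTH}/update/V0  body ["42"]    same write, JSON-encoded
#   GET {BLYNK_URL}{AUTH}/get/V5                    read back virtual pin V5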
def stopFlash():
r = requests.get(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V30?value=0')
def blynkInit():
# initalize button states
try:
if (DEBUGBLYNK):
print "Entering blynkInit:"
r = requests.get(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V5?value=0')
if (state.runOLED == True):
r = requests.get(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V6?value=1')
else:
r = requests.get(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V6?value=0')
r = requests.get(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V30?value=0')
# initialize LEDs
r = requests.get(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V42?value=255')
r = requests.get(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V43?value=255')
# read english Metric in from file
try:
f = open("/home/pi/SDL_Pi_SkyWeather/state/EnglishMetric.txt", "r")
value = int(f.read())
f.close()
except Exception as e:
value = 0
#print "initial state - no EnglishMetric.txt value=", value
f1 = open("/home/pi/SDL_Pi_SkyWeather/state/EnglishMetric.txt", "w")
f1.write("0")
f1.close()
state.EnglishMetric = value
if (DEBUGBLYNK):
print "state.EnglishMetric = ", value
if (state.EnglishMetric == 0):
r = requests.get(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V8?value=0')
else:
r = requests.get(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V8?value=1')
if (DEBUGBLYNK):
print "Exiting blynkInit:"
except Exception as e:
print "exception in blynkInit"
print (e)
return 0
def blynkResetButton(buttonNumber):
try:
r = requests.get(config.BLYNK_URL+config.BLYNK_AUTH+'/update/'+buttonNumber+'?value=0')
except Exception as e:
print "exception in blynkResetButton"
print (e)
return 0
def blynkEventUpdate(Event):
try:
put_header={"Content-Type": "application/json"}
val = Event
put_body = json.dumps([val])
if (DEBUGBLYNK):
print "blynkEventUpdate:",val
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V31', data=put_body, headers=put_header)
if (DEBUGBLYNK):
print "blynkEventUpdate:POST:r.status_code:",r.status_code
return 1
except Exception as e:
print "exception in blynkEventUpdate"
print (e)
return 0
def blynkStatusTerminalUpdate(entry):
try:
put_header={"Content-Type": "application/json"}
entry = time.strftime("%Y-%m-%d %H:%M:%S")+": "+entry+"\n"
put_body = json.dumps([entry])
if (DEBUGBLYNK):
print "blynkStateUpdate:Pre:put_body:",put_body
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V32', data=put_body, headers=put_header)
if (DEBUGBLYNK):
print "blynkStateUpdate:POST:r.status_code:",r.status_code
except Exception as e:
print "exception in blynkTerminalUpdate"
print (e)
return 0
def blynkSolarTerminalUpdate(entry):
try:
put_header={"Content-Type": "application/json"}
entry = time.strftime("%Y-%m-%d %H:%M:%S")+": "+entry+"\n"
put_body = json.dumps([entry])
if (DEBUGBLYNK):
print "blynkStateUpdate:Pre:put_body:",put_body
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V33', data=put_body, headers=put_header)
if (DEBUGBLYNK):
print "blynkStateUpdate:POST:r.status_code:",r.status_code
except Exception as e:
print "exception in blynkTerminalUpdate"
print (e)
return 0
def blynkUpdateImage():
#Blynk.setProperty(V1, "urls", "https://image1.jpg", "https://image2.jpg");
try:
if (DEBUGBLYNK):
print "blynkUpdateImage:started"
"""
r = requests.get(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V70?value=2') # Picture URL
if (DEBUGBLYNK):
print "blynkUpdateImage:OTHER:r.status_code:",r.status_code
#r = requests.get(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V70?urls=http://www.switchdoc.com/2.jpg') # Picture URL
#r = requests.get(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V70?urls=http://www.switchdoc.com/skycamera.jpg,http://www.switchdoc.com/2.jpg') # Picture URL
r = requests.get(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V70?value=1;url=http://www.switchdoc.com/skycamera.jpg')
if (DEBUGBLYNK):
print "blynkUpdateImage:OTHER:r.status_code:",r.status_code
r = requests.get(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V70?value=2;url=http://www.switchdoc.com/2.jpg') # Picture URL
if (DEBUGBLYNK):
print "blynkUpdateImage:OTHER:r.status_code:",r.status_code
r = requests.get(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V70?value=2') # Picture URL
if (DEBUGBLYNK):
print "blynkUpdateImage:OTHER:r.status_code:",r.status_code
"""
r = requests.get(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V70?urls=http://www.switchdoc.com/SkyWeatherNoAlpha.png') # Picture URL
except Exception as e:
print "exception in blynkUpdateImage"
print (e)
return 0
def blynkStateUpdate():
try:
blynkUpdateImage()
put_header={"Content-Type": "application/json"}
# set last sample time
put_header={"Content-Type": "application/json"}
val = time.strftime("%Y-%m-%d %H:%M:%S")
put_body = json.dumps([val])
if (DEBUGBLYNK):
print "blynkEventUpdate:",val
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V44', data=put_body, headers=put_header)
if (DEBUGBLYNK):
print "blynkEventUpdate:POST:r.status_code:",r.status_code
# do the graphs
val = state.Outdoor_AirQuality_Sensor_Value
put_body = json.dumps([val])
if (DEBUGBLYNK):
print "blynkStateUpdate:Pre:put_body:",put_body
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V7', data=put_body, headers=put_header)
if (DEBUGBLYNK):
print "blynkStateUpdate:POST:r.status_code:",r.status_code
val = util.returnTemperatureCF(state.currentOutsideTemperature)
tval = "{0:0.1f} ".format(val) + util.returnTemperatureCFUnit()
put_body = json.dumps([tval])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V0', data=put_body, headers=put_header)
val = util.returnTemperatureCF(state.currentOutsideTemperature)
put_body = json.dumps([val])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V10', data=put_body, headers=put_header)
val = state.currentOutsideHumidity
put_body = json.dumps(["{0:0.1f}%".format(val)])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V1', data=put_body, headers=put_header)
val = state.currentOutsideHumidity
put_body = json.dumps([val])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V11', data=put_body, headers=put_header)
val = util.returnTemperatureCF(state.currentInsideTemperature)
tval = "{0:0.1f} ".format(val) + util.returnTemperatureCFUnit()
put_body = json.dumps([tval])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V21', data=put_body, headers=put_header)
val = util.returnTemperatureCF(state.currentInsideTemperature)
tval = "{0:0.1f}".format(val)
put_body = json.dumps([val])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V120', data=put_body, headers=put_header)
val = state.currentInsideHumidity
put_body = json.dumps(["{0:0.1f}%".format(val)])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V13', data=put_body, headers=put_header)
val = state.currentInsideHumidity
put_body = json.dumps(["{0:0.1f}".format(val)])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V121', data=put_body, headers=put_header)
if (state.fanState == False):
val = 0
else:
val = 1
put_body = json.dumps([val])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V122', data=put_body, headers=put_header)
#wind
val = util.returnWindSpeed(state.ScurrentWindSpeed)
tval = "{0:0.1f}".format(val) + util.returnWindSpeedUnit()
put_body = json.dumps([tval])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V9', data=put_body, headers=put_header)
#now humidity
#val = util.returnWindSpeed(state.ScurrentWindSpeed)
val = state.currentOutsideHumidity
put_body = json.dumps([val])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V19', data=put_body, headers=put_header)
# outdoor Air Quality
val = state.Outdoor_AirQuality_Sensor_Value
put_body = json.dumps([val])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V20', data=put_body, headers=put_header)
#wind direction
val = "{0:0.0f}/".format(state.ScurrentWindDirection) + util.returnWindDirection(state.ScurrentWindDirection)
put_body = json.dumps([val])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V2', data=put_body, headers=put_header)
#rain
val = "{0:0.2f}".format(state.currentTotalRain)
if (state.EnglishMetric == 1):
tval = "{0:0.2f}mm".format(state.currentTotalRain)
else:
tval = "{0:0.2f}in".format(state.currentTotalRain / 25.4)
put_body = json.dumps([tval])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V3', data=put_body, headers=put_header)
#Sunlight
val = "{0:0.0f}".format(state.currentSunlightVisible)
#print ("Sunlight Val = ", state.currentSunlightVisible)
put_body = json.dumps([val])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V4', data=put_body, headers=put_header)
#Sunlight
val = "{0:0.0f}".format(state.currentSunlightVisible)
#print ("Sunlight Val = ", state.currentSunlightVisible)
put_body = json.dumps([val])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V60', data=put_body, headers=put_header)
#barometric Pressure
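# added note: currentSeaLevel is in hPa; 1 hPa = 0.02953 inHg, hence the
# *0.2953/10.0 conversion on the English branch below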
if (state.EnglishMetric == 1):
tval = "{0:0.2f}hPa".format(state.currentSeaLevel)
else:
tval = "{0:0.2f}in".format((state.currentSeaLevel * 0.2953)/10.0)
put_body = json.dumps([tval])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V40', data=put_body, headers=put_header)
#barometric Pressure graph
if (state.EnglishMetric == 1):
tval = "{0:0.2f}".format(state.currentSeaLevel)
else:
tval = "{0:0.2f}".format((state.currentSeaLevel * 0.2953)/10.0)
put_body = json.dumps([tval])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V41', data=put_body, headers=put_header)
#solar data
val = "{0:0.2f}".format(state.solarVoltage)
put_body = json.dumps([val])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V50', data=put_body, headers=put_header)
val = "{0:0.1f}".format(state.solarCurrent)
put_body = json.dumps([val])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V51', data=put_body, headers=put_header)
val = "{0:0.2f}".format(state.batteryVoltage)
put_body = json.dumps([val])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V52', data=put_body, headers=put_header)
val = "{0:0.1f}".format(state.batteryCurrent)
put_body = json.dumps([val])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V53', data=put_body, headers=put_header)
val = "{0:0.2f}".format(state.loadVoltage)
put_body = json.dumps([val])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V54', data=put_body, headers=put_header)
val = "{0:0.1f}".format(state.loadCurrent)
put_body = json.dumps([val])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V55', data=put_body, headers=put_header)
val = "{0:0.1f}W".format(state.batteryPower)
put_body = json.dumps([val])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V60', data=put_body, headers=put_header)
val = "{0:0.1f}W".format(state.solarPower)
put_body = json.dumps([val])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V61', data=put_body, headers=put_header)
val = "{0:0.1f}W".format(state.loadPower)
put_body = json.dumps([val])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V62', data=put_body, headers=put_header)
val = "{0:0.1f}".format(state.batteryCharge)
put_body = json.dumps([val])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V56', data=put_body, headers=put_header)
val = "{0:0.1f}".format(state.batteryCharge)
put_body = json.dumps([val])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V127', data=put_body, headers=put_header)
delta = util.returnTemperatureCF(state.currentInsideTemperature)- util.returnTemperatureCF(state.currentOutsideTemperature)
val = "{0:0.1f}".format(delta)
put_body = json.dumps([val])
r = requests.put(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V128', data=put_body, headers=put_header)
# LEDs
if (state.barometricTrend): #True is up, False is down
r = requests.get(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V42?color=%2300FF00') # Green
if (DEBUGBLYNK):
print "blynkAlarmUpdate:OTHER:r.status_code:",r.status_code
else:
r = requests.get(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V42?color=%23FF0000') # red
if (state.currentAs3935LastLightningTimeStamp < time.clock() + 1800): #True is lightning, False is none
r = requests.get(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V43?color=%2300FF00') # Green
if (DEBUGBLYNK):
print "blynkAlarmUpdate:OTHER:r.status_code:",r.status_code
else:
r = requests.get(config.BLYNK_URL+config.BLYNK_AUTH+'/update/V43?color=%23FF0000') # red
return 1
except Exception as e:
print "exception in blynkStateUpdate"
print(traceback.format_exc())
print (e)
return 0
def blynkStatusUpdate():
if (DEBUGBLYNK):
print "blynkStatusUpdate Entry"
try:
put_header={"Content-Type": "application/json"}
# look for English or Metric
r = requests.get(config.BLYNK_URL+config.BLYNK_AUTH+'/get/V8') # read button state
if (DEBUGBLYNK):
print "blynkStatusUpdate:POSTEM:r.status_code:",r.status_code
print "blynkStatusUpdate:POSTEM:r.text:",r.text
if (r.text == '["1"]'):
if (state.EnglishMetric == 0):
state.EnglishMetric = 1
if (DEBUGBLYNK):
print "blynkStatusUpdate:POSTBRC:state.EnglishMetric set to Metric"
blynkStatusTerminalUpdate("Set to Metric Units ")
f = open("/home/pi/SDL_Pi_SkyWeather/state/EnglishMetric.txt", "w")
f.write("1")
f.close()
else:
if (state.EnglishMetric == 1):
state.EnglishMetric = 0
f = open("/home/pi/SDL_Pi_SkyWeather/state/EnglishMetric.txt", "w")
f.write("0")
f.close()
if (DEBUGBLYNK):
print "blynkStatusUpdate:POSTBRC:state.EnglishMetric set to English"
blynkStatusTerminalUpdate("Set to English Units ")
# look for rainbow button change
r = requests.get(config.BLYNK_URL+config.BLYNK_AUTH+'/get/V5') # read button state
if (DEBUGBLYNK):
print "blynkStatusUpdate:POSTBR:r.status_code:",r.status_code
print "blynkStatusUpdate:POSTBR:r.text:",r.text
if (r.text == '["1"]'):
state.runRainbow = True
blynkStatusTerminalUpdate("Turning Rainbow On ")
if (DEBUGBLYNK):
print "blynkStatusUpdate:POSTBRC:state.runRainbow set to True"
else:
if(state.runRainbow == True):
blynkStatusTerminalUpdate("Turning Rainbow Off ")
state.runRainbow = False
if (DEBUGBLYNK):
print "blynkStatusUpdate:POSTBRC:state.runRainbow set to False"
# turn OLED ON and OFF
r = requests.get(config.BLYNK_URL+config.BLYNK_AUTH+'/get/V6') # read button state
#if (DEBUGBLYNK):
if (r.text == '["1"]'):
if (state.runOLED == False):
state.runOLED = True
blynkStatusTerminalUpdate("Turning OLED On ")
if (DEBUGBLYNK):
print "blynkStatusUpdate:POSTBRO:state.runOLED set to True"
if (config.OLED_Originally_Present == True):
config.OLED_Present = True
util.turnOLEDOn()
else:
if (state.runOLED == True):
blynkStatusTerminalUpdate("Turning OLED Off ")
state.runOLED = False
if (DEBUGBLYNK):
print "blynkStatusUpdate:POSTBRO:state.runOLED set to False"
if (config.OLED_Originally_Present == True):
config.OLED_Present = False
util.turnOLEDOff()
# look for Flash Strip Command
r = requests.get(config.BLYNK_URL+config.BLYNK_AUTH+'/get/V30') # read button state
if (DEBUGBLYNK):
print "blynkStatusUpdate:POSTBF:r.status_code:",r.status_code
print "blynkStatusUpdate:POSTBF:r.text:",r.text
if (r.text == '["1"]'):
state.flashStrip = True
if (DEBUGBLYNK):
print "blynkStatusUpdate:POSTBRF:state.flashStrip set to True"
else:
state.flashStrip = False
if (DEBUGBLYNK):
print "blynkStatusUpdate:POSTBRF:state.flashStrip set to False"
return 1
except Exception as e:
print "exception in blynkStatusUpdate"
print (e)
return 0
def blynkSGSAppOnline():
try:
r = requests.get(config.BLYNK_URL+config.BLYNK_AUTH+'/isAppConnected')
if (DEBUGBLYNK):
print "blynkSGSAppOnline:POSTCHECK:r.text:",r.text
return r.text
except Exception as e:
print "exception in blynkApponline"
print (e)
return ""
| [
"[email protected]"
] | |
ed3962679f3569de0efc57197373f7139220afbe | be0edc20433a6ad3bf4b8f448f1c457437de4c52 | /huxley/core/admin/delegate.py | 6f7e07e1c80d5f269090bfe38f1d8dd13775523a | [
"BSD-3-Clause"
] | permissive | ethanlee16/huxley | eca8c3c1d4ea543a5875c28d4cb5c81dc4e4eddb | 5d601e952c711e9b6703170c78fb23fcc2734ead | refs/heads/master | 2021-01-15T09:20:25.310737 | 2014-12-03T14:51:33 | 2014-12-03T14:51:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,306 | py | # Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
import csv
from django.conf.urls import patterns, url
from django.contrib import admin
from django.http import HttpResponse
from huxley.core.models import Delegate
class DelegateAdmin(admin.ModelAdmin):
def roster(self, request):
'''Return a CSV file representing the entire roster of registered
delegates, including their committee, country, and school.'''
roster = HttpResponse(content_type='text/csv')
roster['Content-Disposition'] = 'attachment; filename="roster.csv"'
writer = csv.writer(roster)
ordering = 'assignment__school__name'
for delegate in Delegate.objects.all().order_by(ordering):
writer.writerow([
delegate,
delegate.committee,
delegate.country,
delegate.school
])
return roster
def get_urls(self):
urls = super(DelegateAdmin, self).get_urls()
urls += patterns('',
url(
r'roster',
self.admin_site.admin_view(self.roster),
name='core_delegate_roster',
),
)
return urls
| [
"[email protected]"
] | |
769c233947bb21e73d616adc9283780a1161b902 | 43277f3962edfd5f16d116a3ed35cc08000a0707 | /modular/badger_utils/sacred/experiment_config_diff.py | 5d352aa5b1beb2c014775fb68bf4df78bac4dffd | [] | no_license | GoodAI/badger-2020 | 0cbeb60bf5b5fa2959504b1ba4489d5725646474 | bb3822dbcbb04ed9c153c4deffa25a81011c8ce5 | refs/heads/master | 2021-07-15T12:49:44.227988 | 2021-03-02T19:06:06 | 2021-03-02T19:06:06 | 243,016,754 | 7 | 1 | null | 2020-08-10T13:13:51 | 2020-02-25T14:15:24 | Jupyter Notebook | UTF-8 | Python | false | false | 3,324 | py | from typing import Dict, Any, List, Tuple
import pandas as pd
from badger_utils.view.config_utils import tuple_to_dict
class ExperimentConfigDiff:
_diff: Dict[Tuple[Tuple[str, Any], ...], List[int]]
_common: Dict[str, Any]
def __init__(self, common: Dict[str, Any], diff: Dict[Tuple[Tuple[str, Any], ...], List[int]]):
"""
Args:
common: dict of config vars, e.g. {'size': 10, 'epochs': 1000}
diff: dict keyed by tuples of ('name', 'value') config pairs, with a list of run_ids as value,
e.g. {(('n_experts', 4), ('n_inputs', 3)): [23, 24], (('n_experts', 4), ('n_inputs', 2)): [25]}
"""
self._common = common
self._diff = diff
def diff_as_df(self, explode_by_run_id: bool = False) -> pd.DataFrame:
"""
Returns:
DataFrame with columns named by config keys
plus one column "run_ids" where are stored comma separated run_ids
"""
df = pd.DataFrame([{**tuple_to_dict(r), **{'run_ids': v}} for r, v in self._diff.items()])
if explode_by_run_id:
df = df.explode('run_ids').astype({'run_ids': int}).set_index('run_ids')
df.index.name = None
return df
def diff_as_lines(self) -> List[str]:
"""
Returns:
List of one_line string representation for diff. Usable e.g. for a plot legend.
"""
return ExperimentConfigDiff.df_as_lines(self.diff_as_df())
def common_as_text(self, line_delimiter: str = '\n') -> str:
return line_delimiter.join([f'{k}: {v}' for k, v in self._common.items()])
def diff_filtered_run_ids(self, filter_dict: Dict[str, Any]) -> List[int]:
"""
Return list of run_ids for runs that match filter_dict. Only runs matching all filter conditions are selected.
Args:
filter_dict: Dict config_item -> expected_value. E.g. {'n_experts': 4, 'rollout_size': 8}
Returns:
List of run_ids
"""
filtered = self.filter_df(self.diff_as_df(), filter_dict)
return self.flatten(filtered['run_ids'])
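# Illustrative example (derived from the constructor docstring): with
#   diff = {(('n_experts', 4), ('n_inputs', 3)): [23, 24],
#           (('n_experts', 4), ('n_inputs', 2)): [25]}
# diff_filtered_run_ids({'n_experts': 4}) returns [23, 24, 25].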
@staticmethod
def filter_df(df: pd.DataFrame, filter_dict: Dict[str, Any]) -> pd.DataFrame:
for k, v in filter_dict.items():
df = df.loc[df[k] == v]
return df
@staticmethod
def flatten(l):
return [item for sublist in l for item in sublist]
@staticmethod
def df_as_lines(df: pd.DataFrame) -> List[str]:
"""
Convert DataFrame to list of strings representation
Args:
df: DataFrame to be converted
Returns:
List of one_line string representation for DataFrame. Usable e.g. for a plot legend.
"""
def format_config(r):
return ', '.join([f'{c}: {v}' for c, v in zip(r._fields, r)])
return [format_config(r) for r in df.itertuples(index=False, name='Row')]
@staticmethod
def df_as_description_runids_dict(df: pd.DataFrame) -> Dict[str, List[int]]:
result = {}
for idx, row in df.iterrows():
columns_values = [f'{name}: {row[name]}' for name in row.index if name != 'run_ids']
description = ', '.join(columns_values)
result[description] = row['run_ids']
return result
| [
"[email protected]"
] | |
66335d806ccf0a4f3148e4dabc2ca3baa18b55b8 | e1fada3a9846a5593e3d3d2fdc32b23b832e38b4 | /tests/unit/cli/tools/test_eval.py | 76eee501f89daf8095a5032255767270ab304ab5 | [
"Apache-2.0"
] | permissive | GalyaZalesskaya/openvino_training_extensions | fd1ebb189900008b16b85568449e5c62d8edbad5 | 6116639caeff100b06a6c10a96c7e7f5951f20c7 | refs/heads/develop | 2023-09-03T19:32:44.702497 | 2023-03-15T06:48:24 | 2023-03-15T06:48:24 | 202,568,309 | 0 | 0 | Apache-2.0 | 2019-10-28T16:16:27 | 2019-08-15T15:41:59 | Python | UTF-8 | Python | false | false | 4,079 | py | import argparse
import pytest
from otx.cli.tools import eval as target_package
from otx.cli.tools.eval import get_args, main
from tests.test_suite.e2e_test_system import e2e_pytest_unit
@e2e_pytest_unit
def test_get_args(mocker):
mock_options = {
"--test-data-roots": "test/data/root",
"--load-weights": "weight/path",
"--save-performance": "save/path",
"--work-dir": "work/dir/path",
}
mock_command = ["otx"]
for key, value in mock_options.items():
mock_command.extend([key, value])
mocker.patch("sys.argv", mock_command)
mocker.patch.object(
target_package, "get_parser_and_hprams_data", return_value=[argparse.ArgumentParser(), {"param": "test"}, []]
)
mocker.patch.object(target_package, "add_hyper_parameters_sub_parser", return_value=argparse.ArgumentParser())
parsed_args, _ = get_args()
assert parsed_args.test_data_roots == "test/data/root"
assert parsed_args.load_weights == "weight/path"
assert parsed_args.save_performance == "save/path"
assert parsed_args.work_dir == "work/dir/path"
@pytest.fixture
def mock_args(mocker, tmp_path):
mock_args = mocker.MagicMock()
mock_args.test_data_roots = "fake_test_data_root"
mock_args.load_weights = "fake_load_weights.xml"
mock_args.save_performance = tmp_path / "save/performance.json"
mock_args.work_dir = tmp_path / "work_dir"
def mock_contains(self, val):
return val in self.__dict__
mock_args.__contains__ = mock_contains
mock_get_args = mocker.patch("otx.cli.tools.eval.get_args")
mock_get_args.return_value = [mock_args, []]
return mock_args
@pytest.fixture
def mock_config_manager(mocker):
mock_config_manager = mocker.patch.object(target_package, "ConfigManager")
mock_template = mocker.MagicMock()
mock_template.name = "fake_name"
mock_config_manager.return_value.template = mock_template
mock_config_manager.return_value.check_workspace.return_value = True
mock_config_manager.return_value.get_dataset_config.return_value = {}
mock_config_manager.return_value.get_hyparams_config.return_value = {}
return mock_config_manager
@pytest.fixture
def mock_dataset_adapter(mocker):
mock_dataset_adapter = mocker.patch("otx.cli.tools.eval.get_dataset_adapter")
mock_dataset = mocker.MagicMock()
mock_label_schema = mocker.MagicMock()
mock_dataset_adapter.return_value.get_otx_dataset.return_value = mock_dataset
mock_dataset_adapter.return_value.get_label_schema.return_value = mock_label_schema
return mock_dataset_adapter
@pytest.fixture
def mock_task(mocker):
mock_task_class = mocker.MagicMock()
mock_task = mocker.MagicMock()
mock_task_class.return_value = mock_task
mocker.patch.object(target_package, "get_impl_class", return_value=mock_task_class)
return mock_task
@e2e_pytest_unit
def test_main(
mocker,
mock_args,
mock_config_manager,
mock_dataset_adapter,
):
mocker.patch.object(
target_package,
"read_model",
return_value=mocker.MagicMock(),
)
mocker.patch.object(
target_package,
"get_impl_class",
return_value=mocker.MagicMock(),
)
mocker.patch.object(
target_package,
"get_dataset_adapter",
return_value=mock_dataset_adapter,
)
mocker.patch.object(
target_package,
"ResultSetEntity",
return_value=mocker.MagicMock(),
)
mocker.patch.object(
target_package,
"InferenceParameters",
return_value=mocker.MagicMock(),
)
mocker.patch.object(
target_package,
"Subset",
return_value=mocker.MagicMock(),
)
mocker.patch.object(
target_package,
"TaskEnvironment",
return_value=mocker.MagicMock(),
)
mocker.patch("json.dump")
mocker.patch("builtins.open")
mock_get_args = mocker.patch("otx.cli.tools.eval.get_args")
mock_get_args.return_value = [mock_args, []]
ret = main()
assert ret["retcode"] == 0
| [
"[email protected]"
] | |
cfe29c23297e0b8167a1f1a3e388e74ad9a83c5c | c8cd3dbcb783b6daad866be07be950bbc4cd9fe9 | /boards/models.py | 95ad56c05b421ac32e2b4d6d8490dcb0569a8431 | [] | no_license | pauloendoh/django-boards | d6b42b829dee0c96c4bda676da5e2ac1977f0922 | 640c0672b049d190213f5bf318f390b40e697262 | refs/heads/master | 2020-03-13T03:43:19.604777 | 2018-04-26T02:22:58 | 2018-04-26T02:22:58 | 130,949,060 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,223 | py | from markdown import markdown
from django.db import models
from django.contrib.auth.models import User
from django.utils.safestring import mark_safe
from django.utils.text import Truncator
import math
class Board(models.Model):
name = models.CharField(max_length=30, unique=True)
description = models.CharField(max_length=100)
def __str__(self):
return self.name
def get_posts_count(self):
return Post.objects.filter(topic__board=self).count()
def get_last_post(self):
return Post.objects.filter(topic__board=self).order_by('-created_at').first()
class Topic(models.Model):
subject = models.CharField(max_length=255)
last_updated = models.DateTimeField(auto_now_add=True)
board = models.ForeignKey(Board, related_name='topics', on_delete=models.CASCADE)
starter = models.ForeignKey(User, related_name='topics', on_delete=models.CASCADE)
views = models.PositiveIntegerField(default=0) # <- here
def __str__(self):
return self.subject
def get_page_count(self):
count = self.posts.count()
pages = count / 20
return math.ceil(pages)
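# e.g. 45 posts -> ceil(45 / 20) = 3 pages, i.e. 20 posts per page.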
def has_many_pages(self, count=None):
if count is None:
count = self.get_page_count()
return count > 6
def get_page_range(self):
count = self.get_page_count()
if self.has_many_pages(count):
return range(1, 5)
return range(1, count + 1)
def get_last_ten_posts(self):
return self.posts.order_by('-created_at')[:10]
class Post(models.Model):
message = models.TextField(max_length=4000)
topic = models.ForeignKey(Topic, related_name='posts', on_delete=models.CASCADE)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(null=True)
created_by = models.ForeignKey(User, related_name='posts', on_delete=models.CASCADE)
updated_by = models.ForeignKey(User, null=True, related_name='+', on_delete=models.CASCADE)
def __str__(self):
truncated_message = Truncator(self.message)
return truncated_message.chars(30)
def get_message_as_markdown(self):
return mark_safe(markdown(self.message, safe_mode='escape'))
| [
"[email protected]"
] | |
82147037ffb32a42caafdb8859d25db1cbd55b59 | 4f804508c78c331d7985db936d099522a5739303 | /dcorch/api/proxy/apps/controller.py | 53f93e012e597b6aacf6235591087795391f5261 | [
"Apache-2.0"
] | permissive | starlingx-staging/stx-kingbird | 406f6ada829fe285329670d81d5c9e4bcc58884e | 9869ad4640e76384fa14f031a59134cd439929a8 | refs/heads/master | 2020-03-18T00:56:00.772399 | 2018-05-20T04:28:15 | 2018-05-20T04:35:38 | 134,110,148 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,493 | py | # Copyright 2017 Wind River
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import webob.dec
import webob.exc
from dcorch.api.proxy.apps.dispatcher import APIDispatcher
from dcorch.api.proxy.common import constants as proxy_consts
from dcorch.api.proxy.common.service import Middleware
from dcorch.api.proxy.common import utils as proxy_utils
from dcorch.common import consts
import dcorch.common.context as k_context
from dcorch.common import exceptions as exception
from dcorch.common import utils
from oslo_config import cfg
from oslo_log import log as logging
from oslo_service.wsgi import Request
from dcorch.rpc import client as rpc_client
LOG = logging.getLogger(__name__)
controller_opts = [
cfg.BoolOpt('show_request',
default=False,
help='Print out the request information'),
cfg.BoolOpt('show_response',
default=False,
help='Print out the response information'),
]
CONF = cfg.CONF
CONF.register_opts(controller_opts)
class APIController(Middleware):
def __init__(self, app, conf):
super(APIController, self).__init__(app)
self.ctxt = k_context.get_admin_context()
self._default_dispatcher = APIDispatcher(app)
self.rpc_client = rpc_client.EngineClient()
self.response_hander_map = {}
@staticmethod
def get_status_code(response):
"""Returns the integer status code from the response.
"""
return response.status_int
@staticmethod
def _get_resource_type_from_environ(request_environ):
return proxy_utils.get_routing_match_value(request_environ, 'action')
@staticmethod
def get_resource_id_from_link(url):
return proxy_utils.get_url_path_components(url)[-1]
@staticmethod
def get_request_header(environ):
from paste.request import construct_url
return construct_url(environ)
def notify(self, environ, endpoint_type):
self.rpc_client.sync_request(self.ctxt, endpoint_type)
def process_request(self, req):
return self._default_dispatcher
def process_response(self, environ, request_body, response):
if CONF.show_response:
LOG.info("Response: (%s)", str(response))
LOG.info("Response status: (%d)", self.get_status_code(response))
handler = self.response_hander_map[CONF.type]
return handler(environ, request_body, response)
def _update_response(self, environ, request_body, response):
# overwrite the usage numbers with the aggregated usage
# from dcorch
LOG.info("Query dcorch for usage info")
desired_fields = {'quota_set': 'in_use',
'quota': 'used'}
project_id = proxy_utils.get_tenant_id(environ)
user_id = proxy_utils.get_user_id(environ)
response_data = json.loads(response.body)
# get the first match since it should only has one match
resource_type = next((x for x in desired_fields if x in response_data),
None)
if resource_type is None:
LOG.error("Could not find the quota data to update")
return response
resource_info = response_data[resource_type]
try:
usage_dict = self.rpc_client.get_usage_for_project_and_user(
self.ctxt, CONF.type, project_id, user_id)
except Exception:
return response
usage_info = json.dumps(usage_dict)
LOG.info("Project (%s) User (%s) aggregated usage: (%s)",
project_id, user_id, usage_info)
quota_usage = desired_fields[resource_type]
to_be_updated = [res for res in usage_dict if res in resource_info]
for k in to_be_updated:
resource_info[k][quota_usage] = usage_dict[k]
response_data[resource_type] = resource_info
response.body = json.dumps(response_data)
return response
@staticmethod
def print_environ(environ):
for name, value in sorted(environ.items()):
if (name not in ['CONTENT_LENGTH', 'CONTENT_TYPE'] and
not name.startswith('HTTP_')):
continue
LOG.info(' %s: %s\n' % (name, value))
@webob.dec.wsgify(RequestClass=Request)
def __call__(self, req):
if CONF.show_request:
self.print_request(req)
environ = req.environ
# copy the request body
request_body = req.body
application = self.process_request(req)
response = req.get_response(application)
return self.process_response(environ, request_body, response)
@staticmethod
def print_request_body(body):
if body:
LOG.info("Request body:")
for line in body.splitlines():
LOG.info(line.encode('string_escape') + '\n')
def print_request(self, req):
environ = req.environ
length = int(req.environ.get('CONTENT_LENGTH') or '0')
LOG.info("Incoming request:(%s), content length: (%d)",
environ['REQUEST_METHOD'], length)
LOG.info("Request URL: (%s)\n", self.get_request_header(environ))
LOG.info("Request header: \n")
for k, v in req.headers.iteritems():
LOG.info(" %s: %s\n", k, v)
self.print_environ(environ)
self.print_request_body(req.body)
class ComputeAPIController(APIController):
ENDPOINT_TYPE = consts.ENDPOINT_TYPE_COMPUTE
RESOURCE_TYPE_MAP = {
consts.RESOURCE_TYPE_COMPUTE_QUOTA_SET: 'quota_set',
}
OK_STATUS_CODE = [
webob.exc.HTTPOk.code,
webob.exc.HTTPCreated.code,
webob.exc.HTTPAccepted.code,
webob.exc.HTTPNoContent.code
]
def __init__(self, app, conf):
super(ComputeAPIController, self).__init__(app, conf)
self.response_hander_map = {
self.ENDPOINT_TYPE: self._process_response
}
self._resource_handler = {
proxy_consts.FLAVOR_RESOURCE_TAG: self._process_flavor,
proxy_consts.FLAVOR_ACCESS_RESOURCE_TAG:
self._process_flavor_action,
proxy_consts.FLAVOR_EXTRA_SPECS_RESOURCE_TAG:
self._process_extra_spec,
proxy_consts.KEYPAIRS_RESOURCE_TAG:
self._process_keypairs,
proxy_consts.QUOTA_RESOURCE_TAG:
self._process_quota,
proxy_consts.QUOTA_CLASS_RESOURCE_TAG:
self._process_quota
}
@staticmethod
def _get_resource_tag_from_header(url, operation, resource_type):
result = proxy_utils.get_url_path_components(url)
if (operation == consts.OPERATION_TYPE_DELETE or
resource_type == consts.RESOURCE_TYPE_COMPUTE_QUOTA_SET or
resource_type == consts.RESOURCE_TYPE_COMPUTE_QUOTA_CLASS_SET):
return result[-2]
else:
return result[-1]
@staticmethod
def _get_flavor_id_from_environ(environ):
return proxy_utils.get_routing_match_value(environ, 'flavor_id')
def _process_response(self, environ, request_body, response):
operation_type = proxy_utils.get_operation_type(environ)
if self.get_status_code(response) in self.OK_STATUS_CODE and \
operation_type != consts.OPERATION_TYPE_GET:
self._enqueue_work(environ, request_body, response)
self.notify(environ, self.ENDPOINT_TYPE)
return response
def _process_flavor(self, **kwargs):
resource_id = None
resource_info = None
resource_type = kwargs.get('resource_type')
operation_type = kwargs.get('operation_type')
if operation_type == consts.OPERATION_TYPE_POST:
operation_type = consts.OPERATION_TYPE_CREATE
resp = json.loads(kwargs.get('response_body'))
resource = json.loads(kwargs.get('request_body'))
if resource_type in resource:
resource_info = resource[resource_type]
else:
LOG.info("Can't find resource type (%s) in request (%s)",
resource_type, resource)
if resource_type in resp:
if 'links' in resp[resource_type]:
link = resp[resource_type]['links'][0]
resource_id = self.get_resource_id_from_link(link['href'])
# update the resource id if it is available
if resource_id is not None:
resource_info['id'] = resource_id
resource_info = json.dumps(resource_info)
LOG.info("Resource id: (%s)", resource_id)
LOG.info("Resource info: (%s)", resource_info)
elif operation_type == consts.OPERATION_TYPE_DELETE:
resource_id = self.get_resource_id_from_link(
kwargs.get('request_header'))
LOG.info("Resource id: (%s), resource type: (%s)",
resource_id, resource_type)
else:
# it should never happen
LOG.info("Ignore request type: (%s)", operation_type)
return operation_type, resource_id, resource_info
def _process_flavor_action(self, **kwargs):
resource_id = self._get_flavor_id_from_environ(kwargs.get('environ'))
resource_info = kwargs.get('request_body')
LOG.info("Operation:(%s), resource_id:(%s), resource_info:(%s)",
consts.OPERATION_TYPE_ACTION, resource_id, resource_info)
return consts.OPERATION_TYPE_ACTION, resource_id, resource_info
def _process_extra_spec(self, **kwargs):
environ = kwargs.get('environ')
resource_id = self._get_flavor_id_from_environ(environ)
operation_type = kwargs.get('operation_type')
if operation_type == consts.OPERATION_TYPE_DELETE:
extra_spec = proxy_utils.get_routing_match_value(
environ, 'extra_spec')
resource_dict = {consts.ACTION_EXTRASPECS_DELETE: extra_spec}
resource_info = json.dumps(resource_dict)
else:
resource_info = kwargs.get('request_body')
LOG.info("Operation:(%s), resource_id:(%s), resource_info:(%s)",
operation_type, resource_id, resource_info)
return consts.OPERATION_TYPE_ACTION, resource_id, resource_info
def _process_keypairs(self, **kwargs):
resource_info = {}
user_id = None
environ = kwargs.get('environ')
operation_type = kwargs.get('operation_type')
if operation_type == consts.OPERATION_TYPE_POST:
operation_type = consts.OPERATION_TYPE_CREATE
request = json.loads(kwargs.get('request_body'))
resource_info = request[kwargs.get('resource_type')]
if 'public_key' not in resource_info:
# need to get the public_key from response
resp = json.loads(kwargs.get('response_body'))
resp_info = resp.get(kwargs.get('resource_type'))
resource_info['public_key'] = resp_info.get('public_key')
if 'user_id' in resource_info:
user_id = resource_info['user_id']
resource_id = resource_info['name']
else:
resource_id = proxy_utils.get_routing_match_value(
environ, consts.RESOURCE_TYPE_COMPUTE_KEYPAIR)
user_id = proxy_utils.get_user_id(environ)
if user_id is None:
user_id = environ.get('HTTP_X_USER_ID', '')
# resource_id = "name/user_id"
resource_id = utils.keypair_construct_id(resource_id, user_id)
resource_info = json.dumps(resource_info)
LOG.info("Operation:(%s), resource_id:(%s), resource_info:(%s)",
operation_type, resource_id, resource_info)
return operation_type, resource_id, resource_info
def _process_quota(self, **kwargs):
environ = kwargs.get('environ')
resource_id = self.get_resource_id_from_link(
kwargs.get('request_header'))
resource_type = kwargs.get('resource_type')
operation_type = kwargs.get('operation_type')
if operation_type == consts.OPERATION_TYPE_DELETE:
resource_info = {}
else:
request = json.loads(kwargs.get('request_body'))
if resource_type in self.RESOURCE_TYPE_MAP:
resource_info = request[self.RESOURCE_TYPE_MAP.get(
resource_type)]
else:
resource_info = request[resource_type]
# add user_id to resource if it is specified
user_id = proxy_utils.get_user_id(environ)
if user_id is not None:
resource_info['user_id'] = user_id
resource_info = json.dumps(resource_info)
LOG.info("Operation:(%s), resource_id:(%s), resource_info:(%s)",
operation_type, resource_id, resource_info)
return operation_type, resource_id, resource_info
def _enqueue_work(self, environ, request_body, response):
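        """Dispatch to the matching resource handler and enqueue the resulting sync work."""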
LOG.info("enqueue_work")
request_header = self.get_request_header(environ)
operation_type = proxy_utils.get_operation_type(environ)
resource_type = self._get_resource_type_from_environ(environ)
resource_tag = self._get_resource_tag_from_header(request_header,
operation_type,
resource_type)
handler = self._resource_handler[resource_tag]
operation_type, resource_id, resource_info = handler(
environ=environ,
operation_type=operation_type,
resource_type=resource_type,
request_header=request_header,
request_body=request_body,
response_body=response.body)
try:
utils.enqueue_work(self.ctxt,
self.ENDPOINT_TYPE,
resource_type,
resource_id,
operation_type,
resource_info)
except exception.ResourceNotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.format_message())
class SysinvAPIController(APIController):
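    """Handle platform (sysinv) API responses and enqueue sync work for
    platform resources; firewall rules and certificates keep their raw payload."""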
ENDPOINT_TYPE = consts.ENDPOINT_TYPE_PLATFORM
RESOURCE_ID_MAP = {
consts.RESOURCE_TYPE_SYSINV_SNMP_TRAPDEST: 'ip_address',
consts.RESOURCE_TYPE_SYSINV_SNMP_COMM: 'community'
}
OK_STATUS_CODE = [
webob.exc.HTTPOk.code,
webob.exc.HTTPNoContent.code
]
def __init__(self, app, conf):
super(SysinvAPIController, self).__init__(app, conf)
self.response_hander_map = {
self.ENDPOINT_TYPE: self._process_response
}
def _process_response(self, environ, request_body, response):
if self.get_status_code(response) in self.OK_STATUS_CODE:
self._enqueue_work(environ, request_body, response)
self.notify(environ, self.ENDPOINT_TYPE)
return response
def _enqueue_work(self, environ, request_body, response):
LOG.info("enqueue_work")
resource_info = {}
request_header = self.get_request_header(environ)
operation_type = proxy_utils.get_operation_type(environ)
resource_type = self._get_resource_type_from_environ(environ)
# Firewall rule and certificate need special processing
p_resource_info = 'suppressed'
if resource_type == consts.RESOURCE_TYPE_SYSINV_FIREWALL_RULES:
resource_info['payload'] = request_body
resource = json.loads(response.body)[resource_type]
resource_id = resource['firewall_sig']
elif resource_type == consts.RESOURCE_TYPE_SYSINV_CERTIFICATE:
resource_info['payload'] = request_body
resource_info['content_type'] = environ.get('CONTENT_TYPE')
resource = json.loads(response.body)[resource_type]
resource_id = resource['signature']
else:
if (operation_type == consts.OPERATION_TYPE_POST and
resource_type in self.RESOURCE_ID_MAP):
# need to get the id from the request data since it is
# not available in the header
rid = self.RESOURCE_ID_MAP.get(resource_type)
resource_id = json.loads(request_body)[rid]
else:
resource_id = self.get_resource_id_from_link(request_header)
if operation_type != consts.OPERATION_TYPE_DELETE:
resource_info['payload'] = json.loads(request_body)
p_resource_info = resource_info
LOG.info("Resource id: (%s), type: (%s), info: (%s)",
resource_id, resource_type, p_resource_info)
try:
utils.enqueue_work(self.ctxt,
self.ENDPOINT_TYPE,
resource_type,
resource_id,
operation_type,
json.dumps(resource_info))
except exception.ResourceNotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.format_message())
class CinderAPIController(APIController):
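    """Handle volume API responses, e.g. quota set updates."""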
ENDPOINT_TYPE = consts.ENDPOINT_TYPE_VOLUME
RESOURCE_TYPE_MAP = {
consts.RESOURCE_TYPE_VOLUME_QUOTA_SET: 'quota_set',
}
OK_STATUS_CODE = [
webob.exc.HTTPOk.code,
]
def __init__(self, app, conf):
super(CinderAPIController, self).__init__(app, conf)
self.response_hander_map = {
self.ENDPOINT_TYPE: self._process_response
}
def _process_response(self, environ, request_body, response):
if self.get_status_code(response) in self.OK_STATUS_CODE:
operation_type = proxy_utils.get_operation_type(environ)
if operation_type == consts.OPERATION_TYPE_GET:
if proxy_utils.show_usage(environ):
response = self._update_response(environ, request_body,
response)
else:
self._enqueue_work(environ, request_body, response)
self.notify(environ, self.ENDPOINT_TYPE)
return response
def _enqueue_work(self, environ, request_body, response):
request_header = self.get_request_header(environ)
resource_id = self.get_resource_id_from_link(request_header)
resource_type = self._get_resource_type_from_environ(environ)
operation_type = proxy_utils.get_operation_type(environ)
if operation_type == consts.OPERATION_TYPE_DELETE:
resource_info = {}
else:
request = json.loads(request_body)
if resource_type in self.RESOURCE_TYPE_MAP:
resource_info = request[self.RESOURCE_TYPE_MAP.get(
resource_type)]
else:
resource_info = request[resource_type]
resource_info = json.dumps(resource_info)
LOG.info("Operation:(%s), resource_id:(%s), resource_info:(%s)",
operation_type, resource_id, resource_info)
try:
utils.enqueue_work(self.ctxt,
self.ENDPOINT_TYPE,
resource_type,
resource_id,
operation_type,
resource_info)
except exception.ResourceNotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.format_message())
class NeutronAPIController(APIController):
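    """Handle network API responses, e.g. quota updates."""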
ENDPOINT_TYPE = consts.ENDPOINT_TYPE_NETWORK
RESOURCE_TYPE_MAP = {
consts.RESOURCE_TYPE_NETWORK_QUOTA_SET: 'quota',
}
# the following fields will be inserted to the resource_info if
# they are not presented in the request but are provided in the
# response
DESIRED_FIELDS = ['tenant_id', 'project_id']
OK_STATUS_CODE = [
webob.exc.HTTPOk.code,
webob.exc.HTTPCreated.code,
webob.exc.HTTPNoContent.code
]
def __init__(self, app, conf):
super(NeutronAPIController, self).__init__(app, conf)
self.response_hander_map = {
self.ENDPOINT_TYPE: self._process_response
}
def _process_response(self, environ, request_body, response):
if self.get_status_code(response) in self.OK_STATUS_CODE:
self._enqueue_work(environ, request_body, response)
self.notify(environ, self.ENDPOINT_TYPE)
return response
def _enqueue_work(self, environ, request_body, response):
request_header = self.get_request_header(environ)
resource_type = self._get_resource_type_from_environ(environ)
operation_type = proxy_utils.get_operation_type(environ)
if operation_type == consts.OPERATION_TYPE_POST:
resource = json.loads(response.body)[resource_type]
resource_id = resource['id']
else:
resource_id = self.get_resource_id_from_link(request_header)
if operation_type == consts.OPERATION_TYPE_DELETE:
resource_info = {}
else:
request = json.loads(request_body)
if resource_type in self.RESOURCE_TYPE_MAP:
original_type = self.RESOURCE_TYPE_MAP.get(
resource_type)
else:
original_type = resource_type
resource_info = request[original_type]
if operation_type == consts.OPERATION_TYPE_POST:
resp_info = json.loads(response.body)[original_type]
for f in self.DESIRED_FIELDS:
if f not in resource_info and f in resp_info:
resource_info[f] = resp_info[f]
resource_info = json.dumps(resource_info)
LOG.info("Operation:(%s), resource_id:(%s), resource_info:(%s)",
operation_type, resource_id, resource_info)
try:
utils.enqueue_work(self.ctxt,
self.ENDPOINT_TYPE,
resource_type,
resource_id,
operation_type,
resource_info)
except exception.ResourceNotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.format_message())
class OrchAPIController(APIController):
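    """Rewrite successful compute/network orchestration responses via _update_response."""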
OK_STATUS_CODE = [
webob.exc.HTTPOk.code,
]
def __init__(self, app, conf):
super(OrchAPIController, self).__init__(app, conf)
self.response_hander_map = {
consts.ENDPOINT_TYPE_COMPUTE: self._process_response,
consts.ENDPOINT_TYPE_NETWORK: self._process_response
}
def _process_response(self, environ, request_body, response):
if self.get_status_code(response) in self.OK_STATUS_CODE:
response = self._update_response(environ, request_body, response)
return response
| [
"[email protected]"
] | |
a56284f885fa48ed63884b6ce71bc6af019845e8 | f13acd0d707ea9ab0d2f2f010717b35adcee142f | /Others/past/past201912-open/g/main.py | 54e6af8dac7ce0af88307994ac3cc8aa07ba57a3 | [
"CC0-1.0",
"LicenseRef-scancode-public-domain"
] | permissive | KATO-Hiro/AtCoder | 126b9fe89fa3a7cffcbd1c29d42394e7d02fa7c7 | bf43320bc1af606bfbd23c610b3432cddd1806b9 | refs/heads/master | 2023-08-18T20:06:42.876863 | 2023-08-17T23:45:21 | 2023-08-17T23:45:21 | 121,067,516 | 4 | 0 | CC0-1.0 | 2023-09-14T21:59:38 | 2018-02-11T00:32:45 | Python | UTF-8 | Python | false | false | 1,159 | py | # -*- coding: utf-8 -*-
def get_pairs(group):
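    """Return all index pairs (p1, p2) with p1 < p2 from the given group."""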
from itertools import combinations
pairs = list()
for p1, p2 in combinations(group, 2):
if p1 > p2:
continue
pairs.append((p1, p2))
return pairs
def main():
import sys
from itertools import product
input = sys.stdin.readline
n = int(input())
a = list()
for i in range(n):
ai = [0] * (i + 1) + list(map(int, input().split()))
a.append(ai)
groups = product(range(3), repeat=n)
ans = -(10**18)
for group in groups:
group_one = list()
group_two = list()
group_three = list()
for i, g in enumerate(group):
if g == 0:
group_one.append(i)
elif g == 1:
group_two.append(i)
else:
group_three.append(i)
pairs = get_pairs(group_one)
pairs += get_pairs(group_two)
pairs += get_pairs(group_three)
candidate = 0
for p1, p2 in pairs:
candidate += a[p1][p2]
ans = max(ans, candidate)
print(ans)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
c3e09bab0bfed296d9c0504d22539054f33298af | 3b60e6f4bbc011003ac4929f01eb7409918deb79 | /Analysis_v1/Simulation/Pythia/RSG/CP2RSGPythia8/RSGfragments/RSGravitonToGammaGamma_kMpl01_M_4750_TuneCP2_13TeV_pythia8_cfi.py | fb6cecf4012af20bf07d74d1f2cf406820e124f7 | [] | no_license | uzzielperez/Analyses | d1a64a4e8730325c94e2bc8461544837be8a179d | 1d66fa94763d7847011ea551ee872936c4c401be | refs/heads/master | 2023-02-09T04:54:01.854209 | 2020-09-07T14:57:54 | 2020-09-07T14:57:54 | 120,850,137 | 0 | 0 | null | 2020-06-17T16:48:16 | 2018-02-09T03:14:04 | C++ | UTF-8 | Python | false | false | 1,157 | py | import FWCore.ParameterSet.Config as cms
from Configuration.Generator.Pythia8CommonSettings_cfi import *
from Configuration.Generator.MCTunes2017.PythiaCP2Settings_cfi import *
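# RS graviton (G*, PDG id 5100039) resonance at m = 4750 GeV with k/M_Pl = 0.1;
# decays are restricted to modes containing photons (PDG id 22).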
generator = cms.EDFilter("Pythia8GeneratorFilter",
comEnergy = cms.double(13000.0),
crossSection = cms.untracked.double(1.095e-3),
filterEfficiency = cms.untracked.double(1),
maxEventsToPrint = cms.untracked.int32(0),
pythiaHepMCVerbosity = cms.untracked.bool(False),
pythiaPylistVerbosity = cms.untracked.int32(1),
PythiaParameters = cms.PSet(
pythia8CommonSettingsBlock,
pythia8CP2SettingsBlock,
processParameters = cms.vstring(
'ExtraDimensionsG*:all = on',
'ExtraDimensionsG*:kappaMG = 0.541643794389',
'5100039:m0 = 4750.0',
'5100039:onMode = off',
'5100039:onIfAny = 22',
),
parameterSets = cms.vstring('pythia8CommonSettings',
'pythia8CP2Settings',
'processParameters',
)
)
)
ProductionFilterSequence = cms.Sequence(generator)
| [
"[email protected]"
] | |
5f3de75aad1afc4cfe886a3e0fe4d562ec53a65a | 1a03664e4dd5f5fb12434d32129e612a76bf6d61 | /core/loaders.py | ec96d2b6531d033017d8a3b7ea1fae80ede688e7 | [] | no_license | SergioAnd95/refactored-adventure | 6601dabaa0b7125a94f0010157e17862c84c1d32 | 553d1425b6d59f69b9c526eecff07df0f18835b4 | refs/heads/master | 2020-03-30T09:05:05.893448 | 2018-10-17T15:10:40 | 2018-10-17T15:10:40 | 151,059,439 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 575 | py | import importlib
from settings import settings
def autodiscover_app_module(module_name):
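    """Import '<app>.<module_name>' for every app in settings.INSTALLED_APPS."""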
for app in settings.INSTALLED_APPS:
importlib.import_module(f'{app}.{module_name}')
def discover_urls():
"""
    Find and return all URL routes from the installed apps;
    apps without a urls module are skipped.
:return: list
"""
urlpatterns = []
for app in settings.INSTALLED_APPS:
try:
_temp = __import__(f'{app}.urls', globals(), locals(), ['urlpatterns'], 0)
urlpatterns += _temp.urlpatterns
except ModuleNotFoundError:
pass
return urlpatterns
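# Example (hypothetical): with settings.INSTALLED_APPS = ['users'] and a
# users/urls.py module that defines `urlpatterns`, discover_urls() returns
# that combined list.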
| [
"[email protected]"
] | |
f975e0a4d12496012b500813cfc94786bb7d9803 | 644b13f90d43e9eb2fae0d2dc580c7484b4c931b | /2019 baekjoon/Math/1676_factorial2.py | 2481524d4c177197c939031d86cfa79bd6a652e5 | [] | no_license | yeonnseok/ps-algorithm | c79a41f132c8016655719f74e9e224c0870a8f75 | fc9d52b42385916344bdd923a7eb3839a3233f18 | refs/heads/master | 2020-07-09T11:53:55.786001 | 2020-01-26T02:27:09 | 2020-01-26T02:27:09 | 203,962,358 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 564 | py | # def factorial(num):
# if num == 1:
# return 1
# return num * factorial(num - 1)
#
#
# def factorial_count(num):
# target = list(str(factorial(num)))
# count = 0
# for i in reversed(range(len(target))):
# if target[i] == '0':
# count += 1
# else:
# return count
#
#
# def main():
# num = int(input())
# print(factorial_count(num))
#
#
# main()
def main():
num = int(input())
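    # Trailing zeros of num! are set by the number of factor-5 multiples
    # (Legendre's formula): floor(num/5) + floor(num/25) + floor(num/125) + ...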
i = 5
ans = 0
while i <= num:
        ans += num // i
i *= 5
print(ans)
main() | [
"[email protected]"
] | |
61a56e8f561435c4a10d86df63ea689a20d4c8df | 49f61714a6f78d984fd2194d6064d84e891bc5b7 | /2019-1/220/users/4266/codes/1693_1879.py | 55a2e7d2215d87de2d3bec79e75dfecd21f22dde | [] | no_license | psbarros/Variaveis3 | b5c4e1517e7d94a846ee03791d25d5821a1c651c | 3dcf6f810709ce03c78335acf9533e008a2ae125 | refs/heads/master | 2023-06-13T07:05:00.878430 | 2021-07-06T17:51:37 | 2021-07-06T17:51:37 | 383,549,597 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 319 | py | # When testing your solution, don't limit yourself to the example case.
extras = float(input("Enter the number of overtime hours: "))
faltou = float(input("Enter the number of hours missed: "))
h = extras - ((1/4)*faltou)
if h > 400:
    g = 500.0
else:
    g = 100.0
print(extras, " overtime hours and ", faltou, " missed")
print("R$ ", g)
| [
"[email protected]"
] | |
976024538682c39ea666ce8a446262993b29caed | 2a8a6327fb9a7ce8696aa15b197d5170661fb94f | /test/test_get_credit_memo_item_typewith_success_finance_information.py | 7a33e49537112879bd23b6dc4151bca3264a0e5d | [] | no_license | moderndatainc/zuora-client | 8b88e05132ddf7e8c411a6d7dad8c0baabaa6dad | d50da49ce1b8465c76723496c2561a3b8ebdf07d | refs/heads/master | 2021-09-21T19:17:34.752404 | 2018-08-29T23:24:07 | 2018-08-29T23:24:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 38,982 | py | # coding: utf-8
"""
Zuora API Reference
# Introduction Welcome to the reference for the Zuora REST API! <a href=\"http://en.wikipedia.org/wiki/REST_API\" target=\"_blank\">REST</a> is a web-service protocol that lends itself to rapid development by using everyday HTTP and JSON technology. The Zuora REST API provides a broad set of operations and resources that: * Enable Web Storefront integration from your website. * Support self-service subscriber sign-ups and account management. * Process revenue schedules through custom revenue rule models. * Enable manipulation of most objects in the Zuora Object Model. Want to share your opinion on how our API works for you? <a href=\"https://community.zuora.com/t5/Developers/API-Feedback-Form/gpm-p/21399\" target=\"_blank\">Tell us how you feel </a>about using our API and what we can do to make it better. ## Access to the API If you have a Zuora tenant, you can access the Zuora REST API via one of the following endpoints: | Tenant | Base URL for REST Endpoints | |-------------------------|-------------------------| |US Production | https://rest.zuora.com | |US API Sandbox | https://rest.apisandbox.zuora.com| |US Performance Test | https://rest.pt1.zuora.com | |EU Production | https://rest.eu.zuora.com | |EU Sandbox | https://rest.sandbox.eu.zuora.com | The Production endpoint provides access to your live user data. API Sandbox tenants are a good place to test code without affecting real-world data. If you would like Zuora to provision an API Sandbox tenant for you, contact your Zuora representative for assistance. **Note:** If you have a tenant in the Production Copy Environment, submit a request at <a href=\"http://support.zuora.com/\" target=\"_blank\">Zuora Global Support</a> to enable the Zuora REST API in your tenant and obtain the base URL for REST endpoints. If you do not have a Zuora tenant, go to <a href=\"https://www.zuora.com/resource/zuora-test-drive\" target=\"_blank\">https://www.zuora.com/resource/zuora-test-drive</a> and sign up for a Production Test Drive tenant. The tenant comes with seed data, including a sample product catalog. # API Changelog You can find the <a href=\"https://community.zuora.com/t5/Developers/API-Changelog/gpm-p/18092\" target=\"_blank\">Changelog</a> of the API Reference in the Zuora Community. # Authentication ## OAuth v2.0 Zuora recommends that you use OAuth v2.0 to authenticate to the Zuora REST API. Currently, OAuth is not available in every environment. See [Zuora Testing Environments](https://knowledgecenter.zuora.com/BB_Introducing_Z_Business/D_Zuora_Environments) for more information. Zuora recommends you to create a dedicated API user with API write access on a tenant when authenticating via OAuth, and then create an OAuth client for this user. See <a href=\"https://knowledgecenter.zuora.com/CF_Users_and_Administrators/A_Administrator_Settings/Manage_Users/Create_an_API_User\" target=\"_blank\">Create an API User</a> for how to do this. By creating a dedicated API user, you can control permissions of the API user without affecting other non-API users. If a user is deactivated, all of the user's OAuth clients will be automatically deactivated. Authenticating via OAuth requires the following steps: 1. Create a Client 2. Generate a Token 3. Make Authenticated Requests ### Create a Client You must first [create an OAuth client](https://knowledgecenter.zuora.com/CF_Users_and_Administrators/A_Administrator_Settings/Manage_Users#Create_an_OAuth_Client_for_a_User) in the Zuora UI. To do this, you must be an administrator of your Zuora tenant. 
This is a one-time operation. You will be provided with a Client ID and a Client Secret. Please note this information down, as it will be required for the next step. **Note:** The OAuth client will be owned by a Zuora user account. If you want to perform PUT, POST, or DELETE operations using the OAuth client, the owner of the OAuth client must have a Platform role that includes the \"API Write Access\" permission. ### Generate a Token After creating a client, you must make a call to obtain a bearer token using the [Generate an OAuth token](https://www.zuora.com/developer/api-reference/#operation/createToken) operation. This operation requires the following parameters: - `client_id` - the Client ID displayed when you created the OAuth client in the previous step - `client_secret` - the Client Secret displayed when you created the OAuth client in the previous step - `grant_type` - must be set to `client_credentials` **Note**: The Client ID and Client Secret mentioned above were displayed when you created the OAuth Client in the prior step. The [Generate an OAuth token](https://www.zuora.com/developer/api-reference/#operation/createToken) response specifies how long the bearer token is valid for. Call [Generate an OAuth token](https://www.zuora.com/developer/api-reference/#operation/createToken) again to generate a new bearer token. ### Make Authenticated Requests To authenticate subsequent API requests, you must provide a valid bearer token in an HTTP header: `Authorization: Bearer {bearer_token}` If you have [Zuora Multi-entity](https://www.zuora.com/developer/api-reference/#tag/Entities) enabled, you need to set an additional header to specify the ID of the entity that you want to access. You can use the `scope` field in the [Generate an OAuth token](https://www.zuora.com/developer/api-reference/#operation/createToken) response to determine whether you need to specify an entity ID. If the `scope` field contains more than one entity ID, you must specify the ID of the entity that you want to access. For example, if the `scope` field contains `entity.1a2b7a37-3e7d-4cb3-b0e2-883de9e766cc` and `entity.c92ed977-510c-4c48-9b51-8d5e848671e9`, specify one of the following headers: - `Zuora-Entity-Ids: 1a2b7a37-3e7d-4cb3-b0e2-883de9e766cc` - `Zuora-Entity-Ids: c92ed977-510c-4c48-9b51-8d5e848671e9` **Note**: For a limited period of time, Zuora will accept the `entityId` header as an alternative to the `Zuora-Entity-Ids` header. If you choose to set the `entityId` header, you must remove all \"-\" characters from the entity ID in the `scope` field. If the `scope` field contains a single entity ID, you do not need to specify an entity ID. ## Other Supported Authentication Schemes Zuora continues to support the following additional legacy means of authentication: * Use username and password. Include authentication with each request in the header: * `apiAccessKeyId` * `apiSecretAccessKey` Zuora recommends that you create an API user specifically for making API calls. See <a href=\"https://knowledgecenter.zuora.com/CF_Users_and_Administrators/A_Administrator_Settings/Manage_Users/Create_an_API_User\" target=\"_blank\">Create an API User</a> for more information. * Use an authorization cookie. The cookie authorizes the user to make calls to the REST API for the duration specified in **Administration > Security Policies > Session timeout**. The cookie expiration time is reset with this duration after every call to the REST API. 
To obtain a cookie, call the [Connections](https://www.zuora.com/developer/api-reference/#tag/Connections) resource with the following API user information: * ID * Password * For CORS-enabled APIs only: Include a 'single-use' token in the request header, which re-authenticates the user with each request. See below for more details. ### Entity Id and Entity Name The `entityId` and `entityName` parameters are only used for [Zuora Multi-entity](https://knowledgecenter.zuora.com/BB_Introducing_Z_Business/Multi-entity \"Zuora Multi-entity\"). These are the legacy parameters that Zuora will only continue to support for a period of time. Zuora recommends you to use the `Zuora-Entity-Ids` parameter instead. The `entityId` and `entityName` parameters specify the Id and the [name of the entity](https://knowledgecenter.zuora.com/BB_Introducing_Z_Business/Multi-entity/B_Introduction_to_Entity_and_Entity_Hierarchy#Name_and_Display_Name \"Introduction to Entity and Entity Hierarchy\") that you want to access, respectively. Note that you must have permission to access the entity. You can specify either the `entityId` or `entityName` parameter in the authentication to access and view an entity. * If both `entityId` and `entityName` are specified in the authentication, an error occurs. * If neither `entityId` nor `entityName` is specified in the authentication, you will log in to the entity in which your user account is created. To get the entity Id and entity name, you can use the GET Entities REST call. For more information, see [API User Authentication](https://knowledgecenter.zuora.com/BB_Introducing_Z_Business/Multi-entity/A_Overview_of_Multi-entity#API_User_Authentication \"API User Authentication\"). ### Token Authentication for CORS-Enabled APIs The CORS mechanism enables REST API calls to Zuora to be made directly from your customer's browser, with all credit card and security information transmitted directly to Zuora. This minimizes your PCI compliance burden, allows you to implement advanced validation on your payment forms, and makes your payment forms look just like any other part of your website. For security reasons, instead of using cookies, an API request via CORS uses **tokens** for authentication. The token method of authentication is only designed for use with requests that must originate from your customer's browser; **it should not be considered a replacement to the existing cookie authentication** mechanism. See [Zuora CORS REST](https://knowledgecenter.zuora.com/DC_Developers/REST_API/A_REST_basics/G_CORS_REST \"Zuora CORS REST\") for details on how CORS works and how you can begin to implement customer calls to the Zuora REST APIs. See [HMAC Signatures](https://www.zuora.com/developer/api-reference/#operation/POSTHMACSignature \"HMAC Signatures\") for details on the HMAC method that returns the authentication token. # Requests and Responses ## Request IDs As a general rule, when asked to supply a \"key\" for an account or subscription (accountKey, account-key, subscriptionKey, subscription-key), you can provide either the actual ID or the number of the entity. ## HTTP Request Body Most of the parameters and data accompanying your requests will be contained in the body of the HTTP request. The Zuora REST API accepts JSON in the HTTP request body. No other data format (e.g., XML) is supported. 
### Data Type ([Actions](https://www.zuora.com/developer/api-reference/#tag/Actions) and CRUD operations only) We recommend that you do not specify the decimal values with quotation marks, commas, and spaces. Use characters of `+-0-9.eE`, for example, `5`, `1.9`, `-8.469`, and `7.7e2`. Also, Zuora does not convert currencies for decimal values. ## Testing a Request Use a third party client, such as [curl](https://curl.haxx.se \"curl\"), [Postman](https://www.getpostman.com \"Postman\"), or [Advanced REST Client](https://advancedrestclient.com \"Advanced REST Client\"), to test the Zuora REST API. You can test the Zuora REST API from the Zuora API Sandbox or Production tenants. If connecting to Production, bear in mind that you are working with your live production data, not sample data or test data. ## Testing with Credit Cards Sooner or later it will probably be necessary to test some transactions that involve credit cards. For suggestions on how to handle this, see [Going Live With Your Payment Gateway](https://knowledgecenter.zuora.com/CB_Billing/M_Payment_Gateways/C_Managing_Payment_Gateways/B_Going_Live_Payment_Gateways#Testing_with_Credit_Cards \"C_Zuora_User_Guides/A_Billing_and_Payments/M_Payment_Gateways/C_Managing_Payment_Gateways/B_Going_Live_Payment_Gateways#Testing_with_Credit_Cards\" ). ## Concurrent Request Limits Zuora enforces tenant-level concurrent request limits. See <a href=\"https://knowledgecenter.zuora.com/BB_Introducing_Z_Business/Policies/Concurrent_Request_Limits\" target=\"_blank\">Concurrent Request Limits</a> for more information. ## Timeout Limit If a request does not complete within 120 seconds, the request times out and Zuora returns a Gateway Timeout error. ## Error Handling Responses and error codes are detailed in [Responses and errors](https://knowledgecenter.zuora.com/DC_Developers/REST_API/A_REST_basics/3_Responses_and_errors \"Responses and errors\"). # Pagination When retrieving information (using GET methods), the optional `pageSize` query parameter sets the maximum number of rows to return in a response. The maximum is `40`; larger values are treated as `40`. If this value is empty or invalid, `pageSize` typically defaults to `10`. The default value for the maximum number of rows retrieved can be overridden at the method level. If more rows are available, the response will include a `nextPage` element, which contains a URL for requesting the next page. If this value is not provided, no more rows are available. No \"previous page\" element is explicitly provided; to support backward paging, use the previous call. ## Array Size For data items that are not paginated, the REST API supports arrays of up to 300 rows. Thus, for instance, repeated pagination can retrieve thousands of customer accounts, but within any account an array of no more than 300 rate plans is returned. # API Versions The Zuora REST API are version controlled. Versioning ensures that Zuora REST API changes are backward compatible. Zuora uses a major and minor version nomenclature to manage changes. By specifying a version in a REST request, you can get expected responses regardless of future changes to the API. ## Major Version The major version number of the REST API appears in the REST URL. Currently, Zuora only supports the **v1** major version. For example, `POST https://rest.zuora.com/v1/subscriptions`. ## Minor Version Zuora uses minor versions for the REST API to control small changes. For example, a field in a REST method is deprecated and a new field is used to replace it. 
Some fields in the REST methods are supported as of minor versions. If a field is not noted with a minor version, this field is available for all minor versions. If a field is noted with a minor version, this field is in version control. You must specify the supported minor version in the request header to process without an error. If a field is in version control, it is either with a minimum minor version or a maximum minor version, or both of them. You can only use this field with the minor version between the minimum and the maximum minor versions. For example, the `invoiceCollect` field in the POST Subscription method is in version control and its maximum minor version is 189.0. You can only use this field with the minor version 189.0 or earlier. If you specify a version number in the request header that is not supported, Zuora will use the minimum minor version of the REST API. In our REST API documentation, if a field or feature requires a minor version number, we note that in the field description. You only need to specify the version number when you use the fields require a minor version. To specify the minor version, set the `zuora-version` parameter to the minor version number in the request header for the request call. For example, the `collect` field is in 196.0 minor version. If you want to use this field for the POST Subscription method, set the `zuora-version` parameter to `196.0` in the request header. The `zuora-version` parameter is case sensitive. For all the REST API fields, by default, if the minor version is not specified in the request header, Zuora will use the minimum minor version of the REST API to avoid breaking your integration. ### Minor Version History The supported minor versions are not serial. This section documents the changes made to each Zuora REST API minor version. The following table lists the supported versions and the fields that have a Zuora REST API minor version. | Fields | Minor Version | REST Methods | Description | |:--------|:--------|:--------|:--------| | invoiceCollect | 189.0 and earlier | [Create Subscription](https://www.zuora.com/developer/api-reference/#operation/POST_Subscription \"Create Subscription\"); [Update Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_Subscription \"Update Subscription\"); [Renew Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_RenewSubscription \"Renew Subscription\"); [Cancel Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_CancelSubscription \"Cancel Subscription\"); [Suspend Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_SuspendSubscription \"Suspend Subscription\"); [Resume Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_ResumeSubscription \"Resume Subscription\"); [Create Account](https://www.zuora.com/developer/api-reference/#operation/POST_Account \"Create Account\")|Generates an invoice and collects a payment for a subscription. 
| | collect | 196.0 and later | [Create Subscription](https://www.zuora.com/developer/api-reference/#operation/POST_Subscription \"Create Subscription\"); [Update Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_Subscription \"Update Subscription\"); [Renew Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_RenewSubscription \"Renew Subscription\"); [Cancel Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_CancelSubscription \"Cancel Subscription\"); [Suspend Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_SuspendSubscription \"Suspend Subscription\"); [Resume Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_ResumeSubscription \"Resume Subscription\"); [Create Account](https://www.zuora.com/developer/api-reference/#operation/POST_Account \"Create Account\")|Collects an automatic payment for a subscription. | | invoice | 196.0 and 207.0| [Create Subscription](https://www.zuora.com/developer/api-reference/#operation/POST_Subscription \"Create Subscription\"); [Update Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_Subscription \"Update Subscription\"); [Renew Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_RenewSubscription \"Renew Subscription\"); [Cancel Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_CancelSubscription \"Cancel Subscription\"); [Suspend Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_SuspendSubscription \"Suspend Subscription\"); [Resume Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_ResumeSubscription \"Resume Subscription\"); [Create Account](https://www.zuora.com/developer/api-reference/#operation/POST_Account \"Create Account\")|Generates an invoice for a subscription. | | invoiceTargetDate | 196.0 and earlier | [Preview Subscription](https://www.zuora.com/developer/api-reference/#operation/POST_SubscriptionPreview \"Preview Subscription\") |Date through which charges are calculated on the invoice, as `yyyy-mm-dd`. | | invoiceTargetDate | 207.0 and earlier | [Create Subscription](https://www.zuora.com/developer/api-reference/#operation/POST_Subscription \"Create Subscription\"); [Update Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_Subscription \"Update Subscription\"); [Renew Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_RenewSubscription \"Renew Subscription\"); [Cancel Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_CancelSubscription \"Cancel Subscription\"); [Suspend Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_SuspendSubscription \"Suspend Subscription\"); [Resume Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_ResumeSubscription \"Resume Subscription\"); [Create Account](https://www.zuora.com/developer/api-reference/#operation/POST_Account \"Create Account\")|Date through which charges are calculated on the invoice, as `yyyy-mm-dd`. | | targetDate | 207.0 and later | [Preview Subscription](https://www.zuora.com/developer/api-reference/#operation/POST_SubscriptionPreview \"Preview Subscription\") |Date through which charges are calculated on the invoice, as `yyyy-mm-dd`. 
| | targetDate | 211.0 and later | [Create Subscription](https://www.zuora.com/developer/api-reference/#operation/POST_Subscription \"Create Subscription\"); [Update Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_Subscription \"Update Subscription\"); [Renew Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_RenewSubscription \"Renew Subscription\"); [Cancel Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_CancelSubscription \"Cancel Subscription\"); [Suspend Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_SuspendSubscription \"Suspend Subscription\"); [Resume Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_ResumeSubscription \"Resume Subscription\"); [Create Account](https://www.zuora.com/developer/api-reference/#operation/POST_Account \"Create Account\")|Date through which charges are calculated on the invoice, as `yyyy-mm-dd`. | | includeExisting DraftInvoiceItems | 196.0 and earlier| [Preview Subscription](https://www.zuora.com/developer/api-reference/#operation/POST_SubscriptionPreview \"Preview Subscription\"); [Update Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_Subscription \"Update Subscription\") | Specifies whether to include draft invoice items in subscription previews. Specify it to be `true` (default) to include draft invoice items in the preview result. Specify it to be `false` to excludes draft invoice items in the preview result. | | includeExisting DraftDocItems | 207.0 and later | [Preview Subscription](https://www.zuora.com/developer/api-reference/#operation/POST_SubscriptionPreview \"Preview Subscription\"); [Update Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_Subscription \"Update Subscription\") | Specifies whether to include draft invoice items in subscription previews. Specify it to be `true` (default) to include draft invoice items in the preview result. Specify it to be `false` to excludes draft invoice items in the preview result. | | previewType | 196.0 and earlier| [Preview Subscription](https://www.zuora.com/developer/api-reference/#operation/POST_SubscriptionPreview \"Preview Subscription\"); [Update Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_Subscription \"Update Subscription\") | The type of preview you will receive. The possible values are `InvoiceItem`(default), `ChargeMetrics`, and `InvoiceItemChargeMetrics`. | | previewType | 207.0 and later | [Preview Subscription](https://www.zuora.com/developer/api-reference/#operation/POST_SubscriptionPreview \"Preview Subscription\"); [Update Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_Subscription \"Update Subscription\") | The type of preview you will receive. The possible values are `LegalDoc`(default), `ChargeMetrics`, and `LegalDocChargeMetrics`. 
| | runBilling | 211.0 and later | [Create Subscription](https://www.zuora.com/developer/api-reference/#operation/POST_Subscription \"Create Subscription\"); [Update Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_Subscription \"Update Subscription\"); [Renew Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_RenewSubscription \"Renew Subscription\"); [Cancel Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_CancelSubscription \"Cancel Subscription\"); [Suspend Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_SuspendSubscription \"Suspend Subscription\"); [Resume Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_ResumeSubscription \"Resume Subscription\"); [Create Account](https://www.zuora.com/developer/api-reference/#operation/POST_Account \"Create Account\")|Generates an invoice or credit memo for a subscription. **Note:** Credit memos are only available if you have the Invoice Settlement feature enabled. | | invoiceDate | 214.0 and earlier | [Invoice and Collect](https://www.zuora.com/developer/api-reference/#operation/POST_TransactionInvoicePayment \"Invoice and Collect\") |Date that should appear on the invoice being generated, as `yyyy-mm-dd`. | | invoiceTargetDate | 214.0 and earlier | [Invoice and Collect](https://www.zuora.com/developer/api-reference/#operation/POST_TransactionInvoicePayment \"Invoice and Collect\") |Date through which to calculate charges on this account if an invoice is generated, as `yyyy-mm-dd`. | | documentDate | 215.0 and later | [Invoice and Collect](https://www.zuora.com/developer/api-reference/#operation/POST_TransactionInvoicePayment \"Invoice and Collect\") |Date that should appear on the invoice and credit memo being generated, as `yyyy-mm-dd`. | | targetDate | 215.0 and later | [Invoice and Collect](https://www.zuora.com/developer/api-reference/#operation/POST_TransactionInvoicePayment \"Invoice and Collect\") |Date through which to calculate charges on this account if an invoice or a credit memo is generated, as `yyyy-mm-dd`. | | memoItemAmount | 223.0 and earlier | [Create credit memo from charge](https://www.zuora.com/developer/api-reference/#operation/POST_CreditMemoFromPrpc \"Create credit memo from charge\"); [Create debit memo from charge](https://www.zuora.com/developer/api-reference/#operation/POST_DebitMemoFromPrpc \"Create debit memo from charge\") | Amount of the memo item. | | amount | 224.0 and later | [Create credit memo from charge](https://www.zuora.com/developer/api-reference/#operation/POST_CreditMemoFromPrpc \"Create credit memo from charge\"); [Create debit memo from charge](https://www.zuora.com/developer/api-reference/#operation/POST_DebitMemoFromPrpc \"Create debit memo from charge\") | Amount of the memo item. | | subscriptionNumbers | 222.4 and earlier | [Create order](https://www.zuora.com/developer/api-reference/#operation/POST_Order \"Create order\") | Container for the subscription numbers of the subscriptions in an order. | | subscriptions | 223.0 and later | [Create order](https://www.zuora.com/developer/api-reference/#operation/POST_Order \"Create order\") | Container for the subscription numbers and statuses in an order. 
| #### Version 207.0 and Later The response structure of the [Preview Subscription](https://www.zuora.com/developer/api-reference/#operation/POST_SubscriptionPreview \"Preview Subscription\") and [Update Subscription](https://www.zuora.com/developer/api-reference/#operation/PUT_Subscription \"Update Subscription\") methods are changed. The following invoice related response fields are moved to the invoice container: * amount * amountWithoutTax * taxAmount * invoiceItems * targetDate * chargeMetrics # Zuora Object Model The following diagram presents a high-level view of the key Zuora objects. Click the image to open it in a new tab to resize it. <a href=\"https://www.zuora.com/wp-content/uploads/2017/01/ZuoraERD.jpeg\" target=\"_blank\"><img src=\"https://www.zuora.com/wp-content/uploads/2017/01/ZuoraERD.jpeg\" alt=\"Zuora Object Model Diagram\"></a> See the following articles for information about other parts of the Zuora business object model: * <a href=\"https://knowledgecenter.zuora.com/CB_Billing/Invoice_Settlement/D_Invoice_Settlement_Object_Model\" target=\"_blank\">Invoice Settlement Object Model</a> * <a href=\"https://knowledgecenter.zuora.com/BC_Subscription_Management/Orders/BA_Orders_Object_Model\" target=\"_blank\">Orders Object Model</a> You can use the [Describe object](https://www.zuora.com/developer/api-reference/#operation/GET_Describe) operation to list the fields of each Zuora object that is available in your tenant. When you call the operation, you must specify the API name of the Zuora object. The following table provides the API name of each Zuora object: | Object | API Name | |-----------------------------------------------|--------------------------------------------| | Account | `Account` | | Accounting Code | `AccountingCode` | | Accounting Period | `AccountingPeriod` | | Amendment | `Amendment` | | Application Group | `ApplicationGroup` | | Billing Run | <p>`BillingRun`</p><p>**Note:** The API name of this object is `BillingRun` in the [Describe object](https://www.zuora.com/developer/api-reference/#operation/GET_Describe) operation and Export ZOQL queries only. 
Otherwise, the API name of this object is `BillRun`.</p> | | Contact | `Contact` | | Contact Snapshot | `ContactSnapshot` | | Credit Balance Adjustment | `CreditBalanceAdjustment` | | Credit Memo | `CreditMemo` | | Credit Memo Application | `CreditMemoApplication` | | Credit Memo Application Item | `CreditMemoApplicationItem` | | Credit Memo Item | `CreditMemoItem` | | Credit Memo Part | `CreditMemoPart` | | Credit Memo Part Item | `CreditMemoPartItem` | | Credit Taxation Item | `CreditTaxationItem` | | Custom Exchange Rate | `FXCustomRate` | | Debit Memo | `DebitMemo` | | Debit Memo Item | `DebitMemoItem` | | Debit Taxation Item | `DebitTaxationItem` | | Discount Applied Metrics | `DiscountAppliedMetrics` | | Entity | `Tenant` | | Gateway Reconciliation Event | `PaymentGatewayReconciliationEventLog` | | Gateway Reconciliation Job | `PaymentReconciliationJob` | | Gateway Reconciliation Log | `PaymentReconciliationLog` | | Invoice | `Invoice` | | Invoice Adjustment | `InvoiceAdjustment` | | Invoice Item | `InvoiceItem` | | Invoice Item Adjustment | `InvoiceItemAdjustment` | | Invoice Payment | `InvoicePayment` | | Journal Entry | `JournalEntry` | | Journal Entry Item | `JournalEntryItem` | | Journal Run | `JournalRun` | | Order | `Order` | | Order Action | `OrderAction` | | Order ELP | `OrderElp` | | Order Item | `OrderItem` | | Order MRR | `OrderMrr` | | Order Quantity | `OrderQuantity` | | Order TCB | `OrderTcb` | | Order TCV | `OrderTcv` | | Payment | `Payment` | | Payment Application | `PaymentApplication` | | Payment Application Item | `PaymentApplicationItem` | | Payment Method | `PaymentMethod` | | Payment Method Snapshot | `PaymentMethodSnapshot` | | Payment Method Transaction Log | `PaymentMethodTransactionLog` | | Payment Method Update | `UpdaterDetail` | | Payment Part | `PaymentPart` | | Payment Part Item | `PaymentPartItem` | | Payment Run | `PaymentRun` | | Payment Transaction Log | `PaymentTransactionLog` | | Processed Usage | `ProcessedUsage` | | Product | `Product` | | Product Rate Plan | `ProductRatePlan` | | Product Rate Plan Charge | `ProductRatePlanCharge` | | Product Rate Plan Charge Tier | `ProductRatePlanChargeTier` | | Rate Plan | `RatePlan` | | Rate Plan Charge | `RatePlanCharge` | | Rate Plan Charge Tier | `RatePlanChargeTier` | | Refund | `Refund` | | Refund Application | `RefundApplication` | | Refund Application Item | `RefundApplicationItem` | | Refund Invoice Payment | `RefundInvoicePayment` | | Refund Part | `RefundPart` | | Refund Part Item | `RefundPartItem` | | Refund Transaction Log | `RefundTransactionLog` | | Revenue Charge Summary | `RevenueChargeSummary` | | Revenue Charge Summary Item | `RevenueChargeSummaryItem` | | Revenue Event | `RevenueEvent` | | Revenue Event Credit Memo Item | `RevenueEventCreditMemoItem` | | Revenue Event Debit Memo Item | `RevenueEventDebitMemoItem` | | Revenue Event Invoice Item | `RevenueEventInvoiceItem` | | Revenue Event Invoice Item Adjustment | `RevenueEventInvoiceItemAdjustment` | | Revenue Event Item | `RevenueEventItem` | | Revenue Event Item Credit Memo Item | `RevenueEventItemCreditMemoItem` | | Revenue Event Item Debit Memo Item | `RevenueEventItemDebitMemoItem` | | Revenue Event Item Invoice Item | `RevenueEventItemInvoiceItem` | | Revenue Event Item Invoice Item Adjustment | `RevenueEventItemInvoiceItemAdjustment` | | Revenue Event Type | `RevenueEventType` | | Revenue Schedule | `RevenueSchedule` | | Revenue Schedule Credit Memo Item | `RevenueScheduleCreditMemoItem` | | Revenue Schedule Debit Memo Item | 
`RevenueScheduleDebitMemoItem` | | Revenue Schedule Invoice Item | `RevenueScheduleInvoiceItem` | | Revenue Schedule Invoice Item Adjustment | `RevenueScheduleInvoiceItemAdjustment` | | Revenue Schedule Item | `RevenueScheduleItem` | | Revenue Schedule Item Credit Memo Item | `RevenueScheduleItemCreditMemoItem` | | Revenue Schedule Item Debit Memo Item | `RevenueScheduleItemDebitMemoItem` | | Revenue Schedule Item Invoice Item | `RevenueScheduleItemInvoiceItem` | | Revenue Schedule Item Invoice Item Adjustment | `RevenueScheduleItemInvoiceItemAdjustment` | | Subscription | `Subscription` | | Taxable Item Snapshot | `TaxableItemSnapshot` | | Taxation Item | `TaxationItem` | | Updater Batch | `UpdaterBatch` | | Usage | `Usage` | # noqa: E501
OpenAPI spec version: 2018-08-23
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import zuora_client
from zuora_client.models.get_credit_memo_item_typewith_success_finance_information import GETCreditMemoItemTypewithSuccessFinanceInformation # noqa: E501
from zuora_client.rest import ApiException
class TestGETCreditMemoItemTypewithSuccessFinanceInformation(unittest.TestCase):
"""GETCreditMemoItemTypewithSuccessFinanceInformation unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testGETCreditMemoItemTypewithSuccessFinanceInformation(self):
"""Test GETCreditMemoItemTypewithSuccessFinanceInformation"""
# FIXME: construct object with mandatory attributes with example values
# model = zuora_client.models.get_credit_memo_item_typewith_success_finance_information.GETCreditMemoItemTypewithSuccessFinanceInformation() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
056be20ed1e3365c7fdde9a90eaa63c5dcb36b19 | cce1e235c2c8e58d83af6dbadeb471ca62b710a1 | /hackerrank/data_structures/linked_lists/print_in_reverse.py | 2ce2ada1121b872bcca83b376be97f1e9c07e040 | [] | no_license | SebastianThomas1/coding_challenges | 6b51ce046b458c44db809687b6809d16d066566f | bd3bc6be7a975b6255e4b2198c953d56bd74e75a | refs/heads/master | 2023-03-03T00:18:00.147369 | 2021-02-08T21:52:02 | 2021-02-08T21:52:02 | 336,688,955 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 750 | py | # Sebastian Thomas (coding at sebastianthomas dot de)
# https://www.hackerrank.com/challenges/print-the-elements-of-a-linked-list-in-reverse
#
# Print in Reverse
class SinglyLinkedListNode:
def __init__(self, node_data):
self.data = node_data
self.next = None
def reverse(head):
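    """Reverse the singly linked list in place and return the new head."""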
current_node = head
predecessor = None
while current_node:
successor = current_node.next
current_node.next = predecessor
predecessor = current_node
current_node = successor
return predecessor
def reverse_print(head):
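    # Reverse the list, print it, then reverse again to restore the original order.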
head = reverse(head)
current_node = head
while current_node is not None:
print(current_node.data)
current_node = current_node.next
reverse(head)
| [
"[email protected]"
] | |
15aa12ee133e35281060e4580a88fe2e75fd98f9 | 7f0481f1f3508b6a957f71bf49478ceb592fe945 | /F9744/Keras/Ch02/Ch2_3.py | 2d96fd756a615e339fc8234a3768a8308657e512 | [] | no_license | dsalearning/tf.keras_python | d8c8174fac793cd5266e4aded9e83c2631311c15 | f06950d035c6aff0fd518eafcc09caffa348aefe | refs/heads/main | 2023-01-20T03:23:10.826040 | 2020-12-01T01:56:00 | 2020-12-01T01:56:00 | 309,405,021 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 61 | py | num = 10
print(num)
if num >= 10:
print("數字是10") | [
"[email protected]"
] | |
828390637851af3ac878569b4c3b034030f07415 | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-css/huaweicloudsdkcss/v1/model/setting.py | 0c3511543b950ff7b9b7c9005a2fe6122a67016d | [
"Apache-2.0"
] | permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 9,314 | py | # coding: utf-8
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class Setting:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'workers': 'int',
'batch_size': 'int',
'batch_delay_ms': 'int',
'queue_type': 'str',
'queue_check_point_writes': 'int',
'queue_max_bytes_mb': 'int'
}
attribute_map = {
'workers': 'workers',
'batch_size': 'batchSize',
'batch_delay_ms': 'batchDelayMs',
'queue_type': 'queueType',
'queue_check_point_writes': 'queueCheckPointWrites',
'queue_max_bytes_mb': 'queueMaxBytesMb'
}
def __init__(self, workers=None, batch_size=None, batch_delay_ms=None, queue_type=None, queue_check_point_writes=None, queue_max_bytes_mb=None):
"""Setting
The model defined in huaweicloud sdk
        :param workers: Number of worker threads that run the Filters+Outputs stages of the pipeline in parallel. Defaults to the number of CPU cores.
        :type workers: int
        :param batch_size: Maximum number of events an individual worker thread collects from inputs before attempting to execute its Filters and Outputs. Larger values are generally more efficient but increase memory overhead. Defaults to 125.
        :type batch_size: int
        :param batch_delay_ms: Minimum time each event waits to be scheduled by the pipeline, in milliseconds.
        :type batch_delay_ms: int
        :param queue_type: Internal queue model used for event buffering. memory is the traditional in-memory queue; persisted is a disk-based, ACKed persistent queue. Defaults to memory.
        :type queue_type: str
        :param queue_check_point_writes: When the persistent queue is used, the maximum number of events written before a checkpoint is forced. Defaults to 1024.
        :type queue_check_point_writes: int
        :param queue_max_bytes_mb: When the persistent queue is used, the total capacity of the persistent queue in megabytes (MB); make sure the disk capacity is larger than this value. Defaults to 1024.
        :type queue_max_bytes_mb: int
"""
self._workers = None
self._batch_size = None
self._batch_delay_ms = None
self._queue_type = None
self._queue_check_point_writes = None
self._queue_max_bytes_mb = None
self.discriminator = None
if workers is not None:
self.workers = workers
if batch_size is not None:
self.batch_size = batch_size
if batch_delay_ms is not None:
self.batch_delay_ms = batch_delay_ms
self.queue_type = queue_type
if queue_check_point_writes is not None:
self.queue_check_point_writes = queue_check_point_writes
if queue_max_bytes_mb is not None:
self.queue_max_bytes_mb = queue_max_bytes_mb
@property
def workers(self):
"""Gets the workers of this Setting.
        Number of worker threads that run the Filters+Outputs stages of the pipeline in parallel. Defaults to the number of CPU cores.
:return: The workers of this Setting.
:rtype: int
"""
return self._workers
@workers.setter
def workers(self, workers):
"""Sets the workers of this Setting.
        Number of worker threads that run the Filters+Outputs stages of the pipeline in parallel. Defaults to the number of CPU cores.
:param workers: The workers of this Setting.
:type workers: int
"""
self._workers = workers
@property
def batch_size(self):
"""Gets the batch_size of this Setting.
        Maximum number of events an individual worker thread collects from inputs before attempting to execute its Filters and Outputs. Larger values are generally more efficient but increase memory overhead. Defaults to 125.
:return: The batch_size of this Setting.
:rtype: int
"""
return self._batch_size
@batch_size.setter
def batch_size(self, batch_size):
"""Sets the batch_size of this Setting.
        Maximum number of events an individual worker thread collects from inputs before attempting to execute its Filters and Outputs. Larger values are generally more efficient but increase memory overhead. Defaults to 125.
:param batch_size: The batch_size of this Setting.
:type batch_size: int
"""
self._batch_size = batch_size
@property
def batch_delay_ms(self):
"""Gets the batch_delay_ms of this Setting.
        Minimum time each event waits to be scheduled by the pipeline, in milliseconds.
:return: The batch_delay_ms of this Setting.
:rtype: int
"""
return self._batch_delay_ms
@batch_delay_ms.setter
def batch_delay_ms(self, batch_delay_ms):
"""Sets the batch_delay_ms of this Setting.
        Minimum time each event waits to be scheduled by the pipeline, in milliseconds.
:param batch_delay_ms: The batch_delay_ms of this Setting.
:type batch_delay_ms: int
"""
self._batch_delay_ms = batch_delay_ms
@property
def queue_type(self):
"""Gets the queue_type of this Setting.
        Internal queue model used for event buffering. memory is the traditional in-memory queue; persisted is a disk-based, ACKed persistent queue. Defaults to memory.
:return: The queue_type of this Setting.
:rtype: str
"""
return self._queue_type
@queue_type.setter
def queue_type(self, queue_type):
"""Sets the queue_type of this Setting.
        Internal queue model used for event buffering. 'memory' is the traditional in-memory queue; 'persisted' is a disk-based ACKed persistent queue. Defaults to 'memory'.
:param queue_type: The queue_type of this Setting.
:type queue_type: str
"""
self._queue_type = queue_type
@property
def queue_check_point_writes(self):
"""Gets the queue_check_point_writes of this Setting.
        When the persisted queue is used, the maximum number of events written before a checkpoint is forced; defaults to 1024.
:return: The queue_check_point_writes of this Setting.
:rtype: int
"""
return self._queue_check_point_writes
@queue_check_point_writes.setter
def queue_check_point_writes(self, queue_check_point_writes):
"""Sets the queue_check_point_writes of this Setting.
        When the persisted queue is used, the maximum number of events written before a checkpoint is forced; defaults to 1024.
:param queue_check_point_writes: The queue_check_point_writes of this Setting.
:type queue_check_point_writes: int
"""
self._queue_check_point_writes = queue_check_point_writes
@property
def queue_max_bytes_mb(self):
"""Gets the queue_max_bytes_mb of this Setting.
        When the persisted queue is used, the total capacity of the persisted queue in megabytes (MB); make sure the disk capacity is larger than this value. Defaults to 1024.
:return: The queue_max_bytes_mb of this Setting.
:rtype: int
"""
return self._queue_max_bytes_mb
@queue_max_bytes_mb.setter
def queue_max_bytes_mb(self, queue_max_bytes_mb):
"""Sets the queue_max_bytes_mb of this Setting.
        When the persisted queue is used, the total capacity of the persisted queue in megabytes (MB); make sure the disk capacity is larger than this value. Defaults to 1024.
:param queue_max_bytes_mb: The queue_max_bytes_mb of this Setting.
:type queue_max_bytes_mb: int
"""
self._queue_max_bytes_mb = queue_max_bytes_mb
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Setting):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
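# Usage sketch (added for illustration; the field values are hypothetical and
# the module-level imports of the full SDK file, e.g. `six`, are assumed):
if __name__ == "__main__":
    setting = Setting(workers=4, batch_size=250, queue_type="persisted",
                      queue_max_bytes_mb=2048)
    print(setting.to_dict())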
| [
"[email protected]"
] | |
718f5c466bcb072ac392b31093e06e95e765a778 | c065ff2a6a377aea2303b7b8482558049958a7ec | /toydrill/1562239369/tactile.tac | bc08dcae3533cc9df2a88b7b7c9676aaa4f9f19f | [] | no_license | waedbara/vision2tactile | 7bc9861eecb4247fd254ea58dc508ed18a03b1af | edbc9dfee61b4a4b1f0caebb2f16faef090dff32 | refs/heads/master | 2022-04-02T20:43:16.621687 | 2019-12-11T08:07:39 | 2019-12-11T08:07:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 290 | tac | ,3504,3675,3514,3654,3568,3467,3700,3569,3528,3402,3457,3599,3046,3296,3498,3420,3527,3532,3031,2852,3668,3676,3627,3561,2031,2034,1725,2050,2640,3328,3514,3353,3403,3289,3248,3462,3366,3348,3171,3256,3488,3320,3340,3234,3339,3344,3299,2877,2753,3504,3489,3478,3352,1909,1901,3056,2093,2473 | [
"[email protected]"
] | |
8585c66c448b22e2add5a38e02bc37cc636d7286 | 395b2e9718eeb5035995130b2377c47b8df05614 | /tests/attack/test_pgd.py | 87973fe5b11c7e424ec3918db6a543beef229bab | [] | no_license | gatheluck/fourier-attack | 0a6d773e268bf1e480f04a43dcc72905af804b43 | 1668f0d2eed6182cb69904c49fe223e78cb5d0cc | refs/heads/master | 2023-03-10T05:15:10.897205 | 2021-03-01T08:19:10 | 2021-03-01T08:19:10 | 320,191,916 | 1 | 0 | null | 2021-03-01T08:19:11 | 2020-12-10T07:21:19 | Python | UTF-8 | Python | false | false | 2,035 | py | import pathlib
from typing import Final
import torch
import torchvision
import fourier_attack.attack.pgd
from fourier_attack.util import Denormalizer
class TestPgdAttack:
def test__forward(
self, pretrained_cifar10_resnet50, cifar10_stats, normalize_cifar10_loader
):
input_size: Final = 32
num_iteration: Final = 8
eps_max: Final = 16.0
step_size: Final = eps_max / num_iteration
rand_init: Final = True
scale_eps: Final = True
scale_each: Final = True
avoid_target: Final = True
norms = {"linf", "l2"}
devices = set(["cuda"]) if torch.cuda.is_available() else set()
output_root: Final = pathlib.Path("logs/test/")
output_root.mkdir(exist_ok=True, parents=True)
model = pretrained_cifar10_resnet50
criterion_func = torch.nn.functional.cross_entropy
mean, std = cifar10_stats
for norm in norms:
for device in devices:
attacker = fourier_attack.attack.pgd.PgdAttack(
input_size,
mean,
std,
num_iteration,
eps_max,
step_size,
norm,
rand_init,
scale_eps,
scale_each,
avoid_target,
criterion_func,
device,
)
for x, t in normalize_cifar10_loader:
x, t = x.to(device), t.to(device)
batch_size = x.size(0)
x_adv = attacker(model, x, t)
denormalizer = Denormalizer(input_size, mean, std, device, False)
torchvision.utils.save_image(
denormalizer(x_adv), output_root / f"forward-pgd-{norm}.png"
)
assert x_adv.size() == torch.Size([batch_size, 3, 32, 32])
break # test only first batch
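# Run sketch (added for illustration): this module is normally collected by
# pytest, and the fixtures used above are assumed to live in a conftest.py:
#   pytest tests/attack/test_pgd.py -k forward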
| [
"[email protected]"
] | |
ee47a4b6d23e2e42d141640137a6287efceccf21 | 951f4f4611e5bf2dc3970cc38aa545a54b78690b | /google/cloud/billing/budgets_v1/services/budget_service/pagers.py | 7dd43d59a473e66b6fa0df2247a29735ee27397a | [
"Apache-2.0"
] | permissive | renovate-bot/python-billingbudgets | e54771361e1e9239697e23255b00f6551a1d18b7 | 2b1e66fa19415f56e33713d57fcc516efca6d03a | refs/heads/master | 2023-06-08T01:23:04.490451 | 2021-08-18T15:42:00 | 2021-08-18T15:42:00 | 227,000,311 | 0 | 0 | Apache-2.0 | 2019-12-10T01:13:52 | 2019-12-10T01:13:51 | null | UTF-8 | Python | false | false | 5,860 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import (
Any,
AsyncIterable,
Awaitable,
Callable,
Iterable,
Sequence,
Tuple,
Optional,
)
from google.cloud.billing.budgets_v1.types import budget_model
from google.cloud.billing.budgets_v1.types import budget_service
class ListBudgetsPager:
"""A pager for iterating through ``list_budgets`` requests.
This class thinly wraps an initial
:class:`google.cloud.billing.budgets_v1.types.ListBudgetsResponse` object, and
provides an ``__iter__`` method to iterate through its
``budgets`` field.
If there are more pages, the ``__iter__`` method will make additional
``ListBudgets`` requests and continue to iterate
through the ``budgets`` field on the
corresponding responses.
All the usual :class:`google.cloud.billing.budgets_v1.types.ListBudgetsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(
self,
method: Callable[..., budget_service.ListBudgetsResponse],
request: budget_service.ListBudgetsRequest,
response: budget_service.ListBudgetsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()
):
"""Instantiate the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.billing.budgets_v1.types.ListBudgetsRequest):
The initial request object.
response (google.cloud.billing.budgets_v1.types.ListBudgetsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = budget_service.ListBudgetsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
def pages(self) -> Iterable[budget_service.ListBudgetsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request, metadata=self._metadata)
yield self._response
def __iter__(self) -> Iterable[budget_model.Budget]:
for page in self.pages:
yield from page.budgets
def __repr__(self) -> str:
return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
class ListBudgetsAsyncPager:
"""A pager for iterating through ``list_budgets`` requests.
This class thinly wraps an initial
:class:`google.cloud.billing.budgets_v1.types.ListBudgetsResponse` object, and
provides an ``__aiter__`` method to iterate through its
``budgets`` field.
If there are more pages, the ``__aiter__`` method will make additional
``ListBudgets`` requests and continue to iterate
through the ``budgets`` field on the
corresponding responses.
All the usual :class:`google.cloud.billing.budgets_v1.types.ListBudgetsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(
self,
method: Callable[..., Awaitable[budget_service.ListBudgetsResponse]],
request: budget_service.ListBudgetsRequest,
response: budget_service.ListBudgetsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()
):
"""Instantiates the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.billing.budgets_v1.types.ListBudgetsRequest):
The initial request object.
response (google.cloud.billing.budgets_v1.types.ListBudgetsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = budget_service.ListBudgetsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
async def pages(self) -> AsyncIterable[budget_service.ListBudgetsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = await self._method(self._request, metadata=self._metadata)
yield self._response
def __aiter__(self) -> AsyncIterable[budget_model.Budget]:
async def async_generator():
async for page in self.pages:
for response in page.budgets:
yield response
return async_generator()
def __repr__(self) -> str:
return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
| [
"[email protected]"
] | |
c764dffcf73e377fbeab0b1e3fe032ab8004b975 | 5b19ced6bd173baf11c4b5e9d1c08f17ca635773 | /Python/数字在排序数组中出现的次数.py | 3f6a48c6ebcbfd2edb992331e21d261b5f5d29a5 | [] | no_license | zhaoqun05/Coding-Interviews | 8efe579b6a1a6186107f599a31a9e96389df52f3 | e05c1e6390b3df49dd02571e13fb8a3822eae649 | refs/heads/master | 2022-01-08T13:30:06.542796 | 2019-06-18T14:00:55 | 2019-06-18T14:00:55 | 282,934,693 | 2 | 0 | null | 2020-07-27T15:13:54 | 2020-07-27T15:13:53 | null | UTF-8 | Python | false | false | 1,253 | py | '''
Count how many times a number appears in a sorted array.
'''
# -*- coding:utf-8 -*-
class Solution:
def GetNumberOfK(self, data, k):
        # The obvious approach is a linear scan from front to back, but interview problems almost never make sequential search the point being tested...
def getFirst(nums):
start, end = 0, len(nums) - 1
while start <= end:
mid = (start + end) // 2
                if data[mid] >= k:  # note: the two binary searches use different comparison conditions
end = mid - 1
else:
start = mid + 1
            # the pointer that goes out of bounds differs between the two helpers; return the one that stays in bounds
return start if start < len(nums) and nums[start] == k else -1
def getLast(nums):
start, end = 0, len(nums) - 1
while start <= end:
mid = (start + end) // 2
if data[mid] <= k:
start = mid + 1
else:
end = mid - 1
return end if end < len(nums) and nums[end] == k else -1
if not data: return 0
first, last = getFirst(data), getLast(data)
return last - first + 1 if first != -1 and last != -1 else 0
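if __name__ == '__main__':
    # Added usage sketch: k = 3 occupies indices 2..5 below, so the count is 4.
    print(Solution().GetNumberOfK([1, 2, 3, 3, 3, 3, 4, 5], 3))  # expected: 4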
| [
"[email protected]"
] | |
104aa7236b2381b4af5aa9d9053a24f682ac2631 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/39/usersdata/86/15445/submittedfiles/dec2bin.py | 0fccfb359d06440edc18ae24c0887321b3568f73 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 168 | py | # -*- coding: utf-8 -*-
from __future__ import division
d = int(input('d:'))
soma = 0   # accumulates the binary digits, written as a base-10 number
cont = 0   # index of the current binary digit
while d > 0:
    soma = soma + d % 2 * 10 ** cont  # place the next binary digit
    d = d // 2
    cont = cont + 1
print(soma) | [
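# Worked example (added): d = 6 gives remainders 0, 1, 1 from successive d % 2,
# accumulated as 0*1 + 1*10 + 1*100 = 110, the binary representation of 6.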
"[email protected]"
] | |
847428dbe3d202faf10a5e562519c1f606de2698 | 8dc84558f0058d90dfc4955e905dab1b22d12c08 | /third_party/catapult/telemetry/bin/run_snap_it_unittest | 288474f3a3d87e0d7a4684e5fbb8c7beafe91870 | [
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0",
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | meniossin/src | 42a95cc6c4a9c71d43d62bc4311224ca1fd61e03 | 44f73f7e76119e5ab415d4593ac66485e65d700a | refs/heads/master | 2022-12-16T20:17:03.747113 | 2020-09-03T10:43:12 | 2020-09-03T10:43:12 | 263,710,168 | 1 | 0 | BSD-3-Clause | 2020-05-13T18:20:09 | 2020-05-13T18:20:08 | null | UTF-8 | Python | false | false | 1,727 | #!/usr/bin/env python
# Copyright (c) 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
TELEMETRY_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), '..')
sys.path.append(TELEMETRY_DIR)
from telemetry.core import util
from telemetry.internal.browser import browser_finder
from telemetry.internal.browser import browser_options
from telemetry.internal.util import binary_manager
from telemetry.util import wpr_modes
_SNAP_IT_TEST_URL = 'file:///%s' % (os.path.join(
util.GetCatapultThirdPartyDir(), 'snap-it', 'tests', 'tests.html'))
def RunSnapItUnittest(finder_options):
possible_browser = browser_finder.FindBrowser(finder_options)
with possible_browser.BrowserSession(
finder_options.browser_options) as browser:
tab = browser.tabs[0]
tab.Navigate(_SNAP_IT_TEST_URL)
tab.WaitForJavaScriptCondition('window.allTestDone')
num_tests = tab.EvaluateJavaScript('window.total')
failed_tests = tab.EvaluateJavaScript('window.failedTests')
for test in failed_tests:
print "Test '%s' failed" % test
if failed_tests:
print 'Failed %s tests (out of %s tests)' % (len(failed_tests), num_tests)
return 1
else:
print 'All %s tests passed' % num_tests
return 0
def main(args):
binary_manager.InitDependencyManager([])
options = browser_options.BrowserFinderOptions()
options.browser_options.extra_browser_args.add('--headless')
parser = options.CreateParser(usage="Run snap-it's unittests")
parser.parse_args(args)
return RunSnapItUnittest(options)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| [
"[email protected]"
] | ||
77632dadb07288923599339f046b2e666610bf45 | 6679fd1102802bf190294ef43c434b6047840dc2 | /openconfig_bindings/routing_policy/policy_definitions/policy_definition/statements/statement/conditions/match_tag_set/state/__init__.py | 3afc3b819cdfb034d5fee85b9dba1b707dd4a68d | [] | no_license | robshakir/pyangbind-openconfig-napalm | d49a26fc7e38bbdb0419c7ad1fbc590b8e4b633e | 907979dc14f1578f4bbfb1c1fb80a2facf03773c | refs/heads/master | 2023-06-13T17:17:27.612248 | 2016-05-10T16:46:58 | 2016-05-10T16:46:58 | 58,091,515 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,728 | py |
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-routing-policy - based on the path /routing-policy/policy-definitions/policy-definition/statements/statement/conditions/match-tag-set/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Operational state data tag-set conditions
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_extmethods', '__tag_set','__match_set_options',)
_yang_name = 'state'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
helper = kwargs.pop("path_helper", None)
if helper is False:
self._path_helper = False
elif helper is not None and isinstance(helper, xpathhelper.YANGPathHelper):
self._path_helper = helper
elif hasattr(self, "_parent"):
helper = getattr(self._parent, "_path_helper", False)
self._path_helper = helper
else:
self._path_helper = False
self._extmethods = False
self.__match_set_options = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'INVERT': {}, u'ANY': {}},), default=unicode("ANY"), is_leaf=True, yang_name="match-set-options", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='oc-pol-types:match-set-options-restricted-type', is_config=False)
self.__tag_set = YANGDynClass(base=ReferenceType(referenced_path='/routing-policy/defined-sets/tag-sets/tag-set/tag-set-name', caller=self._path() + ['tag-set'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name="tag-set", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'routing-policy', u'policy-definitions', u'policy-definition', u'statements', u'statement', u'conditions', u'match-tag-set', u'state']
def _get_tag_set(self):
"""
Getter method for tag_set, mapped from YANG variable /routing_policy/policy_definitions/policy_definition/statements/statement/conditions/match_tag_set/state/tag_set (leafref)
YANG Description: References a defined tag set
"""
return self.__tag_set
def _set_tag_set(self, v, load=False):
"""
Setter method for tag_set, mapped from YANG variable /routing_policy/policy_definitions/policy_definition/statements/statement/conditions/match_tag_set/state/tag_set (leafref)
If this variable is read-only (config: false) in the
source YANG file, then _set_tag_set is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_tag_set() directly.
YANG Description: References a defined tag set
"""
try:
t = YANGDynClass(v,base=ReferenceType(referenced_path='/routing-policy/defined-sets/tag-sets/tag-set/tag-set-name', caller=self._path() + ['tag-set'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name="tag-set", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """tag_set must be of a type compatible with leafref""",
'defined-type': "leafref",
'generated-type': """YANGDynClass(base=ReferenceType(referenced_path='/routing-policy/defined-sets/tag-sets/tag-set/tag-set-name', caller=self._path() + ['tag-set'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name="tag-set", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=False)""",
})
self.__tag_set = t
if hasattr(self, '_set'):
self._set()
def _unset_tag_set(self):
self.__tag_set = YANGDynClass(base=ReferenceType(referenced_path='/routing-policy/defined-sets/tag-sets/tag-set/tag-set-name', caller=self._path() + ['tag-set'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name="tag-set", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=False)
def _get_match_set_options(self):
"""
Getter method for match_set_options, mapped from YANG variable /routing_policy/policy_definitions/policy_definition/statements/statement/conditions/match_tag_set/state/match_set_options (oc-pol-types:match-set-options-restricted-type)
YANG Description: Optional parameter that governs the behaviour of the
match operation. This leaf only supports matching on ANY
member of the set or inverting the match. Matching on ALL is
not supported)
"""
return self.__match_set_options
def _set_match_set_options(self, v, load=False):
"""
Setter method for match_set_options, mapped from YANG variable /routing_policy/policy_definitions/policy_definition/statements/statement/conditions/match_tag_set/state/match_set_options (oc-pol-types:match-set-options-restricted-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_match_set_options is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_match_set_options() directly.
YANG Description: Optional parameter that governs the behaviour of the
match operation. This leaf only supports matching on ANY
member of the set or inverting the match. Matching on ALL is
not supported)
"""
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'INVERT': {}, u'ANY': {}},), default=unicode("ANY"), is_leaf=True, yang_name="match-set-options", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='oc-pol-types:match-set-options-restricted-type', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """match_set_options must be of a type compatible with oc-pol-types:match-set-options-restricted-type""",
'defined-type': "oc-pol-types:match-set-options-restricted-type",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'INVERT': {}, u'ANY': {}},), default=unicode("ANY"), is_leaf=True, yang_name="match-set-options", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='oc-pol-types:match-set-options-restricted-type', is_config=False)""",
})
self.__match_set_options = t
if hasattr(self, '_set'):
self._set()
def _unset_match_set_options(self):
self.__match_set_options = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'INVERT': {}, u'ANY': {}},), default=unicode("ANY"), is_leaf=True, yang_name="match-set-options", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='oc-pol-types:match-set-options-restricted-type', is_config=False)
tag_set = property(_get_tag_set)
match_set_options = property(_get_match_set_options)
_pyangbind_elements = {'tag_set': tag_set, 'match_set_options': match_set_options, }
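# Illustrative sketch (added; in practice this class is built by its generated
# parent container rather than instantiated directly):
#   st = state()
#   st._set_match_set_options("INVERT")   # restricted to {"ANY", "INVERT"}
#   print(st.match_set_options)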
| [
"[email protected]"
] | |
4e2d44d096408a816838502d7c6b3b8ddca6a483 | 737a67744a98a536eccf5e2012628271f9120f79 | /django/integration/apps/coursesApp/urls.py | 6d242cf48c1525964f3352f9758845e37abff9d8 | [] | no_license | iota-cohort-dc/Daniel-Perez | 31d581cf6494d69404925685ca55ec9a9b97611c | 57b6a69e4f6e02f8b0694787ab195e08ad5dc52b | refs/heads/master | 2021-01-20T16:59:30.730460 | 2017-07-03T04:12:16 | 2017-07-03T04:12:16 | 82,850,732 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 443 | py | from django.conf.urls import url, include
from . import views
urlpatterns = [
    url(r'^$', views.index, name="my_index"),
    url(r'^addcourse$', views.addcourse),
    url(r'^remove/(?P<id>\d+)$', views.remove, name="remove"),
    url(r'^remove/nah$', views.nah),
    url(r'^delete/(?P<id>\d+)$', views.delete, name="delete"),
    url(r'^choose$', views.choose, name="choose"),
    url(r'^regUser$', views.regUser, name="regUser")
]
| [
"[email protected]"
] | |
e6f473220e6ba826010f02a19ed5052645008b2f | 179c9b2983ba3d4a3757f84fd55ac1356850c363 | /jinete/algorithms/heuristics/local_search/strategies/routes/one_shift.py | 67ed11e8558cde8355886ac88d94ef2aa23b27ae | [
"MIT"
] | permissive | garciparedes/jinete | 0d2fbf68a88b0ec565b8c1ed5c417f8f7cacceb0 | 6ed5687b2016aa7eb1f6499470c6ea21a9a57b8a | refs/heads/master | 2023-03-29T00:35:34.575828 | 2020-09-19T11:02:05 | 2020-09-19T11:02:05 | 150,865,909 | 9 | 2 | MIT | 2023-03-16T21:41:17 | 2018-09-29T13:17:05 | Python | UTF-8 | Python | false | false | 1,834 | py | import logging
from ......models import (
Route,
Stop,
)
from ..abc import (
LocalSearchStrategy,
)
logger = logging.getLogger(__name__)
class OneShiftLocalSearchStrategy(LocalSearchStrategy):
def _improve(self) -> None:
logger.info(f'Starting to improve "Result" with "{self.__class__.__name__}"...')
for route in self._routes:
cost = self._objective.optimization_function(route)
for i in range(1, len(route.stops) - 1):
j = i + 1
k = i + 2
first = route.stops[i]
second = route.stops[j]
third = route.stops[k] if k < len(route.stops) else None
if not set(first.pickup_planned_trips).isdisjoint(second.delivery_planned_trips):
continue
self._flip(route, first, second, third)
if not route.feasible or cost == self._objective.best(cost, route):
self._flip(route, second, first, third)
continue
cost = self._objective.optimization_function(route)
logger.info(f'Flipped "{i}"-th and "{j}"-th stops from "{route}".')
def _flip(self, route: Route, previous: Stop, other: Stop, following: Stop = None) -> None:
assert following is None or following.previous == other
assert other.previous == previous
self_index = route.stops.index(other)
other_index = route.stops.index(previous)
route.stops[self_index], route.stops[other_index] = route.stops[other_index], route.stops[self_index]
if following is not None:
following.previous = previous
other.previous = previous.previous
previous.previous = other
for stop in route.stops[self_index:]:
stop.flush()
| [
"[email protected]"
] | |
8cef7439cfc2680d9b2889fa9559a4c1184b7d58 | f508da1fd2e65491e1e6b5dd3a64d8bf6039bc6c | /eho/openstack/common/jsonutils.py | f800779f11987bc90c847efa74d36a2629bee971 | [
"Apache-2.0"
] | permissive | senhuang/eho | b9595739b109829b44e6d538b36348ac84529af8 | ca4dba1d0e7ab24d748b746e115ca6dc2191997a | refs/heads/master | 2021-01-21T00:36:50.271363 | 2013-03-19T07:55:50 | 2013-03-19T07:55:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,110 | py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
'''
JSON related utilities.
This module provides a few things:
1) A handy function for getting an object down to something that can be
JSON serialized. See to_primitive().
2) Wrappers around loads() and dumps(). The dumps() wrapper will
automatically use to_primitive() for you if needed.
3) This sets up anyjson to use the loads() and dumps() wrappers if anyjson
is available.
'''
import datetime
import functools
import inspect
import itertools
import json
import xmlrpclib
from eho.openstack.common import timeutils
def to_primitive(value, convert_instances=False, convert_datetime=True,
level=0, max_depth=3):
"""Convert a complex object into primitives.
Handy for JSON serialization. We can optionally handle instances,
but since this is a recursive function, we could have cyclical
data structures.
To handle cyclical data structures we could track the actual objects
visited in a set, but not all objects are hashable. Instead we just
track the depth of the object inspections and don't go too deep.
Therefore, convert_instances=True is lossy ... be aware.
"""
nasty = [inspect.ismodule, inspect.isclass, inspect.ismethod,
inspect.isfunction, inspect.isgeneratorfunction,
inspect.isgenerator, inspect.istraceback, inspect.isframe,
inspect.iscode, inspect.isbuiltin, inspect.isroutine,
inspect.isabstract]
for test in nasty:
if test(value):
return unicode(value)
# value of itertools.count doesn't get caught by inspects
# above and results in infinite loop when list(value) is called.
if type(value) == itertools.count:
return unicode(value)
# FIXME(vish): Workaround for LP bug 852095. Without this workaround,
# tests that raise an exception in a mocked method that
# has a @wrap_exception with a notifier will fail. If
# we up the dependency to 0.5.4 (when it is released) we
# can remove this workaround.
if getattr(value, '__module__', None) == 'mox':
return 'mock'
if level > max_depth:
return '?'
# The try block may not be necessary after the class check above,
# but just in case ...
try:
recursive = functools.partial(to_primitive,
convert_instances=convert_instances,
convert_datetime=convert_datetime,
level=level,
max_depth=max_depth)
# It's not clear why xmlrpclib created their own DateTime type, but
# for our purposes, make it a datetime type which is explicitly
# handled
if isinstance(value, xmlrpclib.DateTime):
value = datetime.datetime(*tuple(value.timetuple())[:6])
if isinstance(value, (list, tuple)):
return [recursive(v) for v in value]
elif isinstance(value, dict):
return dict((k, recursive(v)) for k, v in value.iteritems())
elif convert_datetime and isinstance(value, datetime.datetime):
return timeutils.strtime(value)
elif hasattr(value, 'iteritems'):
return recursive(dict(value.iteritems()), level=level + 1)
elif hasattr(value, '__iter__'):
return recursive(list(value))
elif convert_instances and hasattr(value, '__dict__'):
# Likely an instance of something. Watch for cycles.
# Ignore class member vars.
return recursive(value.__dict__, level=level + 1)
else:
return value
except TypeError:
# Class objects are tricky since they may define something like
# __iter__ defined but it isn't callable as list().
return unicode(value)
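# Behaviour sketch (added for illustration; the exact timestamp string comes
# from timeutils.strtime and is an assumption here):
#   to_primitive({'when': datetime.datetime(2013, 3, 19)})
#   -> {'when': '2013-03-19T00:00:00.000000'}
#   to_primitive(iter([1, 2, 3]))  -> [1, 2, 3]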
def dumps(value, default=to_primitive, **kwargs):
return json.dumps(value, default=default, **kwargs)
def loads(s):
return json.loads(s)
def load(s):
return json.load(s)
try:
import anyjson
except ImportError:
pass
else:
anyjson._modules.append((__name__, 'dumps', TypeError,
'loads', ValueError, 'load'))
anyjson.force_implementation(__name__)
| [
"[email protected]"
] | |
79ce11ae807730b501809588bdbc2b9dec1e9067 | effce116340b7d937bd285e43b49e1ef83d56156 | /data_files/662 Maximum Width of Binary Tree.py | ee04fcc76e296c46188c5e41ed7b3c9c54194e8b | [] | no_license | DL2021Spring/CourseProject | a7c7ef57d69bc1b21e3303e737abb27bee3bd585 | 108cdd906e705e9d4d05640af32d34bfc8b124da | refs/heads/master | 2023-04-11T18:52:30.562103 | 2021-05-18T09:59:59 | 2021-05-18T09:59:59 | 365,733,976 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 668 | py |
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
def widthOfBinaryTree(self, root: TreeNode) -> int:
if not root:
return 0
ret = 0
q = [(0, root)]
while q:
cur_q = []
left, right = q[0][0], q[-1][0]
ret = max(ret, right - left + 1)
for idx, node in q:
if node.left:
cur_q.append((idx * 2, node.left))
if node.right:
cur_q.append((idx * 2 + 1, node.right))
q = cur_q
return ret
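if __name__ == '__main__':
    # Added usage sketch: the last level holds nodes at positions 0, 1 and 3
    # (heap-style indexing), so the width is 3 - 0 + 1 = 4.
    root = TreeNode(1)
    root.left, root.right = TreeNode(3), TreeNode(2)
    root.left.left, root.left.right = TreeNode(5), TreeNode(3)
    root.right.right = TreeNode(9)
    print(Solution().widthOfBinaryTree(root))  # expected: 4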
| [
"[email protected]"
] | |
e94107b3d0f8b3efc736784daf10fda144d7be2a | e86f40099817f4bf16f695040ef6096bc026b4a5 | /week9/day1/daily_challenge/gif_site/manage.py | 69f2d398a2f26f2fd87b99bfa13532d95d43e9bd | [] | no_license | idosarue/DI_django | 7474e38c8b8cd668df3af5a0d88c021ada8da887 | cd3f91e51458941a5834fb65d6b8d6698770fc6c | refs/heads/main | 2023-07-16T12:10:07.549009 | 2021-09-02T17:42:30 | 2021-09-02T17:42:30 | 396,374,075 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 664 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'gif_site.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
881083827d5bbb4da11a5b9d7edec6b217bc34d4 | 033da72a51c76e5510a06be93229a547a538cf28 | /Data Engineer with Python Track/21. Cleaning Data in SQL Server Databases/Chapter/04. Combining, splitting, and transforming data/01-Combining cities and states using +.py | c42e7743cf2ea24d935eb8842b2408c0e660b1ad | [] | no_license | ikhwan1366/Datacamp | d5dcd40c1bfeb04248977014260936b1fb1d3065 | 7738614eaebec446842d89177ae2bc30ab0f2551 | refs/heads/master | 2023-03-06T13:41:06.522721 | 2021-02-17T22:41:54 | 2021-02-17T22:41:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,403 | py | '''
Combining cities and states using +
In this lesson, you learned how to combine columns into one.
The clients table has one column, city, to store the cities where the clients live, and another column, state, to store the state of the city.
| client_id | client_name | client_surname | city | state |
|-----------|-------------|----------------|-----------|----------|
| 1 | Miriam | Antona | Las Vegas | Nevada |
| 2 | Astrid | Harper | Chicago | Illinois |
| 3 | David | Madden | Phoenix | Arizona |
| ... | ... | ... | ... | ... |
You need to combine city and state columns into one, to have the following format: 'Las Vegas, Nevada'.
You will use the + operator to do it.
Instructions 1/2
50 XP
- Concatenate the names of the cities with the states using the + operator without worrying about NULL values.
'''
SELECT
client_name,
client_surname,
-- Concatenate city with state
city + ', ' + state AS city_state
FROM clients
'''
Instructions 2/2
50 XP
- Replace each instance of NULL in city and state with an ISNULL() function, so that if either column has a NULL value, an empty string '' is returned instead.
'''
SELECT
client_name,
client_surname,
-- Consider the NULL values
ISNULL(city, '') + ISNULL(', ' + state, '') AS city_state
FROM clients
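'''
Illustrative result for the sample rows above (assuming none of them is NULL):
| client_name | client_surname | city_state        |
|-------------|----------------|-------------------|
| Miriam      | Antona         | Las Vegas, Nevada |
| Astrid      | Harper         | Chicago, Illinois |
| David       | Madden         | Phoenix, Arizona  |
'''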
| [
"[email protected]"
] | |
956a4a1dda129b01c57a16b2b87b7d0254c5efd0 | d2845579ea6aa51a2e150f0ffe6ccfda85d035ce | /common/python/calculation/spark/table.py | d6895cad3dcd329649fa6bfc23376bd7b4b0091c | [
"Apache-2.0"
] | permissive | as23187/WeFe | d8de9ff626f9f3e5d98e0850b0b717a80fd73e72 | ba92871d4b1d2eef6c606c34795f4575e84703bd | refs/heads/main | 2023-08-22T12:01:06.718246 | 2021-10-28T01:54:05 | 2021-10-28T01:54:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,065 | py | # Copyright 2021 Tianmian Tech. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import uuid
from typing import Iterable
from common.python.calculation.spark import util
from common.python.common import consts
from common.python.common.consts import NAMESPACE
from common.python.table import Table
from common.python.utils import conf_utils
from common.python.utils.profile_util import log_elapsed
from common.python.utils.split import split_put, split_get
class RDDSource(Table):
# noinspection PyProtectedMember
@classmethod
def from_dsource(cls, session_id: str, dsource):
namespace = dsource._namespace
name = dsource._name
partitions = dsource._partitions
return RDDSource(session_id=session_id, namespace=namespace, name=name, partitions=partitions, dsource=dsource)
@classmethod
def from_rdd(cls, rdd, job_id: str, namespace: str, name: str):
partitions = rdd.getNumPartitions()
return RDDSource(session_id=job_id, namespace=namespace, name=name, partitions=partitions, rdd=rdd)
def __init__(self, session_id: str,
namespace: str,
name: str = None,
partitions: int = 1,
rdd=None,
dsource=None):
self._valid_param_check(rdd, dsource, namespace, partitions)
setattr(self, util.RDD_ATTR_NAME, rdd)
self._rdd = rdd
self._partitions = partitions
self._dsource = dsource
self.schema = {}
self._name = name or str(uuid.uuid1())
self._namespace = namespace
self._session_id = session_id
def get_name(self):
return self._name
def get_namespace(self):
return self._namespace
def __str__(self):
return f"{self._namespace}, {self._name}, {self._dsource}"
def __repr__(self):
return f"{self._namespace}, {self._name}, {self._dsource}"
def _tmp_table_from_rdd(self, rdd, name=None):
"""
tmp table, with namespace == job_id
"""
rdd = util.materialize(rdd)
name = name or f"{self._session_id}_{str(uuid.uuid1())}"
return RDDSource(session_id=self._session_id,
# namespace=self._namespace,
namespace=NAMESPACE.PROCESS,
name=name,
partitions=rdd.getNumPartitions(),
rdd=rdd,
dsource=None)
# self._rdd should not be pickled(spark requires all transformer/action to be invoked in driver).
def __getstate__(self):
state = dict(self.__dict__)
if "_rdd" in state:
del state["_rdd"]
return state
@staticmethod
def _valid_param_check(rdd, dtable, namespace, partitions):
assert (rdd is not None) or (dtable is not None), "params rdd and storage are both None"
assert namespace is not None, "namespace is None"
assert partitions > 0, "invalid partitions={0}".format(partitions)
def rdd(self):
if hasattr(self, "_rdd") and self._rdd is not None:
return self._rdd
if self._dsource is None:
raise AssertionError("try create rdd from None storage")
return self._rdd_from_dtable()
# noinspection PyProtectedMember,PyUnresolvedReferences
@log_elapsed
def _rdd_from_dtable(self):
storage_iterator = self._dsource.collect(use_serialize=True)
if self._dsource.count() <= 0:
storage_iterator = []
num_partition = self._dsource._partitions
        # If the configuration forces a specific number of slices, use it
num_slices = conf_utils.get_comm_config(consts.COMM_CONF_KEY_SPARK_NUM_SLICES)
num_partition = int(num_slices) if num_slices else num_partition
from pyspark import SparkContext
self._rdd = SparkContext.getOrCreate() \
.parallelize(storage_iterator, num_partition) \
.persist(util.get_storage_level())
return self._rdd
def dsource(self):
"""
rdd -> storage
"""
if self._dsource:
return self._dsource
else:
if not hasattr(self, "_rdd") or self._rdd is None:
raise AssertionError("try create dtable from None")
return self._rdd_to_dtable()
# noinspection PyUnusedLocal
@log_elapsed
def _rdd_to_dtable(self, **kwargs):
self._dsource = self.save_as(name=self._name,
namespace=self._namespace,
partition=self._partitions,
persistent=False)._dsource
return self._dsource
def get_partitions(self):
return self._partitions
@log_elapsed
def map(self, func, **kwargs):
from common.python.calculation.spark.rdd_func import _map
rtn_rdd = _map(self.rdd(), func)
return self._tmp_table_from_rdd(rtn_rdd)
@log_elapsed
def mapValues(self, func, **kwargs):
from common.python.calculation.spark.rdd_func import _map_value
rtn_rdd = _map_value(self.rdd(), func)
return self._tmp_table_from_rdd(rtn_rdd)
@log_elapsed
def mapPartitions(self, func, **kwargs):
from common.python.calculation.spark.rdd_func import _map_partitions
rtn_rdd = _map_partitions(self.rdd(), func)
return self._tmp_table_from_rdd(rtn_rdd)
@log_elapsed
def mapPartitions2(self, func, **kwargs):
return self._tmp_table_from_rdd(self.rdd().mapPartitions(func))
@log_elapsed
def mapReducePartitions(self, mapper, reducer, **kwargs):
return self._tmp_table_from_rdd(self.rdd().mapPartitions(mapper).reduceByKey(reducer))
@log_elapsed
def applyPartitions(self, func, **kwargs):
return self.mapPartitions(func)
@log_elapsed
def reduce(self, func, key_func=None, **kwargs):
if key_func is None:
return self.rdd().values().reduce(func)
return dict(self.rdd().map(lambda x: (key_func(x[0]), x[1])).reduceByKey(func).collect())
def join(self, other, func=None, **kwargs):
rdd1 = self.rdd()
rdd2 = other.rdd()
# noinspection PyUnusedLocal,PyShadowingNames
@log_elapsed
def _join(rdda, rddb, **kwargs):
from common.python.calculation.spark.rdd_func import _join
return self._tmp_table_from_rdd(_join(rdda, rddb, func))
return _join(rdd1, rdd2, **kwargs)
@log_elapsed
def glom(self, **kwargs):
from common.python.calculation.spark.rdd_func import _glom
return self._tmp_table_from_rdd(_glom(self.rdd()))
@log_elapsed
def sample(self, fraction, seed=None, **kwargs):
from common.python.calculation.spark.rdd_func import _sample
return self._tmp_table_from_rdd(_sample(self.rdd(), fraction, seed))
@log_elapsed
def subtractByKey(self, other, **kwargs):
from common.python.calculation.spark.rdd_func import _subtract_by_key
return self._tmp_table_from_rdd(_subtract_by_key(self.rdd(), other.rdd()))
@log_elapsed
def filter(self, func, **kwargs):
from common.python.calculation.spark.rdd_func import _filter
return self._tmp_table_from_rdd(_filter(self.rdd(), func))
@log_elapsed
def union(self, other, func=lambda v1, v2: v1, **kwargs):
from common.python.calculation.spark.rdd_func import _union
return self._tmp_table_from_rdd(_union(self.rdd(), other.rdd(), func))
@log_elapsed
def flatMap(self, func, **kwargs):
from common.python.calculation.spark.rdd_func import _flat_map
return self._tmp_table_from_rdd(_flat_map(self.rdd(), func))
@log_elapsed
def collect(self, min_chunk_size=0, use_serialize=True, **kwargs):
if self._dsource:
return self._dsource.collect(min_chunk_size, use_serialize)
else:
return iter(self.rdd().collect())
"""
storage api
"""
def put(self, k, v, use_serialize=True, maybe_large_value=False):
if not maybe_large_value:
rtn = self.dsource().put(k, v, use_serialize)
else:
rtn = split_put(k, v, use_serialize=use_serialize, put_call_back_func=self.dsource().put)
self._rdd = None
return rtn
@log_elapsed
def put_all(self, kv_list: Iterable, use_serialize=True, chunk_size=100000):
rtn = self.dsource().put_all(kv_list, use_serialize, chunk_size)
self._rdd = None
return rtn
def get(self, k, use_serialize=True, maybe_large_value=False):
if not maybe_large_value:
return self.dsource().get(k, use_serialize)
else:
return split_get(k=k, use_serialize=use_serialize, get_call_back_func=self.dsource().get)
def delete(self, k, use_serialize=True):
rtn = self.dsource().delete(k, use_serialize)
self._rdd = None
return rtn
def destroy(self):
if self._dsource:
self._dsource.destroy()
else:
self._rdd = None
return True
def put_if_absent(self, k, v, use_serialize=True):
rtn = self.dsource().put_if_absent(k, v, use_serialize)
self._rdd = None
return rtn
# noinspection PyPep8Naming
def take(self, n=1, keysOnly=False, use_serialize=True):
if self._dsource:
return self._dsource.take(n, keysOnly, use_serialize)
else:
rtn = self._rdd.take(n)
if keysOnly:
rtn = [pair[0] for pair in rtn]
return rtn
# noinspection PyPep8Naming
def first(self, keysOnly=False, use_serialize=True):
first = self.take(1, keysOnly, use_serialize)
return first[0] if first else None
def count(self, **kwargs):
if self._dsource:
return self._dsource.count()
else:
return self._rdd.count()
@log_elapsed
def save_as(self, name, namespace, partition=None, use_serialize=True, persistent=True, **kwargs) -> 'RDDSource':
if partition is None:
partition = self._partitions
partition = partition or self._partitions
from common.python import RuntimeInstance
persistent_engine = RuntimeInstance.SESSION.get_persistent_engine()
if self._dsource:
_dtable = self._dsource.save_as(name, namespace, partition,
use_serialize=use_serialize,
persistent_engine=persistent_engine)
return RDDSource.from_dsource(session_id=self._session_id, dsource=_dtable)
else:
from common.python.calculation.spark.rdd_func import _save_as_func
return _save_as_func(self._rdd, name=name, namespace=namespace, partition=partition, persistent=persistent)
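# Illustrative sketch (added; assumes an initialized session so that
# SparkContext.getOrCreate() and the storage backend are available, and an
# existing pyspark RDD named `rdd`):
#   src = RDDSource.from_rdd(rdd, job_id="job-1", namespace="wefe", name="t1")
#   doubled = src.mapValues(lambda v: v * 2)   # wraps the transformed RDD
#   print(doubled.count())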
| [
"[email protected]"
] | |
65f649c9c12a0a4648e29cef731ddb6c40fbe6ef | 100193a599cd9961356b2c2ee13c734e467b9713 | /cc/apps/coder/migrations/0003_auto__add_participant.py | e0a754b87b3635d299c77eea42dd6f992853ed21 | [
"MIT"
] | permissive | mavroskardia/codechallenge | bd3678003d933b834eddc1d36dda74e53b5afa52 | a5fee4ba73be186d90daafca50819a6817ad3d27 | refs/heads/master | 2016-09-09T17:16:57.818465 | 2015-01-13T14:45:00 | 2015-01-13T14:45:00 | 15,058,542 | 0 | 0 | null | 2014-03-21T21:25:37 | 2013-12-09T20:42:54 | Python | UTF-8 | Python | false | false | 6,721 | py | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Participant'
db.create_table('coder_participant', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('coder', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['coder.Coder'])),
('challenge', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['challenge.Challenge'])),
('date_joined', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 3, 10, 0, 0))),
))
db.send_create_signal('coder', ['Participant'])
# Removing M2M table for field challenges on 'Coder'
db.delete_table(db.shorten_name('coder_coder_challenges'))
def backwards(self, orm):
# Deleting model 'Participant'
db.delete_table('coder_participant')
# Adding M2M table for field challenges on 'Coder'
m2m_table_name = db.shorten_name('coder_coder_challenges')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('coder', models.ForeignKey(orm['coder.coder'], null=False)),
('challenge', models.ForeignKey(orm['challenge.challenge'], null=False))
))
db.create_unique(m2m_table_name, ['coder_id', 'challenge_id'])
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'to': "orm['auth.Permission']", 'symmetrical': 'False'})
},
'auth.permission': {
'Meta': {'object_name': 'Permission', 'unique_together': "(('content_type', 'codename'),)", 'ordering': "('content_type__app_label', 'content_type__model', 'codename')"},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'blank': 'True', 'max_length': '75'}),
'first_name': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '30'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'to': "orm['auth.Group']", 'symmetrical': 'False', 'related_name': "'user_set'"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '30'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'to': "orm['auth.Permission']", 'symmetrical': 'False', 'related_name': "'user_set'"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'challenge.challenge': {
'Meta': {'object_name': 'Challenge'},
'duration': ('django.db.models.fields.IntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['coder.Coder']"})
},
'coder.coder': {
'Meta': {'object_name': 'Coder'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '256'}),
'tagline': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '1024'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'unique': 'True', 'to': "orm['auth.User']"}),
'xp': ('django.db.models.fields.BigIntegerField', [], {'default': '0'})
},
'coder.level': {
'Meta': {'object_name': 'Level'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'starting_xp': ('django.db.models.fields.BigIntegerField', [], {})
},
'coder.participant': {
'Meta': {'object_name': 'Participant'},
'challenge': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['challenge.Challenge']"}),
'coder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['coder.Coder']"}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 3, 10, 0, 0)'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'db_table': "'django_content_type'", 'object_name': 'ContentType', 'unique_together': "(('app_label', 'model'),)", 'ordering': "('name',)"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['coder'] | [
"[email protected]"
] | |
f6de5f780c60294f59c4fd49a6ee574b9a0d8d34 | 1316cd6763e784811c769c1de577235c921af0de | /Apps/AlignOnBPMs/SAMPL/sourceCode/SAMPLcore/Components/ComponentBase.py | f2ed9848ab5bc4adadc7b8a0aa16629bf0f7f015 | [] | no_license | VELA-CLARA-software/Software | a6fb6b848584e5893fd6939a447d23134ce636cc | 2e2a88ac0b2b03a495c868d2e11e6481e05097c3 | refs/heads/master | 2023-02-05T07:40:58.260798 | 2023-01-27T09:39:09 | 2023-01-27T09:39:09 | 69,860,536 | 7 | 3 | null | 2021-04-07T14:17:07 | 2016-10-03T10:20:46 | Mathematica | UTF-8 | Python | false | false | 565 | py | # SAM to Python Conversion
# DJS August 2017
# Version 0.1
#
from ..SAMPLlab import Beam
class ComponentBase(object):
def __init__(self, length=0, name="", aperture=[]):
#super(ComponentBase, self).__init__(**kwargs)
# device length, in meters
self.length = length
# device name, string
self.name = name
# 1x2 array of elliptical aperture half-axes, in metres
self.aperture = aperture
        # Each component stores the last beam that was tracked (TP added)
self.lastTrackedBeam = Beam.Beam()
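# Usage sketch (added; the field values are illustrative only):
#   base = ComponentBase(length=0.5, name="drift1", aperture=[0.02, 0.01])
#   beam = base.lastTrackedBeam   # a fresh Beam until the component tracks one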
| [
"[email protected]"
] | |
9b2e55341c9f7148c5dfe553e2bec953871d0db2 | 377fc6e13101a2a45826cd118110c790f396a805 | /utpc2014-a.py | 9ac43857b565674f15f63bbc23a3181047f353ff | [] | no_license | number09/atcoder | 4076e7223f424b9923754e73992d6442e0bb0de7 | f521ca1205b254d99744abaf6a7a5bfe69845fe0 | refs/heads/master | 2021-06-04T23:16:39.021645 | 2021-01-19T08:30:39 | 2021-01-19T08:30:39 | 132,128,927 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 405 | py | li_w = input().split()
li_answer = list()
flag = False
for w in reversed(li_w):
if w != 'not':
flag = True
li_answer.append(w)
else:
if flag == False:
li_answer.append(w)
else:
if li_answer[-1] == 'not':
li_answer = li_answer[:-1]
else:
li_answer.append(w)
print(' '.join(reversed(li_answer)))
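# Worked example (added): for the input "is not not a pen", the two adjacent
# "not"s cancel while scanning from the right, and the output is "is a pen".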
| [
"[email protected]"
] | |
f98658569da2852dc39597141a14f473e098e230 | d51b4c766661af65b4ee6e7c30f8cb4bdd8603e3 | /python/algorithm/leetcode/91.py | 514aea9145662dfce4819b437de33ec85483955a | [] | no_license | yanxurui/keepcoding | 3e988c76b123d55b32cf7cc35fbffb12c4ccb095 | d6b9f07e2d1437681fa77fee0687ea9b83cab135 | refs/heads/master | 2021-01-24T09:01:41.306597 | 2020-05-21T05:36:04 | 2020-05-21T05:36:04 | 93,400,267 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,072 | py | # https://leetcode.com/problems/decode-ways/discuss/30358/Java-clean-DP-solution-with-explanation
class Solution(object):
def numDecodings(self, s):
"""
:type s: str
:rtype: int
"""
if len(s) == 0:
return 0
table = [0] * (len(s)+1)
table[0] = 1
table[1] = 0 if s[0] == '0' else 1
for i in range(2, len(s)+1):
if int(s[i-1:i]) >= 1 and int(s[i-1:i]) <= 9:
table[i] += table[i-1]
if int(s[i-2:i]) >= 10 and int(s[i-2:i]) <= 26:
table[i] += table[i-2]
return table[len(s)]
if __name__ == '__main__':
from testfunc import test
test_data = [
(
"12",
2
),
(
"226",
3
),
(
'0',
0
),
(
'10',
1
),
(
'00',
0
),
(
'01',
0
)
]
test(Solution().numDecodings, test_data)
| [
"[email protected]"
] | |
0812527774fef2d427c2e1e56a7966441f10632c | 847273de4b1d814fab8b19dc651c651c2d342ede | /.history/Sudoku_II_005_20180620140617.py | 2ded808b8af91e0513d070dc92160645c14e005e | [] | no_license | Los4U/sudoku_in_python | 0ba55850afcffeac4170321651620f3c89448b45 | 7d470604962a43da3fc3e5edce6f718076197d32 | refs/heads/master | 2020-03-22T08:10:13.939424 | 2018-07-04T17:21:13 | 2018-07-04T17:21:13 | 139,749,483 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,433 | py | from random import randint
sudoku1 = [
[5, 9, 8, 6, 1, 2, 3, 4, 7],
[2, 1, 7, 9, 3, 4, 8, 6, 5],
[6, 4, 3, 5, 8, 7, 1, 2, 9],
[1, 6, 5, 4, 9, 8, 2, 7, 3],
[3, 2, 9, 7, 6, 5, 4, 1, 8],
[7, 8, 4, 3, 2, 1, 5, 9, 6],
[8, 3, 1, 2, 7, 6, 9, 5, 4],
[4, 7, 2, 8, 5, 9, 6, 3, 1],
[9, 5, 6, 1, 4, 3, 7, 8, " "]
]
sudoku2 = [
[9, 8, 7, 4, 3, 2, 5, 6, 1],
[2, 4, 3, 5, 1, 6, 8, 7, 9],
[5, 6, 1, 7, 9, 8, 4, 3, 2],
[3, 9, 5, 6, 4, 7, 2, 1, 8],
[8, 2, 4, 3, 5, 1, 6, 9, 7],
[1, 7, 6, 2, 8, 9, 3, 4, 5],
[7, 1, 2, 8, 6, 3, 9, 5, 4],
[4, 3, 8, 9, 7, 5, 1, 2, 6],
[' ', 5, ' ', ' ', 2, ' ', 7, ' ', ' ']
]
sudoku3 = [
[9, 8, 7, 4, 3, 2, 5, 6, 1],
[2, 4, 3, 5, 1, 6, 8, 7, 9],
[5, 6, 1, 7, 9, 8, 4, 3, 2],
[3, 9, 5, 6, 4, 7, 2, 1, 8],
[8, 2, 4, 3, 5, 1, 6, 9, 7],
[1, 7, 6, 2, 8, 9, 3, 4, 5],
[7, 1, 2, 8, 6, 3, 9, 5, 4],
[4, 3, 8, 9, 7, 5, 1, 2, 6],
[' ', 5, ' ', ' ', 2, ' ', 7, ' ', ' ']
]
def printSudoku():
i = 0
while i < 10:
if i == 0:
print(" 1 2 3 4 5 6 7 8 9")
print(" -------------------------")
elif i == 3 or i == 6 or i == 9:
print(" -------------------------")
line = "|"
if i < 9:
print('{2} {1} {0[0]} {0[1]} {0[2]} {1} {0[3]} {0[4]} {0[5]} {1} {0[6]} {0[7]} {0[8]} {1}'.format(sudoku[i], line, i+1))
i = i + 1
print(" ")
print(" %@@@@@@@ @@@ @@@ (@@@@@@@@@ ,@@@@2@@@@@ @@@, /@@@/ @@@, @@@ ")
print(" @@@* @@@ @@@ (@@( /@@@# .@@@% (@@@ @@@, @@@% @@@, @@@. ")
print(" @@@& @@@ @@@ (@@( @@@* @@@% #@@% @@@,.@@@. @@@, @@@. ")
print(" ,@@@@@@* @@@ @@@ (@@( (@@% .@@@* ,@@@ @@@%@@% @@@, @@@. ")
print(" /@@@@@# @@@ @@@ (@@( (@@% .@@@* ,@@@ @@@,@@@( @@@, @@@. ")
print(" *@@@. @@@ .@@& (@@( @@@. @@@% &@@( @@@, &@@@. @@@* .@@@. ")
print(" &, &@@@ #@@@. ,@@@, (@@( ,&@@@* ,@@@& .@@@@ @@@, (@@@/ #@@@* @@@# ")
print(",@@@@@@@@( (@@@@@@@@% (@@@@@@@@@( #@@@@@@@@@, @@@, ,@@@% ,@@@@@@@@@. \n ")
print("To start game input:")
print(" r - to load random puzzle:")
print(" 1 - to load chart nr 1:")
print(" 2 - to load chart nr 2:")
print(" 3 - to load chart nr 3:")
choice = input("Input here: ")
if choice == "R" or choice == "r":
listaSudoku = [sudoku1, sudoku2, sudoku3]
sudoku_number = randint(0, 2)
print("dupa", sudoku_number)
sudoku = listaSudoku[sudoku_number]
#print("ktore = ", sudoku)
elif int(choice) == 1:
s = 1
sudoku = sudoku
elif int(choice) == 2:
sudoku = sudoku2
elif int(choice) == 3:
sudoku = sudoku3
while True: # prints Sudoku until is solved
print("Your sudoku to solve:")
printSudoku()
print("Input 3 numbers in format a b c, np. 4 5 8")
print(" a - row number")
print(" b - column number ")
print(" c - value")
    # print(" r - reset chart to start\n ")
x = input("Input a b c: ")
print("")
numbers = " 0123456789" # conditions of entering the numbers !
if (len(x) != 5) or (str(x[0]) not in numbers) or (str(x[2]) not in numbers) or (
str(x[4]) not in numbers) or (str(x[1]) != " ") or (str(x[3]) != " "):
if x == "r": # reset
# sudoku =
print(" Function reset() will be ready in Next Week")
else:
print("Error - wrong number format \n ")
continue
sudoku[int(x[0])-1][int(x[2])-1] = int(x[4])
column1 = 0
column2 = 0
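    # Win check: the board counts as solved when every row and every column
    # sums to 45 (1+2+...+9); note this test is necessary but not sufficient
    # for a true Sudoku solution.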
try:
i = 0
list = []
while i < 9:
column = 0
for item in sudoku:
column = column + item[i]
list.append(column)
            # print(list)
            # print("Column", i, "sum =", column)
i += 1
is45 = 0
for listElement in list:
if listElement == 45:
is45 = is45 + 1
# print("Ile kolumen OK", is45)
i = 0
for item in sudoku:
if sum(item) == 45 and is45 == 9:
i = i + 1
if i == 9:
printSudoku()
print("@@@@@@@@@@ YOU WIN @@@@@@@@@@")
break
except TypeError:
print()
| [
"[email protected]"
] | |
24c90e773275e7f451a54c1d751508a4e0f170da | 6fa701cdaa0d83caa0d3cbffe39b40e54bf3d386 | /google/cloud/securitycenter/v1/securitycenter-v1-py/google/cloud/securitycenter_v1/services/security_center/pagers.py | bef157b5d55df896bbcd1981ea658c13a78e2b7b | [
"Apache-2.0"
] | permissive | oltoco/googleapis-gen | bf40cfad61b4217aca07068bd4922a86e3bbd2d5 | 00ca50bdde80906d6f62314ef4f7630b8cdb6e15 | refs/heads/master | 2023-07-17T22:11:47.848185 | 2021-08-29T20:39:47 | 2021-08-29T20:39:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 32,304 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional
from google.cloud.securitycenter_v1.types import notification_config
from google.cloud.securitycenter_v1.types import securitycenter_service
from google.cloud.securitycenter_v1.types import source
class GroupAssetsPager:
"""A pager for iterating through ``group_assets`` requests.
This class thinly wraps an initial
:class:`google.cloud.securitycenter_v1.types.GroupAssetsResponse` object, and
provides an ``__iter__`` method to iterate through its
``group_by_results`` field.
If there are more pages, the ``__iter__`` method will make additional
``GroupAssets`` requests and continue to iterate
through the ``group_by_results`` field on the
corresponding responses.
All the usual :class:`google.cloud.securitycenter_v1.types.GroupAssetsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., securitycenter_service.GroupAssetsResponse],
request: securitycenter_service.GroupAssetsRequest,
response: securitycenter_service.GroupAssetsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiate the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.securitycenter_v1.types.GroupAssetsRequest):
The initial request object.
response (google.cloud.securitycenter_v1.types.GroupAssetsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = securitycenter_service.GroupAssetsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
def pages(self) -> Iterable[securitycenter_service.GroupAssetsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request, metadata=self._metadata)
yield self._response
def __iter__(self) -> Iterable[securitycenter_service.GroupResult]:
for page in self.pages:
yield from page.group_by_results
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
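# Illustrative usage sketch (not part of the generated file; assumes a
# configured SecurityCenterClient whose group_assets() returns this pager):
#
#   pager = client.group_assets(request=request)
#   for group_result in pager:   # iterates the group_by_results fields
#       print(group_result)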
class GroupAssetsAsyncPager:
"""A pager for iterating through ``group_assets`` requests.
This class thinly wraps an initial
:class:`google.cloud.securitycenter_v1.types.GroupAssetsResponse` object, and
provides an ``__aiter__`` method to iterate through its
``group_by_results`` field.
If there are more pages, the ``__aiter__`` method will make additional
``GroupAssets`` requests and continue to iterate
through the ``group_by_results`` field on the
corresponding responses.
All the usual :class:`google.cloud.securitycenter_v1.types.GroupAssetsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., Awaitable[securitycenter_service.GroupAssetsResponse]],
request: securitycenter_service.GroupAssetsRequest,
response: securitycenter_service.GroupAssetsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiates the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.securitycenter_v1.types.GroupAssetsRequest):
The initial request object.
response (google.cloud.securitycenter_v1.types.GroupAssetsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = securitycenter_service.GroupAssetsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
async def pages(self) -> AsyncIterable[securitycenter_service.GroupAssetsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = await self._method(self._request, metadata=self._metadata)
yield self._response
def __aiter__(self) -> AsyncIterable[securitycenter_service.GroupResult]:
async def async_generator():
async for page in self.pages:
for response in page.group_by_results:
yield response
return async_generator()
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
class GroupFindingsPager:
"""A pager for iterating through ``group_findings`` requests.
This class thinly wraps an initial
:class:`google.cloud.securitycenter_v1.types.GroupFindingsResponse` object, and
provides an ``__iter__`` method to iterate through its
``group_by_results`` field.
If there are more pages, the ``__iter__`` method will make additional
``GroupFindings`` requests and continue to iterate
through the ``group_by_results`` field on the
corresponding responses.
All the usual :class:`google.cloud.securitycenter_v1.types.GroupFindingsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., securitycenter_service.GroupFindingsResponse],
request: securitycenter_service.GroupFindingsRequest,
response: securitycenter_service.GroupFindingsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiate the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.securitycenter_v1.types.GroupFindingsRequest):
The initial request object.
response (google.cloud.securitycenter_v1.types.GroupFindingsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = securitycenter_service.GroupFindingsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
def pages(self) -> Iterable[securitycenter_service.GroupFindingsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request, metadata=self._metadata)
yield self._response
def __iter__(self) -> Iterable[securitycenter_service.GroupResult]:
for page in self.pages:
yield from page.group_by_results
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
class GroupFindingsAsyncPager:
"""A pager for iterating through ``group_findings`` requests.
This class thinly wraps an initial
:class:`google.cloud.securitycenter_v1.types.GroupFindingsResponse` object, and
provides an ``__aiter__`` method to iterate through its
``group_by_results`` field.
If there are more pages, the ``__aiter__`` method will make additional
``GroupFindings`` requests and continue to iterate
through the ``group_by_results`` field on the
corresponding responses.
All the usual :class:`google.cloud.securitycenter_v1.types.GroupFindingsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., Awaitable[securitycenter_service.GroupFindingsResponse]],
request: securitycenter_service.GroupFindingsRequest,
response: securitycenter_service.GroupFindingsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiates the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.securitycenter_v1.types.GroupFindingsRequest):
The initial request object.
response (google.cloud.securitycenter_v1.types.GroupFindingsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = securitycenter_service.GroupFindingsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
async def pages(self) -> AsyncIterable[securitycenter_service.GroupFindingsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = await self._method(self._request, metadata=self._metadata)
yield self._response
def __aiter__(self) -> AsyncIterable[securitycenter_service.GroupResult]:
async def async_generator():
async for page in self.pages:
for response in page.group_by_results:
yield response
return async_generator()
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
class ListAssetsPager:
"""A pager for iterating through ``list_assets`` requests.
This class thinly wraps an initial
:class:`google.cloud.securitycenter_v1.types.ListAssetsResponse` object, and
provides an ``__iter__`` method to iterate through its
``list_assets_results`` field.
If there are more pages, the ``__iter__`` method will make additional
``ListAssets`` requests and continue to iterate
through the ``list_assets_results`` field on the
corresponding responses.
All the usual :class:`google.cloud.securitycenter_v1.types.ListAssetsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., securitycenter_service.ListAssetsResponse],
request: securitycenter_service.ListAssetsRequest,
response: securitycenter_service.ListAssetsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiate the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.securitycenter_v1.types.ListAssetsRequest):
The initial request object.
response (google.cloud.securitycenter_v1.types.ListAssetsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = securitycenter_service.ListAssetsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
def pages(self) -> Iterable[securitycenter_service.ListAssetsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request, metadata=self._metadata)
yield self._response
def __iter__(self) -> Iterable[securitycenter_service.ListAssetsResponse.ListAssetsResult]:
for page in self.pages:
yield from page.list_assets_results
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
class ListAssetsAsyncPager:
"""A pager for iterating through ``list_assets`` requests.
This class thinly wraps an initial
:class:`google.cloud.securitycenter_v1.types.ListAssetsResponse` object, and
provides an ``__aiter__`` method to iterate through its
``list_assets_results`` field.
If there are more pages, the ``__aiter__`` method will make additional
``ListAssets`` requests and continue to iterate
through the ``list_assets_results`` field on the
corresponding responses.
All the usual :class:`google.cloud.securitycenter_v1.types.ListAssetsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., Awaitable[securitycenter_service.ListAssetsResponse]],
request: securitycenter_service.ListAssetsRequest,
response: securitycenter_service.ListAssetsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiates the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.securitycenter_v1.types.ListAssetsRequest):
The initial request object.
response (google.cloud.securitycenter_v1.types.ListAssetsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = securitycenter_service.ListAssetsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
async def pages(self) -> AsyncIterable[securitycenter_service.ListAssetsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = await self._method(self._request, metadata=self._metadata)
yield self._response
def __aiter__(self) -> AsyncIterable[securitycenter_service.ListAssetsResponse.ListAssetsResult]:
async def async_generator():
async for page in self.pages:
for response in page.list_assets_results:
yield response
return async_generator()
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
class ListFindingsPager:
"""A pager for iterating through ``list_findings`` requests.
This class thinly wraps an initial
:class:`google.cloud.securitycenter_v1.types.ListFindingsResponse` object, and
provides an ``__iter__`` method to iterate through its
``list_findings_results`` field.
If there are more pages, the ``__iter__`` method will make additional
``ListFindings`` requests and continue to iterate
through the ``list_findings_results`` field on the
corresponding responses.
All the usual :class:`google.cloud.securitycenter_v1.types.ListFindingsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., securitycenter_service.ListFindingsResponse],
request: securitycenter_service.ListFindingsRequest,
response: securitycenter_service.ListFindingsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiate the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.securitycenter_v1.types.ListFindingsRequest):
The initial request object.
response (google.cloud.securitycenter_v1.types.ListFindingsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = securitycenter_service.ListFindingsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
def pages(self) -> Iterable[securitycenter_service.ListFindingsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request, metadata=self._metadata)
yield self._response
def __iter__(self) -> Iterable[securitycenter_service.ListFindingsResponse.ListFindingsResult]:
for page in self.pages:
yield from page.list_findings_results
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
class ListFindingsAsyncPager:
"""A pager for iterating through ``list_findings`` requests.
This class thinly wraps an initial
:class:`google.cloud.securitycenter_v1.types.ListFindingsResponse` object, and
provides an ``__aiter__`` method to iterate through its
``list_findings_results`` field.
If there are more pages, the ``__aiter__`` method will make additional
``ListFindings`` requests and continue to iterate
through the ``list_findings_results`` field on the
corresponding responses.
All the usual :class:`google.cloud.securitycenter_v1.types.ListFindingsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., Awaitable[securitycenter_service.ListFindingsResponse]],
request: securitycenter_service.ListFindingsRequest,
response: securitycenter_service.ListFindingsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiates the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.securitycenter_v1.types.ListFindingsRequest):
The initial request object.
response (google.cloud.securitycenter_v1.types.ListFindingsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = securitycenter_service.ListFindingsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
async def pages(self) -> AsyncIterable[securitycenter_service.ListFindingsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = await self._method(self._request, metadata=self._metadata)
yield self._response
def __aiter__(self) -> AsyncIterable[securitycenter_service.ListFindingsResponse.ListFindingsResult]:
async def async_generator():
async for page in self.pages:
for response in page.list_findings_results:
yield response
return async_generator()
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
class ListNotificationConfigsPager:
"""A pager for iterating through ``list_notification_configs`` requests.
This class thinly wraps an initial
:class:`google.cloud.securitycenter_v1.types.ListNotificationConfigsResponse` object, and
provides an ``__iter__`` method to iterate through its
``notification_configs`` field.
If there are more pages, the ``__iter__`` method will make additional
``ListNotificationConfigs`` requests and continue to iterate
through the ``notification_configs`` field on the
corresponding responses.
All the usual :class:`google.cloud.securitycenter_v1.types.ListNotificationConfigsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., securitycenter_service.ListNotificationConfigsResponse],
request: securitycenter_service.ListNotificationConfigsRequest,
response: securitycenter_service.ListNotificationConfigsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiate the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.securitycenter_v1.types.ListNotificationConfigsRequest):
The initial request object.
response (google.cloud.securitycenter_v1.types.ListNotificationConfigsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = securitycenter_service.ListNotificationConfigsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
def pages(self) -> Iterable[securitycenter_service.ListNotificationConfigsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request, metadata=self._metadata)
yield self._response
def __iter__(self) -> Iterable[notification_config.NotificationConfig]:
for page in self.pages:
yield from page.notification_configs
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
class ListNotificationConfigsAsyncPager:
"""A pager for iterating through ``list_notification_configs`` requests.
This class thinly wraps an initial
:class:`google.cloud.securitycenter_v1.types.ListNotificationConfigsResponse` object, and
provides an ``__aiter__`` method to iterate through its
``notification_configs`` field.
If there are more pages, the ``__aiter__`` method will make additional
``ListNotificationConfigs`` requests and continue to iterate
through the ``notification_configs`` field on the
corresponding responses.
All the usual :class:`google.cloud.securitycenter_v1.types.ListNotificationConfigsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., Awaitable[securitycenter_service.ListNotificationConfigsResponse]],
request: securitycenter_service.ListNotificationConfigsRequest,
response: securitycenter_service.ListNotificationConfigsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiates the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.securitycenter_v1.types.ListNotificationConfigsRequest):
The initial request object.
response (google.cloud.securitycenter_v1.types.ListNotificationConfigsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = securitycenter_service.ListNotificationConfigsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
async def pages(self) -> AsyncIterable[securitycenter_service.ListNotificationConfigsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = await self._method(self._request, metadata=self._metadata)
yield self._response
def __aiter__(self) -> AsyncIterable[notification_config.NotificationConfig]:
async def async_generator():
async for page in self.pages:
for response in page.notification_configs:
yield response
return async_generator()
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
class ListSourcesPager:
"""A pager for iterating through ``list_sources`` requests.
This class thinly wraps an initial
:class:`google.cloud.securitycenter_v1.types.ListSourcesResponse` object, and
provides an ``__iter__`` method to iterate through its
``sources`` field.
If there are more pages, the ``__iter__`` method will make additional
``ListSources`` requests and continue to iterate
through the ``sources`` field on the
corresponding responses.
All the usual :class:`google.cloud.securitycenter_v1.types.ListSourcesResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., securitycenter_service.ListSourcesResponse],
request: securitycenter_service.ListSourcesRequest,
response: securitycenter_service.ListSourcesResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiate the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.securitycenter_v1.types.ListSourcesRequest):
The initial request object.
response (google.cloud.securitycenter_v1.types.ListSourcesResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = securitycenter_service.ListSourcesRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
def pages(self) -> Iterable[securitycenter_service.ListSourcesResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request, metadata=self._metadata)
yield self._response
def __iter__(self) -> Iterable[source.Source]:
for page in self.pages:
yield from page.sources
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
class ListSourcesAsyncPager:
"""A pager for iterating through ``list_sources`` requests.
This class thinly wraps an initial
:class:`google.cloud.securitycenter_v1.types.ListSourcesResponse` object, and
provides an ``__aiter__`` method to iterate through its
``sources`` field.
If there are more pages, the ``__aiter__`` method will make additional
``ListSources`` requests and continue to iterate
through the ``sources`` field on the
corresponding responses.
All the usual :class:`google.cloud.securitycenter_v1.types.ListSourcesResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., Awaitable[securitycenter_service.ListSourcesResponse]],
request: securitycenter_service.ListSourcesRequest,
response: securitycenter_service.ListSourcesResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiates the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.securitycenter_v1.types.ListSourcesRequest):
The initial request object.
response (google.cloud.securitycenter_v1.types.ListSourcesResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = securitycenter_service.ListSourcesRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
async def pages(self) -> AsyncIterable[securitycenter_service.ListSourcesResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = await self._method(self._request, metadata=self._metadata)
yield self._response
def __aiter__(self) -> AsyncIterable[source.Source]:
async def async_generator():
async for page in self.pages:
for response in page.sources:
yield response
return async_generator()
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
| [
"bazel-bot-development[bot]@users.noreply.github.com"
] | bazel-bot-development[bot]@users.noreply.github.com |
8e4afcc449f15b7d6b73cbcd4fc8e9b213912c94 | bc441bb06b8948288f110af63feda4e798f30225 | /container_sdk/api/workload/get_summary_pb2.pyi | 14f7cdbe1ae990f7ac049e645c1ccfa3bde880a7 | [
"Apache-2.0"
] | permissive | easyopsapis/easyops-api-python | 23204f8846a332c30f5f3ff627bf220940137b6b | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | refs/heads/master | 2020-06-26T23:38:27.308803 | 2020-06-16T07:25:41 | 2020-06-16T07:25:41 | 199,773,131 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,748 | pyi | # @generated by generate_proto_mypy_stubs.py. Do not edit!
import sys
from container_sdk.model.container.pod_detail_pb2 import (
PodDetail as container_sdk___model___container___pod_detail_pb2___PodDetail,
)
from container_sdk.model.container.workload_pb2 import (
Workload as container_sdk___model___container___workload_pb2___Workload,
)
from google.protobuf.descriptor import (
Descriptor as google___protobuf___descriptor___Descriptor,
)
from google.protobuf.internal.containers import (
RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer,
)
from google.protobuf.message import (
Message as google___protobuf___message___Message,
)
from typing import (
Iterable as typing___Iterable,
Optional as typing___Optional,
Text as typing___Text,
Union as typing___Union,
)
from typing_extensions import (
Literal as typing_extensions___Literal,
)
builtin___bool = bool
builtin___bytes = bytes
builtin___float = float
builtin___int = int
if sys.version_info < (3,):
builtin___buffer = buffer
builtin___unicode = unicode
class GetSummaryRequest(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
instanceId = ... # type: typing___Text
def __init__(self,
*,
instanceId : typing___Optional[typing___Text] = None,
) -> None: ...
if sys.version_info >= (3,):
@classmethod
def FromString(cls, s: builtin___bytes) -> GetSummaryRequest: ...
else:
@classmethod
def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> GetSummaryRequest: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def ClearField(self, field_name: typing_extensions___Literal[u"instanceId",b"instanceId"]) -> None: ...
class GetSummaryResponse(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
@property
def workload(self) -> container_sdk___model___container___workload_pb2___Workload: ...
@property
def pods(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[container_sdk___model___container___pod_detail_pb2___PodDetail]: ...
def __init__(self,
*,
workload : typing___Optional[container_sdk___model___container___workload_pb2___Workload] = None,
pods : typing___Optional[typing___Iterable[container_sdk___model___container___pod_detail_pb2___PodDetail]] = None,
) -> None: ...
if sys.version_info >= (3,):
@classmethod
def FromString(cls, s: builtin___bytes) -> GetSummaryResponse: ...
else:
@classmethod
def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> GetSummaryResponse: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def HasField(self, field_name: typing_extensions___Literal[u"workload",b"workload"]) -> builtin___bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"pods",b"pods",u"workload",b"workload"]) -> None: ...
class GetSummaryResponseWrapper(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
code = ... # type: builtin___int
codeExplain = ... # type: typing___Text
error = ... # type: typing___Text
@property
def data(self) -> GetSummaryResponse: ...
def __init__(self,
*,
code : typing___Optional[builtin___int] = None,
codeExplain : typing___Optional[typing___Text] = None,
error : typing___Optional[typing___Text] = None,
data : typing___Optional[GetSummaryResponse] = None,
) -> None: ...
if sys.version_info >= (3,):
@classmethod
def FromString(cls, s: builtin___bytes) -> GetSummaryResponseWrapper: ...
else:
@classmethod
def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> GetSummaryResponseWrapper: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def HasField(self, field_name: typing_extensions___Literal[u"data",b"data"]) -> builtin___bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"code",b"code",u"codeExplain",b"codeExplain",u"data",b"data",u"error",b"error"]) -> None: ...
| [
"[email protected]"
] | |
fce03c8e1456ccbcd305145c27e222f513c4d844 | c2f92d75d235ff5ed7b213c02c4a0657545ba02f | /oliveapp/home/urls.py | ab2bdd20079d2d83da9374cda7ebebccbcb65b68 | [] | no_license | cash2one/tstpthon | fab6112691eb15a8a26bd168af3f179913e0c4e0 | fc5c42c024065c7b42bea2b9de1e3874a794a30d | refs/heads/master | 2021-01-20T01:52:06.519021 | 2017-04-14T09:50:55 | 2017-04-14T09:50:55 | 89,338,193 | 0 | 1 | null | 2017-04-25T08:46:06 | 2017-04-25T08:46:06 | null | UTF-8 | Python | false | false | 715 | py | from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from home import views
import os
css_media = os.path.join(
os.path.dirname(__file__),'templates/css/'
)
images_media = os.path.join(
os.path.dirname(__file__),'templates/images/'
)
js_media = os.path.join(
os.path.dirname(__file__),'templates/js/'
)
urlpatterns = patterns('',
url(r'^$', views.index, name='home'),
url(r'^images/(?P<path>.*)$','django.views.static.serve',{'document_root': images_media }),
url(r'^css/(?P<path>.*)$','django.views.static.serve',{'document_root': css_media }),
url(r'^js/(?P<path>.*)$','django.views.static.serve',{'document_root': js_media }),
)
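# Note: serving templates/css, /images and /js through
# django.views.static.serve is only suitable for development; production
# deployments normally serve static assets from the web server itself.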
| [
"[email protected]"
] | |
429ff554ac3fbec982f6543fabdf502cbf11eaf3 | 520cc1c536985f72a19e4183e736537a0660029c | /Network/urls.py | 4985dd2a706da8aafe87702c72f79b12e4def8ee | [] | no_license | kswelch53/Network | 9bcce1ebec1ae75100a3198dde779e353daaeb0e | 646a01cb52ffae63ea5d59635f06e9324c248b13 | refs/heads/master | 2021-09-09T04:44:46.666563 | 2018-03-13T23:24:06 | 2018-03-13T23:24:06 | 125,127,156 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 929 | py | """Network URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
    url(r'^', include('apps.app_one.urls', namespace='app1')),
    url(r'^app2/', include('apps.app_two.urls', namespace='app2')),
url(r'^admin/', admin.site.urls),
]
| [
"[email protected]"
] | |
f9a7d373fd0a22027404c7c536075e139ac3a6b3 | 36957a9ce540846d08f151b6a2c2d582cff1df47 | /VR/Python/Python36/Lib/site-packages/django/contrib/gis/db/models/__init__.py | 9c0171e697081762f1b1e195bdfdad25bd682f41 | [] | no_license | aqp1234/gitVR | 60fc952307ef413e396d31e0d136faffe087ed2b | e70bd82c451943c2966b8ad1bee620a0ee1080d2 | refs/heads/master | 2022-12-29T15:30:12.540947 | 2020-10-07T15:26:32 | 2020-10-07T15:26:32 | 290,163,043 | 0 | 1 | null | 2020-08-25T09:15:40 | 2020-08-25T08:47:36 | C# | UTF-8 | Python | false | false | 128 | py | version https://git-lfs.github.com/spec/v1
oid sha256:051de440079f22fe0ed4d92c8950944a1c2548ee0e08da1419a9fa7424462325
size 817
| [
"[email protected]"
] | |
9bed6ed6c401fac8dd4b07157b505d6d45bf5404 | 0a65d42f4f0e491cb2aada408401b94909f821c2 | /Attendance_Monitoring/hrg/hrg_hr/migrations/0007_auto_20200625_1027.py | f121581d75c52217f396b1f22613076fa26f7155 | [] | no_license | jmadlansacay/_Office | 3acde7655784e91c7dcecfc853d4f36cdfeef028 | 7f46449b9f7e8e892e2e0025ba493259197fa592 | refs/heads/main | 2023-07-28T10:23:54.680822 | 2021-09-11T02:28:07 | 2021-09-11T02:28:07 | 379,155,026 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 519 | py | # Generated by Django 2.2.5 on 2020-06-25 02:27
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('hrg_hr', '0006_auto_20200625_1006'),
]
operations = [
migrations.AlterField(
model_name='tblmaster',
name='employeestatus',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='hrg_hr_ref.employeestatuscode'),
),
]
| [
"[email protected]"
] | |
a5016bfefccde0f94ae1caf65fdcdc907a1c441f | fa7f66e12223a11a17d42c9a672d03c845b604bd | /pyvisa/resources/helpers.py | 4fa00202bba32d4cd5c69c74c7aa0cf9c8952d9e | [
"MIT"
] | permissive | caryan/pyvisa | 5756e65c42810553f6f4b9f14800b5007b9dee0a | 1529fce2ac42ac8b47cf6f2c8ad1de22c9e88488 | refs/heads/master | 2020-12-03T03:31:59.688014 | 2014-08-29T22:44:24 | 2014-08-29T22:44:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,584 | py | # -*- coding: utf-8 -*-
"""
pyvisa.resources.helpers
~~~~~~~~~~~~~~~~~~~~~~~~
Helper functions.
This file is part of PyVISA.
:copyright: 2014 by PyVISA Authors, see AUTHORS for more details.
:license: MIT, see LICENSE for more details.
"""
from __future__ import division, unicode_literals, print_function, absolute_import
from ..compat import string_types
from .. import constants
def _redoc(attribute_name, doc, extra_doc=''):
if isinstance(attribute_name, string_types):
if doc is None:
doc = ''
if not doc.endswith('\n\n'):
doc += '\n\n'
doc += ':VISA Attribute: %s.' % attribute_name
if extra_doc:
doc += '\n' + extra_doc
attribute_name = getattr(constants, attribute_name)
return attribute_name, doc
def attr(attribute_name, doc=None, ro=False):
attribute_name, doc = _redoc(attribute_name, doc)
def getter(self):
return self.get_visa_attribute(attribute_name)
if ro:
return property(fget=getter, doc=doc)
def setter(self, value):
self.set_visa_attribute(attribute_name, value)
return property(fget=getter, fset=setter, doc=doc)
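# Illustrative sketch (not part of pyvisa's public API): inside a resource
# class, a VISA attribute can be exposed as a Python property, e.g.
#
#   class MyResource(object):
#       timeout = attr('VI_ATTR_TMO_VALUE', 'Timeout in milliseconds.')
#
# The string name is resolved against pyvisa.constants by _redoc above.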
def enum_attr(attribute_name, enum_type, doc=None, ro=False):
attribute_name, doc = _redoc(attribute_name, doc,
':type: :class:%s.%s' % (enum_type.__module__, enum_type.__name__))
def getter(self):
return enum_type(self.get_visa_attribute(attribute_name))
if ro:
return property(fget=getter, doc=doc)
def setter(self, value):
if value not in enum_type:
raise ValueError('%r is an invalid value for attribute %s, should be a %r',
value, attribute_name, enum_type)
self.set_visa_attribute(attribute_name, value)
return property(fget=getter, fset=setter, doc=doc)
def range_attr(attribute_name, min_value, max_value, doc=None, ro=False):
attribute_name, doc = _redoc(attribute_name, doc,
':range: %s <= value <= %s\n' % (min_value, max_value))
def getter(self):
return int(self.get_visa_attribute(attribute_name))
if ro:
return property(fget=getter, doc=doc)
def setter(self, value):
if not min_value <= value <= max_value:
raise ValueError('%r is an invalid value for attribute %s, should be between %r and %r',
value, attribute_name, min_value, max_value)
self.set_visa_attribute(attribute_name, value)
return property(fget=getter, fset=setter, doc=doc)
def boolean_attr(attribute_name, doc=None, ro=False):
attribute_name, doc = _redoc(attribute_name, doc,
':type: bool')
def getter(self):
return self.get_visa_attribute(attribute_name) == constants.VI_TRUE
if ro:
return property(fget=getter, doc=doc)
def setter(self, value):
self.set_visa_attribute(attribute_name, constants.VI_TRUE if value else constants.VI_FALSE)
return property(fget=getter, fset=setter, doc=doc)
def char_attr(attribute_name, doc=None, ro=False):
attribute_name, doc = _redoc(attribute_name, doc,
':range: 0 <= x <= 255\n:type: int')
def getter(self):
return chr(self.get_visa_attribute(attribute_name))
if ro:
return property(fget=getter, doc=doc)
def setter(self, value):
self.set_visa_attribute(attribute_name, ord(value))
return property(fget=getter, fset=setter, doc=doc)
| [
"[email protected]"
] | |
26e8321387d7acb7136a76bcf11db56c990ad589 | 2ff7e53d5e512cd762217ca54317982e07a2bb0c | /notifications/client/controls/notificationScrollContainer.py | 430068214b444982858c2a3b313c78d2ca6a9572 | [] | no_license | nanxijw/Clara-Pretty-One-Dick | 66d3d69426642b79e8fd4cc8e0bec23adeeca6d6 | 50de3488a2140343c364efc2615cf6e67f152be0 | refs/heads/master | 2021-01-19T09:25:07.555284 | 2015-02-17T21:49:33 | 2015-02-17T21:49:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,742 | py | #Embedded file name: notifications/client/controls\notificationScrollContainer.py
from carbonui.control.scrollContainer import ScrollContainer
from carbonui.primitives.base import ReverseScaleDpi, ScaleDpiF
from carbonui.primitives.container import Container
import carbonui.const as uiconst
from carbonui.primitives.frame import Frame
class NotificationScrollContainer(ScrollContainer):
entryLoadEnabled = True
contentHeight = 0
mainContTopHeight = (0, 0)
def ApplyAttributes(self, attributes):
ScrollContainer.ApplyAttributes(self, attributes)
self.mainCont.Close()
self.mainCont = Container(name='mainCont', parent=self.clipCont, state=uiconst.UI_NORMAL, align=uiconst.TOPLEFT)
self.mainContTopHeight = (0, 0)
self.mainCont._OnResize = self._OnMainContResize
def EnableEntryLoad(self):
self.entryLoadEnabled = True
self.LoadVisibleEntries()
def DisableEntryLoad(self):
self.entryLoadEnabled = False
def _OnMainContResize(self, *args):
newTopHeight = (self.mainCont.top, self.mainCont.height)
if newTopHeight != self.mainContTopHeight:
self.mainContTopHeight = newTopHeight
self.LoadVisibleEntries()
def LoadVisibleEntries(self):
if not self.entryLoadEnabled:
return
for each in self.mainCont.children:
self.LoadEntryIfVisible(each)
def LoadEntryIfVisible(self, entry):
topOffset = self.mainCont.top
visibleHeight = ReverseScaleDpi(self.clipCont.displayHeight)
if topOffset + entry.top + entry.height >= 0 and topOffset + entry.top <= visibleHeight:
entry.UpdateAlignmentAsRoot()
entry.LoadContent()
entry.display = True
else:
entry.display = False
def _OnVerticalScrollBar(self, posFraction):
posFraction = max(0.0, min(posFraction, 1.0))
self.mainCont.top = -posFraction * (self.mainCont.height - ReverseScaleDpi(self.clipCont.displayHeight))
def _InsertChild(self, idx, obj):
self.mainCont.children.insert(idx, obj)
contentWidth = ReverseScaleDpi(self.displayWidth)
minContentHeight = ReverseScaleDpi(self.clipCont.displayHeight)
self.mainCont.width = contentWidth
obj.top = self.contentHeight
obj.width = contentWidth
obj.displayY = ScaleDpiF(self.contentHeight)
obj.displayWidth = ScaleDpiF(contentWidth)
self.contentHeight += obj.height
self.mainCont.height = max(minContentHeight, self.contentHeight)
self._UpdateScrollbars()
self.LoadEntryIfVisible(obj)
def Flush(self):
ScrollContainer.Flush(self)
self.contentHeight = 0
| [
"[email protected]"
] | |
794aec261c5a2a0b22f17b996021749049c4c913 | 396f93d8e73c419ef82a94174815a2cecbb8334b | /.history/tester2_20200321145335.py | eca006aa8c88a49574cbf23f7e9263b1bfe377c0 | [] | no_license | mirfarzam/ArtificialIntelligence-HeuristicAlgorithm-TabuSearch | 8c73d9448b916009c9431526864a4441fdeb682a | 90b2dca920c85cddd7c1b3335344ac7b10a9b061 | refs/heads/master | 2021-03-26T21:16:42.561068 | 2020-04-17T21:44:26 | 2020-04-17T21:44:26 | 247,750,502 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 303 | py | import os
import subprocess
import re
from datetime import datetime
import time
process = subprocess.Popen(['./algo_tabou.exe', '1000', '1000', '50', 'distances_entre_villes_50.txt'],stdout=subprocess.PIPE,stderr=subprocess.PIPE)
stdout, stderr = process.communicate()
result = stdout.decode()  # decode captured bytes (the snapshot truncated this line)
print(result)
| [
"[email protected]"
] | |
17049ed6b3705fb01190a0f5025f74cd714f8f3e | 1b26d0023ad7eb302a9dd7d0d6696ef836c17c05 | /HyeonJinGithub/2020-10-07/12100 2048 (Easy).py | 20832b880e18018074fca0d6cc9aa019aaeb8ae8 | [
"MIT"
] | permissive | Team-NTO/NTO | 93e643ddd3c6cad308f1f984aaa9abc43d9e3bb8 | 133f19e1e15e423589bd7b94b698d2afc76c3ef6 | refs/heads/master | 2023-06-23T06:26:16.374869 | 2021-07-11T06:43:08 | 2021-07-11T06:43:08 | 298,460,899 | 1 | 3 | MIT | 2021-07-11T06:43:09 | 2020-09-25T03:47:51 | Java | UTF-8 | Python | false | false | 2,831 | py | import sys
from copy import deepcopy
def print_max(arr):
global res
for i in range(len(arr)):
for j in range(len(arr[i])):
res = max(res, arr[i][j])
def dfs(tmp_board, n):
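    # Depth-limited DFS: apply each of the four moves up to 5 times
    # (the problem's move limit) and record the largest tile reached.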
if n == 5:
print_max(tmp_board)
return
dfs(move_left(deepcopy(tmp_board)), n + 1)
dfs(move_right(deepcopy(tmp_board)), n + 1)
dfs(move_up(deepcopy(tmp_board)), n + 1)
dfs(move_down(deepcopy(tmp_board)), n + 1)
def move_left(board):
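    # Slide/merge one row to the left: p is the next write index and x holds
    # a pending tile; equal adjacent tiles merge at most once per move.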
for i in range(N):
p = 0
x = 0
for j in range(N):
if board[i][j] == 0: continue
if x == 0:
x = board[i][j]
else:
if x == board[i][j]:
board[i][p] = x * 2
x = 0
p += 1
else:
board[i][p] = x
x = board[i][j]
p += 1
board[i][j] = 0
if x != 0: board[i][p] = x
return board
def move_right(board):
for i in range(N):
p = N - 1
x = 0
for j in range(N - 1, -1, -1):
if board[i][j] == 0: continue
if x == 0:
x = board[i][j]
else:
if x == board[i][j]:
board[i][p] = x * 2
p -= 1
x = 0
else:
board[i][p] = x
p -= 1
x = board[i][j]
board[i][j] = 0
if x != 0: board[i][p] = x
return board
def move_up(board):
for i in range(N):
p = 0
x = 0
for j in range(N):
if board[j][i] == 0: continue
if x == 0:
x = board[j][i]
else:
if x == board[j][i]:
board[p][i] = x * 2
p += 1
x = 0
else:
board[p][i] = x
p += 1
x = board[j][i]
board[j][i] = 0
if x != 0: board[p][i] = x
return board
def move_down(board):
for i in range(N):
p = N - 1
x = 0
for j in range(N - 1, -1, -1):
if board[j][i] == 0: continue
if x == 0:
x = board[j][i]
else:
if x == board[j][i]:
board[p][i] = x * 2
p -= 1
x = 0
else:
board[p][i] = x
p -= 1
x = board[j][i]
board[j][i] = 0
if x != 0: board[p][i] = x
return board
if __name__ == '__main__':
N = int(input())
a = [[int(x) for x in sys.stdin.readline().split()] for _ in range(N)]
res = 0
dfs(a, 0)
print(res) | [
"[email protected]"
] | |
b2e11180449c5cbd4123a1f6c4e49af8b9b06064 | ba3c06f9ae89479fa4987fe841ac09b5b5d71383 | /python_for_kids/book/Examples/mean.py | 1d733629a2eb40a1124f5270aca68952c682f52e | [] | no_license | mary-tano/python-programming | 6d806e25011e770a04a0922d0b71bf38c222d026 | 829654a3274be939fa529ed94ea568c12f7f1a27 | refs/heads/master | 2021-05-17T15:30:32.710838 | 2020-04-01T13:37:18 | 2020-04-01T13:37:18 | 250,846,188 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 214 | py | # Среднее значение
print("Введи число: ", end="")
Sum = 0
Value = int(input())
for Number in range(1,Value+1) :
Sum += Number
Mean = Sum / Value
print("Результат: " + str(Mean))
| [
"[email protected]"
] | |
358893e35bbd56734f3c0df20f6129c87583d727 | abf4757a51e38b3cde6fc55b0251e77652521a2d | /models.py | 2e4f0d45edd16ef57cc997df440fe3bae2aa42ca | [] | no_license | stephenroller/relationstest | fd5284e6035682e0bfe1b13ff3c51dfec8e6f5ab | 92b66e3dd800107b5489f662264f87d5d178af61 | refs/heads/master | 2021-01-21T11:45:20.095298 | 2016-01-26T23:24:21 | 2016-01-26T23:24:21 | 37,383,124 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,369 | py | #!/usr/bin/env python
import numpy as np
from sklearn import svm, linear_model
from custom_classifiers import ThresholdClassifier
SETUPS = {
# baseline "unsupervised"
'cosine': ('threshold', 'cosine'),
# baseline memorizations
'lhs': ('linear', 'lhs'),
'rhs': ('linear', 'rhs'),
'concat': ('linear', 'concat'),
# asym models
'diff': ('linear', 'diff'),
'diffsq': ('linear', 'diffsq'),
# rb models
'diffrbf': ('rbf', 'diff'),
'concatrbf': ('rbf', 'concat'),
# others I dont want now
#('lhs', 'lr1', 'lhs'),
#('rhs', 'lr1', 'rhs'),
#('concat', 'lr1', 'concat'),
#('diff', 'lr1', 'diff'),
#('diffsq', 'lr1', 'diffsq'),
#('lhs', 'lr2', 'lhs'),
#('rhs', 'lr2', 'rhs'),
#('concat', 'lr2', 'concat'),
#('diff', 'lr2', 'diff'),
#('diffsq', 'lr2', 'diffsq'),
#('diffpoly', 'poly2', 'diff'),
}
def words2matrix(dataseries, space):
return np.array(list(dataseries.apply(lambda x: space[x])))
def generate_cosine_matrix(data, space):
lhs = words2matrix(data.word1, space)
rhs = words2matrix(data.word2, space)
return np.array([np.sum(np.multiply(lhs, rhs), axis=1)]).T
def generate_diff_matrix(data, space):
lhs = words2matrix(data.word1, space)
rhs = words2matrix(data.word2, space)
# difference vector
diff = rhs - lhs
return diff
def generate_diffsq_matrix(data, space):
lhs = words2matrix(data.word1, space)
rhs = words2matrix(data.word2, space)
# difference vector
diff = rhs - lhs
# element wise squared diffs
diff_sq = np.power(diff, 2)
X = np.concatenate([diff, diff_sq], axis=1)
return X
def generate_concat_matrix(data, space):
lhs = words2matrix(data.word1, space)
rhs = words2matrix(data.word2, space)
X = np.concatenate([lhs, rhs], axis=1)
return X
def generate_lhs_matrix(data, space):
    lhs = words2matrix(data.word1, space)  # fixed: lhs comes from word1 (word2 is the rhs)
return lhs
def generate_rhs_matrix(data, space):
rhs = words2matrix(data.word2, space)
return rhs
def generate_feature_matrix(data, space, features):
if features == 'cosine':
X = generate_cosine_matrix(data, space)
elif features == 'lhs':
X = generate_lhs_matrix(data, space)
elif features == 'rhs':
X = generate_rhs_matrix(data, space)
elif features == 'concat':
X = generate_concat_matrix(data, space)
elif features == 'diff':
X = generate_diff_matrix(data, space)
elif features == 'diffsq':
X = generate_diffsq_matrix(data, space)
else:
raise ValueError("Can't generate %s features" % features)
y = data.entails.as_matrix()
return X, y
def classifier_factory(name):
if name == 'linear':
return svm.LinearSVC()
elif name == 'poly2':
return svm.SVC(kernel='poly', degree=2)
elif name == 'threshold':
return ThresholdClassifier()
elif name == 'rbf':
return svm.SVC(kernel='rbf')
elif name == 'lr2':
return linear_model.LogisticRegression(penalty='l2')
elif name == 'lr1':
return linear_model.LogisticRegression(penalty='l1')
elif name == 'levy':
        # TODO: not implemented yet
return None
else:
raise ValueError("Don't know about %s models." % name)
def load_setup(setupname):
kl, fe = SETUPS[setupname]
return classifier_factory(kl), fe
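# Minimal usage sketch (assumes `data` is a DataFrame with word1/word2/entails
# columns and `space` maps each word to a vector):
#
#   model, features = load_setup('diff')
#   X, y = generate_feature_matrix(data, space, features)
#   model.fit(X, y)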
| [
"[email protected]"
] | |
599c16f31a7bdd1841ee6346e94922f0cb6d2c86 | 3ec50b750e788d018ff0f6afd3528350a8956f6a | /ch_04/tests/test_classifier.py | 680a46568379375ea3e6dcaf7eb588014dcb498b | [
"MIT"
] | permissive | jegarciaor/Python-Object-Oriented-Programming---4th-edition | 45fb68f04c905a27865c40a48705da803fbdc27a | 2f3d6b09326dab6a0488c72c96d7368bee28fef4 | refs/heads/main | 2023-06-05T05:02:54.428716 | 2021-06-26T15:23:11 | 2021-06-26T15:23:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,536 | py | """
Python 3 Object-Oriented Programming Case Study
Chapter 4, Expecting the Unexpected
"""
import base64
import csv
from pathlib import Path
from pytest import *
import classifier
@fixture(scope="module")
def app_client():
test_users = [
classifier.User(
username='noriko',
email='[email protected]',
real_name='Noriko K. L.',
role=classifier.Role.BOTANIST,
password='md5$H5W30kno$10a2327b2fce08c1ad0f65a12d40552f'
),
classifier.User(
username='emma',
email='[email protected]',
real_name='Emma K.',
role=classifier.Role.RESEARCHER,
password='md5$F8ZVxsuE$ebf71d15067ed7c887c0408550b671e2'
)
]
with classifier.app.app_context():
classifier.app.config['TESTING'] = True
classifier.app.config['USER_FILE'] = Path.cwd()/"test_data"
for u in test_users:
classifier.users.add_user(u)
yield classifier.app.test_client()
def test_health_check(app_client):
result = app_client.get("health")
assert result.status_code == 200
assert result.json == {
"status": "OK",
"user_count": 2,
"users": [
{
'email': '[email protected]',
'role': 'botanist',
'password': 'md5$H5W30kno$10a2327b2fce08c1ad0f65a12d40552f',
'real_name': 'Noriko K. L.',
'username': 'noriko'
},
{
'email': '[email protected]',
'role': 'researcher',
'password': 'md5$F8ZVxsuE$ebf71d15067ed7c887c0408550b671e2',
'real_name': 'Emma K.',
'username': 'emma'
},
]
}
def test_whoami_good(app_client):
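    # HTTP Basic auth: the Authorization header carries
    # base64("username:password").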
credentials = base64.b64encode("noriko:Hunter2".encode("utf-8"))
result = app_client.get(
"whoami",
headers={
"Authorization": f"BASIC {credentials.decode('ASCII')}"
}
)
assert result.status_code == 200
print(result.json)
assert result.json["status"] == "OK"
def test_whoami_bad(app_client):
credentials = base64.b64encode("noriko:not my passowrd".encode("utf-8"))
result = app_client.get(
"whoami",
headers={
"Authorization": f"BASIC {credentials.decode('ASCII')}"
}
)
assert result.status_code == 401
print(result.json)
assert result.json["message"] == "Unknown User"
| [
"[email protected]"
] | |
0551c05c3a0fcbffde3afd42eec059f9cc7d51a4 | 4d675034878c4b6510e1b45b856cc0a71af7f886 | /configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py | 7fb8e82ece225ab6f88f1f4f83bea56a42cf1a57 | [
"Apache-2.0",
"BSD-2-Clause-Views",
"MIT",
"BSD-2-Clause"
] | permissive | shinya7y/UniverseNet | 101ebc2ad8f15482ee45ea8d6561aa338a0fa49e | 3652b18c7ce68122dae7a32670624727d50e0914 | refs/heads/master | 2023-07-22T08:25:42.646911 | 2023-07-08T18:09:34 | 2023-07-08T18:09:34 | 263,555,721 | 407 | 58 | Apache-2.0 | 2023-01-27T01:13:31 | 2020-05-13T07:23:43 | Python | UTF-8 | Python | false | false | 376 | py | _base_ = '../mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py'
model = dict(
backbone=dict(
norm_cfg=dict(type='SyncBN', requires_grad=True),
norm_eval=False,
plugins=[
dict(
cfg=dict(type='ContextBlock', ratio=1. / 16),
stages=(False, True, True, True),
position='after_conv3')
]))
| [
"[email protected]"
] | |
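A hedged sketch of consuming this config with the MMDetection 2.x Python API; the entry points changed across releases, so treat the calls as illustrative rather than as this repo's documented workflow.

# Assumes mmcv + mmdet 2.x; the path is relative to the repository root.
from mmcv import Config
from mmdet.models import build_detector

cfg = Config.fromfile(
    "configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py")
model = build_detector(cfg.model)  # builds the detector; SyncBN only matters at train time
print(type(model).__name__)        # expected: 'MaskRCNN'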
1799efdff1eb4bb6b7e8ba832d454375c9017ab7 | e40091711a9900350939556374cee5f3e41c2c3c | /tourism/ratting/models.py | 129148c4edcd9dd3177d7a74515f01a81254f5d7 | [] | no_license | rg3915/drf-tourism | 951249f64450b6b710bb971aa52ed4d2efe2a85d | c6648a42eed77ab82cf10af242ffb20690404fc0 | refs/heads/main | 2023-03-25T21:15:08.185176 | 2021-03-24T08:49:28 | 2021-03-24T08:49:28 | 347,816,562 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 739 | py | from django.contrib.auth.models import User
from django.db import models
class Ratting(models.Model):
    comment = models.CharField('comment', max_length=100, unique=True)
user = models.ForeignKey(
User,
on_delete=models.CASCADE,
        verbose_name='user',
related_name='ratting_users',
null=True,
blank=True
)
    note = models.DecimalField('note', max_digits=3, decimal_places=2)
    created = models.DateTimeField(
        'created at',
auto_now_add=True,
auto_now=False
)
class Meta:
ordering = ('comment',)
        verbose_name = 'rating'
        verbose_name_plural = 'ratings'
def __str__(self):
return self.comment
| [
"[email protected]"
] | |
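A short sketch of the model in use from a Django shell; the app label `ratting` is taken from the file path, while the username and values are illustrative.

# Illustrative only: assumes the app is installed as 'ratting' and the user exists.
from django.contrib.auth.models import User
from django.db.models import Avg
from ratting.models import Ratting

user = User.objects.get(username="ana")        # hypothetical user
Ratting.objects.create(comment="Great tour", user=user, note="4.75")
print(Ratting.objects.aggregate(Avg("note")))  # e.g. {'note__avg': Decimal('4.75')}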
30841bd3f6a6a979eeeab80457b83222c00be2d3 | 3330ed9c8f0aed91638b3a07ad697668346db930 | /meiduo_mall/meiduo_mall/apps/payment/views.py | efbe83168b0fcbd31e9b45953a65fa112fb21d9b | [] | no_license | chengong825/meiduo_mall_django | fcee163bb6256672cbc5dcbd649aad2605bfb5e6 | e3ca5e48f0c043d3f3f6d24a198ac2812df6d719 | refs/heads/master | 2020-03-29T09:48:09.189141 | 2018-09-21T14:33:43 | 2018-09-21T14:33:43 | 149,774,584 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,574 | py | import os
from alipay import AliPay
from django.conf import settings
from django.shortcuts import render
# Create your views here.
from rest_framework import status
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from orders.models import OrderInfo
from payment.models import Payment
class PaymentView(APIView):
"""
    Payment
"""
permission_classes = (IsAuthenticated,)
def get(self, request, order_id):
"""
        Get the payment URL
"""
        # Check that the order information is valid
try:
order = OrderInfo.objects.get(order_id=order_id, user=request.user,
pay_method=OrderInfo.PAY_METHODS_ENUM["ALIPAY"],
status=OrderInfo.ORDER_STATUS_ENUM["UNPAID"])
except OrderInfo.DoesNotExist:
            return Response({'message': 'Invalid order information'}, status=status.HTTP_400_BAD_REQUEST)
        # Build the Alipay payment URL
        alipay = AliPay(
            appid=settings.ALIPAY_APPID,
            app_notify_url=None,  # default callback URL
            app_private_key_path=os.path.join(os.path.dirname(os.path.abspath(__file__)), "keys/app_private_key.pem"),
            alipay_public_key_path=os.path.join(os.path.dirname(os.path.abspath(__file__)), "keys/alipay_public_key.pem"),  # Alipay's public key, used to verify messages returned by Alipay; not your own public key
            sign_type="RSA2",  # RSA or RSA2
            debug=settings.ALIPAY_DEBUG  # defaults to False
)
order_string = alipay.api_alipay_trade_page_pay(
out_trade_no=order_id,
total_amount=str(order.total_amount),
subject="美多商城%s" % order_id,
return_url="http://www.meiduo.site:8080/pay_success.html",
)
        # The browser must be redirected to https://openapi.alipay.com/gateway.do? + order_string
        # Join the URL and return it to the front end
alipay_url = settings.ALIPAY_URL + "?" + order_string
return Response({'alipay_url': alipay_url})
class PaymentStatusView(APIView):
"""
    Payment result
"""
def put(self, request):
data = request.query_params.dict()
signature = data.pop("sign")
alipay = AliPay(
appid=settings.ALIPAY_APPID,
            app_notify_url=None,  # default callback URL
app_private_key_path=os.path.join(os.path.dirname(os.path.abspath(__file__)), "keys/app_private_key.pem"),
alipay_public_key_path=os.path.join(os.path.dirname(os.path.abspath(__file__)),
"keys/alipay_public_key.pem"), # 支付宝的公钥,验证支付宝回传消息使用,不是你自己的公钥,
sign_type="RSA2", # RSA 或者 RSA2
debug=settings.ALIPAY_DEBUG # 默认False
)
success = alipay.verify(data, signature)
if success:
            # Order number
            order_id = data.get('out_trade_no')
            # Alipay trade number
trade_id = data.get('trade_no')
Payment.objects.create(
order_id=order_id,
trade_id=trade_id
)
OrderInfo.objects.filter(order_id=order_id, status=OrderInfo.ORDER_STATUS_ENUM['UNPAID']).update(status=OrderInfo.ORDER_STATUS_ENUM["UNCOMMENT"])
return Response({'trade_id': trade_id})
else:
            return Response({'message': 'Illegal request'}, status=status.HTTP_403_FORBIDDEN)
| [
"[email protected]"
] |