| column | dtype | min | max |
| --- | --- | --- | --- |
| blob_id | string | length 40 | length 40 |
| directory_id | string | length 40 | length 40 |
| path | string | length 3 | length 616 |
| content_id | string | length 40 | length 40 |
| detected_licenses | sequence | length 0 | length 112 |
| license_type | string (2 classes) | | |
| repo_name | string | length 5 | length 115 |
| snapshot_id | string | length 40 | length 40 |
| revision_id | string | length 40 | length 40 |
| branch_name | string (777 classes) | | |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 | 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 | 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 | 2023-09-06 01:08:06 |
| github_id | int64 (nullable) | 4.92k | 681M |
| star_events_count | int64 | 0 | 209k |
| fork_events_count | int64 | 0 | 110k |
| gha_license_id | string (22 classes, nullable) | | |
| gha_event_created_at | timestamp[us] (nullable) | 2012-06-04 01:52:49 | 2023-09-14 21:59:50 |
| gha_created_at | timestamp[us] (nullable) | 2008-05-22 07:58:19 | 2023-08-21 12:35:19 |
| gha_language | string (149 classes) | | |
| src_encoding | string (26 classes) | | |
| language | string (1 class) | | |
| is_vendor | bool (2 classes) | | |
| is_generated | bool (2 classes) | | |
| length_bytes | int64 | 3 | 10.2M |
| extension | string (188 classes) | | |
| content | string | length 3 | length 10.2M |
| authors | sequence | length 1 | length 1 |
| author_id | string | length 1 | length 132 |
015a8e9ef9d42e0845eedd82384f1664674a5957 | 3be42b83a15d022f5863c96ec26e21bac0f7c27e | /tensorflow_probability/python/mcmc/legacy_random_walk_metropolis_test.py | cc0e6d73a93c859b63903599869a1b5536077d7b | ["Apache-2.0"] | permissive | ogrisel/probability | 846f5c13cddee5cf167b215e651b7479003f15d2 | 8f67456798615f9bf60ced2ce6db5d3dba3515fe | refs/heads/master | 2022-11-09T10:53:23.000918 | 2020-07-01T23:16:03 | 2020-07-01T23:17:25 | 276,580,359 | 2 | 1 | Apache-2.0 | 2020-07-02T07:37:58 | 2020-07-02T07:37:57 | null | UTF-8 | Python | false | false | 6,468 | py |
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for RandomWalkMetropolisNormal and RandomWalkMetropolisUniform."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
import tensorflow.compat.v2 as tf
import tensorflow_probability as tfp
from tensorflow_probability.python import distributions as tfd
from tensorflow_probability.python.internal import test_util
@test_util.test_all_tf_execution_regimes
class RWMTest(test_util.TestCase):
  def testRWM1DUniform(self):
    """Sampling from the Standard Normal Distribution using a uniform proposal."""
dtype = np.float32
target = tfd.Normal(loc=dtype(0), scale=dtype(1))
samples, _ = tfp.mcmc.sample_chain(
num_results=2000,
current_state=dtype(1),
kernel=tfp.mcmc.RandomWalkMetropolis(
target.log_prob,
new_state_fn=tfp.mcmc.random_walk_uniform_fn(scale=dtype(2.)),
seed=test_util.test_seed()),
num_burnin_steps=500,
parallel_iterations=1) # For determinism.
sample_mean = tf.math.reduce_mean(samples, axis=0)
sample_std = tf.math.reduce_std(samples, axis=0)
[sample_mean_, sample_std_] = self.evaluate([sample_mean, sample_std])
self.assertAllClose(0., sample_mean_, atol=0.17, rtol=0.)
self.assertAllClose(1., sample_std_, atol=0.2, rtol=0.)
  def testRWM1DNormal(self):
    """Sampling from the Standard Normal Distribution with 8 parallel chains."""
dtype = np.float32
target = tfd.Normal(loc=dtype(0), scale=dtype(1))
samples, _ = tfp.mcmc.sample_chain(
num_results=500,
current_state=dtype([1] * 8), # 8 parallel chains
kernel=tfp.mcmc.RandomWalkMetropolis(
target.log_prob,
seed=test_util.test_seed()),
num_burnin_steps=500,
parallel_iterations=1) # For determinism.
sample_mean = tf.math.reduce_mean(samples, axis=(0, 1))
sample_std = tf.math.reduce_std(samples, axis=(0, 1))
[sample_mean_, sample_std_] = self.evaluate([sample_mean, sample_std])
self.assertAllClose(0., sample_mean_, atol=0.2, rtol=0.)
self.assertAllClose(1., sample_std_, atol=0.2, rtol=0.)
def testRWM1DCauchy(self):
"""Sampling from the Standard Normal Distribution using Cauchy proposal."""
dtype = np.float32
num_burnin_steps = 750
num_chain_results = 400
target = tfd.Normal(loc=dtype(0), scale=dtype(1))
def cauchy_new_state_fn(scale, dtype):
cauchy = tfd.Cauchy(loc=dtype(0), scale=dtype(scale))
def _fn(state_parts, seed):
seed_stream = tfp.util.SeedStream(
seed, salt='RandomWalkCauchyIncrement')
next_state_parts = [
state + cauchy.sample(state.shape, seed=seed_stream())
for state in state_parts]
return next_state_parts
return _fn
samples, _ = tfp.mcmc.sample_chain(
num_results=num_chain_results,
num_burnin_steps=num_burnin_steps,
current_state=dtype([1] * 8), # 8 parallel chains
kernel=tfp.mcmc.RandomWalkMetropolis(
target.log_prob,
new_state_fn=cauchy_new_state_fn(scale=0.5, dtype=dtype),
seed=test_util.test_seed()),
parallel_iterations=1) # For determinism.
sample_mean = tf.math.reduce_mean(samples, axis=(0, 1))
sample_std = tf.math.reduce_std(samples, axis=(0, 1))
[sample_mean_, sample_std_] = self.evaluate([sample_mean, sample_std])
self.assertAllClose(0., sample_mean_, atol=0.2, rtol=0.)
self.assertAllClose(1., sample_std_, atol=0.2, rtol=0.)
def testRWM2DNormal(self):
"""Sampling from a 2-D Multivariate Normal distribution."""
dtype = np.float32
true_mean = dtype([0, 0])
true_cov = dtype([[1, 0.5], [0.5, 1]])
num_results = 500
num_chains = 100
# Target distribution is defined through the Cholesky decomposition
chol = tf.linalg.cholesky(true_cov)
target = tfd.MultivariateNormalTriL(loc=true_mean, scale_tril=chol)
# Assume that the state is passed as a list of 1-d tensors `x` and `y`.
# Then the target log-density is defined as follows:
def target_log_prob(x, y):
# Stack the input tensors together
z = tf.stack([x, y], axis=-1) - true_mean
return target.log_prob(tf.squeeze(z))
# Initial state of the chain
init_state = [np.ones([num_chains, 1], dtype=dtype),
np.ones([num_chains, 1], dtype=dtype)]
# Run Random Walk Metropolis with normal proposal for `num_results`
# iterations for `num_chains` independent chains:
states, _ = tfp.mcmc.sample_chain(
num_results=num_results,
current_state=init_state,
kernel=tfp.mcmc.RandomWalkMetropolis(
target_log_prob_fn=target_log_prob,
seed=test_util.test_seed()),
num_burnin_steps=200,
num_steps_between_results=1,
parallel_iterations=1)
states = tf.stack(states, axis=-1)
sample_mean = tf.math.reduce_mean(states, axis=[0, 1])
x = states - sample_mean
sample_cov = tf.math.reduce_mean(
tf.linalg.matmul(x, x, transpose_a=True), axis=[0, 1])
[sample_mean_, sample_cov_] = self.evaluate([
sample_mean, sample_cov])
self.assertAllClose(np.squeeze(sample_mean_), true_mean, atol=0.1, rtol=0.1)
self.assertAllClose(np.squeeze(sample_cov_), true_cov, atol=0.1, rtol=0.1)
def testRWMIsCalibrated(self):
rwm = tfp.mcmc.RandomWalkMetropolis(
target_log_prob_fn=lambda x: -tf.square(x) / 2.,
)
self.assertTrue(rwm.is_calibrated)
def testUncalibratedRWIsNotCalibrated(self):
uncal_rw = tfp.mcmc.UncalibratedRandomWalk(
target_log_prob_fn=lambda x: -tf.square(x) / 2.,
)
self.assertFalse(uncal_rw.is_calibrated)
if __name__ == '__main__':
tf.test.main()
| ["[email protected]"] | |
1ecb996f4097f56f0ce63ab0d6dedf6b7f3b0ff8 | 80a3d98eae1d755d6914b5cbde63fd10f5cc2046 | /autox/autox_video/mmaction2/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb.py | 48df87cc320b51fd2cd980cd78eade24f3d1d968 | ["Apache-2.0"] | permissive | 4paradigm/AutoX | efda57b51b586209e1d58e1dab7d0797083aadc5 | 7eab9f4744329a225ff01bb5ec360c4662e1e52e | refs/heads/master | 2023-05-24T00:53:37.109036 | 2023-02-14T14:21:50 | 2023-02-14T14:21:50 | 388,068,949 | 752 | 162 | Apache-2.0 | 2022-07-12T08:28:09 | 2021-07-21T09:45:41 | Jupyter Notebook | UTF-8 | Python | false | false | 3,034 | py |
_base_ = [
'../../_base_/models/slowonly_r50.py',
'../../_base_/schedules/sgd_150e_warmup.py',
'../../_base_/default_runtime.py'
]
# model settings
model = dict(cls_head=dict(num_classes=101))
# dataset settings
dataset_type = 'RawframeDataset'
data_root = 'data/ucf101/rawframes/'
data_root_val = 'data/ucf101/rawframes/'
split = 1 # official train/test splits. valid numbers: 1, 2, 3
ann_file_train = f'data/ucf101/ucf101_train_split_{split}_rawframes.txt'
ann_file_val = f'data/ucf101/ucf101_val_split_{split}_rawframes.txt'
ann_file_test = f'data/ucf101/ucf101_val_split_{split}_rawframes.txt'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False)
train_pipeline = [
dict(type='SampleFrames', clip_len=8, frame_interval=4, num_clips=1),
dict(type='RawFrameDecode'),
dict(type='Resize', scale=(-1, 256)),
dict(type='RandomResizedCrop'),
dict(type='Resize', scale=(224, 224), keep_ratio=False),
dict(type='Flip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='FormatShape', input_format='NCTHW'),
dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
dict(type='ToTensor', keys=['imgs', 'label'])
]
val_pipeline = [
dict(
type='SampleFrames',
clip_len=8,
frame_interval=4,
num_clips=1,
test_mode=True),
dict(type='RawFrameDecode'),
dict(type='Resize', scale=(-1, 256)),
dict(type='CenterCrop', crop_size=224),
dict(type='Normalize', **img_norm_cfg),
dict(type='FormatShape', input_format='NCTHW'),
dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
dict(type='ToTensor', keys=['imgs'])
]
test_pipeline = [
dict(
type='SampleFrames',
clip_len=8,
frame_interval=4,
num_clips=10,
test_mode=True),
dict(type='RawFrameDecode'),
dict(type='Resize', scale=(-1, 256)),
dict(type='ThreeCrop', crop_size=256),
dict(type='Normalize', **img_norm_cfg),
dict(type='FormatShape', input_format='NCTHW'),
dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
dict(type='ToTensor', keys=['imgs'])
]
data = dict(
videos_per_gpu=8,
workers_per_gpu=2,
test_dataloader=dict(videos_per_gpu=1),
train=dict(
type=dataset_type,
ann_file=ann_file_train,
data_prefix=data_root,
pipeline=train_pipeline),
val=dict(
type=dataset_type,
ann_file=ann_file_val,
data_prefix=data_root_val,
pipeline=val_pipeline),
test=dict(
type=dataset_type,
ann_file=ann_file_test,
data_prefix=data_root_val,
pipeline=test_pipeline))
evaluation = dict(
interval=1, metrics=['top_k_accuracy', 'mean_class_accuracy'])
# optimizer
optimizer = dict(lr=0.1) # this lr is used for 8 gpus
# learning policy
lr_config = dict(policy='CosineAnnealing', min_lr=0, by_epoch=False)
total_epochs = 64
# runtime settings
work_dir = './work_dirs/slowonly_r50_8x4x1_64e_ucf101_rgb'
| ["[email protected]"] | |
6511592e6810655b1bf0ef09338b91728067e6fe | 31e113e0baa03ccc7b58ecef8a1116ad6501e33a | /tensorflow_probability/python/experimental/mcmc/preconditioned_hmc_test.py | d13b3d5d494cf099bbc7be9fd385bbe34bc6cde2 | ["Apache-2.0"] | permissive | ksachdeva/probability | 9dbb771ec4da8094dea1c31d6cd5d514c2fe2c6f | dd24b7a6495e8801b7e7852aab16d6704993147c | refs/heads/master | 2021-07-19T12:40:09.133886 | 2021-02-09T16:29:17 | 2021-02-09T16:31:18 | 241,638,637 | 2 | 0 | Apache-2.0 | 2020-02-19T14:12:55 | 2020-02-19T14:12:54 | null | UTF-8 | Python | false | false | 28,713 | py |
# Copyright 2020 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for preconditioned_hmc."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
# Dependency imports
from absl.testing import parameterized
import tensorflow.compat.v2 as tf
import tensorflow_probability as tfp
from tensorflow_probability.python.internal import test_util
from tensorflow_probability.python.internal import unnest
tfb = tfp.bijectors
tfd = tfp.distributions
tfde = tfp.experimental.distributions
# Allowed type of preconditioning schemes to use.
# See code for details.
PRECONDITION_SCHEMES = {
'direct', 'precision_factor', 'sqrtm', 'scale',
# `None` ==> No preconditioner. This is different than a "bad"
# preconditioner. We will be able to check asymptotics with "None".
'no_preconditioner',
}
RunHMCResults = collections.namedtuple('RunHMCResults', [
'draws',
'step_size',
'final_step_size',
'asymptotic_step_size',
'accept_prob',
'mean_accept_prob',
'min_ess',
'sample_mean',
'sample_cov',
'sample_var',
'mean_atol',
'cov_atol',
'var_rtol',
])
def _make_composite_tensor(dist):
"""Wrapper to make distributions of linear operators composite."""
if dist is None:
return dist
composite_dist = tfp.experimental.auto_composite_tensor(dist.__class__,
omit_kwargs='name')
p = dist.parameters
for k in p:
if isinstance(p[k], tfp.distributions.Distribution):
p[k] = _make_composite_tensor(p[k])
elif isinstance(p[k], tf.linalg.LinearOperator):
composite_linop = tfp.experimental.auto_composite_tensor(p[k].__class__)
p[k] = composite_linop(**p[k].parameters)
ac_dist = composite_dist(**p)
return ac_dist
@test_util.test_graph_and_eager_modes
class PreconditionedHMCCorrectnessTest(test_util.TestCase):
"""More careful tests that sampling/preconditioning is actually working."""
def _calculate_asymptotic_step_size(self, scales, prob_accept):
"""Calculate the (asymptotic) expected step size for given scales/P[accept].
The distribution should be a multivariate Gaussian, and the approximation is
appropriate in high dimensions when the spectrum is polynomially decreasing.
For details, see [1], equations (3.1, 3.2).
Args:
scales: Tensor with the square roots of the eigenvalues of the
covariance matrix.
prob_accept: Average acceptance probability.
Returns:
step_size: Float of approximate step size to achieve the target acceptance
rate.
#### References
[1]: Langmore, Ian, Michael Dikovsky, Scott Geraedts, Peter Norgaard, and
Rob Von Behren. 2019. “A Condition Number for Hamiltonian Monte Carlo."
http://arxiv.org/abs/1905.09813.
"""
inv_nu = tf.reduce_sum((1. / scales) ** 4, axis=-1) ** -0.25
step_size = (inv_nu *
(2**1.75) *
tf.sqrt(tfd.Normal(0., 1.).quantile(1 - prob_accept / 2.)))
return step_size
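  # Illustrative sanity check (an added sketch, not part of the original
  # test): for an isotropic target, scales = tf.ones(dims) gives
  # inv_nu = dims ** -0.25, so the expected step size decays like the
  # fourth root of the dimension. For example, dims=200 and
  # prob_accept=0.75 give roughly
  # (200 ** -0.25) * (2 ** 1.75) * sqrt(quantile(0.625)),
  # i.e. a step size on the order of 0.5.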
def _run_hmc_with_step_size(
self,
target_mvn,
precondition_scheme,
target_accept=0.75,
num_results=2000,
num_adaptation_steps=20,
):
"""Run HMC with step_size adaptation, and return RunHMCResults."""
assert precondition_scheme in PRECONDITION_SCHEMES
dims = target_mvn.event_shape[0]
target_cov = target_mvn.covariance()
cov_linop = tf.linalg.LinearOperatorFullMatrix(
target_cov,
is_self_adjoint=True,
is_positive_definite=True)
if precondition_scheme == 'no_preconditioner':
momentum_distribution = None
# Internal to the sampler, these scales are being used (implicitly).
internal_scales = tf.sqrt(tf.linalg.eigvalsh(target_cov))
elif precondition_scheme == 'direct':
momentum_distribution = tfd.MultivariateNormalLinearOperator(
# The covariance of momentum is inv(covariance of position), and we
# parameterize distributions by a square root of the covariance.
scale=cov_linop.inverse().cholesky(),
)
# Internal to the sampler, these scales are being used (implicitly).
internal_scales = tf.ones(dims)
elif precondition_scheme == 'precision_factor':
momentum_distribution = tfde.MultivariateNormalPrecisionFactorLinearOperator(
# The precision of momentum is the covariance of position.
# The "factor" is the cholesky factor.
precision_factor=cov_linop.cholesky(),
)
# Internal to the sampler, these scales are being used (implicitly).
internal_scales = tf.ones(dims)
elif precondition_scheme == 'sqrtm':
momentum_distribution = tfde.MultivariateNormalPrecisionFactorLinearOperator(
# The symmetric square root is a perfectly valid "factor".
precision_factor=tf.linalg.LinearOperatorFullMatrix(
tf.linalg.sqrtm(target_cov)),
)
# Internal to the sampler, these scales are being used (implicitly).
internal_scales = tf.ones(dims)
elif precondition_scheme == 'scale':
momentum_distribution = tfde.MultivariateNormalPrecisionFactorLinearOperator(
# Nothing wrong with using "scale", since the scale should be the
# same as cov_linop.cholesky().
precision_factor=target_mvn.scale,
)
# Internal to the sampler, these scales are being used (implicitly).
internal_scales = tf.ones(dims)
else:
raise RuntimeError(
'Unhandled precondition_scheme: {}'.format(precondition_scheme))
momentum_distribution = _make_composite_tensor(momentum_distribution)
    # Asymptotic step size, assuming P[accept] = target_accept.
expected_step = self._calculate_asymptotic_step_size(
scales=internal_scales,
prob_accept=target_accept,
)
# Initialize step size to something close to the expected required step
# size. This helps reduce the need for a long burn-in. Don't use the
# expected step size exactly, since that would be cheating.
initial_step_size = expected_step / 2.345
# Set num_leapfrog_steps so that we get decent ESS.
max_internal_scale = tf.reduce_max(internal_scales)
num_leapfrog_steps = tf.minimum(
tf.cast(
tf.math.ceil(1.5 * max_internal_scale / expected_step),
dtype=tf.int32), 30)
hmc_kernel = tfp.mcmc.DualAveragingStepSizeAdaptation(
tfp.experimental.mcmc.PreconditionedHamiltonianMonteCarlo(
target_log_prob_fn=target_mvn.log_prob,
momentum_distribution=momentum_distribution,
step_size=initial_step_size,
num_leapfrog_steps=num_leapfrog_steps),
num_adaptation_steps=num_adaptation_steps,
target_accept_prob=target_accept)
def trace_fn(_, pkr):
results = pkr.inner_results
return {
'accept_prob':
tf.exp(tf.minimum(0., results.log_accept_ratio)),
'step_size':
results.accepted_results.step_size,
}
@tf.function
def do_run_run_run():
"""Do a run, return RunHMCResults."""
states, trace = tfp.mcmc.sample_chain(
num_results,
current_state=tf.identity(target_mvn.sample(seed=0)),
kernel=hmc_kernel,
num_burnin_steps=num_adaptation_steps,
seed=test_util.test_seed(),
trace_fn=trace_fn)
# If we had some number of chain dimensions, we would change sample_axis.
sample_axis = 0
sample_cov = tfp.stats.covariance(states, sample_axis=sample_axis)
max_variance = tf.reduce_max(tf.linalg.diag_part(sample_cov))
max_stddev = tf.sqrt(max_variance)
min_ess = tf.reduce_min(tfp.mcmc.effective_sample_size(states))
mean_accept_prob = tf.reduce_mean(trace['accept_prob'])
# Asymptotic step size given that P[accept] = mean_accept_prob.
asymptotic_step_size = self._calculate_asymptotic_step_size(
scales=internal_scales,
prob_accept=mean_accept_prob,
)
return RunHMCResults(
draws=states,
step_size=trace['step_size'],
final_step_size=trace['step_size'][-1],
asymptotic_step_size=asymptotic_step_size,
accept_prob=trace['accept_prob'],
mean_accept_prob=mean_accept_prob,
min_ess=tf.reduce_min(tfp.mcmc.effective_sample_size(states)),
sample_mean=tf.reduce_mean(states, axis=sample_axis),
sample_cov=sample_cov,
sample_var=tf.linalg.diag_part(sample_cov),
# Standard error in variance estimation is related to standard
# deviation of variance estimates. For a Normal, this is just Sqrt(2)
# times variance divided by sqrt sample size (or so my old notes say).
# So a relative tolerance is useful.
# Add in a factor of 5 as a buffer.
var_rtol=5 * tf.sqrt(2.) / tf.sqrt(min_ess),
# For covariance matrix estimates, there can be terms that have
# expectation = 0 (e.g. off diagonal entries). So the above doesn't
# hold. So use an atol.
cov_atol=5 * max_variance / tf.sqrt(min_ess),
# Standard error in mean estimation is stddev divided by sqrt
# sample size. This is an absolute tolerance.
# Add in a factor of 5 as a buffer.
mean_atol=5 * max_stddev / tf.sqrt(min_ess),
)
# Evaluate now, to ensure that states/accept_prob/etc... all match up with
# the same graph evaluation. This is a gotcha about TFP MCMC in graph mode.
return self.evaluate(do_run_run_run())
def _check_correctness_of_moments_and_preconditioning(
self,
target_mvn,
num_results,
precondition_scheme,
check_step_size_asymptotics=True,
asymptotic_step_size_rtol=0.2,
):
"""Test that step size adaptation finds the theoretical optimal step size.
    See _calculate_asymptotic_step_size for formula details, but roughly, for a
high dimensional Gaussian posterior, we can calculate the approximate step
size to achieve a given target accept rate. For such a posterior,
`PreconditionedHMC` mimics the dynamics of sampling from an isotropic
standard normal distribution, and so should adapt to the step size where
the scales are all ones.
In the example below, `expected_step` is around 0.00002, so there is
significantly different behavior when conditioning.
Args:
target_mvn: Multivariate normal instance to sample from.
num_results: Number of samples to collect (post burn-in).
precondition_scheme: String telling how to do preconditioning.
Should be in PRECONDITION_SCHEMES.
check_step_size_asymptotics: Boolean telling whether to check that the
step size and P[accept] match up with expected values. This checks
that the "internal/implicit" sampling distribution is as expected. E.g.
when preconditioning, we expect the internal distribution to be a
standard Normal. When not preconditioning we expect it to be the target.
asymptotic_step_size_rtol: rtol for the asymptotic step size test.
The "nastier" spectra (with a small number of tiny eigenvalues) often
require larger tolerance. About 10% rtol is what we can expect.
20% is the default for safety. When a "bad preconditioner" is used,
these two are off by 100% or more (but no guarantee, since luck may
prevail).
Returns:
RunHMCResults
"""
results = self._run_hmc_with_step_size(
target_mvn, precondition_scheme=precondition_scheme)
if check_step_size_asymptotics:
self.assertAllClose(
results.final_step_size,
results.asymptotic_step_size,
rtol=asymptotic_step_size_rtol)
self.assertAllClose(
results.sample_mean, target_mvn.mean(), atol=results.mean_atol)
self.assertAllClose(
results.sample_var, target_mvn.variance(), rtol=results.var_rtol)
self.assertAllClose(
results.sample_cov, target_mvn.covariance(), atol=results.cov_atol)
return results
@parameterized.named_parameters(
dict(testcase_name='_' + str(scheme), precondition_scheme=scheme)
for scheme in PRECONDITION_SCHEMES)
def test_correctness_with_2d_mvn_tril(self, precondition_scheme):
# Low dimensional test to help people who want to step through and debug.
target_mvn = tfd.MultivariateNormalTriL(
loc=tf.constant([0., 0.]),
scale_tril=[[1., 0.], [0.5, 2.]],
)
self._check_correctness_of_moments_and_preconditioning(
target_mvn,
# Lots of results, to test tight tolerance.
# We're using a small dims here, so this isn't a big deal.
num_results=5000,
precondition_scheme=precondition_scheme,
# We're in such low dimensions that we don't expect asymptotics to work.
check_step_size_asymptotics=False)
@parameterized.named_parameters(
dict(testcase_name='_' + str(scheme), precondition_scheme=scheme)
for scheme in PRECONDITION_SCHEMES)
def test_correctness_with_200d_mvn_tril(self, precondition_scheme):
# This is an almost complete check of the Gaussian case.
dims = 200
scale_wishart = tfd.WishartLinearOperator(
# Important that df is just slightly bigger than dims. This makes the
        # scale_wishart ill conditioned. The result is that tests fail if we do
# not handle transposes correctly.
df=1.1 * dims,
scale=tf.linalg.LinearOperatorIdentity(dims),
input_output_cholesky=True,
name='wishart_for_samples',
)
# evaluate right here to avoid working with a random target_mvn in graph
# mode....that would cause issues, since we read off expected statistics
# from looking at the mvn properties, so it would be bad if these properties
# changed with every graph eval.
scale_tril = self.evaluate(scale_wishart.sample(seed=test_util.test_seed()))
target_mvn = tfd.MultivariateNormalTriL(
# Non-trivial "loc" ensures we do not rely on being centered at 0.
loc=tf.range(0., dims),
scale_tril=scale_tril,
)
self._check_correctness_of_moments_and_preconditioning(
target_mvn,
# Lots of results, to test tight tolerance.
num_results=3000,
precondition_scheme=precondition_scheme,
asymptotic_step_size_rtol=(
0.5 if precondition_scheme == 'no_preconditioner' else 0.25),
)
def test_sets_kinetic_energy(self):
dist = tfd.MultivariateNormalDiag(scale_diag=tf.constant([0.1, 10.]))
step_size = 0.1
kernel = tfp.experimental.mcmc.PreconditionedHamiltonianMonteCarlo(
target_log_prob_fn=dist.log_prob,
step_size=step_size,
num_leapfrog_steps=1,
store_parameters_in_results=True)
init_state = tf.constant([0.1, 0.1])
kr = kernel.bootstrap_results(init_state)
# Manually set the momentum distribution.
kr = unnest.replace_innermost(kr, momentum_distribution=dist)
# Take one leapfrog step using the kernel.
_, nkr = kernel.one_step(init_state, kr, seed=test_util.test_seed())
# Need to evaluate here for consistency in graph mode.
(momentum_parts,
target_grad_parts,
proposed_state,
final_momentum,
target_log_prob,
grads_target_log_prob) = self.evaluate([
nkr.proposed_results.initial_momentum,
nkr.accepted_results.grads_target_log_prob,
nkr.proposed_state,
nkr.proposed_results.final_momentum,
nkr.proposed_results.target_log_prob,
nkr.proposed_results.grads_target_log_prob])
# Take one leapfrog step manually.
leapfrog = tfp.mcmc.internal.leapfrog_integrator.SimpleLeapfrogIntegrator(
target_fn=dist.log_prob,
step_sizes=[step_size],
num_steps=1)
# Again, need to evaluate here for graph mode consistency.
(next_momentum,
next_state,
next_target_log_prob,
grads_next_target_log_prob) = self.evaluate(leapfrog(
momentum_parts=momentum_parts,
state_parts=[init_state],
target=dist.log_prob(init_state),
target_grad_parts=target_grad_parts,
kinetic_energy_fn=lambda x: -dist.log_prob(x)))
# Verify resulting states are the same
self.assertAllClose(proposed_state,
next_state[0])
self.assertAllClose(final_momentum,
next_momentum)
self.assertAllClose(target_log_prob,
next_target_log_prob)
self.assertAllClose(grads_target_log_prob,
grads_next_target_log_prob)
@test_util.test_all_tf_execution_regimes
@parameterized.named_parameters(
dict(testcase_name='_default', use_default=True),
dict(testcase_name='_explicit', use_default=False))
class PreconditionedHMCTest(test_util.TestCase):
def test_f64(self, use_default):
if use_default:
momentum_distribution = None
else:
momentum_distribution = tfp.experimental.as_composite(
tfd.Normal(0., tf.constant(.5, dtype=tf.float64)))
kernel = tfp.experimental.mcmc.PreconditionedHamiltonianMonteCarlo(
lambda x: -x**2, step_size=.5, num_leapfrog_steps=2,
momentum_distribution=momentum_distribution)
kernel = tfp.mcmc.SimpleStepSizeAdaptation(kernel, num_adaptation_steps=3)
self.evaluate(tfp.mcmc.sample_chain(
1, kernel=kernel, current_state=tf.ones([], tf.float64),
num_burnin_steps=5, trace_fn=None))
# TODO(b/175787154): Enable this test
def DISABLED_test_f64_multichain(self, use_default):
if use_default:
momentum_distribution = None
else:
momentum_distribution = tfp.experimental.as_composite(
tfd.Normal(0., tf.constant(.5, dtype=tf.float64)))
kernel = tfp.experimental.mcmc.PreconditionedHamiltonianMonteCarlo(
lambda x: -x**2, step_size=.5, num_leapfrog_steps=2,
momentum_distribution=momentum_distribution)
kernel = tfp.mcmc.SimpleStepSizeAdaptation(kernel, num_adaptation_steps=3)
nchains = 7
self.evaluate(tfp.mcmc.sample_chain(
1, kernel=kernel, current_state=tf.ones([nchains], tf.float64),
num_burnin_steps=5, trace_fn=None))
def test_diag(self, use_default):
"""Test that a diagonal multivariate normal can be effectively sampled from.
Note that the effective sample size is expected to be exactly 100: this is
because the step size is tuned well enough that a single HMC step takes
a point to nearly the antipodal point, which causes a negative lag 1
autocorrelation, and the effective sample size calculation cuts off when
the autocorrelation drops below zero.
Args:
use_default: bool, whether to use a custom momentum distribution, or
the default.
"""
mvn = tfd.MultivariateNormalDiag(
loc=[1., 2., 3.], scale_diag=[0.1, 1., 10.])
if use_default:
momentum_distribution = None
step_size = 0.1
else:
momentum_distribution = tfde.MultivariateNormalPrecisionFactorLinearOperator(
precision_factor=mvn.scale,
)
step_size = 0.3
hmc_kernel = tfp.experimental.mcmc.PreconditionedHamiltonianMonteCarlo(
target_log_prob_fn=mvn.log_prob,
momentum_distribution=momentum_distribution,
step_size=step_size,
num_leapfrog_steps=10)
draws = tfp.mcmc.sample_chain(
110,
tf.zeros(3),
kernel=hmc_kernel,
seed=test_util.test_seed(),
trace_fn=None)
ess = tfp.mcmc.effective_sample_size(draws[-100:],
filter_threshold=0,
filter_beyond_positive_pairs=False)
if not use_default:
self.assertAllClose(ess, tf.fill([3], 100.))
else:
self.assertLess(self.evaluate(tf.reduce_min(ess)), 100.)
def test_tril(self, use_default):
if tf.executing_eagerly():
self.skipTest('b/169882656 Too many warnings are issued in eager logs')
cov = 0.9 * tf.ones([3, 3]) + 0.1 * tf.eye(3)
scale = tf.linalg.cholesky(cov)
mv_tril = tfd.MultivariateNormalTriL(loc=[1., 2., 3.],
scale_tril=scale)
if use_default:
momentum_distribution = None
else:
momentum_distribution = tfde.MultivariateNormalPrecisionFactorLinearOperator(
# TODO(b/170015229) Don't use the covariance as inverse scale,
# it is the wrong preconditioner.
precision_factor=tf.linalg.LinearOperatorFullMatrix(cov),
)
hmc_kernel = tfp.experimental.mcmc.PreconditionedHamiltonianMonteCarlo(
target_log_prob_fn=mv_tril.log_prob,
momentum_distribution=momentum_distribution,
step_size=0.2,
num_leapfrog_steps=10)
draws = tfp.mcmc.sample_chain(
120,
tf.zeros(3),
kernel=hmc_kernel,
seed=test_util.test_seed(),
trace_fn=None)
ess = tfp.mcmc.effective_sample_size(draws[-100:],
filter_threshold=0,
filter_beyond_positive_pairs=False)
# TODO(b/170015229): These and other tests like it, which assert ess is
# greater than some number, were all passing, even though the preconditioner
# was the wrong one. Why is that? A guess is that since there are *many*
# ways to have larger ess, these tests don't really test correctness.
# Perhaps remove all tests like these.
if not use_default:
self.assertAllClose(ess, tf.fill([3], 100.))
else:
self.assertLess(self.evaluate(tf.reduce_min(ess)), 100.)
def test_transform(self, use_default):
mvn = tfd.MultivariateNormalDiag(loc=[1., 2., 3.], scale_diag=[1., 1., 1.])
diag_variance = tf.constant([0.1, 1., 10.])
if use_default:
momentum_distribution = None
else:
momentum_distribution = tfde.MultivariateNormalPrecisionFactorLinearOperator(
precision_factor=tf.linalg.LinearOperatorDiag(
tf.math.sqrt(diag_variance)))
hmc_kernel = tfp.experimental.mcmc.PreconditionedHamiltonianMonteCarlo(
target_log_prob_fn=mvn.log_prob,
momentum_distribution=momentum_distribution,
step_size=0.3,
num_leapfrog_steps=10)
transformed_kernel = tfp.mcmc.TransformedTransitionKernel(
hmc_kernel, bijector=tfb.Scale(tf.math.rsqrt(diag_variance)))
draws = tfp.mcmc.sample_chain(
110,
tf.zeros(3),
kernel=transformed_kernel,
seed=test_util.test_seed(),
trace_fn=None)
ess = tfp.mcmc.effective_sample_size(draws[-100:],
filter_threshold=0,
filter_beyond_positive_pairs=False)
if not use_default:
self.assertAllClose(ess, tf.fill([3], 100.))
else:
self.assertLess(self.evaluate(tf.reduce_min(ess)), 100.)
def test_multi_state_part(self, use_default):
mvn = tfd.JointDistributionSequential([
tfd.Normal(1., 0.1),
tfd.Normal(2., 1.),
tfd.Independent(tfd.Normal(3 * tf.ones([2, 3, 4]), 10.), 3)
])
if use_default:
momentum_distribution = None
step_size = 0.1
else:
reshape_to_scalar = tfp.bijectors.Reshape(event_shape_out=[])
reshape_to_234 = tfp.bijectors.Reshape(event_shape_out=[2, 3, 4])
momentum_distribution = tfd.JointDistributionSequential([
reshape_to_scalar(
tfde.MultivariateNormalPrecisionFactorLinearOperator(
precision_factor=tf.linalg.LinearOperatorDiag([0.1]))),
reshape_to_scalar(
tfde.MultivariateNormalPrecisionFactorLinearOperator(
precision_factor=tf.linalg.LinearOperatorDiag([1.]))),
reshape_to_234(
tfde.MultivariateNormalPrecisionFactorLinearOperator(
precision_factor=tf.linalg.LinearOperatorDiag(
tf.fill([24], 10.))))
])
step_size = 0.3
hmc_kernel = tfp.experimental.mcmc.PreconditionedHamiltonianMonteCarlo(
target_log_prob_fn=mvn.log_prob,
momentum_distribution=momentum_distribution,
step_size=step_size,
num_leapfrog_steps=10)
draws = tfp.mcmc.sample_chain(
100, [0., 0., tf.zeros((2, 3, 4))],
kernel=hmc_kernel,
seed=test_util.test_seed(),
trace_fn=None)
ess = tfp.mcmc.effective_sample_size(draws,
filter_threshold=0,
filter_beyond_positive_pairs=False)
if not use_default:
self.assertAllClose(
self.evaluate(ess),
[tf.constant(100.),
tf.constant(100.), 100. * tf.ones((2, 3, 4))])
else:
self.assertLess(
self.evaluate(
tf.reduce_min(tf.nest.map_structure(tf.reduce_min, ess))),
50.)
def test_batched_state(self, use_default):
mvn = tfd.MultivariateNormalDiag(
loc=[1., 2., 3.], scale_diag=[0.1, 1., 10.])
batch_shape = [2, 4]
if use_default:
momentum_distribution = None
step_size = 0.1
else:
momentum_distribution = tfde.MultivariateNormalPrecisionFactorLinearOperator(
tf.zeros((2, 4, 3)), precision_factor=mvn.scale)
step_size = 0.3
hmc_kernel = tfp.experimental.mcmc.PreconditionedHamiltonianMonteCarlo(
target_log_prob_fn=mvn.log_prob,
momentum_distribution=momentum_distribution,
step_size=step_size,
num_leapfrog_steps=10)
draws = tfp.mcmc.sample_chain(
110,
tf.zeros(batch_shape + [3]),
kernel=hmc_kernel,
seed=test_util.test_seed(),
trace_fn=None)
ess = tfp.mcmc.effective_sample_size(draws[10:], cross_chain_dims=[1, 2],
filter_threshold=0,
filter_beyond_positive_pairs=False)
if not use_default:
self.assertAllClose(self.evaluate(ess), 100 * 2. * 4. * tf.ones(3))
else:
self.assertLess(self.evaluate(tf.reduce_min(ess)), 100.)
def test_batches(self, use_default):
mvn = tfd.JointDistributionSequential(
[tfd.Normal(1., 0.1),
tfd.Normal(2., 1.),
tfd.Normal(3., 10.)])
n_chains = 10
if use_default:
momentum_distribution = None
step_size = 0.1
else:
reshape_to_scalar = tfp.bijectors.Reshape(event_shape_out=[])
momentum_distribution = tfd.JointDistributionSequential([
reshape_to_scalar(
tfde.MultivariateNormalPrecisionFactorLinearOperator(
precision_factor=tf.linalg.LinearOperatorDiag(
tf.fill([n_chains, 1], 0.1)))),
reshape_to_scalar(
tfde.MultivariateNormalPrecisionFactorLinearOperator(
precision_factor=tf.linalg.LinearOperatorDiag(
tf.fill([n_chains, 1], 1.)))),
reshape_to_scalar(
tfde.MultivariateNormalPrecisionFactorLinearOperator(
precision_factor=tf.linalg.LinearOperatorDiag(
tf.fill([n_chains, 1], 10.)))),
])
step_size = 0.3
hmc_kernel = tfp.experimental.mcmc.PreconditionedHamiltonianMonteCarlo(
target_log_prob_fn=mvn.log_prob,
momentum_distribution=momentum_distribution,
step_size=step_size,
num_leapfrog_steps=10)
draws = tfp.mcmc.sample_chain(
100, [tf.zeros([n_chains]) for _ in range(3)],
kernel=hmc_kernel,
seed=test_util.test_seed(),
trace_fn=None)
ess = tfp.mcmc.effective_sample_size(
draws, cross_chain_dims=[1 for _ in draws],
filter_threshold=0, filter_beyond_positive_pairs=False)
if not use_default:
self.assertAllClose(self.evaluate(ess), 100 * n_chains * tf.ones(3))
else:
self.assertLess(self.evaluate(tf.reduce_min(ess)), 100.)
if __name__ == '__main__':
tf.test.main()
| ["[email protected]"] | |
5b18fbd4b0a8183ff967c046a05f8f8ac468e3eb | 2711e7408e590648ac6a51725c2177a56c566403 | /smilebuddies/urls.py | ea9397e69f37780d921d593336f630dad2ff758f | [] | no_license | SeedyROM/smilebuddies | 457415c1c843b495d92bdb925b0597411f1222c2 | 6ba4827205ce48c1b19786c9e32b9993cf8b43aa | refs/heads/master | 2020-03-21T15:29:13.592031 | 2018-06-26T10:38:38 | 2018-06-26T10:38:38 | 138,715,243 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 881 | py |
"""smilebuddies URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from django.views.generic import TemplateView
urlpatterns = [
path('admin/', admin.site.urls),
path('', TemplateView.as_view(template_name='landing.html'), name='landing')
]
| ["[email protected]"] | |
e5850cab963a2bed4094268fcad193eda0cd489c | 717171ed7a14ad60dd42d62fe0dd217a0c0c50fd | /19年7月/7.18/url编码和解码.py | 44e1a5f421f2f103c0c08b57f4de71423a436f54 | [] | no_license | friedlich/python | 6e9513193227e4e9ee3e30429f173b55b9cdb85d | 1654ef4f616fe7cb9fffe79d1e6e7d7721c861ac | refs/heads/master | 2020-09-04T14:34:48.237404 | 2019-11-18T14:54:44 | 2019-11-18T14:54:44 | 219,756,451 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,798 | py |
# URL decoding in Python
# Module used: urllib
# Function used: urllib.unquote()
from urllib.request import quote, unquote
# import urllib  # importing the bare urllib package does not work here
rawurl = "%E6%B2%B3%E6%BA%90"
url = unquote(rawurl)
print(url)
print(quote("河源"))
print(type(quote('河源')))
# Why do URLs need encoding and decoding?
# In general, if something needs to be encoded, it is not suitable for
# transmission as-is. The reasons vary: the size may be too large, or it may
# contain private data. For URLs, encoding is needed because some characters
# in a URL cause ambiguity.
# For example, URL parameter strings pass arguments as key=value pairs
# separated by '&', as in /s?q=abc&ie=utf-8. If your value string itself
# contains '=' or '&', the receiving server will inevitably parse the URL
# incorrectly, so the ambiguous '&' and '=' characters must be escaped,
# that is, encoded.
# Also, URLs are encoded as ASCII rather than Unicode, which means a URL
# cannot contain any non-ASCII characters, such as Chinese. Otherwise, if
# the client browser and the server browser support different character
# sets, Chinese characters may cause problems.
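# A short added example (not in the original file): if a value inside a
# key=value pair itself contains '&' or '=', percent-encoding removes the
# ambiguity described above.
print(quote("a=b&c", safe=""))  # -> a%3Db%26c
print(unquote("a%3Db%26c"))  # -> a=b&c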
# -*- coding: utf-8 -*-
# @File : urldecode_demo.py
# @Date : 2018-05-11
from urllib.request import quote, unquote
# Encoding
url1 = "https://www.baidu.com/s?wd=中国"
# UTF-8 encoding, specifying the safe (unescaped) characters
ret1 = quote(url1, safe=";/?:@&=+$,", encoding="utf-8")
print(ret1)
print(type(ret1))
# https://www.baidu.com/s?wd=%E4%B8%AD%E5%9B%BD
# GBK encoding
ret2 = quote(url1, encoding="gbk")
print(ret2)
print(type(ret2))
# https%3A//www.baidu.com/s%3Fwd%3D%D6%D0%B9%FA
# Decoding
url3 = "https://www.baidu.com/s?wd=%E4%B8%AD%E5%9B%BD"
print(unquote(url3))
url4 = 'https%3A//www.baidu.com/s%3Fwd%3D%D6%D0%B9%FA'
print(unquote(url4, encoding='gbk'))
| ["[email protected]"] | |
69e17f4c855e3719a67fb44ed072035427f7e853 | 91d1a6968b90d9d461e9a2ece12b465486e3ccc2 | /glue_read_2/workflow-run_get.py | eb26a1136104d518e28d211b93a913de8e86b4f2 | [] | no_license | lxtxl/aws_cli | c31fc994c9a4296d6bac851e680d5adbf7e93481 | aaf35df1b7509abf5601d3f09ff1fece482facda | refs/heads/master | 2023-02-06T09:00:33.088379 | 2020-12-27T13:38:45 | 2020-12-27T13:38:45 | 318,686,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,045 | py |
#!/usr/bin/python
# -*- codding: utf-8 -*-
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from common.execute_command import execute_two_parameter
# url : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/glue/get-workflow-run.html
if __name__ == '__main__':
"""
get-workflow-runs : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/glue/get-workflow-runs.html
resume-workflow-run : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/glue/resume-workflow-run.html
start-workflow-run : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/glue/start-workflow-run.html
stop-workflow-run : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/glue/stop-workflow-run.html
"""
parameter_display_string = """
# name : Name of the workflow being run.
# run-id : The ID of the workflow run.
"""
    execute_two_parameter("glue", "get-workflow-run", "name", "run-id", parameter_display_string)
| ["[email protected]"] | |
1cdef2efca5b6c72f28e2bd56aee45c125d3e2e9 | 21cfc943bf4989362fe4b1063ee9451a44175098 | /kitsune/kbadge/__init__.py | 24d06aa6bd2f1657367f5d85448c307b3bbb3212 | [] | permissive | feer56/Kitsune1 | da5d9d4b882cd9df7a4b187b65dcc3fe8175e794 | 5967cb7b2689dec760727c7534ff0f73a6901ba4 | refs/heads/master | 2023-01-07T14:34:24.046353 | 2014-11-23T04:38:04 | 2014-11-23T04:38:04 | 27,058,591 | 1 | 0 | BSD-3-Clause | 2022-12-27T14:53:52 | 2014-11-24T03:14:18 | Python | UTF-8 | Python | false | false | 28 | py |
BADGER_BADGE_PAGE_SIZE = 12
| ["[email protected]"] | |
4d7fbb683f749be440f1e3f86814a797b247768e | 47fc606bcdfe5b563409386c94f745f920408851 | /src/python/twitter/common/python/marshaller.py | b5c29a06a99c6afbea083559b3636740c63a4085 | ["Apache-2.0", "LicenseRef-scancode-warranty-disclaimer"] | permissive | ewhauser/commons | 2ef443c4f0be2fbbf1ff3226ed35058a7cc8254a | 0777b346cf1b32722b7b5f6ae9e6593fe185de22 | refs/heads/master | 2021-01-18T06:00:06.901691 | 2013-06-11T22:14:55 | 2013-06-11T22:14:55 | 1,741,747 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,032 | py |
from imp import get_magic
import marshal
import struct
import time
from twitter.common.lang import Compatibility
class CodeTimestamp(object):
TIMESTAMP_RANGE = (4, 8)
  @classmethod
  def from_timestamp(cls, timestamp):
    return cls(timestamp)
  @classmethod
  def from_object(cls, pyc_object):
    # Keep the raw integer timestamp so that to_object() can pack it again.
    stamp = struct.unpack('I', pyc_object[slice(*CodeTimestamp.TIMESTAMP_RANGE)])[0]
    return cls(stamp)
def __init__(self, stamp=time.time()):
self._stamp = stamp
def to_object(self):
    return struct.pack('I', int(self._stamp))
class CodeMarshaller(object):
class InvalidCode(Exception): pass
MAGIC = struct.unpack('I', get_magic())[0]
MAGIC_RANGE = (0, 4)
TIMESTAMP_RANGE = (4, 8)
@staticmethod
def from_pyc(pyc):
if not isinstance(pyc, Compatibility.bytes) and not hasattr(pyc, 'read'):
raise CodeMarshaller.InvalidCode(
"CodeMarshaller.from_pyc expects a code or file-like object!")
if not isinstance(pyc, Compatibility.bytes):
pyc = pyc.read()
pyc_magic = struct.unpack('I', pyc[slice(*CodeMarshaller.MAGIC_RANGE)])[0]
if pyc_magic != CodeMarshaller.MAGIC:
raise CodeMarshaller.InvalidCode("Bad magic number! Got 0x%X" % pyc_magic)
    stamp = struct.unpack('I', pyc[slice(*CodeMarshaller.TIMESTAMP_RANGE)])[0]
try:
code = marshal.loads(pyc[8:])
except ValueError as e:
raise CodeMarshaller.InvalidCode("Unmarshaling error! %s" % e)
return CodeMarshaller(code, stamp)
@staticmethod
def from_py(py, filename):
stamp = int(time.time())
code = compile(py, filename, 'exec')
return CodeMarshaller(code, stamp)
def __init__(self, code, stamp):
self._code = code
self._stamp = stamp
@property
def code(self):
return self._code
def to_pyc(self):
sio = Compatibility.BytesIO()
sio.write(struct.pack('I', CodeMarshaller.MAGIC))
sio.write(struct.pack('I', self._stamp))
sio.write(marshal.dumps(self._code))
return sio.getvalue()
| [
"[email protected]"
] | |
640b1ecbbff09f8d8ae3a1a9b0aa9c8146f0a093 | 4ba6207a7e4aa84da494e0f6d811eca606659b73 | /groupster/migrations/0003_jobseeker_resume.py | 5f0af9769b89646d52c1f168f716bf3a2099c0e6 | [] | no_license | jkol36/groupster | da5d9d4b882cd9df7a4b187b65cdc3fe8175e794 | 5967cb7b2689dec760727c7534ff0f73a6901ba4 | refs/heads/master | 2021-01-02T09:19:49.841001 | 2015-06-10T18:57:37 | 2015-06-10T18:57:37 | 35,061,183 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 424 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('groupster', '0002_remove_jobseeker_resume'),
]
operations = [
migrations.AddField(
model_name='jobseeker',
name='resume',
field=models.FileField(default=None, upload_to=b''),
),
]
| ["[email protected]"] | |
67bffd0980d1ea7f4201ae6348603c60f4fb7966 | 42fa1862effc3e494859904b76c43ce2bcd623a0 | /idealised_box_simulations_paper2b.py | 94394f21530d4fa8c134d0b1ed14dcc4aec1a8ec | [] | no_license | PaulHalloran/desktop_python_scripts | 3e83aedf3e232da610b5f7477e4d7e8fb0253f99 | 325e923527278a5c3e9ab8c978f29b2816dab087 | refs/heads/master | 2021-01-01T19:52:06.828997 | 2015-06-27T21:14:10 | 2015-06-27T21:14:10 | 38,155,075 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,527 | py |
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
import matplotlib as mpl
results = np.genfromtxt('/home/ph290/box_modelling/boxmodel_6_box_back_to_basics/results/spg_box_model_qump_results_3.csv',delimiter = ',')
results_stg = np.genfromtxt('/home/ph290/box_modelling/boxmodel_6_box_back_to_basics/results/stg_box_model_qump_results_3.csv',delimiter = ',')
forcing_dir = '/home/ph290/box_modelling/boxmodel_6_box_back_to_basics/forcing_data/co2/'
co2_tmp = np.genfromtxt(forcing_dir+'rcp85_1.txt',delimiter = ',')
co2 = np.zeros([co2_tmp.shape[0],4])
co2[:,0] = np.genfromtxt(forcing_dir+'rcp85_1.txt',delimiter = ',')[:,1]
co2[:,1] = np.genfromtxt(forcing_dir+'rcp85_2.txt',delimiter = ',')[:,1]
co2[:,2] = np.genfromtxt(forcing_dir+'rcp85_3.txt',delimiter = ',')[:,1]
rcp85_yr = np.genfromtxt(forcing_dir+'historical_and_rcp85_atm_co2.txt',delimiter = ',')[:,0]
rcp85 = np.genfromtxt(forcing_dir+'historical_and_rcp85_atm_co2.txt',delimiter = ',')[:,1]
mpl.rcdefaults()
# 'family' was listed twice ('monospace' then 'serif'); only 'serif' took effect
font = {'weight' : 'bold',
        'family' : 'serif',
        'size' : 14}
mpl.rc('font', **font)
plt.close('all')
fig, (ax1, ax2) = plt.subplots(1,2,figsize=(10, 4))
leg_lab = ['y = 1.0285**x +c','y = 1.0265**x +c','y = 1.0305**x +c']
for i in range(3):
ax1.plot(co2[:,i],linewidth = 6,alpha= 0.4,label = leg_lab[i])
ax1.legend(loc = 2,prop={'size':10, 'family' : 'normal','weight' : 'bold'},ncol = 1).draw_frame(False)
#ax1.plot(rcp85_yr-1860,rcp85,'k',linewidth = 6,alpha= 0.4)
ax1.set_xlim([0,240])
ax1.set_ylim([200,1800])
ax1.set_ylabel('atm. CO$_2$ (ppm)', multialignment='center',fontweight='bold',fontsize = 14)
ax1.set_xlabel('year', multialignment='center',fontweight='bold',fontsize = 14)
for i in range(3):
ax2.plot(results[:,0]-results[0,0],results[:,i+1],linewidth = 6,alpha= 0.4)
ax2b = ax2.twinx()
for i in range(3):
ax2b.plot(results[:,0]-results[0,0],results_stg[:,i+1],linewidth = 6,alpha= 0.4,linestyle = '--')
leg_lab2 = ['Subpolar N. Atlantic (left axis)','Subtropical/equatorial (right axis)']
tmp = ax2.plot([0,0],'k',linewidth = 6,alpha= 0.4,label = leg_lab2[0])
tmp2 = ax2.plot([0,0],'k',linewidth = 6,alpha= 0.4,linestyle = '--',label = leg_lab2[1])
ax2.legend(loc = 2,prop={'size':10, 'family' : 'normal','weight' : 'bold'},ncol = 1).draw_frame(False)
tmp.pop(0).remove()
tmp2.pop(0).remove()
ax2.set_ylim([10,31])
ax2.set_ylabel('atm. [CO$_2$] minus ocean [CO$_2$]\n(ppm)', multialignment='center',fontweight='bold',fontsize = 14)
ax2.set_xlabel('year', multialignment='center',fontweight='bold',fontsize = 14)
ax2.set_xlim([0,240])
#plt.arrow(0,0,0,1, shape='full', lw=3, length_includes_head=True, head_width=.01)
a1 = matplotlib.patches.Arrow(0.5-0.01,0.5+0.01,0.05,0.0, width=0.8,edgecolor='none',facecolor='gray',fill=True,transform=fig.transFigure, figure=fig,alpha=0.25)
fig.lines.extend([a1])
fig.canvas.draw()
plt.tight_layout()
plt.savefig('/home/ph290/Documents/figures/n_atl_paper_II/mechanism_1b.png')
plt.savefig('/home/ph290/Documents/figures/n_atl_paper_II/mechanism_1b.pdf')
plt.show(block = False)
#plt.close('all')
'''
spg-stg difference plots
'''
#for i in range(4):
# ax1.plot(co2[:,i],linewidth = 6,alpha= 0.4)
#
#ax1.set_ylabel('atm. CO$_2$ (ppm)', multialignment='center',fontweight='bold',fontsize = 14)
#ax1.set_xlabel('year', multialignment='center',fontweight='bold',fontsize = 14)
#plt.close('all')
colours = ['b','r']
fig, (ax1) = plt.subplots(1,1,figsize=(5, 4))
ax1.plot(results[:,0]-results[0,0],results[:,i+1],linewidth = 6,alpha= 0.4,linestyle = '-',color=colours[0])
ax2 = ax1.twinx()
ax2.plot(results[:,0]-results[0,0],results_stg[:,i+1],linewidth = 6,alpha= 0.4,linestyle = '--',color=colours[1])
ax1.set_xlim([150,160])
min1 = 22
max1 = 27
min1b = -1
max1b = 4
ax1.set_ylim([min1,max1])
ax2.set_ylim([min1b,max1b])
ax1.set_ylabel('atm. [CO$_2$] minus ocean [CO$_2$]\n(ppm)', multialignment='center',fontweight='bold',fontsize = 14)
ax1.set_ylabel('Subtropical atm. [CO$_2$] minus ocean [CO$_2$]\n(ppm)', multialignment='center',fontweight='bold',fontsize = 14)
ax1.set_xlabel('year', multialignment='center',fontweight='bold',fontsize = 14)
plt.tight_layout()
#plt.savefig('/home/ph290/Documents/figures/n_atl_paper_II/mechanism_2.png')
#plt.savefig('/home/ph290/Documents/figures/n_atl_paper_II/mechanism_2.pdf')
plt.show(block = False)
#plt.close('all')
'''
2
'''
fig, (ax1) = plt.subplots(1,1,figsize=(5, 4))
ax1.plot(results[:,0]-results[0,0],results[:,i+1],linewidth = 6,alpha= 0.4,linestyle = '-',color=colours[0])
ax1.plot(results[:,0]-results[0,0],results_stg[:,i+1],linewidth = 6,alpha= 0.4,linestyle = '--',color=colours[1])
ax1.set_xlim([155,165])
#min1 = 100
#max1 = 160
ax1.set_ylim([min1,max1])
ax1.set_ylabel('atm. [CO$_2$] minus ocean [CO$_2$]\n(ppm)', multialignment='center',fontweight='bold',fontsize = 14)
ax1.set_ylabel('Subtropical atm. [CO$_2$] minus ocean [CO$_2$]\n(ppm)', multialignment='center',fontweight='bold',fontsize = 14)
ax1.set_xlabel('year', multialignment='center',fontweight='bold',fontsize = 14)
plt.tight_layout()
#plt.savefig('/home/ph290/Documents/figures/n_atl_paper_II/mechanism_2.png')
#plt.savefig('/home/ph290/Documents/figures/n_atl_paper_II/mechanism_2.pdf')
plt.show(block = False)
#plt.close('all')
'''
3
'''
fig, (ax1) = plt.subplots(1,1,figsize=(5, 4))
ax1.plot(results[:,0]-results[0,0],results[:,i+1],linewidth = 6,alpha= 0.4,linestyle = '-',color=colours[0])
ax1.plot(results[:,0]-results[0,0],results_stg[:,i+1],linewidth = 6,alpha= 0.4,linestyle = '--',color=colours[1])
ax1.set_xlim([170,180])
#min1 = 100
#max1 = 160
ax1.set_ylim([min1,max1])
ax1.set_ylabel('atm. [CO$_2$] minus ocean [CO$_2$]\n(ppm)', multialignment='center',fontweight='bold',fontsize = 14)
ax1.set_ylabel('Subtropical atm. [CO$_2$] minus ocean [CO$_2$]\n(ppm)', multialignment='center',fontweight='bold',fontsize = 14)
ax1.set_xlabel('year', multialignment='center',fontweight='bold',fontsize = 14)
plt.tight_layout()
#plt.savefig('/home/ph290/Documents/figures/n_atl_paper_II/mechanism_2.png')
#plt.savefig('/home/ph290/Documents/figures/n_atl_paper_II/mechanism_2.pdf')
plt.show(block = False)
#plt.close('all')
results = np.genfromtxt('/home/ph290/box_modelling/boxmodel_6_box_back_to_basics/results/rcp85_spg_box_model_qump_results_3.csv',delimiter = ',')
results_stg = np.genfromtxt('/home/ph290/box_modelling/boxmodel_6_box_back_to_basics/results/rcp85_stg_box_model_qump_results_3.csv',delimiter = ',')
mpl.rcdefaults()
# 'family' was listed twice ('monospace' then 'serif'); only 'serif' took effect
font = {'weight' : 'bold',
        'family' : 'serif',
        'size' : 14}
mpl.rc('font', **font)
plt.close('all')
fig, (ax1) = plt.subplots(1,1,figsize=(5, 4))
for i in range(1):
ax1.plot(results[:,0]-results[0,0],results[:,i+1],'k',linewidth = 6,alpha= 0.4)
ax1b = ax1.twinx()
for i in range(1):
ax1b.plot(results[:,0]-results[0,0],results_stg[:,i+1],'k',linewidth = 6,alpha= 0.4,linestyle = '--')
ax1.set_ylabel('spg atm. [CO$_2$] minus ocean [CO$_2$]\n(ppm)', multialignment='center',fontweight='bold',fontsize = 14)
ax1b.set_ylabel('stg (dasshed) atm. [CO$_2$] minus ocean [CO$_2$]\n(ppm)', multialignment='center',fontweight='bold',fontsize = 14)
ax1.set_xlabel('year', multialignment='center',fontweight='bold',fontsize = 14)
ax1.set_xlim([0,240])
plt.tight_layout()
plt.savefig('/home/ph290/Documents/figures/n_atl_paper_II/rcp_85.png')
plt.savefig('/home/ph290/Documents/figures/n_atl_paper_II/rcp_85.pdf')
plt.show(block = False)
#plt.close('all')
| ["[email protected]"] | |
52c45fcb6941676bb95e51b20065f7003e69df4e | 502e97f0ec4f287b8280a546e7f2555ff3a5a1fd | /cnn_3d/loss_ssim.py | 1f9e166d4af572dad02709668df737d66c13e862 | [] | no_license | carlasailer/cnn_ct_pet | d350692be03432e025e33db6296ac33b36bedf08 | 4e256bb73f7ea0ab046c231762001b9f3535bb00 | refs/heads/master | 2020-12-18T23:11:24.048337 | 2020-01-22T10:40:52 | 2020-01-22T10:40:52 | 235,549,036 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,202 | py |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 25 14:25:02 2019
@author: s1287
"""
import h5py
import os
import keras.backend as K
import numpy as np
from skimage.measure import compare_ssim  # used by calc_ssim below; missing in the original
def calc_ssim_git(y_true, y_pred):
"""structural similarity measurement system."""
## K1, K2 are two constants, much smaller than 1
K1 = 0.04
K2 = 0.06
## mean, std, correlation
mu_x = K.mean(y_pred)
mu_y = K.mean(y_true)
sig_x = K.std(y_pred)
sig_y = K.std(y_true)
sig_xy = (sig_x * sig_y) ** 0.5
## L, number of pixels, C1, C2, two constants
L = 33
C1 = (K1 * L) ** 2
C2 = (K2 * L) ** 2
    ssim = (2 * mu_x * mu_y + C1) * (2 * sig_xy + C2) * 1.0 / ((mu_x ** 2 + mu_y ** 2 + C1) * (sig_x ** 2 + sig_y ** 2 + C2))
return ssim
def calc_ssim(y_true, y_pred):
"""Calculates the structured similarity of two images, ssim is in the range [-1,1]
Parameters:
y_true voxel used for calculation of SSIM
y_pred voxel used for calculation of SSIM
Returns:
ssim_value value of the structured similarity between the two images
"""
# size = y_true.shape
# print('The shape is:')
# print(size)
single_ssim = []
try:
for slice_nr in range(0, y_true.shape[0]):
# slice_ssim = compare_ssim(y_true[slice_nr,:,:], y_pred[slice_nr,:,:], win_size=3)
slice_ssim = compare_ssim(y_true[slice_nr,:,:], y_pred[slice_nr,:,:], win_size=3, gaussian_weights=True)
single_ssim.append(slice_ssim)
ssim_mean = np.mean(single_ssim)
except IndexError:
ssim_mean = 0
return ssim_mean
def calc_ssim_multichannel(y_true, y_pred):
    # Uncommented from the original: this function is called in __main__ below.
    return compare_ssim(y_true, y_pred, multichannel=True, win_size=3)
def ssim_fct(y_true, y_pred):
"""wrapper function to fit into the Keras framework
Parameters:
y_true ground truth voxel
y_pred voxel predicted by network
Returns:
ssim value of the structural similarity, suited as loss function
"""
def ssim(y_true, y_pred):
        return -calc_ssim(K.squeeze(y_true, axis=-1), K.squeeze(y_pred, axis=-1))
return ssim
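# Illustrative usage (an added note; `model` is a hypothetical compiled
# Keras model, not defined in this file):
#   model.compile(optimizer='adam', loss=ssim_fct(None, None))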
if __name__ == '__main__':
contents = os.listdir('/home/s1287/no_backup/s1287/results_interp/patches_for_CNN/')
filename_test = '/home/s1287/no_backup/s1287/results_interp/patches_for_CNN/' + contents[0]
filename_training = '/home/s1287/no_backup/s1287/results_interp/patches_for_CNN/' + contents[1]
with h5py.File(filename_training, 'r') as file:
training_CT = np.array(file.get('CT'))
training_PET = np.array(file.get('PET'))
with h5py.File(filename_test, 'r') as file:
test_CT = np.array(file.get('CT'))
test_PET = np.array(file.get('PET'))
train_data = training_CT
train_labels = training_PET
test_data = test_CT
test_labels = test_PET
example_PET1 = train_labels[0]
example_PET2 = train_labels[1]
current_ssim = calc_ssim(example_PET1, example_PET2)
current_ssim1 = calc_ssim_multichannel(example_PET1, example_PET2)
print(current_ssim)
    print('SSIM Multichannel %f' % current_ssim1)
| ["[email protected]"] | |
b784cb302379736956e6936d0636e72dbf650465 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02399/s627926226.py | 2a4ef109e41aa4a7c46ff6352b1a640bb190278c | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 127 | py |
a, b = [int(i) for i in input().split()]
d = a // b
r = a % b
f = a / b
print('{0} {1} {2:.5f}'.format(d, r, f))
| ["[email protected]"] | |
05a8191a0221fcf44c3631cb1ae3b634e90a6c50 | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/fractions_20200802103056.py | a8e741def594e4049345cfbf9c195c01f24b8d0d | [] | no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 798 | py |
def fractions(numerator,denominator):
if denominator == 0 :
return str(numerator)
number = numerator / denominator
if numerator % denominator == 0:
return str(numerator // denominator)
newStr = str(number)
print(newStr)
largeStr = newStr.split(".")
if len(largeStr[1]) > 1:
return largeStr[0] + "." + '(' + largeStr[1][0] + ')'
return newStr
def frac(numerator, denominator):
    # map of already seen remainders: the remainder is the key and its position
    # in the result is the value, so a repeat tells us where the recurring
    # block starts (cases like 1/6)
    if numerator % denominator == 0:
        return str(numerator // denominator)
    sign = '-' if (numerator < 0) != (denominator < 0) else ''
    numerator, denominator = abs(numerator), abs(denominator)
    res = sign + str(numerator // denominator) + '.'
    mp = {}
    # find the first remainder
    rem = numerator % denominator
    # keep finding the remainder until it becomes zero or repeats
    while rem != 0 and rem not in mp:
        mp[rem] = len(res)
        rem *= 10
        res += str(rem // denominator)
        rem %= denominator
    if rem != 0:
        res = res[:mp[rem]] + '(' + res[mp[rem]:] + ')'
    return res
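# Spot checks (values follow from long division; a sketch, not a test suite):
#   frac(1, 2)    -> '0.5'
#   frac(2, 3)    -> '0.(6)'
#   frac(-4, 333) -> '-0.(012)'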
print(frac(-4,333)) | [
"[email protected]"
] | |
d804293a9bb22f13def744ccad3cf0bcce62647f | 0fa7b9328e04d2ff5a2b607d9ec6962b7ee97532 | /vi_lib/lib/torchutils/test/test_models.py | afef409c72474e1be1ac61ba78474b7a8a8e86e3 | [] | no_license | aaronpmishkin/normalizing_flows | 4b12bcbe85f400bb27d21e93d8a3c35d9e5df90c | 249f0d99fee6d07783a2a3a595cfeb439af8c599 | refs/heads/master | 2020-04-09T01:09:40.906963 | 2018-12-14T07:47:08 | 2018-12-14T07:47:08 | 159,893,931 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,472 | py | import unittest
import torch
import torchutils.models as models
from torchutils.params import bp2v
from torch.nn.utils import vector_to_parameters as v2p
from torch.nn.utils import parameters_to_vector as p2v
class MLPTestCase(unittest.TestCase):
def assertAllClose(self, a, b):
self.assertTrue(torch.allclose(a, b, 0.01))
def get_dummy_inputs(self, n, indim, hiddim, outdim, s):
torch.manual_seed(0)
mlp = models.MLP(indim, hiddim, outdim)
x = torch.rand(n, indim)
noise = torch.randn(s, models.num_params(mlp))
return mlp, x, noise
def test_num_params(self):
self.assertEqual(models.num_params(models.MLP(10,[],1)), (10+1))
self.assertEqual(models.num_params(models.MLP(10,[1],1)), (10+1) + (1+1))
self.assertEqual(models.num_params(models.MLP(10,[2],1)), (10+1)*2 + (2+1))
def test_interface_forward(self):
        mlp, x, _ = self.get_dummy_inputs(7, 5, [], 1, 3)
y = mlp(x)
self.assertTrue(y.shape[0] == x.shape[0])
self.assertTrue(y.shape[1] == 1)
def test_interface_forward_with_noise(self):
n, s = 7, 3
mlp, x, noise = self.get_dummy_inputs(n, 5, [], 1, s)
y = mlp(x, noise)
self.assertTrue(list(y.shape) == [s, n, 1])
mlp, x, noise = self.get_dummy_inputs(n, 5, [11], 1, s)
y = mlp(x, noise)
self.assertTrue(list(y.shape) == [s, n, 1])
def test_backward_with_noise(self):
n, s = 7, 3
def manual_gradient(mlp, x, noise):
mu = p2v(mlp.parameters())
gs = []
for sid in range(s):
v2p((noise[sid,:] + mu).contiguous(), mlp.parameters())
g = torch.autograd.grad(torch.sum(mlp(x)), mlp.parameters())
gs.append(bp2v(g, 0))
v2p(mu, mlp.parameters())
return sum(gs)
mlp, x, noise = self.get_dummy_inputs(n, 5, [], 1, s)
grad1 = p2v(torch.autograd.grad(torch.sum(mlp(x, noise)), mlp.parameters()))
grad2 = manual_gradient(mlp, x, noise)
self.assertAllClose(grad1, grad2)
mlp, x, noise = self.get_dummy_inputs(n, 5, [11], 1, s)
grad1 = p2v(torch.autograd.grad(torch.sum(mlp(x, noise)), mlp.parameters()))
grad2 = manual_gradient(mlp, x, noise)
self.assertAllClose(grad1, grad2)
| [
"[email protected]"
] | |
00dccca5378c4cc542d8e54c54e252e22ed0e38f | 5d4841bd3160418d3deb88b241edc22c7b7eab18 | /server/serving/package_scanner.py | 10cab85c3a049a56099c34414e74816f80bf0b21 | [] | no_license | cxbn12/ntu-nlp | 2493523bb886facfd661dd4194082ccd653496ae | de98f636919267a3701383636ccb31ccf108f28b | refs/heads/master | 2022-03-16T08:32:24.302783 | 2019-11-04T15:31:56 | 2019-11-04T15:31:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,314 | py | import os.path as osp
import sys
from pathlib import Path
def scan_package(path, base_dir='.'):
"""
    Scan for all the Python packages under a certain path. Note that this
    will automatically insert the base directory into ``sys.path``. You should
    be careful if there are packages with the same name: in the case of a
    name collision, packages scanned later will not be imported.
Args:
path (str): The path which all the packages under it will be
imported. You should provide the package path rather than the
package name.
        base_dir (str, optional): The base directory to be used as an import root.
Assume the project structure is like:
.
├── package1
│ └── foo.py
└── setup.py
            Without setting base_dir, the current working directory is used
            as the import root.
            >>> scan_package('package1')
            Which is equivalent to
            >>> import package1.foo
            If you make the scanned path itself the import root,
            >>> scan_package('package1', 'package1')
            this function will import relative to that root:
            >>> import foo
            However, the scan root must never be the package directory itself
            if the package to be scanned is a regular package (with
            __init__.py inside).
.
├── package2
│ ├── __init__.py
│ └── foo.py
└── setup.py
This will raise a ValueError:
>>> scan_package('package2', 'package2')
Which is equivalent to
>>> import .
    Raises:
ValueError:
- path does not exist
- base_dir does not exist
- base_dir is not valid for importing
"""
abs_path = osp.abspath(path)
if not osp.exists(abs_path):
raise ValueError('Parameter `path`: {} not exist'.format(abs_path))
if not osp.exists(base_dir):
raise ValueError('Parameter `base_dir`: {} does not exist'.format(base_dir))
base_dir = osp.abspath(base_dir)
if not abs_path.startswith(base_dir):
raise ValueError('`path`: {} is not a subdirectory of `base_dir`: {}'
.format(abs_path, base_dir))
# mark the base directory as source root
sys.path.insert(0, base_dir)
# scan for all **/*.py file under certain dir
modules = [f for f in Path(abs_path).rglob('*.py') if f.is_file()]
# set **/__init__.py to the package name
modules = [f.parent if f.name == '__init__.py' else f for f in modules]
# import all modules
for module in modules:
module_rel_path = module.relative_to(base_dir)
# check for invalid regular package import
if str(module_rel_path) == '.':
            raise ValueError('You may want to import the package {} with the scan '
                             'root set to the package itself, which will cause an '
                             'import error. Please try a scan root outside the '
                             'package'.format(module.name))
else:
module_name = '.'.join(module_rel_path.with_suffix('').parts)
# check if the package has been imported
if module_name not in sys.modules.keys():
__import__(module_name)
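# Minimal usage sketch (assumes a sibling 'plugins' directory of plain modules;
# the names are illustrative):
#   scan_package('plugins')             # imports plugins.foo, plugins.bar, ...
#   scan_package('plugins', 'plugins')  # imports foo, bar, ... instead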
| [
"[email protected]"
] | |
cdaacfbe7fce884d91c74e79e4a520fdf8185bea | 382ce68736c1dee91dcb5eb7846eff10519d2b70 | /etcewrappers/utils/iperfserver.py | f780cf8d9b291281079960623c45cbb9d682bb1a | [] | permissive | adjacentlink/python-etce | 4345c7bd719f18022fdb96b0c30efc529948f87c | 72d58535e230f3178b1cab9616a3412514dabaf3 | refs/heads/master | 2023-08-18T05:08:53.519074 | 2022-11-17T16:47:44 | 2022-11-17T16:47:44 | 103,570,572 | 7 | 4 | BSD-3-Clause | 2022-10-11T11:13:42 | 2017-09-14T19:01:27 | Python | UTF-8 | Python | false | false | 3,528 | py | #
# Copyright (c) 2015-2018,2020 - Adjacent Link LLC, Bridgewater, New Jersey
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Adjacent Link LLC nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
from __future__ import absolute_import, division, print_function
import time
from etce.wrapper import Wrapper
class IPerfServer(Wrapper):
"""
Execute iperf as a server. The iperfserver file should contain, at
most, one line of iperf common and server options. The iperf server
    command will be built as 'iperf -s [file options] [arg values]'. Lines
    starting with "#" are ignored as comments. If multiple non-comment
lines are found, only the last one is used.
"""
def register(self, registrar):
registrar.register_infile_name('iperfserver.conf')
registrar.register_outfile_name('iperfserver.log')
registrar.register_argument(
'interval',
None,
'iperf measurement interval (iperf -i switch ' \
'argument)')
registrar.register_argument(
'bufferlen',
None,
'iperf buffer length (iperf -l switch argument)')
def run(self, ctx):
if not ctx.args.infile:
return
# run as daemon, log to output file and add argument specified via input file
argstr = '-D -o %s' % ctx.args.outfile
if ctx.args.interval is not None:
argstr += ' -i %d ' % ctx.args.interval
if ctx.args.bufferlen is not None:
argstr += ' -l %d ' % ctx.args.bufferlen
fileargstr = ''
        serverarglines = [line.strip() for line
                          in open(ctx.args.infile).readlines()
                          if len(line.strip()) > 0
                          and not line.strip().startswith('#')]
# take the last non-comment line as the iperf input
if len(serverarglines) > 0:
fileargstr = serverarglines[-1]
argstr = '-s %s %s' % (fileargstr, argstr)
ctx.run('iperf', argstr)
def stop(self, ctx):
ctx.stop()
# iperfserver takes some time to close down
time.sleep(5)
| [
"[email protected]"
] | |
b104d48e41d9130046b0c49a32c62beba8f2a35d | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/85/usersdata/179/58876/submittedfiles/funcoes1.py | e9ffe616ef1b13fe73cb6d7961c81ad1912c3af5 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,154 | py | # -*- coding: utf-8 -*-
def crescente(a):
    # strictly increasing: every element is smaller than its successor
    cont = 0
    for i in range(0, len(a) - 1):
        if a[i] < a[i + 1]:
            cont = cont + 1
    if cont == len(a) - 1:
        return True
    else:
        return False

def decrescente(a):
    # strictly decreasing: every element is greater than its successor
    cont = 0
    for i in range(0, len(a) - 1):
        if a[i] > a[i + 1]:
            cont = cont + 1
    if cont == len(a) - 1:
        return True
    else:
        return False

def consecutivo(a):
    # consecutive integers: every element is followed by its value plus one
    cont = 0
    for i in range(0, len(a) - 1):
        if a[i] + 1 == a[i + 1]:
            cont = cont + 1
    if cont == len(a) - 1:
        return True
    else:
        return False
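# Quick sanity checks (a sketch):
#   crescente([1, 3, 7])    -> True
#   decrescente([9, 5, 2])  -> True
#   consecutivo([4, 5, 6])  -> True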
b = []
c = []
d = []
n = int(input('enter the value of n: '))
for i in range(0, n, 1):
    valor = int(input('enter a value: '))
    b.append(valor)
if crescente(b):
    print('S')
else:
    print('N')
for i in range(0, n, 1):
    valor = int(input('enter a value: '))
    c.append(valor)
if decrescente(c):
    print('S')
else:
    print('N')
for i in range(0, n, 1):
    valor = int(input('enter a value: '))
    d.append(valor)
if consecutivo(d):
    print('S')
else:
    print('N')
# write the remaining functions
# write the main program
| [
"[email protected]"
] | |
73b8253035b13946cdbafdad3f3ff53fae1a417a | a14dd601cde67f67d0ba38dfd1362f7c0109cef1 | /arrays/leetcode/grid/set-matrix-zeroes-73.py | 4b6d885e0787eeebbf94701b9d37fb1cd5bc4ce0 | [] | no_license | Meaha7/dsa | d5ea1615f05dae32671af1f1c112f0c759056473 | fa80219ff8a6f4429fcf104310f4169d007af712 | refs/heads/main | 2023-09-03T18:52:41.950294 | 2021-11-05T09:14:42 | 2021-11-05T09:14:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,232 | py | grids = [
[[1, 1, 1], [1, 0, 1], [1, 1, 1]],
[[0, 1, 2, 0], [3, 4, 5, 2], [1, 3, 1, 5]],
[[1, 2, 3, 4], [5, 0, 7, 8], [0, 10, 11, 12], [13, 14, 15, 0]]
]
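# Expected results (rows/columns containing a 0 are zeroed); worked out by hand:
#   [[1, 0, 1], [0, 0, 0], [1, 0, 1]]
#   [[0, 0, 0, 0], [0, 4, 5, 0], [0, 3, 1, 0]]
#   [[0, 0, 3, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]]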
# T=mn,S=m+n
def main(grid):
m, n = len(grid), len(grid[0])
rows, cols = set(), set()
for i in range(m):
for j in range(n):
if not grid[i][j]:
rows.add(i)
cols.add(j)
for i in range(m):
for j in range(n):
if i in rows or j in cols:
grid[i][j] = 0
return grid
from copy import deepcopy

for grid in grids:
    # test on a copy so the in-place version below still sees the original grids
    print(main(deepcopy(grid)))
print()
# T=mn,S=1
def main(grid):
m, n = len(grid), len(grid[0])
fr, fc = False, False
for i in range(m):
for j in range(n):
if not grid[i][j]:
if not i:
fr = True
if not j:
fc = True
grid[i][0] = grid[0][j] = 0
for i in range(1, m):
for j in range(1, n):
if not grid[i][0] or not grid[0][j]:
grid[i][j] = 0
if fr:
for j in range(n):
grid[0][j] = 0
if fc:
for i in range(m):
grid[i][0] = 0
for grid in grids:
main(grid)
print(grid)
| [
"[email protected]"
] | |
82a29e952d943526f88af2dd50b7eda0da44f165 | a38aa3779c16f31d02a2df031fd4ce072facaeb9 | /project/utils.py | 7ae54df72e5e3e66e59363eb3dbee5eab2359549 | [
"MIT"
] | permissive | nikifkon-old/csa-almaty-bot | a0a39673dfa39eb5f6ac6dd58eea08008d52c350 | f18d087c86b3b90171dec080e780e330d62e711a | refs/heads/master | 2022-11-30T07:16:45.839562 | 2020-08-19T09:48:39 | 2020-08-19T09:48:39 | 288,692,826 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 519 | py | QUESTION_CHAR = "❓"
EXCLAMATION_CHAR = "❗️"
SEARCH_CHAR = "🔎"
BACK_CHAR = "🔙"
MENU_CHAR = "☰"
BACK_TO_MENU_TEXT = "{prefix} Return to the category list".format(prefix=MENU_CHAR)
BACK_TO_SEARCH_RESULT = "{prefix} Back to the search results".format(prefix=BACK_CHAR)
OPEN_SEARCH = "{prefix} Find a question".format(prefix=SEARCH_CHAR)
TRY_SEARCH_AGAIN = "{prefix} Try searching again".format(prefix=SEARCH_CHAR)
| [
"[email protected]"
] | |
ae336a597ede11303d18e76036cbc9ac291953b5 | 6c90112e7d21086ef06432bb417bdb339fed4c33 | /django-tally/api/models.py | 11af0648223a22b4581387c627995055a13352e3 | [
"MIT"
] | permissive | blakelobato/BetterBusinessByReview | 9767a04cf1b1a8a8e96cdea634a24887182834ff | 1f8f0a03dc24a661b112b60fed1946142d918294 | refs/heads/master | 2022-04-04T00:08:37.474620 | 2020-02-06T21:01:00 | 2020-02-06T21:01:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,205 | py | from django.db import models
from django.conf import settings
# Create your models here.
class Url(models.Model):
    id = models.IntegerField(primary_key=True)
url = models.CharField(max_length=5000)
# created = models.DateTimeField(auto_now_add=True)#saved on first input into database
# updated = models.DateTimeField(auto_now=True)
date = models.DateTimeField(auto_now_add=True)#saved on first input into database
    user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
word_phrase = models.CharField(max_length=50)
high_rating_score = models.DecimalField(max_digits=3, decimal_places=2, null=True)
low_rating_score = models.DecimalField(max_digits=3, decimal_places=2, null=True)
def __str__(self):
return '{}'.format(self.url)
class WordListAPI(models.Model):
id = models.IntegerField(primary_key=True)
word_phrase = models.CharField(max_length=50)
high_rating_score = models.DecimalField(max_digits=3, decimal_places=2)
low_rating_score = models.DecimalField(max_digits=3, decimal_places=2)
| [
"[email protected]"
] | |
deae7399994f02fc02cd2a1de41c3876a0a42f3d | d5005de630cbfcac46b6f90be845a827a029ff0d | /urlshortner/api/serializer.py | c040926e02219c805df9c6c192f55d7729c0b142 | [] | no_license | mahinm20/url-shortner | d4b18917a002aa12f4fdd1f6f3e2bf026b34f0ad | ea084f96136d5810b8ad6d53bf0acc1a8291b782 | refs/heads/master | 2023-08-11T07:35:04.804424 | 2021-09-14T09:37:27 | 2021-09-14T09:37:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 216 | py | from django.db.models import fields
from rest_framework.serializers import ModelSerializer
from .models import Link
class LinkSerializer(ModelSerializer):
class Meta:
        model = Link
        fields = '__all__'
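# Minimal usage sketch (assumes a saved Link instance named `link`; illustrative only):
#   LinkSerializer(link).data  # -> dict containing every Link field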
| [
"[email protected]"
] | |
40d8671c94da3a301dcd8dd73470c1af8be6c4dc | 4f2cdd9a34fce873ff5995436edf403b38fb2ea5 | /Data-Structures/List/Part2/P003.py | b6642ac9b5001105f692e511ac814eb924a9b9b2 | [] | no_license | sanjeevseera/Python-Practice | 001068e9cd144c52f403a026e26e9942b56848b0 | 5ad502c0117582d5e3abd434a169d23c22ef8419 | refs/heads/master | 2021-12-11T17:24:21.136652 | 2021-08-17T10:25:01 | 2021-08-17T10:25:01 | 153,397,297 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 148 | py | """
Write a Python program to generate all permutations of a list in Python.
"""
import itertools
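# For comparison, a minimal recursive sketch of what itertools.permutations does
# (eager, and returning lists rather than tuples):
def list_permutations(seq):
    if len(seq) <= 1:
        return [list(seq)]
    result = []
    for i, x in enumerate(seq):
        # fix x in front, then permute the remaining elements
        for rest in list_permutations(seq[:i] + seq[i + 1:]):
            result.append([x] + rest)
    return result
# list_permutations([1, 2, 3]) yields the same six orderings printed below.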
print(list(itertools.permutations([1,2,3]))) | [
"[email protected]"
] | |
7b375c81b77e9b35c1623c3699790ed98d0b9a61 | 5c90b31943aff36cab344574b16575025e649b7e | /examples/tour_examples/xkcd_tour.py | 73632b0471a64d556c17914eda6f7e0bd123423f | [
"MIT"
] | permissive | 766/SeleniumBase | 7e23adb3d40cf3d9912e2ff0f4dd56c2fafdb29b | b81e7b93e16a9abee6d2386f55c97843aa90a7d9 | refs/heads/master | 2020-08-22T08:54:47.269550 | 2019-12-06T13:44:17 | 2019-12-06T13:44:17 | 216,360,246 | 1 | 0 | MIT | 2019-12-06T13:44:18 | 2019-10-20T12:43:47 | null | UTF-8 | Python | false | false | 1,051 | py | from seleniumbase import BaseCase
class MyTestClass(BaseCase):
def test_basic(self):
self.open('https://xkcd.com/1117/')
self.assert_element('img[alt="My Sky"]')
self.create_shepherd_tour()
self.add_tour_step("Welcome to XKCD!")
self.add_tour_step("This is the XKCD logo.", "#masthead img")
self.add_tour_step("Here's the daily webcomic.", "#comic img")
self.add_tour_step("This is the title.", "#ctitle", alignment="top")
self.add_tour_step("Click here for the next comic.", 'a[rel="next"]')
self.add_tour_step("Click here for the previous one.", 'a[rel="prev"]')
self.add_tour_step("Learn about the author here.", 'a[rel="author"]')
self.add_tour_step("Click here for the license.", 'a[rel="license"]')
self.add_tour_step("Click for a random comic.", 'a[href*="/random/"]')
self.add_tour_step("Thanks for taking this tour!")
self.export_tour(filename="xkcd_tour.js") # Exports the tour
self.play_tour() # Plays the tour
| [
"[email protected]"
] | |
41e48a86030f730e374988d7f00909bc2d3b0cc9 | 53dd5d2cfb79edc87f6c606bbfb7d0bedcf6da61 | /.history/EMR/age_sex_20190618092905.py | f95b9e064c825ab7d3d8a555a7f973fcb638f23b | [] | no_license | cyc19950621/python | 4add54894dc81187211aa8d45e5115903b69a182 | d184b83e73334a37d413306d3694e14a19580cb0 | refs/heads/master | 2020-04-11T20:39:34.641303 | 2019-07-02T12:54:49 | 2019-07-02T12:54:49 | 162,078,640 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 817 | py | import time
import math
import os
import sys
import os, os.path,shutil
import codecs
import EMRdef
import re
emrtxts = EMRdef.txttq(u'D:\DeepLearning ER\EHR-all')  # collect the txt files in the directory
for emrtxt in emrtxts:
    f = open(emrtxt, 'r', errors="ignore")  # errors="ignore" is needed for the Chinese text
    emrtxt = os.path.basename(emrtxt)
    emrtxt_str = re.findall(r'(^.+?)\_', emrtxt)  # extract the ID
    emrtxt = "".join(emrtxt_str)  # convert to str
    out = []
    for line in f.readlines():
        line = line.strip()
        if line == '男':  # male
            out.append(line)
        elif line == '女':  # female
            out.append(line)
        if line.find('岁') > -1:  # age given in years
            line = re.sub('岁', '', line)
            out.append(line)
            break
    output = ' '.join(out)
    EMRdef.text_create(r'D:\DeepLearning ER\EHRbase', '.txt', emrtxt, output)
| [
"[email protected]"
] | |
45242f33898eb7f9f32d81e88104ff79dccc109a | f9fe13fe62ba3fb1fb096da4268d5dc43e435ea4 | /44)in_range_or_not.py | 7b43b95dede8dbe7f422e93fd0c9f353fc060d58 | [] | no_license | MANNEMPRATAPVARUN/guvipy | 7e460da8b9d98c2fcd488757585d5bd207570666 | 4da4fe4f3d4855e14383015da19588ef0aea4f32 | refs/heads/master | 2020-06-10T01:22:26.063815 | 2019-06-12T13:44:44 | 2019-06-12T13:44:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 81 | py | num2=int(input())
if num2 in range(1,10):
print("yes")
else:
print("no")
| [
"[email protected]"
] | |
dc3a3df04d9eba2f8895e74b91128c8c0b6b8a41 | 6413fe58b04ac2a7efe1e56050ad42d0e688adc6 | /tempenv/lib/python3.7/site-packages/plotly/validators/scattergeo/marker/colorbar/_title.py | e584a0b07ef22e6fd0a89b476a1df8aef97c2e3d | [
"MIT"
] | permissive | tytechortz/Denver_temperature | 7f91e0ac649f9584147d59193568f6ec7efe3a77 | 9d9ea31cd7ec003e8431dcbb10a3320be272996d | refs/heads/master | 2022-12-09T06:22:14.963463 | 2019-10-09T16:30:52 | 2019-10-09T16:30:52 | 170,581,559 | 1 | 0 | MIT | 2022-06-21T23:04:21 | 2019-02-13T21:22:53 | Python | UTF-8 | Python | false | false | 1,264 | py | import _plotly_utils.basevalidators
class TitleValidator(_plotly_utils.basevalidators.TitleValidator):
def __init__(
self,
plotly_name='title',
parent_name='scattergeo.marker.colorbar',
**kwargs
):
super(TitleValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop('data_class_str', 'Title'),
data_docs=kwargs.pop(
'data_docs', """
font
Sets this color bar's title font. Note that the
title's font used to be set by the now
deprecated `titlefont` attribute.
side
Determines the location of color bar's title
with respect to the color bar. Note that the
title's location used to be set by the now
deprecated `titleside` attribute.
text
Sets the title of the color bar. Note that
before the existence of `title.text`, the
title's contents used to be defined as the
`title` attribute itself. This behavior has
been deprecated.
"""
),
**kwargs
)
| [
"[email protected]"
] | |
e82882454e7c7b079412b3a407adef7c35b5b239 | 501b6a773f82a44dba7e2393108c839e33b355a6 | /09_strings-and-text/09_05_new_friends_1.py | 9f584b942188cfc7da1a3894e2ec5626472c2738 | [] | no_license | Bat-Turtle/python-101 | fda2c79e3a270924f37494b25377b5645098a5c7 | 8f4582a5a9b398d2163c27aa8fe433efd8ff69fa | refs/heads/main | 2023-08-19T16:37:46.732125 | 2021-10-26T16:46:46 | 2021-10-26T16:46:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 67 | py | # Write code that produces a SyntaxError when you run this script.
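# One possible answer: an unclosed parenthesis fails at parse time, before any
# line runs (left commented out so this file itself still parses):
# print("hello"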
| [
"[email protected]"
] | |
4c33e3aca15d5af803da190c1a4f63b0f3779bc9 | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/resources/azure-mgmt-resource/azure/mgmt/resource/resources/v2016_09_01/operations/_operations.py | 13f814257aab9d3cc4f66a805c212581c9ced361 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 251,605 | py | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from io import IOBase
import sys
from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
from ..._serialization import Serializer
from .._vendor import _convert_request, _format_url_section
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_deployments_delete_request(
resource_group_name: str, deployment_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"deploymentName": _SERIALIZER.url(
"deployment_name", deployment_name, "str", max_length=64, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs)
def build_deployments_check_existence_request(
resource_group_name: str, deployment_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"deploymentName": _SERIALIZER.url(
"deployment_name", deployment_name, "str", max_length=64, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
return HttpRequest(method="HEAD", url=_url, params=_params, **kwargs)
def build_deployments_create_or_update_request(
resource_group_name: str, deployment_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"deploymentName": _SERIALIZER.url(
"deployment_name", deployment_name, "str", max_length=64, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_deployments_get_request(
resource_group_name: str, deployment_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"deploymentName": _SERIALIZER.url(
"deployment_name", deployment_name, "str", max_length=64, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_deployments_cancel_request(
resource_group_name: str, deployment_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}/cancel",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"deploymentName": _SERIALIZER.url(
"deployment_name", deployment_name, "str", max_length=64, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
return HttpRequest(method="POST", url=_url, params=_params, **kwargs)
def build_deployments_validate_request(
resource_group_name: str, deployment_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}/validate",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"deploymentName": _SERIALIZER.url(
"deployment_name", deployment_name, "str", max_length=64, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_deployments_export_template_request(
resource_group_name: str, deployment_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}/exportTemplate",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"deploymentName": _SERIALIZER.url(
"deployment_name", deployment_name, "str", max_length=64, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_deployments_list_request(
resource_group_name: str,
subscription_id: str,
*,
filter: Optional[str] = None,
top: Optional[int] = None,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
if filter is not None:
_params["$filter"] = _SERIALIZER.query("filter", filter, "str")
if top is not None:
_params["$top"] = _SERIALIZER.query("top", top, "int")
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_deployments_calculate_template_hash_request(*, json: JSON, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/providers/Microsoft.Resources/calculateTemplateHash")
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, json=json, **kwargs)
def build_providers_unregister_request(
resource_provider_namespace: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url", "/subscriptions/{subscriptionId}/providers/{resourceProviderNamespace}/unregister"
)
path_format_arguments = {
"resourceProviderNamespace": _SERIALIZER.url("resource_provider_namespace", resource_provider_namespace, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_providers_register_request(
resource_provider_namespace: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/{resourceProviderNamespace}/register")
path_format_arguments = {
"resourceProviderNamespace": _SERIALIZER.url("resource_provider_namespace", resource_provider_namespace, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_providers_list_request(
subscription_id: str, *, top: Optional[int] = None, expand: Optional[str] = None, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers")
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
if top is not None:
_params["$top"] = _SERIALIZER.query("top", top, "int")
if expand is not None:
_params["$expand"] = _SERIALIZER.query("expand", expand, "str")
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_providers_get_request(
resource_provider_namespace: str, subscription_id: str, *, expand: Optional[str] = None, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/{resourceProviderNamespace}")
path_format_arguments = {
"resourceProviderNamespace": _SERIALIZER.url("resource_provider_namespace", resource_provider_namespace, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
if expand is not None:
_params["$expand"] = _SERIALIZER.query("expand", expand, "str")
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_resource_groups_list_resources_request(
resource_group_name: str,
subscription_id: str,
*,
filter: Optional[str] = None,
expand: Optional[str] = None,
top: Optional[int] = None,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/resources")
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
if filter is not None:
_params["$filter"] = _SERIALIZER.query("filter", filter, "str")
if expand is not None:
_params["$expand"] = _SERIALIZER.query("expand", expand, "str")
if top is not None:
_params["$top"] = _SERIALIZER.query("top", top, "int")
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_resource_groups_check_existence_request(
resource_group_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
# Construct URL
_url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}")
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
return HttpRequest(method="HEAD", url=_url, params=_params, **kwargs)
def build_resource_groups_create_or_update_request(
resource_group_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}")
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_resource_groups_delete_request(resource_group_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest:
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
# Construct URL
_url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}")
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs)
def build_resource_groups_get_request(resource_group_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}")
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_resource_groups_patch_request(resource_group_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}")
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs)
def build_resource_groups_export_template_request(
resource_group_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url", "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/exportTemplate"
)
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_resource_groups_list_request(
subscription_id: str, *, filter: Optional[str] = None, top: Optional[int] = None, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourcegroups")
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
if filter is not None:
_params["$filter"] = _SERIALIZER.query("filter", filter, "str")
if top is not None:
_params["$top"] = _SERIALIZER.query("top", top, "int")
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_resources_move_resources_request(
source_resource_group_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
# Construct URL
_url = kwargs.pop(
"template_url", "/subscriptions/{subscriptionId}/resourceGroups/{sourceResourceGroupName}/moveResources"
) # pylint: disable=line-too-long
path_format_arguments = {
"sourceResourceGroupName": _SERIALIZER.url(
"source_resource_group_name",
source_resource_group_name,
"str",
max_length=90,
min_length=1,
pattern=r"^[-\w\._\(\)]+$",
),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_resources_list_request(
subscription_id: str,
*,
filter: Optional[str] = None,
expand: Optional[str] = None,
top: Optional[int] = None,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resources")
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
if filter is not None:
_params["$filter"] = _SERIALIZER.query("filter", filter, "str")
if expand is not None:
_params["$expand"] = _SERIALIZER.query("expand", expand, "str")
if top is not None:
_params["$top"] = _SERIALIZER.query("top", top, "int")
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_resources_check_existence_request(
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
subscription_id: str,
*,
api_version: str,
**kwargs: Any
) -> HttpRequest:
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"resourceProviderNamespace": _SERIALIZER.url("resource_provider_namespace", resource_provider_namespace, "str"),
"parentResourcePath": _SERIALIZER.url("parent_resource_path", parent_resource_path, "str", skip_quote=True),
"resourceType": _SERIALIZER.url("resource_type", resource_type, "str", skip_quote=True),
"resourceName": _SERIALIZER.url("resource_name", resource_name, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
return HttpRequest(method="HEAD", url=_url, params=_params, **kwargs)
def build_resources_delete_request(
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
subscription_id: str,
*,
api_version: str,
**kwargs: Any
) -> HttpRequest:
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"resourceProviderNamespace": _SERIALIZER.url("resource_provider_namespace", resource_provider_namespace, "str"),
"parentResourcePath": _SERIALIZER.url("parent_resource_path", parent_resource_path, "str", skip_quote=True),
"resourceType": _SERIALIZER.url("resource_type", resource_type, "str", skip_quote=True),
"resourceName": _SERIALIZER.url("resource_name", resource_name, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs)
def build_resources_create_or_update_request(
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
subscription_id: str,
*,
api_version: str,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"resourceProviderNamespace": _SERIALIZER.url("resource_provider_namespace", resource_provider_namespace, "str"),
"parentResourcePath": _SERIALIZER.url("parent_resource_path", parent_resource_path, "str", skip_quote=True),
"resourceType": _SERIALIZER.url("resource_type", resource_type, "str", skip_quote=True),
"resourceName": _SERIALIZER.url("resource_name", resource_name, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_resources_update_request(
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
subscription_id: str,
*,
api_version: str,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"resourceProviderNamespace": _SERIALIZER.url("resource_provider_namespace", resource_provider_namespace, "str"),
"parentResourcePath": _SERIALIZER.url("parent_resource_path", parent_resource_path, "str", skip_quote=True),
"resourceType": _SERIALIZER.url("resource_type", resource_type, "str", skip_quote=True),
"resourceName": _SERIALIZER.url("resource_name", resource_name, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs)
def build_resources_get_request(
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
subscription_id: str,
*,
api_version: str,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"resourceProviderNamespace": _SERIALIZER.url("resource_provider_namespace", resource_provider_namespace, "str"),
"parentResourcePath": _SERIALIZER.url("parent_resource_path", parent_resource_path, "str", skip_quote=True),
"resourceType": _SERIALIZER.url("resource_type", resource_type, "str", skip_quote=True),
"resourceName": _SERIALIZER.url("resource_name", resource_name, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_resources_check_existence_by_id_request(resource_id: str, *, api_version: str, **kwargs: Any) -> HttpRequest:
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
# Construct URL
_url = kwargs.pop("template_url", "/{resourceId}")
path_format_arguments = {
"resourceId": _SERIALIZER.url("resource_id", resource_id, "str", skip_quote=True),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
return HttpRequest(method="HEAD", url=_url, params=_params, **kwargs)
def build_resources_delete_by_id_request(resource_id: str, *, api_version: str, **kwargs: Any) -> HttpRequest:
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
# Construct URL
_url = kwargs.pop("template_url", "/{resourceId}")
path_format_arguments = {
"resourceId": _SERIALIZER.url("resource_id", resource_id, "str", skip_quote=True),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs)
def build_resources_create_or_update_by_id_request(resource_id: str, *, api_version: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/{resourceId}")
path_format_arguments = {
"resourceId": _SERIALIZER.url("resource_id", resource_id, "str", skip_quote=True),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_resources_update_by_id_request(resource_id: str, *, api_version: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/{resourceId}")
path_format_arguments = {
"resourceId": _SERIALIZER.url("resource_id", resource_id, "str", skip_quote=True),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs)
def build_resources_get_by_id_request(resource_id: str, *, api_version: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/{resourceId}")
path_format_arguments = {
"resourceId": _SERIALIZER.url("resource_id", resource_id, "str", skip_quote=True),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_tags_delete_value_request(tag_name: str, tag_value: str, subscription_id: str, **kwargs: Any) -> HttpRequest:
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
# Construct URL
_url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/tagNames/{tagName}/tagValues/{tagValue}")
path_format_arguments = {
"tagName": _SERIALIZER.url("tag_name", tag_name, "str"),
"tagValue": _SERIALIZER.url("tag_value", tag_value, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs)
def build_tags_create_or_update_value_request(
tag_name: str, tag_value: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/tagNames/{tagName}/tagValues/{tagValue}")
path_format_arguments = {
"tagName": _SERIALIZER.url("tag_name", tag_name, "str"),
"tagValue": _SERIALIZER.url("tag_value", tag_value, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_tags_create_or_update_request(tag_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/tagNames/{tagName}")
path_format_arguments = {
"tagName": _SERIALIZER.url("tag_name", tag_name, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_tags_delete_request(tag_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest:
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
# Construct URL
_url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/tagNames/{tagName}")
path_format_arguments = {
"tagName": _SERIALIZER.url("tag_name", tag_name, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs)
def build_tags_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/tagNames")
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_deployment_operations_get_request(
resource_group_name: str, deployment_name: str, operation_id: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/deployments/{deploymentName}/operations/{operationId}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"deploymentName": _SERIALIZER.url(
"deployment_name", deployment_name, "str", max_length=64, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"operationId": _SERIALIZER.url("operation_id", operation_id, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_deployment_operations_list_request(
resource_group_name: str, deployment_name: str, subscription_id: str, *, top: Optional[int] = None, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/deployments/{deploymentName}/operations",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"deploymentName": _SERIALIZER.url(
"deployment_name", deployment_name, "str", max_length=64, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
if top is not None:
_params["$top"] = _SERIALIZER.query("top", top, "int")
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
class DeploymentsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.resource.resources.v2016_09_01.ResourceManagementClient`'s
:attr:`deployments` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
def _delete_initial( # pylint: disable=inconsistent-return-statements
self, resource_group_name: str, deployment_name: str, **kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
cls: ClsType[None] = kwargs.pop("cls", None)
request = build_deployments_delete_request(
resource_group_name=resource_group_name,
deployment_name=deployment_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {
"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}"
}
@distributed_trace
def begin_delete(self, resource_group_name: str, deployment_name: str, **kwargs: Any) -> LROPoller[None]:
"""Deletes a deployment from the deployment history.
A template deployment that is currently running cannot be deleted. Deleting a template
deployment removes the associated deployment operations. Deleting a template deployment does
not affect the state of the resource group. This is an asynchronous operation that returns a
status of 202 until the template deployment is successfully deleted. The Location response
header contains the URI that is used to obtain the status of the process. While the process is
running, a call to the URI in the Location header returns a status of 202. When the process
finishes, the URI in the Location header returns a status of 204 on success. If the
asynchronous request failed, the URI in the Location header returns an error-level status code.
:param resource_group_name: The name of the resource group with the deployment to delete. The
name is case insensitive. Required.
:type resource_group_name: str
:param deployment_name: The name of the deployment to delete. Required.
:type deployment_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
raw_result = self._delete_initial( # type: ignore
resource_group_name=resource_group_name,
deployment_name=deployment_name,
api_version=api_version,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
begin_delete.metadata = {
"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}"
}
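    # Hedged usage sketch: how the LROPoller returned by ``begin_delete`` is
    # typically consumed. The credential, the versioned client import, and the
    # "<subscription-id>"/"my-rg"/"my-deployment" values are illustrative
    # assumptions, not part of this module.
    #
    #   from azure.identity import DefaultAzureCredential
    #   from azure.mgmt.resource.resources.v2016_09_01 import ResourceManagementClient
    #
    #   client = ResourceManagementClient(DefaultAzureCredential(), "<subscription-id>")
    #   poller = client.deployments.begin_delete("my-rg", "my-deployment")
    #   poller.result()  # blocks until the service reports the deployment gone (204)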
@distributed_trace
def check_existence(self, resource_group_name: str, deployment_name: str, **kwargs: Any) -> bool:
"""Checks whether the deployment exists.
:param resource_group_name: The name of the resource group with the deployment to check. The
name is case insensitive. Required.
:type resource_group_name: str
:param deployment_name: The name of the deployment to check. Required.
:type deployment_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: bool or the result of cls(response)
:rtype: bool
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
cls: ClsType[None] = kwargs.pop("cls", None)
request = build_deployments_check_existence_request(
resource_group_name=resource_group_name,
deployment_name=deployment_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.check_existence.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [204, 404]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
return 200 <= response.status_code <= 299
check_existence.metadata = {
"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}"
}
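    # Hedged sketch: ``check_existence`` folds the HEAD response (204 vs. 404) into
    # a bool, so callers can branch on it directly. ``client`` is assumed to be the
    # configured ResourceManagementClient from the ``begin_delete`` sketch above.
    #
    #   if client.deployments.check_existence("my-rg", "my-deployment"):
    #       print("deployment exists")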
def _create_or_update_initial(
self, resource_group_name: str, deployment_name: str, parameters: Union[_models.Deployment, IO], **kwargs: Any
) -> _models.DeploymentExtended:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.DeploymentExtended] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "Deployment")
request = build_deployments_create_or_update_request(
resource_group_name=resource_group_name,
deployment_name=deployment_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._create_or_update_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize("DeploymentExtended", pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize("DeploymentExtended", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
_create_or_update_initial.metadata = {
"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}"
}
@overload
def begin_create_or_update(
self,
resource_group_name: str,
deployment_name: str,
parameters: _models.Deployment,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.DeploymentExtended]:
"""Deploys resources to a resource group.
You can provide the template and parameters directly in the request or link to JSON files.
:param resource_group_name: The name of the resource group to deploy the resources to. The name
is case insensitive. The resource group must already exist. Required.
:type resource_group_name: str
:param deployment_name: The name of the deployment. Required.
:type deployment_name: str
:param parameters: Additional parameters supplied to the operation. Required.
:type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.Deployment
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either DeploymentExtended or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.DeploymentExtended]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def begin_create_or_update(
self,
resource_group_name: str,
deployment_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.DeploymentExtended]:
"""Deploys resources to a resource group.
You can provide the template and parameters directly in the request or link to JSON files.
:param resource_group_name: The name of the resource group to deploy the resources to. The name
is case insensitive. The resource group must already exist. Required.
:type resource_group_name: str
:param deployment_name: The name of the deployment. Required.
:type deployment_name: str
:param parameters: Additional parameters supplied to the operation. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either DeploymentExtended or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.DeploymentExtended]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def begin_create_or_update(
self, resource_group_name: str, deployment_name: str, parameters: Union[_models.Deployment, IO], **kwargs: Any
) -> LROPoller[_models.DeploymentExtended]:
"""Deploys resources to a resource group.
You can provide the template and parameters directly in the request or link to JSON files.
:param resource_group_name: The name of the resource group to deploy the resources to. The name
is case insensitive. The resource group must already exist. Required.
:type resource_group_name: str
:param deployment_name: The name of the deployment. Required.
:type deployment_name: str
:param parameters: Additional parameters supplied to the operation. Is either a Deployment type
        or an IO type. Required.
:type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.Deployment or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either DeploymentExtended or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.DeploymentExtended]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.DeploymentExtended] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
deployment_name=deployment_name,
parameters=parameters,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize("DeploymentExtended", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
begin_create_or_update.metadata = {
"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}"
}
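    # Hedged sketch of a minimal inline-template deployment. ``Deployment`` and
    # ``DeploymentProperties`` are this package's models; the empty template and
    # the "Incremental" mode string are illustrative placeholders.
    #
    #   from azure.mgmt.resource.resources.v2016_09_01.models import (
    #       Deployment, DeploymentProperties)
    #
    #   props = DeploymentProperties(
    #       mode="Incremental",
    #       template={
    #           "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
    #           "contentVersion": "1.0.0.0",
    #           "resources": [],
    #       },
    #   )
    #   poller = client.deployments.begin_create_or_update(
    #       "my-rg", "my-deployment", Deployment(properties=props))
    #   print(poller.result().properties.provisioning_state)  # e.g. "Succeeded"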
@distributed_trace
def get(self, resource_group_name: str, deployment_name: str, **kwargs: Any) -> _models.DeploymentExtended:
"""Gets a deployment.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param deployment_name: The name of the deployment to get. Required.
:type deployment_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DeploymentExtended or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.DeploymentExtended
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
cls: ClsType[_models.DeploymentExtended] = kwargs.pop("cls", None)
request = build_deployments_get_request(
resource_group_name=resource_group_name,
deployment_name=deployment_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("DeploymentExtended", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {
"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}"
}
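    # Hedged sketch: reading a deployment's state via ``get``; attribute names
    # follow the DeploymentExtended model, the resource names are placeholders.
    #
    #   deployment = client.deployments.get("my-rg", "my-deployment")
    #   print(deployment.name, deployment.properties.provisioning_state)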
@distributed_trace
def cancel( # pylint: disable=inconsistent-return-statements
self, resource_group_name: str, deployment_name: str, **kwargs: Any
) -> None:
"""Cancels a currently running template deployment.
You can cancel a deployment only if the provisioningState is Accepted or Running. After the
deployment is canceled, the provisioningState is set to Canceled. Canceling a template
deployment stops the currently running template deployment and leaves the resource group
partially deployed.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param deployment_name: The name of the deployment to cancel. Required.
:type deployment_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
cls: ClsType[None] = kwargs.pop("cls", None)
request = build_deployments_cancel_request(
resource_group_name=resource_group_name,
deployment_name=deployment_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.cancel.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
cancel.metadata = {
"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}/cancel"
}
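    # Hedged sketch: ``cancel`` returns None on the expected 204, so success is the
    # absence of an HttpResponseError (raised e.g. when the deployment is not in an
    # Accepted/Running state).
    #
    #   from azure.core.exceptions import HttpResponseError
    #
    #   try:
    #       client.deployments.cancel("my-rg", "my-deployment")
    #   except HttpResponseError as err:
    #       print("cancel rejected:", err.message)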
@overload
def validate(
self,
resource_group_name: str,
deployment_name: str,
parameters: _models.Deployment,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.DeploymentValidateResult:
"""Validates whether the specified template is syntactically correct and will be accepted by Azure
        Resource Manager.
:param resource_group_name: The name of the resource group the template will be deployed to.
The name is case insensitive. Required.
:type resource_group_name: str
:param deployment_name: The name of the deployment. Required.
:type deployment_name: str
:param parameters: Parameters to validate. Required.
:type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.Deployment
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DeploymentValidateResult or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.DeploymentValidateResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def validate(
self,
resource_group_name: str,
deployment_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.DeploymentValidateResult:
"""Validates whether the specified template is syntactically correct and will be accepted by Azure
        Resource Manager.
:param resource_group_name: The name of the resource group the template will be deployed to.
The name is case insensitive. Required.
:type resource_group_name: str
:param deployment_name: The name of the deployment. Required.
:type deployment_name: str
:param parameters: Parameters to validate. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DeploymentValidateResult or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.DeploymentValidateResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def validate(
self, resource_group_name: str, deployment_name: str, parameters: Union[_models.Deployment, IO], **kwargs: Any
) -> _models.DeploymentValidateResult:
"""Validates whether the specified template is syntactically correct and will be accepted by Azure
        Resource Manager.
:param resource_group_name: The name of the resource group the template will be deployed to.
The name is case insensitive. Required.
:type resource_group_name: str
:param deployment_name: The name of the deployment. Required.
:type deployment_name: str
        :param parameters: Parameters to validate. Is either a Deployment type or an IO type. Required.
:type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.Deployment or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DeploymentValidateResult or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.DeploymentValidateResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.DeploymentValidateResult] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "Deployment")
request = build_deployments_validate_request(
resource_group_name=resource_group_name,
deployment_name=deployment_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.validate.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 400]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize("DeploymentValidateResult", pipeline_response)
if response.status_code == 400:
deserialized = self._deserialize("DeploymentValidateResult", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
validate.metadata = {
"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}/validate"
}
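    # Hedged sketch: ``validate`` deserializes both the 200 and the 400 body into a
    # DeploymentValidateResult, so template problems surface on ``.error`` instead
    # of as an exception. ``props`` is the placeholder built in the
    # ``begin_create_or_update`` sketch above.
    #
    #   result = client.deployments.validate(
    #       "my-rg", "my-deployment", Deployment(properties=props))
    #   if result.error:
    #       print("template rejected:", result.error.message)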
@distributed_trace
def export_template(
self, resource_group_name: str, deployment_name: str, **kwargs: Any
) -> _models.DeploymentExportResult:
"""Exports the template used for specified deployment.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param deployment_name: The name of the deployment from which to get the template. Required.
:type deployment_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DeploymentExportResult or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.DeploymentExportResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
cls: ClsType[_models.DeploymentExportResult] = kwargs.pop("cls", None)
request = build_deployments_export_template_request(
resource_group_name=resource_group_name,
deployment_name=deployment_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.export_template.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("DeploymentExportResult", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
export_template.metadata = {
"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}/exportTemplate"
}
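    # Hedged sketch: the exported template is a plain JSON-like dict on
    # ``DeploymentExportResult.template`` and can be dumped straight to disk.
    #
    #   import json
    #   exported = client.deployments.export_template("my-rg", "my-deployment")
    #   with open("template.json", "w") as fh:
    #       json.dump(exported.template, fh, indent=2)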
@distributed_trace
def list(
self, resource_group_name: str, filter: Optional[str] = None, top: Optional[int] = None, **kwargs: Any
) -> Iterable["_models.DeploymentExtended"]:
"""Get all the deployments for a resource group.
:param resource_group_name: The name of the resource group with the deployments to get. The
name is case insensitive. Required.
:type resource_group_name: str
:param filter: The filter to apply on the operation. For example, you can use
$filter=provisioningState eq '{state}'. Default value is None.
:type filter: str
:param top: The number of results to get. If null is passed, returns all deployments. Default
value is None.
:type top: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either DeploymentExtended or the result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.resources.v2016_09_01.models.DeploymentExtended]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
cls: ClsType[_models.DeploymentListResult] = kwargs.pop("cls", None)
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_deployments_list_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
filter=filter,
top=top,
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("DeploymentListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list.metadata = {
"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/"
}
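    # Hedged sketch: ``list`` returns an ItemPaged that fetches pages lazily, so
    # plain iteration drives the paging; ``filter`` and ``top`` are optional.
    #
    #   for dep in client.deployments.list(
    #           "my-rg", filter="provisioningState eq 'Failed'", top=10):
    #       print(dep.name)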
@distributed_trace
def calculate_template_hash(self, template: JSON, **kwargs: Any) -> _models.TemplateHashResult:
"""Calculate the hash of the given template.
:param template: The template provided to calculate hash. Required.
:type template: JSON
:keyword callable cls: A custom type or function that will be passed the direct response
:return: TemplateHashResult or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.TemplateHashResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json"))
cls: ClsType[_models.TemplateHashResult] = kwargs.pop("cls", None)
_json = self._serialize.body(template, "object")
request = build_deployments_calculate_template_hash_request(
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self.calculate_template_hash.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("TemplateHashResult", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
calculate_template_hash.metadata = {"url": "/providers/Microsoft.Resources/calculateTemplateHash"}
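    # Hedged sketch: the hash is computed service-side from the posted template
    # JSON; the minimal template dict here is a placeholder.
    #
    #   hashed = client.deployments.calculate_template_hash({"resources": []})
    #   print(hashed.template_hash)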
class ProvidersOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.resource.resources.v2016_09_01.ResourceManagementClient`'s
:attr:`providers` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def unregister(self, resource_provider_namespace: str, **kwargs: Any) -> _models.Provider:
"""Unregisters a subscription from a resource provider.
:param resource_provider_namespace: The namespace of the resource provider to unregister.
Required.
:type resource_provider_namespace: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Provider or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.Provider
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
cls: ClsType[_models.Provider] = kwargs.pop("cls", None)
request = build_providers_unregister_request(
resource_provider_namespace=resource_provider_namespace,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.unregister.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("Provider", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
unregister.metadata = {"url": "/subscriptions/{subscriptionId}/providers/{resourceProviderNamespace}/unregister"}
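    # Hedged sketch: unregistration is asynchronous on the service side, so the
    # returned Provider typically reports a transitional ``registration_state``.
    # ``client`` is an assumed configured ResourceManagementClient; the namespace
    # is a placeholder.
    #
    #   provider = client.providers.unregister("Microsoft.Example")
    #   print(provider.registration_state)  # e.g. "Unregistering"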
@distributed_trace
def register(self, resource_provider_namespace: str, **kwargs: Any) -> _models.Provider:
"""Registers a subscription with a resource provider.
:param resource_provider_namespace: The namespace of the resource provider to register.
Required.
:type resource_provider_namespace: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Provider or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.Provider
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
cls: ClsType[_models.Provider] = kwargs.pop("cls", None)
request = build_providers_register_request(
resource_provider_namespace=resource_provider_namespace,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.register.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("Provider", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
register.metadata = {"url": "/subscriptions/{subscriptionId}/providers/{resourceProviderNamespace}/register"}
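    # Hedged sketch: registration is likewise asynchronous; re-read the provider
    # until ``registration_state`` reaches "Registered". The namespace is
    # illustrative.
    #
    #   client.providers.register("Microsoft.Network")
    #   print(client.providers.get("Microsoft.Network").registration_state)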
@distributed_trace
def list(
self, top: Optional[int] = None, expand: Optional[str] = None, **kwargs: Any
) -> Iterable["_models.Provider"]:
"""Gets all resource providers for a subscription.
        :param top: The number of results to return. If null is passed, returns all providers. Default
value is None.
:type top: int
        :param expand: The properties to include in the results. For example, use $expand=metadata in
the query string to retrieve resource provider metadata. To include property aliases in
response, use $expand=resourceTypes/aliases. Default value is None.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Provider or the result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.resources.v2016_09_01.models.Provider]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
cls: ClsType[_models.ProviderListResult] = kwargs.pop("cls", None)
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_providers_list_request(
subscription_id=self._config.subscription_id,
top=top,
expand=expand,
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("ProviderListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list.metadata = {"url": "/subscriptions/{subscriptionId}/providers"}
@distributed_trace
def get(self, resource_provider_namespace: str, expand: Optional[str] = None, **kwargs: Any) -> _models.Provider:
"""Gets the specified resource provider.
:param resource_provider_namespace: The namespace of the resource provider. Required.
:type resource_provider_namespace: str
:param expand: The $expand query parameter. For example, to include property aliases in the
response, use $expand=resourceTypes/aliases. Default value is None.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Provider or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.Provider
:raises ~azure.core.exceptions.HttpResponseError:
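.. admonition:: Example

    An illustrative sketch; ``client`` is an assumed, pre-built
    ``ResourceManagementClient`` and the namespace value is a placeholder.

    .. code-block:: python

        provider = client.providers.get("Microsoft.Compute")
        print(provider.registration_state)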
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
cls: ClsType[_models.Provider] = kwargs.pop("cls", None)
request = build_providers_get_request(
resource_provider_namespace=resource_provider_namespace,
subscription_id=self._config.subscription_id,
expand=expand,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("Provider", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {"url": "/subscriptions/{subscriptionId}/providers/{resourceProviderNamespace}"}
class ResourceGroupsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.resource.resources.v2016_09_01.ResourceManagementClient`'s
:attr:`resource_groups` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def list_resources(
self,
resource_group_name: str,
filter: Optional[str] = None,
expand: Optional[str] = None,
top: Optional[int] = None,
**kwargs: Any
) -> Iterable["_models.GenericResourceExpanded"]:
"""Get all the resources for a resource group.
:param resource_group_name: The resource group with the resources to get. Required.
:type resource_group_name: str
:param filter: The filter to apply on the operation. Default value is None.
:type filter: str
:param expand: Comma-separated list of additional properties to be included in the response.
Valid values include ``createdTime``\ , ``changedTime`` and ``provisioningState``. For example,
``$expand=createdTime,changedTime``. Default value is None.
:type expand: str
:param top: The number of results to return. If null is passed, returns all resources. Default
value is None.
:type top: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either GenericResourceExpanded or the result of
cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResourceExpanded]
:raises ~azure.core.exceptions.HttpResponseError:
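.. admonition:: Example

    An illustrative sketch; ``client`` and the resource group name are
    assumptions, not part of the generated code.

    .. code-block:: python

        pages = client.resource_groups.list_resources(
            "my-rg", expand="createdTime,changedTime", top=5
        )
        for resource in pages:
            print(resource.name, resource.created_time)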
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
cls: ClsType[_models.ResourceListResult] = kwargs.pop("cls", None)
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_resource_groups_list_resources_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
filter=filter,
expand=expand,
top=top,
api_version=api_version,
template_url=self.list_resources.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("ResourceListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list_resources.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/resources"}
@distributed_trace
def check_existence(self, resource_group_name: str, **kwargs: Any) -> bool:
"""Checks whether a resource group exists.
:param resource_group_name: The name of the resource group to check. The name is case
insensitive. Required.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: bool or the result of cls(response)
:rtype: bool
:raises ~azure.core.exceptions.HttpResponseError:
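.. admonition:: Example

    An illustrative sketch; ``client`` and the group name are assumed.

    .. code-block:: python

        if client.resource_groups.check_existence("my-rg"):
            print("resource group exists")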
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
cls: ClsType[None] = kwargs.pop("cls", None)
request = build_resource_groups_check_existence_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.check_existence.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [204, 404]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
return 200 <= response.status_code <= 299  # 204 (group exists) -> True; 404 -> False
check_existence.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}"}
@overload
def create_or_update(
self,
resource_group_name: str,
parameters: _models.ResourceGroup,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.ResourceGroup:
"""Creates a resource group.
:param resource_group_name: The name of the resource group to create or update. Required.
:type resource_group_name: str
:param parameters: Parameters supplied to create or update a resource group. Required.
:type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroup
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ResourceGroup or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroup
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def create_or_update(
self, resource_group_name: str, parameters: IO, *, content_type: str = "application/json", **kwargs: Any
) -> _models.ResourceGroup:
"""Creates a resource group.
:param resource_group_name: The name of the resource group to create or update. Required.
:type resource_group_name: str
:param parameters: Parameters supplied to create or update a resource group. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ResourceGroup or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroup
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def create_or_update(
self, resource_group_name: str, parameters: Union[_models.ResourceGroup, IO], **kwargs: Any
) -> _models.ResourceGroup:
"""Creates a resource group.
:param resource_group_name: The name of the resource group to create or update. Required.
:type resource_group_name: str
:param parameters: Parameters supplied to create or update a resource group. Is either a
ResourceGroup type or an IO type. Required.
:type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroup or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ResourceGroup or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroup
:raises ~azure.core.exceptions.HttpResponseError:
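.. admonition:: Example

    An illustrative sketch; ``client``, the group name, and the location
    are assumptions.

    .. code-block:: python

        from azure.mgmt.resource.resources.v2016_09_01.models import ResourceGroup

        rg = client.resource_groups.create_or_update(
            "my-rg", ResourceGroup(location="westus", tags={"env": "dev"})
        )
        print(rg.id)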
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.ResourceGroup] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "ResourceGroup")
request = build_resource_groups_create_or_update_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.create_or_update.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize("ResourceGroup", pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize("ResourceGroup", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}"}
def _delete_initial( # pylint: disable=inconsistent-return-statements
self, resource_group_name: str, **kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
cls: ClsType[None] = kwargs.pop("cls", None)
request = build_resource_groups_delete_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}"}
@distributed_trace
def begin_delete(self, resource_group_name: str, **kwargs: Any) -> LROPoller[None]:
"""Deletes a resource group.
When you delete a resource group, all of its resources are also deleted. Deleting a resource
group deletes all of its template deployments and currently stored operations.
:param resource_group_name: The name of the resource group to delete. The name is case
insensitive. Required.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
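.. admonition:: Example

    An illustrative sketch; ``client`` and the group name are assumed.

    .. code-block:: python

        poller = client.resource_groups.begin_delete("my-rg")
        poller.result()  # block until the group and its resources are gone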
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
raw_result = self._delete_initial( # type: ignore
resource_group_name=resource_group_name,
api_version=api_version,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}"}
@distributed_trace
def get(self, resource_group_name: str, **kwargs: Any) -> _models.ResourceGroup:
"""Gets a resource group.
:param resource_group_name: The name of the resource group to get. The name is case
insensitive. Required.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ResourceGroup or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroup
:raises ~azure.core.exceptions.HttpResponseError:
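.. admonition:: Example

    An illustrative sketch; ``client`` and the group name are assumed.

    .. code-block:: python

        rg = client.resource_groups.get("my-rg")
        print(rg.location, rg.properties.provisioning_state)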
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
cls: ClsType[_models.ResourceGroup] = kwargs.pop("cls", None)
request = build_resource_groups_get_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("ResourceGroup", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}"}
@overload
def patch(
self,
resource_group_name: str,
parameters: _models.ResourceGroup,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.ResourceGroup:
"""Updates a resource group.
Resource groups can be updated through a simple PATCH operation to a group address. The format
of the request is the same as that for creating a resource group. If a field is unspecified,
the current value is retained.
:param resource_group_name: The name of the resource group to update. The name is case
insensitive. Required.
:type resource_group_name: str
:param parameters: Parameters supplied to update a resource group. Required.
:type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroup
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ResourceGroup or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroup
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def patch(
self, resource_group_name: str, parameters: IO, *, content_type: str = "application/json", **kwargs: Any
) -> _models.ResourceGroup:
"""Updates a resource group.
Resource groups can be updated through a simple PATCH operation to a group address. The format
of the request is the same as that for creating a resource group. If a field is unspecified,
the current value is retained.
:param resource_group_name: The name of the resource group to update. The name is case
insensitive. Required.
:type resource_group_name: str
:param parameters: Parameters supplied to update a resource group. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ResourceGroup or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroup
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def patch(
self, resource_group_name: str, parameters: Union[_models.ResourceGroup, IO], **kwargs: Any
) -> _models.ResourceGroup:
"""Updates a resource group.
Resource groups can be updated through a simple PATCH operation to a group address. The format
of the request is the same as that for creating a resource group. If a field is unspecified,
the current value is retained.
:param resource_group_name: The name of the resource group to update. The name is case
insensitive. Required.
:type resource_group_name: str
:param parameters: Parameters supplied to update a resource group. Is either a ResourceGroup
type or an IO type. Required.
:type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroup or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ResourceGroup or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroup
:raises ~azure.core.exceptions.HttpResponseError:
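.. admonition:: Example

    An illustrative sketch; ``client`` and the tag values are assumptions.
    Fields left unspecified in the PATCH body keep their current values.

    .. code-block:: python

        from azure.mgmt.resource.resources.v2016_09_01.models import ResourceGroup

        updated = client.resource_groups.patch(
            "my-rg", ResourceGroup(tags={"env": "dev"})
        )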
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.ResourceGroup] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "ResourceGroup")
request = build_resource_groups_patch_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.patch.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("ResourceGroup", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
patch.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}"}
@overload
def export_template(
self,
resource_group_name: str,
parameters: _models.ExportTemplateRequest,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.ResourceGroupExportResult:
"""Captures the specified resource group as a template.
:param resource_group_name: The name of the resource group to export as a template. Required.
:type resource_group_name: str
:param parameters: Parameters for exporting the template. Required.
:type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.ExportTemplateRequest
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ResourceGroupExportResult or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroupExportResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def export_template(
self, resource_group_name: str, parameters: IO, *, content_type: str = "application/json", **kwargs: Any
) -> _models.ResourceGroupExportResult:
"""Captures the specified resource group as a template.
:param resource_group_name: The name of the resource group to export as a template. Required.
:type resource_group_name: str
:param parameters: Parameters for exporting the template. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ResourceGroupExportResult or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroupExportResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def export_template(
self, resource_group_name: str, parameters: Union[_models.ExportTemplateRequest, IO], **kwargs: Any
) -> _models.ResourceGroupExportResult:
"""Captures the specified resource group as a template.
:param resource_group_name: The name of the resource group to export as a template. Required.
:type resource_group_name: str
:param parameters: Parameters for exporting the template. Is either an ExportTemplateRequest
type or an IO type. Required.
:type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.ExportTemplateRequest or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ResourceGroupExportResult or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroupExportResult
:raises ~azure.core.exceptions.HttpResponseError:
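.. admonition:: Example

    An illustrative sketch; ``client`` and the group name are assumed, and
    ``resources=["*"]`` exports every resource in the group.

    .. code-block:: python

        from azure.mgmt.resource.resources.v2016_09_01.models import ExportTemplateRequest

        result = client.resource_groups.export_template(
            "my-rg",
            ExportTemplateRequest(
                resources=["*"], options="IncludeParameterDefaultValue"
            ),
        )
        print(result.template)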
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.ResourceGroupExportResult] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "ExportTemplateRequest")
request = build_resource_groups_export_template_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.export_template.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("ResourceGroupExportResult", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
export_template.metadata = {
"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/exportTemplate"
}
@distributed_trace
def list(
self, filter: Optional[str] = None, top: Optional[int] = None, **kwargs: Any
) -> Iterable["_models.ResourceGroup"]:
"""Gets all the resource groups for a subscription.
:param filter: The filter to apply on the operation. Default value is None.
:type filter: str
:param top: The number of results to return. If null is passed, returns all resource groups.
Default value is None.
:type top: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ResourceGroup or the result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroup]
:raises ~azure.core.exceptions.HttpResponseError:
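.. admonition:: Example

    An illustrative sketch; ``client`` is an assumed, pre-built
    ``ResourceManagementClient``.

    .. code-block:: python

        for rg in client.resource_groups.list(top=20):
            print(rg.name, rg.location)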
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
cls: ClsType[_models.ResourceGroupListResult] = kwargs.pop("cls", None)
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_resource_groups_list_request(
subscription_id=self._config.subscription_id,
filter=filter,
top=top,
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("ResourceGroupListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups"}
class ResourcesOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.resource.resources.v2016_09_01.ResourceManagementClient`'s
:attr:`resources` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
def _move_resources_initial( # pylint: disable=inconsistent-return-statements
self, source_resource_group_name: str, parameters: Union[_models.ResourcesMoveInfo, IO], **kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[None] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "ResourcesMoveInfo")
request = build_resources_move_resources_request(
source_resource_group_name=source_resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._move_resources_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_move_resources_initial.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{sourceResourceGroupName}/moveResources"
}
@overload
def begin_move_resources(
self,
source_resource_group_name: str,
parameters: _models.ResourcesMoveInfo,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[None]:
"""Moves resources from one resource group to another resource group.
The resources to move must be in the same source resource group. The target resource group may
be in a different subscription. When moving resources, both the source group and the target
group are locked for the duration of the operation. Write and delete operations are blocked on
the groups until the move completes.
:param source_resource_group_name: The name of the resource group containing the resources to
move. Required.
:type source_resource_group_name: str
:param parameters: Parameters for moving resources. Required.
:type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourcesMoveInfo
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def begin_move_resources(
self, source_resource_group_name: str, parameters: IO, *, content_type: str = "application/json", **kwargs: Any
) -> LROPoller[None]:
"""Moves resources from one resource group to another resource group.
The resources to move must be in the same source resource group. The target resource group may
be in a different subscription. When moving resources, both the source group and the target
group are locked for the duration of the operation. Write and delete operations are blocked on
the groups until the move completes.
:param source_resource_group_name: The name of the resource group containing the resources to
move. Required.
:type source_resource_group_name: str
:param parameters: Parameters for moving resources. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def begin_move_resources(
self, source_resource_group_name: str, parameters: Union[_models.ResourcesMoveInfo, IO], **kwargs: Any
) -> LROPoller[None]:
"""Moves resources from one resource group to another resource group.
The resources to move must be in the same source resource group. The target resource group may
be in a different subscription. When moving resources, both the source group and the target
group are locked for the duration of the operation. Write and delete operations are blocked on
the groups until the move completes.
:param source_resource_group_name: The name of the resource group containing the resources to
move. Required.
:type source_resource_group_name: str
:param parameters: Parameters for moving resources. Is either a ResourcesMoveInfo type or an
IO type. Required.
:type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourcesMoveInfo or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
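.. admonition:: Example

    An illustrative sketch; ``client``, ``resource_id``, and
    ``target_group_id`` are assumptions standing in for real values.

    .. code-block:: python

        from azure.mgmt.resource.resources.v2016_09_01.models import ResourcesMoveInfo

        poller = client.resources.begin_move_resources(
            "source-rg",
            ResourcesMoveInfo(
                resources=[resource_id], target_resource_group=target_group_id
            ),
        )
        poller.result()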
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
raw_result = self._move_resources_initial( # type: ignore
source_resource_group_name=source_resource_group_name,
parameters=parameters,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
begin_move_resources.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{sourceResourceGroupName}/moveResources"
}
@distributed_trace
def list(
self, filter: Optional[str] = None, expand: Optional[str] = None, top: Optional[int] = None, **kwargs: Any
) -> Iterable["_models.GenericResourceExpanded"]:
"""Get all the resources in a subscription.
:param filter: The filter to apply on the operation. Default value is None.
:type filter: str
:param expand: Comma-separated list of additional properties to be included in the response.
Valid values include ``createdTime``\ , ``changedTime`` and ``provisioningState``. For example,
``$expand=createdTime,changedTime``. Default value is None.
:type expand: str
:param top: The number of results to return. If null is passed, returns all resources. Default
value is None.
:type top: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either GenericResourceExpanded or the result of
cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResourceExpanded]
:raises ~azure.core.exceptions.HttpResponseError:
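.. admonition:: Example

    An illustrative sketch; ``client`` is assumed and the filter value is
    a placeholder.

    .. code-block:: python

        storage_accounts = client.resources.list(
            filter="resourceType eq 'Microsoft.Storage/storageAccounts'"
        )
        for account in storage_accounts:
            print(account.id)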
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
cls: ClsType[_models.ResourceListResult] = kwargs.pop("cls", None)
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_resources_list_request(
subscription_id=self._config.subscription_id,
filter=filter,
expand=expand,
top=top,
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("ResourceListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list.metadata = {"url": "/subscriptions/{subscriptionId}/resources"}
@distributed_trace
def check_existence(
self,
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
api_version: str,
**kwargs: Any
) -> bool:
"""Checks whether a resource exists.
:param resource_group_name: The name of the resource group containing the resource to check.
The name is case insensitive. Required.
:type resource_group_name: str
:param resource_provider_namespace: The resource provider of the resource to check. Required.
:type resource_provider_namespace: str
:param parent_resource_path: The parent resource identity. Required.
:type parent_resource_path: str
:param resource_type: The resource type. Required.
:type resource_type: str
:param resource_name: The name of the resource to check whether it exists. Required.
:type resource_name: str
:param api_version: The API version to use for the operation. Required.
:type api_version: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: bool or the result of cls(response)
:rtype: bool
:raises ~azure.core.exceptions.HttpResponseError:
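.. admonition:: Example

    An illustrative sketch; ``client`` and all identifier values are
    placeholders. Note that ``api_version`` here is the API version of the
    target resource provider, not of this client.

    .. code-block:: python

        exists = client.resources.check_existence(
            resource_group_name="my-rg",
            resource_provider_namespace="Microsoft.Storage",
            parent_resource_path="",
            resource_type="storageAccounts",
            resource_name="mystorageacct",
            api_version="2016-01-01",  # the storage provider's own API version
        )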
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls: ClsType[None] = kwargs.pop("cls", None)
request = build_resources_check_existence_request(
resource_group_name=resource_group_name,
resource_provider_namespace=resource_provider_namespace,
parent_resource_path=parent_resource_path,
resource_type=resource_type,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.check_existence.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [204, 404]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
return 200 <= response.status_code <= 299  # 204 (resource exists) -> True; 404 -> False
check_existence.metadata = {
"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}"
}
def _delete_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
api_version: str,
**kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls: ClsType[None] = kwargs.pop("cls", None)
request = build_resources_delete_request(
resource_group_name=resource_group_name,
resource_provider_namespace=resource_provider_namespace,
parent_resource_path=parent_resource_path,
resource_type=resource_type,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {
"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}"
}
@distributed_trace
def begin_delete(
self,
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
api_version: str,
**kwargs: Any
) -> LROPoller[None]:
"""Deletes a resource.
:param resource_group_name: The name of the resource group that contains the resource to
delete. The name is case insensitive. Required.
:type resource_group_name: str
:param resource_provider_namespace: The namespace of the resource provider. Required.
:type resource_provider_namespace: str
:param parent_resource_path: The parent resource identity. Required.
:type parent_resource_path: str
:param resource_type: The resource type. Required.
:type resource_type: str
:param resource_name: The name of the resource to delete. Required.
:type resource_name: str
:param api_version: The API version to use for the operation. Required.
:type api_version: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
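.. admonition:: Example

    An illustrative sketch; ``client`` and all identifier values are
    placeholders.

    .. code-block:: python

        poller = client.resources.begin_delete(
            resource_group_name="my-rg",
            resource_provider_namespace="Microsoft.Storage",
            parent_resource_path="",
            resource_type="storageAccounts",
            resource_name="mystorageacct",
            api_version="2016-01-01",
        )
        poller.result()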
"""
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
raw_result = self._delete_initial( # type: ignore
resource_group_name=resource_group_name,
resource_provider_namespace=resource_provider_namespace,
parent_resource_path=parent_resource_path,
resource_type=resource_type,
resource_name=resource_name,
api_version=api_version,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
begin_delete.metadata = {
"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}"
}
def _create_or_update_initial(
self,
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
api_version: str,
parameters: Union[_models.GenericResource, IO],
**kwargs: Any
) -> Optional[_models.GenericResource]:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[Optional[_models.GenericResource]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "GenericResource")
request = build_resources_create_or_update_request(
resource_group_name=resource_group_name,
resource_provider_namespace=resource_provider_namespace,
parent_resource_path=parent_resource_path,
resource_type=resource_type,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._create_or_update_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("GenericResource", pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize("GenericResource", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {
"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}"
}
@overload
def begin_create_or_update(
self,
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
api_version: str,
parameters: _models.GenericResource,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.GenericResource]:
"""Creates a resource.
:param resource_group_name: The name of the resource group for the resource. The name is case
insensitive. Required.
:type resource_group_name: str
:param resource_provider_namespace: The namespace of the resource provider. Required.
:type resource_provider_namespace: str
:param parent_resource_path: The parent resource identity. Required.
:type parent_resource_path: str
:param resource_type: The resource type of the resource to create. Required.
:type resource_type: str
:param resource_name: The name of the resource to create. Required.
:type resource_name: str
:param api_version: The API version to use for the operation. Required.
:type api_version: str
:param parameters: Parameters for creating or updating the resource. Required.
:type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either GenericResource or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def begin_create_or_update(
self,
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
api_version: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.GenericResource]:
"""Creates a resource.
:param resource_group_name: The name of the resource group for the resource. The name is case
insensitive. Required.
:type resource_group_name: str
:param resource_provider_namespace: The namespace of the resource provider. Required.
:type resource_provider_namespace: str
:param parent_resource_path: The parent resource identity. Required.
:type parent_resource_path: str
:param resource_type: The resource type of the resource to create. Required.
:type resource_type: str
:param resource_name: The name of the resource to create. Required.
:type resource_name: str
:param api_version: The API version to use for the operation. Required.
:type api_version: str
:param parameters: Parameters for creating or updating the resource. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either GenericResource or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def begin_create_or_update(
self,
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
api_version: str,
parameters: Union[_models.GenericResource, IO],
**kwargs: Any
) -> LROPoller[_models.GenericResource]:
"""Creates a resource.
:param resource_group_name: The name of the resource group for the resource. The name is case
insensitive. Required.
:type resource_group_name: str
:param resource_provider_namespace: The namespace of the resource provider. Required.
:type resource_provider_namespace: str
:param parent_resource_path: The parent resource identity. Required.
:type parent_resource_path: str
:param resource_type: The resource type of the resource to create. Required.
:type resource_type: str
:param resource_name: The name of the resource to create. Required.
:type resource_name: str
:param api_version: The API version to use for the operation. Required.
:type api_version: str
:param parameters: Parameters for creating or updating the resource. Is either a
GenericResource type or an IO type. Required.
:type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either GenericResource or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.GenericResource] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
resource_provider_namespace=resource_provider_namespace,
parent_resource_path=parent_resource_path,
resource_type=resource_type,
resource_name=resource_name,
api_version=api_version,
parameters=parameters,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize("GenericResource", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
begin_create_or_update.metadata = {
"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}"
}
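# Usage sketch (illustrative only; not part of the generated client). The
# credential, subscription id, resource names, and body below are assumptions:
#
#     from azure.identity import DefaultAzureCredential
#     from azure.mgmt.resource import ResourceManagementClient
#     from azure.mgmt.resource.resources.v2016_09_01.models import GenericResource
#
#     client = ResourceManagementClient(DefaultAzureCredential(), "<subscription-id>")
#     poller = client.resources.begin_create_or_update(
#         resource_group_name="my-rg",
#         resource_provider_namespace="Microsoft.Storage",
#         parent_resource_path="",
#         resource_type="storageAccounts",
#         resource_name="mystorageacct",
#         api_version="2021-04-01",
#         parameters=GenericResource(location="westus"),  # minimal body, for illustration
#     )
#     resource = poller.result()  # blocks until the ARM long-running operation finishes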
def _update_initial(
self,
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
api_version: str,
parameters: Union[_models.GenericResource, IO],
**kwargs: Any
) -> Optional[_models.GenericResource]:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[Optional[_models.GenericResource]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "GenericResource")
request = build_resources_update_request(
resource_group_name=resource_group_name,
resource_provider_namespace=resource_provider_namespace,
parent_resource_path=parent_resource_path,
resource_type=resource_type,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._update_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("GenericResource", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_initial.metadata = {
"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}"
}
@overload
def begin_update(
self,
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
api_version: str,
parameters: _models.GenericResource,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.GenericResource]:
"""Updates a resource.
:param resource_group_name: The name of the resource group for the resource. The name is case
insensitive. Required.
:type resource_group_name: str
:param resource_provider_namespace: The namespace of the resource provider. Required.
:type resource_provider_namespace: str
:param parent_resource_path: The parent resource identity. Required.
:type parent_resource_path: str
:param resource_type: The resource type of the resource to update. Required.
:type resource_type: str
:param resource_name: The name of the resource to update. Required.
:type resource_name: str
:param api_version: The API version to use for the operation. Required.
:type api_version: str
:param parameters: Parameters for updating the resource. Required.
:type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either GenericResource or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def begin_update(
self,
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
api_version: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.GenericResource]:
"""Updates a resource.
:param resource_group_name: The name of the resource group for the resource. The name is case
insensitive. Required.
:type resource_group_name: str
:param resource_provider_namespace: The namespace of the resource provider. Required.
:type resource_provider_namespace: str
:param parent_resource_path: The parent resource identity. Required.
:type parent_resource_path: str
:param resource_type: The resource type of the resource to update. Required.
:type resource_type: str
:param resource_name: The name of the resource to update. Required.
:type resource_name: str
:param api_version: The API version to use for the operation. Required.
:type api_version: str
:param parameters: Parameters for updating the resource. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either GenericResource or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def begin_update(
self,
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
api_version: str,
parameters: Union[_models.GenericResource, IO],
**kwargs: Any
) -> LROPoller[_models.GenericResource]:
"""Updates a resource.
:param resource_group_name: The name of the resource group for the resource. The name is case
insensitive. Required.
:type resource_group_name: str
:param resource_provider_namespace: The namespace of the resource provider. Required.
:type resource_provider_namespace: str
:param parent_resource_path: The parent resource identity. Required.
:type parent_resource_path: str
:param resource_type: The resource type of the resource to update. Required.
:type resource_type: str
:param resource_name: The name of the resource to update. Required.
:type resource_name: str
:param api_version: The API version to use for the operation. Required.
:type api_version: str
:param parameters: Parameters for updating the resource. Is either a GenericResource type or an
IO type. Required.
:type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either GenericResource or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.GenericResource] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
raw_result = self._update_initial(
resource_group_name=resource_group_name,
resource_provider_namespace=resource_provider_namespace,
parent_resource_path=parent_resource_path,
resource_type=resource_type,
resource_name=resource_name,
api_version=api_version,
parameters=parameters,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize("GenericResource", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
begin_update.metadata = {
"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}"
}
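# Resume sketch (hedged; names and the persistence step are assumptions).
# LROPoller.continuation_token() yields an opaque string that can be handed
# back through the ``continuation_token`` keyword; the same arguments must be
# passed again, but the initial request is skipped when the token is given:
#
#     args = dict(
#         resource_group_name="my-rg",
#         resource_provider_namespace="Microsoft.Storage",
#         parent_resource_path="",
#         resource_type="storageAccounts",
#         resource_name="mystorageacct",
#         api_version="2021-04-01",
#         parameters=GenericResource(tags={"env": "dev"}),
#     )
#     token = client.resources.begin_update(**args).continuation_token()  # persist somewhere durable
#     # later, possibly in another process:
#     resource = client.resources.begin_update(**args, continuation_token=token).result()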
@distributed_trace
def get(
self,
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
api_version: str,
**kwargs: Any
) -> _models.GenericResource:
"""Gets a resource.
:param resource_group_name: The name of the resource group containing the resource to get. The
name is case insensitive. Required.
:type resource_group_name: str
:param resource_provider_namespace: The namespace of the resource provider. Required.
:type resource_provider_namespace: str
:param parent_resource_path: The parent resource identity. Required.
:type parent_resource_path: str
:param resource_type: The resource type of the resource. Required.
:type resource_type: str
:param resource_name: The name of the resource to get. Required.
:type resource_name: str
:param api_version: The API version to use for the operation. Required.
:type api_version: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: GenericResource or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls: ClsType[_models.GenericResource] = kwargs.pop("cls", None)
request = build_resources_get_request(
resource_group_name=resource_group_name,
resource_provider_namespace=resource_provider_namespace,
parent_resource_path=parent_resource_path,
resource_type=resource_type,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("GenericResource", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {
"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}"
}
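# Synchronous read sketch (resource names are illustrative assumptions):
#
#     resource = client.resources.get(
#         resource_group_name="my-rg",
#         resource_provider_namespace="Microsoft.Storage",
#         parent_resource_path="",
#         resource_type="storageAccounts",
#         resource_name="mystorageacct",
#         api_version="2021-04-01",
#     )
#     print(resource.id, resource.location)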
@distributed_trace
def check_existence_by_id(self, resource_id: str, api_version: str, **kwargs: Any) -> bool:
"""Checks by ID whether a resource exists.
:param resource_id: The fully qualified ID of the resource, including the resource name and
resource type. Use the format,
/subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}.
Required.
:type resource_id: str
:param api_version: The API version to use for the operation. Required.
:type api_version: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: bool or the result of cls(response)
:rtype: bool
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls: ClsType[None] = kwargs.pop("cls", None)
request = build_resources_check_existence_by_id_request(
resource_id=resource_id,
api_version=api_version,
template_url=self.check_existence_by_id.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [204, 404]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
return 200 <= response.status_code <= 299
check_existence_by_id.metadata = {"url": "/{resourceId}"}
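# Existence-check sketch. Per the status handling above, the service answers
# 204 (exists) or 404 (does not exist), so the returned boolean is True only
# for 204. The resource id below is an illustrative assumption:
#
#     rid = ("/subscriptions/<sub-id>/resourceGroups/my-rg"
#            "/providers/Microsoft.Storage/storageAccounts/mystorageacct")
#     if client.resources.check_existence_by_id(rid, api_version="2021-04-01"):
#         print("resource exists")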
def _delete_by_id_initial( # pylint: disable=inconsistent-return-statements
self, resource_id: str, api_version: str, **kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls: ClsType[None] = kwargs.pop("cls", None)
request = build_resources_delete_by_id_request(
resource_id=resource_id,
api_version=api_version,
template_url=self._delete_by_id_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_by_id_initial.metadata = {"url": "/{resourceId}"}
@distributed_trace
def begin_delete_by_id(self, resource_id: str, api_version: str, **kwargs: Any) -> LROPoller[None]:
"""Deletes a resource by ID.
:param resource_id: The fully qualified ID of the resource, including the resource name and
resource type. Use the format,
/subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}.
Required.
:type resource_id: str
:param api_version: The API version to use for the operation. Required.
:type api_version: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
raw_result = self._delete_by_id_initial( # type: ignore
resource_id=resource_id,
api_version=api_version,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
begin_delete_by_id.metadata = {"url": "/{resourceId}"}
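# Deletion sketch; ``polling=False`` returns right after the initial DELETE
# instead of waiting for ARM to finish (resource id is an assumption):
#
#     client.resources.begin_delete_by_id(rid, api_version="2021-04-01").result()  # wait for completion
#     client.resources.begin_delete_by_id(rid, api_version="2021-04-01", polling=False)  # fire and forget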
def _create_or_update_by_id_initial(
self, resource_id: str, api_version: str, parameters: Union[_models.GenericResource, IO], **kwargs: Any
) -> Optional[_models.GenericResource]:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[Optional[_models.GenericResource]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "GenericResource")
request = build_resources_create_or_update_by_id_request(
resource_id=resource_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._create_or_update_by_id_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("GenericResource", pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize("GenericResource", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_by_id_initial.metadata = {"url": "/{resourceId}"}
@overload
def begin_create_or_update_by_id(
self,
resource_id: str,
api_version: str,
parameters: _models.GenericResource,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.GenericResource]:
"""Create a resource by ID.
:param resource_id: The fully qualified ID of the resource, including the resource name and
resource type. Use the format,
/subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}.
Required.
:type resource_id: str
:param api_version: The API version to use for the operation. Required.
:type api_version: str
:param parameters: Create or update resource parameters. Required.
:type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either GenericResource or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def begin_create_or_update_by_id(
self,
resource_id: str,
api_version: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.GenericResource]:
"""Create a resource by ID.
:param resource_id: The fully qualified ID of the resource, including the resource name and
resource type. Use the format,
/subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}.
Required.
:type resource_id: str
:param api_version: The API version to use for the operation. Required.
:type api_version: str
:param parameters: Create or update resource parameters. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either GenericResource or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def begin_create_or_update_by_id(
self, resource_id: str, api_version: str, parameters: Union[_models.GenericResource, IO], **kwargs: Any
) -> LROPoller[_models.GenericResource]:
"""Create a resource by ID.
:param resource_id: The fully qualified ID of the resource, including the resource name and
resource type. Use the format,
/subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}.
Required.
:type resource_id: str
:param api_version: The API version to use for the operation. Required.
:type api_version: str
:param parameters: Create or update resource parameters. Is either a GenericResource type or an
IO type. Required.
:type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either GenericResource or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.GenericResource] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
raw_result = self._create_or_update_by_id_initial(
resource_id=resource_id,
api_version=api_version,
parameters=parameters,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize("GenericResource", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
begin_create_or_update_by_id.metadata = {"url": "/{resourceId}"}
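# Raw-body sketch: because the initial call branches on
# isinstance(parameters, (IOBase, bytes)), a pre-serialized JSON payload can
# be sent as bytes or a stream with an explicit content type (the payload
# below is an illustrative assumption):
#
#     import json
#     body = json.dumps({"location": "westus", "tags": {"env": "dev"}}).encode("utf-8")
#     poller = client.resources.begin_create_or_update_by_id(
#         rid, api_version="2021-04-01", parameters=body, content_type="application/json"
#     )
#     resource = poller.result()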
def _update_by_id_initial(
self, resource_id: str, api_version: str, parameters: Union[_models.GenericResource, IO], **kwargs: Any
) -> Optional[_models.GenericResource]:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[Optional[_models.GenericResource]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "GenericResource")
request = build_resources_update_by_id_request(
resource_id=resource_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._update_by_id_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("GenericResource", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_by_id_initial.metadata = {"url": "/{resourceId}"}
@overload
def begin_update_by_id(
self,
resource_id: str,
api_version: str,
parameters: _models.GenericResource,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.GenericResource]:
"""Updates a resource by ID.
:param resource_id: The fully qualified ID of the resource, including the resource name and
resource type. Use the format,
/subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}.
Required.
:type resource_id: str
:param api_version: The API version to use for the operation. Required.
:type api_version: str
:param parameters: Update resource parameters. Required.
:type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either GenericResource or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def begin_update_by_id(
self,
resource_id: str,
api_version: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.GenericResource]:
"""Updates a resource by ID.
:param resource_id: The fully qualified ID of the resource, including the resource name and
resource type. Use the format,
/subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}.
Required.
:type resource_id: str
:param api_version: The API version to use for the operation. Required.
:type api_version: str
:param parameters: Update resource parameters. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either GenericResource or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def begin_update_by_id(
self, resource_id: str, api_version: str, parameters: Union[_models.GenericResource, IO], **kwargs: Any
) -> LROPoller[_models.GenericResource]:
"""Updates a resource by ID.
:param resource_id: The fully qualified ID of the resource, including the resource name and
resource type. Use the format,
/subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}.
Required.
:type resource_id: str
:param api_version: The API version to use for the operation. Required.
:type api_version: str
:param parameters: Update resource parameters. Is either a GenericResource type or an IO type.
Required.
:type parameters: ~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either GenericResource or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.GenericResource] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
raw_result = self._update_by_id_initial(
resource_id=resource_id,
api_version=api_version,
parameters=parameters,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize("GenericResource", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
begin_update_by_id.metadata = {"url": "/{resourceId}"}
@distributed_trace
def get_by_id(self, resource_id: str, api_version: str, **kwargs: Any) -> _models.GenericResource:
"""Gets a resource by ID.
:param resource_id: The fully qualified ID of the resource, including the resource name and
resource type. Use the format,
/subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}.
Required.
:type resource_id: str
:param api_version: The API version to use for the operation. Required.
:type api_version: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: GenericResource or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.GenericResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
cls: ClsType[_models.GenericResource] = kwargs.pop("cls", None)
request = build_resources_get_by_id_request(
resource_id=resource_id,
api_version=api_version,
template_url=self.get_by_id.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("GenericResource", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_by_id.metadata = {"url": "/{resourceId}"}
class TagsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.resource.resources.v2016_09_01.ResourceManagementClient`'s
:attr:`tags` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def delete_value( # pylint: disable=inconsistent-return-statements
self, tag_name: str, tag_value: str, **kwargs: Any
) -> None:
"""Deletes a tag value.
:param tag_name: The name of the tag. Required.
:type tag_name: str
:param tag_value: The value of the tag to delete. Required.
:type tag_value: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
cls: ClsType[None] = kwargs.pop("cls", None)
request = build_tags_delete_value_request(
tag_name=tag_name,
tag_value=tag_value,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.delete_value.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete_value.metadata = {"url": "/subscriptions/{subscriptionId}/tagNames/{tagName}/tagValues/{tagValue}"}
@distributed_trace
def create_or_update_value(self, tag_name: str, tag_value: str, **kwargs: Any) -> _models.TagValue:
"""Creates a tag value. The name of the tag must already exist.
:param tag_name: The name of the tag. Required.
:type tag_name: str
:param tag_value: The value of the tag to create. Required.
:type tag_value: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: TagValue or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.TagValue
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
cls: ClsType[_models.TagValue] = kwargs.pop("cls", None)
request = build_tags_create_or_update_value_request(
tag_name=tag_name,
tag_value=tag_value,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.create_or_update_value.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize("TagValue", pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize("TagValue", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
create_or_update_value.metadata = {"url": "/subscriptions/{subscriptionId}/tagNames/{tagName}/tagValues/{tagValue}"}
@distributed_trace
def create_or_update(self, tag_name: str, **kwargs: Any) -> _models.TagDetails:
"""Creates a tag in the subscription.
The tag name can have a maximum of 512 characters and is case insensitive. Tag names created by
Azure have prefixes of microsoft, azure, or windows. You cannot create tags with one of these
prefixes.
:param tag_name: The name of the tag to create. Required.
:type tag_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: TagDetails or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.TagDetails
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
cls: ClsType[_models.TagDetails] = kwargs.pop("cls", None)
request = build_tags_create_or_update_request(
tag_name=tag_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.create_or_update.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize("TagDetails", pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize("TagDetails", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/tagNames/{tagName}"}
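# Tag creation sketch: the tag name must exist before values can be attached,
# so create_or_update precedes create_or_update_value (names are assumptions):
#
#     client.tags.create_or_update("costcenter")
#     client.tags.create_or_update_value("costcenter", "1234")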
@distributed_trace
def delete(self, tag_name: str, **kwargs: Any) -> None: # pylint: disable=inconsistent-return-statements
"""Deletes a tag from the subscription.
You must remove all values from a resource tag before you can delete it.
:param tag_name: The name of the tag. Required.
:type tag_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
cls: ClsType[None] = kwargs.pop("cls", None)
request = build_tags_delete_request(
tag_name=tag_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.delete.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {"url": "/subscriptions/{subscriptionId}/tagNames/{tagName}"}
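# Tag deletion sketch: as documented above, every value must be removed before
# the tag name itself can be deleted:
#
#     client.tags.delete_value("costcenter", "1234")
#     client.tags.delete("costcenter")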
@distributed_trace
def list(self, **kwargs: Any) -> Iterable["_models.TagDetails"]:
"""Gets the names and values of all resource tags that are defined in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either TagDetails or the result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.resources.v2016_09_01.models.TagDetails]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
cls: ClsType[_models.TagsListResult] = kwargs.pop("cls", None)
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_tags_list_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("TagsListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list.metadata = {"url": "/subscriptions/{subscriptionId}/tagNames"}
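# Paging sketch: list() returns an ItemPaged that lazily follows next links
# via the get_next/extract_data pair above, so plain iteration covers every page:
#
#     for tag in client.tags.list():
#         print(tag.tag_name, [v.tag_value for v in (tag.values or [])])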
class DeploymentOperationsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.resource.resources.v2016_09_01.ResourceManagementClient`'s
:attr:`deployment_operations` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def get(
self, resource_group_name: str, deployment_name: str, operation_id: str, **kwargs: Any
) -> _models.DeploymentOperation:
"""Gets a deployments operation.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param deployment_name: The name of the deployment. Required.
:type deployment_name: str
:param operation_id: The ID of the operation to get. Required.
:type operation_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DeploymentOperation or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.DeploymentOperation
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
cls: ClsType[_models.DeploymentOperation] = kwargs.pop("cls", None)
request = build_deployment_operations_get_request(
resource_group_name=resource_group_name,
deployment_name=deployment_name,
operation_id=operation_id,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("DeploymentOperation", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {
"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/deployments/{deploymentName}/operations/{operationId}"
}
@distributed_trace
def list(
self, resource_group_name: str, deployment_name: str, top: Optional[int] = None, **kwargs: Any
) -> Iterable["_models.DeploymentOperation"]:
"""Gets all deployments operations for a deployment.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param deployment_name: The name of the deployment with the operation to get. Required.
:type deployment_name: str
:param top: The number of results to return. Default value is None.
:type top: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either DeploymentOperation or the result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.resource.resources.v2016_09_01.models.DeploymentOperation]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2016-09-01"))
cls: ClsType[_models.DeploymentOperationsListResult] = kwargs.pop("cls", None)
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_deployment_operations_list_request(
resource_group_name=resource_group_name,
deployment_name=deployment_name,
subscription_id=self._config.subscription_id,
top=top,
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("DeploymentOperationsListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list.metadata = {
"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/deployments/{deploymentName}/operations"
}
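# Usage sketch (hedged): resource group and deployment names are placeholders;
# per the class docstring above, this group is reached through the client's
# `deployment_operations` attribute.
#
#   for op in client.deployment_operations.list("<resource-group>", "<deployment-name>"):
#       print(op.operation_id, op.properties.provisioning_state)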
# ---- dequorum/urls.py | repo: pombredanne/django-dequorum | license: BSD-2-Clause ----
from django.conf.urls import patterns, include, url
from nap import api
urlpatterns = patterns('',
(u'^$', 'django.shortcuts.render', {'template_name': 'dequorum/index.html'}),
(u'^api/', include(api.APIS['dequorum'].patterns(flat=True))),
)
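# Note: django.conf.urls.patterns() was removed in Django 1.10. A rough modern
# equivalent (assumption: the nap API object is unchanged and the index route is
# rewritten as a real view function) would be:
#
#   from django.urls import include, re_path
#   urlpatterns = [
#       re_path(r'^api/', include(api.APIS['dequorum'].patterns(flat=True))),
#   ]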
# ---- flex/redis.py | repo: centergy/flex | license: MIT ----
from flask import current_app
from threading import Lock
from flex.utils.module_loading import import_string
__all__ = ('RedisManager', 'redis')
class _Connector(object):
__slots__ = ('app', 'lock', '_client', 'config')
def __init__(self, app, config):
self.app = app
self.config = config
self._client = None
self.lock = Lock()
@property
def client(self):
with self.lock:
if self._client is None:
cls = self.config.CLIENT_CLASS
if isinstance(cls, str):
cls = import_string(cls)
self._client = cls.from_url(
self.config.URL,
**self.config.CLIENT_OPTIONS
)
return self._client
class RedisManager(object):
__slots__ = ('_app', )
config_prefix = 'REDIS_'
default_config = dict(
url='redis://localhost:6379/0',
client_class='redis.StrictRedis',
client_options={}
)
def __init__(self, app=None):
self._app = None
if app is not None:
self.init_app(app)
self._app = app
@property
def _redis_client(self):
try:
return self._get_app().extensions['redis'].client
except KeyError:
raise RuntimeError('Redis not setup on app.')
def _get_app(self, app=None):
"""Helper method that implements the logic to look up an application."""
if app is not None:
return app
if current_app:
return current_app
if self._app is not None:
return self._app
raise RuntimeError(
'Application not registered on cache instance and no application'\
'bound to current context'
)
def init_app(self, app, **kwargs):
config = app.config.namespace(self.config_prefix)
config.setdefaults(self.default_config)
app.extensions['redis'] = _Connector(app, config)
def __getattr__(self, name):
return getattr(self._redis_client, name)
def __getitem__(self, name):
return self._redis_client[name]
def __setitem__(self, name, value):
self._redis_client[name] = value
def __delitem__(self, name):
del self._redis_client[name]
redis = RedisManager()
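# Usage sketch (hedged): assumes the flex config namespace maps the app config
# keys REDIS_URL / REDIS_CLIENT_CLASS / REDIS_CLIENT_OPTIONS onto the
# default_config entries above.
#
#   from flask import Flask
#   app = Flask(__name__)
#   app.config['REDIS_URL'] = 'redis://localhost:6379/0'
#   redis.init_app(app)
#   with app.app_context():
#       redis.set('greeting', 'hello')
#       print(redis.get('greeting'))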
"[email protected]"
] | |
3d838033d15386a3eba79d8ff6c914677e51f87f | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/59/usersdata/158/47953/submittedfiles/testes.py | 79ed55e189435778a26b71a91b9e7d1d21f2ea6a | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 109 | py | # -*- coding: utf-8 -*-
# START HERE BELOW
r=int(input('circle radius:'))
p=3.1415
area=p*r*r
print(area)
# ---- /venv/Lib/site-packages/cassiopeia/transformers/championmastery.py | repo: sserrot/champion_relationships | license: MIT ----
from typing import Type, TypeVar
from copy import deepcopy
from datapipelines import DataTransformer, PipelineContext
from ..core.championmastery import ChampionMasteryData, ChampionMasteryListData, ChampionMastery, ChampionMasteries
from ..dto.championmastery import ChampionMasteryDto, ChampionMasteryListDto
T = TypeVar("T")
F = TypeVar("F")
class ChampionMasteryTransformer(DataTransformer):
@DataTransformer.dispatch
def transform(self, target_type: Type[T], value: F, context: PipelineContext = None) -> T:
pass
# Dto to Data
@transform.register(ChampionMasteryDto, ChampionMasteryData)
def champion_mastery_dto_to_data(self, value: ChampionMasteryDto, context: PipelineContext = None) -> ChampionMasteryData:
return ChampionMasteryData(**value)
@transform.register(ChampionMasteryListDto, ChampionMasteryListData)
def champion_mastery_list_dto_to_data(self, value: ChampionMasteryListDto, context: PipelineContext = None) -> ChampionMasteryListData:
data = deepcopy(value)
data["masteries"] = [self.champion_mastery_dto_to_data(c) for c in data["masteries"]]
for c in data["masteries"]:
c(region=data["region"])
data = data["masteries"]
return ChampionMasteryListData(data, region=value["region"], summoner_id=value["summonerId"])
# Data to Core
#@transform.register(ChampionMasteryData, ChampionMastery)
def champion_mastery_data_to_core(self, value: ChampionMasteryData, context: PipelineContext = None) -> ChampionMastery:
return ChampionMastery.from_data(value)
#@transform.register(ChampionMasteryListData, ChampionMasteries)
def champion_mastery_list_data_to_core(self, value: ChampionMasteryListData, context: PipelineContext = None) -> ChampionMasteries:
return ChampionMasteries.from_data(*[self.champion_mastery_data_to_core(cm) for cm in value], region=value.region, summoner=value.summoner_id)
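# Usage sketch (hedged): the payload below is illustrative; in cassiopeia these
# DTOs normally arrive from the Riot API through the datapipelines pipeline
# rather than being constructed by hand.
#
#   transformer = ChampionMasteryTransformer()
#   dto = ChampionMasteryDto(region="NA", championId=266, championPoints=1000)
#   data = transformer.champion_mastery_dto_to_data(dto)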
# ---- conformance_suite/test_assign_test_var.py | repo: brownplt/insta-model | license: none ----
# test_assign_test_var.py
# This should pass.
from typing import Optional
def f(x: Optional[int]) -> int:
if x is None:
x = 1
return x
# def test_assign_test_var(self):
# codestr = """
# from typing import Optional
# def f(x: Optional[int]) -> int:
# if x is None:
# x = 1
# return x
# """
# self.compile(codestr, modname="foo")
# ---- 4-case-study-sunlight-in-austin/9_daily_hours_of_clear_sky.py | repo: OCulzac/pandas-foundations | license: none ----
""" Daily hours of clear sky
In a previous exercise, you analyzed the 'sky_condition' column to explore the difference in temperature on sunny days compared to overcast days. Recall that a 'sky_condition' of 'CLR' represents a sunny day. In this exercise, you will explore sunny days in greater detail. Specifically, you will use a box plot to visualize the fraction of days that are sunny.
The 'sky_condition' column is recorded hourly. Your job is to resample this column appropriately such that you can extract the number of sunny hours in a day and the number of total hours. Then, you can divide the number of sunny hours by the number of total hours, and generate a box plot of the resulting fraction.
As before, df_clean is available for you in the workspace.
Instructions 1/3
Get the cases in df_clean where the sky is clear. That is, when 'sky_condition' equals 'CLR', assigning to is_sky_clear.
Resample is_sky_clear by day, assigning to resampled. """
# Using df_clean, when is sky_condition 'CLR'?
is_sky_clear = df_clean['sky_condition'] == 'CLR'
# Resample is_sky_clear by day
resampled = is_sky_clear.resample('D')
# See the result
print(resampled)
""" Instructions 2/3
Calculate the number of measured sunny hours per day as the sum of resampled, assigning to sunny_hours.
Calculate the total number of measured hours per day as the count of resampled, assigning to total_hours.
Calculate the fraction of hours per day that were sunny as the ratio of sunny hours to total hours.
"""
# From previous step
is_sky_clear = df_clean['sky_condition'] == 'CLR'
resampled = is_sky_clear.resample('D')
# Calculate the number of sunny hours per day
sunny_hours = resampled.sum()
# Calculate the number of measured hours per day
total_hours = resampled.count()
# Calculate the fraction of hours per day that were sunny
sunny_fraction = sunny_hours / total_hours
""" Instructions 3/3
Draw a box plot of sunny_fraction using .plot() with kind set to 'box'. """
# Make a box plot of sunny_fraction
sunny_fraction.plot(kind='box')
plt.show()
# ---- jq/jq_spider.py | repo: Biking0/spider_project | license: none ----
# coding=utf-8
import time, logging, requests, json, urllib, re, redis, random, sys, traceback, settings
from datetime import datetime, timedelta
from urllib.parse import urlparse, parse_qs, urlencode
# from urlparse import urlparse, parse_qs
from lxml import etree
from fake_useragent import UserAgent
from utils.genProxy import genProxy
# from utils.set_invalid import set_invalid
from utils.process_cookies.cookies_generator import get_cookies
import urllib3
from utils import pubUtil, timeUtil, dataUtil
# from utils.mac_address import get_mac_address
# logging基本配置
logging.basicConfig(
# filename='jq-spider-api.log', filemode="w",
level=logging.INFO,
format="[%(asctime)s] %(name)s:%(levelname)s: %(message)s"
)
class JQSpider:
def __init__(self, name, num, proxy, local):
# self.ua = UserAgent()
self.city_airport = self._city_airport()
self.now = 0
self.session = requests.session()
self.start_url = 'https://booking.jetstar.com/sg/zh/booking/search-flights?'
self.task = []
self.name = name
self.num = num
# self.dynamic = True if dynamic else False
# self.proxy = True if proxy and not dynamic else False
self.buffer = []
self.st_time = timeUtil.change_to_int('07:30:00')
self.en_time = timeUtil.change_to_int('22:00:00')
# self.genProxy = genProxy() if self.proxy else ''
self.genProxy = genProxy()
self.cookie_time = 0
# self.refreshCookies()
self.item_num = 0
self.db = redis.Redis('116.196.83.53', port=6379, db=1)
self.version = 1.4
self.ip_sleep = 0
self.local = local
urllib3.disable_warnings()
def refreshCookies(self):
content = None
try:
if self.db.llen('jq_cookies') <= 0:
content = get_cookies()
else:
content = self.db.lpop('jq_cookies')
except Exception as e:
# print('55', e)
content = get_cookies()
finally:
dict_cookies = json.loads(content)
self.bm_sz = 'bm_sz=' + dict_cookies.get('bm_sz')
self.ak_bmsc = 'ak_bmsc=' + dict_cookies.get('ak_bmsc')
# self.ASP_NET_SessionId = 'ASP.NET_SessionId=' + dict_cookies.get('ASP.NET_SessionId')
# print 'bmsz', self.bm_sz
# print 'ak_bmsc', self.ak_bmsc
# print 'ASP.NET_SessionId', self.ASP_NET_SessionId
logging.info('got new cookie')
# dict_cookies.pop('ASP.NET_SessionId')
final_cookies = requests.utils.cookiejar_from_dict(dict_cookies, cookiejar=None, overwrite=True)
self.session.cookies.update(final_cookies)
# build search URLs and invalid-markers for each task
@property
def start_request(self):
result_iter = None
# TODO: add a query/validity check here
while True:
# if not timeUtil.time_is_valid(self.st_time, self.en_time):
# logging.info('Waiting to 07:30:00.....')
# time.sleep(5 * 60)
# continue
# data_api = 'http://dx.redis.jiaoan100.com/buddha/gettask?carrier=JX'
data_api = 'http://task.jiaoan100.com/buddha/gettask?carrier=jx'
try:
if self.local:
if not result_iter:
result_iter = pubUtil.get_task('JQ', days=10)
result = next(result_iter)
else:
result = json.loads(requests.get(data_api, timeout=60).text).get('data')
except Exception as e:
logging.error(e)
result = None
if result is None:
logging.info('Date is None!')
logging.info('Waiting...')
time.sleep(16)
continue
airports, _day, day_num = result[0].split(':')
# day_num='1'
# print('airports, _day, day_num',airports, _day, day_num)
FROM, TO = airports.split('-')
# FROM, TO = ('DAD', 'HKG')
_day = re.sub(r'(\d{4})(\d{2})(\d{2})', r'\1-\2-\3', _day)
days = self._get_dates(_day, day_num)
# print(days)
# days = ['2019-01-11', '2019-01-12', '2019-01-13']
for day in days:
# FROM, TO, day = 'RGN', 'SIN', '2019-01-17'
query = urlencode({
'origin1': FROM,
'destination1': TO,
# 'flight-type': '1',
'departuredate1': day,
'adults': str(settings.ADULT_NUM),
'children': '0',
'infants': '0',
})
print(query)
# set_invalid('JX', FROM, TO, day)
total_url = self.start_url + query
# payload used to mark this date/route as invalid
invalid = {
'date': day.replace('-', ''),
'depAirport': FROM,
'arrAirport': TO,
'mins': settings.INVALID_TIME
}
# total_url = 'https://www.jetstar.com/au/en/home?origin=CBR&destination=HNL&flight-type=1&selected-departure-date=02-02-2019&adult=1&flexible=1¤cy=AUD'
# yield total_url,invalid
yield [total_url, invalid]
# fetch and process a single search task
def spider_worker(self, task):
url = task[0]
invalid = task[1]
# parse the url
result = parse_qs(urlparse(url).query)
FROM = result.get('origin1')[0]
TO = result.get('destination1')[0]
response = None
# try:
bm_sz = 'bm_sz=8FDDAD1500BB3181E007312084B74DA7~QAAQj+I+Fzus4t5nAQAACobLVLvxdiuzn063pNBFkTVgOPQsHzs06YJZFARyCeRdJ4OW1yMTQ6YZZ2KvYv0RGyJrd7irytTbRAKy4DPJf2FR3bV2+Jbl6azq9ffviB7OT/4PCwV+Wo5KWStfFY4PYePeDAdpwHNyJvDddWXmScoVlyjZu6iFn+ff9reRbCd4'
ak_bmsc = 'ak_bmsc=C0F93DC841F28198100D2E40067EDBAC173EE28F6F5A0000E2AA3E5C93B0C105~plmMZfVTVea4qlzoPlFKLl0JkkWVWIzJCizVuAJtNbqiAz1q3I+qfoNCCCkFwTFwPMYcyf72MggquEHzDTExDlhBtlHUp/QpM2HxFAVbkUFlV2ruGnUAg2KOvSRDs9Krfoci21iS98FZKfl/xaWQKABFi08wDORmmu/KsdJrsvDF7rsacdDGvjm/cZoh41w+zkYmrrBN5StLBRwL4e4vuTFOTYgerIGpxGAEqOEz4wxwKKrLVePd3D7tXDrY/fkHsp'
session = 'ASP.NET_SessionId=ekkha1fufcilv3fhdgbmricf'
# bm_sz = self.bm_sz
# ak_bmsc = self.ak_bmsc
ua = UserAgent()
headers_302 = {
# 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.186 Safari/537.36',
'User-Agent': ua.random,
# self.ua.random,
# 'referer': ('https://www.google.com/travel/clk/f?t=%s' % int(time.time() * 1000)),
'referer': 'https://www.jetstar.com/au/en/home?origin=SYD&destination=NRT&flight-type=1&selected-departure-date=01-02-2019&adult=1&flexible=1¤cy=AUD',
# 'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
# 'accept-encoding': 'gzip, deflate, br',
# 'accept-language': 'zh-CN,zh;q=0.9',
# 'cookie': bm_sz + ';' + ak_bmsc
}
# print 'headers_302', headers_302
headers_data = {
# 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.186 Safari/537.36',
'User-Agent': ua.random,
# self.ua.random,
# 'referer': ('https://www.google.com/travel/clk/f?t=%s' % int(time.time() * 1000)),
'referer': 'https://www.jetstar.com/au/en/home?origin=SYD&destination=NRT&flight-type=1&selected-departure-date=01-02-2019&adult=1&flexible=1¤cy=AUD',
# 'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
# 'accept-encoding': 'gzip, deflate, br',
# 'accept-language': 'zh-CN,zh;q=0.9',
# 'cookie': self.bm_sz + ';' + self.ak_bmsc + ';' + self.ASP_NET_SessionId
}
# if self.dynamic:
# (proxies, new_headers) = self.genProxy.genProxy()
# headers.update(new_headers)
# elif self.proxy:
# ip_port = self.genProxy.getHttpProxy()
# proxies = {"http": "http://" + ip_port, "https": "https://" + ip_port}
# else:
# proxies=''
# url = 'https://booking.jetstar.com/au/en/booking/search-flights?origin1=SYD&destination1=NRT&departuredate1=2019-02-01&adults=1&children=0&infants=0&AutoSubmit=Y¤cy=AUD'
# url = 'https://booking.jetstar.com/au/en/booking/search-flights?origin1=SYD&destination1=BNK&departuredate1=2019-02-02&adults=1&children=0&infants=0&AutoSubmit=Y¤cy=AUD'
# url = 'https://www.jetstar.com/au/en/home?origin=SYD&destination=BNK&flight-type=2&selected-departure-date=07-02-2019&adult=1&flexible=1¤cy=AUD'
#
# ip_port = genProxy().getHttpProxy()
# ip_port='ZLy5cF:[email protected]:9852'
# ip_port = 'lum-customer-zhanghua-zone-static-country-us:[email protected]:22225'
# proxies = {"http": "http://" + ip_port, "https": "https://" + ip_port}
# proxies = {"https": "https://" + ip_port}
response = self.session.get(url, headers=headers_302, timeout=30, verify=False,
allow_redirects=False)
# response = self.session.get(url, headers=headers_302, timeout=100, verify=False)
# print('130', response)
# url_302 = 'https://booking.jetstar.com/cn/zh/booking/select-flights'
url_302 = 'https://booking.jetstar.com/au/en/booking/select-flights'
proxies = {'http': 'http://localhost:8080'}
response = self.session.get(url_302, headers=headers_data, timeout=30, verify=False,
allow_redirects=False)
if response.status_code == 403:
logging.info('Access Denied!')
self.refreshCookies()
self.ip_sleep += 1
if self.ip_sleep > 5:
logging.info('# sleep 60s')
time.sleep(60)
self.ip_sleep = 0
return
self.ip_sleep = 0
# print('134', response)
self.parse(response.text, FROM, TO, invalid)
# except IndexError:
# if response.content.lower().find('<title>access denied</title>') != -1:
# logging.info('Access Denied!')
# self.refreshCookies()
# # if not self.dynamic and not self.proxy:
# # self.genProxy.getHttpProxy(True)
# self.spider_worker(url)
# return
# # traceback.print_exc()
# logging.info(url)
# logging.info("%s->%s no data,passed" % (FROM, TO))
# except Exception as e:
# # logging.info("%s->%s,%s,%s failed,try again" % (FROM, TO, 'requests.exceptions.Timeout',url))
# # traceback.print_exc()
# print e
# if not self.dynamic and self.proxy:
# self.genProxy.getHttpProxy(True)
# # self.refreshCookies()
# self.spider_worker(url)
# parse flight data out of the page
def parse(self, response, FROM, TO, invalid):
# logging.info('success get data!')
from_city = self.city_airport.get(FROM, FROM)
to_city = self.city_airport.get(TO, TO)
html = etree.HTML(response)
# f = open('123.txt', 'w')
# f.write(html)
# f.close()
# try:
currency = html.xpath('//div[@id="datalayer-data"]/@data-currency-code')[0]
# currency = html.xpath('//div[@id="datalayer-data"]/@data-currency-code')
# except Exception as e:
# print e
# print html.xpath('//div')[0].text
# print 226, html.xpath('//div[@id="tab-economy-SYD-NRT"]')
try:
eco_div = html.xpath('//div[@id="economy-%s-%s"]' % (FROM, TO))[0]
except:
return
# print 'eco: ', eco_div
display_div = eco_div.xpath('.//div[@class=" display-currency-%s"]/div[@class="row"]' % currency)[1:]
# row = [leg for leg in display_div if not leg.xpath('.//div[contains(@class, "fare__details--leg-1")]')]
row = [leg for leg in display_div]
for div in row:
# skip flights that are already sold out
try:
item = dict()
# try:
seats = div.xpath('.//span[@class="hot-fare"]')
if len(seats) == 0:
maxSeats = 9
else:
maxSeats = seats[0].text.split(' ')[0]
# no flights available
flight_info = div.xpath('.//div[@class="price-select__button"]/input/@data-price-breakdown')
if len(flight_info) == 0:
logging.info('# no flight')
self.task.append(invalid)
return
dataPrice = div.xpath('.//div[@class="price-select__button"]/input/@data-price-breakdown')[0]
dataPriceJson = json.loads(dataPrice)
adultPrice = float(dataPriceJson.get('TotalAmountDue')) // settings.ADULT_NUM # total amount due, floor-divided per adult
netFare = round(float(dataPriceJson.get('TotalFare')), 2) // settings.ADULT_NUM
depTime = div.xpath('.//div[@class="price-select__button"]/input/@data-departure-time')[0] # departure time
arrTime = div.xpath('.//div[@class="price-select__button"]/input/@data-arrival-time')[0] # arrival time
flightNumber = div.xpath('.//div[@class="price-select__button"]/input/@data-flightnumber')[0] # flight number
# connecting flight, skip it
if '-' in flightNumber:
logging.info('# is change')
continue
timegroup_str = div.xpath('.//div[@class="price-select__button"]/input/@id')[0]
timegroup = re.findall(r'(\d{2}/\d{2}/\d{4} \d{2}:\d{2})', timegroup_str)
depTimeStamp = time.mktime(time.strptime(timegroup[0], "%m/%d/%Y %H:%M")).__int__() # departure timestamp
arrTimeStamp = time.mktime(time.strptime(timegroup[1], "%m/%d/%Y %H:%M")).__int__() # arrival timestamp
item.update(dict(
adultPrice=adultPrice,
netFare=netFare,
depTime=depTimeStamp,
arrTime=arrTimeStamp,
flightNumber=flightNumber,
depAirport=FROM, # departure airport
arrAirport=TO, # arrival airport
cabin='ECO',
currency=currency,
fromCity=from_city,
toCity=to_city,
maxSeats=maxSeats,
isChange=1,
segments='[]',
getTime=time.mktime(datetime.now().timetuple()).__int__(),
))
item.update(dict(
adultTax=item["adultPrice"] - item["netFare"], # tax
carrier=item["flightNumber"][:2],
))
# except Exception as e:
# adultPrice = 0
# netFare = 0
# maxSeats = 0
# flightNumberTag = \
# div.xpath(
# './/div[contains(@class, "flight-info__flightNubmer")]/div[@class="medium-11"]/strong')[0]
# flightNumber = flightNumberTag.text
# depTimeTag = div.xpath('.//strong[@class="depaturestation"]')[0]
# arrTimeTag = div.xpath('.//strong[@class="arrivalstation"]')[0]
# depTimeContent = re.split(r'[\s\,\;\n\t]+', depTimeTag.text)
# arrTimeContent = re.split(r'[\s\,\;\n\t]+', arrTimeTag.text)
# depDateStr = ' '.join(depTimeContent[1:-1])
# arrDateStr = ' '.join(arrTimeContent[1:-1])
# depTimeStamp = time.mktime(time.strptime(depDateStr, "%A %d %B %Y %I:%M%p")).__int__()
# arrTimeStamp = time.mktime(time.strptime(arrDateStr, "%A %d %B %Y %I:%M%p")).__int__()
# print e
# continue
# finally:
# print(item)
self.process_item(item)
except:
print(FROM + '-->' + TO)
traceback.print_exc()
# buffer items and push them to storage
def process_item(self, item):
self.buffer.append(item)
if len(self.buffer) >= 5:
# # test database endpoint
# url = '%scarrier=%s' % (settings.PUSH_DATA_URL_TEST, item["carrier"])
# # production database endpoint
# # url = '%scarrier=%s' % (settings.PUSH_DATA_URL, item["carrier"])
# data = {
# "action": "add",
# "data": self.buffer
#
# }
# response = requests.post(url, data=json.dumps(data), timeout=2 * 60, verify=False)
# logging.info("%s,%s" % (response.content, len(self.buffer)))
url = dataUtil.get_random_url(settings.PUSH_DATA_URL)
add_success = pubUtil.addData('add', self.buffer, url, self.name, 'JQ')
self.item_num += len(self.buffer)
if add_success:
self.buffer = []
invalid_success = pubUtil.invalidData('invalid', self.task, url + 'carrier=%s' % 'JQ', self.name)
if invalid_success:
self.task = []
# send a heartbeat once per minute
run_time = time.time()
if run_time - self.now >= 60:
permins = self.item_num
self.item_num = 0
print(pubUtil.heartbeat('%s' % (self.name),
'jq', '%s' % self.num, permins, self.version))
self.now = run_time
# city-airport mapping
@staticmethod
def _city_airport():
api = 'http://dx.jiaoan100.com/br/portcity?carrier=JQ'
response = requests.get(api)
return json.loads(response.text).get('data')
@staticmethod
def _get_dates(day, num):
start_day = datetime.strptime(day, '%Y-%m-%d')
dates = []
# num = 1
for _day in range(int(num)):
dates.append((start_day + timedelta(_day)).strftime('%Y-%m-%d'))
return dates
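# e.g. _get_dates('2019-01-11', '3') -> ['2019-01-11', '2019-01-12', '2019-01-13']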
def run(self):
for url in self.start_request:
try:
self.spider_worker(url)
except:
traceback.print_exc()
pass
# ad-hoc test run without CLI arguments
name = 'hyn-test'
num = 1
proxy = False
dynamic = False
run = JQSpider(name, '1', proxy, dynamic)
run.run()
# if __name__ == '__main__':
# import sys, os
#
# argv = sys.argv
# # os.system('mitmdump -s ./mitmproxy_js/addons.py')
# name = argv[1]
# num = argv[2] if len(argv) > 2 else 1
# proxy = argv[3] if len(argv) > 3 else False
# local = argv[4] if len(argv) > 4 else False
#
# if local:
# if local.split('=')[0] == 'local':
# local = 1
# else:
# local = 0
# else:
# local = 0
# # dynamic = argv[4] if len(argv) > 4 else False
# # jq = JQSpider(name=argv[1], num=num, proxy=proxy, dynamic=dynamic)
# jq = JQSpider(name=argv[1], num=num, proxy=proxy, local=local)
# jq.run()
# ---- accounts/admin.py | repo: anandrajB/speedy-scanner | license: none ----
from django.contrib import admin
from django import forms
from django.contrib import admin
from django.contrib.auth.models import Group
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.auth.forms import ReadOnlyPasswordHashField
from .models import MyUser, Profile, File, Batch
class UserCreationForm(forms.ModelForm):
"""A form for creating new users. Includes all the required
fields, plus a repeated password."""
password1 = forms.CharField(label="Password", widget=forms.PasswordInput)
password2 = forms.CharField(
label="Password confirmation", widget=forms.PasswordInput
)
class Meta:
model = MyUser
fields = ("email", "phone")
def clean_password2(self):
# Check that the two password entries match
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
raise forms.ValidationError("Passwords don't match")
return password2
def save(self, commit=True):
# Save the provided password in hashed format
user = super().save(commit=False)
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
return user
class UserChangeForm(forms.ModelForm):
"""A form for updating users. Includes all the fields on
the user, but replaces the password field with admin's
password hash display field.
"""
password = ReadOnlyPasswordHashField()
class Meta:
model = MyUser
fields = ("email", "phone", "password", "is_active", "is_admin")
def clean_password(self):
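# Regardless of what the user provides, return the initial value.
# This is done here, rather than on the field, because the field does not
# have access to the initial value.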
return self.initial["password"]
class UserAdmin(BaseUserAdmin):
# The forms to add and change user instances
form = UserChangeForm
add_form = UserCreationForm
# The fields to be used in displaying the User model.
# These override the definitions on the base UserAdmin
# that reference specific fields on auth.User.
list_display = ("email", "phone", "is_admin")
list_filter = ("is_admin",)
fieldsets = (
(None, {"fields": ("email", "password")}),
("Personal info", {"fields": ("phone",)}),
("Permissions", {"fields": ("is_active", "is_admin",)}),
)
# add_fieldsets is not a standard ModelAdmin attribute. UserAdmin
# overrides get_fieldsets to use this attribute when creating a user.
add_fieldsets = (
(
None,
{
"classes": ("wide",),
"fields": ("email", "phone", "password1", "password2"),
},
),
)
search_fields = ("email",)
ordering = ("email",)
filter_horizontal = ()
# Now register the new UserAdmin...
admin.site.register(MyUser, UserAdmin)
# ... and, since we're not using Django's built-in permissions,
# unregister the Group model from admin.
admin.site.unregister(Group)
admin.site.register(Profile)
admin.site.register(Batch)
admin.site.register(File)
# ---- /pysnmp-with-texts/HUAWEI-TRNG-MIB.py | repo: agustinhenze/mibs.snmplabs.com | license: Apache-2.0 ----
#
# PySNMP MIB module HUAWEI-TRNG-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/HUAWEI-TRNG-MIB
# Produced by pysmi-0.3.4 at Wed May 1 13:49:07 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ValueSizeConstraint, SingleValueConstraint, ConstraintsUnion, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ValueSizeConstraint", "SingleValueConstraint", "ConstraintsUnion", "ValueRangeConstraint")
hwDatacomm, = mibBuilder.importSymbols("HUAWEI-MIB", "hwDatacomm")
ModuleCompliance, NotificationGroup, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup", "ObjectGroup")
Counter64, MibIdentifier, Counter32, iso, NotificationType, Gauge32, Bits, ObjectIdentity, Unsigned32, IpAddress, Integer32, TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, ModuleIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "Counter64", "MibIdentifier", "Counter32", "iso", "NotificationType", "Gauge32", "Bits", "ObjectIdentity", "Unsigned32", "IpAddress", "Integer32", "TimeTicks", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ModuleIdentity")
TextualConvention, DisplayString, TruthValue, DateAndTime, RowStatus = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString", "TruthValue", "DateAndTime", "RowStatus")
hwTRNG = ModuleIdentity((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13))
hwTRNG.setRevisions(('2011-03-22 00:00', '2003-04-11 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: hwTRNG.setRevisionsDescriptions(('V1.01, modified the description of hwTimerangePeriodicEndTimes. modified the description of hwTrngCreateTimerangeTable, hwTrngAbsoluteTable and hwTrngPeriodicTable . modified the errors of the MIB file. modified the description of leaves. modified the datatype definition and the format of the MIB script.', 'V1.00, initial revision of this MIB module.',))
if mibBuilder.loadTexts: hwTRNG.setLastUpdated('201103220000Z')
if mibBuilder.loadTexts: hwTRNG.setOrganization('Huawei Technologies Co.,Ltd.')
if mibBuilder.loadTexts: hwTRNG.setContactInfo("Huawei Industrial Base Bantian, Longgang Shenzhen 518129 People's Republic of China Website: http://www.huawei.com Email: [email protected] ")
if mibBuilder.loadTexts: hwTRNG.setDescription('The mib is used for configuring time range. When configuring the ACL rule, if you need to specify the time for the ACL rule to take effect, you need to run this command to configure a time range before specifying the time. After that, you can specify the time for an ACL to take effect by referring the time range name when configuring the ACL rule.')
hwTRNGMibObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1))
hwTrngCreateTimerangeTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 1), )
if mibBuilder.loadTexts: hwTrngCreateTimerangeTable.setStatus('current')
if mibBuilder.loadTexts: hwTrngCreateTimerangeTable.setDescription('Describes a time range. When configuring an ACL rule, set its effective time. To do so, configurate a time range first. After the configuration, the effective time is specified by referencing the time range when an ACL rule is being configured. An ACL time range can be a relative time range and an absolute time range. The index of this table is hwTrngIndex. ')
hwTrngCreateTimerangeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 1, 1), ).setIndexNames((0, "HUAWEI-TRNG-MIB", "hwTrngIndex"))
if mibBuilder.loadTexts: hwTrngCreateTimerangeEntry.setStatus('current')
if mibBuilder.loadTexts: hwTrngCreateTimerangeEntry.setDescription('Describes a time range. When configuring an ACL rule, set its effective time. To do so, configurate a time range first. After the configuration, the effective time is specified by referencing the time range when an ACL rule is being configured. An ACL time range can be a relative time range and an absolute time range. The index of this entry is hwTrngIndex. ')
hwTrngIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 256)))
if mibBuilder.loadTexts: hwTrngIndex.setStatus('current')
if mibBuilder.loadTexts: hwTrngIndex.setDescription('Uniquely identifies a time range. Range: 1-256 ')
hwTrngName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 1, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwTrngName.setStatus('current')
if mibBuilder.loadTexts: hwTrngName.setDescription('Indicates the character string of a time range name. It is used to identify different time ranges. The character string consists of 1-32 characters of letters and digits. No other characters can be included. ')
hwTrngValidFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 1, 1, 3), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwTrngValidFlag.setStatus('current')
if mibBuilder.loadTexts: hwTrngValidFlag.setDescription('Describes whether the current time range is valid, that is, whether the current time is within the specified time range. Options: 1. true(1) -if the current time is within the specified time range, the value is true(1), which indicates validity. 2. false(2) -if the current time is not within the specified time range, the value is false(2), which indicates invalidity. ')
hwTrngCreateRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 1, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwTrngCreateRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwTrngCreateRowStatus.setDescription('Indicates the row status. Options: 1. active(1) -when this leaf is queried, the value is fixed to active(1). 2. createAndGo(4) -add a time range 3. destroy(6) -delete a time range It is used for adding or deleting a time range. To add a time range, you must bind hwTrngName and set hwTrngCreateRowStatus to createAndGo(4). To delete a time range, set hwTrngCreateRowStatus to destroy(6). When this leaf is queried, the value is fixed to active(1). ')
hwTrngAbsoluteTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 2), )
if mibBuilder.loadTexts: hwTrngAbsoluteTable.setStatus('current')
if mibBuilder.loadTexts: hwTrngAbsoluteTable.setDescription('Describes an absolute time range. An absolute time range refers to the time range without a period. The time range is active from the specified start time and date to the end time and date. Otherwise, the time range is inactive. The indexes of this table are hwTrngAbsoluteNameIndex and hwTrngAbsoluteSubIndex. ')
hwTrngAbsoluteEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 2, 1), ).setIndexNames((0, "HUAWEI-TRNG-MIB", "hwTrngAbsoluteNameIndex"), (0, "HUAWEI-TRNG-MIB", "hwTrngAbsoluteSubIndex"))
if mibBuilder.loadTexts: hwTrngAbsoluteEntry.setStatus('current')
if mibBuilder.loadTexts: hwTrngAbsoluteEntry.setDescription('Describes an absolute time range. An absolute time range refers to the time range without a period. The time range is active from the specified start time and date to the end time and date. Otherwise, the time range is inactive. The indexes of this entry are hwTrngAbsoluteNameIndex and hwTrngAbsoluteSubIndex. ')
hwTrngAbsoluteNameIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 256)))
if mibBuilder.loadTexts: hwTrngAbsoluteNameIndex.setStatus('current')
if mibBuilder.loadTexts: hwTrngAbsoluteNameIndex.setDescription('Uniquely identifies a time range. Range: 1-256 The specified time range must be created in hwTrngCreateTimerangeTable. ')
hwTrngAbsoluteSubIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 12)))
if mibBuilder.loadTexts: hwTrngAbsoluteSubIndex.setStatus('current')
if mibBuilder.loadTexts: hwTrngAbsoluteSubIndex.setDescription('Uniquely identifies an absolute time range. Range: 1-12 ')
hwTimerangeAbsoluteStartTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 2, 1, 3), DateAndTime()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwTimerangeAbsoluteStartTime.setStatus('current')
if mibBuilder.loadTexts: hwTimerangeAbsoluteStartTime.setDescription('Indicates the start time of an absolute time range. It is an 8-byte hexadecimal numeral, where, the first two bytes indicate the year, the third byte indicates the month, the fourth byte indicates the day, the fifth byte indicates the hour, the six byte indicates the minute, and the seventh and eighth digits are reserved, which are filled in 0. For example, if the start time is 2010-1-10,8:10, the value is presented as 0x07 0xDA 0x01 0x0A 0x08 0x0A 0x00 0x00. The time range that the device can identify is 1970/01/01 00:00-2099/12/31 23:59. Therefore, the time value must be within the time range. ')
hwTimerangeAbsoluteEndTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 2, 1, 4), DateAndTime()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwTimerangeAbsoluteEndTime.setStatus('current')
if mibBuilder.loadTexts: hwTimerangeAbsoluteEndTime.setDescription('Indicates the end time of an absolute time range. The format is the same as that of hwTrngAbsoluteStartTime. The value of the end time must be larger than that of the start time. If the value is not specified, the system uses 2099/12/31 23:59 by default. ')
hwTimerangeAbsolueRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 2, 1, 5), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwTimerangeAbsolueRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwTimerangeAbsolueRowStatus.setDescription('Indicates the row status. Options: 1. active(1) -when this leaf is queried, the value is fixed to active(1). 2. createAndGo(4) -add an absolute time range 3. destroy(6) -delete an absolute time range It is used for adding or deleting an absolute time range. To add an absolute time range, enter hwTrngAbsoluteStartTime and set hwTrngAbsolueRowStatus to createAndGo(4). hwTrngAbsoluteEndTime is optional. To delete an absolute time range, set hwTrngAbsolueRowStatus to destroy(6). When this leaf is queried, the value is fixed to active(1). ')
hwTrngPeriodicTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 3), )
if mibBuilder.loadTexts: hwTrngPeriodicTable.setStatus('current')
if mibBuilder.loadTexts: hwTrngPeriodicTable.setDescription('Describes a relative time range. A relative time range refers to the time range with a period. When a time range is already created, only the specific time is specified but the date is set to a day from Monday to Sunday. The time range is active at the specified time and date. Otherwise, the time range is inactive. The indexes of this table are hwTrngPeriodicNameIndex and hwTrngPeriodicSubIndex. ')
hwTrngPeriodicEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 3, 1), ).setIndexNames((0, "HUAWEI-TRNG-MIB", "hwTrngPeriodicNameIndex"), (0, "HUAWEI-TRNG-MIB", "hwTrngPeriodicSubIndex"))
if mibBuilder.loadTexts: hwTrngPeriodicEntry.setStatus('current')
if mibBuilder.loadTexts: hwTrngPeriodicEntry.setDescription('Describes a relative time range. A relative time range refers to the time range with a period. When a time range is already created, only the specific time is specified but the date is set to a day from Monday to Sunday. The time range is active at the specified time and date. Otherwise, the time range is inactive. The indexes of this entry are hwTrngPeriodicNameIndex and hwTrngPeriodicSubIndex. ')
hwTrngPeriodicNameIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 256)))
if mibBuilder.loadTexts: hwTrngPeriodicNameIndex.setStatus('current')
if mibBuilder.loadTexts: hwTrngPeriodicNameIndex.setDescription('Uniquely identifies a relative time range. Range: 1-256 The specified time range must be created in hwTrngCreateTimerangeTable. ')
hwTrngPeriodicSubIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 32)))
if mibBuilder.loadTexts: hwTrngPeriodicSubIndex.setStatus('current')
if mibBuilder.loadTexts: hwTrngPeriodicSubIndex.setDescription('Uniquely identifies a relative time range. Range: 1-32 ')
hwTrngPeriodicDayofWeek = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 3, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 127))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwTrngPeriodicDayofWeek.setStatus('current')
if mibBuilder.loadTexts: hwTrngPeriodicDayofWeek.setDescription('Indicates the day of week within the periodic time range. The values are as follows: Sunday: 0x01 Monday: 0x02 Tuesday: 0x04 Wednesday: 0x08 Thursday: 0x10 Friday: 0x20 Saturday: 0x40 If the value is set to Sunday and Monday, perform the | operation to the values of Sunday and Monday, and the value is 0x03, and so on. ')
hwTimerangePeriodicStartTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 3, 1, 4), DateAndTime()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwTimerangePeriodicStartTime.setStatus('current')
if mibBuilder.loadTexts: hwTimerangePeriodicStartTime.setDescription('Indicates the start time of a periodic time range. The format is the same as that of hwTrngAbsoluteStartTime in hwTrngAbsoluteTable. The periodic time needs only the hour and minute values, and thus only the fifth and sixth bytes are used, where, the fifth byte indicates the hour value of the start time and the sixth byte indicates the minute value. Other bytes are reserved and are filled in 0. For example, if the start time is 08:30, the value is presented as 0x00 0x00 0x00 0x00 0x08 0x1E 0x00 0x00. The time must be from 00:00 to 24:00. ')
hwTimerangePeriodicEndTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 3, 1, 5), DateAndTime()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwTimerangePeriodicEndTime.setStatus('current')
if mibBuilder.loadTexts: hwTimerangePeriodicEndTime.setDescription('Indicates the end time of a periodic time range. The format is the same as that of hwTrngPeriodicStartTime. The value of the end time must be larger than that of the start time. The value must be from 00:00 to 24:00. The 7th byte is used only in the case of 23: 59: 60 to indicate the time 24: 00. ')
hwTimerangePeriodicRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 1, 3, 1, 6), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwTimerangePeriodicRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwTimerangePeriodicRowStatus.setDescription('Indicates the row status. Options: 1. active(1) -when this leaf is queried, the value is fixed to active(1). 2. createAndGo(4) -add a relative time range 3. destroy(6) -delete a relative time range It is used for adding or deleting a relative time range. To add a relative time range, enter hwTrngPeriodicStartTime and hwTrngPeriodicEndTime, and set hwTrngPeriodicRowStatus to createAndGo(4). To delete a relative time range, set hwTrngAbsolueRowStatus to destroy(6). When this leaf is queried, the value is fixed to active(1). ')
hwTRNGMibConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 3))
hwTRNGMibCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 3, 1))
hwTRNGMibCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 3, 1, 1)).setObjects(("HUAWEI-TRNG-MIB", "hwTRNGGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwTRNGMibCompliance = hwTRNGMibCompliance.setStatus('current')
if mibBuilder.loadTexts: hwTRNGMibCompliance.setDescription('The compliance statement for entities which implement the Huawei Time-range MIB.')
hwTRNGMibGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 3, 2))
hwTRNGGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 13, 3, 2, 1)).setObjects(("HUAWEI-TRNG-MIB", "hwTrngName"), ("HUAWEI-TRNG-MIB", "hwTrngValidFlag"), ("HUAWEI-TRNG-MIB", "hwTrngCreateRowStatus"), ("HUAWEI-TRNG-MIB", "hwTimerangeAbsoluteStartTime"), ("HUAWEI-TRNG-MIB", "hwTimerangeAbsoluteEndTime"), ("HUAWEI-TRNG-MIB", "hwTimerangeAbsolueRowStatus"), ("HUAWEI-TRNG-MIB", "hwTrngPeriodicDayofWeek"), ("HUAWEI-TRNG-MIB", "hwTimerangePeriodicStartTime"), ("HUAWEI-TRNG-MIB", "hwTimerangePeriodicEndTime"), ("HUAWEI-TRNG-MIB", "hwTimerangePeriodicRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwTRNGGroup = hwTRNGGroup.setStatus('current')
if mibBuilder.loadTexts: hwTRNGGroup.setDescription('A collection of objects providing mandatory time-range information.')
mibBuilder.exportSymbols("HUAWEI-TRNG-MIB", hwTrngAbsoluteSubIndex=hwTrngAbsoluteSubIndex, hwTrngCreateRowStatus=hwTrngCreateRowStatus, hwTrngPeriodicNameIndex=hwTrngPeriodicNameIndex, hwTRNGMibGroups=hwTRNGMibGroups, hwTrngAbsoluteNameIndex=hwTrngAbsoluteNameIndex, hwTrngIndex=hwTrngIndex, hwTimerangeAbsoluteStartTime=hwTimerangeAbsoluteStartTime, PYSNMP_MODULE_ID=hwTRNG, hwTrngPeriodicTable=hwTrngPeriodicTable, hwTrngAbsoluteEntry=hwTrngAbsoluteEntry, hwTRNG=hwTRNG, hwTRNGMibConformance=hwTRNGMibConformance, hwTrngPeriodicSubIndex=hwTrngPeriodicSubIndex, hwTrngPeriodicEntry=hwTrngPeriodicEntry, hwTrngValidFlag=hwTrngValidFlag, hwTrngPeriodicDayofWeek=hwTrngPeriodicDayofWeek, hwTRNGMibCompliance=hwTRNGMibCompliance, hwTrngCreateTimerangeTable=hwTrngCreateTimerangeTable, hwTrngName=hwTrngName, hwTimerangeAbsoluteEndTime=hwTimerangeAbsoluteEndTime, hwTimerangePeriodicEndTime=hwTimerangePeriodicEndTime, hwTRNGMibCompliances=hwTRNGMibCompliances, hwTimerangePeriodicRowStatus=hwTimerangePeriodicRowStatus, hwTRNGMibObjects=hwTRNGMibObjects, hwTrngCreateTimerangeEntry=hwTrngCreateTimerangeEntry, hwTRNGGroup=hwTRNGGroup, hwTimerangeAbsolueRowStatus=hwTimerangeAbsolueRowStatus, hwTrngAbsoluteTable=hwTrngAbsoluteTable, hwTimerangePeriodicStartTime=hwTimerangePeriodicStartTime)
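# Usage sketch (hedged): assumes this generated module sits on the pysnmp MIB
# search path under its module name 'HUAWEI-TRNG-MIB'.
#
#   from pysnmp.smi import builder, view
#   mibBuilder = builder.MibBuilder()
#   mibBuilder.loadModules('HUAWEI-TRNG-MIB')
#   mibView = view.MibViewController(mibBuilder)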
# ---- alexa/ask-sdk/ask_sdk_model/dialog/elicit_slot_directive.py | repo: blairharper/ISS-GoogleMap-project | license: none ----
# coding: utf-8
#
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file
# except in compliance with the License. A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
# the specific language governing permissions and limitations under the License.
#
import pprint
import re # noqa: F401
import six
import typing
from enum import Enum
from ask_sdk_model.directive import Directive
if typing.TYPE_CHECKING:
from typing import Dict, List, Optional
from datetime import datetime
from ask_sdk_model.intent import Intent
class ElicitSlotDirective(Directive):
"""
NOTE: This class is auto generated.
Do not edit the class manually.
:type updated_intent: (optional) ask_sdk_model.intent.Intent
:type slot_to_elicit: (optional) str
"""
deserialized_types = {
'object_type': 'str',
'updated_intent': 'ask_sdk_model.intent.Intent',
'slot_to_elicit': 'str'
}
attribute_map = {
'object_type': 'type',
'updated_intent': 'updatedIntent',
'slot_to_elicit': 'slotToElicit'
}
def __init__(self, updated_intent=None, slot_to_elicit=None): # noqa: E501
# type: (Optional[Intent], Optional[str]) -> None
"""
:type updated_intent: (optional) ask_sdk_model.intent.Intent
:type slot_to_elicit: (optional) str
"""
self.__discriminator_value = "Dialog.ElicitSlot"
self.object_type = self.__discriminator_value
super(ElicitSlotDirective, self).__init__(object_type=self.__discriminator_value) # noqa: E501
self.updated_intent = updated_intent
self.slot_to_elicit = slot_to_elicit
def to_dict(self):
# type: () -> Dict[str, object]
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.deserialized_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else
x.value if isinstance(x, Enum) else x,
value
))
elif isinstance(value, Enum):
result[attr] = value.value
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else
(item[0], item[1].value)
if isinstance(item[1], Enum) else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
# type: () -> str
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
# type: () -> str
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
# type: (object) -> bool
"""Returns true if both objects are equal"""
if not isinstance(other, ElicitSlotDirective):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
# type: (object) -> bool
"""Returns true if both objects are not equal"""
return not self == other
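# Usage sketch (hedged): the slot name and prompt are illustrative; in an
# ask-sdk-core request handler the directive is typically attached through the
# response builder.
#
#   directive = ElicitSlotDirective(slot_to_elicit='city')
#   handler_input.response_builder.speak('Which city?').add_directive(directive)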
# ---- aliyun-python-sdk-ice/aliyunsdkice/request/v20201109/UpdateCustomTemplateRequest.py | repo: aliyun/aliyun-openapi-python-sdk | license: Apache-2.0 ----
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkice.endpoint import endpoint_data
class UpdateCustomTemplateRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'ICE', '2020-11-09', 'UpdateCustomTemplate','ice')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_TemplateId(self): # String
return self.get_query_params().get('TemplateId')
def set_TemplateId(self, TemplateId): # String
self.add_query_param('TemplateId', TemplateId)
def get_TemplateConfig(self): # String
return self.get_query_params().get('TemplateConfig')
def set_TemplateConfig(self, TemplateConfig): # String
self.add_query_param('TemplateConfig', TemplateConfig)
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
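# Usage (hedged sketch): sending this request through the core ACS client.
# The credentials, region id, and template values below are placeholder
# assumptions; TemplateConfig is a JSON string defined by the ICE API.
#
#     from aliyunsdkcore.client import AcsClient
#     client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-shanghai')
#     request = UpdateCustomTemplateRequest()
#     request.set_TemplateId('<template-id>')
#     request.set_TemplateConfig('{"Name": "example"}')
#     response = client.do_action_with_exception(request)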
# ---- next file: /py/netmod_kar2.py
# ---- repo: felicitygong/MINLPinstances (no license)
# MINLP written by GAMS Convert at 11/10/17 15:35:22
#
# Equation counts
# Total E G L N X C B
# 667 43 0 624 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 457 321 136 0 0 0 0 0
# FX 0 0 0 0 0 0 0 0
#
# Nonzero counts
# Total const NL DLL
# 1849 1845 4 0
#
# Reformulation has removed 1 variable and 1 equation
from pyomo.environ import *
model = m = ConcreteModel()
m.x2 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x3 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x4 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x5 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x6 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x7 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x8 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x9 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x10 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x11 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x12 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x13 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x14 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x15 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x16 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x17 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x18 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x19 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x20 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x21 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x22 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x23 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x24 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x25 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x26 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x27 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x28 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x29 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x30 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x31 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x32 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x33 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x34 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x35 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x36 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x37 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x38 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x39 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x40 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x41 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x42 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x43 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x44 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x45 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x46 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x47 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x48 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x49 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x50 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x51 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x52 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x53 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x54 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x55 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x56 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x57 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x58 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x59 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x60 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x61 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x62 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x63 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x64 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x65 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x66 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x67 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x68 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x69 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x70 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x71 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x72 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x73 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x74 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x75 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x76 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x77 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x78 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x79 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x80 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x81 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x82 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x83 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x84 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x85 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x86 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x87 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x88 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x89 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x90 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x91 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x92 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x93 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x94 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x95 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x96 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x97 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x98 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x99 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x100 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x101 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x102 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x103 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x104 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x105 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x106 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x107 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x108 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x109 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x110 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x111 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x112 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x113 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x114 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x115 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x116 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x117 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x118 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x119 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x120 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x121 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x122 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x123 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x124 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x125 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x126 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x127 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x128 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x129 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x130 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x131 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x132 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x133 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x134 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x135 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x136 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x137 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x138 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x139 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x140 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x141 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x142 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x143 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x144 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x145 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x146 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x147 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x148 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x149 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x150 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x151 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x152 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x153 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x154 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x155 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x156 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x157 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x158 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x159 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x160 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x161 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x162 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x163 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x164 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x165 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x166 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x167 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x168 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x169 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x170 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x171 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x172 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x173 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x174 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x175 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x176 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x177 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x178 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x179 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x180 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x181 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x182 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x183 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x184 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x185 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x186 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x187 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x188 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x189 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x190 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x191 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x192 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x193 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x194 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x195 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x196 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x197 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x198 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x199 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x200 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x201 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x202 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x203 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x204 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x205 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x206 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x207 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x208 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x209 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x210 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x211 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x212 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x213 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x214 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x215 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x216 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x217 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x218 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x219 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x220 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x221 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x222 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x223 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x224 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x225 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x226 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x227 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x228 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x229 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x230 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x231 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x232 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x233 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x234 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x235 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x236 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x237 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x238 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x239 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x240 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x241 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x242 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x243 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x244 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x245 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x246 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x247 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x248 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x249 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x250 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x251 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x252 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x253 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x254 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x255 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x256 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x257 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x258 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x259 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x260 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x261 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x262 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x263 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x264 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x265 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x266 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x267 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x268 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x269 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x270 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x271 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x272 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x273 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x274 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x275 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x276 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x277 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x278 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x279 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x280 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x281 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x282 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x283 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x284 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x285 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x286 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x287 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x288 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x289 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x290 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x291 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x292 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x293 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x294 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x295 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x296 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x297 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x298 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x299 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x300 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x301 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x302 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x303 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x304 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x305 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x306 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x307 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x308 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x309 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x310 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x311 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x312 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x313 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x314 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x315 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x316 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x317 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x318 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x319 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x320 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x321 = Var(within=Reals,bounds=(None,None),initialize=0)
m.b322 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b323 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b324 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b325 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b326 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b327 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b328 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b329 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b330 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b331 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b332 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b333 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b334 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b335 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b336 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b337 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b338 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b339 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b340 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b341 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b342 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b343 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b344 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b345 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b346 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b347 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b348 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b349 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b350 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b351 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b352 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b353 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b354 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b355 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b356 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b357 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b358 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b359 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b360 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b361 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b362 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b363 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b364 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b365 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b366 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b367 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b368 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b369 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b370 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b371 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b372 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b373 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b374 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b375 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b376 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b377 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b378 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b379 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b380 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b381 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b382 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b383 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b384 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b385 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b386 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b387 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b388 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b389 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b390 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b391 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b392 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b393 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b394 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b395 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b396 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b397 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b398 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b399 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b400 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b401 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b402 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b403 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b404 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b405 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b406 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b407 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b408 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b409 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b410 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b411 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b412 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b413 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b414 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b415 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b416 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b417 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b418 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b419 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b420 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b421 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b422 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b423 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b424 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b425 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b426 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b427 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b428 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b429 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b430 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b431 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b432 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b433 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b434 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b435 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b436 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b437 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b438 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b439 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b440 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b441 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b442 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b443 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b444 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b445 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b446 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b447 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b448 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b449 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b450 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b451 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b452 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b453 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b454 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b455 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b456 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b457 = Var(within=Binary,bounds=(0,1),initialize=0)
m.obj = Objective(expr=-(-(0.00641025641025641*m.x6)**2 - (0.00641025641025641*m.x7)**2 - (0.00641025641025641*m.x8)**2
- (0.00641025641025641*m.x9)**2) - 0.0128205128205128*m.x2 - 0.0128205128205128*m.x3
- 0.0128205128205128*m.x4 - 0.0128205128205128*m.x5, sense=minimize)
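# Solving (hedged sketch): once every constraint below has been declared,
# an instance like this is usually handed to a MINLP-capable solver.
# SolverFactory comes with the pyomo.environ import above; the solver
# name 'bonmin' is an assumption about the local installation.
#
#     results = SolverFactory('bonmin').solve(m, tee=True)
#     print(results.solver.termination_condition)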
m.c2 = Constraint(expr= m.b322 + m.b323 + m.b324 + m.b325 == 1)
m.c3 = Constraint(expr= m.b326 + m.b327 + m.b328 + m.b329 == 1)
m.c4 = Constraint(expr= m.b330 + m.b331 + m.b332 + m.b333 == 1)
m.c5 = Constraint(expr= m.b334 + m.b335 + m.b336 + m.b337 == 1)
m.c6 = Constraint(expr= m.b338 + m.b339 + m.b340 + m.b341 == 1)
m.c7 = Constraint(expr= m.b342 + m.b343 + m.b344 + m.b345 == 1)
m.c8 = Constraint(expr= m.b346 + m.b347 + m.b348 + m.b349 == 1)
m.c9 = Constraint(expr= m.b350 + m.b351 + m.b352 + m.b353 == 1)
m.c10 = Constraint(expr= m.b354 + m.b355 + m.b356 + m.b357 == 1)
m.c11 = Constraint(expr= m.b358 + m.b359 + m.b360 + m.b361 == 1)
m.c12 = Constraint(expr= m.b362 + m.b363 + m.b364 + m.b365 == 1)
m.c13 = Constraint(expr= m.b366 + m.b367 + m.b368 + m.b369 == 1)
m.c14 = Constraint(expr= m.b370 + m.b371 + m.b372 + m.b373 == 1)
m.c15 = Constraint(expr= m.b374 + m.b375 + m.b376 + m.b377 == 1)
m.c16 = Constraint(expr= m.b378 + m.b379 + m.b380 + m.b381 == 1)
m.c17 = Constraint(expr= m.b382 + m.b383 + m.b384 + m.b385 == 1)
m.c18 = Constraint(expr= m.b386 + m.b387 + m.b388 + m.b389 == 1)
m.c19 = Constraint(expr= m.b390 + m.b391 + m.b392 + m.b393 == 1)
m.c20 = Constraint(expr= m.b394 + m.b395 + m.b396 + m.b397 == 1)
m.c21 = Constraint(expr= m.b398 + m.b399 + m.b400 + m.b401 == 1)
m.c22 = Constraint(expr= m.b402 + m.b403 + m.b404 + m.b405 == 1)
m.c23 = Constraint(expr= m.b406 + m.b407 + m.b408 + m.b409 == 1)
m.c24 = Constraint(expr= m.b410 + m.b411 + m.b412 + m.b413 == 1)
m.c25 = Constraint(expr= m.b414 + m.b415 + m.b416 + m.b417 == 1)
m.c26 = Constraint(expr= m.b418 + m.b419 + m.b420 + m.b421 == 1)
m.c27 = Constraint(expr= m.b422 + m.b423 + m.b424 + m.b425 == 1)
m.c28 = Constraint(expr= m.b426 + m.b427 + m.b428 + m.b429 == 1)
m.c29 = Constraint(expr= m.b430 + m.b431 + m.b432 + m.b433 == 1)
m.c30 = Constraint(expr= m.b434 + m.b435 + m.b436 + m.b437 == 1)
m.c31 = Constraint(expr= m.b438 + m.b439 + m.b440 + m.b441 == 1)
m.c32 = Constraint(expr= m.b442 + m.b443 + m.b444 + m.b445 == 1)
m.c33 = Constraint(expr= m.b446 + m.b447 + m.b448 + m.b449 == 1)
m.c34 = Constraint(expr= m.b450 + m.b451 + m.b452 + m.b453 == 1)
m.c35 = Constraint(expr= m.b454 + m.b455 + m.b456 + m.b457 == 1)
m.c36 = Constraint(expr= m.x10 - m.b322 <= 0)
m.c37 = Constraint(expr= m.x11 - m.b323 <= 0)
m.c38 = Constraint(expr= m.x12 - m.b324 <= 0)
m.c39 = Constraint(expr= m.x13 - m.b325 <= 0)
m.c40 = Constraint(expr= m.x14 - m.b322 <= 0)
m.c41 = Constraint(expr= m.x15 - m.b323 <= 0)
m.c42 = Constraint(expr= m.x16 - m.b324 <= 0)
m.c43 = Constraint(expr= m.x17 - m.b325 <= 0)
m.c44 = Constraint(expr= m.x18 - m.b322 <= 0)
m.c45 = Constraint(expr= m.x19 - m.b323 <= 0)
m.c46 = Constraint(expr= m.x20 - m.b324 <= 0)
m.c47 = Constraint(expr= m.x21 - m.b325 <= 0)
m.c48 = Constraint(expr= m.x22 - m.b322 <= 0)
m.c49 = Constraint(expr= m.x23 - m.b323 <= 0)
m.c50 = Constraint(expr= m.x24 - m.b324 <= 0)
m.c51 = Constraint(expr= m.x25 - m.b325 <= 0)
m.c52 = Constraint(expr= m.x26 - m.b322 <= 0)
m.c53 = Constraint(expr= m.x27 - m.b323 <= 0)
m.c54 = Constraint(expr= m.x28 - m.b324 <= 0)
m.c55 = Constraint(expr= m.x29 - m.b325 <= 0)
m.c56 = Constraint(expr= m.x30 - m.b322 <= 0)
m.c57 = Constraint(expr= m.x31 - m.b323 <= 0)
m.c58 = Constraint(expr= m.x32 - m.b324 <= 0)
m.c59 = Constraint(expr= m.x33 - m.b325 <= 0)
m.c60 = Constraint(expr= m.x34 - m.b322 <= 0)
m.c61 = Constraint(expr= m.x35 - m.b323 <= 0)
m.c62 = Constraint(expr= m.x36 - m.b324 <= 0)
m.c63 = Constraint(expr= m.x37 - m.b325 <= 0)
m.c64 = Constraint(expr= m.x38 - m.b322 <= 0)
m.c65 = Constraint(expr= m.x39 - m.b323 <= 0)
m.c66 = Constraint(expr= m.x40 - m.b324 <= 0)
m.c67 = Constraint(expr= m.x41 - m.b325 <= 0)
m.c68 = Constraint(expr= m.x42 - m.b322 <= 0)
m.c69 = Constraint(expr= m.x43 - m.b323 <= 0)
m.c70 = Constraint(expr= m.x44 - m.b324 <= 0)
m.c71 = Constraint(expr= m.x45 - m.b325 <= 0)
m.c72 = Constraint(expr= m.x46 - m.b322 <= 0)
m.c73 = Constraint(expr= m.x47 - m.b323 <= 0)
m.c74 = Constraint(expr= m.x48 - m.b324 <= 0)
m.c75 = Constraint(expr= m.x49 - m.b325 <= 0)
m.c76 = Constraint(expr= m.x50 - m.b322 <= 0)
m.c77 = Constraint(expr= m.x51 - m.b323 <= 0)
m.c78 = Constraint(expr= m.x52 - m.b324 <= 0)
m.c79 = Constraint(expr= m.x53 - m.b325 <= 0)
m.c80 = Constraint(expr= m.x54 - m.b322 <= 0)
m.c81 = Constraint(expr= m.x55 - m.b323 <= 0)
m.c82 = Constraint(expr= m.x56 - m.b324 <= 0)
m.c83 = Constraint(expr= m.x57 - m.b325 <= 0)
m.c84 = Constraint(expr= m.x58 - m.b322 <= 0)
m.c85 = Constraint(expr= m.x59 - m.b323 <= 0)
m.c86 = Constraint(expr= m.x60 - m.b324 <= 0)
m.c87 = Constraint(expr= m.x61 - m.b325 <= 0)
m.c88 = Constraint(expr= m.x62 - m.b322 <= 0)
m.c89 = Constraint(expr= m.x63 - m.b323 <= 0)
m.c90 = Constraint(expr= m.x64 - m.b324 <= 0)
m.c91 = Constraint(expr= m.x65 - m.b325 <= 0)
m.c92 = Constraint(expr= m.x66 - m.b322 <= 0)
m.c93 = Constraint(expr= m.x67 - m.b323 <= 0)
m.c94 = Constraint(expr= m.x68 - m.b324 <= 0)
m.c95 = Constraint(expr= m.x69 - m.b325 <= 0)
m.c96 = Constraint(expr= m.x70 - m.b322 <= 0)
m.c97 = Constraint(expr= m.x71 - m.b323 <= 0)
m.c98 = Constraint(expr= m.x72 - m.b324 <= 0)
m.c99 = Constraint(expr= m.x73 - m.b325 <= 0)
m.c100 = Constraint(expr= m.x74 - m.b322 <= 0)
m.c101 = Constraint(expr= m.x75 - m.b323 <= 0)
m.c102 = Constraint(expr= m.x76 - m.b324 <= 0)
m.c103 = Constraint(expr= m.x77 - m.b325 <= 0)
m.c104 = Constraint(expr= m.x78 - m.b330 <= 0)
m.c105 = Constraint(expr= m.x79 - m.b331 <= 0)
m.c106 = Constraint(expr= m.x80 - m.b332 <= 0)
m.c107 = Constraint(expr= m.x81 - m.b333 <= 0)
m.c108 = Constraint(expr= m.x82 - m.b330 <= 0)
m.c109 = Constraint(expr= m.x83 - m.b331 <= 0)
m.c110 = Constraint(expr= m.x84 - m.b332 <= 0)
m.c111 = Constraint(expr= m.x85 - m.b333 <= 0)
m.c112 = Constraint(expr= m.x86 - m.b330 <= 0)
m.c113 = Constraint(expr= m.x87 - m.b331 <= 0)
m.c114 = Constraint(expr= m.x88 - m.b332 <= 0)
m.c115 = Constraint(expr= m.x89 - m.b333 <= 0)
m.c116 = Constraint(expr= m.x90 - m.b330 <= 0)
m.c117 = Constraint(expr= m.x91 - m.b331 <= 0)
m.c118 = Constraint(expr= m.x92 - m.b332 <= 0)
m.c119 = Constraint(expr= m.x93 - m.b333 <= 0)
m.c120 = Constraint(expr= m.x94 - m.b330 <= 0)
m.c121 = Constraint(expr= m.x95 - m.b331 <= 0)
m.c122 = Constraint(expr= m.x96 - m.b332 <= 0)
m.c123 = Constraint(expr= m.x97 - m.b333 <= 0)
m.c124 = Constraint(expr= m.x98 - m.b330 <= 0)
m.c125 = Constraint(expr= m.x99 - m.b331 <= 0)
m.c126 = Constraint(expr= m.x100 - m.b332 <= 0)
m.c127 = Constraint(expr= m.x101 - m.b333 <= 0)
m.c128 = Constraint(expr= m.x102 - m.b330 <= 0)
m.c129 = Constraint(expr= m.x103 - m.b331 <= 0)
m.c130 = Constraint(expr= m.x104 - m.b332 <= 0)
m.c131 = Constraint(expr= m.x105 - m.b333 <= 0)
m.c132 = Constraint(expr= m.x106 - m.b330 <= 0)
m.c133 = Constraint(expr= m.x107 - m.b331 <= 0)
m.c134 = Constraint(expr= m.x108 - m.b332 <= 0)
m.c135 = Constraint(expr= m.x109 - m.b333 <= 0)
m.c136 = Constraint(expr= m.x110 - m.b330 <= 0)
m.c137 = Constraint(expr= m.x111 - m.b331 <= 0)
m.c138 = Constraint(expr= m.x112 - m.b332 <= 0)
m.c139 = Constraint(expr= m.x113 - m.b333 <= 0)
m.c140 = Constraint(expr= m.x114 - m.b330 <= 0)
m.c141 = Constraint(expr= m.x115 - m.b331 <= 0)
m.c142 = Constraint(expr= m.x116 - m.b332 <= 0)
m.c143 = Constraint(expr= m.x117 - m.b333 <= 0)
m.c144 = Constraint(expr= m.x118 - m.b330 <= 0)
m.c145 = Constraint(expr= m.x119 - m.b331 <= 0)
m.c146 = Constraint(expr= m.x120 - m.b332 <= 0)
m.c147 = Constraint(expr= m.x121 - m.b333 <= 0)
m.c148 = Constraint(expr= m.x122 - m.b334 <= 0)
m.c149 = Constraint(expr= m.x123 - m.b335 <= 0)
m.c150 = Constraint(expr= m.x124 - m.b336 <= 0)
m.c151 = Constraint(expr= m.x125 - m.b337 <= 0)
m.c152 = Constraint(expr= m.x126 - m.b334 <= 0)
m.c153 = Constraint(expr= m.x127 - m.b335 <= 0)
m.c154 = Constraint(expr= m.x128 - m.b336 <= 0)
m.c155 = Constraint(expr= m.x129 - m.b337 <= 0)
m.c156 = Constraint(expr= m.x130 - m.b338 <= 0)
m.c157 = Constraint(expr= m.x131 - m.b339 <= 0)
m.c158 = Constraint(expr= m.x132 - m.b340 <= 0)
m.c159 = Constraint(expr= m.x133 - m.b341 <= 0)
m.c160 = Constraint(expr= m.x134 - m.b342 <= 0)
m.c161 = Constraint(expr= m.x135 - m.b343 <= 0)
m.c162 = Constraint(expr= m.x136 - m.b344 <= 0)
m.c163 = Constraint(expr= m.x137 - m.b345 <= 0)
m.c164 = Constraint(expr= m.x138 - m.b342 <= 0)
m.c165 = Constraint(expr= m.x139 - m.b343 <= 0)
m.c166 = Constraint(expr= m.x140 - m.b344 <= 0)
m.c167 = Constraint(expr= m.x141 - m.b345 <= 0)
m.c168 = Constraint(expr= m.x142 - m.b342 <= 0)
m.c169 = Constraint(expr= m.x143 - m.b343 <= 0)
m.c170 = Constraint(expr= m.x144 - m.b344 <= 0)
m.c171 = Constraint(expr= m.x145 - m.b345 <= 0)
m.c172 = Constraint(expr= m.x146 - m.b346 <= 0)
m.c173 = Constraint(expr= m.x147 - m.b347 <= 0)
m.c174 = Constraint(expr= m.x148 - m.b348 <= 0)
m.c175 = Constraint(expr= m.x149 - m.b349 <= 0)
m.c176 = Constraint(expr= m.x150 - m.b346 <= 0)
m.c177 = Constraint(expr= m.x151 - m.b347 <= 0)
m.c178 = Constraint(expr= m.x152 - m.b348 <= 0)
m.c179 = Constraint(expr= m.x153 - m.b349 <= 0)
m.c180 = Constraint(expr= m.x154 - m.b346 <= 0)
m.c181 = Constraint(expr= m.x155 - m.b347 <= 0)
m.c182 = Constraint(expr= m.x156 - m.b348 <= 0)
m.c183 = Constraint(expr= m.x157 - m.b349 <= 0)
m.c184 = Constraint(expr= m.x158 - m.b346 <= 0)
m.c185 = Constraint(expr= m.x159 - m.b347 <= 0)
m.c186 = Constraint(expr= m.x160 - m.b348 <= 0)
m.c187 = Constraint(expr= m.x161 - m.b349 <= 0)
m.c188 = Constraint(expr= m.x162 - m.b350 <= 0)
m.c189 = Constraint(expr= m.x163 - m.b351 <= 0)
m.c190 = Constraint(expr= m.x164 - m.b352 <= 0)
m.c191 = Constraint(expr= m.x165 - m.b353 <= 0)
m.c192 = Constraint(expr= m.x166 - m.b350 <= 0)
m.c193 = Constraint(expr= m.x167 - m.b351 <= 0)
m.c194 = Constraint(expr= m.x168 - m.b352 <= 0)
m.c195 = Constraint(expr= m.x169 - m.b353 <= 0)
m.c196 = Constraint(expr= m.x170 - m.b354 <= 0)
m.c197 = Constraint(expr= m.x171 - m.b355 <= 0)
m.c198 = Constraint(expr= m.x172 - m.b356 <= 0)
m.c199 = Constraint(expr= m.x173 - m.b357 <= 0)
m.c200 = Constraint(expr= m.x174 - m.b354 <= 0)
m.c201 = Constraint(expr= m.x175 - m.b355 <= 0)
m.c202 = Constraint(expr= m.x176 - m.b356 <= 0)
m.c203 = Constraint(expr= m.x177 - m.b357 <= 0)
m.c204 = Constraint(expr= m.x178 - m.b354 <= 0)
m.c205 = Constraint(expr= m.x179 - m.b355 <= 0)
m.c206 = Constraint(expr= m.x180 - m.b356 <= 0)
m.c207 = Constraint(expr= m.x181 - m.b357 <= 0)
m.c208 = Constraint(expr= m.x182 - m.b354 <= 0)
m.c209 = Constraint(expr= m.x183 - m.b355 <= 0)
m.c210 = Constraint(expr= m.x184 - m.b356 <= 0)
m.c211 = Constraint(expr= m.x185 - m.b357 <= 0)
m.c212 = Constraint(expr= m.x186 - m.b362 <= 0)
m.c213 = Constraint(expr= m.x187 - m.b363 <= 0)
m.c214 = Constraint(expr= m.x188 - m.b364 <= 0)
m.c215 = Constraint(expr= m.x189 - m.b365 <= 0)
m.c216 = Constraint(expr= m.x190 - m.b366 <= 0)
m.c217 = Constraint(expr= m.x191 - m.b367 <= 0)
m.c218 = Constraint(expr= m.x192 - m.b368 <= 0)
m.c219 = Constraint(expr= m.x193 - m.b369 <= 0)
m.c220 = Constraint(expr= m.x194 - m.b366 <= 0)
m.c221 = Constraint(expr= m.x195 - m.b367 <= 0)
m.c222 = Constraint(expr= m.x196 - m.b368 <= 0)
m.c223 = Constraint(expr= m.x197 - m.b369 <= 0)
m.c224 = Constraint(expr= m.x198 - m.b366 <= 0)
m.c225 = Constraint(expr= m.x199 - m.b367 <= 0)
m.c226 = Constraint(expr= m.x200 - m.b368 <= 0)
m.c227 = Constraint(expr= m.x201 - m.b369 <= 0)
m.c228 = Constraint(expr= m.x202 - m.b370 <= 0)
m.c229 = Constraint(expr= m.x203 - m.b371 <= 0)
m.c230 = Constraint(expr= m.x204 - m.b372 <= 0)
m.c231 = Constraint(expr= m.x205 - m.b373 <= 0)
m.c232 = Constraint(expr= m.x206 - m.b370 <= 0)
m.c233 = Constraint(expr= m.x207 - m.b371 <= 0)
m.c234 = Constraint(expr= m.x208 - m.b372 <= 0)
m.c235 = Constraint(expr= m.x209 - m.b373 <= 0)
m.c236 = Constraint(expr= m.x210 - m.b370 <= 0)
m.c237 = Constraint(expr= m.x211 - m.b371 <= 0)
m.c238 = Constraint(expr= m.x212 - m.b372 <= 0)
m.c239 = Constraint(expr= m.x213 - m.b373 <= 0)
m.c240 = Constraint(expr= m.x214 - m.b370 <= 0)
m.c241 = Constraint(expr= m.x215 - m.b371 <= 0)
m.c242 = Constraint(expr= m.x216 - m.b372 <= 0)
m.c243 = Constraint(expr= m.x217 - m.b373 <= 0)
m.c244 = Constraint(expr= m.x218 - m.b374 <= 0)
m.c245 = Constraint(expr= m.x219 - m.b375 <= 0)
m.c246 = Constraint(expr= m.x220 - m.b376 <= 0)
m.c247 = Constraint(expr= m.x221 - m.b377 <= 0)
m.c248 = Constraint(expr= m.x222 - m.b374 <= 0)
m.c249 = Constraint(expr= m.x223 - m.b375 <= 0)
m.c250 = Constraint(expr= m.x224 - m.b376 <= 0)
m.c251 = Constraint(expr= m.x225 - m.b377 <= 0)
m.c252 = Constraint(expr= m.x226 - m.b374 <= 0)
m.c253 = Constraint(expr= m.x227 - m.b375 <= 0)
m.c254 = Constraint(expr= m.x228 - m.b376 <= 0)
m.c255 = Constraint(expr= m.x229 - m.b377 <= 0)
m.c256 = Constraint(expr= m.x230 - m.b378 <= 0)
m.c257 = Constraint(expr= m.x231 - m.b379 <= 0)
m.c258 = Constraint(expr= m.x232 - m.b380 <= 0)
m.c259 = Constraint(expr= m.x233 - m.b381 <= 0)
m.c260 = Constraint(expr= m.x234 - m.b378 <= 0)
m.c261 = Constraint(expr= m.x235 - m.b379 <= 0)
m.c262 = Constraint(expr= m.x236 - m.b380 <= 0)
m.c263 = Constraint(expr= m.x237 - m.b381 <= 0)
m.c264 = Constraint(expr= m.x238 - m.b382 <= 0)
m.c265 = Constraint(expr= m.x239 - m.b383 <= 0)
m.c266 = Constraint(expr= m.x240 - m.b384 <= 0)
m.c267 = Constraint(expr= m.x241 - m.b385 <= 0)
m.c268 = Constraint(expr= m.x242 - m.b382 <= 0)
m.c269 = Constraint(expr= m.x243 - m.b383 <= 0)
m.c270 = Constraint(expr= m.x244 - m.b384 <= 0)
m.c271 = Constraint(expr= m.x245 - m.b385 <= 0)
m.c272 = Constraint(expr= m.x246 - m.b386 <= 0)
m.c273 = Constraint(expr= m.x247 - m.b387 <= 0)
m.c274 = Constraint(expr= m.x248 - m.b388 <= 0)
m.c275 = Constraint(expr= m.x249 - m.b389 <= 0)
m.c276 = Constraint(expr= m.x250 - m.b390 <= 0)
m.c277 = Constraint(expr= m.x251 - m.b391 <= 0)
m.c278 = Constraint(expr= m.x252 - m.b392 <= 0)
m.c279 = Constraint(expr= m.x253 - m.b393 <= 0)
m.c280 = Constraint(expr= m.x254 - m.b390 <= 0)
m.c281 = Constraint(expr= m.x255 - m.b391 <= 0)
m.c282 = Constraint(expr= m.x256 - m.b392 <= 0)
m.c283 = Constraint(expr= m.x257 - m.b393 <= 0)
m.c284 = Constraint(expr= m.x258 - m.b390 <= 0)
m.c285 = Constraint(expr= m.x259 - m.b391 <= 0)
m.c286 = Constraint(expr= m.x260 - m.b392 <= 0)
m.c287 = Constraint(expr= m.x261 - m.b393 <= 0)
m.c288 = Constraint(expr= m.x262 - m.b394 <= 0)
m.c289 = Constraint(expr= m.x263 - m.b395 <= 0)
m.c290 = Constraint(expr= m.x264 - m.b396 <= 0)
m.c291 = Constraint(expr= m.x265 - m.b397 <= 0)
m.c292 = Constraint(expr= m.x266 - m.b394 <= 0)
m.c293 = Constraint(expr= m.x267 - m.b395 <= 0)
m.c294 = Constraint(expr= m.x268 - m.b396 <= 0)
m.c295 = Constraint(expr= m.x269 - m.b397 <= 0)
m.c296 = Constraint(expr= m.x270 - m.b402 <= 0)
m.c297 = Constraint(expr= m.x271 - m.b403 <= 0)
m.c298 = Constraint(expr= m.x272 - m.b404 <= 0)
m.c299 = Constraint(expr= m.x273 - m.b405 <= 0)
m.c300 = Constraint(expr= m.x274 - m.b402 <= 0)
m.c301 = Constraint(expr= m.x275 - m.b403 <= 0)
m.c302 = Constraint(expr= m.x276 - m.b404 <= 0)
m.c303 = Constraint(expr= m.x277 - m.b405 <= 0)
m.c304 = Constraint(expr= m.x278 - m.b406 <= 0)
m.c305 = Constraint(expr= m.x279 - m.b407 <= 0)
m.c306 = Constraint(expr= m.x280 - m.b408 <= 0)
m.c307 = Constraint(expr= m.x281 - m.b409 <= 0)
m.c308 = Constraint(expr= m.x282 - m.b410 <= 0)
m.c309 = Constraint(expr= m.x283 - m.b411 <= 0)
m.c310 = Constraint(expr= m.x284 - m.b412 <= 0)
m.c311 = Constraint(expr= m.x285 - m.b413 <= 0)
m.c312 = Constraint(expr= m.x286 - m.b414 <= 0)
m.c313 = Constraint(expr= m.x287 - m.b415 <= 0)
m.c314 = Constraint(expr= m.x288 - m.b416 <= 0)
m.c315 = Constraint(expr= m.x289 - m.b417 <= 0)
m.c316 = Constraint(expr= m.x290 - m.b414 <= 0)
m.c317 = Constraint(expr= m.x291 - m.b415 <= 0)
m.c318 = Constraint(expr= m.x292 - m.b416 <= 0)
m.c319 = Constraint(expr= m.x293 - m.b417 <= 0)
m.c320 = Constraint(expr= m.x294 - m.b426 <= 0)
m.c321 = Constraint(expr= m.x295 - m.b427 <= 0)
m.c322 = Constraint(expr= m.x296 - m.b428 <= 0)
m.c323 = Constraint(expr= m.x297 - m.b429 <= 0)
m.c324 = Constraint(expr= m.x298 - m.b426 <= 0)
m.c325 = Constraint(expr= m.x299 - m.b427 <= 0)
m.c326 = Constraint(expr= m.x300 - m.b428 <= 0)
m.c327 = Constraint(expr= m.x301 - m.b429 <= 0)
m.c328 = Constraint(expr= m.x302 - m.b430 <= 0)
m.c329 = Constraint(expr= m.x303 - m.b431 <= 0)
m.c330 = Constraint(expr= m.x304 - m.b432 <= 0)
m.c331 = Constraint(expr= m.x305 - m.b433 <= 0)
m.c332 = Constraint(expr= m.x306 - m.b430 <= 0)
m.c333 = Constraint(expr= m.x307 - m.b431 <= 0)
m.c334 = Constraint(expr= m.x308 - m.b432 <= 0)
m.c335 = Constraint(expr= m.x309 - m.b433 <= 0)
m.c336 = Constraint(expr= m.x310 - m.b442 <= 0)
m.c337 = Constraint(expr= m.x311 - m.b443 <= 0)
m.c338 = Constraint(expr= m.x312 - m.b444 <= 0)
m.c339 = Constraint(expr= m.x313 - m.b445 <= 0)
m.c340 = Constraint(expr= m.x314 - m.b442 <= 0)
m.c341 = Constraint(expr= m.x315 - m.b443 <= 0)
m.c342 = Constraint(expr= m.x316 - m.b444 <= 0)
m.c343 = Constraint(expr= m.x317 - m.b445 <= 0)
m.c344 = Constraint(expr= m.x318 - m.b454 <= 0)
m.c345 = Constraint(expr= m.x319 - m.b455 <= 0)
m.c346 = Constraint(expr= m.x320 - m.b456 <= 0)
m.c347 = Constraint(expr= m.x321 - m.b457 <= 0)
m.c348 = Constraint(expr= m.x10 - m.b330 <= 0)
m.c349 = Constraint(expr= m.x11 - m.b331 <= 0)
m.c350 = Constraint(expr= m.x12 - m.b332 <= 0)
m.c351 = Constraint(expr= m.x13 - m.b333 <= 0)
m.c352 = Constraint(expr= m.x14 - m.b346 <= 0)
m.c353 = Constraint(expr= m.x15 - m.b347 <= 0)
m.c354 = Constraint(expr= m.x16 - m.b348 <= 0)
m.c355 = Constraint(expr= m.x17 - m.b349 <= 0)
m.c356 = Constraint(expr= m.x18 - m.b350 <= 0)
m.c357 = Constraint(expr= m.x19 - m.b351 <= 0)
m.c358 = Constraint(expr= m.x20 - m.b352 <= 0)
m.c359 = Constraint(expr= m.x21 - m.b353 <= 0)
m.c360 = Constraint(expr= m.x22 - m.b354 <= 0)
m.c361 = Constraint(expr= m.x23 - m.b355 <= 0)
m.c362 = Constraint(expr= m.x24 - m.b356 <= 0)
m.c363 = Constraint(expr= m.x25 - m.b357 <= 0)
m.c364 = Constraint(expr= m.x26 - m.b358 <= 0)
m.c365 = Constraint(expr= m.x27 - m.b359 <= 0)
m.c366 = Constraint(expr= m.x28 - m.b360 <= 0)
m.c367 = Constraint(expr= m.x29 - m.b361 <= 0)
m.c368 = Constraint(expr= m.x30 - m.b374 <= 0)
m.c369 = Constraint(expr= m.x31 - m.b375 <= 0)
m.c370 = Constraint(expr= m.x32 - m.b376 <= 0)
m.c371 = Constraint(expr= m.x33 - m.b377 <= 0)
m.c372 = Constraint(expr= m.x34 - m.b378 <= 0)
m.c373 = Constraint(expr= m.x35 - m.b379 <= 0)
m.c374 = Constraint(expr= m.x36 - m.b380 <= 0)
m.c375 = Constraint(expr= m.x37 - m.b381 <= 0)
m.c376 = Constraint(expr= m.x38 - m.b382 <= 0)
m.c377 = Constraint(expr= m.x39 - m.b383 <= 0)
m.c378 = Constraint(expr= m.x40 - m.b384 <= 0)
m.c379 = Constraint(expr= m.x41 - m.b385 <= 0)
m.c380 = Constraint(expr= m.x42 - m.b394 <= 0)
m.c381 = Constraint(expr= m.x43 - m.b395 <= 0)
m.c382 = Constraint(expr= m.x44 - m.b396 <= 0)
m.c383 = Constraint(expr= m.x45 - m.b397 <= 0)
m.c384 = Constraint(expr= m.x46 - m.b406 <= 0)
m.c385 = Constraint(expr= m.x47 - m.b407 <= 0)
m.c386 = Constraint(expr= m.x48 - m.b408 <= 0)
m.c387 = Constraint(expr= m.x49 - m.b409 <= 0)
m.c388 = Constraint(expr= m.x50 - m.b410 <= 0)
m.c389 = Constraint(expr= m.x51 - m.b411 <= 0)
m.c390 = Constraint(expr= m.x52 - m.b412 <= 0)
m.c391 = Constraint(expr= m.x53 - m.b413 <= 0)
m.c392 = Constraint(expr= m.x54 - m.b418 <= 0)
m.c393 = Constraint(expr= m.x55 - m.b419 <= 0)
m.c394 = Constraint(expr= m.x56 - m.b420 <= 0)
m.c395 = Constraint(expr= m.x57 - m.b421 <= 0)
m.c396 = Constraint(expr= m.x58 - m.b422 <= 0)
m.c397 = Constraint(expr= m.x59 - m.b423 <= 0)
m.c398 = Constraint(expr= m.x60 - m.b424 <= 0)
m.c399 = Constraint(expr= m.x61 - m.b425 <= 0)
m.c400 = Constraint(expr= m.x62 - m.b434 <= 0)
m.c401 = Constraint(expr= m.x63 - m.b435 <= 0)
m.c402 = Constraint(expr= m.x64 - m.b436 <= 0)
m.c403 = Constraint(expr= m.x65 - m.b437 <= 0)
m.c404 = Constraint(expr= m.x66 - m.b438 <= 0)
m.c405 = Constraint(expr= m.x67 - m.b439 <= 0)
m.c406 = Constraint(expr= m.x68 - m.b440 <= 0)
m.c407 = Constraint(expr= m.x69 - m.b441 <= 0)
m.c408 = Constraint(expr= m.x70 - m.b446 <= 0)
m.c409 = Constraint(expr= m.x71 - m.b447 <= 0)
m.c410 = Constraint(expr= m.x72 - m.b448 <= 0)
m.c411 = Constraint(expr= m.x73 - m.b449 <= 0)
m.c412 = Constraint(expr= m.x74 - m.b450 <= 0)
m.c413 = Constraint(expr= m.x75 - m.b451 <= 0)
m.c414 = Constraint(expr= m.x76 - m.b452 <= 0)
m.c415 = Constraint(expr= m.x77 - m.b453 <= 0)
m.c416 = Constraint(expr= m.x78 - m.b334 <= 0)
m.c417 = Constraint(expr= m.x79 - m.b335 <= 0)
m.c418 = Constraint(expr= m.x80 - m.b336 <= 0)
m.c419 = Constraint(expr= m.x81 - m.b337 <= 0)
m.c420 = Constraint(expr= m.x82 - m.b346 <= 0)
m.c421 = Constraint(expr= m.x83 - m.b347 <= 0)
m.c422 = Constraint(expr= m.x84 - m.b348 <= 0)
m.c423 = Constraint(expr= m.x85 - m.b349 <= 0)
m.c424 = Constraint(expr= m.x86 - m.b350 <= 0)
m.c425 = Constraint(expr= m.x87 - m.b351 <= 0)
m.c426 = Constraint(expr= m.x88 - m.b352 <= 0)
m.c427 = Constraint(expr= m.x89 - m.b353 <= 0)
m.c428 = Constraint(expr= m.x90 - m.b358 <= 0)
m.c429 = Constraint(expr= m.x91 - m.b359 <= 0)
m.c430 = Constraint(expr= m.x92 - m.b360 <= 0)
m.c431 = Constraint(expr= m.x93 - m.b361 <= 0)
m.c432 = Constraint(expr= m.x94 - m.b378 <= 0)
m.c433 = Constraint(expr= m.x95 - m.b379 <= 0)
m.c434 = Constraint(expr= m.x96 - m.b380 <= 0)
m.c435 = Constraint(expr= m.x97 - m.b381 <= 0)
m.c436 = Constraint(expr= m.x98 - m.b382 <= 0)
m.c437 = Constraint(expr= m.x99 - m.b383 <= 0)
m.c438 = Constraint(expr= m.x100 - m.b384 <= 0)
m.c439 = Constraint(expr= m.x101 - m.b385 <= 0)
m.c440 = Constraint(expr= m.x102 - m.b418 <= 0)
m.c441 = Constraint(expr= m.x103 - m.b419 <= 0)
m.c442 = Constraint(expr= m.x104 - m.b420 <= 0)
m.c443 = Constraint(expr= m.x105 - m.b421 <= 0)
m.c444 = Constraint(expr= m.x106 - m.b422 <= 0)
m.c445 = Constraint(expr= m.x107 - m.b423 <= 0)
m.c446 = Constraint(expr= m.x108 - m.b424 <= 0)
m.c447 = Constraint(expr= m.x109 - m.b425 <= 0)
m.c448 = Constraint(expr= m.x110 - m.b434 <= 0)
m.c449 = Constraint(expr= m.x111 - m.b435 <= 0)
m.c450 = Constraint(expr= m.x112 - m.b436 <= 0)
m.c451 = Constraint(expr= m.x113 - m.b437 <= 0)
m.c452 = Constraint(expr= m.x114 - m.b438 <= 0)
m.c453 = Constraint(expr= m.x115 - m.b439 <= 0)
m.c454 = Constraint(expr= m.x116 - m.b440 <= 0)
m.c455 = Constraint(expr= m.x117 - m.b441 <= 0)
m.c456 = Constraint(expr= m.x118 - m.b446 <= 0)
m.c457 = Constraint(expr= m.x119 - m.b447 <= 0)
m.c458 = Constraint(expr= m.x120 - m.b448 <= 0)
m.c459 = Constraint(expr= m.x121 - m.b449 <= 0)
m.c460 = Constraint(expr= m.x122 - m.b326 <= 0)
m.c461 = Constraint(expr= m.x123 - m.b327 <= 0)
m.c462 = Constraint(expr= m.x124 - m.b328 <= 0)
m.c463 = Constraint(expr= m.x125 - m.b329 <= 0)
m.c464 = Constraint(expr= m.x126 - m.b338 <= 0)
m.c465 = Constraint(expr= m.x127 - m.b339 <= 0)
m.c466 = Constraint(expr= m.x128 - m.b340 <= 0)
m.c467 = Constraint(expr= m.x129 - m.b341 <= 0)
m.c468 = Constraint(expr= m.x130 - m.b326 <= 0)
m.c469 = Constraint(expr= m.x131 - m.b327 <= 0)
m.c470 = Constraint(expr= m.x132 - m.b328 <= 0)
m.c471 = Constraint(expr= m.x133 - m.b329 <= 0)
m.c472 = Constraint(expr= m.x134 - m.b326 <= 0)
m.c473 = Constraint(expr= m.x135 - m.b327 <= 0)
m.c474 = Constraint(expr= m.x136 - m.b328 <= 0)
m.c475 = Constraint(expr= m.x137 - m.b329 <= 0)
m.c476 = Constraint(expr= m.x138 - m.b334 <= 0)
m.c477 = Constraint(expr= m.x139 - m.b335 <= 0)
m.c478 = Constraint(expr= m.x140 - m.b336 <= 0)
m.c479 = Constraint(expr= m.x141 - m.b337 <= 0)
m.c480 = Constraint(expr= m.x142 - m.b338 <= 0)
m.c481 = Constraint(expr= m.x143 - m.b339 <= 0)
m.c482 = Constraint(expr= m.x144 - m.b340 <= 0)
m.c483 = Constraint(expr= m.x145 - m.b341 <= 0)
m.c484 = Constraint(expr= m.x146 - m.b326 <= 0)
m.c485 = Constraint(expr= m.x147 - m.b327 <= 0)
m.c486 = Constraint(expr= m.x148 - m.b328 <= 0)
m.c487 = Constraint(expr= m.x149 - m.b329 <= 0)
m.c488 = Constraint(expr= m.x150 - m.b398 <= 0)
m.c489 = Constraint(expr= m.x151 - m.b399 <= 0)
m.c490 = Constraint(expr= m.x152 - m.b400 <= 0)
m.c491 = Constraint(expr= m.x153 - m.b401 <= 0)
m.c492 = Constraint(expr= m.x154 - m.b402 <= 0)
m.c493 = Constraint(expr= m.x155 - m.b403 <= 0)
m.c494 = Constraint(expr= m.x156 - m.b404 <= 0)
m.c495 = Constraint(expr= m.x157 - m.b405 <= 0)
m.c496 = Constraint(expr= m.x158 - m.b406 <= 0)
m.c497 = Constraint(expr= m.x159 - m.b407 <= 0)
m.c498 = Constraint(expr= m.x160 - m.b408 <= 0)
m.c499 = Constraint(expr= m.x161 - m.b409 <= 0)
m.c500 = Constraint(expr= m.x162 - m.b326 <= 0)
m.c501 = Constraint(expr= m.x163 - m.b327 <= 0)
m.c502 = Constraint(expr= m.x164 - m.b328 <= 0)
m.c503 = Constraint(expr= m.x165 - m.b329 <= 0)
m.c504 = Constraint(expr= m.x166 - m.b334 <= 0)
m.c505 = Constraint(expr= m.x167 - m.b335 <= 0)
m.c506 = Constraint(expr= m.x168 - m.b336 <= 0)
m.c507 = Constraint(expr= m.x169 - m.b337 <= 0)
m.c508 = Constraint(expr= m.x170 - m.b326 <= 0)
m.c509 = Constraint(expr= m.x171 - m.b327 <= 0)
m.c510 = Constraint(expr= m.x172 - m.b328 <= 0)
m.c511 = Constraint(expr= m.x173 - m.b329 <= 0)
m.c512 = Constraint(expr= m.x174 - m.b334 <= 0)
m.c513 = Constraint(expr= m.x175 - m.b335 <= 0)
m.c514 = Constraint(expr= m.x176 - m.b336 <= 0)
m.c515 = Constraint(expr= m.x177 - m.b337 <= 0)
m.c516 = Constraint(expr= m.x178 - m.b338 <= 0)
m.c517 = Constraint(expr= m.x179 - m.b339 <= 0)
m.c518 = Constraint(expr= m.x180 - m.b340 <= 0)
m.c519 = Constraint(expr= m.x181 - m.b341 <= 0)
m.c520 = Constraint(expr= m.x182 - m.b342 <= 0)
m.c521 = Constraint(expr= m.x183 - m.b343 <= 0)
m.c522 = Constraint(expr= m.x184 - m.b344 <= 0)
m.c523 = Constraint(expr= m.x185 - m.b345 <= 0)
m.c524 = Constraint(expr= m.x186 - m.b326 <= 0)
m.c525 = Constraint(expr= m.x187 - m.b327 <= 0)
m.c526 = Constraint(expr= m.x188 - m.b328 <= 0)
m.c527 = Constraint(expr= m.x189 - m.b329 <= 0)
m.c528 = Constraint(expr= m.x190 - m.b326 <= 0)
m.c529 = Constraint(expr= m.x191 - m.b327 <= 0)
m.c530 = Constraint(expr= m.x192 - m.b328 <= 0)
m.c531 = Constraint(expr= m.x193 - m.b329 <= 0)
m.c532 = Constraint(expr= m.x194 - m.b362 <= 0)
m.c533 = Constraint(expr= m.x195 - m.b363 <= 0)
m.c534 = Constraint(expr= m.x196 - m.b364 <= 0)
m.c535 = Constraint(expr= m.x197 - m.b365 <= 0)
m.c536 = Constraint(expr= m.x198 - m.b386 <= 0)
m.c537 = Constraint(expr= m.x199 - m.b387 <= 0)
m.c538 = Constraint(expr= m.x200 - m.b388 <= 0)
m.c539 = Constraint(expr= m.x201 - m.b389 <= 0)
m.c540 = Constraint(expr= m.x202 - m.b326 <= 0)
m.c541 = Constraint(expr= m.x203 - m.b327 <= 0)
m.c542 = Constraint(expr= m.x204 - m.b328 <= 0)
m.c543 = Constraint(expr= m.x205 - m.b329 <= 0)
m.c544 = Constraint(expr= m.x206 - m.b334 <= 0)
m.c545 = Constraint(expr= m.x207 - m.b335 <= 0)
m.c546 = Constraint(expr= m.x208 - m.b336 <= 0)
m.c547 = Constraint(expr= m.x209 - m.b337 <= 0)
m.c548 = Constraint(expr= m.x210 - m.b338 <= 0)
m.c549 = Constraint(expr= m.x211 - m.b339 <= 0)
m.c550 = Constraint(expr= m.x212 - m.b340 <= 0)
m.c551 = Constraint(expr= m.x213 - m.b341 <= 0)
m.c552 = Constraint(expr= m.x214 - m.b342 <= 0)
m.c553 = Constraint(expr= m.x215 - m.b343 <= 0)
m.c554 = Constraint(expr= m.x216 - m.b344 <= 0)
m.c555 = Constraint(expr= m.x217 - m.b345 <= 0)
m.c556 = Constraint(expr= m.x218 - m.b334 <= 0)
m.c557 = Constraint(expr= m.x219 - m.b335 <= 0)
m.c558 = Constraint(expr= m.x220 - m.b336 <= 0)
m.c559 = Constraint(expr= m.x221 - m.b337 <= 0)
m.c560 = Constraint(expr= m.x222 - m.b358 <= 0)
m.c561 = Constraint(expr= m.x223 - m.b359 <= 0)
m.c562 = Constraint(expr= m.x224 - m.b360 <= 0)
m.c563 = Constraint(expr= m.x225 - m.b361 <= 0)
m.c564 = Constraint(expr= m.x226 - m.b398 <= 0)
m.c565 = Constraint(expr= m.x227 - m.b399 <= 0)
m.c566 = Constraint(expr= m.x228 - m.b400 <= 0)
m.c567 = Constraint(expr= m.x229 - m.b401 <= 0)
m.c568 = Constraint(expr= m.x230 - m.b358 <= 0)
m.c569 = Constraint(expr= m.x231 - m.b359 <= 0)
m.c570 = Constraint(expr= m.x232 - m.b360 <= 0)
m.c571 = Constraint(expr= m.x233 - m.b361 <= 0)
m.c572 = Constraint(expr= m.x234 - m.b450 <= 0)
m.c573 = Constraint(expr= m.x235 - m.b451 <= 0)
m.c574 = Constraint(expr= m.x236 - m.b452 <= 0)
m.c575 = Constraint(expr= m.x237 - m.b453 <= 0)
m.c576 = Constraint(expr= m.x238 - m.b338 <= 0)
m.c577 = Constraint(expr= m.x239 - m.b339 <= 0)
m.c578 = Constraint(expr= m.x240 - m.b340 <= 0)
m.c579 = Constraint(expr= m.x241 - m.b341 <= 0)
m.c580 = Constraint(expr= m.x242 - m.b350 <= 0)
m.c581 = Constraint(expr= m.x243 - m.b351 <= 0)
m.c582 = Constraint(expr= m.x244 - m.b352 <= 0)
m.c583 = Constraint(expr= m.x245 - m.b353 <= 0)
m.c584 = Constraint(expr= m.x246 - m.b326 <= 0)
m.c585 = Constraint(expr= m.x247 - m.b327 <= 0)
m.c586 = Constraint(expr= m.x248 - m.b328 <= 0)
m.c587 = Constraint(expr= m.x249 - m.b329 <= 0)
m.c588 = Constraint(expr= m.x250 - m.b326 <= 0)
m.c589 = Constraint(expr= m.x251 - m.b327 <= 0)
m.c590 = Constraint(expr= m.x252 - m.b328 <= 0)
m.c591 = Constraint(expr= m.x253 - m.b329 <= 0)
m.c592 = Constraint(expr= m.x254 - m.b362 <= 0)
m.c593 = Constraint(expr= m.x255 - m.b363 <= 0)
m.c594 = Constraint(expr= m.x256 - m.b364 <= 0)
m.c595 = Constraint(expr= m.x257 - m.b365 <= 0)
m.c596 = Constraint(expr= m.x258 - m.b386 <= 0)
m.c597 = Constraint(expr= m.x259 - m.b387 <= 0)
m.c598 = Constraint(expr= m.x260 - m.b388 <= 0)
m.c599 = Constraint(expr= m.x261 - m.b389 <= 0)
m.c600 = Constraint(expr= m.x262 - m.b326 <= 0)
m.c601 = Constraint(expr= m.x263 - m.b327 <= 0)
m.c602 = Constraint(expr= m.x264 - m.b328 <= 0)
m.c603 = Constraint(expr= m.x265 - m.b329 <= 0)
m.c604 = Constraint(expr= m.x266 - m.b338 <= 0)
m.c605 = Constraint(expr= m.x267 - m.b339 <= 0)
m.c606 = Constraint(expr= m.x268 - m.b340 <= 0)
m.c607 = Constraint(expr= m.x269 - m.b341 <= 0)
m.c608 = Constraint(expr= m.x270 - m.b358 <= 0)
m.c609 = Constraint(expr= m.x271 - m.b359 <= 0)
m.c610 = Constraint(expr= m.x272 - m.b360 <= 0)
m.c611 = Constraint(expr= m.x273 - m.b361 <= 0)
m.c612 = Constraint(expr= m.x274 - m.b398 <= 0)
m.c613 = Constraint(expr= m.x275 - m.b399 <= 0)
m.c614 = Constraint(expr= m.x276 - m.b400 <= 0)
m.c615 = Constraint(expr= m.x277 - m.b401 <= 0)
m.c616 = Constraint(expr= m.x278 - m.b334 <= 0)
m.c617 = Constraint(expr= m.x279 - m.b335 <= 0)
m.c618 = Constraint(expr= m.x280 - m.b336 <= 0)
m.c619 = Constraint(expr= m.x281 - m.b337 <= 0)
m.c620 = Constraint(expr= m.x282 - m.b334 <= 0)
m.c621 = Constraint(expr= m.x283 - m.b335 <= 0)
m.c622 = Constraint(expr= m.x284 - m.b336 <= 0)
m.c623 = Constraint(expr= m.x285 - m.b337 <= 0)
m.c624 = Constraint(expr= m.x286 - m.b326 <= 0)
m.c625 = Constraint(expr= m.x287 - m.b327 <= 0)
m.c626 = Constraint(expr= m.x288 - m.b328 <= 0)
m.c627 = Constraint(expr= m.x289 - m.b329 <= 0)
m.c628 = Constraint(expr= m.x290 - m.b342 <= 0)
m.c629 = Constraint(expr= m.x291 - m.b343 <= 0)
m.c630 = Constraint(expr= m.x292 - m.b344 <= 0)
m.c631 = Constraint(expr= m.x293 - m.b345 <= 0)
m.c632 = Constraint(expr= m.x294 - m.b362 <= 0)
m.c633 = Constraint(expr= m.x295 - m.b363 <= 0)
m.c634 = Constraint(expr= m.x296 - m.b364 <= 0)
m.c635 = Constraint(expr= m.x297 - m.b365 <= 0)
m.c636 = Constraint(expr= m.x298 - m.b366 <= 0)
m.c637 = Constraint(expr= m.x299 - m.b367 <= 0)
m.c638 = Constraint(expr= m.x300 - m.b368 <= 0)
m.c639 = Constraint(expr= m.x301 - m.b369 <= 0)
m.c640 = Constraint(expr= m.x302 - m.b326 <= 0)
m.c641 = Constraint(expr= m.x303 - m.b327 <= 0)
m.c642 = Constraint(expr= m.x304 - m.b328 <= 0)
m.c643 = Constraint(expr= m.x305 - m.b329 <= 0)
m.c644 = Constraint(expr= m.x306 - m.b338 <= 0)
m.c645 = Constraint(expr= m.x307 - m.b339 <= 0)
m.c646 = Constraint(expr= m.x308 - m.b340 <= 0)
m.c647 = Constraint(expr= m.x309 - m.b341 <= 0)
m.c648 = Constraint(expr= m.x310 - m.b326 <= 0)
m.c649 = Constraint(expr= m.x311 - m.b327 <= 0)
m.c650 = Constraint(expr= m.x312 - m.b328 <= 0)
m.c651 = Constraint(expr= m.x313 - m.b329 <= 0)
m.c652 = Constraint(expr= m.x314 - m.b338 <= 0)
m.c653 = Constraint(expr= m.x315 - m.b339 <= 0)
m.c654 = Constraint(expr= m.x316 - m.b340 <= 0)
m.c655 = Constraint(expr= m.x317 - m.b341 <= 0)
m.c656 = Constraint(expr= m.x318 - m.b326 <= 0)
m.c657 = Constraint(expr= m.x319 - m.b327 <= 0)
m.c658 = Constraint(expr= m.x320 - m.b328 <= 0)
m.c659 = Constraint(expr= m.x321 - m.b329 <= 0)
m.c660 = Constraint(expr= m.x2 - m.x10 - m.x14 - m.x18 - m.x22 - m.x26 - m.x30 - m.x34 - m.x38 - m.x42 - m.x46 - m.x50
- m.x54 - m.x58 - m.x62 - m.x66 - m.x70 - m.x74 - m.x78 - m.x82 - m.x86 - m.x90 - m.x94
- m.x98 - m.x102 - m.x106 - m.x110 - m.x114 - m.x118 - m.x122 - m.x126 - m.x130 - m.x134
- m.x138 - m.x142 - m.x146 - m.x150 - m.x154 - m.x158 - m.x162 - m.x166 - m.x170 - m.x174
- m.x178 - m.x182 - m.x186 - m.x190 - m.x194 - m.x198 - m.x202 - m.x206 - m.x210 - m.x214
- m.x218 - m.x222 - m.x226 - m.x230 - m.x234 - m.x238 - m.x242 - m.x246 - m.x250 - m.x254
- m.x258 - m.x262 - m.x266 - m.x270 - m.x274 - m.x278 - m.x282 - m.x286 - m.x290 - m.x294
- m.x298 - m.x302 - m.x306 - m.x310 - m.x314 - m.x318 == 0)
m.c661 = Constraint(expr= m.x3 - m.x11 - m.x15 - m.x19 - m.x23 - m.x27 - m.x31 - m.x35 - m.x39 - m.x43 - m.x47 - m.x51
- m.x55 - m.x59 - m.x63 - m.x67 - m.x71 - m.x75 - m.x79 - m.x83 - m.x87 - m.x91 - m.x95
- m.x99 - m.x103 - m.x107 - m.x111 - m.x115 - m.x119 - m.x123 - m.x127 - m.x131 - m.x135
- m.x139 - m.x143 - m.x147 - m.x151 - m.x155 - m.x159 - m.x163 - m.x167 - m.x171 - m.x175
- m.x179 - m.x183 - m.x187 - m.x191 - m.x195 - m.x199 - m.x203 - m.x207 - m.x211 - m.x215
- m.x219 - m.x223 - m.x227 - m.x231 - m.x235 - m.x239 - m.x243 - m.x247 - m.x251 - m.x255
- m.x259 - m.x263 - m.x267 - m.x271 - m.x275 - m.x279 - m.x283 - m.x287 - m.x291 - m.x295
- m.x299 - m.x303 - m.x307 - m.x311 - m.x315 - m.x319 == 0)
m.c662 = Constraint(expr= m.x4 - m.x12 - m.x16 - m.x20 - m.x24 - m.x28 - m.x32 - m.x36 - m.x40 - m.x44 - m.x48 - m.x52
- m.x56 - m.x60 - m.x64 - m.x68 - m.x72 - m.x76 - m.x80 - m.x84 - m.x88 - m.x92 - m.x96
- m.x100 - m.x104 - m.x108 - m.x112 - m.x116 - m.x120 - m.x124 - m.x128 - m.x132 - m.x136
- m.x140 - m.x144 - m.x148 - m.x152 - m.x156 - m.x160 - m.x164 - m.x168 - m.x172 - m.x176
- m.x180 - m.x184 - m.x188 - m.x192 - m.x196 - m.x200 - m.x204 - m.x208 - m.x212 - m.x216
- m.x220 - m.x224 - m.x228 - m.x232 - m.x236 - m.x240 - m.x244 - m.x248 - m.x252 - m.x256
- m.x260 - m.x264 - m.x268 - m.x272 - m.x276 - m.x280 - m.x284 - m.x288 - m.x292 - m.x296
- m.x300 - m.x304 - m.x308 - m.x312 - m.x316 - m.x320 == 0)
m.c663 = Constraint(expr= m.x5 - m.x13 - m.x17 - m.x21 - m.x25 - m.x29 - m.x33 - m.x37 - m.x41 - m.x45 - m.x49 - m.x53
- m.x57 - m.x61 - m.x65 - m.x69 - m.x73 - m.x77 - m.x81 - m.x85 - m.x89 - m.x93 - m.x97
- m.x101 - m.x105 - m.x109 - m.x113 - m.x117 - m.x121 - m.x125 - m.x129 - m.x133 - m.x137
- m.x141 - m.x145 - m.x149 - m.x153 - m.x157 - m.x161 - m.x165 - m.x169 - m.x173 - m.x177
- m.x181 - m.x185 - m.x189 - m.x193 - m.x197 - m.x201 - m.x205 - m.x209 - m.x213 - m.x217
- m.x221 - m.x225 - m.x229 - m.x233 - m.x237 - m.x241 - m.x245 - m.x249 - m.x253 - m.x257
- m.x261 - m.x265 - m.x269 - m.x273 - m.x277 - m.x281 - m.x285 - m.x289 - m.x293 - m.x297
- m.x301 - m.x305 - m.x309 - m.x313 - m.x317 - m.x321 == 0)
m.c664 = Constraint(expr= m.x6 - 17*m.b322 - 16*m.b326 - 12*m.b330 - 10*m.b334 - 9*m.b338 - 6*m.b342 - 6*m.b346
- 5*m.b350 - 5*m.b354 - 5*m.b358 - 4*m.b362 - 4*m.b366 - 4*m.b370 - 4*m.b374 - 4*m.b378
- 4*m.b382 - 3*m.b386 - 3*m.b390 - 3*m.b394 - 3*m.b398 - 3*m.b402 - 3*m.b406 - 2*m.b410
- 2*m.b414 - 2*m.b418 - 2*m.b422 - 2*m.b426 - 2*m.b430 - 2*m.b434 - 2*m.b438 - 2*m.b442
- 2*m.b446 - 2*m.b450 - m.b454 == 0)
m.c665 = Constraint(expr= m.x7 - 17*m.b323 - 16*m.b327 - 12*m.b331 - 10*m.b335 - 9*m.b339 - 6*m.b343 - 6*m.b347
- 5*m.b351 - 5*m.b355 - 5*m.b359 - 4*m.b363 - 4*m.b367 - 4*m.b371 - 4*m.b375 - 4*m.b379
- 4*m.b383 - 3*m.b387 - 3*m.b391 - 3*m.b395 - 3*m.b399 - 3*m.b403 - 3*m.b407 - 2*m.b411
- 2*m.b415 - 2*m.b419 - 2*m.b423 - 2*m.b427 - 2*m.b431 - 2*m.b435 - 2*m.b439 - 2*m.b443
- 2*m.b447 - 2*m.b451 - m.b455 == 0)
m.c666 = Constraint(expr= m.x8 - 17*m.b324 - 16*m.b328 - 12*m.b332 - 10*m.b336 - 9*m.b340 - 6*m.b344 - 6*m.b348
- 5*m.b352 - 5*m.b356 - 5*m.b360 - 4*m.b364 - 4*m.b368 - 4*m.b372 - 4*m.b376 - 4*m.b380
- 4*m.b384 - 3*m.b388 - 3*m.b392 - 3*m.b396 - 3*m.b400 - 3*m.b404 - 3*m.b408 - 2*m.b412
- 2*m.b416 - 2*m.b420 - 2*m.b424 - 2*m.b428 - 2*m.b432 - 2*m.b436 - 2*m.b440 - 2*m.b444
- 2*m.b448 - 2*m.b452 - m.b456 == 0)
m.c667 = Constraint(expr= m.x9 - 17*m.b325 - 16*m.b329 - 12*m.b333 - 10*m.b337 - 9*m.b341 - 6*m.b345 - 6*m.b349
- 5*m.b353 - 5*m.b357 - 5*m.b361 - 4*m.b365 - 4*m.b369 - 4*m.b373 - 4*m.b377 - 4*m.b381
- 4*m.b385 - 3*m.b389 - 3*m.b393 - 3*m.b397 - 3*m.b401 - 3*m.b405 - 3*m.b409 - 2*m.b413
- 2*m.b417 - 2*m.b421 - 2*m.b425 - 2*m.b429 - 2*m.b433 - 2*m.b437 - 2*m.b441 - 2*m.b445
- 2*m.b449 - 2*m.b453 - m.b457 == 0)
| [
"[email protected]"
] | |
6d4f999516d1bf9e5181244fe6b40e4b59277e3c | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/4/lkf.py | d19b23b9ac9f5e630d69a188c39ceef2922114b2 | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
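# Interpreter for a generated toy language: every input line whose first
# token is 'lkf' prints the quoted tokens that follow it; any other line
# prints ERROR and stops processing. (Python 2 print syntax.)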
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'lkF':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1]) | [
"[email protected]"
] | |
3e29d4d7c333026e5344ef9516e21f5e220cfd24 | f98de2db6b24d30d64f1145c7d8da4a40385a87f | /packages/grid_control_cms/lumi_tools.py | 50eb266c862e2c20ae303976fae5474ea14c2247 | [] | no_license | greyxray/grid-control | f9f453491fe7bc506d4cfc240afaa364ba9db84b | ed10fdb6ff604006a5d52dcd43c2e55c9e962c0a | refs/heads/master | 2020-04-15T13:15:21.103357 | 2019-01-08T18:23:07 | 2019-01-08T18:23:07 | 164,709,043 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,665 | py | # | Copyright 2010-2016 Karlsruhe Institute of Technology
# |
# | Licensed under the Apache License, Version 2.0 (the "License");
# | you may not use this file except in compliance with the License.
# | You may obtain a copy of the License at
# |
# | http://www.apache.org/licenses/LICENSE-2.0
# |
# | Unless required by applicable law or agreed to in writing, software
# | distributed under the License is distributed on an "AS IS" BASIS,
# | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# | See the License for the specific language governing permissions and
# | limitations under the License.
import os
from python_compat import imap, json, lmap, sort_inplace
def makeint(x):
if x.strip().upper() not in ['', 'MAX', 'MIN']:
return int(x)
def parseLumiFromJSON(data, select = ''):
runs = json.loads(data)
rr = lmap(makeint, select.split('-') + [''])[:2]
for run in imap(int, runs.keys()):
if (rr[0] and run < rr[0]) or (rr[1] and run > rr[1]):
continue
for lumi in runs[str(run)]:
yield ([run, lumi[0]], [run, lumi[1]])
def keyLumi(a):
return tuple(a[0])
def mergeLumi(rlrange):
""" Merge consecutive lumi sections
>>> mergeLumi([([1, 11], [1, 20]), ([1, 1], [1, 10]), ([1, 22], [1, 30])])
[([1, 1], [1, 20]), ([1, 22], [1, 30])]
>>> mergeLumi([([1, 1], [2, 2]), ([2, 3], [2, 10]), ([2, 11], [4, 30])])
[([1, 1], [4, 30])]
"""
sort_inplace(rlrange, keyLumi)
i = 0
while i < len(rlrange) - 1:
(end_run, end_lumi) = rlrange[i][1]
(start_next_run, start_next_lumi) = rlrange[i+1][0]
if (end_run == start_next_run) and (end_lumi == start_next_lumi - 1):
rlrange[i] = (rlrange[i][0], rlrange[i + 1][1])
del rlrange[i+1]
else:
i += 1
return rlrange
def parseLumiFromString(rlrange):
""" Parse user supplied lumi info into easier to handle format
>>> lmap(parseLumiFromString, ['1', '1-', '-1', '1-2'])
[([1, None], [1, None]), ([1, None], [None, None]), ([None, None], [1, None]), ([1, None], [2, None])]
>>> lmap(parseLumiFromString, ['1:5', '1:5-', '-1:5', '1:5-2:6'])
[([1, 5], [1, 5]), ([1, 5], [None, None]), ([None, None], [1, 5]), ([1, 5], [2, 6])]
>>> lmap(parseLumiFromString, ['1-:5', ':5-1', ':5-:6'])
[([1, None], [None, 5]), ([None, 5], [1, None]), ([None, 5], [None, 6])]
>>> lmap(parseLumiFromString, ['1:5-2', '1-2:5'])
[([1, 5], [2, None]), ([1, None], [2, 5])]
"""
def parseRunLumi(rl):
if ':' in rl:
return lmap(makeint, rl.split(':'))
else:
return [makeint(rl), None]
if '-' in rlrange:
return tuple(imap(parseRunLumi, rlrange.split('-')))
else:
tmp = parseRunLumi(rlrange)
return (tmp, tmp)
def parseLumiFilter(lumiexpr):
if lumiexpr == '':
return None
lumis = []
from grid_control.config import ConfigError
for token in imap(str.strip, lumiexpr.split(',')):
token = lmap(str.strip, token.split('|'))
if True in imap(str.isalpha, token[0].lower().replace('min', '').replace('max', '')):
if len(token) == 1:
token.append('')
try:
json_fn = os.path.normpath(os.path.expandvars(os.path.expanduser(token[0].strip())))
json_fp = open(json_fn)
lumis.extend(parseLumiFromJSON(json_fp.read(), token[1]))
json_fp.close()
except Exception:
raise ConfigError('Could not process lumi filter file: %r (filter: %r)' % tuple(token))
else:
try:
lumis.append(parseLumiFromString(token[0]))
except Exception:
raise ConfigError('Could not process lumi filter expression:\n\t%s' % token[0])
return mergeLumi(lumis)
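# Illustrative example ('mask.json' is a hypothetical CMS-style JSON lumi
# mask file): parseLumiFilter('1:1-2:5, mask.json|130000-140000') combines an
# inline run:lumi range with the ranges read from the file, keeping only runs
# 130000-140000, and returns the merged, sorted list of ranges.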
def filterLumiFilter(runs, lumifilter):
""" Filter lumifilter for entries that contain the given runs
>>> formatLumi(filterLumiFilter([2,3,6], [([1, None], [2, None]), ([4, 1], [4, None]), ([5, 1], [None,3])]))
['1:MIN-2:MAX', '5:1-9999999:3']
>>> formatLumi(filterLumiFilter([2,3,6], [([1, 1], [2, 2]), ([3, 1], [5, 2]), ([5, 2], [7,3])]))
['1:1-2:2', '3:1-5:2', '5:2-7:3']
"""
for filterEntry in lumifilter:
(sel_start, sel_end) = (filterEntry[0][0], filterEntry[1][0])
for run in runs:
if (sel_start is None) or (run >= sel_start):
if (sel_end is None) or (run <= sel_end):
yield filterEntry
break
def selectRun(run, lumifilter):
""" Check if lumifilter selects the given run/lumi
>>> selectRun(1, [([1, None], [2, None])])
True
>>> selectRun(2, [([1, 3], [5, 12])])
True
>>> selectRun(6, [([1, 3], [5, 12])])
False
>>> selectRun(9, [([3, 23], [None, None])])
True
"""
for (sel_start, sel_end) in lumifilter:
(sel_start_run, sel_end_run) = (sel_start[0], sel_end[0])
if (sel_start_run is None) or (run >= sel_start_run):
if (sel_end_run is None) or (run <= sel_end_run):
return True
return False
def selectLumi(run_lumi, lumifilter):
""" Check if lumifilter selects the given run/lumi
>>> selectLumi((1,2), [([1, None], [2, None])])
True
>>> selectLumi((1,2), [([1, 3], [5, 12])])
False
>>> selectLumi((2,1), [([1, 3], [5, 12])])
True
>>> selectLumi((9,2), [([3, 23], [None, None])])
True
"""
(run, lumi) = run_lumi
for (sel_start, sel_end) in lumifilter:
(sel_start_run, sel_start_lumi) = sel_start
(sel_end_run, sel_end_lumi) = sel_end
if (sel_start_run is None) or (run >= sel_start_run):
if (sel_end_run is None) or (run <= sel_end_run):
# At this point, run_lumi is contained in the selected run
if (sel_start_run is not None) and (run > sel_start_run):
sel_start_lumi = None
if (sel_start_lumi is None) or (lumi >= sel_start_lumi):
if (sel_end_run is not None) and (run < sel_end_run):
sel_end_lumi = None
if (sel_end_lumi is None) or (lumi <= sel_end_lumi):
return True
return False
def formatLumi(lumifilter):
""" Check if lumifilter selects the given run/lumi
>>> formatLumi(imap(parseLumiFromString, ['1', '1-', '-1', '1-2']))
['1:MIN-1:MAX', '1:MIN-9999999:MAX', '1:MIN-1:MAX', '1:MIN-2:MAX']
>>> formatLumi(imap(parseLumiFromString, ['1:5', '1:5-', '-1:5', '1:5-2:6']))
['1:5-1:5', '1:5-9999999:MAX', '1:MIN-1:5', '1:5-2:6']
>>> formatLumi(imap(parseLumiFromString, ['1-:5', ':5-1', ':5-:6']))
['1:MIN-9999999:5', '1:5-1:MAX', '1:5-9999999:6']
>>> formatLumi(imap(parseLumiFromString, ['1:5-2', '1-2:5']))
['1:5-2:MAX', '1:MIN-2:5']
"""
def formatRange(rlrange):
(start, end) = rlrange
default = lambda x, d: (x, d)[x is None]
start = [default(start[0], '1'), default(start[1], 'MIN')]
end = [default(end[0], '9999999'), default(end[1], 'MAX')]
return str.join('-', imap(lambda x: '%s:%s' % tuple(x), (start, end)))
if lumifilter:
return lmap(formatRange, lumifilter)
return ''
def strLumi(lumifilter):
return str.join(',', formatLumi(lumifilter))
if __name__ == '__main__':
import doctest
doctest.testmod()
| [
"[email protected]"
] | |
afc06ae4b405fbce9055d076027588304160a0e4 | 83b242997a1560214285fd38ab4d39a0b1210ddc | /SOL4Py/network/ZThreadedTCPServer.py | add7627d5756461363417a09cff04384cc3dbf66 | [] | no_license | ivartz/vid2fft | 0a25d853e178b43fd0a5f765934887963f5c37f9 | 1b6ec82de04f86819ab4c1056d4f9d9bde1ed9c8 | refs/heads/master | 2020-08-07T21:44:28.745553 | 2019-10-08T09:18:41 | 2019-10-08T09:18:41 | 213,594,969 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,963 | py | #/******************************************************************************
#
# Copyright (c) 2018 Antillia.com TOSHIYUKI ARAI. ALL RIGHTS RESERVED.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#******************************************************************************/
# 2018/09/20
# ZThreadedTCPServer.py
# encoding utf-8
# Simple TCPServer example that accepts a single TCP client
# See https://docs.python.org/3/library/socketserver.html
# See also: https://gist.github.com/arthurafarias/7258a2b83433dfda013f1954aaecd50a#file-server-py
import os
import sys
import time
import socketserver
import threading
import traceback
from SOL4Py.ZSingleton import *
##
# Simple TCPServer thread class, which handles a stream request from a TCP client.
#
class ZThreadedTCPServer(threading.Thread, ZSingleton):
#---------------------------------------------------------
# Inner class starts.
# Define your subclass derived from StreanRequestHandler
class _TCPRequestHandler(socketserver.StreamRequestHandler):
# Define your own handle method if needed.
def handle(self):
print(self.__class__.__name__ + self.handle.__name__ + " start")
print("Curent thread name:{}".format(threading.current_thread().name))
try:
while True:
print("Curent thread name:{}".format(threading.current_thread().name))
bytes = self.rfile.readline().strip()
if len(bytes) == 0:
print("breaking handle loop")
break
ZSingleton.get_instance().request_handle_callback(bytes, self.wfile)
self.request.close()
except:
traceback.print_exc()
# Inner class ends.
##
#
# Constructor
def __init__(self, ipaddress, port, request_handler_class = None):
super(ZThreadedTCPServer, self).__init__()
print(self.__class__.__name__ + "::" + self.run.__name__ + " start")
ZSingleton.set_instance(self)
print("IPAddress:{} Port:{}".format(ipaddress, port))
self.server_address = (ipaddress, port)
if request_handler_class == None:
# Register the default request handler class: self._TCPRequestHandler.
self.sock_server = socketserver.TCPServer(self.server_address, self._TCPRequestHandler)
else:
self.sock_server = socketserver.TCPServer(self.server_address, request_handler_class)
self.sock_server.allow_reuse_address = True
# Please redefine your own method 'request_handle_callback' in a subclass derived from this class.
def request_handle_callback(self, bytes, writer):
text = bytes.decode("utf-8")
import datetime
now = datetime.datetime.now()
print("Recieved at {} data :{}".format(now, text))
reply = "OK"
breply = reply.encode("utf-8")
writer.write(breply)
# Thread main procedure.
def run(self):
print(self.__class__.__name__ + "::" + self.run.__name__ + " start")
if self.sock_server != None:
self.sock_server.serve_forever()
print(self.__class__.__name__ + "::" + self.run.__name__ + " end")
  # Shut down and close the server socket.
def close(self):
if self.sock_server != None:
self.sock_server.shutdown()
print("sock_server shutdown")
self.sock_server.server_close()
print("sock_server close")
| [
"[email protected]"
] | |
84c806a6c6711ceb7dc060bcec0926b8246fdadb | e0980f704a573894350e285f66f4cf390837238e | /.history/rocketman/settings/production_20210104181634.py | 89446f098f49d16a5372b9f81e7bc516ac235f9c | [] | no_license | rucpata/WagtailWebsite | 28008474ec779d12ef43bceb61827168274a8b61 | 5aa44f51592f49c9a708fc5515ad877c6a29dfd9 | refs/heads/main | 2023-02-09T15:30:02.133415 | 2021-01-05T14:55:45 | 2021-01-05T14:55:45 | 303,961,094 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 301 | py | import os
from .base import *
DEBUG = False
SECRET_KEY = '$^8&x#8a5!7@r!#6ov9bfl(j8k^6+$v-1x+*#!uqf(=^n+*$w3'
ALLOWED_HOSTS = ['localhost', 'rocketman.naukawagtail.com', '*']
cwd = os.getcwd()

# NOTE: the cache backend below is an assumed placeholder (Django's built-in
# local-memory cache), completing a truncated fragment.
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
    }
}
try:
from .local import *
except ImportError:
pass
| [
"[email protected]"
] | |
2c197d376b5580c493f3dddf7bdbd0b7cfbe9d98 | 7b12eb45c1ea76ad9c186b858b5dfebf2c5b862a | /.history/DEBER_20210905000450.py | c9d0c0d9993b0f24c7ecbb3cf98c786e2d4f0c05 | [
"MIT"
] | permissive | Alopezm5/PROYECTO-PARTE-1 | a1dce04009b24852c1c60e69bdf602ad3af0574b | bd7a8594edf08d41c6ca544cf6bac01ea4fcb684 | refs/heads/main | 2023-07-25T11:22:17.994770 | 2021-09-07T03:27:34 | 2021-09-07T03:27:34 | 403,670,226 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,825 | py | import os
from datetime import date

class Empresa():
def __init__(self,nom="",ruc=0,dire="",tele=0,ciud="",tipEmpr=""):
self.nombre=nom
self.ruc=ruc
self.direccion=dire
self.telefono=tele
self.ciudad=ciud
self.tipoEmpresa=tipEmpr
    def datosEmpresa(self):
        self.nombre = input("Enter the company name: ")
        self.ruc = int(input("Enter the company RUC: "))
        self.direccion = input("Enter the company address: ")
        self.telefono = int(input("Enter the company phone number: "))
        self.ciudad = input("Enter the city where the company is located: ")
        self.tipoEmpresa = input("Enter the company type, public or private: ")
def mostrarEmpresa(self):
print("")
print("Empresa")
print("La empresa de nombre {}\n De RUC #{} \n Está ubicada en {}\n Se puede comunicar al #{}\n Está empresa esta en la ciudad de {}\n Es una entidad {}".format(self.nombre,self.ruc,self.direccion, self.telefono,self.ciudad, self.tipoEmpresa))
class Empleado(Empresa):
def __init__(self,nom="",cedu=0,dire="",tele=0,email="",estado="",profe=""):
self.nombre=nom
self.cedula=cedu
self.direccion=dire
self.telefono=tele
self.correo=email
self.estadocivil=estado
self.profesion=profe
def empleado(self):
self.nombre=input("Ingresar nombre del empleado: ")
self.cedula=int(input("Ingresar numero de cedula del empleado: "))
self.direccion=input("Ingresar la direccion del empleado: ")
self.telefono=int(input("Ingresar numero de contacto del empleado: "))
self.correo=input("Ingresar correo personal del empleado: ")
def empleadoObrero(self):
self.estadocivil=input("Ingresar estado civil del empleado: ")
def empleadoOficina(self):
self.profesion=input("Ingresar profesion del empleado: ")
def mostrarempleado(self):
print("El empleado: {} con # de C.I. {} \n Con direccion {}, y numero de contacto{}\n Y correo {}".format(self.nombre,self.cedula,self.direccion,self.telefono,self.correo))
class Departamento(Empleado):
def __init__(self,dep=""):
self.departamento=dep
def departa(self):
self.departamento=input("Ingresar el departamento al que pertenece el empleado: ")
def mostrarDeparta(self):
print("El empleado pertenece al departamento de: {}".format(self.departamento))
class Pagos(Empleado):
def __init__(self, desper=0,valhora=0,hotraba=0,extra=0,suel=0,hrecar=0,hextra=0,pres=0,mcou=0,valho=0,sobtiem=0,comofi=0,antobre=0,iemple=0,cuopres=0,tot=0,liquid=0,cuota=0,anti=0,comi=0,fNomina="",fIngreso="",iess=0):
self.permisos=desper
self.valorhora=valhora
self.horastrabajadas=hotraba
self.valextra=extra
self.sueldo= suel
self.horasRecargo= hrecar
self.horasExtraordinarias=hextra
self.prestamo= pres
self.mesCuota= mcou
self.valor_hora= valho
self.sobretiempo=sobtiem
self.comEmpOficina = comofi
self.antiEmpObrero = antobre
self.iessEmpleado = iemple
self.cuotaPrestamo=cuopres
self.totdes = tot
self.liquidoRecibir = liquid
self.mesCuota=cuota
self.antiguedad=anti
self.comision=comi
self.fechaNomina=fNomina
self.fechaIngreso=fIngreso
self.iess=iess
def pagoNormal(self):
self.sueldo=float(input("Ingresar sueldo del trabajador: $ "))
self.prestamo=float(input("Ingresar monto del prestamo que ha generado el empleado: $ "))
self.mesCuota=int(input("Ingresar meses a diferir el prestamo: "))
self.comision=float(input("Ingresar valor de la comsion: "))
self.antiguedad=int(input("Ingresar antiguedad: "))
self.iess=float(input("Ingresar valor del iees recordar que debe ser porcentuado Ejemplo si quiere decir 20% debe ingresar 0.20"))
def pagoExtra(self):
self.horasRecargo=int(input("Ingresar horas de recargo: "))
self.horasExtraordinarias=int(input("Ingresar horas extraordinarias: "))
self.fechaNomina=float(input("Ingresar fecha de nomida (formato año-mes-dia): "))
self.fechaIngreso=float(input("Ingresar fecha de ingreso (formato año-mes-dia): "))
def calculoSueldo(self):
self.valor_hora=self.sueldo/240
self.sobretiempo= self.valor_hora * (self.horasRecargo*0.50+self.horasExtraordinarias*2)
self.comEmpOficina = self.comision*self.sueldo
        self.antiEmpObrero = self.antiguedad * (self.fechaNomina - self.fechaIngreso).days / 365 * self.sueldo
self.iessEmpleado = self.iess*(self.sueldo+self.sobretiempo)
self.cuotaPrestamo=self.prestamo/self.mesCuota
self.toting = self.sueldo+self.sobretiempo+ self.comEmpOficina + self.antiEmpObrero
self.totdes = self.iessEmpleado + self.prestamo
self.liquidoRecibir = self.toting - self.totdes
def mostrarSueldo(self):
print("SUELDO BASE")
print("El empleado tiene un sueldo de {}".format(self.sueldo))
print("")
print("SOBRETIEMPO")
print("El valor de sobretiempo es de {}, con {} horas extras trabajadas".format(self.sobretiempo,self.horasExtraordinarias))
print("")
print("PRESTAMO")
print("El valor de prestamo es de {}, a ser pagado en {} meses, con cuotas de {}".format(self.p))
emp=Empresa()
emp.datosEmpresa()
os.system ("cls")
emple=Empleado()
emple.empleado()
os.system ("cls")
emple.empleadoObrero()
emple.empleadoOficina()
os.system ("cls")
depa=Departamento()
depa.departa()
pag=Pagos()
pag.pagoNormal()
pag.pagoExtra()
pag.calculoSueldo()
os.system ("cls")
emp.mostrarEmpresa()
print("")
emple.mostrarempleado()
print("")
pag.mostrarSueldo() | [
"[email protected]"
] | |
159e1bbb69f50777b2ba294e3298a272b72dcb2a | 7f4c82f7eb8d2805e378586f14e214cdaacfdb4a | /books/model/CommentList.py | 4c2ece791575dd51f9a9ea502c5e8bd24457084a | [
"MIT"
] | permissive | deepubansal/books-python-wrappers | 5a922267ec8382b3542638d894c96f4891b57bf5 | 51210c8d557a32564f976a56214d3c0807f46a90 | refs/heads/master | 2022-12-05T11:25:01.694021 | 2020-08-29T07:35:23 | 2020-08-29T07:35:23 | 288,738,813 | 0 | 0 | MIT | 2020-08-29T07:35:24 | 2020-08-19T13:26:04 | Python | UTF-8 | Python | false | false | 1,042 | py | #$Id$
from books.model.PageContext import PageContext
class CommentList:
"""This class is used to create object for comments."""
def __init__(self):
"""Initialize parameters for Comments list."""
self.comments = []
self.page_context = PageContext()
def set_comments(self, comment):
"""Set comments.
Args:
comment(instance): Comment object.
"""
self.comments.append(comment)
def get_comments(self):
"""Get comments.
Returns:
list: List of comments object.
"""
return self.comments
def set_page_context(self, page_context):
"""Set page context.
Args:
page_context(instance): Page context object.
"""
self.page_context = page_context
def get_page_context(self):
"""Get page context.
Returns:
instance: Page context object.
"""
return self.page_context
| [
"[email protected]"
] | |
243f9844bc2368e7521d055910a36de2f944497b | c03225c395b5b0d8cdd79e88f78c13666c7c953a | /utils/dataset/__init__.py | 816fb0be6aedd8710f9820a6ca430f6ba99fe353 | [] | no_license | zhaoyin214/cv_lab_pytorch | de52bacd098c378cd06a80f6f77a76b42d7d0e42 | 4ccaf973678943f6b8e3b07d053cc447d5d57e76 | refs/heads/master | 2022-11-17T06:48:58.121305 | 2020-07-15T23:48:33 | 2020-07-15T23:48:33 | 272,972,408 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 32 | py | from .load import load_ibug_300w | [
"[email protected]"
] | |
aee868eb2597469429538bbd075d10a018a753ac | 6fe2d3c27c4cb498b7ad6d9411cc8fa69f4a38f8 | /algorithms/algorithms-python/leetcode_medium/Question_111_Combination_Sum_III.py | 96ac21f6e928162be84fa4ea48977d9e38d1fd35 | [] | no_license | Lanceolata/code | aae54af632a212c878ce45b11dab919bba55bcb3 | f7d5a7de27c3cc8a7a4abf63eab9ff9b21d512fb | refs/heads/master | 2022-09-01T04:26:56.190829 | 2021-07-29T05:14:40 | 2021-07-29T05:14:40 | 87,202,214 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 582 | py | #!/usr/bin/python
# coding: utf-8
class Solution(object):
def combinationSum3(self, k, n):
"""
:type k: int
:type n: int
:rtype: List[List[int]]
"""
vec = []
res = []
self.helper(k, n, 1, vec, res)
return res
def helper(self, k, n, l, vec, res):
if n < 0:
return
if n == 0 and len(vec) == k:
res.append(vec[:])
return
for i in range(l, 10):
vec.append(i)
self.helper(k, n - i, i + 1, vec, res)
vec.pop()
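# Example: Solution().combinationSum3(3, 7) returns [[1, 2, 4]] -- the only
# combination of 3 distinct digits from 1-9 that sums to 7.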
| [
"[email protected]"
] | |
d7e694d8b7e339f353fe621aef7be75b1bd0d979 | 9a1b033774e371bd6442048f43e862dfb71abed7 | /Lists As Stacks And Queues/Exercises/Cups_and_Bottles.py | 57a94258fc2f957d4054343b06ab2bb9d026c989 | [] | no_license | mialskywalker/PythonAdvanced | ea4fde32ba201f6999cd0d59d1a95f00fb5f674b | c74ad063154c94b247aaf73b7104df9c6033b1a5 | refs/heads/master | 2023-03-09T00:13:28.471328 | 2021-02-24T15:21:11 | 2021-02-24T15:21:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 727 | py | from collections import deque
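# Each cup holds the amount of water it still needs; bottles are taken from
# the end, cups from the front. Overflow from a bottle is wasted, and a cup
# that is not completely filled goes back to the front of the queue with its
# remaining capacity.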
cups_capacity = deque(int(el) for el in input().split())
bottles_capacity = [int(el) for el in input().split()]
wasted_water = 0
while True:
if not cups_capacity or not bottles_capacity:
break
bottle = bottles_capacity.pop()
cup = cups_capacity.popleft()
total = bottle - cup
if total >= 0:
wasted_water += total
elif total < 0:
cups_capacity.appendleft(abs(total))
if not cups_capacity:
print(f"Bottles: {' '.join(map(str, bottles_capacity))}")
print(f"Wasted litters of water: {wasted_water}")
elif not bottles_capacity:
print(f"Cups: {' '.join(map(str, cups_capacity))}")
print(f"Wasted litters of water: {wasted_water}")
| [
"[email protected]"
] | |
5c4481583616b552e258f790a750fce1afc4245a | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp-with-texts/MPOA-EXT-MIB.py | 8cccf569872dba8ee2a1f2f3442c0ee661ce4a91 | [
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 10,427 | py | #
# PySNMP MIB module MPOA-EXT-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/MPOA-EXT-MIB
# Produced by pysmi-0.3.4 at Wed May 1 14:14:59 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, SingleValueConstraint, ValueSizeConstraint, ConstraintsUnion, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsUnion", "ConstraintsIntersection")
Boolean, extensions = mibBuilder.importSymbols("CENTILLION-ROOT-MIB", "Boolean", "extensions")
mpcIndex, = mibBuilder.importSymbols("MPOA-MIB", "mpcIndex")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Counter32, Integer32, NotificationType, Gauge32, MibIdentifier, ModuleIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress, Bits, iso, TimeTicks, Counter64, ObjectIdentity, Unsigned32 = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "Integer32", "NotificationType", "Gauge32", "MibIdentifier", "ModuleIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress", "Bits", "iso", "TimeTicks", "Counter64", "ObjectIdentity", "Unsigned32")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
RowStatus, TruthValue = mibBuilder.importSymbols("SNMPv2-TC-v1", "RowStatus", "TruthValue")
cnMpoaExt = MibIdentifier((1, 3, 6, 1, 4, 1, 930, 3, 7))
cnMpcConfigTable = MibTable((1, 3, 6, 1, 4, 1, 930, 3, 7, 2), )
if mibBuilder.loadTexts: cnMpcConfigTable.setStatus('mandatory')
if mibBuilder.loadTexts: cnMpcConfigTable.setDescription('The MPOA Bay Networks proprietary Client Configuration Table. This table contains configuration information for all MPOA Clients which this agent manages.')
cnMpcConfigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 930, 3, 7, 2, 1), ).setIndexNames((0, "MPOA-MIB", "mpcIndex"))
if mibBuilder.loadTexts: cnMpcConfigEntry.setStatus('mandatory')
if mibBuilder.loadTexts: cnMpcConfigEntry.setDescription('MPOA Client Bay Networks Configuration Entry. Each entry contains configuration information for one MPOA Client.')
cnMpcShareControlVccs = MibTableColumn((1, 3, 6, 1, 4, 1, 930, 3, 7, 2, 1, 1), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cnMpcShareControlVccs.setStatus('mandatory')
if mibBuilder.loadTexts: cnMpcShareControlVccs.setDescription('This Parameter enables VCC sharing for MPOA Control VCCs if set to true. LLC encapsulation is always signaled, regardless of sharing.')
cnMpcShareDataVccs = MibTableColumn((1, 3, 6, 1, 4, 1, 930, 3, 7, 2, 1, 2), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cnMpcShareDataVccs.setStatus('mandatory')
if mibBuilder.loadTexts: cnMpcShareDataVccs.setDescription('This parameter enables VCC sharing for MPOA Data VCCs if set to true. LLC encapsulation is always signaled, regardless of sharing.')
cnMpcValidEntryCheckInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 930, 3, 7, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cnMpcValidEntryCheckInterval.setStatus('mandatory')
if mibBuilder.loadTexts: cnMpcValidEntryCheckInterval.setDescription('This parameter specifies the interval in seconds, to check LOCAL IP FDB entries in the Valid state for minimum activity.')
cnMpcMinFlowPacketCount = MibTableColumn((1, 3, 6, 1, 4, 1, 930, 3, 7, 2, 1, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cnMpcMinFlowPacketCount.setStatus('mandatory')
if mibBuilder.loadTexts: cnMpcMinFlowPacketCount.setDescription('This parameter specifies the minimum number of packets to be forwarded by a Local FDB Entry in the Valid state in cnMpcValidEntryCheckInterval to maintain minimum activity level. If minimum activity is not maintained, the entry is deleted.')
cnMpoaIpVerification = MibIdentifier((1, 3, 6, 1, 4, 1, 930, 3, 7, 3))
cnMpoaIpVerificationTableType = MibScalar((1, 3, 6, 1, 4, 1, 930, 3, 7, 3, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("unknown", 1), ("exclusion", 2), ("inclusion", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cnMpoaIpVerificationTableType.setStatus('mandatory')
if mibBuilder.loadTexts: cnMpoaIpVerificationTableType.setDescription('This object controls the type of verification table that is being used. This object is used in combination with the status and download object and the IP verification table. Any change made to this object must be downloaded to the switch cards using the cnMpoaIpVerificationTableDownload object before the settings actually take effect. To enable a verification table, the table type must be set to exclusion or inclusion, enabled using the table status object and then downloaded to the cards using the download object. To delete the IP verification information, you must set the table status object to clear and then downloaded to the cards using the download object. When the information is deleted, the table type is read as unknown. To change the table type between exclusion and inclusion, you must first delete the IP verification information and then recreate it.')
cnMpoaIpVerificationTableStatus = MibScalar((1, 3, 6, 1, 4, 1, 930, 3, 7, 3, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("disable", 1), ("enable", 2), ("clear", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cnMpoaIpVerificationTableStatus.setStatus('mandatory')
if mibBuilder.loadTexts: cnMpoaIpVerificationTableStatus.setDescription('This object is used to enable, disable or clear the IP Verification information. Any change to this object information must be downloaded to the switch cards using the cnMpoaIpVerificationTableDownload object. An empty IP verification table will yield disable on a get.')
cnMpoaIpVerificationTableDownload = MibScalar((1, 3, 6, 1, 4, 1, 930, 3, 7, 3, 3), Boolean()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cnMpoaIpVerificationTableDownload.setStatus('mandatory')
if mibBuilder.loadTexts: cnMpoaIpVerificationTableDownload.setDescription('Setting this object to true(1) causes the MPOA IP Verification Table information to be downloaded to all of the cards in the switch that support MPOA Clients (MPCs). You must download the IP Verification Table information to the cards before it will become effective when you are dynamically configuring this feature. The IP Verification Table is automatically downloaded to the MPC configured cards at card initialization. When read, this object always returns false(2).')
cnMpoaIpVerificationTable = MibTable((1, 3, 6, 1, 4, 1, 930, 3, 7, 3, 4), )
if mibBuilder.loadTexts: cnMpoaIpVerificationTable.setStatus('mandatory')
if mibBuilder.loadTexts: cnMpoaIpVerificationTable.setDescription('The MPC IP Verification Table is either an inclusion or exclusion list as indicated by the cnMpoaIpVerificationTableType object. Any change to this table must be downloaded to the switch cards using the cnMpoaIpVerificationTableDownload object.')
cnMpoaIpVerificationEntry = MibTableRow((1, 3, 6, 1, 4, 1, 930, 3, 7, 3, 4, 1), ).setIndexNames((0, "MPOA-EXT-MIB", "cnMpoaIpVerificationAddress"), (0, "MPOA-EXT-MIB", "cnMpoaIpVerificationMask"))
if mibBuilder.loadTexts: cnMpoaIpVerificationEntry.setStatus('mandatory')
if mibBuilder.loadTexts: cnMpoaIpVerificationEntry.setDescription('Each row of the cnMpoaIpVerificationTable consists of an IP address and IP mask that is used to identify a range of addresses that are included or excluded when creating MPOA IP shortcuts. This cnMpoaIpVerificationStatus object is used to control adding or deleting each row.')
cnMpoaIpVerificationAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 930, 3, 7, 3, 4, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cnMpoaIpVerificationAddress.setStatus('mandatory')
if mibBuilder.loadTexts: cnMpoaIpVerificationAddress.setDescription('This object is one of the two keys used to access the cnMpoaIpVerificationTable entries. This object contains an IP address used in conjunction with the cnMpoaIpVerificationMask to identify a range of one or more IP addresses.')
cnMpoaIpVerificationMask = MibTableColumn((1, 3, 6, 1, 4, 1, 930, 3, 7, 3, 4, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cnMpoaIpVerificationMask.setStatus('mandatory')
if mibBuilder.loadTexts: cnMpoaIpVerificationMask.setDescription('This object is one of the two keys used to access the cnMpoaIpVerificationTable entries. This object contains an IP mask used in conjunction with the cnMpoaIpVerificationAddress to identify a range of one or more IP addresses.')
cnMpoaIpVerificationStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 930, 3, 7, 3, 4, 1, 3), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cnMpoaIpVerificationStatus.setStatus('mandatory')
if mibBuilder.loadTexts: cnMpoaIpVerificationStatus.setDescription('Use this object to add or delete rows in the cnMpoaIpVerificationTable. To create new rows, use createAndGo(4) or createAndWait(5). To delete entries use destroy(6). A valid row will have the status of active(1) on a get.')
mibBuilder.exportSymbols("MPOA-EXT-MIB", cnMpoaIpVerificationMask=cnMpoaIpVerificationMask, cnMpoaIpVerificationTable=cnMpoaIpVerificationTable, cnMpoaIpVerificationAddress=cnMpoaIpVerificationAddress, cnMpcShareDataVccs=cnMpcShareDataVccs, cnMpcConfigTable=cnMpcConfigTable, cnMpcValidEntryCheckInterval=cnMpcValidEntryCheckInterval, cnMpoaIpVerification=cnMpoaIpVerification, cnMpoaIpVerificationTableType=cnMpoaIpVerificationTableType, cnMpoaIpVerificationTableStatus=cnMpoaIpVerificationTableStatus, cnMpcMinFlowPacketCount=cnMpcMinFlowPacketCount, cnMpcConfigEntry=cnMpcConfigEntry, cnMpcShareControlVccs=cnMpcShareControlVccs, cnMpoaExt=cnMpoaExt, cnMpoaIpVerificationEntry=cnMpoaIpVerificationEntry, cnMpoaIpVerificationStatus=cnMpoaIpVerificationStatus, cnMpoaIpVerificationTableDownload=cnMpoaIpVerificationTableDownload)
| [
"[email protected]"
] | |
da07c9bf4e4dfa6fedec67e45efc284753925f26 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02952/s489061530.py | c11b8cfc760ea2a0ea44bc3ca92b0888dbd71b04 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 180 | py | s=str(input())
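# Count how many integers in [1, N] have an odd number of digits,
# handled case by case on the digit count of N.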
n=len(s)
s=int(s)
if n==1:
print(s)
elif n==2:
print(9)
elif n==3:
print(10+s-100)
elif n==4:
print(909)
elif n==5:
print(910+s-10000)
else:
print(90909) | [
"[email protected]"
] | |
fa20d3ae1f8e6295713b6a8f217a871b4d843616 | b6f4393777d4f6a3a8b599700ce3405be76c4bc4 | /Apple-Music/Leticia/api/models.py | eb394de811af6473a82f9e7f5f7aa8d11e8e4c24 | [] | no_license | azatnt/Apple-Music-rest_framework- | b13897dd40337384469df269cdf46bd085487442 | 09b7e602078a6d82f63725b757bb657afd221776 | refs/heads/main | 2023-02-10T16:45:47.618860 | 2021-01-14T14:37:25 | 2021-01-14T14:37:25 | 326,934,387 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 760 | py | from django.db import models
import string
import random
def generate_unique_code():
length = 6
while True:
code = ''.join(random.choices(string.ascii_uppercase, k=length))
if Room.objects.filter(code=code).count() == 0:
break
return code
class Room(models.Model):
code = models.CharField(
max_length=8, default=generate_unique_code, unique=True)
host = models.CharField(max_length=50, unique=True)
guest_can_pause = models.BooleanField(default=False, null=False)
votes_to_skip = models.IntegerField(null=False, default=2)
created_at = models.DateTimeField(auto_now_add=True)
current_song = models.CharField(max_length=50, null=True)
def __str__(self):
return self.code
| [
"[email protected]"
] | |
4226d913c82fc3fd3d68a44df6697fe697b6cc5c | ca5b5c217e0053645c2664d777699e9a5050715e | /tex/gen_links.py | 2a733f8bffe856d7ac4c2dffecd46daa7733bfae | [
"MIT"
] | permissive | rodluger/starrynight | 1405ffdb5a0dd0fefc0ae34e7cdaf7eab4735356 | d3f015e466621189cb271d4d18b538430b14a557 | refs/heads/master | 2021-10-26T03:32:15.220725 | 2021-10-22T15:16:48 | 2021-10-22T15:16:48 | 236,542,672 | 7 | 1 | MIT | 2020-06-03T19:51:10 | 2020-01-27T16:58:05 | Jupyter Notebook | UTF-8 | Python | false | false | 881 | py | from __future__ import print_function
import subprocess
import os
# Generate the github links
hash = subprocess.check_output(["git", "rev-parse", "HEAD"]).decode("utf-8")[:-1]
slug = "rodluger/starrynight"
with open("gitlinks.tex", "w") as f:
print(
r"\newcommand{\codelink}[1]{\href{https://github.com/%s/blob/%s/tex/figures/#1.py}{\codeicon}\,\,}"
% (slug, hash),
file=f,
)
print(
r"\newcommand{\animlink}[1]{\href{https://github.com/%s/blob/%s/tex/figures/#1.gif}{\animicon}\,\,}"
% (slug, hash),
file=f,
)
print(
r"\newcommand{\prooflink}[1]{\href{https://github.com/%s/blob/%s/tex/proofs/#1.ipynb}{\raisebox{-0.1em}{\prooficon}}}"
% (slug, hash),
file=f,
)
print(
r"\newcommand{\cilink}[1]{\href{https://dev.azure.com/%s/_build}{#1}}" % (slug),
file=f,
)
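# Usage sketch (illustrative; 'some_figure' is a placeholder name): after this
# script runs, the paper's LaTeX source can \input the generated gitlinks.tex
# and write \codelink{some_figure} to link to tex/figures/some_figure.py at
# the pinned commit.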
| [
"[email protected]"
] | |
a40845fe784984a2a2ef36f79556424959d0fcd3 | 5689bffe9a9594e52f934542994db464ed095d71 | /08_unittest/test05_assert_exercises.py | 1d6a1bd6fa8d1033455a67c98cde5e33428fe349 | [] | no_license | WenhaoChen0907/Web-automation | 5c0e2c61e247f32b0e5f2f2a33c9f8cc6e73dc20 | 5488f2c62016f02c934b709e7e9e6ea831d9891c | refs/heads/master | 2023-03-07T13:31:10.265019 | 2021-02-15T06:33:50 | 2021-02-15T06:33:50 | 338,986,720 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,618 | py | # iwebshop happy-path login exercise
import unittest
import sys
import time
from time import sleep
from selenium import webdriver
class IwebLogin(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Firefox()
self.driver.maximize_window()
self.driver.implicitly_wait(30)
self.driver.get("http://localhost/iwebshop/")
def tearDown(self):
sleep(2)
self.driver.quit()
def testLogin(self):
driver = self.driver
driver.find_element_by_link_text("登录").click()
driver.find_element_by_css_selector("input[alt*='邮箱']").send_keys("admin")
driver.find_element_by_css_selector("input[alt*='密码']").send_keys("123456")
driver.find_element_by_css_selector(".submit_login").click()
sleep(3)
        # Get the displayed login info text
text = driver.find_element_by_css_selector(".loginfo").text
        # Assert the expected username appears
try:
self.assertIn("admin", text)
except AssertionError:
# driver.get_screenshot_as_file("../images/img2.jpg")
            # Build the screenshot name with a dynamic timestamp (recommended approach)
now = time.strftime("%Y_%m_%d %H_%M_%S")
            # Append the assertion error message to the screenshot name
rep = sys.exc_info()[1]
driver.get_screenshot_as_file("../images/%s--%s.jpg" % (now, rep))
            # Re-raise the exception so the test is still marked as failed
raise AssertionError
sleep(3)
driver.find_element_by_css_selector(".reg").click()
if __name__ == '__main__':
    # unittest.main() runs every method in the test case whose name starts with "test"
unittest.main()
| [
"[email protected]"
] | |
96fd031274243132b38f7eb70a57d295d5cdd79e | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/7/sy9.py | dbeb1cd55cf03ba4853cb5971ccaa811db6542f3 | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
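# Generated toy-language interpreter, identical to langs/4/lkf.py above
# except that it is keyed on the token 'sy9'. (Python 2 print syntax.)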
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'sy9':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1]) | [
"[email protected]"
] | |
61b227fb19c0098e0d8449df91b59cc77ac3049d | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_142/662.py | e253a45877c6e9142d258233d25715ca05f57e07 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,452 | py | if __name__ == "__main__":
with open("A-small-attempt1.in", 'r') as inputf:
outputf=open("A_out.out",'w')
line=inputf.readline()
line=line.rstrip('\n')
test_num=int(line)
for test in range(test_num):
line = inputf.readline()
line = line.rstrip('\n')
n = int(line)
analysis = [[[[]]],[[[]]]]
j = [0, 0]
for i in range(n):
line = inputf.readline()
line = line.rstrip('\n')
temp = line[0]
analysis[i][0][0]=temp
count = 0
char_c = len(line)
for char in line:
if char == temp:
count = count + 1
else:
analysis[i][j[i]].append(count)
temp = char
j[i] = j[i]+1
count = 1
analysis[i].append([temp])
char_c = char_c-1
if char_c == 0:
analysis[i][j[i]].append(count)
change = 0
pos = True
if j[0]!=j[1]:
result = "Case #%d: Fegla Won"%(test+1)
outputf.write(result)
pos = False
else:
for k in range(j[0]+1):
if analysis[0][k][0] != analysis[1][k][0]:
result = "Case #%d: Fegla Won"%(test+1)
outputf.write(result)
pos = False
break
else:
if analysis[0][k][1] > analysis[1][k][1]:
change = change + analysis[0][k][1] - analysis[1][k][1]
else:
change = change - analysis[0][k][1] + analysis[1][k][1]
if pos == True:
result = "Case #%d: %d" %(test+1, change)
outputf.write(result)
if test != test_num - 1:
outputf.write('\n')
| [
"[email protected]"
] | |
9706c26c8869c0333343a0dae2cbbd2467b37e93 | 94a2c4417c1fdd8577a75b09a17912ebae129e6c | /test/test_prop_is.py | e211fe127da287bfb4f0504b7a588929a7f6c795 | [
"MIT"
] | permissive | slavaGanzin/ramda.py | ad88a3cf6e7eb1461d4a09aad35ae1c18ca32db8 | 634bfbe0dcb300315ded327756cb3e33241589b8 | refs/heads/master | 2023-01-23T04:43:48.485314 | 2023-01-06T10:11:53 | 2023-01-06T10:11:53 | 142,413,822 | 68 | 7 | MIT | 2021-12-22T13:59:56 | 2018-07-26T08:43:31 | Python | UTF-8 | Python | false | false | 278 | py | from ramda import *
from ramda.private.asserts import *
from numbers import Number
def test_prop_is():
assert_equal(prop_is(Number, "x", {"x": 1, "y": 2}), True)
assert_equal(prop_is(Number, "x", {"x": "foo"}), False)
assert_equal(prop_is(Number, "x", {}), False)
| [
"[email protected]"
] | |
326f720d3f00ce6fea68425c9d1ebfbc2906b8df | 9de9bcd87e3f15f743de436d669feb979e55f005 | /timesketch/lib/analyzers/ssh_sessionizer_test.py | a432041fce5b9ee04a020c5228287f633afbcdc1 | [
"Apache-2.0"
] | permissive | jorlamd/timesketch | 97b1f08e9797837672a51bc817426ae61f5fb529 | c7704bede82747d42a8579a264d2b385b93d6dee | refs/heads/master | 2020-12-04T02:54:57.496194 | 2019-11-12T21:07:21 | 2019-11-12T21:07:21 | 230,008,261 | 0 | 0 | Apache-2.0 | 2019-12-24T22:09:17 | 2019-12-24T22:09:16 | null | UTF-8 | Python | false | false | 6,551 | py | """Tests for SSHSessionizerSketchPlugin"""
from __future__ import unicode_literals
import mock
from timesketch.lib.analyzers.ssh_sessionizer import SSHSessionizerSketchPlugin
from timesketch.lib.testlib import BaseTest
from timesketch.lib.testlib import MockDataStore
# TODO: _create_mock_event will be renamed in another pull request. Its name
# should also be changed here.
from timesketch.lib.analyzers.sequence_sessionizer_test \
import _create_mock_event
# Message attributes for events that represent one mock SSH session.
one_ssh_session_args = [{
'message':
'[sshd] [1]: Connection from 1.1.1.1 port 1 on 1.1.1.1 port 1'
}, {
'message': '[sshd] [1]: Accepted certificate ID'
}]
# Message attributes for events that represent two mock SSH sessions.
many_ssh_session_args = [{
'message':
'[sshd] [1]: Connection from 1.1.1.1 port 1 on 1.1.1.1 port 1'
}, {
'message': '[sshd] [1]: Accepted certificate ID'
}, {
'message':
'[sshd] [2]: Connection from 2.2.2.2 port 2 on 2.2.2.2 port 2'
}, {
'message': '[sshd] [2]: Accepted certificate ID'
}]
# Message attributes for a SSH event that is not a connection SSH event
no_ssh_session_args = [{
'message': '[sshd] [0]: Loaded keys'
}]
class TestSSHSessionizerPlugin(BaseTest):
"""Tests the functionality of the ssh sessionizing sketch analyzer."""
@mock.patch('timesketch.lib.analyzers.interface.ElasticsearchDataStore',
MockDataStore)
def test_sessionizer(self):
"""Test basic ssh sessionizer functionality."""
index = 'test_index'
sketch_id = 1
sessionizer = SSHSessionizerSketchPlugin(index, sketch_id)
self.assertIsInstance(sessionizer, SSHSessionizerSketchPlugin)
self.assertEqual(index, sessionizer.index_name)
self.assertEqual(sketch_id, sessionizer.sketch.id)
@mock.patch('timesketch.lib.analyzers.interface.ElasticsearchDataStore',
MockDataStore)
def test_session_starts_with_connection_event(self):
"""Test a session is created if it starts with SSH connection event."""
index = 'test_index'
sketch_id = 1
sessionizer = SSHSessionizerSketchPlugin(index, sketch_id)
sessionizer.datastore.client = mock.Mock()
datastore = sessionizer.datastore
_create_mock_event(datastore, 0, 1, one_ssh_session_args)
message = sessionizer.run()
self.assertEqual(
message,
'Sessionizing completed, number of ssh_session sessions created: 1'
)
session_id = '1.1.1.1_1'
#pylint: disable=unexpected-keyword-arg
event = datastore.get_event('test_index', '0', stored_events=True)
self.assertEqual(event['_source']['session_id']['ssh_session'],
session_id)
@mock.patch('timesketch.lib.analyzers.interface.ElasticsearchDataStore',
MockDataStore)
def test_all_events_from_session_are_labeled(self):
"""Test one SSH session of events is finded and allocated correctly."""
index = 'test_index'
sketch_id = 1
sessionizer = SSHSessionizerSketchPlugin(index, sketch_id)
sessionizer.datastore.client = mock.Mock()
datastore = sessionizer.datastore
_create_mock_event(datastore, 0, 2, one_ssh_session_args, [1])
message = sessionizer.run()
self.assertEqual(
message,
'Sessionizing completed, number of ssh_session sessions created: 1'
)
session_id = '1.1.1.1_1'
#pylint: disable=unexpected-keyword-arg
event = datastore.get_event('test_index', '0', stored_events=True)
self.assertEqual(event['_source']['session_id']['ssh_session'],
session_id)
event = datastore.get_event('test_index', '101', stored_events=True)
self.assertEqual(event['_source']['session_id']['ssh_session'],
session_id)
@mock.patch('timesketch.lib.analyzers.interface.ElasticsearchDataStore',
MockDataStore)
def test_session_doesnt_start_with_no_connection_event(self):
"""Test a session is not created if it doesn't start with SSH connection
event."""
index = 'test_index'
sketch_id = 1
sessionizer = SSHSessionizerSketchPlugin(index, sketch_id)
sessionizer.datastore.client = mock.Mock()
datastore = sessionizer.datastore
_create_mock_event(datastore, 0, 1, no_ssh_session_args)
message = sessionizer.run()
self.assertEqual(
message,
'Sessionizing completed, number of ssh_session sessions created: 0'
)
#pylint: disable=unexpected-keyword-arg
event = datastore.get_event('test_index', '0', stored_events=True)
self.assertNotIn('session_id', event['_source'])
@mock.patch('timesketch.lib.analyzers.interface.ElasticsearchDataStore',
MockDataStore)
def test_multiple_sessions(self):
"""Test multiple sessions are found and allocated correctly."""
index = 'test_index'
sketch_id = 1
sessionizer = SSHSessionizerSketchPlugin(index, sketch_id)
sessionizer.datastore.client = mock.Mock()
datastore = sessionizer.datastore
_create_mock_event(datastore,
0,
4,
many_ssh_session_args,
time_diffs=[1, 1, 1])
message = sessionizer.run()
self.assertEqual(
message,
'Sessionizing completed, number of ssh_session sessions created: 2'
)
session_id_1 = '1.1.1.1_1'
session_id_2 = '2.2.2.2_2'
#pylint: disable=unexpected-keyword-arg
event = datastore.get_event('test_index', '0', stored_events=True)
self.assertEqual(event['_source']['session_id']['ssh_session'],
session_id_1)
event = datastore.get_event('test_index', '101', stored_events=True)
self.assertEqual(event['_source']['session_id']['ssh_session'],
session_id_1)
event = datastore.get_event('test_index', '202', stored_events=True)
self.assertEqual(event['_source']['session_id']['ssh_session'],
session_id_2)
event = datastore.get_event('test_index', '303', stored_events=True)
self.assertEqual(event['_source']['session_id']['ssh_session'],
session_id_2)
| [
"[email protected]"
] | |
b3eecc48b5a6655fb0ae16960cff65aa207ed89d | a6ef13387c24c719a0dcfeb173521cd70beac282 | /devops/day4/ding_Robot.py | 8f01ac1bc53391322f4ad3edd35ab0fd70672935 | [] | no_license | youjiahe/python | f60472d61daf58b7f5bb6aa557949de4babf8c9c | 74eb4c5ba211ae5ffed2040576e5eead75d16e7d | refs/heads/master | 2020-03-31T02:35:55.787809 | 2019-12-02T16:32:54 | 2019-12-02T16:32:54 | 151,831,735 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 744 | py | #!/usr/bin/env python3
import json
import requests
import sys
def send_msg(url, reminders, msg):
headers = {'Content-Type':'application/json;charset=utf-8'}
data={
"msgtype": "text", # 发送消息类型为文本
"at": {
"atMobiles": reminders,
"isAtAll": False, # 不@所有人
},
"text": {
"content": msg, # 消息正文
}
}
r = requests.post(url,data=json.dumps(data),headers=headers)
return r.text
if __name__ == '__main__':
msg = sys.argv[1]
reminders= ['13676240551']
url = 'https://oapi.dingtalk.com/robot/send?access_token=47f4ae71f59ee1624cf30a4f6a4641fac15478aeec406c7f952556906096d790'
print(send_msg(url,reminders,msg)) | [
"[email protected]"
] | |
ee7845df3aecfb80de476bbf727aca8a2ade8529 | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/p3BR/R2/benchmark/startQiskit_QC118.py | 5c08e2c7132cd11baec4e6f28b5a87f2de870cc0 | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,785 | py | # qubit number=3
# total number=20
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def bitwise_xor(s: str, t: str) -> str:
length = len(s)
res = []
for i in range(length):
res.append(str(int(s[i]) ^ int(t[i])))
return ''.join(res[::-1])
def bitwise_dot(s: str, t: str) -> str:
length = len(s)
res = 0
for i in range(length):
res += int(s[i]) * int(t[i])
return str(res % 2)
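
# Examples: bitwise_dot('01', '11') == '1' (0*1 + 1*1 = 1 mod 2), while
# bitwise_xor('01', '11') == '01' -- note that bitwise_xor returns the
# XOR-ed bits in reversed order (res[::-1]).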
def build_oracle(n: int, f: Callable[[str], str]) -> QuantumCircuit:
# implement the oracle O_f
# NOTE: use multi_control_toffoli_gate ('noancilla' mode)
# https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
# https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
# https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
controls = QuantumRegister(n, "ofc")
target = QuantumRegister(1, "oft")
oracle = QuantumCircuit(controls, target, name="Of")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
oracle.mct(controls, target[0], None, mode='noancilla')
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
# oracle.draw('mpl', filename=(kernel + '-oracle.png'))
return oracle
def build_circuit(n: int, f: Callable[[str], str]) -> QuantumCircuit:
# implement the Bernstein-Vazirani circuit
zero = np.binary_repr(0, n)
b = f(zero)
# initial n + 1 bits
input_qubit = QuantumRegister(n+1, "qc")
classicals = ClassicalRegister(n, "qm")
prog = QuantumCircuit(input_qubit, classicals)
# inverse last one (can be omitted if using O_f^\pm)
prog.x(input_qubit[n])
# circuit begin
prog.h(input_qubit[1]) # number=1
prog.h(input_qubit[2]) # number=17
prog.cz(input_qubit[0],input_qubit[2]) # number=18
prog.h(input_qubit[2]) # number=19
prog.x(input_qubit[2]) # number=12
prog.cx(input_qubit[0],input_qubit[2]) # number=13
prog.h(input_qubit[1]) # number=7
prog.cz(input_qubit[2],input_qubit[1]) # number=8
prog.h(input_qubit[1]) # number=9
prog.cx(input_qubit[2],input_qubit[1]) # number=4
prog.y(input_qubit[1]) # number=14
prog.cx(input_qubit[2],input_qubit[1]) # number=10
prog.z(input_qubit[2]) # number=3
prog.y(input_qubit[2]) # number=5
# apply H to get superposition
for i in range(n):
prog.h(input_qubit[i])
prog.h(input_qubit[n])
prog.barrier()
# apply oracle O_f
oracle = build_oracle(n, f)
prog.append(
oracle.to_gate(),
[input_qubit[i] for i in range(n)] + [input_qubit[n]])
# apply H back (QFT on Z_2^n)
for i in range(n):
prog.h(input_qubit[i])
prog.barrier()
# measure
return prog
def get_statevector(prog: QuantumCircuit) -> Any:
state_backend = Aer.get_backend('statevector_simulator')
statevec = execute(prog, state_backend).result()
quantum_state = statevec.get_statevector()
qubits = round(log2(len(quantum_state)))
quantum_state = {
"|" + np.binary_repr(i, qubits) + ">": quantum_state[i]
for i in range(2 ** qubits)
}
return quantum_state
def evaluate(backend_str: str, prog: QuantumCircuit, shots: int, b: str) -> Any:
# Q: which backend should we use?
# get state vector
quantum_state = get_statevector(prog)
# get simulate results
# provider = IBMQ.load_account()
# backend = provider.get_backend(backend_str)
# qobj = compile(prog, backend, shots)
# job = backend.run(qobj)
# job.result()
backend = Aer.get_backend(backend_str)
# transpile/schedule -> assemble -> backend.run
results = execute(prog, backend, shots=shots).result()
counts = results.get_counts()
a = Counter(counts).most_common(1)[0][0][::-1]
return {
"measurements": counts,
# "state": statevec,
"quantum_state": quantum_state,
"a": a,
"b": b
}
def bernstein_test_1(rep: str):
"""011 . x + 1"""
a = "011"
b = "1"
return bitwise_xor(bitwise_dot(a, rep), b)
def bernstein_test_2(rep: str):
"""000 . x + 0"""
a = "000"
b = "0"
return bitwise_xor(bitwise_dot(a, rep), b)
def bernstein_test_3(rep: str):
"""111 . x + 1"""
a = "111"
b = "1"
return bitwise_xor(bitwise_dot(a, rep), b)
if __name__ == "__main__":
n = 2
a = "11"
b = "1"
f = lambda rep: \
bitwise_xor(bitwise_dot(a, rep), b)
prog = build_circuit(n, f)
sample_shot =4000
writefile = open("../data/startQiskit_QC118.csv", "w")
# prog.draw('mpl', filename=(kernel + '.png'))
IBMQ.load_account()
provider = IBMQ.get_provider(hub='ibm-q')
provider.backends()
backend = provider.get_backend("ibmq_5_yorktown")
circuit1 = transpile(prog, FakeYorktown())
circuit1.h(qubit=2)
circuit1.x(qubit=3)
circuit1.measure_all()
info = execute(circuit1,backend=backend, shots=sample_shot).result().get_counts()
print(info, file=writefile)
print("results end", file=writefile)
print(circuit1.depth(), file=writefile)
print(circuit1, file=writefile)
writefile.close()
| [
"[email protected]"
] | |
15fcc498298fb27365a93e3595794528564152ce | 9a2fd5e27d3f811cb18763ed388c2d56ae9907b6 | /爬虫练习/gupiao.py | ee4595015acd484f424596fda32dc78170398d30 | [] | no_license | wzc-ob/PycharmProjects | 5297ce60bade883495e5dbdb614131d31c47682e | 09f5ad6004dbdc83d456cabd78b769fde13d5357 | refs/heads/master | 2020-05-05T07:12:38.789400 | 2019-04-06T10:06:08 | 2019-04-06T10:06:08 | 179,817,189 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,007 | py | import re
import requests
from bs4 import BeautifulSoup
import traceback
def getHTMLText(url,code = 'UTF-8'):
try:
kv = {'user-agent': 'Mozilla/5.0'}
r = requests.get(url, headers=kv, timeout=30)
r.encoding = code
# print(r.text)
return r.text
except:
return ""
def getStockList(lst,stockURL):
html = getHTMLText(stockURL,'GB2312')
soup = BeautifulSoup(html,'html.parser')
a = soup.find_all('a')
for i in a:
try:
href = i.attrs['href']
lst.append(re.findall(r'[s][hz]\d{6}',href)[0])
except:
continue
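
# The regex r'[s][hz]\d{6}' keeps only exchange-prefixed codes such as
# 'sh600000' (Shanghai) or 'sz000001' (Shenzhen) found in the page's hrefs.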
def getStockInfo(lst,stockURL,fpath):
count = 0
for stock in lst:
url = stockURL +stock +".html"
print(url)
html = getHTMLText(url)
try:
if html =='':
continue
infoDict = {}
soup = BeautifulSoup(html,'html.parser')
stockInfo = soup.find('div',attrs={'class':'stock-bets'})
name = stockInfo.find_all(attrs = {'class':'bets-name'})[0]
print(name.text.split()[0])
infoDict.update({'股票名称':name.text.split()[0]})
keyList = stockInfo.find_all('dt')
valueList = stockInfo.find_all('dd')
for i in range(len(keyList)):
key = keyList[i].text
val = valueList[i].text
infoDict[key] = val
with open(fpath,'a',encoding='UTF-8') as f:
f.write(str(infoDict) +'\n')
count = count+1
print('\r当前进度:{:.2f}%'.format(count*100/len(lst)),end='')
except:
traceback.print_exc()
continue
def main():
stock_list_url = 'http://quote.eastmoney.com/stocklist.html'
stock_info_url = 'https://gupiao.baidu.com/stock/'
output_file = 'E://BaiduStockInfo(1).txt'
slist = []
getStockList(slist,stock_list_url)
getStockInfo(slist,stock_info_url,output_file)
main() | [
"[email protected]"
] | |
edcb1a2c177f6634d25b679f32eaa3d10997b8ca | b6aed63c49d24b4c3e2d5be6795ecbcf0a793653 | /examples/feature_engineering/get_scdv.py | 13cd8123885cea7a8d6159052e017ea37f9643c2 | [] | no_license | sidhee-hande/nlp-recipes-ja | 713f053a3cc907a314c6575a0ce65de2b36076c9 | 8ac5e898864137841de8b03c11da34815009af24 | refs/heads/master | 2023-04-25T03:41:33.536244 | 2021-04-10T23:07:45 | 2021-04-10T23:07:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 693 | py | from konoha import WordTokenizer
import neologdn
import numpy as np
from utils_nlp.dataset.livedoor import load_pandas_df
from utils_nlp.features import scdv
from utils_nlp.models.pretrained_embeddings.word2vec import load_pretrained_vectors
if __name__ == '__main__':
df = load_pandas_df(nrows=10)
# Normalization
df['text'] = df['text'].apply(neologdn.normalize)
tokenizer = WordTokenizer('MeCab')
    docs = np.array([
        [str(token) for token in tokenizer.tokenize(text)]  # each doc -> list of token strings
        for text in df['text']
    ], dtype=object)  # ragged lists -> 1-D object array, matching the (10,) shape below
print(docs.shape)
# (10,)
word_vec = load_pretrained_vectors('data')
scdv = scdv.create(docs, word_vec, n_components=10)
print(scdv.shape)
# (10, 3000)
| [
"[email protected]"
] | |
71bf52c3f75e834fe7938987cc7b559aa46b54db | ab0e9b543852bc2d3c828b2351c30d1626f0b321 | /CustomProceduralRiggingTool/CustomProceduralRigTool/rigLib/base/controlShape/unitSliderControl.py | f55103622c28f26d51caf910f83abbbaf7302f2a | [] | no_license | tHeBeStXu/CustomProceduralRigTool | 397011b9519a3e5382aec5aee6115f3e6a14a802 | 003fa61b460d8e76c026f47913ebdab5c0cbfef8 | refs/heads/master | 2021-07-13T09:02:07.697909 | 2020-07-09T07:28:27 | 2020-07-09T07:28:27 | 157,082,564 | 15 | 3 | null | null | null | null | UTF-8 | Python | false | false | 762 | py | import maya.cmds as cmds
def createShape(prefix=''):
"""
    Create a unit slider control for blend operations.
    :param prefix: str, prefix of the control
    :return: str, ctrlBox transform of the unit slider control
"""
Ctrl = cmds.circle(radius=0.2, nr=(1, 0, 0), n=prefix + '_Ctrl')[0]
cmds.transformLimits(Ctrl, tx=(0, 0), ty=(0, 1), tz=(0, 0), etx=(1, 1), ety=(1, 1), etz=(1, 1))
CtrlBox = cmds.curve(d=1, p=[(0, 0, 0), (0, 1, 0)], k=[0, 1], n=prefix + '_CtrlBox')
parentCrvShape = cmds.listRelatives(CtrlBox, s=1)
cmds.setAttr(parentCrvShape[0] + '.template', 1)
cmds.parent(Ctrl, CtrlBox)
cmds.makeIdentity(CtrlBox, apply=1, t=1, r=1, s=1, n=0)
cmds.select(cl=1)
return CtrlBox
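
# Minimal usage sketch inside a Maya session (the prefix is illustrative):
#   box = createShape(prefix='blink')
#   # -> a 'blink_Ctrl' circle that can only slide from 0 to 1 in Y,
#   #    parented under the templated (non-selectable) 'blink_CtrlBox' curve.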
| [
"[email protected]"
] | |
aac4d5fd43519d3e8b5e64343338316a33460a65 | 8dc84558f0058d90dfc4955e905dab1b22d12c08 | /third_party/catapult/telemetry/telemetry/web_perf/metrics/rendering_stats_unittest.py | e96c06c9f476e5202881973b78f825e7781a8062 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference",
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0"
] | permissive | meniossin/src | 42a95cc6c4a9c71d43d62bc4311224ca1fd61e03 | 44f73f7e76119e5ab415d4593ac66485e65d700a | refs/heads/master | 2022-12-16T20:17:03.747113 | 2020-09-03T10:43:12 | 2020-09-03T10:43:12 | 263,710,168 | 1 | 0 | BSD-3-Clause | 2020-05-13T18:20:09 | 2020-05-13T18:20:08 | null | UTF-8 | Python | false | false | 24,876 | py | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import random
import unittest
from telemetry.timeline import async_slice
from telemetry.timeline import model
from telemetry.util import perf_tests_helper
from telemetry.util import statistics
from telemetry.web_perf import timeline_interaction_record as tir_module
from telemetry.web_perf.metrics import rendering_stats
class MockTimer(object):
"""A mock timer class which can generate random durations.
An instance of this class is used as a global timer to generate random
durations for stats and consistent timestamps for all mock trace events.
The unit of time is milliseconds.
"""
def __init__(self):
self.milliseconds = 0
def Advance(self, low=0.1, high=1):
delta = random.uniform(low, high)
self.milliseconds += delta
return delta
def AdvanceAndGet(self, low=0.1, high=1):
self.Advance(low, high)
return self.milliseconds
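
# Example: MockTimer().AdvanceAndGet(2, 4) steps the clock by a random
# 2-4 ms delta and returns the new absolute time, so successive mock frames
# get monotonically increasing timestamps with jittered frame times.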
class MockVblankTimer(object):
"""A mock vblank timer class which can generate random durations.
An instance of this class is used as a vblank timer to generate random
durations for drm stats and consistent timeval for mock trace drm events.
The unit of time is microseconds.
"""
def __init__(self):
self.microseconds = 200000000
def TvAdvance(self, low=100, high=1000):
delta = random.randint(low, high)
self.microseconds += delta
return delta
def TvAdvanceAndGet(self, low=100, high=1000):
self.TvAdvance(low, high)
return self.microseconds
class ReferenceRenderingStats(object):
""" Stores expected data for comparison with actual RenderingStats """
def __init__(self):
self.frame_timestamps = []
self.frame_times = []
self.approximated_pixel_percentages = []
self.checkerboarded_pixel_percentages = []
def AppendNewRange(self):
self.frame_timestamps.append([])
self.frame_times.append([])
self.approximated_pixel_percentages.append([])
self.checkerboarded_pixel_percentages.append([])
class ReferenceInputLatencyStats(object):
""" Stores expected data for comparison with actual input latency stats """
def __init__(self):
self.input_event_latency = []
self.input_event = []
def AddSurfaceFlingerStats(mock_timer, thread, first_frame,
ref_stats=None):
""" Adds a random surface flinger stats event.
thread: The timeline model thread to which the event will be added.
first_frame: Is this the first frame within the bounds of an action?
ref_stats: A ReferenceRenderingStats object to record expected values.
"""
# Create random data and timestamp for impl thread rendering stats.
data = {'frame_count': 1,
'refresh_period': 16.6666}
timestamp = mock_timer.AdvanceAndGet()
# Add a slice with the event data to the given thread.
thread.PushCompleteSlice(
'SurfaceFlinger', 'vsync_before',
timestamp, duration=0.0, thread_timestamp=None, thread_duration=None,
args={'data': data})
if not ref_stats:
return
# Add timestamp only if a frame was output
if data['frame_count'] == 1:
if not first_frame:
# Add frame_time if this is not the first frame in within the bounds of an
# action.
prev_timestamp = ref_stats.frame_timestamps[-1][-1]
ref_stats.frame_times[-1].append(timestamp - prev_timestamp)
ref_stats.frame_timestamps[-1].append(timestamp)
def AddDrmEventFlipStats(mock_timer, vblank_timer, thread,
first_frame, ref_stats=None):
""" Adds a random drm flip complete event.
thread: The timeline model thread to which the event will be added.
first_frame: Is this the first frame within the bounds of an action?
ref_stats: A ReferenceRenderingStats object to record expected values.
"""
# Create random data and timestamp for drm thread flip complete stats.
vblank_timeval = vblank_timer.TvAdvanceAndGet()
vblank_tv_sec = vblank_timeval / 1000000
vblank_tv_usec = vblank_timeval % 1000000
data = {'frame_count': 1,
'vblank.tv_usec': vblank_tv_usec,
'vblank.tv_sec': vblank_tv_sec}
timestamp = mock_timer.AdvanceAndGet()
# Add a slice with the event data to the given thread.
thread.PushCompleteSlice(
'benchmark,drm', 'DrmEventFlipComplete',
timestamp, duration=0.0, thread_timestamp=None, thread_duration=None,
args={'data': data})
if not ref_stats:
return
# Add vblank timeval only if a frame was output.
cur_timestamp = vblank_tv_sec * 1000.0 + vblank_tv_usec / 1000.0
if not first_frame:
# Add frame_time if this is not the first frame in within the bounds of an
# action.
prev_timestamp = ref_stats.frame_timestamps[-1][-1]
ref_stats.frame_times[-1].append(cur_timestamp - prev_timestamp)
ref_stats.frame_timestamps[-1].append(cur_timestamp)
def AddDisplayRenderingStats(mock_timer, thread, first_frame,
ref_stats=None):
""" Adds a random display rendering stats event.
thread: The timeline model thread to which the event will be added.
first_frame: Is this the first frame within the bounds of an action?
ref_stats: A ReferenceRenderingStats object to record expected values.
"""
# Create random data and timestamp for main thread rendering stats.
data = {'frame_count': 1}
timestamp = mock_timer.AdvanceAndGet()
# Add a slice with the event data to the given thread.
thread.PushCompleteSlice(
'benchmark', 'BenchmarkInstrumentation::DisplayRenderingStats',
timestamp, duration=0.0, thread_timestamp=None, thread_duration=None,
args={'data': data})
if not ref_stats:
return
# Add timestamp only if a frame was output
if not first_frame:
# Add frame_time if this is not the first frame in within the bounds of an
# action.
prev_timestamp = ref_stats.frame_timestamps[-1][-1]
ref_stats.frame_times[-1].append(timestamp - prev_timestamp)
ref_stats.frame_timestamps[-1].append(timestamp)
def AddImplThreadRenderingStats(mock_timer, thread, first_frame,
ref_stats=None):
""" Adds a random impl thread rendering stats event.
thread: The timeline model thread to which the event will be added.
first_frame: Is this the first frame within the bounds of an action?
ref_stats: A ReferenceRenderingStats object to record expected values.
"""
# Create random data and timestamp for impl thread rendering stats.
data = {'frame_count': 1,
'visible_content_area': random.uniform(0, 100),
'approximated_visible_content_area': random.uniform(0, 5),
'checkerboarded_visible_content_area': random.uniform(0, 5)}
timestamp = mock_timer.AdvanceAndGet()
# Add a slice with the event data to the given thread.
thread.PushCompleteSlice(
'benchmark', 'BenchmarkInstrumentation::ImplThreadRenderingStats',
timestamp, duration=0.0, thread_timestamp=None, thread_duration=None,
args={'data': data})
if not ref_stats:
return
# Add timestamp only if a frame was output
if data['frame_count'] == 1:
if not first_frame:
# Add frame_time if this is not the first frame in within the bounds of an
# action.
prev_timestamp = ref_stats.frame_timestamps[-1][-1]
ref_stats.frame_times[-1].append(timestamp - prev_timestamp)
ref_stats.frame_timestamps[-1].append(timestamp)
ref_stats.approximated_pixel_percentages[-1].append(
round(statistics.DivideIfPossibleOrZero(
data['approximated_visible_content_area'],
data['visible_content_area']) * 100.0, 3))
ref_stats.checkerboarded_pixel_percentages[-1].append(
round(statistics.DivideIfPossibleOrZero(
data['checkerboarded_visible_content_area'],
data['visible_content_area']) * 100.0, 3))
def AddInputLatencyStats(mock_timer, start_thread, end_thread,
ref_latency_stats=None):
""" Adds a random input latency stats event.
start_thread: The start thread on which the async slice is added.
end_thread: The end thread on which the async slice is ended.
ref_latency_stats: A ReferenceInputLatencyStats object for expected values.
"""
original_comp_time = mock_timer.AdvanceAndGet(2, 4) * 1000.0
ui_comp_time = mock_timer.AdvanceAndGet(2, 4) * 1000.0
begin_comp_time = mock_timer.AdvanceAndGet(2, 4) * 1000.0
forward_comp_time = mock_timer.AdvanceAndGet(2, 4) * 1000.0
end_comp_time = mock_timer.AdvanceAndGet(10, 20) * 1000.0
data = {rendering_stats.ORIGINAL_COMP_NAME: {'time': original_comp_time},
rendering_stats.UI_COMP_NAME: {'time': ui_comp_time},
rendering_stats.BEGIN_COMP_NAME: {'time': begin_comp_time},
rendering_stats.END_COMP_NAME: {'time': end_comp_time}}
timestamp = mock_timer.AdvanceAndGet(2, 4)
tracing_async_slice = async_slice.AsyncSlice(
'benchmark', 'InputLatency', timestamp)
async_sub_slice = async_slice.AsyncSlice(
'benchmark', rendering_stats.GESTURE_SCROLL_UPDATE_EVENT_NAME, timestamp)
async_sub_slice.args = {'data': data}
async_sub_slice.parent_slice = tracing_async_slice
async_sub_slice.start_thread = start_thread
async_sub_slice.end_thread = end_thread
tracing_async_slice.sub_slices.append(async_sub_slice)
tracing_async_slice.start_thread = start_thread
tracing_async_slice.end_thread = end_thread
start_thread.AddAsyncSlice(tracing_async_slice)
# Add scroll update latency info.
scroll_update_data = {
rendering_stats.BEGIN_SCROLL_UPDATE_COMP_NAME: {'time': begin_comp_time},
rendering_stats.FORWARD_SCROLL_UPDATE_COMP_NAME:
{'time': forward_comp_time},
rendering_stats.END_COMP_NAME: {'time': end_comp_time}
}
scroll_async_slice = async_slice.AsyncSlice(
'benchmark', 'InputLatency', timestamp)
scroll_async_sub_slice = async_slice.AsyncSlice(
'benchmark', rendering_stats.MAIN_THREAD_SCROLL_UPDATE_EVENT_NAME,
timestamp)
scroll_async_sub_slice.args = {'data': scroll_update_data}
scroll_async_sub_slice.parent_slice = scroll_async_slice
scroll_async_sub_slice.start_thread = start_thread
scroll_async_sub_slice.end_thread = end_thread
scroll_async_slice.sub_slices.append(scroll_async_sub_slice)
scroll_async_slice.start_thread = start_thread
scroll_async_slice.end_thread = end_thread
start_thread.AddAsyncSlice(scroll_async_slice)
# Also add some dummy frame statistics so we can feed the resulting timeline
# to RenderingStats.
AddImplThreadRenderingStats(mock_timer, end_thread, False)
if not ref_latency_stats:
return
ref_latency_stats.input_event.append(async_sub_slice)
ref_latency_stats.input_event.append(scroll_async_sub_slice)
ref_latency_stats.input_event_latency.append((
rendering_stats.GESTURE_SCROLL_UPDATE_EVENT_NAME,
(data[rendering_stats.END_COMP_NAME]['time'] -
data[rendering_stats.ORIGINAL_COMP_NAME]['time']) / 1000.0))
scroll_update_time = (
scroll_update_data[rendering_stats.END_COMP_NAME]['time'] -
scroll_update_data[rendering_stats.BEGIN_SCROLL_UPDATE_COMP_NAME]['time'])
ref_latency_stats.input_event_latency.append((
rendering_stats.MAIN_THREAD_SCROLL_UPDATE_EVENT_NAME,
scroll_update_time / 1000.0))
class RenderingStatsUnitTest(unittest.TestCase):
def testBothSurfaceFlingerAndDisplayStats(self):
timeline = model.TimelineModel()
timer = MockTimer()
ref_stats = ReferenceRenderingStats()
ref_stats.AppendNewRange()
surface_flinger = timeline.GetOrCreateProcess(pid=4)
surface_flinger.name = 'SurfaceFlinger'
surface_flinger_thread = surface_flinger.GetOrCreateThread(tid=41)
renderer = timeline.GetOrCreateProcess(pid=2)
browser = timeline.GetOrCreateProcess(pid=3)
browser_main = browser.GetOrCreateThread(tid=31)
browser_main.BeginSlice('webkit.console', 'ActionA',
timer.AdvanceAndGet(2, 4), '')
# Create SurfaceFlinger stats and display rendering stats.
for i in xrange(0, 10):
first = (i == 0)
AddSurfaceFlingerStats(timer, surface_flinger_thread, first, ref_stats)
timer.Advance(2, 4)
for i in xrange(0, 10):
first = (i == 0)
AddDisplayRenderingStats(timer, browser_main, first, None)
timer.Advance(5, 10)
browser_main.EndSlice(timer.AdvanceAndGet())
timer.Advance(2, 4)
browser.FinalizeImport()
renderer.FinalizeImport()
timeline_markers = timeline.FindTimelineMarkers(['ActionA'])
records = [tir_module.TimelineInteractionRecord(e.name, e.start, e.end)
for e in timeline_markers]
stats = rendering_stats.RenderingStats(
renderer, browser, surface_flinger, None, records)
# Compare rendering stats to reference - Only SurfaceFlinger stats should
# count
self.assertEquals(stats.frame_timestamps, ref_stats.frame_timestamps)
self.assertEquals(stats.frame_times, ref_stats.frame_times)
def testBothDrmAndDisplayStats(self):
timeline = model.TimelineModel()
timer = MockTimer()
vblank_timer = MockVblankTimer()
ref_stats = ReferenceRenderingStats()
ref_stats.AppendNewRange()
gpu = timeline.GetOrCreateProcess(pid=6)
gpu.name = 'GPU Process'
gpu_drm_thread = gpu.GetOrCreateThread(tid=61)
renderer = timeline.GetOrCreateProcess(pid=2)
browser = timeline.GetOrCreateProcess(pid=3)
browser_main = browser.GetOrCreateThread(tid=31)
browser_main.BeginSlice('webkit.console', 'ActionA',
timer.AdvanceAndGet(2, 4), '')
vblank_timer.TvAdvance(2000, 4000)
# Create drm flip stats and display rendering stats.
for i in xrange(0, 10):
first = (i == 0)
AddDrmEventFlipStats(timer, vblank_timer, gpu_drm_thread,
first, ref_stats)
timer.Advance(2, 4)
vblank_timer.TvAdvance(2000, 4000)
for i in xrange(0, 10):
first = (i == 0)
AddDisplayRenderingStats(timer, browser_main, first, None)
timer.Advance(5, 10)
vblank_timer.TvAdvance(5000, 10000)
browser_main.EndSlice(timer.AdvanceAndGet())
timer.Advance(2, 4)
vblank_timer.TvAdvance(2000, 4000)
browser.FinalizeImport()
renderer.FinalizeImport()
timeline_markers = timeline.FindTimelineMarkers(['ActionA'])
records = [tir_module.TimelineInteractionRecord(e.name, e.start, e.end)
for e in timeline_markers]
stats = rendering_stats.RenderingStats(
renderer, browser, None, gpu, records)
# Compare rendering stats to reference - Only drm flip stats should
# count
self.assertEquals(stats.frame_timestamps, ref_stats.frame_timestamps)
self.assertEquals(stats.frame_times, ref_stats.frame_times)
def testBothDisplayAndImplStats(self):
timeline = model.TimelineModel()
timer = MockTimer()
ref_stats = ReferenceRenderingStats()
ref_stats.AppendNewRange()
renderer = timeline.GetOrCreateProcess(pid=2)
browser = timeline.GetOrCreateProcess(pid=3)
browser_main = browser.GetOrCreateThread(tid=31)
browser_main.BeginSlice('webkit.console', 'ActionA',
timer.AdvanceAndGet(2, 4), '')
# Create main, impl, and display rendering stats.
for i in xrange(0, 10):
first = (i == 0)
AddImplThreadRenderingStats(timer, browser_main, first, None)
timer.Advance(2, 4)
for i in xrange(0, 10):
first = (i == 0)
AddDisplayRenderingStats(timer, browser_main, first, ref_stats)
timer.Advance(5, 10)
browser_main.EndSlice(timer.AdvanceAndGet())
timer.Advance(2, 4)
browser.FinalizeImport()
renderer.FinalizeImport()
timeline_markers = timeline.FindTimelineMarkers(['ActionA'])
records = [tir_module.TimelineInteractionRecord(e.name, e.start, e.end)
for e in timeline_markers]
stats = rendering_stats.RenderingStats(
renderer, browser, None, None, records)
# Compare rendering stats to reference - Only display stats should count
self.assertEquals(stats.frame_timestamps, ref_stats.frame_timestamps)
self.assertEquals(stats.frame_times, ref_stats.frame_times)
def testRangeWithoutFrames(self):
timer = MockTimer()
timeline = model.TimelineModel()
# Create a renderer process, with a main thread and impl thread.
renderer = timeline.GetOrCreateProcess(pid=2)
renderer_main = renderer.GetOrCreateThread(tid=21)
renderer_compositor = renderer.GetOrCreateThread(tid=22)
# Create 10 main and impl rendering stats events for Action A.
renderer_main.BeginSlice('webkit.console', 'ActionA',
timer.AdvanceAndGet(2, 4), '')
for i in xrange(0, 10):
first = (i == 0)
AddImplThreadRenderingStats(timer, renderer_compositor, first, None)
renderer_main.EndSlice(timer.AdvanceAndGet(2, 4))
timer.Advance(2, 4)
# Create 5 main and impl rendering stats events not within any action.
for i in xrange(0, 5):
first = (i == 0)
AddImplThreadRenderingStats(timer, renderer_compositor, first, None)
# Create Action B without any frames. This should trigger
# NotEnoughFramesError when the RenderingStats object is created.
renderer_main.BeginSlice('webkit.console', 'ActionB',
timer.AdvanceAndGet(2, 4), '')
renderer_main.EndSlice(timer.AdvanceAndGet(2, 4))
renderer.FinalizeImport()
timeline_markers = timeline.FindTimelineMarkers(['ActionA', 'ActionB'])
records = [tir_module.TimelineInteractionRecord(e.name, e.start, e.end)
for e in timeline_markers]
stats = rendering_stats.RenderingStats(renderer, None, None, None, records)
self.assertEquals(0, len(stats.frame_timestamps[1]))
def testFromTimeline(self):
timeline = model.TimelineModel()
# Create a browser process and a renderer process, and a main thread and
# impl thread for each.
browser = timeline.GetOrCreateProcess(pid=1)
browser_main = browser.GetOrCreateThread(tid=11)
browser_compositor = browser.GetOrCreateThread(tid=12)
renderer = timeline.GetOrCreateProcess(pid=2)
renderer_main = renderer.GetOrCreateThread(tid=21)
renderer_compositor = renderer.GetOrCreateThread(tid=22)
timer = MockTimer()
renderer_ref_stats = ReferenceRenderingStats()
browser_ref_stats = ReferenceRenderingStats()
browser_ref_stats.AppendNewRange()
renderer_ref_stats.AppendNewRange()
# Add display rendering stats.
browser_main.BeginSlice('webkit.console', 'Action0',
timer.AdvanceAndGet(2, 4), '')
for i in xrange(0, 10):
first = (i == 0)
AddDisplayRenderingStats(timer, browser_main, first, browser_ref_stats)
timer.Advance(5, 10)
browser_main.EndSlice(timer.AdvanceAndGet(2, 4))
# Create 10 main and impl rendering stats events for Action A.
renderer_main.BeginSlice('webkit.console', 'ActionA',
timer.AdvanceAndGet(2, 4), '')
renderer_ref_stats.AppendNewRange()
browser_ref_stats.AppendNewRange()
for i in xrange(0, 10):
first = (i == 0)
AddImplThreadRenderingStats(
timer, renderer_compositor, first, renderer_ref_stats)
AddImplThreadRenderingStats(
timer, browser_compositor, first, None)
renderer_main.EndSlice(timer.AdvanceAndGet(2, 4))
# Create 5 main and impl rendering stats events not within any action.
for i in xrange(0, 5):
first = (i == 0)
AddImplThreadRenderingStats(timer, renderer_compositor, first, None)
AddImplThreadRenderingStats(timer, browser_compositor, first, None)
# Create 10 main and impl rendering stats events for Action B.
renderer_main.BeginSlice('webkit.console', 'ActionB',
timer.AdvanceAndGet(2, 4), '')
renderer_ref_stats.AppendNewRange()
browser_ref_stats.AppendNewRange()
for i in xrange(0, 10):
first = (i == 0)
AddImplThreadRenderingStats(
timer, renderer_compositor, first, renderer_ref_stats)
AddImplThreadRenderingStats(
timer, browser_compositor, first, None)
renderer_main.EndSlice(timer.AdvanceAndGet(2, 4))
# Create 10 main and impl rendering stats events for Action A.
renderer_main.BeginSlice('webkit.console', 'ActionA',
timer.AdvanceAndGet(2, 4), '')
renderer_ref_stats.AppendNewRange()
browser_ref_stats.AppendNewRange()
for i in xrange(0, 10):
first = (i == 0)
AddImplThreadRenderingStats(
timer, renderer_compositor, first, renderer_ref_stats)
AddImplThreadRenderingStats(
timer, browser_compositor, first, None)
renderer_main.EndSlice(timer.AdvanceAndGet(2, 4))
timer.Advance(2, 4)
browser.FinalizeImport()
renderer.FinalizeImport()
timeline_markers = timeline.FindTimelineMarkers(
['Action0', 'ActionA', 'ActionB', 'ActionA'])
records = [tir_module.TimelineInteractionRecord(e.name, e.start, e.end)
for e in timeline_markers]
stats = rendering_stats.RenderingStats(
renderer, browser, None, None, records)
# Compare rendering stats to reference.
self.assertEquals(stats.frame_timestamps,
browser_ref_stats.frame_timestamps)
self.assertEquals(stats.frame_times, browser_ref_stats.frame_times)
self.assertEquals(stats.approximated_pixel_percentages,
renderer_ref_stats.approximated_pixel_percentages)
self.assertEquals(stats.checkerboarded_pixel_percentages,
renderer_ref_stats.checkerboarded_pixel_percentages)
def testInputLatencyFromTimeline(self):
timeline = model.TimelineModel()
# Create a browser process and a renderer process.
browser = timeline.GetOrCreateProcess(pid=1)
browser_main = browser.GetOrCreateThread(tid=11)
renderer = timeline.GetOrCreateProcess(pid=2)
renderer_main = renderer.GetOrCreateThread(tid=21)
timer = MockTimer()
ref_latency = ReferenceInputLatencyStats()
# Create 10 input latency stats events for Action A.
renderer_main.BeginSlice('webkit.console', 'ActionA',
timer.AdvanceAndGet(2, 4), '')
for _ in xrange(0, 10):
AddInputLatencyStats(timer, browser_main, renderer_main, ref_latency)
renderer_main.EndSlice(timer.AdvanceAndGet(2, 4))
# Create 5 input latency stats events not within any action.
timer.Advance(2, 4)
for _ in xrange(0, 5):
AddInputLatencyStats(timer, browser_main, renderer_main, None)
# Create 10 input latency stats events for Action B.
renderer_main.BeginSlice('webkit.console', 'ActionB',
timer.AdvanceAndGet(2, 4), '')
for _ in xrange(0, 10):
AddInputLatencyStats(timer, browser_main, renderer_main, ref_latency)
renderer_main.EndSlice(timer.AdvanceAndGet(2, 4))
# Create 10 input latency stats events for Action A.
renderer_main.BeginSlice('webkit.console', 'ActionA',
timer.AdvanceAndGet(2, 4), '')
for _ in xrange(0, 10):
AddInputLatencyStats(timer, browser_main, renderer_main, ref_latency)
renderer_main.EndSlice(timer.AdvanceAndGet(2, 4))
browser.FinalizeImport()
renderer.FinalizeImport()
latency_events = []
timeline_markers = timeline.FindTimelineMarkers(
['ActionA', 'ActionB', 'ActionA'])
records = [tir_module.TimelineInteractionRecord(e.name, e.start, e.end)
for e in timeline_markers]
for record in records:
if record.GetBounds().is_empty:
continue
latency_events.extend(rendering_stats.GetLatencyEvents(
browser, record.GetBounds()))
self.assertEquals(latency_events, ref_latency.input_event)
event_latency_result = rendering_stats.ComputeEventLatencies(latency_events)
self.assertEquals(event_latency_result,
ref_latency.input_event_latency)
stats = rendering_stats.RenderingStats(
renderer, browser, None, None, records)
self.assertEquals(
perf_tests_helper.FlattenList(stats.input_event_latency),
[latency for name, latency in ref_latency.input_event_latency
if name != rendering_stats.MAIN_THREAD_SCROLL_UPDATE_EVENT_NAME])
self.assertEquals(
perf_tests_helper.FlattenList(stats.main_thread_scroll_latency),
[latency for name, latency in ref_latency.input_event_latency
if name == rendering_stats.MAIN_THREAD_SCROLL_UPDATE_EVENT_NAME])
self.assertEquals(
perf_tests_helper.FlattenList(stats.gesture_scroll_update_latency),
[latency for name, latency in ref_latency.input_event_latency
if name == rendering_stats.GESTURE_SCROLL_UPDATE_EVENT_NAME])
| [
"[email protected]"
] | |
249edc0e5fb7c5fae23b6d8c5752ffa60b404a5b | 60aa3bcf5ace0282210685e74ee8ed31debe1769 | /base/lib/uu.py | 6ee9f9acad9bccce569ad6152e8da80b8e368319 | [] | no_license | TheBreadGuy/sims4-ai-engine | 42afc79b8c02527353cc084117a4b8da900ebdb4 | 865212e841c716dc4364e0dba286f02af8d716e8 | refs/heads/master | 2023-03-16T00:57:45.672706 | 2016-05-01T17:26:01 | 2016-05-01T17:26:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,505 | py | import binascii
import os
import sys
__all__ = ['Error', 'encode', 'decode']
class Error(Exception):
__qualname__ = 'Error'
def encode(in_file, out_file, name=None, mode=None):
opened_files = []
try:
if in_file == '-':
in_file = sys.stdin.buffer
elif isinstance(in_file, str):
if name is None:
name = os.path.basename(in_file)
if mode is None:
try:
mode = os.stat(in_file).st_mode
except AttributeError:
pass
in_file = open(in_file, 'rb')
opened_files.append(in_file)
if out_file == '-':
out_file = sys.stdout.buffer
elif isinstance(out_file, str):
out_file = open(out_file, 'wb')
opened_files.append(out_file)
if name is None:
name = '-'
if mode is None:
mode = 438
out_file.write(('begin %o %s\n' % (mode & 511, name)).encode('ascii'))
data = in_file.read(45)
while len(data) > 0:
out_file.write(binascii.b2a_uu(data))
data = in_file.read(45)
out_file.write(b' \nend\n')
finally:
for f in opened_files:
f.close()
def decode(in_file, out_file=None, mode=None, quiet=False):
opened_files = []
if in_file == '-':
in_file = sys.stdin.buffer
elif isinstance(in_file, str):
in_file = open(in_file, 'rb')
opened_files.append(in_file)
try:
while True:
hdr = in_file.readline()
if not hdr:
raise Error('No valid begin line found in input file')
if not hdr.startswith(b'begin'):
continue
hdrfields = hdr.split(b' ', 2)
if len(hdrfields) == 3 and hdrfields[0] == b'begin':
try:
int(hdrfields[1], 8)
break
except ValueError:
pass
if out_file is None:
out_file = hdrfields[2].rstrip(b' \t\r\n\x0c').decode('ascii')
if os.path.exists(out_file):
raise Error('Cannot overwrite existing file: %s' % out_file)
if mode is None:
mode = int(hdrfields[1], 8)
if out_file == '-':
out_file = sys.stdout.buffer
elif isinstance(out_file, str):
fp = open(out_file, 'wb')
            try:
                os.chmod(out_file, mode)  # apply the mode recorded on the 'begin' line
            except AttributeError:
                pass
out_file = fp
opened_files.append(out_file)
        s = in_file.readline()
        while s and s.strip(b' \t\r\n\x0c') != b'end':
            try:
                data = binascii.a2b_uu(s)
            except binascii.Error as v:
                # Workaround for broken uuencoders: derive the byte count
                # from the length character and decode only that many bytes.
                nbytes = (((s[0] - 32) & 63) * 4 + 5) // 3
                data = binascii.a2b_uu(s[:nbytes])
                if not quiet:
                    sys.stderr.write('Warning: %s\n' % v)
            out_file.write(data)
            s = in_file.readline()
        if not s:
            raise Error('Truncated input file')
finally:
for f in opened_files:
f.close()
def test():
import optparse
parser = optparse.OptionParser(usage='usage: %prog [-d] [-t] [input [output]]')
parser.add_option('-d', '--decode', dest='decode', help='Decode (instead of encode)?', default=False, action='store_true')
parser.add_option('-t', '--text', dest='text', help='data is text, encoded format unix-compatible text?', default=False, action='store_true')
(options, args) = parser.parse_args()
if len(args) > 2:
parser.error('incorrect number of arguments')
sys.exit(1)
input = sys.stdin.buffer
output = sys.stdout.buffer
if len(args) > 0:
input = args[0]
if len(args) > 1:
output = args[1]
if options.decode:
if options.text:
if isinstance(output, str):
output = open(output, 'wb')
else:
print(sys.argv[0], ': cannot do -t to stdout')
sys.exit(1)
decode(input, output)
else:
if options.text:
if isinstance(input, str):
input = open(input, 'rb')
else:
print(sys.argv[0], ': cannot do -t from stdin')
sys.exit(1)
encode(input, output)
if __name__ == '__main__':
test()
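
# Minimal usage sketch (assuming 'payload.bin' exists):
#   encode('payload.bin', 'payload.uu')       # uuencode binary -> text
#   decode('payload.uu', 'payload_copy.bin')  # restore the original bytes
# Both helpers also accept '-' to mean stdin/stdout, mirroring the classic
# uuencode/uudecode command-line tools.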
| [
"[email protected]"
] | |
92df5f0ae14e23c0600fd57b407368f340103547 | 4b431704fa58900a7b848aada3d10949be76ba65 | /student/views.py | 8ed0983ae38fa181f3a834d1c67585b80d645e7b | [] | no_license | priyankaonly1/Session_project | 1b5e48a77753cfa87c93fff7463d758cf0f1dcd8 | 41529270c0390627824b6de1aed6fdf4bb75a95c | refs/heads/main | 2023-06-03T04:21:38.411008 | 2021-06-17T10:32:13 | 2021-06-17T10:32:13 | 377,792,361 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 677 | py | from django.shortcuts import render
# Create your views here.
def setsession(request):
request.session['name'] = 'sonam'
request.session['lname'] = 'Jha'
return render(request, 'student/setsession.html')
# def delsession(request):
# if 'name' in request.session:
# del request.session['name']
# return render(request, 'student/delsession.html')
def getsession(request):
name = request.session.get('name')
lname = request.session.get('lname')
return render(request, 'student/getsession.html', {'name':name, 'lname':lname})
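
# Note: delsession() below uses request.session.flush(), which deletes the
# whole session and rotates the session key; the commented-out variant above
# (`del request.session['name']`) removes a single key and keeps the session.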
def delsession(request):
request.session.flush()
return render(request, 'student/delsession.html') | [
"[email protected]"
] | |
5f78dc2017f0e9588d5ed2188d02785b189ec637 | 0bb474290e13814c2498c086780da5096453da05 | /abc133/E/main.py | 20212d42d80ef6a27ba00b0743cbd41a23b91777 | [] | no_license | ddtkra/atcoder | 49b6205bf1bf6a50106b4ae94d2206a324f278e0 | eb57c144b5c2dbdd4abc432ecd8b1b3386244e30 | refs/heads/master | 2022-01-25T15:38:10.415959 | 2020-03-18T09:22:08 | 2020-03-18T09:22:08 | 208,825,724 | 1 | 0 | null | 2022-01-21T20:10:20 | 2019-09-16T14:51:01 | Python | UTF-8 | Python | false | false | 790 | py | #!/usr/bin/env python3
import sys
MOD = 1000000007 # type: int
def solve(N: int, K: int, a: "List[int]", b: "List[int]"):
return
# Generated by 1.1.4 https://github.com/kyuridenamida/atcoder-tools (tips: You use the default template now. You can remove this line by using your custom template)
def main():
def iterate_tokens():
for line in sys.stdin:
for word in line.split():
yield word
tokens = iterate_tokens()
N = int(next(tokens)) # type: int
K = int(next(tokens)) # type: int
a = [int()] * (N-1) # type: "List[int]"
b = [int()] * (N-1) # type: "List[int]"
for i in range(N-1):
a[i] = int(next(tokens))
b[i] = int(next(tokens))
solve(N, K, a, b)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
ee5638f427e266afc5d5855606c34b7c76ac09b2 | c68d36ed1d36ede96a5a22e1052c73b8515feaae | /HyperNews Portal/task/hypernews/news/views.py | 3cefca44c6ba79a74ca61d32697ea15338fb602a | [] | no_license | wangpengda1210/HyperNews-Portal | dd531889666794c11158dc92a9dcdb03293d409b | 436e257dd315999187650dedf3dce2ff12267a77 | refs/heads/main | 2023-03-03T03:22:59.644304 | 2021-02-09T00:19:23 | 2021-02-09T00:19:23 | 336,978,367 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,124 | py | from django.shortcuts import render
from django.views import View
from django.http import Http404, QueryDict
from django.shortcuts import redirect
import datetime
from collections import defaultdict
import json
from hypernews.settings import NEWS_JSON_PATH
with open(NEWS_JSON_PATH, 'r') as f:
news_list = json.load(f)
for news in news_list:
news['created'] = datetime.datetime.strptime(news['created'], '%Y-%m-%d %H:%M:%S')
# Create your views here.
class IndexView(View):
def get(self, request, *args, **kwargs):
return redirect("news/")
class NewsContentView(View):
def get(self, request, link, *args, **kwargs):
for news in news_list:
if int(link) == news['link']:
return render(request, 'news/news_content.html',
context={
'title': news['title'],
'created': news['created'],
'text': news['text']
})
raise Http404
class AllNewsView(View):
def get(self, request, *args, **kwargs):
query_dict = request.GET
keyword = query_dict['q'] if 'q' in query_dict else ''
        times = defaultdict(list)  # group matching news items by creation date
        for news in news_list:
            if keyword in news['title']:
                times[news['created'].date()].append(news)
time_dict = [{'created': key, 'value': value} for key, value in times.items()]
return render(request, 'news/news_all.html',
context={'time_dict': time_dict})
class CreateNewsView(View):
def get(self, request, *args, **kwargs):
return render(request, 'news/news_create.html')
def post(self, request, *args, **kwargs):
title = request.POST.get('title')
text = request.POST.get('text')
created = datetime.datetime.now()
news_list.append({'title': title,
'text': text,
'created': created,
'link': len(news_list) + 1})
return redirect('/news/')
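
# Note: news_list lives in process memory, so items appended by CreateNewsView
# vanish on restart; only the JSON file at NEWS_JSON_PATH persists.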
| [
"[email protected]"
] | |
85fd333d2b6f43110d9c7b7171b122dfcdc0a466 | e19527d95fb2105a09bc1435146a1148bfe01476 | /utils/general.py | 122f37a50fc4e4ba87c3765b807a74616dfeb9fd | [] | no_license | shuaih7/ishop_ocr | 7da1bc8f3f764853d7c0151e784b821cc3d4b58c | 57e80d336f1362adefeb57a13fa4ca4d2cfd265f | refs/heads/main | 2023-02-22T15:50:36.294246 | 2021-01-28T03:46:36 | 2021-01-28T03:46:36 | 329,258,528 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,469 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
'''
Created on 01.24.2021
Created on 01.24.2021
Author: [email protected]
'''
import os
import cv2
import sys
import numpy as np
abs_path = os.path.abspath(os.path.dirname(__file__))
sys.path.append(abs_path)
def draw_results(image, results, isClosed=True, size=0.6, color=(0,255,0), thickness=3):
font = cv2.FONT_HERSHEY_SIMPLEX
for result in results:
line = np.array(result[0], dtype=np.int32)
pt = (int(line[0][0]), int(line[0][1]))
line = line.reshape((-1,1,2))
image = cv2.polylines(image, [line], isClosed=isClosed, color=color, thickness=thickness)
image = cv2.putText(image, result[1][0], pt, fontFace=font,
fontScale=size, color=color, thickness=max(1,thickness-1))
return image
def draw_polylines(image, polylines, texts=None, isClosed=True, size=0.6, color=(0,255,0), thickness=3):
font = cv2.FONT_HERSHEY_SIMPLEX
polylines = np.array(polylines, dtype=np.int32)#.reshape((-1,1,2))
for i, line in enumerate(polylines):
pt = (int(line[0][0]), int(line[0][1]))
line = line.reshape((-1,1,2))
image = cv2.polylines(image, [line], isClosed=isClosed, color=color, thickness=thickness)
if texts is not None:
image = cv2.putText(image, texts[i], pt, fontFace=font,
fontScale=size, color=color, thickness=max(1,thickness-1))
return image
def draw_texts(image, texts, positions, size=0.6, color=(0,255,0), thickness=3):
font = cv2.FONT_HERSHEY_SIMPLEX
for pos, text in zip(positions, texts):
pt = (int(pos[0]), int(pos[1]))
image = cv2.putText(image, text, pt, fontFace=font, fontScale=size, color=color, thickness=max(1,thickness-1))
return image
def draw_boxes(image, boxes=[], scale=(1.0,1.0), color=(255,0,0), thickness=2):
if len(boxes) == 0: return image
for box in boxes:
start_point = (int(box[0]*scale[1]), int(box[1]*scale[0]))
end_point = (int(box[2]*scale[1]), int(box[3]*scale[0]))
image = cv2.rectangle(image, start_point, end_point, color=color, thickness=thickness)
return image
def create_background(size, seed=0):
image = np.ones(size, dtype=np.uint8) * seed
save_dir = os.path.join(abs_path, "icon")
save_name = os.path.join(save_dir, "background.jpg")
cv2.imwrite(save_name, image)
def transparent_background(img_file, save_name, thresh=10):
image = cv2.imread(img_file, cv2.IMREAD_COLOR)
image_gray = cv2.imread(img_file, cv2.IMREAD_GRAYSCALE)
trans_image = np.zeros((image.shape[0],image.shape[1],4), dtype=np.uint8)
alpha = np.ones(image_gray.shape, dtype=np.uint8) * 255
alpha[image_gray>(255-thresh)] = 0
trans_image[:,:,:3] = image
trans_image[:,:,-1] = alpha
cv2.imwrite(save_name, trans_image)
print("Done")
def resize_image(img_file, save_name, size=(100,100)):
image = cv2.imread(img_file, -1)
image = cv2.resize(image, size, interpolation=cv2.INTER_CUBIC)
cv2.imwrite(save_name, image)
print("Done")
if __name__ == "__main__":
#create_background((352,352))
img_file = r"C:\Users\shuai\Documents\GitHub\FabricUI\FabricUI\icon\folder.jpg"
save_name = r"C:\Users\shuai\Documents\GitHub\FabricUI\FabricUI\icon\folder_icon.png"
#resize_image(img_file, save_name)
transparent_background(img_file, save_name)
| [
"[email protected]"
] | |
df0df3a114e599c36a4d9a1fef81af871183c836 | c82a04b8aa975b1596e48e13deaf5f11a2ae94ba | /test.py | 99b9847323d2a912600184ba1f913a0369ba9259 | [
"MIT"
] | permissive | budsus/CodeSearchNet | 466e6d06b8b0f08f418906151af6018cc7253ca1 | d79d0fde2569e4ed7ab0454e3b019fba3d6c7b90 | refs/heads/master | 2023-03-17T07:48:40.451414 | 2019-12-12T13:08:47 | 2019-12-12T13:08:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 432 | py | import torch
x = torch.randn(10, 5)
print(x)
labels = torch.LongTensor([1,2,3,3,0,0,0,0,0,0])
n_classes = x.shape[-1]
one_hot = torch.nn.functional.one_hot(labels, n_classes)
print(one_hot)
print(x * one_hot)
compare = (x * one_hot).sum(-1).unsqueeze(-1).repeat(1, n_classes)
print(compare)
compared_scores = x >= compare
print(compared_scores)
rr = 1 / compared_scores.float().sum(-1)
print(rr)
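# Each row of `compare` repeats that row's true-label score, so summing
# `compared_scores` along the last axis counts how many class scores are
# >= the true score -- i.e. the rank of the true label. rr = 1/rank, and
# the mean over rows below is the mean reciprocal rank (MRR), which is
# 1.0 exactly when every true label has the highest score.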
mrr = rr.mean()
print(mrr) | [
"[email protected]"
] | |
0a607ad298916549426f6f843ef6ce749fadc185 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03610/s415894874.py | 9a33970973344ddf348592e3209a4248803c0737 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 92 | py | s = list(input())
t = []
for i in range(0, len(s), 2):
t.append(s[i])
print(''.join(t))
| [
"[email protected]"
] | |
3f1e7c2be5e4aad81dc3c4cc8973865624a09628 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03645/s021155194.py | 844491319f19594eb094e23da7af0f647cb6eb7c | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 370 | py | n, m = map(int, input().split())
root_map = dict()
root_map[1] = set()
root_map[n] = set()
for i in range(m):
a, b = map(int, input().split())
if a == 1 or a == n:
root_map[a].add(b)
if b == 1 or b == n:
root_map[b].add(a)
for i in root_map[1]:
if i in root_map[n]:
print("POSSIBLE")
break
else:
print("IMPOSSIBLE")
| [
"[email protected]"
] | |
b8ea6089fbf982c699ef0f102f4a0842d32f6a53 | 24caa6710105a060fab2e17147e6d56609939011 | /03-Python_Data_Science_Toolbox_(Part_1)/02-Default_arguments,_variable-length_arguments_and_scope/01-Pop_quiz_on_understanding_scope.py | 359f983d8c5327a8ca9e09fa52071fdeceb8fece | [] | no_license | inverseundefined/DataCamp | 99607022ad3f899d7681ad1f70fcedab290e269a | 7226b6b6f41888c3610a884db9a226e013d37e56 | refs/heads/master | 2022-01-10T00:53:21.714908 | 2019-07-24T13:27:49 | 2019-07-24T13:27:49 | 198,280,648 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,022 | py | '''
Pop quiz on understanding scope
In this exercise, you will practice what you've learned about scope in functions. The variable num has been predefined as 5, alongside the following function definitions:
def func1():
num = 3
print(num)
def func2():
global num
double_num = num * 2
num = 6
print(double_num)
Try calling func1() and func2() in the shell, then answer the following questions:
What are the values printed out when you call func1() and func2()?
What is the value of num in the global scope after calling func1() and func2()?
Instructions
50 XP
Possible Answers
func1() prints out 3, func2() prints out 6, and the value of num in the global scope is 3.
func1() prints out 3, func2() prints out 3, and the value of num in the global scope is 3.
func1() prints out 3, func2() prints out 10, and the value of num in the global scope is 10.
-> func1() prints out 3, func2() prints out 10, and the value of num in the global scope is 6.
Take Hint (-15 XP)
'''
| [
"[email protected]"
] | |
24524f83587d385ff97aec5e49d9379dfb3f883b | b8085ef607da70023214f105eb27bdbc713e596f | /Day2/Slots.py | db6ff0a4f8e7383e149a01736bdb559e14f236c2 | [] | no_license | artheadsweden/python_adv_april19 | 893c9ec76e8505a580439b7a2fd7aa2776503c77 | 04eecd25d4a291dddd608d94968b217fed7b88d8 | refs/heads/master | 2020-05-07T13:41:15.545033 | 2019-04-11T18:47:22 | 2019-04-11T18:47:22 | 180,559,955 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 682 | py | from pympler import asizeof
class NoSlots:
def __init__(self, name, identifier):
self.name = name
self.identifier = identifier
class WithSlots:
__slots__ = ['name', 'identifier']
def __init__(self, name, identifier):
self.name = name
self.identifier = identifier
def main():
no_slots = [NoSlots(str(n), n) for n in range(100_000)]
size1 = round(asizeof.asizeof(no_slots)/1024/1024, 2)
print("No slots", size1, "mb")
with_slots = [WithSlots(str(n), n) for n in range(100_000)]
size2 = round(asizeof.asizeof(with_slots)/1024/1024, 2)
print("With slots", size2, "mb")
if __name__ == '__main__':
main()
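
# With __slots__, instances keep their attributes in fixed slots instead of a
# per-instance __dict__, so the second measurement above is typically several
# times smaller than the first (exact figures vary by platform and version).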
| [
"[email protected]"
] | |
8e8665d33a8f3df1f93560af494176e055b876a4 | 81207a57ae84b2b786b373d9eaa89e04ca662473 | /scripts/update_index.py | b4fa72f135adfbde5960f9e2c3f51b20f42df2a6 | [
"MIT"
] | permissive | ncarkaci/acoustid-server | 9a9187db34c25a4eedbe297564f9d13f05b9c907 | bb0098016d210be8d04ee64d9b42ed80bb947280 | refs/heads/master | 2020-07-22T18:25:46.258746 | 2019-09-05T11:05:01 | 2019-09-05T11:05:01 | 207,288,602 | 1 | 0 | MIT | 2019-09-09T10:58:51 | 2019-09-09T10:58:51 | null | UTF-8 | Python | false | false | 421 | py | #!/usr/bin/env python
# Copyright (C) 2011-2012 Lukas Lalinsky
# Distributed under the MIT license, see the LICENSE file for details.
from contextlib import closing
from acoustid.script import run_script
from acoustid.data.fingerprint import update_fingerprint_index
def main(script, opts, args):
with closing(script.engine.connect()) as db:
update_fingerprint_index(db, script.index)
run_script(main)
| [
"[email protected]"
] | |
47b13cbf68cba49d07c499ee6026f47fc228aece | 353def93fa77384ee3a5e3de98cfed318c480634 | /.history/week02/1/proxy/proxy/spiders/maoyan_20200705155519.py | 5832d2f7ffe5ee7f1c5b3c601dddf5c249d1eb51 | [] | no_license | ydbB/Python001-class01 | d680abc3ea1ccaeb610751e3488421417d381156 | ad80037ccfc68d39125fa94d2747ab7394ac1be8 | refs/heads/master | 2022-11-25T11:27:45.077139 | 2020-07-19T12:35:12 | 2020-07-19T12:35:12 | 272,783,233 | 0 | 0 | null | 2020-06-16T18:28:15 | 2020-06-16T18:28:15 | null | UTF-8 | Python | false | false | 1,400 | py | import scrapy
from proxy.items import ProxyItem
import lxml.etree
class MaoyanSpider(scrapy.Spider):
name = 'maoyan'
allowed_domains = ['maoyan.com']
start_urls = ['http://maoyan.com/']
    # assumption: the original headers dict was truncated in this draft; a
    # minimal User-Agent is filled in so the site does not reject requests
    header = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64)'}
def start_requests(self):
url = f'https://maoyan.com/board/4'
yield scrapy.Request(url=url,headers=self.header,callback=self.parse)
    def parse(self, response):
        selector = lxml.etree.HTML(response.text)
        for i in range(1, 11):  # XPath positions are 1-based; the board lists 10 films
            item = ProxyItem()
            anchor = selector.xpath(f'//*[@id="app"]/div/div/div[1]/dl/dd[{i}]/div/div/div[1]/p[1]/a')[0]
            link = response.urljoin(anchor.get('href'))  # hrefs on the board page are relative
            time = selector.xpath(f'//*[@id="app"]/div/div/div[1]/dl/dd[{i}]/div/div/div[1]/p[3]')[0].text
            item['films_name'] = anchor.get('title')
            item['release_time'] = time
            print(link)
            yield scrapy.Request(url=link, headers=self.header, meta={'item': item}, callback=self.parse1)
    def parse1(self, response):
        item = response.meta['item']
        selector = lxml.etree.HTML(response.text)
        film_type = selector.xpath('/html/body/div[3]/div/div[2]/div[1]/ul/li[1]')[0].text.replace('\n', ' ')
        print(film_type)
        item['films_type'] = film_type
        print(item)
        yield item
| [
"[email protected]"
] | |
85658af6a7b79e5450b577beccbc06522bd0f00d | 25c1bba5c9954ab757fed0ce3236cd6b3bd50c59 | /BUILD.cr.py | 47ccf5633ca37f891f4761834ecae7183d4632fb | [] | no_license | craftr-build/craftr-chaiscript | c09e32e7ddd72c75d482cd3b627f2183cceaf379 | 09e6434016915c9745e3c841076ad193cdebb9dd | refs/heads/master | 2021-09-04T17:35:54.491031 | 2018-01-20T14:31:21 | 2018-01-20T14:31:21 | 118,172,093 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,092 | py |
import craftr, {fmt, glob, path} from 'craftr'
import cxx from '@craftr/cxx'
source_dir = craftr.options.get('chaiscript.source_dir')
gitref = craftr.options.get('chaiscript.gitref', 'v6.0.0')
if not source_dir:
url = fmt('https://github.com/ChaiScript/ChaiScript/archive/{gitref}.zip')
source_dir = path.join(craftr.get_source_archive(url), 'ChaiScript-' + gitref.lstrip('v'))
defines = []
if craftr.options.get('chaiscript.no_threads', True):
defines.append('CHAISCRIPT_NO_THREADS')
if craftr.options.get('chaiscript.no_protect_dividebyzero', False):
defines.append('CHAISCRIPT_NO_PROTECT_DIVIDEBYZERO')
cxx.prebuilt(
name = 'chaiscript',
includes = [path.join(source_dir, 'include')],
defines = defines
)
cxx.library(
name = 'chaiscript-static',
public_deps = [':chaiscript'],
explicit = True,
srcs = glob('static_libs/*.cpp', parent=source_dir),
cpp_std = 'c++11',
options = dict(
msvc_compile_flags = ['/bigobj']
)
)
cxx.binary(
name = 'main',
deps = [':chaiscript-static'],
explicit = True,
srcs = [path.join(source_dir, 'src/main.cpp')]
)
| [
"[email protected]"
] | |
8e830639fc2ef0cc682f1d742ee537d47985f00f | a643c2ed78b48e4cacf140776fbedd0191881e18 | /samples/openapi3/client/3_0_3_unit_test/python/unit_test_api/paths/request_body_post_array_type_matches_arrays_request_body/post.py | 0ea29561b94824f85ffa27f8c2c1bf9e99c5e0c7 | [
"Apache-2.0"
] | permissive | padamstx/openapi-generator | 5ae41f68a4f9349d76c1db81b9ff82e18e5b4b7c | 00604aff594864447c134ddb1982565136e27857 | refs/heads/master | 2023-03-08T20:11:36.318959 | 2022-09-28T16:34:17 | 2022-09-28T16:34:17 | 160,528,958 | 0 | 1 | Apache-2.0 | 2023-02-24T16:13:11 | 2018-12-05T14:17:50 | Java | UTF-8 | Python | false | false | 5,206 | py | # coding: utf-8
"""
Generated by: https://openapi-generator.tech
"""
from dataclasses import dataclass
import typing_extensions
import urllib3
from urllib3._collections import HTTPHeaderDict
from unit_test_api import api_client, exceptions
from datetime import date, datetime # noqa: F401
import decimal # noqa: F401
import functools # noqa: F401
import io # noqa: F401
import re # noqa: F401
import typing # noqa: F401
import typing_extensions # noqa: F401
import uuid # noqa: F401
import frozendict # noqa: F401
from unit_test_api import schemas # noqa: F401
from unit_test_api.model.array_type_matches_arrays import ArrayTypeMatchesArrays
from . import path
# body param
SchemaForRequestBodyApplicationJson = ArrayTypeMatchesArrays
request_body_array_type_matches_arrays = api_client.RequestBody(
content={
'application/json': api_client.MediaType(
schema=SchemaForRequestBodyApplicationJson),
},
required=True,
)
@dataclass
class ApiResponseFor200(api_client.ApiResponse):
response: urllib3.HTTPResponse
body: schemas.Unset = schemas.unset
headers: schemas.Unset = schemas.unset
_response_for_200 = api_client.OpenApiResponse(
response_cls=ApiResponseFor200,
)
_status_code_to_response = {
'200': _response_for_200,
}
class BaseApi(api_client.Api):
def _post_array_type_matches_arrays_request_body_oapg(
self: api_client.Api,
body: typing.Union[SchemaForRequestBodyApplicationJson, ],
content_type: str = 'application/json',
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: bool = False,
) -> typing.Union[
ApiResponseFor200,
api_client.ApiResponseWithoutDeserialization
]:
"""
:param skip_deserialization: If true then api_response.response will be set but
api_response.body and api_response.headers will not be deserialized into schema
class instances
"""
used_path = path.value
_headers = HTTPHeaderDict()
# TODO add cookie handling
if body is schemas.unset:
raise exceptions.ApiValueError(
'The required body parameter has an invalid value of: unset. Set a valid value instead')
_fields = None
_body = None
serialized_data = request_body_array_type_matches_arrays.serialize(body, content_type)
_headers.add('Content-Type', content_type)
if 'fields' in serialized_data:
_fields = serialized_data['fields']
elif 'body' in serialized_data:
_body = serialized_data['body']
response = self.api_client.call_api(
resource_path=used_path,
method='post'.upper(),
headers=_headers,
fields=_fields,
body=_body,
stream=stream,
timeout=timeout,
)
if skip_deserialization:
api_response = api_client.ApiResponseWithoutDeserialization(response=response)
else:
response_for_status = _status_code_to_response.get(str(response.status))
if response_for_status:
api_response = response_for_status.deserialize(response, self.api_client.configuration)
else:
api_response = api_client.ApiResponseWithoutDeserialization(response=response)
if not 200 <= response.status <= 299:
raise exceptions.ApiException(api_response=api_response)
return api_response
class PostArrayTypeMatchesArraysRequestBody(BaseApi):
# this class is used by api classes that refer to endpoints with operationId fn names
def post_array_type_matches_arrays_request_body(
self: BaseApi,
body: typing.Union[SchemaForRequestBodyApplicationJson, ],
content_type: str = 'application/json',
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: bool = False,
) -> typing.Union[
ApiResponseFor200,
api_client.ApiResponseWithoutDeserialization
]:
return self._post_array_type_matches_arrays_request_body_oapg(
body=body,
content_type=content_type,
stream=stream,
timeout=timeout,
skip_deserialization=skip_deserialization
)


class ApiForpost(BaseApi):
# this class is used by api classes that refer to endpoints by path and http method names
def post(
self: BaseApi,
body: typing.Union[SchemaForRequestBodyApplicationJson, ],
content_type: str = 'application/json',
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: bool = False,
) -> typing.Union[
ApiResponseFor200,
api_client.ApiResponseWithoutDeserialization
]:
return self._post_array_type_matches_arrays_request_body_oapg(
body=body,
content_type=content_type,
stream=stream,
timeout=timeout,
skip_deserialization=skip_deserialization
)
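# Minimal usage sketch (an illustration, not generator output): the
# `Configuration` class location and the `ApiClient`/`Api` constructor
# signatures below are assumed from the standard template of this generator;
# the host URL is a placeholder.
if __name__ == "__main__":
    from unit_test_api.configuration import Configuration  # assumed location
    _config = Configuration(host="http://localhost")  # placeholder host
    _client = api_client.ApiClient(configuration=_config)
    _api = ApiForpost(_client)
    # The body must validate against ArrayTypeMatchesArrays, i.e. be an array.
    _api_response = _api.post(body=[1, 2, 3])
    print(_api_response.response.status)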
| [
"[email protected]"
] | |
f32c32efd6824655e6ea2871c24a9a2b562f8933 | eb3683f9127befb9ef96d8eb801206cf7b84d6a7 | /stypy/sgmc/sgmc_cache/distutils/emxccompiler.py | 62c23b54b675b3edfdfdaf8c2e59d042e5b9440b | [] | no_license | ComputationalReflection/stypy | 61ec27333a12f76ac055d13f8969d3e0de172f88 | be66ae846c82ac40ba7b48f9880d6e3990681a5b | refs/heads/master | 2021-05-13T18:24:29.005894 | 2018-06-14T15:42:50 | 2018-06-14T15:42:50 | 116,855,812 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 186,190 | py |
# -*- coding: utf-8 -*-
"""
ORIGINAL PROGRAM SOURCE CODE:
1: '''distutils.emxccompiler
2:
3: Provides the EMXCCompiler class, a subclass of UnixCCompiler that
4: handles the EMX port of the GNU C compiler to OS/2.
5: '''
6:
7: # issues:
8: #
9: # * OS/2 insists that DLLs can have names no longer than 8 characters
10: # We put export_symbols in a def-file, as though the DLL can have
11: #   an arbitrarily long name, but truncate the output filename.
12: #
13: # * only use OMF objects and use LINK386 as the linker (-Zomf)
14: #
15: # * always build for multithreading (-Zmt) as the accompanying OS/2 port
16: # of Python is only distributed with threads enabled.
17: #
18: # tested configurations:
19: #
20: # * EMX gcc 2.81/EMX 0.9d fix03
21:
22: __revision__ = "$Id$"
23:
24: import os,sys,copy
25: from distutils.ccompiler import gen_preprocess_options, gen_lib_options
26: from distutils.unixccompiler import UnixCCompiler
27: from distutils.file_util import write_file
28: from distutils.errors import DistutilsExecError, CompileError, UnknownFileError
29: from distutils import log
30:
31: class EMXCCompiler (UnixCCompiler):
32:
33: compiler_type = 'emx'
34: obj_extension = ".obj"
35: static_lib_extension = ".lib"
36: shared_lib_extension = ".dll"
37: static_lib_format = "%s%s"
38: shared_lib_format = "%s%s"
39: res_extension = ".res" # compiled resource file
40: exe_extension = ".exe"
41:
42: def __init__ (self,
43: verbose=0,
44: dry_run=0,
45: force=0):
46:
47: UnixCCompiler.__init__ (self, verbose, dry_run, force)
48:
49: (status, details) = check_config_h()
50: self.debug_print("Python's GCC status: %s (details: %s)" %
51: (status, details))
52: if status is not CONFIG_H_OK:
53: self.warn(
54: "Python's pyconfig.h doesn't seem to support your compiler. " +
55: ("Reason: %s." % details) +
56: "Compiling may fail because of undefined preprocessor macros.")
57:
58: (self.gcc_version, self.ld_version) = \
59: get_versions()
60: self.debug_print(self.compiler_type + ": gcc %s, ld %s\n" %
61: (self.gcc_version,
62: self.ld_version) )
63:
64: # Hard-code GCC because that's what this is all about.
65: # XXX optimization, warnings etc. should be customizable.
66: self.set_executables(compiler='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall',
67: compiler_so='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall',
68: linker_exe='gcc -Zomf -Zmt -Zcrtdll',
69: linker_so='gcc -Zomf -Zmt -Zcrtdll -Zdll')
70:
71: # want the gcc library statically linked (so that we don't have
72: # to distribute a version dependent on the compiler we have)
73: self.dll_libraries=["gcc"]
74:
75: # __init__ ()
76:
77: def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
78: if ext == '.rc':
79: # gcc requires '.rc' compiled to binary ('.res') files !!!
80: try:
81: self.spawn(["rc", "-r", src])
82: except DistutilsExecError, msg:
83: raise CompileError, msg
84: else: # for other files use the C-compiler
85: try:
86: self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
87: extra_postargs)
88: except DistutilsExecError, msg:
89: raise CompileError, msg
90:
91: def link (self,
92: target_desc,
93: objects,
94: output_filename,
95: output_dir=None,
96: libraries=None,
97: library_dirs=None,
98: runtime_library_dirs=None,
99: export_symbols=None,
100: debug=0,
101: extra_preargs=None,
102: extra_postargs=None,
103: build_temp=None,
104: target_lang=None):
105:
106: # use separate copies, so we can modify the lists
107: extra_preargs = copy.copy(extra_preargs or [])
108: libraries = copy.copy(libraries or [])
109: objects = copy.copy(objects or [])
110:
111: # Additional libraries
112: libraries.extend(self.dll_libraries)
113:
114: # handle export symbols by creating a def-file
115: # with executables this only works with gcc/ld as linker
116: if ((export_symbols is not None) and
117: (target_desc != self.EXECUTABLE)):
118: # (The linker doesn't do anything if output is up-to-date.
120: # So it would probably be better to check if we really need this,
120: # but for this we had to insert some unchanged parts of
121: # UnixCCompiler, and this is not what we want.)
122:
123: # we want to put some files in the same directory as the
124: # object files are, build_temp doesn't help much
125: # where are the object files
126: temp_dir = os.path.dirname(objects[0])
127: # name of dll to give the helper files the same base name
128: (dll_name, dll_extension) = os.path.splitext(
129: os.path.basename(output_filename))
130:
131: # generate the filenames for these files
132: def_file = os.path.join(temp_dir, dll_name + ".def")
133:
134: # Generate .def file
135: contents = [
136: "LIBRARY %s INITINSTANCE TERMINSTANCE" % \
137: os.path.splitext(os.path.basename(output_filename))[0],
138: "DATA MULTIPLE NONSHARED",
139: "EXPORTS"]
140: for sym in export_symbols:
141: contents.append(' "%s"' % sym)
142: self.execute(write_file, (def_file, contents),
143: "writing %s" % def_file)
144:
145: # next add options for def-file and to creating import libraries
146: # for gcc/ld the def-file is specified as any other object files
147: objects.append(def_file)
148:
149: #end: if ((export_symbols is not None) and
150: # (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):
151:
152: # whoever wants symbols and a many-times-larger output file
153: # should explicitly switch the debug mode on
154: # otherwise we let dllwrap/ld strip the output file
155: # (On my machine: 10KB < stripped_file < ??100KB
156: # unstripped_file = stripped_file + XXX KB
157: # ( XXX=254 for a typical python extension))
158: if not debug:
159: extra_preargs.append("-s")
160:
161: UnixCCompiler.link(self,
162: target_desc,
163: objects,
164: output_filename,
165: output_dir,
166: libraries,
167: library_dirs,
168: runtime_library_dirs,
169: None, # export_symbols, we do this in our def-file
170: debug,
171: extra_preargs,
172: extra_postargs,
173: build_temp,
174: target_lang)
175:
176: # link ()
177:
178: # -- Miscellaneous methods -----------------------------------------
179:
180: # override the object_filenames method from CCompiler to
181: # support rc and res-files
182: def object_filenames (self,
183: source_filenames,
184: strip_dir=0,
185: output_dir=''):
186: if output_dir is None: output_dir = ''
187: obj_names = []
188: for src_name in source_filenames:
189: # use normcase to make sure '.rc' is really '.rc' and not '.RC'
190: (base, ext) = os.path.splitext (os.path.normcase(src_name))
191: if ext not in (self.src_extensions + ['.rc']):
192: raise UnknownFileError, \
193: "unknown file type '%s' (from '%s')" % \
194: (ext, src_name)
195: if strip_dir:
196: base = os.path.basename (base)
197: if ext == '.rc':
198: # these need to be compiled to object files
199: obj_names.append (os.path.join (output_dir,
200: base + self.res_extension))
201: else:
202: obj_names.append (os.path.join (output_dir,
203: base + self.obj_extension))
204: return obj_names
205:
206: # object_filenames ()
207:
208: # override the find_library_file method from UnixCCompiler
209: # to deal with file naming/searching differences
210: def find_library_file(self, dirs, lib, debug=0):
211: shortlib = '%s.lib' % lib
212: longlib = 'lib%s.lib' % lib # this form very rare
213:
214: # get EMX's default library directory search path
215: try:
216: emx_dirs = os.environ['LIBRARY_PATH'].split(';')
217: except KeyError:
218: emx_dirs = []
219:
220: for dir in dirs + emx_dirs:
221: shortlibp = os.path.join(dir, shortlib)
222: longlibp = os.path.join(dir, longlib)
223: if os.path.exists(shortlibp):
224: return shortlibp
225: elif os.path.exists(longlibp):
226: return longlibp
227:
228: # Oops, didn't find it in *any* of 'dirs'
229: return None
230:
231: # class EMXCCompiler
232:
233:
234: # Because these compilers aren't configured in Python's pyconfig.h file by
235: # default, we should at least warn the user if he is using a unmodified
236: # version.
237:
238: CONFIG_H_OK = "ok"
239: CONFIG_H_NOTOK = "not ok"
240: CONFIG_H_UNCERTAIN = "uncertain"
241:
242: def check_config_h():
243:
244: '''Check if the current Python installation (specifically, pyconfig.h)
245: appears amenable to building extensions with GCC. Returns a tuple
246: (status, details), where 'status' is one of the following constants:
247: CONFIG_H_OK
248: all is well, go ahead and compile
249: CONFIG_H_NOTOK
250: doesn't look good
251: CONFIG_H_UNCERTAIN
252: not sure -- unable to read pyconfig.h
253: 'details' is a human-readable string explaining the situation.
254:
255: Note there are two ways to conclude "OK": either 'sys.version' contains
256: the string "GCC" (implying that this Python was built with GCC), or the
257: installed "pyconfig.h" contains the string "__GNUC__".
258: '''
259:
260: # XXX since this function also checks sys.version, it's not strictly a
261: # "pyconfig.h" check -- should probably be renamed...
262:
263: from distutils import sysconfig
264: import string
265: # if sys.version contains GCC then python was compiled with
266: # GCC, and the pyconfig.h file should be OK
267: if string.find(sys.version,"GCC") >= 0:
268: return (CONFIG_H_OK, "sys.version mentions 'GCC'")
269:
270: fn = sysconfig.get_config_h_filename()
271: try:
272: # It would probably be better to read single lines when searching.
273: # But we do this only once, and it is fast enough
274: f = open(fn)
275: try:
276: s = f.read()
277: finally:
278: f.close()
279:
280: except IOError, exc:
281: # if we can't read this file, we cannot say it is wrong
282: # the compiler will complain later about this file as missing
283: return (CONFIG_H_UNCERTAIN,
284: "couldn't read '%s': %s" % (fn, exc.strerror))
285:
286: else:
287: # "pyconfig.h" contains an "#ifdef __GNUC__" or something similar
288: if string.find(s,"__GNUC__") >= 0:
289: return (CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn)
290: else:
291: return (CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn)
292:
293:
294: def get_versions():
295: ''' Try to find out the versions of gcc and ld.
296: If not possible it returns None for it.
297: '''
298: from distutils.version import StrictVersion
299: from distutils.spawn import find_executable
300: import re
301:
302: gcc_exe = find_executable('gcc')
303: if gcc_exe:
304: out = os.popen(gcc_exe + ' -dumpversion','r')
305: try:
306: out_string = out.read()
307: finally:
308: out.close()
309: result = re.search('(\d+\.\d+\.\d+)',out_string)
310: if result:
311: gcc_version = StrictVersion(result.group(1))
312: else:
313: gcc_version = None
314: else:
315: gcc_version = None
316: # EMX ld has no way of reporting its version number, and we use GCC
317: # anyway - so we can link OMF DLLs
318: ld_version = None
319: return (gcc_version, ld_version)
320:
"""
# Import the stypy library necessary elements
from stypy.type_inference_programs.type_inference_programs_imports import *
# Create the module type store
module_type_store = Context(None, __file__)
# ################# Begin of the type inference program ##################
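# Reading guide for the generated program below: stypy translates each
# statement of the source quoted in the module docstring above into explicit
# type-store operations. Literals become get_builtin_python_type_instance()
# instances, name bindings go through module_type_store.set_type_of(),
# attribute accesses through get_type_of_member(), and every call is routed
# through invoke(); each step carries a Localization(__file__, line, column)
# pointing at the originating position in the source.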
str_3103 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 5, (-1)), 'str', 'distutils.emxccompiler\n\nProvides the EMXCCompiler class, a subclass of UnixCCompiler that\nhandles the EMX port of the GNU C compiler to OS/2.\n')
# Assigning a Str to a Name (line 22):
# Assigning a Str to a Name (line 22):
str_3104 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 22, 15), 'str', '$Id$')
# Assigning a type to the variable '__revision__' (line 22)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 22, 0), '__revision__', str_3104)
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 24, 0))
# Multiple import statement. import os (1/3) (line 24)
import os
import_module(stypy.reporting.localization.Localization(__file__, 24, 0), 'os', os, module_type_store)
# Multiple import statement. import sys (2/3) (line 24)
import sys
import_module(stypy.reporting.localization.Localization(__file__, 24, 0), 'sys', sys, module_type_store)
# Multiple import statement. import copy (3/3) (line 24)
import copy
import_module(stypy.reporting.localization.Localization(__file__, 24, 0), 'copy', copy, module_type_store)
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 25, 0))
# 'from distutils.ccompiler import gen_preprocess_options, gen_lib_options' statement (line 25)
update_path_to_current_file_folder('C:/Python27/lib/distutils/')
import_3105 = generate_type_inference_code_for_module(stypy.reporting.localization.Localization(__file__, 25, 0), 'distutils.ccompiler')
if (type(import_3105) is not StypyTypeError):
if (import_3105 != 'pyd_module'):
__import__(import_3105)
sys_modules_3106 = sys.modules[import_3105]
import_from_module(stypy.reporting.localization.Localization(__file__, 25, 0), 'distutils.ccompiler', sys_modules_3106.module_type_store, module_type_store, ['gen_preprocess_options', 'gen_lib_options'])
nest_module(stypy.reporting.localization.Localization(__file__, 25, 0), __file__, sys_modules_3106, sys_modules_3106.module_type_store, module_type_store)
else:
from distutils.ccompiler import gen_preprocess_options, gen_lib_options
import_from_module(stypy.reporting.localization.Localization(__file__, 25, 0), 'distutils.ccompiler', None, module_type_store, ['gen_preprocess_options', 'gen_lib_options'], [gen_preprocess_options, gen_lib_options])
else:
# Assigning a type to the variable 'distutils.ccompiler' (line 25)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 25, 0), 'distutils.ccompiler', import_3105)
remove_current_file_folder_from_path('C:/Python27/lib/distutils/')
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 26, 0))
# 'from distutils.unixccompiler import UnixCCompiler' statement (line 26)
update_path_to_current_file_folder('C:/Python27/lib/distutils/')
import_3107 = generate_type_inference_code_for_module(stypy.reporting.localization.Localization(__file__, 26, 0), 'distutils.unixccompiler')
if (type(import_3107) is not StypyTypeError):
if (import_3107 != 'pyd_module'):
__import__(import_3107)
sys_modules_3108 = sys.modules[import_3107]
import_from_module(stypy.reporting.localization.Localization(__file__, 26, 0), 'distutils.unixccompiler', sys_modules_3108.module_type_store, module_type_store, ['UnixCCompiler'])
nest_module(stypy.reporting.localization.Localization(__file__, 26, 0), __file__, sys_modules_3108, sys_modules_3108.module_type_store, module_type_store)
else:
from distutils.unixccompiler import UnixCCompiler
import_from_module(stypy.reporting.localization.Localization(__file__, 26, 0), 'distutils.unixccompiler', None, module_type_store, ['UnixCCompiler'], [UnixCCompiler])
else:
# Assigning a type to the variable 'distutils.unixccompiler' (line 26)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 26, 0), 'distutils.unixccompiler', import_3107)
remove_current_file_folder_from_path('C:/Python27/lib/distutils/')
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 27, 0))
# 'from distutils.file_util import write_file' statement (line 27)
update_path_to_current_file_folder('C:/Python27/lib/distutils/')
import_3109 = generate_type_inference_code_for_module(stypy.reporting.localization.Localization(__file__, 27, 0), 'distutils.file_util')
if (type(import_3109) is not StypyTypeError):
if (import_3109 != 'pyd_module'):
__import__(import_3109)
sys_modules_3110 = sys.modules[import_3109]
import_from_module(stypy.reporting.localization.Localization(__file__, 27, 0), 'distutils.file_util', sys_modules_3110.module_type_store, module_type_store, ['write_file'])
nest_module(stypy.reporting.localization.Localization(__file__, 27, 0), __file__, sys_modules_3110, sys_modules_3110.module_type_store, module_type_store)
else:
from distutils.file_util import write_file
import_from_module(stypy.reporting.localization.Localization(__file__, 27, 0), 'distutils.file_util', None, module_type_store, ['write_file'], [write_file])
else:
# Assigning a type to the variable 'distutils.file_util' (line 27)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 27, 0), 'distutils.file_util', import_3109)
remove_current_file_folder_from_path('C:/Python27/lib/distutils/')
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 28, 0))
# 'from distutils.errors import DistutilsExecError, CompileError, UnknownFileError' statement (line 28)
update_path_to_current_file_folder('C:/Python27/lib/distutils/')
import_3111 = generate_type_inference_code_for_module(stypy.reporting.localization.Localization(__file__, 28, 0), 'distutils.errors')
if (type(import_3111) is not StypyTypeError):
if (import_3111 != 'pyd_module'):
__import__(import_3111)
sys_modules_3112 = sys.modules[import_3111]
import_from_module(stypy.reporting.localization.Localization(__file__, 28, 0), 'distutils.errors', sys_modules_3112.module_type_store, module_type_store, ['DistutilsExecError', 'CompileError', 'UnknownFileError'])
nest_module(stypy.reporting.localization.Localization(__file__, 28, 0), __file__, sys_modules_3112, sys_modules_3112.module_type_store, module_type_store)
else:
from distutils.errors import DistutilsExecError, CompileError, UnknownFileError
import_from_module(stypy.reporting.localization.Localization(__file__, 28, 0), 'distutils.errors', None, module_type_store, ['DistutilsExecError', 'CompileError', 'UnknownFileError'], [DistutilsExecError, CompileError, UnknownFileError])
else:
# Assigning a type to the variable 'distutils.errors' (line 28)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 28, 0), 'distutils.errors', import_3111)
remove_current_file_folder_from_path('C:/Python27/lib/distutils/')
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 29, 0))
# 'from distutils import log' statement (line 29)
try:
from distutils import log
except:
log = UndefinedType
import_from_module(stypy.reporting.localization.Localization(__file__, 29, 0), 'distutils', None, module_type_store, ['log'], [log])
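# Each import block above follows one pattern: the target module is resolved
# with generate_type_inference_code_for_module(); if a type-inference program
# for it exists, it is nested into this one via nest_module(), otherwise the
# real module is imported and registered with import_from_module(). When
# resolution fails, the resulting StypyTypeError is stored under the module's
# name so that later lookups report the error.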
# Declaration of the 'EMXCCompiler' class
# Getting the type of 'UnixCCompiler' (line 31)
UnixCCompiler_3113 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 31, 20), 'UnixCCompiler')
class EMXCCompiler(UnixCCompiler_3113, ):
# Assigning a Str to a Name (line 33):
# Assigning a Str to a Name (line 34):
# Assigning a Str to a Name (line 35):
# Assigning a Str to a Name (line 36):
# Assigning a Str to a Name (line 37):
# Assigning a Str to a Name (line 38):
# Assigning a Str to a Name (line 39):
# Assigning a Str to a Name (line 40):
@norecursion
def __init__(type_of_self, localization, *varargs, **kwargs):
global module_type_store
# Assign values to the parameters with defaults
int_3114 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 43, 26), 'int')
int_3115 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 44, 26), 'int')
int_3116 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 45, 24), 'int')
defaults = [int_3114, int_3115, int_3116]
# Create a new context for function '__init__'
module_type_store = module_type_store.open_function_context('__init__', 42, 4, False)
# Assigning a type to the variable 'self' (line 43)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 43, 4), 'self', type_of_self)
# Passed parameters checking function
arguments = process_argument_values(localization, type_of_self, module_type_store, 'EMXCCompiler.__init__', ['verbose', 'dry_run', 'force'], None, None, defaults, varargs, kwargs)
if is_error_type(arguments):
# Destroy the current context
module_type_store = module_type_store.close_function_context()
return
# Initialize method data
init_call_information(module_type_store, '__init__', localization, ['verbose', 'dry_run', 'force'], arguments)
# Default return type storage variable (SSA)
# Assigning a type to the variable 'stypy_return_type'
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'stypy_return_type', None)
# ################# Begin of '__init__(...)' code ##################
# Call to __init__(...): (line 47)
# Processing the call arguments (line 47)
# Getting the type of 'self' (line 47)
self_3119 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 47, 32), 'self', False)
# Getting the type of 'verbose' (line 47)
verbose_3120 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 47, 38), 'verbose', False)
# Getting the type of 'dry_run' (line 47)
dry_run_3121 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 47, 47), 'dry_run', False)
# Getting the type of 'force' (line 47)
force_3122 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 47, 56), 'force', False)
# Processing the call keyword arguments (line 47)
kwargs_3123 = {}
# Getting the type of 'UnixCCompiler' (line 47)
UnixCCompiler_3117 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 47, 8), 'UnixCCompiler', False)
# Obtaining the member '__init__' of a type (line 47)
init___3118 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 47, 8), UnixCCompiler_3117, '__init__')
# Calling __init__(args, kwargs) (line 47)
init___call_result_3124 = invoke(stypy.reporting.localization.Localization(__file__, 47, 8), init___3118, *[self_3119, verbose_3120, dry_run_3121, force_3122], **kwargs_3123)
# Assigning a Call to a Tuple (line 49):
# Assigning a Subscript to a Name (line 49):
# Obtaining the type of the subscript
int_3125 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 49, 8), 'int')
# Call to check_config_h(...): (line 49)
# Processing the call keyword arguments (line 49)
kwargs_3127 = {}
# Getting the type of 'check_config_h' (line 49)
check_config_h_3126 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 49, 28), 'check_config_h', False)
# Calling check_config_h(args, kwargs) (line 49)
check_config_h_call_result_3128 = invoke(stypy.reporting.localization.Localization(__file__, 49, 28), check_config_h_3126, *[], **kwargs_3127)
# Obtaining the member '__getitem__' of a type (line 49)
getitem___3129 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 49, 8), check_config_h_call_result_3128, '__getitem__')
# Calling the subscript (__getitem__) to obtain the elements type (line 49)
subscript_call_result_3130 = invoke(stypy.reporting.localization.Localization(__file__, 49, 8), getitem___3129, int_3125)
# Assigning a type to the variable 'tuple_var_assignment_3095' (line 49)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 49, 8), 'tuple_var_assignment_3095', subscript_call_result_3130)
# Assigning a Subscript to a Name (line 49):
# Obtaining the type of the subscript
int_3131 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 49, 8), 'int')
# Call to check_config_h(...): (line 49)
# Processing the call keyword arguments (line 49)
kwargs_3133 = {}
# Getting the type of 'check_config_h' (line 49)
check_config_h_3132 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 49, 28), 'check_config_h', False)
# Calling check_config_h(args, kwargs) (line 49)
check_config_h_call_result_3134 = invoke(stypy.reporting.localization.Localization(__file__, 49, 28), check_config_h_3132, *[], **kwargs_3133)
# Obtaining the member '__getitem__' of a type (line 49)
getitem___3135 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 49, 8), check_config_h_call_result_3134, '__getitem__')
# Calling the subscript (__getitem__) to obtain the elements type (line 49)
subscript_call_result_3136 = invoke(stypy.reporting.localization.Localization(__file__, 49, 8), getitem___3135, int_3131)
# Assigning a type to the variable 'tuple_var_assignment_3096' (line 49)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 49, 8), 'tuple_var_assignment_3096', subscript_call_result_3136)
# Assigning a Name to a Name (line 49):
# Getting the type of 'tuple_var_assignment_3095' (line 49)
tuple_var_assignment_3095_3137 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 49, 8), 'tuple_var_assignment_3095')
# Assigning a type to the variable 'status' (line 49)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 49, 9), 'status', tuple_var_assignment_3095_3137)
# Assigning a Name to a Name (line 49):
# Getting the type of 'tuple_var_assignment_3096' (line 49)
tuple_var_assignment_3096_3138 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 49, 8), 'tuple_var_assignment_3096')
# Assigning a type to the variable 'details' (line 49)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 49, 17), 'details', tuple_var_assignment_3096_3138)
# Call to debug_print(...): (line 50)
# Processing the call arguments (line 50)
str_3141 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 50, 25), 'str', "Python's GCC status: %s (details: %s)")
# Obtaining an instance of the builtin type 'tuple' (line 51)
tuple_3142 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 51, 26), 'tuple')
# Adding type elements to the builtin type 'tuple' instance (line 51)
# Adding element type (line 51)
# Getting the type of 'status' (line 51)
status_3143 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 51, 26), 'status', False)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 51, 26), tuple_3142, status_3143)
# Adding element type (line 51)
# Getting the type of 'details' (line 51)
details_3144 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 51, 34), 'details', False)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 51, 26), tuple_3142, details_3144)
# Applying the binary operator '%' (line 50)
result_mod_3145 = python_operator(stypy.reporting.localization.Localization(__file__, 50, 25), '%', str_3141, tuple_3142)
# Processing the call keyword arguments (line 50)
kwargs_3146 = {}
# Getting the type of 'self' (line 50)
self_3139 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 50, 8), 'self', False)
# Obtaining the member 'debug_print' of a type (line 50)
debug_print_3140 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 50, 8), self_3139, 'debug_print')
# Calling debug_print(args, kwargs) (line 50)
debug_print_call_result_3147 = invoke(stypy.reporting.localization.Localization(__file__, 50, 8), debug_print_3140, *[result_mod_3145], **kwargs_3146)
# Getting the type of 'status' (line 52)
status_3148 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 52, 11), 'status')
# Getting the type of 'CONFIG_H_OK' (line 52)
CONFIG_H_OK_3149 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 52, 25), 'CONFIG_H_OK')
# Applying the binary operator 'isnot' (line 52)
result_is_not_3150 = python_operator(stypy.reporting.localization.Localization(__file__, 52, 11), 'isnot', status_3148, CONFIG_H_OK_3149)
# Testing the type of an if condition (line 52)
if_condition_3151 = is_suitable_condition(stypy.reporting.localization.Localization(__file__, 52, 8), result_is_not_3150)
# Assigning a type to the variable 'if_condition_3151' (line 52)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 52, 8), 'if_condition_3151', if_condition_3151)
# SSA begins for if statement (line 52)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'if')
# Call to warn(...): (line 53)
# Processing the call arguments (line 53)
str_3154 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 54, 16), 'str', "Python's pyconfig.h doesn't seem to support your compiler. ")
str_3155 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 55, 17), 'str', 'Reason: %s.')
# Getting the type of 'details' (line 55)
details_3156 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 55, 33), 'details', False)
# Applying the binary operator '%' (line 55)
result_mod_3157 = python_operator(stypy.reporting.localization.Localization(__file__, 55, 17), '%', str_3155, details_3156)
# Applying the binary operator '+' (line 54)
result_add_3158 = python_operator(stypy.reporting.localization.Localization(__file__, 54, 16), '+', str_3154, result_mod_3157)
str_3159 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 56, 16), 'str', 'Compiling may fail because of undefined preprocessor macros.')
# Applying the binary operator '+' (line 55)
result_add_3160 = python_operator(stypy.reporting.localization.Localization(__file__, 55, 42), '+', result_add_3158, str_3159)
# Processing the call keyword arguments (line 53)
kwargs_3161 = {}
# Getting the type of 'self' (line 53)
self_3152 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 53, 12), 'self', False)
# Obtaining the member 'warn' of a type (line 53)
warn_3153 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 53, 12), self_3152, 'warn')
# Calling warn(args, kwargs) (line 53)
warn_call_result_3162 = invoke(stypy.reporting.localization.Localization(__file__, 53, 12), warn_3153, *[result_add_3160], **kwargs_3161)
# SSA join for if statement (line 52)
module_type_store = module_type_store.join_ssa_context()
# Assigning a Call to a Tuple (line 58):
# Assigning a Subscript to a Name (line 58):
# Obtaining the type of the subscript
int_3163 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 58, 8), 'int')
# Call to get_versions(...): (line 59)
# Processing the call keyword arguments (line 59)
kwargs_3165 = {}
# Getting the type of 'get_versions' (line 59)
get_versions_3164 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 59, 12), 'get_versions', False)
# Calling get_versions(args, kwargs) (line 59)
get_versions_call_result_3166 = invoke(stypy.reporting.localization.Localization(__file__, 59, 12), get_versions_3164, *[], **kwargs_3165)
# Obtaining the member '__getitem__' of a type (line 58)
getitem___3167 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 58, 8), get_versions_call_result_3166, '__getitem__')
# Calling the subscript (__getitem__) to obtain the elements type (line 58)
subscript_call_result_3168 = invoke(stypy.reporting.localization.Localization(__file__, 58, 8), getitem___3167, int_3163)
# Assigning a type to the variable 'tuple_var_assignment_3097' (line 58)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 58, 8), 'tuple_var_assignment_3097', subscript_call_result_3168)
# Assigning a Subscript to a Name (line 58):
# Obtaining the type of the subscript
int_3169 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 58, 8), 'int')
# Call to get_versions(...): (line 59)
# Processing the call keyword arguments (line 59)
kwargs_3171 = {}
# Getting the type of 'get_versions' (line 59)
get_versions_3170 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 59, 12), 'get_versions', False)
# Calling get_versions(args, kwargs) (line 59)
get_versions_call_result_3172 = invoke(stypy.reporting.localization.Localization(__file__, 59, 12), get_versions_3170, *[], **kwargs_3171)
# Obtaining the member '__getitem__' of a type (line 58)
getitem___3173 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 58, 8), get_versions_call_result_3172, '__getitem__')
# Calling the subscript (__getitem__) to obtain the elements type (line 58)
subscript_call_result_3174 = invoke(stypy.reporting.localization.Localization(__file__, 58, 8), getitem___3173, int_3169)
# Assigning a type to the variable 'tuple_var_assignment_3098' (line 58)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 58, 8), 'tuple_var_assignment_3098', subscript_call_result_3174)
# Assigning a Name to a Attribute (line 58):
# Getting the type of 'tuple_var_assignment_3097' (line 58)
tuple_var_assignment_3097_3175 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 58, 8), 'tuple_var_assignment_3097')
# Getting the type of 'self' (line 58)
self_3176 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 58, 9), 'self')
# Setting the type of the member 'gcc_version' of a type (line 58)
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 58, 9), self_3176, 'gcc_version', tuple_var_assignment_3097_3175)
# Assigning a Name to a Attribute (line 58):
# Getting the type of 'tuple_var_assignment_3098' (line 58)
tuple_var_assignment_3098_3177 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 58, 8), 'tuple_var_assignment_3098')
# Getting the type of 'self' (line 58)
self_3178 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 58, 27), 'self')
# Setting the type of the member 'ld_version' of a type (line 58)
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 58, 27), self_3178, 'ld_version', tuple_var_assignment_3098_3177)
# Call to debug_print(...): (line 60)
# Processing the call arguments (line 60)
# Getting the type of 'self' (line 60)
self_3181 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 60, 25), 'self', False)
# Obtaining the member 'compiler_type' of a type (line 60)
compiler_type_3182 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 60, 25), self_3181, 'compiler_type')
str_3183 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 60, 46), 'str', ': gcc %s, ld %s\n')
# Obtaining an instance of the builtin type 'tuple' (line 61)
tuple_3184 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 61, 26), 'tuple')
# Adding type elements to the builtin type 'tuple' instance (line 61)
# Adding element type (line 61)
# Getting the type of 'self' (line 61)
self_3185 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 61, 26), 'self', False)
# Obtaining the member 'gcc_version' of a type (line 61)
gcc_version_3186 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 61, 26), self_3185, 'gcc_version')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 61, 26), tuple_3184, gcc_version_3186)
# Adding element type (line 61)
# Getting the type of 'self' (line 62)
self_3187 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 62, 26), 'self', False)
# Obtaining the member 'ld_version' of a type (line 62)
ld_version_3188 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 62, 26), self_3187, 'ld_version')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 61, 26), tuple_3184, ld_version_3188)
# Applying the binary operator '%' (line 60)
result_mod_3189 = python_operator(stypy.reporting.localization.Localization(__file__, 60, 46), '%', str_3183, tuple_3184)
# Applying the binary operator '+' (line 60)
result_add_3190 = python_operator(stypy.reporting.localization.Localization(__file__, 60, 25), '+', compiler_type_3182, result_mod_3189)
# Processing the call keyword arguments (line 60)
kwargs_3191 = {}
# Getting the type of 'self' (line 60)
self_3179 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 60, 8), 'self', False)
# Obtaining the member 'debug_print' of a type (line 60)
debug_print_3180 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 60, 8), self_3179, 'debug_print')
# Calling debug_print(args, kwargs) (line 60)
debug_print_call_result_3192 = invoke(stypy.reporting.localization.Localization(__file__, 60, 8), debug_print_3180, *[result_add_3190], **kwargs_3191)
# Call to set_executables(...): (line 66)
# Processing the call keyword arguments (line 66)
str_3195 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 66, 38), 'str', 'gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall')
keyword_3196 = str_3195
str_3197 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 67, 41), 'str', 'gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall')
keyword_3198 = str_3197
str_3199 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 68, 40), 'str', 'gcc -Zomf -Zmt -Zcrtdll')
keyword_3200 = str_3199
str_3201 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 69, 39), 'str', 'gcc -Zomf -Zmt -Zcrtdll -Zdll')
keyword_3202 = str_3201
kwargs_3203 = {'linker_exe': keyword_3200, 'compiler_so': keyword_3198, 'linker_so': keyword_3202, 'compiler': keyword_3196}
# Getting the type of 'self' (line 66)
self_3193 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 66, 8), 'self', False)
# Obtaining the member 'set_executables' of a type (line 66)
set_executables_3194 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 66, 8), self_3193, 'set_executables')
# Calling set_executables(args, kwargs) (line 66)
set_executables_call_result_3204 = invoke(stypy.reporting.localization.Localization(__file__, 66, 8), set_executables_3194, *[], **kwargs_3203)
# Assigning a List to a Attribute (line 73):
# Assigning a List to a Attribute (line 73):
# Obtaining an instance of the builtin type 'list' (line 73)
list_3205 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 73, 27), 'list')
# Adding type elements to the builtin type 'list' instance (line 73)
# Adding element type (line 73)
str_3206 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 73, 28), 'str', 'gcc')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 73, 27), list_3205, str_3206)
# Getting the type of 'self' (line 73)
self_3207 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 73, 8), 'self')
# Setting the type of the member 'dll_libraries' of a type (line 73)
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 73, 8), self_3207, 'dll_libraries', list_3205)
# ################# End of '__init__(...)' code ##################
# Teardown call information
teardown_call_information(localization, arguments)
# Destroy the current context
module_type_store = module_type_store.close_function_context()
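# Every translated method repeats the wrapper protocol used by __init__ above:
# open_function_context() creates a fresh scope, process_argument_values()
# binds and checks the declared parameters (an arity or keyword mismatch
# yields an error type that is returned immediately), the translated body
# runs, and close_function_context() restores the enclosing scope.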
@norecursion
def _compile(type_of_self, localization, *varargs, **kwargs):
global module_type_store
# Assign values to the parameters with defaults
defaults = []
# Create a new context for function '_compile'
module_type_store = module_type_store.open_function_context('_compile', 77, 4, False)
# Assigning a type to the variable 'self' (line 78)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 78, 4), 'self', type_of_self)
# Passed parameters checking function
EMXCCompiler._compile.__dict__.__setitem__('stypy_localization', localization)
EMXCCompiler._compile.__dict__.__setitem__('stypy_type_of_self', type_of_self)
EMXCCompiler._compile.__dict__.__setitem__('stypy_type_store', module_type_store)
EMXCCompiler._compile.__dict__.__setitem__('stypy_function_name', 'EMXCCompiler._compile')
EMXCCompiler._compile.__dict__.__setitem__('stypy_param_names_list', ['obj', 'src', 'ext', 'cc_args', 'extra_postargs', 'pp_opts'])
EMXCCompiler._compile.__dict__.__setitem__('stypy_varargs_param_name', None)
EMXCCompiler._compile.__dict__.__setitem__('stypy_kwargs_param_name', None)
EMXCCompiler._compile.__dict__.__setitem__('stypy_call_defaults', defaults)
EMXCCompiler._compile.__dict__.__setitem__('stypy_call_varargs', varargs)
EMXCCompiler._compile.__dict__.__setitem__('stypy_call_kwargs', kwargs)
EMXCCompiler._compile.__dict__.__setitem__('stypy_declared_arg_number', 7)
arguments = process_argument_values(localization, type_of_self, module_type_store, 'EMXCCompiler._compile', ['obj', 'src', 'ext', 'cc_args', 'extra_postargs', 'pp_opts'], None, None, defaults, varargs, kwargs)
if is_error_type(arguments):
# Destroy the current context
module_type_store = module_type_store.close_function_context()
return arguments
# Initialize method data
init_call_information(module_type_store, '_compile', localization, ['obj', 'src', 'ext', 'cc_args', 'extra_postargs', 'pp_opts'], arguments)
# Default return type storage variable (SSA)
# Assigning a type to the variable 'stypy_return_type'
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'stypy_return_type', None)
# ################# Begin of '_compile(...)' code ##################
# Getting the type of 'ext' (line 78)
ext_3208 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 78, 11), 'ext')
str_3209 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 78, 18), 'str', '.rc')
# Applying the binary operator '==' (line 78)
result_eq_3210 = python_operator(stypy.reporting.localization.Localization(__file__, 78, 11), '==', ext_3208, str_3209)
# Testing the type of an if condition (line 78)
if_condition_3211 = is_suitable_condition(stypy.reporting.localization.Localization(__file__, 78, 8), result_eq_3210)
# Assigning a type to the variable 'if_condition_3211' (line 78)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 78, 8), 'if_condition_3211', if_condition_3211)
# SSA begins for if statement (line 78)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'if')
# SSA begins for try-except statement (line 80)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'try-except')
# Call to spawn(...): (line 81)
# Processing the call arguments (line 81)
# Obtaining an instance of the builtin type 'list' (line 81)
list_3214 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 81, 27), 'list')
# Adding type elements to the builtin type 'list' instance (line 81)
# Adding element type (line 81)
str_3215 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 81, 28), 'str', 'rc')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 81, 27), list_3214, str_3215)
# Adding element type (line 81)
str_3216 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 81, 34), 'str', '-r')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 81, 27), list_3214, str_3216)
# Adding element type (line 81)
# Getting the type of 'src' (line 81)
src_3217 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 81, 40), 'src', False)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 81, 27), list_3214, src_3217)
# Processing the call keyword arguments (line 81)
kwargs_3218 = {}
# Getting the type of 'self' (line 81)
self_3212 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 81, 16), 'self', False)
# Obtaining the member 'spawn' of a type (line 81)
spawn_3213 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 81, 16), self_3212, 'spawn')
# Calling spawn(args, kwargs) (line 81)
spawn_call_result_3219 = invoke(stypy.reporting.localization.Localization(__file__, 81, 16), spawn_3213, *[list_3214], **kwargs_3218)
# SSA branch for the except part of a try statement (line 80)
# SSA branch for the except 'DistutilsExecError' branch of a try statement (line 80)
# Storing handler type
module_type_store.open_ssa_branch('except')
# Getting the type of 'DistutilsExecError' (line 82)
DistutilsExecError_3220 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 82, 19), 'DistutilsExecError')
# Assigning a type to the variable 'msg' (line 82)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 82, 12), 'msg', DistutilsExecError_3220)
# Getting the type of 'CompileError' (line 83)
CompileError_3221 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 83, 22), 'CompileError')
ensure_var_of_types(stypy.reporting.localization.Localization(__file__, 83, 16), CompileError_3221, 'raise parameter', BaseException)
# SSA join for try-except statement (line 80)
module_type_store = module_type_store.join_ssa_context()
# SSA branch for the else part of an if statement (line 78)
module_type_store.open_ssa_branch('else')
# SSA begins for try-except statement (line 85)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'try-except')
# Call to spawn(...): (line 86)
# Processing the call arguments (line 86)
# Getting the type of 'self' (line 86)
self_3224 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 86, 27), 'self', False)
# Obtaining the member 'compiler_so' of a type (line 86)
compiler_so_3225 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 86, 27), self_3224, 'compiler_so')
# Getting the type of 'cc_args' (line 86)
cc_args_3226 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 86, 46), 'cc_args', False)
# Applying the binary operator '+' (line 86)
result_add_3227 = python_operator(stypy.reporting.localization.Localization(__file__, 86, 27), '+', compiler_so_3225, cc_args_3226)
# Obtaining an instance of the builtin type 'list' (line 86)
list_3228 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 86, 56), 'list')
# Adding type elements to the builtin type 'list' instance (line 86)
# Adding element type (line 86)
# Getting the type of 'src' (line 86)
src_3229 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 86, 57), 'src', False)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 86, 56), list_3228, src_3229)
# Adding element type (line 86)
str_3230 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 86, 62), 'str', '-o')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 86, 56), list_3228, str_3230)
# Adding element type (line 86)
# Getting the type of 'obj' (line 86)
obj_3231 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 86, 68), 'obj', False)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 86, 56), list_3228, obj_3231)
# Applying the binary operator '+' (line 86)
result_add_3232 = python_operator(stypy.reporting.localization.Localization(__file__, 86, 54), '+', result_add_3227, list_3228)
# Getting the type of 'extra_postargs' (line 87)
extra_postargs_3233 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 87, 27), 'extra_postargs', False)
# Applying the binary operator '+' (line 86)
result_add_3234 = python_operator(stypy.reporting.localization.Localization(__file__, 86, 73), '+', result_add_3232, extra_postargs_3233)
# Processing the call keyword arguments (line 86)
kwargs_3235 = {}
# Getting the type of 'self' (line 86)
self_3222 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 86, 16), 'self', False)
# Obtaining the member 'spawn' of a type (line 86)
spawn_3223 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 86, 16), self_3222, 'spawn')
# Calling spawn(args, kwargs) (line 86)
spawn_call_result_3236 = invoke(stypy.reporting.localization.Localization(__file__, 86, 16), spawn_3223, *[result_add_3234], **kwargs_3235)
# SSA branch for the except part of a try statement (line 85)
# SSA branch for the except 'DistutilsExecError' branch of a try statement (line 85)
# Storing handler type
module_type_store.open_ssa_branch('except')
# Getting the type of 'DistutilsExecError' (line 88)
DistutilsExecError_3237 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 88, 19), 'DistutilsExecError')
# Assigning a type to the variable 'msg' (line 88)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 88, 12), 'msg', DistutilsExecError_3237)
# Getting the type of 'CompileError' (line 89)
CompileError_3238 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 89, 22), 'CompileError')
ensure_var_of_types(stypy.reporting.localization.Localization(__file__, 89, 16), CompileError_3238, 'raise parameter', BaseException)
# SSA join for try-except statement (line 85)
module_type_store = module_type_store.join_ssa_context()
# SSA join for if statement (line 78)
module_type_store = module_type_store.join_ssa_context()
# ################# End of '_compile(...)' code ##################
# Teardown call information
teardown_call_information(localization, arguments)
# Storing the return type of function '_compile' in the type store
# Getting the type of 'stypy_return_type' (line 77)
stypy_return_type_3239 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 77, 4), 'stypy_return_type')
module_type_store.store_return_type_of_current_context(stypy_return_type_3239)
# Destroy the current context
module_type_store = module_type_store.close_function_context()
# Return type of the function '_compile'
return stypy_return_type_3239
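# Control flow in _compile above is modelled with SSA contexts: each if/try
# opens one via SSAContext.create_ssa_context(), alternative paths are entered
# with open_ssa_branch('else' / 'except'), and join_ssa_context() merges the
# branch-local type assignments, so a name bound to different types on
# different paths keeps both possibilities after the join.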
@norecursion
def link(type_of_self, localization, *varargs, **kwargs):
global module_type_store
# Assign values to the parameters with defaults
# Getting the type of 'None' (line 95)
None_3240 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 95, 25), 'None')
# Getting the type of 'None' (line 96)
None_3241 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 96, 24), 'None')
# Getting the type of 'None' (line 97)
None_3242 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 97, 27), 'None')
# Getting the type of 'None' (line 98)
None_3243 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 98, 35), 'None')
# Getting the type of 'None' (line 99)
None_3244 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 99, 29), 'None')
int_3245 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 100, 20), 'int')
# Getting the type of 'None' (line 101)
None_3246 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 101, 28), 'None')
# Getting the type of 'None' (line 102)
None_3247 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 102, 29), 'None')
# Getting the type of 'None' (line 103)
None_3248 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 103, 25), 'None')
# Getting the type of 'None' (line 104)
None_3249 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 104, 26), 'None')
defaults = [None_3240, None_3241, None_3242, None_3243, None_3244, int_3245, None_3246, None_3247, None_3248, None_3249]
# Create a new context for function 'link'
module_type_store = module_type_store.open_function_context('link', 91, 4, False)
# Assigning a type to the variable 'self' (line 92)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 92, 4), 'self', type_of_self)
# Passed parameters checking function
EMXCCompiler.link.__dict__.__setitem__('stypy_localization', localization)
EMXCCompiler.link.__dict__.__setitem__('stypy_type_of_self', type_of_self)
EMXCCompiler.link.__dict__.__setitem__('stypy_type_store', module_type_store)
EMXCCompiler.link.__dict__.__setitem__('stypy_function_name', 'EMXCCompiler.link')
EMXCCompiler.link.__dict__.__setitem__('stypy_param_names_list', ['target_desc', 'objects', 'output_filename', 'output_dir', 'libraries', 'library_dirs', 'runtime_library_dirs', 'export_symbols', 'debug', 'extra_preargs', 'extra_postargs', 'build_temp', 'target_lang'])
EMXCCompiler.link.__dict__.__setitem__('stypy_varargs_param_name', None)
EMXCCompiler.link.__dict__.__setitem__('stypy_kwargs_param_name', None)
EMXCCompiler.link.__dict__.__setitem__('stypy_call_defaults', defaults)
EMXCCompiler.link.__dict__.__setitem__('stypy_call_varargs', varargs)
EMXCCompiler.link.__dict__.__setitem__('stypy_call_kwargs', kwargs)
EMXCCompiler.link.__dict__.__setitem__('stypy_declared_arg_number', 14)
arguments = process_argument_values(localization, type_of_self, module_type_store, 'EMXCCompiler.link', ['target_desc', 'objects', 'output_filename', 'output_dir', 'libraries', 'library_dirs', 'runtime_library_dirs', 'export_symbols', 'debug', 'extra_preargs', 'extra_postargs', 'build_temp', 'target_lang'], None, None, defaults, varargs, kwargs)
if is_error_type(arguments):
# Destroy the current context
module_type_store = module_type_store.close_function_context()
return arguments
# Initialize method data
init_call_information(module_type_store, 'link', localization, ['target_desc', 'objects', 'output_filename', 'output_dir', 'libraries', 'library_dirs', 'runtime_library_dirs', 'export_symbols', 'debug', 'extra_preargs', 'extra_postargs', 'build_temp', 'target_lang'], arguments)
# Default return type storage variable (SSA)
# Assigning a type to the variable 'stypy_return_type'
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'stypy_return_type', None)
# ################# Begin of 'link(...)' code ##################
# Assigning a Call to a Name (line 107):
# Assigning a Call to a Name (line 107):
# Call to copy(...): (line 107)
# Processing the call arguments (line 107)
# Evaluating a boolean operation
# Getting the type of 'extra_preargs' (line 107)
extra_preargs_3252 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 107, 34), 'extra_preargs', False)
# Obtaining an instance of the builtin type 'list' (line 107)
list_3253 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 107, 51), 'list')
# Adding type elements to the builtin type 'list' instance (line 107)
# Applying the binary operator 'or' (line 107)
result_or_keyword_3254 = python_operator(stypy.reporting.localization.Localization(__file__, 107, 34), 'or', extra_preargs_3252, list_3253)
# Processing the call keyword arguments (line 107)
kwargs_3255 = {}
# Getting the type of 'copy' (line 107)
copy_3250 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 107, 24), 'copy', False)
# Obtaining the member 'copy' of a type (line 107)
copy_3251 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 107, 24), copy_3250, 'copy')
# Calling copy(args, kwargs) (line 107)
copy_call_result_3256 = invoke(stypy.reporting.localization.Localization(__file__, 107, 24), copy_3251, *[result_or_keyword_3254], **kwargs_3255)
# Assigning a type to the variable 'extra_preargs' (line 107)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 107, 8), 'extra_preargs', copy_call_result_3256)
# Assigning a Call to a Name (line 108):
# Call to copy(...): (line 108)
# Processing the call arguments (line 108)
# Evaluating a boolean operation
# Getting the type of 'libraries' (line 108)
libraries_3259 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 108, 30), 'libraries', False)
# Obtaining an instance of the builtin type 'list' (line 108)
list_3260 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 108, 43), 'list')
# Adding type elements to the builtin type 'list' instance (line 108)
# Applying the binary operator 'or' (line 108)
result_or_keyword_3261 = python_operator(stypy.reporting.localization.Localization(__file__, 108, 30), 'or', libraries_3259, list_3260)
# Processing the call keyword arguments (line 108)
kwargs_3262 = {}
# Getting the type of 'copy' (line 108)
copy_3257 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 108, 20), 'copy', False)
# Obtaining the member 'copy' of a type (line 108)
copy_3258 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 108, 20), copy_3257, 'copy')
# Calling copy(args, kwargs) (line 108)
copy_call_result_3263 = invoke(stypy.reporting.localization.Localization(__file__, 108, 20), copy_3258, *[result_or_keyword_3261], **kwargs_3262)
# Assigning a type to the variable 'libraries' (line 108)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 108, 8), 'libraries', copy_call_result_3263)
# Assigning a Call to a Name (line 109):
# Call to copy(...): (line 109)
# Processing the call arguments (line 109)
# Evaluating a boolean operation
# Getting the type of 'objects' (line 109)
objects_3266 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 109, 28), 'objects', False)
# Obtaining an instance of the builtin type 'list' (line 109)
list_3267 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 109, 39), 'list')
# Adding type elements to the builtin type 'list' instance (line 109)
# Applying the binary operator 'or' (line 109)
result_or_keyword_3268 = python_operator(stypy.reporting.localization.Localization(__file__, 109, 28), 'or', objects_3266, list_3267)
# Processing the call keyword arguments (line 109)
kwargs_3269 = {}
# Getting the type of 'copy' (line 109)
copy_3264 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 109, 18), 'copy', False)
# Obtaining the member 'copy' of a type (line 109)
copy_3265 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 109, 18), copy_3264, 'copy')
# Calling copy(args, kwargs) (line 109)
copy_call_result_3270 = invoke(stypy.reporting.localization.Localization(__file__, 109, 18), copy_3265, *[result_or_keyword_3268], **kwargs_3269)
# Assigning a type to the variable 'objects' (line 109)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 109, 8), 'objects', copy_call_result_3270)
# Call to extend(...): (line 112)
# Processing the call arguments (line 112)
# Getting the type of 'self' (line 112)
self_3273 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 112, 25), 'self', False)
# Obtaining the member 'dll_libraries' of a type (line 112)
dll_libraries_3274 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 112, 25), self_3273, 'dll_libraries')
# Processing the call keyword arguments (line 112)
kwargs_3275 = {}
# Getting the type of 'libraries' (line 112)
libraries_3271 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 112, 8), 'libraries', False)
# Obtaining the member 'extend' of a type (line 112)
extend_3272 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 112, 8), libraries_3271, 'extend')
# Calling extend(args, kwargs) (line 112)
extend_call_result_3276 = invoke(stypy.reporting.localization.Localization(__file__, 112, 8), extend_3272, *[dll_libraries_3274], **kwargs_3275)
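# The records for lines 107-112 above appear to correspond to source of
# roughly this shape (a hedged sketch reconstructed from the type-store
# operations; exact formatting may differ):
#
#   extra_preargs = copy.copy(extra_preargs or [])
#   libraries = copy.copy(libraries or [])
#   objects = copy.copy(objects or [])
#   libraries.extend(self.dll_libraries)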
# Evaluating a boolean operation
# Getting the type of 'export_symbols' (line 116)
export_symbols_3277 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 116, 13), 'export_symbols')
# Getting the type of 'None' (line 116)
None_3278 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 116, 35), 'None')
# Applying the binary operator 'isnot' (line 116)
result_is_not_3279 = python_operator(stypy.reporting.localization.Localization(__file__, 116, 13), 'isnot', export_symbols_3277, None_3278)
# Getting the type of 'target_desc' (line 117)
target_desc_3280 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 117, 13), 'target_desc')
# Getting the type of 'self' (line 117)
self_3281 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 117, 28), 'self')
# Obtaining the member 'EXECUTABLE' of a type (line 117)
EXECUTABLE_3282 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 117, 28), self_3281, 'EXECUTABLE')
# Applying the binary operator '!=' (line 117)
result_ne_3283 = python_operator(stypy.reporting.localization.Localization(__file__, 117, 13), '!=', target_desc_3280, EXECUTABLE_3282)
# Applying the binary operator 'and' (line 116)
result_and_keyword_3284 = python_operator(stypy.reporting.localization.Localization(__file__, 116, 12), 'and', result_is_not_3279, result_ne_3283)
# Testing the type of an if condition (line 116)
if_condition_3285 = is_suitable_condition(stypy.reporting.localization.Localization(__file__, 116, 8), result_and_keyword_3284)
# Assigning a type to the variable 'if_condition_3285' (line 116)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 116, 8), 'if_condition_3285', if_condition_3285)
# SSA begins for if statement (line 116)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'if')
# Assigning a Call to a Name (line 126):
# Call to dirname(...): (line 126)
# Processing the call arguments (line 126)
# Obtaining the type of the subscript
int_3289 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 126, 47), 'int')
# Getting the type of 'objects' (line 126)
objects_3290 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 126, 39), 'objects', False)
# Obtaining the member '__getitem__' of a type (line 126)
getitem___3291 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 126, 39), objects_3290, '__getitem__')
# Calling the subscript (__getitem__) to obtain the elements type (line 126)
subscript_call_result_3292 = invoke(stypy.reporting.localization.Localization(__file__, 126, 39), getitem___3291, int_3289)
# Processing the call keyword arguments (line 126)
kwargs_3293 = {}
# Getting the type of 'os' (line 126)
os_3286 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 126, 23), 'os', False)
# Obtaining the member 'path' of a type (line 126)
path_3287 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 126, 23), os_3286, 'path')
# Obtaining the member 'dirname' of a type (line 126)
dirname_3288 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 126, 23), path_3287, 'dirname')
# Calling dirname(args, kwargs) (line 126)
dirname_call_result_3294 = invoke(stypy.reporting.localization.Localization(__file__, 126, 23), dirname_3288, *[subscript_call_result_3292], **kwargs_3293)
# Assigning a type to the variable 'temp_dir' (line 126)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 126, 12), 'temp_dir', dirname_call_result_3294)
# Assigning a Call to a Tuple (line 128):
# Assigning a Subscript to a Name (line 128):
# Obtaining the type of the subscript
int_3295 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 128, 12), 'int')
# Call to splitext(...): (line 128)
# Processing the call arguments (line 128)
# Call to basename(...): (line 129)
# Processing the call arguments (line 129)
# Getting the type of 'output_filename' (line 129)
output_filename_3302 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 129, 33), 'output_filename', False)
# Processing the call keyword arguments (line 129)
kwargs_3303 = {}
# Getting the type of 'os' (line 129)
os_3299 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 129, 16), 'os', False)
# Obtaining the member 'path' of a type (line 129)
path_3300 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 129, 16), os_3299, 'path')
# Obtaining the member 'basename' of a type (line 129)
basename_3301 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 129, 16), path_3300, 'basename')
# Calling basename(args, kwargs) (line 129)
basename_call_result_3304 = invoke(stypy.reporting.localization.Localization(__file__, 129, 16), basename_3301, *[output_filename_3302], **kwargs_3303)
# Processing the call keyword arguments (line 128)
kwargs_3305 = {}
# Getting the type of 'os' (line 128)
os_3296 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 128, 40), 'os', False)
# Obtaining the member 'path' of a type (line 128)
path_3297 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 128, 40), os_3296, 'path')
# Obtaining the member 'splitext' of a type (line 128)
splitext_3298 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 128, 40), path_3297, 'splitext')
# Calling splitext(args, kwargs) (line 128)
splitext_call_result_3306 = invoke(stypy.reporting.localization.Localization(__file__, 128, 40), splitext_3298, *[basename_call_result_3304], **kwargs_3305)
# Obtaining the member '__getitem__' of a type (line 128)
getitem___3307 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 128, 12), splitext_call_result_3306, '__getitem__')
# Calling the subscript (__getitem__) to obtain the elements type (line 128)
subscript_call_result_3308 = invoke(stypy.reporting.localization.Localization(__file__, 128, 12), getitem___3307, int_3295)
# Assigning a type to the variable 'tuple_var_assignment_3099' (line 128)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 128, 12), 'tuple_var_assignment_3099', subscript_call_result_3308)
# Assigning a Subscript to a Name (line 128):
# Obtaining the type of the subscript
int_3309 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 128, 12), 'int')
# Call to splitext(...): (line 128)
# Processing the call arguments (line 128)
# Call to basename(...): (line 129)
# Processing the call arguments (line 129)
# Getting the type of 'output_filename' (line 129)
output_filename_3316 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 129, 33), 'output_filename', False)
# Processing the call keyword arguments (line 129)
kwargs_3317 = {}
# Getting the type of 'os' (line 129)
os_3313 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 129, 16), 'os', False)
# Obtaining the member 'path' of a type (line 129)
path_3314 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 129, 16), os_3313, 'path')
# Obtaining the member 'basename' of a type (line 129)
basename_3315 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 129, 16), path_3314, 'basename')
# Calling basename(args, kwargs) (line 129)
basename_call_result_3318 = invoke(stypy.reporting.localization.Localization(__file__, 129, 16), basename_3315, *[output_filename_3316], **kwargs_3317)
# Processing the call keyword arguments (line 128)
kwargs_3319 = {}
# Getting the type of 'os' (line 128)
os_3310 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 128, 40), 'os', False)
# Obtaining the member 'path' of a type (line 128)
path_3311 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 128, 40), os_3310, 'path')
# Obtaining the member 'splitext' of a type (line 128)
splitext_3312 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 128, 40), path_3311, 'splitext')
# Calling splitext(args, kwargs) (line 128)
splitext_call_result_3320 = invoke(stypy.reporting.localization.Localization(__file__, 128, 40), splitext_3312, *[basename_call_result_3318], **kwargs_3319)
# Obtaining the member '__getitem__' of a type (line 128)
getitem___3321 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 128, 12), splitext_call_result_3320, '__getitem__')
# Calling the subscript (__getitem__) to obtain the elements type (line 128)
subscript_call_result_3322 = invoke(stypy.reporting.localization.Localization(__file__, 128, 12), getitem___3321, int_3309)
# Assigning a type to the variable 'tuple_var_assignment_3100' (line 128)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 128, 12), 'tuple_var_assignment_3100', subscript_call_result_3322)
# Assigning a Name to a Name (line 128):
# Getting the type of 'tuple_var_assignment_3099' (line 128)
tuple_var_assignment_3099_3323 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 128, 12), 'tuple_var_assignment_3099')
# Assigning a type to the variable 'dll_name' (line 128)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 128, 13), 'dll_name', tuple_var_assignment_3099_3323)
# Assigning a Name to a Name (line 128):
# Getting the type of 'tuple_var_assignment_3100' (line 128)
tuple_var_assignment_3100_3324 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 128, 12), 'tuple_var_assignment_3100')
# Assigning a type to the variable 'dll_extension' (line 128)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 128, 23), 'dll_extension', tuple_var_assignment_3100_3324)
# Assigning a Call to a Name (line 132):
# Call to join(...): (line 132)
# Processing the call arguments (line 132)
# Getting the type of 'temp_dir' (line 132)
temp_dir_3328 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 132, 36), 'temp_dir', False)
# Getting the type of 'dll_name' (line 132)
dll_name_3329 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 132, 46), 'dll_name', False)
str_3330 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 132, 57), 'str', '.def')
# Applying the binary operator '+' (line 132)
result_add_3331 = python_operator(stypy.reporting.localization.Localization(__file__, 132, 46), '+', dll_name_3329, str_3330)
# Processing the call keyword arguments (line 132)
kwargs_3332 = {}
# Getting the type of 'os' (line 132)
os_3325 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 132, 23), 'os', False)
# Obtaining the member 'path' of a type (line 132)
path_3326 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 132, 23), os_3325, 'path')
# Obtaining the member 'join' of a type (line 132)
join_3327 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 132, 23), path_3326, 'join')
# Calling join(args, kwargs) (line 132)
join_call_result_3333 = invoke(stypy.reporting.localization.Localization(__file__, 132, 23), join_3327, *[temp_dir_3328, result_add_3331], **kwargs_3332)
# Assigning a type to the variable 'def_file' (line 132)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 132, 12), 'def_file', join_call_result_3333)
# Assigning a List to a Name (line 135):
# Obtaining an instance of the builtin type 'list' (line 135)
list_3334 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 135, 23), 'list')
# Adding type elements to the builtin type 'list' instance (line 135)
# Adding element type (line 135)
str_3335 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 136, 16), 'str', 'LIBRARY %s INITINSTANCE TERMINSTANCE')
# Obtaining the type of the subscript
int_3336 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 137, 68), 'int')
# Call to splitext(...): (line 137)
# Processing the call arguments (line 137)
# Call to basename(...): (line 137)
# Processing the call arguments (line 137)
# Getting the type of 'output_filename' (line 137)
output_filename_3343 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 137, 50), 'output_filename', False)
# Processing the call keyword arguments (line 137)
kwargs_3344 = {}
# Getting the type of 'os' (line 137)
os_3340 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 137, 33), 'os', False)
# Obtaining the member 'path' of a type (line 137)
path_3341 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 137, 33), os_3340, 'path')
# Obtaining the member 'basename' of a type (line 137)
basename_3342 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 137, 33), path_3341, 'basename')
# Calling basename(args, kwargs) (line 137)
basename_call_result_3345 = invoke(stypy.reporting.localization.Localization(__file__, 137, 33), basename_3342, *[output_filename_3343], **kwargs_3344)
# Processing the call keyword arguments (line 137)
kwargs_3346 = {}
# Getting the type of 'os' (line 137)
os_3337 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 137, 16), 'os', False)
# Obtaining the member 'path' of a type (line 137)
path_3338 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 137, 16), os_3337, 'path')
# Obtaining the member 'splitext' of a type (line 137)
splitext_3339 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 137, 16), path_3338, 'splitext')
# Calling splitext(args, kwargs) (line 137)
splitext_call_result_3347 = invoke(stypy.reporting.localization.Localization(__file__, 137, 16), splitext_3339, *[basename_call_result_3345], **kwargs_3346)
# Obtaining the member '__getitem__' of a type (line 137)
getitem___3348 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 137, 16), splitext_call_result_3347, '__getitem__')
# Calling the subscript (__getitem__) to obtain the elements type (line 137)
subscript_call_result_3349 = invoke(stypy.reporting.localization.Localization(__file__, 137, 16), getitem___3348, int_3336)
# Applying the binary operator '%' (line 136)
result_mod_3350 = python_operator(stypy.reporting.localization.Localization(__file__, 136, 16), '%', str_3335, subscript_call_result_3349)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 135, 23), list_3334, result_mod_3350)
# Adding element type (line 135)
str_3351 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 138, 16), 'str', 'DATA MULTIPLE NONSHARED')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 135, 23), list_3334, str_3351)
# Adding element type (line 135)
str_3352 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 139, 16), 'str', 'EXPORTS')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 135, 23), list_3334, str_3352)
# Assigning a type to the variable 'contents' (line 135)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 135, 12), 'contents', list_3334)
# Getting the type of 'export_symbols' (line 140)
export_symbols_3353 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 140, 23), 'export_symbols')
# Testing the type of a for loop iterable (line 140)
is_suitable_for_loop_condition(stypy.reporting.localization.Localization(__file__, 140, 12), export_symbols_3353)
# Getting the type of the for loop variable (line 140)
for_loop_var_3354 = get_type_of_for_loop_variable(stypy.reporting.localization.Localization(__file__, 140, 12), export_symbols_3353)
# Assigning a type to the variable 'sym' (line 140)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 140, 12), 'sym', for_loop_var_3354)
# SSA begins for a for statement (line 140)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'for loop')
# Call to append(...): (line 141)
# Processing the call arguments (line 141)
str_3357 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 141, 32), 'str', ' "%s"')
# Getting the type of 'sym' (line 141)
sym_3358 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 141, 43), 'sym', False)
# Applying the binary operator '%' (line 141)
result_mod_3359 = python_operator(stypy.reporting.localization.Localization(__file__, 141, 32), '%', str_3357, sym_3358)
# Processing the call keyword arguments (line 141)
kwargs_3360 = {}
# Getting the type of 'contents' (line 141)
contents_3355 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 141, 16), 'contents', False)
# Obtaining the member 'append' of a type (line 141)
append_3356 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 141, 16), contents_3355, 'append')
# Calling append(args, kwargs) (line 141)
append_call_result_3361 = invoke(stypy.reporting.localization.Localization(__file__, 141, 16), append_3356, *[result_mod_3359], **kwargs_3360)
# SSA join for a for statement
module_type_store = module_type_store.join_ssa_context()
# Call to execute(...): (line 142)
# Processing the call arguments (line 142)
# Getting the type of 'write_file' (line 142)
write_file_3364 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 142, 25), 'write_file', False)
# Obtaining an instance of the builtin type 'tuple' (line 142)
tuple_3365 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 142, 38), 'tuple')
# Adding type elements to the builtin type 'tuple' instance (line 142)
# Adding element type (line 142)
# Getting the type of 'def_file' (line 142)
def_file_3366 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 142, 38), 'def_file', False)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 142, 38), tuple_3365, def_file_3366)
# Adding element type (line 142)
# Getting the type of 'contents' (line 142)
contents_3367 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 142, 48), 'contents', False)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 142, 38), tuple_3365, contents_3367)
str_3368 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 143, 25), 'str', 'writing %s')
# Getting the type of 'def_file' (line 143)
def_file_3369 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 143, 40), 'def_file', False)
# Applying the binary operator '%' (line 143)
result_mod_3370 = python_operator(stypy.reporting.localization.Localization(__file__, 143, 25), '%', str_3368, def_file_3369)
# Processing the call keyword arguments (line 142)
kwargs_3371 = {}
# Getting the type of 'self' (line 142)
self_3362 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 142, 12), 'self', False)
# Obtaining the member 'execute' of a type (line 142)
execute_3363 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 142, 12), self_3362, 'execute')
# Calling execute(args, kwargs) (line 142)
execute_call_result_3372 = invoke(stypy.reporting.localization.Localization(__file__, 142, 12), execute_3363, *[write_file_3364, tuple_3365, result_mod_3370], **kwargs_3371)
# Call to append(...): (line 147)
# Processing the call arguments (line 147)
# Getting the type of 'def_file' (line 147)
def_file_3375 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 147, 27), 'def_file', False)
# Processing the call keyword arguments (line 147)
kwargs_3376 = {}
# Getting the type of 'objects' (line 147)
objects_3373 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 147, 12), 'objects', False)
# Obtaining the member 'append' of a type (line 147)
append_3374 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 147, 12), objects_3373, 'append')
# Calling append(args, kwargs) (line 147)
append_call_result_3377 = invoke(stypy.reporting.localization.Localization(__file__, 147, 12), append_3374, *[def_file_3375], **kwargs_3376)
# SSA join for if statement (line 116)
module_type_store = module_type_store.join_ssa_context()
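# Taken together, the records for lines 116-147 appear to model a def-file
# generation block of roughly this shape (a hedged reconstruction from the
# type-store operations above; string whitespace is approximate):
#
#   if export_symbols is not None and target_desc != self.EXECUTABLE:
#       temp_dir = os.path.dirname(objects[0])
#       (dll_name, dll_extension) = os.path.splitext(
#           os.path.basename(output_filename))
#       def_file = os.path.join(temp_dir, dll_name + '.def')
#       contents = [
#           'LIBRARY %s INITINSTANCE TERMINSTANCE'
#               % os.path.splitext(os.path.basename(output_filename))[0],
#           'DATA MULTIPLE NONSHARED',
#           'EXPORTS']
#       for sym in export_symbols:
#           contents.append(' "%s"' % sym)
#       self.execute(write_file, (def_file, contents),
#                    'writing %s' % def_file)
#       objects.append(def_file)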
# Getting the type of 'debug' (line 158)
debug_3378 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 158, 15), 'debug')
# Applying the 'not' unary operator (line 158)
result_not__3379 = python_operator(stypy.reporting.localization.Localization(__file__, 158, 11), 'not', debug_3378)
# Testing the type of an if condition (line 158)
if_condition_3380 = is_suitable_condition(stypy.reporting.localization.Localization(__file__, 158, 8), result_not__3379)
# Assigning a type to the variable 'if_condition_3380' (line 158)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 158, 8), 'if_condition_3380', if_condition_3380)
# SSA begins for if statement (line 158)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'if')
# Call to append(...): (line 159)
# Processing the call arguments (line 159)
str_3383 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 159, 33), 'str', '-s')
# Processing the call keyword arguments (line 159)
kwargs_3384 = {}
# Getting the type of 'extra_preargs' (line 159)
extra_preargs_3381 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 159, 12), 'extra_preargs', False)
# Obtaining the member 'append' of a type (line 159)
append_3382 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 159, 12), extra_preargs_3381, 'append')
# Calling append(args, kwargs) (line 159)
append_call_result_3385 = invoke(stypy.reporting.localization.Localization(__file__, 159, 12), append_3382, *[str_3383], **kwargs_3384)
# SSA join for if statement (line 158)
module_type_store = module_type_store.join_ssa_context()
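# The records for lines 158-159 appear to model symbol stripping for
# non-debug builds (a hedged sketch):
#
#   if not debug:
#       extra_preargs.append('-s')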
# Call to link(...): (line 161)
# Processing the call arguments (line 161)
# Getting the type of 'self' (line 161)
self_3388 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 161, 27), 'self', False)
# Getting the type of 'target_desc' (line 162)
target_desc_3389 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 162, 27), 'target_desc', False)
# Getting the type of 'objects' (line 163)
objects_3390 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 163, 27), 'objects', False)
# Getting the type of 'output_filename' (line 164)
output_filename_3391 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 164, 27), 'output_filename', False)
# Getting the type of 'output_dir' (line 165)
output_dir_3392 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 165, 27), 'output_dir', False)
# Getting the type of 'libraries' (line 166)
libraries_3393 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 166, 27), 'libraries', False)
# Getting the type of 'library_dirs' (line 167)
library_dirs_3394 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 167, 27), 'library_dirs', False)
# Getting the type of 'runtime_library_dirs' (line 168)
runtime_library_dirs_3395 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 168, 27), 'runtime_library_dirs', False)
# Getting the type of 'None' (line 169)
None_3396 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 169, 27), 'None', False)
# Getting the type of 'debug' (line 170)
debug_3397 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 170, 27), 'debug', False)
# Getting the type of 'extra_preargs' (line 171)
extra_preargs_3398 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 171, 27), 'extra_preargs', False)
# Getting the type of 'extra_postargs' (line 172)
extra_postargs_3399 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 172, 27), 'extra_postargs', False)
# Getting the type of 'build_temp' (line 173)
build_temp_3400 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 173, 27), 'build_temp', False)
# Getting the type of 'target_lang' (line 174)
target_lang_3401 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 174, 27), 'target_lang', False)
# Processing the call keyword arguments (line 161)
kwargs_3402 = {}
# Getting the type of 'UnixCCompiler' (line 161)
UnixCCompiler_3386 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 161, 8), 'UnixCCompiler', False)
# Obtaining the member 'link' of a type (line 161)
link_3387 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 161, 8), UnixCCompiler_3386, 'link')
# Calling link(args, kwargs) (line 161)
link_call_result_3403 = invoke(stypy.reporting.localization.Localization(__file__, 161, 8), link_3387, *[self_3388, target_desc_3389, objects_3390, output_filename_3391, output_dir_3392, libraries_3393, library_dirs_3394, runtime_library_dirs_3395, None_3396, debug_3397, extra_preargs_3398, extra_postargs_3399, build_temp_3400, target_lang_3401], **kwargs_3402)
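# The call records above appear to model delegation to the base class,
# with None passed in the export_symbols position (those are handled via
# the .def file instead). A hedged sketch of lines 161-174:
#
#   UnixCCompiler.link(self, target_desc, objects, output_filename,
#                      output_dir, libraries, library_dirs,
#                      runtime_library_dirs, None, debug, extra_preargs,
#                      extra_postargs, build_temp, target_lang)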
# ################# End of 'link(...)' code ##################
# Teardown call information
teardown_call_information(localization, arguments)
# Storing the return type of function 'link' in the type store
# Getting the type of 'stypy_return_type' (line 91)
stypy_return_type_3404 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 91, 4), 'stypy_return_type')
module_type_store.store_return_type_of_current_context(stypy_return_type_3404)
# Destroy the current context
module_type_store = module_type_store.close_function_context()
# Return type of the function 'link'
return stypy_return_type_3404
@norecursion
def object_filenames(type_of_self, localization, *varargs, **kwargs):
global module_type_store
# Assign values to the parameters with defaults
int_3405 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 184, 36), 'int')
str_3406 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 185, 37), 'str', '')
defaults = [int_3405, str_3406]
# Create a new context for function 'object_filenames'
module_type_store = module_type_store.open_function_context('object_filenames', 182, 4, False)
# Assigning a type to the variable 'self' (line 183)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 183, 4), 'self', type_of_self)
# Passed parameters checking function
EMXCCompiler.object_filenames.__dict__.__setitem__('stypy_localization', localization)
EMXCCompiler.object_filenames.__dict__.__setitem__('stypy_type_of_self', type_of_self)
EMXCCompiler.object_filenames.__dict__.__setitem__('stypy_type_store', module_type_store)
EMXCCompiler.object_filenames.__dict__.__setitem__('stypy_function_name', 'EMXCCompiler.object_filenames')
EMXCCompiler.object_filenames.__dict__.__setitem__('stypy_param_names_list', ['source_filenames', 'strip_dir', 'output_dir'])
EMXCCompiler.object_filenames.__dict__.__setitem__('stypy_varargs_param_name', None)
EMXCCompiler.object_filenames.__dict__.__setitem__('stypy_kwargs_param_name', None)
EMXCCompiler.object_filenames.__dict__.__setitem__('stypy_call_defaults', defaults)
EMXCCompiler.object_filenames.__dict__.__setitem__('stypy_call_varargs', varargs)
EMXCCompiler.object_filenames.__dict__.__setitem__('stypy_call_kwargs', kwargs)
EMXCCompiler.object_filenames.__dict__.__setitem__('stypy_declared_arg_number', 4)
arguments = process_argument_values(localization, type_of_self, module_type_store, 'EMXCCompiler.object_filenames', ['source_filenames', 'strip_dir', 'output_dir'], None, None, defaults, varargs, kwargs)
if is_error_type(arguments):
# Destroy the current context
module_type_store = module_type_store.close_function_context()
return arguments
# Initialize method data
init_call_information(module_type_store, 'object_filenames', localization, ['source_filenames', 'strip_dir', 'output_dir'], arguments)
# Default return type storage variable (SSA)
# Assigning a type to the variable 'stypy_return_type'
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'stypy_return_type', None)
# ################# Begin of 'object_filenames(...)' code ##################
# Type idiom detected: calculating its left and right part (line 186)
# Getting the type of 'output_dir' (line 186)
output_dir_3407 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 186, 11), 'output_dir')
# Getting the type of 'None' (line 186)
None_3408 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 186, 25), 'None')
(may_be_3409, more_types_in_union_3410) = may_be_none(output_dir_3407, None_3408)
if may_be_3409:
if more_types_in_union_3410:
# Runtime conditional SSA (line 186)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'idiom if')
else:
module_type_store = module_type_store
# Assigning a Str to a Name (line 186):
str_3411 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 186, 44), 'str', '')
# Assigning a type to the variable 'output_dir' (line 186)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 186, 31), 'output_dir', str_3411)
if more_types_in_union_3410:
# SSA join for if statement (line 186)
module_type_store = module_type_store.join_ssa_context()
# Assigning a List to a Name (line 187):
# Obtaining an instance of the builtin type 'list' (line 187)
list_3412 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 187, 20), 'list')
# Adding type elements to the builtin type 'list' instance (line 187)
# Assigning a type to the variable 'obj_names' (line 187)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 187, 8), 'obj_names', list_3412)
# Getting the type of 'source_filenames' (line 188)
source_filenames_3413 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 188, 24), 'source_filenames')
# Testing the type of a for loop iterable (line 188)
is_suitable_for_loop_condition(stypy.reporting.localization.Localization(__file__, 188, 8), source_filenames_3413)
# Getting the type of the for loop variable (line 188)
for_loop_var_3414 = get_type_of_for_loop_variable(stypy.reporting.localization.Localization(__file__, 188, 8), source_filenames_3413)
# Assigning a type to the variable 'src_name' (line 188)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 188, 8), 'src_name', for_loop_var_3414)
# SSA begins for a for statement (line 188)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'for loop')
# Assigning a Call to a Tuple (line 190):
# Assigning a Subscript to a Name (line 190):
# Obtaining the type of the subscript
int_3415 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 190, 12), 'int')
# Call to splitext(...): (line 190)
# Processing the call arguments (line 190)
# Call to normcase(...): (line 190)
# Processing the call arguments (line 190)
# Getting the type of 'src_name' (line 190)
src_name_3422 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 190, 61), 'src_name', False)
# Processing the call keyword arguments (line 190)
kwargs_3423 = {}
# Getting the type of 'os' (line 190)
os_3419 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 190, 44), 'os', False)
# Obtaining the member 'path' of a type (line 190)
path_3420 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 190, 44), os_3419, 'path')
# Obtaining the member 'normcase' of a type (line 190)
normcase_3421 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 190, 44), path_3420, 'normcase')
# Calling normcase(args, kwargs) (line 190)
normcase_call_result_3424 = invoke(stypy.reporting.localization.Localization(__file__, 190, 44), normcase_3421, *[src_name_3422], **kwargs_3423)
# Processing the call keyword arguments (line 190)
kwargs_3425 = {}
# Getting the type of 'os' (line 190)
os_3416 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 190, 26), 'os', False)
# Obtaining the member 'path' of a type (line 190)
path_3417 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 190, 26), os_3416, 'path')
# Obtaining the member 'splitext' of a type (line 190)
splitext_3418 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 190, 26), path_3417, 'splitext')
# Calling splitext(args, kwargs) (line 190)
splitext_call_result_3426 = invoke(stypy.reporting.localization.Localization(__file__, 190, 26), splitext_3418, *[normcase_call_result_3424], **kwargs_3425)
# Obtaining the member '__getitem__' of a type (line 190)
getitem___3427 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 190, 12), splitext_call_result_3426, '__getitem__')
# Calling the subscript (__getitem__) to obtain the elements type (line 190)
subscript_call_result_3428 = invoke(stypy.reporting.localization.Localization(__file__, 190, 12), getitem___3427, int_3415)
# Assigning a type to the variable 'tuple_var_assignment_3101' (line 190)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 190, 12), 'tuple_var_assignment_3101', subscript_call_result_3428)
# Assigning a Subscript to a Name (line 190):
# Obtaining the type of the subscript
int_3429 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 190, 12), 'int')
# Call to splitext(...): (line 190)
# Processing the call arguments (line 190)
# Call to normcase(...): (line 190)
# Processing the call arguments (line 190)
# Getting the type of 'src_name' (line 190)
src_name_3436 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 190, 61), 'src_name', False)
# Processing the call keyword arguments (line 190)
kwargs_3437 = {}
# Getting the type of 'os' (line 190)
os_3433 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 190, 44), 'os', False)
# Obtaining the member 'path' of a type (line 190)
path_3434 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 190, 44), os_3433, 'path')
# Obtaining the member 'normcase' of a type (line 190)
normcase_3435 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 190, 44), path_3434, 'normcase')
# Calling normcase(args, kwargs) (line 190)
normcase_call_result_3438 = invoke(stypy.reporting.localization.Localization(__file__, 190, 44), normcase_3435, *[src_name_3436], **kwargs_3437)
# Processing the call keyword arguments (line 190)
kwargs_3439 = {}
# Getting the type of 'os' (line 190)
os_3430 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 190, 26), 'os', False)
# Obtaining the member 'path' of a type (line 190)
path_3431 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 190, 26), os_3430, 'path')
# Obtaining the member 'splitext' of a type (line 190)
splitext_3432 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 190, 26), path_3431, 'splitext')
# Calling splitext(args, kwargs) (line 190)
splitext_call_result_3440 = invoke(stypy.reporting.localization.Localization(__file__, 190, 26), splitext_3432, *[normcase_call_result_3438], **kwargs_3439)
# Obtaining the member '__getitem__' of a type (line 190)
getitem___3441 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 190, 12), splitext_call_result_3440, '__getitem__')
# Calling the subscript (__getitem__) to obtain the elements type (line 190)
subscript_call_result_3442 = invoke(stypy.reporting.localization.Localization(__file__, 190, 12), getitem___3441, int_3429)
# Assigning a type to the variable 'tuple_var_assignment_3102' (line 190)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 190, 12), 'tuple_var_assignment_3102', subscript_call_result_3442)
# Assigning a Name to a Name (line 190):
# Getting the type of 'tuple_var_assignment_3101' (line 190)
tuple_var_assignment_3101_3443 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 190, 12), 'tuple_var_assignment_3101')
# Assigning a type to the variable 'base' (line 190)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 190, 13), 'base', tuple_var_assignment_3101_3443)
# Assigning a Name to a Name (line 190):
# Getting the type of 'tuple_var_assignment_3102' (line 190)
tuple_var_assignment_3102_3444 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 190, 12), 'tuple_var_assignment_3102')
# Assigning a type to the variable 'ext' (line 190)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 190, 19), 'ext', tuple_var_assignment_3102_3444)
# Getting the type of 'ext' (line 191)
ext_3445 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 191, 15), 'ext')
# Getting the type of 'self' (line 191)
self_3446 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 191, 27), 'self')
# Obtaining the member 'src_extensions' of a type (line 191)
src_extensions_3447 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 191, 27), self_3446, 'src_extensions')
# Obtaining an instance of the builtin type 'list' (line 191)
list_3448 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 191, 49), 'list')
# Adding type elements to the builtin type 'list' instance (line 191)
# Adding element type (line 191)
str_3449 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 191, 50), 'str', '.rc')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 191, 49), list_3448, str_3449)
# Applying the binary operator '+' (line 191)
result_add_3450 = python_operator(stypy.reporting.localization.Localization(__file__, 191, 27), '+', src_extensions_3447, list_3448)
# Applying the binary operator 'notin' (line 191)
result_contains_3451 = python_operator(stypy.reporting.localization.Localization(__file__, 191, 15), 'notin', ext_3445, result_add_3450)
# Testing the type of an if condition (line 191)
if_condition_3452 = is_suitable_condition(stypy.reporting.localization.Localization(__file__, 191, 12), result_contains_3451)
# Assigning a type to the variable 'if_condition_3452' (line 191)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 191, 12), 'if_condition_3452', if_condition_3452)
# SSA begins for if statement (line 191)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'if')
# Getting the type of 'UnknownFileError' (line 192)
UnknownFileError_3453 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 192, 22), 'UnknownFileError')
ensure_var_of_types(stypy.reporting.localization.Localization(__file__, 192, 16), UnknownFileError_3453, 'raise parameter', BaseException)
# SSA join for if statement (line 191)
module_type_store = module_type_store.join_ssa_context()
# Getting the type of 'strip_dir' (line 195)
strip_dir_3454 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 195, 15), 'strip_dir')
# Testing the type of an if condition (line 195)
if_condition_3455 = is_suitable_condition(stypy.reporting.localization.Localization(__file__, 195, 12), strip_dir_3454)
# Assigning a type to the variable 'if_condition_3455' (line 195)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 195, 12), 'if_condition_3455', if_condition_3455)
# SSA begins for if statement (line 195)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'if')
# Assigning a Call to a Name (line 196):
# Call to basename(...): (line 196)
# Processing the call arguments (line 196)
# Getting the type of 'base' (line 196)
base_3459 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 196, 41), 'base', False)
# Processing the call keyword arguments (line 196)
kwargs_3460 = {}
# Getting the type of 'os' (line 196)
os_3456 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 196, 23), 'os', False)
# Obtaining the member 'path' of a type (line 196)
path_3457 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 196, 23), os_3456, 'path')
# Obtaining the member 'basename' of a type (line 196)
basename_3458 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 196, 23), path_3457, 'basename')
# Calling basename(args, kwargs) (line 196)
basename_call_result_3461 = invoke(stypy.reporting.localization.Localization(__file__, 196, 23), basename_3458, *[base_3459], **kwargs_3460)
# Assigning a type to the variable 'base' (line 196)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 196, 16), 'base', basename_call_result_3461)
# SSA join for if statement (line 195)
module_type_store = module_type_store.join_ssa_context()
# Getting the type of 'ext' (line 197)
ext_3462 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 197, 15), 'ext')
str_3463 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 197, 22), 'str', '.rc')
# Applying the binary operator '==' (line 197)
result_eq_3464 = python_operator(stypy.reporting.localization.Localization(__file__, 197, 15), '==', ext_3462, str_3463)
# Testing the type of an if condition (line 197)
if_condition_3465 = is_suitable_condition(stypy.reporting.localization.Localization(__file__, 197, 12), result_eq_3464)
# Assigning a type to the variable 'if_condition_3465' (line 197)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 197, 12), 'if_condition_3465', if_condition_3465)
# SSA begins for if statement (line 197)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'if')
# Call to append(...): (line 199)
# Processing the call arguments (line 199)
# Call to join(...): (line 199)
# Processing the call arguments (line 199)
# Getting the type of 'output_dir' (line 199)
output_dir_3471 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 199, 48), 'output_dir', False)
# Getting the type of 'base' (line 200)
base_3472 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 200, 44), 'base', False)
# Getting the type of 'self' (line 200)
self_3473 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 200, 51), 'self', False)
# Obtaining the member 'res_extension' of a type (line 200)
res_extension_3474 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 200, 51), self_3473, 'res_extension')
# Applying the binary operator '+' (line 200)
result_add_3475 = python_operator(stypy.reporting.localization.Localization(__file__, 200, 44), '+', base_3472, res_extension_3474)
# Processing the call keyword arguments (line 199)
kwargs_3476 = {}
# Getting the type of 'os' (line 199)
os_3468 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 199, 34), 'os', False)
# Obtaining the member 'path' of a type (line 199)
path_3469 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 199, 34), os_3468, 'path')
# Obtaining the member 'join' of a type (line 199)
join_3470 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 199, 34), path_3469, 'join')
# Calling join(args, kwargs) (line 199)
join_call_result_3477 = invoke(stypy.reporting.localization.Localization(__file__, 199, 34), join_3470, *[output_dir_3471, result_add_3475], **kwargs_3476)
# Processing the call keyword arguments (line 199)
kwargs_3478 = {}
# Getting the type of 'obj_names' (line 199)
obj_names_3466 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 199, 16), 'obj_names', False)
# Obtaining the member 'append' of a type (line 199)
append_3467 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 199, 16), obj_names_3466, 'append')
# Calling append(args, kwargs) (line 199)
append_call_result_3479 = invoke(stypy.reporting.localization.Localization(__file__, 199, 16), append_3467, *[join_call_result_3477], **kwargs_3478)
# SSA branch for the else part of an if statement (line 197)
module_type_store.open_ssa_branch('else')
# Call to append(...): (line 202)
# Processing the call arguments (line 202)
# Call to join(...): (line 202)
# Processing the call arguments (line 202)
# Getting the type of 'output_dir' (line 202)
output_dir_3485 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 202, 48), 'output_dir', False)
# Getting the type of 'base' (line 203)
base_3486 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 203, 44), 'base', False)
# Getting the type of 'self' (line 203)
self_3487 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 203, 51), 'self', False)
# Obtaining the member 'obj_extension' of a type (line 203)
obj_extension_3488 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 203, 51), self_3487, 'obj_extension')
# Applying the binary operator '+' (line 203)
result_add_3489 = python_operator(stypy.reporting.localization.Localization(__file__, 203, 44), '+', base_3486, obj_extension_3488)
# Processing the call keyword arguments (line 202)
kwargs_3490 = {}
# Getting the type of 'os' (line 202)
os_3482 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 202, 34), 'os', False)
# Obtaining the member 'path' of a type (line 202)
path_3483 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 202, 34), os_3482, 'path')
# Obtaining the member 'join' of a type (line 202)
join_3484 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 202, 34), path_3483, 'join')
# Calling join(args, kwargs) (line 202)
join_call_result_3491 = invoke(stypy.reporting.localization.Localization(__file__, 202, 34), join_3484, *[output_dir_3485, result_add_3489], **kwargs_3490)
# Processing the call keyword arguments (line 202)
kwargs_3492 = {}
# Getting the type of 'obj_names' (line 202)
obj_names_3480 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 202, 16), 'obj_names', False)
# Obtaining the member 'append' of a type (line 202)
append_3481 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 202, 16), obj_names_3480, 'append')
# Calling append(args, kwargs) (line 202)
append_call_result_3493 = invoke(stypy.reporting.localization.Localization(__file__, 202, 16), append_3481, *[join_call_result_3491], **kwargs_3492)
# SSA join for if statement (line 197)
module_type_store = module_type_store.join_ssa_context()
# SSA join for a for statement
module_type_store = module_type_store.join_ssa_context()
# Getting the type of 'obj_names' (line 204)
obj_names_3494 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 204, 15), 'obj_names')
# Assigning a type to the variable 'stypy_return_type' (line 204)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 204, 8), 'stypy_return_type', obj_names_3494)
# ################# End of 'object_filenames(...)' code ##################
# Teardown call information
teardown_call_information(localization, arguments)
# Storing the return type of function 'object_filenames' in the type store
# Getting the type of 'stypy_return_type' (line 182)
stypy_return_type_3495 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 182, 4), 'stypy_return_type')
module_type_store.store_return_type_of_current_context(stypy_return_type_3495)
# Destroy the current context
module_type_store = module_type_store.close_function_context()
# Return type of the function 'object_filenames'
return stypy_return_type_3495
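# The records for lines 184-204 appear to model an object_filenames method
# of roughly this shape (a hedged reconstruction; the arguments to the
# UnknownFileError raise, if any, are not recoverable from the records):
#
#   def object_filenames(self, source_filenames, strip_dir=0,
#                        output_dir=''):
#       if output_dir is None:
#           output_dir = ''
#       obj_names = []
#       for src_name in source_filenames:
#           (base, ext) = os.path.splitext(os.path.normcase(src_name))
#           if ext not in (self.src_extensions + ['.rc']):
#               raise UnknownFileError
#           if strip_dir:
#               base = os.path.basename(base)
#           if ext == '.rc':
#               obj_names.append(os.path.join(output_dir,
#                                             base + self.res_extension))
#           else:
#               obj_names.append(os.path.join(output_dir,
#                                             base + self.obj_extension))
#       return obj_names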
@norecursion
def find_library_file(type_of_self, localization, *varargs, **kwargs):
global module_type_store
# Assign values to the parameters with defaults
int_3496 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 210, 49), 'int')
defaults = [int_3496]
# Create a new context for function 'find_library_file'
module_type_store = module_type_store.open_function_context('find_library_file', 210, 4, False)
# Assigning a type to the variable 'self' (line 211)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 211, 4), 'self', type_of_self)
# Passed parameters checking function
EMXCCompiler.find_library_file.__dict__.__setitem__('stypy_localization', localization)
EMXCCompiler.find_library_file.__dict__.__setitem__('stypy_type_of_self', type_of_self)
EMXCCompiler.find_library_file.__dict__.__setitem__('stypy_type_store', module_type_store)
EMXCCompiler.find_library_file.__dict__.__setitem__('stypy_function_name', 'EMXCCompiler.find_library_file')
EMXCCompiler.find_library_file.__dict__.__setitem__('stypy_param_names_list', ['dirs', 'lib', 'debug'])
EMXCCompiler.find_library_file.__dict__.__setitem__('stypy_varargs_param_name', None)
EMXCCompiler.find_library_file.__dict__.__setitem__('stypy_kwargs_param_name', None)
EMXCCompiler.find_library_file.__dict__.__setitem__('stypy_call_defaults', defaults)
EMXCCompiler.find_library_file.__dict__.__setitem__('stypy_call_varargs', varargs)
EMXCCompiler.find_library_file.__dict__.__setitem__('stypy_call_kwargs', kwargs)
EMXCCompiler.find_library_file.__dict__.__setitem__('stypy_declared_arg_number', 4)
arguments = process_argument_values(localization, type_of_self, module_type_store, 'EMXCCompiler.find_library_file', ['dirs', 'lib', 'debug'], None, None, defaults, varargs, kwargs)
if is_error_type(arguments):
# Destroy the current context
module_type_store = module_type_store.close_function_context()
return arguments
# Initialize method data
init_call_information(module_type_store, 'find_library_file', localization, ['dirs', 'lib', 'debug'], arguments)
# Default return type storage variable (SSA)
# Assigning a type to the variable 'stypy_return_type'
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'stypy_return_type', None)
# ################# Begin of 'find_library_file(...)' code ##################
# Assigning a BinOp to a Name (line 211):
# Assigning a BinOp to a Name (line 211):
str_3497 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 211, 19), 'str', '%s.lib')
# Getting the type of 'lib' (line 211)
lib_3498 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 211, 30), 'lib')
# Applying the binary operator '%' (line 211)
result_mod_3499 = python_operator(stypy.reporting.localization.Localization(__file__, 211, 19), '%', str_3497, lib_3498)
# Assigning a type to the variable 'shortlib' (line 211)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 211, 8), 'shortlib', result_mod_3499)
# Assigning a BinOp to a Name (line 212):
# Assigning a BinOp to a Name (line 212):
str_3500 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 212, 18), 'str', 'lib%s.lib')
# Getting the type of 'lib' (line 212)
lib_3501 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 212, 32), 'lib')
# Applying the binary operator '%' (line 212)
result_mod_3502 = python_operator(stypy.reporting.localization.Localization(__file__, 212, 18), '%', str_3500, lib_3501)
# Assigning a type to the variable 'longlib' (line 212)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 212, 8), 'longlib', result_mod_3502)
# SSA begins for try-except statement (line 215)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'try-except')
# Assigning a Call to a Name (line 216):
# Assigning a Call to a Name (line 216):
# Call to split(...): (line 216)
# Processing the call arguments (line 216)
str_3509 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 216, 56), 'str', ';')
# Processing the call keyword arguments (line 216)
kwargs_3510 = {}
# Obtaining the type of the subscript
str_3503 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 216, 34), 'str', 'LIBRARY_PATH')
# Getting the type of 'os' (line 216)
os_3504 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 216, 23), 'os', False)
# Obtaining the member 'environ' of a type (line 216)
environ_3505 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 216, 23), os_3504, 'environ')
# Obtaining the member '__getitem__' of a type (line 216)
getitem___3506 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 216, 23), environ_3505, '__getitem__')
# Calling the subscript (__getitem__) to obtain the elements type (line 216)
subscript_call_result_3507 = invoke(stypy.reporting.localization.Localization(__file__, 216, 23), getitem___3506, str_3503)
# Obtaining the member 'split' of a type (line 216)
split_3508 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 216, 23), subscript_call_result_3507, 'split')
# Calling split(args, kwargs) (line 216)
split_call_result_3511 = invoke(stypy.reporting.localization.Localization(__file__, 216, 23), split_3508, *[str_3509], **kwargs_3510)
# Assigning a type to the variable 'emx_dirs' (line 216)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 216, 12), 'emx_dirs', split_call_result_3511)
# SSA branch for the except part of a try statement (line 215)
# SSA branch for the except 'KeyError' branch of a try statement (line 215)
module_type_store.open_ssa_branch('except')
# Assigning a List to a Name (line 218):
# Assigning a List to a Name (line 218):
# Obtaining an instance of the builtin type 'list' (line 218)
list_3512 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 218, 23), 'list')
# Adding type elements to the builtin type 'list' instance (line 218)
# Assigning a type to the variable 'emx_dirs' (line 218)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 218, 12), 'emx_dirs', list_3512)
# SSA join for try-except statement (line 215)
module_type_store = module_type_store.join_ssa_context()
# Getting the type of 'dirs' (line 220)
dirs_3513 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 220, 19), 'dirs')
# Getting the type of 'emx_dirs' (line 220)
emx_dirs_3514 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 220, 26), 'emx_dirs')
# Applying the binary operator '+' (line 220)
result_add_3515 = python_operator(stypy.reporting.localization.Localization(__file__, 220, 19), '+', dirs_3513, emx_dirs_3514)
# Testing the type of a for loop iterable (line 220)
is_suitable_for_loop_condition(stypy.reporting.localization.Localization(__file__, 220, 8), result_add_3515)
# Getting the type of the for loop variable (line 220)
for_loop_var_3516 = get_type_of_for_loop_variable(stypy.reporting.localization.Localization(__file__, 220, 8), result_add_3515)
# Assigning a type to the variable 'dir' (line 220)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 220, 8), 'dir', for_loop_var_3516)
# SSA begins for a for statement (line 220)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'for loop')
# Assigning a Call to a Name (line 221):
# Assigning a Call to a Name (line 221):
# Call to join(...): (line 221)
# Processing the call arguments (line 221)
# Getting the type of 'dir' (line 221)
dir_3520 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 221, 37), 'dir', False)
# Getting the type of 'shortlib' (line 221)
shortlib_3521 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 221, 42), 'shortlib', False)
# Processing the call keyword arguments (line 221)
kwargs_3522 = {}
# Getting the type of 'os' (line 221)
os_3517 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 221, 24), 'os', False)
# Obtaining the member 'path' of a type (line 221)
path_3518 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 221, 24), os_3517, 'path')
# Obtaining the member 'join' of a type (line 221)
join_3519 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 221, 24), path_3518, 'join')
# Calling join(args, kwargs) (line 221)
join_call_result_3523 = invoke(stypy.reporting.localization.Localization(__file__, 221, 24), join_3519, *[dir_3520, shortlib_3521], **kwargs_3522)
# Assigning a type to the variable 'shortlibp' (line 221)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 221, 12), 'shortlibp', join_call_result_3523)
# Assigning a Call to a Name (line 222):
# Assigning a Call to a Name (line 222):
# Call to join(...): (line 222)
# Processing the call arguments (line 222)
# Getting the type of 'dir' (line 222)
dir_3527 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 222, 36), 'dir', False)
# Getting the type of 'longlib' (line 222)
longlib_3528 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 222, 41), 'longlib', False)
# Processing the call keyword arguments (line 222)
kwargs_3529 = {}
# Getting the type of 'os' (line 222)
os_3524 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 222, 23), 'os', False)
# Obtaining the member 'path' of a type (line 222)
path_3525 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 222, 23), os_3524, 'path')
# Obtaining the member 'join' of a type (line 222)
join_3526 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 222, 23), path_3525, 'join')
# Calling join(args, kwargs) (line 222)
join_call_result_3530 = invoke(stypy.reporting.localization.Localization(__file__, 222, 23), join_3526, *[dir_3527, longlib_3528], **kwargs_3529)
# Assigning a type to the variable 'longlibp' (line 222)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 222, 12), 'longlibp', join_call_result_3530)
# Call to exists(...): (line 223)
# Processing the call arguments (line 223)
# Getting the type of 'shortlibp' (line 223)
shortlibp_3534 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 223, 30), 'shortlibp', False)
# Processing the call keyword arguments (line 223)
kwargs_3535 = {}
# Getting the type of 'os' (line 223)
os_3531 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 223, 15), 'os', False)
# Obtaining the member 'path' of a type (line 223)
path_3532 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 223, 15), os_3531, 'path')
# Obtaining the member 'exists' of a type (line 223)
exists_3533 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 223, 15), path_3532, 'exists')
# Calling exists(args, kwargs) (line 223)
exists_call_result_3536 = invoke(stypy.reporting.localization.Localization(__file__, 223, 15), exists_3533, *[shortlibp_3534], **kwargs_3535)
# Testing the type of an if condition (line 223)
if_condition_3537 = is_suitable_condition(stypy.reporting.localization.Localization(__file__, 223, 12), exists_call_result_3536)
# Assigning a type to the variable 'if_condition_3537' (line 223)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 223, 12), 'if_condition_3537', if_condition_3537)
# SSA begins for if statement (line 223)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'if')
# Getting the type of 'shortlibp' (line 224)
shortlibp_3538 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 224, 23), 'shortlibp')
# Assigning a type to the variable 'stypy_return_type' (line 224)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 224, 16), 'stypy_return_type', shortlibp_3538)
# SSA branch for the else part of an if statement (line 223)
module_type_store.open_ssa_branch('else')
# Call to exists(...): (line 225)
# Processing the call arguments (line 225)
# Getting the type of 'longlibp' (line 225)
longlibp_3542 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 225, 32), 'longlibp', False)
# Processing the call keyword arguments (line 225)
kwargs_3543 = {}
# Getting the type of 'os' (line 225)
os_3539 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 225, 17), 'os', False)
# Obtaining the member 'path' of a type (line 225)
path_3540 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 225, 17), os_3539, 'path')
# Obtaining the member 'exists' of a type (line 225)
exists_3541 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 225, 17), path_3540, 'exists')
# Calling exists(args, kwargs) (line 225)
exists_call_result_3544 = invoke(stypy.reporting.localization.Localization(__file__, 225, 17), exists_3541, *[longlibp_3542], **kwargs_3543)
# Testing the type of an if condition (line 225)
if_condition_3545 = is_suitable_condition(stypy.reporting.localization.Localization(__file__, 225, 17), exists_call_result_3544)
# Assigning a type to the variable 'if_condition_3545' (line 225)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 225, 17), 'if_condition_3545', if_condition_3545)
# SSA begins for if statement (line 225)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'if')
# Getting the type of 'longlibp' (line 226)
longlibp_3546 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 226, 23), 'longlibp')
# Assigning a type to the variable 'stypy_return_type' (line 226)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 226, 16), 'stypy_return_type', longlibp_3546)
# SSA join for if statement (line 225)
module_type_store = module_type_store.join_ssa_context()
# SSA join for if statement (line 223)
module_type_store = module_type_store.join_ssa_context()
# SSA join for a for statement
module_type_store = module_type_store.join_ssa_context()
# Getting the type of 'None' (line 229)
None_3547 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 229, 15), 'None')
# Assigning a type to the variable 'stypy_return_type' (line 229)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 229, 8), 'stypy_return_type', None_3547)
# ################# End of 'find_library_file(...)' code ##################
# Teardown call information
teardown_call_information(localization, arguments)
# Storing the return type of function 'find_library_file' in the type store
# Getting the type of 'stypy_return_type' (line 210)
stypy_return_type_3548 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 210, 4), 'stypy_return_type')
module_type_store.store_return_type_of_current_context(stypy_return_type_3548)
# Destroy the current context
module_type_store = module_type_store.close_function_context()
# Return type of the function 'find_library_file'
return stypy_return_type_3548
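    # For reference, the original distutils method that the trace above
    # models can be read back from the localization comments (a best-effort
    # sketch of the Python 2 source, not verbatim):
    #
    #     def find_library_file(self, dirs, lib, debug=0):
    #         shortlib = '%s.lib' % lib
    #         longlib = 'lib%s.lib' % lib
    #         try:
    #             emx_dirs = os.environ['LIBRARY_PATH'].split(';')
    #         except KeyError:
    #             emx_dirs = []
    #         for dir in dirs + emx_dirs:
    #             shortlibp = os.path.join(dir, shortlib)
    #             longlibp = os.path.join(dir, longlib)
    #             if os.path.exists(shortlibp):
    #                 return shortlibp
    #             elif os.path.exists(longlibp):
    #                 return longlibp
    #         return None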
# Assigning a type to the variable 'EMXCCompiler' (line 31)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 31, 0), 'EMXCCompiler', EMXCCompiler)
# Assigning a Str to a Name (line 33):
str_3549 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 33, 20), 'str', 'emx')
# Getting the type of 'EMXCCompiler'
EMXCCompiler_3550 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'EMXCCompiler')
# Setting the type of the member 'compiler_type' of a type
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 0, 0), EMXCCompiler_3550, 'compiler_type', str_3549)
# Assigning a Str to a Name (line 34):
str_3551 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 34, 20), 'str', '.obj')
# Getting the type of 'EMXCCompiler'
EMXCCompiler_3552 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'EMXCCompiler')
# Setting the type of the member 'obj_extension' of a type
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 0, 0), EMXCCompiler_3552, 'obj_extension', str_3551)
# Assigning a Str to a Name (line 35):
str_3553 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 35, 27), 'str', '.lib')
# Getting the type of 'EMXCCompiler'
EMXCCompiler_3554 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'EMXCCompiler')
# Setting the type of the member 'static_lib_extension' of a type
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 0, 0), EMXCCompiler_3554, 'static_lib_extension', str_3553)
# Assigning a Str to a Name (line 36):
str_3555 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 36, 27), 'str', '.dll')
# Getting the type of 'EMXCCompiler'
EMXCCompiler_3556 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'EMXCCompiler')
# Setting the type of the member 'shared_lib_extension' of a type
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 0, 0), EMXCCompiler_3556, 'shared_lib_extension', str_3555)
# Assigning a Str to a Name (line 37):
str_3557 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 37, 24), 'str', '%s%s')
# Getting the type of 'EMXCCompiler'
EMXCCompiler_3558 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'EMXCCompiler')
# Setting the type of the member 'static_lib_format' of a type
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 0, 0), EMXCCompiler_3558, 'static_lib_format', str_3557)
# Assigning a Str to a Name (line 38):
str_3559 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 38, 24), 'str', '%s%s')
# Getting the type of 'EMXCCompiler'
EMXCCompiler_3560 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'EMXCCompiler')
# Setting the type of the member 'shared_lib_format' of a type
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 0, 0), EMXCCompiler_3560, 'shared_lib_format', str_3559)
# Assigning a Str to a Name (line 39):
str_3561 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 39, 20), 'str', '.res')
# Getting the type of 'EMXCCompiler'
EMXCCompiler_3562 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'EMXCCompiler')
# Setting the type of the member 'res_extension' of a type
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 0, 0), EMXCCompiler_3562, 'res_extension', str_3561)
# Assigning a Str to a Name (line 40):
str_3563 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 40, 20), 'str', '.exe')
# Getting the type of 'EMXCCompiler'
EMXCCompiler_3564 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'EMXCCompiler')
# Setting the type of the member 'exe_extension' of a type
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 0, 0), EMXCCompiler_3564, 'exe_extension', str_3563)
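# Taken together, the member assignments above model the class-level
# attributes of the original compiler class (read back from the trace; the
# base class is an assumption, since it is not visible in this excerpt):
#
#     class EMXCCompiler(UnixCCompiler):
#         compiler_type = 'emx'
#         obj_extension = '.obj'
#         static_lib_extension = '.lib'
#         shared_lib_extension = '.dll'
#         static_lib_format = '%s%s'
#         shared_lib_format = '%s%s'
#         res_extension = '.res'
#         exe_extension = '.exe'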
# Assigning a Str to a Name (line 238):
# Assigning a Str to a Name (line 238):
str_3565 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 238, 14), 'str', 'ok')
# Assigning a type to the variable 'CONFIG_H_OK' (line 238)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 238, 0), 'CONFIG_H_OK', str_3565)
# Assigning a Str to a Name (line 239):
# Assigning a Str to a Name (line 239):
str_3566 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 239, 17), 'str', 'not ok')
# Assigning a type to the variable 'CONFIG_H_NOTOK' (line 239)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 239, 0), 'CONFIG_H_NOTOK', str_3566)
# Assigning a Str to a Name (line 240):
# Assigning a Str to a Name (line 240):
str_3567 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 240, 21), 'str', 'uncertain')
# Assigning a type to the variable 'CONFIG_H_UNCERTAIN' (line 240)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 240, 0), 'CONFIG_H_UNCERTAIN', str_3567)
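# The three module-level constants being modeled are directly recoverable
# from the trace above:
#
#     CONFIG_H_OK = 'ok'
#     CONFIG_H_NOTOK = 'not ok'
#     CONFIG_H_UNCERTAIN = 'uncertain'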
@norecursion
def check_config_h(localization, *varargs, **kwargs):
global module_type_store
# Assign values to the parameters with defaults
defaults = []
# Create a new context for function 'check_config_h'
module_type_store = module_type_store.open_function_context('check_config_h', 242, 0, False)
# Passed parameters checking function
check_config_h.stypy_localization = localization
check_config_h.stypy_type_of_self = None
check_config_h.stypy_type_store = module_type_store
check_config_h.stypy_function_name = 'check_config_h'
check_config_h.stypy_param_names_list = []
check_config_h.stypy_varargs_param_name = None
check_config_h.stypy_kwargs_param_name = None
check_config_h.stypy_call_defaults = defaults
check_config_h.stypy_call_varargs = varargs
check_config_h.stypy_call_kwargs = kwargs
arguments = process_argument_values(localization, None, module_type_store, 'check_config_h', [], None, None, defaults, varargs, kwargs)
if is_error_type(arguments):
# Destroy the current context
module_type_store = module_type_store.close_function_context()
return arguments
# Initialize method data
init_call_information(module_type_store, 'check_config_h', localization, [], arguments)
# Default return type storage variable (SSA)
# Assigning a type to the variable 'stypy_return_type'
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'stypy_return_type', None)
# ################# Begin of 'check_config_h(...)' code ##################
str_3568 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 258, (-1)), 'str', 'Check if the current Python installation (specifically, pyconfig.h)\n appears amenable to building extensions with GCC. Returns a tuple\n (status, details), where \'status\' is one of the following constants:\n CONFIG_H_OK\n all is well, go ahead and compile\n CONFIG_H_NOTOK\n doesn\'t look good\n CONFIG_H_UNCERTAIN\n not sure -- unable to read pyconfig.h\n \'details\' is a human-readable string explaining the situation.\n\n Note there are two ways to conclude "OK": either \'sys.version\' contains\n the string "GCC" (implying that this Python was built with GCC), or the\n installed "pyconfig.h" contains the string "__GNUC__".\n ')
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 263, 4))
# 'from distutils import sysconfig' statement (line 263)
try:
from distutils import sysconfig
except:
sysconfig = UndefinedType
import_from_module(stypy.reporting.localization.Localization(__file__, 263, 4), 'distutils', None, module_type_store, ['sysconfig'], [sysconfig])
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 264, 4))
# 'import string' statement (line 264)
import string
import_module(stypy.reporting.localization.Localization(__file__, 264, 4), 'string', string, module_type_store)
# Call to find(...): (line 267)
# Processing the call arguments (line 267)
# Getting the type of 'sys' (line 267)
sys_3571 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 267, 19), 'sys', False)
# Obtaining the member 'version' of a type (line 267)
version_3572 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 267, 19), sys_3571, 'version')
str_3573 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 267, 31), 'str', 'GCC')
# Processing the call keyword arguments (line 267)
kwargs_3574 = {}
# Getting the type of 'string' (line 267)
string_3569 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 267, 7), 'string', False)
# Obtaining the member 'find' of a type (line 267)
find_3570 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 267, 7), string_3569, 'find')
# Calling find(args, kwargs) (line 267)
find_call_result_3575 = invoke(stypy.reporting.localization.Localization(__file__, 267, 7), find_3570, *[version_3572, str_3573], **kwargs_3574)
int_3576 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 267, 41), 'int')
# Applying the binary operator '>=' (line 267)
result_ge_3577 = python_operator(stypy.reporting.localization.Localization(__file__, 267, 7), '>=', find_call_result_3575, int_3576)
# Testing the type of an if condition (line 267)
if_condition_3578 = is_suitable_condition(stypy.reporting.localization.Localization(__file__, 267, 4), result_ge_3577)
# Assigning a type to the variable 'if_condition_3578' (line 267)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 267, 4), 'if_condition_3578', if_condition_3578)
# SSA begins for if statement (line 267)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'if')
# Obtaining an instance of the builtin type 'tuple' (line 268)
tuple_3579 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 268, 16), 'tuple')
# Adding type elements to the builtin type 'tuple' instance (line 268)
# Adding element type (line 268)
# Getting the type of 'CONFIG_H_OK' (line 268)
CONFIG_H_OK_3580 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 268, 16), 'CONFIG_H_OK')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 268, 16), tuple_3579, CONFIG_H_OK_3580)
# Adding element type (line 268)
str_3581 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 268, 29), 'str', "sys.version mentions 'GCC'")
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 268, 16), tuple_3579, str_3581)
# Assigning a type to the variable 'stypy_return_type' (line 268)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 268, 8), 'stypy_return_type', tuple_3579)
# SSA join for if statement (line 267)
module_type_store = module_type_store.join_ssa_context()
# Assigning a Call to a Name (line 270):
# Assigning a Call to a Name (line 270):
# Call to get_config_h_filename(...): (line 270)
# Processing the call keyword arguments (line 270)
kwargs_3584 = {}
# Getting the type of 'sysconfig' (line 270)
sysconfig_3582 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 270, 9), 'sysconfig', False)
# Obtaining the member 'get_config_h_filename' of a type (line 270)
get_config_h_filename_3583 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 270, 9), sysconfig_3582, 'get_config_h_filename')
# Calling get_config_h_filename(args, kwargs) (line 270)
get_config_h_filename_call_result_3585 = invoke(stypy.reporting.localization.Localization(__file__, 270, 9), get_config_h_filename_3583, *[], **kwargs_3584)
# Assigning a type to the variable 'fn' (line 270)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 270, 4), 'fn', get_config_h_filename_call_result_3585)
# SSA begins for try-except statement (line 271)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'try-except')
# Assigning a Call to a Name (line 274):
# Assigning a Call to a Name (line 274):
# Call to open(...): (line 274)
# Processing the call arguments (line 274)
# Getting the type of 'fn' (line 274)
fn_3587 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 274, 17), 'fn', False)
# Processing the call keyword arguments (line 274)
kwargs_3588 = {}
# Getting the type of 'open' (line 274)
open_3586 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 274, 12), 'open', False)
# Calling open(args, kwargs) (line 274)
open_call_result_3589 = invoke(stypy.reporting.localization.Localization(__file__, 274, 12), open_3586, *[fn_3587], **kwargs_3588)
# Assigning a type to the variable 'f' (line 274)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 274, 8), 'f', open_call_result_3589)
# Try-finally block (line 275)
# Assigning a Call to a Name (line 276):
# Assigning a Call to a Name (line 276):
# Call to read(...): (line 276)
# Processing the call keyword arguments (line 276)
kwargs_3592 = {}
# Getting the type of 'f' (line 276)
f_3590 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 276, 16), 'f', False)
# Obtaining the member 'read' of a type (line 276)
read_3591 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 276, 16), f_3590, 'read')
# Calling read(args, kwargs) (line 276)
read_call_result_3593 = invoke(stypy.reporting.localization.Localization(__file__, 276, 16), read_3591, *[], **kwargs_3592)
# Assigning a type to the variable 's' (line 276)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 276, 12), 's', read_call_result_3593)
# finally branch of the try-finally block (line 275)
# Call to close(...): (line 278)
# Processing the call keyword arguments (line 278)
kwargs_3596 = {}
# Getting the type of 'f' (line 278)
f_3594 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 278, 12), 'f', False)
# Obtaining the member 'close' of a type (line 278)
close_3595 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 278, 12), f_3594, 'close')
# Calling close(args, kwargs) (line 278)
close_call_result_3597 = invoke(stypy.reporting.localization.Localization(__file__, 278, 12), close_3595, *[], **kwargs_3596)
# SSA branch for the except part of a try statement (line 271)
# SSA branch for the except 'IOError' branch of a try statement (line 271)
# Storing handler type
module_type_store.open_ssa_branch('except')
# Getting the type of 'IOError' (line 280)
IOError_3598 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 280, 11), 'IOError')
# Assigning a type to the variable 'exc' (line 280)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 280, 4), 'exc', IOError_3598)
# Obtaining an instance of the builtin type 'tuple' (line 283)
tuple_3599 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 283, 16), 'tuple')
# Adding type elements to the builtin type 'tuple' instance (line 283)
# Adding element type (line 283)
# Getting the type of 'CONFIG_H_UNCERTAIN' (line 283)
CONFIG_H_UNCERTAIN_3600 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 283, 16), 'CONFIG_H_UNCERTAIN')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 283, 16), tuple_3599, CONFIG_H_UNCERTAIN_3600)
# Adding element type (line 283)
str_3601 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 284, 16), 'str', "couldn't read '%s': %s")
# Obtaining an instance of the builtin type 'tuple' (line 284)
tuple_3602 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 284, 44), 'tuple')
# Adding type elements to the builtin type 'tuple' instance (line 284)
# Adding element type (line 284)
# Getting the type of 'fn' (line 284)
fn_3603 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 284, 44), 'fn')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 284, 44), tuple_3602, fn_3603)
# Adding element type (line 284)
# Getting the type of 'exc' (line 284)
exc_3604 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 284, 48), 'exc')
# Obtaining the member 'strerror' of a type (line 284)
strerror_3605 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 284, 48), exc_3604, 'strerror')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 284, 44), tuple_3602, strerror_3605)
# Applying the binary operator '%' (line 284)
result_mod_3606 = python_operator(stypy.reporting.localization.Localization(__file__, 284, 16), '%', str_3601, tuple_3602)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 283, 16), tuple_3599, result_mod_3606)
# Assigning a type to the variable 'stypy_return_type' (line 283)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 283, 8), 'stypy_return_type', tuple_3599)
# SSA branch for the else branch of a try statement (line 271)
module_type_store.open_ssa_branch('except else')
# Call to find(...): (line 288)
# Processing the call arguments (line 288)
# Getting the type of 's' (line 288)
s_3609 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 288, 23), 's', False)
str_3610 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 288, 25), 'str', '__GNUC__')
# Processing the call keyword arguments (line 288)
kwargs_3611 = {}
# Getting the type of 'string' (line 288)
string_3607 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 288, 11), 'string', False)
# Obtaining the member 'find' of a type (line 288)
find_3608 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 288, 11), string_3607, 'find')
# Calling find(args, kwargs) (line 288)
find_call_result_3612 = invoke(stypy.reporting.localization.Localization(__file__, 288, 11), find_3608, *[s_3609, str_3610], **kwargs_3611)
int_3613 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 288, 40), 'int')
# Applying the binary operator '>=' (line 288)
result_ge_3614 = python_operator(stypy.reporting.localization.Localization(__file__, 288, 11), '>=', find_call_result_3612, int_3613)
# Testing the type of an if condition (line 288)
if_condition_3615 = is_suitable_condition(stypy.reporting.localization.Localization(__file__, 288, 8), result_ge_3614)
# Assigning a type to the variable 'if_condition_3615' (line 288)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 288, 8), 'if_condition_3615', if_condition_3615)
# SSA begins for if statement (line 288)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'if')
# Obtaining an instance of the builtin type 'tuple' (line 289)
tuple_3616 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 289, 20), 'tuple')
# Adding type elements to the builtin type 'tuple' instance (line 289)
# Adding element type (line 289)
# Getting the type of 'CONFIG_H_OK' (line 289)
CONFIG_H_OK_3617 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 289, 20), 'CONFIG_H_OK')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 289, 20), tuple_3616, CONFIG_H_OK_3617)
# Adding element type (line 289)
str_3618 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 289, 33), 'str', "'%s' mentions '__GNUC__'")
# Getting the type of 'fn' (line 289)
fn_3619 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 289, 62), 'fn')
# Applying the binary operator '%' (line 289)
result_mod_3620 = python_operator(stypy.reporting.localization.Localization(__file__, 289, 33), '%', str_3618, fn_3619)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 289, 20), tuple_3616, result_mod_3620)
# Assigning a type to the variable 'stypy_return_type' (line 289)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 289, 12), 'stypy_return_type', tuple_3616)
# SSA branch for the else part of an if statement (line 288)
module_type_store.open_ssa_branch('else')
# Obtaining an instance of the builtin type 'tuple' (line 291)
tuple_3621 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 291, 20), 'tuple')
# Adding type elements to the builtin type 'tuple' instance (line 291)
# Adding element type (line 291)
# Getting the type of 'CONFIG_H_NOTOK' (line 291)
CONFIG_H_NOTOK_3622 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 291, 20), 'CONFIG_H_NOTOK')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 291, 20), tuple_3621, CONFIG_H_NOTOK_3622)
# Adding element type (line 291)
str_3623 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 291, 36), 'str', "'%s' does not mention '__GNUC__'")
# Getting the type of 'fn' (line 291)
fn_3624 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 291, 73), 'fn')
# Applying the binary operator '%' (line 291)
result_mod_3625 = python_operator(stypy.reporting.localization.Localization(__file__, 291, 36), '%', str_3623, fn_3624)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 291, 20), tuple_3621, result_mod_3625)
# Assigning a type to the variable 'stypy_return_type' (line 291)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 291, 12), 'stypy_return_type', tuple_3621)
# SSA join for if statement (line 288)
module_type_store = module_type_store.join_ssa_context()
# SSA join for try-except statement (line 271)
module_type_store = module_type_store.join_ssa_context()
# ################# End of 'check_config_h(...)' code ##################
# Teardown call information
teardown_call_information(localization, arguments)
# Storing the return type of function 'check_config_h' in the type store
# Getting the type of 'stypy_return_type' (line 242)
stypy_return_type_3626 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 242, 0), 'stypy_return_type')
module_type_store.store_return_type_of_current_context(stypy_return_type_3626)
# Destroy the current context
module_type_store = module_type_store.close_function_context()
# Return type of the function 'check_config_h'
return stypy_return_type_3626
# Assigning a type to the variable 'check_config_h' (line 242)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 242, 0), 'check_config_h', check_config_h)
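# Read back from the localization comments, the function modeled above is
# approximately the following Python 2 source (a sketch, not verbatim):
#
#     def check_config_h():
#         from distutils import sysconfig
#         import string
#         if string.find(sys.version, 'GCC') >= 0:
#             return (CONFIG_H_OK, "sys.version mentions 'GCC'")
#         fn = sysconfig.get_config_h_filename()
#         try:
#             f = open(fn)
#             try:
#                 s = f.read()
#             finally:
#                 f.close()
#         except IOError, exc:
#             return (CONFIG_H_UNCERTAIN,
#                     "couldn't read '%s': %s" % (fn, exc.strerror))
#         else:
#             if string.find(s, '__GNUC__') >= 0:
#                 return (CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn)
#             else:
#                 return (CONFIG_H_NOTOK,
#                         "'%s' does not mention '__GNUC__'" % fn)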
@norecursion
def get_versions(localization, *varargs, **kwargs):
global module_type_store
# Assign values to the parameters with defaults
defaults = []
# Create a new context for function 'get_versions'
module_type_store = module_type_store.open_function_context('get_versions', 294, 0, False)
# Passed parameters checking function
get_versions.stypy_localization = localization
get_versions.stypy_type_of_self = None
get_versions.stypy_type_store = module_type_store
get_versions.stypy_function_name = 'get_versions'
get_versions.stypy_param_names_list = []
get_versions.stypy_varargs_param_name = None
get_versions.stypy_kwargs_param_name = None
get_versions.stypy_call_defaults = defaults
get_versions.stypy_call_varargs = varargs
get_versions.stypy_call_kwargs = kwargs
arguments = process_argument_values(localization, None, module_type_store, 'get_versions', [], None, None, defaults, varargs, kwargs)
if is_error_type(arguments):
# Destroy the current context
module_type_store = module_type_store.close_function_context()
return arguments
# Initialize method data
init_call_information(module_type_store, 'get_versions', localization, [], arguments)
# Default return type storage variable (SSA)
# Assigning a type to the variable 'stypy_return_type'
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'stypy_return_type', None)
# ################# Begin of 'get_versions(...)' code ##################
str_3627 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 297, (-1)), 'str', ' Try to find out the versions of gcc and ld.\n If not possible it returns None for it.\n ')
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 298, 4))
# 'from distutils.version import StrictVersion' statement (line 298)
update_path_to_current_file_folder('C:/Python27/lib/distutils/')
import_3628 = generate_type_inference_code_for_module(stypy.reporting.localization.Localization(__file__, 298, 4), 'distutils.version')
if (type(import_3628) is not StypyTypeError):
if (import_3628 != 'pyd_module'):
__import__(import_3628)
sys_modules_3629 = sys.modules[import_3628]
import_from_module(stypy.reporting.localization.Localization(__file__, 298, 4), 'distutils.version', sys_modules_3629.module_type_store, module_type_store, ['StrictVersion'])
nest_module(stypy.reporting.localization.Localization(__file__, 298, 4), __file__, sys_modules_3629, sys_modules_3629.module_type_store, module_type_store)
else:
from distutils.version import StrictVersion
import_from_module(stypy.reporting.localization.Localization(__file__, 298, 4), 'distutils.version', None, module_type_store, ['StrictVersion'], [StrictVersion])
else:
# Assigning a type to the variable 'distutils.version' (line 298)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 298, 4), 'distutils.version', import_3628)
remove_current_file_folder_from_path('C:/Python27/lib/distutils/')
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 299, 4))
# 'from distutils.spawn import find_executable' statement (line 299)
update_path_to_current_file_folder('C:/Python27/lib/distutils/')
import_3630 = generate_type_inference_code_for_module(stypy.reporting.localization.Localization(__file__, 299, 4), 'distutils.spawn')
if (type(import_3630) is not StypyTypeError):
if (import_3630 != 'pyd_module'):
__import__(import_3630)
sys_modules_3631 = sys.modules[import_3630]
import_from_module(stypy.reporting.localization.Localization(__file__, 299, 4), 'distutils.spawn', sys_modules_3631.module_type_store, module_type_store, ['find_executable'])
nest_module(stypy.reporting.localization.Localization(__file__, 299, 4), __file__, sys_modules_3631, sys_modules_3631.module_type_store, module_type_store)
else:
from distutils.spawn import find_executable
import_from_module(stypy.reporting.localization.Localization(__file__, 299, 4), 'distutils.spawn', None, module_type_store, ['find_executable'], [find_executable])
else:
# Assigning a type to the variable 'distutils.spawn' (line 299)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 299, 4), 'distutils.spawn', import_3630)
remove_current_file_folder_from_path('C:/Python27/lib/distutils/')
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 300, 4))
# 'import re' statement (line 300)
import re
import_module(stypy.reporting.localization.Localization(__file__, 300, 4), 're', re, module_type_store)
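    # Note on the two import idioms above (descriptive comment): plain stdlib
    # modules such as 're' are imported directly and registered with
    # import_module(), while distutils submodules go through
    # generate_type_inference_code_for_module() so that stypy can nest the
    # submodule's own type store; the StypyTypeError / fallback branches keep
    # type inference going even when an import cannot be resolved.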
# Assigning a Call to a Name (line 302):
# Assigning a Call to a Name (line 302):
# Call to find_executable(...): (line 302)
# Processing the call arguments (line 302)
str_3633 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 302, 30), 'str', 'gcc')
# Processing the call keyword arguments (line 302)
kwargs_3634 = {}
# Getting the type of 'find_executable' (line 302)
find_executable_3632 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 302, 14), 'find_executable', False)
# Calling find_executable(args, kwargs) (line 302)
find_executable_call_result_3635 = invoke(stypy.reporting.localization.Localization(__file__, 302, 14), find_executable_3632, *[str_3633], **kwargs_3634)
# Assigning a type to the variable 'gcc_exe' (line 302)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 302, 4), 'gcc_exe', find_executable_call_result_3635)
# Getting the type of 'gcc_exe' (line 303)
gcc_exe_3636 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 303, 7), 'gcc_exe')
# Testing the type of an if condition (line 303)
if_condition_3637 = is_suitable_condition(stypy.reporting.localization.Localization(__file__, 303, 4), gcc_exe_3636)
# Assigning a type to the variable 'if_condition_3637' (line 303)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 303, 4), 'if_condition_3637', if_condition_3637)
# SSA begins for if statement (line 303)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'if')
# Assigning a Call to a Name (line 304):
# Assigning a Call to a Name (line 304):
# Call to popen(...): (line 304)
# Processing the call arguments (line 304)
# Getting the type of 'gcc_exe' (line 304)
gcc_exe_3640 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 304, 23), 'gcc_exe', False)
str_3641 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 304, 33), 'str', ' -dumpversion')
# Applying the binary operator '+' (line 304)
result_add_3642 = python_operator(stypy.reporting.localization.Localization(__file__, 304, 23), '+', gcc_exe_3640, str_3641)
str_3643 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 304, 49), 'str', 'r')
# Processing the call keyword arguments (line 304)
kwargs_3644 = {}
# Getting the type of 'os' (line 304)
os_3638 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 304, 14), 'os', False)
# Obtaining the member 'popen' of a type (line 304)
popen_3639 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 304, 14), os_3638, 'popen')
# Calling popen(args, kwargs) (line 304)
popen_call_result_3645 = invoke(stypy.reporting.localization.Localization(__file__, 304, 14), popen_3639, *[result_add_3642, str_3643], **kwargs_3644)
# Assigning a type to the variable 'out' (line 304)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 304, 8), 'out', popen_call_result_3645)
# Try-finally block (line 305)
# Assigning a Call to a Name (line 306):
# Assigning a Call to a Name (line 306):
# Call to read(...): (line 306)
# Processing the call keyword arguments (line 306)
kwargs_3648 = {}
# Getting the type of 'out' (line 306)
out_3646 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 306, 25), 'out', False)
# Obtaining the member 'read' of a type (line 306)
read_3647 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 306, 25), out_3646, 'read')
# Calling read(args, kwargs) (line 306)
read_call_result_3649 = invoke(stypy.reporting.localization.Localization(__file__, 306, 25), read_3647, *[], **kwargs_3648)
# Assigning a type to the variable 'out_string' (line 306)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 306, 12), 'out_string', read_call_result_3649)
# finally branch of the try-finally block (line 305)
# Call to close(...): (line 308)
# Processing the call keyword arguments (line 308)
kwargs_3652 = {}
# Getting the type of 'out' (line 308)
out_3650 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 308, 12), 'out', False)
# Obtaining the member 'close' of a type (line 308)
close_3651 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 308, 12), out_3650, 'close')
# Calling close(args, kwargs) (line 308)
close_call_result_3653 = invoke(stypy.reporting.localization.Localization(__file__, 308, 12), close_3651, *[], **kwargs_3652)
# Assigning a Call to a Name (line 309):
# Assigning a Call to a Name (line 309):
# Call to search(...): (line 309)
# Processing the call arguments (line 309)
str_3656 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 309, 27), 'str', '(\\d+\\.\\d+\\.\\d+)')
# Getting the type of 'out_string' (line 309)
out_string_3657 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 309, 45), 'out_string', False)
# Processing the call keyword arguments (line 309)
kwargs_3658 = {}
# Getting the type of 're' (line 309)
re_3654 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 309, 17), 're', False)
# Obtaining the member 'search' of a type (line 309)
search_3655 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 309, 17), re_3654, 'search')
# Calling search(args, kwargs) (line 309)
search_call_result_3659 = invoke(stypy.reporting.localization.Localization(__file__, 309, 17), search_3655, *[str_3656, out_string_3657], **kwargs_3658)
# Assigning a type to the variable 'result' (line 309)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 309, 8), 'result', search_call_result_3659)
# Getting the type of 'result' (line 310)
result_3660 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 310, 11), 'result')
# Testing the type of an if condition (line 310)
if_condition_3661 = is_suitable_condition(stypy.reporting.localization.Localization(__file__, 310, 8), result_3660)
# Assigning a type to the variable 'if_condition_3661' (line 310)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 310, 8), 'if_condition_3661', if_condition_3661)
# SSA begins for if statement (line 310)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'if')
# Assigning a Call to a Name (line 311):
# Assigning a Call to a Name (line 311):
# Call to StrictVersion(...): (line 311)
# Processing the call arguments (line 311)
# Call to group(...): (line 311)
# Processing the call arguments (line 311)
int_3665 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 311, 53), 'int')
# Processing the call keyword arguments (line 311)
kwargs_3666 = {}
# Getting the type of 'result' (line 311)
result_3663 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 311, 40), 'result', False)
# Obtaining the member 'group' of a type (line 311)
group_3664 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 311, 40), result_3663, 'group')
# Calling group(args, kwargs) (line 311)
group_call_result_3667 = invoke(stypy.reporting.localization.Localization(__file__, 311, 40), group_3664, *[int_3665], **kwargs_3666)
# Processing the call keyword arguments (line 311)
kwargs_3668 = {}
# Getting the type of 'StrictVersion' (line 311)
StrictVersion_3662 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 311, 26), 'StrictVersion', False)
# Calling StrictVersion(args, kwargs) (line 311)
StrictVersion_call_result_3669 = invoke(stypy.reporting.localization.Localization(__file__, 311, 26), StrictVersion_3662, *[group_call_result_3667], **kwargs_3668)
# Assigning a type to the variable 'gcc_version' (line 311)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 311, 12), 'gcc_version', StrictVersion_call_result_3669)
# SSA branch for the else part of an if statement (line 310)
module_type_store.open_ssa_branch('else')
# Assigning a Name to a Name (line 313):
# Assigning a Name to a Name (line 313):
# Getting the type of 'None' (line 313)
None_3670 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 313, 26), 'None')
# Assigning a type to the variable 'gcc_version' (line 313)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 313, 12), 'gcc_version', None_3670)
# SSA join for if statement (line 310)
module_type_store = module_type_store.join_ssa_context()
# SSA branch for the else part of an if statement (line 303)
module_type_store.open_ssa_branch('else')
# Assigning a Name to a Name (line 315):
# Assigning a Name to a Name (line 315):
# Getting the type of 'None' (line 315)
None_3671 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 315, 22), 'None')
# Assigning a type to the variable 'gcc_version' (line 315)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 315, 8), 'gcc_version', None_3671)
# SSA join for if statement (line 303)
module_type_store = module_type_store.join_ssa_context()
# Assigning a Name to a Name (line 318):
# Assigning a Name to a Name (line 318):
# Getting the type of 'None' (line 318)
None_3672 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 318, 17), 'None')
# Assigning a type to the variable 'ld_version' (line 318)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 318, 4), 'ld_version', None_3672)
# Obtaining an instance of the builtin type 'tuple' (line 319)
tuple_3673 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 319, 12), 'tuple')
# Adding type elements to the builtin type 'tuple' instance (line 319)
# Adding element type (line 319)
# Getting the type of 'gcc_version' (line 319)
gcc_version_3674 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 319, 12), 'gcc_version')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 319, 12), tuple_3673, gcc_version_3674)
# Adding element type (line 319)
# Getting the type of 'ld_version' (line 319)
ld_version_3675 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 319, 25), 'ld_version')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 319, 12), tuple_3673, ld_version_3675)
# Assigning a type to the variable 'stypy_return_type' (line 319)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 319, 4), 'stypy_return_type', tuple_3673)
# ################# End of 'get_versions(...)' code ##################
# Teardown call information
teardown_call_information(localization, arguments)
# Storing the return type of function 'get_versions' in the type store
# Getting the type of 'stypy_return_type' (line 294)
stypy_return_type_3676 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 294, 0), 'stypy_return_type')
module_type_store.store_return_type_of_current_context(stypy_return_type_3676)
# Destroy the current context
module_type_store = module_type_store.close_function_context()
# Return type of the function 'get_versions'
return stypy_return_type_3676
# Assigning a type to the variable 'get_versions' (line 294)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 294, 0), 'get_versions', get_versions)
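# Read back from the trace, the modeled function is approximately the
# following Python 2 source (a sketch; note that ld_version is never probed
# in this trace and is always returned as None):
#
#     def get_versions():
#         from distutils.version import StrictVersion
#         from distutils.spawn import find_executable
#         import re
#         gcc_exe = find_executable('gcc')
#         if gcc_exe:
#             out = os.popen(gcc_exe + ' -dumpversion', 'r')
#             try:
#                 out_string = out.read()
#             finally:
#                 out.close()
#             result = re.search('(\d+\.\d+\.\d+)', out_string)
#             if result:
#                 gcc_version = StrictVersion(result.group(1))
#             else:
#                 gcc_version = None
#         else:
#             gcc_version = None
#         ld_version = None
#         return (gcc_version, ld_version)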
# ################# End of the type inference program ##################
module_errors = stypy.errors.type_error.StypyTypeError.get_error_msgs()
module_warnings = stypy.errors.type_warning.TypeWarning.get_warning_msgs()
| [
"[email protected]"
] | |
1b1c02e75d0c463404a738766c2fe6e24d2476c7 | ad849c40e75d098e38db897154c63054e6f89fca | /models_class/model.py | 8ff2a80539b1abd9c6c4efeda44b6b17c4e7e9f6 | [] | permissive | vietnamican/Pytorch_Retinaface | 768a96eb7e48b002dc91cc97cc41473206903c59 | 8d69dd191e16421bb399f49c7706d6e154d4a80e | refs/heads/main | 2023-06-18T08:23:25.860727 | 2021-06-24T10:48:18 | 2021-06-24T10:48:18 | 366,045,702 | 1 | 0 | MIT | 2021-05-10T13:06:47 | 2021-05-10T13:06:46 | null | UTF-8 | Python | false | false | 2,929 | py | import torch
from torch import nn
from torchmetrics import Accuracy
from .base import ConvBatchNormRelu
from .base import Base
class Config(object):
dataroot = 'data/mrleye'
train_image_dir = '../LaPa_negpos_fusion/train/images'
train_label_dir = '../LaPa_negpos_fusion/train/labels'
val_image_dir = '../LaPa_negpos_fusion/val/images'
val_label_dir = '../LaPa_negpos_fusion/val/labels'
batch_size = 512
    pin_memory = True
num_workers = 6
device = 'gpu'
max_epochs = 200
steps = [0.5, 0.7, 0.9]
cfg = Config()
class IrisModel(Base):
def __init__(self, cfg=cfg):
super().__init__()
self.conv1 = ConvBatchNormRelu(3, 10, kernel_size=3, padding=1, with_relu=False)
self.maxpool1 = nn.MaxPool2d(kernel_size=2, stride=2)
self.relu1 = nn.ReLU(inplace=True)
self.conv2 = ConvBatchNormRelu(10, 20, kernel_size=3, padding=1, with_relu=False)
self.maxpool2 = nn.MaxPool2d(kernel_size=2, stride=2)
self.relu2 = nn.ReLU(inplace=True)
self.conv3 = ConvBatchNormRelu(20, 50, kernel_size=3, padding=1, with_relu=False)
self.conv4 = ConvBatchNormRelu(50, 2, kernel_size=1, padding=0, with_relu=False)
self.avg_pool = nn.AdaptiveAvgPool2d((1, 1))
self.flatten = nn.Flatten()
self.criterion = nn.CrossEntropyLoss()
self.cfg = cfg
        # Keep separate metric objects so train/val accuracy states don't mix.
        self.train_acc = Accuracy()
        self.val_acc = Accuracy()
def forward(self, x):
x = self.relu1(self.maxpool1(self.conv1(x)))
x = self.relu2(self.maxpool2(self.conv2(x)))
x = self.conv3(x)
x = self.conv4(x)
x = self.avg_pool(x)
x = self.flatten(x)
return x
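    # Shape walk-through for an (N, 3, H, W) input (illustrative; H and W are
    # arbitrary): conv1 + pool -> (N, 10, H/2, W/2); conv2 + pool ->
    # (N, 20, H/4, W/4); conv3 -> (N, 50, H/4, W/4); the 1x1 conv4 ->
    # (N, 2, H/4, W/4); the adaptive average pool and flatten reduce this to
    # (N, 2) class logits, so any input resolution is accepted.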
    def _shared_step(self, batch, batch_idx):
        eye, label, *_ = batch
        logit = self.forward(eye)
        loss = self.criterion(logit, label)
        return loss, logit
    def training_step(self, batch, batch_idx):
        _, label, *_ = batch
        loss, logit = self._shared_step(batch, batch_idx)
        pred = logit.argmax(dim=1)
        # Use the dedicated training metric rather than the validation one.
        self.log('train_acc', self.train_acc(pred, label))
        self.log('train_loss', loss)
        return loss
    def validation_step(self, batch, batch_idx):
        _, label, *_ = batch
        loss, logit = self._shared_step(batch, batch_idx)
        pred = logit.argmax(dim=1)
        self.log('val_acc', self.val_acc(pred, label))
        self.log('val_loss', loss)
        return loss
def configure_optimizers(self):
optimizer = torch.optim.SGD(self.parameters(), lr=0.001, momentum=0.9, weight_decay=5e-4)
max_epochs = self.cfg.max_epochs
step0, step1, step2 = self.cfg.steps
        # MultiStepLR expects integer epoch milestones, so cast the fractional steps.
        milestones = [int(max_epochs * step) for step in (step0, step1, step2)]
        lr_scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones, gamma=0.1)
return {'optimizer': optimizer, 'lr_scheduler': lr_scheduler}
| [
"[email protected]"
] | |
213f42b8e3c626c96fdba83225479382cdd7034f | 544cfadc742536618168fc80a5bd81a35a5f2c99 | /tools/test/connectivity/acts/framework/acts/controllers/pdu_lib/synaccess/np02b.py | 655328feb4bede2c154cc3f44e04463ee9f339ee | [] | no_license | ZYHGOD-1/Aosp11 | 0400619993b559bf4380db2da0addfa9cccd698d | 78a61ca023cbf1a0cecfef8b97df2b274ac3a988 | refs/heads/main | 2023-04-21T20:13:54.629813 | 2021-05-22T05:28:21 | 2021-05-22T05:28:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,057 | py | #!/usr/bin/env python3
#
# Copyright 2019 - The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from acts import utils
from acts.controllers import pdu
import re
import telnetlib
import time
class PduDevice(pdu.PduDevice):
"""Implementation of pure abstract PduDevice object for the Synaccess np02b
Pdu.
"""
def __init__(self, host, username, password):
super(PduDevice, self).__init__(host, username, password)
self.tnhelper = _TNHelperNP02B(host)
def on_all(self):
""" Turns on both outlets on the np02b."""
self.tnhelper.cmd('ps 1')
self._verify_state({'1': True, '2': True})
def off_all(self):
""" Turns off both outlets on the np02b."""
self.tnhelper.cmd('ps 0')
self._verify_state({'1': False, '2': False})
def on(self, outlet):
""" Turns on specific outlet on the np02b.
Args:
outlet: string of the outlet to turn on ('1' or '2')
"""
self.tnhelper.cmd('pset %s 1' % outlet)
self._verify_state({outlet: True})
def off(self, outlet):
""" Turns off a specifc outlet on the np02b.
Args:
outlet: string of the outlet to turn off ('1' or '2')
"""
self.tnhelper.cmd('pset %s 0' % outlet)
self._verify_state({outlet: False})
def reboot(self, outlet):
""" Toggles a specific outlet on the np02b to off, then to on.
Args:
outlet: string of the outlet to reboot ('1' or '2')
"""
self.off(outlet)
self._verify_state({outlet: False})
self.on(outlet)
self._verify_state({outlet: True})
def status(self):
""" Returns the status of the np02b outlets.
Return:
a dict mapping outlet strings ('1' and '2') to:
True if outlet is ON
False if outlet is OFF
"""
res = self.tnhelper.cmd('pshow')
status_list = re.findall('(ON|OFF)', res)
status_dict = {}
for i, status in enumerate(status_list):
status_dict[str(i + 1)] = (status == 'ON')
return status_dict
def close(self):
"""Ensure connection to device is closed.
In this implementation, this shouldn't be necessary, but could be in
others that open on creation.
"""
self.tnhelper.close()
def _verify_state(self, expected_state, timeout=3):
"""Returns when expected_state is reached on device.
In order to prevent command functions from exiting until the desired
        effect has occurred, this function verifies that the expected_state is a
        subset of the actual device state.
Args:
expected_state: a dict representing the expected state of one or
more outlets on the device. Maps outlet strings ('1' and/or '2')
to:
True if outlet is expected to be ON.
False if outlet is expected to be OFF.
timeout (default: 3): time in seconds until raising an exception.
Return:
True, if expected_state is reached.
Raises:
PduError if expected_state has not been reached by timeout.
"""
end_time = time.time() + timeout
while time.time() < end_time:
actual_state = self.status()
if expected_state.items() <= actual_state.items():
return True
time.sleep(.1)
raise pdu.PduError('Timeout while verifying state.\n'
'Expected State: %s\n'
'Actual State: %s' % (expected_state, actual_state))
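# A minimal usage sketch (hypothetical host; the telnet helper below only
# uses the host, so username/password are effectively unused here):
#
#   device = PduDevice('192.0.2.10', username=None, password=None)
#   device.reboot('1')    # off('1') then on('1'), each confirmed via status()
#   device.status()       # -> e.g. {'1': True, '2': False}
#
# The subset check in _verify_state means expected_state={'1': True} is
# satisfied by an actual status of {'1': True, '2': False}.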
class _TNHelperNP02B(object):
"""An internal helper class for Telnet with the Synaccess NP02B Pdu. This
helper is specific to the idiosyncrasies of the NP02B and therefore should
not be used with other devices.
"""
def __init__(self, host):
self._tn = telnetlib.Telnet()
self.host = host
self.tx_cmd_separator = '\n\r'
self.rx_cmd_separator = '\r\n'
self.prompt = '>'
"""
Executes a command on the device via telnet.
Args:
cmd_str: A string of the command to be run.
Returns:
A string of the response from the valid command (often empty).
"""
def cmd(self, cmd_str):
# Open session
try:
self._tn.open(self.host, timeout=3)
except:
raise pdu.PduError("Failed to open telnet session to host (%s)" %
self.host)
time.sleep(.1)
# Read to end of first prompt
        cmd_str = cmd_str.strip(self.tx_cmd_separator)
self._tn.read_eager()
time.sleep(.1)
# Write command and read all output text
self._tn.write(utils.ascii_string(cmd_str + self.tx_cmd_separator))
res = self._tn.read_until(utils.ascii_string(self.prompt), 2)
# Parses out the commands output
if res is None:
raise pdu.PduError("Command failed: %s" % cmd_str)
res = res.decode()
if re.search('Invalid', res):
raise pdu.PduError("Command Invalid: %s" % cmd_str)
res = res.replace(self.prompt, '')
res = res.replace(self.tx_cmd_separator, '')
res = res.replace(self.rx_cmd_separator, '')
res = res.replace(cmd_str, '')
# Close session
self._tn.close()
time.sleep(0.5)
return res
def close(self):
self._tn.close() | [
"[email protected]"
] | |
61c90a5a68de5d9fddb0ef91c1c3666064a8f85e | 7bededcada9271d92f34da6dae7088f3faf61c02 | /pypureclient/flasharray/FA_2_22/models/pod_replica_link_lag_response.py | 4b80aac36f529a563527baa650fb2b54f0d5839c | [
"BSD-2-Clause"
] | permissive | PureStorage-OpenConnect/py-pure-client | a5348c6a153f8c809d6e3cf734d95d6946c5f659 | 7e3c3ec1d639fb004627e94d3d63a6fdc141ae1e | refs/heads/master | 2023-09-04T10:59:03.009972 | 2023-08-25T07:40:41 | 2023-08-25T07:40:41 | 160,391,444 | 18 | 29 | BSD-2-Clause | 2023-09-08T09:08:30 | 2018-12-04T17:02:51 | Python | UTF-8 | Python | false | false | 3,922 | py | # coding: utf-8
"""
FlashArray REST API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 2.22
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flasharray.FA_2_22 import models
class PodReplicaLinkLagResponse(object):
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'items': 'list[PodReplicaLinkLag]'
}
attribute_map = {
'items': 'items'
}
required_args = {
}
def __init__(
self,
items=None, # type: List[models.PodReplicaLinkLag]
):
"""
Keyword args:
items (list[PodReplicaLinkLag]): A list of pod replica link lag objects.
"""
if items is not None:
self.items = items
def __setattr__(self, key, value):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `PodReplicaLinkLagResponse`".format(key))
self.__dict__[key] = value
def __getattribute__(self, item):
value = object.__getattribute__(self, item)
if isinstance(value, Property):
raise AttributeError
else:
return value
def __getitem__(self, key):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `PodReplicaLinkLagResponse`".format(key))
return object.__getattribute__(self, key)
def __setitem__(self, key, value):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `PodReplicaLinkLagResponse`".format(key))
object.__setattr__(self, key, value)
def __delitem__(self, key):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `PodReplicaLinkLagResponse`".format(key))
object.__delattr__(self, key)
def keys(self):
return self.attribute_map.keys()
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
if hasattr(self, attr):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(PodReplicaLinkLagResponse, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, PodReplicaLinkLagResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
] | |
a14cb2cae1fd69db8497611253f1cb936df5a192 | 23ec2d87fb96626992df44af73a7daa202be79a6 | /src/examples/connectfour/vs.py | 3810ad1a332014b09a368e147f4ae73e9ef179df | [] | no_license | ishikota/pymcts | 5d560ec7d0dcdf881a52c607adfdd384ae23e0c2 | 2d1ba191cadbbaab0ab922a785478210cf0709f4 | refs/heads/master | 2021-01-01T19:31:00.932984 | 2015-07-28T14:45:23 | 2015-07-28T14:45:23 | 39,330,236 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,146 | py | # add path to the src and test directory
import os
import sys
PARENT_PATH = os.getenv('PYMCTS_ROOT')
SRC_PATH = PARENT_PATH +"src/"
sys.path.append(SRC_PATH+"algorithm")
import mcts
import connectfour_model
import heuristic_model
# Clear the shell
os.system("clear")
# Setup for MCTS
model = heuristic_model.ConnectFour()
#model = connectfour_model.ConnectFour()
print '> Input the maximum number of iterations in MCTS...'
playout_num = int(raw_input())
_mcts = mcts.MCTS()
_mcts.set_playout(playout_num)
_mcts.show_progress = True
# start the game !!
print 'Let\'s ConnectFour !!'
model.display()
while True:
# Player turn
print '> Input the column to make a move...'
action = int(raw_input())-1
end_flg, score = model.is_terminal(1, action)
model.update(action)
model.display()
if end_flg:
print '\nYou win !!!\n'
break
# MCTS CPU Turn
root, action = _mcts.start(model)
print 'MCTS make a move on column '+str(action+1)
end_flg, score = model.is_terminal(-1, action)
model.update(action)
model.display()
if end_flg:
print '\nYou lose ...\n'
break
| [
"[email protected]"
] | |
288b4bd41f49b1124f0b189c46fb7fc1cba2ea02 | 066f812b051afffbe1a05630a728d15bab9f02bc | /django_503/models.py | 53c13d2c325313df137b454d59365c4a456316b9 | [
"MIT"
] | permissive | soul4code/django-503 | 04714af1a72813d5f6f1691eada97773adbe9c30 | 48f30e176f334988dafb48dff7c604b7f72ab290 | refs/heads/master | 2021-12-14T00:08:07.899188 | 2015-07-16T18:31:09 | 2015-07-16T18:31:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 219 | py | from django.db import models
from django.utils.translation import ugettext as _
class Config(models.Model):
key = models.CharField(_('Key'), max_length=100, unique=True)
value = models.BooleanField(_('Value'))
| [
"[email protected]"
] | |
bc47db2fbd5e552d18ef8b17070331d9bf86f0a9 | 595c69f717fc3ceb4e0701cc433f6d7f927b6fdb | /Hogworts/Page_Object/Pages/main.py | 7f61ff32429f5e1477ac8b6a1941e7c335deb355 | [
"MIT"
] | permissive | archerckk/PyTest | d6462ebf46c6dbd5bb3ce03666aad0c2665367cd | 610dd89df8d70c096f4670ca11ed2f0ca3196ca5 | refs/heads/master | 2022-03-26T21:09:25.891745 | 2021-06-14T01:39:36 | 2021-06-14T01:39:36 | 129,497,345 | 0 | 0 | null | 2020-01-14T10:57:49 | 2018-04-14T08:23:03 | Python | UTF-8 | Python | false | false | 533 | py | from Page_Object.Pages.base_page import Base_Page
from Page_Object.Pages.login import Login
from Page_Object.Pages.register import Register
from selenium.webdriver.common.by import By
class Main(Base_Page):
_base_url='https://work.weixin.qq.com/'
def goto_register(self):
self.find(By.CSS_SELECTOR,'.index_head_info_pCDownloadBtn').click()
return Register(self._driver)
def goto_login(self):
self.find(By.CSS_SELECTOR,'.index_top_operation_loginBtn').click()
return Login(self._driver) | [
"[email protected]"
] | |
a66d33de13362abe85bb1eaea386c7fdb853db98 | d57b51ec207002e333b8655a8f5832ed143aa28c | /.history/l5/work/app_20200705183534.py | b04a220ca1102f3467a3b5ad0e580157e43c7a65 | [] | no_license | yevheniir/python_course_2020 | b42766c4278a08b8b79fec77e036a1b987accf51 | a152d400ab4f45d9d98d8ad8b2560d6f0b408c0b | refs/heads/master | 2022-11-15T07:13:24.193173 | 2020-07-11T15:43:26 | 2020-07-11T15:43:26 | 278,890,802 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 357 | py | from flask import Flask
from flask import render_template
app = Flask(__name__)
scoreboard = [{"name": "Tester", "score": 10}, {"name": "Tester", "score": 11}]
@app.route("/<name>")
def hello(name):
return render_template("index.html")
@app.route("/game")
def game():
return render_template("index.html")
if __name__ == "__main__":
app.run() | [
"[email protected]"
] | |
defc1b7d74de6d1d58c5993550f7e8e9ad068c89 | 0f0a7adfae45e07a896c5cd5648ae081d4ef7790 | /python数据结构/python黑马数据结构/排序于搜索/桶排序.py | 12d443dea83de14b64c2fafd0db8a034651882fd | [] | no_license | renlei-great/git_window- | e2c578544c7a8bdd97a7a9da7be0464d6955186f | 8bff20a18d7bbeeaf714aa49bf15ab706153cc28 | refs/heads/master | 2021-07-19T13:09:01.075494 | 2020-06-13T06:14:37 | 2020-06-13T06:14:37 | 227,722,554 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,463 | py | lista = [12, 4, 5, 6, 22, 3, 43, 654, 765, 7, 234]
# Bucket sort
"""
Bucket sort finds the maximum and minimum of the array, splits that range into buckets, drops each element into its bucket, then sorts every bucket and concatenates the results.
"""
def pail_sort(alist):
"""桶排序"""
n = len(alist)
min_cur, max_cur = 0, 0
cur = 1
    # find the index of the minimum value
while cur < n:
if alist[min_cur] > alist[cur]:
min_cur = cur
cur += 1
cur = 1
    # find the index of the maximum value
while cur < n:
if alist[max_cur] < alist[cur]:
max_cur = cur
cur += 1
min_number, max_number = alist[min_cur], alist[max_cur]
    # initialize three buckets and their threshold values
for i in range(1,4):
number_name = 'number' + str(i)
pail_name = 'pail' + str(i)
number = max_number // i
setattr(pail_sort, pail_name, [])
setattr(pail_sort, number_name, number)
    # distribute the elements into the buckets
for i in alist:
if i <= getattr(pail_sort, 'number1') and i > getattr(pail_sort, 'number2'):
pail_sort.__dict__['pail1'].append(i)
        elif i <= getattr(pail_sort, 'number2') and i > getattr(pail_sort, 'number3'):
            pail_sort.__dict__['pail2'].append(i)
        elif i <= getattr(pail_sort, 'number3'):
pail_sort.__dict__['pail3'].append(i)
    # sort each bucket, then concatenate them (smallest range first) and return
sort_pail = []
for i in range(3,0, -1):
sort_pail += marge_sort(pail_sort.__dict__['pail' + str(i)])
return sort_pail
def marge_sort(alist):
"""归并排序"""
n = len(alist)
if n <= 1:
return alist
mid = n // 2
left_li = marge_sort(alist[:mid])
right_li = marge_sort(alist[mid:])
left_cur, right_cur = 0, 0
result = []
while left_cur < len(left_li) and right_cur < len(right_li):
if left_li[left_cur] < right_li[right_cur]:
result.append(left_li[left_cur])
left_cur += 1
elif left_li[left_cur] > right_li[right_cur]:
result.append(right_li[right_cur])
right_cur += 1
elif left_li[left_cur] == right_li[right_cur]:
result.append(left_li[left_cur])
left_cur += 1
result.append(right_li[right_cur])
right_cur += 1
result += left_li[left_cur:] + right_li[right_cur:]
return result
if __name__ == "__main__":
new_li = pail_sort(lista)
# new_li = marge_sort(lista)
print(new_li)
| [
"[email protected]"
] | |
a0485c4cb332ebd75e227c8399d966b35342cc60 | 623065fb8f2fec97c7a4e201bff7ff1d9578e457 | /imgviz/data/kitti/__init__.py | afb8eb994cbe1b8a3520b78d531e100de2e1bc1e | [] | no_license | bigdatasciencegroup/imgviz | 4759c4264a43e9d37429489cc63a8a00fbb489d5 | cec9f1e3cc02cac46d11a99c63c696b8743ba6f1 | refs/heads/master | 2020-08-21T23:39:44.038394 | 2019-09-09T13:55:57 | 2019-09-09T13:55:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 700 | py | import os.path as osp
import numpy as np
here = osp.dirname(osp.abspath(__file__))
def read_pose_file(filename):
with open(filename, 'r') as f:
transforms = []
for one_line in f:
one_line = one_line.split(' ')
Rt = [float(pose) for pose in one_line] + [0, 0, 0, 1]
Rt = np.reshape(np.array(Rt), (4, 4))
assert abs(Rt[3].sum() - 1) < 1e-5
transforms.append(Rt)
return transforms
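# Each line of a KITTI odometry pose file carries 12 floats: the top three
# rows of a 3x4 [R|t] camera pose in row-major order. Appending [0, 0, 0, 1]
# completes the homogeneous 4x4 matrix, so the identity pose line
# "1 0 0 0 0 1 0 0 0 0 1 0" becomes np.eye(4).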
def kitti_odometry():
# http://www.cvlibs.net/datasets/kitti/eval_odometry.php
pose_file = osp.join(here, 'odometry/00.txt')
transforms = read_pose_file(pose_file)
data = {'transforms': transforms}
return data
| [
"[email protected]"
] | |
3f2a7f8ca8c8b949eb087d6b60465bf94f7e9e90 | ac01d8bdab2140eae6332613142b784484877b78 | /main.py | c4d5cd5bb342e7b9329bf9ee51a5c37957b7ec15 | [] | no_license | 2020668/api_automation_course | eb19322485fdb7db4b9586597895c3ac97727e96 | 33da9f5f1f17de5a5892d28a9f6feea09e8c4adc | refs/heads/master | 2022-12-29T22:32:02.321058 | 2020-10-15T03:24:32 | 2020-10-15T03:24:32 | 304,195,531 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,271 | py | # -*- coding: utf-8 -*-
"""
=================================
Author: keen
Created on: 2019/9/2
E-mail:[email protected]
=================================
"""
import unittest
import os
import time
from library.HTMLTestRunnerNew import HTMLTestRunner
from common.config import conf
from common.constant import CASE_DIR, REPORT_DIR
from common.send_email import SendEmail
_title = conf.get('report', 'title')
_description = conf.get('report', 'description')
_tester = conf.get('report', 'tester')
report_name = conf.get('report', 'report_name')
report_name = time.strftime("%Y%m%d%H%M%S", time.localtime()) + "_" + report_name
mail_title = conf.get('mail', 'mail_title')
mail_message = conf.get('mail', 'mail_message')
file_path = os.path.join(REPORT_DIR, report_name)
suite = unittest.TestSuite() # create the test suite
loader = unittest.TestLoader()
suite.addTest(loader.discover(CASE_DIR))
with open(file_path, 'wb') as f:
runner = HTMLTestRunner(
stream=f,
verbosity=2,
title=_title,
description=_description,
tester=_tester
)
runner.run(suite)
# send the report by email
# SendEmail.send_qq_file_mail(mail_title, mail_message, file_path)
# SendEmail.send_outlook_file_mail(mail_title, mail_message, file_path)
| [
"[email protected]"
] | |
a8b44fa0be0fcec467b480ed13e5e1ddc5008900 | fc00b177802c49cf04dc6a8e430093bc14ae9b53 | /venv/Lib/site-packages/mypy/typeshed/stdlib/2and3/ctypes/__init__.pyi | 616d9df9283314885fca81c82384f607e3cd5fbd | [] | permissive | artisakov/vigilant-journey | 9c8264d36da5745374a0d08b0b0288a70f978a11 | 4fed9026071a64489d26422ba7cd1a9b9cb05e16 | refs/heads/master | 2022-11-16T03:10:06.418221 | 2020-07-16T07:33:06 | 2020-07-16T07:33:06 | 238,490,887 | 0 | 1 | MIT | 2020-03-01T10:12:22 | 2020-02-05T16:03:07 | HTML | UTF-8 | Python | false | false | 11,771 | pyi | # Stubs for ctypes
from array import array
from typing import (
Any, Callable, ClassVar, Iterator, Iterable, List, Mapping, Optional, Sequence, Sized, Text,
Tuple, Type, Generic, TypeVar, overload,
)
from typing import Union as _UnionT
import sys
_T = TypeVar('_T')
_DLLT = TypeVar('_DLLT', bound=CDLL)
_CT = TypeVar('_CT', bound=_CData)
RTLD_GLOBAL: int = ...
RTLD_LOCAL: int = ...
DEFAULT_MODE: int = ...
class CDLL(object):
_func_flags_: ClassVar[int] = ...
_func_restype_: ClassVar[_CData] = ...
_name: str = ...
_handle: int = ...
_FuncPtr: Type[_FuncPointer] = ...
def __init__(
self,
name: Optional[str],
mode: int = ...,
handle: Optional[int] = ...,
use_errno: bool = ...,
use_last_error: bool = ...,
winmode: Optional[int] = ...,
) -> None: ...
def __getattr__(self, name: str) -> _FuncPointer: ...
def __getitem__(self, name: str) -> _FuncPointer: ...
if sys.platform == 'win32':
class OleDLL(CDLL): ...
class WinDLL(CDLL): ...
class PyDLL(CDLL): ...
class LibraryLoader(Generic[_DLLT]):
def __init__(self, dlltype: Type[_DLLT]) -> None: ...
def __getattr__(self, name: str) -> _DLLT: ...
def __getitem__(self, name: str) -> _DLLT: ...
def LoadLibrary(self, name: str) -> _DLLT: ...
cdll: LibraryLoader[CDLL] = ...
if sys.platform == 'win32':
windll: LibraryLoader[WinDLL] = ...
oledll: LibraryLoader[OleDLL] = ...
pydll: LibraryLoader[PyDLL] = ...
pythonapi: PyDLL = ...
# Anything that implements the read-write buffer interface.
# The buffer interface is defined purely on the C level, so we cannot define a normal Protocol
# for it. Instead we have to list the most common stdlib buffer classes in a Union.
_WritableBuffer = _UnionT[bytearray, memoryview, array, _CData]
# Same as _WritableBuffer, but also includes read-only buffer types (like bytes).
_ReadOnlyBuffer = _UnionT[_WritableBuffer, bytes]
class _CDataMeta(type):
# By default mypy complains about the following two methods, because strictly speaking cls
# might not be a Type[_CT]. However this can never actually happen, because the only class that
# uses _CDataMeta as its metaclass is _CData. So it's safe to ignore the errors here.
def __mul__(cls: Type[_CT], other: int) -> Type[Array[_CT]]: ... # type: ignore
def __rmul__(cls: Type[_CT], other: int) -> Type[Array[_CT]]: ... # type: ignore
class _CData(metaclass=_CDataMeta):
_b_base: int = ...
_b_needsfree_: bool = ...
_objects: Optional[Mapping[Any, int]] = ...
@classmethod
def from_buffer(cls: Type[_CT], source: _WritableBuffer, offset: int = ...) -> _CT: ...
@classmethod
def from_buffer_copy(cls: Type[_CT], source: _ReadOnlyBuffer, offset: int = ...) -> _CT: ...
@classmethod
def from_address(cls: Type[_CT], address: int) -> _CT: ...
@classmethod
def from_param(cls: Type[_CT], obj: Any) -> _UnionT[_CT, _CArgObject]: ...
@classmethod
def in_dll(cls: Type[_CT], library: CDLL, name: str) -> _CT: ...
class _PointerLike(_CData): ...
_ECT = Callable[[Optional[Type[_CData]],
_FuncPointer,
Tuple[_CData, ...]],
_CData]
_PF = _UnionT[
Tuple[int],
Tuple[int, str],
Tuple[int, str, Any]
]
class _FuncPointer(_PointerLike, _CData):
restype: _UnionT[Type[_CData], Callable[[int], None], None] = ...
argtypes: Sequence[Type[_CData]] = ...
errcheck: _ECT = ...
@overload
def __init__(self, address: int) -> None: ...
@overload
def __init__(self, callable: Callable[..., Any]) -> None: ...
@overload
def __init__(self, func_spec: Tuple[_UnionT[str, int], CDLL],
paramflags: Tuple[_PF, ...] = ...) -> None: ...
@overload
def __init__(self, vtlb_index: int, name: str,
paramflags: Tuple[_PF, ...] = ...,
iid: pointer[c_int] = ...) -> None: ...
def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
class ArgumentError(Exception): ...
def CFUNCTYPE(restype: Optional[Type[_CData]],
*argtypes: Type[_CData],
use_errno: bool = ...,
use_last_error: bool = ...) -> Type[_FuncPointer]: ...
if sys.platform == 'win32':
def WINFUNCTYPE(restype: Optional[Type[_CData]],
*argtypes: Type[_CData],
use_errno: bool = ...,
use_last_error: bool = ...) -> Type[_FuncPointer]: ...
def PYFUNCTYPE(restype: Optional[Type[_CData]],
*argtypes: Type[_CData]) -> Type[_FuncPointer]: ...
class _CArgObject: ...
# Any type that can be implicitly converted to c_void_p when passed as a C function argument.
# (bytes is not included here, see below.)
_CVoidPLike = _UnionT[_PointerLike, Array[Any], _CArgObject, int]
# Same as above, but including types known to be read-only (i. e. bytes).
# This distinction is not strictly necessary (ctypes doesn't differentiate between const
# and non-const pointers), but it catches errors like memmove(b'foo', buf, 4)
# when memmove(buf, b'foo', 4) was intended.
_CVoidConstPLike = _UnionT[_CVoidPLike, bytes]
def addressof(obj: _CData) -> int: ...
def alignment(obj_or_type: _UnionT[_CData, Type[_CData]]) -> int: ...
def byref(obj: _CData, offset: int = ...) -> _CArgObject: ...
_PT = TypeVar('_PT', bound=_PointerLike)
def cast(obj: _UnionT[_CData, _CArgObject], type: Type[_PT]) -> _PT: ...
def create_string_buffer(init_or_size: _UnionT[int, bytes],
size: Optional[int] = ...) -> Array[c_char]: ...
c_buffer = create_string_buffer
def create_unicode_buffer(init_or_size: _UnionT[int, Text],
size: Optional[int] = ...) -> Array[c_wchar]: ...
if sys.platform == 'win32':
def DllCanUnloadNow() -> int: ...
def DllGetClassObject(rclsid: Any, riid: Any, ppv: Any) -> int: ... # TODO not documented
def FormatError(code: int) -> str: ...
def GetLastError() -> int: ...
def get_errno() -> int: ...
if sys.platform == 'win32':
def get_last_error() -> int: ...
def memmove(dst: _CVoidPLike, src: _CVoidConstPLike, count: int) -> None: ...
def memset(dst: _CVoidPLike, c: int, count: int) -> None: ...
def POINTER(type: Type[_CT]) -> Type[pointer[_CT]]: ...
# The real ctypes.pointer is a function, not a class. The stub version of pointer behaves like
# ctypes._Pointer in that it is the base class for all pointer types. Unlike the real _Pointer,
# it can be instantiated directly (to mimic the behavior of the real pointer function).
class pointer(Generic[_CT], _PointerLike, _CData):
_type_: ClassVar[Type[_CT]] = ...
contents: _CT = ...
def __init__(self, arg: _CT = ...) -> None: ...
@overload
def __getitem__(self, i: int) -> _CT: ...
@overload
def __getitem__(self, s: slice) -> List[_CT]: ...
@overload
def __setitem__(self, i: int, o: _CT) -> None: ...
@overload
def __setitem__(self, s: slice, o: Iterable[_CT]) -> None: ...
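# Runtime sketch of what the pointer stub above models:
#   n = c_int(42)
#   p = pointer(n)       # static type: pointer[c_int]
#   p.contents.value     # -> 42
#   p[0]                 # -> 42 (simple types are unboxed to Python ints)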
def resize(obj: _CData, size: int) -> None: ...
if sys.version_info < (3,):
def set_conversion_mode(encoding: str, errors: str) -> Tuple[str, str]: ...
def set_errno(value: int) -> int: ...
if sys.platform == 'win32':
def set_last_error(value: int) -> int: ...
def sizeof(obj_or_type: _UnionT[_CData, Type[_CData]]) -> int: ...
def string_at(address: _CVoidConstPLike, size: int = ...) -> bytes: ...
if sys.platform == 'win32':
def WinError(code: Optional[int] = ...,
desc: Optional[str] = ...) -> WindowsError: ...
def wstring_at(address: _CVoidConstPLike, size: int = ...) -> str: ...
class _SimpleCData(Generic[_T], _CData):
value: _T = ...
def __init__(self, value: _T = ...) -> None: ...
class c_byte(_SimpleCData[int]): ...
class c_char(_SimpleCData[bytes]):
def __init__(self, value: _UnionT[int, bytes] = ...) -> None: ...
class c_char_p(_PointerLike, _SimpleCData[Optional[bytes]]):
def __init__(self, value: Optional[_UnionT[int, bytes]] = ...) -> None: ...
class c_double(_SimpleCData[float]): ...
class c_longdouble(_SimpleCData[float]): ...
class c_float(_SimpleCData[float]): ...
class c_int(_SimpleCData[int]): ...
class c_int8(_SimpleCData[int]): ...
class c_int16(_SimpleCData[int]): ...
class c_int32(_SimpleCData[int]): ...
class c_int64(_SimpleCData[int]): ...
class c_long(_SimpleCData[int]): ...
class c_longlong(_SimpleCData[int]): ...
class c_short(_SimpleCData[int]): ...
class c_size_t(_SimpleCData[int]): ...
class c_ssize_t(_SimpleCData[int]): ...
class c_ubyte(_SimpleCData[int]): ...
class c_uint(_SimpleCData[int]): ...
class c_uint8(_SimpleCData[int]): ...
class c_uint16(_SimpleCData[int]): ...
class c_uint32(_SimpleCData[int]): ...
class c_uint64(_SimpleCData[int]): ...
class c_ulong(_SimpleCData[int]): ...
class c_ulonglong(_SimpleCData[int]): ...
class c_ushort(_SimpleCData[int]): ...
class c_void_p(_PointerLike, _SimpleCData[Optional[int]]): ...
class c_wchar(_SimpleCData[Text]): ...
class c_wchar_p(_PointerLike, _SimpleCData[Optional[Text]]):
def __init__(self, value: Optional[_UnionT[int, Text]] = ...) -> None: ...
class c_bool(_SimpleCData[bool]):
def __init__(self, value: bool = ...) -> None: ...
if sys.platform == 'win32':
class HRESULT(_SimpleCData[int]): ... # TODO undocumented
class py_object(_SimpleCData[_T]): ...
class _CField:
offset: int = ...
size: int = ...
class _StructUnionMeta(_CDataMeta):
_fields_: Sequence[_UnionT[Tuple[str, Type[_CData]], Tuple[str, Type[_CData], int]]] = ...
_pack_: int = ...
_anonymous_: Sequence[str] = ...
def __getattr__(self, name: str) -> _CField: ...
class _StructUnionBase(_CData, metaclass=_StructUnionMeta):
def __init__(self, *args: Any, **kw: Any) -> None: ...
def __getattr__(self, name: str) -> Any: ...
def __setattr__(self, name: str, value: Any) -> None: ...
class Union(_StructUnionBase): ...
class Structure(_StructUnionBase): ...
class BigEndianStructure(Structure): ...
class LittleEndianStructure(Structure): ...
class Array(Generic[_CT], _CData):
_length_: ClassVar[int] = ...
_type_: ClassVar[Type[_CT]] = ...
raw: bytes = ... # Note: only available if _CT == c_char
value: Any = ... # Note: bytes if _CT == c_char, Text if _CT == c_wchar, unavailable otherwise
# TODO These methods cannot be annotated correctly at the moment.
# All of these "Any"s stand for the array's element type, but it's not possible to use _CT
# here, because of a special feature of ctypes.
# By default, when accessing an element of an Array[_CT], the returned object has type _CT.
# However, when _CT is a "simple type" like c_int, ctypes automatically "unboxes" the object
# and converts it to the corresponding Python primitive. For example, when accessing an element
# of an Array[c_int], a Python int object is returned, not a c_int.
# This behavior does *not* apply to subclasses of "simple types".
# If MyInt is a subclass of c_int, then accessing an element of an Array[MyInt] returns
# a MyInt, not an int.
# This special behavior is not easy to model in a stub, so for now all places where
# the array element type would belong are annotated with Any instead.
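    # For example (runtime behavior that these Any annotations paper over):
    #   (c_int * 3)(1, 2, 3)[0]    # -> 1, a plain Python int (unboxed)
    #   class MyInt(c_int): ...
    #   (MyInt * 3)(1, 2, 3)[0]    # -> a MyInt instance, not an int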
def __init__(self, *args: Any) -> None: ...
@overload
def __getitem__(self, i: int) -> Any: ...
@overload
def __getitem__(self, s: slice) -> List[Any]: ...
@overload
def __setitem__(self, i: int, o: Any) -> None: ...
@overload
def __setitem__(self, s: slice, o: Iterable[Any]) -> None: ...
def __iter__(self) -> Iterator[Any]: ...
# Can't inherit from Sized because the metaclass conflict between
# Sized and _CData prevents using _CDataMeta.
def __len__(self) -> int: ...
| [
"[email protected]"
] | |
41894e7590dde3aa44f8c38b7453e8c364d924f5 | cd8f7ecd20c58ce1ae0fe3840f7c7ee961aa5819 | /Binary Tree Zigzag Level Order Traversal.py | 5ffebb0274f92ac415a122c9c02b477d302ff3ff | [
"Apache-2.0"
] | permissive | sugia/leetcode | 9b0f2a3521b088f8f7e5633c2c6c17c76d33dcaf | 6facec2a54d1d9f133f420c9bce1d1043f57ebc6 | refs/heads/master | 2021-06-05T07:20:04.099488 | 2021-02-24T07:24:50 | 2021-02-24T07:24:50 | 29,124,136 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,303 | py | '''
Given a binary tree, return the zigzag level order traversal of its nodes' values. (ie, from left to right, then right to left for the next level and alternate between).
For example:
Given binary tree [3,9,20,null,null,15,7],
3
/ \
9 20
/ \
15 7
return its zigzag level order traversal as:
[
[3],
[20,9],
[15,7]
]
'''
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def zigzagLevelOrder(self, root):
"""
:type root: TreeNode
:rtype: List[List[int]]
"""
res = []
if not root:
return res
zigzag = True
vec = [root]
while len(vec):
zigzag = not zigzag
next_vec = []
tmp = []
for node in vec:
if zigzag:
tmp = [node.val] + tmp
else:
tmp.append(node.val)
if node.left:
next_vec.append(node.left)
if node.right:
next_vec.append(node.right)
res.append(tmp)
vec = next_vec
return res
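# Quick check against the example in the docstring (assumes the TreeNode
# class from the commented-out definition above):
#
#   root = TreeNode(3)
#   root.left, root.right = TreeNode(9), TreeNode(20)
#   root.right.left, root.right.right = TreeNode(15), TreeNode(7)
#   Solution().zigzagLevelOrder(root)   # -> [[3], [20, 9], [15, 7]]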
| [
"[email protected]"
] | |
654c3bc950e7ddde3eaff1bddd8c9718702a2352 | bfc2ba097b164af668efa29f883101673668456e | /nets/centernet_training.py | 109a2e753890bf22328bb9efcd06e247931de674 | [] | no_license | Sharpiless/Paddlepaddle-CenterNet | b4892e1ab85a65f655b44fc6699e61315f5a0274 | b02bca6bff55054bdb29ba370ac52b9e8951045a | refs/heads/main | 2023-06-17T17:22:35.265697 | 2021-07-17T02:46:33 | 2021-07-17T02:46:33 | 386,817,805 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,313 | py | import paddle
import paddle.nn.functional as F
def focal_loss(pred, target):
pred = paddle.transpose(pred, [0,2,3,1])
# pred = pred.permute(0,2,3,1)
#-------------------------------------------------------------------------#
    #   find the positive and negative samples for each image:
    #   each ground-truth box corresponds to exactly one positive sample;
    #   every feature point that is not a positive sample is a negative sample
#-------------------------------------------------------------------------#
# pos_inds = target.equal(1).float()
pos_inds = target == 1
pos_inds = pos_inds.astype('float32')
# neg_inds = target.lt(1).float()
neg_inds = target < 1
neg_inds = neg_inds.astype('float32')
#-------------------------------------------------------------------------#
    #   negative samples near a positive feature point are given smaller weights
#-------------------------------------------------------------------------#
neg_weights = paddle.pow(1 - target, 4)
pred = paddle.clip(pred, 1e-6, 1 - 1e-6)
#-------------------------------------------------------------------------#
    #   compute the focal loss: hard examples get large weights, easy examples small ones.
#-------------------------------------------------------------------------#
pos_loss = paddle.log(pred) * paddle.pow(1 - pred, 2) * pos_inds
neg_loss = paddle.log(1 - pred) * paddle.pow(pred, 2) * neg_weights * neg_inds
#-------------------------------------------------------------------------#
    #   normalize the loss
#-------------------------------------------------------------------------#
num_pos = pos_inds.astype('float32').sum()
pos_loss = pos_loss.sum()
neg_loss = neg_loss.sum()
if num_pos == 0:
loss = -neg_loss
else:
loss = -(pos_loss + neg_loss) / num_pos
return loss
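# In short, for each heatmap cell with prediction p and ground truth y:
#   positives (y == 1): -(1 - p)^2 * log(p)
#   negatives (y < 1):  -(1 - y)^4 * p^2 * log(1 - p)
# summed over the map and divided by the number of positives -- the
# CornerNet/CenterNet variant of focal loss. After the transpose above both
# pred and target are laid out NHWC.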
def reg_l1_loss(pred, target, mask):
#--------------------------------#
    # compute the masked L1 loss
#--------------------------------#
# pred = pred.permute(0,2,3,1)
pred = paddle.transpose(pred, [0,2,3,1])
# expand_mask = paddle.unsqueeze(mask,-1).repeat(1,1,1,2)
expand_mask = paddle.tile(paddle.unsqueeze(mask,-1), [1,1,1,2])
loss = F.l1_loss(pred * expand_mask, target * expand_mask, reduction='sum')
loss = loss / (mask.sum() + 1e-4)
return loss
| [
"[email protected]"
] | |
9f2eaee40308723324858966dcd6932750b0241b | bc9f66258575dd5c8f36f5ad3d9dfdcb3670897d | /lib/googlecloudsdk/command_lib/storage/tasks/task_buffer.py | dee39b0c1165d965f0fa3a433725b4686336f215 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | google-cloud-sdk-unofficial/google-cloud-sdk | 05fbb473d629195f25887fc5bfaa712f2cbc0a24 | 392abf004b16203030e6efd2f0af24db7c8d669e | refs/heads/master | 2023-08-31T05:40:41.317697 | 2023-08-23T18:23:16 | 2023-08-23T18:23:16 | 335,182,594 | 9 | 2 | NOASSERTION | 2022-10-29T20:49:13 | 2021-02-02T05:47:30 | Python | UTF-8 | Python | false | false | 2,762 | py | # -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implements a buffer for tasks used in task_graph_executor.
See go/parallel-processing-in-gcloud-storage for more information.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from six.moves import queue
class _PriorityWrapper:
"""Wraps a buffered task and tracks priority information.
Attributes:
task (Union[task.Task, str]): A buffered item. Expected to be a task or a
string (to handle shutdowns) when used by task_graph_executor.
priority (int): The priority of this task. A task with a lower value will be
executed before a task with a higher value, since queue.PriorityQueue uses
a min-heap.
"""
def __init__(self, task, priority):
self.task = task
self.priority = priority
def __lt__(self, other):
return self.priority < other.priority
class TaskBuffer:
"""Stores and prioritizes tasks.
The current implementation uses a queue.PriorityQueue under the hood, since
in experiments we found that the heap it maintains did not add too much
overhead. If it does end up being a bottleneck, the same API can be
implemented with a collections.deque.
"""
def __init__(self):
self._queue = queue.PriorityQueue()
def get(self):
"""Removes and returns an item from the buffer.
Calls to `get` block if there are no elements in the queue, and return
prioritized items before non-prioritized items.
Returns:
A buffered item. Expected to be a task or a string (to handle shutdowns)
when used by task_graph_executor.
"""
return self._queue.get().task
def put(self, task, prioritize=False):
"""Adds an item to the buffer.
Args:
task (Union[task.Task, str]): A buffered item. Expected to be a task or a
string (to handle shutdowns) when used by task_graph_executor.
prioritize (bool): Tasks added with prioritize=True will be returned by
`get` before tasks added with prioritize=False.
"""
priority = 0 if prioritize else 1
prioritized_item = _PriorityWrapper(task, priority)
self._queue.put(prioritized_item)
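# A minimal usage sketch (strings stand in for task objects here, just as
# they do for shutdown signals in task_graph_executor):
#
#   buffer = TaskBuffer()
#   buffer.put('routine task')
#   buffer.put('urgent task', prioritize=True)
#   buffer.get()   # -> 'urgent task'  (priority 0 sorts before priority 1)
#   buffer.get()   # -> 'routine task'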
| [
"[email protected]"
] | |
8534041473d28f92fb8db6079f28b29f4e1c7743 | cae9ca1dda110cd6f65d5021c5891fdee76ec6fe | /day2/set/2.py | e7bb3bc3680e0158226f7a9475d6dce754b58602 | [] | no_license | shobhit-nigam/yagami | fb33d6de76a698a160f9e8df9d7d9f5b836797d8 | 283e2a464f74ac07c21ae7095b9a45fa632aa38a | refs/heads/main | 2023-07-04T09:46:51.057558 | 2021-08-10T05:13:27 | 2021-08-10T05:13:27 | 391,846,901 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 267 | py | basket_a = {'apple', 'banana', 'pear', 'apple', 'kiwi', 'banana', 'avocado'}
basket_b = {'orange', 'plum', 'grapes', 'apple', 'pear', 'raspberry'}
print(type(basket_a))
print("basket_a =", basket_a)
print("basket_b =", basket_b)
print("basket_a[2] =", basket_a[2])
| [
"[email protected]"
] | |
74630a900649910f52610167dda5e5175c1009c7 | e3d33f5d82a541d7491e079c394dcebf1568f078 | /server/settings/migrations/0003_delete_tournamenttype.py | 746daa2db4b9fb8720e479ddcbeaca42296afee0 | [
"MIT"
] | permissive | MahjongRepository/mahjong-portal | 51bd1300c3e6b8a341fbddb67a750b268950627e | 20f01433858bed4610d60b27a98bafce5a810097 | refs/heads/master | 2023-07-09T09:05:23.155419 | 2023-07-08T10:47:14 | 2023-07-08T10:47:14 | 114,328,632 | 12 | 10 | MIT | 2022-07-29T01:29:59 | 2017-12-15T04:53:02 | Python | UTF-8 | Python | false | false | 435 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.8 on 2018-01-18 13:32
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('tournament', '0005_remove_tournament_tournament_type'),
('settings', '0002_auto_20180117_0643'),
]
operations = [
migrations.DeleteModel(
name='TournamentType',
),
]
| [
"[email protected]"
] | |
428b845f68e1d7c602aa7f74a604609708605c11 | b35aea9f4411f5dc7942392d78dc31bb76c7ec73 | /ARTIN/index/forms.py | 145690526dcce52d9b06a9000dcf43e2949b4874 | [] | no_license | ashkanusefi/rondshow | 1079b81704fff55a1d54fa8dee2712ab61e92f4a | 7e5a80fcc6e326b8b1737a54fb53becc4195e475 | refs/heads/master | 2023-09-01T18:45:33.170465 | 2021-09-18T11:24:52 | 2021-09-18T11:24:52 | 407,820,565 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 760 | py | from django import forms
from index.models import Contactform
class Contact_Form(forms.ModelForm):
class Meta:
model = Contactform
widgets = {
'name': forms.TextInput(attrs={'placeholder': 'نام و نام خوانوادگی'}),
'email': forms.TextInput(attrs={'placeholder': 'ایمیل'}),
'subject': forms.TextInput(attrs={'placeholder': 'موضوع'}),
'phone': forms.TextInput(attrs={'placeholder': 'شماره تماس'}),
'description': forms.Textarea(attrs={'placeholder': 'پیام خود را وارد کنید'}),
}
fields = [
'name',
'email',
'subject',
'phone',
'description',
]
| [
"[email protected]"
] | |
165b320a0f937ccc6fd4ef9e6bae85487e84034d | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/100/usersdata/199/49629/submittedfiles/prova1.py | 6b7bd8a5436b67d150ae6be7d451d557c92d2016 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 378 | py | # -*- coding: utf-8 -*-
import math
# START YOUR CODE BELOW THIS LINE
Carta1 = int(input('Digite C1:'))
Carta2 = int(input('Digite C2:'))
Carta3 = int(input('Digite C3:'))
Carta4 = int(input('Digite C4:'))
Carta5 = int(input('Digite C5:'))
if Carta1>Carta2>Carta3>Carta4>Carta5:
print('D')
elif Carta1<Carta2<Carta3<Carta4<Carta5:
print('C')
else:
print('N')
| [
"[email protected]"
] | |
b410d142b81c1ff46a841b791aac9e8f0c825de6 | 7c1b5af77fbfde1f4f2c698a489e07024c147edc | /docs/sphinxext/example.py | a3a898c3d74b35d6e48e079745f272267a2beaef | [] | no_license | migueldvb/pyasdf | e7812da935ee3e4fec6d3c61fb16425ac2e1bdc7 | 4a72952b0196ede261e07569fc4da2616fa5e4b3 | refs/heads/master | 2020-12-26T18:44:07.562442 | 2015-03-30T16:18:19 | 2015-03-30T16:18:19 | 29,930,850 | 0 | 0 | null | 2015-02-04T20:23:04 | 2015-01-27T19:29:17 | Python | UTF-8 | Python | false | false | 3,764 | py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, unicode_literals, print_function
import atexit
import io
import os
import shutil
import tempfile
import textwrap
from docutils.parsers.rst import Directive
from docutils import nodes
from sphinx.util.nodes import set_source_info
from pyasdf import AsdfFile
from pyasdf.constants import ASDF_MAGIC, BLOCK_FLAG_STREAMED
from pyasdf import versioning
from pyasdf import yamlutil
version_string = versioning.version_to_string(versioning.default_version)
TMPDIR = tempfile.mkdtemp()
def delete_tmpdir():
shutil.rmtree(TMPDIR)
GLOBALS = {}
LOCALS = {}
FLAGS = {
BLOCK_FLAG_STREAMED: "BLOCK_FLAG_STREAMED"
}
class RunCodeDirective(Directive):
has_content = True
def run(self):
code = textwrap.dedent('\n'.join(self.content))
cwd = os.getcwd()
os.chdir(TMPDIR)
try:
try:
exec(code, GLOBALS, LOCALS)
except:
print(code)
raise
literal = nodes.literal_block(code, code)
literal['language'] = 'python'
set_source_info(self, literal)
finally:
os.chdir(cwd)
return [literal]
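# Hypothetical reST usage once setup() registers the directive: the body is
# executed inside TMPDIR and echoed back as a highlighted Python block.
#
#   .. runcode::
#
#      from pyasdf import AsdfFile
#      ff = AsdfFile({'data': [1, 2, 3]})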
class AsdfDirective(Directive):
required_arguments = 1
def run(self):
filename = self.arguments[0]
cwd = os.getcwd()
os.chdir(TMPDIR)
parts = []
try:
code = AsdfFile.read(filename, _get_yaml_content=True)
code = '{0}{1}\n'.format(ASDF_MAGIC, version_string) + code.strip()
literal = nodes.literal_block(code, code)
literal['language'] = 'yaml'
set_source_info(self, literal)
parts.append(literal)
ff = AsdfFile.read(filename)
for i, block in enumerate(ff.blocks.internal_blocks):
data = block.data.tostring().encode('hex')
if len(data) > 40:
data = data[:40] + '...'
allocated = block._allocated
size = block._size
data_size = block._data_size
flags = block._flags
if flags & BLOCK_FLAG_STREAMED:
allocated = size = data_size = 0
lines = []
lines.append('BLOCK {0}:'.format(i))
human_flags = []
for key, val in FLAGS.items():
if flags & key:
human_flags.append(val)
if len(human_flags):
lines.append(' flags: {0}'.format(' | '.join(human_flags)))
if block.compression:
lines.append(' compression: {0}'.format(block.compression))
lines.append(' allocated_size: {0}'.format(allocated))
lines.append(' used_size: {0}'.format(size))
lines.append(' data_size: {0}'.format(data_size))
lines.append(' data: {0}'.format(data))
code = '\n'.join(lines)
literal = nodes.literal_block(code, code)
literal['language'] = 'yaml'
set_source_info(self, literal)
parts.append(literal)
finally:
os.chdir(cwd)
result = nodes.admonition()
textnodes, messages = self.state.inline_text(filename, self.lineno)
title = nodes.title(filename, '', *textnodes)
result += title
result.children.extend(parts)
return [result]
def setup(app):
app.add_directive('runcode', RunCodeDirective)
app.add_directive('asdf', AsdfDirective)
atexit.register(delete_tmpdir)
| [
"[email protected]"
] | |
855617ea99f031e0e80b2b054a95363b3b16af6b | 43c268536a396b7f105f15e717c9f1f6b9044082 | /cltk/phonology/latin/transcription.py | 10b54850b62e788a602cee6b2e5adfc903a3f61d | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | codeSG/cltk | 9b5357d123d22fa423ecea3ffea9d2b9688445c3 | 6fbc96e3afa19f0c43b9eb64b739029c04f352b9 | refs/heads/master | 2020-12-31T00:29:35.046678 | 2017-06-04T05:26:23 | 2017-06-04T05:26:23 | 85,152,321 | 0 | 0 | null | 2017-03-24T11:01:04 | 2017-03-16T04:22:57 | Python | UTF-8 | Python | false | false | 23,177 | py | """Convert a word from Latin orthography into its hypothesized
pronunciation in the International Phonetic Alphabet (IPA).
https://raw.githubusercontent.com/j-duff/cltk/ipa/
cltk/phonology/latin/transcription.py
"""
from cltk.utils.cltk_logger import logger
from cltk.prosody.latin import macronizer as m
macronizer = m.Macronizer("tag_ngram_123_backoff")
from nltk.tokenize import wordpunct_tokenize
import re
import unicodedata
try:
# James Tauber's greek_accentuation package
from greek_accentuation import characters as chars
except ImportError as import_error:
print('Missing "greek_accentuation" package. Install with '
+ '`pip install greek-accentuation`.')
logger.error(import_error)
raise
__author__ = ['Jack Duff <[email protected]>']
__license__ = 'MIT License. See LICENSE.'
# Dictionaries of phonological reconstructions for use in transcribing.
# Allen, W. Sidney. 1965. Vox Latina.
LATIN = {'Classical': {
'Allen': {
'correspondence': {
'p': 'p',
't': 't̪',
'c': 'k',
'k': 'k',
'qu': 'kʷ',
'b': 'b',
'd': 'd̪',
'g': 'g',
'gu': 'gʷ',
'ph': 'pʰ',
'th': 't̪ʰ',
'ch': 'kʰ',
'n': 'n̪',
'm': 'm',
'r': 'r',
'rh': 'r', # Voiceless r was spelled but not pronounced.
'l': 'l',
'f': 'f',
's': 's',
'h': 'h',
'j': 'j',
'v': 'w',
'x': 'ks',
'z': 'z',
'ī': 'iː',
'ū': 'uː',
'i': 'ɪ',
'u': 'ʊ',
'e': 'ɛ',
'o': 'ɔ',
'ē': 'eː',
'ō': 'oː',
'a': 'a',
'ā': 'aː',
'y': 'y',
'ȳ': 'y:',
'ae': 'aj',
'au': 'aw',
'oe': 'oj',
'eu': 'ew',
'ei': 'ej'},
'diphthongs': # and digraphs
['qu', 'gu', 'ph', 'th', 'ch', 'rh',
'ae', 'au', 'oe', 'eu', 'ei'],
'punctuation':
['.', ',', ';', ':', '-', '–', '?', '!',
'(', ')', "'", "\"", "[", "]"],
'alternations': [
'j_maker', # word initial and intervocalic i is assumed j
'w_maker', # word initial and intervocalic u is assumed w
'wj_block', # prevents accidental sequence wj
'uj_diph_maker', # after w and j have been created, recognizes
# <ui> = [uj]
'b_devoice', # b devoices before /t/, /s/
'g_n_nasality_assimilation', # only before n
'n_place_assimilation', # should also do labial, and
# labio-dental before f.
'final_m_drop', # m drops and lengthens + nasalizes preceding
# vowel word-finally
'ns_nf_lengthening', # vowels lengthen before ns or nf
'l_darken', # l darkens to ɫ in coda
'j_z_doubling', # intervocalic j and z > jj and zz
'long_vowel_catcher', # corrects accidental instances of ɪː
# and similar.
'e_i_closer_before_vowel', # ɛ to ɛ̣, ɪ to ɪ̣ before another vowel
'intervocalic_j', # j glide between vowels
]
}
}
}
# Unhandled exceptions: preposition "ad" becomes [at̪] not [ad̪] before s and t
# subf > suff, subm > summ, subg > sugg, subc > succ, subr > rr
# j exceptions like ad*j*ectivum and con*j*unx
# All IPA characters used sorted by natural classes.
# WILL NEED ADDITIONS AS MORE RECONSTRUCTIONS USED
IPA = {'voiced': # [+voice]
['b', 'd̪', 'g', 'gʷ',
'm', 'n̪', 'ŋ', 'ɱ'
'l', 'ɫ', 'r', 'z'],
'labial': # [+labial, -labiodental]
['b', 'p', 'pʰ', 'm'],
'labiodental': # [+labial, +labiodental]
['f', 'ɱ'],
'coronal': # [+coronal]
['d̪', 't̪', 't̪ʰ', 'n̪', 's', 'z', 'r', 'l', 'ɫ'],
'velar': # [+velar]
['g', 'k', 'kʰ', 'kʷ', 'gʷ', 'ŋ'],
'nasal': # [+consonantal, +nasal]
['m', 'ɱ', 'n', 'ŋ'],
'approximant': # [+approximant]
['l', 'ɫ', 'r', 'j', 'w'],
'continuant': # [+continuant, +consonantal]
['h', 'f', 's', 'z', 'l', 'ɫ', 'r'],
'vowel': # [-consonantal -approximant]
['a', 'aː', 'ɛ', 'ɛ̣', 'eː', 'ɪ', 'ɪ̣', 'iː',
'ɔ', 'oː', 'ʊ', 'u', 'uː', 'y', 'yː',
'ãː', 'ẽː', 'ĩː', 'õː', 'ũː'],
'high': # [-consonantal, +high]
['ɪ', 'ɪ̣', 'iː', 'ʊ', 'u', 'uː', 'y', 'yː',
'ɪ̃', 'ɪ̣̃', 'ĩː', 'ʊ̃', 'ũ', 'ũː', 'ỹ', 'ỹː'],
'mid': # [-consonantal, -high, -low]
['ɛ', 'ɛ̣', 'eː', 'ɔ', 'oː',
'ɛ̃', 'ɛ̣̃', 'ẽː', 'ɔ̃', 'õː'],
'low': # [-consonantal, +low]
['a', 'aː',
'ã', 'ãː'],
'front': # [-consonantal, +front]
['ɪ', 'ɪ̣', 'iː', 'y', 'yː', 'ɛ', 'ɛ̣', 'eː',
'ɪ̃', 'ɪ̣̃', 'ĩː', 'ỹ', 'ỹː', 'ɛ̃', 'ɛ̣̃', 'ẽː'],
'central': # [-consonantal, -front, -back]
['a', 'aː',
'ã', 'ãː'],
'back': # [-consonantal, +back]
['ʊ', 'u', 'uː', 'ɔ', 'oː',
'ʊ̃', 'ũ', 'ũː', 'ɔ̃', 'õː'],
'boundary':
['#']}
class Phone:
'A phonological unit to be manipulated and represented as an IPA string.'
# Has a bundle of feature values that help classify it so that it can
# trigger contextual pronunciation changes.
def __init__(self, ipa_ch):
# eventually exported to output string
self.ipa = unicodedata.normalize('NFC', ipa_ch)
# will be assigned once in Word, as the pre-context of this phone
self.left = ""
# .... as the post-context of this phone
self.right = ""
# bundle of features, stored as booleans:
self.vce = self.ipa in IPA['voiced']
self.lab = self.ipa in IPA['labial']
self.lbd = self.ipa in IPA['labiodental']
self.cor = self.ipa in IPA['coronal']
self.vel = self.ipa in IPA['velar']
self.nas = self.ipa in IPA['nasal']
self.app = self.ipa in IPA['approximant']
self.cont = self.ipa in IPA['continuant']
self.vow = self.ipa in IPA['vowel']
self.hi = self.ipa in IPA['high']
self.mid = self.ipa in IPA['mid']
self.lo = self.ipa in IPA['low']
self.fr = self.ipa in IPA['front']
self.ctr = self.ipa in IPA['central']
self.bk = self.ipa in IPA['back']
self.bound = self.ipa in IPA['boundary']
class Word:
'Max. phonological unit, contains phones and triggers alternations.'
# An ordered collection of Phones, which are bundles of
# features/IPA strings.
def __init__(self, ipa_str, root):
self.string = unicodedata.normalize('NFC', ipa_str)
# Appropriate directory in the reconstruction dictionary
self.root = root
# list of contextual pronunciation alternations
self.alts = self.root['alternations']
# Turns string of IPA characters into list of Phones
self.phones = [Phone(c) for c
in re.findall(r'.[̪̣̃ʷʰ]*ː?', self.string)]
# Assigns left and right contexts for every phone
def _refresh(self):
for n in range(len(self.phones)):
p = self.phones[n]
if n != 0:
p.left = self.phones[n - 1]
else:
p.left = Phone("#")
if n != len(self.phones) - 1:
p.right = self.phones[n + 1]
else:
p.right = Phone("#")
def _j_maker(self):
# Assume word-initial or intervocalic i to be j
out_phones = self.phones
target = Phone("j")
for n in range(len(self.phones)):
p = self.phones[n]
if p.ipa == 'ɪ' and ((p.left.bound and p.right.vow)
or (p.left.vow and p.right.vow)):
out_phones[n] = target
self.phones = out_phones
self._refresh()
def _w_maker(self):
# Assume word-initial or intervocalic u to be w
out_phones = self.phones
target = Phone("w")
for n in range(len(self.phones)):
p = self.phones[n]
if (((p.ipa == 'ʊ') or (p.ipa =='u'))
and ((p.left.bound
and (p.right.vow or p.right.ipa == 'j'))
or (p.left.vow and p.right.vow))):
out_phones[n] = target
self.phones = out_phones
self._refresh()
def _wj_block(self):
# Addendum to correct possible 'wj' sequences
out_phones = self.phones
target = Phone("ɪ")
for n in range(len(self.phones)):
p = self.phones[n]
if p.left.ipa == 'w' and p.ipa == 'j':
out_phones[n] = target
self.phones = out_phones
self._refresh()
def _uj_diph_maker(self):
# Find accidental "ʊɪ" instances and treat as diphthong [uj].
out_phones = self.phones
for n in range(len(self.phones)):
p = self.phones[n]
if p.left.ipa == 'ʊ' and p.ipa == 'ɪ':
out_phones[n-1] = Phone('u')
out_phones[n] = Phone('j')
self.phones = out_phones
self._refresh()
def _b_devoice(self):
# Pronounce b as p when followed by s or t.
out_phones = self.phones
target = Phone("p")
for n in range(len(self.phones)):
p = self.phones[n]
if p.ipa == 'b' and (p.right.ipa == 's' or p.right.ipa == 't̪'):
out_phones[n] = target
self.phones = out_phones
self._refresh()
def _final_m_drop(self):
# Final m nasalizes and lengthens nucleus and drops.
out_phones = self.phones
for n in range(len(self.phones)):
p = self.phones[n]
if p.left.vow and p.ipa == 'm' and p.right.bound:
out_phones[n-1] = Phone(p.left.ipa + "̃ː")
del out_phones[n]
self.phones = out_phones
self._refresh()
def _n_place_assimilation(self):
# Pronounce n as ŋ when followed by velar.
out_phones = self.phones
target = Phone("ŋ")
for n in range(len(self.phones)):
p = self.phones[n]
if p.ipa == 'n̪' and p.right.vel:
out_phones[n] = target
self.phones = out_phones
self._refresh()
def _g_n_nasality_assimilation(self):
# Pronounce g as ŋ when followed by n.
out_phones = self.phones
target = Phone("ŋ")
for n in range(len(self.phones)):
p = self.phones[n]
if p.ipa == "g" and p.right.ipa == "n̪":
out_phones[n] = target
self.phones = out_phones
self._refresh()
def _ns_nf_lengthening(self):
# Lengthen vowel before ns or nf.
out_phones = self.phones
for n in range(len(self.phones)):
p = self.phones[n]
if (p.left.vow and "ː" not in p.left.ipa and p.ipa == "n̪"
and (p.right.ipa == "s" or p.right.ipa == "f")):
out_phones[n-1] = Phone(p.left.ipa + "ː")
self.phones = out_phones
self._refresh()
def _l_darken(self):
# Pronounce l as ɫ in coda.
out_phones = self.phones
target = Phone("ɫ")
for n in range(len(self.phones)):
p = self.phones[n]
if p.ipa == "l" and ((not p.right.vow) or p.right.bound):
out_phones[n] = target
self.phones = out_phones
self._refresh()
def _j_z_doubling(self):
# Double j and z between vowels.
out_phones = self.phones
dupl = []
for n in range(len(self.phones)):
p = self.phones[n]
if p.right.vow and (p.ipa == "j" or p.ipa == "z") and p.left.vow:
dupl.append((True, n - len(self.phones), p.ipa))
else: dupl.append((False, n - len(self.phones), None))
for t in sorted(dupl, key=lambda tup: tup[1]):
if t[0]:
out_phones.insert(t[1], Phone(t[2]))
self.phones = out_phones
self._refresh()
def _long_vowel_catcher(self):
# Replace ɪː with iː, ʊː with uː, and ɛː with eː.
out_phones = self.phones
target_dict = {'ɪː': 'iː', 'ʊː': 'uː', 'ɛː': 'eː',
'ɪ̃ː': 'ĩː', 'ʊ̃ː': 'ũː', 'ɛ̃ː': 'ẽː'}
for n in range(len(self.phones)):
p = self.phones[n]
if p.ipa in target_dict.keys():
out_phones[n] = Phone(target_dict[p.ipa])
self.phones = out_phones
self._refresh()
def _e_i_closer_before_vowel(self):
# e and i become closer (̣) when followed by a vowel.
out_phones = self.phones
for n in range(len(self.phones)):
p = self.phones[n]
if (p.ipa == "ɛ" or p.ipa == "ɪ") and p.right.vow:
out_phones[n] = Phone(p.ipa + "̣")
self.phones = out_phones
self._refresh()
def _intervocalic_j(self):
# epenthesize j between vowels
out_phones = self.phones
target = Phone("j")
j = []
for n in range(len(self.phones)):
p = self.phones[n]
if p.left.vow and p.vow:
j.append((True, n - len(self.phones)))
else: j.append((False, n - len(self.phones)))
for t in sorted(j, key=lambda tup: tup[1]):
if t[0]:
out_phones.insert(t[1], target)
self.phones = out_phones
self._refresh()
# list of all possible alternations
ALTERNATIONS = [("j_maker", _j_maker),
("w_maker", _w_maker),
("wj_block", _wj_block),
("uj_diph_maker", _uj_diph_maker),
("b_devoice", _b_devoice),
("final_m_drop", _final_m_drop),
("n_place_assimilation", _n_place_assimilation),
("g_n_nasality_assimilation", _g_n_nasality_assimilation),
("ns_nf_lengthening", _ns_nf_lengthening),
("l_darken", _l_darken),
("j_z_doubling", _j_z_doubling),
("long_vowel_catcher", _long_vowel_catcher),
("e_i_closer_before_vowel", _e_i_closer_before_vowel),
("intervocalic_j", _intervocalic_j)]
def _alternate(self):
# after setting left and right contexts for every phone...
self._refresh()
# runs all alternations
for a in Word.ALTERNATIONS:
if a[0] in self.alts:
a[1](self)
def _syllabify(self):
# takes Word input and returns a list of syllables as
# (onset, nucleus, coda) tuples
# where onset, nucleus, and coda are all lists of Phones.
nuclei = []
for n in range(len(self.phones)):
p = self.phones[n]
if p.vow:
nuclei.append(n)
# initialize syllables with a tuple for the first syllable
# where onset is everything before the first nucleus
# and coda remains unknown.
syllables = [[self.phones[0:nuclei[0]],
[self.phones[nuclei[0]]], []]]
# continue for every nucleus, assuming that everything between
# the previous nucleus and it is the onset.
for x in range(len(nuclei)-1):
i = nuclei[x+1]
onset = self.phones[nuclei[x]+1:i]
nucleus = [self.phones[i]]
syllables.append([onset, nucleus, []])
# assume that everything after the final nucleus is final coda.
syllables[-1][2] = self.phones[nuclei[-1]+1:]
# now go through and check onset viability
for x in range(len(syllables)-1):
onset = syllables[x+1][0]
nucleus = syllables[x+1][1]
coda = syllables[x+1][2]
# trim all onsets greater than the maximum 2 phones
# removing extra phones from the left
# and appending them to the previous coda
if len(onset) > 2:
trim = onset[:-2]
del onset[:-2]
syllables[x][2] = trim
# once onset is 2 phones...
if len(onset) == 2:
                # a stop + liquid/nasal cluster is the only viable
                # two-phone onset and passes
if ((not onset[0].cont) and (not onset[0].app)
and (onset[1].nas or onset[1].app)):
                    continue  # onset is licit; move on to the next syllable
# otherwise, onset must be right Phone only
# the left phone is appended to the previous coda
else:
trim = onset[0]
del onset[0]
syllables[x][2] += [trim]
self.syllables = syllables
        return syllables
def _print_ipa(self, syllabify, accentuate):
# depending on the syllabify and accentuate parameters
# prints an appropriately marked up version of the transcription
out = ""
if syllabify:
syllables = self._syllabify()
# the ultima is the final syllable
ultima = syllables[-1]
# identify which syllable has stress and store index as accent
if accentuate:
                # one-syllable words have ultimate stress
                if len(syllables) == 1:
                    accent = -1
                # two-syllable words have penultimate stress
                elif len(syllables) == 2:
                    accent = -2
else:
# penult is second to last syllable
penult = syllables[-2]
# if penult is diphthong (long), penultimate stress
if len(penult[1]) > 1:
accent = -2
# if penult is long vowel, penultimate stress
elif "ː" in penult[1][0].ipa:
accent = -2
# if penult has coda (closed/long by position),
# penultimate stress
elif len(penult[2]) > 0:
accent = -2
# otherwise (penult is short) antepenultimate stress
else:
accent = -3
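                # e.g. (illustrative): amīcus -> [a'miː.kus] (long penult
                # stressed); facere -> ['fa.ke.re] (short open penult, so
                # the antepenult is stressed)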
# loop over syllables by index
for x in range(len(syllables)):
s = syllables[x]
# if index matches accent index set above
if x-len(syllables) == accent:
# precede that syllable with
# IPA stress punctuation: '
out += "'"
# then, print IPA by syllable segment as usual
for n in s:
for p in n:
out += p.ipa
                    # separate all syllables with IPA syllable punctuation: .
if s != ultima:
out += "."
# if no accentuation flag, proceed with syllabified printing
else:
for s in syllables:
for n in s:
for p in n:
out += p.ipa
                    # separate all syllables with IPA syllable punctuation: .
if s != ultima:
out += "."
# if no syllabification flag, proceed with
# unsyllabified IPA printing
else:
for p in self.phones:
out += p.ipa
return out
class Transcriber:
    'Uses a reconstruction to transcribe an orthographic string into IPA.'
def __init__(self, dialect, reconstruction):
self.lect = dialect
self.recon = reconstruction
self.root = LATIN[self.lect][self.recon]
self.table = self.root["correspondence"]
self.diphs = self.root["diphthongs"]
self.punc = self.root["punctuation"]
def _parse_diacritics(self, ch):
        # Returns a string with separated and organized diacritics
# for easier access later.
        # E.g.: input with base a -> a/LENGTH/DIAERESIS/
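        # (illustrative: plain "a" -> "a///", while "ā" -> "a/" + macron + "//")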
out = chars.base(ch).lower() # Initialize out as base of character.
length = chars.length(ch)
dia = chars.diaeresis(ch)
out += "/" # Create 1st boundary
# If any length, place between 1st and 2nd boundary
        if length is not None:
out += length
out += "/" # Create 2nd boundary
        if dia is not None: # If any diaeresis,
out += dia # place between second and final boundary
out += "/" # Create final boundary
return out
def _prep_text(self, text):
        # Performs preparatory tasks, grouping and reordering characters
# in order to make transcription formulaic.
string_in = "".join([self._parse_diacritics(ch) for ch in text])
# searches for diphthongs and treats them as one phone
for d in self.diphs:
d1 = d[0]
d2 = d[1]
pattern = r"(" + d1 + r")\/\/\/(" + d2 + r")(\/\/\/)"
string_in = re.sub(pattern, r"\1\2\3", string_in)
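        # e.g. (illustrative): with diphthong ("a", "e"), the string
        # "a///e///" collapses to "ae///", so "ae" is later read as one phone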
tup_out = re.findall(r"(..?)\/([̄̆]*)\/(¨?)\/", string_in)
return tup_out
def transcribe(
self, text, macronize=True, syllabify=True, accentuate=True
):
        # if macronize is set, the tagger macronizes the input first;
        # otherwise the raw input string is used
if macronize:
text = macronizer.macronize_text(text)
# input is word-tokenized, stripped of non-diacritic punctuation,
# and diphthongs and diacritics are handled
inp = [self._prep_text(w) for w in wordpunct_tokenize(text)
if w not in self.punc]
words = []
for w in inp:
out = ""
for c in w:
if "̄" in c[1]:
macron_added = c[0]+'̄'
ipa = self.table.get(macron_added, macron_added)
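                    # e.g. (illustrative): base "a" plus macron looks up "ā",
                    # which a correspondence table would map to long "aː"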
else:
ipa = self.table.get(c[0], c[0])
out += ipa
transcription = Word(out, self.root)
transcription._alternate()
words.append(transcription)
# Encloses output in brackets, proper notation for surface form.
return "[" + " ".join([w._print_ipa(syllabify, accentuate)
for w in words]) + "]"
if __name__ == '__main__':
allen_transcriber = Transcriber("Classical", "Allen")
example = allen_transcriber.transcribe("Quo usque tandem, O Catilina, "
+ "abutere nostra patientia?")
print(example)
# File: backend/backend/ml_model/client_server/ml_client.py
# (repo: TheDuckWhisperer/tournesol, MIT license)
from jsonrpcclient.clients.http_client import HTTPClient
from multiprocessing import Process
from backend.rating_fields import VIDEO_FIELDS
import numpy as np
import pickle
from time import time
def time_cache_wrapper(f, expire_sec=3600):
"""Decorator which caches results for some seconds."""
# format pickle(x) -> (compute_time, value)
cache = {}
def wrapper(*args):
x_str = pickle.dumps(args)
if x_str in cache:
if time() - cache[x_str][0] <= expire_sec:
return cache[x_str][1]
result = f(*args)
cache[x_str] = (time(), result)
return result
return wrapper
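
# Illustrative usage (hypothetical function, not part of this module):
#
#     @time_cache_wrapper
#     def expensive_lookup(x):
#         ...  # stand-in for a slow computation
#
# The first call computes and caches; identical calls made within
# expire_sec seconds are served from the cache.
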
class DatabaseLearnerCommunicator(object):
"""Communicate with training/inference workers."""
def __init__(
self,
port_inference=5000,
port_training=5001,
host='localhost'):
"""Initialize (remember ports)."""
self.port_inference = port_inference
self.port_training = port_training
self.host = host
def build_client(self, port):
"""Return an http client pointing to the worker."""
return HTTPClient("http://%s:%d" % (self.host, port))
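        # e.g. "http://localhost:5000" for the default inference worker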
@time_cache_wrapper
def __call__(self, x):
"""Transform embedding into preferences."""
try:
client = self.build_client(port=self.port_inference)
return client.call([float(t) for t in x]).data.result
except Exception as e:
print(e)
return np.zeros(len(VIDEO_FIELDS))
def fit(self):
"""Fit on data from the dataset."""
def fit_helper():
client = self.build_client(port=self.port_training)
client.fit()
client_inference = self.build_client(port=self.port_inference)
client_inference.reload()
Process(target=fit_helper).start()
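

if __name__ == '__main__':
    # Minimal sketch, assuming the backend package is importable.
    # No worker needs to be running: on connection failure, __call__
    # prints the error and falls back to a zero preference vector.
    communicator = DatabaseLearnerCommunicator()
    print(communicator(np.zeros(3)))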