Dataset schema:

| column | dtype | range / classes |
|---|---|---|
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 3 to 616 |
| content_id | string | length 40 |
| detected_licenses | list | length 0 to 112 |
| license_type | string | 2 classes |
| repo_name | string | length 5 to 115 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 to 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 to 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 to 2023-09-06 01:08:06 |
| github_id | int64 (nullable) | 4.92k to 681M |
| star_events_count | int64 | 0 to 209k |
| fork_events_count | int64 | 0 to 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] (nullable) | 2012-06-04 01:52:49 to 2023-09-14 21:59:50 |
| gha_created_at | timestamp[us] (nullable) | 2008-05-22 07:58:19 to 2023-08-21 12:35:19 |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 to 10.2M |
| extension | string | 188 classes |
| content | string | length 3 to 10.2M |
| authors | list | length 1 |
| author_id | string | length 1 to 132 |
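A minimal sketch of how rows with this schema might be loaded for analysis; the file name `files.parquet` and the chosen column subset are hypothetical, not part of the dump:

```python
# Minimal sketch, assuming the dump was exported as a local Parquet file.
# "files.parquet" is a hypothetical path; substitute the real export.
import pandas as pd

df = pd.read_parquet(
    "files.parquet",
    columns=["repo_name", "path", "language", "license_type",
             "is_vendor", "is_generated", "length_bytes", "content"],
)

# Keep hand-written (non-vendored, non-generated) files under 10 kB.
small = df[(~df["is_vendor"]) & (~df["is_generated"]) & (df["length_bytes"] < 10_000)]
for row in small.itertuples(index=False):
    print(row.repo_name, row.path, row.length_bytes)
```

Selecting a column subset keeps memory bounded, since `content` alone can reach 10.2 MB per row.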
ebda7187c990755ed5402fe5d8d27ea22d320b99 | a84b99ce26f86a467df95b3ef2b57dd15cb1b49e | /boston_housing_v1.py | 3422493d35a3c0ff7bae1422e2c0fc0720a5c705 | [] | no_license | yanqinghao/KerasDL | 3ea6246495bc01bda5792a7146ad4519088375dc | 853f2872fff600fdd62d882d52d6dd6f82200cd8 | refs/heads/master | 2020-03-28T14:07:10.799496 | 2019-05-20T06:17:51 | 2019-05-20T06:17:51 | 148,458,453 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,120 | py |
import ptvsd
# Allow other computers to attach to ptvsd at this IP address and port.
ptvsd.enable_attach(address=('10.122.24.46', 3000), redirect_output=True)
# Pause the program until a remote debugger is attached
ptvsd.wait_for_attach()
import ssl
ssl._create_default_https_context = ssl._create_unverified_context
from keras.datasets import boston_housing
from keras import models
from keras import layers
import numpy as np
import matplotlib.pyplot as plt
(train_data, train_targets), (test_data, test_targets) = boston_housing.load_data()
print(train_data.shape, '|', test_data.shape)
print(train_targets)
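# Standardize features with training-set statistics only; applying the same
# mean/std to the test set avoids data leakage.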
mean = train_data.mean(axis=0)
train_data -= mean
std = train_data.std(axis=0)
train_data /= std
test_data -= mean
test_data /= std
def build_model():
model = models.Sequential()
model.add(layers.Dense(64, activation='relu', input_shape=(train_data.shape[1],)))
model.add(layers.Dense(64, activation='relu'))
model.add(layers.Dense(1))
model.compile(optimizer='rmsprop', loss='mse', metrics=['mae'])
return model
## crossvalidation
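# With only ~400 training samples, k-fold cross-validation gives a more
# reliable validation score than a single train/validation split.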
k = 4
num_val_samples = len(train_data) // k
num_epochs = 500
all_scores = []
all_mae_histories = []
for i in range(k):
print('processing fold #', i)
val_data = train_data[i * num_val_samples: (i + 1) * num_val_samples]
val_targets = train_targets[i * num_val_samples: (i + 1) * num_val_samples]
partial_train_data = np.concatenate([train_data[:i * num_val_samples],
train_data[(i + 1) * num_val_samples:]], axis=0)
partial_train_targets = np.concatenate([train_targets[:i * num_val_samples],
train_targets[(i + 1) * num_val_samples:]], axis=0)
model = build_model()
history = model.fit(partial_train_data, partial_train_targets, epochs=num_epochs, batch_size=1,
verbose=0, validation_data=(val_data, val_targets))
mae_history = history.history['val_mean_absolute_error']
all_mae_histories.append(mae_history)
val_mse, val_mae = model.evaluate(val_data, val_targets, verbose=0)
all_scores.append(val_mae)
print(all_scores, '|', np.mean(all_scores))
average_mae_history = [np.mean([x[i] for x in all_mae_histories]) for i in range(num_epochs)]
plt.plot(range(1, len(average_mae_history) + 1), average_mae_history)
plt.xlabel('Epochs')
plt.ylabel('Validation MAE')
plt.show()
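# Exponential moving average: each point is blended with the previous smoothed
# value to damp epoch-to-epoch noise in the validation curve.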
def smooth_curve(points, factor=0.9):
smoothed_points = []
for point in points:
if smoothed_points:
previous = smoothed_points[-1]
smoothed_points.append(previous * factor + point * (1 - factor))
else:
smoothed_points.append(point)
return smoothed_points
smooth_mae_history = smooth_curve(average_mae_history[10:])
plt.plot(range(1, len(smooth_mae_history) + 1), smooth_mae_history)
plt.xlabel('Epochs')
plt.ylabel('Validation MAE')
plt.show()
## Final model
model = build_model()
model.fit(train_data, train_targets, epochs=80, batch_size=16, verbose=0)
test_mse_score, test_mae_score = model.evaluate(test_data, test_targets)
print(test_mae_score)
| ["[email protected]"] | |
1467080cc60312b2af114571aaf42827195feca1 | 0fccee4c738449f5e0a8f52ea5acabf51db0e910 | /genfragments/EightTeV/LQ/LQToCMu_M_800_TuneZ2star_8TeV_pythia6_cff.py | 1e0c27817afa82ce4cc3f09aea6c3fd1520590fb | [] | no_license | cms-sw/genproductions | f308ffaf3586c19b29853db40e6d662e937940ff | dd3d3a3826343d4f75ec36b4662b6e9ff1f270f4 | refs/heads/master | 2023-08-30T17:26:02.581596 | 2023-08-29T14:53:43 | 2023-08-29T14:53:43 | 11,424,867 | 69 | 987 | null | 2023-09-14T12:41:28 | 2013-07-15T14:18:33 | Python | UTF-8 | Python | false | false | 1,777 | py |
import FWCore.ParameterSet.Config as cms
source = cms.Source("EmptySource")
from Configuration.Generator.PythiaUEZ2starSettings_cfi import *
generator = cms.EDFilter("Pythia6GeneratorFilter",
pythiaHepMCVerbosity = cms.untracked.bool(False),
maxEventsToPrint = cms.untracked.int32(0),
pythiaPylistVerbosity = cms.untracked.int32(0),
filterEfficiency = cms.untracked.double(1.0),
comEnergy = cms.double(8000.0),
crossSection = cms.untracked.double(0.001246),
PythiaParameters = cms.PSet(
pythiaUESettingsBlock,
processParameters = cms.vstring(
'PMAS(42,1)=800.0 ! LQ mass',
'IMSS(21)=33 ! LUN number for SLHA File (must be 33)',
'IMSS(22)=33 ! Read-in SLHA decay table',
'MSEL=0 ! (D=1) to select between full user control (0, then use MSUB) and some preprogrammed alternative',
'MSUB(163)=1 ! g+g->LQ+LQbar',
'MSUB(164)=1 ! q+qbar->LQ+LQbar'),
# This is a vector of ParameterSet names to be read, in this order
parameterSets = cms.vstring('pythiaUESettings',
'processParameters',
'SLHAParameters'),
SLHAParameters = cms.vstring('SLHAFILE = Configuration/Generator/data/LQ_cmu_beta1.0.out')
)
)
configurationMetadata = cms.untracked.PSet(
version = cms.untracked.string('$Revision: 1.3 $'),
    name = cms.untracked.string('$Source: /cvs/CMSSW/CMSSW/Configuration/GenProduction/python/PYTHIA6_Exotica_LQ_cmu_250_7TeV_mumujj_cff.py,v $'),
annotation = cms.untracked.string('default documentation string for PYTHIA6_Exotica_LQ_cmu_250_7TeV_mumujj_cff.py')
)
ProductionFilterSequence = cms.Sequence(generator)
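# ProductionFilterSequence is the conventional sequence name that the CMS
# production tooling picks up from a generator fragment.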
| ["[email protected]"] | |
32d68d090c8a27cb436b9d09c1bee0e8e9633b49 | d4a78a9099884c1e1c203f7e5b78b844de053ff7 | /tensorflow/contrib/boosted_trees/estimator_batch/estimator_test.py | ee052ac60387d8f993e4942dd7dff39e191dd3a4 | ["Apache-2.0"] | permissive | pint1022/tensorflow | b4b7632c0f833135a0bb37ab5a939a6c1ec51ef6 | ab1f872bbcf7749112f76a7f9ba17406e8fbbf4e | refs/heads/master | 2020-04-15T00:16:48.132100 | 2019-02-05T17:48:11 | 2019-02-05T17:48:11 | 164,233,910 | 2 | 2 | Apache-2.0 | 2019-01-05T16:53:25 | 2019-01-05T16:53:25 | null | UTF-8 | Python | false | false | 31,748 | py |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for GBDT estimator."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tempfile
import numpy as np
from tensorflow.contrib.boosted_trees.estimator_batch import estimator
from tensorflow.contrib.boosted_trees.proto import learner_pb2
from tensorflow.contrib.layers.python.layers import feature_column as contrib_feature_column
from tensorflow.contrib.learn.python.learn.estimators import run_config
from tensorflow.python.estimator.canned import head as head_lib
from tensorflow.python.estimator.inputs import numpy_io
from tensorflow.python.feature_column import feature_column_lib as core_feature_column
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import test_util
from tensorflow.python.ops.losses import losses
from tensorflow.python.platform import gfile
from tensorflow.python.platform import googletest
from tensorflow.python.training import checkpoint_utils
def _train_input_fn():
features = {"x": constant_op.constant([[2.], [1.], [1.]])}
label = constant_op.constant([[1], [0], [0]], dtype=dtypes.int32)
return features, label
def _multiclass_train_input_fn():
features = {
"x": constant_op.constant([[2.], [1.], [1.], [5.], [3.5], [4.6], [3.5]])
}
label = constant_op.constant([[1], [0], [0], [2], [2], [0], [1]],
dtype=dtypes.int32)
return features, label
def _ranking_train_input_fn():
features = {
"a.f1": constant_op.constant([[3.], [0.3], [1.]]),
"a.f2": constant_op.constant([[0.1], [3.], [1.]]),
"b.f1": constant_op.constant([[13.], [0.4], [5.]]),
"b.f2": constant_op.constant([[1.], [3.], [0.01]]),
}
label = constant_op.constant([[0], [0], [1]], dtype=dtypes.int32)
return features, label
def _eval_input_fn():
features = {"x": constant_op.constant([[1.], [2.], [2.]])}
label = constant_op.constant([[0], [1], [1]], dtype=dtypes.int32)
return features, label
def _infer_ranking_train_input_fn():
features = {
"f1": constant_op.constant([[3.], [2], [1.]]),
"f2": constant_op.constant([[0.1], [3.], [1.]])
}
return features, None
_QUANTILE_REGRESSION_SIZE = 1000
def _quantile_regression_input_fns(two_dimension=False):
# The data generation is taken from
# http://scikit-learn.org/stable/auto_examples/ensemble/plot_gradient_boosting_quantile.html
np.random.seed(1)
def f(x):
"""The function to predict."""
return x * np.sin(x)
def g(x):
"""The function to predict."""
return x * np.cos(x)
# Training data.
x = np.atleast_2d(np.random.uniform(0, 10.0,
size=_QUANTILE_REGRESSION_SIZE)).T
x = x.astype(np.float32)
# Labels.
if not two_dimension:
y = f(x).ravel()
else:
y = np.column_stack((f(x).ravel(), g(x).ravel()))
# Add random noise.
dy = 1.5 + 1.0 * np.random.random(y.shape)
noise = np.random.normal(0, dy)
y += noise
y_original = y.astype(np.float32)
if not two_dimension:
y = y.reshape(_QUANTILE_REGRESSION_SIZE, 1)
train_input_fn = numpy_io.numpy_input_fn(
x=x,
y=y,
batch_size=_QUANTILE_REGRESSION_SIZE,
num_epochs=None,
shuffle=True)
# Test on the training data to make sure the predictions are calibrated.
test_input_fn = numpy_io.numpy_input_fn(
x=x,
y=y,
batch_size=_QUANTILE_REGRESSION_SIZE,
num_epochs=1,
shuffle=False)
return train_input_fn, test_input_fn, y_original
class BoostedTreeEstimatorTest(test_util.TensorFlowTestCase):
def setUp(self):
self._export_dir_base = tempfile.mkdtemp() + "export/"
gfile.MkDir(self._export_dir_base)
def _assert_checkpoint(self, model_dir, global_step):
reader = checkpoint_utils.load_checkpoint(model_dir)
self.assertEqual(global_step, reader.get_tensor(ops.GraphKeys.GLOBAL_STEP))
def testFitAndEvaluateDontThrowException(self):
learner_config = learner_pb2.LearnerConfig()
learner_config.num_classes = 2
learner_config.constraints.max_tree_depth = 1
model_dir = tempfile.mkdtemp()
config = run_config.RunConfig()
classifier = estimator.GradientBoostedDecisionTreeClassifier(
learner_config=learner_config,
num_trees=1,
examples_per_layer=3,
model_dir=model_dir,
config=config,
feature_columns=[contrib_feature_column.real_valued_column("x")])
classifier.fit(input_fn=_train_input_fn, steps=15)
classifier.evaluate(input_fn=_eval_input_fn, steps=1)
classifier.export(self._export_dir_base)
def testThatLeafIndexIsInPredictions(self):
learner_config = learner_pb2.LearnerConfig()
learner_config.num_classes = 2
learner_config.constraints.max_tree_depth = 1
model_dir = tempfile.mkdtemp()
config = run_config.RunConfig()
classifier = estimator.GradientBoostedDecisionTreeClassifier(
learner_config=learner_config,
num_trees=1,
examples_per_layer=3,
model_dir=model_dir,
config=config,
feature_columns=[contrib_feature_column.real_valued_column("x")],
output_leaf_index=True)
classifier.fit(input_fn=_train_input_fn, steps=15)
result_iter = classifier.predict(input_fn=_eval_input_fn)
for prediction_dict in result_iter:
self.assertTrue("leaf_index" in prediction_dict)
self.assertTrue("logits" in prediction_dict)
def testFitAndEvaluateDontThrowExceptionWithCoreForEstimator(self):
learner_config = learner_pb2.LearnerConfig()
learner_config.num_classes = 2
learner_config.constraints.max_tree_depth = 1
model_dir = tempfile.mkdtemp()
config = run_config.RunConfig()
# Use core head
head_fn = head_lib._binary_logistic_head_with_sigmoid_cross_entropy_loss(
loss_reduction=losses.Reduction.SUM_OVER_BATCH_SIZE)
model = estimator.GradientBoostedDecisionTreeEstimator(
head=head_fn,
learner_config=learner_config,
num_trees=1,
examples_per_layer=3,
model_dir=model_dir,
config=config,
feature_columns=[core_feature_column.numeric_column("x")],
use_core_libs=True)
model.fit(input_fn=_train_input_fn, steps=15)
model.evaluate(input_fn=_eval_input_fn, steps=1)
model.export(self._export_dir_base)
def testFitAndEvaluateDontThrowExceptionWithCoreForClassifier(self):
learner_config = learner_pb2.LearnerConfig()
learner_config.num_classes = 2
learner_config.constraints.max_tree_depth = 1
model_dir = tempfile.mkdtemp()
config = run_config.RunConfig()
classifier = estimator.GradientBoostedDecisionTreeClassifier(
learner_config=learner_config,
num_trees=1,
examples_per_layer=3,
model_dir=model_dir,
config=config,
feature_columns=[core_feature_column.numeric_column("x")],
use_core_libs=True)
classifier.fit(input_fn=_train_input_fn, steps=15)
classifier.evaluate(input_fn=_eval_input_fn, steps=1)
classifier.export(self._export_dir_base)
def testFitAndEvaluateDontThrowExceptionWithCoreForRegressor(self):
learner_config = learner_pb2.LearnerConfig()
learner_config.num_classes = 2
learner_config.constraints.max_tree_depth = 1
model_dir = tempfile.mkdtemp()
config = run_config.RunConfig()
regressor = estimator.GradientBoostedDecisionTreeRegressor(
learner_config=learner_config,
num_trees=1,
examples_per_layer=3,
model_dir=model_dir,
config=config,
feature_columns=[core_feature_column.numeric_column("x")],
use_core_libs=True)
regressor.fit(input_fn=_train_input_fn, steps=15)
regressor.evaluate(input_fn=_eval_input_fn, steps=1)
regressor.export(self._export_dir_base)
  def testRankingDontThrowExceptionForEstimator(self):
learner_config = learner_pb2.LearnerConfig()
learner_config.num_classes = 2
learner_config.constraints.max_tree_depth = 1
model_dir = tempfile.mkdtemp()
config = run_config.RunConfig()
head_fn = head_lib._binary_logistic_head_with_sigmoid_cross_entropy_loss(
loss_reduction=losses.Reduction.SUM_OVER_NONZERO_WEIGHTS)
model = estimator.GradientBoostedDecisionTreeRanker(
head=head_fn,
learner_config=learner_config,
num_trees=1,
examples_per_layer=3,
model_dir=model_dir,
config=config,
use_core_libs=True,
feature_columns=[
core_feature_column.numeric_column("f1"),
core_feature_column.numeric_column("f2")
],
ranking_model_pair_keys=("a", "b"))
model.fit(input_fn=_ranking_train_input_fn, steps=1000)
model.evaluate(input_fn=_ranking_train_input_fn, steps=1)
model.predict(input_fn=_infer_ranking_train_input_fn)
def testDoesNotOverrideGlobalSteps(self):
learner_config = learner_pb2.LearnerConfig()
learner_config.num_classes = 2
learner_config.constraints.max_tree_depth = 2
model_dir = tempfile.mkdtemp()
config = run_config.RunConfig()
classifier = estimator.GradientBoostedDecisionTreeClassifier(
learner_config=learner_config,
num_trees=1,
examples_per_layer=3,
model_dir=model_dir,
config=config,
feature_columns=[contrib_feature_column.real_valued_column("x")],
output_leaf_index=False)
classifier.fit(input_fn=_train_input_fn, steps=15)
    # With no global-step override, only 5 steps were used.
self._assert_checkpoint(classifier.model_dir, global_step=5)
def testOverridesGlobalSteps(self):
learner_config = learner_pb2.LearnerConfig()
learner_config.num_classes = 2
learner_config.constraints.max_tree_depth = 2
model_dir = tempfile.mkdtemp()
config = run_config.RunConfig()
classifier = estimator.GradientBoostedDecisionTreeClassifier(
learner_config=learner_config,
num_trees=1,
examples_per_layer=3,
model_dir=model_dir,
config=config,
feature_columns=[contrib_feature_column.real_valued_column("x")],
output_leaf_index=False,
override_global_step_value=10000000)
classifier.fit(input_fn=_train_input_fn, steps=15)
self._assert_checkpoint(classifier.model_dir, global_step=10000000)
def testFitAndEvaluateMultiClassTreePerClassDontThrowException(self):
learner_config = learner_pb2.LearnerConfig()
learner_config.num_classes = 3
learner_config.constraints.max_tree_depth = 1
learner_config.multi_class_strategy = (
learner_pb2.LearnerConfig.TREE_PER_CLASS)
model_dir = tempfile.mkdtemp()
config = run_config.RunConfig()
classifier = estimator.GradientBoostedDecisionTreeClassifier(
learner_config=learner_config,
n_classes=learner_config.num_classes,
num_trees=1,
examples_per_layer=7,
model_dir=model_dir,
config=config,
feature_columns=[contrib_feature_column.real_valued_column("x")])
classifier.fit(input_fn=_multiclass_train_input_fn, steps=100)
classifier.evaluate(input_fn=_eval_input_fn, steps=1)
classifier.export(self._export_dir_base)
result_iter = classifier.predict(input_fn=_eval_input_fn)
for prediction_dict in result_iter:
self.assertTrue("classes" in prediction_dict)
def testFitAndEvaluateMultiClassDiagonalDontThrowException(self):
learner_config = learner_pb2.LearnerConfig()
learner_config.num_classes = 3
learner_config.constraints.max_tree_depth = 1
learner_config.multi_class_strategy = (
learner_pb2.LearnerConfig.DIAGONAL_HESSIAN)
model_dir = tempfile.mkdtemp()
config = run_config.RunConfig()
classifier = estimator.GradientBoostedDecisionTreeClassifier(
learner_config=learner_config,
n_classes=learner_config.num_classes,
num_trees=1,
examples_per_layer=7,
model_dir=model_dir,
config=config,
center_bias=False,
feature_columns=[contrib_feature_column.real_valued_column("x")])
classifier.fit(input_fn=_multiclass_train_input_fn, steps=100)
classifier.evaluate(input_fn=_eval_input_fn, steps=1)
classifier.export(self._export_dir_base)
result_iter = classifier.predict(input_fn=_eval_input_fn)
for prediction_dict in result_iter:
self.assertTrue("classes" in prediction_dict)
def testFitAndEvaluateMultiClassFullDontThrowException(self):
learner_config = learner_pb2.LearnerConfig()
learner_config.num_classes = 3
learner_config.constraints.max_tree_depth = 1
learner_config.multi_class_strategy = (
learner_pb2.LearnerConfig.FULL_HESSIAN)
model_dir = tempfile.mkdtemp()
config = run_config.RunConfig()
classifier = estimator.GradientBoostedDecisionTreeClassifier(
learner_config=learner_config,
n_classes=learner_config.num_classes,
num_trees=1,
examples_per_layer=7,
model_dir=model_dir,
config=config,
center_bias=False,
feature_columns=[contrib_feature_column.real_valued_column("x")])
classifier.fit(input_fn=_multiclass_train_input_fn, steps=100)
classifier.evaluate(input_fn=_eval_input_fn, steps=1)
classifier.export(self._export_dir_base)
result_iter = classifier.predict(input_fn=_eval_input_fn)
for prediction_dict in result_iter:
self.assertTrue("classes" in prediction_dict)
# One dimensional quantile regression.
def testQuantileRegression(self):
learner_config = learner_pb2.LearnerConfig()
learner_config.num_classes = 2
learner_config.constraints.max_tree_depth = 3
learner_config.growing_mode = learner_pb2.LearnerConfig.WHOLE_TREE
learner_config.constraints.min_node_weight = 1 / _QUANTILE_REGRESSION_SIZE
learner_config.regularization.l2 = 1.0 / _QUANTILE_REGRESSION_SIZE
learner_config.regularization.l1 = 1.0 / _QUANTILE_REGRESSION_SIZE
learner_config.regularization.tree_complexity = (
1.0 / _QUANTILE_REGRESSION_SIZE)
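    # Regularization strengths are scaled by 1/N so the penalty per example
    # stays independent of the dataset size.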
train_input_fn, test_input_fn, y = _quantile_regression_input_fns()
# 95% percentile.
model_upper = estimator.GradientBoostedDecisionTreeQuantileRegressor(
quantiles=[0.95],
learner_config=learner_config,
num_trees=100,
examples_per_layer=_QUANTILE_REGRESSION_SIZE,
center_bias=False)
model_upper.fit(input_fn=train_input_fn, steps=1000)
result_iter = model_upper.predict(input_fn=test_input_fn)
upper = []
for prediction_dict in result_iter:
upper.append(prediction_dict["scores"])
frac_below_upper = round(1. * np.count_nonzero(upper > y) / len(y), 3)
# +/- 3%
self.assertTrue(frac_below_upper >= 0.92)
self.assertTrue(frac_below_upper <= 0.98)
train_input_fn, test_input_fn, _ = _quantile_regression_input_fns()
model_lower = estimator.GradientBoostedDecisionTreeQuantileRegressor(
quantiles=[0.05],
learner_config=learner_config,
num_trees=100,
examples_per_layer=_QUANTILE_REGRESSION_SIZE,
center_bias=False)
model_lower.fit(input_fn=train_input_fn, steps=1000)
result_iter = model_lower.predict(input_fn=test_input_fn)
lower = []
for prediction_dict in result_iter:
lower.append(prediction_dict["scores"])
frac_above_lower = round(1. * np.count_nonzero(lower < y) / len(y), 3)
# +/- 3%
self.assertTrue(frac_above_lower >= 0.92)
self.assertTrue(frac_above_lower <= 0.98)
# Multi-dimensional quantile regression.
def testQuantileRegressionMultiDimLabel(self):
learner_config = learner_pb2.LearnerConfig()
learner_config.num_classes = 2
learner_config.constraints.max_tree_depth = 3
learner_config.growing_mode = learner_pb2.LearnerConfig.WHOLE_TREE
learner_config.constraints.min_node_weight = 1 / _QUANTILE_REGRESSION_SIZE
learner_config.regularization.l2 = 1.0 / _QUANTILE_REGRESSION_SIZE
learner_config.regularization.l1 = 1.0 / _QUANTILE_REGRESSION_SIZE
learner_config.regularization.tree_complexity = (
1.0 / _QUANTILE_REGRESSION_SIZE)
train_input_fn, test_input_fn, y = _quantile_regression_input_fns(
two_dimension=True)
# 95% percentile.
model_upper = estimator.GradientBoostedDecisionTreeQuantileRegressor(
quantiles=[0.95],
learner_config=learner_config,
label_dimension=2,
num_trees=100,
examples_per_layer=_QUANTILE_REGRESSION_SIZE,
center_bias=False)
model_upper.fit(input_fn=train_input_fn, steps=1000)
result_iter = model_upper.predict(input_fn=test_input_fn)
upper = []
for prediction_dict in result_iter:
upper.append(prediction_dict["scores"])
count_below_upper = np.count_nonzero(upper > y, axis=0)
count_both_below_upper = np.count_nonzero(np.prod(upper > y, axis=1))
frac_below_upper_0 = round(1. * count_below_upper[0] / len(y), 3)
frac_below_upper_1 = round(1. * count_below_upper[1] / len(y), 3)
frac_both_below_upper = round(1. * count_both_below_upper / len(y), 3)
# +/- 3%
self.assertTrue(frac_below_upper_0 >= 0.92)
self.assertTrue(frac_below_upper_0 <= 0.98)
self.assertTrue(frac_below_upper_1 >= 0.92)
self.assertTrue(frac_below_upper_1 <= 0.98)
self.assertTrue(frac_both_below_upper >= 0.92)
self.assertTrue(frac_both_below_upper <= 0.98)
train_input_fn, test_input_fn, _ = _quantile_regression_input_fns(
two_dimension=True)
model_lower = estimator.GradientBoostedDecisionTreeQuantileRegressor(
quantiles=[0.05],
learner_config=learner_config,
label_dimension=2,
num_trees=100,
examples_per_layer=_QUANTILE_REGRESSION_SIZE,
center_bias=False)
model_lower.fit(input_fn=train_input_fn, steps=1000)
result_iter = model_lower.predict(input_fn=test_input_fn)
lower = []
for prediction_dict in result_iter:
lower.append(prediction_dict["scores"])
count_above_lower = np.count_nonzero(lower < y, axis=0)
    count_both_above_lower = np.count_nonzero(np.prod(lower < y, axis=1))
    frac_above_lower_0 = round(1. * count_above_lower[0] / len(y), 3)
    frac_above_lower_1 = round(1. * count_above_lower[1] / len(y), 3)
    frac_both_above_lower = round(1. * count_both_above_lower / len(y), 3)
# +/- 3%
self.assertTrue(frac_above_lower_0 >= 0.92)
self.assertTrue(frac_above_lower_0 <= 0.98)
self.assertTrue(frac_above_lower_1 >= 0.92)
self.assertTrue(frac_above_lower_1 <= 0.98)
self.assertTrue(frac_both_above_lower >= 0.92)
self.assertTrue(frac_both_above_lower <= 0.98)
class CoreGradientBoostedDecisionTreeEstimators(test_util.TensorFlowTestCase):
def testTrainEvaluateInferDoesNotThrowError(self):
head_fn = head_lib._binary_logistic_head_with_sigmoid_cross_entropy_loss(
loss_reduction=losses.Reduction.SUM_OVER_NONZERO_WEIGHTS)
learner_config = learner_pb2.LearnerConfig()
learner_config.num_classes = 2
learner_config.constraints.max_tree_depth = 1
model_dir = tempfile.mkdtemp()
config = run_config.RunConfig()
est = estimator.CoreGradientBoostedDecisionTreeEstimator(
head=head_fn,
learner_config=learner_config,
num_trees=1,
examples_per_layer=3,
model_dir=model_dir,
config=config,
feature_columns=[core_feature_column.numeric_column("x")])
# Train for a few steps.
est.train(input_fn=_train_input_fn, steps=1000)
est.evaluate(input_fn=_eval_input_fn, steps=1)
est.predict(input_fn=_eval_input_fn)
  def testRankingDontThrowExceptionForEstimator(self):
learner_config = learner_pb2.LearnerConfig()
learner_config.num_classes = 2
learner_config.constraints.max_tree_depth = 1
model_dir = tempfile.mkdtemp()
config = run_config.RunConfig()
head_fn = head_lib._binary_logistic_head_with_sigmoid_cross_entropy_loss(
loss_reduction=losses.Reduction.SUM_OVER_NONZERO_WEIGHTS)
est = estimator.CoreGradientBoostedDecisionTreeRanker(
head=head_fn,
learner_config=learner_config,
num_trees=1,
examples_per_layer=3,
model_dir=model_dir,
config=config,
feature_columns=[
core_feature_column.numeric_column("f1"),
core_feature_column.numeric_column("f2")
],
ranking_model_pair_keys=("a", "b"))
# Train for a few steps.
est.train(input_fn=_ranking_train_input_fn, steps=1000)
est.evaluate(input_fn=_ranking_train_input_fn, steps=1)
est.predict(input_fn=_infer_ranking_train_input_fn)
  def testFitAndEvaluateMultiClassTreePerClassDontThrowException(self):
n_classes = 3
learner_config = learner_pb2.LearnerConfig()
learner_config.num_classes = n_classes
learner_config.constraints.max_tree_depth = 1
learner_config.multi_class_strategy = (
learner_pb2.LearnerConfig.TREE_PER_CLASS)
head_fn = estimator.core_multiclass_head(n_classes=n_classes)
model_dir = tempfile.mkdtemp()
config = run_config.RunConfig()
classifier = estimator.CoreGradientBoostedDecisionTreeEstimator(
learner_config=learner_config,
head=head_fn,
num_trees=1,
center_bias=False,
examples_per_layer=7,
model_dir=model_dir,
config=config,
feature_columns=[core_feature_column.numeric_column("x")])
classifier.train(input_fn=_multiclass_train_input_fn, steps=100)
classifier.evaluate(input_fn=_multiclass_train_input_fn, steps=1)
classifier.predict(input_fn=_eval_input_fn)
def testFitAndEvaluateMultiClassDiagonalDontThrowException(self):
n_classes = 3
learner_config = learner_pb2.LearnerConfig()
learner_config.num_classes = n_classes
learner_config.constraints.max_tree_depth = 1
learner_config.multi_class_strategy = (
learner_pb2.LearnerConfig.DIAGONAL_HESSIAN)
head_fn = estimator.core_multiclass_head(n_classes=n_classes)
model_dir = tempfile.mkdtemp()
config = run_config.RunConfig()
classifier = estimator.CoreGradientBoostedDecisionTreeEstimator(
learner_config=learner_config,
head=head_fn,
num_trees=1,
center_bias=False,
examples_per_layer=7,
model_dir=model_dir,
config=config,
feature_columns=[core_feature_column.numeric_column("x")])
classifier.train(input_fn=_multiclass_train_input_fn, steps=100)
classifier.evaluate(input_fn=_multiclass_train_input_fn, steps=1)
classifier.predict(input_fn=_eval_input_fn)
def testFitAndEvaluateMultiClassFullDontThrowException(self):
n_classes = 3
learner_config = learner_pb2.LearnerConfig()
learner_config.num_classes = n_classes
learner_config.constraints.max_tree_depth = 1
learner_config.multi_class_strategy = (
learner_pb2.LearnerConfig.FULL_HESSIAN)
head_fn = estimator.core_multiclass_head(n_classes=n_classes)
model_dir = tempfile.mkdtemp()
config = run_config.RunConfig()
classifier = estimator.CoreGradientBoostedDecisionTreeEstimator(
learner_config=learner_config,
head=head_fn,
num_trees=1,
center_bias=False,
examples_per_layer=7,
model_dir=model_dir,
config=config,
feature_columns=[core_feature_column.numeric_column("x")])
classifier.train(input_fn=_multiclass_train_input_fn, steps=100)
classifier.evaluate(input_fn=_multiclass_train_input_fn, steps=1)
classifier.predict(input_fn=_eval_input_fn)
def testWeightedCategoricalColumn(self):
head_fn = head_lib._binary_logistic_head_with_sigmoid_cross_entropy_loss(
loss_reduction=losses.Reduction.SUM_OVER_NONZERO_WEIGHTS)
learner_config = learner_pb2.LearnerConfig()
learner_config.num_classes = 2
learner_config.constraints.max_tree_depth = 1
model_dir = tempfile.mkdtemp()
config = run_config.RunConfig()
feature_columns = [
core_feature_column.weighted_categorical_column(
categorical_column=core_feature_column
.categorical_column_with_vocabulary_list(
key="word", vocabulary_list=["the", "cat", "dog"]),
weight_feature_key="weight")
]
labels = np.array([[1], [1], [0], [0.]], dtype=np.float32)
def _make_input_fn():
def _input_fn():
features_dict = {}
# Sparse tensor representing
        # example 0: "cat", "the"
        # example 1: "dog"
        # example 2: -
        # example 3: "the"
        # Weights for the words are: "cat" - 5, "dog" - 6, "the" - 1.
features_dict["word"] = sparse_tensor.SparseTensor(
indices=[[0, 0], [0, 1], [1, 0], [3, 0]],
values=constant_op.constant(["the", "cat", "dog", "the"],
dtype=dtypes.string),
dense_shape=[4, 3])
features_dict["weight"] = sparse_tensor.SparseTensor(
indices=[[0, 0], [0, 1], [1, 0], [3, 0]],
values=[1., 5., 6., 1.],
dense_shape=[4, 3])
return features_dict, labels
return _input_fn
est = estimator.CoreGradientBoostedDecisionTreeEstimator(
head=head_fn,
learner_config=learner_config,
num_trees=1,
examples_per_layer=3,
model_dir=model_dir,
config=config,
feature_columns=feature_columns)
input_fn = _make_input_fn()
est.train(input_fn=input_fn, steps=100)
est.evaluate(input_fn=input_fn, steps=1)
est.predict(input_fn=input_fn)
# One dimensional quantile regression.
def testQuantileRegression(self):
learner_config = learner_pb2.LearnerConfig()
learner_config.num_classes = 2
learner_config.constraints.max_tree_depth = 3
learner_config.growing_mode = learner_pb2.LearnerConfig.WHOLE_TREE
learner_config.constraints.min_node_weight = 1 / _QUANTILE_REGRESSION_SIZE
learner_config.regularization.l2 = 1.0 / _QUANTILE_REGRESSION_SIZE
learner_config.regularization.l1 = 1.0 / _QUANTILE_REGRESSION_SIZE
learner_config.regularization.tree_complexity = (
1.0 / _QUANTILE_REGRESSION_SIZE)
train_input_fn, test_input_fn, y = _quantile_regression_input_fns()
y = y.reshape(_QUANTILE_REGRESSION_SIZE, 1)
# 95% percentile.
model_upper = estimator.CoreGradientBoostedDecisionTreeQuantileRegressor(
quantiles=[0.95],
learner_config=learner_config,
num_trees=100,
examples_per_layer=_QUANTILE_REGRESSION_SIZE,
center_bias=False)
model_upper.train(input_fn=train_input_fn, steps=1000)
result_iter = model_upper.predict(input_fn=test_input_fn)
upper = []
for prediction_dict in result_iter:
upper.append(prediction_dict["predictions"])
frac_below_upper = round(1. * np.count_nonzero(upper > y) / len(y), 3)
# +/- 3%
self.assertTrue(frac_below_upper >= 0.92)
self.assertTrue(frac_below_upper <= 0.98)
train_input_fn, test_input_fn, _ = _quantile_regression_input_fns()
model_lower = estimator.CoreGradientBoostedDecisionTreeQuantileRegressor(
quantiles=[0.05],
learner_config=learner_config,
num_trees=100,
examples_per_layer=_QUANTILE_REGRESSION_SIZE,
center_bias=False)
model_lower.train(input_fn=train_input_fn, steps=1000)
result_iter = model_lower.predict(input_fn=test_input_fn)
lower = []
for prediction_dict in result_iter:
lower.append(prediction_dict["predictions"])
frac_above_lower = round(1. * np.count_nonzero(lower < y) / len(y), 3)
# +/- 3%
self.assertTrue(frac_above_lower >= 0.92)
self.assertTrue(frac_above_lower <= 0.98)
# Multi-dimensional quantile regression.
def testQuantileRegressionMultiDimLabel(self):
learner_config = learner_pb2.LearnerConfig()
learner_config.num_classes = 2
learner_config.constraints.max_tree_depth = 3
learner_config.growing_mode = learner_pb2.LearnerConfig.WHOLE_TREE
learner_config.constraints.min_node_weight = 1 / _QUANTILE_REGRESSION_SIZE
learner_config.regularization.l2 = 1.0 / _QUANTILE_REGRESSION_SIZE
learner_config.regularization.l1 = 1.0 / _QUANTILE_REGRESSION_SIZE
learner_config.regularization.tree_complexity = (
1.0 / _QUANTILE_REGRESSION_SIZE)
train_input_fn, test_input_fn, y = _quantile_regression_input_fns(
two_dimension=True)
y = y.reshape(_QUANTILE_REGRESSION_SIZE, 2)
# 95% percentile.
model_upper = estimator.CoreGradientBoostedDecisionTreeQuantileRegressor(
quantiles=[0.95],
learner_config=learner_config,
num_trees=100,
label_dimension=2,
examples_per_layer=_QUANTILE_REGRESSION_SIZE,
center_bias=False)
model_upper.train(input_fn=train_input_fn, steps=1000)
result_iter = model_upper.predict(input_fn=test_input_fn)
upper = []
for prediction_dict in result_iter:
upper.append(prediction_dict["predictions"])
count_below_upper = np.count_nonzero(upper > y, axis=0)
count_both_below_upper = np.count_nonzero(np.prod(upper > y, axis=1))
frac_below_upper_0 = round(1. * count_below_upper[0] / len(y), 3)
frac_below_upper_1 = round(1. * count_below_upper[1] / len(y), 3)
frac_both_below_upper = round(1. * count_both_below_upper / len(y), 3)
# +/- 3%
self.assertTrue(frac_below_upper_0 >= 0.92)
self.assertTrue(frac_below_upper_0 <= 0.98)
self.assertTrue(frac_below_upper_1 >= 0.92)
self.assertTrue(frac_below_upper_1 <= 0.98)
self.assertTrue(frac_both_below_upper >= 0.92)
self.assertTrue(frac_both_below_upper <= 0.98)
train_input_fn, test_input_fn, _ = _quantile_regression_input_fns(
two_dimension=True)
model_lower = estimator.CoreGradientBoostedDecisionTreeQuantileRegressor(
quantiles=[0.05],
learner_config=learner_config,
num_trees=100,
label_dimension=2,
examples_per_layer=_QUANTILE_REGRESSION_SIZE,
center_bias=False)
model_lower.train(input_fn=train_input_fn, steps=1000)
result_iter = model_lower.predict(input_fn=test_input_fn)
lower = []
for prediction_dict in result_iter:
lower.append(prediction_dict["predictions"])
count_above_lower = np.count_nonzero(lower < y, axis=0)
    count_both_above_lower = np.count_nonzero(np.prod(lower < y, axis=1))
    frac_above_lower_0 = round(1. * count_above_lower[0] / len(y), 3)
    frac_above_lower_1 = round(1. * count_above_lower[1] / len(y), 3)
    frac_both_above_lower = round(1. * count_both_above_lower / len(y), 3)
# +/- 3%
self.assertTrue(frac_above_lower_0 >= 0.92)
self.assertTrue(frac_above_lower_0 <= 0.98)
self.assertTrue(frac_above_lower_1 >= 0.92)
self.assertTrue(frac_above_lower_1 <= 0.98)
self.assertTrue(frac_both_above_lower >= 0.92)
self.assertTrue(frac_both_above_lower <= 0.98)
if __name__ == "__main__":
googletest.main()
| ["[email protected]"] | |
c9d58c17e864ab025f0a150793e137ea2d21d2f7 | 009628e385aca8552dad5c1c5cba018ca6e5954d | /scripts/cazalsconnolling | 63c09a69565fefe3cdc31f78c056fdc17bca2aa8 | [] | no_license | csrocha/python-mtk | 565ebcfeb668a6409d48135bf081321d8121b263 | c3ba520f55c2e204feb6b98251abcb046e51c6cd | refs/heads/main | 2023-01-12T02:46:44.457520 | 2020-11-17T20:20:59 | 2020-11-17T20:20:59 | 313,939,251 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,092 | #!/usr/bin/python
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
import sys, os
import os.path as path
from math import copysign
from numpy import array, zeros, arange, float, abs, argmax, all
from numpy import ndenumerate,round, dot, outer, arccos, pi, nan
from numpy.linalg import det, norm
from mtk.log import log
from mtk.geometry.vol import BasicVolume
from mtk.geometry.iter import face, line
from mtk.geometry.planecut import cutfaces
from mtk.geometry.triangle import triangle
from mtk.geometry.sphere import sphere
from mtk.geometry.intersection import triangle_sphere
from mtk.geometry.line import segment
from mtk.geometry.arc import arc
from mtk.geometry.polygon import polygon
from mtk.io.ply_ff import load_ply
from mtk.io.vtk_ff import writer
import sys
import bisect as bs
import logging
def solidangles(plyfilename, outfilename, radius):
log.info("Loading")
vertexs, faces = load_ply(open(plyfilename))
vertexs = vertexs[:,0:3]
try:
P = polygon(vertexs, faces)
except ValueError:
import pdb; pdb.set_trace()
raise RuntimeError('Not all faces are triangles in your ply file')
l = float(len(P.v))
values = []
c = 0
for i in range(len(P.v)):
try:
values.append(P.area(i, radius))
except RuntimeError as m:
log.warning("Trouble to calculate curvature to vertex %i: '%s'" % (i, m))
values.append(nan)
log.info("Storing")
w = writer(points=vertexs, polygons=faces, scalars=values)
w.write(outfilename)
_usage_ = """
Calculate curvature to a body using cazals algorithm.
cazalsconnolling [ply filename] [sphere radius] [vtk output filename]
"""
if __name__ == "__main__":
    if len(sys.argv) != 4:
        print _usage_
        sys.exit(1)
log.info("Starting connolling")
log.info("Shell command '%s'" % ' '.join(sys.argv))
log.info("Work path '%s'" % os.getcwd())
plyfile = sys.argv[1]
radius = float(sys.argv[2])
outfile = sys.argv[3]
solidangles(plyfile, outfile, radius)
log.info("Stop conolling execution")
| ["[email protected]"] | |
c4166f9d26df53b1e991a74fb3dd41c6032084d8 | 75c903567168c968ceb8ff780ff9b7c596620655 | /tests.py | cce6586afa285416561ed2d27e0ae3797ce08c31 | ["MIT"] | permissive | AxelVoitier/streams | 5f09e109c3743746e597f93d1bc13c0970c16f1e | 7624472790f33a1a268697cec3173224c60af74b | refs/heads/master | 2021-01-15T23:07:31.112439 | 2014-04-06T09:52:46 | 2014-04-06T09:52:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,234 | py |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
from itertools import chain
from operator import mul
from random import shuffle
try:
from cdecimal import Decimal
except ImportError:
from decimal import Decimal
try:
from unittest2 import TestCase, main
except ImportError:
from unittest import TestCase, main
# noinspection PyUnresolvedReferences
from six.moves import xrange
from streams import Stream
###############################################################################
class StreamsCase(TestCase):
def test_filter(self):
stream = Stream(range(10))
stream = stream.filter(lambda item: item % 2)
self.assertEqual(stream.sum(), 25)
stream = Stream(dict((v, v) for v in xrange(100)))
stream = stream.filter(lambda kv: kv[0] % 2)
stream = stream.filter(lambda kv: kv[0] % 10, parallel=6)
stream = stream.limit(5).keys()
stream = list(stream)
self.assertListEqual(list(stream), [1, 3, 5, 7, 9])
def test_map(self):
stream = Stream(range(10))
stream = stream.map(lambda item: -item)
self.assertEqual(max(stream), 0)
stream = Stream(dict((v, v) for v in xrange(100)))
stream = stream.values().skip(10).limit(3)
self.assertListEqual(list(stream), [10, 11, 12])
def test_distinct(self):
elements = chain(xrange(10), xrange(10), xrange(10), xrange(20))
stream = Stream(elements)
self.assertListEqual(list(stream.distinct()), list(xrange(20)))
def test_sorted(self):
elements = reversed(xrange(100))
stream = Stream(elements)
stream = stream.sorted()
self.assertListEqual(list(stream), list(xrange(100)))
def test_limit(self):
stream = Stream(xrange(100))
stream = stream.limit(50)
self.assertEqual(stream.count(), 50)
stream = Stream(xrange(100))
stream = stream.limit(1000)
self.assertEqual(stream.count(), 100)
def test_skip(self):
stream = Stream(xrange(100))
stream = stream.skip(50)
self.assertEqual(list(stream), list(xrange(50, 100)))
def test_reversed(self):
stream = Stream(xrange(100))
stream = stream.reversed()
self.assertEqual(list(stream), list(reversed(xrange(100))))
def test_reduce(self):
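        # skip(1) drops the leading 0, so reducing 1..10 with mul yields
        # 10! = 3,628,800.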
stream = Stream(xrange(11))
stream = stream.skip(1)
stream = stream.reduce(mul)
self.assertEqual(stream, 3628800)
def test_median(self):
self.assertEqual(5, Stream(xrange(10)).median())
self.assertEqual(5, Stream(xrange(11)).median())
self.assertEqual(6, Stream(xrange(12)).median())
arr = list(xrange(12))
shuffle(arr)
self.assertEqual(6, Stream(arr).median())
arr = list(xrange(11))
shuffle(arr)
self.assertEqual(5, Stream(arr).median())
def test_nth(self):
self.assertEqual(0, Stream(xrange(10)).nth(1))
self.assertEqual(1, Stream(xrange(10)).nth(2))
self.assertEqual(9, Stream(xrange(10)).nth(10))
self.assertIsNone(Stream(xrange(10)).nth(100))
def test_any(self):
self.assertTrue(Stream(xrange(10)).any())
self.assertFalse(Stream([]).any())
self.assertTrue(Stream(xrange(10)).any(lambda item: item > 5,
parallel=True))
self.assertTrue(Stream(xrange(10)).any(lambda item: item > 5))
self.assertFalse(Stream(xrange(10)).any(lambda item: item < -1,
parallel=True))
def test_average(self):
self.assertAlmostEqual(Stream(xrange(200)).average(), 99.5)
def test_all(self):
self.assertTrue(Stream(xrange(1, 10)).all(parallel=True))
self.assertTrue(Stream(xrange(1, 10)).all())
self.assertTrue(Stream([]).all())
self.assertTrue(Stream([]).all())
self.assertFalse(Stream(xrange(10)).all(parallel=True))
self.assertFalse(Stream(xrange(10)).all())
self.assertFalse(Stream(xrange(10)).all(lambda item: item < 5))
self.assertTrue(Stream(xrange(10)).all(lambda item: item < 100))
def test_range(self):
self.assertListEqual(list(Stream.range(100)), list(xrange(100)))
def test_concat(self):
stream = Stream.concat(xrange(10), xrange(10), xrange(10))
self.assertListEqual(list(stream.distinct()), list(xrange(10)))
stream = Stream.concat(xrange(10), xrange(10), xrange(10))
self.assertEqual(stream.count(), 30)
def test_first(self):
stream = Stream(xrange(10))
self.assertEqual(stream.first, 0)
self.assertEqual(stream.first, 0)
self.assertEqual(stream.first, 0)
self.assertEqual(stream.count(), 10)
def test_regexp(self):
stream = Stream(str(item) for item in xrange(1000))
stream = stream.regexp(r"^10*$")
stream = stream.ints()
self.assertListEqual(list(stream), [1, 10, 100])
def test_divisibleby(self):
stream = Stream(xrange(2000))
stream = stream.ints().divisible_by(10)
self.assertEqual(stream.count(), 200)
stream = Stream(xrange(2000))
stream = stream.divisible_by(1000)
        self.assertEqual(list(stream), [0, 1000])
def test_evens(self):
stream = Stream(xrange(200))
stream = stream.ints().evens()
elements = list(stream)
self.assertEqual(len(elements), 100)
self.assertTrue(all(item % 2 == 0 for item in elements))
def test_odds(self):
stream = Stream(xrange(200))
stream = stream.odds()
elements = list(stream)
self.assertEqual(len(elements), 100)
self.assertFalse(any(item % 2 == 0 for item in elements))
def test_instances_of(self):
elements = list(xrange(100))
# noinspection PyTypeChecker
elements = elements + [str(item) for item in elements] + [None, None]
strings = list(Stream(elements).instances_of(str))
ints = list(Stream(elements).instances_of(int))
self.assertEqual(len(strings), 100)
self.assertTrue(all(isinstance(item, str) for item in strings))
self.assertEqual(len(ints), 100)
self.assertTrue(all(isinstance(item, int) for item in ints))
def test_exclude_nones(self):
elements = list(xrange(100)) + [None, None]
without_nones = list(Stream(elements).exclude_nones())
self.assertEqual(without_nones, list(xrange(100)))
def test_exclude(self):
elements = list(xrange(100))
evens = list(Stream(elements).exclude(lambda item: item % 2 == 0))
evens2 = list(Stream(elements).evens())
self.assertEqual(evens, evens2)
def test_only_trues(self):
elements = list(xrange(5)) + [True, False, True, None, 1, object()]
stream = Stream(elements).only_trues()
self.assertTrue(all(bool(item) for item in stream))
def test_only_falses(self):
elements = list(xrange(5)) + [True, False, True, None, 1, object()]
stream = Stream(elements).only_falses()
self.assertFalse(any(bool(item) for item in stream))
def test_only_nones(self):
elements = list(xrange(5)) + [True, False, True, None, 1, object()]
stream = Stream(elements).only_nones()
self.assertTrue(all(item is None for item in stream))
def test_count(self):
elements = list(xrange(5)) * 2
self.assertEqual(Stream(elements).count(), 10)
self.assertEqual(Stream(elements).count(1), 2)
def test_sum(self):
elements = list(xrange(5))
int_result = Stream(elements).ints().sum()
float_result = Stream(elements).floats().sum()
decimal_result = Stream(elements).decimals().sum()
self.assertEqual(int_result, 10)
self.assertIsInstance(int_result, int)
self.assertAlmostEqual(float_result, 10)
self.assertIsInstance(float_result, float)
self.assertEqual(decimal_result, Decimal("10"))
self.assertIsInstance(decimal_result, Decimal)
if __name__ == "__main__":
main()
| ["[email protected]"] | |
b2514650358f9f9158ce3d6cff20a73195fa8295 | c18bdb1abf3bb9be1997a568e2260850c11137ab | /lib/tinyusb/hw/mcu/microchip/samd/asf4/tools/update_from_atmel_start.py | 0ef2a04c372b36a6d8f7d128ead7f5776f2a39e1 | ["MIT"] | permissive | diybitcoinhardware/micropython | a730026ad7fa4e3cff4cb5ed14dd6b3f1fb54734 | 6bdf1b69162b673d48042ccd021f9efa019091fa | refs/heads/master | 2022-11-23T04:42:47.976301 | 2022-11-07T14:25:09 | 2022-11-07T14:25:28 | 182,322,508 | 4 | 4 | MIT | 2022-10-16T11:47:28 | 2019-04-19T20:34:49 | C | UTF-8 | Python | false | false | 2,514 | py |
import requests
import zipfile
import os.path
import shutil
import os
import os.path
import sys
import subprocess
if not subprocess.check_output(['git', 'rev-parse', '--show-toplevel'], universal_newlines=True).strip() == os.getcwd():
print('Please run "{}" from the top directory in the asf4 repo.'.format(sys.argv[0]))
sys.exit(1)
# Change .gitignore if you change these dir names.
DOWNLOADED_ZIP_DIR = 'downloaded-zip'
DOWNLOADED_DIR = 'downloaded'
for chip in ['samd21', 'samd51']:
r = None
os.makedirs(DOWNLOADED_ZIP_DIR, exist_ok=True)
filename = os.path.join(DOWNLOADED_ZIP_DIR, chip + '.zip')
if os.path.isfile(filename):
print('NOTE:', filename, 'already downloaded. Delete it and re-run if you want to re-download')
else:
print("Downloading", filename, "...")
with open('tools/' + chip + '.json', 'r') as project_json:
headers = {'content-type': 'text/plain'}
r = requests.post('http://start.atmel.com/api/v1/generate/?format=atzip&compilers=[atmel_studio,make]&file_name_base=My%20Project', headers=headers, data=project_json)
if not r.ok:
# Double check that the JSON is minified. If it's not, you'll get a 404.
print(r.text)
sys.exit(1)
with open(filename, 'wb') as out:
out.write(r.content)
# Extract to a temporary location and normalize before replacing the existing location.
z = zipfile.ZipFile(filename)
downloaded_chip_dir = os.path.join(DOWNLOADED_DIR, chip)
# Clean up old zip extraction.
if os.path.isdir(downloaded_chip_dir):
shutil.rmtree(downloaded_chip_dir)
print("Unzipping ...")
z.extractall(downloaded_chip_dir)
# Remove all carriage returns.
for dirpath, dirnames, filenames in os.walk(downloaded_chip_dir):
for fn in filenames:
fn = os.path.join(dirpath, fn)
subprocess.run(['sed', '-i', 's/\r//g', fn])
# Move files to match SAMD51 structure.
if chip == 'samd21':
shutil.move(os.path.join(downloaded_chip_dir, 'samd21a/include'), downloaded_chip_dir)
shutil.move(os.path.join(downloaded_chip_dir, 'samd21a/gcc/gcc'), os.path.join(downloaded_chip_dir, 'gcc'))
shutil.move(os.path.join(downloaded_chip_dir, 'samd21a/gcc/system_samd21.c'), os.path.join(downloaded_chip_dir, 'gcc'))
print("Updating",chip,"from",downloaded_chip_dir)
subprocess.run(['rsync', '-r', '--delete', downloaded_chip_dir + '/', chip], check=True)
| ["[email protected]"] | |
eb41704997bf530667ea55f44b8a1c784eaeb6b5 | 871454bb6203d26f93c144cb8604e5c6276ab94b | /auth/demo/serializers.py | eae85b34749d46389279c7317dc7fb45f67302a9 | [] | no_license | netology-code/DJ_code | 1b13a51b7f85c1f35dcfb2b2d010fe2ded7eb297 | a9b657aa6e2d9b3a9f4f4c4c120bb30e8802c042 | refs/heads/master | 2023-06-20T11:35:44.791530 | 2021-07-25T14:41:21 | 2021-07-25T14:41:21 | 388,563,577 | 3 | 44 | null | null | null | null | UTF-8 | Python | false | false | 254 | py |
from rest_framework import serializers
from demo.models import Adv
class AdvSerializer(serializers.ModelSerializer):
class Meta:
model = Adv
fields = ['id', 'user', 'text', 'created_at', 'open']
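        # 'user' is read-only so it is filled in server-side (typically from
        # request.user in the view), not by the client.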
        read_only_fields = ['user',]
| ["[email protected]"] | |
56f1c883e8021be17c0479ddf05c910dcebf5f12 | 48e124e97cc776feb0ad6d17b9ef1dfa24e2e474 | /sdk/python/pulumi_azure_native/compute/virtual_machine.py | dbc0cf23804156f5474f9678ad015327f98ecc81 | ["BSD-3-Clause", "Apache-2.0"] | permissive | bpkgoud/pulumi-azure-native | 0817502630062efbc35134410c4a784b61a4736d | a3215fe1b87fba69294f248017b1591767c2b96c | refs/heads/master | 2023-08-29T22:39:49.984212 | 2021-11-15T12:43:41 | 2021-11-15T12:43:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 62,321 | py |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['VirtualMachineArgs', 'VirtualMachine']
@pulumi.input_type
class VirtualMachineArgs:
def __init__(__self__, *,
resource_group_name: pulumi.Input[str],
additional_capabilities: Optional[pulumi.Input['AdditionalCapabilitiesArgs']] = None,
availability_set: Optional[pulumi.Input['SubResourceArgs']] = None,
billing_profile: Optional[pulumi.Input['BillingProfileArgs']] = None,
diagnostics_profile: Optional[pulumi.Input['DiagnosticsProfileArgs']] = None,
eviction_policy: Optional[pulumi.Input[Union[str, 'VirtualMachineEvictionPolicyTypes']]] = None,
extended_location: Optional[pulumi.Input['ExtendedLocationArgs']] = None,
extensions_time_budget: Optional[pulumi.Input[str]] = None,
hardware_profile: Optional[pulumi.Input['HardwareProfileArgs']] = None,
host: Optional[pulumi.Input['SubResourceArgs']] = None,
host_group: Optional[pulumi.Input['SubResourceArgs']] = None,
identity: Optional[pulumi.Input['VirtualMachineIdentityArgs']] = None,
license_type: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
network_profile: Optional[pulumi.Input['NetworkProfileArgs']] = None,
os_profile: Optional[pulumi.Input['OSProfileArgs']] = None,
plan: Optional[pulumi.Input['PlanArgs']] = None,
platform_fault_domain: Optional[pulumi.Input[int]] = None,
priority: Optional[pulumi.Input[Union[str, 'VirtualMachinePriorityTypes']]] = None,
proximity_placement_group: Optional[pulumi.Input['SubResourceArgs']] = None,
scheduled_events_profile: Optional[pulumi.Input['ScheduledEventsProfileArgs']] = None,
security_profile: Optional[pulumi.Input['SecurityProfileArgs']] = None,
storage_profile: Optional[pulumi.Input['StorageProfileArgs']] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
user_data: Optional[pulumi.Input[str]] = None,
virtual_machine_scale_set: Optional[pulumi.Input['SubResourceArgs']] = None,
vm_name: Optional[pulumi.Input[str]] = None,
zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a VirtualMachine resource.
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input['AdditionalCapabilitiesArgs'] additional_capabilities: Specifies additional capabilities enabled or disabled on the virtual machine.
:param pulumi.Input['SubResourceArgs'] availability_set: Specifies information about the availability set that the virtual machine should be assigned to. Virtual machines specified in the same availability set are allocated to different nodes to maximize availability. For more information about availability sets, see [Availability sets overview](https://docs.microsoft.com/azure/virtual-machines/availability-set-overview). <br><br> For more information on Azure planned maintenance, see [Maintenance and updates for Virtual Machines in Azure](https://docs.microsoft.com/azure/virtual-machines/maintenance-and-updates) <br><br> Currently, a VM can only be added to availability set at creation time. The availability set to which the VM is being added should be under the same resource group as the availability set resource. An existing VM cannot be added to an availability set. <br><br>This property cannot exist along with a non-null properties.virtualMachineScaleSet reference.
:param pulumi.Input['BillingProfileArgs'] billing_profile: Specifies the billing related details of a Azure Spot virtual machine. <br><br>Minimum api-version: 2019-03-01.
:param pulumi.Input['DiagnosticsProfileArgs'] diagnostics_profile: Specifies the boot diagnostic settings state. <br><br>Minimum api-version: 2015-06-15.
:param pulumi.Input[Union[str, 'VirtualMachineEvictionPolicyTypes']] eviction_policy: Specifies the eviction policy for the Azure Spot virtual machine and Azure Spot scale set. <br><br>For Azure Spot virtual machines, both 'Deallocate' and 'Delete' are supported and the minimum api-version is 2019-03-01. <br><br>For Azure Spot scale sets, both 'Deallocate' and 'Delete' are supported and the minimum api-version is 2017-10-30-preview.
:param pulumi.Input['ExtendedLocationArgs'] extended_location: The extended location of the Virtual Machine.
:param pulumi.Input[str] extensions_time_budget: Specifies the time alloted for all extensions to start. The time duration should be between 15 minutes and 120 minutes (inclusive) and should be specified in ISO 8601 format. The default value is 90 minutes (PT1H30M). <br><br> Minimum api-version: 2020-06-01
:param pulumi.Input['HardwareProfileArgs'] hardware_profile: Specifies the hardware settings for the virtual machine.
:param pulumi.Input['SubResourceArgs'] host: Specifies information about the dedicated host that the virtual machine resides in. <br><br>Minimum api-version: 2018-10-01.
:param pulumi.Input['SubResourceArgs'] host_group: Specifies information about the dedicated host group that the virtual machine resides in. <br><br>Minimum api-version: 2020-06-01. <br><br>NOTE: User cannot specify both host and hostGroup properties.
:param pulumi.Input['VirtualMachineIdentityArgs'] identity: The identity of the virtual machine, if configured.
:param pulumi.Input[str] license_type: Specifies that the image or disk that is being used was licensed on-premises. <br><br> Possible values for Windows Server operating system are: <br><br> Windows_Client <br><br> Windows_Server <br><br> Possible values for Linux Server operating system are: <br><br> RHEL_BYOS (for RHEL) <br><br> SLES_BYOS (for SUSE) <br><br> For more information, see [Azure Hybrid Use Benefit for Windows Server](https://docs.microsoft.com/azure/virtual-machines/windows/hybrid-use-benefit-licensing) <br><br> [Azure Hybrid Use Benefit for Linux Server](https://docs.microsoft.com/azure/virtual-machines/linux/azure-hybrid-benefit-linux) <br><br> Minimum api-version: 2015-06-15
:param pulumi.Input[str] location: Resource location
:param pulumi.Input['NetworkProfileArgs'] network_profile: Specifies the network interfaces of the virtual machine.
:param pulumi.Input['OSProfileArgs'] os_profile: Specifies the operating system settings used while creating the virtual machine. Some of the settings cannot be changed once VM is provisioned.
:param pulumi.Input['PlanArgs'] plan: Specifies information about the marketplace image used to create the virtual machine. This element is only used for marketplace images. Before you can use a marketplace image from an API, you must enable the image for programmatic use. In the Azure portal, find the marketplace image that you want to use and then click **Want to deploy programmatically, Get Started ->**. Enter any required information and then click **Save**.
:param pulumi.Input[int] platform_fault_domain: Specifies the scale set logical fault domain into which the Virtual Machine will be created. By default, the Virtual Machine will by automatically assigned to a fault domain that best maintains balance across available fault domains.<br><li>This is applicable only if the 'virtualMachineScaleSet' property of this Virtual Machine is set.<li>The Virtual Machine Scale Set that is referenced, must have 'platformFaultDomainCount' > 1.<li>This property cannot be updated once the Virtual Machine is created.<li>Fault domain assignment can be viewed in the Virtual Machine Instance View.<br><br>Minimum api‐version: 2020‐12‐01
:param pulumi.Input[Union[str, 'VirtualMachinePriorityTypes']] priority: Specifies the priority for the virtual machine. <br><br>Minimum api-version: 2019-03-01
:param pulumi.Input['SubResourceArgs'] proximity_placement_group: Specifies information about the proximity placement group that the virtual machine should be assigned to. <br><br>Minimum api-version: 2018-04-01.
:param pulumi.Input['ScheduledEventsProfileArgs'] scheduled_events_profile: Specifies Scheduled Event related configurations.
:param pulumi.Input['SecurityProfileArgs'] security_profile: Specifies the Security related profile settings for the virtual machine.
:param pulumi.Input['StorageProfileArgs'] storage_profile: Specifies the storage settings for the virtual machine disks.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags
:param pulumi.Input[str] user_data: UserData for the VM, which must be base-64 encoded. Customer should not pass any secrets in here. <br><br>Minimum api-version: 2021-03-01
:param pulumi.Input['SubResourceArgs'] virtual_machine_scale_set: Specifies information about the virtual machine scale set that the virtual machine should be assigned to. Virtual machines specified in the same virtual machine scale set are allocated to different nodes to maximize availability. Currently, a VM can only be added to a virtual machine scale set at creation time. An existing VM cannot be added to a virtual machine scale set. <br><br>This property cannot exist along with a non-null properties.availabilitySet reference. <br><br>Minimum api-version: 2019-03-01
:param pulumi.Input[str] vm_name: The name of the virtual machine.
:param pulumi.Input[Sequence[pulumi.Input[str]]] zones: The virtual machine zones.
"""
pulumi.set(__self__, "resource_group_name", resource_group_name)
if additional_capabilities is not None:
pulumi.set(__self__, "additional_capabilities", additional_capabilities)
if availability_set is not None:
pulumi.set(__self__, "availability_set", availability_set)
if billing_profile is not None:
pulumi.set(__self__, "billing_profile", billing_profile)
if diagnostics_profile is not None:
pulumi.set(__self__, "diagnostics_profile", diagnostics_profile)
if eviction_policy is not None:
pulumi.set(__self__, "eviction_policy", eviction_policy)
if extended_location is not None:
pulumi.set(__self__, "extended_location", extended_location)
if extensions_time_budget is not None:
pulumi.set(__self__, "extensions_time_budget", extensions_time_budget)
if hardware_profile is not None:
pulumi.set(__self__, "hardware_profile", hardware_profile)
if host is not None:
pulumi.set(__self__, "host", host)
if host_group is not None:
pulumi.set(__self__, "host_group", host_group)
if identity is not None:
pulumi.set(__self__, "identity", identity)
if license_type is not None:
pulumi.set(__self__, "license_type", license_type)
if location is not None:
pulumi.set(__self__, "location", location)
if network_profile is not None:
pulumi.set(__self__, "network_profile", network_profile)
if os_profile is not None:
pulumi.set(__self__, "os_profile", os_profile)
if plan is not None:
pulumi.set(__self__, "plan", plan)
if platform_fault_domain is not None:
pulumi.set(__self__, "platform_fault_domain", platform_fault_domain)
if priority is not None:
pulumi.set(__self__, "priority", priority)
if proximity_placement_group is not None:
pulumi.set(__self__, "proximity_placement_group", proximity_placement_group)
if scheduled_events_profile is not None:
pulumi.set(__self__, "scheduled_events_profile", scheduled_events_profile)
if security_profile is not None:
pulumi.set(__self__, "security_profile", security_profile)
if storage_profile is not None:
pulumi.set(__self__, "storage_profile", storage_profile)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if user_data is not None:
pulumi.set(__self__, "user_data", user_data)
if virtual_machine_scale_set is not None:
pulumi.set(__self__, "virtual_machine_scale_set", virtual_machine_scale_set)
if vm_name is not None:
pulumi.set(__self__, "vm_name", vm_name)
if zones is not None:
pulumi.set(__self__, "zones", zones)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="additionalCapabilities")
def additional_capabilities(self) -> Optional[pulumi.Input['AdditionalCapabilitiesArgs']]:
"""
Specifies additional capabilities enabled or disabled on the virtual machine.
"""
return pulumi.get(self, "additional_capabilities")
@additional_capabilities.setter
def additional_capabilities(self, value: Optional[pulumi.Input['AdditionalCapabilitiesArgs']]):
pulumi.set(self, "additional_capabilities", value)
@property
@pulumi.getter(name="availabilitySet")
def availability_set(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
Specifies information about the availability set that the virtual machine should be assigned to. Virtual machines specified in the same availability set are allocated to different nodes to maximize availability. For more information about availability sets, see [Availability sets overview](https://docs.microsoft.com/azure/virtual-machines/availability-set-overview). <br><br> For more information on Azure planned maintenance, see [Maintenance and updates for Virtual Machines in Azure](https://docs.microsoft.com/azure/virtual-machines/maintenance-and-updates) <br><br> Currently, a VM can only be added to an availability set at creation time. The availability set to which the VM is being added should be under the same resource group as the availability set resource. An existing VM cannot be added to an availability set. <br><br>This property cannot exist along with a non-null properties.virtualMachineScaleSet reference.
"""
return pulumi.get(self, "availability_set")
@availability_set.setter
def availability_set(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "availability_set", value)
@property
@pulumi.getter(name="billingProfile")
def billing_profile(self) -> Optional[pulumi.Input['BillingProfileArgs']]:
"""
Specifies the billing-related details of an Azure Spot virtual machine. <br><br>Minimum api-version: 2019-03-01.
"""
return pulumi.get(self, "billing_profile")
@billing_profile.setter
def billing_profile(self, value: Optional[pulumi.Input['BillingProfileArgs']]):
pulumi.set(self, "billing_profile", value)
@property
@pulumi.getter(name="diagnosticsProfile")
def diagnostics_profile(self) -> Optional[pulumi.Input['DiagnosticsProfileArgs']]:
"""
Specifies the boot diagnostic settings state. <br><br>Minimum api-version: 2015-06-15.
"""
return pulumi.get(self, "diagnostics_profile")
@diagnostics_profile.setter
def diagnostics_profile(self, value: Optional[pulumi.Input['DiagnosticsProfileArgs']]):
pulumi.set(self, "diagnostics_profile", value)
@property
@pulumi.getter(name="evictionPolicy")
def eviction_policy(self) -> Optional[pulumi.Input[Union[str, 'VirtualMachineEvictionPolicyTypes']]]:
"""
Specifies the eviction policy for the Azure Spot virtual machine and Azure Spot scale set. <br><br>For Azure Spot virtual machines, both 'Deallocate' and 'Delete' are supported and the minimum api-version is 2019-03-01. <br><br>For Azure Spot scale sets, both 'Deallocate' and 'Delete' are supported and the minimum api-version is 2017-10-30-preview.
"""
return pulumi.get(self, "eviction_policy")
@eviction_policy.setter
def eviction_policy(self, value: Optional[pulumi.Input[Union[str, 'VirtualMachineEvictionPolicyTypes']]]):
pulumi.set(self, "eviction_policy", value)
@property
@pulumi.getter(name="extendedLocation")
def extended_location(self) -> Optional[pulumi.Input['ExtendedLocationArgs']]:
"""
The extended location of the Virtual Machine.
"""
return pulumi.get(self, "extended_location")
@extended_location.setter
def extended_location(self, value: Optional[pulumi.Input['ExtendedLocationArgs']]):
pulumi.set(self, "extended_location", value)
@property
@pulumi.getter(name="extensionsTimeBudget")
def extensions_time_budget(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the time allotted for all extensions to start. The time duration should be between 15 minutes and 120 minutes (inclusive) and should be specified in ISO 8601 format. The default value is 90 minutes (PT1H30M). <br><br> Minimum api-version: 2020-06-01
"""
return pulumi.get(self, "extensions_time_budget")
@extensions_time_budget.setter
def extensions_time_budget(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "extensions_time_budget", value)
@property
@pulumi.getter(name="hardwareProfile")
def hardware_profile(self) -> Optional[pulumi.Input['HardwareProfileArgs']]:
"""
Specifies the hardware settings for the virtual machine.
"""
return pulumi.get(self, "hardware_profile")
@hardware_profile.setter
def hardware_profile(self, value: Optional[pulumi.Input['HardwareProfileArgs']]):
pulumi.set(self, "hardware_profile", value)
@property
@pulumi.getter
def host(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
Specifies information about the dedicated host that the virtual machine resides in. <br><br>Minimum api-version: 2018-10-01.
"""
return pulumi.get(self, "host")
@host.setter
def host(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "host", value)
@property
@pulumi.getter(name="hostGroup")
def host_group(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
Specifies information about the dedicated host group that the virtual machine resides in. <br><br>Minimum api-version: 2020-06-01. <br><br>NOTE: User cannot specify both host and hostGroup properties.
"""
return pulumi.get(self, "host_group")
@host_group.setter
def host_group(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "host_group", value)
@property
@pulumi.getter
def identity(self) -> Optional[pulumi.Input['VirtualMachineIdentityArgs']]:
"""
The identity of the virtual machine, if configured.
"""
return pulumi.get(self, "identity")
@identity.setter
def identity(self, value: Optional[pulumi.Input['VirtualMachineIdentityArgs']]):
pulumi.set(self, "identity", value)
@property
@pulumi.getter(name="licenseType")
def license_type(self) -> Optional[pulumi.Input[str]]:
"""
Specifies that the image or disk that is being used was licensed on-premises. <br><br> Possible values for Windows Server operating system are: <br><br> Windows_Client <br><br> Windows_Server <br><br> Possible values for Linux Server operating system are: <br><br> RHEL_BYOS (for RHEL) <br><br> SLES_BYOS (for SUSE) <br><br> For more information, see [Azure Hybrid Use Benefit for Windows Server](https://docs.microsoft.com/azure/virtual-machines/windows/hybrid-use-benefit-licensing) <br><br> [Azure Hybrid Use Benefit for Linux Server](https://docs.microsoft.com/azure/virtual-machines/linux/azure-hybrid-benefit-linux) <br><br> Minimum api-version: 2015-06-15
"""
return pulumi.get(self, "license_type")
@license_type.setter
def license_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "license_type", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Resource location
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter(name="networkProfile")
def network_profile(self) -> Optional[pulumi.Input['NetworkProfileArgs']]:
"""
Specifies the network interfaces of the virtual machine.
"""
return pulumi.get(self, "network_profile")
@network_profile.setter
def network_profile(self, value: Optional[pulumi.Input['NetworkProfileArgs']]):
pulumi.set(self, "network_profile", value)
@property
@pulumi.getter(name="osProfile")
def os_profile(self) -> Optional[pulumi.Input['OSProfileArgs']]:
"""
Specifies the operating system settings used while creating the virtual machine. Some of the settings cannot be changed once the VM is provisioned.
"""
return pulumi.get(self, "os_profile")
@os_profile.setter
def os_profile(self, value: Optional[pulumi.Input['OSProfileArgs']]):
pulumi.set(self, "os_profile", value)
@property
@pulumi.getter
def plan(self) -> Optional[pulumi.Input['PlanArgs']]:
"""
Specifies information about the marketplace image used to create the virtual machine. This element is only used for marketplace images. Before you can use a marketplace image from an API, you must enable the image for programmatic use. In the Azure portal, find the marketplace image that you want to use and then click **Want to deploy programmatically, Get Started ->**. Enter any required information and then click **Save**.
"""
return pulumi.get(self, "plan")
@plan.setter
def plan(self, value: Optional[pulumi.Input['PlanArgs']]):
pulumi.set(self, "plan", value)
@property
@pulumi.getter(name="platformFaultDomain")
def platform_fault_domain(self) -> Optional[pulumi.Input[int]]:
"""
Specifies the scale set logical fault domain into which the Virtual Machine will be created. By default, the Virtual Machine will be automatically assigned to a fault domain that best maintains balance across available fault domains.<br><li>This is applicable only if the 'virtualMachineScaleSet' property of this Virtual Machine is set.<li>The Virtual Machine Scale Set that is referenced must have 'platformFaultDomainCount' > 1.<li>This property cannot be updated once the Virtual Machine is created.<li>Fault domain assignment can be viewed in the Virtual Machine Instance View.<br><br>Minimum api-version: 2020-12-01
"""
return pulumi.get(self, "platform_fault_domain")
@platform_fault_domain.setter
def platform_fault_domain(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "platform_fault_domain", value)
@property
@pulumi.getter
def priority(self) -> Optional[pulumi.Input[Union[str, 'VirtualMachinePriorityTypes']]]:
"""
Specifies the priority for the virtual machine. <br><br>Minimum api-version: 2019-03-01
"""
return pulumi.get(self, "priority")
@priority.setter
def priority(self, value: Optional[pulumi.Input[Union[str, 'VirtualMachinePriorityTypes']]]):
pulumi.set(self, "priority", value)
@property
@pulumi.getter(name="proximityPlacementGroup")
def proximity_placement_group(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
Specifies information about the proximity placement group that the virtual machine should be assigned to. <br><br>Minimum api-version: 2018-04-01.
"""
return pulumi.get(self, "proximity_placement_group")
@proximity_placement_group.setter
def proximity_placement_group(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "proximity_placement_group", value)
@property
@pulumi.getter(name="scheduledEventsProfile")
def scheduled_events_profile(self) -> Optional[pulumi.Input['ScheduledEventsProfileArgs']]:
"""
Specifies Scheduled Event related configurations.
"""
return pulumi.get(self, "scheduled_events_profile")
@scheduled_events_profile.setter
def scheduled_events_profile(self, value: Optional[pulumi.Input['ScheduledEventsProfileArgs']]):
pulumi.set(self, "scheduled_events_profile", value)
@property
@pulumi.getter(name="securityProfile")
def security_profile(self) -> Optional[pulumi.Input['SecurityProfileArgs']]:
"""
Specifies the Security related profile settings for the virtual machine.
"""
return pulumi.get(self, "security_profile")
@security_profile.setter
def security_profile(self, value: Optional[pulumi.Input['SecurityProfileArgs']]):
pulumi.set(self, "security_profile", value)
@property
@pulumi.getter(name="storageProfile")
def storage_profile(self) -> Optional[pulumi.Input['StorageProfileArgs']]:
"""
Specifies the storage settings for the virtual machine disks.
"""
return pulumi.get(self, "storage_profile")
@storage_profile.setter
def storage_profile(self, value: Optional[pulumi.Input['StorageProfileArgs']]):
pulumi.set(self, "storage_profile", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Resource tags
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="userData")
def user_data(self) -> Optional[pulumi.Input[str]]:
"""
UserData for the VM, which must be base-64 encoded. Customer should not pass any secrets in here. <br><br>Minimum api-version: 2021-03-01
"""
return pulumi.get(self, "user_data")
@user_data.setter
def user_data(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "user_data", value)
@property
@pulumi.getter(name="virtualMachineScaleSet")
def virtual_machine_scale_set(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
Specifies information about the virtual machine scale set that the virtual machine should be assigned to. Virtual machines specified in the same virtual machine scale set are allocated to different nodes to maximize availability. Currently, a VM can only be added to a virtual machine scale set at creation time. An existing VM cannot be added to a virtual machine scale set. <br><br>This property cannot exist along with a non-null properties.availabilitySet reference. <br><br>Minimum api-version: 2019-03-01
"""
return pulumi.get(self, "virtual_machine_scale_set")
@virtual_machine_scale_set.setter
def virtual_machine_scale_set(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "virtual_machine_scale_set", value)
@property
@pulumi.getter(name="vmName")
def vm_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the virtual machine.
"""
return pulumi.get(self, "vm_name")
@vm_name.setter
def vm_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "vm_name", value)
@property
@pulumi.getter
def zones(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The virtual machine zones.
"""
return pulumi.get(self, "zones")
@zones.setter
def zones(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "zones", value)
class VirtualMachine(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
additional_capabilities: Optional[pulumi.Input[pulumi.InputType['AdditionalCapabilitiesArgs']]] = None,
availability_set: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None,
billing_profile: Optional[pulumi.Input[pulumi.InputType['BillingProfileArgs']]] = None,
diagnostics_profile: Optional[pulumi.Input[pulumi.InputType['DiagnosticsProfileArgs']]] = None,
eviction_policy: Optional[pulumi.Input[Union[str, 'VirtualMachineEvictionPolicyTypes']]] = None,
extended_location: Optional[pulumi.Input[pulumi.InputType['ExtendedLocationArgs']]] = None,
extensions_time_budget: Optional[pulumi.Input[str]] = None,
hardware_profile: Optional[pulumi.Input[pulumi.InputType['HardwareProfileArgs']]] = None,
host: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None,
host_group: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None,
identity: Optional[pulumi.Input[pulumi.InputType['VirtualMachineIdentityArgs']]] = None,
license_type: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
network_profile: Optional[pulumi.Input[pulumi.InputType['NetworkProfileArgs']]] = None,
os_profile: Optional[pulumi.Input[pulumi.InputType['OSProfileArgs']]] = None,
plan: Optional[pulumi.Input[pulumi.InputType['PlanArgs']]] = None,
platform_fault_domain: Optional[pulumi.Input[int]] = None,
priority: Optional[pulumi.Input[Union[str, 'VirtualMachinePriorityTypes']]] = None,
proximity_placement_group: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
scheduled_events_profile: Optional[pulumi.Input[pulumi.InputType['ScheduledEventsProfileArgs']]] = None,
security_profile: Optional[pulumi.Input[pulumi.InputType['SecurityProfileArgs']]] = None,
storage_profile: Optional[pulumi.Input[pulumi.InputType['StorageProfileArgs']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
user_data: Optional[pulumi.Input[str]] = None,
virtual_machine_scale_set: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None,
vm_name: Optional[pulumi.Input[str]] = None,
zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
__props__=None):
"""
Describes a Virtual Machine.
API Version: 2021-03-01.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[pulumi.InputType['AdditionalCapabilitiesArgs']] additional_capabilities: Specifies additional capabilities enabled or disabled on the virtual machine.
:param pulumi.Input[pulumi.InputType['SubResourceArgs']] availability_set: Specifies information about the availability set that the virtual machine should be assigned to. Virtual machines specified in the same availability set are allocated to different nodes to maximize availability. For more information about availability sets, see [Availability sets overview](https://docs.microsoft.com/azure/virtual-machines/availability-set-overview). <br><br> For more information on Azure planned maintenance, see [Maintenance and updates for Virtual Machines in Azure](https://docs.microsoft.com/azure/virtual-machines/maintenance-and-updates) <br><br> Currently, a VM can only be added to an availability set at creation time. The availability set to which the VM is being added should be under the same resource group as the availability set resource. An existing VM cannot be added to an availability set. <br><br>This property cannot exist along with a non-null properties.virtualMachineScaleSet reference.
:param pulumi.Input[pulumi.InputType['BillingProfileArgs']] billing_profile: Specifies the billing-related details of an Azure Spot virtual machine. <br><br>Minimum api-version: 2019-03-01.
:param pulumi.Input[pulumi.InputType['DiagnosticsProfileArgs']] diagnostics_profile: Specifies the boot diagnostic settings state. <br><br>Minimum api-version: 2015-06-15.
:param pulumi.Input[Union[str, 'VirtualMachineEvictionPolicyTypes']] eviction_policy: Specifies the eviction policy for the Azure Spot virtual machine and Azure Spot scale set. <br><br>For Azure Spot virtual machines, both 'Deallocate' and 'Delete' are supported and the minimum api-version is 2019-03-01. <br><br>For Azure Spot scale sets, both 'Deallocate' and 'Delete' are supported and the minimum api-version is 2017-10-30-preview.
:param pulumi.Input[pulumi.InputType['ExtendedLocationArgs']] extended_location: The extended location of the Virtual Machine.
:param pulumi.Input[str] extensions_time_budget: Specifies the time allotted for all extensions to start. The time duration should be between 15 minutes and 120 minutes (inclusive) and should be specified in ISO 8601 format. The default value is 90 minutes (PT1H30M). <br><br> Minimum api-version: 2020-06-01
:param pulumi.Input[pulumi.InputType['HardwareProfileArgs']] hardware_profile: Specifies the hardware settings for the virtual machine.
:param pulumi.Input[pulumi.InputType['SubResourceArgs']] host: Specifies information about the dedicated host that the virtual machine resides in. <br><br>Minimum api-version: 2018-10-01.
:param pulumi.Input[pulumi.InputType['SubResourceArgs']] host_group: Specifies information about the dedicated host group that the virtual machine resides in. <br><br>Minimum api-version: 2020-06-01. <br><br>NOTE: User cannot specify both host and hostGroup properties.
:param pulumi.Input[pulumi.InputType['VirtualMachineIdentityArgs']] identity: The identity of the virtual machine, if configured.
:param pulumi.Input[str] license_type: Specifies that the image or disk that is being used was licensed on-premises. <br><br> Possible values for Windows Server operating system are: <br><br> Windows_Client <br><br> Windows_Server <br><br> Possible values for Linux Server operating system are: <br><br> RHEL_BYOS (for RHEL) <br><br> SLES_BYOS (for SUSE) <br><br> For more information, see [Azure Hybrid Use Benefit for Windows Server](https://docs.microsoft.com/azure/virtual-machines/windows/hybrid-use-benefit-licensing) <br><br> [Azure Hybrid Use Benefit for Linux Server](https://docs.microsoft.com/azure/virtual-machines/linux/azure-hybrid-benefit-linux) <br><br> Minimum api-version: 2015-06-15
:param pulumi.Input[str] location: Resource location
:param pulumi.Input[pulumi.InputType['NetworkProfileArgs']] network_profile: Specifies the network interfaces of the virtual machine.
:param pulumi.Input[pulumi.InputType['OSProfileArgs']] os_profile: Specifies the operating system settings used while creating the virtual machine. Some of the settings cannot be changed once the VM is provisioned.
:param pulumi.Input[pulumi.InputType['PlanArgs']] plan: Specifies information about the marketplace image used to create the virtual machine. This element is only used for marketplace images. Before you can use a marketplace image from an API, you must enable the image for programmatic use. In the Azure portal, find the marketplace image that you want to use and then click **Want to deploy programmatically, Get Started ->**. Enter any required information and then click **Save**.
:param pulumi.Input[int] platform_fault_domain: Specifies the scale set logical fault domain into which the Virtual Machine will be created. By default, the Virtual Machine will be automatically assigned to a fault domain that best maintains balance across available fault domains.<br><li>This is applicable only if the 'virtualMachineScaleSet' property of this Virtual Machine is set.<li>The Virtual Machine Scale Set that is referenced must have 'platformFaultDomainCount' > 1.<li>This property cannot be updated once the Virtual Machine is created.<li>Fault domain assignment can be viewed in the Virtual Machine Instance View.<br><br>Minimum api-version: 2020-12-01
:param pulumi.Input[Union[str, 'VirtualMachinePriorityTypes']] priority: Specifies the priority for the virtual machine. <br><br>Minimum api-version: 2019-03-01
:param pulumi.Input[pulumi.InputType['SubResourceArgs']] proximity_placement_group: Specifies information about the proximity placement group that the virtual machine should be assigned to. <br><br>Minimum api-version: 2018-04-01.
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[pulumi.InputType['ScheduledEventsProfileArgs']] scheduled_events_profile: Specifies Scheduled Event related configurations.
:param pulumi.Input[pulumi.InputType['SecurityProfileArgs']] security_profile: Specifies the Security related profile settings for the virtual machine.
:param pulumi.Input[pulumi.InputType['StorageProfileArgs']] storage_profile: Specifies the storage settings for the virtual machine disks.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags
:param pulumi.Input[str] user_data: UserData for the VM, which must be base-64 encoded. Customer should not pass any secrets in here. <br><br>Minimum api-version: 2021-03-01
:param pulumi.Input[pulumi.InputType['SubResourceArgs']] virtual_machine_scale_set: Specifies information about the virtual machine scale set that the virtual machine should be assigned to. Virtual machines specified in the same virtual machine scale set are allocated to different nodes to maximize availability. Currently, a VM can only be added to a virtual machine scale set at creation time. An existing VM cannot be added to a virtual machine scale set. <br><br>This property cannot exist along with a non-null properties.availabilitySet reference. <br><br>Minimum api-version: 2019-03-01
:param pulumi.Input[str] vm_name: The name of the virtual machine.
:param pulumi.Input[Sequence[pulumi.Input[str]]] zones: The virtual machine zones.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: VirtualMachineArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Describes a Virtual Machine.
API Version: 2021-03-01.
:param str resource_name: The name of the resource.
:param VirtualMachineArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(VirtualMachineArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
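    # The two @overload signatures above both land here: get_resource_args_opts
    # returns a populated VirtualMachineArgs when the caller passed an args
    # object, and None when plain keyword arguments were used, so both call
    # forms below are equivalent (a sketch; names are hypothetical):
    #
    #   VirtualMachine("vm", VirtualMachineArgs(resource_group_name="example-rg"))
    #   VirtualMachine("vm", resource_group_name="example-rg")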
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
additional_capabilities: Optional[pulumi.Input[pulumi.InputType['AdditionalCapabilitiesArgs']]] = None,
availability_set: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None,
billing_profile: Optional[pulumi.Input[pulumi.InputType['BillingProfileArgs']]] = None,
diagnostics_profile: Optional[pulumi.Input[pulumi.InputType['DiagnosticsProfileArgs']]] = None,
eviction_policy: Optional[pulumi.Input[Union[str, 'VirtualMachineEvictionPolicyTypes']]] = None,
extended_location: Optional[pulumi.Input[pulumi.InputType['ExtendedLocationArgs']]] = None,
extensions_time_budget: Optional[pulumi.Input[str]] = None,
hardware_profile: Optional[pulumi.Input[pulumi.InputType['HardwareProfileArgs']]] = None,
host: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None,
host_group: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None,
identity: Optional[pulumi.Input[pulumi.InputType['VirtualMachineIdentityArgs']]] = None,
license_type: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
network_profile: Optional[pulumi.Input[pulumi.InputType['NetworkProfileArgs']]] = None,
os_profile: Optional[pulumi.Input[pulumi.InputType['OSProfileArgs']]] = None,
plan: Optional[pulumi.Input[pulumi.InputType['PlanArgs']]] = None,
platform_fault_domain: Optional[pulumi.Input[int]] = None,
priority: Optional[pulumi.Input[Union[str, 'VirtualMachinePriorityTypes']]] = None,
proximity_placement_group: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
scheduled_events_profile: Optional[pulumi.Input[pulumi.InputType['ScheduledEventsProfileArgs']]] = None,
security_profile: Optional[pulumi.Input[pulumi.InputType['SecurityProfileArgs']]] = None,
storage_profile: Optional[pulumi.Input[pulumi.InputType['StorageProfileArgs']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
user_data: Optional[pulumi.Input[str]] = None,
virtual_machine_scale_set: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None,
vm_name: Optional[pulumi.Input[str]] = None,
zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = VirtualMachineArgs.__new__(VirtualMachineArgs)
__props__.__dict__["additional_capabilities"] = additional_capabilities
__props__.__dict__["availability_set"] = availability_set
__props__.__dict__["billing_profile"] = billing_profile
__props__.__dict__["diagnostics_profile"] = diagnostics_profile
__props__.__dict__["eviction_policy"] = eviction_policy
__props__.__dict__["extended_location"] = extended_location
__props__.__dict__["extensions_time_budget"] = extensions_time_budget
__props__.__dict__["hardware_profile"] = hardware_profile
__props__.__dict__["host"] = host
__props__.__dict__["host_group"] = host_group
__props__.__dict__["identity"] = identity
__props__.__dict__["license_type"] = license_type
__props__.__dict__["location"] = location
__props__.__dict__["network_profile"] = network_profile
__props__.__dict__["os_profile"] = os_profile
__props__.__dict__["plan"] = plan
__props__.__dict__["platform_fault_domain"] = platform_fault_domain
__props__.__dict__["priority"] = priority
__props__.__dict__["proximity_placement_group"] = proximity_placement_group
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["scheduled_events_profile"] = scheduled_events_profile
__props__.__dict__["security_profile"] = security_profile
__props__.__dict__["storage_profile"] = storage_profile
__props__.__dict__["tags"] = tags
__props__.__dict__["user_data"] = user_data
__props__.__dict__["virtual_machine_scale_set"] = virtual_machine_scale_set
__props__.__dict__["vm_name"] = vm_name
__props__.__dict__["zones"] = zones
__props__.__dict__["instance_view"] = None
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["resources"] = None
__props__.__dict__["type"] = None
__props__.__dict__["vm_id"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-native:compute/v20150615:VirtualMachine"), pulumi.Alias(type_="azure-native:compute/v20160330:VirtualMachine"), pulumi.Alias(type_="azure-native:compute/v20160430preview:VirtualMachine"), pulumi.Alias(type_="azure-native:compute/v20170330:VirtualMachine"), pulumi.Alias(type_="azure-native:compute/v20171201:VirtualMachine"), pulumi.Alias(type_="azure-native:compute/v20180401:VirtualMachine"), pulumi.Alias(type_="azure-native:compute/v20180601:VirtualMachine"), pulumi.Alias(type_="azure-native:compute/v20181001:VirtualMachine"), pulumi.Alias(type_="azure-native:compute/v20190301:VirtualMachine"), pulumi.Alias(type_="azure-native:compute/v20190701:VirtualMachine"), pulumi.Alias(type_="azure-native:compute/v20191201:VirtualMachine"), pulumi.Alias(type_="azure-native:compute/v20200601:VirtualMachine"), pulumi.Alias(type_="azure-native:compute/v20201201:VirtualMachine"), pulumi.Alias(type_="azure-native:compute/v20210301:VirtualMachine"), pulumi.Alias(type_="azure-native:compute/v20210401:VirtualMachine"), pulumi.Alias(type_="azure-native:compute/v20210701:VirtualMachine")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
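        # The aliases merged above map each versioned type token (for example
        # azure-native:compute/v20210301:VirtualMachine) onto this
        # default-version resource, so state created under an older API
        # version is adopted rather than replaced on update.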
super(VirtualMachine, __self__).__init__(
'azure-native:compute:VirtualMachine',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'VirtualMachine':
"""
Get an existing VirtualMachine resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = VirtualMachineArgs.__new__(VirtualMachineArgs)
__props__.__dict__["additional_capabilities"] = None
__props__.__dict__["availability_set"] = None
__props__.__dict__["billing_profile"] = None
__props__.__dict__["diagnostics_profile"] = None
__props__.__dict__["eviction_policy"] = None
__props__.__dict__["extended_location"] = None
__props__.__dict__["extensions_time_budget"] = None
__props__.__dict__["hardware_profile"] = None
__props__.__dict__["host"] = None
__props__.__dict__["host_group"] = None
__props__.__dict__["identity"] = None
__props__.__dict__["instance_view"] = None
__props__.__dict__["license_type"] = None
__props__.__dict__["location"] = None
__props__.__dict__["name"] = None
__props__.__dict__["network_profile"] = None
__props__.__dict__["os_profile"] = None
__props__.__dict__["plan"] = None
__props__.__dict__["platform_fault_domain"] = None
__props__.__dict__["priority"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["proximity_placement_group"] = None
__props__.__dict__["resources"] = None
__props__.__dict__["scheduled_events_profile"] = None
__props__.__dict__["security_profile"] = None
__props__.__dict__["storage_profile"] = None
__props__.__dict__["tags"] = None
__props__.__dict__["type"] = None
__props__.__dict__["user_data"] = None
__props__.__dict__["virtual_machine_scale_set"] = None
__props__.__dict__["vm_id"] = None
__props__.__dict__["zones"] = None
return VirtualMachine(resource_name, opts=opts, __props__=__props__)
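    # Import sketch (hand-written; the ID below is a placeholder for a real
    # ARM resource ID):
    #
    #   vm = VirtualMachine.get(
    #       "imported-vm",
    #       id="/subscriptions/<sub-id>/resourceGroups/<rg>/providers/Microsoft.Compute/virtualMachines/<vm-name>",
    #   )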
@property
@pulumi.getter(name="additionalCapabilities")
def additional_capabilities(self) -> pulumi.Output[Optional['outputs.AdditionalCapabilitiesResponse']]:
"""
Specifies additional capabilities enabled or disabled on the virtual machine.
"""
return pulumi.get(self, "additional_capabilities")
@property
@pulumi.getter(name="availabilitySet")
def availability_set(self) -> pulumi.Output[Optional['outputs.SubResourceResponse']]:
"""
Specifies information about the availability set that the virtual machine should be assigned to. Virtual machines specified in the same availability set are allocated to different nodes to maximize availability. For more information about availability sets, see [Availability sets overview](https://docs.microsoft.com/azure/virtual-machines/availability-set-overview). <br><br> For more information on Azure planned maintenance, see [Maintenance and updates for Virtual Machines in Azure](https://docs.microsoft.com/azure/virtual-machines/maintenance-and-updates) <br><br> Currently, a VM can only be added to an availability set at creation time. The availability set to which the VM is being added should be under the same resource group as the availability set resource. An existing VM cannot be added to an availability set. <br><br>This property cannot exist along with a non-null properties.virtualMachineScaleSet reference.
"""
return pulumi.get(self, "availability_set")
@property
@pulumi.getter(name="billingProfile")
def billing_profile(self) -> pulumi.Output[Optional['outputs.BillingProfileResponse']]:
"""
Specifies the billing-related details of an Azure Spot virtual machine. <br><br>Minimum api-version: 2019-03-01.
"""
return pulumi.get(self, "billing_profile")
@property
@pulumi.getter(name="diagnosticsProfile")
def diagnostics_profile(self) -> pulumi.Output[Optional['outputs.DiagnosticsProfileResponse']]:
"""
Specifies the boot diagnostic settings state. <br><br>Minimum api-version: 2015-06-15.
"""
return pulumi.get(self, "diagnostics_profile")
@property
@pulumi.getter(name="evictionPolicy")
def eviction_policy(self) -> pulumi.Output[Optional[str]]:
"""
Specifies the eviction policy for the Azure Spot virtual machine and Azure Spot scale set. <br><br>For Azure Spot virtual machines, both 'Deallocate' and 'Delete' are supported and the minimum api-version is 2019-03-01. <br><br>For Azure Spot scale sets, both 'Deallocate' and 'Delete' are supported and the minimum api-version is 2017-10-30-preview.
"""
return pulumi.get(self, "eviction_policy")
@property
@pulumi.getter(name="extendedLocation")
def extended_location(self) -> pulumi.Output[Optional['outputs.ExtendedLocationResponse']]:
"""
The extended location of the Virtual Machine.
"""
return pulumi.get(self, "extended_location")
@property
@pulumi.getter(name="extensionsTimeBudget")
def extensions_time_budget(self) -> pulumi.Output[Optional[str]]:
"""
Specifies the time allotted for all extensions to start. The time duration should be between 15 minutes and 120 minutes (inclusive) and should be specified in ISO 8601 format. The default value is 90 minutes (PT1H30M). <br><br> Minimum api-version: 2020-06-01
"""
return pulumi.get(self, "extensions_time_budget")
@property
@pulumi.getter(name="hardwareProfile")
def hardware_profile(self) -> pulumi.Output[Optional['outputs.HardwareProfileResponse']]:
"""
Specifies the hardware settings for the virtual machine.
"""
return pulumi.get(self, "hardware_profile")
@property
@pulumi.getter
def host(self) -> pulumi.Output[Optional['outputs.SubResourceResponse']]:
"""
Specifies information about the dedicated host that the virtual machine resides in. <br><br>Minimum api-version: 2018-10-01.
"""
return pulumi.get(self, "host")
@property
@pulumi.getter(name="hostGroup")
def host_group(self) -> pulumi.Output[Optional['outputs.SubResourceResponse']]:
"""
Specifies information about the dedicated host group that the virtual machine resides in. <br><br>Minimum api-version: 2020-06-01. <br><br>NOTE: User cannot specify both host and hostGroup properties.
"""
return pulumi.get(self, "host_group")
@property
@pulumi.getter
def identity(self) -> pulumi.Output[Optional['outputs.VirtualMachineIdentityResponse']]:
"""
The identity of the virtual machine, if configured.
"""
return pulumi.get(self, "identity")
@property
@pulumi.getter(name="instanceView")
def instance_view(self) -> pulumi.Output['outputs.VirtualMachineInstanceViewResponse']:
"""
The virtual machine instance view.
"""
return pulumi.get(self, "instance_view")
@property
@pulumi.getter(name="licenseType")
def license_type(self) -> pulumi.Output[Optional[str]]:
"""
Specifies that the image or disk that is being used was licensed on-premises. <br><br> Possible values for Windows Server operating system are: <br><br> Windows_Client <br><br> Windows_Server <br><br> Possible values for Linux Server operating system are: <br><br> RHEL_BYOS (for RHEL) <br><br> SLES_BYOS (for SUSE) <br><br> For more information, see [Azure Hybrid Use Benefit for Windows Server](https://docs.microsoft.com/azure/virtual-machines/windows/hybrid-use-benefit-licensing) <br><br> [Azure Hybrid Use Benefit for Linux Server](https://docs.microsoft.com/azure/virtual-machines/linux/azure-hybrid-benefit-linux) <br><br> Minimum api-version: 2015-06-15
"""
return pulumi.get(self, "license_type")
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
"""
Resource location
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="networkProfile")
def network_profile(self) -> pulumi.Output[Optional['outputs.NetworkProfileResponse']]:
"""
Specifies the network interfaces of the virtual machine.
"""
return pulumi.get(self, "network_profile")
@property
@pulumi.getter(name="osProfile")
def os_profile(self) -> pulumi.Output[Optional['outputs.OSProfileResponse']]:
"""
Specifies the operating system settings used while creating the virtual machine. Some of the settings cannot be changed once the VM is provisioned.
"""
return pulumi.get(self, "os_profile")
@property
@pulumi.getter
def plan(self) -> pulumi.Output[Optional['outputs.PlanResponse']]:
"""
Specifies information about the marketplace image used to create the virtual machine. This element is only used for marketplace images. Before you can use a marketplace image from an API, you must enable the image for programmatic use. In the Azure portal, find the marketplace image that you want to use and then click **Want to deploy programmatically, Get Started ->**. Enter any required information and then click **Save**.
"""
return pulumi.get(self, "plan")
@property
@pulumi.getter(name="platformFaultDomain")
def platform_fault_domain(self) -> pulumi.Output[Optional[int]]:
"""
Specifies the scale set logical fault domain into which the Virtual Machine will be created. By default, the Virtual Machine will be automatically assigned to a fault domain that best maintains balance across available fault domains.<br><li>This is applicable only if the 'virtualMachineScaleSet' property of this Virtual Machine is set.<li>The Virtual Machine Scale Set that is referenced must have 'platformFaultDomainCount' > 1.<li>This property cannot be updated once the Virtual Machine is created.<li>Fault domain assignment can be viewed in the Virtual Machine Instance View.<br><br>Minimum api-version: 2020-12-01
"""
return pulumi.get(self, "platform_fault_domain")
@property
@pulumi.getter
def priority(self) -> pulumi.Output[Optional[str]]:
"""
Specifies the priority for the virtual machine. <br><br>Minimum api-version: 2019-03-01
"""
return pulumi.get(self, "priority")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[str]:
"""
The provisioning state, which only appears in the response.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="proximityPlacementGroup")
def proximity_placement_group(self) -> pulumi.Output[Optional['outputs.SubResourceResponse']]:
"""
Specifies information about the proximity placement group that the virtual machine should be assigned to. <br><br>Minimum api-version: 2018-04-01.
"""
return pulumi.get(self, "proximity_placement_group")
@property
@pulumi.getter
def resources(self) -> pulumi.Output[Sequence['outputs.VirtualMachineExtensionResponse']]:
"""
The virtual machine child extension resources.
"""
return pulumi.get(self, "resources")
@property
@pulumi.getter(name="scheduledEventsProfile")
def scheduled_events_profile(self) -> pulumi.Output[Optional['outputs.ScheduledEventsProfileResponse']]:
"""
Specifies Scheduled Event related configurations.
"""
return pulumi.get(self, "scheduled_events_profile")
@property
@pulumi.getter(name="securityProfile")
def security_profile(self) -> pulumi.Output[Optional['outputs.SecurityProfileResponse']]:
"""
Specifies the Security related profile settings for the virtual machine.
"""
return pulumi.get(self, "security_profile")
@property
@pulumi.getter(name="storageProfile")
def storage_profile(self) -> pulumi.Output[Optional['outputs.StorageProfileResponse']]:
"""
Specifies the storage settings for the virtual machine disks.
"""
return pulumi.get(self, "storage_profile")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Resource tags
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="userData")
def user_data(self) -> pulumi.Output[Optional[str]]:
"""
UserData for the VM, which must be base-64 encoded. Customer should not pass any secrets in here. <br><br>Minimum api-version: 2021-03-01
"""
return pulumi.get(self, "user_data")
@property
@pulumi.getter(name="virtualMachineScaleSet")
def virtual_machine_scale_set(self) -> pulumi.Output[Optional['outputs.SubResourceResponse']]:
"""
Specifies information about the virtual machine scale set that the virtual machine should be assigned to. Virtual machines specified in the same virtual machine scale set are allocated to different nodes to maximize availability. Currently, a VM can only be added to a virtual machine scale set at creation time. An existing VM cannot be added to a virtual machine scale set. <br><br>This property cannot exist along with a non-null properties.availabilitySet reference. <br><br>Minimum api-version: 2019-03-01
"""
return pulumi.get(self, "virtual_machine_scale_set")
@property
@pulumi.getter(name="vmId")
def vm_id(self) -> pulumi.Output[str]:
"""
Specifies the VM unique ID, which is a 128-bit identifier that is encoded and stored in the SMBIOS of all Azure IaaS VMs and can be read using platform BIOS commands.
"""
return pulumi.get(self, "vm_id")
@property
@pulumi.getter
def zones(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
The virtual machine zones.
"""
return pulumi.get(self, "zones")
# encoding: utf-8
# module System.Text calls itself Text
# from mscorlib,Version=4.0.0.0,Culture=neutral,PublicKeyToken=b77a5c561934e089,System,Version=4.0.0.0,Culture=neutral,PublicKeyToken=b77a5c561934e089
# by generator 1.145
""" NamespaceTracker represent a CLS namespace. """
# no imports
# no functions
# classes
class Encoding(object,ICloneable):
""" Represents a character encoding. """
def Clone(self):
"""
Clone(self: Encoding) -> object
When overridden in a derived class,creates a shallow copy of the current System.Text.Encoding
object.
Returns: A copy of the current System.Text.Encoding object.
"""
pass
@staticmethod
def Convert(srcEncoding,dstEncoding,bytes,index=None,count=None):
"""
Convert(srcEncoding: Encoding,dstEncoding: Encoding,bytes: Array[Byte],index: int,count: int) -> Array[Byte]
Converts a range of bytes in a byte array from one encoding to another.
srcEncoding: The encoding of the source array,bytes.
dstEncoding: The encoding of the output array.
bytes: The array of bytes to convert.
index: The index of the first element of bytes to convert.
count: The number of bytes to convert.
Returns: An array of type System.Byte containing the result of converting a range of bytes in bytes from
srcEncoding to dstEncoding.
Convert(srcEncoding: Encoding,dstEncoding: Encoding,bytes: Array[Byte]) -> Array[Byte]
Converts an entire byte array from one encoding to another.
srcEncoding: The encoding format of bytes.
dstEncoding: The target encoding format.
bytes: The bytes to convert.
Returns: An array of type System.Byte containing the results of converting bytes from srcEncoding to
dstEncoding.
"""
pass
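    # Usage sketch (hand-written; assumes the CLR is loaded via pythonnet and
    # that the standard Encoding.Unicode / Encoding.UTF8 static properties are
    # available, as they are on .NET):
    #
    #   utf16 = Encoding.Unicode.GetBytes("hello")
    #   utf8 = Encoding.Convert(Encoding.Unicode, Encoding.UTF8, utf16)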
def Equals(self,value):
"""
Equals(self: Encoding,value: object) -> bool
Determines whether the specified System.Object is equal to the current instance.
value: The System.Object to compare with the current instance.
Returns: true if value is an instance of System.Text.Encoding and is equal to the current instance;
otherwise,false.
"""
pass
def GetByteCount(self,*__args):
"""
GetByteCount(self: Encoding,chars: Array[Char],index: int,count: int) -> int
When overridden in a derived class,calculates the number of bytes produced by encoding a set of
characters from the specified character array.
chars: The character array containing the set of characters to encode.
index: The index of the first character to encode.
count: The number of characters to encode.
Returns: The number of bytes produced by encoding the specified characters.
GetByteCount(self: Encoding,chars: Char*,count: int) -> int
When overridden in a derived class,calculates the number of bytes produced by encoding a set of
characters starting at the specified character pointer.
chars: A pointer to the first character to encode.
count: The number of characters to encode.
Returns: The number of bytes produced by encoding the specified characters.
GetByteCount(self: Encoding,chars: Array[Char]) -> int
When overridden in a derived class,calculates the number of bytes produced by encoding all the
characters in the specified character array.
chars: The character array containing the characters to encode.
Returns: The number of bytes produced by encoding all the characters in the specified character array.
GetByteCount(self: Encoding,s: str) -> int
When overridden in a derived class,calculates the number of bytes produced by encoding the
characters in the specified string.
s: The string containing the set of characters to encode.
Returns: The number of bytes produced by encoding the specified characters.
"""
pass
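    # Sizing sketch: ask for the byte count first, then allocate the CLR array
    # and encode into it (Array and Byte come from the System namespace;
    # Array.CreateInstance is the standard CLR array factory):
    #
    #   s = "some text"
    #   buf = Array.CreateInstance(Byte, Encoding.UTF8.GetByteCount(s))
    #   Encoding.UTF8.GetBytes(s, 0, len(s), buf, 0)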
def GetBytes(self,*__args):
"""
GetBytes(self: Encoding,s: str) -> Array[Byte]
When overridden in a derived class,encodes all the characters in the specified string into a
sequence of bytes.
s: The string containing the characters to encode.
Returns: A byte array containing the results of encoding the specified set of characters.
GetBytes(self: Encoding,s: str,charIndex: int,charCount: int,bytes: Array[Byte],byteIndex: int) -> int
When overridden in a derived class,encodes a set of characters from the specified string into
the specified byte array.
s: The string containing the set of characters to encode.
charIndex: The index of the first character to encode.
charCount: The number of characters to encode.
bytes: The byte array to contain the resulting sequence of bytes.
byteIndex: The index at which to start writing the resulting sequence of bytes.
Returns: The actual number of bytes written into bytes.
GetBytes(self: Encoding,chars: Char*,charCount: int,bytes: Byte*,byteCount: int) -> int
When overridden in a derived class,encodes a set of characters starting at the specified
character pointer into a sequence of bytes that are stored starting at the specified byte
pointer.
chars: A pointer to the first character to encode.
charCount: The number of characters to encode.
bytes: A pointer to the location at which to start writing the resulting sequence of bytes.
byteCount: The maximum number of bytes to write.
Returns: The actual number of bytes written at the location indicated by the bytes parameter.
GetBytes(self: Encoding,chars: Array[Char]) -> Array[Byte]
When overridden in a derived class,encodes all the characters in the specified character array
into a sequence of bytes.
chars: The character array containing the characters to encode.
Returns: A byte array containing the results of encoding the specified set of characters.
GetBytes(self: Encoding,chars: Array[Char],index: int,count: int) -> Array[Byte]
When overridden in a derived class,encodes a set of characters from the specified character
array into a sequence of bytes.
chars: The character array containing the set of characters to encode.
index: The index of the first character to encode.
count: The number of characters to encode.
Returns: A byte array containing the results of encoding the specified set of characters.
GetBytes(self: Encoding,chars: Array[Char],charIndex: int,charCount: int,bytes: Array[Byte],byteIndex: int) -> int
When overridden in a derived class,encodes a set of characters from the specified character
array into the specified byte array.
chars: The character array containing the set of characters to encode.
charIndex: The index of the first character to encode.
charCount: The number of characters to encode.
bytes: The byte array to contain the resulting sequence of bytes.
byteIndex: The index at which to start writing the resulting sequence of bytes.
Returns: The actual number of bytes written into bytes.
"""
pass
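    # Round-trip sketch using only members shown in this stub (hand-written;
    # values are illustrative):
    #
    #   data = Encoding.UTF8.GetBytes("héllo")    # str -> Array[Byte]
    #   chars = Encoding.UTF8.GetChars(data)      # Array[Byte] -> Array[Char]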
def GetCharCount(self,bytes,*__args):
"""
GetCharCount(self: Encoding,bytes: Byte*,count: int) -> int
When overridden in a derived class,calculates the number of characters produced by decoding a
sequence of bytes starting at the specified byte pointer.
bytes: A pointer to the first byte to decode.
count: The number of bytes to decode.
Returns: The number of characters produced by decoding the specified sequence of bytes.
GetCharCount(self: Encoding,bytes: Array[Byte],index: int,count: int) -> int
When overridden in a derived class,calculates the number of characters produced by decoding a
sequence of bytes from the specified byte array.
bytes: The byte array containing the sequence of bytes to decode.
index: The index of the first byte to decode.
count: The number of bytes to decode.
Returns: The number of characters produced by decoding the specified sequence of bytes.
GetCharCount(self: Encoding,bytes: Array[Byte]) -> int
When overridden in a derived class,calculates the number of characters produced by decoding all
the bytes in the specified byte array.
bytes: The byte array containing the sequence of bytes to decode.
Returns: The number of characters produced by decoding the specified sequence of bytes.
"""
pass
def GetChars(self,bytes,*__args):
"""
GetChars(self: Encoding,bytes: Array[Byte],byteIndex: int,byteCount: int,chars: Array[Char],charIndex: int) -> int
When overridden in a derived class,decodes a sequence of bytes from the specified byte array
into the specified character array.
bytes: The byte array containing the sequence of bytes to decode.
byteIndex: The index of the first byte to decode.
byteCount: The number of bytes to decode.
chars: The character array to contain the resulting set of characters.
charIndex: The index at which to start writing the resulting set of characters.
Returns: The actual number of characters written into chars.
GetChars(self: Encoding,bytes: Byte*,byteCount: int,chars: Char*,charCount: int) -> int
When overridden in a derived class,decodes a sequence of bytes starting at the specified byte
pointer into a set of characters that are stored starting at the specified character pointer.
bytes: A pointer to the first byte to decode.
byteCount: The number of bytes to decode.
chars: A pointer to the location at which to start writing the resulting set of characters.
charCount: The maximum number of characters to write.
Returns: The actual number of characters written at the location indicated by the chars parameter.
GetChars(self: Encoding,bytes: Array[Byte]) -> Array[Char]
When overridden in a derived class,decodes all the bytes in the specified byte array into a set
of characters.
bytes: The byte array containing the sequence of bytes to decode.
Returns: A character array containing the results of decoding the specified sequence of bytes.
GetChars(self: Encoding,bytes: Array[Byte],index: int,count: int) -> Array[Char]
When overridden in a derived class,decodes a sequence of bytes from the specified byte array
into a set of characters.
bytes: The byte array containing the sequence of bytes to decode.
index: The index of the first byte to decode.
count: The number of bytes to decode.
Returns: A character array containing the results of decoding the specified sequence of bytes.
"""
pass
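    # Usage sketch (illustrative): decoding into a caller-supplied character
    # array via the (bytes, byteIndex, byteCount, chars, charIndex) overload.
    # Assumes IronPython with System and System.Text available.
    #
    #   from System import Array, Char
    #   from System.Text import Encoding
    #   data = Encoding.UTF8.GetBytes(u'abc')
    #   buf = Array.CreateInstance(Char, 8)
    #   n = Encoding.UTF8.GetChars(data, 0, data.Length, buf, 0)   # n == 3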
def GetDecoder(self):
"""
GetDecoder(self: Encoding) -> Decoder
When overridden in a derived class,obtains a decoder that converts an encoded sequence of bytes
into a sequence of characters.
Returns: A System.Text.Decoder that converts an encoded sequence of bytes into a sequence of characters.
"""
pass
def GetEncoder(self):
"""
GetEncoder(self: Encoding) -> Encoder
When overridden in a derived class,obtains an encoder that converts a sequence of Unicode
characters into an encoded sequence of bytes.
Returns: A System.Text.Encoder that converts a sequence of Unicode characters into an encoded sequence of
bytes.
"""
pass
@staticmethod
def GetEncoding(*__args):
"""
GetEncoding(name: str) -> Encoding
Returns the encoding associated with the specified code page name.
name: The code page name of the preferred encoding. Any value returned by the
System.Text.Encoding.WebName property is valid. Possible values are listed in the Name column of
the table that appears in the System.Text.Encoding class topic.
Returns: The encoding associated with the specified code page.
GetEncoding(name: str,encoderFallback: EncoderFallback,decoderFallback: DecoderFallback) -> Encoding
Returns the encoding associated with the specified code page name. Parameters specify an error
handler for characters that cannot be encoded and byte sequences that cannot be decoded.
name: The code page name of the preferred encoding. Any value returned by the
System.Text.Encoding.WebName property is valid. Possible values are listed in the Name column of
the table that appears in the System.Text.Encoding class topic.
encoderFallback: An object that provides an error-handling procedure when a character cannot be encoded with the
current encoding.
decoderFallback: An object that provides an error-handling procedure when a byte sequence cannot be decoded with
the current encoding.
Returns: The encoding that is associated with the specified code page.
GetEncoding(codepage: int) -> Encoding
Returns the encoding associated with the specified code page identifier.
codepage: The code page identifier of the preferred encoding. Possible values are listed in the Code Page
column of the table that appears in the System.Text.Encoding class topic.-or- 0 (zero),to use
the default encoding.
Returns: The encoding that is associated with the specified code page.
GetEncoding(codepage: int,encoderFallback: EncoderFallback,decoderFallback: DecoderFallback) -> Encoding
Returns the encoding associated with the specified code page identifier. Parameters specify an
error handler for characters that cannot be encoded and byte sequences that cannot be decoded.
codepage: The code page identifier of the preferred encoding. Possible values are listed in the Code Page
column of the table that appears in the System.Text.Encoding class topic.-or- 0 (zero),to use
the default encoding.
encoderFallback: An object that provides an error-handling procedure when a character cannot be encoded with the
current encoding.
decoderFallback: An object that provides an error-handling procedure when a byte sequence cannot be decoded with
the current encoding.
Returns: The encoding that is associated with the specified code page.
"""
pass
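    # Usage sketch (illustrative): looking up an encoding by IANA-style name or
    # by code page identifier; 28591 (ISO-8859-1) is assumed to be available on
    # the host runtime.
    #
    #   from System.Text import Encoding
    #   utf8 = Encoding.GetEncoding('utf-8')
    #   latin1 = Encoding.GetEncoding(28591)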
@staticmethod
def GetEncodings():
"""
GetEncodings() -> Array[EncodingInfo]
Returns an array that contains all encodings.
Returns: An array that contains all encodings.
"""
pass
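    # Usage sketch (illustrative): enumerating the EncodingInfo entries that
    # GetEncodings returns.
    #
    #   from System.Text import Encoding
    #   for info in Encoding.GetEncodings():
    #       print('%s (code page %d)' % (info.Name, info.CodePage))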
def GetHashCode(self):
"""
GetHashCode(self: Encoding) -> int
Returns the hash code for the current instance.
Returns: The hash code for the current instance.
"""
pass
def GetMaxByteCount(self,charCount):
"""
GetMaxByteCount(self: Encoding,charCount: int) -> int
When overridden in a derived class,calculates the maximum number of bytes produced by encoding
the specified number of characters.
charCount: The number of characters to encode.
Returns: The maximum number of bytes produced by encoding the specified number of characters.
"""
pass
def GetMaxCharCount(self,byteCount):
"""
GetMaxCharCount(self: Encoding,byteCount: int) -> int
When overridden in a derived class,calculates the maximum number of characters produced by
decoding the specified number of bytes.
byteCount: The number of bytes to decode.
Returns: The maximum number of characters produced by decoding the specified number of bytes.
"""
pass
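    # Usage sketch (illustrative): GetMaxByteCount/GetMaxCharCount return
    # worst-case sizes, so they are the safe way to size a reusable buffer
    # before calling GetBytes or GetChars.
    #
    #   from System import Array, Byte
    #   from System.Text import Encoding
    #   buf = Array.CreateInstance(Byte, Encoding.UTF8.GetMaxByteCount(10))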
def GetPreamble(self):
"""
GetPreamble(self: Encoding) -> Array[Byte]
When overridden in a derived class,returns a sequence of bytes that specifies the encoding used.
Returns: A byte array containing a sequence of bytes that specifies the encoding used.-or- A byte array
of length zero,if a preamble is not required.
"""
pass
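    # Usage sketch (illustrative): on the desktop framework the static
    # Encoding.UTF8 instance is expected to report the UTF-8 byte order mark
    # as its preamble.
    #
    #   from System.Text import Encoding
    #   list(Encoding.UTF8.GetPreamble())   # expected: [239, 187, 191]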
def GetString(self,bytes,*__args):
"""
GetString(self: Encoding,bytes: Array[Byte],index: int,count: int) -> str
When overridden in a derived class,decodes a sequence of bytes from the specified byte array
into a string.
bytes: The byte array containing the sequence of bytes to decode.
index: The index of the first byte to decode.
count: The number of bytes to decode.
Returns: A System.String containing the results of decoding the specified sequence of bytes.
GetString(self: Encoding,bytes: Array[Byte]) -> str
When overridden in a derived class,decodes all the bytes in the specified byte array into a
string.
bytes: The byte array containing the sequence of bytes to decode.
Returns: A System.String containing the results of decoding the specified sequence of bytes.
GetString(self: Encoding,bytes: Byte*,byteCount: int) -> str
"""
pass
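    # Usage sketch (illustrative): a GetBytes/GetString round trip, plus the
    # (bytes, index, count) overload decoding only a prefix.
    #
    #   from System.Text import Encoding
    #   data = Encoding.UTF8.GetBytes(u'caf\u00e9')   # u'\u00e9' takes 2 bytes
    #   Encoding.UTF8.GetString(data)                 # u'caf\xe9'
    #   Encoding.UTF8.GetString(data, 0, 3)           # u'caf'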
def IsAlwaysNormalized(self,form=None):
"""
IsAlwaysNormalized(self: Encoding,form: NormalizationForm) -> bool
When overridden in a derived class,gets a value indicating whether the current encoding is
always normalized,using the specified normalization form.
form: One of the System.Text.NormalizationForm values.
Returns: true if the current System.Text.Encoding object is always normalized using the specified
System.Text.NormalizationForm value; otherwise,false. The default is false.
IsAlwaysNormalized(self: Encoding) -> bool
Gets a value indicating whether the current encoding is always normalized,using the default
normalization form.
Returns: true if the current System.Text.Encoding is always normalized; otherwise,false. The default is
false.
"""
pass
@staticmethod
def RegisterProvider(provider):
""" RegisterProvider(provider: EncodingProvider) """
pass
def __eq__(self,*args):
""" x.__eq__(y) <==> x==y """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod
def __new__(self,*args): #cannot find CLR constructor
"""
__new__(cls: type)
__new__(cls: type,codePage: int)
__new__(cls: type,codePage: int,encoderFallback: EncoderFallback,decoderFallback: DecoderFallback)
"""
pass
def __ne__(self,*args):
pass
def __reduce_ex__(self,*args):
pass
def __repr__(self,*args):
""" __repr__(self: object) -> str """
pass
BodyName=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""When overridden in a derived class,gets a name for the current encoding that can be used with mail agent body tags.
Get: BodyName(self: Encoding) -> str
"""
CodePage=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""When overridden in a derived class,gets the code page identifier of the current System.Text.Encoding.
Get: CodePage(self: Encoding) -> int
"""
DecoderFallback=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Text.DecoderFallback object for the current System.Text.Encoding object.
Get: DecoderFallback(self: Encoding) -> DecoderFallback
Set: DecoderFallback(self: Encoding)=value
"""
EncoderFallback=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Text.EncoderFallback object for the current System.Text.Encoding object.
Get: EncoderFallback(self: Encoding) -> EncoderFallback
Set: EncoderFallback(self: Encoding)=value
"""
EncodingName=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""When overridden in a derived class,gets the human-readable description of the current encoding.
Get: EncodingName(self: Encoding) -> str
"""
HeaderName=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""When overridden in a derived class,gets a name for the current encoding that can be used with mail agent header tags.
Get: HeaderName(self: Encoding) -> str
"""
IsBrowserDisplay=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""When overridden in a derived class,gets a value indicating whether the current encoding can be used by browser clients for displaying content.
Get: IsBrowserDisplay(self: Encoding) -> bool
"""
IsBrowserSave=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""When overridden in a derived class,gets a value indicating whether the current encoding can be used by browser clients for saving content.
Get: IsBrowserSave(self: Encoding) -> bool
"""
IsMailNewsDisplay=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""When overridden in a derived class,gets a value indicating whether the current encoding can be used by mail and news clients for displaying content.
Get: IsMailNewsDisplay(self: Encoding) -> bool
"""
IsMailNewsSave=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""When overridden in a derived class,gets a value indicating whether the current encoding can be used by mail and news clients for saving content.
Get: IsMailNewsSave(self: Encoding) -> bool
"""
IsReadOnly=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""When overridden in a derived class,gets a value indicating whether the current encoding is read-only.
Get: IsReadOnly(self: Encoding) -> bool
"""
IsSingleByte=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""When overridden in a derived class,gets a value indicating whether the current encoding uses single-byte code points.
Get: IsSingleByte(self: Encoding) -> bool
"""
WebName=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""When overridden in a derived class,gets the name registered with the Internet Assigned Numbers Authority (IANA) for the current encoding.
Get: WebName(self: Encoding) -> str
"""
WindowsCodePage=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""When overridden in a derived class,gets the Windows operating system code page that most closely corresponds to the current encoding.
Get: WindowsCodePage(self: Encoding) -> int
"""
ASCII=None
BigEndianUnicode=None
Default=None
Unicode=None
UTF32=None
UTF7=None
UTF8=None
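# Usage sketch (illustrative, not part of the generated stubs): the static
# properties above are ready-made encodings. Default depends on the host OS, so
# the byte counts below only assume UTF-8 versus UTF-16 behaviour.
#
#   from System.Text import Encoding
#   Encoding.UTF8.GetByteCount(u'hello')      # 5: one byte per ASCII char
#   Encoding.Unicode.GetByteCount(u'hello')   # 10: UTF-16, two bytes per char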
class ASCIIEncoding(Encoding,ICloneable):
"""
Represents an ASCII character encoding of Unicode characters.
ASCIIEncoding()
"""
def GetByteCount(self,chars,*__args):
"""
GetByteCount(self: ASCIIEncoding,chars: Char*,count: int) -> int
Calculates the number of bytes produced by encoding a set of characters starting at the
specified character pointer.
chars: A pointer to the first character to encode.
count: The number of characters to encode.
Returns: The number of bytes produced by encoding the specified characters.
GetByteCount(self: ASCIIEncoding,chars: str) -> int
Calculates the number of bytes produced by encoding the characters in the specified
System.String.
chars: The System.String containing the set of characters to encode.
Returns: The number of bytes produced by encoding the specified characters.
GetByteCount(self: ASCIIEncoding,chars: Array[Char],index: int,count: int) -> int
Calculates the number of bytes produced by encoding a set of characters from the specified
character array.
chars: The character array containing the set of characters to encode.
index: The index of the first character to encode.
count: The number of characters to encode.
Returns: The number of bytes produced by encoding the specified characters.
"""
pass
def GetBytes(self,*__args):
"""
GetBytes(self: ASCIIEncoding,chars: Char*,charCount: int,bytes: Byte*,byteCount: int) -> int
Encodes a set of characters starting at the specified character pointer into a sequence of bytes
that are stored starting at the specified byte pointer.
chars: A pointer to the first character to encode.
charCount: The number of characters to encode.
bytes: A pointer to the location at which to start writing the resulting sequence of bytes.
byteCount: The maximum number of bytes to write.
Returns: The actual number of bytes written at the location indicated by bytes.
GetBytes(self: ASCIIEncoding,chars: Array[Char],charIndex: int,charCount: int,bytes: Array[Byte],byteIndex: int) -> int
Encodes a set of characters from the specified character array into the specified byte array.
chars: The character array containing the set of characters to encode.
charIndex: The index of the first character to encode.
charCount: The number of characters to encode.
bytes: The byte array to contain the resulting sequence of bytes.
byteIndex: The index at which to start writing the resulting sequence of bytes.
Returns: The actual number of bytes written into bytes.
GetBytes(self: ASCIIEncoding,chars: str,charIndex: int,charCount: int,bytes: Array[Byte],byteIndex: int) -> int
Encodes a set of characters from the specified System.String into the specified byte array.
chars: The System.String containing the set of characters to encode.
charIndex: The index of the first character to encode.
charCount: The number of characters to encode.
bytes: The byte array to contain the resulting sequence of bytes.
byteIndex: The index at which to start writing the resulting sequence of bytes.
Returns: The actual number of bytes written into bytes.
"""
pass
def GetCharCount(self,bytes,*__args):
"""
GetCharCount(self: ASCIIEncoding,bytes: Byte*,count: int) -> int
Calculates the number of characters produced by decoding a sequence of bytes starting at the
specified byte pointer.
bytes: A pointer to the first byte to decode.
count: The number of bytes to decode.
Returns: The number of characters produced by decoding the specified sequence of bytes.
GetCharCount(self: ASCIIEncoding,bytes: Array[Byte],index: int,count: int) -> int
Calculates the number of characters produced by decoding a sequence of bytes from the specified
byte array.
bytes: The byte array containing the sequence of bytes to decode.
index: The index of the first byte to decode.
count: The number of bytes to decode.
Returns: The number of characters produced by decoding the specified sequence of bytes.
"""
pass
def GetChars(self,bytes,*__args):
"""
GetChars(self: ASCIIEncoding,bytes: Byte*,byteCount: int,chars: Char*,charCount: int) -> int
Decodes a sequence of bytes starting at the specified byte pointer into a set of characters that
are stored starting at the specified character pointer.
bytes: A pointer to the first byte to decode.
byteCount: The number of bytes to decode.
chars: A pointer to the location at which to start writing the resulting set of characters.
charCount: The maximum number of characters to write.
Returns: The actual number of characters written at the location indicated by chars.
GetChars(self: ASCIIEncoding,bytes: Array[Byte],byteIndex: int,byteCount: int,chars: Array[Char],charIndex: int) -> int
Decodes a sequence of bytes from the specified byte array into the specified character array.
bytes: The byte array containing the sequence of bytes to decode.
byteIndex: The index of the first byte to decode.
byteCount: The number of bytes to decode.
chars: The character array to contain the resulting set of characters.
charIndex: The index at which to start writing the resulting set of characters.
Returns: The actual number of characters written into chars.
"""
pass
def GetDecoder(self):
"""
GetDecoder(self: ASCIIEncoding) -> Decoder
Obtains a decoder that converts an ASCII encoded sequence of bytes into a sequence of Unicode
characters.
Returns: A System.Text.Decoder that converts an ASCII encoded sequence of bytes into a sequence of
Unicode characters.
"""
pass
def GetEncoder(self):
"""
GetEncoder(self: ASCIIEncoding) -> Encoder
Obtains an encoder that converts a sequence of Unicode characters into an ASCII encoded sequence
of bytes.
        Returns: A System.Text.Encoder that converts a sequence of Unicode characters into an ASCII encoded
sequence of bytes.
"""
pass
def GetMaxByteCount(self,charCount):
"""
GetMaxByteCount(self: ASCIIEncoding,charCount: int) -> int
Calculates the maximum number of bytes produced by encoding the specified number of characters.
charCount: The number of characters to encode.
Returns: The maximum number of bytes produced by encoding the specified number of characters.
"""
pass
def GetMaxCharCount(self,byteCount):
"""
GetMaxCharCount(self: ASCIIEncoding,byteCount: int) -> int
Calculates the maximum number of characters produced by decoding the specified number of bytes.
byteCount: The number of bytes to decode.
Returns: The maximum number of characters produced by decoding the specified number of bytes.
"""
pass
def GetString(self,bytes,*__args):
"""
GetString(self: ASCIIEncoding,bytes: Array[Byte],byteIndex: int,byteCount: int) -> str
Decodes a range of bytes from a byte array into a string.
bytes: The byte array containing the sequence of bytes to decode.
byteIndex: The index of the first byte to decode.
byteCount: The number of bytes to decode.
Returns: A System.String containing the results of decoding the specified sequence of bytes.
"""
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __reduce_ex__(self,*args):
pass
IsSingleByte=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value indicating whether the current encoding uses single-byte code points.
Get: IsSingleByte(self: ASCIIEncoding) -> bool
"""
class Decoder(object):
""" Converts a sequence of encoded bytes into a set of characters. """
def Convert(self,bytes,*__args):
"""
Convert(self: Decoder,bytes: Byte*,byteCount: int,chars: Char*,charCount: int,flush: bool) -> (int,int,bool)
Converts a buffer of encoded bytes to UTF-16 encoded characters and stores the result in another
buffer.
bytes: The address of a buffer that contains the byte sequences to convert.
byteCount: The number of bytes in bytes to convert.
chars: The address of a buffer to store the converted characters.
charCount: The maximum number of characters in chars to use in the conversion.
flush: true to indicate no further data is to be converted; otherwise,false.
Convert(self: Decoder,bytes: Array[Byte],byteIndex: int,byteCount: int,chars: Array[Char],charIndex: int,charCount: int,flush: bool) -> (int,int,bool)
        Converts an array of encoded bytes to UTF-16 encoded characters and stores the result in a
        character array.
bytes: A byte array to convert.
byteIndex: The first element of bytes to convert.
byteCount: The number of elements of bytes to convert.
chars: An array to store the converted characters.
charIndex: The first element of chars in which data is stored.
charCount: The maximum number of elements of chars to use in the conversion.
flush: true to indicate that no further data is to be converted; otherwise,false.
"""
pass
def GetCharCount(self,bytes,*__args):
"""
GetCharCount(self: Decoder,bytes: Byte*,count: int,flush: bool) -> int
When overridden in a derived class,calculates the number of characters produced by decoding a
sequence of bytes starting at the specified byte pointer. A parameter indicates whether to clear
the internal state of the decoder after the calculation.
bytes: A pointer to the first byte to decode.
count: The number of bytes to decode.
        flush: true to simulate clearing the internal state of the decoder after the calculation; otherwise,
false.
Returns: The number of characters produced by decoding the specified sequence of bytes and any bytes in
the internal buffer.
GetCharCount(self: Decoder,bytes: Array[Byte],index: int,count: int,flush: bool) -> int
When overridden in a derived class,calculates the number of characters produced by decoding a
sequence of bytes from the specified byte array. A parameter indicates whether to clear the
internal state of the decoder after the calculation.
bytes: The byte array containing the sequence of bytes to decode.
index: The index of the first byte to decode.
count: The number of bytes to decode.
        flush: true to simulate clearing the internal state of the decoder after the calculation; otherwise,
false.
Returns: The number of characters produced by decoding the specified sequence of bytes and any bytes in
the internal buffer.
GetCharCount(self: Decoder,bytes: Array[Byte],index: int,count: int) -> int
When overridden in a derived class,calculates the number of characters produced by decoding a
sequence of bytes from the specified byte array.
bytes: The byte array containing the sequence of bytes to decode.
index: The index of the first byte to decode.
count: The number of bytes to decode.
Returns: The number of characters produced by decoding the specified sequence of bytes and any bytes in
the internal buffer.
"""
pass
def GetChars(self,bytes,*__args):
"""
GetChars(self: Decoder,bytes: Byte*,byteCount: int,chars: Char*,charCount: int,flush: bool) -> int
When overridden in a derived class,decodes a sequence of bytes starting at the specified byte
pointer and any bytes in the internal buffer into a set of characters that are stored starting
at the specified character pointer. A parameter indicates whether to clear the internal state of
the decoder after the conversion.
bytes: A pointer to the first byte to decode.
byteCount: The number of bytes to decode.
chars: A pointer to the location at which to start writing the resulting set of characters.
charCount: The maximum number of characters to write.
flush: true to clear the internal state of the decoder after the conversion; otherwise,false.
Returns: The actual number of characters written at the location indicated by the chars parameter.
GetChars(self: Decoder,bytes: Array[Byte],byteIndex: int,byteCount: int,chars: Array[Char],charIndex: int,flush: bool) -> int
When overridden in a derived class,decodes a sequence of bytes from the specified byte array
and any bytes in the internal buffer into the specified character array. A parameter indicates
whether to clear the internal state of the decoder after the conversion.
bytes: The byte array containing the sequence of bytes to decode.
byteIndex: The index of the first byte to decode.
byteCount: The number of bytes to decode.
chars: The character array to contain the resulting set of characters.
charIndex: The index at which to start writing the resulting set of characters.
flush: true to clear the internal state of the decoder after the conversion; otherwise,false.
Returns: The actual number of characters written into the chars parameter.
GetChars(self: Decoder,bytes: Array[Byte],byteIndex: int,byteCount: int,chars: Array[Char],charIndex: int) -> int
When overridden in a derived class,decodes a sequence of bytes from the specified byte array
and any bytes in the internal buffer into the specified character array.
bytes: The byte array containing the sequence of bytes to decode.
byteIndex: The index of the first byte to decode.
byteCount: The number of bytes to decode.
chars: The character array to contain the resulting set of characters.
charIndex: The index at which to start writing the resulting set of characters.
Returns: The actual number of characters written into chars.
"""
pass
def Reset(self):
"""
Reset(self: Decoder)
When overridden in a derived class,sets the decoder back to its initial state.
"""
pass
Fallback=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a System.Text.DecoderFallback object for the current System.Text.Decoder object.
Get: Fallback(self: Decoder) -> DecoderFallback
Set: Fallback(self: Decoder)=value
"""
FallbackBuffer=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the System.Text.DecoderFallbackBuffer object associated with the current System.Text.Decoder object.
Get: FallbackBuffer(self: Decoder) -> DecoderFallbackBuffer
"""
class DecoderFallback(object):
""" Provides a failure-handling mechanism,called a fallback,for an encoded input byte sequence that cannot be converted to an output character. """
def CreateFallbackBuffer(self):
"""
CreateFallbackBuffer(self: DecoderFallback) -> DecoderFallbackBuffer
When overridden in a derived class,initializes a new instance of the
System.Text.DecoderFallbackBuffer class.
Returns: An object that provides a fallback buffer for a decoder.
"""
pass
MaxCharCount=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""When overridden in a derived class,gets the maximum number of characters the current System.Text.DecoderFallback object can return.
Get: MaxCharCount(self: DecoderFallback) -> int
"""
ExceptionFallback=None
ReplacementFallback=None
class DecoderExceptionFallback(DecoderFallback):
"""
Throws System.Text.DecoderFallbackException if an encoded input byte sequence cannot be converted to a decoded output character. This class cannot be inherited.
DecoderExceptionFallback()
"""
def CreateFallbackBuffer(self):
"""
CreateFallbackBuffer(self: DecoderExceptionFallback) -> DecoderFallbackBuffer
Initializes a new instance of the System.Text.DecoderExceptionFallback class.
Returns: A System.Text.DecoderFallbackBuffer object.
"""
pass
def Equals(self,value):
"""
Equals(self: DecoderExceptionFallback,value: object) -> bool
Indicates whether the current System.Text.DecoderExceptionFallback object and a specified object
are equal.
value: An object that derives from the System.Text.DecoderExceptionFallback class.
Returns: true if value is not null and is a System.Text.DecoderExceptionFallback object; otherwise,false.
"""
pass
def GetHashCode(self):
"""
GetHashCode(self: DecoderExceptionFallback) -> int
Retrieves the hash code for this instance.
Returns: The return value is always the same arbitrary value,and has no special significance.
"""
pass
def __eq__(self,*args):
""" x.__eq__(y) <==> x==y """
pass
def __ne__(self,*args):
pass
MaxCharCount=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the maximum number of characters this instance can return.
Get: MaxCharCount(self: DecoderExceptionFallback) -> int
"""
class DecoderFallbackBuffer(object):
""" Provides a buffer that allows a fallback handler to return an alternate string to a decoder when it cannot decode an input byte sequence. """
def Fallback(self,bytesUnknown,index):
"""
Fallback(self: DecoderFallbackBuffer,bytesUnknown: Array[Byte],index: int) -> bool
When overridden in a derived class,prepares the fallback buffer to handle the specified input
byte sequence.
bytesUnknown: An input array of bytes.
index: The index position of a byte in bytesUnknown.
Returns: true if the fallback buffer can process bytesUnknown; false if the fallback buffer ignores
bytesUnknown.
"""
pass
def GetNextChar(self):
"""
GetNextChar(self: DecoderFallbackBuffer) -> Char
When overridden in a derived class,retrieves the next character in the fallback buffer.
Returns: The next character in the fallback buffer.
"""
pass
def MovePrevious(self):
"""
MovePrevious(self: DecoderFallbackBuffer) -> bool
When overridden in a derived class,causes the next call to the
System.Text.DecoderFallbackBuffer.GetNextChar method to access the data buffer character
position that is prior to the current character position.
Returns: true if the System.Text.DecoderFallbackBuffer.MovePrevious operation was successful; otherwise,
false.
"""
pass
def Reset(self):
"""
Reset(self: DecoderFallbackBuffer)
Initializes all data and state information pertaining to this fallback buffer.
"""
pass
Remaining=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""When overridden in a derived class,gets the number of characters in the current System.Text.DecoderFallbackBuffer object that remain to be processed.
Get: Remaining(self: DecoderFallbackBuffer) -> int
"""
class DecoderExceptionFallbackBuffer(DecoderFallbackBuffer):
"""
Throws System.Text.DecoderFallbackException when an encoded input byte sequence cannot be converted to a decoded output character. This class cannot be inherited.
DecoderExceptionFallbackBuffer()
"""
def Fallback(self,bytesUnknown,index):
"""
Fallback(self: DecoderExceptionFallbackBuffer,bytesUnknown: Array[Byte],index: int) -> bool
Throws System.Text.DecoderFallbackException when the input byte sequence cannot be decoded. The
nominal return value is not used.
bytesUnknown: An input array of bytes.
index: The index position of a byte in the input.
Returns: None. No value is returned because the
System.Text.DecoderExceptionFallbackBuffer.Fallback(System.Byte[],System.Int32) method always
throws an exception. The nominal return value is true. A return value is defined,although it is
unchanging,because this method implements an abstract method.
"""
pass
def GetNextChar(self):
"""
GetNextChar(self: DecoderExceptionFallbackBuffer) -> Char
Retrieves the next character in the exception data buffer.
Returns: The return value is always the Unicode character NULL (U+0000). A return value is defined,
although it is unchanging,because this method implements an abstract method.
"""
pass
def MovePrevious(self):
"""
MovePrevious(self: DecoderExceptionFallbackBuffer) -> bool
Causes the next call to System.Text.DecoderExceptionFallbackBuffer.GetNextChar to access the
exception data buffer character position that is prior to the current position.
Returns: The return value is always false. A return value is defined,although it is unchanging,because
this method implements an abstract method.
"""
pass
Remaining=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the number of characters in the current System.Text.DecoderExceptionFallbackBuffer object that remain to be processed.
Get: Remaining(self: DecoderExceptionFallbackBuffer) -> int
"""
class DecoderFallbackException(ArgumentException,ISerializable,_Exception):
"""
The exception that is thrown when a decoder fallback operation fails. This class cannot be inherited.
DecoderFallbackException()
DecoderFallbackException(message: str)
DecoderFallbackException(message: str,innerException: Exception)
DecoderFallbackException(message: str,bytesUnknown: Array[Byte],index: int)
"""
def add_SerializeObjectState(self,*args):
""" add_SerializeObjectState(self: Exception,value: EventHandler[SafeSerializationEventArgs]) """
pass
def remove_SerializeObjectState(self,*args):
""" remove_SerializeObjectState(self: Exception,value: EventHandler[SafeSerializationEventArgs]) """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod
def __new__(self,message=None,*__args):
"""
__new__(cls: type)
__new__(cls: type,message: str)
__new__(cls: type,message: str,innerException: Exception)
__new__(cls: type,message: str,bytesUnknown: Array[Byte],index: int)
"""
pass
def __reduce_ex__(self,*args):
pass
def __str__(self,*args):
pass
BytesUnknown=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the input byte sequence that caused the exception.
Get: BytesUnknown(self: DecoderFallbackException) -> Array[Byte]
"""
Index=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the index position in the input byte sequence of the byte that caused the exception.
Get: Index(self: DecoderFallbackException) -> int
"""
class DecoderReplacementFallback(DecoderFallback):
"""
Provides a failure-handling mechanism,called a fallback,for an encoded input byte sequence that cannot be converted to an output character. The fallback emits a user-specified replacement string instead of a decoded input byte sequence. This class cannot be inherited.
DecoderReplacementFallback()
DecoderReplacementFallback(replacement: str)
"""
def CreateFallbackBuffer(self):
"""
CreateFallbackBuffer(self: DecoderReplacementFallback) -> DecoderFallbackBuffer
Creates a System.Text.DecoderFallbackBuffer object that is initialized with the replacement
string of this System.Text.DecoderReplacementFallback object.
Returns: A System.Text.DecoderFallbackBuffer object that specifies a string to use instead of the
original decoding operation input.
"""
pass
def Equals(self,value):
"""
Equals(self: DecoderReplacementFallback,value: object) -> bool
Indicates whether the value of a specified object is equal to the
System.Text.DecoderReplacementFallback object.
value: A System.Text.DecoderReplacementFallback object.
Returns: true if value is a System.Text.DecoderReplacementFallback object having a
System.Text.DecoderReplacementFallback.DefaultString property that is equal to the
System.Text.DecoderReplacementFallback.DefaultString property of the current
System.Text.DecoderReplacementFallback object; otherwise,false.
"""
pass
def GetHashCode(self):
"""
GetHashCode(self: DecoderReplacementFallback) -> int
Retrieves the hash code for the value of the System.Text.DecoderReplacementFallback object.
Returns: The hash code of the value of the object.
"""
pass
def __eq__(self,*args):
""" x.__eq__(y) <==> x==y """
pass
@staticmethod
def __new__(self,replacement=None):
"""
__new__(cls: type)
__new__(cls: type,replacement: str)
"""
pass
def __ne__(self,*args):
pass
DefaultString=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the replacement string that is the value of the System.Text.DecoderReplacementFallback object.
Get: DefaultString(self: DecoderReplacementFallback) -> str
"""
MaxCharCount=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the number of characters in the replacement string for the System.Text.DecoderReplacementFallback object.
Get: MaxCharCount(self: DecoderReplacementFallback) -> int
"""
class DecoderReplacementFallbackBuffer(DecoderFallbackBuffer):
"""
Represents a substitute output string that is emitted when the original input byte sequence cannot be decoded. This class cannot be inherited.
DecoderReplacementFallbackBuffer(fallback: DecoderReplacementFallback)
"""
def Fallback(self,bytesUnknown,index):
"""
Fallback(self: DecoderReplacementFallbackBuffer,bytesUnknown: Array[Byte],index: int) -> bool
Prepares the replacement fallback buffer to use the current replacement string.
bytesUnknown: An input byte sequence. This parameter is ignored unless an exception is thrown.
index: The index position of the byte in bytesUnknown. This parameter is ignored in this operation.
Returns: true if the replacement string is not empty; false if the replacement string is empty.
"""
pass
def GetNextChar(self):
"""
GetNextChar(self: DecoderReplacementFallbackBuffer) -> Char
Retrieves the next character in the replacement fallback buffer.
Returns: The next character in the replacement fallback buffer.
"""
pass
def MovePrevious(self):
"""
MovePrevious(self: DecoderReplacementFallbackBuffer) -> bool
Causes the next call to System.Text.DecoderReplacementFallbackBuffer.GetNextChar to access the
character position in the replacement fallback buffer prior to the current character position.
Returns: true if the System.Text.DecoderReplacementFallbackBuffer.MovePrevious operation was successful;
otherwise,false.
"""
pass
def Reset(self):
"""
Reset(self: DecoderReplacementFallbackBuffer)
Initializes all internal state information and data in the
System.Text.DecoderReplacementFallbackBuffer object.
"""
pass
@staticmethod
def __new__(self,fallback):
""" __new__(cls: type,fallback: DecoderReplacementFallback) """
pass
Remaining=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the number of characters in the replacement fallback buffer that remain to be processed.
Get: Remaining(self: DecoderReplacementFallbackBuffer) -> int
"""
class Encoder(object):
""" Converts a set of characters into a sequence of bytes. """
def Convert(self,chars,*__args):
"""
Convert(self: Encoder,chars: Char*,charCount: int,bytes: Byte*,byteCount: int,flush: bool) -> (int,int,bool)
Converts a buffer of Unicode characters to an encoded byte sequence and stores the result in
another buffer.
chars: The address of a string of UTF-16 encoded characters to convert.
charCount: The number of characters in chars to convert.
bytes: The address of a buffer to store the converted bytes.
byteCount: The maximum number of bytes in bytes to use in the conversion.
flush: true to indicate no further data is to be converted; otherwise,false.
Convert(self: Encoder,chars: Array[Char],charIndex: int,charCount: int,bytes: Array[Byte],byteIndex: int,byteCount: int,flush: bool) -> (int,int,bool)
Converts an array of Unicode characters to an encoded byte sequence and stores the result in an
array of bytes.
chars: An array of characters to convert.
charIndex: The first element of chars to convert.
charCount: The number of elements of chars to convert.
bytes: An array where the converted bytes are stored.
byteIndex: The first element of bytes in which data is stored.
byteCount: The maximum number of elements of bytes to use in the conversion.
flush: true to indicate no further data is to be converted; otherwise,false.
"""
pass
def GetByteCount(self,chars,*__args):
"""
GetByteCount(self: Encoder,chars: Char*,count: int,flush: bool) -> int
When overridden in a derived class,calculates the number of bytes produced by encoding a set of
characters starting at the specified character pointer. A parameter indicates whether to clear
the internal state of the encoder after the calculation.
chars: A pointer to the first character to encode.
count: The number of characters to encode.
flush: true to simulate clearing the internal state of the encoder after the calculation; otherwise,
false.
Returns: The number of bytes produced by encoding the specified characters and any characters in the
internal buffer.
GetByteCount(self: Encoder,chars: Array[Char],index: int,count: int,flush: bool) -> int
When overridden in a derived class,calculates the number of bytes produced by encoding a set of
characters from the specified character array. A parameter indicates whether to clear the
internal state of the encoder after the calculation.
chars: The character array containing the set of characters to encode.
index: The index of the first character to encode.
count: The number of characters to encode.
flush: true to simulate clearing the internal state of the encoder after the calculation; otherwise,
false.
Returns: The number of bytes produced by encoding the specified characters and any characters in the
internal buffer.
"""
pass
def GetBytes(self,chars,*__args):
"""
GetBytes(self: Encoder,chars: Char*,charCount: int,bytes: Byte*,byteCount: int,flush: bool) -> int
When overridden in a derived class,encodes a set of characters starting at the specified
character pointer and any characters in the internal buffer into a sequence of bytes that are
stored starting at the specified byte pointer. A parameter indicates whether to clear the
internal state of the encoder after the conversion.
chars: A pointer to the first character to encode.
charCount: The number of characters to encode.
bytes: A pointer to the location at which to start writing the resulting sequence of bytes.
byteCount: The maximum number of bytes to write.
flush: true to clear the internal state of the encoder after the conversion; otherwise,false.
Returns: The actual number of bytes written at the location indicated by the bytes parameter.
GetBytes(self: Encoder,chars: Array[Char],charIndex: int,charCount: int,bytes: Array[Byte],byteIndex: int,flush: bool) -> int
When overridden in a derived class,encodes a set of characters from the specified character
array and any characters in the internal buffer into the specified byte array. A parameter
indicates whether to clear the internal state of the encoder after the conversion.
chars: The character array containing the set of characters to encode.
charIndex: The index of the first character to encode.
charCount: The number of characters to encode.
bytes: The byte array to contain the resulting sequence of bytes.
byteIndex: The index at which to start writing the resulting sequence of bytes.
flush: true to clear the internal state of the encoder after the conversion; otherwise,false.
Returns: The actual number of bytes written into bytes.
"""
pass
def Reset(self):
"""
Reset(self: Encoder)
When overridden in a derived class,sets the encoder back to its initial state.
"""
pass
Fallback=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a System.Text.EncoderFallback object for the current System.Text.Encoder object.
Get: Fallback(self: Encoder) -> EncoderFallback
Set: Fallback(self: Encoder)=value
"""
FallbackBuffer=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the System.Text.EncoderFallbackBuffer object associated with the current System.Text.Encoder object.
Get: FallbackBuffer(self: Encoder) -> EncoderFallbackBuffer
"""
class EncoderFallback(object):
""" Provides a failure-handling mechanism,called a fallback,for an input character that cannot be converted to an encoded output byte sequence. """
def CreateFallbackBuffer(self):
"""
CreateFallbackBuffer(self: EncoderFallback) -> EncoderFallbackBuffer
When overridden in a derived class,initializes a new instance of the
System.Text.EncoderFallbackBuffer class.
Returns: An object that provides a fallback buffer for an encoder.
"""
pass
MaxCharCount=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""When overridden in a derived class,gets the maximum number of characters the current System.Text.EncoderFallback object can return.
Get: MaxCharCount(self: EncoderFallback) -> int
"""
ExceptionFallback=None
ReplacementFallback=None
class EncoderExceptionFallback(EncoderFallback):
"""
Throws a System.Text.EncoderFallbackException if an input character cannot be converted to an encoded output byte sequence. This class cannot be inherited.
EncoderExceptionFallback()
"""
def CreateFallbackBuffer(self):
"""
CreateFallbackBuffer(self: EncoderExceptionFallback) -> EncoderFallbackBuffer
Initializes a new instance of the System.Text.EncoderExceptionFallback class.
Returns: A System.Text.EncoderFallbackBuffer object.
"""
pass
def Equals(self,value):
"""
Equals(self: EncoderExceptionFallback,value: object) -> bool
Indicates whether the current System.Text.EncoderExceptionFallback object and a specified object
are equal.
value: An object that derives from the System.Text.EncoderExceptionFallback class.
Returns: true if value is not null (Nothing in Visual Basic .NET) and is a
System.Text.EncoderExceptionFallback object; otherwise,false.
"""
pass
def GetHashCode(self):
"""
GetHashCode(self: EncoderExceptionFallback) -> int
Retrieves the hash code for this instance.
Returns: The return value is always the same arbitrary value,and has no special significance.
"""
pass
def __eq__(self,*args):
""" x.__eq__(y) <==> x==y """
pass
def __ne__(self,*args):
pass
MaxCharCount=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the maximum number of characters this instance can return.
Get: MaxCharCount(self: EncoderExceptionFallback) -> int
"""
class EncoderFallbackBuffer(object):
""" Provides a buffer that allows a fallback handler to return an alternate string to an encoder when it cannot encode an input character. """
def Fallback(self,*__args):
"""
Fallback(self: EncoderFallbackBuffer,charUnknownHigh: Char,charUnknownLow: Char,index: int) -> bool
When overridden in a derived class,prepares the fallback buffer to handle the specified
surrogate pair.
charUnknownHigh: The high surrogate of the input pair.
charUnknownLow: The low surrogate of the input pair.
index: The index position of the surrogate pair in the input buffer.
Returns: true if the fallback buffer can process charUnknownHigh and charUnknownLow; false if the
fallback buffer ignores the surrogate pair.
Fallback(self: EncoderFallbackBuffer,charUnknown: Char,index: int) -> bool
When overridden in a derived class,prepares the fallback buffer to handle the specified input
character.
charUnknown: An input character.
index: The index position of the character in the input buffer.
Returns: true if the fallback buffer can process charUnknown; false if the fallback buffer ignores
charUnknown.
"""
pass
def GetNextChar(self):
"""
GetNextChar(self: EncoderFallbackBuffer) -> Char
When overridden in a derived class,retrieves the next character in the fallback buffer.
Returns: The next character in the fallback buffer.
"""
pass
def MovePrevious(self):
"""
MovePrevious(self: EncoderFallbackBuffer) -> bool
When overridden in a derived class,causes the next call to the
System.Text.EncoderFallbackBuffer.GetNextChar method to access the data buffer character
position that is prior to the current character position.
Returns: true if the System.Text.EncoderFallbackBuffer.MovePrevious operation was successful; otherwise,
false.
"""
pass
def Reset(self):
"""
Reset(self: EncoderFallbackBuffer)
Initializes all data and state information pertaining to this fallback buffer.
"""
pass
Remaining=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""When overridden in a derived class,gets the number of characters in the current System.Text.EncoderFallbackBuffer object that remain to be processed.
Get: Remaining(self: EncoderFallbackBuffer) -> int
"""
class EncoderExceptionFallbackBuffer(EncoderFallbackBuffer):
"""
Throws System.Text.EncoderFallbackException when an input character cannot be converted to an encoded output byte sequence. This class cannot be inherited.
EncoderExceptionFallbackBuffer()
"""
def Fallback(self,*__args):
"""
Fallback(self: EncoderExceptionFallbackBuffer,charUnknownHigh: Char,charUnknownLow: Char,index: int) -> bool
Throws an exception because the input character cannot be encoded. Parameters specify the value
and index position of the surrogate pair in the input,and the nominal return value is not used.
charUnknownHigh: The high surrogate of the input pair.
charUnknownLow: The low surrogate of the input pair.
index: The index position of the surrogate pair in the input buffer.
Returns: None. No value is returned because the
System.Text.EncoderExceptionFallbackBuffer.Fallback(System.Char,System.Char,System.Int32) method
always throws an exception.
Fallback(self: EncoderExceptionFallbackBuffer,charUnknown: Char,index: int) -> bool
Throws an exception because the input character cannot be encoded. Parameters specify the value
and index position of the character that cannot be converted.
charUnknown: An input character.
index: The index position of the character in the input buffer.
Returns: None. No value is returned because the
System.Text.EncoderExceptionFallbackBuffer.Fallback(System.Char,System.Int32) method always
throws an exception.
"""
pass
def GetNextChar(self):
"""
GetNextChar(self: EncoderExceptionFallbackBuffer) -> Char
Retrieves the next character in the exception fallback buffer.
Returns: The return value is always the Unicode character,NULL (U+0000). A return value is defined,
although it is unchanging,because this method implements an abstract method.
"""
pass
def MovePrevious(self):
"""
MovePrevious(self: EncoderExceptionFallbackBuffer) -> bool
Causes the next call to the System.Text.EncoderExceptionFallbackBuffer.GetNextChar method to
access the exception data buffer character position that is prior to the current position.
        Returns: The return value is always false. A return value is defined,although it is unchanging,because
this method implements an abstract method.
"""
pass
Remaining=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the number of characters in the current System.Text.EncoderExceptionFallbackBuffer object that remain to be processed.
Get: Remaining(self: EncoderExceptionFallbackBuffer) -> int
"""
class EncoderFallbackException(ArgumentException,ISerializable,_Exception):
"""
The exception that is thrown when an encoder fallback operation fails. This class cannot be inherited.
EncoderFallbackException()
EncoderFallbackException(message: str)
EncoderFallbackException(message: str,innerException: Exception)
"""
def add_SerializeObjectState(self,*args):
""" add_SerializeObjectState(self: Exception,value: EventHandler[SafeSerializationEventArgs]) """
pass
def IsUnknownSurrogate(self):
"""
IsUnknownSurrogate(self: EncoderFallbackException) -> bool
Indicates whether the input that caused the exception is a surrogate pair.
Returns: true if the input was a surrogate pair; otherwise,false.
"""
pass
def remove_SerializeObjectState(self,*args):
""" remove_SerializeObjectState(self: Exception,value: EventHandler[SafeSerializationEventArgs]) """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod
def __new__(self,message=None,innerException=None):
"""
__new__(cls: type)
__new__(cls: type,message: str)
__new__(cls: type,message: str,innerException: Exception)
"""
pass
def __reduce_ex__(self,*args):
pass
def __str__(self,*args):
pass
CharUnknown=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the input character that caused the exception.
Get: CharUnknown(self: EncoderFallbackException) -> Char
"""
CharUnknownHigh=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the high component character of the surrogate pair that caused the exception.
Get: CharUnknownHigh(self: EncoderFallbackException) -> Char
"""
CharUnknownLow=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the low component character of the surrogate pair that caused the exception.
Get: CharUnknownLow(self: EncoderFallbackException) -> Char
"""
Index=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the index position in the input buffer of the character that caused the exception.
Get: Index(self: EncoderFallbackException) -> int
"""
class EncoderReplacementFallback(EncoderFallback):
"""
    Provides a failure-handling mechanism,called a fallback,for an input character that cannot be converted to an output byte sequence. The fallback uses a user-specified replacement string instead of the original input character. This class cannot be inherited.
EncoderReplacementFallback()
EncoderReplacementFallback(replacement: str)
"""
def CreateFallbackBuffer(self):
"""
CreateFallbackBuffer(self: EncoderReplacementFallback) -> EncoderFallbackBuffer
Creates a System.Text.EncoderFallbackBuffer object that is initialized with the replacement
string of this System.Text.EncoderReplacementFallback object.
Returns: A System.Text.EncoderFallbackBuffer object equal to this System.Text.EncoderReplacementFallback
object.
"""
pass
def Equals(self,value):
"""
Equals(self: EncoderReplacementFallback,value: object) -> bool
Indicates whether the value of a specified object is equal to the
System.Text.EncoderReplacementFallback object.
value: A System.Text.EncoderReplacementFallback object.
Returns: true if the value parameter specifies an System.Text.EncoderReplacementFallback object and the
replacement string of that object is equal to the replacement string of this
System.Text.EncoderReplacementFallback object; otherwise,false.
"""
pass
def GetHashCode(self):
"""
GetHashCode(self: EncoderReplacementFallback) -> int
Retrieves the hash code for the value of the System.Text.EncoderReplacementFallback object.
Returns: The hash code of the value of the object.
"""
pass
def __eq__(self,*args):
""" x.__eq__(y) <==> x==y """
pass
@staticmethod
def __new__(self,replacement=None):
"""
__new__(cls: type)
__new__(cls: type,replacement: str)
"""
pass
def __ne__(self,*args):
pass
DefaultString=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the replacement string that is the value of the System.Text.EncoderReplacementFallback object.
Get: DefaultString(self: EncoderReplacementFallback) -> str
"""
MaxCharCount=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the number of characters in the replacement string for the System.Text.EncoderReplacementFallback object.
Get: MaxCharCount(self: EncoderReplacementFallback) -> int
"""
class EncoderReplacementFallbackBuffer(EncoderFallbackBuffer):
"""
Represents a substitute input string that is used when the original input character cannot be encoded. This class cannot be inherited.
EncoderReplacementFallbackBuffer(fallback: EncoderReplacementFallback)
"""
def Fallback(self,*__args):
"""
Fallback(self: EncoderReplacementFallbackBuffer,charUnknownHigh: Char,charUnknownLow: Char,index: int) -> bool
Indicates whether a replacement string can be used when an input surrogate pair cannot be
encoded,or whether the surrogate pair can be ignored. Parameters specify the surrogate pair and
the index position of the pair in the input.
charUnknownHigh: The high surrogate of the input pair.
charUnknownLow: The low surrogate of the input pair.
index: The index position of the surrogate pair in the input buffer.
Returns: true if the replacement string is not empty; false if the replacement string is empty.
Fallback(self: EncoderReplacementFallbackBuffer,charUnknown: Char,index: int) -> bool
Prepares the replacement fallback buffer to use the current replacement string.
charUnknown: An input character. This parameter is ignored in this operation unless an exception is thrown.
index: The index position of the character in the input buffer. This parameter is ignored in this
operation.
Returns: true if the replacement string is not empty; false if the replacement string is empty.
"""
pass
def GetNextChar(self):
"""
GetNextChar(self: EncoderReplacementFallbackBuffer) -> Char
Retrieves the next character in the replacement fallback buffer.
Returns: The next Unicode character in the replacement fallback buffer that the application can encode.
"""
pass
def MovePrevious(self):
"""
MovePrevious(self: EncoderReplacementFallbackBuffer) -> bool
Causes the next call to the System.Text.EncoderReplacementFallbackBuffer.GetNextChar method to
access the character position in the replacement fallback buffer prior to the current character
position.
Returns: true if the System.Text.EncoderReplacementFallbackBuffer.MovePrevious operation was successful;
otherwise,false.
"""
pass
def Reset(self):
"""
Reset(self: EncoderReplacementFallbackBuffer)
Initializes all internal state information and data in this instance of
System.Text.EncoderReplacementFallbackBuffer.
"""
pass
@staticmethod
def __new__(self,fallback):
""" __new__(cls: type,fallback: EncoderReplacementFallback) """
pass
Remaining=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the number of characters in the replacement fallback buffer that remain to be processed.
Get: Remaining(self: EncoderReplacementFallbackBuffer) -> int
"""
class EncodingInfo(object):
""" Provides basic information about an encoding. """
def Equals(self,value):
"""
Equals(self: EncodingInfo,value: object) -> bool
Gets a value indicating whether the specified object is equal to the current
System.Text.EncodingInfo object.
value: An object to compare to the current System.Text.EncodingInfo object.
Returns: true if value is a System.Text.EncodingInfo object and is equal to the current
System.Text.EncodingInfo object; otherwise,false.
"""
pass
def GetEncoding(self):
"""
GetEncoding(self: EncodingInfo) -> Encoding
Returns a System.Text.Encoding object that corresponds to the current System.Text.EncodingInfo
object.
Returns: A System.Text.Encoding object that corresponds to the current System.Text.EncodingInfo object.
"""
pass
def GetHashCode(self):
"""
GetHashCode(self: EncodingInfo) -> int
Returns the hash code for the current System.Text.EncodingInfo object.
Returns: A 32-bit signed integer hash code.
"""
pass
def __eq__(self,*args):
""" x.__eq__(y) <==> x==y """
pass
def __ne__(self,*args):
pass
CodePage=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the code page identifier of the encoding.
Get: CodePage(self: EncodingInfo) -> int
"""
DisplayName=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the human-readable description of the encoding.
Get: DisplayName(self: EncodingInfo) -> str
"""
Name=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the name registered with the Internet Assigned Numbers Authority (IANA) for the encoding.
Get: Name(self: EncodingInfo) -> str
"""
class EncodingProvider(object):
""" EncodingProvider() """
def GetEncoding(self,*__args):
"""
GetEncoding(self: EncodingProvider,name: str,encoderFallback: EncoderFallback,decoderFallback: DecoderFallback) -> Encoding
GetEncoding(self: EncodingProvider,codepage: int,encoderFallback: EncoderFallback,decoderFallback: DecoderFallback) -> Encoding
GetEncoding(self: EncodingProvider,name: str) -> Encoding
GetEncoding(self: EncodingProvider,codepage: int) -> Encoding
"""
pass
class NormalizationForm(Enum,IComparable,IFormattable,IConvertible):
"""
Defines the type of normalization to perform.
enum NormalizationForm,values: FormC (1),FormD (2),FormKC (5),FormKD (6)
"""
def __eq__(self,*args):
""" x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
pass
def __format__(self,*args):
""" __format__(formattable: IFormattable,format: str) -> str """
pass
def __ge__(self,*args):
pass
def __gt__(self,*args):
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __le__(self,*args):
pass
def __lt__(self,*args):
pass
def __ne__(self,*args):
pass
def __reduce_ex__(self,*args):
pass
def __str__(self,*args):
pass
FormC=None
FormD=None
FormKC=None
FormKD=None
value__=None
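
# --- Usage sketch (illustrative addition, not part of the generated stub) ---
# NormalizationForm values are consumed by System.String.Normalize and
# String.IsNormalized; IronPython strings are System.String instances, so the
# .NET members should be callable directly.
#
#   from System.Text import NormalizationForm
#   s = u"cafe\u0301"                          # 'e' followed by combining acute
#   composed = s.Normalize(NormalizationForm.FormC)
#   print(len(s), len(composed))               # 5 vs. 4 code units after composition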
class StringBuilder(object,ISerializable):
"""
Represents a mutable string of characters. This class cannot be inherited.
StringBuilder()
StringBuilder(capacity: int)
StringBuilder(value: str)
StringBuilder(value: str,capacity: int)
StringBuilder(value: str,startIndex: int,length: int,capacity: int)
StringBuilder(capacity: int,maxCapacity: int)
"""
def Append(self,value,*__args):
"""
Append(self: StringBuilder,value: Decimal) -> StringBuilder
Appends the string representation of a specified decimal number to this instance.
value: The value to append.
Returns: A reference to this instance after the append operation has completed.
Append(self: StringBuilder,value: UInt16) -> StringBuilder
Appends the string representation of a specified 16-bit unsigned integer to this instance.
value: The value to append.
Returns: A reference to this instance after the append operation has completed.
Append(self: StringBuilder,value: float) -> StringBuilder
Appends the string representation of a specified double-precision floating-point number to this
instance.
value: The value to append.
Returns: A reference to this instance after the append operation has completed.
Append(self: StringBuilder,value: Int64) -> StringBuilder
Appends the string representation of a specified 64-bit signed integer to this instance.
value: The value to append.
Returns: A reference to this instance after the append operation has completed.
Append(self: StringBuilder,value: Single) -> StringBuilder
Appends the string representation of a specified single-precision floating-point number to this
instance.
value: The value to append.
Returns: A reference to this instance after the append operation has completed.
Append(self: StringBuilder,value: Array[Char]) -> StringBuilder
Appends the string representation of the Unicode characters in a specified array to this
instance.
value: The array of characters to append.
Returns: A reference to this instance after the append operation has completed.
Append(self: StringBuilder,value: Char*,valueCount: int) -> StringBuilder
Append(self: StringBuilder,value: object) -> StringBuilder
Appends the string representation of a specified object to this instance.
value: The object to append.
Returns: A reference to this instance after the append operation has completed.
Append(self: StringBuilder,value: UInt32) -> StringBuilder
Appends the string representation of a specified 32-bit unsigned integer to this instance.
value: The value to append.
Returns: A reference to this instance after the append operation has completed.
Append(self: StringBuilder,value: UInt64) -> StringBuilder
Appends the string representation of a specified 64-bit unsigned integer to this instance.
value: The value to append.
Returns: A reference to this instance after the append operation has completed.
Append(self: StringBuilder,value: str,startIndex: int,count: int) -> StringBuilder
Appends a copy of a specified substring to this instance.
value: The string that contains the substring to append.
startIndex: The starting position of the substring within value.
count: The number of characters in value to append.
Returns: A reference to this instance after the append operation has completed.
Append(self: StringBuilder,value: bool) -> StringBuilder
Appends the string representation of a specified Boolean value to this instance.
value: The Boolean value to append.
Returns: A reference to this instance after the append operation has completed.
Append(self: StringBuilder,value: str) -> StringBuilder
Appends a copy of the specified string to this instance.
value: The string to append.
Returns: A reference to this instance after the append operation has completed.
Append(self: StringBuilder,value: Char,repeatCount: int) -> StringBuilder
Appends a specified number of copies of the string representation of a Unicode character to this
instance.
value: The character to append.
repeatCount: The number of times to append value.
Returns: A reference to this instance after the append operation has completed.
Append(self: StringBuilder,value: Array[Char],startIndex: int,charCount: int) -> StringBuilder
Appends the string representation of a specified subarray of Unicode characters to this instance.
value: A character array.
startIndex: The starting position in value.
charCount: The number of characters to append.
Returns: A reference to this instance after the append operation has completed.
Append(self: StringBuilder,value: Int16) -> StringBuilder
Appends the string representation of a specified 16-bit signed integer to this instance.
value: The value to append.
Returns: A reference to this instance after the append operation has completed.
Append(self: StringBuilder,value: int) -> StringBuilder
Appends the string representation of a specified 32-bit signed integer to this instance.
value: The value to append.
Returns: A reference to this instance after the append operation has completed.
Append(self: StringBuilder,value: Char) -> StringBuilder
Appends the string representation of a specified Unicode character to this instance.
value: The Unicode character to append.
Returns: A reference to this instance after the append operation has completed.
Append(self: StringBuilder,value: SByte) -> StringBuilder
Appends the string representation of a specified 8-bit signed integer to this instance.
value: The value to append.
Returns: A reference to this instance after the append operation has completed.
Append(self: StringBuilder,value: Byte) -> StringBuilder
Appends the string representation of a specified 8-bit unsigned integer to this instance.
value: The value to append.
Returns: A reference to this instance after the append operation has completed.
"""
pass
def AppendFormat(self,*__args):
"""
AppendFormat(self: StringBuilder,provider: IFormatProvider,format: str,arg0: object,arg1: object) -> StringBuilder
AppendFormat(self: StringBuilder,provider: IFormatProvider,format: str,arg0: object) -> StringBuilder
AppendFormat(self: StringBuilder,provider: IFormatProvider,format: str,*args: Array[object]) -> StringBuilder
Appends the string returned by processing a composite format string,which contains zero or more
format items,to this instance. Each format item is replaced by the string representation of a
corresponding argument in a parameter array using a specified format provider.
provider: An object that supplies culture-specific formatting information.
format: A composite format string (see Remarks).
args: An array of objects to format.
Returns: A reference to this instance after the append operation has completed. After the append
operation,this instance contains any data that existed before the operation,suffixed by a copy
of format where any format specification is replaced by the string representation of the
corresponding object argument.
AppendFormat(self: StringBuilder,provider: IFormatProvider,format: str,arg0: object,arg1: object,arg2: object) -> StringBuilder
AppendFormat(self: StringBuilder,format: str,arg0: object,arg1: object) -> StringBuilder
Appends the string returned by processing a composite format string,which contains zero or more
format items,to this instance. Each format item is replaced by the string representation of
either of two arguments.
format: A composite format string (see Remarks).
arg0: The first object to format.
arg1: The second object to format.
Returns: A reference to this instance with format appended. Each format item in format is replaced by the
string representation of the corresponding object argument.
AppendFormat(self: StringBuilder,format: str,arg0: object) -> StringBuilder
Appends the string returned by processing a composite format string,which contains zero or more
format items,to this instance. Each format item is replaced by the string representation of a
single argument.
format: A composite format string (see Remarks).
arg0: An object to format.
Returns: A reference to this instance with format appended. Each format item in format is replaced by the
string representation of arg0.
AppendFormat(self: StringBuilder,format: str,*args: Array[object]) -> StringBuilder
Appends the string returned by processing a composite format string,which contains zero or more
format items,to this instance. Each format item is replaced by the string representation of a
corresponding argument in a parameter array.
format: A composite format string (see Remarks).
args: An array of objects to format.
Returns: A reference to this instance with format appended. Each format item in format is replaced by the
string representation of the corresponding object argument.
AppendFormat(self: StringBuilder,format: str,arg0: object,arg1: object,arg2: object) -> StringBuilder
Appends the string returned by processing a composite format string,which contains zero or more
format items,to this instance. Each format item is replaced by the string representation of
either of three arguments.
format: A composite format string (see Remarks).
arg0: The first object to format.
arg1: The second object to format.
arg2: The third object to format.
Returns: A reference to this instance with format appended. Each format item in format is replaced by the
string representation of the corresponding object argument.
"""
pass
def AppendLine(self,value=None):
"""
AppendLine(self: StringBuilder,value: str) -> StringBuilder
Appends a copy of the specified string followed by the default line terminator to the end of the
current System.Text.StringBuilder object.
value: The string to append.
Returns: A reference to this instance after the append operation has completed.
AppendLine(self: StringBuilder) -> StringBuilder
Appends the default line terminator to the end of the current System.Text.StringBuilder object.
Returns: A reference to this instance after the append operation has completed.
"""
pass
def Clear(self):
"""
Clear(self: StringBuilder) -> StringBuilder
Removes all characters from the current System.Text.StringBuilder instance.
Returns: An object whose System.Text.StringBuilder.Length is 0 (zero).
"""
pass
def CopyTo(self,sourceIndex,destination,destinationIndex,count):
"""
CopyTo(self: StringBuilder,sourceIndex: int,destination: Array[Char],destinationIndex: int,count: int)
Copies the characters from a specified segment of this instance to a specified segment of a
destination System.Char array.
sourceIndex: The starting position in this instance where characters will be copied from. The index is
zero-based.
destination: The array where characters will be copied.
destinationIndex: The starting position in destination where characters will be copied. The index is zero-based.
count: The number of characters to be copied.
"""
pass
def EnsureCapacity(self,capacity):
"""
EnsureCapacity(self: StringBuilder,capacity: int) -> int
Ensures that the capacity of this instance of System.Text.StringBuilder is at least the
specified value.
capacity: The minimum capacity to ensure.
Returns: The new capacity of this instance.
"""
pass
def Equals(self,*__args):
"""
Equals(self: StringBuilder,sb: StringBuilder) -> bool
Returns a value indicating whether this instance is equal to a specified object.
sb: An object to compare with this instance,or null.
Returns: true if this instance and sb have equal string,System.Text.StringBuilder.Capacity,and
System.Text.StringBuilder.MaxCapacity values; otherwise,false.
"""
pass
def Insert(self,index,value,*__args):
"""
Insert(self: StringBuilder,index: int,value: Single) -> StringBuilder
Inserts the string representation of a single-precision floating point number into this instance
at the specified character position.
index: The position in this instance where insertion begins.
value: The value to insert.
Returns: A reference to this instance after the insert operation has completed.
Insert(self: StringBuilder,index: int,value: float) -> StringBuilder
Inserts the string representation of a double-precision floating-point number into this instance
at the specified character position.
index: The position in this instance where insertion begins.
value: The value to insert.
Returns: A reference to this instance after the insert operation has completed.
Insert(self: StringBuilder,index: int,value: int) -> StringBuilder
Inserts the string representation of a specified 32-bit signed integer into this instance at the
specified character position.
index: The position in this instance where insertion begins.
value: The value to insert.
Returns: A reference to this instance after the insert operation has completed.
Insert(self: StringBuilder,index: int,value: Int64) -> StringBuilder
Inserts the string representation of a 64-bit signed integer into this instance at the specified
character position.
index: The position in this instance where insertion begins.
value: The value to insert.
Returns: A reference to this instance after the insert operation has completed.
Insert(self: StringBuilder,index: int,value: Decimal) -> StringBuilder
Inserts the string representation of a decimal number into this instance at the specified
character position.
index: The position in this instance where insertion begins.
value: The value to insert.
Returns: A reference to this instance after the insert operation has completed.
Insert(self: StringBuilder,index: int,value: UInt64) -> StringBuilder
Inserts the string representation of a 64-bit unsigned integer into this instance at the
specified character position.
index: The position in this instance where insertion begins.
value: The value to insert.
Returns: A reference to this instance after the insert operation has completed.
Insert(self: StringBuilder,index: int,value: object) -> StringBuilder
Inserts the string representation of an object into this instance at the specified character
position.
index: The position in this instance where insertion begins.
value: The object to insert,or null.
Returns: A reference to this instance after the insert operation has completed.
Insert(self: StringBuilder,index: int,value: UInt16) -> StringBuilder
Inserts the string representation of a 16-bit unsigned integer into this instance at the
specified character position.
index: The position in this instance where insertion begins.
value: The value to insert.
Returns: A reference to this instance after the insert operation has completed.
Insert(self: StringBuilder,index: int,value: UInt32) -> StringBuilder
Inserts the string representation of a 32-bit unsigned integer into this instance at the
specified character position.
index: The position in this instance where insertion begins.
value: The value to insert.
Returns: A reference to this instance after the insert operation has completed.
Insert(self: StringBuilder,index: int,value: bool) -> StringBuilder
Inserts the string representation of a Boolean value into this instance at the specified
character position.
index: The position in this instance where insertion begins.
value: The value to insert.
Returns: A reference to this instance after the insert operation has completed.
Insert(self: StringBuilder,index: int,value: SByte) -> StringBuilder
Inserts the string representation of a specified 8-bit signed integer into this instance at the
specified character position.
index: The position in this instance where insertion begins.
value: The value to insert.
Returns: A reference to this instance after the insert operation has completed.
Insert(self: StringBuilder,index: int,value: str,count: int) -> StringBuilder
Inserts one or more copies of a specified string into this instance at the specified character
position.
index: The position in this instance where insertion begins.
value: The string to insert.
count: The number of times to insert value.
Returns: A reference to this instance after insertion has completed.
Insert(self: StringBuilder,index: int,value: str) -> StringBuilder
Inserts a string into this instance at the specified character position.
index: The position in this instance where insertion begins.
value: The string to insert.
Returns: A reference to this instance after the insert operation has completed.
Insert(self: StringBuilder,index: int,value: Byte) -> StringBuilder
Inserts the string representation of a specified 8-bit unsigned integer into this instance at
the specified character position.
index: The position in this instance where insertion begins.
value: The value to insert.
Returns: A reference to this instance after the insert operation has completed.
Insert(self: StringBuilder,index: int,value: Array[Char]) -> StringBuilder
Inserts the string representation of a specified array of Unicode characters into this instance
at the specified character position.
index: The position in this instance where insertion begins.
value: The character array to insert.
Returns: A reference to this instance after the insert operation has completed.
Insert(self: StringBuilder,index: int,value: Array[Char],startIndex: int,charCount: int) -> StringBuilder
Inserts the string representation of a specified subarray of Unicode characters into this
instance at the specified character position.
index: The position in this instance where insertion begins.
value: A character array.
startIndex: The starting index within value.
charCount: The number of characters to insert.
Returns: A reference to this instance after the insert operation has completed.
Insert(self: StringBuilder,index: int,value: Int16) -> StringBuilder
Inserts the string representation of a specified 16-bit signed integer into this instance at the
specified character position.
index: The position in this instance where insertion begins.
value: The value to insert.
Returns: A reference to this instance after the insert operation has completed.
Insert(self: StringBuilder,index: int,value: Char) -> StringBuilder
Inserts the string representation of a specified Unicode character into this instance at the
specified character position.
index: The position in this instance where insertion begins.
value: The value to insert.
Returns: A reference to this instance after the insert operation has completed.
"""
pass
def Remove(self,startIndex,length):
"""
Remove(self: StringBuilder,startIndex: int,length: int) -> StringBuilder
Removes the specified range of characters from this instance.
startIndex: The zero-based position in this instance where removal begins.
length: The number of characters to remove.
Returns: A reference to this instance after the excise operation has completed.
"""
pass
def Replace(self,*__args):
"""
Replace(self: StringBuilder,oldChar: Char,newChar: Char) -> StringBuilder
Replaces all occurrences of a specified character in this instance with another specified
character.
oldChar: The character to replace.
newChar: The character that replaces oldChar.
Returns: A reference to this instance with oldChar replaced by newChar.
Replace(self: StringBuilder,oldChar: Char,newChar: Char,startIndex: int,count: int) -> StringBuilder
Replaces,within a substring of this instance,all occurrences of a specified character with
another specified character.
oldChar: The character to replace.
newChar: The character that replaces oldChar.
startIndex: The position in this instance where the substring begins.
count: The length of the substring.
Returns: A reference to this instance with oldChar replaced by newChar in the range from startIndex to
   startIndex + count - 1.
Replace(self: StringBuilder,oldValue: str,newValue: str) -> StringBuilder
Replaces all occurrences of a specified string in this instance with another specified string.
oldValue: The string to replace.
newValue: The string that replaces oldValue,or null.
Returns: A reference to this instance with all instances of oldValue replaced by newValue.
Replace(self: StringBuilder,oldValue: str,newValue: str,startIndex: int,count: int) -> StringBuilder
Replaces,within a substring of this instance,all occurrences of a specified string with
another specified string.
oldValue: The string to replace.
newValue: The string that replaces oldValue,or null.
startIndex: The position in this instance where the substring begins.
count: The length of the substring.
Returns: A reference to this instance with all instances of oldValue replaced by newValue in the range
from startIndex to startIndex + count - 1.
"""
pass
def ToString(self,startIndex=None,length=None):
"""
ToString(self: StringBuilder,startIndex: int,length: int) -> str
Converts the value of a substring of this instance to a System.String.
startIndex: The starting position of the substring in this instance.
length: The length of the substring.
Returns: A string whose value is the same as the specified substring of this instance.
ToString(self: StringBuilder) -> str
Converts the value of this instance to a System.String.
Returns: A string whose value is the same as this instance.
"""
pass
def __eq__(self,*args):
""" x.__eq__(y) <==> x==y """
pass
def __getitem__(self,*args):
""" x.__getitem__(y) <==> x[y] """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod
def __new__(self,*__args):
"""
__new__(cls: type)
__new__(cls: type,capacity: int)
__new__(cls: type,value: str)
__new__(cls: type,value: str,capacity: int)
__new__(cls: type,value: str,startIndex: int,length: int,capacity: int)
__new__(cls: type,capacity: int,maxCapacity: int)
"""
pass
def __reduce_ex__(self,*args):
pass
def __repr__(self,*args):
""" __repr__(self: object) -> str """
pass
def __setitem__(self,*args):
""" x.__setitem__(i,y) <==> x[i]= """
pass
def __str__(self,*args):
pass
Capacity=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the maximum number of characters that can be contained in the memory allocated by the current instance.
Get: Capacity(self: StringBuilder) -> int
Set: Capacity(self: StringBuilder)=value
"""
Length=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the length of the current System.Text.StringBuilder object.
Get: Length(self: StringBuilder) -> int
Set: Length(self: StringBuilder)=value
"""
MaxCapacity=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the maximum capacity of this instance.
Get: MaxCapacity(self: StringBuilder) -> int
"""
class UnicodeEncoding(Encoding,ICloneable):
"""
Represents a UTF-16 encoding of Unicode characters.
UnicodeEncoding(bigEndian: bool,byteOrderMark: bool,throwOnInvalidBytes: bool)
UnicodeEncoding()
UnicodeEncoding(bigEndian: bool,byteOrderMark: bool)
"""
def Equals(self,value):
"""
Equals(self: UnicodeEncoding,value: object) -> bool
Determines whether the specified System.Object is equal to the current
System.Text.UnicodeEncoding object.
value: The System.Object to compare with the current object.
Returns: true if value is an instance of System.Text.UnicodeEncoding and is equal to the current object;
otherwise,false.
"""
pass
def GetByteCount(self,*__args):
"""
GetByteCount(self: UnicodeEncoding,chars: Char*,count: int) -> int
Calculates the number of bytes produced by encoding a set of characters starting at the
specified character pointer.
chars: A pointer to the first character to encode.
count: The number of characters to encode.
Returns: The number of bytes produced by encoding the specified characters.
GetByteCount(self: UnicodeEncoding,s: str) -> int
Calculates the number of bytes produced by encoding the characters in the specified
System.String.
s: The System.String containing the set of characters to encode.
Returns: The number of bytes produced by encoding the specified characters.
GetByteCount(self: UnicodeEncoding,chars: Array[Char],index: int,count: int) -> int
Calculates the number of bytes produced by encoding a set of characters from the specified
character array.
chars: The character array containing the set of characters to encode.
index: The index of the first character to encode.
count: The number of characters to encode.
Returns: The number of bytes produced by encoding the specified characters.
"""
pass
def GetBytes(self,*__args):
"""
GetBytes(self: UnicodeEncoding,chars: Char*,charCount: int,bytes: Byte*,byteCount: int) -> int
Encodes a set of characters starting at the specified character pointer into a sequence of bytes
that are stored starting at the specified byte pointer.
chars: A pointer to the first character to encode.
charCount: The number of characters to encode.
bytes: A pointer to the location at which to start writing the resulting sequence of bytes.
byteCount: The maximum number of bytes to write.
Returns: The actual number of bytes written at the location indicated by the bytes parameter.
GetBytes(self: UnicodeEncoding,chars: Array[Char],charIndex: int,charCount: int,bytes: Array[Byte],byteIndex: int) -> int
Encodes a set of characters from the specified character array into the specified byte array.
chars: The character array containing the set of characters to encode.
charIndex: The index of the first character to encode.
charCount: The number of characters to encode.
bytes: The byte array to contain the resulting sequence of bytes.
byteIndex: The index at which to start writing the resulting sequence of bytes.
Returns: The actual number of bytes written into bytes.
GetBytes(self: UnicodeEncoding,s: str,charIndex: int,charCount: int,bytes: Array[Byte],byteIndex: int) -> int
Encodes a set of characters from the specified System.String into the specified byte array.
s: The System.String containing the set of characters to encode.
charIndex: The index of the first character to encode.
charCount: The number of characters to encode.
bytes: The byte array to contain the resulting sequence of bytes.
byteIndex: The index at which to start writing the resulting sequence of bytes.
Returns: The actual number of bytes written into bytes.
"""
pass
def GetCharCount(self,bytes,*__args):
"""
GetCharCount(self: UnicodeEncoding,bytes: Byte*,count: int) -> int
Calculates the number of characters produced by decoding a sequence of bytes starting at the
specified byte pointer.
bytes: A pointer to the first byte to decode.
count: The number of bytes to decode.
Returns: The number of characters produced by decoding the specified sequence of bytes.
GetCharCount(self: UnicodeEncoding,bytes: Array[Byte],index: int,count: int) -> int
Calculates the number of characters produced by decoding a sequence of bytes from the specified
byte array.
bytes: The byte array containing the sequence of bytes to decode.
index: The index of the first byte to decode.
count: The number of bytes to decode.
Returns: The number of characters produced by decoding the specified sequence of bytes.
"""
pass
def GetChars(self,bytes,*__args):
"""
GetChars(self: UnicodeEncoding,bytes: Byte*,byteCount: int,chars: Char*,charCount: int) -> int
Decodes a sequence of bytes starting at the specified byte pointer into a set of characters that
are stored starting at the specified character pointer.
bytes: A pointer to the first byte to decode.
byteCount: The number of bytes to decode.
chars: A pointer to the location at which to start writing the resulting set of characters.
charCount: The maximum number of characters to write.
Returns: The actual number of characters written at the location indicated by the chars parameter.
GetChars(self: UnicodeEncoding,bytes: Array[Byte],byteIndex: int,byteCount: int,chars: Array[Char],charIndex: int) -> int
Decodes a sequence of bytes from the specified byte array into the specified character array.
bytes: The byte array containing the sequence of bytes to decode.
byteIndex: The index of the first byte to decode.
byteCount: The number of bytes to decode.
chars: The character array to contain the resulting set of characters.
charIndex: The index at which to start writing the resulting set of characters.
Returns: The actual number of characters written into chars.
"""
pass
def GetDecoder(self):
"""
GetDecoder(self: UnicodeEncoding) -> Decoder
Obtains a decoder that converts a UTF-16 encoded sequence of bytes into a sequence of Unicode
characters.
Returns: A System.Text.Decoder that converts a UTF-16 encoded sequence of bytes into a sequence of
Unicode characters.
"""
pass
def GetEncoder(self):
"""
GetEncoder(self: UnicodeEncoding) -> Encoder
Obtains an encoder that converts a sequence of Unicode characters into a UTF-16 encoded sequence
of bytes.
Returns: A System.Text.Encoder object that converts a sequence of Unicode characters into a UTF-16
encoded sequence of bytes.
"""
pass
def GetHashCode(self):
"""
GetHashCode(self: UnicodeEncoding) -> int
Returns the hash code for the current instance.
Returns: The hash code for the current System.Text.UnicodeEncoding object.
"""
pass
def GetMaxByteCount(self,charCount):
"""
GetMaxByteCount(self: UnicodeEncoding,charCount: int) -> int
Calculates the maximum number of bytes produced by encoding the specified number of characters.
charCount: The number of characters to encode.
Returns: The maximum number of bytes produced by encoding the specified number of characters.
"""
pass
def GetMaxCharCount(self,byteCount):
"""
GetMaxCharCount(self: UnicodeEncoding,byteCount: int) -> int
Calculates the maximum number of characters produced by decoding the specified number of bytes.
byteCount: The number of bytes to decode.
Returns: The maximum number of characters produced by decoding the specified number of bytes.
"""
pass
def GetPreamble(self):
"""
GetPreamble(self: UnicodeEncoding) -> Array[Byte]
Returns a Unicode byte order mark encoded in UTF-16 format,if the constructor for this instance
requests a byte order mark.
Returns: A byte array containing the Unicode byte order mark,if the constructor for this instance
requests a byte order mark. Otherwise,this method returns a byte array of length zero.
"""
pass
def GetString(self,bytes,*__args):
"""
GetString(self: UnicodeEncoding,bytes: Array[Byte],index: int,count: int) -> str
Decodes a range of bytes from a byte array into a string.
bytes: The byte array containing the sequence of bytes to decode.
index: The index of the first byte to decode.
count: The number of bytes to decode.
Returns: A System.String object containing the results of decoding the specified sequence of bytes.
"""
pass
def __eq__(self,*args):
""" x.__eq__(y) <==> x==y """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod
def __new__(self,bigEndian=None,byteOrderMark=None,throwOnInvalidBytes=None):
"""
__new__(cls: type)
__new__(cls: type,bigEndian: bool,byteOrderMark: bool)
__new__(cls: type,bigEndian: bool,byteOrderMark: bool,throwOnInvalidBytes: bool)
"""
pass
def __ne__(self,*args):
pass
def __reduce_ex__(self,*args):
pass
CharSize=2
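
# --- Usage sketch (illustrative addition, not part of the generated stub) ---
# Round-tripping text through UTF-16; the constructor flags used here
# (little-endian, emit a BOM, throw on invalid bytes) are one common setup.
#
#   from System.Text import UnicodeEncoding
#   utf16 = UnicodeEncoding(False, True, True)
#   raw = utf16.GetBytes(u"hi")                  # 4 bytes: CharSize (2) per char
#   print(utf16.GetString(raw, 0, raw.Length))   # -> "hi"
#   print(list(utf16.GetPreamble()))             # little-endian BOM: [255, 254]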
class UTF32Encoding(Encoding,ICloneable):
"""
Represents a UTF-32 encoding of Unicode characters.
UTF32Encoding()
UTF32Encoding(bigEndian: bool,byteOrderMark: bool)
UTF32Encoding(bigEndian: bool,byteOrderMark: bool,throwOnInvalidCharacters: bool)
"""
def Equals(self,value):
"""
Equals(self: UTF32Encoding,value: object) -> bool
Determines whether the specified System.Object is equal to the current System.Text.UTF32Encoding
object.
value: The System.Object to compare with the current object.
Returns: true if value is an instance of System.Text.UTF32Encoding and is equal to the current object;
otherwise,false.
"""
pass
def GetByteCount(self,*__args):
"""
GetByteCount(self: UTF32Encoding,chars: Char*,count: int) -> int
Calculates the number of bytes produced by encoding a set of characters starting at the
specified character pointer.
chars: A pointer to the first character to encode.
count: The number of characters to encode.
Returns: The number of bytes produced by encoding the specified characters.
GetByteCount(self: UTF32Encoding,s: str) -> int
Calculates the number of bytes produced by encoding the characters in the specified
System.String.
s: The System.String containing the set of characters to encode.
Returns: The number of bytes produced by encoding the specified characters.
GetByteCount(self: UTF32Encoding,chars: Array[Char],index: int,count: int) -> int
Calculates the number of bytes produced by encoding a set of characters from the specified
character array.
chars: The character array containing the set of characters to encode.
index: The index of the first character to encode.
count: The number of characters to encode.
Returns: The number of bytes produced by encoding the specified characters.
"""
pass
def GetBytes(self,*__args):
"""
GetBytes(self: UTF32Encoding,chars: Char*,charCount: int,bytes: Byte*,byteCount: int) -> int
Encodes a set of characters starting at the specified character pointer into a sequence of bytes
that are stored starting at the specified byte pointer.
chars: A pointer to the first character to encode.
charCount: The number of characters to encode.
bytes: A pointer to the location at which to start writing the resulting sequence of bytes.
byteCount: The maximum number of bytes to write.
Returns: The actual number of bytes written at the location indicated by the bytes parameter.
GetBytes(self: UTF32Encoding,chars: Array[Char],charIndex: int,charCount: int,bytes: Array[Byte],byteIndex: int) -> int
Encodes a set of characters from the specified character array into the specified byte array.
chars: The character array containing the set of characters to encode.
charIndex: The index of the first character to encode.
charCount: The number of characters to encode.
bytes: The byte array to contain the resulting sequence of bytes.
byteIndex: The index at which to start writing the resulting sequence of bytes.
Returns: The actual number of bytes written into bytes.
GetBytes(self: UTF32Encoding,s: str,charIndex: int,charCount: int,bytes: Array[Byte],byteIndex: int) -> int
Encodes a set of characters from the specified System.String into the specified byte array.
s: The System.String containing the set of characters to encode.
charIndex: The index of the first character to encode.
charCount: The number of characters to encode.
bytes: The byte array to contain the resulting sequence of bytes.
byteIndex: The index at which to start writing the resulting sequence of bytes.
Returns: The actual number of bytes written into bytes.
"""
pass
def GetCharCount(self,bytes,*__args):
"""
GetCharCount(self: UTF32Encoding,bytes: Byte*,count: int) -> int
Calculates the number of characters produced by decoding a sequence of bytes starting at the
specified byte pointer.
bytes: A pointer to the first byte to decode.
count: The number of bytes to decode.
Returns: The number of characters produced by decoding the specified sequence of bytes.
GetCharCount(self: UTF32Encoding,bytes: Array[Byte],index: int,count: int) -> int
Calculates the number of characters produced by decoding a sequence of bytes from the specified
byte array.
bytes: The byte array containing the sequence of bytes to decode.
index: The index of the first byte to decode.
count: The number of bytes to decode.
Returns: The number of characters produced by decoding the specified sequence of bytes.
"""
pass
def GetChars(self,bytes,*__args):
"""
GetChars(self: UTF32Encoding,bytes: Byte*,byteCount: int,chars: Char*,charCount: int) -> int
Decodes a sequence of bytes starting at the specified byte pointer into a set of characters that
are stored starting at the specified character pointer.
bytes: A pointer to the first byte to decode.
byteCount: The number of bytes to decode.
chars: A pointer to the location at which to start writing the resulting set of characters.
charCount: The maximum number of characters to write.
Returns: The actual number of characters written at the location indicated by chars.
GetChars(self: UTF32Encoding,bytes: Array[Byte],byteIndex: int,byteCount: int,chars: Array[Char],charIndex: int) -> int
Decodes a sequence of bytes from the specified byte array into the specified character array.
bytes: The byte array containing the sequence of bytes to decode.
byteIndex: The index of the first byte to decode.
byteCount: The number of bytes to decode.
chars: The character array to contain the resulting set of characters.
charIndex: The index at which to start writing the resulting set of characters.
Returns: The actual number of characters written into chars.
"""
pass
def GetDecoder(self):
"""
GetDecoder(self: UTF32Encoding) -> Decoder
Obtains a decoder that converts a UTF-32 encoded sequence of bytes into a sequence of Unicode
characters.
Returns: A System.Text.Decoder that converts a UTF-32 encoded sequence of bytes into a sequence of
Unicode characters.
"""
pass
def GetEncoder(self):
"""
GetEncoder(self: UTF32Encoding) -> Encoder
Obtains an encoder that converts a sequence of Unicode characters into a UTF-32 encoded sequence
of bytes.
Returns: A System.Text.Encoder that converts a sequence of Unicode characters into a UTF-32 encoded
sequence of bytes.
"""
pass
def GetHashCode(self):
"""
GetHashCode(self: UTF32Encoding) -> int
Returns the hash code for the current instance.
Returns: The hash code for the current System.Text.UTF32Encoding object.
"""
pass
def GetMaxByteCount(self,charCount):
"""
GetMaxByteCount(self: UTF32Encoding,charCount: int) -> int
Calculates the maximum number of bytes produced by encoding the specified number of characters.
charCount: The number of characters to encode.
Returns: The maximum number of bytes produced by encoding the specified number of characters.
"""
pass
def GetMaxCharCount(self,byteCount):
"""
GetMaxCharCount(self: UTF32Encoding,byteCount: int) -> int
Calculates the maximum number of characters produced by decoding the specified number of bytes.
byteCount: The number of bytes to decode.
Returns: The maximum number of characters produced by decoding the specified number of bytes.
"""
pass
def GetPreamble(self):
"""
GetPreamble(self: UTF32Encoding) -> Array[Byte]
Returns a Unicode byte order mark encoded in UTF-32 format,if the constructor for this instance
requests a byte order mark.
Returns: A byte array containing the Unicode byte order mark,if the constructor for this instance
requests a byte order mark. Otherwise,this method returns a byte array of length zero.
"""
pass
def GetString(self,bytes,*__args):
"""
GetString(self: UTF32Encoding,bytes: Array[Byte],index: int,count: int) -> str
Decodes a range of bytes from a byte array into a string.
bytes: The byte array containing the sequence of bytes to decode.
index: The index of the first byte to decode.
count: The number of bytes to decode.
Returns: A System.String containing the results of decoding the specified sequence of bytes.
"""
pass
def __eq__(self,*args):
""" x.__eq__(y) <==> x==y """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod
def __new__(self,bigEndian=None,byteOrderMark=None,throwOnInvalidCharacters=None):
"""
__new__(cls: type)
__new__(cls: type,bigEndian: bool,byteOrderMark: bool)
__new__(cls: type,bigEndian: bool,byteOrderMark: bool,throwOnInvalidCharacters: bool)
"""
pass
def __ne__(self,*args):
pass
def __reduce_ex__(self,*args):
pass
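
# --- Usage sketch (illustrative addition, not part of the generated stub) ---
# UTF-32 is fixed-width (four bytes per code point), so byte counts are easy
# to predict; the default constructor requests a little-endian BOM.
#
#   from System.Text import UTF32Encoding
#   utf32 = UTF32Encoding()
#   print(utf32.GetByteCount(u"abc"))    # -> 12
#   print(list(utf32.GetPreamble()))     # -> [255, 254, 0, 0]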
class UTF7Encoding(Encoding,ICloneable):
"""
Represents a UTF-7 encoding of Unicode characters.
UTF7Encoding()
UTF7Encoding(allowOptionals: bool)
"""
def Equals(self,value):
"""
Equals(self: UTF7Encoding,value: object) -> bool
Gets a value indicating whether the specified object is equal to the current
System.Text.UTF7Encoding object.
value: An object to compare to the current System.Text.UTF7Encoding object.
Returns: true if value is a System.Text.UTF7Encoding object and is equal to the current
System.Text.UTF7Encoding object; otherwise,false.
"""
pass
def GetByteCount(self,*__args):
"""
GetByteCount(self: UTF7Encoding,chars: Char*,count: int) -> int
Calculates the number of bytes produced by encoding a set of characters starting at the
specified character pointer.
chars: A pointer to the first character to encode.
count: The number of characters to encode.
Returns: The number of bytes produced by encoding the specified characters.
GetByteCount(self: UTF7Encoding,s: str) -> int
Calculates the number of bytes produced by encoding the characters in the specified
System.String object.
s: The System.String object containing the set of characters to encode.
Returns: The number of bytes produced by encoding the specified characters.
GetByteCount(self: UTF7Encoding,chars: Array[Char],index: int,count: int) -> int
Calculates the number of bytes produced by encoding a set of characters from the specified
character array.
chars: The character array containing the set of characters to encode.
index: The index of the first character to encode.
count: The number of characters to encode.
Returns: The number of bytes produced by encoding the specified characters.
"""
pass
def GetBytes(self,*__args):
"""
GetBytes(self: UTF7Encoding,chars: Char*,charCount: int,bytes: Byte*,byteCount: int) -> int
Encodes a set of characters starting at the specified character pointer into a sequence of bytes
that are stored starting at the specified byte pointer.
chars: A pointer to the first character to encode.
charCount: The number of characters to encode.
bytes: A pointer to the location at which to start writing the resulting sequence of bytes.
byteCount: The maximum number of bytes to write.
Returns: The actual number of bytes written at the location indicated by bytes.
GetBytes(self: UTF7Encoding,chars: Array[Char],charIndex: int,charCount: int,bytes: Array[Byte],byteIndex: int) -> int
Encodes a set of characters from the specified character array into the specified byte array.
chars: The character array containing the set of characters to encode.
charIndex: The index of the first character to encode.
charCount: The number of characters to encode.
bytes: The byte array to contain the resulting sequence of bytes.
byteIndex: The index at which to start writing the resulting sequence of bytes.
Returns: The actual number of bytes written into bytes.
GetBytes(self: UTF7Encoding,s: str,charIndex: int,charCount: int,bytes: Array[Byte],byteIndex: int) -> int
Encodes a set of characters from the specified System.String into the specified byte array.
s: The System.String containing the set of characters to encode.
charIndex: The index of the first character to encode.
charCount: The number of characters to encode.
bytes: The byte array to contain the resulting sequence of bytes.
byteIndex: The index at which to start writing the resulting sequence of bytes.
Returns: The actual number of bytes written into bytes.
"""
pass
def GetCharCount(self,bytes,*__args):
"""
GetCharCount(self: UTF7Encoding,bytes: Byte*,count: int) -> int
Calculates the number of characters produced by decoding a sequence of bytes starting at the
specified byte pointer.
bytes: A pointer to the first byte to decode.
count: The number of bytes to decode.
Returns: The number of characters produced by decoding the specified sequence of bytes.
GetCharCount(self: UTF7Encoding,bytes: Array[Byte],index: int,count: int) -> int
Calculates the number of characters produced by decoding a sequence of bytes from the specified
byte array.
bytes: The byte array containing the sequence of bytes to decode.
index: The index of the first byte to decode.
count: The number of bytes to decode.
Returns: The number of characters produced by decoding the specified sequence of bytes.
"""
pass
def GetChars(self,bytes,*__args):
"""
GetChars(self: UTF7Encoding,bytes: Byte*,byteCount: int,chars: Char*,charCount: int) -> int
Decodes a sequence of bytes starting at the specified byte pointer into a set of characters that
are stored starting at the specified character pointer.
bytes: A pointer to the first byte to decode.
byteCount: The number of bytes to decode.
chars: A pointer to the location at which to start writing the resulting set of characters.
charCount: The maximum number of characters to write.
Returns: The actual number of characters written at the location indicated by chars.
GetChars(self: UTF7Encoding,bytes: Array[Byte],byteIndex: int,byteCount: int,chars: Array[Char],charIndex: int) -> int
Decodes a sequence of bytes from the specified byte array into the specified character array.
bytes: The byte array containing the sequence of bytes to decode.
byteIndex: The index of the first byte to decode.
byteCount: The number of bytes to decode.
chars: The character array to contain the resulting set of characters.
charIndex: The index at which to start writing the resulting set of characters.
Returns: The actual number of characters written into chars.
"""
pass
def GetDecoder(self):
"""
GetDecoder(self: UTF7Encoding) -> Decoder
Obtains a decoder that converts a UTF-7 encoded sequence of bytes into a sequence of Unicode
characters.
Returns: A System.Text.Decoder that converts a UTF-7 encoded sequence of bytes into a sequence of Unicode
characters.
"""
pass
def GetEncoder(self):
"""
GetEncoder(self: UTF7Encoding) -> Encoder
Obtains an encoder that converts a sequence of Unicode characters into a UTF-7 encoded sequence
of bytes.
Returns: A System.Text.Encoder that converts a sequence of Unicode characters into a UTF-7 encoded
sequence of bytes.
"""
pass
def GetHashCode(self):
"""
GetHashCode(self: UTF7Encoding) -> int
Returns the hash code for the current System.Text.UTF7Encoding object.
Returns: A 32-bit signed integer hash code.
"""
pass
def GetMaxByteCount(self,charCount):
"""
GetMaxByteCount(self: UTF7Encoding,charCount: int) -> int
Calculates the maximum number of bytes produced by encoding the specified number of characters.
charCount: The number of characters to encode.
Returns: The maximum number of bytes produced by encoding the specified number of characters.
"""
pass
def GetMaxCharCount(self,byteCount):
"""
GetMaxCharCount(self: UTF7Encoding,byteCount: int) -> int
Calculates the maximum number of characters produced by decoding the specified number of bytes.
byteCount: The number of bytes to decode.
Returns: The maximum number of characters produced by decoding the specified number of bytes.
"""
pass
def GetString(self,bytes,*__args):
"""
GetString(self: UTF7Encoding,bytes: Array[Byte],index: int,count: int) -> str
Decodes a range of bytes from a byte array into a string.
bytes: The byte array containing the sequence of bytes to decode.
index: The index of the first byte to decode.
count: The number of bytes to decode.
Returns: A System.String containing the results of decoding the specified sequence of bytes.
"""
pass
def __eq__(self,*args):
""" x.__eq__(y) <==> x==y """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod
def __new__(self,allowOptionals=None):
"""
__new__(cls: type)
__new__(cls: type,allowOptionals: bool)
"""
pass
def __ne__(self,*args):
pass
def __reduce_ex__(self,*args):
pass
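
# --- Usage sketch (illustrative addition, not part of the generated stub) ---
# UTF-7 is a legacy 7-bit-safe encoding; note that '+' itself is escaped, so
# it is emitted as the two bytes "+-".
#
#   from System.Text import UTF7Encoding
#   utf7 = UTF7Encoding()
#   print(list(utf7.GetBytes(u"1 + 1")))    # the '+' comes out as "+-"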
class UTF8Encoding(Encoding,ICloneable):
"""
Represents a UTF-8 encoding of Unicode characters.
UTF8Encoding()
UTF8Encoding(encoderShouldEmitUTF8Identifier: bool)
UTF8Encoding(encoderShouldEmitUTF8Identifier: bool,throwOnInvalidBytes: bool)
"""
def Equals(self,value):
"""
Equals(self: UTF8Encoding,value: object) -> bool
Determines whether the specified System.Object is equal to the current System.Text.UTF8Encoding
object.
value: The System.Object to compare with the current instance.
Returns: true if value is an instance of System.Text.UTF8Encoding and is equal to the current object;
otherwise,false.
"""
pass
def GetByteCount(self,chars,*__args):
"""
GetByteCount(self: UTF8Encoding,chars: Char*,count: int) -> int
Calculates the number of bytes produced by encoding a set of characters starting at the
specified character pointer.
chars: A pointer to the first character to encode.
count: The number of characters to encode.
Returns: The number of bytes produced by encoding the specified characters.
GetByteCount(self: UTF8Encoding,chars: str) -> int
Calculates the number of bytes produced by encoding the characters in the specified
System.String.
chars: The System.String containing the set of characters to encode.
Returns: The number of bytes produced by encoding the specified characters.
GetByteCount(self: UTF8Encoding,chars: Array[Char],index: int,count: int) -> int
Calculates the number of bytes produced by encoding a set of characters from the specified
character array.
chars: The character array containing the set of characters to encode.
index: The index of the first character to encode.
count: The number of characters to encode.
Returns: The number of bytes produced by encoding the specified characters.
"""
pass
def GetBytes(self,*__args):
"""
GetBytes(self: UTF8Encoding,chars: Char*,charCount: int,bytes: Byte*,byteCount: int) -> int
Encodes a set of characters starting at the specified character pointer into a sequence of bytes
that are stored starting at the specified byte pointer.
chars: A pointer to the first character to encode.
charCount: The number of characters to encode.
bytes: A pointer to the location at which to start writing the resulting sequence of bytes.
byteCount: The maximum number of bytes to write.
Returns: The actual number of bytes written at the location indicated by bytes.
GetBytes(self: UTF8Encoding,chars: Array[Char],charIndex: int,charCount: int,bytes: Array[Byte],byteIndex: int) -> int
Encodes a set of characters from the specified character array into the specified byte array.
chars: The character array containing the set of characters to encode.
charIndex: The index of the first character to encode.
charCount: The number of characters to encode.
bytes: The byte array to contain the resulting sequence of bytes.
byteIndex: The index at which to start writing the resulting sequence of bytes.
Returns: The actual number of bytes written into bytes.
GetBytes(self: UTF8Encoding,s: str,charIndex: int,charCount: int,bytes: Array[Byte],byteIndex: int) -> int
Encodes a set of characters from the specified System.String into the specified byte array.
s: The System.String containing the set of characters to encode.
charIndex: The index of the first character to encode.
charCount: The number of characters to encode.
bytes: The byte array to contain the resulting sequence of bytes.
byteIndex: The index at which to start writing the resulting sequence of bytes.
Returns: The actual number of bytes written into bytes.
"""
pass
def GetCharCount(self,bytes,*__args):
"""
GetCharCount(self: UTF8Encoding,bytes: Byte*,count: int) -> int
Calculates the number of characters produced by decoding a sequence of bytes starting at the
specified byte pointer.
bytes: A pointer to the first byte to decode.
count: The number of bytes to decode.
Returns: The number of characters produced by decoding the specified sequence of bytes.
GetCharCount(self: UTF8Encoding,bytes: Array[Byte],index: int,count: int) -> int
Calculates the number of characters produced by decoding a sequence of bytes from the specified
byte array.
bytes: The byte array containing the sequence of bytes to decode.
index: The index of the first byte to decode.
count: The number of bytes to decode.
Returns: The number of characters produced by decoding the specified sequence of bytes.
"""
pass
def GetChars(self,bytes,*__args):
"""
GetChars(self: UTF8Encoding,bytes: Byte*,byteCount: int,chars: Char*,charCount: int) -> int
Decodes a sequence of bytes starting at the specified byte pointer into a set of characters that
are stored starting at the specified character pointer.
bytes: A pointer to the first byte to decode.
byteCount: The number of bytes to decode.
chars: A pointer to the location at which to start writing the resulting set of characters.
charCount: The maximum number of characters to write.
Returns: The actual number of characters written at the location indicated by chars.
GetChars(self: UTF8Encoding,bytes: Array[Byte],byteIndex: int,byteCount: int,chars: Array[Char],charIndex: int) -> int
Decodes a sequence of bytes from the specified byte array into the specified character array.
bytes: The byte array containing the sequence of bytes to decode.
byteIndex: The index of the first byte to decode.
byteCount: The number of bytes to decode.
chars: The character array to contain the resulting set of characters.
charIndex: The index at which to start writing the resulting set of characters.
Returns: The actual number of characters written into chars.
"""
pass
def GetDecoder(self):
"""
GetDecoder(self: UTF8Encoding) -> Decoder
Obtains a decoder that converts a UTF-8 encoded sequence of bytes into a sequence of Unicode
characters.
Returns: A System.Text.Decoder that converts a UTF-8 encoded sequence of bytes into a sequence of Unicode
characters.
"""
pass
def GetEncoder(self):
"""
GetEncoder(self: UTF8Encoding) -> Encoder
Obtains an encoder that converts a sequence of Unicode characters into a UTF-8 encoded sequence
of bytes.
Returns: A System.Text.Encoder that converts a sequence of Unicode characters into a UTF-8 encoded
sequence of bytes.
"""
pass
def GetHashCode(self):
"""
GetHashCode(self: UTF8Encoding) -> int
Returns the hash code for the current instance.
Returns: The hash code for the current instance.
"""
pass
def GetMaxByteCount(self,charCount):
"""
GetMaxByteCount(self: UTF8Encoding,charCount: int) -> int
Calculates the maximum number of bytes produced by encoding the specified number of characters.
charCount: The number of characters to encode.
Returns: The maximum number of bytes produced by encoding the specified number of characters.
"""
pass
def GetMaxCharCount(self,byteCount):
"""
GetMaxCharCount(self: UTF8Encoding,byteCount: int) -> int
Calculates the maximum number of characters produced by decoding the specified number of bytes.
byteCount: The number of bytes to decode.
Returns: The maximum number of characters produced by decoding the specified number of bytes.
"""
pass
def GetPreamble(self):
"""
GetPreamble(self: UTF8Encoding) -> Array[Byte]
Returns a Unicode byte order mark encoded in UTF-8 format,if the constructor for this instance
requests a byte order mark.
Returns: A byte array containing the Unicode byte order mark,if the constructor for this instance
requests a byte order mark. Otherwise,this method returns a byte array of length zero.
"""
pass
def GetString(self,bytes,*__args):
"""
GetString(self: UTF8Encoding,bytes: Array[Byte],index: int,count: int) -> str
Decodes a range of bytes from a byte array into a string.
bytes: The byte array containing the sequence of bytes to decode.
index: The index of the first byte to decode.
count: The number of bytes to decode.
Returns: A System.String containing the results of decoding the specified sequence of bytes.
"""
pass
def __eq__(self,*args):
""" x.__eq__(y) <==> x==y """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod
def __new__(self,encoderShouldEmitUTF8Identifier=None,throwOnInvalidBytes=None):
"""
__new__(cls: type)
__new__(cls: type,encoderShouldEmitUTF8Identifier: bool)
__new__(cls: type,encoderShouldEmitUTF8Identifier: bool,throwOnInvalidBytes: bool)
"""
pass
def __ne__(self,*args):
pass
def __reduce_ex__(self,*args):
pass
# variables with complex values
| [
"[email protected]"
] | |
e98cfae6341abe6f37c3f1a0a2427707d096ce33 | fb54704d4a6f9475f42b85d8c470e3425b37dcae | medium/ex723.py | 72d622c13cb343615c20230acd3754aceafcd33 | [] | no_license | ziyuan-shen/leetcode_algorithm_python_solution | b2784071a94b04e687fd536b57e8d5a9ec1a4c05 | 920b65db80031fad45d495431eda8d3fb4ef06e5 | refs/heads/master | 2021-06-27T05:19:47.774044 | 2021-02-04T09:47:30 | 2021-02-04T09:47:30 | 210,991,299 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,893 | py | from typing import List  # needed for the List[...] annotations below

class Solution:
    def crush(self, board):
        """Mark every horizontal/vertical run of 3+ equal candies by negating
        the values, then zero out the marked cells; return True if anything crushed."""
for r in range(self.nrow):
p1 = 0
p2 = 1
while p2 < self.ncol - 1:
val = abs(board[r][p1])
while p2 < self.ncol and abs(board[r][p2]) == val:
p2 += 1
if p2 - p1 > 2:
for c in range(p1, p2):
board[r][c] = -val
p1 = p2
p2 += 1
for c in range(self.ncol):
p1 = 0
p2 = 1
while p2 < self.nrow - 1:
val = abs(board[p1][c])
while p2 < self.nrow and abs(board[p2][c]) == val:
p2 += 1
if p2 - p1 > 2:
for r in range(p1, p2):
board[r][c] = -val
p1 = p2
p2 += 1
change_flag = False
for r in range(self.nrow):
for c in range(self.ncol):
if board[r][c] < 0:
change_flag = True
board[r][c] = 0
return change_flag
    def gravity(self, board):
        """Drop candies: compact each column's non-zero values to the bottom."""
for c in range(self.ncol):
p1 = self.nrow - 1
p2 = p1 - 1
while p2 >= 0:
if board[p1][c] != 0:
p1 -= 1
p2 -= 1
else:
while p2 >= 0 and board[p2][c] == 0:
p2 -= 1
if p2 >= 0:
board[p1][c] = board[p2][c]
board[p2][c] = 0
p1 -= 1
p2 = p1 - 1
def candyCrush(self, board: List[List[int]]) -> List[List[int]]:
self.nrow = len(board)
self.ncol = len(board[0])
while self.crush(board):
self.gravity(board)
return board
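
# Illustrative smoke test (added; not part of the original solution file):
# every row is a run of three equal candies, so one crush pass clears the
# whole board and gravity leaves all zeros.
if __name__ == '__main__':
    demo = [[1, 1, 1],
            [2, 2, 2],
            [3, 3, 3]]
    assert Solution().candyCrush(demo) == [[0, 0, 0], [0, 0, 0], [0, 0, 0]]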
| [
"[email protected]"
] | |
a0f7d764e965e2a2a6b2307b6cffc09c9e55114e | b167407960a3b69b16752590def1a62b297a4b0c | /tools/project-creator/Python2.6.6/Lib/test/test_timeout.py | 48fdc0108adcd87a0723272de8ee9ff835c08394 | [
"MIT"
] | permissive | xcode1986/nineck.ca | 543d1be2066e88a7db3745b483f61daedf5f378a | 637dfec24407d220bb745beacebea4a375bfd78f | refs/heads/master | 2020-04-15T14:48:08.551821 | 2019-01-15T07:36:06 | 2019-01-15T07:36:06 | 164,768,581 | 1 | 1 | MIT | 2019-01-15T08:30:27 | 2019-01-09T02:09:21 | C++ | UTF-8 | Python | false | false | 7,061 | py | """Unit tests for socket timeout feature."""
import unittest
from test import test_support
# This requires the 'network' resource as given on the regrtest command line.
skip_expected = not test_support.is_resource_enabled('network')
import time
import socket
class CreationTestCase(unittest.TestCase):
"""Test case for socket.gettimeout() and socket.settimeout()"""
def setUp(self):
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
def tearDown(self):
self.sock.close()
def testObjectCreation(self):
# Test Socket creation
self.assertEqual(self.sock.gettimeout(), None,
"timeout not disabled by default")
def testFloatReturnValue(self):
# Test return value of gettimeout()
self.sock.settimeout(7.345)
self.assertEqual(self.sock.gettimeout(), 7.345)
self.sock.settimeout(3)
self.assertEqual(self.sock.gettimeout(), 3)
self.sock.settimeout(None)
self.assertEqual(self.sock.gettimeout(), None)
def testReturnType(self):
# Test return type of gettimeout()
self.sock.settimeout(1)
self.assertEqual(type(self.sock.gettimeout()), type(1.0))
self.sock.settimeout(3.9)
self.assertEqual(type(self.sock.gettimeout()), type(1.0))
def testTypeCheck(self):
# Test type checking by settimeout()
self.sock.settimeout(0)
self.sock.settimeout(0L)
self.sock.settimeout(0.0)
self.sock.settimeout(None)
self.assertRaises(TypeError, self.sock.settimeout, "")
self.assertRaises(TypeError, self.sock.settimeout, u"")
self.assertRaises(TypeError, self.sock.settimeout, ())
self.assertRaises(TypeError, self.sock.settimeout, [])
self.assertRaises(TypeError, self.sock.settimeout, {})
self.assertRaises(TypeError, self.sock.settimeout, 0j)
def testRangeCheck(self):
# Test range checking by settimeout()
self.assertRaises(ValueError, self.sock.settimeout, -1)
self.assertRaises(ValueError, self.sock.settimeout, -1L)
self.assertRaises(ValueError, self.sock.settimeout, -1.0)
def testTimeoutThenBlocking(self):
# Test settimeout() followed by setblocking()
self.sock.settimeout(10)
self.sock.setblocking(1)
self.assertEqual(self.sock.gettimeout(), None)
self.sock.setblocking(0)
self.assertEqual(self.sock.gettimeout(), 0.0)
self.sock.settimeout(10)
self.sock.setblocking(0)
self.assertEqual(self.sock.gettimeout(), 0.0)
self.sock.setblocking(1)
self.assertEqual(self.sock.gettimeout(), None)
def testBlockingThenTimeout(self):
# Test setblocking() followed by settimeout()
self.sock.setblocking(0)
self.sock.settimeout(1)
self.assertEqual(self.sock.gettimeout(), 1)
self.sock.setblocking(1)
self.sock.settimeout(1)
self.assertEqual(self.sock.gettimeout(), 1)
class TimeoutTestCase(unittest.TestCase):
"""Test case for socket.socket() timeout functions"""
# There are a number of tests here trying to make sure that an operation
# doesn't take too much longer than expected. But competing machine
# activity makes it inevitable that such tests will fail at times.
# When fuzz was at 1.0, I (tim) routinely saw bogus failures on Win2K
# and Win98SE. Boosting it to 2.0 helped a lot, but isn't a real
# solution.
fuzz = 2.0
def setUp(self):
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.addr_remote = ('www.python.org.', 80)
self.localhost = '127.0.0.1'
def tearDown(self):
self.sock.close()
def testConnectTimeout(self):
# Choose a private address that is unlikely to exist to prevent
# failures due to the connect succeeding before the timeout.
# Use a dotted IP address to avoid including the DNS lookup time
# with the connect time. This avoids failing the assertion that
# the timeout occurred fast enough.
addr = ('10.0.0.0', 12345)
# Test connect() timeout
_timeout = 0.001
self.sock.settimeout(_timeout)
_t1 = time.time()
self.failUnlessRaises(socket.error, self.sock.connect, addr)
_t2 = time.time()
_delta = abs(_t1 - _t2)
self.assert_(_delta < _timeout + self.fuzz,
"timeout (%g) is more than %g seconds more than expected (%g)"
%(_delta, self.fuzz, _timeout))
def testRecvTimeout(self):
# Test recv() timeout
_timeout = 0.02
self.sock.connect(self.addr_remote)
self.sock.settimeout(_timeout)
_t1 = time.time()
self.failUnlessRaises(socket.error, self.sock.recv, 1024)
_t2 = time.time()
_delta = abs(_t1 - _t2)
self.assert_(_delta < _timeout + self.fuzz,
"timeout (%g) is %g seconds more than expected (%g)"
%(_delta, self.fuzz, _timeout))
def testAcceptTimeout(self):
# Test accept() timeout
_timeout = 2
self.sock.settimeout(_timeout)
# Prevent "Address already in use" socket exceptions
test_support.bind_port(self.sock, self.localhost)
self.sock.listen(5)
_t1 = time.time()
self.failUnlessRaises(socket.error, self.sock.accept)
_t2 = time.time()
_delta = abs(_t1 - _t2)
self.assert_(_delta < _timeout + self.fuzz,
"timeout (%g) is %g seconds more than expected (%g)"
%(_delta, self.fuzz, _timeout))
def testRecvfromTimeout(self):
# Test recvfrom() timeout
_timeout = 2
self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.sock.settimeout(_timeout)
# Prevent "Address already in use" socket exceptions
test_support.bind_port(self.sock, self.localhost)
_t1 = time.time()
self.failUnlessRaises(socket.error, self.sock.recvfrom, 8192)
_t2 = time.time()
_delta = abs(_t1 - _t2)
self.assert_(_delta < _timeout + self.fuzz,
"timeout (%g) is %g seconds more than expected (%g)"
%(_delta, self.fuzz, _timeout))
def testSend(self):
# Test send() timeout
# couldn't figure out how to test it
pass
def testSendto(self):
# Test sendto() timeout
# couldn't figure out how to test it
pass
def testSendall(self):
# Test sendall() timeout
# couldn't figure out how to test it
pass
def test_main():
test_support.requires('network')
test_support.run_unittest(CreationTestCase, TimeoutTestCase)
if __name__ == "__main__":
test_main()
| [
"[email protected]"
] | |
ea2aa046e4ada2f0e7f4f8eabf7b353f0795682a | 98c6ea9c884152e8340605a706efefbea6170be5 | /examples/data/Assignment_3/mtledn001/question1.py | d2992358e9f1fd6e7a51fafadabd786f4caafacd | [] | no_license | MrHamdulay/csc3-capstone | 479d659e1dcd28040e83ebd9e3374d0ccc0c6817 | 6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2 | refs/heads/master | 2021-03-12T21:55:57.781339 | 2014-09-22T02:22:22 | 2014-09-22T02:22:22 | 22,372,174 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 183 | py | # Draw a rectangle
h = int(input('Enter the height of the rectangle:\n'))  # int() instead of eval() for safety
w = int(input('Enter the width of the rectangle:\n'))
for i in range(1, h+1):
print("*"*w)
| [
"[email protected]"
] | |
a8f1dd9db7c0ce138615ac6844de319aaa5c692f | dec29f40788478f73798f23b79ca892b3121150a | /apps/product/migrations/0006_product_featured.py | 239bba971c32d72c05c0f20fc7113bd3b3a59639 | [] | no_license | RonaldTheodoro/django-ecommerce | 2c661e6f3ae0154ecb7a8e25183875da8c27d14f | 9097049107e5a7ab52474aa89fe40f02689fb24a | refs/heads/master | 2021-05-06T02:08:51.166682 | 2017-12-17T00:32:03 | 2017-12-17T00:32:03 | 114,499,259 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 454 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-11-30 11:31
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('product', '0005_auto_20171119_1838'),
]
operations = [
migrations.AddField(
model_name='product',
name='featured',
field=models.BooleanField(default=True),
),
]
| [
"[email protected]"
] | |
52a14bcf8d67657749c1e9d1a0a544f03ad604c3 | 6644e20a38f22b2b0842981b9b4855fb08bb0113 | /exer706.py | d303559ba7fbd7a9ddd388a86fe41bc3f199b33d | [
"MIT"
] | permissive | profnssorg/valmorMantelli1 | 506acb35243aeea24701d70e369280f064a3024e | a9a42218b54da208e834f333aa5242d14639b28b | refs/heads/master | 2020-04-11T13:55:03.354177 | 2019-01-18T20:28:33 | 2019-01-18T20:28:33 | 161,834,724 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 941 | py | ###Title: String reader
###Function: This program reads three strings and, in the first one, replaces the characters of the second with those of the third
###Author: Valmor Mantelli Jr.
###Date: 08/01/2019
###Version: 0.0.13
# Variable declarations
f = ""
s = ""
t = ""
x = ""
p = ""
y = 0
# Data input
f = input("Enter the first sequence of letters: ")
s = input("Enter the second sequence of letters: ")
t = input("Enter the third sequence of letters: ")
# Data processing and output
if len(s) == len(t):  # check that the second and third strings have the same length
    x = ""
    for p in f:
        y = s.find(p)
        if y != -1:
            x += t[y]
        else:
            x += p
    if x == "":
        print("No characters were left to be replaced.")
    else:
        print("The characters %s were replaced by %s in %s, generating the new sequence %s." % (s, t, f, x))
else:
    print("The second and third character sequences must have the same length")
| [
"[email protected]"
] | |
e88c2e150d4d4d9f1fee48c6f67a8495f4ed9ccc | d2cb388a385598489e36817afb15502596780dc2 | /hedgehog/test_bayes_net.py | 4ac37b9ca124ff2baad63ef64502765f00f3f245 | [
"MIT"
] | permissive | dormanh/hedgehog | 2ab5bd0fe58e8e8d4fb6410946a532c4a5d09f7c | 6ebb6fad04dcae810e16768cfc90e6a12bbc5cd4 | refs/heads/master | 2023-03-13T05:45:42.648064 | 2021-02-28T13:49:20 | 2021-02-28T13:49:20 | 343,113,432 | 0 | 0 | MIT | 2021-02-28T13:34:20 | 2021-02-28T13:34:19 | null | UTF-8 | Python | false | false | 3,524 | py | import copy
import importlib
import inspect
import math
import random
import numpy as np
import pandas as pd
import pytest
import hedgehog as hh
def check_partial_fit(bn):
"""Checks that partial_fit produces the same result as fit."""
bn_partial = copy.deepcopy(bn)
# Fit the parameters of the first BN in one go
samples = bn.sample(500)
bn.fit(samples)
# Fit the parameters of the second BN incrementally
bn_partial.P = {}
for chunk in np.array_split(samples, 5):
bn_partial.partial_fit(chunk)
# Check that the obtained parameters are identical
for node in bn.P:
pd.testing.assert_series_equal(bn.P[node], bn_partial.P[node])
def check_sample_many(bn):
for n in (2, 3, 100):
sample = bn.sample(n)
assert len(sample) == n
assert sorted(sample.columns) == sorted(bn.nodes)
def check_sample_one(bn):
sample = bn.sample()
assert isinstance(sample, dict)
assert sorted(sample.keys()) == sorted(bn.nodes)
def check_full_joint_dist(bn):
fjd = bn.full_joint_dist()
assert math.isclose(fjd.sum(), 1)
assert sorted(fjd.index.names) == sorted(bn.nodes)
def check_Ps(bn):
for child, parents in bn.parents.items():
P = bn.P[child]
assert P.index.names[-1] == child
assert P.index.names[:-1] == parents
assert P.groupby(parents).sum().eq(1).all()
for orphan in set(bn.nodes) - set(bn.parents):
P = bn.P[orphan]
assert P.index.name == orphan
assert P.sum() == 1
def check_query(bn):
"""Checks that the query function works for every algorithm."""
fjd = bn.full_joint_dist()
event = dict(zip(fjd.index.names, fjd.index[0]))
query = random.choice(list(event))
del event[query]
for algorithm in ('exact', 'gibbs', 'likelihood', 'rejection'):
bn.query(query, event=event, algorithm=algorithm)
def naive():
bn = hh.BayesNet('A', 'B', 'C')
bn.P['A'] = pd.Series({True: .1, False: .9})
bn.P['B'] = pd.Series({True: .3, False: .7})
bn.P['C'] = pd.Series({True: .5, False: .5})
bn.prepare()
return bn
@pytest.mark.parametrize('bn, check', [
pytest.param(
example(),
check,
id=f"{example.__name__}:{check.__name__}"
)
for example in (
*dict(inspect.getmembers(
importlib.import_module('hedgehog.examples'),
inspect.isfunction)
).values(),
naive
)
for check in (
check_partial_fit,
check_sample_many,
check_sample_one,
check_full_joint_dist,
check_Ps,
check_query
)
])
def test(bn, check):
check(bn)
def test_indep_vars():
"""This doctest checks that querying with independent variables works as expected.
>>> bn = hh.BayesNet()
>>> bn.P['A'] = pd.Series({1: .2, 2: .3, 3: .5})
>>> bn.P['B'] = pd.Series({1: .4, 2: .2, 3: .4})
>>> bn.prepare()
>>> bn.full_joint_dist()
A B
1 1 0.08
2 0.04
3 0.08
2 1 0.12
2 0.06
3 0.12
3 1 0.20
2 0.10
3 0.20
Name: P(A, B), dtype: float64
>>> bn.query('A', event={'B': 1})
A
1 0.2
2 0.3
3 0.5
Name: P(A), dtype: float64
>>> bn.query('A', event={'B': 2})
A
1 0.2
2 0.3
3 0.5
Name: P(A), dtype: float64
>>> bn.query('A', event={'B': 3})
A
1 0.2
2 0.3
3 0.5
Name: P(A), dtype: float64
"""
| [
"[email protected]"
] | |
065879523e20eb090c83b185d34a52dad01e6602 | c71799bd4e86b8354588e395396e559df867843b | /utility/utility_env.py | 07867072760d3dfc04b871d842687612217b90a6 | [] | no_license | jacob-heglund/capture-the-flag | ab564c6f271dd2bb2bc07dd4660eb0798f5480f4 | 99423e6606fa4f32358df91f83d69b39c68c6593 | refs/heads/master | 2020-09-06T20:03:17.997672 | 2019-11-08T19:37:31 | 2019-11-08T19:37:31 | 220,535,046 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,781 | py | import numpy as np
import gym_cap.envs.const as CONST
UNKNOWN = CONST.UNKNOWN # -1
TEAM1_BG = CONST.TEAM1_BACKGROUND # 0
TEAM2_BG = CONST.TEAM2_BACKGROUND # 1
TEAM1_AG = CONST.TEAM1_UGV # 2
TEAM1_UAV= CONST.TEAM1_UAV # 3
TEAM2_AG = CONST.TEAM2_UGV # 4
TEAM2_UAV= CONST.TEAM2_UAV # 5
TEAM1_FL = CONST.TEAM1_FLAG # 6
TEAM2_FL = CONST.TEAM2_FLAG # 7
OBSTACLE = CONST.OBSTACLE # 8
DEAD = CONST.DEAD # 9
SELECTED = CONST.SELECTED # 10
COMPLETED= CONST.COMPLETED # 11
def one_hot_encoder(state, agents, vision_radius=9, reverse=False):
"""Encoding pipeline for CtF state to one-hot representation
6-channel one-hot representation of state.
State is not binary: team2 is represented with -1.
Channels are not symmetrical.
:param state: CtF state in raw format
:param agents: Agent list of CtF environment
:param vision_radius: Size of the vision range (default=9)
    :param reverse: Reverse the color. Used for red-perspective (default=False)
:return oh_state: One-hot encoded state
"""
vision_lx = 2*vision_radius+1
vision_ly = 2*vision_radius+1
oh_state = np.zeros((len(agents),vision_lx,vision_ly,6))
# team 1 : (1), team 2 : (-1), map elements: (0)
map_channel = {UNKNOWN:0, DEAD:0,
TEAM1_BG:1, TEAM2_BG:1,
TEAM1_AG:2, TEAM2_AG:2,
TEAM1_UAV:3, TEAM2_UAV:3,
TEAM1_FL:4, TEAM2_FL:4,
OBSTACLE:5}
if not reverse:
map_color = {UNKNOWN:1, DEAD:0,
TEAM1_BG:0, TEAM2_BG:1,
TEAM1_AG:1, TEAM2_AG:-1,
TEAM1_UAV:1, TEAM2_UAV:-1,
TEAM1_FL:1, TEAM2_FL:-1,
OBSTACLE:1}
else: # reverse color
map_color = {UNKNOWN:1, DEAD:0,
TEAM1_BG:1, TEAM2_BG:0,
TEAM1_AG:-1, TEAM2_AG:1,
TEAM1_UAV:-1, TEAM2_UAV:1,
TEAM1_FL:-1, TEAM2_FL:1,
OBSTACLE:1}
# Expand the observation with wall to avoid dealing with the boundary
sx, sy = state.shape
_state = np.full((sx+2*vision_radius, sy+2*vision_radius),OBSTACLE)
_state[vision_radius:vision_radius+sx, vision_radius:vision_radius+sy] = state
state = _state
for idx,agent in enumerate(agents):
# Initialize Variables
x, y = agent.get_loc()
x += vision_radius
y += vision_radius
vision = state[x-vision_radius:x+vision_radius+1,y-vision_radius:y+vision_radius+1] # extract view
# FULL MATRIX OPERATION
for channel, val in map_color.items():
if val == 1:
oh_state[idx,:,:,map_channel[channel]] += (vision == channel).astype(np.int32)
elif val == -1:
oh_state[idx,:,:,map_channel[channel]] -= (vision == channel).astype(np.int32)
return oh_state
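# Example (illustrative): with the default vision_radius=9 and two agents,
# one_hot_encoder returns an array of shape (2, 19, 19, 6): one 19x19
# window per agent, with the six channels defined in map_channel above.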
def one_hot_encoder_v2(state, agents, vision_radius=9, reverse=False):
""" Encoding pipeline for CtF state to one-hot representation
11-channel one-hot representation of state.
State is binary.
Some optimization is included.
:param state: CtF state in raw format
:param agents: Agent list of CtF environment
    :param vision_radius: Size of the vision range (default=9)
    :param reverse: Reverse the color. Used for red-perspective (default=False)
:return oh_state: One-hot encoded state
"""
num_channel = 11
num_agents = len(agents)
vision_lx = 2*vision_radius+1
vision_ly = 2*vision_radius+1
# Map channel for each elements
if not reverse:
order = [UNKNOWN, OBSTACLE, TEAM1_BG, TEAM2_BG, TEAM1_AG, TEAM2_AG,
TEAM1_UAV, TEAM2_UAV, TEAM1_FL, TEAM2_FL, DEAD]
else:
order = [UNKNOWN, OBSTACLE, TEAM2_BG, TEAM1_BG, TEAM2_AG, TEAM1_AG,
TEAM2_UAV, TEAM1_UAV, TEAM2_FL, TEAM1_FL, DEAD]
map_channel = dict(zip(order, range(num_channel)))
# Padding Boundary
#state = np.pad(state, ((vision_radius,vision_radius),(vision_radius,vision_radius)), 'constant', constant_values=OBSTACLE)
sx, sy = state.shape
_state = np.full((sx+2*vision_radius, sy+2*vision_radius),OBSTACLE)
_state[vision_radius:vision_radius+sx, vision_radius:vision_radius+sy] = state
state = _state
each_agent = []
for idx, agent in enumerate(agents):
# Initialize Variables
x, y = agent.get_loc()
x += vision_radius
y += vision_radius
vision = state[x-vision_radius:x+vision_radius+1,y-vision_radius:y+vision_radius+1] # extract view
# operation
each_channel = []
for element, channel in map_channel.items():
each_channel.append(vision==element)
each_agent.append(np.stack(each_channel, axis=-1))
oh_state = np.stack(each_agent, axis=0)
return oh_state
# Debug
def debug():
"""debug
Include testing code for above methods and classes.
The execution will start witn __main__, and call this method.
"""
import gym
import time
env = gym.make("cap-v0")
s = env.reset(map_size=20)
print('start running')
stime = time.time()
for _ in range(3000):
s = env.reset(map_size=20)
one_hot_encoder(s, env.get_team_blue)
print(f'Finish testing for one-hot-encoder: {time.time()-stime} sec')
s = env.reset(map_size=20)
print('start running v2')
stime = time.time()
for _ in range(3000):
s = env.reset(map_size=20)
one_hot_encoder_v2(s, env.get_team_blue)
    print(f'Finish testing for one-hot-encoder v2: {time.time()-stime} sec')
if __name__ == '__main__':
debug()
| [
"[email protected]"
] | |
6a61a4502a0334d46da5be716ffb38dbcc31975f | 583fdb9f37dea28ada24e335f1e44ba6cf587770 | 860 柠檬水找零.py | 109ef98e4711690d86b75be686bb8a45c0f5914 | [] | no_license | Ford-z/LeetCode | 8c4c30eeaa3d8f02b24c8d0058c60f09c3a6debe | 88eeca3780b4dc77efce4f14d317ed1c872cf650 | refs/heads/master | 2021-11-21T00:51:05.314084 | 2021-09-16T15:45:18 | 2021-09-16T15:45:18 | 194,425,542 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,596 | py | # At a lemonade stand, each cup of lemonade sells for $5.
# Customers queue up to buy your product, one cup at a time (paying in the order given by bills).
# Each customer buys just one cup of lemonade and pays you $5, $10, or $20. You must give every
# customer correct change, so the net transaction is that each customer pays you $5.
# Note that you start with no change on hand.
# Return true if you can give every customer correct change, otherwise return false.
# Source: LeetCode (力扣)
# Link: https://leetcode-cn.com/problems/lemonade-change
# Copyright belongs to LeetCode China (领扣网络). For commercial reprints contact the official
# channel for authorization; for non-commercial reprints please cite the source.
from typing import List  # needed for the List[int] annotation below
class Solution:
def lemonadeChange(self, bills: List[int]) -> bool:
cash=[]
flag=True
for i in range(len(bills)):
if(bills[i]==5):
cash.append(5)
if(bills[i]==10):
if(cash.count(5)>=1):
cash.remove(5)
cash.append(10)
else:
flag=False
break
if(bills[i]==20):
if(cash.count(10)>=1 and cash.count(5)>=1):
cash.remove(10)
cash.remove(5)
cash.append(20)
elif(cash.count(5)>=3):
cash.remove(5)
cash.remove(5)
cash.remove(5)
cash.append(20)
else:
flag=False
break
return flag
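
if __name__ == '__main__':
    # Sanity checks added for illustration, using the standard LeetCode examples.
    assert Solution().lemonadeChange([5, 5, 5, 10, 20])       # change is always available
    assert not Solution().lemonadeChange([5, 5, 10, 10, 20])  # cannot make $15 change for the $20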
| [
"[email protected]"
] | |
1c6da6f5d8052821f26f42065a6f5aaba410456d | dec494542217437afa7f38e8703328b25b183cb8 | /39.py | c80519dded9c7ba9b7dd4286c678e9a42e4168c1 | [] | no_license | Transi-ent/LeetcodeSolver | ee44c9c4d5bce9f11c079c5b27b4b967790cb5cd | a89e19753666657a6f15c1be589b9b2dbd4b6c84 | refs/heads/master | 2021-04-16T20:37:34.710175 | 2021-02-24T03:51:43 | 2021-02-24T03:51:43 | 249,383,432 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,483 | py | class Solution:
"""
    Backtracking, naive version; the search tree is fat because there is little pruning,
"""
def combinationSum(self, candidates: list, target: int) -> list:
s = set()
res = self.findCombnt(candidates, target, 0, [],[],s)
print(res)
return res
def findCombnt(self, nums: list, target: int, index: int,
tmplist: list, res: list, s: set) ->list:
n = len(nums)
if index>=n:
return res
for i in range(index, n):
copyOftmp = tmplist.copy()
copyOfs = s.copy()
sumVal = sum(copyOftmp)
if sumVal+nums[i]==target:
tmp = copyOftmp+[nums[i]]
tmp.sort()
ss = ''.join([str(i) for i in tmp])
if not ss in s:
res.append(copyOftmp+[nums[i]])
s.add(ss)
elif sumVal+nums[i]>target:
self.findCombnt(nums, target, index+1, copyOftmp, res, s)
else:
self.findCombnt(nums, target, index, copyOftmp+[nums[i]], res, s)
return res
class Solution2:
"""
    Backtracking, improved version.
"""
def combinationSum(self, candidates: list, target: int) -> list:
res = []
candidates.sort()
n = len(candidates)
def traceback(index: int, tmpsum: int, tmp: list):
if tmpsum>target or index>=n:
return
if tmpsum==target:
res.append(tmp)
for i in range(index, n):
if tmpsum+candidates[i]>target:
                    break  # the candidates are sorted, so later elements can only be larger
                # the essence of backtracking: recursively calling itself inside a loop
traceback(i, tmpsum+candidates[i], tmp+[candidates[i]])
traceback(0, 0, [])
print(res)
return res
class Solution3:
"""
    Recursion.
"""
def combinationSum(self, candidates: list, target: int) -> list:
res = []
candidates.sort()
n = len(candidates)
def dfs(index: int, tmpsum: int, tmp: list):
if tmpsum>target or index>=n:
return
if tmpsum==target:
res.append(tmp)
return
dfs(index, tmpsum+candidates[index], tmp+[candidates[index]])
dfs(index+1, tmpsum, tmp)
dfs(0, 0, [])
print(res)
return res
Solution3().combinationSum([2,3,6,7], 7)
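# Note (added for illustration): the call above prints [[2, 2, 3], [7]],
# the combinations of [2, 3, 6, 7] that sum to 7.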
| [
"[email protected]"
] | |
3a6b487f57c857ad7468ca3079d6f2dcc05ee7d9 | dd31ec8f3f979b0339cf686ce9094def03ef003a | /almoxarifado/admin.py | 439d557537488c4abfc4866aa38d2ec5b78db9fc | [
"MIT"
] | permissive | rvmoura96/projeto-almoxarifado | 872bb945b4057bdbf108776e2101e9966a23f4de | 4ca5e5d00f449a940f7c601479bb3fe14c54f012 | refs/heads/master | 2022-11-11T07:45:33.475443 | 2017-11-21T21:13:19 | 2017-11-21T21:13:19 | 106,044,249 | 1 | 1 | MIT | 2022-10-26T05:02:32 | 2017-10-06T19:48:08 | Python | UTF-8 | Python | false | false | 322 | py | from django.contrib import admin
from .models import Equipamento, Item, Tipo, TipoItens, Modelo, Fabricante
# Register your models here.
admin.site.register(Tipo)
admin.site.register(TipoItens)
admin.site.register(Modelo)
admin.site.register(Fabricante)
admin.site.register(Equipamento)
admin.site.register(Item) | [
"[email protected]"
] | |
ff9ef6987c579208033a4e26c738b75647883f52 | bb81b9c70f204e636560dc969b68c1654e24cb77 | /tests/i2b2modeltests/metadatatests/test_modifier_dimension.py | 8b6124f32a4a8cc9d09f52c4faf5c4d17e173218 | [
"Apache-2.0"
] | permissive | BD2KOnFHIR/i2b2model | 8f526225498ccfadddeb180e76d644f1098a4041 | 566be446f4b8691f8d82d5d04b7635248aba6041 | refs/heads/master | 2021-04-27T20:52:59.357953 | 2020-04-14T17:01:02 | 2020-04-14T17:01:02 | 122,386,895 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,433 | py | import unittest
from collections import OrderedDict
from datetime import datetime
from dynprops import as_dict, clear
from i2b2model.shared.i2b2core import I2B2Core
from i2b2model.testingutils.base_test_case import BaseTestCase
class ModifierDimensionTestCase(BaseTestCase):
def setUp(self):
clear(I2B2Core)
def tearDown(self):
clear(I2B2Core)
def test_basics(self):
from i2b2model.metadata.i2b2modifierdimension import ModifierDimension
I2B2Core.download_date = datetime(2017, 5, 25)
I2B2Core.sourcesystem_cd = "MOD_TEST"
I2B2Core.import_date = datetime(2017, 5, 25)
md = ModifierDimension('MODTEST', 'baboon', 'Wild baboons', ['Earth', 'Africa', 'Zimbabwai'])
self.assertAlmostNow(md.update_date)
I2B2Core.update_date = datetime(2001, 12, 1)
expected = OrderedDict([
('modifier_path', '\\Earth\\Africa\\Zimbabwai\\baboon\\'),
('modifier_cd', 'MODTEST:baboon'),
('name_char', 'MODTEST Wild baboons'),
('modifier_blob', ''),
('update_date', datetime(2001, 12, 1, 0, 0)),
('download_date', datetime(2017, 5, 25, 0, 0)),
('import_date', datetime(2017, 5, 25, 0, 0)),
('sourcesystem_cd', 'MOD_TEST'),
('upload_id', None)])
self.assertEqual(expected, as_dict(md))
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
dbc9ed063bb15f5e00d2bafe49c707761c1595fe | e58e59ed72562454750c87556bf44d7a4bae5065 | /pymontecarlo_casino2/expander.py | da2650a680f18028b0b14a523f6e6077d26b8430 | [
"Apache-2.0"
] | permissive | tomyanmmx/pymontecarlo-casino2 | 587c67bc32217a5eb23583f58a6a88511f3c69e2 | d1c7eb761ccbde745fe858caf9490e87b13347d5 | refs/heads/master | 2021-05-07T22:59:52.396739 | 2017-09-10T14:22:59 | 2017-09-10T14:22:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 309 | py | """"""
# Standard library modules.
# Third party modules.
# Local modules.
from pymontecarlo.options.program.expander import Expander, expand_to_single
# Globals and constants variables.
class Casino2Expander(Expander):
def expand_analyses(self, analyses):
return expand_to_single(analyses)
| [
"[email protected]"
] | |
22da037eb35ae20878e8e83e233f08cc4cad5413 | 51a37b7108f2f69a1377d98f714711af3c32d0df | /src/leetcode/P430.py | 2e1a4f1b681e32e310db002cc62a8f5a002e90be | [] | no_license | stupidchen/leetcode | 1dd2683ba4b1c0382e9263547d6c623e4979a806 | 72d172ea25777980a49439042dbc39448fcad73d | refs/heads/master | 2022-03-14T21:15:47.263954 | 2022-02-27T15:33:15 | 2022-02-27T15:33:15 | 55,680,865 | 7 | 1 | null | null | null | null | UTF-8 | Python | false | false | 729 | py | """
# Definition for a Node.
class Node:
def __init__(self, val, prev, next, child):
self.val = val
self.prev = prev
self.next = next
self.child = child
"""
def tail(node):
if node is None:
return None
t = node
while t.next is not None:
t = t.next
return t
class Solution:
    def flatten(self, head):
        """Recursively flatten each node's child list and splice it between
        the node and its original next pointer."""
if head is None:
return None
c = self.flatten(head.child)
n = self.flatten(head.next)
if c is not None:
head.child = None
head.next = c
c.prev = head
t = tail(c)
t.next = n
if n is not None:
n.prev = t
return head
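
# Minimal sanity check (added for illustration); defines a local Node class
# matching the commented-out definition above.
if __name__ == '__main__':
    class Node:
        def __init__(self, val, prev=None, next=None, child=None):
            self.val, self.prev, self.next, self.child = val, prev, next, child

    a, b, c = Node(1), Node(2), Node(3)
    a.next, b.prev = b, a
    a.child = c  # node 3 should be spliced in between 1 and 2
    head, vals = Solution().flatten(a), []
    while head:
        vals.append(head.val)
        head = head.next
    assert vals == [1, 3, 2]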
| [
"[email protected]"
] | |
a7e12fe087d3ae0a715b5abdef52d114f3c2ff4f | 6452098273ff0555d3edf349ed800958cf89b7d8 | /Posts/posts/migrations/0003_auto_20190626_1200.py | 6fb96575c2c8114c2c9aa2295368ad35fb88895a | [] | no_license | karthik018/DjangoPractice | 8a530ec90d6efd7cc9c7122743bc647d6274b736 | 041e1144e8b6153f5f8c0d5a367472f1e68a9dc6 | refs/heads/master | 2020-06-11T08:07:59.201541 | 2019-06-27T08:54:42 | 2019-06-27T08:54:42 | 193,900,339 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 503 | py | # Generated by Django 2.2.2 on 2019-06-26 12:00
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('posts', '0002_auto_20190626_1134'),
]
operations = [
migrations.AlterField(
model_name='comments',
name='commented_on_id',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='posts.Comments'),
),
]
| [
"[email protected]"
] | |
db4718b5c11abc46048e756487f6714698f41805 | 20674c17d815214bf66b75be686bb8a45c0f5914 | /version1/415_Add_Strings.py | abfc1fed5d4ddd31c6b7d8e9156cb801b0847556 | [] | no_license | moontree/leetcode | e7b670969fe20785b15aae82996875fd66de1b08 | f2bf9b13508cd01c8f383789569e55a438f77202 | refs/heads/master | 2021-05-20T20:36:45.615420 | 2020-04-02T09:15:26 | 2020-04-02T09:15:26 | 252,408,563 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,058 | py | """
Given two non-negative integers num1 and num2 represented as string, return the sum of num1 and num2.
Note:
The length of both num1 and num2 is < 5100.
Both num1 and num2 contains only digits 0-9.
Both num1 and num2 does not contain any leading zero.
You must not use any built-in BigInteger library or convert the inputs to integer directly.
"""
def add_strings(num1, num2):
"""
:type num1: str
:type num2: str
:rtype: str
"""
b1, b2 = [int(s) for s in num1][::-1], [int(s) for s in num2][::-1]
c = 0
m, n = len(b1), len(b2)
i = 0
res = []
while i < m and i < n:
val = b1[i] + b2[i] + c
        c = val // 10
res.append(val % 10)
i += 1
while i < m:
val = b1[i] + c
        c = val // 10
res.append(val % 10)
i += 1
while i < n:
val = b2[i] + c
        c = val // 10
res.append(val % 10)
i += 1
if c > 0:
res.append(c)
print res
return "".join([str(v) for v in res[::-1]])
print add_strings("99991", "9")
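# Prints "100000" (99991 + 9); the `print res` inside the function shows the
# digits in little-endian order before they are reversed.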
| [
"[email protected]"
] | |
b5e0e9c943a15be5ba40d7ff9c3c755ac2c3a131 | 7748d76ac2557477733c245189a5510e793c965a | /5 - Django/Amadon/apps/amadon/views.py | 9c08620dd6db4778c4a77f3047aef83137c8ccee | [] | no_license | ectom/Coding-Dojo-Python-Stack | 9798fd0c452389d485d343659eed7132c61b9055 | 07d2d48e966f4210627a1a11d561f1d28e8a9982 | refs/heads/master | 2021-06-27T22:59:40.794253 | 2017-09-15T09:54:40 | 2017-09-15T09:54:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,034 | py | from django.shortcuts import render, HttpResponse, redirect
def index(request):
try:
request.session['data']
request.session['total_price'] = 0
request.session['total_quantity'] = 0
except:
request.session['data'] = []
return render(request, 'amadon/index.html')
def process(request):
items = {
'001': 19.99,
'002': 29.99,
'003': 4.99,
'004': 49.99
}
data = {
'quantity': int(request.POST['quantity'])
}
number = request.POST["product"]
print items[number]
request.session['data'].append(data)
for item in request.session['data']:
request.session['price'] = items[number]*item['quantity']
request.session['total_price'] += (items[number]*item['quantity'])
request.session['total_quantity'] += item['quantity']
return redirect('/checkout')
def checkout(request):
return render(request, 'amadon/checkout.html')
def clear(request):
request.session.clear()
return redirect('/')
| [
"[email protected]"
] | |
7934d26cf2260f368ab8d2c022c0b804c116afe7 | acb8acdc1233c248694edbae0092ddfcd46baf52 | /tests/test_dev.py | d45529c4fe3e59423f4ec7d7a35cb4b7573d5d29 | [
"MIT"
] | permissive | tomhamiltonstubber/harrier | c1173dde726b412223a3d867a4184de54060ff79 | f581aa5478cfecb6e9c58c7544584636f868a0c2 | refs/heads/master | 2020-03-21T18:29:25.966509 | 2019-02-19T17:10:56 | 2019-02-19T17:10:56 | 138,894,743 | 0 | 0 | null | 2018-06-27T14:42:50 | 2018-06-27T14:42:50 | null | UTF-8 | Python | false | false | 11,661 | py | import asyncio
from pathlib import Path
from pytest_toolbox import gettree, mktree
from watchgod import Change
import harrier.dev
from harrier.config import Config
from harrier.dev import HarrierWatcher
from harrier.main import dev
class MockServer:
def __init__(self, *args, **kwargs):
pass
async def start(self):
pass
async def shutdown(self):
pass
def test_dev_simple(tmpdir, mocker, loop):
async def awatch_alt(*args, **kwargs):
yield {(Change.modified, str(tmpdir.join('pages/foobar.md')))}
yield {(Change.modified, str(tmpdir.join('pages/features/whatever.md')))}
yield {(Change.modified, str(tmpdir.join('harrier.yml')))}
yield {(Change.added, str(tmpdir.join('theme/sass/main.scss')))}
tmpdir.join('harrier.yml').write('foo: 2')
yield {(Change.modified, str(tmpdir.join('harrier.yml')))}
asyncio.set_event_loop(loop)
mktree(tmpdir, {
'pages': {
'foobar.md': '# hello\n {{ config.foo }}',
'features/whatever.md': '## Foo',
},
'harrier.yml': 'foo: 1'
})
mocker.patch('harrier.dev.awatch', side_effect=awatch_alt)
assert not tmpdir.join('dist').check()
dev(str(tmpdir), 25698)
# debug(gettree(tmpdir.join('dist')))
assert gettree(tmpdir.join('dist')) == {
'foobar': {
'index.html': '<h1 id="1-hello">hello</h1>\n\n<p>2</p>\n',
},
'features': {
'whatever': {
'index.html': '<h2 id="2-foo">Foo</h2>\n',
},
},
}
def test_dev_delete(tmpdir, mocker, loop):
async def awatch_alt(*args, **kwargs):
yield {(Change.deleted, str(tmpdir.join('pages/features/whatever.md')))}
asyncio.set_event_loop(loop)
mktree(tmpdir, {
'pages': {
'foobar.md': 'hello',
'features/whatever.md': 'Foo',
},
})
mocker.patch('harrier.dev.awatch', side_effect=awatch_alt)
mocker.patch('harrier.dev.Server', return_value=MockServer())
assert not tmpdir.join('dist').check()
assert dev(str(tmpdir), 8000) == 0
# debug(gettree(tmpdir.join('dist')))
assert gettree(tmpdir.join('dist')) == {
'foobar': {
'index.html': '<p>hello</p>\n',
},
'features': {
'whatever': {},
},
}
def test_extensions_error(tmpdir, mocker, loop):
async def awatch_alt(*args, **kwargs):
tmpdir.join('extensions.py').write('print(xxx)')
yield {(Change.modified, str(tmpdir.join('extensions.py')))}
asyncio.set_event_loop(loop)
mktree(tmpdir, {
'pages': {
'foobar.md': '**hello**',
},
'theme/templates/main.jinja': 'main:\n {{ content }}',
'harrier.yml': 'default_template: main.jinja',
'extensions.py': 'x = 1'
})
mocker.patch('harrier.dev.awatch', side_effect=awatch_alt)
mocker.patch('harrier.dev.Server', return_value=MockServer())
assert not tmpdir.join('dist').check()
assert dev(str(tmpdir), 8000) == 1
assert gettree(tmpdir.join('dist')) == {
'foobar': {
'index.html': 'main:\n <p><strong>hello</strong></p>\n',
},
}
def test_mock_executor(tmpdir, mocker):
foobar_path = str(tmpdir.join('pages/foobar.md'))
async def awatch_alt(*args, **kwargs):
yield {(Change.modified, str(tmpdir.join('harrier.yml')))}
yield {(Change.modified, foobar_path)}
yield {(Change.modified, str(tmpdir.join('theme/assets/main.png')))}
yield {(Change.modified, str(tmpdir.join('theme/sass/main.scss')))}
yield {(Change.modified, str(tmpdir.join('theme/templates/main.jinja')))}
yield {(Change.modified, str(tmpdir.join('extensions.py')))}
yield {(Change.modified, str(tmpdir.join('data/foobar.yml')))}
mktree(tmpdir, {
'pages/foobar.md': '# hello',
'theme/templates/main.jinja': 'main:\n {{ content }}',
'harrier.yml': 'foo: bar',
})
mocker.patch('harrier.dev.awatch', side_effect=awatch_alt)
mocker.patch('harrier.dev.Server', return_value=MockServer())
loop = asyncio.new_event_loop()
f = asyncio.Future(loop=loop)
f.set_result(None)
mock_run_in_executor = mocker.patch.object(loop, 'run_in_executor', return_value=f)
asyncio.set_event_loop(loop)
assert not tmpdir.join('dist').check()
dev(str(tmpdir), 8000)
assert gettree(tmpdir.join('dist')) == {}
assert [c[0][2].dict(exclude={'config_path'}) for c in mock_run_in_executor.call_args_list] == [
{
'pages': '__FB__',
'assets': False,
'sass': False,
'templates': False,
'data': False,
'extensions': False,
'update_config': False,
},
{
'pages': set(),
'assets': False,
'sass': False,
'templates': False,
'data': False,
'extensions': False,
'update_config': True,
},
{
'pages': {(Change.modified, Path(foobar_path))},
'assets': False,
'sass': False,
'templates': False,
'data': False,
'extensions': False,
'update_config': False,
},
{
'pages': set(),
'assets': True,
'sass': False,
'templates': False,
'data': False,
'extensions': False,
'update_config': False,
},
{
'pages': set(),
'assets': False,
'sass': True,
'templates': False,
'data': False,
'extensions': False,
'update_config': False,
},
{
'pages': set(),
'assets': False,
'sass': False,
'templates': True,
'data': False,
'extensions': False,
'update_config': False,
},
{
'pages': set(),
'assets': False,
'sass': False,
'templates': False,
'data': False,
'extensions': True,
'update_config': False,
},
{
'pages': set(),
'assets': False,
'sass': False,
'templates': False,
'data': True,
'extensions': False,
'update_config': False,
},
]
def test_webpack_terminate(tmpdir, mocker, caplog):
async def awatch_alt(*args, **kwargs):
yield {(Change.modified, str(tmpdir.join('harrier.yml')))}
mktree(tmpdir, {
'pages/foobar.md': '# hello',
'theme/templates/main.jinja': 'main:\n {{ content }}',
})
mocker.patch('harrier.dev.awatch', side_effect=awatch_alt)
mocker.patch('harrier.dev.Server', return_value=MockServer())
f = asyncio.Future()
mock_webpack = mocker.MagicMock()
mock_webpack.returncode = None
f.set_result(mock_webpack)
mocker.patch('harrier.dev.start_webpack_watch', return_value=f)
assert not tmpdir.join('dist').check()
dev(str(tmpdir), 8000)
assert tmpdir.join('dist').check()
assert mock_webpack.send_signal.call_count == 1
assert 'webpack existed badly' not in caplog.text
mock_webpack.returncode = 0
dev(str(tmpdir), 8000)
assert mock_webpack.send_signal.call_count == 1
assert 'webpack existed badly' not in caplog.text
mock_webpack.returncode = 1
dev(str(tmpdir), 8000)
assert mock_webpack.send_signal.call_count == 1
assert 'webpack existed badly' in caplog.text
class Entry:
def __init__(self, path):
self.path = str(path)
self.name = self.path.rsplit('/', 1)[1]
def test_harrier_watcher(tmpdir):
mktree(tmpdir, {
'pages/foobar.md': '# hello',
'theme/templates/main.jinja': 'main:\n {{ content }}',
})
harrier.dev.CONFIG = Config(source_dir=tmpdir)
watcher = HarrierWatcher(Path(tmpdir))
assert not watcher.should_watch_dir(Entry(tmpdir.join('foobar')))
assert not watcher.should_watch_dir(Entry(tmpdir.join('__pycache__')))
assert watcher.should_watch_dir(Entry(tmpdir.join('pages')))
assert watcher.should_watch_dir(Entry(tmpdir.join('pages/whatever')))
harrier.dev.CONFIG = None
def test_dev_extensions(tmpdir, mocker, loop):
async def awatch_alt(*args, **kwargs):
yield {(Change.modified, str(tmpdir.join('pages/foobar.html')))}
yield {(Change.modified, str(tmpdir.join('pages/foobar.html')))}
asyncio.set_event_loop(loop)
mktree(tmpdir, {
'pages': {
'foobar.html': 'before',
},
'call': '0',
'extensions.py': """
from pathlib import Path
from harrier.extensions import modify, template
p = Path(__file__).parent / 'call'
@modify.som
def change_pages(site):
v = int(p.read_text())
v += 1
p.write_text(str(v))
site['pages']['/foobar.html']['content'] = str(v)
return site
"""
})
mocker.patch('harrier.dev.awatch', side_effect=awatch_alt)
mocker.patch('harrier.dev.Server', return_value=MockServer())
assert not tmpdir.join('dist').check()
dev(str(tmpdir), 8000)
# debug(gettree(tmpdir.join('dist')))
assert gettree(tmpdir.join('dist')) == {
'foobar': {
'index.html': '3\n',
},
}
def test_dev_delete_image(tmpdir, mocker, loop):
async def awatch_alt(*args, **kwargs):
yield {(Change.deleted, str(tmpdir.join('pages/other/whatever.png')))}
asyncio.set_event_loop(loop)
mktree(tmpdir, {
'pages': {
'foobar.html': 'hello',
'other/whatever.png': '*',
},
})
mocker.patch('harrier.dev.awatch', side_effect=awatch_alt)
mocker.patch('harrier.dev.Server', return_value=MockServer())
assert not tmpdir.join('dist').check()
dev(str(tmpdir), 8000)
# debug(gettree(tmpdir.join('dist')))
assert gettree(tmpdir.join('dist')) == {
'foobar': {
'index.html': 'hello\n',
},
'other': {},
}
def test_dev_data(tmpdir, mocker, loop):
async def awatch_alt(*args, **kwargs):
tmpdir.join('data/foobar.yml').write('a: 2')
yield {(Change.modified, str(tmpdir.join('data/foobar.yml')))}
asyncio.set_event_loop(loop)
mktree(tmpdir, {
'pages': {
'foobar.html': '{{ data.foobar.a }}',
},
'data/foobar.yml': 'a: 1'
})
mocker.patch('harrier.dev.awatch', side_effect=awatch_alt)
mocker.patch('harrier.dev.Server', return_value=MockServer())
assert not tmpdir.join('dist').check()
dev(str(tmpdir), 8000)
# debug(gettree(tmpdir.join('dist')))
assert gettree(tmpdir.join('dist')) == {
'foobar': {
'index.html': '2\n',
},
}
def test_ignored_directory(tmpdir, mocker, loop):
async def awatch_alt(*args, **kwargs):
yield {(Change.modified, str(tmpdir.join('pages/ignored.html')))}
asyncio.set_event_loop(loop)
mktree(tmpdir, {
'pages': {
'foobar.html': '1',
'ignored.html': '2'
},
'harrier.yaml': (
'ignore:\n'
'- /ignored.html'
)
})
mocker.patch('harrier.dev.awatch', side_effect=awatch_alt)
mocker.patch('harrier.dev.Server', return_value=MockServer())
assert not tmpdir.join('dist').check()
dev(str(tmpdir), 8000)
# debug(gettree(tmpdir.join('dist')))
assert gettree(tmpdir.join('dist')) == {
'foobar': {
'index.html': '1\n',
},
}
| [
"[email protected]"
] | |
5e4a349d5502380c981d7f17a2242bfa4f4745a3 | b7ad21dc8a2bf3459f8f65c7c2df944f168b9086 | /regression_tests/bugs/78484.py | fcfaf180b6266f5d2fdf43038e46c5cdbba6e362 | [] | no_license | italiangrid/WMS-Test-Suite | ee99651cdacbd18ec202ba3d62d6c1aeb02ab405 | 0c72c8a868c671691eae55800e906d55d9b5de0d | refs/heads/master | 2016-09-03T06:28:05.953113 | 2013-11-25T13:29:30 | 2013-11-25T13:29:30 | 3,335,842 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,897 | py | #
# Bug: 78484
# Title: [ YAIM_WMS ] Multiple parameter configuration added in condor_config.local
# Link: https://savannah.cern.ch/bugs/?78484
#
#
import logging
from libutils.Exceptions import *
def run(utils):
bug='78484'
logging.info("Start regression test for bug %s"%(bug))
logging.warning("To verify this bug you need access to WMS. You have set WMS_USERNAME and WMS_PASSOWRD attributes at configuration file")
ssh=utils.open_ssh(utils.get_WMS(),utils.get_Username(),utils.get_Password())
logging.info("Get the list of GRID_MONITOR* variables")
short_hostname=utils.execute_remote_cmd(ssh,"/bin/hostname -s")[:-1]
result=utils.execute_remote_cmd(ssh,"cat /opt/condor-c/local.%s/condor_config.local"%(short_hostname))
result=result.split("\n")
grid_monitor=[]
for line in result:
if line.find("GRID_MONITOR")!=-1:
grid_monitor.append(line)
logging.info("Run yaim (site-info.def should be presented in /opt/glite/yaim/examples/siteinfo directory)")
utils.execute_remote_cmd(ssh,"/opt/glite/yaim/bin/yaim -c -s /opt/glite/yaim/examples/siteinfo/site-info.def -n WMS")
logging.info("Get the list of GRID_MONITOR* variables after yaim")
result=utils.execute_remote_cmd(ssh,"cat /opt/condor-c/local.%s/condor_config.local"%(short_hostname))
result=result.split("\n")
grid_monitor_after=[]
for line in result:
if line.find("GRID_MONITOR")!=-1:
grid_monitor_after.append(line)
z=set(grid_monitor)^set(grid_monitor_after)
if len(z) >0 :
ssh.close()
logging.error("Error!!!. After yaim found these new entries: %s"%(z))
raise GeneralError("Check GRID_MONITOR* variables","After yaim found these new entries: %s"%(z))
logging.info("Test OK")
ssh.close()
logging.info("End of regression test for bug %s"%(bug))
| [
"[email protected]"
] | |
9ba4a6fbe3c9770a2d39f5f306cacdcfab42f357 | 65cf5e4c6d016a2dd7ddde139d3e65a94fbed9df | /dashboard/config.py | 8916d314290706a3487747ff48734f46805a359b | [
"MIT"
] | permissive | rodriguesfas/ipm | afd188884556cb2f32a685abd64ec3d570af7f6c | 596382123c3d45bde6410672692d1f59ec2d734a | refs/heads/master | 2022-10-04T17:28:14.468166 | 2020-06-03T06:09:56 | 2020-06-03T06:09:56 | 193,582,056 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 126 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Enable Flask's debugging features. Should be False in production
DEBUG = True | [
"[email protected]"
] | |
253068a67dcd85f578e3f01e7400efd2cdf1f67e | f2149869f6b2d12ce0fe68cecaf3b7c63dc6d3d8 | /src/paramCube.py | b41e2345df0684e08dc9287533832617e552a305 | [] | no_license | mclaughlin6464/HODEmulator | 35946bb55ead0427970dece32110a5932018d561 | 97a86c6c74cc49689478dde3155ab7a4c89f7c91 | refs/heads/master | 2020-04-05T14:09:05.870165 | 2016-08-31T00:56:44 | 2016-08-31T00:56:44 | 55,720,418 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,599 | py | # This module is similar to testSeveralSteps, but with an increase in scale.
#@Author Sean McLaughlin
import numpy as np
from time import time
from os import path
from itertools import izip
import argparse
from myCats import cat_dict
from allCorrFunc import loadHaloAndModel, popAndCorr, RBINS, MIN_PTCL
from doBatchCalls import BOUNDS  # I need them in both places, but it's smarter to have it there.
from guppy import hpy  # assumed import: the `h.heap()` call below looks like guppy's heap profiler
h = hpy()
# TODO not hardcoding some of these? Depends on my use, I guess.
# Will have to see how I end up using this.
SIMNAME = 'chinchilla'  # hardcoded for now
REDSHIFT = 0.5  # 0.0
# N_PTCL = 200
RBIN_CENTERS = (RBINS[1:] + RBINS[:-1]) / 2
def paramCube(outputdir, fixed_params={}, n_per_dim=4, id_no=None):
if type(n_per_dim) is int:
n_per_dim = {key: n_per_dim for key in BOUNDS.iterkeys()}
assert type(n_per_dim) is dict
values = {}
for param in BOUNDS.iterkeys():
if param in fixed_params:
n_per_dim[param] = 1
values[param] = np.array([fixed_params[param]])
else: # param in varied_params
values[param] = np.linspace(BOUNDS[param][0], BOUNDS[param][1], num=n_per_dim[param])
n_total = np.prod(n_per_dim.values())
if n_total == 1: #only one, we can skip all this stuff.
calc_galaxy_autocorr(SIMNAME, 1 / (1 + REDSHIFT),
path.join(outputdir,'Emulator_lhc_'+ '%03d'%id_no if id_no is not None else 'Emulator'),
params=fixed_params, do_jackknife=True, Lbox=400, npart=2048)
return
points = [{} for i in xrange(n_total)]
fixed_base = '_'.join('%s%.2f' % (key, val) for key, val in fixed_params.iteritems()) + '_'
outbase = [fixed_base for i in xrange(n_total)]
    n_segment = n_total  # not necessary, but notationally clearer
for param in sorted(BOUNDS.iterkeys()): # sorted to make deterministic, though it may already be.
n_segment /= n_per_dim[param]
for i, p in enumerate(points):
idx = (i / n_segment) % n_per_dim[param]
p[param] = values[param][idx]
outbase[i] += str(idx) # now each outbase has a unique combination of indexes
# now each dictionary in values carries a unique combination of parameters for the emulator
# if memory was an issue one could just run the model at each step instead of generating them all.
    # I don't think 1000 dictionaries is the worst of my memory issues.
# now, send each fo these to my code.
for p, out in izip(points, outbase):
#check if the file already exists
        if path.isfile(path.join(outputdir, out + '_corr_%.3f.npy' % (1 / (1 + REDSHIFT)))):
            print 'Skipping %s' % out  # file already exists
            continue
calc_galaxy_autocorr(SIMNAME, 1 / (1 + REDSHIFT), path.join(outputdir, out),
params=p,do_jackknife=True, Lbox=400, npart=2048)
# mostly copied from allCorrFunc. I don't want to break backwards compatibility yet,
# but I need to make some changes here.
def calc_galaxy_autocorr(simname, scale_factor, outbase, params={},do_jackknife=True, **kwargs):
'Calculate the cross correlation for a single catalog at a single scale factor'
print h.heap()
print '--'*25
t0 = time()
cat = cat_dict[simname](**kwargs)
print str(cat)
halocat, model = loadHaloAndModel(cat, 'redMagic', scale_factor)
if do_jackknife:
data, cov = popAndCorr(halocat, model, cat, params,do_jackknife, MIN_PTCL, RBINS)
else:
data = popAndCorr(halocat, model, cat, params,do_jackknife, MIN_PTCL, RBINS)
header_start = ['Cosmology: %s'%simname, 'Params for HOD:' ]
header_start.extend('%s:%.3f'%(key,val) for key, val in params.iteritems())
header = '\n'.join(header_start)
np.savetxt(outbase + '_corr_%.3f.npy' % (scale_factor), data,
header = header)
if do_jackknife:
np.savetxt(outbase + '_cov_%.3f.npy' % (scale_factor), cov,
header = header)
print '\nTotal Time: %.3f\n' % (time() - t0)
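
# Example call (illustrative; mirrors the arguments used by paramCube above):
#   calc_galaxy_autocorr(SIMNAME, 1 / (1 + REDSHIFT), './output/Emulator',
#                        params={}, do_jackknife=False, Lbox=400, npart=2048)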
def testCube(outputdir, fixed_params={}, n_per_dim=4):
'''Create fake data of the same structure as paramCube for testing. '''
if type(n_per_dim) is int:
n_per_dim = {key: n_per_dim for key in BOUNDS.iterkeys()}
assert type(n_per_dim) is dict
values = {}
for param in BOUNDS.iterkeys():
if param in fixed_params:
n_per_dim[param] = 1
values[param] = np.array([fixed_params[param]])
else: # param in varied_params
values[param] = np.linspace(BOUNDS[param][0], BOUNDS[param][1], num=n_per_dim[param])
n_total = np.prod(n_per_dim.values())
points = [{} for i in xrange(n_total)]
fixed_base = '_'.join('%s%.2f' % (key, val) for key, val in fixed_params.iteritems()) + '_'
outbase = [fixed_base for i in xrange(n_total)]
    n_segment = n_total  # not necessary, but notationally clearer
for param in sorted(BOUNDS.iterkeys()): # sorted to make deterministic, though it may already be.
n_segment /= n_per_dim[param]
for i, p in enumerate(points):
idx = (i / n_segment) % n_per_dim[param]
p[param] = values[param][idx]
outbase[i] += str(idx) # now each outbase has a unique combination of indexes
# now each dictionary in values carries a unique combination of parameters for the emulator
# if memory was an issue one could just run the model at each step instead of generating them all.
    # I don't think 1000 dictionaries is the worst of my memory issues.
# now, send each fo these to my code.
simname = SIMNAME
scale_factor = 1/(1+REDSHIFT)
for p, out in izip(points, outbase):
ob = path.join(outputdir, out)
#I could maybe do something more interesting than rands.
data = np.stack( [(RBINS[1:] + RBINS[:-1]) / 2, np.random.rand(len(RBINS)-1)] )
#cov = np.random.rand((len(RBINS), len(RBINS))) #could just do an eye matrix too.
cov = np.eye((len(RBINS)-1))*np.random.rand()
header_start = ['Cosmology: %s'%simname, 'Params for HOD:' ]
header_start.extend('%s:%.3f'%(key,val) for key, val in p.iteritems())
header = '\n'.join(header_start)
np.savetxt(ob + '_corr_test_%.3f.npy' % (scale_factor), data,
header = header)
np.savetxt(ob + '_cov_test_%.3f.npy' % (scale_factor), cov,
header = header)
if __name__ == '__main__':
desc = 'Run my correlation function calculator over a large range of parameters'
parser = argparse.ArgumentParser(description=desc)
parser.add_argument('outputdir', type=str,
help='The directory to store the outputs of the calculations.')
parser.add_argument('--test', action='store_true', help='Create fake data with a similar structure for testing.')
parser.add_argument('--id',type=int,default=None, help='The job id for this call.')
for param in BOUNDS.iterkeys():
parser.add_argument(''.join(['--', param])) #no help scripts #YOLO
args = vars(parser.parse_args())
test = args['test']
del args['test']
outputdir = args['outputdir']
del args['outputdir']
id_no = args['id']
del args['id']
for key in args.keys():
if args[key] is not None:
args[key] = float(args[key])
else:
del args[key]
    # pretty smart if I say so myself
    # leave the default n_per_dim for now
print args
if not test:
paramCube(outputdir, fixed_params=args, id_no=id_no)
else:
testCube(outputdir, fixed_params=args)
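# Example invocation (illustrative; the per-parameter flags come from the keys
# of BOUNDS imported from doBatchCalls):
#   python paramCube.py /path/to/outputdir --test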
| [
"[email protected]"
] | |
565266b855d7039287e4042a28519c553237365d | 0c5ab5c69ed77ed9a0613e4cefd3fc1b418ceef6 | /FinalProject/WordSeg/SylMap.py | 060d497164e78364b3b7d61ee8d8eeef063be86f | [] | no_license | Neil-Do/HUS-Python | 00831eb1121690f76752867ccfdcb8bd98973af6 | 630849717bb27b99a9ddfb33ae479485ebb032d4 | refs/heads/master | 2020-09-13T10:54:50.523791 | 2020-01-17T06:53:43 | 2020-01-17T06:53:43 | 222,749,058 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 320 | py | class SylMap():
def __init__(self):
sylMapDataFile = open("../data/VNsyl.txt", 'r')
        size_ = sylMapDataFile.readline()  # skip the header line (appears to hold the syllable count)
self.syl_ = set(syllabel.strip() for syllabel in sylMapDataFile)
sylMapDataFile.close()
def isVNESE(self, token):
return token in self.syl_
    # already tested
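
if __name__ == '__main__':
    # Illustrative usage (added); assumes ../data/VNsyl.txt exists as above.
    syl_map = SylMap()
    print(syl_map.isVNESE('ha'))  # True for any syllable listed in VNsyl.txt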
| [
"[email protected]"
] | |
01c70cd54c7ba983dca24fdedac9503fe11c80ca | cc26a1bbae6af3dec61fd27e44484e01da21d36e | /Scientific Expedition/Sum by Type/mission.py | 7577f5f610a89a6c3cd896fe72c83b758d036bf5 | [] | no_license | ofisser86/py-check-io | 6bacef0783987e49f3bf28b9bea74e59e4ebb184 | 70469deea240f03199072f2dd28d6819815a2624 | refs/heads/master | 2023-02-02T09:32:53.934629 | 2020-12-16T13:44:51 | 2020-12-16T13:44:51 | 309,277,316 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 731 | py | from typing import Tuple
def sum_by_types(items: list) -> Tuple[str, int]:
return "".join([i for i in items if type(i) is str]), sum([i for i in items if type(i) is int])
if __name__ == '__main__':
print("Example:")
print(sum_by_types([]))
# These "asserts" are used for self-checking and not for an auto-testing
assert sum_by_types([]) == ('', 0)
assert sum_by_types([1, 2, 3]) == ('', 6)
assert sum_by_types(['1', 2, 3]) == ('1', 5)
assert sum_by_types(['1', '2', 3]) == ('12', 3)
assert sum_by_types(['1', '2', '3']) == ('123', 0)
assert sum_by_types(['size', 12, 'in', 45, 0]) == ('sizein', 57)
print("Coding complete? Click 'Check' to earn cool rewards!")
| [
"[email protected]"
] | |
196450c05cebf762582c46904661852744c4960a | d200a54adcec3a254a909b9689f925c1614f6fb1 | /backend/core/urls.py | 6a071e60621875e2de757a1a6ad7a79bb286993f | [] | no_license | shusaku-ishikawa/binance | 1bbe7f4aaf32c0ade4f67da7a4c1972f414bfa19 | 60bad0848fa4f4666e2476117a79ee8452326ed1 | refs/heads/master | 2022-01-27T01:35:24.038917 | 2019-11-30T12:42:36 | 2019-11-30T12:42:36 | 204,909,653 | 0 | 1 | null | 2022-01-15T05:20:54 | 2019-08-28T10:50:13 | JavaScript | UTF-8 | Python | false | false | 137 | py | from django.contrib import admin
from django.urls import path
app_name = 'core'
urlpatterns = [
path('admin/', admin.site.urls),
]
| [
"[email protected]"
] | |
5accaf4c31a7171c179ad37e1672b2d39ccb1637 | 10874b503afbe8a244e7ece543455e300b3b10d9 | /models/dalle_small/DALLE-pytorch/dalle_pytorch/attention.py | 6a1180cf11ae178ca8ec13bf72c1d13615dd416c | [
"MIT"
] | permissive | j-min/DallEval | 6f67c2d2c2edb412864118fc7e777c7ddfc0c9bb | e4148183ce2b1a592f193cb4c70c839606a7048d | refs/heads/main | 2023-05-22T13:36:37.828483 | 2022-11-15T00:59:07 | 2022-11-15T00:59:07 | 454,800,820 | 113 | 7 | MIT | 2022-12-21T01:27:58 | 2022-02-02T14:12:12 | Jupyter Notebook | UTF-8 | Python | false | false | 12,834 | py | from inspect import isfunction
from math import ceil
import torch
from torch import nn, einsum
import torch.nn.functional as F
from einops import rearrange, repeat
# helpers
def exists(val):
return val is not None
def uniq(arr):
    return {el: True for el in arr}.keys()
def default(val, d):
if exists(val):
return val
return d() if isfunction(d) else d
def max_neg_value(t):
return -torch.finfo(t.dtype).max
def stable_softmax(t, dim = -1, alpha = 32 ** 2):
t = t / alpha
t = t - torch.amax(t, dim = dim, keepdim = True)
return (t * alpha).softmax(dim = dim)
# classes
class Attention(nn.Module):
def __init__(self, dim, seq_len, causal = True, heads = 8, dim_head = 64, dropout = 0., stable = False):
super().__init__()
inner_dim = dim_head * heads
self.heads = heads
self.seq_len = seq_len
self.scale = dim_head ** -0.5
self.stable = stable
self.causal = causal
self.to_qkv = nn.Linear(dim, inner_dim * 3, bias = False)
self.to_out = nn.Sequential(
nn.Linear(inner_dim, dim),
nn.Dropout(dropout)
)
def forward(self, x, mask = None):
b, n, _, h, device = *x.shape, self.heads, x.device
softmax = torch.softmax if not self.stable else stable_softmax
qkv = self.to_qkv(x).chunk(3, dim = -1)
q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> b h n d', h = h), qkv)
q = q * self.scale
dots = torch.einsum('b h i d, b h j d -> b h i j', q, k)
mask_value = max_neg_value(dots)
if exists(mask):
mask = rearrange(mask, 'b j -> b () () j')
dots.masked_fill_(~mask, mask_value)
del mask
if self.causal:
i, j = dots.shape[-2:]
mask = torch.ones(i, j, device = device).triu_(j - i + 1).bool()
dots.masked_fill_(mask, mask_value)
attn = softmax(dots, dim=-1)
out = torch.einsum('b h i j, b h j d -> b h i d', attn, v)
out = rearrange(out, 'b h n d -> b n (h d)')
out = self.to_out(out)
return out
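# Hypothetical usage sketch (dims/shapes are assumptions, not from the original file):
#   attn = Attention(dim = 512, seq_len = 1024, heads = 8)
#   out = attn(torch.randn(2, 1024, 512))   # -> torch.Size([2, 1024, 512])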
# sparse attention with convolutional pattern, as mentioned in the blog post. customizable kernel size and dilation
class SparseConvCausalAttention(nn.Module):
def __init__(self, dim, seq_len, image_size = 32, kernel_size = 5, dilation = 1, heads = 8, dim_head = 64, dropout = 0., stable = False, **kwargs):
super().__init__()
assert kernel_size % 2 == 1, 'kernel size must be odd'
inner_dim = dim_head * heads
self.seq_len = seq_len
self.heads = heads
self.scale = dim_head ** -0.5
self.image_size = image_size
self.kernel_size = kernel_size
self.dilation = dilation
self.stable = stable
self.to_qkv = nn.Linear(dim, inner_dim * 3, bias = False)
self.to_out = nn.Sequential(
nn.Linear(inner_dim, dim),
nn.Dropout(dropout)
)
def forward(self, x, mask = None):
b, n, _, h, img_size, kernel_size, dilation, seq_len, device = *x.shape, self.heads, self.image_size, self.kernel_size, self.dilation, self.seq_len, x.device
softmax = torch.softmax if not self.stable else stable_softmax
img_seq_len = img_size ** 2
text_len = seq_len + 1 - img_seq_len
# padding
padding = seq_len - n + 1
mask = default(mask, lambda: torch.ones(b, text_len, device = device).bool())
x = F.pad(x, (0, 0, 0, padding), value = 0)
mask = mask[:, :text_len]
# derive query / keys / values
qkv = self.to_qkv(x).chunk(3, dim = -1)
q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> (b h) n d', h = h), qkv)
q *= self.scale
((q_text, q_img), (k_text, k_img), (v_text, v_img)) = map(lambda t: (t[:, :-img_seq_len], t[:, -img_seq_len:]), (q, k, v))
# text attention
dots_text = einsum('b i d, b j d -> b i j', q_text, k_text)
mask_value = max_neg_value(dots_text)
i, j = dots_text.shape[-2:]
text_causal_mask = torch.ones(i, j, device = device).triu_(j - i + 1).bool()
dots_text.masked_fill_(text_causal_mask, mask_value)
attn_text = softmax(dots_text, dim = -1)
out_text = einsum('b i j, b j d -> b i d', attn_text, v_text)
# image attention
effective_kernel_size = (kernel_size - 1) * dilation + 1
padding = effective_kernel_size // 2
k_img, v_img = map(lambda t: rearrange(t, 'b (h w) c -> b c h w', h = img_size), (k_img, v_img))
k_img, v_img = map(lambda t: F.unfold(t, kernel_size, padding = padding, dilation = dilation), (k_img, v_img))
k_img, v_img = map(lambda t: rearrange(t, 'b (d j) i -> b i j d', j = kernel_size ** 2), (k_img, v_img))
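        # Added note: after F.unfold, every image position carries its own
        # kernel_size**2 neighborhood of keys/values, so each image query attends
        # only to a local convolutional window (plus the full text sequence below).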
# let image attend to all of text
dots_image = einsum('b i d, b i j d -> b i j', q_img, k_img)
dots_image_to_text = einsum('b i d, b j d -> b i j', q_img, k_text)
# calculate causal attention for local convolution
i, j = dots_image.shape[-2:]
img_seq = torch.arange(img_seq_len, device = device)
k_img_indices = rearrange(img_seq.float(), '(h w) -> () () h w', h = img_size)
k_img_indices = F.pad(k_img_indices, (padding,) * 4, value = img_seq_len) # padding set to be max, so it is never attended to
k_img_indices = F.unfold(k_img_indices, kernel_size, dilation = dilation)
k_img_indices = rearrange(k_img_indices, 'b j i -> b i j')
# mask image attention
q_img_indices = rearrange(img_seq, 'i -> () i ()')
causal_mask = q_img_indices < k_img_indices
# concat text mask with image causal mask
causal_mask = repeat(causal_mask, '() i j -> b i j', b = b * h)
mask = repeat(mask, 'b j -> (b h) i j', i = i, h = h)
mask = torch.cat((~mask, causal_mask), dim = -1)
# image can attend to all of text
dots = torch.cat((dots_image_to_text, dots_image), dim = -1)
dots.masked_fill_(mask, mask_value)
attn = softmax(dots, dim = -1)
# aggregate
attn_image_to_text, attn_image = attn[..., :text_len], attn[..., text_len:]
out_image_to_image = einsum('b i j, b i j d -> b i d', attn_image, v_img)
out_image_to_text = einsum('b i j, b j d -> b i d', attn_image_to_text, v_text)
out_image = out_image_to_image + out_image_to_text
# combine attended values for both text and image
out = torch.cat((out_text, out_image), dim = 1)
out = rearrange(out, '(b h) n d -> b n (h d)', h = h)
out = self.to_out(out)
return out[:, :n]
# sparse axial causal attention
class SparseAxialCausalAttention(nn.Module):
def __init__(self, dim, seq_len, image_size = 32, axis = 0, heads = 8, dim_head = 64, dropout = 0., stable = False, **kwargs):
super().__init__()
assert axis in {0, 1}, 'axis must be either 0 (along height) or 1 (along width)'
self.axis = axis
inner_dim = dim_head * heads
self.seq_len = seq_len
self.heads = heads
self.scale = dim_head ** -0.5
self.image_size = image_size
self.stable = stable
self.to_qkv = nn.Linear(dim, inner_dim * 3, bias = False)
self.to_out = nn.Sequential(
nn.Linear(inner_dim, dim),
nn.Dropout(dropout)
)
def forward(self, x, mask = None):
b, n, _, h, img_size, axis, seq_len, device = *x.shape, self.heads, self.image_size, self.axis, self.seq_len, x.device
softmax = torch.softmax if not self.stable else stable_softmax
img_seq_len = img_size ** 2
text_len = seq_len + 1 - img_seq_len
# padding
padding = seq_len - n + 1
mask = default(mask, lambda: torch.ones(b, text_len, device = device).bool())
x = F.pad(x, (0, 0, 0, padding), value = 0)
mask = mask[:, :text_len]
# derive queries / keys / values
qkv = self.to_qkv(x).chunk(3, dim = -1)
q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> (b h) n d', h = h), qkv)
q *= self.scale
((q_text, q_img), (k_text, k_img), (v_text, v_img)) = map(lambda t: (t[:, :-img_seq_len], t[:, -img_seq_len:]), (q, k, v))
# text attention
dots_text = einsum('b i d, b j d -> b i j', q_text, k_text)
mask_value = max_neg_value(dots_text)
i, j = dots_text.shape[-2:]
text_causal_mask = torch.ones(i, j, device = device).triu_(j - i + 1).bool()
dots_text.masked_fill_(text_causal_mask, mask_value)
attn_text = softmax(dots_text, dim = -1)
out_text = einsum('b i j, b j d -> b i d', attn_text, v_text)
# image attention
split_axis_einops = 'b (h w) c -> b h w c' if axis == 0 else 'b (h w) c -> b w h c'
merge_axis_einops = 'b x n d -> b (x n) d' if axis == 0 else 'b x n d -> b (n x) d'
# split out axis
q_img, k_img, v_img = map(lambda t: rearrange(t, split_axis_einops, h = img_size), (q_img, k_img, v_img))
# similarity
dots_image_to_image = einsum('b x i d, b x j d -> b x i j', q_img, k_img)
dots_image_to_text = einsum('b x i d, b j d -> b x i j', q_img, k_text)
dots = torch.cat((dots_image_to_text, dots_image_to_image), dim = -1)
# mask so image has full attention to text, but causal along axis
bh, x, i, j = dots.shape
causal_mask = torch.ones(i, img_size, device = device).triu_(img_size - i + 1).bool()
causal_mask = repeat(causal_mask, 'i j -> b x i j', b = bh, x = x)
mask = repeat(mask, 'b j -> (b h) x i j', h = h, x = x, i = i)
mask = torch.cat((~mask, causal_mask), dim = -1)
dots.masked_fill_(mask, mask_value)
# attention.
attn = softmax(dots, dim = -1)
# aggregate
attn_image_to_text, attn_image_to_image = attn[..., :text_len], attn[..., text_len:]
out_image_to_image = einsum('b x i j, b x j d -> b x i d', attn_image_to_image, v_img)
out_image_to_text = einsum('b x i j, b j d -> b x i d', attn_image_to_text, v_text)
out_image = out_image_to_image + out_image_to_text
# merge back axis
out_image = rearrange(out_image, merge_axis_einops, x = img_size)
# combine attended values for both text and image
out = torch.cat((out_text, out_image), dim = 1)
out = rearrange(out, '(b h) n d -> b n (h d)', h = h)
out = self.to_out(out)
return out[:, :n]
# microsoft sparse attention CUDA kernel
class SparseAttention(Attention):
def __init__(
self,
*args,
block_size = 16,
text_seq_len = 256,
num_random_blocks = None,
**kwargs
):
super().__init__(*args, **kwargs)
from deepspeed.ops.sparse_attention import SparseSelfAttention, VariableSparsityConfig
self.block_size = block_size
num_random_blocks = default(num_random_blocks, self.seq_len // block_size // 4)
global_block_indices = list(range(ceil(text_seq_len / block_size)))
self.attn_fn = SparseSelfAttention(
sparsity_config = VariableSparsityConfig(
num_heads = self.heads,
block = self.block_size,
num_random_blocks = num_random_blocks,
global_block_indices = global_block_indices,
attention = 'unidirectional' if self.causal else 'bidirectional'
),
max_seq_length = self.seq_len,
attn_mask_mode = 'add'
)
def forward(self, x, mask = None):
b, n, _, h, device = *x.shape, self.heads, x.device
remainder = n % self.block_size
mask = default(mask, lambda: torch.ones(b, n, device = device).bool())
if remainder > 0:
padding = self.block_size - remainder
x = F.pad(x, (0, 0, 0, padding), value = 0)
mask = F.pad(mask, (0, padding), value = False)
qkv = self.to_qkv(x).chunk(3, dim = -1)
q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> b h n d', h = h), qkv)
key_pad_mask = None
if exists(mask):
key_pad_mask = ~mask
attn_mask = None
if self.causal:
i, j = q.shape[-2], k.shape[-2]
mask = torch.ones(i, j, device = device).triu_(j - i + 1).bool()
attn_mask = torch.zeros(i, j, device = device).to(q)
mask_value = max_neg_value(q) / 2
attn_mask.masked_fill_(mask, mask_value)
out = self.attn_fn(q, k, v, attn_mask = attn_mask, key_padding_mask = key_pad_mask)
out = rearrange(out, 'b h n d -> b n (h d)')
out = self.to_out(out)
return out[:, :n]
| [
"[email protected]"
] | |
2685be9bebb71c12c8a3a6a0c4af0206ea9d012b | ff7ed554b920bf1871e0e8991e4aa1fb5084f152 | /src/scoring/scorer.py | db30a09dca5aec826b5b5fc8c0f9583ce8c5213c | [
"BSD-3-Clause"
] | permissive | uw-bionlp/ards | 68bfec52c4b2c2a4b4878acf089709cb1ddebddb | e9fc27f7034cc6b54f0ccdba4a58377948cf0258 | refs/heads/main | 2023-04-04T19:48:41.375668 | 2021-04-15T01:05:25 | 2021-04-15T01:05:25 | 358,041,546 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,695 | py | import logging
import os
from collections import OrderedDict, Counter
import numpy as np
import pandas as pd
class Scorer(object):
def __init__(self):
pass
def compare(self, T, P):
return None
def combine(self, dfs):
return None
def fit(self, T, P, params=None, path=None):
'''
Score predictions
Parameters
----------
T = truth
P = predictions
'''
# Check sentence count
len_check(T, P)
# Get counts
dfs = self.compare(T, P)
if not isinstance(dfs, dict):
dfs = OrderedDict([('default', dfs)])
for k, df in dfs.items():
logging.info('\n\n{}\n{}'.format(k, df))
# Include each parameter in data frame
if params is not None:
dfs = {k:add_params_to_df(df, params) for k, df in dfs.items()}
if path is not None:
for k, df in dfs.items():
if len(dfs) == 1:
f = os.path.join(path, f"scores.csv")
else:
f = os.path.join(path, f"scores_{k}.csv")
df.to_csv(f)
return dfs
def combine_cv(self, dfs, path=None):
dfs = self.combine(dfs)
if path is not None:
for k, df in dfs.items():
if len(dfs) == 1:
f = os.path.join(path, f"scores.csv")
else:
f = os.path.join(path, f"scores_{k}.csv")
df.to_csv(f)
return dfs
def len_check(x, y):
assert len(x) == len(y), "length mismatch: {} vs {}".format(len(x), len(y))
def add_params_to_df(df, params):
# Loop on Level 1
for p1, v1 in params.items():
# Level 1 as dictionary
if isinstance(v1, dict):
# Loop on level 2
for p2, v2 in v1.items():
# Level 2 as dictionary
if isinstance(v2, dict):
# Loop on level 3
for p3, v3 in v2.items():
# Level 3 is dictionary
if isinstance(v3, dict):
df[str((p1, p2, p3))] = str(v3)
# Level 3 is not dict, list, or array
elif not isinstance(v3, (list, np.ndarray)):
df[str((p1, p2, p3))] = v3
# Level 2 is not dict, list, or array
elif not isinstance(v2, (list, np.ndarray)):
df[str((p1, p2))] = v2
# Level 1 is not dict, list, or array
elif not isinstance(v1, (list, np.ndarray)):
df[p1] = v1
return df
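# Illustrative behaviour of add_params_to_df (hypothetical params):
#   params = {'model': {'lr': 0.01, 'layers': {'hidden': 64}}, 'seed': 7}
#   adds columns "('model', 'lr')", "('model', 'layers', 'hidden')" and 'seed' to df.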
| [
"[email protected]"
] | |
cb3db768dc1343418cd45430123426146f56856c | a838d4bed14d5df5314000b41f8318c4ebe0974e | /sdk/resources/azure-mgmt-resource/azure/mgmt/resource/resources/v2016_09_01/operations/_deployment_operations_operations.py | a410f6846b1f4e88ddb19e928e3938e34985065a | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | scbedd/azure-sdk-for-python | ee7cbd6a8725ddd4a6edfde5f40a2a589808daea | cc8bdfceb23e5ae9f78323edc2a4e66e348bb17a | refs/heads/master | 2023-09-01T08:38:56.188954 | 2021-06-17T22:52:28 | 2021-06-17T22:52:28 | 159,568,218 | 2 | 0 | MIT | 2019-08-11T21:16:01 | 2018-11-28T21:34:49 | Python | UTF-8 | Python | false | false | 9,224 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class DeploymentOperationsOperations(object):
"""DeploymentOperationsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.resource.resources.v2016_09_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def get(
self,
resource_group_name, # type: str
deployment_name, # type: str
operation_id, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.DeploymentOperation"
"""Gets a deployments operation.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param deployment_name: The name of the deployment.
:type deployment_name: str
:param operation_id: The ID of the operation to get.
:type operation_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DeploymentOperation, or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2016_09_01.models.DeploymentOperation
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DeploymentOperation"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-09-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'operationId': self._serialize.url("operation_id", operation_id, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('DeploymentOperation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/deployments/{deploymentName}/operations/{operationId}'} # type: ignore
def list(
self,
resource_group_name, # type: str
deployment_name, # type: str
top=None, # type: Optional[int]
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.DeploymentOperationsListResult"]
"""Gets all deployments operations for a deployment.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param deployment_name: The name of the deployment with the operation to get.
:type deployment_name: str
:param top: The number of results to return.
:type top: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either DeploymentOperationsListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.resource.resources.v2016_09_01.models.DeploymentOperationsListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DeploymentOperationsListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-09-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if top is not None:
query_parameters['$top'] = self._serialize.query("top", top, 'int')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('DeploymentOperationsListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/deployments/{deploymentName}/operations'} # type: ignore
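# Hypothetical usage sketch (client construction is an assumption, not part of this file):
#   from azure.identity import DefaultAzureCredential
#   from azure.mgmt.resource import ResourceManagementClient
#   client = ResourceManagementClient(DefaultAzureCredential(), "<subscription-id>")
#   for operation in client.deployment_operations.list("my-rg", "my-deployment"):
#       print(operation.id)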
| [
"[email protected]"
] | |
07650116b243fdcdc1d07d9421308b6cee3c708e | c4af67db4c523d20f2d55aef90ba77db1fb53c38 | /PlonePAS/pas.py | 64c88fe9034587bc6f6eee27b0ac545a42c18253 | [] | no_license | dtgit/dtedu | e59b16612d7d9ea064026bf80a44657082ef45a3 | d787885fe7ed0de6f9e40e9b05d852a0e9d60677 | refs/heads/master | 2020-04-06T05:22:50.025074 | 2009-04-08T20:13:20 | 2009-04-08T20:13:20 | 171,351 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,706 | py | ##############################################################################
#
# PlonePAS - Adapt PluggableAuthService for use in Plone
# Copyright (C) 2005 Enfold Systems, Kapil Thangavelu, et al
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this
# distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""
pas alterations and monkies
"""
from sets import Set
from Products.CMFCore.utils import getToolByName
from AccessControl import Unauthorized, getSecurityManager
from AccessControl.Permissions import manage_users as ManageUsers
from AccessControl.Permissions import manage_properties, change_permissions
from Products.PluggableAuthService.PluggableAuthService import \
PluggableAuthService, _SWALLOWABLE_PLUGIN_EXCEPTIONS
from Products.PluggableAuthService.PluggableAuthService import security
from Products.PluggableAuthService.interfaces.plugins import IRoleAssignerPlugin
from Products.PluggableAuthService.interfaces.plugins import IUserEnumerationPlugin
from Products.PluggableAuthService.interfaces.plugins import IGroupEnumerationPlugin
from Products.PlonePAS.interfaces.plugins import IUserManagement, ILocalRolesPlugin
from Products.PlonePAS.interfaces.group import IGroupIntrospection
from Products.PlonePAS.interfaces.plugins import IUserIntrospection
from AccessControl.requestmethod import postonly
# Register the PAS acl_users as a utility
from Products.CMFCore.utils import registerToolInterface
from Products.PluggableAuthService.interfaces.authservice import IPluggableAuthService
registerToolInterface('acl_users', IPluggableAuthService)
#################################
# pas folder monkies - standard zope user folder api
_old_doAddUser = PluggableAuthService._doAddUser
def _doAddUser(self, login, password, roles, domains, groups=None, **kw ):
"""Masking of PAS._doAddUser to add groups param."""
retval = _old_doAddUser(self, login, password, roles, domains)
if groups is not None:
self.userSetGroups(login, groups)
return retval
PluggableAuthService._doAddUser = _doAddUser
def _doDelUsers(self, names, REQUEST=None):
"""
Delete users given by a list of user ids.
Has no return value, like the original.
"""
for name in names:
self._doDelUser(name)
PluggableAuthService._doDelUsers = _doDelUsers
def _doDelUser(self, id):
"""
Given a user id, hand off to a deleter plugin if available.
"""
plugins = self._getOb('plugins')
userdeleters = plugins.listPlugins(IUserManagement)
if not userdeleters:
raise NotImplementedError("There is no plugin that can "
" delete users.")
for userdeleter_id, userdeleter in userdeleters:
try:
userdeleter.doDeleteUser(id)
except _SWALLOWABLE_PLUGIN_EXCEPTIONS:
pass
PluggableAuthService._doDelUser = _doDelUser
security.declareProtected(ManageUsers, 'userFolderDelUsers')
PluggableAuthService.userFolderDelUsers = postonly(PluggableAuthService._doDelUsers)
def _doChangeUser(self, principal_id, password, roles, domains=(), groups=None,
REQUEST=None, **kw):
"""
Given a principal id, change its password, roles, domains, iff
respective plugins for such exist.
XXX domains are currently ignored.
"""
# Might be called with 'None' as password from the Plone UI, in
# prefs_users_overview when resetPassword is not set.
if password is not None:
self.userSetPassword(principal_id, password)
plugins = self._getOb('plugins')
rmanagers = plugins.listPlugins(IRoleAssignerPlugin)
if not (rmanagers):
raise NotImplementedError("There is no plugin that can modify roles")
for rid, rmanager in rmanagers:
rmanager.assignRolesToPrincipal(roles, principal_id)
if groups is not None:
self.userSetGroups(principal_id, groups)
return True
PluggableAuthService._doChangeUser = _doChangeUser
security.declareProtected(ManageUsers, 'userFolderEditUser')
PluggableAuthService.userFolderEditUser = postonly(PluggableAuthService._doChangeUser)
# ttw alias
# XXX need to security restrict these methods, no base class sec decl
#PluggableAuthService.userFolderAddUser__roles__ = ()
def userFolderAddUser(self, login, password, roles, domains, groups=None, REQUEST=None, **kw ):
self._doAddUser(login, password, roles, domains, **kw)
if groups is not None:
self.userSetGroups(login, groups)
PluggableAuthService.userFolderAddUser = postonly(userFolderAddUser)
def _doAddGroup(self, id, roles, groups=None, **kw):
gtool = getToolByName(self, 'portal_groups')
return gtool.addGroup(id, roles, groups, **kw)
PluggableAuthService._doAddGroup = _doAddGroup
# for prefs_group_manage compatibility. really should be using tool.
def _doDelGroups(self, names, REQUEST=None):
gtool = getToolByName(self, 'portal_groups')
for group_id in names:
gtool.removeGroup(group_id)
PluggableAuthService._doDelGroups = _doDelGroups
security.declareProtected(ManageUsers, 'userFolderDelGroups')
PluggableAuthService.userFolderDelGroups = postonly(PluggableAuthService._doDelGroups)
def _doChangeGroup(self, principal_id, roles, groups=None, REQUEST=None, **kw):
"""
Given a group's id, change its roles, domains, iff respective
plugins for such exist.
XXX domains are currently ignored.
See also _doChangeUser
"""
gtool = getToolByName(self, 'portal_groups')
gtool.editGroup(principal_id, roles, groups, **kw)
return True
PluggableAuthService._doChangeGroup = _doChangeGroup
def _updateGroup(self, principal_id, roles=None, groups=None, **kw):
"""
Given a group's id, change its roles, groups, iff respective
plugins for such exist.
XXX domains are currently ignored.
This is not an alias to _doChangeGroup because its params are different (slightly).
"""
return self._doChangeGroup(principal_id, roles, groups, **kw)
PluggableAuthService._updateGroup = _updateGroup
security.declareProtected(ManageUsers, 'userFolderEditGroup')
PluggableAuthService.userFolderEditGroup = postonly(PluggableAuthService._doChangeGroup)
security.declareProtected(ManageUsers, 'getGroups')
def getGroups(self):
gtool = getToolByName(self, 'portal_groups')
return gtool.listGroups()
PluggableAuthService.getGroups = getGroups
security.declareProtected(ManageUsers, 'getGroupNames')
def getGroupNames(self):
gtool = getToolByName(self, 'portal_groups')
return gtool.getGroupIds()
PluggableAuthService.getGroupNames = getGroupNames
security.declareProtected(ManageUsers, 'getGroupIds')
def getGroupIds(self):
gtool = getToolByName(self, 'portal_groups')
return gtool.getGroupIds()
PluggableAuthService.getGroupIds = getGroupIds
security.declareProtected(ManageUsers, 'getGroup')
def getGroup(self, group_id):
"""Like getGroupById in groups tool, but doesn't wrap.
"""
group = None
introspectors = self.plugins.listPlugins(IGroupIntrospection)
if not introspectors:
raise ValueError, 'No plugins allow for group management'
for iid, introspector in introspectors:
group = introspector.getGroupById(group_id)
if group is not None:
break
return group
PluggableAuthService.getGroup = getGroup
security.declareProtected(ManageUsers, 'getGroupByName')
def getGroupByName(self, name, default = None):
ret = self.getGroup(name)
if ret is None:
return default
return ret
PluggableAuthService.getGroupByName = getGroupByName
security.declareProtected(ManageUsers, 'getGroupById')
def getGroupById(self, id, default = None):
gtool = getToolByName(self, "portal_groups")
ret = gtool.getGroupById(id)
if ret is None:
return default
else:
return ret
PluggableAuthService.getGroupById = getGroupById
security.declarePublic("getLocalRolesForDisplay")
def getLocalRolesForDisplay(self, object):
"""This is used for plone's local roles display
This method returns a tuple (massagedUsername, roles, userType,
actualUserName). This method is protected by the 'access content
information' permission. We may change that if it's too
permissive...
A GRUF method originally.
"""
# Perform security check on destination object
if not getSecurityManager().checkPermission(manage_properties, object):
raise Unauthorized(name = "getLocalRolesForDisplay")
return self._getLocalRolesForDisplay(object)
PluggableAuthService.getLocalRolesForDisplay = getLocalRolesForDisplay
def _getLocalRolesForDisplay(self, object):
result = []
# we don't have a PAS-side way to get this
local_roles = object.get_local_roles()
for one_user in local_roles:
username = userid = one_user[0]
roles = one_user[1]
userType = 'user'
if self.getGroup(userid):
userType = 'group'
else:
user = self.getUserById(userid) or self.getUser(username)
if user:
username = user.getUserName()
userid = user.getId()
result.append((username, roles, userType, userid))
return tuple(result)
PluggableAuthService._getLocalRolesForDisplay = _getLocalRolesForDisplay
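# Illustrative return shape of _getLocalRolesForDisplay (hypothetical data):
#   (('jdoe', ('Owner',), 'user', 'jdoe'),
#    ('Reviewers', ('Reviewer',), 'group', 'Reviewers'))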
def getUsers(self):
"""
Return a list of all users from plugins that implement the user
introspection interface.
Could potentially be very long.
"""
# We should have a method that's cheap about returning number of users.
retval = []
plugins = self._getOb('plugins')
try:
introspectors = self.plugins.listPlugins(IUserIntrospection)
except KeyError:
return retval
for iid, introspector in introspectors:
retval += introspector.getUsers()
return retval
PluggableAuthService.getUsers = getUsers
PluggableAuthService.getPureUsers = getUsers # this'll make listMembers work
def canListAllUsers(self):
plugins = self._getOb('plugins')
# Do we have multiple user plugins?
if len(plugins.listPlugins(IUserEnumerationPlugin)) != len(plugins.listPlugins(IUserIntrospection)):
return False
# Does our single user enumerator support the needed API?
#for method in [#'countAllUsers',
# 'getUsers',
# 'getUserNames']:
# if not hasattr(pas, method):
# return False
return True
PluggableAuthService.canListAllUsers = canListAllUsers
def canListAllGroups(self):
plugins = self._getOb('plugins')
# Do we have multiple user plugins?
if len(plugins.listPlugins(IGroupEnumerationPlugin)) != len(plugins.listPlugins(IGroupIntrospection)):
return False
return True
PluggableAuthService.canListAllGroups = canListAllGroups
def userSetPassword(self, userid, password):
"""Emulate GRUF 3 call for password set, for use with PwRT."""
# used by _doChangeUser
plugins = self._getOb('plugins')
managers = plugins.listPlugins(IUserManagement)
if not (managers):
raise NotImplementedError("There is no plugin that can modify users")
modified = False
for mid, manager in managers:
try:
manager.doChangeUser(userid, password)
except RuntimeError:
# XXX: why silent ignore this Error?
pass
else:
modified = True
if not modified:
raise RuntimeError ("No user management plugins were able "
"to successfully modify the user")
PluggableAuthService.userSetPassword = userSetPassword
def credentialsChanged(self, user, name, new_password):
"""Notifies the authentication mechanism that this user has changed
passwords. This can be used to update the authentication cookie.
Note that this call should *not* cause any change at all to user
databases.
For use by CMFCore.MembershipTool.credentialsChanged
"""
request = self.REQUEST
response = request.RESPONSE
login = name
self.updateCredentials(request, response, login, new_password)
PluggableAuthService.credentialsChanged = credentialsChanged
# for ZopeVersionControl, we need to check 'plugins' for more than
# existence, since it replaces objects (like 'plugins') with SimpleItems
# and calls _delOb, which tries to use special methods of 'plugins'
from OFS.Folder import Folder
def _delOb( self, id ):
#
# Override ObjectManager's version to clean up any plugin
# registrations for the deleted object
#
    # XXX imo this is an evil one
#
plugins = self._getOb( 'plugins', None )
if getattr(plugins, 'removePluginById', None) is not None:
plugins.removePluginById( id )
Folder._delOb( self, id )
PluggableAuthService._delOb = _delOb
def addRole( self, role ):
plugins = self._getOb('plugins')
roles = plugins.listPlugins(IRoleAssignerPlugin)
for plugin_id, plugin in roles:
try:
plugin.addRole( role )
return
except _SWALLOWABLE_PLUGIN_EXCEPTIONS:
pass
PluggableAuthService.addRole = addRole
def getAllLocalRoles( self, context ):
# Perform security check on destination object
if not getSecurityManager().checkPermission(change_permissions, context):
raise Unauthorized(name = "getAllLocalRoles")
return self._getAllLocalRoles(context)
PluggableAuthService.getAllLocalRoles = getAllLocalRoles
def _getAllLocalRoles(self, context):
plugins = self._getOb('plugins')
lrmanagers = plugins.listPlugins(ILocalRolesPlugin)
roles={}
for lrid, lrmanager in lrmanagers:
newroles=lrmanager.getAllLocalRolesInContext(context)
for k,v in newroles.items():
if k not in roles:
roles[k]=Set()
roles[k].update(v)
return roles
PluggableAuthService._getAllLocalRoles = _getAllLocalRoles
from Products.PluggableAuthService.plugins.ZODBUserManager import ZODBUserManager
def noKeywordEnumerateusers(self, id=None, login=None, exact_match=False,
sort_by=None, max_results=None, **kw):
if kw:
return ()
return self._oldEnumerateUsers(id, login, exact_match, sort_by,
max_results, **kw)
ZODBUserManager._oldEnumerateUsers = ZODBUserManager.enumerateUsers
ZODBUserManager.enumerateUsers = noKeywordEnumerateusers
| [
"[email protected]"
] | |
a48fc7b47b8c888ac0173f2c85b6eee2778b957f | eac7f7b96ebce1351dc6b50e45f1fcfa0f930dbb | /python/graphscope/interactive/query.py | b848c3c4251f27b449d1ffbb815dab5ff5b304e6 | [
"Apache-2.0"
] | permissive | Jancd/GraphScope | 82bc43e02717fc3df1811ccfb73f476649b709fa | e162f11886dc49d68038836b665aa5381cea8d24 | refs/heads/main | 2023-01-31T18:40:06.995923 | 2020-12-15T01:24:12 | 2020-12-15T01:24:12 | 321,543,831 | 1 | 0 | Apache-2.0 | 2020-12-15T03:39:00 | 2020-12-15T03:38:59 | null | UTF-8 | Python | false | false | 5,129 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import datetime
import logging
import random
from concurrent.futures import ThreadPoolExecutor
from gremlin_python.driver.client import Client
from graphscope.framework.loader import Loader
logger = logging.getLogger("graphscope")
class InteractiveQuery(object):
"""`InteractiveQuery` class, is a simple wrapper around
`Gremlin-Python <https://pypi.org/project/gremlinpython/>`_,
which implements Gremlin within the Python language.
    It can also expose a gremlin endpoint which can be used by any other standard gremlin console.
    It also has a method called `subgraph` which can extract some fragments
    from the origin graph and produce a new, smaller but concise graph stored in vineyard,
    whose lifetime is independent of the origin graph.
"""
def __init__(self, graphscope_session, object_id, front_ip, front_port):
self._graphscope_session = graphscope_session
self._object_id = object_id
self._graph_url = "ws://%s:%d/gremlin" % (front_ip, front_port)
self._client = Client(self._graph_url, "g")
self._closed = False
@property
def object_id(self):
"""Get the vineyard object id of graph.
Returns:
str: object id
"""
return self._object_id
@property
def graph_url(self):
"""The gremlin graph url can be used with any standard gremlin console, e.g., thinkerpop."""
return self._graph_url
def closed(self):
"""Return if the current instance is closed."""
return self._closed
def subgraph(self, gremlin_script):
"""Create a subgraph, which input is the result of the execution of `gremlin_script`.
Any gremlin script that will output a set of edges can be used to contruct a subgraph.
Args:
gremlin_script (str): gremlin script to be executed
Raises:
RuntimeError: If the interactive instance is closed.
Returns:
:class:`Graph`: constructed subgraph. which is also stored in vineyard.
"""
if self.closed():
raise RuntimeError("Interactive query is closed.")
now_time = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
random_num = random.randint(0, 10000000)
graph_name = "%s_%s" % (str(now_time), str(random_num))
# create graph handle by name
self._client.submit(
"g.createGraph('%s').with('graphType', 'vineyard')" % graph_name
).all().result()
# start a thread to launch the graph
def load_subgraph(name):
import vineyard
host, port = self._graphscope_session.engine_config[
"vineyard_rpc_endpoint"
].split(":")
client = vineyard.connect(host, int(port))
# get vertex/edge stream id
vstream = client.get_name("__%s_vertex_stream" % name, True)
estream = client.get_name("__%s_edge_stream" % name, True)
# invoke load_from
g = self._graphscope_session.load_from(
edges=[Loader(estream)],
vertices=[Loader(vstream)],
generate_eid=False,
)
client.put_name(vineyard.ObjectID(g.vineyard_id), graph_name)
logger.info("subgraph has been loaded")
return g
pool = ThreadPoolExecutor()
subgraph_task = pool.submit(load_subgraph, (graph_name,))
# add subgraph vertices and edges
subgraph_script = "%s.subgraph('%s').outputVineyard('%s')" % (
gremlin_script,
graph_name,
graph_name,
)
self._client.submit(subgraph_script).all().result()
return subgraph_task.result()
def execute(self, query):
"""Execute gremlin querying scripts.
        Behind the scenes, it uses `gremlinpython` to send the query.
        Args:
            query (str): Script written in the Gremlin query language.
        Raises:
            RuntimeError: If the interactive query is closed.
Returns:
execution results
"""
if self.closed():
raise RuntimeError("Interactive query is closed.")
return self._client.submit(query)
def close(self):
"""Close interactive instance and release resources"""
if not self.closed():
self._closed = True
self._graphscope_session.close_interactive_instance(self)
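# Hypothetical usage sketch (session/graph construction is assumed, not shown here):
#   interactive = graphscope_session.gremlin(graph)          # yields an InteractiveQuery
#   count = interactive.execute("g.V().count()").all().result()
#   sub = interactive.subgraph("g.V().outE('knows')")
#   interactive.close()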
| [
"[email protected]"
] | |
ed4062b3b9dba0e71e56a70a329d543dd325663d | 4f0ceccea62d3c909af88a7e3f5e1d063ed94b2e | /1368_Minimum Cost to Make at Least One Valid Path in a Grid.py | 8406191983ad2ec32fef45f5613be51ea9b98894 | [] | no_license | 0as1s/leetcode | d19c2ec4f96666a03227871b9b21b26adcd6b3b4 | c82d375f8d9d4feeaba243eb5c990c1ba3ec73d2 | refs/heads/master | 2021-05-09T21:30:35.378394 | 2021-02-01T15:37:37 | 2021-02-01T15:37:37 | 118,729,125 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,661 | py | from collections import deque
from typing import List
class Solution:
    def minCost(self, grid: List[List[int]]) -> int:
        # 0-1 BFS: following the arrow already in a cell costs 0, rewriting it costs 1.
        if not grid or not grid[0]:
            return 0
        m, n = len(grid), len(grid[0])
        directions = [[0,0],[0,1],[0,-1],[1,0],[-1,0]]
        q = deque([(0,0,0)])
        visit = set()
        visit.add((0,0))
        while q:
            cx, cy, dis = q.popleft()
            if cx == m-1 and cy == n-1:
                return dis
            visit.add((cx, cy))
            for i in range(1,5):
                nx = cx + directions[i][0]
                ny = cy + directions[i][1]
                cost = 1 if grid[cx][cy] != i else 0
                if 0 <= nx < m and 0 <= ny < n and (nx, ny) not in visit:
                    if cost == 1:
                        q.append((nx, ny, dis+1))
                    else:
                        q.appendleft((nx, ny, dis))
        return -1
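# Hypothetical quick check (cell codes: 1=right, 2=left, 3=down, 4=up):
#   Solution().minCost([[1,1,3],[3,2,2],[1,1,4]])   # -> 0 (LeetCode 1368, example 2)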
# class Solution(object):
# def minCost(self, grid):
# """
# :type grid: List[List[int]]
# :rtype: int
# """
# dd = {
# 1: (0, 1),
# 2: (0, -1),
# 3: (1, 0),
# 4: (-1, 0)
# }
# current = set([(0, 0)])
# cur = (0, 0)
# h = len(grid)
# l = len(grid[0])
# count = 0
# used = set()
# while True:
# x, y = cur
# if x == h-1 and y == l-1:
# return count
# dx, dy = dd[grid[x][y]]
# nx, ny = x + dx, y + dy
# if (nx, ny) not in used and 0 <= nx < h and 0 <= ny < l:
# if nx == h-1 and ny == l-1:
# return count
# current.add((nx, ny))
# used.add((nx, ny))
# cur = (nx, ny)
# else:
# break
# while True:
# count += 1
# new_current = set()
# for cur in current:
# x, y = cur
# for dx, dy in dd.values():
# nx, ny = x + dx, y + dy
# if (nx, ny) not in used and 0 <= nx < h and 0 <= ny < l:
# if nx == h-1 and ny == l-1:
# return count
# used.add((nx, ny))
# new_current.add((nx, ny))
# ddx, ddy = dd[grid[nx][ny]]
# nnx, nny = nx + ddx, ny + ddy
# while (nnx, nny) not in used and 0 <= nnx < h and 0 <= nny < l:
# if nnx == h-1 and nny == l-1:
# return count
# #used.add((nnx, nny))
# new_current.add((nnx, nny))
# ddx, ddy = dd[grid[nnx][nny]]
# nnx, nny = nx + ddx, ny + ddy
# current = new_current | [
"[email protected]"
] | |
82613acce0ae9f7f5f98b7e788444a5660de8c5c | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/goatLatin_20200803094226.py | 95977408a574cf0cbbe7838b30f3c6531d9dfa24 | [] | no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 353 | py | def goalLatin(S):
    vowels = {'a', 'e', 'i', 'o', 'u'}
    words = S.split(" ")
    result = []
    for i, word in enumerate(words):
        if word[0].lower() in vowels:
            # Words that begin with a vowel just get "ma" appended.
            newWord = word + "ma"
        else:
            # Otherwise move the first letter to the end, then append "ma".
            newWord = word[1:] + word[0] + "ma"
        # Append one extra 'a' per word position (1-based), then collect.
        newWord += "a" * (i + 1)
        result.append(newWord)
    return " ".join(result)
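# Hypothetical quick check:
#   goalLatin("I speak Goat Latin")
#   -> 'Imaa peaksmaaa oatGmaaaa atinLmaaaaa'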
| [
"[email protected]"
] | |
1ce1c226b036e66a4128c738fb794ba8a6a298eb | 5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d | /alipay/aop/api/domain/UnitedVoucherDigest.py | 215bbce60af562cae3f1e47b250b8e73ca0210bc | [
"Apache-2.0"
] | permissive | alipay/alipay-sdk-python-all | 8bd20882852ffeb70a6e929038bf88ff1d1eff1c | 1fad300587c9e7e099747305ba9077d4cd7afde9 | refs/heads/master | 2023-08-27T21:35:01.778771 | 2023-08-23T07:12:26 | 2023-08-23T07:12:26 | 133,338,689 | 247 | 70 | Apache-2.0 | 2023-04-25T04:54:02 | 2018-05-14T09:40:54 | Python | UTF-8 | Python | false | false | 5,157 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class UnitedVoucherDigest(object):
def __init__(self):
self._budget_close = None
self._ceiling_amount = None
self._discount_type = None
self._from_amount = None
self._prize_id = None
self._reduction_ratio = None
self._show_order = None
self._to_amount = None
self._voucher_biz_code = None
@property
def budget_close(self):
return self._budget_close
@budget_close.setter
def budget_close(self, value):
self._budget_close = value
@property
def ceiling_amount(self):
return self._ceiling_amount
@ceiling_amount.setter
def ceiling_amount(self, value):
self._ceiling_amount = value
@property
def discount_type(self):
return self._discount_type
@discount_type.setter
def discount_type(self, value):
self._discount_type = value
@property
def from_amount(self):
return self._from_amount
@from_amount.setter
def from_amount(self, value):
self._from_amount = value
@property
def prize_id(self):
return self._prize_id
@prize_id.setter
def prize_id(self, value):
self._prize_id = value
@property
def reduction_ratio(self):
return self._reduction_ratio
@reduction_ratio.setter
def reduction_ratio(self, value):
self._reduction_ratio = value
@property
def show_order(self):
return self._show_order
@show_order.setter
def show_order(self, value):
self._show_order = value
@property
def to_amount(self):
return self._to_amount
@to_amount.setter
def to_amount(self, value):
self._to_amount = value
@property
def voucher_biz_code(self):
return self._voucher_biz_code
@voucher_biz_code.setter
def voucher_biz_code(self, value):
self._voucher_biz_code = value
def to_alipay_dict(self):
params = dict()
if self.budget_close:
if hasattr(self.budget_close, 'to_alipay_dict'):
params['budget_close'] = self.budget_close.to_alipay_dict()
else:
params['budget_close'] = self.budget_close
if self.ceiling_amount:
if hasattr(self.ceiling_amount, 'to_alipay_dict'):
params['ceiling_amount'] = self.ceiling_amount.to_alipay_dict()
else:
params['ceiling_amount'] = self.ceiling_amount
if self.discount_type:
if hasattr(self.discount_type, 'to_alipay_dict'):
params['discount_type'] = self.discount_type.to_alipay_dict()
else:
params['discount_type'] = self.discount_type
if self.from_amount:
if hasattr(self.from_amount, 'to_alipay_dict'):
params['from_amount'] = self.from_amount.to_alipay_dict()
else:
params['from_amount'] = self.from_amount
if self.prize_id:
if hasattr(self.prize_id, 'to_alipay_dict'):
params['prize_id'] = self.prize_id.to_alipay_dict()
else:
params['prize_id'] = self.prize_id
if self.reduction_ratio:
if hasattr(self.reduction_ratio, 'to_alipay_dict'):
params['reduction_ratio'] = self.reduction_ratio.to_alipay_dict()
else:
params['reduction_ratio'] = self.reduction_ratio
if self.show_order:
if hasattr(self.show_order, 'to_alipay_dict'):
params['show_order'] = self.show_order.to_alipay_dict()
else:
params['show_order'] = self.show_order
if self.to_amount:
if hasattr(self.to_amount, 'to_alipay_dict'):
params['to_amount'] = self.to_amount.to_alipay_dict()
else:
params['to_amount'] = self.to_amount
if self.voucher_biz_code:
if hasattr(self.voucher_biz_code, 'to_alipay_dict'):
params['voucher_biz_code'] = self.voucher_biz_code.to_alipay_dict()
else:
params['voucher_biz_code'] = self.voucher_biz_code
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = UnitedVoucherDigest()
if 'budget_close' in d:
o.budget_close = d['budget_close']
if 'ceiling_amount' in d:
o.ceiling_amount = d['ceiling_amount']
if 'discount_type' in d:
o.discount_type = d['discount_type']
if 'from_amount' in d:
o.from_amount = d['from_amount']
if 'prize_id' in d:
o.prize_id = d['prize_id']
if 'reduction_ratio' in d:
o.reduction_ratio = d['reduction_ratio']
if 'show_order' in d:
o.show_order = d['show_order']
if 'to_amount' in d:
o.to_amount = d['to_amount']
if 'voucher_biz_code' in d:
o.voucher_biz_code = d['voucher_biz_code']
return o
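# Hypothetical round-trip sketch:
#   digest = UnitedVoucherDigest.from_alipay_dict({'prize_id': 'P123', 'discount_type': 'RATIO'})
#   assert digest.to_alipay_dict() == {'prize_id': 'P123', 'discount_type': 'RATIO'}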
| [
"[email protected]"
] | |
c8be4275c567f1b45a9c30be24e70d5c9f86cb5f | bc441bb06b8948288f110af63feda4e798f30225 | /terraform_sdk/model/inspection/metric_group_pb2.pyi | 744283cf0fc142859439410d535c139cf855a348 | [
"Apache-2.0"
] | permissive | easyopsapis/easyops-api-python | 23204f8846a332c30f5f3ff627bf220940137b6b | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | refs/heads/master | 2020-06-26T23:38:27.308803 | 2020-06-16T07:25:41 | 2020-06-16T07:25:41 | 199,773,131 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,951 | pyi | # @generated by generate_proto_mypy_stubs.py. Do not edit!
import sys
from google.protobuf.descriptor import (
Descriptor as google___protobuf___descriptor___Descriptor,
)
from google.protobuf.internal.containers import (
RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer,
)
from google.protobuf.message import (
Message as google___protobuf___message___Message,
)
from terraform_sdk.model.inspection.dim_pb2 import (
InspectionDim as terraform_sdk___model___inspection___dim_pb2___InspectionDim,
)
from terraform_sdk.model.inspection.val_pb2 import (
InspectionVal as terraform_sdk___model___inspection___val_pb2___InspectionVal,
)
from typing import (
Iterable as typing___Iterable,
Optional as typing___Optional,
Text as typing___Text,
Union as typing___Union,
)
from typing_extensions import (
Literal as typing_extensions___Literal,
)
builtin___bool = bool
builtin___bytes = bytes
builtin___float = float
builtin___int = int
if sys.version_info < (3,):
builtin___buffer = buffer
builtin___unicode = unicode
class InspectionMetricGroup(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
id = ... # type: typing___Text
name = ... # type: typing___Text
category = ... # type: typing___Text
memo = ... # type: typing___Text
@property
def dims(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[terraform_sdk___model___inspection___dim_pb2___InspectionDim]: ...
@property
def vals(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[terraform_sdk___model___inspection___val_pb2___InspectionVal]: ...
def __init__(self,
*,
id : typing___Optional[typing___Text] = None,
name : typing___Optional[typing___Text] = None,
category : typing___Optional[typing___Text] = None,
dims : typing___Optional[typing___Iterable[terraform_sdk___model___inspection___dim_pb2___InspectionDim]] = None,
vals : typing___Optional[typing___Iterable[terraform_sdk___model___inspection___val_pb2___InspectionVal]] = None,
memo : typing___Optional[typing___Text] = None,
) -> None: ...
if sys.version_info >= (3,):
@classmethod
def FromString(cls, s: builtin___bytes) -> InspectionMetricGroup: ...
else:
@classmethod
def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> InspectionMetricGroup: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def ClearField(self, field_name: typing_extensions___Literal[u"category",b"category",u"dims",b"dims",u"id",b"id",u"memo",b"memo",u"name",b"name",u"vals",b"vals"]) -> None: ...
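# Hypothetical construction sketch (against the generated runtime class, not this stub):
#   group = InspectionMetricGroup(id="cpu", name="CPU", category="host", memo="")
#   payload = group.SerializeToString()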
| [
"[email protected]"
] | |
811d4b3ee04b7828cbc1c09b26fa4f53da40ae09 | 2b28814e50b036a17afb26cd56accdacb6f38854 | /src/api/migrations/0001_initial.py | c91b14a01d4a3367c1fbda7671980d2c60cc7a76 | [
"BSD-2-Clause"
] | permissive | Financial-Times/bullet-train-api | c6660965ca5e8f956b7666bde35b5f64bf18f773 | a54e0df1c85ff353c1b5c3056ea3e3e4d1fc7006 | refs/heads/master | 2023-08-03T16:31:06.503907 | 2021-02-10T09:29:35 | 2021-02-10T09:29:35 | 170,123,292 | 1 | 1 | BSD-3-Clause | 2019-10-01T16:26:51 | 2019-02-11T12:14:35 | Python | UTF-8 | Python | false | false | 4,536 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2017-04-20 12:59
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Environment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=2000)),
('created_date', models.DateTimeField(auto_now_add=True, verbose_name=b'DateCreated')),
('api_key', models.UUIDField(default=uuid.uuid4)),
],
),
migrations.CreateModel(
name='Feature',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=2000)),
('created_date', models.DateTimeField(auto_now_add=True, verbose_name=b'DateCreated')),
],
),
migrations.CreateModel(
name='FeatureState',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('enabled', models.BooleanField()),
('environment', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='featurestates', to='api.Environment')),
('feature', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='featurestates', to='api.Feature')),
],
),
migrations.CreateModel(
name='FFAdminUser',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
],
),
migrations.CreateModel(
name='Identity',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('identifier', models.CharField(max_length=2000)),
('created_date', models.DateTimeField(auto_now_add=True, verbose_name=b'DateCreated')),
('version', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='identities', to='api.Environment')),
],
options={
'verbose_name_plural': 'Identities',
},
),
migrations.CreateModel(
name='IdentityFeature',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('enabled', models.BooleanField()),
('feature', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='identityfeatures', to='api.Feature')),
('identity', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='identityfeatures', to='api.Identity')),
],
),
migrations.CreateModel(
name='Organisation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=2000)),
],
),
migrations.CreateModel(
name='Project',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=2000)),
('created_date', models.DateTimeField(auto_now_add=True, verbose_name=b'DateCreated')),
('organisation', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='projects', to='api.Organisation')),
],
),
migrations.AddField(
model_name='feature',
name='project',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='features', to='api.Project'),
),
migrations.AddField(
model_name='environment',
name='project',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='environments', to='api.Project'),
),
]
| [
"[email protected]"
] | |
60ea8faede03779356d2fbe48b62b1b293086d76 | 1c0175a77bb5edb1a9d2f9ea795cf15b42e5c5c2 | /hackbright.py | 93d596be219ac559ef29f834e8d4ef8136720d13 | [] | no_license | jengrace/project-tracker-flask2 | 89fb53a03c0e07a4c25c3ff45be424c03b7e78d2 | 49125d272537d2dbb8259e2b9b94f8816a3a7d26 | refs/heads/master | 2021-04-30T07:00:24.470001 | 2017-01-25T21:03:10 | 2017-01-25T21:03:10 | 79,973,913 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,368 | py | """Hackbright Project Tracker.
A front-end for a database that allows users to work with students, class
projects, and the grades students receive in class projects.
"""
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
def connect_to_db(app):
"""Connect the database to our Flask app."""
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql:///hackbright'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db.app = app
db.init_app(app)
def get_student_by_github(github):
"""Given a github account name, print information about the
matching student."""
QUERY = """
SELECT first_name, last_name, github
FROM Students
WHERE github = :github
"""
db_cursor = db.session.execute(QUERY, {'github': github})
row = db_cursor.fetchone()
print "Student: %s %s\nGithub account: %s" % (row[0], row[1],
row[2])
return row
def make_new_student(first_name, last_name, github):
"""Add a new student and print confirmation.
Given a first name, last name, and GitHub account, add student to the
database and print a confirmation message.
"""
QUERY = """
INSERT INTO Students
VALUES (:first_name, :last_name, :github)
"""
db.session.execute(QUERY, {'first_name': first_name,
'last_name': last_name,
'github': github})
db.session.commit()
print "Successfully added student: %s %s" % (first_name, last_name)
def get_project_by_title(title):
"""Given a project title, print information about the project."""
QUERY = """
SELECT title, description, max_grade
FROM Projects
WHERE title = :title
"""
db_cursor = db.session.execute(QUERY, {'title': title})
row = db_cursor.fetchone()
print "Title: %s\nDescription: %s\nMax Grade: %d" % (row[0], row[1],
row[2])
return row
def get_grade_by_github_title(github, title):
"""Print grade student received for a project."""
QUERY = """
SELECT grade
FROM Grades
WHERE student_github = :github
AND project_title = :title
"""
db_cursor = db.session.execute(QUERY, {'github': github, 'title': title})
row = db_cursor.fetchone()
print "Student %s in project %s received grade of %s" % (
github, title, row[0])
return row
def assign_grade(github, title, grade):
"""Assign a student a grade on an assignment and print a confirmation."""
QUERY = """INSERT INTO Grades (student_github, project_title, grade)
VALUES (:github, :title, :grade)"""
db_cursor = db.session.execute(QUERY, {'github': github, 'title': title, 'grade': grade})
db.session.commit()
print "Successfully assigned grade of %s for %s in %s" % (
grade, github, title)
def get_grades_by_github(github):
"""Get a list of all grades for a student by their github username"""
QUERY = """
SELECT project_title, grade
FROM Grades
WHERE student_github = :github
"""
db_cursor = db.session.execute(QUERY, {'github': github})
rows = db_cursor.fetchall()
for row in rows:
print "Student %s received grade of %s for project %s" % (
github, row[1], row[0])
print '**********************rows: ', rows
return rows
def get_grades_by_title(title):
"""Get a list of all student grades for a project by its title"""
QUERY = """
SELECT student_github, grade
FROM Grades
WHERE project_title = :title
"""
db_cursor = db.session.execute(QUERY, {'title': title})
rows = db_cursor.fetchall()
for row in rows:
print "Student %s received grade of %s for project %s" % (
row[0], row[1], title)
return rows
def handle_input():
"""Main loop.
Repeatedly prompt for commands, performing them, until 'quit' is received as a
command."""
command = None
while command != "quit":
input_string = raw_input("HBA Database> ")
tokens = input_string.split()
command = tokens[0]
args = tokens[1:]
if command == "student":
github = args[0]
get_student_by_github(github)
elif command == "new_student":
first_name, last_name, github = args # unpack!
make_new_student(first_name, last_name, github)
elif command == "project":
title = args[0]
get_project_by_title(title)
elif command == "grade":
github, title = args
get_grade_by_github_title(github, title)
elif command == "assign_grade":
github, title, grade = args
assign_grade(github, title, grade)
elif command == "student_grades":
github = args[0]
get_grades_by_github(github)
elif command == "project_grades":
title = args[0]
get_grades_by_title(title)
if __name__ == "__main__":
app = Flask(__name__)
connect_to_db(app)
handle_input()
# To be tidy, we'll close our database connection -- though, since this
# is where our program ends, we'd quit anyway.
db.session.close()
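# Illustrative session (hypothetical commands and data, using only the
# commands handled in handle_input above):
#
#   HBA Database> new_student Jane Hacks jhacks
#   HBA Database> assign_grade jhacks Markov 90
#   HBA Database> student_grades jhacks
#   HBA Database> quit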
| [
"[email protected]"
] | |
b96c46eaaad9c98e4ed8795b32c1d40b44d14a6a | 200239bfd98b1bdf4bf19f9aa96007aaada3887d | /ax/core/trial.py | d8e8a130ff8fabcf8927e6711e2e44c19364891e | [
"MIT"
] | permissive | cristicmf/Ax | 18cad5a5ea944f03a4e1ae771036dc8e59369996 | c940dd0ad3a7d01eec7d68f0e51de8b019a19615 | refs/heads/master | 2023-04-29T18:30:19.444123 | 2021-05-13T15:56:03 | 2021-05-13T15:57:02 | 367,475,333 | 0 | 0 | MIT | 2021-05-14T21:00:00 | 2021-05-14T20:43:22 | null | UTF-8 | Python | false | false | 8,740 | py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import annotations
from typing import TYPE_CHECKING, Dict, List, Optional
from ax.core.arm import Arm
from ax.core.base_trial import BaseTrial, immutable_once_run
from ax.core.generator_run import GeneratorRun, GeneratorRunType
from ax.core.types import TCandidateMetadata
from ax.utils.common.docutils import copy_doc
from ax.utils.common.typeutils import not_none
if TYPE_CHECKING:
# import as module to make sphinx-autodoc-typehints happy
from ax import core # noqa F401 # pragma: no cover
class Trial(BaseTrial):
"""Trial that only has one attached arm and no arm weights.
Args:
experiment: Experiment, to which this trial is attached.
generator_run: GeneratorRun, associated with this trial.
Trial has only one generator run (of just one arm)
attached to it. This can also be set later through `add_arm`
or `add_generator_run`, but a trial's associated genetor run is
immutable once set.
trial_type: Type of this trial, if used in MultiTypeExperiment.
ttl_seconds: If specified, trials will be considered failed after
this many seconds since the time the trial was ran, unless the
trial is completed before then. Meant to be used to detect
'dead' trials, for which the evaluation process might have
crashed etc., and which should be considered failed after
their 'time to live' has passed.
index: If specified, the trial's index will be set accordingly.
This should generally not be specified, as in the index will be
automatically determined based on the number of existing trials.
This is only used for the purpose of loading from storage.
"""
def __init__(
self,
experiment: core.experiment.Experiment,
generator_run: Optional[GeneratorRun] = None,
trial_type: Optional[str] = None,
ttl_seconds: Optional[int] = None,
index: Optional[int] = None,
) -> None:
super().__init__(
experiment=experiment,
trial_type=trial_type,
ttl_seconds=ttl_seconds,
index=index,
)
self._generator_run = None
if generator_run is not None:
self.add_generator_run(generator_run=generator_run)
@property
def generator_run(self) -> Optional[GeneratorRun]:
"""Generator run attached to this trial."""
return self._generator_run
# pyre-ignore[6]: T77111662.
@copy_doc(BaseTrial.generator_runs)
@property
def generator_runs(self) -> List[GeneratorRun]:
gr = self._generator_run
return [gr] if gr is not None else []
@property
def arm(self) -> Optional[Arm]:
"""The arm associated with this batch."""
if self.generator_run is None:
return None
generator_run = not_none(self.generator_run)
if len(generator_run.arms) == 0:
return None
elif len(generator_run.arms) > 1:
raise ValueError( # pragma: no cover
"Generator run associated with this trial included multiple "
"arms, but trial expects only one."
)
return generator_run.arms[0]
@immutable_once_run
def add_arm(self, arm: Arm) -> Trial:
"""Add arm to the trial.
Returns:
The trial instance.
"""
return self.add_generator_run(
generator_run=GeneratorRun(arms=[arm], type=GeneratorRunType.MANUAL.name)
)
@immutable_once_run
def add_generator_run(
self, generator_run: GeneratorRun, multiplier: float = 1.0
) -> Trial:
"""Add a generator run to the trial.
Note: since trial includes only one arm, this will raise a ValueError if
the generator run includes multiple arms.
Returns:
The trial instance.
"""
if len(generator_run.arms) > 1:
raise ValueError(
"Trial includes only one arm, but this generator run "
"included multiple."
)
self.experiment.search_space.check_types(
generator_run.arms[0].parameters, raise_error=True
)
self._check_existing_and_name_arm(generator_run.arms[0])
self._generator_run = generator_run
generator_run.index = 0
self._set_generation_step_index(
generation_step_index=generator_run._generation_step_index
)
return self
@property
def arms(self) -> List[Arm]:
"""All arms attached to this trial.
Returns:
arms: list of a single arm
attached to this trial if there is one, else None.
"""
# pyre-fixme[7]: Expected `List[Arm]` but got `Union[List[Optional[Arm]],
# List[_T]]`.
return [self.arm] if self.arm is not None else []
@property
def arms_by_name(self) -> Dict[str, Arm]:
"""Dictionary of all arms attached to this trial with their names
as keys.
Returns:
arms: dictionary of a single
arm name to arm if one is attached to this trial,
else None.
"""
# pyre-fixme[16]: `Optional` has no attribute `name`.
return {self.arm.name: self.arm} if self.arm is not None else {}
@property
def abandoned_arms(self) -> List[Arm]:
"""Abandoned arms attached to this trial."""
return (
[not_none(self.arm)]
if self.generator_run is not None
and self.arm is not None
and self.is_abandoned
else []
)
@property
def objective_mean(self) -> float:
"""Objective mean for the arm attached to this trial, retrieved from the
latest data available for the objective for the trial.
Note: the retrieved objective is the experiment-level objective at the
time of the call to `objective_mean`, which is not necessarily the
objective that was set at the time the trial was created or ran.
"""
# For SimpleExperiment, fetch_data just executes eval_trial.
df = self.fetch_data().df
if df.empty:
raise ValueError(f"No data was retrieved for trial {self.index}.")
opt_config = self.experiment.optimization_config
if opt_config is None:
raise ValueError( # pragma: no cover
"Experiment optimization config (and thus the objective) is not set."
)
return self.get_metric_mean(metric_name=opt_config.objective.metric.name)
def get_metric_mean(self, metric_name: str) -> float:
"""Metric mean for the arm attached to this trial, retrieved from the
latest data available for the metric for the trial.
"""
# For SimpleExperiment, fetch_data just executes eval_trial.
df = self.fetch_data().df
try:
return df.loc[df["metric_name"] == metric_name].iloc[0]["mean"]
except IndexError: # pragma: no cover
raise ValueError(f"Metric {metric_name} not yet in data for trial.")
def __repr__(self) -> str:
return (
"Trial("
f"experiment_name='{self._experiment._name}', "
f"index={self._index}, "
f"status={self._status}, "
f"arm={self.arm})"
)
def _get_candidate_metadata_from_all_generator_runs(
self,
) -> Dict[str, TCandidateMetadata]:
"""Retrieves candidate metadata from the generator run on this
batch trial in the form of { arm name -> candidate metadata} mapping.
"""
gr = self.generator_run
if gr is None or gr.candidate_metadata_by_arm_signature is None:
return {}
cand_metadata = not_none(gr.candidate_metadata_by_arm_signature)
return {a.name: cand_metadata.get(a.signature) for a in gr.arms}
def _get_candidate_metadata(self, arm_name: str) -> TCandidateMetadata:
"""Retrieves candidate metadata for a specific arm."""
gr = self.generator_run
if gr is None or gr.arms[0].name != arm_name:
raise ValueError(
f"Arm by name {arm_name} is not part of trial #{self.index}."
)
if gr.candidate_metadata_by_arm_signature is None:
return None
arm = gr.arms[0]
return not_none(gr.candidate_metadata_by_arm_signature).get(arm.signature)
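# Minimal usage sketch (not part of the original module). It assumes an
# already-configured Experiment instance named `experiment` whose search
# space defines a single parameter "x" (both are placeholders); only the
# Trial API defined above is exercised, with Arm imported at the top of
# this file.
#
#   trial = Trial(experiment=experiment)
#   trial.add_arm(Arm(parameters={"x": 0.5}))
#   print(trial.arm, trial.arms_by_name)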
| [
"[email protected]"
] | |
c9923efa24c881279a13f57397a90a6c1b858295 | a00ed711e3e08b50ad6e91cc07a2cddc4a1de5ea | /scripts/ci/pre_commit/pre_commit_check_2_2_compatibility.py | d4fe0ea78d24abc8a7965ec863bd16283ce84c71 | [
"Apache-2.0",
"BSD-3-Clause",
"MIT"
] | permissive | ishiis/airflow | 4305794e36b611d01f49e3f2401be3dc49782670 | 292440d54f4db84aaf0c5a98cf5fcf34303f2fa8 | refs/heads/master | 2022-07-30T00:51:28.806940 | 2022-07-14T12:07:11 | 2022-07-14T12:07:11 | 209,801,072 | 1 | 0 | Apache-2.0 | 2019-09-20T13:47:26 | 2019-09-20T13:47:26 | null | UTF-8 | Python | false | false | 4,803 | py | #!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import re
import sys
from pathlib import Path
from typing import List
from rich.console import Console
if __name__ not in ("__main__", "__mp_main__"):
raise SystemExit(
"This file is intended to be executed as an executable program. You cannot use it as a module."
f"To run this script, run the ./{__file__} command [FILE] ..."
)
console = Console(color_system="standard", width=200)
errors: List[str] = []
SKIP_COMP_CHECK = "# ignore airflow compat check"
TRY_NUM_MATCHER = re.compile(r".*context.*\[[\"']try_number[\"']].*")
GET_MANDATORY_MATCHER = re.compile(r".*conf\.get_mandatory_value")
GET_AIRFLOW_APP_MATCHER = re.compile(r".*get_airflow_app\(\)")
HOOK_PARAMS_MATCHER = re.compile(r".*get_hook\(hook_params")
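# Illustrative (hypothetical) lines each pattern above would flag:
#   TRY_NUM_MATCHER         -> attempt = context["try_number"]
#   GET_MANDATORY_MATCHER   -> key = conf.get_mandatory_value("core", "fernet_key")
#   GET_AIRFLOW_APP_MATCHER -> app = get_airflow_app()
#   HOOK_PARAMS_MATCHER     -> hook = connection.get_hook(hook_params=params)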
def _check_file(_file: Path):
lines = _file.read_text().splitlines()
for index, line in enumerate(lines):
if SKIP_COMP_CHECK in line:
continue
if "XCom.get_value(" in line:
if "if ti_key is not None:" not in lines[index - 1]:
errors.append(
f"[red]In {_file}:{index} there is a forbidden construct "
"(Airflow 2.3.0 only):[/]\n\n"
f"{lines[index-1]}\n{lines[index]}\n\n"
"[yellow]When you use XCom.get_value( in providers, it should be in the form:[/]\n\n"
"if ti_key is not None:\n"
" value = XCom.get_value(...., ti_key=ti_key)\n\n"
"See: https://airflow.apache.org/docs/apache-airflow-providers/"
"howto/create-update-providers.html#using-providers-with-dynamic-task-mapping\n"
)
if "ti.map_index" in line:
errors.append(
f"[red]In {_file}:{index} there is a forbidden construct "
"(Airflow 2.3+ only):[/]\n\n"
f"{lines[index]}\n\n"
"[yellow]You should not use map_index field in providers "
"as it is only available in Airflow 2.3+[/]"
)
if TRY_NUM_MATCHER.match(line):
errors.append(
f"[red]In {_file}:{index} there is a forbidden construct "
"(Airflow 2.3+ only):[/]\n\n"
f"{lines[index]}\n\n"
"[yellow]You should not expect try_number field for context in providers "
"as it is only available in Airflow 2.3+[/]"
)
if GET_MANDATORY_MATCHER.match(line):
errors.append(
f"[red]In {_file}:{index} there is a forbidden construct "
"(Airflow 2.3+ only):[/]\n\n"
f"{lines[index]}\n\n"
"[yellow]You should not use conf.get_mandatory_value in providers "
"as it is only available in Airflow 2.3+[/]"
)
if HOOK_PARAMS_MATCHER.match(line):
errors.append(
f"[red]In {_file}:{index} there is a forbidden construct "
"(Airflow 2.3+ only):[/]\n\n"
f"{lines[index]}\n\n"
"[yellow]You should not use 'hook_params' in get_hook as it has been added in providers "
"as it is not available in Airflow 2.3+. Use get_hook() instead.[/]"
)
if GET_AIRFLOW_APP_MATCHER.match(line):
errors.append(
f"[red]In {_file}:{index} there is a forbidden construct "
"(Airflow 2.4+ only):[/]\n\n"
f"{lines[index]}\n\n"
"[yellow]You should not use airflow.utils.airflow_flask_app.get_airflow_app() in providers "
"as it is not available in Airflow 2.4+. Use current_app instead.[/]"
)
if __name__ == '__main__':
for file in sys.argv[1:]:
_check_file(Path(file))
if errors:
console.print("[red]Found Airflow 2.2 compatibility problems in providers:[/]\n")
for error in errors:
console.print(f"{error}")
sys.exit(1)
| [
"[email protected]"
] | |
28643ece960fbdb5ec5bf2ab0ecc38b9f974345c | b8fd7e01a7069a0666eb2fe21991753fd5ff7860 | /Python Language/Stack/3 . top.py | 89b10fca7acd6e331210a9db5f36ac8f9bed3454 | [] | no_license | Jafoor/Leet-Code-Solved-Problems | 0b6be0f3c82b1bc13c0c484782db65601cefa7b8 | 935e5679e04bf6f9c9d8a0bdf8b204923a2bc7a5 | refs/heads/master | 2023-07-02T13:38:59.690783 | 2021-07-19T16:20:48 | 2021-07-19T16:20:48 | 256,105,425 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 571 | py | class Stack:
    # declaring stack
def __init__(self):
self.stack = []
#pushing values
def push(self, data):
self.stack.append(data)
#pop element from Stack
def pop(self):
if len(self.stack) <= 0:
return ("No elemennt to pop")
else:
return self.stack.pop()
def top(self):
if len(self.stack) <= 0:
return ("No elements in the stack")
else:
return self.stack[-1]
S = Stack()
S.push("Mon")
S.push("sun")
print(S.top())
print(S.pop())
print(S.top())
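# Follow-up sketch using only the class above: drain the remaining item(s),
# checking top() before each pop (top() returns a message once the stack is
# empty).
while S.top() != "No elements in the stack":
    print(S.pop())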
| [
"[email protected]"
] | |
e5e2f907488bbd1e533f57e90d04fbf9ca1a94e9 | 33ce95a46bad431fb9acde07f10f472c43533824 | /functions_advanced_exercise/keyword_argument_length.py | 8521f0728b213aa0197f75f270a97e0e6e220239 | [] | no_license | ivan-yosifov88/python_advanced | 91dead1a44771a46e85cecdfc6b02e11c0cb4d91 | 21830aabc87eb28eb32bf3c070bf202b4740f628 | refs/heads/main | 2023-06-29T21:31:30.285019 | 2021-06-23T20:31:36 | 2021-06-23T20:31:36 | 342,571,734 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 148 | py | def kwargs_length(**kwargs):
return len(kwargs)
dictionary = {'name': 'Peter', 'age': 25, 'baba': 'Peter'}
print(kwargs_length(**dictionary))
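# The function also counts keyword arguments passed directly, without
# unpacking a dictionary first:
print(kwargs_length(name='Peter', age=25))  # prints 2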
| [
"ivan.yosifov88gmail.com"
] | ivan.yosifov88gmail.com |
e902eec2088a110c55e9d2063a8d882bcd34f49d | 0c021e881a2d5507f2dd0600a7bc36fa2b8de112 | /turtlecircle.py | 31d3b56a4add57cb27afc03411115accd4f46ccf | [] | no_license | Techsrijan/girlspolytechnic | 016299f28d808f7628693dec555642463862a537 | 0eb136632dd37c1e65ac290509ed166144e8d2a0 | refs/heads/main | 2023-02-03T20:40:30.474711 | 2020-12-16T10:22:59 | 2020-12-16T10:22:59 | 307,243,788 | 0 | 6 | null | null | null | null | UTF-8 | Python | false | false | 385 | py | from turtle import *
import time
t=Turtle()
s=Screen()
s.title("Circle Example")
s.setup(800,600)
'''t.circle(50) # anticlockwise
t.circle((-100))
t.up()
t.forward(200)
t.down()
t.pencolor("red")
t.circle(300)
'''
t.circle(50)
t.up()
t.goto(-100,-100)
t.down()
t.circle(50)
t.undo()
t.reset()
time.sleep(2)
t.write("This is my turtle tutorial",font=("Comic Sans Ms",25,"bold"))
done() | [
"[email protected]"
] | |
1e75f91cfeb5d9ec5f124aa4adcddf060fb9624d | 2329ba07662331526c508e16512eb8c194c2d0c8 | /src/measurement/area/imperial.py | 6b26952f153010fe609458008baeca02b9c31ad8 | [] | no_license | patricknevindwyer/Converto | 9265e2c6a7af88f6d2a767baed78ce8e95b83215 | 8b9a874a63a8fefde3149d8f6c28c338cc20f09f | refs/heads/master | 2020-05-18T19:58:49.335100 | 2010-12-03T13:40:40 | 2010-12-03T13:40:40 | 1,091,854 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 254 | py | '''
Created on Dec 3, 2010
@author: patricknevindwyer
Imperial measurements of Area.
'''
from measurement.bases import Area
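# A sketch of an additional unit (not in the original file), following the
# same constructor pattern as square_feet below; the constants assume
# 1 international acre = 4046.8564224 square metres, with fromBaseUnit as
# its reciprocal.
acres = Area(
    toBaseUnit = 4046.8564224,
    fromBaseUnit = 0.000247105381,
    suffixes = ('acre', 'acres')
)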
square_feet = Area(
toBaseUnit = 0.09290304,
fromBaseUnit = 10.7639104,
suffixes = ('square foot', 'square feet')
) | [
"[email protected]"
] | |
879cdf55ca3f1f61547f825cbd91ff885eb084e1 | 2734b77a68f6d7e22e8b823418ad1c59fe1a34af | /opengever/dossier/content_deleter.py | a165f1f965b4eda76e04acabca72a444e73e2129 | [] | no_license | 4teamwork/opengever.core | 5963660f5f131bc12fd0a5898f1d7c8f24a5e2b1 | a01bec6c00d203c21a1b0449f8d489d0033c02b7 | refs/heads/master | 2023-08-30T23:11:27.914905 | 2023-08-25T14:27:15 | 2023-08-25T14:27:15 | 9,788,097 | 19 | 8 | null | 2023-09-14T13:28:56 | 2013-05-01T08:28:16 | Python | UTF-8 | Python | false | false | 707 | py | from opengever.base.browser.folder_buttons_availability import FolderButtonsAvailabilityView
from opengever.base.content_deleter import BaseContentDeleter
from opengever.dossier.behaviors.dossier import IDossierMarker
from zExceptions import Forbidden
from zope.component import adapter
@adapter(IDossierMarker)
class DossierDeleter(BaseContentDeleter):
permission = 'opengever.dossier: Delete dossier'
def verify_may_delete(self, **kwargs):
super(DossierDeleter, self).verify_may_delete()
if not self.context.objectCount() == 0:
raise Forbidden()
if FolderButtonsAvailabilityView(self.context, None)._has_linked_workspaces():
raise Forbidden()
| [
"[email protected]"
] | |
a01f9443c22e4e22c3db6a462ca1c40ad91e0bd4 | d85fbcf9199a46d7ce43537a6b333e381fd1b868 | /argo/workflows/dsl/__about__.py | eb032ad16c682ac31874759e105c26fd5d1f1c5c | [
"Python-2.0",
"Apache-2.0"
] | permissive | binarycrayon/argo-python-dsl | 78af89ab6a4e522a0bd9b1a28124ce687d544cef | 0eb6fcf1c0901c842dd280d8c052981b5b3378ce | refs/heads/master | 2021-02-13T15:35:52.177954 | 2020-03-08T07:41:46 | 2020-03-08T07:41:46 | 244,710,162 | 0 | 0 | Apache-2.0 | 2020-03-03T18:22:47 | 2020-03-03T18:22:47 | null | UTF-8 | Python | false | false | 500 | py | """About this package."""
__all__ = [
"__title__",
"__summary__",
"__uri__",
"__version__",
"__author__",
"__email__",
"__license__",
"__copyright__",
]
__title__ = "argo-workflows-dsl"
__summary__ = "DSL for Argo Workflows"
__uri__ = "https://github.com/CermakM/argo-python-dsl"
__version__ = "0.1.0-rc"
__author__ = "Marek Cermak"
__email__ = "[email protected]"
__license__ = "Apache Software License"
__copyright__ = "Copyright 2019 {0}".format(__author__)
| [
"[email protected]"
] | |
a13ba3403a954a805e8bc586f2179587ed19b562 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02843/s696796090.py | 05e70395147aa3317b5c278c07796ed013434575 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 348 | py | X = int(input())
price = [100,101,102,103,104,105]
dp = [[0]*100001 for _ in range(6)]
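# dp[i][j] == 1 when amount j can be paid exactly using any number of notes
# from the first i+1 denominations (unbounded knapsack); max() below acts as
# a logical OR.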
dp[0][0] = 1
for i in range(100,100001):
dp[0][i] = dp[0][i-100]
for i in range(1,6):
for j in range(price[i]):
dp[i][j] = dp[i-1][j]
for j in range(price[i],100001):
dp[i][j] = max(dp[i-1][j],dp[i][j-price[i]])
print(dp[-1][X])
| [
"[email protected]"
] | |
b7c2ba1ef562df11e19607089e595bbc069c386f | 19f05c91b991f38eca19275bfcb8a2a27000bb45 | /makahiki/apps/components/activities/migrations/0009_populate_slug_data.py | 049b4d883d7525c8fda6839f54a537b4397486ae | [] | no_license | keokilee/makahiki | 9c40576c73fef2bf11dc22194dbabf98f5e67e64 | 783db33ed0b38fb4dccc371c426265f7028a2d13 | refs/heads/master | 2020-04-30T03:44:52.309826 | 2012-03-17T01:36:19 | 2012-03-17T01:36:19 | 543,870 | 2 | 3 | null | null | null | null | UTF-8 | Python | false | false | 13,754 | py | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from django.template.defaultfilters import slugify
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
for base in orm.ActivityBase.objects.all():
base.slug = slugify(base.name)
base.save()
def backwards(self, orm):
"Write your backwards methods here."
for base in orm.ActivityBase.objects.all():
base.slug = ""
base.save()
models = {
'activities.activity': {
'Meta': {'object_name': 'Activity', '_ormbases': ['activities.ActivityBase']},
'activitybase_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['activities.ActivityBase']", 'unique': 'True', 'primary_key': 'True'}),
'confirm_prompt': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'confirm_type': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '20'}),
'duration': ('django.db.models.fields.IntegerField', [], {}),
'event_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'event_location': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'expire_date': ('django.db.models.fields.DateField', [], {}),
'point_range_end': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'point_range_start': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'point_value': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'pub_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.date(2011, 6, 9)'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'through': "orm['activities.ActivityMember']", 'symmetrical': 'False'})
},
'activities.activitybase': {
'Meta': {'object_name': 'ActivityBase'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['activities.Category']", 'null': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'depends_on': ('django.db.models.fields.CharField', [], {'max_length': '400', 'null': 'True', 'blank': 'True'}),
'depends_on_text': ('django.db.models.fields.CharField', [], {'max_length': '400', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'energy_related': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'priority': ('django.db.models.fields.IntegerField', [], {'default': '1000'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True', 'db_index': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'})
},
'activities.activitymember': {
'Meta': {'object_name': 'ActivityMember', '_ormbases': ['activities.CommonActivityUser']},
'activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['activities.Activity']"}),
'admin_comment': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'commonactivityuser_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['activities.CommonActivityUser']", 'unique': 'True', 'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '1024', 'blank': 'True'}),
'points_awarded': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['activities.TextPromptQuestion']", 'null': 'True', 'blank': 'True'}),
'response': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'user_comment': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'activities.category': {
'Meta': {'object_name': 'Category'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'activities.commitment': {
'Meta': {'object_name': 'Commitment', '_ormbases': ['activities.ActivityBase']},
'activitybase_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['activities.ActivityBase']", 'unique': 'True', 'primary_key': 'True'}),
'duration': ('django.db.models.fields.IntegerField', [], {'default': '5'}),
'point_value': ('django.db.models.fields.IntegerField', [], {}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'through': "orm['activities.CommitmentMember']", 'symmetrical': 'False'})
},
'activities.commitmentmember': {
'Meta': {'object_name': 'CommitmentMember'},
'award_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'commitment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['activities.Commitment']"}),
'completion_date': ('django.db.models.fields.DateField', [], {}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'activities.commonactivityuser': {
'Meta': {'object_name': 'CommonActivityUser'},
'approval_status': ('django.db.models.fields.CharField', [], {'default': "'pending'", 'max_length': '20'}),
'award_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'submission_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'})
},
'activities.confirmationcode': {
'Meta': {'object_name': 'ConfirmationCode'},
'activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['activities.Activity']"}),
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '10'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'})
},
'activities.questionchoice': {
'Meta': {'object_name': 'QuestionChoice'},
'activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['activities.Activity']"}),
'choice': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['activities.TextPromptQuestion']"})
},
'activities.textpromptquestion': {
'Meta': {'object_name': 'TextPromptQuestion'},
'activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['activities.Activity']"}),
'answer': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.TextField', [], {})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'floors.dorm': {
'Meta': {'object_name': 'Dorm'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '20', 'db_index': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
},
'floors.floor': {
'Meta': {'object_name': 'Floor'},
'dorm': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['floors.Dorm']"}),
'floor_identifier': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '10', 'db_index': 'True'})
},
'makahiki_base.like': {
'Meta': {'object_name': 'Like'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'floor': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['floors.Floor']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
}
}
complete_apps = ['activities']
| [
"[email protected]"
] | |
b0ae8f4bfb739eb24ac9fc0d06617715c46a65d9 | e84020108a7037d8d4867d95fada1b72cbcbcd25 | /src/chattisgarh/fto/processFTOStatus.py | d660f4fc16006a208a39e4abc60bff1a449ef8b8 | [] | no_license | rajesh241/libtech | 8384316051a2e8c2d4a925cd43216b855b82e4d9 | 0105e717357a3626106028adae9bf162a7f93fbf | refs/heads/master | 2022-12-10T03:09:00.048841 | 2020-06-14T09:39:04 | 2020-06-14T09:39:04 | 24,629,538 | 1 | 1 | null | 2022-12-08T02:26:11 | 2014-09-30T07:57:45 | Python | UTF-8 | Python | false | false | 3,182 | py | import csv
from bs4 import BeautifulSoup
import requests
import MySQLdb
import time
import re
import os
import sys
fileDir=os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, fileDir+'/../../includes/')
from settings import dbhost,dbuser,dbpasswd,sid,token
from globalSettings import datadir
# Error file definition
errorfile = open('/tmp/processFTO.log', 'a')
#File Path where all the Downloaded FTOs would be placed
districtName="SURGUJA"
ftofilepath=datadir+districtName+"/"
#Connect to MySQL Database
db = MySQLdb.connect(host=dbhost, user=dbuser, passwd=dbpasswd, db="surguja",charset='utf8')
cur=db.cursor()
db.autocommit(True)
#Query to set up Database to read Hindi Characters
query="SET NAMES utf8"
cur.execute(query)
#Query to get the FTO
query=" select f.id,f.ftoNo,b.name,f.finyear,f.blockCode from ftoDetails f,blocks b where b.blockCode=f.blockCode and b.isActive=1 and f.finyear='16' and f.isProcessed=1 and f.isStatusDownloaded=1 and f.isStatusProcessed=0 and f.incorrectPOFile!=1 "
#query=" select f.id,f.ftoNo,b.name,f.finyear,f.blockCode from ftoDetails f,blocks b where b.blockCode=f.blockCode and b.isActive=1 and f.finyear='16' and f.isStatusDownloaded=1 and f.isStatusProcessed=0 and ftoNo='CH3305003_081015FTO_142597'"
cur.execute(query)
if cur.rowcount:
results = cur.fetchall()
for row in results:
ftoid=str(row[0])
ftoNo=row[1]
blockName=row[2]
finyear=row[3]
blockCode=row[4]
print str(ftoid)+" "+finyear+" "+ftoNo+" "+blockName
if finyear=='16':
fullfinyear='2015-2016'
elif finyear=='15':
fullfinyear='2014-2015'
else:
fullfinyear='2013-2014'
ftofilename=ftofilepath+blockName+"/FTO/"+fullfinyear+"/"+ftoNo+"_status.html"
print ftofilename
if (os.path.isfile(ftofilename)):
ftohtml=open(ftofilename,'r').read()
if "The file name does not appear to be correct" in ftohtml:
print "This does not seem like a postoffice PO"
errorflag=1
else:
htmlsoup=BeautifulSoup(ftohtml)
try:
table=htmlsoup.find('table',id="ctl00_ContentPlaceHolder1_Table1")
rows = table.findAll('tr')
errorflag=0
except:
errorflag=1
print "errorflag is "+str(errorflag)
if errorflag==0:
for tr in rows:
cols = tr.findAll('td')
tdtext=''
eventDate= cols[0].text
if eventDate != 'Date Time':
print eventDate
event = cols[1].text
office= cols[2].text
fileid=cols[3].text
print eventDate+event+office+fileid
eventDateFormat='%d %M %Y %H:%i:%s'
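					# STR_TO_DATE format note: '%d %M %Y %H:%i:%s' parses timestamps such as
					# '09 June 2015 14:30:00' (hypothetical value; in MySQL %M is the full
					# month name, %i minutes, %s seconds).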
query="insert into ftoStatus (ftoNo,blockCode,finyear,eventDate,event,office,fileid) values ('%s','%s','%s',STR_TO_DATE('%s','%s'),'%s','%s','%s');" % (ftoNo,blockCode,finyear,eventDate,eventDateFormat,event,office,fileid)
print query
cur.execute(query)
query="update ftoDetails set isStatusProcessed=1 where id=%s" %(ftoid)
cur.execute(query)
else:
query="update ftoDetails set incorrectPOFile=1 where id=%s" %(ftoid)
cur.execute(query)
| [
"[email protected]"
] | |
2ff21a4006685c10124c9e5389c38c0bf6df8a09 | e315a4bb8847dd9a0ba6c4e1f4ebc21d8bbbfb4c | /classifier.py | d6aeaf40fb118dbed807891db2946d96afdcbcdf | [] | no_license | marco-willi/camtrap_classifier | 464f1ad60eaf275fa4dbe077396639a7192d9ab6 | 10363c18382b86560bfc6c202183ecfd77d4e079 | refs/heads/master | 2021-05-08T06:52:23.074452 | 2017-12-01T07:13:04 | 2017-12-01T07:13:04 | 106,642,628 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,695 | py | """
Class to provide a Classifier for applying a model on images
- takes a folder/folders with images, a model, and model configs as input
"""
import os
from keras.models import load_model
from keras.preprocessing.image import ImageDataGenerator
from collections import OrderedDict
import numpy as np
import pandas as pd
import json
class CamTrapClassifier(object):
""" Classifier completely independent of the specific
project infrastructure - to be used for researches who want to apply
one of the models on their images
Parameters
------------
path_to_model:
- path to model file
- string
model_cfg_json:
- path to json with model config
- Json-string with keys: "class_mapper" & "pre_processing"
- Optional if keras_datagen provided
keras_datagen:
- DataGenerator which will be fit on data using a batch
of 2'000 randomly selected images, will override
parameters in model_cfg_json
- Object of keras.preprocessing.image.ImageDataGenerator
- Optional if model_cfg_json provided
class_list:
- list of classes in order of model output layer
- list
- Optional, only needs to be specified if not in model_cfg_json
refit_on_data:
- Whether to re-fit the DataGenerator on a batch of
randomly selected images of the provided data
- Boolean
- Default: False (recommended)
"""
def __init__(self,
path_to_model=None,
model_cfg_json=None,
keras_datagen=None,
class_list=None,
refit_on_data=False):
self.path_to_model = path_to_model
self.keras_datagen = keras_datagen
self.class_list = class_list
self.model_cfg_json = model_cfg_json
self.refit_on_data = refit_on_data
self.model = None
self.preds = None
self.pre_processing = None
self.color_mode = "rgb"
# Checks
if path_to_model is None:
raise IOError("Path to model has to be specified")
if model_cfg_json is None:
if keras_datagen is None:
raise IOError("Specify keras ImageDataGenerator or model cfg")
if class_list is None:
raise IOError("Specify class list to map predictions\
to classes")
if model_cfg_json is not None:
if (keras_datagen is not None) or (class_list is not None):
raise IOError("Specify only one of model_cfg_json or\
(keras_datagen and class_list)\
class_list should be in model cfg json")
if not os.path.isfile(self.path_to_model):
raise FileNotFoundError("Model File %s not found" %
self.path_to_model)
# Load model from disk
print("Loading model from disk: %s" % self.path_to_model)
self.model = load_model(self.path_to_model)
# handle color mode
if self.model.input_shape[3] == 1:
self.color_mode = 'grayscale'
else:
self.color_mode = 'rgb'
# Load cfg from json file
if model_cfg_json is not None:
cfg_file = open(model_cfg_json, 'r')
model_cfg = json.load(cfg_file)
# check model_cfg
            assert 'class_mapper' in model_cfg.keys(), \
                "class_mapper not found in model_cfg_json, " \
                "following keys found %s" % model_cfg.keys()
            assert 'pre_processing' in model_cfg.keys(), \
                "pre_processing not found in model_cfg_json, " \
                "following keys found %s" % model_cfg.keys()
# extract class mapping and order
class_list = list()
for i in range(0, len(model_cfg['class_mapper'].keys())):
class_list.append(model_cfg['class_mapper'][str(i)])
self.class_list = class_list
# add pre_processing
self.pre_processing = model_cfg['pre_processing']
def predict_path(self, path, output_path,
output_file_name='predictions.csv',
batch_size=256):
""" Predict class for images
Parameters
------------
path:
- path to directory that contains 1:N sub-directories
with images
- string
output_path:
- path to directory to which prediction csv will be written
- string
output_file_name:
- file name of the output csv written to output_path
- string
batch_size:
- number of images to process in one batch, if too large it
might not fit into memory
- integer
"""
# check input
if any([x is None for x in [path, output_path]]):
raise IOError("Path and output_path have to be specified")
# check output_path
output_path = os.path.join(output_path, "")
# check batch_size
assert type(batch_size) == int,\
"batch_size has to be an integer, is %s" % type(batch_size)
# fit data generator on input data
if self.pre_processing is None:
print("Initializing generator")
generator = self.keras_datagen.flow_from_directory(
path,
target_size=self.model.input_shape[1:3],
color_mode=self.color_mode,
batch_size=batch_size,
class_mode='sparse',
seed=123,
shuffle=False)
# check number of files found
assert generator.n > 0,\
"No images found in sub-directories of %s" % path
# Fit data generator if required
if any([self.keras_datagen.featurewise_std_normalization,
self.keras_datagen.samplewise_std_normalization,
self.keras_datagen.zca_whitening]):
self._refit_datagen(path, self.keras_datagen)
# use pre-defined pre_processing options and add to generator
else:
print("Initializing generator")
gen = ImageDataGenerator(rescale=1./255)
if self.refit_on_data:
self._refit_datagen(path, gen)
else:
# set pre-processing attributes
for k, v in self.pre_processing.items():
if type(v) is list:
v = np.array(v)
setattr(gen, k, v)
generator = gen.flow_from_directory(
path,
target_size=self.model.input_shape[1:3],
color_mode=self.color_mode,
batch_size=batch_size,
class_mode='sparse',
seed=123,
shuffle=False)
# check number of files found
assert generator.n > 0,\
"No images found in sub-directories of %s" % path
# predict whole set
print("Starting to predict images in path")
# calculate number of iterations required to process whole dataset
steps_remainder = generator.n % batch_size
if steps_remainder > 0:
extra_step = 1
else:
extra_step = 0
preds = self.model.predict_generator(
generator,
steps=(generator.n // batch_size) + extra_step,
workers=1,
use_multiprocessing=False,
verbose=1)
print("Finished predicting %s of %s images" %
(preds.shape[0], generator.n))
# check size and log critical
if preds.shape[0] != generator.n:
print("Number of Preds %s don't match" +
"number of images %s" % (preds.shape[0], generator.n))
# save predictions
self.preds = preds
# Create a data frame with all predictions
print("Creating Result DF")
res = self._create_result_df(generator.filenames,
generator.directory)
# write DF to disk
res.to_csv(output_path + output_file_name, index=False)
def _create_result_df(self, filenames,
image_directory=""):
""" Create Data Frame with Predictions """
# get max predictions & class ids
id_max = np.argmax(self.preds, axis=1)
max_pred = np.amax(self.preds, axis=1)
# map class names and indices
n_classes = len(self.class_list)
# create result data frame via dictionary
res = OrderedDict()
# loop over all files / predictions
for i in range(0, len(filenames)):
fname = filenames[i].split(os.path.sep)[1]
class_dir = filenames[i].split(os.path.sep)[0]
p = max_pred[i]
y_pred = self.class_list[id_max[i]]
# store predictions for all classes
p_all = self.preds[i, :]
preds_all = {self.class_list[j]: p_all[j] for j in
range(0, n_classes)}
if image_directory == '':
image_path = ''
else:
image_path = image_directory + class_dir +\
os.path.sep + fname
res[i] = OrderedDict([('file_name', fname),
('predicted_class', y_pred),
('predicted_probability', p),
('predictions_all', preds_all),
('image_path', image_path)])
res_df = pd.DataFrame.from_dict(res, orient="index")
return res_df
def _refit_datagen(self, path, datagen):
""" Fit Datagenerator on Raw Images """
print("Fitting data generator")
# create a generator to randomly select images to calculate
# image statistics for data pre-processing
datagen_raw = ImageDataGenerator(rescale=1./255)
raw_generator = datagen_raw.flow_from_directory(
path,
target_size=self.model.input_shape[1:3],
color_mode=self.color_mode,
batch_size=2000,
class_mode='sparse',
seed=123,
shuffle=True)
# fit the generator with a batch of sampled data
X_raw, Y_raw = raw_generator.next()
datagen.fit(X_raw)
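# Minimal usage sketch (not part of the original module; file and directory
# names are hypothetical). predict_path expects `path` to contain one or
# more sub-directories of images and writes predictions.csv to `output_path`:
#
#   classifier = CamTrapClassifier(path_to_model="model.h5",
#                                  model_cfg_json="model_cfg.json")
#   classifier.predict_path(path="images/", output_path="out/", batch_size=64)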
| [
"[email protected]"
] | |
04231d91e5d33e957c15c28892b7716bb60a70c1 | 895dfeb5c6af5e8c66772491d164e9d5b57d4302 | /Courses/Full_Stack_Foundations/restaurant_raw/webserver.py | 7fa49de6ca38602236c51866bf2598078cc5b8a0 | [] | no_license | JPGITHUB1519/FSND-Udacity | 753747aac6b727ac7a0e98d95059127bf8a3690d | a7acb4df6ff3b2216da5749e7087b0e254ed80cd | refs/heads/master | 2020-12-30T12:11:21.051853 | 2017-10-05T02:30:22 | 2017-10-05T02:30:22 | 91,410,311 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,540 | py | import os
import cgi
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
from jinja2 import Environment, FileSystemLoader, select_autoescape
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from database_setup import Base, Restaurant, MenuItem
### jinja config
config = {
"directory": os.path.dirname(os.path.abspath(__file__)),
"templates_dir": '/templates'
}
directory = os.path.dirname(os.path.abspath(__file__))
env = Environment(
loader = FileSystemLoader(config["directory"] + config['templates_dir']),
autoescape=select_autoescape('html', 'xml')
)
### database config
engine = create_engine('sqlite:///restaurantmenu.db')
# Bind the engine to the metadata of the Base class so that the
# declaratives can be accessed through a DBSession instance
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
# A DBSession() instance establishes all conversations with the database
# and represents a "staging zone" for all the objects loaded into the
# database session object. Any change made against the objects in the
# session won't be persisted into the database until you call
# session.commit(). If you're not happy about the changes, you can
# revert all of them back to the last commit by calling
# session.rollback()
session = DBSession()
class webserverHandler(BaseHTTPRequestHandler):
def render_str(self, template, **params):
template = env.get_template(template)
return template.render(params)
def render(self, template, **kw):
self.write(self.render_str(template, **kw))
def write(self, element):
self.wfile.write(element)
def send_headers(self, request_type="get"):
if request_type == "get":
self.send_response(200)
if request_type == "post":
self.send_response(301)
self.send_header('Content-Type', 'text/html')
self.end_headers()
def do_GET(self):
try:
if self.path.endswith('/test'):
                self.send_headers()
self.render("test.html", name="jean")
return
if self.path.endswith('/restaurants'):
self.send_headers()
restaurants = session.query(Restaurant).all()
self.render('restaurants.html', restaurants=restaurants)
return
if self.path.endswith('/restaurants/create'):
self.send_headers()
self.render("restaurants_create.html")
return
            if '/restaurants/edit/' in self.path:
self.send_headers()
self.write("hey")
print self.path
# restaurant = session.query(Restaurant).filter_by(id=id)
# self.render("restaurants_edit.html")
except IOError:
self.send_error(404, "File not found %s" % self.path)
def do_POST(self):
try:
if self.path.endswith("/restaurants/store"):
self.send_headers("post")
ctype, pdict = cgi.parse_header(self.headers.getheader('content-type'))
if ctype == 'multipart/form-data':
# collect field from the form
fields = cgi.parse_multipart(self.rfile, pdict)
name = fields.get('name')
restaurant = Restaurant(name=name[0])
session.add(restaurant)
session.commit()
# redirect
self.write("Restaurante Creado %s" % restaurant.name)
if self.path.endswith('/restaurants/update'):
pass
except IOError:
self.send_error(404, "File not found %s" % self.path)
def main():
try:
port = 8080
server = HTTPServer(('', port), webserverHandler)
print "Web Server Running on port %s" % port
server.serve_forever()
except KeyboardInterrupt:
print "^C entered, stopping web server..."
server.socket.close()
if __name__ == "__main__":
main() | [
"[email protected]"
] | |
1aa3752c58335a1b8cb63f5ca192c48180bc3889 | d554b1aa8b70fddf81da8988b4aaa43788fede88 | /5 - Notebooks e Data/1 - Análises numéricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/222/users/4062/codes/1845_2220.py | 3d32d34040dc707ac31d11f0735accdb07b48017 | [] | no_license | JosephLevinthal/Research-projects | a3bc3ca3b09faad16f5cce5949a2279cf14742ba | 60d5fd6eb864a5181f4321e7a992812f3c2139f9 | refs/heads/master | 2022-07-31T06:43:02.686109 | 2020-05-23T00:24:26 | 2020-05-23T00:24:26 | 266,199,309 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 131 | py | from numpy import *
m = array(eval(input("Payments: ")))
l = shape(m)[0]
c = shape(m)[1]
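# Example (hypothetical input): for [[10, 2], [3, 40]] the loop below prints
# the row maxima 10 and 40, one per line.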
for i in range(l):
print(max(m[i, :])) | [
"[email protected]"
] | |
21a347c147a0776a230dc184af319b1643c6a013 | 4b758ca583d2a58d4d711381405e024109a0f08f | /dali/test/python/test_operator_random_resized_crop.py | 483d481dbf53a927fd48e58f3eff3b31bc9c649c | [
"Apache-2.0",
"LicenseRef-scancode-free-unknown"
] | permissive | ConnectionMaster/DALI | 76ff07b2fa3f62490b059088c88ade7570130ff4 | 6b90519d2c209d705e8912a5f00b71a018aeaa52 | refs/heads/master | 2023-04-14T13:04:57.520421 | 2021-01-22T16:34:31 | 2021-01-22T16:34:31 | 187,683,855 | 1 | 1 | Apache-2.0 | 2023-04-03T23:45:28 | 2019-05-20T17:18:56 | C++ | UTF-8 | Python | false | false | 6,577 | py | import nvidia.dali as dali
import nvidia.dali.fn as fn
import numpy as np
import test_utils
def close(a, b):
    return abs(a - b) < 1e-5 or abs(a - b) < (abs(a) + abs(b)) * 1e-6
def analyze_frame(image, channel_dim):
def pixel(x, y):
return image[:, y, x] if channel_dim == 0 else image[y, x, :]
x0, y0, f0 = pixel(0, 0)
x1, y1, f1 = pixel(-1, 0)
x2, y2, f2 = pixel(0, -1)
x3, y3, f3 = pixel(-1, -1)
assert close(x0, x2), "x0 = {} != x2 = {}".format(x0, x2)
assert close(x1, x3), "x1 = {} != x3 = {}".format(x1, x3)
assert close(y0, y1), "y0 = {} != y1 = {}".format(y0, y1)
assert close(y2, y3), "y2 = {} != y3 = {}".format(y2, y3)
assert close(f0, f1) and close(f0, f2) and close(f0, f3)
return x0, y0, x3, y3, int(np.round(f0))
def check_frame(image, frame_index, total_frames, channel_dim, roi, w, h, aspect_ratio_range, area_range, value_range):
x0, y0, x1, y1, f = analyze_frame(image, channel_dim)
assert f == frame_index * value_range // total_frames
out_h, out_w = image.shape[:2] if channel_dim != 0 else image.shape[1:3]
xeps = np.ceil(2 + 2 * w / out_w)
yeps = np.ceil(2 + 2 * h / out_h)
if frame_index == 0:
roi_w_max = min((x1 - x0) * w / value_range + xeps, w)
roi_w_min = max((x1 - x0) * w / value_range - xeps, 1)
roi_h_max = min((y1 - y0) * h / value_range + yeps, h)
        roi_h_min = max((y1 - y0) * h / value_range - yeps, 1)
ratio_min = roi_w_min / roi_h_max
ratio_max = roi_w_max / roi_h_min
area_min = roi_w_min * roi_h_min / (w * h)
area_max = roi_w_max * roi_h_max / (w * h)
        assert ratio_max >= aspect_ratio_range[0] and ratio_min <= aspect_ratio_range[1], \
            "aspect ratio estimated at {}..{} outside valid range [{} .. {}]".format(
                ratio_min, ratio_max, *aspect_ratio_range)
        assert area_max >= area_range[0] and area_min <= area_range[1], \
            "area estimated at {}..{} outside valid range [{} .. {}]".format(
                area_min, area_max, *area_range)
return x0, y0, x1, y1
else:
assert (x0, y0, x1, y1) == roi
return roi
def check_seq(seq, channel_dim, w, h, aspect_ratio_range, area_range, value_range):
frame_dim = 1 if channel_dim == 0 else 0
frame_channel_dim = -1 if channel_dim == -1 else 0
roi = None
total_frames = seq.shape[frame_dim]
for f in range(total_frames):
frame = seq[:,f] if frame_dim == 1 else seq[f]
roi = check_frame(frame, f, total_frames, frame_channel_dim, roi, w, h, aspect_ratio_range, area_range, value_range)
def check_output(output, channel_dim, input_shape, aspect_ratio_range, area_range, value_range):
if len(input_shape) == 3:
h, w = input_shape[1:3] if channel_dim == 0 else input_shape[0:2]
check_frame(output, 0, 1, channel_dim, None, w, h, aspect_ratio_range, area_range, value_range)
else:
hidx = 1 if channel_dim == -1 else 2
h, w = input_shape[hidx:hidx+2]
check_seq(output, channel_dim, w, h, aspect_ratio_range, area_range, value_range)
def type_range(type):
if np.issubdtype(type, np.integer):
return np.iinfo(type).max
else:
return 100000
def generate_data(frames, width, height, channel_dim, type):
value_range = type_range(type)
no_frames = (frames is None)
if no_frames:
frames = 1
x = (np.arange(0, width) * value_range // width).astype(type)[np.newaxis,np.newaxis,:]
y = (np.arange(0, height) * value_range // height).astype(type)[np.newaxis,:,np.newaxis]
f = (np.arange(0, frames) * value_range // frames).astype(type)[:,np.newaxis,np.newaxis]
x = np.broadcast_to(x, (frames, height, width))
y = np.broadcast_to(y, (frames, height, width))
f = np.broadcast_to(f, (frames, height, width))
seq = np.stack([x, y, f], axis=channel_dim)
if no_frames:
seq = seq[:, 0] if channel_dim == 0 else seq[0]
return seq
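# (The three channels of the generated sequence encode scaled x, y and frame
# indices, which is what lets check_frame/check_seq above recover the crop
# window and frame order from pixel values alone.)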
def generator(batch_size, max_frames, channel_dim, type):
type = test_utils.dali_type_to_np(type)
assert max_frames is not None or channel_dim != 1
def generate():
batch = []
for _ in range(batch_size):
frames = None if max_frames is None else np.random.randint(1, max_frames+1)
sz = np.random.randint(100, 2000 / (max_frames or 1))
w, h = np.random.randint(sz, 2*sz, [2])
batch.append(generate_data(frames, w, h, channel_dim, type))
return batch
return generate
def _test_rrc(device, max_frames, layout, aspect_ratio_range, area_range, output_size, input_type, output_type):
batch_size = 4
pipe = dali.pipeline.Pipeline(batch_size, 4, 0)
channel_dim = layout.find('C')
value_range = type_range(test_utils.dali_type_to_np(input_type))
if channel_dim == len(layout)-1:
channel_dim = -1
input = fn.external_source(source=generator(batch_size, max_frames, channel_dim, input_type), layout=layout)
shape = fn.shapes(input)
if device == "gpu":
input = input.gpu()
out = fn.random_resized_crop(input, random_aspect_ratio=aspect_ratio_range, random_area=area_range,
size=output_size, interp_type=dali.types.INTERP_LINEAR, seed=12321, dtype=output_type)
pipe.set_outputs(out, shape)
pipe.build()
for iter in range(3):
outputs, input_shapes = pipe.run()
if device == "gpu":
outputs = outputs.as_cpu()
assert outputs.layout() == layout
for i in range(batch_size):
out = outputs.at(i)
input_shape = input_shapes.at(i).tolist()
check_output(out, channel_dim, input_shape, aspect_ratio_range, area_range, value_range)
def test_random_resized_crop():
np.random.seed(12345)
types = [dali.types.UINT8, dali.types.INT16, dali.types.FLOAT]
for device in ["cpu", "gpu"]:
for max_frames in [None, 1, 8]:
for layout in ["FHWC", "FCHW", "CFHW"] if max_frames is not None else ["HWC", "CHW"]:
for aspect, area in [
((0.5, 2), (0.1, 0.8)),
((1, 2), (0.4, 1.0)),
((0.5, 1), (0.1, 0.5))]:
for size in [(100,100), (640,480)]:
input_type = types[np.random.randint(0, len(types))]
output_type = dali.types.FLOAT if np.random.randint(0, 2) else None
yield _test_rrc, device, max_frames, layout, aspect, area, size, input_type, output_type
| [
"[email protected]"
] | |
2e71d123768a4baaceb7cf78751643f3bc645473 | eb8855df9efcaafe7be3c33ac6a9eba09190a802 | /article/migrations/0002_articlepost_total_views.py | 1afe206df23165b96a38b7402b009dcd70fbdaca | [] | no_license | demo112/Bloging_new | 4441b32cde519b8aa6e5fd04a30af09a2e2cbe46 | ab8fbf0e33dd91413a707491bfce3e46d4d021c9 | refs/heads/master | 2020-04-30T00:52:41.741959 | 2019-03-28T00:08:28 | 2019-03-28T00:08:28 | 176,513,462 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 389 | py | # Generated by Django 2.1.7 on 2019-03-22 19:55
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('article', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='articlepost',
name='total_views',
field=models.PositiveIntegerField(default=0),
),
]
| [
"[email protected]"
] | |
f359d2d972956ca7e1c4ea94324273fa95da5ccf | 6bcffc29218f2a5459cac1f20f4eada0430abea4 | /pacioli/database/sql_views.py | 4458f0f649d4847ab93f46470e6cda941c927f7e | [
"BSD-2-Clause"
] | permissive | mantach86/pacioli | 8091c053440384e2bf8f4241d50da181b7dbc031 | 4a66ed80b1e3408e19d81e0ce7cfa46e477f489e | refs/heads/master | 2020-07-24T18:41:35.146089 | 2017-04-21T15:15:46 | 2017-04-21T15:15:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,441 | py | from pacioli.models import db
def create_all():
create_ofx_views()
create_trial_balances_trigger_function()
create_amazon_views()
create_bookkeeping_views()
create_mappings_views()
def create_trial_balances_trigger_function():
db.engine.execute("""
CREATE OR REPLACE FUNCTION
bookkeeping.update_trial_balance(
_subaccount VARCHAR,
period_interval_name VARCHAR,
period_name VARCHAR
) RETURNS VOID AS $$
DECLARE
existing_debit_record RECORD;
existing_credit_record RECORD;
debit_balance_amount NUMERIC;
debit_changes_amount NUMERIC;
credit_balance_amount NUMERIC;
credit_changes_amount NUMERIC;
BEGIN
SELECT coalesce(sum(functional_amount), 0) INTO debit_balance_amount
FROM bookkeeping.journal_entries
WHERE debit_subaccount = _subaccount
AND to_char(timestamp, period_interval_name) <= period_name;
SELECT coalesce(sum(functional_amount), 0) INTO debit_changes_amount
FROM bookkeeping.journal_entries
WHERE debit_subaccount = _subaccount
AND to_char(timestamp, period_interval_name) = period_name;
SELECT * INTO existing_debit_record
FROM bookkeeping.trial_balances
WHERE bookkeeping.trial_balances.subaccount = _subaccount
AND bookkeeping.trial_balances.period = period_name
AND bookkeeping.trial_balances.period_interval = period_interval_name;
IF existing_debit_record IS NULL THEN
INSERT INTO bookkeeping.trial_balances
(subaccount, debit_balance, credit_balance,
net_balance, debit_changes, credit_changes,
net_changes, period, period_interval)
VALUES (_subaccount, debit_balance_amount, 0,
debit_balance_amount, debit_changes_amount, 0,
debit_changes_amount, period_name, period_interval_name);
ELSE
UPDATE bookkeeping.trial_balances
SET debit_balance = debit_balance_amount,
net_balance = debit_balance_amount - existing_debit_record.credit_balance,
debit_changes = debit_changes_amount,
net_changes = debit_changes_amount - existing_debit_record.credit_changes
WHERE id = existing_debit_record.id;
END IF;
SELECT coalesce(sum(functional_amount), 0) INTO credit_balance_amount
FROM bookkeeping.journal_entries
WHERE credit_subaccount = _subaccount
AND to_char(timestamp, period_interval_name) <= period_name;
SELECT coalesce(sum(functional_amount), 0) INTO credit_changes_amount
FROM bookkeeping.journal_entries
WHERE credit_subaccount = _subaccount
AND to_char(timestamp, period_interval_name) = period_name;
SELECT * INTO existing_credit_record
FROM bookkeeping.trial_balances
WHERE subaccount = _subaccount
AND period = period_name
AND period_interval = period_interval_name;
IF existing_credit_record IS NULL THEN
INSERT INTO bookkeeping.trial_balances
(subaccount, debit_balance, credit_balance,
net_balance, debit_changes, credit_changes,
net_changes, period, period_interval)
VALUES (_subaccount, 0, credit_balance_amount,
-credit_balance_amount, 0, credit_changes_amount,
-credit_changes_amount, period_name, period_interval_name);
ELSE
UPDATE bookkeeping.trial_balances
SET credit_balance = credit_balance_amount,
net_balance = existing_credit_record.debit_balance - credit_balance_amount,
credit_changes = credit_changes_amount,
net_changes = existing_credit_record.debit_changes - credit_changes_amount
WHERE id = existing_credit_record.id;
END IF;
RETURN;
END;
$$
SECURITY DEFINER
LANGUAGE plpgsql;
""")
db.engine.execute("""
CREATE OR REPLACE FUNCTION bookkeeping.subaccount_insert_triggered()
RETURNS trigger AS $$
DECLARE
period_intervals VARCHAR[5] := '{"YYYY", "YYYY-Q", "YYYY-MM",
"YYYY-WW", "YYYY-MM-DD"}';
period_interval_name VARCHAR;
period_name RECORD;
BEGIN
<<period_interval_loop>>
FOREACH period_interval_name IN ARRAY period_intervals
LOOP
<<periods_loop>>
FOR period_name in SELECT DISTINCT
to_char(bookkeeping.journal_entries.timestamp,
period_interval_name) AS p
FROM bookkeeping.journal_entries
WHERE bookkeeping.journal_entries.timestamp >= new.timestamp LOOP
PERFORM bookkeeping.update_trial_balance(
new.debit_subaccount,
period_interval_name,
period_name.p);
PERFORM bookkeeping.update_trial_balance(
new.credit_subaccount,
period_interval_name,
period_name.p);
END LOOP periods_loop;
END LOOP period_interval_loop;
RETURN new;
END;
$$
SECURITY DEFINER
LANGUAGE plpgsql;
""")
db.engine.execute("""
DROP TRIGGER IF EXISTS subaccount_insert_trigger
ON bookkeeping.journal_entries;
CREATE TRIGGER subaccount_insert_trigger
AFTER INSERT OR UPDATE
ON bookkeeping.journal_entries
FOR EACH ROW
EXECUTE PROCEDURE bookkeeping.subaccount_insert_triggered();
""")
def create_ofx_views():
db.engine.execute("""
CREATE OR REPLACE VIEW ofx.transactions
AS SELECT
concat(ofx.stmttrn.fitid, ofx.stmttrn.acctfrom_id) AS id,
ofx.stmttrn.dtposted AS date,
ofx.stmttrn.trnamt AS amount,
concat(ofx.stmttrn.name, ofx.stmttrn.memo) AS description,
ofx.stmttrn.trntype AS type,
ofx.acctfrom.name AS account,
ofx.stmttrn.acctfrom_id AS account_id,
bookkeeping.journal_entries.id AS journal_entry_id,
bookkeeping.journal_entries.debit_subaccount AS debit_subaccount,
bookkeeping.journal_entries.credit_subaccount AS credit_subaccount
FROM ofx.stmttrn
LEFT OUTER JOIN bookkeeping.journal_entries
ON bookkeeping.journal_entries.transaction_id
= concat(ofx.stmttrn.fitid, ofx.stmttrn.acctfrom_id)
AND bookkeeping.journal_entries.transaction_source = 'ofx'
JOIN ofx.acctfrom
ON ofx.acctfrom.id = ofx.stmttrn.acctfrom_id
ORDER BY ofx.stmttrn.dtposted DESC;
""")
db.engine.execute("""
CREATE OR REPLACE VIEW ofx.investment_transactions AS SELECT
ofx.invtran.*,
ofx.acctfrom.name AS account_name,
CASE ofx.invtran.subclass
WHEN 'buymf' THEN buymf_secinfo.secname
WHEN 'sellmf' THEN sellmf_secinfo.secname
WHEN 'reinvest' THEN reinvest_secinfo.secname
END AS secname,
CASE ofx.invtran.subclass
WHEN 'buymf' THEN buymf_secinfo.ticker
WHEN 'sellmf' THEN sellmf_secinfo.ticker
WHEN 'reinvest' THEN reinvest_secinfo.ticker
END AS ticker,
CASE ofx.invtran.subclass
WHEN 'buymf' THEN buymf.units
WHEN 'sellmf' THEN sellmf.units
WHEN 'reinvest' THEN reinvest.units
END AS units,
CASE ofx.invtran.subclass
WHEN 'buymf' THEN buymf.unitprice
WHEN 'sellmf' THEN sellmf.unitprice
WHEN 'reinvest' THEN reinvest.unitprice
END AS unitprice,
CASE ofx.invtran.subclass
WHEN 'buymf' THEN buymf.total*-1
WHEN 'sellmf' THEN sellmf.total*-1
WHEN 'reinvest' THEN reinvest.total*-1
END AS total
FROM ofx.invtran
LEFT OUTER JOIN ofx.buymf ON ofx.buymf.id = ofx.invtran.id
and ofx.invtran.subclass = 'buymf'
LEFT OUTER JOIN ofx.sellmf ON ofx.sellmf.id = ofx.invtran.id
and ofx.invtran.subclass = 'sellmf'
LEFT OUTER JOIN ofx.reinvest ON ofx.reinvest.id = ofx.invtran.id
and ofx.invtran.subclass = 'reinvest'
LEFT OUTER JOIN ofx.secinfo buymf_secinfo
ON buymf_secinfo.id = ofx.buymf.secinfo_id
LEFT OUTER JOIN ofx.secinfo sellmf_secinfo
ON sellmf_secinfo.id = ofx.sellmf.secinfo_id
LEFT OUTER JOIN ofx.secinfo reinvest_secinfo
ON reinvest_secinfo.id = ofx.reinvest.secinfo_id
JOIN ofx.acctfrom ON acctfrom.id = ofx.invtran.acctfrom_id
ORDER BY ofx.invtran.dttrade DESC;
""")
db.engine.execute("""
CREATE OR REPLACE VIEW ofx.cost_bases AS SELECT
investment_transactions.secname,
sum(investment_transactions.units) AS total_units,
sum(investment_transactions.total) AS cost_basis,
q1.ticker,
q1.adjusted_close AS "close",
q1.adjusted_close
* sum(investment_transactions.units) AS market_value,
(q1.adjusted_close
* sum(investment_transactions.units)
- sum(investment_transactions.total)) AS pnl,
(q1.adjusted_close
* sum(investment_transactions.units)
- sum(investment_transactions.total))
/ sum(investment_transactions.total) AS pnl_percent,
q2.date AS price_date
FROM ofx.investment_transactions
JOIN (SELECT ticker, max(date) AS date
FROM investments.security_prices
GROUP BY ticker) AS q2
ON q2.ticker = investment_transactions.ticker
JOIN investments.security_prices q1
ON q1.ticker = ofx.investment_transactions.ticker
AND q2.date = q1.date
GROUP BY investment_transactions.secname,
q1.ticker,
q2.date,
q1.adjusted_close
ORDER BY sum(investment_transactions.total);
""")
def create_amazon_views():
db.engine.execute("""
CREATE OR REPLACE VIEW amazon.amazon_transactions
AS SELECT
amazon.items.*,
bookkeeping.journal_entries.id AS journal_entry_id
FROM amazon.items
LEFT OUTER JOIN bookkeeping.journal_entries
ON cast(amazon.items.id AS CHARACTER VARYING) = bookkeeping.journal_entries.transaction_id
AND bookkeeping.journal_entries.transaction_source = 'amazon'
ORDER BY amazon.items.shipment_date DESC;
""")
def create_bookkeeping_views():
db.engine.execute("""
CREATE OR REPLACE VIEW bookkeeping.detailed_journal_entries
AS SELECT
bookkeeping.journal_entries.id AS id,
bookkeeping.journal_entries.transaction_source AS transaction_source,
bookkeeping.journal_entries.transaction_id AS transaction_id,
bookkeeping.journal_entries."timestamp" AS "timestamp",
bookkeeping.journal_entries.debit_subaccount as debit_subaccount,
bookkeeping.journal_entries.credit_subaccount as credit_subaccount,
bookkeeping.journal_entries.functional_amount as functional_amount,
CASE bookkeeping.journal_entries.transaction_source
WHEN 'ofx' THEN concat(ofx.stmttrn.name, ofx.stmttrn.memo)
WHEN 'amazon' THEN amazon.items.title
END AS description
FROM bookkeeping.journal_entries
LEFT OUTER JOIN ofx.stmttrn
ON concat(ofx.stmttrn.fitid, ofx.stmttrn.acctfrom_id)
= bookkeeping.journal_entries.transaction_id
AND bookkeeping.journal_entries.transaction_source = 'ofx'
LEFT OUTER JOIN amazon.items
ON cast(amazon.items.id AS CHARACTER VARYING)
= bookkeeping.journal_entries.transaction_id
AND bookkeeping.journal_entries.transaction_source = 'amazon'
LEFT OUTER JOIN ofx.acctfrom
ON ofx.acctfrom.id = ofx.stmttrn.acctfrom_id
ORDER BY bookkeeping.journal_entries."timestamp" DESC;
""")
def create_mappings_views():
db.engine.execute("""
CREATE OR REPLACE VIEW admin.mapping_overlaps
AS SELECT DISTINCT
concat(ofx.stmttrn.name, ofx.stmttrn.memo) AS description,
mappings_table_1.id AS mapping_id_1,
mappings_table_1.keyword AS mapping_keyword_1,
mappings_table_2.id AS mapping_id_2,
mappings_table_2.keyword AS mapping_keyword_2,
mappings_table_2.source AS source
FROM ofx.stmttrn
JOIN admin.mappings mappings_table_1
ON lower(concat(ofx.stmttrn.name, ofx.stmttrn.memo))
LIKE '%%' || array_to_string(regexp_split_to_array(
lower(mappings_table_1.keyword), E'\\\s+'
), '%%') || '%%'
AND mappings_table_1.source = 'ofx'
JOIN admin.mappings mappings_table_2
ON lower(concat(ofx.stmttrn.name, ofx.stmttrn.memo))
LIKE '%%' || array_to_string(regexp_split_to_array(
lower(mappings_table_2.keyword), E'\\\s+'
), '%%') || '%%'
AND mappings_table_1.keyword != mappings_table_2.keyword
AND mappings_table_2.source = 'ofx'
ORDER BY description;
""")
# Todo: have view updates propagate to underlying physical tables
# try:
# db.engine.execute('DROP RULE admin.ofx_mapping_overlaps_keyword_1_update_rule')
# except ProgrammingError:
# pass
# db.engine.execute("""
# CREATE RULE ofx_mapping_overlaps_keyword_1_update_rule AS
# ON UPDATE TO admin.ofx_mapping_overlaps WHERE NEW.mapping_keyword_1 <> OLD.mapping_keyword_1
# DO INSTEAD UPDATE admin.mappings SET keyword = NEW.mapping_keyword_1 WHERE id = NEW.mapping_id_1;
# """)
#
# try:
# db.engine.execute('DROP RULE admin.ofx_mapping_overlaps_keyword_2_update_rule')
# except ProgrammingError:
# pass
# db.engine.execute("""
# CREATE RULE ofx_mapping_overlaps_keyword_2_update_rule AS
# ON UPDATE TO admin.ofx_mapping_overlaps WHERE NEW.mapping_keyword_2 <> OLD.mapping_keyword_2
# DO INSTEAD UPDATE admin.mappings SET keyword = NEW.mapping_keyword_2 WHERE id = NEW.mapping_id_2;
# """)
# ===== File: /post/post.py =====
# -*- coding=utf-8 -*-
import urllib.request
import urllib.parse
import ssl
import json
import time
def postsendsms():
ssl._create_default_https_context = ssl._create_unverified_context
sendsmspost_url = 'http://172.16.2.111:3000/v1/login_by_wx'
t = time.time()
sendsmsheaders = {
# 'Content-Type': 'application/json',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36',
}
sendsmsform_data = {
'userwxcode': '043Ow9h228nyXU0IPFi22wbMg22Ow9h2',
'encryptedData' : '76uQW81eWiIo8JCsRTUWf4QuR3PkgctOfF3rvAcTx2jXHRfauzia0XggmMANUYK0WylpOp6Ms3YXkOTqNWvQJ4ualQkpZiowJzrL71p1AXCCdpGlDo7YiB9Qt0K5In/h0mme0Jzw99LCyPTac2dwepNle0LRe1HCNmKlYyyyF5kpgB/ZD8BSBSGw8VFpufXWagZb+iQ88T25iPnPrTMhpJRDun7F/JROcNERmjCHlSzYADYMxaOqaj17tk1mKRe5lt2tphxARR++ZfWaTqqSESJn/ywYZr+3XATsZZ/Ve7rgieoQshb6drQTEhCUDoUB2bz3XVHNsm6X8wiv7rCBBe4Gm6MEguKCJxeXcS41kYaKS+caZarcLJ3JD5sstrQ6zx6pKaGhIja5AvBWWyBtyMgn3tvXeh28Glt5qrOK2GuvWK5BoKC71tQSi6iz8OQhCS3P3hdGfNf/tSH6Eb3krzZOaV19jQGAgVI+wNpVvDY=',
'iv' : 'kDJKkS9g6mFXputhbNouMw==',
}
    # Build the POST request
request = urllib.request.Request(url=sendsmspost_url, headers=sendsmsheaders)
form_data = urllib.parse.urlencode(sendsmsform_data).encode()
    # Send the POST request
response = urllib.request.urlopen(request, data=form_data)
dict_json = json.loads(response.read().decode())
print(dict_json)
print("####################")
return dict_json
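# testtoken feeds the openid/access_token returned by postsendsms into the
# foreign-login endpoint (note: it targets 172.16.2.44 rather than 172.16.2.111).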
def testtoken(data):
print(data['errcode'])
ssl._create_default_https_context = ssl._create_unverified_context
sendsmspost_url = 'http://172.16.2.44:3000/v1/sforeignlogin'
t = time.time()
sendsmsheaders = {
# 'Content-Type': 'application/json',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36',
}
authData = {
'openid' : data['openid'],
'access_token' : data['access_token'],
}
sendsmsform_data = {
'platform': 'weixin',
'authData' : json.dumps(authData),
}
    # Build the POST request
request = urllib.request.Request(url=sendsmspost_url, headers=sendsmsheaders)
form_data = urllib.parse.urlencode(sendsmsform_data).encode()
    # Send the POST request
response = urllib.request.urlopen(request, data=form_data)
dict_json = json.loads(response.read().decode())
print(dict_json)
print("####################")
return dict_json
def get_wx_userinfo():
url = 'http://172.16.2.44:3000/v1/app/get_wx_userinfo?'
data = {
'code' : '0231512s1uGMOk0kWY0s1bDV1s11512o',
}
query_string = urllib.parse.urlencode(data)
url += query_string
headers = {
'User-Agent' : 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36',
}
    # Send the GET request
requset = urllib.request.Request(url = url , headers=headers)
response = urllib.request.urlopen(requset)
print("####################")
print(response.read().decode())
if __name__ == '__main__':
# get_wx_userinfo()
# testtoken(postsendsms())
postsendsms()
# data = postsendsms()
# testtoken(data)
# testtoken(data)
# testtoken(data)
# 23_BgsyJbTUfekU4pynn0EbQpC1T0jCOiwUEtSJhF6FghVFFLcgXyXflYQx5wAv1-qqBfGcYLZE6KfBUk3oPt9ELrDkA6lB_tgiVJG6hRI7V0_20jaPvkHbL_eG9PgPIKbADAZSU
# 23_BgsyJbTUfekU4pynn0EbQpC1T0jCOiwUEtSJhF6FghVFFLcgXyXflYQx5wAv1-qqBfGcYLZE6KfBUk3oPt9ELrDkA6lB_tgiVJG6hRI7V0_20jaPvkHbL_eG9PgPIKbADAZSU
# 23_bej0j1-r1VKCVYNB7ZDi05kRhrN1J3jXlVXjrsmClv7s_azAgShoybtG99VoCh3791R4Yp3i4d7o2FcmmQ5kLuLehdfSfKPMnKw79x3f0ke_-XE7hkG5irujGjEoKI9ZNeGhaOaadKDNeqhVGRBdAFAJUE
# 23_bej0j1-r1VKCVYNB7ZDi05kRhrN1J3jXlVXjrsmClv7s_azAgShoybtG99VoCh3791R4Yp3i4d7o2FcmmQ5kLuLehdfSfKPMnKw79x3f0ke_-XE7hkG5irujGjEoKI9ZNeGhaOaadKDNeqhVGRBdAFAJUE
# { params: { userwxcode: '011oLiaZ1Vnt5U0aF98Z17ZDaZ1oLiah' } }
# simpleForeignLogin
# ===== File: /wtDigiTwin/fast/fastlinfiles.py =====
import numpy as np
import pickle
import glob
import os
import weio
class FASTPeriodicOP(object):
""" Class for a set of *.lin files, all assumed to be for the same periodic operating point"""
def __init__(self,prefix,nLin=None):
if nLin is None:
linfiles= glob.glob(prefix + '*.*.lin')
self.nLinTimes = len(linfiles)
else:
self.nLinTimes = nLin
print(prefix, self.nLinTimes)
self.prefix = prefix
self.Data = []
self.vAzim = []
self.vWS = []
self.vPitch = []
self.vRotSpeed = []
self.vBu = []
for i in np.arange(self.nLinTimes):
linfilename= prefix+'.'+str(i+1)+'.lin'
print(linfilename)
if not os.path.exists(linfilename):
print('Linearization file missing: ',linfilename)
linfile=weio.read(linfilename)
df=linfile.toDataFrame()
self.Data.append(linfile)
#self.A=lin['A']
#B=linfile['B']
#u=linfile['u']
#self.C=lin['C']
#self.D=lin['D']
try:
self.vWS.append(df['u']['WS_[m/s]'][0])
except:
print('Wind speed not found in input, assuming 0m/s')
self.vWS.append(0)
self.vRotSpeed.append(linfile.RotSpeed)
self.vAzim.append(linfile.Azimuth)
self.vPitch.append(df['u']['B1pitch_[rad]'][0]*180/np.pi)
self.WS = np.mean(self.vWS)
self.Pitch = np.mean(self.vPitch)
self.RotSpeed = np.mean(self.vRotSpeed)
self.x = df['x']
self.y = df['y']
self.u = df['u']
try:
self.EDdescr = linfile['EDDOF']
except:
print('EDDOF not available. A special version of OpenFAST is required.')
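# FASTLin aggregates one FASTPeriodicOP per operating point (sorted by wind
# speed) and exposes statistics over their linearized state-space matrices.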
class FASTLin(object):
""" Class for linearization data for different operating points (typically Campbell) """
def __init__(self,folder='./', prefix='',nLin=None):
fstfiles= glob.glob(folder + prefix + '*.*.lin')
Sim_Prefix=np.unique(['.'.join(f.split('.')[:-2]) for f in fstfiles])
nSim = len(Sim_Prefix)
# --- Read period operating points
print('Reading linearizations for {} operating points'.format(nSim))
self.OP_Data=[FASTPeriodicOP(pref,nLin=nLin) for pref in Sim_Prefix]
# --- Sort by wind speed
Isort = np.argsort(self.WS)
self.OP_Data = [self.OP_Data[i] for i in Isort]
if self.MaxNLinTimes>1:
IBad = [i for i in np.arange(nSim) if self.nLinTimes[i]<self.MaxNLinTimes and self.OP_Data[i].WS>0]
if len(IBad)>0:
print('>>> The following simulations have insufficient number of data points:')
for i in IBad:
print(self.OP_Data[i].prefix, self.OP_Data[i].nLinTimes)
self.OP_Data = [self.OP_Data[i] for i in np.arange(nSim) if i not in IBad]
@property
def WS(self):
return np.array([sim.WS for sim in self.OP_Data])
@property
def nLinTimes(self):
return np.array([sim.nLinTimes for sim in self.OP_Data])
@property
def MaxNLinTimes(self):
return np.max(self.nLinTimes)
@property
def nOP(self):
return len(self.OP_Data)
@property
def xdescr(self):
return self.OP_Data[0].x.columns.values
@property
def ydescr(self):
return self.OP_Data[0].y.columns.values
@property
def EDdescr(self):
return self.OP_Data[0].EDdescr
@property
def udescr(self):
return self.OP_Data[0].u.columns.values
@property
def xop_mean(self):
return np.mean(np.abs(np.array([op.x.values for op in self.OP_Data])),axis=0)
@property
def uop_mean(self):
return np.mean(np.abs(np.array([op.u.values for op in self.OP_Data])),axis=0)
@property
def uop_mean(self):
return np.mean(np.abs(np.array([op.u.values for op in self.OP_Data])),axis=0)
@property
def yop_mean(self):
return np.mean(np.abs(np.array([op.y.values for op in self.OP_Data])),axis=0)
def stats(self,matName,WS=None):
if WS is None:
WS = self.WS
nOP=self.nOP
else:
nOP=len(WS)
print('Returning stats for WS:',WS)
M_mean=[]
shape= self.OP_Data[0].Data[0][matName].shape
M_all = np.zeros( (nOP, self.MaxNLinTimes, shape[0],shape[1]))
M_mean_perWS= np.zeros( (nOP, shape[0],shape[1]))
M_std_perWS = np.zeros( (nOP, shape[0],shape[1]))
# loop on operating points (e.g. WS)
ii=0
for iop, op in enumerate(self.OP_Data):
if op.WS in WS:
# Loop on linearization times (e.g. Azimuth)
for iTimes in np.arange(self.MaxNLinTimes):
if op.nLinTimes==1:
M_all[ii,iTimes,:,:]=op.Data[0][matName]
else:
M_all[ii,iTimes,:,:]=op.Data[iTimes][matName]
M_mean_perWS[ii,:,:] = np.mean(M_all[ii,:,:,:],axis=0)
M_std_perWS [ii,:,:] = np.std(M_all[ii,:,:,:],axis=0)
ii+=1
M_mean = np.mean( M_mean_perWS, axis=0 )
M_stdWS = np.std ( M_mean_perWS, axis=0 ) # How much elements vary with wind speed
M_stdAzim = np.mean( M_std_perWS , axis=0) # How much elements vary due to azimuth
return M_mean, M_mean_perWS, M_stdAzim, M_stdWS, M_all
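    # Returned tuple: overall mean matrix, per-wind-speed means, azimuth-induced
    # standard deviation, wind-speed-induced standard deviation, and the full
    # (nOP x nLinTimes x rows x cols) array of samples.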
def save(self,filename):
with open(filename,'wb') as f:
pickle.dump(self,f)
# def full_linear_model
# ===== File: /.history/django_vuejs_tutorial/django_vuejs/dataiku/admin_20200829045700.py =====
from django.contrib import admin
# Register your models here.
from .models import QCM, Answer, Task, Dataiku_account, Operation, Session, Question, Run, Posibility
admin.site.register(Task)
admin.site.register(Dataiku_account)
admin.site.register(Operation)
admin.site.register(Session)
admin.site.register(Question)
admin.site.register(Run)
admin.site.register(Posibility)
admin.site.register(QCM)
#admin.site.register(Answer)
# ===== File: /ABC085D.py =====
n,h=map(int,input().split())
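# AtCoder ABC085D ("Katana Thrower") greedy: throw every katana whose throw
# damage beats the best wield damage a = max(A), strongest throws first, then
# finish the remaining health h with ceil(h / a) wields.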
A=list(tuple(map(int,input().split())) for i in range(n))
A,B=zip(*A)
a=max(A)
count=0
for i in sorted(B)[::-1]:
if h<=0:break
elif i>=a:
count +=1
h -=i
if h>0:
if h%a==0:count +=h//a
else:count +=h//a+1
print(count)
# ===== File: /ublog/routes.py =====
# -*- coding: utf-8 -*-
__author__ = 'florije'
from ublog import app
from flask import render_template, request, flash, session, url_for, redirect
from forms import ContactForm, SignupForm, SigninForm
from flask_mail import Message, Mail
from models import db, User
mail = Mail()
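# Routes below: static home/about pages, a contact form that emails the site
# owner, and session-based signup/signin/signout flows guarding /profile.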
@app.route('/')
def home():
return render_template('home.html')
@app.route('/about')
def about():
return render_template('about.html')
@app.route('/contact', methods=['GET', 'POST'])
def contact():
form = ContactForm()
if request.method == 'POST':
if not form.validate():
flash('All fields are required.')
return render_template('contact.html', form=form)
else:
msg = Message(form.subject.data, sender='[email protected]', recipients=['[email protected]'])
msg.body = """
From: %s <%s>
%s
""" % (form.name.data, form.email.data, form.message.data)
mail.send(msg)
return render_template('contact.html', success=True)
elif request.method == 'GET':
return render_template('contact.html', form=form)
@app.route('/signup', methods=['GET', 'POST'])
def signup():
form = SignupForm()
if 'email' in session:
return redirect(url_for('profile'))
if request.method == 'POST':
if not form.validate():
return render_template('signup.html', form=form)
else:
newuser = User(form.firstname.data, form.lastname.data, form.email.data, form.password.data)
db.session.add(newuser)
db.session.commit()
session['email'] = newuser.email
return redirect(url_for('profile'))
elif request.method == 'GET':
return render_template('signup.html', form=form)
@app.route('/profile')
def profile():
if 'email' not in session:
return redirect(url_for('signin'))
user = User.query.filter_by(email=session['email']).first()
if user is None:
return redirect(url_for('signin'))
else:
return render_template('profile.html')
@app.route('/signin', methods=['GET', 'POST'])
def signin():
form = SigninForm()
if 'email' in session:
return redirect(url_for('profile'))
if request.method == 'POST':
if not form.validate():
return render_template('signin.html', form=form)
else:
session['email'] = form.email.data
return redirect(url_for('profile'))
elif request.method == 'GET':
return render_template('signin.html', form=form)
@app.route('/signout')
def signout():
if 'email' not in session:
return redirect(url_for('signin'))
session.pop('email', None)
return redirect(url_for('home'))
# ===== File: /desktop/core/ext-py/tablib-0.10.0/tablib/packages/openpyxl3/shared/date_time.py =====
# file openpyxl/shared/date_time.py
# Copyright (c) 2010 openpyxl
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# @license: http://www.opensource.org/licenses/mit-license.php
# @author: Eric Gazoni
"""Manage Excel date weirdness."""
# Python stdlib imports
from math import floor
import calendar
import datetime
import time
import re
# constants
W3CDTF_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
RE_W3CDTF = '(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(.(\d{2}))?Z'
EPOCH = datetime.datetime.utcfromtimestamp(0)
def datetime_to_W3CDTF(dt):
"""Convert from a datetime to a timestamp string."""
return datetime.datetime.strftime(dt, W3CDTF_FORMAT)
def W3CDTF_to_datetime(formatted_string):
"""Convert from a timestamp string to a datetime object."""
match = re.match(RE_W3CDTF,formatted_string)
digits = list(map(int, match.groups()[:6]))
return datetime.datetime(*digits)
class SharedDate(object):
"""Date formatting utilities for Excel with shared state.
Excel has a two primary date tracking schemes:
Windows - Day 1 == 1900-01-01
Mac - Day 1 == 1904-01-01
SharedDate stores which system we are using and converts dates between
Python and Excel accordingly.
"""
CALENDAR_WINDOWS_1900 = 1900
CALENDAR_MAC_1904 = 1904
datetime_object_type = 'DateTime'
def __init__(self):
self.excel_base_date = self.CALENDAR_WINDOWS_1900
def datetime_to_julian(self, date):
"""Convert from python datetime to excel julian date representation."""
if isinstance(date, datetime.datetime):
return self.to_julian(date.year, date.month, date.day, \
hours=date.hour, minutes=date.minute, seconds=date.second)
elif isinstance(date, datetime.date):
return self.to_julian(date.year, date.month, date.day)
def to_julian(self, year, month, day, hours=0, minutes=0, seconds=0):
"""Convert from Python date to Excel JD."""
# explicitly disallow bad years
# Excel 2000 treats JD=0 as 1/0/1900 (buggy, disallow)
# Excel 2000 treats JD=2958466 as a bad date (Y10K bug!)
if year < 1900 or year > 10000:
msg = 'Year not supported by Excel: %s' % year
raise ValueError(msg)
if self.excel_base_date == self.CALENDAR_WINDOWS_1900:
# Fudge factor for the erroneous fact that the year 1900 is
# treated as a Leap Year in MS Excel. This affects every date
# following 28th February 1900
if year == 1900 and month <= 2:
excel_1900_leap_year = False
else:
excel_1900_leap_year = True
excel_base_date = 2415020
else:
raise NotImplementedError('Mac dates are not yet supported.')
#excel_base_date = 2416481
#excel_1900_leap_year = False
# Julian base date adjustment
if month > 2:
month = month - 3
else:
month = month + 9
year -= 1
# Calculate the Julian Date, then subtract the Excel base date
# JD 2415020 = 31 - Dec - 1899 -> Excel Date of 0
century, decade = int(str(year)[:2]), int(str(year)[2:])
excel_date = floor(146097 * century / 4) + \
floor((1461 * decade) / 4) + floor((153 * month + 2) / 5) + \
day + 1721119 - excel_base_date
if excel_1900_leap_year:
excel_date += 1
# check to ensure that we exclude 2/29/1900 as a possible value
if self.excel_base_date == self.CALENDAR_WINDOWS_1900 \
and excel_date == 60:
msg = 'Error: Excel believes 1900 was a leap year'
raise ValueError(msg)
excel_time = ((hours * 3600) + (minutes * 60) + seconds) / 86400
return excel_date + excel_time
def from_julian(self, value=0):
"""Convert from the Excel JD back to a date"""
if self.excel_base_date == self.CALENDAR_WINDOWS_1900:
excel_base_date = 25569
if value < 60:
excel_base_date -= 1
elif value == 60:
msg = 'Error: Excel believes 1900 was a leap year'
raise ValueError(msg)
else:
raise NotImplementedError('Mac dates are not yet supported.')
#excel_base_date = 24107
if value >= 1:
utc_days = value - excel_base_date
return EPOCH + datetime.timedelta(days=utc_days)
elif value >= 0:
hours = floor(value * 24)
mins = floor(value * 24 * 60) - floor(hours * 60)
secs = floor(value * 24 * 60 * 60) - floor(hours * 60 * 60) - \
floor(mins * 60)
return datetime.time(int(hours), int(mins), int(secs))
else:
msg = 'Negative dates (%s) are not supported' % value
raise ValueError(msg)
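# Illustrative round trip (values assumed, Windows 1900 date system):
#     sd = SharedDate()
#     serial = sd.to_julian(2010, 6, 15)  # Excel serial number for the date
#     sd.from_julian(serial)              # -> datetime.datetime(2010, 6, 15, 0, 0)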
# ===== File: /data/p3BR/R1/benchmark/startQiskit_noisy429.py =====
# qubit number=3
# total number=77
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def bitwise_xor(s: str, t: str) -> str:
length = len(s)
res = []
for i in range(length):
res.append(str(int(s[i]) ^ int(t[i])))
return ''.join(res[::-1])
def bitwise_dot(s: str, t: str) -> str:
length = len(s)
res = 0
for i in range(length):
res += int(s[i]) * int(t[i])
return str(res % 2)
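# bitwise_xor/bitwise_dot implement the GF(2) arithmetic behind the
# Bernstein-Vazirani hidden-string function f(x) = a.x XOR b used below.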
def build_oracle(n: int, f: Callable[[str], str]) -> QuantumCircuit:
# implement the oracle O_f
# NOTE: use multi_control_toffoli_gate ('noancilla' mode)
# https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
# https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
# https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
controls = QuantumRegister(n, "ofc")
target = QuantumRegister(1, "oft")
oracle = QuantumCircuit(controls, target, name="Of")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
oracle.mct(controls, target[0], None, mode='noancilla')
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
# oracle.draw('mpl', filename=(kernel + '-oracle.png'))
return oracle
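# The oracle flips the target for every input rep with f(rep) == "1", using a
# multi-controlled Toffoli sandwiched between X gates that select the pattern.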
def build_circuit(n: int, f: Callable[[str], str]) -> QuantumCircuit:
# implement the Bernstein-Vazirani circuit
zero = np.binary_repr(0, n)
b = f(zero)
# initial n + 1 bits
input_qubit = QuantumRegister(n+1, "qc")
classicals = ClassicalRegister(n, "qm")
prog = QuantumCircuit(input_qubit, classicals)
# inverse last one (can be omitted if using O_f^\pm)
prog.x(input_qubit[n])
# circuit begin
prog.h(input_qubit[1]) # number=1
prog.h(input_qubit[1]) # number=70
prog.rx(-0.09738937226128368,input_qubit[2]) # number=2
prog.h(input_qubit[1]) # number=33
prog.y(input_qubit[2]) # number=56
prog.cz(input_qubit[2],input_qubit[1]) # number=34
prog.h(input_qubit[1]) # number=35
prog.h(input_qubit[1]) # number=3
# apply H to get superposition
for i in range(n):
prog.h(input_qubit[i])
prog.h(input_qubit[n])
prog.barrier()
# apply oracle O_f
oracle = build_oracle(n, f)
prog.append(
oracle.to_gate(),
[input_qubit[i] for i in range(n)] + [input_qubit[n]])
# apply H back (QFT on Z_2^n)
for i in range(n):
prog.h(input_qubit[i])
prog.barrier()
# measure
return prog
def get_statevector(prog: QuantumCircuit) -> Any:
state_backend = Aer.get_backend('statevector_simulator')
statevec = execute(prog, state_backend).result()
quantum_state = statevec.get_statevector()
qubits = round(log2(len(quantum_state)))
quantum_state = {
"|" + np.binary_repr(i, qubits) + ">": quantum_state[i]
for i in range(2 ** qubits)
}
return quantum_state
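# The returned dict maps basis-state labels such as "|010>" to their amplitudes.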
def evaluate(backend_str: str, prog: QuantumCircuit, shots: int, b: str) -> Any:
# Q: which backend should we use?
# get state vector
quantum_state = get_statevector(prog)
# get simulate results
# provider = IBMQ.load_account()
# backend = provider.get_backend(backend_str)
# qobj = compile(prog, backend, shots)
# job = backend.run(qobj)
# job.result()
backend = Aer.get_backend(backend_str)
# transpile/schedule -> assemble -> backend.run
results = execute(prog, backend, shots=shots).result()
counts = results.get_counts()
a = Counter(counts).most_common(1)[0][0][::-1]
return {
"measurements": counts,
# "state": statevec,
"quantum_state": quantum_state,
"a": a,
"b": b
}
def bernstein_test_1(rep: str):
"""011 . x + 1"""
a = "011"
b = "1"
return bitwise_xor(bitwise_dot(a, rep), b)
def bernstein_test_2(rep: str):
"""000 . x + 0"""
a = "000"
b = "0"
return bitwise_xor(bitwise_dot(a, rep), b)
def bernstein_test_3(rep: str):
"""111 . x + 1"""
a = "111"
b = "1"
return bitwise_xor(bitwise_dot(a, rep), b)
if __name__ == "__main__":
n = 2
a = "11"
b = "1"
f = lambda rep: \
bitwise_xor(bitwise_dot(a, rep), b)
prog = build_circuit(n, f)
sample_shot =4000
writefile = open("../data/startQiskit_noisy429.csv", "w")
# prog.draw('mpl', filename=(kernel + '.png'))
backend = FakeYorktown()
circuit1 = transpile(prog, FakeYorktown())
circuit1.h(qubit=2)
circuit1.x(qubit=3)
circuit1.measure_all()
info = execute(circuit1,backend=backend, shots=sample_shot).result().get_counts()
print(info, file=writefile)
print("results end", file=writefile)
print(circuit1.depth(), file=writefile)
print(circuit1, file=writefile)
writefile.close()
# ===== File: /tools/test_files/all_17_or_less/cm152a_212.py =====
from openql import openql as ql
import os
import argparse
def circuit(config_file, new_scheduler='yes', scheduler='ASAP', uniform_sched= 'no', sched_commute = 'yes', mapper='base', moves='no', maptiebreak='random', initial_placement='no', output_dir_name='test_output', optimize='no', measurement=True, log_level='LOG_WARNING'):
curdir = os.path.dirname(__file__)
output_dir = os.path.join(curdir, output_dir_name)
ql.set_option('output_dir', output_dir)
ql.set_option('optimize', optimize)
ql.set_option('scheduler', scheduler)
ql.set_option('scheduler_uniform', uniform_sched)
ql.set_option('mapper', mapper)
ql.set_option('initialplace', initial_placement)
ql.set_option('log_level', log_level)
ql.set_option('scheduler_post179', new_scheduler)
ql.set_option('scheduler_commute', sched_commute)
ql.set_option('mapusemoves', moves)
ql.set_option('maptiebreak', maptiebreak)
config_fn = os.path.join(curdir, config_file)
# platform = ql.Platform('platform_none', config_fn)
platform = ql.Platform('starmon', config_fn)
sweep_points = [1,2]
num_circuits = 1
num_qubits = 12
p = ql.Program('cm152a_212', platform, num_qubits)
p.set_sweep_points(sweep_points)
k = ql.Kernel('cm152a_212', platform, num_qubits)
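    # The long gate sequence below appears to be a {H, T, Tdag, CNOT}
    # decomposition of the benchmark's Toffoli gates, emitted block by block.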
k.gate('h',[0])
k.gate('t',[1])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,1])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('tdag',[11])
k.gate('cnot',[1,11])
k.gate('tdag',[1])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('cnot',[11,1])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[10])
k.gate('t',[4])
k.gate('t',[3])
k.gate('t',[10])
k.gate('cnot',[3,4])
k.gate('cnot',[10,3])
k.gate('cnot',[4,10])
k.gate('tdag',[3])
k.gate('cnot',[4,3])
k.gate('tdag',[4])
k.gate('tdag',[3])
k.gate('t',[10])
k.gate('cnot',[10,3])
k.gate('cnot',[4,10])
k.gate('cnot',[3,4])
k.gate('h',[10])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[0])
k.gate('t',[1])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,1])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('tdag',[11])
k.gate('cnot',[1,11])
k.gate('tdag',[1])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('cnot',[11,1])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[10])
k.gate('t',[4])
k.gate('t',[3])
k.gate('t',[10])
k.gate('cnot',[3,4])
k.gate('cnot',[10,3])
k.gate('cnot',[4,10])
k.gate('tdag',[3])
k.gate('cnot',[4,3])
k.gate('tdag',[4])
k.gate('tdag',[3])
k.gate('t',[10])
k.gate('cnot',[10,3])
k.gate('cnot',[4,10])
k.gate('cnot',[3,4])
k.gate('h',[10])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[0])
k.gate('t',[1])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,1])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('tdag',[11])
k.gate('cnot',[1,11])
k.gate('tdag',[1])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('cnot',[11,1])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[10])
k.gate('t',[8])
k.gate('t',[3])
k.gate('t',[10])
k.gate('cnot',[3,8])
k.gate('cnot',[10,3])
k.gate('cnot',[8,10])
k.gate('tdag',[3])
k.gate('cnot',[8,3])
k.gate('tdag',[8])
k.gate('tdag',[3])
k.gate('t',[10])
k.gate('cnot',[10,3])
k.gate('cnot',[8,10])
k.gate('cnot',[3,8])
k.gate('h',[10])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[0])
k.gate('t',[1])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,1])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('tdag',[11])
k.gate('cnot',[1,11])
k.gate('tdag',[1])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('cnot',[11,1])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[10])
k.gate('t',[8])
k.gate('t',[3])
k.gate('t',[10])
k.gate('cnot',[3,8])
k.gate('cnot',[10,3])
k.gate('cnot',[8,10])
k.gate('tdag',[3])
k.gate('cnot',[8,3])
k.gate('tdag',[8])
k.gate('tdag',[3])
k.gate('t',[10])
k.gate('cnot',[10,3])
k.gate('cnot',[8,10])
k.gate('cnot',[3,8])
k.gate('h',[10])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[0])
k.gate('t',[2])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,2])
k.gate('cnot',[0,11])
k.gate('cnot',[2,0])
k.gate('tdag',[11])
k.gate('cnot',[2,11])
k.gate('tdag',[2])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[2,0])
k.gate('cnot',[11,2])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[8])
k.gate('t',[3])
k.gate('t',[11])
k.gate('cnot',[3,8])
k.gate('cnot',[11,3])
k.gate('cnot',[8,11])
k.gate('tdag',[3])
k.gate('cnot',[8,3])
k.gate('tdag',[8])
k.gate('tdag',[3])
k.gate('t',[11])
k.gate('cnot',[11,3])
k.gate('cnot',[8,11])
k.gate('cnot',[3,8])
k.gate('h',[11])
k.gate('h',[0])
k.gate('t',[2])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,2])
k.gate('cnot',[0,11])
k.gate('cnot',[2,0])
k.gate('tdag',[11])
k.gate('cnot',[2,11])
k.gate('tdag',[2])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[2,0])
k.gate('cnot',[11,2])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[8])
k.gate('t',[3])
k.gate('t',[11])
k.gate('cnot',[3,8])
k.gate('cnot',[11,3])
k.gate('cnot',[8,11])
k.gate('tdag',[3])
k.gate('cnot',[8,3])
k.gate('tdag',[8])
k.gate('tdag',[3])
k.gate('t',[11])
k.gate('cnot',[11,3])
k.gate('cnot',[8,11])
k.gate('cnot',[3,8])
k.gate('h',[11])
k.gate('x',[2])
k.gate('h',[0])
k.gate('t',[1])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,1])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('tdag',[11])
k.gate('cnot',[1,11])
k.gate('tdag',[1])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('cnot',[11,1])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[10])
k.gate('t',[6])
k.gate('t',[3])
k.gate('t',[10])
k.gate('cnot',[3,6])
k.gate('cnot',[10,3])
k.gate('cnot',[6,10])
k.gate('tdag',[3])
k.gate('cnot',[6,3])
k.gate('tdag',[6])
k.gate('tdag',[3])
k.gate('t',[10])
k.gate('cnot',[10,3])
k.gate('cnot',[6,10])
k.gate('cnot',[3,6])
k.gate('h',[10])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[0])
k.gate('t',[1])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,1])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('tdag',[11])
k.gate('cnot',[1,11])
k.gate('tdag',[1])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('cnot',[11,1])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[10])
k.gate('t',[6])
k.gate('t',[3])
k.gate('t',[10])
k.gate('cnot',[3,6])
k.gate('cnot',[10,3])
k.gate('cnot',[6,10])
k.gate('tdag',[3])
k.gate('cnot',[6,3])
k.gate('tdag',[6])
k.gate('tdag',[3])
k.gate('t',[10])
k.gate('cnot',[10,3])
k.gate('cnot',[6,10])
k.gate('cnot',[3,6])
k.gate('h',[10])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[0])
k.gate('t',[1])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,1])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('tdag',[11])
k.gate('cnot',[1,11])
k.gate('tdag',[1])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('cnot',[11,1])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,2])
k.gate('cnot',[11,9])
k.gate('cnot',[2,11])
k.gate('tdag',[9])
k.gate('cnot',[2,9])
k.gate('tdag',[2])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[2,11])
k.gate('cnot',[9,2])
k.gate('h',[11])
k.gate('h',[9])
k.gate('t',[10])
k.gate('t',[3])
k.gate('t',[9])
k.gate('cnot',[3,10])
k.gate('cnot',[9,3])
k.gate('cnot',[10,9])
k.gate('tdag',[3])
k.gate('cnot',[10,3])
k.gate('tdag',[10])
k.gate('tdag',[3])
k.gate('t',[9])
k.gate('cnot',[9,3])
k.gate('cnot',[10,9])
k.gate('cnot',[3,10])
k.gate('h',[9])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,2])
k.gate('cnot',[11,9])
k.gate('cnot',[2,11])
k.gate('tdag',[9])
k.gate('cnot',[2,9])
k.gate('tdag',[2])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[2,11])
k.gate('cnot',[9,2])
k.gate('h',[11])
k.gate('h',[0])
k.gate('t',[1])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,1])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('tdag',[11])
k.gate('cnot',[1,11])
k.gate('tdag',[1])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('cnot',[11,1])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,2])
k.gate('cnot',[11,9])
k.gate('cnot',[2,11])
k.gate('tdag',[9])
k.gate('cnot',[2,9])
k.gate('tdag',[2])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[2,11])
k.gate('cnot',[9,2])
k.gate('h',[11])
k.gate('h',[9])
k.gate('t',[10])
k.gate('t',[3])
k.gate('t',[9])
k.gate('cnot',[3,10])
k.gate('cnot',[9,3])
k.gate('cnot',[10,9])
k.gate('tdag',[3])
k.gate('cnot',[10,3])
k.gate('tdag',[10])
k.gate('tdag',[3])
k.gate('t',[9])
k.gate('cnot',[9,3])
k.gate('cnot',[10,9])
k.gate('cnot',[3,10])
k.gate('h',[9])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,2])
k.gate('cnot',[11,9])
k.gate('cnot',[2,11])
k.gate('tdag',[9])
k.gate('cnot',[2,9])
k.gate('tdag',[2])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[2,11])
k.gate('cnot',[9,2])
k.gate('h',[11])
k.gate('h',[0])
k.gate('t',[2])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,2])
k.gate('cnot',[0,11])
k.gate('cnot',[2,0])
k.gate('tdag',[11])
k.gate('cnot',[2,11])
k.gate('tdag',[2])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[2,0])
k.gate('cnot',[11,2])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[10])
k.gate('t',[3])
k.gate('t',[11])
k.gate('cnot',[3,10])
k.gate('cnot',[11,3])
k.gate('cnot',[10,11])
k.gate('tdag',[3])
k.gate('cnot',[10,3])
k.gate('tdag',[10])
k.gate('tdag',[3])
k.gate('t',[11])
k.gate('cnot',[11,3])
k.gate('cnot',[10,11])
k.gate('cnot',[3,10])
k.gate('h',[11])
k.gate('h',[0])
k.gate('t',[2])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,2])
k.gate('cnot',[0,11])
k.gate('cnot',[2,0])
k.gate('tdag',[11])
k.gate('cnot',[2,11])
k.gate('tdag',[2])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[2,0])
k.gate('cnot',[11,2])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[10])
k.gate('t',[3])
k.gate('t',[11])
k.gate('cnot',[3,10])
k.gate('cnot',[11,3])
k.gate('cnot',[10,11])
k.gate('tdag',[3])
k.gate('cnot',[10,3])
k.gate('tdag',[10])
k.gate('tdag',[3])
k.gate('t',[11])
k.gate('cnot',[11,3])
k.gate('cnot',[10,11])
k.gate('cnot',[3,10])
k.gate('h',[11])
k.gate('x',[2])
k.gate('x',[3])
k.gate('h',[0])
k.gate('t',[1])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,1])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('tdag',[11])
k.gate('cnot',[1,11])
k.gate('tdag',[1])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('cnot',[11,1])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[10])
k.gate('t',[5])
k.gate('t',[3])
k.gate('t',[10])
k.gate('cnot',[3,5])
k.gate('cnot',[10,3])
k.gate('cnot',[5,10])
k.gate('tdag',[3])
k.gate('cnot',[5,3])
k.gate('tdag',[5])
k.gate('tdag',[3])
k.gate('t',[10])
k.gate('cnot',[10,3])
k.gate('cnot',[5,10])
k.gate('cnot',[3,5])
k.gate('h',[10])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[0])
k.gate('t',[1])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,1])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('tdag',[11])
k.gate('cnot',[1,11])
k.gate('tdag',[1])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('cnot',[11,1])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[10])
k.gate('t',[5])
k.gate('t',[3])
k.gate('t',[10])
k.gate('cnot',[3,5])
k.gate('cnot',[10,3])
k.gate('cnot',[5,10])
k.gate('tdag',[3])
k.gate('cnot',[5,3])
k.gate('tdag',[5])
k.gate('tdag',[3])
k.gate('t',[10])
k.gate('cnot',[10,3])
k.gate('cnot',[5,10])
k.gate('cnot',[3,5])
k.gate('h',[10])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[0])
k.gate('t',[1])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,1])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('tdag',[11])
k.gate('cnot',[1,11])
k.gate('tdag',[1])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('cnot',[11,1])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[10])
k.gate('t',[9])
k.gate('t',[3])
k.gate('t',[10])
k.gate('cnot',[3,9])
k.gate('cnot',[10,3])
k.gate('cnot',[9,10])
k.gate('tdag',[3])
k.gate('cnot',[9,3])
k.gate('tdag',[9])
k.gate('tdag',[3])
k.gate('t',[10])
k.gate('cnot',[10,3])
k.gate('cnot',[9,10])
k.gate('cnot',[3,9])
k.gate('h',[10])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[0])
k.gate('t',[1])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,1])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('tdag',[11])
k.gate('cnot',[1,11])
k.gate('tdag',[1])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('cnot',[11,1])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[10])
k.gate('t',[9])
k.gate('t',[3])
k.gate('t',[10])
k.gate('cnot',[3,9])
k.gate('cnot',[10,3])
k.gate('cnot',[9,10])
k.gate('tdag',[3])
k.gate('cnot',[9,3])
k.gate('tdag',[9])
k.gate('tdag',[3])
k.gate('t',[10])
k.gate('cnot',[10,3])
k.gate('cnot',[9,10])
k.gate('cnot',[3,9])
k.gate('h',[10])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[0])
k.gate('t',[2])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,2])
k.gate('cnot',[0,11])
k.gate('cnot',[2,0])
k.gate('tdag',[11])
k.gate('cnot',[2,11])
k.gate('tdag',[2])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[2,0])
k.gate('cnot',[11,2])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[9])
k.gate('t',[3])
k.gate('t',[11])
k.gate('cnot',[3,9])
k.gate('cnot',[11,3])
k.gate('cnot',[9,11])
k.gate('tdag',[3])
k.gate('cnot',[9,3])
k.gate('tdag',[9])
k.gate('tdag',[3])
k.gate('t',[11])
k.gate('cnot',[11,3])
k.gate('cnot',[9,11])
k.gate('cnot',[3,9])
k.gate('h',[11])
k.gate('h',[0])
k.gate('t',[2])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,2])
k.gate('cnot',[0,11])
k.gate('cnot',[2,0])
k.gate('tdag',[11])
k.gate('cnot',[2,11])
k.gate('tdag',[2])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[2,0])
k.gate('cnot',[11,2])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[9])
k.gate('t',[3])
k.gate('t',[11])
k.gate('cnot',[3,9])
k.gate('cnot',[11,3])
k.gate('cnot',[9,11])
k.gate('tdag',[3])
k.gate('cnot',[9,3])
k.gate('tdag',[9])
k.gate('tdag',[3])
k.gate('t',[11])
k.gate('cnot',[11,3])
k.gate('cnot',[9,11])
k.gate('cnot',[3,9])
k.gate('h',[11])
k.gate('x',[2])
k.gate('h',[0])
k.gate('t',[1])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,1])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('tdag',[11])
k.gate('cnot',[1,11])
k.gate('tdag',[1])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('cnot',[11,1])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[10])
k.gate('t',[7])
k.gate('t',[3])
k.gate('t',[10])
k.gate('cnot',[3,7])
k.gate('cnot',[10,3])
k.gate('cnot',[7,10])
k.gate('tdag',[3])
k.gate('cnot',[7,3])
k.gate('tdag',[7])
k.gate('tdag',[3])
k.gate('t',[10])
k.gate('cnot',[10,3])
k.gate('cnot',[7,10])
k.gate('cnot',[3,7])
k.gate('h',[10])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[0])
k.gate('t',[1])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,1])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('tdag',[11])
k.gate('cnot',[1,11])
k.gate('tdag',[1])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('cnot',[11,1])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[10])
k.gate('t',[7])
k.gate('t',[3])
k.gate('t',[10])
k.gate('cnot',[3,7])
k.gate('cnot',[10,3])
k.gate('cnot',[7,10])
k.gate('tdag',[3])
k.gate('cnot',[7,3])
k.gate('tdag',[7])
k.gate('tdag',[3])
k.gate('t',[10])
k.gate('cnot',[10,3])
k.gate('cnot',[7,10])
k.gate('cnot',[3,7])
k.gate('h',[10])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('x',[1])
k.gate('h',[0])
k.gate('t',[1])
k.gate('t',[10])
k.gate('t',[0])
k.gate('cnot',[10,1])
k.gate('cnot',[0,10])
k.gate('cnot',[1,0])
k.gate('tdag',[10])
k.gate('cnot',[1,10])
k.gate('tdag',[1])
k.gate('tdag',[10])
k.gate('t',[0])
k.gate('cnot',[0,10])
k.gate('cnot',[1,0])
k.gate('cnot',[10,1])
k.gate('h',[0])
k.gate('h',[10])
k.gate('t',[2])
k.gate('t',[9])
k.gate('t',[10])
k.gate('cnot',[9,2])
k.gate('cnot',[10,9])
k.gate('cnot',[2,10])
k.gate('tdag',[9])
k.gate('cnot',[2,9])
k.gate('tdag',[2])
k.gate('tdag',[9])
k.gate('t',[10])
k.gate('cnot',[10,9])
k.gate('cnot',[2,10])
k.gate('cnot',[9,2])
k.gate('h',[10])
k.gate('h',[9])
k.gate('t',[11])
k.gate('t',[3])
k.gate('t',[9])
k.gate('cnot',[3,11])
k.gate('cnot',[9,3])
k.gate('cnot',[11,9])
k.gate('tdag',[3])
k.gate('cnot',[11,3])
k.gate('tdag',[11])
k.gate('tdag',[3])
k.gate('t',[9])
k.gate('cnot',[9,3])
k.gate('cnot',[11,9])
k.gate('cnot',[3,11])
k.gate('h',[9])
k.gate('h',[10])
k.gate('t',[2])
k.gate('t',[9])
k.gate('t',[10])
k.gate('cnot',[9,2])
k.gate('cnot',[10,9])
k.gate('cnot',[2,10])
k.gate('tdag',[9])
k.gate('cnot',[2,9])
k.gate('tdag',[2])
k.gate('tdag',[9])
k.gate('t',[10])
k.gate('cnot',[10,9])
k.gate('cnot',[2,10])
k.gate('cnot',[9,2])
k.gate('h',[10])
k.gate('h',[0])
k.gate('t',[1])
k.gate('t',[10])
k.gate('t',[0])
k.gate('cnot',[10,1])
k.gate('cnot',[0,10])
k.gate('cnot',[1,0])
k.gate('tdag',[10])
k.gate('cnot',[1,10])
k.gate('tdag',[1])
k.gate('tdag',[10])
k.gate('t',[0])
k.gate('cnot',[0,10])
k.gate('cnot',[1,0])
k.gate('cnot',[10,1])
k.gate('h',[0])
k.gate('h',[10])
k.gate('t',[2])
k.gate('t',[9])
k.gate('t',[10])
k.gate('cnot',[9,2])
k.gate('cnot',[10,9])
k.gate('cnot',[2,10])
k.gate('tdag',[9])
k.gate('cnot',[2,9])
k.gate('tdag',[2])
k.gate('tdag',[9])
k.gate('t',[10])
k.gate('cnot',[10,9])
k.gate('cnot',[2,10])
k.gate('cnot',[9,2])
k.gate('h',[10])
k.gate('h',[9])
k.gate('t',[11])
k.gate('t',[3])
k.gate('t',[9])
k.gate('cnot',[3,11])
k.gate('cnot',[9,3])
k.gate('cnot',[11,9])
k.gate('tdag',[3])
k.gate('cnot',[11,3])
k.gate('tdag',[11])
k.gate('tdag',[3])
k.gate('t',[9])
k.gate('cnot',[9,3])
k.gate('cnot',[11,9])
k.gate('cnot',[3,11])
k.gate('h',[9])
k.gate('h',[10])
k.gate('t',[2])
k.gate('t',[9])
k.gate('t',[10])
k.gate('cnot',[9,2])
k.gate('cnot',[10,9])
k.gate('cnot',[2,10])
k.gate('tdag',[9])
k.gate('cnot',[2,9])
k.gate('tdag',[2])
k.gate('tdag',[9])
k.gate('t',[10])
k.gate('cnot',[10,9])
k.gate('cnot',[2,10])
k.gate('cnot',[9,2])
k.gate('h',[10])
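    # Epilogue: optionally measure every qubit, then register the kernel with
    # the program and compile it using the options configured above.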
if measurement:
for q in range(num_qubits):
k.gate('measure', [q])
p.add_kernel(k)
p.compile()
ql.set_option('mapper', 'no')
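# Programmatic usage sketch (illustrative; 'hwconfig.json' is a placeholder for
# a real OpenQL platform configuration file, and the positional arguments mirror
# the CLI defaults parsed below):
#   circuit('hwconfig.json', 'yes', 'ASAP', 'no', 'yes', 'base', 'no', 'random', 'no', 'test_output')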
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='OpenQL compilation of a Quantum Algorithm')
parser.add_argument('config_file', help='Path to the OpenQL configuration file to compile this algorithm')
    parser.add_argument('--new_scheduler', nargs='?', default='yes', help='Use the scheduler defined by Hans (yes or no)')
parser.add_argument('--scheduler', nargs='?', default='ASAP', help='Scheduler specification (ASAP (default), ALAP, ...)')
    parser.add_argument('--uniform_sched', nargs='?', default='no', help='Uniform scheduler activation (yes or no)')
    parser.add_argument('--sched_commute', nargs='?', default='yes', help='Allow two-qubit gates to commute during scheduling (yes or no)')
parser.add_argument('--mapper', nargs='?', default='base', help='Mapper specification (base, minextend, minextendrc)')
    parser.add_argument('--moves', nargs='?', default='no', help='Allow the mapper to use move operations (yes or no)')
    parser.add_argument('--maptiebreak', nargs='?', default='random', help='Tie-breaking policy used by the mapper (default: random)')
parser.add_argument('--initial_placement', nargs='?', default='no', help='Initial placement specification (yes or no)')
    parser.add_argument('--out_dir', nargs='?', default='test_output', help='Output folder for the compilation results')
    # Note: args.measurement is parsed but never forwarded to circuit() below,
    # so this flag currently has no effect.
    parser.add_argument('--measurement', nargs='?', default=True, help='Add measurement of all qubits at the end of the algorithm')
args = parser.parse_args()
try:
circuit(args.config_file, args.new_scheduler, args.scheduler, args.uniform_sched, args.sched_commute, args.mapper, args.moves, args.maptiebreak, args.initial_placement, args.out_dir)
except TypeError:
        print('\nCompiled, but some gate is not defined in the configuration file. \nThe gate will be invoked as-is.')
        raise
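
# Example command line (illustrative; the script name and JSON path are
# placeholders for your local copies):
#   python <script_name>.py config/hardware_config.json --scheduler=ALAP --mapper=minextend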
"[email protected]"
] | |
f5d5c94e264e9b416f9e321c6f92c2a627772d05 | 9b265894d94a46f91ca20aab4fb5ece5d635dd16 | /LPAv/Trabalho 9 - Python/questao1.py | 5fe445972f602478bf68c2a4d9cdbae5b266bf31 | [] | no_license | JosephLevinthal/LPAv | fad4499ec294b3bbec82f73c9989fcf602f65d79 | 73ecf1cb39b550b13be558b80cbd97701ea34fa0 | refs/heads/master | 2021-09-16T11:08:07.828030 | 2018-06-20T02:56:42 | 2018-06-20T02:56:42 | 130,502,754 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 214 | py | import re
stringDeEntrada = input()
while (stringDeEntrada != "####"):
if (re.match("^([0-9]{7,7}-[0-9])$", stringDeEntrada)):
print("SIM")
else:
print("NAO")
stringDeEntrada = input()
| [
"[email protected]"
] | |
faa7bd06c204447f3804c32070026adf782a82f3 | 45243d7b7412814b8cb43fefdf088099e0c93419 | /src/user/migrations/0007_user_uid.py | 090e72b9c2445055597e04d4b1cb18f3a2b3feab | [] | no_license | AlexanderNevarko/drec_stud_site | e08e621c54bdd145709913d4a69c8f089475fcad | 4e176a77bf5b4a20e57e1379581713a6fa6d2d21 | refs/heads/master | 2020-09-21T07:55:46.820999 | 2019-01-14T20:22:50 | 2019-01-14T20:22:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 496 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-09-04 12:42
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('user', '0006_auto_20170809_1308'),
]
operations = [
migrations.AddField(
model_name='user',
name='uid',
field=models.CharField(blank=True, max_length=128, null=True, verbose_name='UID карты'),
),
]
| [
"[email protected]"
] | |
54e37c4b81fec916c8fc8cd7108b3e190dcd4ace | f00699824a8c5def54421ee3cf836ec2cd15d957 | /3/django_1703_day3/app01/urls.py | e7561509abebc6e85ef5e6b1b8c19ae0201bd4be | [] | no_license | ZhiqiWu/student_manage | 9171d78c32d6900b08de9034b9a2f50c9e24d0b8 | da12ebaf4e9d6357cd9f832144ed756a55510433 | refs/heads/master | 2023-03-29T04:56:09.486061 | 2020-01-24T08:11:50 | 2020-01-24T08:11:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 177 | py | from django.conf.urls import include, url
import views
urlpatterns = [
url(r'^$', views.index),
url(r'^zhale/$', views.zhale),
url(r'^tpl/$', views.tpl),
]
| [
"[email protected]"
] | |
2366786f7a3b2ce29c4d743b0ba81791a2013a84 | 3a570384a3fa9c4c7979d33b182556e1c637e9eb | /anwmisc/anw-pyui/Packages/pyui/renderers/unseen.py | 1caadb76f8f683700fdcc1917514579e56fa470d | [] | no_license | colshag/ANW | 56a028af5042db92b5ead641dc542fcb4533344e | 46948d8d18a0639185dd4ffcffde126914991553 | refs/heads/master | 2020-03-27T00:22:49.409109 | 2018-10-27T06:37:04 | 2018-10-27T06:37:04 | 145,618,125 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,479 | py | import PyUnseen
from msgs import *
import pyui.locals
from pyui.renderer3d import Renderer3DBase
from pyui.desktop import getDesktop
messageMap = \
[
WM_KEYDOWN,
WM_KEYUP,
WM_CHAR,
WM_MOUSEMOVE,
WM_LBUTTONDOWN,
WM_LBUTTONUP,
WM_LBUTTONDBLCLK,
WM_RBUTTONDOWN,
WM_RBUTTONUP,
WM_RBUTTONDBLCLK,
WM_MBUTTONDOWN,
WM_MBUTTONUP,
WM_MBUTTONDBLCLK,
WM_MOUSEWHEEL,
WM_CLOSE,
]
mouseMsgs = \
[
WM_MOUSEMOVE,
WM_LBUTTONDOWN,
WM_LBUTTONUP,
WM_LBUTTONDBLCLK,
WM_RBUTTONDOWN,
WM_RBUTTONUP,
WM_RBUTTONDBLCLK,
WM_MBUTTONDOWN,
WM_MBUTTONUP,
WM_MBUTTONDBLCLK,
WM_MOUSEWHEEL,
]
VK_LBUTTON = 0x01
VK_RBUTTON = 0x02
VK_CANCEL = 0x03
VK_MBUTTON = 0x04
VK_BACK = 0x08
VK_TAB = 0x09
VK_CLEAR = 0x0C
VK_RETURN = 0x0D
VK_SHIFT = 0x10
VK_CONTROL = 0x11
VK_MENU = 0x12
VK_PAUSE = 0x13
VK_CAPITAL = 0x14
VK_ESCAPE = 0x1B
VK_SPACE = 0x20
VK_PRIOR = 0x21
VK_NEXT = 0x22
VK_END = 0x23
VK_HOME = 0x24
VK_LEFT = 0x25
VK_UP = 0x26
VK_RIGHT = 0x27
VK_DOWN = 0x28
VK_SELECT = 0x29
VK_PRINT = 0x2A
VK_EXECUTE = 0x2B
VK_SNAPSHOT = 0x2C
VK_INSERT = 0x2D
VK_DELETE = 0x2E
VK_HELP = 0x2F
#/* VK_0 thru VK_9 are the same as ASCII '0' thru '9' (0x30 - 0x39) */
#/* VK_A thru VK_Z are the same as ASCII 'A' thru 'Z' (0x41 - 0x5A) */
VK_LWIN = 0x5B
VK_RWIN = 0x5C
VK_APPS = 0x5D
VK_NUMPAD0 = 0x60
VK_NUMPAD1 = 0x61
VK_NUMPAD2 = 0x62
VK_NUMPAD3 = 0x63
VK_NUMPAD4 = 0x64
VK_NUMPAD5 = 0x65
VK_NUMPAD6 = 0x66
VK_NUMPAD7 = 0x67
VK_NUMPAD8 = 0x68
VK_NUMPAD9 = 0x69
VK_MULTIPLY = 0x6A
VK_ADD = 0x6B
VK_SEPARATOR = 0x6C
VK_SUBTRACT = 0x6D
VK_DECIMAL = 0x6E
VK_DIVIDE = 0x6F
VK_F1 = 0x70
VK_F2 = 0x71
VK_F3 = 0x72
VK_F4 = 0x73
VK_F5 = 0x74
VK_F6 = 0x75
VK_F7 = 0x76
VK_F8 = 0x77
VK_F9 = 0x78
VK_F10 = 0x79
VK_F11 = 0x7A
VK_F12 = 0x7B
VK_F13 = 0x7C
VK_F14 = 0x7D
VK_F15 = 0x7E
VK_F16 = 0x7F
VK_F17 = 0x80
VK_F18 = 0x81
VK_F19 = 0x82
VK_F20 = 0x83
VK_F21 = 0x84
VK_F22 = 0x85
VK_F23 = 0x86
VK_F24 = 0x87
VK_NUMLOCK = 0x90
VK_SCROLL = 0x91
VK_LSHIFT = 0xA0
VK_RSHIFT = 0xA1
VK_LCONTROL = 0xA2
VK_RCONTROL = 0xA3
VK_LMENU = 0xA4
VK_RMENU = 0xA5
DEBUG_KEY = VK_F1
keydown = {}
keystate = [0] * 0x100
debugEnabled = 1
def gotEvent(event, wParam, lParam):
global keydown, keystate, debugEnabled
if event in mouseMsgs:
x = lParam & 0xffff
y = lParam >> 16
#print "Mouse Event: %d (%d,%d)" % (event, x, y)
mods = pyui.locals.MOD_NONE
if event in [WM_LBUTTONDOWN, WM_LBUTTONUP, WM_RBUTTONDOWN, WM_RBUTTONUP, WM_MOUSEMOVE]:
if keystate[VK_SHIFT]:
mods |= pyui.locals.MOD_SHIFT
if keystate[VK_CONTROL]:
mods |= pyui.locals.MOD_CONTROL
if keystate[VK_MENU]:
mods |= pyui.locals.MOD_ALT
if getDesktop():
getDesktop().postUserEvent(event, x, y, wParam, mods)
return
# mods for key events
if event in [WM_CHAR, WM_KEYDOWN, WM_KEYUP]:
mods = pyui.locals.MOD_NONE
if keystate[VK_SHIFT]:
mods |= pyui.locals.MOD_SHIFT
if keystate[VK_CONTROL]:
mods |= pyui.locals.MOD_CONTROL
if keystate[VK_MENU]:
mods |= pyui.locals.MOD_ALT
# This is the handler for character keys.
if event == WM_CHAR:
getDesktop().postUserEvent(pyui.locals.CHAR, 0, 0, chr(wParam), mods)
return
if event == WM_KEYDOWN:
if debugEnabled and (DEBUG_KEY == wParam):
PyUnseen.debug(0)
return
global keydown, keystate
keystate[wParam] += 1
getDesktop().postUserEvent(pyui.locals.KEYDOWN, 0, 0, wParam, mods)
return
if event == WM_KEYUP:
global keydown, keystate
keystate[wParam] = 0
getDesktop().postUserEvent(pyui.locals.KEYUP, 0, 0, wParam, mods)
return
# special event handlers
if event == WM_CLOSE:
getDesktop().postUserEvent(pyui.locals.QUIT, 0, 0, 0)
return
class Unseen(Renderer3DBase):
"""Direct3D Renderer using PyUnseen engine.
"""
name = "PyUnseen"
def __init__(self, w, h, fullscreen, title="Unseen"):
Renderer3DBase.__init__(self, w, h, fullscreen)
PyUnseen.initialize(w, h, gotEvent, messageMap, title )
self.font1 = PyUnseen.createFont( "Arial", 9, 0 )
self.fixedFont = PyUnseen.createFont( "Courier", 7, 0 )
self.populateConstants()
# store the actual height and width surface created (might be less than requested)
#(getDesktop().width, getDesktop().height) = PyUnseen.getDesktopSize()
(w, pyui.locals.TEXT_HEIGHT) = self.getTextSize(" ")
self.images = {}
self.cache = {} # tracks all objects by Handle. useful for debugging
def draw(self, windows):
"""run the python widgets drawing code. This calls describeWindow on any windows
that have changed. The actual drawing is done within PyUnseen.render.
"""
for w in windows:
w.drawWindow(self)
PyUnseen.render()
PyUnseen.messagepump()
self.mustFill = 0
self.dirtyRects = []
def populateConstants(self):
"""Populate pyui.constants with the values from msgs.py which are win32 message types.
"""
pyui.locals.LMOUSEBUTTONDOWN = WM_LBUTTONDOWN
pyui.locals.RMOUSEBUTTONDOWN = WM_RBUTTONDOWN
pyui.locals.MMOUSEBUTTONDOWN = WM_MBUTTONDOWN
pyui.locals.LMOUSEBUTTONUP = WM_LBUTTONUP
pyui.locals.RMOUSEBUTTONUP = WM_RBUTTONUP
pyui.locals.MMOUSEBUTTONUP = WM_MBUTTONUP
pyui.locals.MOUSEMOVE = WM_MOUSEMOVE
pyui.locals.MOUSEWHEEL = WM_MOUSEWHEEL
pyui.locals.QUIT = WM_CLOSE
pyui.locals.LMOUSEDBLCLICK = WM_LBUTTONDBLCLK
pyui.locals.RMOUSEDBLCLICK = WM_RBUTTONDBLCLK
pyui.locals.MMOUSEDBLCLICK = WM_MBUTTONDBLCLK
global keydown
pyui.locals.K_BACKSPACE = VK_BACK
pyui.locals.K_TAB = VK_TAB
pyui.locals.K_RETURN = VK_RETURN
pyui.locals.K_SHIFT = VK_SHIFT
pyui.locals.K_CONTROL = VK_CONTROL
pyui.locals.K_ALT = VK_MENU
pyui.locals.K_ESCAPE = VK_ESCAPE
pyui.locals.K_SPACE = VK_SPACE
pyui.locals.K_PAGEUP = VK_PRIOR
pyui.locals.K_PAGEDOWN = VK_NEXT
pyui.locals.K_END = VK_END
pyui.locals.K_HOME = VK_HOME
pyui.locals.K_LEFT = VK_LEFT
pyui.locals.K_RIGHT = VK_RIGHT
pyui.locals.K_UP = VK_UP
pyui.locals.K_DOWN = VK_DOWN
pyui.locals.K_INSERT = VK_INSERT
pyui.locals.K_DELETE = VK_DELETE
pyui.locals.K_F1 = VK_F1
pyui.locals.K_F2 = VK_F2
pyui.locals.K_F3 = VK_F3
pyui.locals.K_F4 = VK_F4
pyui.locals.K_F5 = VK_F5
pyui.locals.K_F6 = VK_F6
pyui.locals.K_F7 = VK_F7
pyui.locals.K_F8 = VK_F8
pyui.locals.K_F9 = VK_F9
pyui.locals.K_F10 = VK_F10
pyui.locals.K_F11 = VK_F11
pyui.locals.K_F12 = VK_F12
###############################################################################
### PyUnseen interface functions
###############################################################################
def setWindowTitle(self, title=""):
"""Sets the title on the Win32 main window.
"""
return PyUnseen.setWindowTitle(title)
def createWindow(self, title = None):
handle = PyUnseen.createWindow()
self.cache[handle] = "window %s" % title
return handle
def describeWindow(self, handle, drawList):
if not handle:
return
#print "Describing window (%d): %s" % (handle, drawList)
#print "Describing window ", handle
#for d in drawList:
# print d
return PyUnseen.describeWindow(handle, drawList)
def destroyWindow(self, handle):
del self.cache[handle]
return PyUnseen.destroyWindow(handle)
def moveWindow(self, handle, x, y):
return PyUnseen.moveWindow(handle, x, y)
def moveToFront(self, handle):
return PyUnseen.moveToFront(handle)
###############################################################################
### Draw Primitives functions
###############################################################################
def drawRect(self, color, rect):
"""Fills a rectangle with the specified color."""
#skip empty rects
if rect[2] == 0 or rect[3] == 0:
return
self.drawList.append( (pyui.locals.RECT, rect[0], rect[1], rect[2], rect[3], color ) )
def drawText(self, text, pos, color, font = None):
"""Draws the text on the screen in the specified position"""
if font == 'fixed':
font = self.fixedFont
elif font == None:
font = self.font1
self.drawList.append( (pyui.locals.TEXT, pos[0], pos[1], color, font, text))
def drawGradient(self, rect, c1, c2, c3, c4):
"""Draws a gradient rectangle"""
#skip empty rects
if rect[2] == 0 or rect[3] == 0:
return
self.drawList.append( (pyui.locals.GRADIENT, rect[0], rect[1], rect[2], rect[3], c1, c2, c3, c4) )
def drawImage(self, rect, filename, pieceRect = None):
"""Draws an image at a position. NOTE: should take a texture handle"""
#skip empty rects
if rect[2] == 0 or rect[3] == 0:
return
if not self.images.has_key(filename):
self.loadImage(filename)
if not pieceRect:
pieceRect = (0,0,1,1)
self.drawList.append( (pyui.locals.IMAGE, rect[0], rect[1], rect[2], rect[3], self.images[filename], 0,
pieceRect[0], pieceRect[1], pieceRect[2], pieceRect[3]) )
def drawImageRotated(self, rect, filename, rotation=0, textureEffect=0 ):
"""Draws an image at a position. NOTE: should take a texture handle"""
#skip empty rects
if rect[2] == 0 or rect[3] == 0:
return
if not self.images.has_key(filename):
self.loadImage(filename)
self.drawList.append( (pyui.locals.IMAGE, rect[0], rect[1], rect[2], rect[3], self.images[filename], rotation) )
def drawLine(self, x1, y1, x2, y2, color):
self.drawList.append( (pyui.locals.LINE, x1, y1, x2, y2, color) )
def drawView(self, rect, handle):
"""Draws a viewport into a 3d World in the specified rectangle."""
self.drawList.append( (pyui.locals.VIEW, rect[0], rect[1], rect[2], rect[3], handle) )
def loadImage(self, filename, label = None):
if label:
handle = PyUnseen.createTexture(filename)
self.images[label] = handle
else:
handle = PyUnseen.createTexture(filename)
self.images[filename] = handle
def getImageSize(self, filename):
handle = self.images.get(filename)
if not handle:
handle = PyUnseen.createTexture(filename)
return PyUnseen.getTextureSize(handle)
def setClipping(self, rect = None):
"""set the clipping rectangle for the main screen. defaults to clearing the clipping rectangle."""
#self.drawList.append( [pyui.locals.CLIP, (rect[0], rect[1], rect[2], rect[3]) ] )
pass
def quit(self):
print "PyUnseen Quitting."
PyUnseen.destroyFont(self.font1)
PyUnseen.destroyFont(self.fixedFont)
for filename in self.images.keys():
handle = self.images[filename]
PyUnseen.destroyTexture(handle)
self.dumpCache()
PyUnseen.cleanup()
def packColor(self, r, g, b, a = 255):
"""pack the rgb triplet into a color
"""
return (r,g,b,a)
def addRect(self, rect):
"""Dont do dirty rects in 3d!"""
return
def getMustFill(self):
return 0
def getTextSize(self, text, font = None):
if font == 'fixed':
font = self.fixedFont
elif not font:
font = self.font1
return PyUnseen.getTextSize(font, text)
def readTimer(self):
return PyUnseen.getMilliseconds() * 0.001
### 3D interface
def createView(self, world):
"""Create a view object and return the handle to it.
Width and height ignored by PyUnseen
"""
handle = PyUnseen.createView(world)
self.cache[handle] = "view"
return handle
def destroyView(self, viewHandle):
"""Destroy a previously created view object.
"""
del self.cache[viewHandle]
return PyUnseen.destroyView(viewHandle)
def createObject(self, model, info=(0.0,0.0,0.0)):
handle = PyUnseen.createObject(model, info)
self.cache[handle] = model
return handle
def destroyObject(self, objectHandle):
del self.cache[objectHandle]
return PyUnseen.destroyObject(objectHandle)
def createWorld(self):
handle = PyUnseen.createWorld()
self.cache[handle] = "world"
return handle
def destroyWorld(self, worldHandle):
del self.cache[worldHandle]
return PyUnseen.destroyWorld(worldHandle)
def updateWorld(self, worldHandle, interval = None):
return PyUnseen.updateWorld(worldHandle, interval)
def addToWorld(self, worldHandle, objectHandle):
return PyUnseen.addToWorld(worldHandle, objectHandle)
def removeFromWorld(self, worldHandle, objectHandle):
return PyUnseen.removeFromWorld(worldHandle, objectHandle)
def getObjectPos(self, objectHandle):
return PyUnseen.getObjectPos(objectHandle)
def setObjectScale(self, objectHandle, scale):
return PyUnseen.setObjectScale(objectHandle, scale)
def setObjectPos(self, objectHandle, pos):
return PyUnseen.setObjectPos(objectHandle, pos)
def setObjectAnimation(self, objectHandle, animation, onCompleted = None, blendTime = 0.0, loop = 1):
return PyUnseen.setObjectAnimation(objectHandle, animation, onCompleted, blendTime, loop)
def loadAnimation(self, animation):
return PyUnseen.loadAnimation(animation)
def setObjectYPR(self, objectHandle, YPR):
(y,p,r) = YPR
return PyUnseen.setObjectYPR(objectHandle, (y,p,r) )
def getObjectYPR(self, objectHandle):
return PyUnseen.getObjectYPR(objectHandle)
def moveObjectTo(self, objectHandle, location, moveRate, turnRate = 0, onCompleted = None):
return PyUnseen.moveObjectTo(objectHandle, location, moveRate, turnRate, onCompleted)
def moveObject(self, objectHandle, delta, moveRate, turnRate = 0, onCompleted = None):
return PyUnseen.moveObject(objectHandle, delta, moveRate, turnRate, onCompleted)
def rotateObjectTo(self, objectHandle, orientation, turnRate, onCompleted = None):
return PyUnseen.rotateObjectTo(objectHandle, orientation, turnRate, onCompleted)
def rotateObject(self, objectHandle, delta, turnRate, onCompleted = None):
return PyUnseen.rotateObject(objectHandle, delta, turnRate, onCompleted)
def attachObject(self, objectHandle, toObjectHandle, connectionPointName = "", toConnectionPointName = ""):
return PyUnseen.attachObject(objectHandle, toObjectHandle, connectionPointName, toConnectionPointName)
def detachObject(self, objectHandle, fromObjectHandle):
return PyUnseen.detachObject(objectHandle, fromObjectHandle)
def setViewProjectionMode(self, viewHandle, projectionMode):
return PyUnseen.setViewProjectionMode(viewHandle, projectionMode)
def setViewParameters(self, viewHandle, parameters):
return PyUnseen.setViewParameters(viewHandle, parameters)
def setCameraYPR(self, viewHandle, YPR):
return PyUnseen.setCameraYPR(viewHandle, YPR)
def setCameraPos(self, viewHandle, pos):
return PyUnseen.setCameraPos(viewHandle, pos)
def getCameraYPR(self, viewHandle):
return PyUnseen.getCameraYPR(viewHandle)
def getCameraPos(self, viewHandle):
return PyUnseen.getCameraPos(viewHandle)
def getCameraDir(self, viewHandle):
return PyUnseen.getCameraDir(viewHandle)
def moveCamera(self, viewHandle, offset):
return PyUnseen.moveCamera(viewHandle, offset)
def setLightParameters(self, viewHandle, YPR):
return PyUnseen.setLightParameters(viewHandle, YPR)
def getDesktopWindow(self):
return PyUnseen.getDesktopWindow()
def attachView(self, windowHandle, viewHandle):
return PyUnseen.attachView(windowHandle, viewHandle)
def pickView(self, viewHandle, xy):
return PyUnseen.pickView(viewHandle, xy)
def attachController(self, objectHandle, controllerType, boneName):
return PyUnseen.attachController(objectHandle, controllerType, boneName)
def setController(self, controllerHandle, **parms):
return PyUnseen.setController(controllerHandle, parms)
def detachController(self, objectHandle, controllerHandle):
return PyUnseen.detachController(objectHandle, controllerHandle)
def getObjectProjectedPos(self, objectHandle, viewHandle):
return PyUnseen.getObjectProjectedPos(objectHandle, viewHandle)
def getNodeProjectedPos(self, nodeHandle, viewHandle):
return PyUnseen.getNodeProjectedPos(nodeHandle, viewHandle)
def getObjectNode(self, objectHandle, nodeName, iLOD):
return PyUnseen.getObjectNode(objectHandle, nodeName, iLOD)
def createFont(self, fontName, size, flag):
handle = PyUnseen.createFont(fontName, size, flag)
self.cache[handle] = "font %s %s" % ( fontName, size)
return handle
def destroyFont(self, fontHandle):
del self.cache[fontHandle]
return PyUnseen.destroyFont(fontHandle)
def getScreenSize(self):
return PyUnseen.getScreenSize()
def playSound(self, waveFileName, completionCallback = None):
return PyUnseen.playSound(waveFileName, completionCallback)
def stopSound(self, waveFileName):
return PyUnseen.stopSound(waveFileName)
def loadSound(self, waveFileName):
return PyUnseen.loadSound(waveFileName)
def playMusic(self, waveFileName, completionCallback = None):
return PyUnseen.playMusic(waveFileName, completionCallback)
def stopMusic(self, waveFileName):
return PyUnseen.stopMusic(waveFileName)
def loadMusic(self, waveFileName):
return PyUnseen.loadMusic(waveFileName)
def toggleDebugInfo(self):
return PyUnseen.togglePerfInfo()
def setWindowEffect(self, windowHandle, effectName):
return PyUnseen.setWindowViewEffect(windowHandle, effectName)
def createEmptyBody(self, xyz=(0.0,0.0,0.0), label="emptyBody"):
handle= PyUnseen.createEmptyBody(xyz)
self.cache[handle] = label
return handle
def addGeometryNode(self, objectHandle, bone=0):
return PyUnseen.addGeometryNode(objectHandle, bone)
def addGeometryPiece(self, node, iType, info, offset, ypr, effect = "", effectParams = {}):
return PyUnseen.addGeometryPiece(node, iType, info, offset, ypr, effect, effectParams)
def getNodeEffect(self, node, num):
return PyUnseen.getNodeEffect(node, num)
def setEffectParameters(self, effect, parms):
return PyUnseen.setEffectParameters(effect, parms)
def dumpCache(self):
print "====== DUMPING PYUNSEEN CACHE ======"
for k in self.cache.keys():
print "%s> %s" % (k, self.cache[k])
| [
"[email protected]"
] | |
8b50c33b2267b1a35cc4a73a591c173d6eea2280 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_revues.py | f49493dcdf30e1cce36d2723a65a559d1a24960d | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 218 | py |
#calss header
class _REVUES():
def __init__(self,):
self.name = "REVUES"
self.definitions = revue
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['revue']
| [
"[email protected]"
] | |
60847a2a95c5d716ddfcd9d539af72449d1f0591 | aa49120740b051eed9b7199340b371a9831c3050 | /greaterTree.py | 3e539eb3cf0e2a93dd98c58dad666501da74daf5 | [] | no_license | ashutosh-narkar/LeetCode | cd8d75389e1ab730b34ecd860b317b331b1dfa97 | b62862b90886f85c33271b881ac1365871731dcc | refs/heads/master | 2021-05-07T08:37:42.536436 | 2017-11-22T05:18:23 | 2017-11-22T05:18:23 | 109,366,819 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,593 | py | #!/usr/bin/env python
'''
Given a Binary Search Tree (BST), convert it to a Greater Tree such that every key of the original BST is changed
to the original key plus sum of all keys greater than the original key in BST.
Example:
Input: The root of a Binary Search Tree like this:
5
/ \
2 13
Output: The root of a Greater Tree like this:
18
/ \
20 13
Solution:
By leveraging the fact that the tree is a BST, we can find an O(n) solution.
The idea is to traverse BST in reverse inorder.
Reverse inorder traversal of a BST gives us keys in decreasing order.
Before visiting a node, we visit all greater nodes of that node.
While traversing we keep track of sum of keys which is the sum of all the keys greater than the key of current node.
##### Code flow is similar to diameterOfBinaryTree.py ######
'''
# Definition for a binary tree node.
class TreeNode(object):
def __init__(self, x):
self.val = x
self.left = None
self.right = None
def convert_bst(root):
"""
:type root: TreeNode
:rtype: TreeNode
"""
if not root:
return None
# Initialize sum
result = [0]
generate_greater_tree(root, result)
return root
def generate_greater_tree(node, result):
# Base Case
if not node:
return None
# Recur for right subtree
generate_greater_tree(node.right, result)
# Update Sum
node.val += result[0]
result[0] = node.val
# Recur for left subtree
generate_greater_tree(node.left, result)
| [
"[email protected]"
] | |
8a8112c98a32850524e2e947da26f64c30ef2ae3 | 57359cfa7158b284ca5061710a7fe233c76d9cd7 | /utils3d/bbox3d_ops.py | 4228ef72a8e7125eb80c031504550a988641d131 | [
"MIT"
] | permissive | zhuhaipeng-byte/Automatic-As-built-Reconstruction | 5d0cd5b993cb5c273fc1e133651e11c548e795dc | a3d4642f76beb572fdaf37fb832704fa88d054ab | refs/heads/master | 2022-05-27T18:15:34.801783 | 2020-04-10T12:12:27 | 2020-04-10T12:12:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 52,969 | py | # xyz Nov 2018
import numpy as np
import os,sys
import open3d
import numba
import time
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
ROOT_DIR = os.path.dirname(BASE_DIR)
sys.path.append(BASE_DIR)
sys.path.append(ROOT_DIR)
from open3d_util import draw_cus, gen_animation
from geometric_util import Rz as geo_Rz, angle_of_2lines, OBJ_DEF, angle_with_x
DEBUG = True
FRAME_SHOW = 1
POINTS_KEEP_RATE = 1.0
POINTS_SAMPLE_RATE = 1.0
BOX_XSURFACE_COLOR_DIF = False
CYLINDER_RADIUS = 0.02 # paper: 0.04
_cx,_cy,_cz, _sx,_sy,_sz, _yaw = range(7)
SameAngleThs = 0.01 * 6 # 0.01 rad = 0.6 degree
SameDisThs = 1e-3 * 50 # 0.1 mm
def same_a(x,y, threshold=SameDisThs):
same0 = abs(x-y) < threshold
return same0
def dif_rate(v0, v1):
max_v = max(abs(v0), abs(v1))
if max_v==0:
return 0
return 1.0* abs(v1-v0) / max_v
def points_in_scope(points, scope):
# point:[n,3]
# scope:[2,3]
c0 = points >= scope[0:1]
c1 = points <= scope[1:2]
inside = c0.all(1) * c1.all(1)
return inside
def rotate_iou_gpu_eval_standbox(boxes, query_boxes, criterion=-1, device_id=0):
'''
The standard box, need to be converted as yx_zb before apply rotate_iou_gpu_eval
'''
from second.core.non_max_suppression.nms_gpu import rotate_iou_gpu, rotate_iou_gpu_eval
boxes = Bbox3D.convert_to_yx_zb_boxes(boxes)
query_boxes = Bbox3D.convert_to_yx_zb_boxes(query_boxes)
return rotate_iou_gpu_eval(boxes, query_boxes, criterion, device_id)
def corners4_to_mesh2(corners, color=[255,0,0]):
# corners: [n,4,3]
assert corners.ndim == 3
assert corners.shape[1:] == (4,3)
n = corners.shape[0]
triangles = np.array([[[0,1,2], [2,3,0]]]) # [1,2,3]
triangles = np.tile(triangles, [n, 1, 1]) # [n,2,3]
for i in range(n):
triangles[i] += i*4
corners = corners.reshape([-1,3])
triangles = triangles.reshape([-1,3])
mesh = open3d.geometry.TriangleMesh()
mesh.vertices = open3d.utility.Vector3dVector(corners)
mesh.triangles = open3d.utility.Vector3iVector(triangles)
if color == 'random':
color = np.random.sample(3)
mesh.paint_uniform_color(color)
return mesh
def down_sample_points(points0, sample_rate):
n = points0.shape[0]
indices = np.random.choice(n, int(n*sample_rate), replace=False)
points = points0[indices]
return points
def cut_points_roof(points, keep_rate=POINTS_KEEP_RATE, sample_rate=POINTS_SAMPLE_RATE):
if points.shape[0] == 0:
return points
z_min = np.min(points[:,2])
z_max = np.max(points[:,2])
threshold = z_min + (z_max - z_min) * keep_rate
mask = points[:,2] <= threshold
points_cutted = points[mask]
points_cutted = down_sample_points(points_cutted, sample_rate)
return points_cutted
class Bbox3D():
'''
bbox standard: [xc, yc, zc, x_size, y_size, z_size, yaw]
bbox yx_zb : [xc, yc, z_bot, y_size, x_size, z_size, yaw-0.5pi]
All the approaches here (In data generation) are designed for standard boxes, up_axis='Z'.
The original up_axis from SUNCG is 'Y' (cam frame), but is already converted to 'Z' here.
The boxes feed into networ is yx_zb, up_axis='Z'.
'''
_corners_tmp = np.array([ [0,0,0],[1,0,0],[0,1,0],[1,1,0],
[0,0,1],[1,0,1],[0,1,1],[1,1,1]], dtype=np.float)
#
_xneg_vs = [0,2,6,4]
_xpos_vs = [1,3,7,5]
_yneg_vs = [0,1,5,4]
_ypos_vs = [2,3,7,6]
_zneg_vs = [0,1,3,2]
_zpos_vs = [4,5,7,6]
_face_vidxs = np.array([_xneg_vs, _xpos_vs, _yneg_vs, _ypos_vs, _zneg_vs, _zpos_vs])
_lines_vids = np.array([[0,1],[0,2],[1,3],[2,3],
[4,5],[4,6],[5,7],[6,7],
[0,4],[1,5],[2,6],[3,7]] )
_lines_z0_vids = np.array([[0,1],[0,2],[1,3],[2,3]])
# positive x face lines:
_x_pos_lines = [2,6,9,11]
# positive z face lines
_triangles_tmp = np.array( [[0,1,4],[0,2,4],[1,3,5],[2,3,6],
[4,5,0],[4,6,2],[5,7,3],[6,7,2],
[0,4,6],[1,5,7],[2,6,4],[3,7,1]] )
@staticmethod
def convert_from_yx_zb_boxes(boxes):
'''
Input
bbox yx_zb : [xc, yc, z_bot, y_size, x_size, z_size, yaw-0.5pi]
Output
bbox standard: [xc, yc, zc, x_size, y_size, z_size, yaw]
The input is kitti lidar bbox format used in SECOND: x,y,z,w,l,h,orientation
orientation=0: positive x of camera/car = negative lidar y -> car front face neg lidar y
orientation = -pi/2: car face pos x of world -> clock wise rotation is positive
orientation : (-pi,0]
In my standard definition, bbox frame is same as world -> yaw=0. Also clock wise is positive.
yaw = pi/2 is the orientation=0 status for yx_zb format of SECOND.
yaw: (-pi/2,pi/2]
yaw = orientation + pi/2
The output format is the standard format I used in Bbox3D
'''
boxes = boxes.copy().reshape([-1,7])
if boxes.shape[0] == 0:
return boxes
boxes[:,2] += boxes[:,5]*0.5
boxes = boxes[:,[0,1,2,4,3,5,6]]
boxes[:,-1] += np.pi*0.5
# limit in [-pi/2, pi/2]
boxes[:,_yaw] = OBJ_DEF.limit_yaw(boxes[:,_yaw], False)
OBJ_DEF.check_bboxes(boxes, False)
return boxes
@staticmethod
def convert_to_yx_zb_boxes(boxes):
'''
Input
bbox standard
Output
bbox yx_zb
'''
assert boxes.copy().shape[1] == 7
# This should be implemented in data prepration. For ceiling, floor, room,
# temporaly performed here.
#boxes = Bbox3D.define_walls_direction(boxes, 'Z', yx_zb=False, check_thickness=False)
boxes = boxes[:,[0,1,2,4,3,5,6]]
boxes[:,2] = boxes[:,2] - boxes[:,5]*0.5
boxes[:,-1] -= np.pi*0.5
boxes[:,_yaw] = OBJ_DEF.limit_yaw(boxes[:,_yaw], True)
OBJ_DEF.check_bboxes(boxes, True)
return boxes
@staticmethod
def set_yaw_zero(boxes):
'''
For object like ceiling, floor, room, which are symmetry about both x_b and y_b.
Always use yaw==0, length = size along x_r, thickness = size along x_y
yaw is times of pi/2
'''
if boxes.shape[0] == 0:
return boxes
yaws = boxes[:,-1]
assert np.mod( yaws, np.pi/2 ).max() < 0.01
switch_lt = np.abs(yaws / (np.pi/2)).astype(np.int)
size_y = boxes[:,3] * (1-switch_lt) + boxes[:,4] * (switch_lt)
size_x = boxes[:,4] * (1-switch_lt) + boxes[:,3] * (switch_lt)
boxes[:,3] = size_y
boxes[:,4] = size_x
boxes[:,-1] = 0
return boxes
@staticmethod
def boxes_size(boxes, up_axis='Z'):
corners = Bbox3D.bboxes_corners(boxes, up_axis).reshape([-1,3])
xyz_max = corners.max(0)
xyz_min = corners.min(0)
xyz_size = xyz_max - xyz_min
return xyz_size
@staticmethod
def video(pcds):
def rotate_view(vis):
ctr = vis.get_view_control()
ctr.rotate(10, 0)
return False
open3d.visualization.draw_geometries_with_animation_callback(pcds,
rotate_view)
@staticmethod
def draw_points_open3d(points, color=[0,1,1], show=False, points_keep_rate=POINTS_KEEP_RATE, points_sample_rate=POINTS_SAMPLE_RATE):
points = cut_points_roof(points, points_keep_rate, points_sample_rate)
pcl = open3d.geometry.PointCloud()
pcl.points = open3d.utility.Vector3dVector(points[:,0:3])
if points.shape[1] >= 6:
pcl.colors = open3d.utility.Vector3dVector(points[:,3:6])
else:
pcl.paint_uniform_color(color)
if points.shape[1] >= 9:
pcl.normals = open3d.utility.Vector3dVector(points[:,6:9])
if show:
#open3d.draw_geometries([pcl])
draw_cus([pcl])
return pcl
@staticmethod
def draw_points(points, color=[0,1,1], points_keep_rate=POINTS_KEEP_RATE, points_sample_rate=POINTS_SAMPLE_RATE, animation_fn=None, ani_size=None):
pcds = Bbox3D.draw_points_open3d(points, color, show=True, points_keep_rate=points_keep_rate, points_sample_rate=points_sample_rate)
if animation_fn is not None:
gen_animation([pcds], animation_fn, ani_size)
@staticmethod
def draw_points_bboxes(points, gt_boxes0, up_axis, is_yx_zb, labels=None, names=None, lines=None, random_color=True,
points_keep_rate=POINTS_KEEP_RATE, points_sample_rate=POINTS_SAMPLE_RATE, animation_fn=None, ani_size=None, box_colors=None):
'''
points, gt_boxes0, up_axis, is_yx_zb, labels=None, names=None, lines=None)
'''
if points is not None:
pcl = Bbox3D.draw_points_open3d(points, points_keep_rate=points_keep_rate, points_sample_rate=points_sample_rate)
bboxes_lineset_ls = Bbox3D.bboxes_lineset(gt_boxes0, up_axis, is_yx_zb, labels, names, random_color, box_colors)
if lines is not None:
lineset = [Bbox3D.draw_lines_open3d(lines)]
else:
lineset = []
if points is not None:
#open3d.draw_geometries(bboxes_lineset_ls + [pcl] + lineset)
pcds = bboxes_lineset_ls + [pcl] + lineset
else:
#open3d.draw_geometries(bboxes_lineset_ls + lineset)
pcds = bboxes_lineset_ls + lineset
draw_cus(pcds)
if animation_fn is not None:
gen_animation(pcds, animation_fn, ani_size)
@staticmethod
def draw_points_bboxes_mesh(points, gt_boxes0, up_axis, is_yx_zb, labels=None, names=None, lines=None,
points_keep_rate=POINTS_KEEP_RATE, points_sample_rate=POINTS_SAMPLE_RATE, animation_fn=None, ani_size=None, random_color=False, box_colors=None):
mesh = Bbox3D.bboxes_mesh(gt_boxes0, up_axis, is_yx_zb, labels, names, random_color=random_color, box_colors=box_colors)
#Bbox3D.video(mesh)
if points is not None:
pcl = Bbox3D.draw_points_open3d(points, points_keep_rate=points_keep_rate, points_sample_rate=points_sample_rate)
mesh.append(pcl)
draw_cus(mesh)
if animation_fn is not None:
gen_animation(mesh, animation_fn, ani_size)
@staticmethod
def draw_bboxes(gt_boxes0, up_axis, is_yx_zb, labels=None, names=None, random_color=True, highlight_ids=None):
if highlight_ids is not None:
assert labels is None
labels = np.ones([gt_boxes0.shape[0]], dtype=np.int32)
labels[highlight_ids] = 0
bboxes_lineset_ls = Bbox3D.bboxes_lineset(gt_boxes0, up_axis, is_yx_zb, labels, names, random_color=random_color)
draw_cus(bboxes_lineset_ls)
@staticmethod
def bboxes_lineset(gt_boxes0, up_axis, is_yx_zb, labels=None, names=None, random_color=True, colors=None):
from color_list import COLOR_LIST
gt_boxes0 = gt_boxes0.reshape([-1,7])
#gt_boxes0 = np.array([gtb for gtb in gt_boxes0 if gtb[3]>=0])
if colors is not None:
assert colors.shape[0] == gt_boxes0.shape[0]
#else:
# if labels is not None:
# ml = labels.max() + 1
# colors = COLOR_LIST[0:ml]
# print('colors used: {colors}')
gt_boxes1 = gt_boxes0.copy()
if is_yx_zb:
gt_boxes1 = Bbox3D.convert_from_yx_zb_boxes(gt_boxes1)
bn = gt_boxes1.shape[0]
bbox_meshes = []
if bn > COLOR_LIST.shape[0]:
print(f'bn={bn} > {COLOR_LIST.shape[0]}')
random_color = False
for i in range(bn):
box = gt_boxes1[i]
if colors is not None:
color = colors[i]
else:
if random_color:
color = COLOR_LIST[i]
else:
color = [1,0,0]
if labels is not None:
color = COLOR_LIST[labels[i]]
bbox_meshes.append( Bbox3D.get_one_bbox(box, up_axis, color=color) )
if bn > 0:
bboxes_lineset = bbox_meshes[0]
for i in range(1, bn):
bboxes_lineset += bbox_meshes[i]
else:
bboxes_lineset = None
#if names is not None:
# print("names:", names)
#print('boxes:\n', gt_boxes1)
if bn > 0:
out = [bboxes_lineset]
if FRAME_SHOW == 1:
mesh_frame = open3d.geometry.TriangleMesh.create_coordinate_frame(size = 0.6, origin = [0,0,0])
out = out + [mesh_frame]
elif FRAME_SHOW == 2:
mesh_frame = open3d.create_mesh_coordinate_frame(size = 0.6, origin = gt_boxes1[0,0:3])
out = out + [mesh_frame]
return out
else:
return []
@staticmethod
def bboxes_mesh(boxes0, up_axis, is_yx_zb, labels=None, names=None, random_color=False, box_colors=None):
from color_list import COLOR_LIST
assert boxes0.ndim == 2
if boxes0.shape[0] == 0:
return []
corners = Bbox3D.bboxes_corners(boxes0, up_axis, is_yx_zb)
faces_corners = np.take(corners, Bbox3D._face_vidxs, axis=1)
n = boxes0.shape[0]
mesh = []
if box_colors is not None:
colors = box_colors
else:
if labels is None or random_color:
colors = COLOR_LIST[0:n]
else:
colors = COLOR_LIST[labels]
for i in range(n):
mesh_i = corners4_to_mesh2(faces_corners[i].reshape([-1,4,3]), colors[i])
mesh.append( mesh_i)
return mesh
@staticmethod
def draw_bboxes_mesh(boxes0, up_axis, is_yx_zb, labels=None, names=None):
mesh = Bbox3D.bboxes_mesh(boxes0, up_axis, is_yx_zb, labels, names)
draw_cus(mesh)
@staticmethod
def draw_points_lines(points, lines, color=[0,0,0], show=False):
pcl = Bbox3D.draw_points_open3d(points)
line_set = Bbox3D.draw_lines_open3d(lines, color)
draw_cus([pcl, line_set])
@staticmethod
def draw_lines_open3d(lines, color=[0,0,0], show=False):
'''
lines: [n,2,3]
'''
assert lines.ndim == 3
nl = lines.shape[0]
assert lines.shape[1] == 2
if lines.shape[2] == 2:
lines = np.concatenate([lines, np.zeros([nl,2,1])], 2)
assert lines.shape[2]==3
lines = lines.reshape([-1,3])
line_set = open3d.LineSet()
line_set.points = open3d.utility.Vector3dVector(lines)
lines_vids = np.arange(lines.shape[0]).reshape([-1,2])
line_set.lines = open3d.Vector2iVector(lines_vids)
colors = [color for i in range(nl)]
line_set.colors = open3d.utility.Vector3dVector(colors)
if show:
draw_cus([line_set])
#open3d.draw_geometries([line_set])
return line_set
@staticmethod
def get_one_bbox(bbox, up_axis, plyfn=None, color=[1,0,0]):
return Bbox3D.get_1bbox_mesh(bbox, up_axis, plyfn, color)
#return Bbox3D.get_1bbox_lineset(bbox, up_axis, plyfn, color)
@staticmethod
def get_1bbox_mesh(bbox, up_axis, plyfn=None, color=[1,0,0], radius=CYLINDER_RADIUS):
assert bbox.shape == (7,)
corners = Bbox3D.bbox_corners(bbox, up_axis)
lines = np.take(corners, Bbox3D._lines_vids, axis=0)
centroids = lines.mean(1)
#angles = angle_with_x(directions[:,:2])
directions = lines[:,1,:]-lines[:,0,:]
heights = np.linalg.norm(directions,axis=1)
directions = directions/heights.reshape([-1,1])
mesh = []
for i in range(12):
cylinder_i = open3d.geometry.TriangleMesh.create_cylinder(radius=radius, height=heights[i])
cylinder_i.paint_uniform_color(color)
transformation = np.identity(4)
transformation[:3,3] = centroids[i]
transformation[:3,2] = directions[i]
cylinder_i.transform(transformation)
mesh.append(cylinder_i)
cm = mesh[0]
for i in range(1,12):
cm += mesh[i]
return cm
def get_1bbox_lineset(bbox, up_axis, plyfn=None, color=[1,0,0]):
'''
only one box
'''
assert bbox.shape == (7,)
corners = Bbox3D.bbox_corners(bbox, up_axis)
colors = [color for i in range(len(Bbox3D._lines_vids))]
if BOX_XSURFACE_COLOR_DIF:
for i in Bbox3D._x_pos_lines:
colors[i] = [0,0,1]
line_set = open3d.LineSet()
line_set.points = open3d.utility.Vector3dVector(corners)
line_set.lines = open3d.Vector2iVector(Bbox3D._lines_vids)
line_set.colors = open3d.utility.Vector3dVector(colors)
if plyfn!=None:
Bbox3D.save_bbox_ply(plyfn, bbox, up_axis, color)
#print('bbox:\n',bbox)
#print('corners:\n',corners)
#mesh_frame = open3d.create_mesh_coordinate_frame(size = 1.0, origin = np.mean(corners, 0))
#open3d.draw_geometries([line_set, mesh_frame])
return line_set
@staticmethod
def save_bboxes_ply(plyfn, bboxes, up_axis, color=[1,0,0]):
for i,box in enumerate(bboxes):
tmp1, tmp2 = os.path.splitext(plyfn)
plyfn_i = '%s_%d%s'%(tmp1, i, tmp2)
Bbox3D.save_bbox_ply(plyfn_i, box, up_axis, color)
@staticmethod
def save_bbox_ply(plyfn, bbox, up_axis, color=[1,0,0]):
from plyfile import PlyData, PlyElement
#*************************************
corners = Bbox3D.bbox_corners(bbox, up_axis)
lines = np.array(Bbox3D._lines_vids)
colors = np.array([color for i in range(lines.shape[0])])
#*************************************
num_vertex = corners.shape[0]
vertex = np.zeros( shape=(num_vertex) ).astype([('x', 'f8'), ('y', 'f8'),('z', 'f8')])
for i in range(num_vertex):
vertex[i] = ( corners[i,0], corners[i,1], corners[i,2] )
el_vertex = PlyElement.describe(vertex,'vertex')
#*************************************
edge = np.zeros( shape=(lines.shape[0]) ).astype(
dtype=[('vertex1', 'i4'), ('vertex2','i4'),
('red', 'u1'), ('green', 'u1'),('blue', 'u1')])
num_line = lines.shape[0]
for i in range(num_line):
edge[i] = ( lines[i,0], lines[i,1], colors[i,0], colors[i,1], colors[i,2] )
el_edge = PlyElement.describe(edge,'edge')
dirname = os.path.dirname(plyfn)
if not os.path.exists(dirname):
os.makedirs(dirname)
PlyData([el_vertex, el_edge],text=True).write(plyfn)
print('write %s ok'%(plyfn))
@staticmethod
def Unused_draw_bbox_open3d_mesh(bbox, up_axis, color=[1,0,0], plyfn=None):
'''
box_min: [3]
box_max: [3]
'''
assert bbox.shape == (7,)
corners = Bbox3D.bbox_corners(bbox, up_axis)
mesh = open3d.TriangleMesh()
mesh.vertices = open3d.utility.Vector3dVector(corners)
mesh.triangles = open3d.Vector3iVector(Bbox3D._triangles_tmp)
mesh.paint_uniform_color(color)
#open3d.draw_geometries([mesh])
if plyfn is not None:
open3d.write_triangle_mesh(plyfn, mesh,write_ascii=True)
return mesh
@staticmethod
def bbox_from_minmax(bbox_min_max):
bmin = np.array(bbox_min_max['min'])
bmax = np.array(bbox_min_max['max'])
centroid = (bmin + bmax) / 2.0
lwh = bmax - bmin
rotation = np.array([0])
bbox = np.concatenate([centroid, lwh, rotation])
return bbox
@staticmethod
def bbox_corners(bbox, up_axis):
'''
for yaw, clock wise is positive.
In Geou.Rz, anticlock wise is positive. But by:
corners = (np.matmul(R, (corners-bsize*0.5).T )).T + bsize*0.5
do not use R.transpose(), it is changed to clock wise.
'''
assert bbox.shape == (7,)
cx,cy,cz, sx,sy,sz, yaw = bbox
centroid = bbox[0:3]
bsize = bbox[3:6]
## corners aligned
corners = np.array([[0,0,0],[1,0,0],[0,1,0],[1,1,0],
[0,0,1],[1,0,1],[0,1,1],[1,1,1]], dtype=np.float)
corners[:,0] *= bsize[0]
corners[:,1] *= bsize[1]
corners[:,2] *= bsize[2]
# rotate corners
if yaw!=0:
R = Bbox3D.get_yaw_R(bbox, up_axis)
corners = (np.matmul(R, (corners-bsize*0.5).T )).T + bsize*0.5
corners = corners.astype(bbox.dtype)
zero = centroid - bsize*0.5
corners += zero
return corners
@staticmethod
def bboxes_corners(bboxes, up_axis, is_yx_zb=False):
assert bboxes.ndim==2 and bboxes.shape[1]==7
if is_yx_zb:
bboxes = Bbox3D.convert_from_yx_zb_boxes(bboxes)
corners = np.array( [Bbox3D.bbox_corners(box, up_axis) for box in bboxes], dtype=np.float32 )
return corners
@staticmethod
def bboxes_centroid_lines(bboxes, cen_axis, up_axis):
'''
in:
bboxes: [n,7]
axis: 'X'/'Y'/'Z'
up_axis: 'Y'/'Z'
out:
centroid_lines: [n, 2,3]
'''
if bboxes.shape[0] == 0:
return np.empty([0,2,3])
corners = Bbox3D.bboxes_corners(bboxes, up_axis)
if cen_axis == 'X':
neg_vs = Bbox3D._xneg_vs
pos_vs = Bbox3D._xpos_vs
elif cen_axis == 'Y':
neg_vs = Bbox3D._yneg_vs
pos_vs = Bbox3D._ypos_vs
elif cen_axis == 'Z':
neg_vs = Bbox3D._zneg_vs
pos_vs = Bbox3D._zpos_vs
else:
raise NotImplementedError
negc = corners[:,neg_vs].mean(1, keepdims=True)
posc = corners[:,pos_vs].mean(1, keepdims=True)
centroid_lines = np.concatenate([negc, posc], 1)
return centroid_lines
@staticmethod
def gen_object_for_revit(bboxes, is_yx_zb, labels):
'''
Object type for revit:
x_corner0, y_corner0, x_corner1, y_corner1, z_centroid, thickness, height, + label, wall_id
'''
if is_yx_zb:
bboxes = Bbox3D.convert_from_yx_zb_boxes(bboxes)
centroid_lines = Bbox3D.bboxes_centroid_lines(bboxes, 'X', 'Z')
n = bboxes.shape[0]
bboxes_new = np.zeros([n,9])
centroid_lines_1 = centroid_lines[:,:,0:2].reshape([n,-1])
bboxes_new[:,0:4] = centroid_lines_1
bboxes_new[:,[4,5,6]] = bboxes[:,[2,4,5]]
bboxes_new[:,7] = labels.squeeze()
bboxes_new[:,8] = 0
wall_ids = np.where(labels == 1)[0]
other_ids = np.where(labels != 1)[0]
walls = bboxes_new[wall_ids]
other_bims = bboxes_new[other_ids]
other_bims[:,0:7] = bboxes[other_ids]
dif = bboxes[wall_ids, 0:3].reshape([1,-1,3]) - bboxes[other_ids, 0:3].reshape(-1,1,3)
dis = np.linalg.norm(dif, axis=2)
wall_ids = dis.argmin(axis=1)
other_bims[:,8] = wall_ids
bims = np.concatenate([walls, other_bims], 0)
return bims
@staticmethod
def bboxes_corners_xz_central_surface(bboxes, up_axis='Z', is_yx_zb=False):
'''
in:
bboxes: [n,7]
axis: 'X'/'Y'/'Z'
up_axis: 'Y'/'Z'
out:
zpos_corners: [n, 2,3]
zneg_corners: [n, 2,3]
'''
if bboxes.shape[0] == 0:
return np.empty([0,2,3]), np.empty([0,2,3])
corners = Bbox3D.bboxes_corners(bboxes, up_axis, is_yx_zb)
cen_axis = 'Y'
if cen_axis == 'X':
neg_vs = Bbox3D._xneg_vs
pos_vs = Bbox3D._xpos_vs
elif cen_axis == 'Y':
neg_vs = Bbox3D._yneg_vs
pos_vs = Bbox3D._ypos_vs
elif cen_axis == 'Z':
neg_vs = Bbox3D._zneg_vs
pos_vs = Bbox3D._zpos_vs
else:
raise NotImplementedError
negc = corners[:,neg_vs]
posc = corners[:,pos_vs]
cen_corners = (negc + posc)/2.0
zneg_corners = cen_corners[:,[0,1],:]
zpos_corners = cen_corners[:,[2,3],:]
return zneg_corners, zpos_corners
@staticmethod
def point_in_box(points, bboxes, up_axis='Z'):
cen_lines_x = Bbox3D.bboxes_centroid_lines(bboxes, 'X', up_axis)
cen_lines_y = Bbox3D.bboxes_centroid_lines(bboxes, 'Y', up_axis)
dis_x = vertical_dis_points_lines(points, cen_lines_x)
y_inside = dis_x < bboxes[:,4]
import pdb; pdb.set_trace() # XXX BREAKPOINT
pass
@staticmethod
def bbox_face_centroids(bbox, up_axis):
corners = Bbox3D.bbox_corners(bbox, up_axis)
faces = []
for i in range(6):
vidxs = Bbox3D._face_vidxs[i]
face_i = np.mean(corners[vidxs, :], 0, keepdims=True)
faces.append(face_i)
faces = np.concatenate(faces, 0)
return faces
@staticmethod
def get_yaw_R(bbox, up_axis):
yaw = bbox[_yaw]
import geometric_util as Geou
if up_axis=='Y':
R = Geou.Ry(yaw)
elif up_axis == 'Z':
R = Geou.Rz(yaw)
else:
raise NotImplementedError
return R
@staticmethod
def merge_two_bbox(bbox0, bbox1, up_axis):
yaw0 = bbox0[_yaw]
yaw1 = bbox1[_yaw]
assert abs(yaw0-yaw1) < SameAngleThs
yaw = (yaw0+yaw1)/2.0
centroid_new = (bbox0[0:3] + bbox1[0:3])*0.5
tmp = bbox0.copy()
tmp[-1] = 0
corners0 = Bbox3D.bbox_corners(tmp, up_axis) - centroid_new
tmp = bbox1.copy()
tmp[-1] = 0
corners1 = Bbox3D.bbox_corners(tmp, up_axis) - centroid_new
corners_new = np.maximum(corners0, corners1) * Bbox3D._corners_tmp
corners_new += np.minimum(corners0, corners1) * (1-Bbox3D._corners_tmp)
sx = corners_new[1,0] - corners_new[0,0]
sy = corners_new[2,1] - corners_new[0,1]
sz = corners_new[-1,2] - corners_new[0,2]
cx,cy,cz = centroid_new
bbox_new = np.array([cx,cy,cz, sx,sy,sz, yaw])
return bbox_new
@staticmethod
def define_walls_direction(boxes, up_axis, yx_zb, check_thickness=False):
show = False
if show:
box_ls0 = np.concatenate([bx.reshape([-1,7]) for bx in boxes], 0)
box_ls0[:,0] += 20
#Bbox3D.draw_bboxes(box_ls0, up_axis, False)
bn = len(boxes)
for i in range(bn):
boxes[i] = Bbox3D.define_wall_direction(boxes[i], up_axis, yx_zb, check_thickness)
boxes[i][-1] = OBJ_DEF.limit_yaw(boxes[i][-1], yx_zb)
if show:
box_ls1 = np.concatenate([bx.reshape([-1,7]) for bx in boxes], 0)
#indices = np.where(np.abs(box_ls1[:,-1]) > 0.1)[0]
#box_ls0 = box_ls0[indices, :]
#box_ls1 = box_ls1[indices, :]
box_ls1 = np.concatenate([box_ls0, box_ls1], 0)
Bbox3D.draw_bboxes(box_ls1, up_axis, False)
return boxes
@staticmethod
def define_wall_direction(box, up_axis, yx_zb, check_thickness):
'''
bbox standard: [xc, yc, zc, x_size, y_size, z_size, yaw]
up_axis='Z', make always x_size > y_size, y_size is thickness
(1) x_size > y_size, no modify
(2) x_size < y_size, switch x_size and y_size, yaw += pi/2
up_axis='Y', make always x_size > z_size, z_size is thickness
(1) x_size > z_size, no modify
(2) x_size < z_size, switch x_size and z_size, yaw += pi/2
yaw (-pi/2, pi/2]
'''
assert box.shape == (7,)
assert up_axis == 'Z'
assert yx_zb == False, "the rule for yx_zb is different"
if up_axis == 'Y':
_up = 2+3 # z is thickness dim
if up_axis == 'Z':
_up = 1+3 # y is thickness dim
yaw0 = box[_yaw]
if box[_sx] < box[_up]:
tmp = box[_sx].copy()
box[_sx] = box[_up].copy()
box[_up] = tmp
is_increase = int(yaw0<0)*2 -1
box[_yaw] = yaw0 + np.pi * 0.5 * is_increase
pass
#if not box[3] >= box[_up]:
# Bbox3D.draw_bboxes(box, 'Z', False)
# import pdb; pdb.set_trace() # XXX BREAKPOINT
# pass
assert box[3] >= box[_up]
if check_thickness:
assert box[_up] < 0.3 # normally thickness is small
return box
@staticmethod
def line_surface_intersection(line, surface, crop):
v0,v1 = line
v01 = v1-v0
idx, value = surface
if v01[idx]==0:
# no intersection between line and surface
return line, False
k = (value-v0[idx]) / (v01[idx])
if k<0 or k>1:
# intersection is out of range of the line
return line, False
intersection = v0 + v01 * k
inversed = int(v0[idx] > v1[idx])
if crop == 'min':
line[0+inversed] = intersection
else:
line[1-inversed] = intersection
return line, True
@staticmethod
def points_in_bbox(points, bboxes):
'''
Input:
points:[m,3]
bbox standard: [n,7] [xc, yc, zc, x_size, y_size, z_size, yaw]
'''
from second.core.box_np_ops import center_to_corner_box3d, corner_to_surfaces_3d
from second.core.geometry import points_in_convex_polygon_3d_jit
assert points.ndim == 2 and points.shape[1]==3 and bboxes.ndim == 2 and bboxes.shape[1]==7
origin = [0.5,0.5,0]
h_axis = 2
bboxes = Bbox3D.convert_to_yx_zb_boxes(bboxes.copy())
bbox_corners = center_to_corner_box3d(
bboxes[:, :3], bboxes[:, 3:6], bboxes[:, 6], origin=origin, axis=h_axis)
surfaces = corner_to_surfaces_3d(bbox_corners)
point_masks = points_in_convex_polygon_3d_jit(points[:, :3], surfaces)
show = False
if show and DEBUG:
print(point_masks)
Bbox3D.draw_points_bboxes(points, bboxes, 'Z', is_yx_zb=True)
return point_masks
@staticmethod
def detect_intersection_corners(bbox0, bboxes_others, up_axis):
'''
Find if the start or end of bbox0 is inside of bboxes_others.
Do not care if the inner points of bbox0 is inside of bboxes_others.
can be used to find if x begin or end should be freezed
'''
assert up_axis == 'Z'
assert bbox0.shape == (7,)
assert bboxes_others.ndim == 2
assert bboxes_others.shape[1] == 7
corners0 = Bbox3D.bbox_corners(bbox0, up_axis)
xneg_corners0 = np.mean(corners0[Bbox3D._xneg_vs], 0, keepdims=True)
xpos_corners0 = np.mean(corners0[Bbox3D._xpos_vs], 0, keepdims=True)
direction = np.expand_dims(bbox0[0:3], 0) - xneg_corners0
direction = direction / np.linalg.norm(direction)
offset = direction * 4e-3
# corners on the negative direction of x
xneg_corners = [xneg_corners0 + offset*i for i in range(10)]
xneg_corners = np.concatenate(xneg_corners, 0)
# corners on the positive direction of x
xpos_corners = [xpos_corners0 - offset*i for i in range(10)]
xpos_corners = np.concatenate(xpos_corners, 0)
bboxes_others = bboxes_others.copy()
#bboxes_others[:,3:6] += 1e-2
neg_mask = Bbox3D.points_in_bbox(xneg_corners, bboxes_others)
neg_mask = np.any(neg_mask, 0)
pos_mask = Bbox3D.points_in_bbox(xpos_corners, bboxes_others)
pos_mask = np.any(pos_mask, 0)
neg_intersec = np.any(neg_mask)
pos_intersec = np.any(pos_mask)
dif_yaw = np.abs(bbox0[-1] - bboxes_others[:,-1])>1e-1
x_intersec = []
if np.any(neg_mask):
x_intersec.append(np.where(neg_mask)[0][0])
if not dif_yaw[x_intersec[0]]:
x_intersec[0] = -1
else:
x_intersec.append(-1)
if np.any(pos_mask):
x_intersec.append(np.where(pos_mask)[0][0])
if not dif_yaw[x_intersec[1]]:
x_intersec[1] = -1
else:
x_intersec.append(-1)
show = DEBUG and False
if show:
all_boxes = np.concatenate([bboxes_others, np.expand_dims(bbox0,0)], 0)
labels = np.array([0]*bboxes_others.shape[0] + [1])
Bbox3D.draw_points_bboxes(xpos_corners, all_boxes, up_axis, is_yx_zb=False, labels=labels)
return x_intersec, np.concatenate([xneg_corners0, xpos_corners0], 0)
@staticmethod
def detect_all_intersection_corners(bboxes, up_axis, scene_scope=None):
'''
intersec_corners_idx: [n][2]
the intersection index
intersec_corners: [n,2,3]
'''
bn = bboxes.shape[0]
intersec_corners_idx = []
intersec_corners = []
for i in range(bn):
bboxes_others = bboxes[ [j for j in range(bn) if j!=i] ]
itsc_i, xcorners_i = Bbox3D.detect_intersection_corners(bboxes[i], bboxes_others, up_axis)
itsc_i = [d+int(d>=i) for d in itsc_i]
if scene_scope is not None:
# check if the intersec_corners are inside scene_scope
is_insides = points_in_scope(xcorners_i, scene_scope)
if itsc_i[0]>=0 and not is_insides[0]:
itsc_i[0] = -1
if itsc_i[1]>=0 and (not is_insides[1]):
itsc_i[1] = -1
intersec_corners_idx.append( itsc_i )
intersec_corners.append(np.expand_dims(xcorners_i,0))
if len(intersec_corners)>0:
intersec_corners = np.concatenate(intersec_corners, 0)
intersec_corners_idx = np.array(intersec_corners_idx)
return intersec_corners_idx, intersec_corners
@staticmethod
def crop_bbox_by_points(bbox0, points0, points_aug0, up_axis, intersec_corners0=None):
'''
Rotate to make yaw=0 firstly
(1) Use points_aug0 to constrain size_x and size_z
(2) Use points0 to constrain size_y (the thickness)
'''
if points_aug0.shape[0] < 10:
# no points inside the box, rm it
return None
bbox0 = bbox0.reshape([7])
assert up_axis == 'Z'
from geometric_util import Rz
crop_thickness = False
centroid0 = np.expand_dims( bbox0[0:3], 0 )
rz = Rz(bbox0[-1])
def set_yaw0(ps0):
return np.matmul(ps0[:,:3] - centroid0, rz)
# make yaw = 0
points1 = set_yaw0(points0)
points_aug1 = set_yaw0(points_aug0)
#(1) Use points_aug1 to crop x axis
xyz_min_new = np.min(points_aug1, 0)
xyz_max_new = np.max(points_aug1, 0)
#(2) Use intersec_corners0 to freeze x axis, in order to keep unseen intersection corners
if intersec_corners0 is not None:
if intersec_corners0[0] >= 0:
xyz_min_new[0] = -bbox0[3]
#set_yaw0( intersec_corners0[0] ).reshape([3])[0]
if intersec_corners0[1] >= 0:
xyz_max_new[0] = bbox0[3] # set_yaw0(intersec_corners0[1]).reshape([3])[0]
#(3) Use points1 to crop y axis (thickness)
if crop_thickness:
if points1.shape[0] > 0:
xyz_min1 = np.min(points1, 0)
xyz_max1 = np.max(points1, 0)
xyz_min_new[1] = xyz_min1[1]
xyz_max_new[1] = xyz_max1[1]
else:
# there is no point inside, make thickness=0
# and wall is close to points_aug1
sign = np.sign(xyz_min_new[1])
xyz_min_new[1] = xyz_max_new[1] = bbox0[4] * 0.5 * sign
xyz_min_new = np.maximum(xyz_min_new, -bbox0[3:6]*0.5)
xyz_max_new = np.minimum(xyz_max_new, bbox0[3:6]*0.5)
centroid_new_0 = (xyz_min_new + xyz_max_new) / 2.0
size_new = np.maximum( xyz_max_new - xyz_min_new, 0)
#size_new = np.minimum( size_new, bbox0[3:6] )
if not crop_thickness:
centroid_new_0[1] = 0
size_new[1] = bbox0[4]
centroid_new = np.matmul(centroid_new_0, rz.T) + centroid0.reshape([3])
bbox_new = np.concatenate([centroid_new, size_new, bbox0[-1:]])
# do not crop along z
bbox_new[2] = bbox0[2]
bbox_new[5] = bbox0[5]
show = False
if show and DEBUG:
      bboxes = np.concatenate([bbox0.reshape([1,7]), bbox_new.reshape([1,7])], 0)
Bbox3D.draw_points_bboxes(points_aug0, bboxes, up_axis='Z', is_yx_zb=False)
return bbox_new.reshape([1,7])
@staticmethod
def line_intersection_2d(line0, line1, must_on0=False, must_on1=False,
min_angle=0):
'''
line0: [2,2]
line1: [2,2]
    must_on0: the intersection must lie within the extent of line0 (no extension)
    must_on1: the intersection must lie within the extent of line1 (no extension)
    out: [2]
    v01 = p1 - p0
    v23 = p3 - p2
    intersection: p0 + v01*k0 = p2 + v23*k1
      => [v01, -v23] [k0; k1] = p2 - p0
    the intersection lies between p0 and p1 iff 0 <= k0 <= 1
    the intersection lies between p2 and p3 iff 0 <= k1 <= 1
'''
assert (line0.shape == (2,2) and line1.shape == (2,2))
#(line0.shape == (2,3) and line1.shape == (2,3))
dim = line0.shape[1]
p0,p1 = line0
p2,p3 = line1
v01 = p1-p0
v23 = p3-p2
v01v23 = np.concatenate([v01.reshape([2,1]), (-1)*v23.reshape([2,1])], 1)
p2sp0 = (p2-p0).reshape([2,1])
try:
inv_vov1 = np.linalg.inv(v01v23)
K = np.matmul(inv_vov1, p2sp0)
if must_on0 and (K[0]>1 or K[0]<0):
return np.array([np.nan]*2)
if must_on1 and (K[1]>1 or K[1]<0):
return np.array([np.nan]*2)
intersec = p0 + v01 * K[0]
intersec_ = p2 + v23 * K[1]
assert np.linalg.norm(intersec - intersec_) < 1e-5, f'{intersec} \n{intersec_}'
direc0 = (line0[1] - line0[0]).reshape([1,2])
direc1 = (line1[1] - line1[0]).reshape([1,2])
angle = angle_of_2lines(direc0, direc1, scope_id=1)[0]
angle = np.abs(angle)
show = False
if show and DEBUG:
print(f'K:{K}\nangle:{angle}')
lines_show = np.concatenate([np.expand_dims(line0,0), np.expand_dims(line1,0)],0)
points_show = np.array([[intersec[0], intersec[1], 0]])
Bbox3D.draw_points_lines(points_show, lines_show)
if angle > min_angle:
return intersec
else:
return np.array([np.nan]*2)
except np.linalg.LinAlgError:
return np.array([np.nan]*2)
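  # Worked example (hypothetical values): line0 = (0,0)->(2,0) and
  # line1 = (1,-1)->(1,1) give v01=(2,0), v23=(0,2), K=[0.5, 0.5] and the
  # intersection (1,0); parallel segments make v01v23 singular, so the
  # except branch returns [nan, nan].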
@staticmethod
def refine_intersection_x(bbox0, side, bbox1, up_axis):
assert up_axis == 'Z'
xfc0 = Bbox3D.bbox_face_centroids(bbox0, up_axis)[0:2,0:2]
xfc1 = Bbox3D.bbox_face_centroids(bbox1, up_axis)[0:2,0:2]
intersec = Bbox3D.line_intersection_2d(xfc0, xfc1)
    if np.any(np.isnan(intersec)):  # line_intersection_2d returns [nan, nan] when there is no intersection
#bbox1[2] += 0.2
Bbox3D.draw_bboxes(np.concatenate([bbox0.reshape([-1,7]), bbox1.reshape([-1,7])]), up_axis, False)
import pdb; pdb.set_trace() # XXX BREAKPOINT
pass
dis = np.linalg.norm(intersec - bbox0[0:2])
# make fully overlap at the intersection area
dis += bbox1[4]*0.5
crop_value = bbox0[3]*0.5 - dis
return crop_value
@staticmethod
def crop_bbox_size(bbox0, axis, values):
    '''
    bbox0: [7]   axis: 'X', 'Y' or 'Z'
    values: [2], the lengths to crop from the negative / positive ends along axis
    '''
from geometric_util import Rz
bbox1 = bbox0.reshape([7])
centroid0 = bbox1[0:3]
rz = Rz(bbox1[-1])
dim = {'X':0, 'Y':1, 'Z':2}[axis]
#def set_yaw0(ps0):
# return np.matmul(ps0[:,:3] - centroid0, rz)
centroid_new = np.array([0,0,0.0])
centroid_new[dim] = values[0]*0.5 - values[1]*0.5
centroid_new = np.matmul(centroid_new, rz.T) + centroid0
bbox1[0:3] = centroid_new
bbox1[dim+3] = bbox1[dim+3] - values[0] - values[1]
return bbox1
@staticmethod
def boxcorners_in_boxes(anchors, gt_boxes, up_axis):
'''
anchors:[na,7]
gt_boxes:[ng,7]
'''
assert anchors.ndim == gt_boxes.ndim == 2
ng = gt_boxes.shape[0]
na = anchors.shape[0]
gt_corners = Bbox3D.bboxes_corners(gt_boxes, up_axis).reshape([-1,3]) # [ng*8,3]
inside_mask = Bbox3D.points_in_bbox(gt_corners, anchors) # [ng,8,na]
inside_mask = inside_mask.reshape([ng,8,na])
return inside_mask
@staticmethod
def bbox_bv_similiarity(anchors, gt_boxes, is_yx_zb, use_croped_gt=False):
    '''
    Output the bird-view overlap between anchors and gt_boxes:
      overlaps: [na, ng]
    '''
record_t = False
if record_t: t0 = time.time()
assert anchors.ndim == gt_boxes.ndim == 2
assert anchors.shape[1] == gt_boxes.shape[1] == 7
up_axis = 'Z'
ng = gt_boxes.shape[0]
na = anchors.shape[0]
assert ng>0
anchors = anchors.copy()
gt_boxes = gt_boxes.copy()
if is_yx_zb:
anchors = Bbox3D.convert_from_yx_zb_boxes(anchors)
gt_boxes = Bbox3D.convert_from_yx_zb_boxes(gt_boxes)
if record_t: print(f'A {time.time() - t0}')
# (1) get all the intersec_corners: gt_corners inside anchors
gt_corners = Bbox3D.bboxes_corners(gt_boxes, up_axis) # [ng*8,3]
if record_t: print(f'B0 {time.time() - t0}')
inside_mask = Bbox3D.points_in_bbox(gt_corners.reshape([-1,3]), anchors) # [ng,8,na]
if record_t: print(f'B1 {time.time() - t0}')
inside_mask = inside_mask.T
inside_mask = inside_mask.reshape([na,ng,8])
any_inside_mask = np.any(inside_mask, 2)
is_total_inside = np.all(inside_mask, 2)
if record_t: print(f'B {time.time() - t0}')
# (2) get all the lines of each bbox
a_corners = Bbox3D.bboxes_corners(anchors, up_axis) # [ng*8,3]
gt_lines = np.take(gt_corners, Bbox3D._lines_z0_vids, axis=1)
a_lines = np.take(a_corners, Bbox3D._lines_z0_vids, axis=1)
#Bbox3D.draw_lines_open3d(gt_lines.reshape([-1,2,3]), show=True)
#Bbox3D.draw_lines_open3d(a_lines.reshape([-1,2,3]), show=True)
if record_t: print(f'C {time.time() - t0}')
# (3) get all the line intersections
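    # brute force: one 2-D segment intersection test per
    # (anchor, gt box, edge, edge) combination, i.e. na*ng*4*4 calls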
l2d_intersec0 = np.zeros([na, ng, 4, 4, 2])
for i in range(na):
for j in range(ng):
for k in range(4):
for l in range(4):
l2d_intersec0[i,j,k,l] = Bbox3D.line_intersection_2d(gt_lines[j,k,:,0:2], a_lines[i,l,:,0:2], True, True)
#if False and DEBUG and ( not np.isnan(l2d_intersec0[i,j,k,l][0])):
# boxes_show = np.concatenate([gt_boxes[j:j+1], anchors[i:i+1]],0)
# lines_show = np.concatenate([gt_lines[np.newaxis,j,k], a_lines[np.newaxis,i,l]], 0)
# lines_show[:,:,2] = 1
# intersec_show = np.expand_dims(np.concatenate([l2d_intersec0[i,j,k,l], np.ones(1)], 0), 0)
# Bbox3D.draw_points_bboxes(intersec_show, boxes_show, 'Z', False, lines=lines_show)
# pass
#pass
l2d_intersec_mask0 = np.logical_not(np.isnan(l2d_intersec0[...,0]))
l2d_intersec_mask1 = np.any(l2d_intersec_mask0, (2,3))
# (4) No intersection: check if totally contained
valid_gt_mask = np.logical_or(any_inside_mask, l2d_intersec_mask1)
if record_t: print(f'D {time.time() - t0}')
# (5) Collect union_corners: all the intersections and gt_corners inside anchors
ids0, ids1 = np.where(valid_gt_mask)
another_box_ids = np.concatenate([np.expand_dims(ids0,1), np.expand_dims(ids1,1)],1)
nis = another_box_ids.shape[0]
overlaps = np.zeros([na,ng], dtype=np.float32)
croped_gt_boxes = np.empty([na,ng,7], dtype=np.float32)
croped_gt_boxes.fill(None)
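    # fill(None) casts to NaN in a float array; entries stay NaN wherever no
    # overlap is found below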
for i in range(nis):
aidx, gidx = another_box_ids[i]
gt_box_i = gt_boxes[gidx]
l2d_intersec_i = l2d_intersec0[aidx, gidx].reshape([-1,2])
l2d_intersec_i = l2d_intersec_i[ np.logical_not(np.isnan(l2d_intersec_i[:,0])) ]
inside_corners = gt_corners[gidx][inside_mask[aidx, gidx]]
inside_corners = inside_corners.reshape([-1,3])[:,0:2]
union_corners = np.concatenate([l2d_intersec_i, inside_corners], 0)
# (6) Calculate the scope of union_corners
centroid_i = gt_box_i[0:2]
rz = geo_Rz(gt_box_i[-1])[0:2,0:2]
# rotate all union_corners to the gt_box_i frame
union_corners_gtf0 = np.matmul(union_corners-centroid_i, rz)
union_corners_gtf = union_corners_gtf0 + centroid_i
xy_min = union_corners_gtf0.min(0)
xy_max = union_corners_gtf0.max(0)
sx,sy = xy_scope = xy_max - xy_min
overlap_i = np.sum(xy_scope)
overlaps[aidx, gidx] = overlap_i
# (7) get cropped gt_boxes
if use_croped_gt:
centroid_new_i = (xy_min+xy_max)/2
cx,cy = centroid_new_i = np.matmul(centroid_new_i, rz.T) + centroid_i
croped_gtbox_i = np.array([cx,cy, gt_box_i[2], sx,sy,gt_box_i[5], gt_box_i[6]])
if is_yx_zb:
croped_gt_boxes[aidx, gidx] = Bbox3D.convert_to_yx_zb_boxes(croped_gtbox_i.reshape([1,7]))[0]
else:
croped_gt_boxes[aidx, gidx] = croped_gtbox_i
if DEBUG and False:
# before rotation
boxes_show0 = np.concatenate([gt_boxes[gidx:gidx+1], anchors[aidx:aidx+1]], 0)
points_show0 = np.concatenate([union_corners, np.zeros([union_corners.shape[0],1])], 1)
Bbox3D.draw_points_bboxes(points_show0, boxes_show0, 'Z', False)
# after rotation to gt_frame
boxes_show1 = gt_boxes[gidx:gidx+1].copy()
boxes_show1[:,-1] -= gt_box_i[-1]
points_show1 = np.concatenate([union_corners_gtf, np.zeros([union_corners_gtf.shape[0],1])], 1)
Bbox3D.draw_points_bboxes(points_show1, boxes_show1, 'Z', False)
# croped box
boxes_show2 = np.concatenate([boxes_show0, np.expand_dims(croped_gtbox_i,0)], 0)
labels_show2 = np.array([1,1,0])
Bbox3D.draw_points_bboxes(points_show0, boxes_show2, 'Z', False, labels=labels_show2)
import pdb; pdb.set_trace() # XXX BREAKPOINT
pass
if record_t: print(f'E {time.time() - t0}')
return overlaps, croped_gt_boxes
@staticmethod
def cenline_intersection_2boxes(box0, box1, check_same_height):
    '''
    box0: [7], box1: [7]
    Detect the intersection of two boxes in the xy plane via their centroid lines.
    intersection: [1,3], the intersection position
    on_box_corners: [1,2], whether the intersection lies near a corner of box0 / box1
    '''
assert box0.shape == box1.shape == (7,)
corner_dis_threshold = 1.5 # times of thickness
cenline0 = Bbox3D.bboxes_centroid_lines(box0.reshape([1,7]), cen_axis='X', up_axis='Z')[0]
cenline1 = Bbox3D.bboxes_centroid_lines(box1.reshape([1,7]), cen_axis='X', up_axis='Z')[0]
intersec_2d = Bbox3D.line_intersection_2d(cenline0[:,0:2], cenline1[:,0:2], True, True,
min_angle = 10. * np.pi/180)
if not np.isnan(intersec_2d[0]):
dis_box_ends_0 = np.linalg.norm(intersec_2d - cenline0[:,0:2], axis=1).min()
dis_box_ends_1 = np.linalg.norm(intersec_2d - cenline1[:,0:2], axis=1).min()
dis_box_ends = np.array([dis_box_ends_0, dis_box_ends_1])
thickness = np.array([box0[4], box1[4]])
on_box_corners = (dis_box_ends < thickness * corner_dis_threshold).reshape([1,2]).astype(np.int32)
else:
on_box_corners = np.array([[-1, -1]])
intersec_3d = np.concatenate([intersec_2d, cenline0[0,2:3]], 0).reshape([1,3])
if check_same_height and cenline0[0,2] != cenline1[1,2] and not np.isnan(intersec_3d[0,1]):
boxes = np.concatenate([box0.reshape([1,7]), box1.reshape([1,7])], 0)
Bbox3D.draw_bboxes(boxes, 'Z', False)
import pdb; pdb.set_trace() # XXX BREAKPOINT
      assert False, "merging two walls with different heights is not implemented"
show = False
if show:
if np.isnan(intersec_3d[0,0]):
print('\n\tno intersection')
else:
print(intersec_3d)
print(on_box_corners)
box_show = np.concatenate([box0.reshape([1,7]), box1.reshape([1,7])],0)
Bbox3D.draw_points_bboxes(intersec_3d, box_show, 'Z', False)
return intersec_3d, on_box_corners
@staticmethod
def cenline_intersection(box0, boxes_other, check_same_height):
    '''
    box0: [7], boxes_other: [n,7]
    '''
assert box0.shape == (7,)
assert boxes_other.shape[1] == 7
n = boxes_other.shape[0]
intersections = []
on_box_corners = []
for i in range(n):
intersection_i, on_box_corners_i = Bbox3D.cenline_intersection_2boxes(box0, boxes_other[i], check_same_height)
intersections.append(intersection_i)
on_box_corners.append(on_box_corners_i)
intersections = np.concatenate(intersections, 0)
on_box_corners = np.concatenate(on_box_corners, 0)
return intersections, on_box_corners
@staticmethod
def all_intersections_by_cenline(boxes, check_same_height, not_on_corners=False, only_on_corners=False, x_size_expand=0.08, show_res=False):
    '''
    boxes: [n,7]
    Returns, for each box, the intersections of its centroid line with the
    centroid lines of all the other boxes.
    '''
assert not_on_corners * only_on_corners == 0
boxes = boxes.copy()
boxes[:,3] += x_size_expand
n = boxes.shape[0]
intersections = [np.zeros(shape=(0,3), dtype=np.float32)] * n
on_box_corners = [np.zeros(shape=(0), dtype=np.int32)] * n
another_box_ids = [np.zeros(shape=(0), dtype=np.int32)] * n
for i in range(n-1):
intersections_i, on_box_corners_i = Bbox3D.cenline_intersection(boxes[i], boxes[i+1:], check_same_height)
# extract the valid intersections
mask_i = np.logical_not( np.isnan(intersections_i[:,0]) )
idx_i = np.where(mask_i)[0]
inters_i = intersections_i[idx_i]
on_box_c_i = on_box_corners_i[idx_i]
idx_i_org = idx_i + i + 1
# append all the intersections for box i, while i as the first box
intersections[i] = np.concatenate([intersections[i], inters_i], 0)
on_box_corners[i] = np.concatenate([on_box_corners[i], on_box_c_i[:,0]], 0)
another_box_ids[i] = np.concatenate([another_box_ids[i], idx_i_org], 0)
# append all the intersections for the second boxes, while i as the first box
for j in range(idx_i_org.shape[0]):
idx_j = idx_i_org[j]
intersections[idx_j] = np.concatenate([intersections[idx_j], inters_i[j:j+1]], 0)
on_box_corners[idx_j] = np.concatenate([on_box_corners[idx_j], on_box_c_i[j,1:2]], 0)
another_box_ids[idx_j] = np.concatenate([another_box_ids[idx_j], np.array([i])], 0)
if not_on_corners or only_on_corners:
for i in range(n):
if not_on_corners:
# keep all the intersections not on box corner
mask_c_i = on_box_corners[i] == 0
else:
# keep all the intersections on box corner
mask_c_i = on_box_corners[i] == 1
intersections[i] = intersections[i][mask_c_i]
on_box_corners[i] = on_box_corners[i][mask_c_i]
another_box_ids[i] = another_box_ids[i][mask_c_i]
# filter repeat intersections
for i in range(n):
m = intersections[i].shape[0]
if m<2:
continue
keep_mask = np.array([True]*m)
for j in range(m-1):
dis_j = intersections[i][j:j+1] - intersections[i][j+1:]
dis_j = np.linalg.norm(dis_j,axis=1)
same_mask_j = dis_j < 4e-2
if np.any(same_mask_j):
keep_mask[j] = False
k = np.where(same_mask_j)[0] + j + 1
intersections[i][k] = (intersections[i][j] + intersections[i][k])/2
# set 0 if the two on_box_corners are 0 and 1
on_box_corners[i][k] *= on_box_corners[i][j]
intersections[i] = intersections[i][keep_mask]
on_box_corners[i] = on_box_corners[i][keep_mask]
another_box_ids[i] = another_box_ids[i][keep_mask]
    show = show_res and not_on_corners
    if show:
      all_inters = np.concatenate(intersections, 0)
      Bbox3D.draw_points_bboxes(all_inters, boxes, 'Z', False)
    elif show_res:
      # debug: show the intersections of each box one by one
      for i in range(n):
        show_boxes = boxes.copy()
        show_boxes[:,2] -= 1
        show_boxes = np.concatenate([show_boxes, boxes[i:i+1]], 0)
        print(on_box_corners[i])
        print(intersections[i])
        if intersections[i].shape[0] == 0:
          Bbox3D.draw_bboxes(show_boxes, 'Z', False)
        else:
          Bbox3D.draw_points_bboxes(intersections[i], show_boxes, 'Z', False)
    return intersections
@staticmethod
def split_wall_by_centroid_intersections(box0, cen_intersecs):
'''
box0: [7]
cen_intersecs: [k,3]
'''
assert cen_intersecs.ndim == 2
assert box0.shape == (7,)
k = cen_intersecs.shape[0]
new_walls = box0.reshape([1,7])
for i in range(k):
for j in range(new_walls.shape[0]):
w = new_walls[j]
# cen_intersecs[i] is on one of wall in new_walls
new_walls_i = Bbox3D.split_wall_by_one_centroid_intersection(w, cen_intersecs[i])
if new_walls_i.shape[0] != 0:
new_walls = np.concatenate([new_walls[:j], new_walls[j+1:], new_walls_i], 0)
break
return new_walls
@staticmethod
def split_wall_by_one_centroid_intersection(box0, cen_intersec, offset_half_thickness=True):
'''
box0: [7]
cen_intersec: [3]
'''
box0 = box0.reshape([1,7])
cen_intersec = cen_intersec.reshape([1,3])
cenline0 = Bbox3D.bboxes_centroid_lines(box0, cen_axis='X', up_axis='Z')[0]
    # check whether cen_intersec lies inside box0: the dot product of the unit
    # vectors from the two centroid-line endpoints to the intersection is
    # negative exactly when the point sits between the endpoints
    dirs = cen_intersec - cenline0
    cos_dirs = np.sum(dirs[0]/np.linalg.norm(dirs[0]) * dirs[1]/np.linalg.norm(dirs[1]))
    is_inside = cos_dirs < 0
if not is_inside:
return np.zeros(shape=[0,7], dtype=np.float32)
new_centroids = (cenline0 + cen_intersec)/2.0
x_sizes = np.linalg.norm(cenline0 - cen_intersec, axis=1)
new_boxes = np.concatenate([box0, box0], 0)
new_boxes[:,0:3] = new_centroids
new_boxes[:,3] = x_sizes
if offset_half_thickness:
thickness_offset = 0.06 * 0.5
new_boxes[:,3] += thickness_offset
tmp = cen_intersec - cenline0
tmp = tmp / np.linalg.norm(tmp, axis=1, keepdims=True)
centroid_offset = tmp * thickness_offset
new_boxes[:,0:3] += centroid_offset
show = False
if show:
show_boxes = np.concatenate([box0, new_boxes], 0)
show_boxes[1,2] += 2
show_boxes[2,2] += 2.2
Bbox3D.draw_points_bboxes(cen_intersec, show_boxes, 'Z', False)
return new_boxes
def review_bbox_format():
# bbox standard: [xc, yc, zc, x_size, y_size, z_size, yaw]
# x_size > y_size,
  # yaw: (-pi/2, pi/2], clockwise is the positive direction, following the convention in SECOND
bbox0 = np.array([
[1,2,1, 5, 0.5, 1, 0],
[1,2,1, 5, 0.5, 1.5, np.pi/2.0*0.5],
[1,2,1, 5, 0.5, 0.5, -np.pi/2.0*0.5],
])
print(f'{bbox0}')
Bbox3D.draw_bboxes(bbox0, 'Z', is_yx_zb=False)
# bbox yx_zb : [xc, yc, z_bot, y_size, x_size, z_size, yaw-0.5pi]
bbox1 = Bbox3D.convert_to_yx_zb_boxes(bbox0)
print(f'{bbox1}')
Bbox3D.draw_bboxes(bbox1, 'Z', is_yx_zb=True)
bbox2 = Bbox3D.convert_from_yx_zb_boxes(bbox1)
print(f'{bbox2}')
Bbox3D.draw_bboxes(bbox2, 'Z', is_yx_zb=False)
def show_bboxes():
house_name = '0004d52d1aeeb8ae6de39d6bd993e992'
boxes_fn = f'/home/z/SUNCG/suncg_v1/parsed/{house_name}/object_bbox/wall.txt'
bboxes = np.loadtxt(boxes_fn)
Bbox3D.draw_bboxes(bboxes, 'Y', False)
def test_merge_walls():
wall_fn = '/home/z/SUNCG/suncg_v1/parsed/0004d52d1aeeb8ae6de39d6bd993e992/object_bbox/wall.txt'
wall_bboxes = np.loadtxt(wall_fn)
#wall_bboxes = wall_bboxes[[0,2]]
bbox0 = wall_bboxes[0]
bbox1 = wall_bboxes[1]
#merged = Bbox3D.merge_2same_walls(bbox0, bbox1)
merged = Bbox3D.merge_2close_walls(bbox0, bbox1)
print(bbox0)
print(bbox1)
print(merged)
w_n = wall_bboxes.shape[0]
show_all = True
show_one_by_one = False
if show_all:
Bbox3D.draw_bboxes(wall_bboxes, 'Y', False)
if show_one_by_one:
for i in range(w_n):
print(i)
wall_bboxes[i,1] += 1
Bbox3D.draw_bboxes(wall_bboxes, 'Y', False)
wall_bboxes[i,1] -= 1
import pdb; pdb.set_trace() # XXX BREAKPOINT
pass
def test_draw():
box = np.array([[0,0,0, 2,1,2, 0]])
#Bbox3D.draw_bboxes_mesh(box, 'Z', False)
Bbox3D.draw_bboxes(box, 'Z', False )
if __name__ == '__main__':
#test_merge_walls()
#show_bboxes()
#review_bbox_format()
test_draw()
| [
"[email protected]"
] | |
cdc220273a6eb070856db02eee22e9766e3d7d72 | 45c142c3e3dc8d3211a86c77385ecfdd10d28fb9 | /dstore/engine/procedures/mi_RestoreDefaultValues_Ad_pb2.py | c4d2eabded23befbbf6f8898c1164f8aef205f9d | [] | no_license | dstore-io/dstore-sdk-python | 945d64995c8892af18fab26c90117245abec64a4 | 8494d12ac77c3c3cc6dd59026407ef514ad179fc | refs/heads/master | 2020-06-14T13:07:08.181547 | 2017-01-26T11:19:39 | 2017-01-26T11:19:39 | 75,177,794 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | true | 12,973 | py | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: dstore/engine/procedures/mi_RestoreDefaultValues_Ad.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from dstore import values_pb2 as dstore_dot_values__pb2
from dstore.engine import engine_pb2 as dstore_dot_engine_dot_engine__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='dstore/engine/procedures/mi_RestoreDefaultValues_Ad.proto',
package='dstore.engine.mi_RestoreDefaultValues_Ad',
syntax='proto3',
serialized_pb=_b('\n9dstore/engine/procedures/mi_RestoreDefaultValues_Ad.proto\x12(dstore.engine.mi_RestoreDefaultValues_Ad\x1a\x13\x64store/values.proto\x1a\x1a\x64store/engine/engine.proto\"\xc1\x05\n\nParameters\x12\x45\n!create_sup_adm_person_with_passwd\x18\x01 \x01(\x0b\x32\x1a.dstore.values.StringValue\x12/\n&create_sup_adm_person_with_passwd_null\x18\xe9\x07 \x01(\x08\x12@\n\x1bskip_tables_for_development\x18\x02 \x01(\x0b\x32\x1b.dstore.values.BooleanValue\x12)\n skip_tables_for_development_null\x18\xea\x07 \x01(\x08\x12\x44\n generate_item_test_data_scenario\x18\x03 \x01(\x0b\x32\x1a.dstore.values.StringValue\x12.\n%generate_item_test_data_scenario_null\x18\xeb\x07 \x01(\x08\x12\x44\n generate_pers_test_data_scenario\x18\x04 \x01(\x0b\x32\x1a.dstore.values.StringValue\x12.\n%generate_pers_test_data_scenario_null\x18\xec\x07 \x01(\x08\x12\x42\n\x1egener_forum_test_data_scenario\x18\x05 \x01(\x0b\x32\x1a.dstore.values.StringValue\x12,\n#gener_forum_test_data_scenario_null\x18\xed\x07 \x01(\x08\x12\x42\n\x1egener_order_test_data_scenario\x18\x06 \x01(\x0b\x32\x1a.dstore.values.StringValue\x12,\n#gener_order_test_data_scenario_null\x18\xee\x07 \x01(\x08\"\xca\x01\n\x08Response\x12\x38\n\x10meta_information\x18\x02 \x03(\x0b\x32\x1e.dstore.engine.MetaInformation\x12\'\n\x07message\x18\x03 \x03(\x0b\x32\x16.dstore.engine.Message\x12\x43\n\x03row\x18\x04 \x03(\x0b\x32\x36.dstore.engine.mi_RestoreDefaultValues_Ad.Response.Row\x1a\x16\n\x03Row\x12\x0f\n\x06row_id\x18\x90N \x01(\x05\x42[\n\x1bio.dstore.engine.proceduresZ<gosdk.dstore.de/engine/procedures/mi_RestoreDefaultValues_Adb\x06proto3')
,
dependencies=[dstore_dot_values__pb2.DESCRIPTOR,dstore_dot_engine_dot_engine__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_PARAMETERS = _descriptor.Descriptor(
name='Parameters',
full_name='dstore.engine.mi_RestoreDefaultValues_Ad.Parameters',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='create_sup_adm_person_with_passwd', full_name='dstore.engine.mi_RestoreDefaultValues_Ad.Parameters.create_sup_adm_person_with_passwd', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='create_sup_adm_person_with_passwd_null', full_name='dstore.engine.mi_RestoreDefaultValues_Ad.Parameters.create_sup_adm_person_with_passwd_null', index=1,
number=1001, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='skip_tables_for_development', full_name='dstore.engine.mi_RestoreDefaultValues_Ad.Parameters.skip_tables_for_development', index=2,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='skip_tables_for_development_null', full_name='dstore.engine.mi_RestoreDefaultValues_Ad.Parameters.skip_tables_for_development_null', index=3,
number=1002, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='generate_item_test_data_scenario', full_name='dstore.engine.mi_RestoreDefaultValues_Ad.Parameters.generate_item_test_data_scenario', index=4,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='generate_item_test_data_scenario_null', full_name='dstore.engine.mi_RestoreDefaultValues_Ad.Parameters.generate_item_test_data_scenario_null', index=5,
number=1003, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='generate_pers_test_data_scenario', full_name='dstore.engine.mi_RestoreDefaultValues_Ad.Parameters.generate_pers_test_data_scenario', index=6,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='generate_pers_test_data_scenario_null', full_name='dstore.engine.mi_RestoreDefaultValues_Ad.Parameters.generate_pers_test_data_scenario_null', index=7,
number=1004, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='gener_forum_test_data_scenario', full_name='dstore.engine.mi_RestoreDefaultValues_Ad.Parameters.gener_forum_test_data_scenario', index=8,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='gener_forum_test_data_scenario_null', full_name='dstore.engine.mi_RestoreDefaultValues_Ad.Parameters.gener_forum_test_data_scenario_null', index=9,
number=1005, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='gener_order_test_data_scenario', full_name='dstore.engine.mi_RestoreDefaultValues_Ad.Parameters.gener_order_test_data_scenario', index=10,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='gener_order_test_data_scenario_null', full_name='dstore.engine.mi_RestoreDefaultValues_Ad.Parameters.gener_order_test_data_scenario_null', index=11,
number=1006, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=153,
serialized_end=858,
)
_RESPONSE_ROW = _descriptor.Descriptor(
name='Row',
full_name='dstore.engine.mi_RestoreDefaultValues_Ad.Response.Row',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='row_id', full_name='dstore.engine.mi_RestoreDefaultValues_Ad.Response.Row.row_id', index=0,
number=10000, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1041,
serialized_end=1063,
)
_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='dstore.engine.mi_RestoreDefaultValues_Ad.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='meta_information', full_name='dstore.engine.mi_RestoreDefaultValues_Ad.Response.meta_information', index=0,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='message', full_name='dstore.engine.mi_RestoreDefaultValues_Ad.Response.message', index=1,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='row', full_name='dstore.engine.mi_RestoreDefaultValues_Ad.Response.row', index=2,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[_RESPONSE_ROW, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=861,
serialized_end=1063,
)
_PARAMETERS.fields_by_name['create_sup_adm_person_with_passwd'].message_type = dstore_dot_values__pb2._STRINGVALUE
_PARAMETERS.fields_by_name['skip_tables_for_development'].message_type = dstore_dot_values__pb2._BOOLEANVALUE
_PARAMETERS.fields_by_name['generate_item_test_data_scenario'].message_type = dstore_dot_values__pb2._STRINGVALUE
_PARAMETERS.fields_by_name['generate_pers_test_data_scenario'].message_type = dstore_dot_values__pb2._STRINGVALUE
_PARAMETERS.fields_by_name['gener_forum_test_data_scenario'].message_type = dstore_dot_values__pb2._STRINGVALUE
_PARAMETERS.fields_by_name['gener_order_test_data_scenario'].message_type = dstore_dot_values__pb2._STRINGVALUE
_RESPONSE_ROW.containing_type = _RESPONSE
_RESPONSE.fields_by_name['meta_information'].message_type = dstore_dot_engine_dot_engine__pb2._METAINFORMATION
_RESPONSE.fields_by_name['message'].message_type = dstore_dot_engine_dot_engine__pb2._MESSAGE
_RESPONSE.fields_by_name['row'].message_type = _RESPONSE_ROW
DESCRIPTOR.message_types_by_name['Parameters'] = _PARAMETERS
DESCRIPTOR.message_types_by_name['Response'] = _RESPONSE
Parameters = _reflection.GeneratedProtocolMessageType('Parameters', (_message.Message,), dict(
DESCRIPTOR = _PARAMETERS,
__module__ = 'dstore.engine.procedures.mi_RestoreDefaultValues_Ad_pb2'
# @@protoc_insertion_point(class_scope:dstore.engine.mi_RestoreDefaultValues_Ad.Parameters)
))
_sym_db.RegisterMessage(Parameters)
Response = _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), dict(
Row = _reflection.GeneratedProtocolMessageType('Row', (_message.Message,), dict(
DESCRIPTOR = _RESPONSE_ROW,
__module__ = 'dstore.engine.procedures.mi_RestoreDefaultValues_Ad_pb2'
# @@protoc_insertion_point(class_scope:dstore.engine.mi_RestoreDefaultValues_Ad.Response.Row)
))
,
DESCRIPTOR = _RESPONSE,
__module__ = 'dstore.engine.procedures.mi_RestoreDefaultValues_Ad_pb2'
# @@protoc_insertion_point(class_scope:dstore.engine.mi_RestoreDefaultValues_Ad.Response)
))
_sym_db.RegisterMessage(Response)
_sym_db.RegisterMessage(Response.Row)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\033io.dstore.engine.proceduresZ<gosdk.dstore.de/engine/procedures/mi_RestoreDefaultValues_Ad'))
import grpc
from grpc.beta import implementations as beta_implementations
from grpc.beta import interfaces as beta_interfaces
from grpc.framework.common import cardinality
from grpc.framework.interfaces.face import utilities as face_utilities
# @@protoc_insertion_point(module_scope)
| [
"[email protected]"
] | |
7f18193d0a006abf5e133bd1826ca925772415d9 | a4ea525e226d6c401fdb87a6e9adfdc5d07e6020 | /src/azure-cli/azure/cli/command_modules/marketplaceordering/manual/custom.py | c3d4d03fbbc01f38de3e9be67de777584acd4f5b | [
"MIT",
"BSD-3-Clause",
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MPL-2.0",
"LGPL-2.1-only",
"Apache-2.0",
"LGPL-2.1-or-later",
"BSD-2-Clause"
] | permissive | Azure/azure-cli | 13340eeca2e288e66e84d393fa1c8a93d46c8686 | a40fd14ad0b6e89720a2e58d4d9be3a6ce1535ca | refs/heads/dev | 2023-08-17T06:25:37.431463 | 2023-08-17T06:00:10 | 2023-08-17T06:00:10 | 51,040,886 | 4,018 | 3,310 | MIT | 2023-09-14T11:11:05 | 2016-02-04T00:21:51 | Python | UTF-8 | Python | false | false | 1,754 | py | # --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=too-many-lines
def term_accept(client,
publisher,
product,
plan):
offerDetail = client.get(offer_type="virtualmachine",
publisher_id=publisher,
offer_id=product,
plan_id=plan)
if offerDetail is None:
from azure.cli.core.azclierror import ValidationError
raise ValidationError(
'cannot find offer with publisher {}, product {} and plan {}.'.format(publisher, product, plan))
parameters = {}
parameters['publisher'] = publisher
parameters['product'] = product
parameters['plan'] = plan
parameters['license_text_link'] = offerDetail.license_text_link
parameters['privacy_policy_link'] = offerDetail.privacy_policy_link
parameters['marketplace_terms_link'] = offerDetail.marketplace_terms_link
parameters['retrieve_datetime'] = offerDetail.retrieve_datetime
parameters['signature'] = offerDetail.signature
parameters['accepted'] = True
return client.create(offer_type="virtualmachine",
publisher_id=publisher,
offer_id=product,
plan_id=plan,
parameters=parameters)
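# Example CLI invocation backed by this handler (values are illustrative only):
#   az term accept --publisher contoso --product my-offer --plan starter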
| [
"[email protected]"
] | |
36678e9ffcaa6862ab6d1a86c3294802fe005f96 | 1a1e4f0f525ebe080dcd74b7e5e8c33477eab6b8 | /backend/dating/api/v1/viewsets.py | 86fd6380f5c9628e9208d847b9a0a804ff8898db | [] | no_license | crowdbotics-apps/homedesign-20669 | 0b1e65aa4f169dd9f54cbbb9d6c4af38057b7efe | 0623f0d613cdc057079a799fccfe05db9c51ca73 | refs/heads/master | 2022-12-31T18:34:29.656171 | 2020-09-25T07:55:18 | 2020-09-25T07:55:18 | 298,505,595 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,095 | py | from rest_framework import authentication
from dating.models import Setting, Profile, Inbox, Dislike, Match, UserPhoto, Like
from .serializers import (
SettingSerializer,
ProfileSerializer,
InboxSerializer,
DislikeSerializer,
MatchSerializer,
UserPhotoSerializer,
LikeSerializer,
)
from rest_framework import viewsets
class DislikeViewSet(viewsets.ModelViewSet):
serializer_class = DislikeSerializer
authentication_classes = (
authentication.SessionAuthentication,
authentication.TokenAuthentication,
)
queryset = Dislike.objects.all()
class UserPhotoViewSet(viewsets.ModelViewSet):
serializer_class = UserPhotoSerializer
authentication_classes = (
authentication.SessionAuthentication,
authentication.TokenAuthentication,
)
queryset = UserPhoto.objects.all()
class MatchViewSet(viewsets.ModelViewSet):
serializer_class = MatchSerializer
authentication_classes = (
authentication.SessionAuthentication,
authentication.TokenAuthentication,
)
queryset = Match.objects.all()
class ProfileViewSet(viewsets.ModelViewSet):
serializer_class = ProfileSerializer
authentication_classes = (
authentication.SessionAuthentication,
authentication.TokenAuthentication,
)
queryset = Profile.objects.all()
class LikeViewSet(viewsets.ModelViewSet):
serializer_class = LikeSerializer
authentication_classes = (
authentication.SessionAuthentication,
authentication.TokenAuthentication,
)
queryset = Like.objects.all()
class SettingViewSet(viewsets.ModelViewSet):
serializer_class = SettingSerializer
authentication_classes = (
authentication.SessionAuthentication,
authentication.TokenAuthentication,
)
queryset = Setting.objects.all()
class InboxViewSet(viewsets.ModelViewSet):
serializer_class = InboxSerializer
authentication_classes = (
authentication.SessionAuthentication,
authentication.TokenAuthentication,
)
queryset = Inbox.objects.all()
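# These viewsets are typically registered with a DRF router elsewhere, e.g.
# (illustrative sketch, not part of this module):
#   router = routers.DefaultRouter()
#   router.register("profile", ProfileViewSet)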
| [
"[email protected]"
] | |
fde9ceaca4cd01c3520936f567cc4c0b9da7ea2f | 6a7058009587e78b5c758ff783410325ad7c2a4b | /leet/stack/removeDuplicates.py | 7b454b38480b649f7012a1ba584510296d51d6df | [
"Apache-2.0"
] | permissive | stacykutyepov/python-cp-cheatsheet | 8b96b76403c501f5579befd07b3c4a4c69fe914e | a00a57e1b36433648d1cace331e15ff276cef189 | refs/heads/master | 2023-07-16T13:26:35.130763 | 2021-08-30T11:23:39 | 2021-08-30T11:23:39 | 401,442,535 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 499 | py | """
Remove All Adjacent Duplicates in String II
time: O(n), space: O(n)
"""
class Solution:
def removeDuplicates(self, s: str, k: int) -> str:
stk = []
for c in s:
if stk and stk[-1][0] == c:
stk[-1][1] += 1
if stk[-1][1] >= k:
stk.pop()
else:
stk.append([c, 1])
ans = []
for c in stk:
ans.extend([c[0]] * c[1])
return "".join(ans) | [
"[email protected]"
] | |
5e70dd92d37b33cee451a79110869edc240504ee | 40b182f143fa034051fbfc80dc1bc77b204fdb89 | /fft_2d.py | 26fca42cbe8257f3a225e76e740aa65f3a6ab6f1 | [] | no_license | phaustin/fft_2d | 02ab6707cbbb1c3fcd836c4e9a6323b4439bed2a | a891669e2c70c70a87efa9b254e9103ec3d93af5 | refs/heads/master | 2016-09-06T10:51:35.568580 | 2014-02-11T06:52:04 | 2014-02-11T06:52:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,875 | py | from __future__ import division
from netCDF4 import Dataset
import numpy as np
import math
from scipy import fftpack
from matplotlib import pyplot as plt
#plt.switch_backend('Agg') #batch
plt.switch_backend('MacOSX') #interactive
test=Dataset('a17.nc')
tau=test.variables['tau'][:2000,:2000]
nn = tau.shape[0] # size of each column of the 2D array
mm = tau.shape[1] # size of each row of the array
m = int(math.floor(mm/2)) #midpoint
scale=0.025 #pixel size in km
x_dens = np.arange(0,(m))+1
x_dens = (x_dens-m)/(mm*scale)
delta_k = 1./scale #1/km
nyquist = delta_k*0.5
fft_tau = fftpack.fft2(tau)
tr_tau = fftpack.fftshift(fft_tau)
e_dens = tr_tau*np.conjugate(tr_tau)/(mm*mm)
e_dens = e_dens.real
plt.close('all')
fig,ax=plt.subplots(2,2)
ax[0,0].set_title('title1')
im1=ax[0,0].imshow(tau)
im2=ax[1,0].imshow(np.log(e_dens))
im3=ax[0,1].hist(tau.ravel())
im4=ax[1,1].hist(np.log(e_dens.ravel()))
plt.draw()
cbar_ax = fig.add_axes([0.45, 0.55, 0.03, 0.3])
fig.colorbar(im1,cax=cbar_ax)
fig.tight_layout()
fig.canvas.draw()
plt.show()
bnstep=2.
nbns = int(round((math.sqrt(2)*mm/bnstep),0)+1)
e_spec = np.zeros(nbns,np.float)
cnt = np.zeros(nbns,np.float)
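# radial average: bin each Fourier pixel by its distance from the array centre,
# accumulate power per annulus, then normalise by the per-bin counts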
for i in range(mm):
if (i%100) == 0:
print "\t\trow: "+str(i)+" completed"
for j in range(mm):
r = math.sqrt(((i+1)-mm/2)**2+((j+1)-mm/2)**2)
bn = int(math.floor(r/bnstep))
e_spec[bn]=e_spec[bn]+ np.abs(e_dens[i,j])**2.
cnt[bn]=cnt[bn]+1
for i in range(nbns):
if cnt[i]>0:
e_spec[i]=e_spec[i]/cnt[i]/(4*(math.pi**2))
e_spec=np.sqrt(e_spec)
delta_k=nyquist/nbns
x_ax=np.linspace(delta_k,nyquist,nbns)
fig=plt.figure(2)
fig.clf()
ax1=fig.add_subplot(111)
ax1.loglog(x_ax,e_spec)
l0=1.
slope=(-8/3.)
analytic=l0*x_ax**slope
the_line=l0
ax1.loglog(x_ax,analytic,'r-')
fig.tight_layout()
fig.canvas.draw()
plt.show()
| [
"[email protected]"
] | |
9beb2f007c5b1a2e985d1eb9cac90a3c7c0cb488 | 04b1803adb6653ecb7cb827c4f4aa616afacf629 | /build/android/gyp/dex.py | 9c551b9e78ce988f1b931a84bb34deb6a64e2c9a | [
"BSD-3-Clause"
] | permissive | Samsung/Castanets | 240d9338e097b75b3f669604315b06f7cf129d64 | 4896f732fc747dfdcfcbac3d442f2d2d42df264a | refs/heads/castanets_76_dev | 2023-08-31T09:01:04.744346 | 2021-07-30T04:56:25 | 2021-08-11T05:45:21 | 125,484,161 | 58 | 49 | BSD-3-Clause | 2022-10-16T19:31:26 | 2018-03-16T08:07:37 | null | UTF-8 | Python | false | false | 14,446 | py | #!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import logging
import optparse
import os
import re
import shutil
import sys
import tempfile
import zipfile
from util import build_utils
sys.path.insert(1, os.path.join(os.path.dirname(__file__), os.path.pardir))
import convert_dex_profile
def _CheckFilePathEndsWithJar(parser, file_path):
if not file_path.endswith(".jar"):
parser.error("%s does not end in .jar" % file_path)
def _CheckFilePathsEndWithJar(parser, file_paths):
for file_path in file_paths:
_CheckFilePathEndsWithJar(parser, file_path)
def _ParseArgs(args):
args = build_utils.ExpandFileArgs(args)
parser = optparse.OptionParser()
build_utils.AddDepfileOption(parser)
parser.add_option('--output-directory',
default=os.getcwd(),
help='Path to the output build directory.')
parser.add_option('--dex-path', help='Dex output path.')
parser.add_option('--configuration-name',
help='The build CONFIGURATION_NAME.')
parser.add_option('--proguard-enabled',
help='"true" if proguard is enabled.')
parser.add_option('--debug-build-proguard-enabled',
help='"true" if proguard is enabled for debug build.')
parser.add_option('--proguard-enabled-input-path',
help=('Path to dex in Release mode when proguard '
'is enabled.'))
parser.add_option('--inputs', help='A list of additional input paths.')
parser.add_option('--excluded-paths',
help='A list of paths to exclude from the dex file.')
parser.add_option('--main-dex-list-path',
help='A file containing a list of the classes to '
'include in the main dex.')
parser.add_option('--multidex-configuration-path',
help='A JSON file containing multidex build configuration.')
parser.add_option('--multi-dex', default=False, action='store_true',
help='Generate multiple dex files.')
parser.add_option('--d8-jar-path', help='Path to D8 jar.')
parser.add_option('--release', action='store_true', default=False,
help='Run D8 in release mode. Release mode maximises main '
'dex and deletes non-essential line number information '
'(vs debug which minimizes main dex and keeps all line '
'number information, and then some.')
parser.add_option('--min-api',
help='Minimum Android API level compatibility.')
parser.add_option('--dexlayout-profile',
help=('Text profile for dexlayout. If present, a dexlayout '
'pass will happen'))
parser.add_option('--profman-path',
help=('Path to ART profman binary. There should be a '
'lib/ directory at the same path containing shared '
'libraries (shared with dexlayout).'))
parser.add_option('--dexlayout-path',
help=('Path to ART dexlayout binary. There should be a '
'lib/ directory at the same path containing shared '
'libraries (shared with dexlayout).'))
parser.add_option('--dexdump-path', help='Path to dexdump binary.')
parser.add_option(
'--proguard-mapping-path',
help=('Path to proguard map from obfuscated symbols in the jar to '
'unobfuscated symbols present in the code. If not '
'present, the jar is assumed not to be obfuscated.'))
options, paths = parser.parse_args(args)
required_options = ('d8_jar_path',)
build_utils.CheckOptions(options, parser, required=required_options)
if options.dexlayout_profile:
build_utils.CheckOptions(
options,
parser,
required=('profman_path', 'dexlayout_path', 'dexdump_path'))
elif options.proguard_mapping_path is not None:
raise Exception('Unexpected proguard mapping without dexlayout')
if options.multidex_configuration_path:
with open(options.multidex_configuration_path) as multidex_config_file:
multidex_config = json.loads(multidex_config_file.read())
options.multi_dex = multidex_config.get('enabled', False)
if options.main_dex_list_path and not options.multi_dex:
logging.warning('--main-dex-list-path is unused if multidex is not enabled')
if options.inputs:
options.inputs = build_utils.ParseGnList(options.inputs)
_CheckFilePathsEndWithJar(parser, options.inputs)
if options.excluded_paths:
options.excluded_paths = build_utils.ParseGnList(options.excluded_paths)
if options.proguard_enabled_input_path:
_CheckFilePathEndsWithJar(parser, options.proguard_enabled_input_path)
_CheckFilePathsEndWithJar(parser, paths)
return options, paths
def _MoveTempDexFile(tmp_dex_dir, dex_path):
"""Move the temp dex file out of |tmp_dex_dir|.
Args:
tmp_dex_dir: Path to temporary directory created with tempfile.mkdtemp().
The directory should have just a single file.
dex_path: Target path to move dex file to.
Raises:
Exception if there are multiple files in |tmp_dex_dir|.
"""
tempfiles = os.listdir(tmp_dex_dir)
if len(tempfiles) > 1:
raise Exception('%d files created, expected 1' % len(tempfiles))
tmp_dex_path = os.path.join(tmp_dex_dir, tempfiles[0])
shutil.move(tmp_dex_path, dex_path)
def _NoClassFiles(jar_paths):
"""Returns True if there are no .class files in the given JARs.
Args:
jar_paths: list of strings representing JAR file paths.
Returns:
(bool) True if no .class files are found.
"""
for jar_path in jar_paths:
with zipfile.ZipFile(jar_path) as jar:
if any(name.endswith('.class') for name in jar.namelist()):
return False
return True
def _RunD8(dex_cmd, input_paths, output_path):
dex_cmd += ['--output', output_path]
dex_cmd += input_paths
build_utils.CheckOutput(dex_cmd, print_stderr=False)
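# Resulting invocation shape (flags depend on options; paths are illustrative):
#   java -jar d8.jar --no-desugaring [--release] [--min-api N] --output OUT in1.jar ...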
def _EnvWithArtLibPath(binary_path):
"""Return an environment dictionary for ART host shared libraries.
Args:
binary_path: the path to an ART host binary.
Returns:
An environment dictionary where LD_LIBRARY_PATH has been augmented with the
shared library path for the binary. This assumes that there is a lib/
directory in the same location as the binary.
"""
lib_path = os.path.join(os.path.dirname(binary_path), 'lib')
env = os.environ.copy()
libraries = [l for l in env.get('LD_LIBRARY_PATH', '').split(':') if l]
libraries.append(lib_path)
env['LD_LIBRARY_PATH'] = ':'.join(libraries)
return env
def _CreateBinaryProfile(text_profile, input_dex, profman_path, temp_dir):
"""Create a binary profile for dexlayout.
Args:
text_profile: The ART text profile that will be converted to a binary
profile.
input_dex: The input dex file to layout.
profman_path: Path to the profman binary.
temp_dir: Directory to work in.
Returns:
The name of the binary profile, which will live in temp_dir.
"""
binary_profile = os.path.join(
temp_dir, 'binary_profile-for-' + os.path.basename(text_profile))
open(binary_profile, 'w').close() # Touch binary_profile.
profman_cmd = [profman_path,
'--apk=' + input_dex,
'--dex-location=' + input_dex,
'--create-profile-from=' + text_profile,
'--reference-profile-file=' + binary_profile]
build_utils.CheckOutput(
profman_cmd,
env=_EnvWithArtLibPath(profman_path),
stderr_filter=lambda output:
build_utils.FilterLines(output, '|'.join(
[r'Could not find (method_id|proto_id|name):',
r'Could not create type list'])))
return binary_profile
def _LayoutDex(binary_profile, input_dex, dexlayout_path, temp_dir):
"""Layout a dexfile using a profile.
Args:
binary_profile: An ART binary profile, eg output from _CreateBinaryProfile.
input_dex: The dex file used to create the binary profile.
dexlayout_path: Path to the dexlayout binary.
temp_dir: Directory to work in.
Returns:
List of output files produced by dexlayout. This will be one if the input
was a single dexfile, or multiple files if the input was a multidex
zip. These output files are located in temp_dir.
"""
dexlayout_output_dir = os.path.join(temp_dir, 'dexlayout_output')
os.mkdir(dexlayout_output_dir)
dexlayout_cmd = [ dexlayout_path,
'-u', # Update checksum
'-p', binary_profile,
'-w', dexlayout_output_dir,
input_dex ]
build_utils.CheckOutput(
dexlayout_cmd,
env=_EnvWithArtLibPath(dexlayout_path),
stderr_filter=lambda output:
build_utils.FilterLines(output,
r'Can.t mmap dex file.*please zipalign'))
output_files = os.listdir(dexlayout_output_dir)
if not output_files:
raise Exception('dexlayout unexpectedly produced no output')
return [os.path.join(dexlayout_output_dir, f) for f in output_files]
def _ZipMultidex(file_dir, dex_files):
"""Zip dex files into a multidex.
Args:
file_dir: The directory into which to write the output.
    dex_files: The dex files forming the multidex zip. Their names must end with
classes.dex, classes2.dex, ...
Returns:
The name of the multidex file, which will live in file_dir.
"""
ordered_files = [] # List of (archive name, file name)
for f in dex_files:
if f.endswith('classes.dex.zip'):
ordered_files.append(('classes.dex', f))
break
if not ordered_files:
raise Exception('Could not find classes.dex multidex file in %s',
dex_files)
for dex_idx in xrange(2, len(dex_files) + 1):
archive_name = 'classes%d.dex' % dex_idx
for f in dex_files:
if f.endswith(archive_name):
ordered_files.append((archive_name, f))
break
else:
raise Exception('Could not find classes%d.dex multidex file in %s',
dex_files)
if len(set(f[1] for f in ordered_files)) != len(ordered_files):
raise Exception('Unexpected clashing filenames for multidex in %s',
dex_files)
zip_name = os.path.join(file_dir, 'multidex_classes.zip')
build_utils.DoZip(((archive_name, os.path.join(file_dir, file_name))
for archive_name, file_name in ordered_files),
zip_name)
return zip_name
def _ZipSingleDex(dex_file, zip_name):
"""Zip up a single dex file.
Args:
dex_file: A dexfile whose name is ignored.
zip_name: The output file in which to write the zip.
"""
build_utils.DoZip([('classes.dex', dex_file)], zip_name)
def main(args):
options, paths = _ParseArgs(args)
if ((options.proguard_enabled == 'true'
and options.configuration_name == 'Release')
or (options.debug_build_proguard_enabled == 'true'
and options.configuration_name == 'Debug')):
paths = [options.proguard_enabled_input_path]
if options.inputs:
paths += options.inputs
if options.excluded_paths:
# Excluded paths are relative to the output directory.
exclude_paths = options.excluded_paths
paths = [p for p in paths if not
os.path.relpath(p, options.output_directory) in exclude_paths]
input_paths = list(paths)
if options.multi_dex and options.main_dex_list_path:
input_paths.append(options.main_dex_list_path)
dex_cmd = ['java', '-jar', options.d8_jar_path, '--no-desugaring']
if options.multi_dex and options.main_dex_list_path:
dex_cmd += ['--main-dex-list', options.main_dex_list_path]
if options.release:
dex_cmd += ['--release']
if options.min_api:
dex_cmd += ['--min-api', options.min_api]
is_dex = options.dex_path.endswith('.dex')
is_jar = options.dex_path.endswith('.jar')
with build_utils.TempDir() as tmp_dir:
tmp_dex_dir = os.path.join(tmp_dir, 'tmp_dex_dir')
os.mkdir(tmp_dex_dir)
if is_jar and _NoClassFiles(paths):
# Handle case where no classfiles are specified in inputs
# by creating an empty JAR
with zipfile.ZipFile(options.dex_path, 'w') as outfile:
outfile.comment = 'empty'
else:
# .dex files can't specify a name for D8. Instead, we output them to a
# temp directory then move them after the command has finished running
# (see _MoveTempDexFile). For other files, tmp_dex_dir is None.
_RunD8(dex_cmd, paths, tmp_dex_dir)
tmp_dex_output = os.path.join(tmp_dir, 'tmp_dex_output')
if is_dex:
_MoveTempDexFile(tmp_dex_dir, tmp_dex_output)
else:
# d8 supports outputting to a .zip, but does not have deterministic file
# ordering: https://issuetracker.google.com/issues/119945929
build_utils.ZipDir(tmp_dex_output, tmp_dex_dir)
if options.dexlayout_profile:
if options.proguard_mapping_path is not None:
matching_profile = os.path.join(tmp_dir, 'obfuscated_profile')
convert_dex_profile.ObfuscateProfile(
options.dexlayout_profile, tmp_dex_output,
options.proguard_mapping_path, options.dexdump_path,
matching_profile)
else:
logging.warning('No obfuscation for %s', options.dexlayout_profile)
matching_profile = options.dexlayout_profile
binary_profile = _CreateBinaryProfile(matching_profile, tmp_dex_output,
options.profman_path, tmp_dir)
output_files = _LayoutDex(binary_profile, tmp_dex_output,
options.dexlayout_path, tmp_dir)
target = None
if len(output_files) > 1:
target = _ZipMultidex(tmp_dir, output_files)
else:
output = output_files[0]
if not zipfile.is_zipfile(output):
target = os.path.join(tmp_dir, 'dex_classes.zip')
_ZipSingleDex(output, target)
else:
target = output
shutil.move(os.path.join(tmp_dir, target), tmp_dex_output)
# The dex file is complete and can be moved out of tmp_dir.
shutil.move(tmp_dex_output, options.dex_path)
build_utils.WriteDepfile(
options.depfile, options.dex_path, input_paths, add_pydeps=False)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| [
"[email protected]"
] | |
6398727d41745a5f48c6ccd6952f2945d5814a3f | 8a1200f2bd0bf25a264c065da896d1806e45155a | /packages/ply-9999.py | 591b952a924da74d2703d4d6bcf134dbd272ecc0 | [] | no_license | mhulsman/enhance | fac908eca561749ab99ea708f19f37eaceabf6c5 | 761234a263e4962a8e10026ce143c1ea0e7e8728 | refs/heads/master | 2021-01-18T23:40:56.252953 | 2016-09-29T13:04:48 | 2016-09-29T13:04:48 | 1,520,086 | 3 | 4 | null | null | null | null | UTF-8 | Python | false | false | 97 | py | from package import *
class ply(EasyInstallPackage):
dependencies = ["setuptools","python"]
| [
"[email protected]"
] | |
d22177499c618ff254b7d77a4adadebd0fbc9c16 | 82ccbe6f52a89e0acd1b88cd3de6c2c434bcef6c | /lib/PanGenomeAPI/fetch_summary/main.py | 64197ada34555c2089d05a55099c48806cd693d0 | [
"MIT"
] | permissive | kbaseapps/PanGenomeAPI | e51da942cb61f4341824e7b296fa2b694b7deda6 | 52dfc557279824f3b1c3b0a537528ccfaee39ab1 | refs/heads/master | 2021-07-20T04:30:31.032517 | 2021-02-23T22:45:09 | 2021-02-23T22:45:09 | 89,646,247 | 0 | 3 | MIT | 2021-02-23T22:45:09 | 2017-04-27T22:42:41 | Python | UTF-8 | Python | false | false | 7,818 | py | """
Fetch and construct summary data for previewing a pangenome.
"""
from installed_clients.WorkspaceClient import Workspace as Workspace
def fetch_pangenome_summary(
pangenome_ref: str,
workspace_url: str,
token: str) -> dict:
"""
Construct a summary data object for a single pangenome, used in the
"simple_summary" method.
Args:
pangenome_ref: Workspace reference to the pangenome object
workspace_url: URL of the Workspace being used in the current env
token: authorization token for fetching the data
Returns:
A python object adhering to the SimpleSummaryResult type in
PanGenomeAPI.spec
"""
ws_client = Workspace(workspace_url, token=token)
# Download the full pangenome workspace dataset
resp = ws_client.get_objects2({
'objects': [{'ref': pangenome_ref}]
})
data = resp['data'][0]['data']
# Fetch the object infos for each genome
genome_refs = [{"ref": ref} for ref in data["genome_refs"]]
genome_infos = ws_client.get_object_info3({
"objects": genome_refs,
"includeMetadata": 1
})["infos"]
name_mapping = _genome_name_mapping(genome_infos)
ret = {
"pangenome_id": data["id"],
"genomes_count": len(data["genome_refs"]),
"genes": _count_genes(data),
"families": _count_families(data),
"genomes": _genome_counts(data, genome_infos, name_mapping),
"shared_family_map": _shared_family_map(data, name_mapping),
"genome_ref_name_map": name_mapping,
}
return ret
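# Example call (ref and URL are illustrative):
#   summary = fetch_pangenome_summary(
#       "123/4/5", "https://<env>.kbase.us/services/ws", token)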
def _count_genes(pg_data: dict) -> dict:
"""
Calculate gene counts for a pangenome object
Args:
pg_data: workspace data object for the Pangenome
Returns:
Dict of counts with the GeneFamilyReport type in PanGenomeAPI.spec
"""
counts = {
"genes_count": 0,
"homolog_family_genes_count": 0,
"singleton_family_genes_count": 0,
}
for family in pg_data["orthologs"]:
count = len(family["orthologs"])
counts["genes_count"] += count
if count == 1:
counts["singleton_family_genes_count"] += count
elif count > 1:
counts["homolog_family_genes_count"] += count
return counts
def _count_families(pg_data: dict) -> dict:
"""
Aggregate counts for the homolog families in the pangenome
Args:
pg_data: workspace data object for the Pangenome
Returns:
dict matching the type FamilyReport from PanGenomeAPI.spec
"""
counts = {
"families_count": 0,
"homolog_families_count": 0,
"singleton_families_count": 0,
}
counts["families_count"] = len(pg_data["orthologs"])
for family in pg_data["orthologs"]:
count = len(family["orthologs"])
if count == 1:
counts["singleton_families_count"] += 1
elif count > 1:
counts["homolog_families_count"] += 1
return counts
def _genome_name_mapping(genome_infos: list) -> dict:
"""
Construct a mapping of genome workspace reference to sciname
Args:
pg_data: workspace data object for the Pangenome
genome_infos: list of object info tuples (with metadata) for every
genome in the pangenome
Returns:
Mapping of genome ref to scientific name and obj name
"""
ret = {}
names = set()
# Fetch the object infos for every ref
for info in genome_infos:
ref = _get_ref(info)
sciname = info[-1].get("Name", "unknown taxon")
# Create a unique display name for each genome
name = sciname
if name in names:
name = f"{sciname} ({ref})"
names.add(name)
ret[ref] = name
return ret
def _genome_counts(
pg_data: dict,
genome_infos: list,
name_mapping: dict) -> dict:
"""
Aggregate counts of genes and families for every genome
Args:
pg_data: workspace data object for the Pangenome
genome_infos: list of genome info tuples for each object
name_mapping: mapping of workspace ref to readable name for use as keys
Returns:
Mapping of genome ref to GenomeGeneFamilyReport (from
PanGenomeAPI.spec)
"""
# Initialize the result structure
ret = {}
for name in name_mapping.values():
ret[name] = {
"genome_genes": 0,
"genome_homolog_family_genes": 0,
"genome_singleton_family_genes": 0,
"genome_homolog_family": 0,
}
# Set total feature counts from the obj info
for info in genome_infos:
key = name_mapping[_get_ref(info)]
ret[key]["genome_genes"] = _get_feature_count(info)
# Aggregate other counts from the ortholog families
for family in pg_data["orthologs"]:
count = len(family["orthologs"])
found_genomes = set()
for gene in family["orthologs"]:
genome_ref = gene[2]
key = name_mapping[genome_ref]
if count > 1:
ret[key]["genome_homolog_family_genes"] += 1
found_genomes.add(genome_ref)
for ref in found_genomes:
ret[name_mapping[ref]]["genome_homolog_family"] += 1
# Set the singleton family gene counts to be the difference of the total
# features and the homolog family counts
for ref in pg_data["genome_refs"]:
key = name_mapping[ref]
total = ret[key]["genome_genes"]
homologs = ret[key]["genome_homolog_family_genes"]
ret[key]["genome_singleton_family_genes"] = total - homologs
return ret
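# Example (hypothetical numbers): a genome with genome_genes == 4000, of which
# 3200 genes sit in multi-member families, ends up with
# genome_singleton_family_genes == 800 via the subtraction above.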
def _shared_family_map(pg_data: dict, name_mapping: dict) -> dict:
"""
Calculate the number of shared ortholog families between any two genomes
Args:
pg_data: workspace data object for the Pangenome
name_mapping: mapping of workspace ref to readable name for use as keys
Returns:
        dict where keys are genome display names (via name_mapping), and
        values map genome display names to shared family counts.
        Example: {"g1": {"g2": 10}} represents genomes "g1" and "g2" sharing
        10 families
"""
# Initialize the return structure
ret = {}
for ref1 in pg_data["genome_refs"]:
key1 = name_mapping[ref1]
ret[key1] = {}
for ref2 in pg_data["genome_refs"]:
key2 = name_mapping[ref2]
ret[key1][key2] = 0
# Aggregate counts of all genomes that share genes in an ortholog family
for family in pg_data["orthologs"]:
if len(family["orthologs"]) <= 1:
# We only record non-singletons
continue
genome_refs = set(orth[2] for orth in family["orthologs"])
for ref1 in genome_refs:
for ref2 in genome_refs:
key1, key2 = name_mapping[ref1], name_mapping[ref2]
ret[key1][key2] += 1
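    # Note: the diagonal entry ret[name][name] equals the number of
    # non-singleton families containing that genome, since ref1 == ref2 is
    # counted above.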
return ret
def _get_feature_count(genome_info: list) -> int:
"""
Get the total feature count (coding and non-coding) for a genome.
We fetch this number from the genome metadata.
Older Genome versions store this as "Number features", while newer versions
(>=9) store it as "Number of Protein Encoding Genes".
Genome versions before 8 (older than July, 2014) have no metadata and
aren't supported for now.
"""
valid_keys = ("Number of Protein Encoding Genes", "Number features")
meta = genome_info[-1]
for key in valid_keys:
if key in meta:
return int(meta[key])
# TODO fallback to something else?
raise RuntimeError(
"Unable to read the number of features "
f"from the Genome metadata: {genome_info}")
def _get_ref(info: list) -> str:
"""Get the workspace reference from an info tuple"""
return f"{info[6]}/{info[0]}/{info[4]}"
# File: strings/easy/q443.py
# -*- coding: utf-8 -*-
"""
Created on Tue Apr 30 23:39:02 2019
@author: pengz
"""
'''
Given an array of characters, compress it in-place.
The length after compression must always be smaller than or equal to the original array.
Every element of the array should be a character (not int) of length 1.
After you are done modifying the input array in-place, return the new length of the array.
Follow up:
Could you solve it using only O(1) extra space?
Example 1:
Input:
["a","a","b","b","c","c","c"]
Output:
Return 6, and the first 6 characters of the input array should be: ["a","2","b","2","c","3"]
Explanation:
"aa" is replaced by "a2". "bb" is replaced by "b2". "ccc" is replaced by "c3".
'''
## I kept getting the second approach to this problem wrong!!! Practice it a few more times and remember it!
def compress(chars: list) -> int:  ## try O(1) space, written on my own
    if len(chars) <= 1:
        return len(chars)
    i = len(chars) - 1
while i >= 0:
ret = 1
if i != 0:
if chars[i] != chars[i-1]:
i = i-1
else:
                while i > 0 and chars[i] == chars[i-1]:  ## count the run length; when this loop exits, i is at the first char of the run
ret += 1
tmp = i
i = i-1
chars.pop(tmp)
index = i+1
x = 0
ins = str(ret)
while x < len(ins):
                    chars.insert(index, ins[x])
                    x = x + 1
                    index = index + 1
            # i = i-1  ## decrementing i here is unnecessary: without it, the
            #          ## next iteration sees chars[i] != chars[i-1] and the
            #          ## if branch above does the i-1
        else:
            i = i - 1
return len(chars)
def compress2(chars: list) -> int:  ## two pointers: i walks the list, index is the write position
    i = 0
    index = 0  ## index is where the letter goes; after writing it, index+1 onward holds the count digits
    while i < len(chars):
        tmp = chars[i]
        count = 0
        while i < len(chars) and chars[i] == tmp:  ## when this loop exits, i is on the first char of the next run
            i = i + 1
            count = count + 1
        chars[index] = tmp  ## if count digits were written earlier, index already sits just past them;
                            ## write the cached letter tmp here (i has already moved on to the next run)
        index = index + 1
        if count > 1:
            for c in str(count):
                chars[index] = c
                index = index + 1
    return index
chars = ["a","a","a","a","a","a","a","b","b","c","d","d","e"]
a = compress2(chars)
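# Expected result (worked by hand): compress2 returns 8 and the first 8
# entries of chars become ["a", "7", "b", "2", "c", "d", "2", "e"]
print(a, chars[:a])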
# File: virtual/bin/pip3.6
#!/home/v1nc3n7/Documents/Projects/Projects/blog/virtual/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
# File: venv/Lib/site-packages/pandas/core/arrays/datetimes.py
from datetime import datetime, time, timedelta
import textwrap
from typing import Union
import warnings
import numpy as np
from pytz import utc
from pandas._libs import lib, tslib
from pandas._libs.tslibs import (
NaT,
Timestamp,
ccalendar,
conversion,
fields,
iNaT,
normalize_date,
resolution as libresolution,
timezones,
tzconversion,
)
import pandas.compat as compat
from pandas.errors import PerformanceWarning
from pandas.util._decorators import Appender
from pandas.core.dtypes.common import (
_INT64_DTYPE,
_NS_DTYPE,
is_categorical_dtype,
is_datetime64_dtype,
is_datetime64_ns_dtype,
is_datetime64tz_dtype,
is_dtype_equal,
is_extension_type,
is_float_dtype,
is_object_dtype,
is_period_dtype,
is_string_dtype,
is_timedelta64_dtype,
pandas_dtype,
)
from pandas.core.dtypes.dtypes import DatetimeTZDtype
from pandas.core.dtypes.generic import (
ABCDataFrame,
ABCIndexClass,
ABCPandasArray,
ABCSeries,
)
from pandas.core.dtypes.missing import isna
from pandas.core import ops
from pandas.core.algorithms import checked_add_with_arr
from pandas.core.arrays import datetimelike as dtl
from pandas.core.arrays._ranges import generate_regular_range
import pandas.core.common as com
from pandas.tseries.frequencies import get_period_alias, to_offset
from pandas.tseries.offsets import Day, Tick
_midnight = time(0, 0)
# TODO(GH-24559): Remove warning, int_as_wall_time parameter.
_i8_message = """
Passing integer-dtype data and a timezone to DatetimeIndex. Integer values
will be interpreted differently in a future version of pandas. Previously,
these were viewed as datetime64[ns] values representing the wall time
*in the specified timezone*. In the future, these will be viewed as
datetime64[ns] values representing the wall time *in UTC*. This is similar
to a nanosecond-precision UNIX epoch. To accept the future behavior, use
pd.to_datetime(integer_data, utc=True).tz_convert(tz)
To keep the previous behavior, use
pd.to_datetime(integer_data).tz_localize(tz)
"""
def tz_to_dtype(tz):
"""
Return a datetime64[ns] dtype appropriate for the given timezone.
Parameters
----------
tz : tzinfo or None
Returns
-------
np.dtype or Datetime64TZDType
"""
if tz is None:
return _NS_DTYPE
else:
return DatetimeTZDtype(tz=tz)
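# For example (sketch): tz_to_dtype(None) -> np.dtype("M8[ns]"), while
# tz_to_dtype(pytz.UTC) -> DatetimeTZDtype(unit="ns", tz=pytz.UTC).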
def _to_M8(key, tz=None):
"""
Timestamp-like => dt64
"""
if not isinstance(key, Timestamp):
# this also converts strings
key = Timestamp(key)
if key.tzinfo is not None and tz is not None:
# Don't tz_localize(None) if key is already tz-aware
key = key.tz_convert(tz)
else:
key = key.tz_localize(tz)
return np.int64(conversion.pydt_to_i8(key)).view(_NS_DTYPE)
def _field_accessor(name, field, docstring=None):
def f(self):
values = self.asi8
if self.tz is not None and not timezones.is_utc(self.tz):
values = self._local_timestamps()
if field in self._bool_ops:
if field.endswith(("start", "end")):
freq = self.freq
month_kw = 12
if freq:
kwds = freq.kwds
month_kw = kwds.get("startingMonth", kwds.get("month", 12))
result = fields.get_start_end_field(
values, field, self.freqstr, month_kw
)
else:
result = fields.get_date_field(values, field)
# these return a boolean by-definition
return result
if field in self._object_ops:
result = fields.get_date_name_field(values, field)
result = self._maybe_mask_results(result, fill_value=None)
else:
result = fields.get_date_field(values, field)
result = self._maybe_mask_results(
result, fill_value=None, convert="float64"
)
return result
f.__name__ = name
f.__doc__ = docstring
return property(f)
def _dt_array_cmp(cls, op):
"""
Wrap comparison operations to convert datetime-like to datetime64
"""
opname = "__{name}__".format(name=op.__name__)
nat_result = opname == "__ne__"
def wrapper(self, other):
if isinstance(other, (ABCDataFrame, ABCSeries, ABCIndexClass)):
return NotImplemented
other = lib.item_from_zerodim(other)
if isinstance(other, (datetime, np.datetime64, str)):
if isinstance(other, (datetime, np.datetime64)):
# GH#18435 strings get a pass from tzawareness compat
self._assert_tzawareness_compat(other)
try:
other = _to_M8(other, tz=self.tz)
except ValueError:
# string that cannot be parsed to Timestamp
return ops.invalid_comparison(self, other, op)
result = op(self.asi8, other.view("i8"))
if isna(other):
result.fill(nat_result)
elif lib.is_scalar(other) or np.ndim(other) == 0:
return ops.invalid_comparison(self, other, op)
elif len(other) != len(self):
raise ValueError("Lengths must match")
else:
if isinstance(other, list):
try:
other = type(self)._from_sequence(other)
except ValueError:
other = np.array(other, dtype=np.object_)
elif not isinstance(
other, (np.ndarray, ABCIndexClass, ABCSeries, DatetimeArray)
):
# Following Timestamp convention, __eq__ is all-False
# and __ne__ is all True, others raise TypeError.
return ops.invalid_comparison(self, other, op)
if is_object_dtype(other):
# We have to use _comp_method_OBJECT_ARRAY instead of numpy
# comparison otherwise it would fail to raise when
# comparing tz-aware and tz-naive
with np.errstate(all="ignore"):
result = ops._comp_method_OBJECT_ARRAY(
op, self.astype(object), other
)
o_mask = isna(other)
elif not (is_datetime64_dtype(other) or is_datetime64tz_dtype(other)):
# e.g. is_timedelta64_dtype(other)
return ops.invalid_comparison(self, other, op)
else:
self._assert_tzawareness_compat(other)
if isinstance(other, (ABCIndexClass, ABCSeries)):
other = other.array
if (
is_datetime64_dtype(other)
and not is_datetime64_ns_dtype(other)
or not hasattr(other, "asi8")
):
# e.g. other.dtype == 'datetime64[s]'
# or an object-dtype ndarray
other = type(self)._from_sequence(other)
result = op(self.view("i8"), other.view("i8"))
o_mask = other._isnan
result = com.values_from_object(result)
if o_mask.any():
result[o_mask] = nat_result
if self._hasnans:
result[self._isnan] = nat_result
return result
return compat.set_function_name(wrapper, opname, cls)
class DatetimeArray(dtl.DatetimeLikeArrayMixin, dtl.TimelikeOps, dtl.DatelikeOps):
"""
Pandas ExtensionArray for tz-naive or tz-aware datetime data.
.. versionadded:: 0.24.0
.. warning::
DatetimeArray is currently experimental, and its API may change
without warning. In particular, :attr:`DatetimeArray.dtype` is
expected to change to always be an instance of an ``ExtensionDtype``
subclass.
Parameters
----------
values : Series, Index, DatetimeArray, ndarray
The datetime data.
For DatetimeArray `values` (or a Series or Index boxing one),
`dtype` and `freq` will be extracted from `values`, with
precedence given to
dtype : numpy.dtype or DatetimeTZDtype
Note that the only NumPy dtype allowed is 'datetime64[ns]'.
freq : str or Offset, optional
copy : bool, default False
Whether to copy the underlying array of values.
Attributes
----------
None
Methods
-------
None
"""
_typ = "datetimearray"
_scalar_type = Timestamp
# define my properties & methods for delegation
_bool_ops = [
"is_month_start",
"is_month_end",
"is_quarter_start",
"is_quarter_end",
"is_year_start",
"is_year_end",
"is_leap_year",
]
_object_ops = ["weekday_name", "freq", "tz"]
_field_ops = [
"year",
"month",
"day",
"hour",
"minute",
"second",
"weekofyear",
"week",
"weekday",
"dayofweek",
"dayofyear",
"quarter",
"days_in_month",
"daysinmonth",
"microsecond",
"nanosecond",
]
_other_ops = ["date", "time", "timetz"]
_datetimelike_ops = _field_ops + _object_ops + _bool_ops + _other_ops
_datetimelike_methods = [
"to_period",
"tz_localize",
"tz_convert",
"normalize",
"strftime",
"round",
"floor",
"ceil",
"month_name",
"day_name",
]
# ndim is inherited from ExtensionArray, must exist to ensure
# Timestamp.__richcmp__(DateTimeArray) operates pointwise
# ensure that operations with numpy arrays defer to our implementation
__array_priority__ = 1000
# -----------------------------------------------------------------
# Constructors
_attributes = ["freq", "tz"]
_dtype = None # type: Union[np.dtype, DatetimeTZDtype]
_freq = None
def __init__(self, values, dtype=_NS_DTYPE, freq=None, copy=False):
if isinstance(values, (ABCSeries, ABCIndexClass)):
values = values._values
inferred_freq = getattr(values, "_freq", None)
if isinstance(values, type(self)):
# validation
dtz = getattr(dtype, "tz", None)
if dtz and values.tz is None:
dtype = DatetimeTZDtype(tz=dtype.tz)
elif dtz and values.tz:
if not timezones.tz_compare(dtz, values.tz):
msg = (
"Timezone of the array and 'dtype' do not match. "
"'{}' != '{}'"
)
raise TypeError(msg.format(dtz, values.tz))
elif values.tz:
dtype = values.dtype
# freq = validate_values_freq(values, freq)
if freq is None:
freq = values.freq
values = values._data
if not isinstance(values, np.ndarray):
msg = (
"Unexpected type '{}'. 'values' must be a DatetimeArray "
"ndarray, or Series or Index containing one of those."
)
raise ValueError(msg.format(type(values).__name__))
if values.ndim != 1:
raise ValueError("Only 1-dimensional input arrays are supported.")
if values.dtype == "i8":
# for compat with datetime/timedelta/period shared methods,
# we can sometimes get here with int64 values. These represent
# nanosecond UTC (or tz-naive) unix timestamps
values = values.view(_NS_DTYPE)
if values.dtype != _NS_DTYPE:
msg = (
"The dtype of 'values' is incorrect. Must be 'datetime64[ns]'."
" Got {} instead."
)
raise ValueError(msg.format(values.dtype))
dtype = _validate_dt64_dtype(dtype)
if freq == "infer":
msg = (
"Frequency inference not allowed in DatetimeArray.__init__. "
"Use 'pd.array()' instead."
)
raise ValueError(msg)
if copy:
values = values.copy()
if freq:
freq = to_offset(freq)
if getattr(dtype, "tz", None):
# https://github.com/pandas-dev/pandas/issues/18595
# Ensure that we have a standard timezone for pytz objects.
# Without this, things like adding an array of timedeltas and
# a tz-aware Timestamp (with a tz specific to its datetime) will
# be incorrect(ish?) for the array as a whole
dtype = DatetimeTZDtype(tz=timezones.tz_standardize(dtype.tz))
self._data = values
self._dtype = dtype
self._freq = freq
if inferred_freq is None and freq is not None:
type(self)._validate_frequency(self, freq)
@classmethod
def _simple_new(cls, values, freq=None, dtype=_NS_DTYPE):
assert isinstance(values, np.ndarray)
if values.dtype == "i8":
values = values.view(_NS_DTYPE)
result = object.__new__(cls)
result._data = values
result._freq = freq
result._dtype = dtype
return result
@classmethod
def _from_sequence(
cls,
data,
dtype=None,
copy=False,
tz=None,
freq=None,
dayfirst=False,
yearfirst=False,
ambiguous="raise",
int_as_wall_time=False,
):
freq, freq_infer = dtl.maybe_infer_freq(freq)
subarr, tz, inferred_freq = sequence_to_dt64ns(
data,
dtype=dtype,
copy=copy,
tz=tz,
dayfirst=dayfirst,
yearfirst=yearfirst,
ambiguous=ambiguous,
int_as_wall_time=int_as_wall_time,
)
freq, freq_infer = dtl.validate_inferred_freq(freq, inferred_freq, freq_infer)
dtype = tz_to_dtype(tz)
result = cls._simple_new(subarr, freq=freq, dtype=dtype)
if inferred_freq is None and freq is not None:
# this condition precludes `freq_infer`
cls._validate_frequency(result, freq, ambiguous=ambiguous)
elif freq_infer:
# Set _freq directly to bypass duplicative _validate_frequency
# check.
result._freq = to_offset(result.inferred_freq)
return result
@classmethod
def _generate_range(
cls,
start,
end,
periods,
freq,
tz=None,
normalize=False,
ambiguous="raise",
nonexistent="raise",
closed=None,
):
periods = dtl.validate_periods(periods)
if freq is None and any(x is None for x in [periods, start, end]):
raise ValueError("Must provide freq argument if no data is " "supplied")
if com.count_not_none(start, end, periods, freq) != 3:
raise ValueError(
"Of the four parameters: start, end, periods, "
"and freq, exactly three must be specified"
)
freq = to_offset(freq)
if start is not None:
start = Timestamp(start)
if end is not None:
end = Timestamp(end)
if start is None and end is None:
if closed is not None:
raise ValueError(
"Closed has to be None if not both of start" "and end are defined"
)
if start is NaT or end is NaT:
raise ValueError("Neither `start` nor `end` can be NaT")
left_closed, right_closed = dtl.validate_endpoints(closed)
start, end, _normalized = _maybe_normalize_endpoints(start, end, normalize)
tz = _infer_tz_from_endpoints(start, end, tz)
if tz is not None:
# Localize the start and end arguments
start = _maybe_localize_point(
start,
getattr(start, "tz", None),
start,
freq,
tz,
ambiguous,
nonexistent,
)
end = _maybe_localize_point(
end, getattr(end, "tz", None), end, freq, tz, ambiguous, nonexistent
)
if freq is not None:
# We break Day arithmetic (fixed 24 hour) here and opt for
# Day to mean calendar day (23/24/25 hour). Therefore, strip
# tz info from start and day to avoid DST arithmetic
if isinstance(freq, Day):
if start is not None:
start = start.tz_localize(None)
if end is not None:
end = end.tz_localize(None)
# TODO: consider re-implementing _cached_range; GH#17914
values, _tz = generate_regular_range(start, end, periods, freq)
index = cls._simple_new(values, freq=freq, dtype=tz_to_dtype(_tz))
if tz is not None and index.tz is None:
arr = conversion.tz_localize_to_utc(
index.asi8, tz, ambiguous=ambiguous, nonexistent=nonexistent
)
index = cls(arr)
# index is localized datetime64 array -> have to convert
# start/end as well to compare
if start is not None:
start = start.tz_localize(tz).asm8
if end is not None:
end = end.tz_localize(tz).asm8
else:
# Create a linearly spaced date_range in local time
# Nanosecond-granularity timestamps aren't always correctly
# representable with doubles, so we limit the range that we
# pass to np.linspace as much as possible
arr = (
np.linspace(0, end.value - start.value, periods, dtype="int64")
+ start.value
)
dtype = tz_to_dtype(tz)
index = cls._simple_new(
arr.astype("M8[ns]", copy=False), freq=None, dtype=dtype
)
if not left_closed and len(index) and index[0] == start:
index = index[1:]
if not right_closed and len(index) and index[-1] == end:
index = index[:-1]
dtype = tz_to_dtype(tz)
return cls._simple_new(index.asi8, freq=freq, dtype=dtype)
# -----------------------------------------------------------------
# DatetimeLike Interface
def _unbox_scalar(self, value):
if not isinstance(value, self._scalar_type) and value is not NaT:
raise ValueError("'value' should be a Timestamp.")
if not isna(value):
self._check_compatible_with(value)
return value.value
def _scalar_from_string(self, value):
return Timestamp(value, tz=self.tz)
def _check_compatible_with(self, other):
if other is NaT:
return
if not timezones.tz_compare(self.tz, other.tz):
raise ValueError(
"Timezones don't match. '{own} != {other}'".format(
own=self.tz, other=other.tz
)
)
def _maybe_clear_freq(self):
self._freq = None
# -----------------------------------------------------------------
# Descriptive Properties
@property
def _box_func(self):
return lambda x: Timestamp(x, freq=self.freq, tz=self.tz)
@property
def dtype(self) -> Union[np.dtype, DatetimeTZDtype]:
"""
The dtype for the DatetimeArray.
.. warning::
A future version of pandas will change dtype to never be a
``numpy.dtype``. Instead, :attr:`DatetimeArray.dtype` will
always be an instance of an ``ExtensionDtype`` subclass.
Returns
-------
numpy.dtype or DatetimeTZDtype
If the values are tz-naive, then ``np.dtype('datetime64[ns]')``
is returned.
If the values are tz-aware, then the ``DatetimeTZDtype``
is returned.
"""
return self._dtype
@property
def tz(self):
"""
Return timezone, if any.
Returns
-------
datetime.tzinfo, pytz.tzinfo.BaseTZInfo, dateutil.tz.tz.tzfile, or None
Returns None when the array is tz-naive.
"""
# GH 18595
return getattr(self.dtype, "tz", None)
@tz.setter
def tz(self, value):
# GH 3746: Prevent localizing or converting the index by setting tz
raise AttributeError(
"Cannot directly set timezone. Use tz_localize() "
"or tz_convert() as appropriate"
)
@property
def tzinfo(self):
"""
Alias for tz attribute
"""
return self.tz
@property # NB: override with cache_readonly in immutable subclasses
def _timezone(self):
"""
Comparable timezone both for pytz / dateutil
"""
return timezones.get_timezone(self.tzinfo)
@property # NB: override with cache_readonly in immutable subclasses
def is_normalized(self):
"""
Returns True if all of the dates are at midnight ("no time")
"""
return conversion.is_date_array_normalized(self.asi8, self.tz)
@property # NB: override with cache_readonly in immutable subclasses
def _resolution(self):
return libresolution.resolution(self.asi8, self.tz)
# ----------------------------------------------------------------
# Array-Like / EA-Interface Methods
def __array__(self, dtype=None):
if dtype is None and self.tz:
# The default for tz-aware is object, to preserve tz info
dtype = object
return super().__array__(dtype=dtype)
def __iter__(self):
"""
Return an iterator over the boxed values
Yields
------
tstamp : Timestamp
"""
# convert in chunks of 10k for efficiency
data = self.asi8
length = len(self)
chunksize = 10000
chunks = int(length / chunksize) + 1
for i in range(chunks):
start_i = i * chunksize
end_i = min((i + 1) * chunksize, length)
converted = tslib.ints_to_pydatetime(
data[start_i:end_i], tz=self.tz, freq=self.freq, box="timestamp"
)
for v in converted:
yield v
def astype(self, dtype, copy=True):
# We handle
# --> datetime
# --> period
# DatetimeLikeArrayMixin Super handles the rest.
dtype = pandas_dtype(dtype)
if is_datetime64_ns_dtype(dtype) and not is_dtype_equal(dtype, self.dtype):
# GH#18951: datetime64_ns dtype but not equal means different tz
new_tz = getattr(dtype, "tz", None)
if getattr(self.dtype, "tz", None) is None:
return self.tz_localize(new_tz)
result = self.tz_convert(new_tz)
if new_tz is None:
# Do we want .astype('datetime64[ns]') to be an ndarray.
# The astype in Block._astype expects this to return an
# ndarray, but we could maybe work around it there.
result = result._data
return result
elif is_datetime64tz_dtype(self.dtype) and is_dtype_equal(self.dtype, dtype):
if copy:
return self.copy()
return self
elif is_period_dtype(dtype):
return self.to_period(freq=dtype.freq)
return dtl.DatetimeLikeArrayMixin.astype(self, dtype, copy)
# ----------------------------------------------------------------
# ExtensionArray Interface
@Appender(dtl.DatetimeLikeArrayMixin._validate_fill_value.__doc__)
def _validate_fill_value(self, fill_value):
if isna(fill_value):
fill_value = iNaT
elif isinstance(fill_value, (datetime, np.datetime64)):
self._assert_tzawareness_compat(fill_value)
fill_value = Timestamp(fill_value).value
else:
raise ValueError(
"'fill_value' should be a Timestamp. "
"Got '{got}'.".format(got=fill_value)
)
return fill_value
# -----------------------------------------------------------------
# Rendering Methods
def _format_native_types(self, na_rep="NaT", date_format=None, **kwargs):
from pandas.io.formats.format import _get_format_datetime64_from_values
fmt = _get_format_datetime64_from_values(self, date_format)
return tslib.format_array_from_datetime(
self.asi8, tz=self.tz, format=fmt, na_rep=na_rep
)
# -----------------------------------------------------------------
# Comparison Methods
_create_comparison_method = classmethod(_dt_array_cmp)
def _has_same_tz(self, other):
zzone = self._timezone
# vzone shouldn't be None if value is non-datetime like
if isinstance(other, np.datetime64):
# convert to Timestamp as np.datetime64 doesn't have tz attr
other = Timestamp(other)
vzone = timezones.get_timezone(getattr(other, "tzinfo", "__no_tz__"))
return zzone == vzone
def _assert_tzawareness_compat(self, other):
# adapted from _Timestamp._assert_tzawareness_compat
other_tz = getattr(other, "tzinfo", None)
if is_datetime64tz_dtype(other):
# Get tzinfo from Series dtype
other_tz = other.dtype.tz
if other is NaT:
# pd.NaT quacks both aware and naive
pass
elif self.tz is None:
if other_tz is not None:
raise TypeError(
"Cannot compare tz-naive and tz-aware " "datetime-like objects."
)
elif other_tz is None:
raise TypeError(
"Cannot compare tz-naive and tz-aware " "datetime-like objects"
)
# -----------------------------------------------------------------
# Arithmetic Methods
def _sub_datetime_arraylike(self, other):
"""subtract DatetimeArray/Index or ndarray[datetime64]"""
if len(self) != len(other):
raise ValueError("cannot add indices of unequal length")
if isinstance(other, np.ndarray):
assert is_datetime64_dtype(other)
other = type(self)(other)
if not self._has_same_tz(other):
# require tz compat
raise TypeError(
"{cls} subtraction must have the same "
"timezones or no timezones".format(cls=type(self).__name__)
)
self_i8 = self.asi8
other_i8 = other.asi8
arr_mask = self._isnan | other._isnan
new_values = checked_add_with_arr(self_i8, -other_i8, arr_mask=arr_mask)
if self._hasnans or other._hasnans:
new_values[arr_mask] = iNaT
return new_values.view("timedelta64[ns]")
def _add_offset(self, offset):
assert not isinstance(offset, Tick)
try:
if self.tz is not None:
values = self.tz_localize(None)
else:
values = self
result = offset.apply_index(values)
if self.tz is not None:
result = result.tz_localize(self.tz)
except NotImplementedError:
warnings.warn(
"Non-vectorized DateOffset being applied to Series " "or DatetimeIndex",
PerformanceWarning,
)
result = self.astype("O") + offset
return type(self)._from_sequence(result, freq="infer")
def _sub_datetimelike_scalar(self, other):
# subtract a datetime from myself, yielding a ndarray[timedelta64[ns]]
assert isinstance(other, (datetime, np.datetime64))
assert other is not NaT
other = Timestamp(other)
if other is NaT:
return self - NaT
if not self._has_same_tz(other):
# require tz compat
raise TypeError(
"Timestamp subtraction must have the same " "timezones or no timezones"
)
i8 = self.asi8
result = checked_add_with_arr(i8, -other.value, arr_mask=self._isnan)
result = self._maybe_mask_results(result)
return result.view("timedelta64[ns]")
def _add_delta(self, delta):
"""
Add a timedelta-like, Tick, or TimedeltaIndex-like object
to self, yielding a new DatetimeArray
Parameters
----------
        delta : {timedelta, np.timedelta64, Tick,
TimedeltaIndex, ndarray[timedelta64]}
Returns
-------
result : DatetimeArray
"""
new_values = super()._add_delta(delta)
return type(self)._from_sequence(new_values, tz=self.tz, freq="infer")
# -----------------------------------------------------------------
# Timezone Conversion and Localization Methods
def _local_timestamps(self):
"""
Convert to an i8 (unix-like nanosecond timestamp) representation
while keeping the local timezone and not using UTC.
This is used to calculate time-of-day information as if the timestamps
were timezone-naive.
"""
return tzconversion.tz_convert(self.asi8, utc, self.tz)
def tz_convert(self, tz):
"""
Convert tz-aware Datetime Array/Index from one time zone to another.
Parameters
----------
tz : str, pytz.timezone, dateutil.tz.tzfile or None
Time zone for time. Corresponding timestamps would be converted
to this time zone of the Datetime Array/Index. A `tz` of None will
convert to UTC and remove the timezone information.
Returns
-------
Array or Index
Raises
------
TypeError
If Datetime Array/Index is tz-naive.
See Also
--------
DatetimeIndex.tz : A timezone that has a variable offset from UTC.
DatetimeIndex.tz_localize : Localize tz-naive DatetimeIndex to a
given time zone, or remove timezone from a tz-aware DatetimeIndex.
Examples
--------
With the `tz` parameter, we can change the DatetimeIndex
to other time zones:
>>> dti = pd.date_range(start='2014-08-01 09:00',
... freq='H', periods=3, tz='Europe/Berlin')
>>> dti
DatetimeIndex(['2014-08-01 09:00:00+02:00',
'2014-08-01 10:00:00+02:00',
'2014-08-01 11:00:00+02:00'],
dtype='datetime64[ns, Europe/Berlin]', freq='H')
>>> dti.tz_convert('US/Central')
DatetimeIndex(['2014-08-01 02:00:00-05:00',
'2014-08-01 03:00:00-05:00',
'2014-08-01 04:00:00-05:00'],
dtype='datetime64[ns, US/Central]', freq='H')
With the ``tz=None``, we can remove the timezone (after converting
to UTC if necessary):
>>> dti = pd.date_range(start='2014-08-01 09:00', freq='H',
... periods=3, tz='Europe/Berlin')
>>> dti
DatetimeIndex(['2014-08-01 09:00:00+02:00',
'2014-08-01 10:00:00+02:00',
'2014-08-01 11:00:00+02:00'],
dtype='datetime64[ns, Europe/Berlin]', freq='H')
>>> dti.tz_convert(None)
DatetimeIndex(['2014-08-01 07:00:00',
'2014-08-01 08:00:00',
'2014-08-01 09:00:00'],
dtype='datetime64[ns]', freq='H')
"""
tz = timezones.maybe_get_tz(tz)
if self.tz is None:
# tz naive, use tz_localize
raise TypeError(
"Cannot convert tz-naive timestamps, use " "tz_localize to localize"
)
# No conversion since timestamps are all UTC to begin with
dtype = tz_to_dtype(tz)
return self._simple_new(self.asi8, dtype=dtype, freq=self.freq)
def tz_localize(self, tz, ambiguous="raise", nonexistent="raise", errors=None):
"""
Localize tz-naive Datetime Array/Index to tz-aware
Datetime Array/Index.
This method takes a time zone (tz) naive Datetime Array/Index object
and makes this time zone aware. It does not move the time to another
time zone.
Time zone localization helps to switch from time zone aware to time
zone unaware objects.
Parameters
----------
tz : str, pytz.timezone, dateutil.tz.tzfile or None
Time zone to convert timestamps to. Passing ``None`` will
remove the time zone information preserving local time.
ambiguous : 'infer', 'NaT', bool array, default 'raise'
When clocks moved backward due to DST, ambiguous times may arise.
For example in Central European Time (UTC+01), when going from
03:00 DST to 02:00 non-DST, 02:30:00 local time occurs both at
00:30:00 UTC and at 01:30:00 UTC. In such a situation, the
`ambiguous` parameter dictates how ambiguous times should be
handled.
- 'infer' will attempt to infer fall dst-transition hours based on
order
- bool-ndarray where True signifies a DST time, False signifies a
non-DST time (note that this flag is only applicable for
ambiguous times)
- 'NaT' will return NaT where there are ambiguous times
- 'raise' will raise an AmbiguousTimeError if there are ambiguous
times
        nonexistent : 'shift_forward', 'shift_backward', 'NaT', timedelta, \
default 'raise'
A nonexistent time does not exist in a particular timezone
where clocks moved forward due to DST.
- 'shift_forward' will shift the nonexistent time forward to the
closest existing time
- 'shift_backward' will shift the nonexistent time backward to the
closest existing time
- 'NaT' will return NaT where there are nonexistent times
- timedelta objects will shift nonexistent times by the timedelta
- 'raise' will raise an NonExistentTimeError if there are
nonexistent times
.. versionadded:: 0.24.0
errors : {'raise', 'coerce'}, default None
- 'raise' will raise a NonExistentTimeError if a timestamp is not
valid in the specified time zone (e.g. due to a transition from
or to DST time). Use ``nonexistent='raise'`` instead.
- 'coerce' will return NaT if the timestamp can not be converted
to the specified time zone. Use ``nonexistent='NaT'`` instead.
.. deprecated:: 0.24.0
Returns
-------
Same type as self
Array/Index converted to the specified time zone.
Raises
------
TypeError
If the Datetime Array/Index is tz-aware and tz is not None.
See Also
--------
DatetimeIndex.tz_convert : Convert tz-aware DatetimeIndex from
one time zone to another.
Examples
--------
>>> tz_naive = pd.date_range('2018-03-01 09:00', periods=3)
>>> tz_naive
DatetimeIndex(['2018-03-01 09:00:00', '2018-03-02 09:00:00',
'2018-03-03 09:00:00'],
dtype='datetime64[ns]', freq='D')
Localize DatetimeIndex in US/Eastern time zone:
>>> tz_aware = tz_naive.tz_localize(tz='US/Eastern')
>>> tz_aware
DatetimeIndex(['2018-03-01 09:00:00-05:00',
'2018-03-02 09:00:00-05:00',
'2018-03-03 09:00:00-05:00'],
dtype='datetime64[ns, US/Eastern]', freq='D')
With the ``tz=None``, we can remove the time zone information
while keeping the local time (not converted to UTC):
>>> tz_aware.tz_localize(None)
DatetimeIndex(['2018-03-01 09:00:00', '2018-03-02 09:00:00',
'2018-03-03 09:00:00'],
dtype='datetime64[ns]', freq='D')
Be careful with DST changes. When there is sequential data, pandas can
infer the DST time:
>>> s = pd.to_datetime(pd.Series(['2018-10-28 01:30:00',
... '2018-10-28 02:00:00',
... '2018-10-28 02:30:00',
... '2018-10-28 02:00:00',
... '2018-10-28 02:30:00',
... '2018-10-28 03:00:00',
... '2018-10-28 03:30:00']))
>>> s.dt.tz_localize('CET', ambiguous='infer')
0 2018-10-28 01:30:00+02:00
1 2018-10-28 02:00:00+02:00
2 2018-10-28 02:30:00+02:00
3 2018-10-28 02:00:00+01:00
4 2018-10-28 02:30:00+01:00
5 2018-10-28 03:00:00+01:00
6 2018-10-28 03:30:00+01:00
dtype: datetime64[ns, CET]
In some cases, inferring the DST is impossible. In such cases, you can
pass an ndarray to the ambiguous parameter to set the DST explicitly
>>> s = pd.to_datetime(pd.Series(['2018-10-28 01:20:00',
... '2018-10-28 02:36:00',
... '2018-10-28 03:46:00']))
>>> s.dt.tz_localize('CET', ambiguous=np.array([True, True, False]))
        0   2018-10-28 01:20:00+02:00
        1   2018-10-28 02:36:00+02:00
        2   2018-10-28 03:46:00+01:00
        dtype: datetime64[ns, CET]
If the DST transition causes nonexistent times, you can shift these
dates forward or backwards with a timedelta object or `'shift_forward'`
        or `'shift_backward'`.
>>> s = pd.to_datetime(pd.Series(['2015-03-29 02:30:00',
... '2015-03-29 03:30:00']))
>>> s.dt.tz_localize('Europe/Warsaw', nonexistent='shift_forward')
0 2015-03-29 03:00:00+02:00
1 2015-03-29 03:30:00+02:00
dtype: datetime64[ns, 'Europe/Warsaw']
>>> s.dt.tz_localize('Europe/Warsaw', nonexistent='shift_backward')
0 2015-03-29 01:59:59.999999999+01:00
1 2015-03-29 03:30:00+02:00
dtype: datetime64[ns, 'Europe/Warsaw']
>>> s.dt.tz_localize('Europe/Warsaw', nonexistent=pd.Timedelta('1H'))
0 2015-03-29 03:30:00+02:00
1 2015-03-29 03:30:00+02:00
dtype: datetime64[ns, 'Europe/Warsaw']
"""
if errors is not None:
warnings.warn(
"The errors argument is deprecated and will be "
"removed in a future release. Use "
"nonexistent='NaT' or nonexistent='raise' "
"instead.",
FutureWarning,
)
if errors == "coerce":
nonexistent = "NaT"
elif errors == "raise":
nonexistent = "raise"
else:
raise ValueError(
"The errors argument must be either 'coerce' " "or 'raise'."
)
nonexistent_options = ("raise", "NaT", "shift_forward", "shift_backward")
if nonexistent not in nonexistent_options and not isinstance(
nonexistent, timedelta
):
raise ValueError(
"The nonexistent argument must be one of 'raise',"
" 'NaT', 'shift_forward', 'shift_backward' or"
" a timedelta object"
)
if self.tz is not None:
if tz is None:
new_dates = tzconversion.tz_convert(self.asi8, timezones.UTC, self.tz)
else:
raise TypeError("Already tz-aware, use tz_convert to convert.")
else:
tz = timezones.maybe_get_tz(tz)
# Convert to UTC
new_dates = conversion.tz_localize_to_utc(
self.asi8, tz, ambiguous=ambiguous, nonexistent=nonexistent
)
new_dates = new_dates.view(_NS_DTYPE)
dtype = tz_to_dtype(tz)
return self._simple_new(new_dates, dtype=dtype, freq=self.freq)
# ----------------------------------------------------------------
# Conversion Methods - Vectorized analogues of Timestamp methods
def to_pydatetime(self):
"""
Return Datetime Array/Index as object ndarray of datetime.datetime
objects
Returns
-------
datetimes : ndarray
"""
return tslib.ints_to_pydatetime(self.asi8, tz=self.tz)
def normalize(self):
"""
Convert times to midnight.
The time component of the date-time is converted to midnight i.e.
00:00:00. This is useful in cases, when the time does not matter.
Length is unaltered. The timezones are unaffected.
This method is available on Series with datetime values under
the ``.dt`` accessor, and directly on Datetime Array/Index.
Returns
-------
DatetimeArray, DatetimeIndex or Series
The same type as the original data. Series will have the same
name and index. DatetimeIndex will have the same name.
See Also
--------
floor : Floor the datetimes to the specified freq.
ceil : Ceil the datetimes to the specified freq.
round : Round the datetimes to the specified freq.
Examples
--------
>>> idx = pd.date_range(start='2014-08-01 10:00', freq='H',
... periods=3, tz='Asia/Calcutta')
>>> idx
DatetimeIndex(['2014-08-01 10:00:00+05:30',
'2014-08-01 11:00:00+05:30',
'2014-08-01 12:00:00+05:30'],
dtype='datetime64[ns, Asia/Calcutta]', freq='H')
>>> idx.normalize()
DatetimeIndex(['2014-08-01 00:00:00+05:30',
'2014-08-01 00:00:00+05:30',
'2014-08-01 00:00:00+05:30'],
dtype='datetime64[ns, Asia/Calcutta]', freq=None)
"""
if self.tz is None or timezones.is_utc(self.tz):
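            # Fast path for tz-naive / UTC data: midnight is computed directly
            # on the i8 nanosecond values by stripping the remainder modulo
            # one day (DAY_NS nanoseconds).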
not_null = ~self.isna()
DAY_NS = ccalendar.DAY_SECONDS * 1000000000
new_values = self.asi8.copy()
adjustment = new_values[not_null] % DAY_NS
new_values[not_null] = new_values[not_null] - adjustment
else:
new_values = conversion.normalize_i8_timestamps(self.asi8, self.tz)
return type(self)._from_sequence(new_values, freq="infer").tz_localize(self.tz)
def to_period(self, freq=None):
"""
Cast to PeriodArray/Index at a particular frequency.
Converts DatetimeArray/Index to PeriodArray/Index.
Parameters
----------
freq : str or Offset, optional
One of pandas' :ref:`offset strings <timeseries.offset_aliases>`
or an Offset object. Will be inferred by default.
Returns
-------
PeriodArray/Index
Raises
------
ValueError
When converting a DatetimeArray/Index with non-regular values,
so that a frequency cannot be inferred.
See Also
--------
PeriodIndex: Immutable ndarray holding ordinal values.
DatetimeIndex.to_pydatetime: Return DatetimeIndex as object.
Examples
--------
>>> df = pd.DataFrame({"y": [1, 2, 3]},
... index=pd.to_datetime(["2000-03-31 00:00:00",
... "2000-05-31 00:00:00",
... "2000-08-31 00:00:00"]))
>>> df.index.to_period("M")
PeriodIndex(['2000-03', '2000-05', '2000-08'],
dtype='period[M]', freq='M')
Infer the daily frequency
>>> idx = pd.date_range("2017-01-01", periods=2)
>>> idx.to_period()
PeriodIndex(['2017-01-01', '2017-01-02'],
dtype='period[D]', freq='D')
"""
from pandas.core.arrays import PeriodArray
if self.tz is not None:
warnings.warn(
"Converting to PeriodArray/Index representation "
"will drop timezone information.",
UserWarning,
)
if freq is None:
freq = self.freqstr or self.inferred_freq
if freq is None:
raise ValueError(
"You must pass a freq argument as " "current index has none."
)
freq = get_period_alias(freq)
return PeriodArray._from_datetime64(self._data, freq, tz=self.tz)
def to_perioddelta(self, freq):
"""
Calculate TimedeltaArray of difference between index
values and index converted to PeriodArray at specified
freq. Used for vectorized offsets
Parameters
----------
freq : Period frequency
Returns
-------
TimedeltaArray/Index
"""
# TODO: consider privatizing (discussion in GH#23113)
from pandas.core.arrays.timedeltas import TimedeltaArray
i8delta = self.asi8 - self.to_period(freq).to_timestamp().asi8
m8delta = i8delta.view("m8[ns]")
return TimedeltaArray(m8delta)
# -----------------------------------------------------------------
# Properties - Vectorized Timestamp Properties/Methods
def month_name(self, locale=None):
"""
Return the month names of the DateTimeIndex with specified locale.
.. versionadded:: 0.23.0
Parameters
----------
locale : str, optional
Locale determining the language in which to return the month name.
Default is English locale.
Returns
-------
Index
Index of month names.
Examples
--------
>>> idx = pd.date_range(start='2018-01', freq='M', periods=3)
>>> idx
DatetimeIndex(['2018-01-31', '2018-02-28', '2018-03-31'],
dtype='datetime64[ns]', freq='M')
>>> idx.month_name()
Index(['January', 'February', 'March'], dtype='object')
"""
if self.tz is not None and not timezones.is_utc(self.tz):
values = self._local_timestamps()
else:
values = self.asi8
result = fields.get_date_name_field(values, "month_name", locale=locale)
result = self._maybe_mask_results(result, fill_value=None)
return result
def day_name(self, locale=None):
"""
Return the day names of the DateTimeIndex with specified locale.
.. versionadded:: 0.23.0
Parameters
----------
locale : str, optional
Locale determining the language in which to return the day name.
Default is English locale.
Returns
-------
Index
Index of day names.
Examples
--------
>>> idx = pd.date_range(start='2018-01-01', freq='D', periods=3)
>>> idx
DatetimeIndex(['2018-01-01', '2018-01-02', '2018-01-03'],
dtype='datetime64[ns]', freq='D')
>>> idx.day_name()
Index(['Monday', 'Tuesday', 'Wednesday'], dtype='object')
"""
if self.tz is not None and not timezones.is_utc(self.tz):
values = self._local_timestamps()
else:
values = self.asi8
result = fields.get_date_name_field(values, "day_name", locale=locale)
result = self._maybe_mask_results(result, fill_value=None)
return result
@property
def time(self):
"""
Returns numpy array of datetime.time. The time part of the Timestamps.
"""
# If the Timestamps have a timezone that is not UTC,
# convert them into their i8 representation while
# keeping their timezone and not using UTC
if self.tz is not None and not timezones.is_utc(self.tz):
timestamps = self._local_timestamps()
else:
timestamps = self.asi8
return tslib.ints_to_pydatetime(timestamps, box="time")
@property
def timetz(self):
"""
Returns numpy array of datetime.time also containing timezone
information. The time part of the Timestamps.
"""
return tslib.ints_to_pydatetime(self.asi8, self.tz, box="time")
@property
def date(self):
"""
Returns numpy array of python datetime.date objects (namely, the date
part of Timestamps without timezone information).
"""
# If the Timestamps have a timezone that is not UTC,
# convert them into their i8 representation while
# keeping their timezone and not using UTC
if self.tz is not None and not timezones.is_utc(self.tz):
timestamps = self._local_timestamps()
else:
timestamps = self.asi8
return tslib.ints_to_pydatetime(timestamps, box="date")
year = _field_accessor("year", "Y", "The year of the datetime.")
month = _field_accessor("month", "M", "The month as January=1, December=12. ")
day = _field_accessor("day", "D", "The days of the datetime.")
hour = _field_accessor("hour", "h", "The hours of the datetime.")
minute = _field_accessor("minute", "m", "The minutes of the datetime.")
second = _field_accessor("second", "s", "The seconds of the datetime.")
microsecond = _field_accessor(
"microsecond", "us", "The microseconds of the datetime."
)
nanosecond = _field_accessor("nanosecond", "ns", "The nanoseconds of the datetime.")
weekofyear = _field_accessor("weekofyear", "woy", "The week ordinal of the year.")
week = weekofyear
_dayofweek_doc = """
The day of the week with Monday=0, Sunday=6.
Return the day of the week. It is assumed the week starts on
Monday, which is denoted by 0 and ends on Sunday which is denoted
by 6. This method is available on both Series with datetime
values (using the `dt` accessor) or DatetimeIndex.
Returns
-------
Series or Index
Containing integers indicating the day number.
See Also
--------
Series.dt.dayofweek : Alias.
Series.dt.weekday : Alias.
Series.dt.day_name : Returns the name of the day of the week.
Examples
--------
>>> s = pd.date_range('2016-12-31', '2017-01-08', freq='D').to_series()
>>> s.dt.dayofweek
2016-12-31 5
2017-01-01 6
2017-01-02 0
2017-01-03 1
2017-01-04 2
2017-01-05 3
2017-01-06 4
2017-01-07 5
2017-01-08 6
Freq: D, dtype: int64
"""
dayofweek = _field_accessor("dayofweek", "dow", _dayofweek_doc)
weekday = dayofweek
weekday_name = _field_accessor(
"weekday_name",
"weekday_name",
"The name of day in a week (ex: Friday)\n\n.. deprecated:: 0.23.0",
)
dayofyear = _field_accessor("dayofyear", "doy", "The ordinal day of the year.")
quarter = _field_accessor("quarter", "q", "The quarter of the date.")
days_in_month = _field_accessor(
"days_in_month", "dim", "The number of days in the month."
)
daysinmonth = days_in_month
_is_month_doc = """
Indicates whether the date is the {first_or_last} day of the month.
Returns
-------
Series or array
For Series, returns a Series with boolean values.
For DatetimeIndex, returns a boolean array.
See Also
--------
is_month_start : Return a boolean indicating whether the date
is the first day of the month.
is_month_end : Return a boolean indicating whether the date
is the last day of the month.
Examples
--------
This method is available on Series with datetime values under
the ``.dt`` accessor, and directly on DatetimeIndex.
>>> s = pd.Series(pd.date_range("2018-02-27", periods=3))
>>> s
0 2018-02-27
1 2018-02-28
2 2018-03-01
dtype: datetime64[ns]
>>> s.dt.is_month_start
0 False
1 False
2 True
dtype: bool
>>> s.dt.is_month_end
0 False
1 True
2 False
dtype: bool
>>> idx = pd.date_range("2018-02-27", periods=3)
>>> idx.is_month_start
array([False, False, True])
>>> idx.is_month_end
array([False, True, False])
"""
is_month_start = _field_accessor(
"is_month_start", "is_month_start", _is_month_doc.format(first_or_last="first")
)
is_month_end = _field_accessor(
"is_month_end", "is_month_end", _is_month_doc.format(first_or_last="last")
)
is_quarter_start = _field_accessor(
"is_quarter_start",
"is_quarter_start",
"""
Indicator for whether the date is the first day of a quarter.
Returns
-------
is_quarter_start : Series or DatetimeIndex
The same type as the original data with boolean values. Series will
have the same name and index. DatetimeIndex will have the same
name.
See Also
--------
quarter : Return the quarter of the date.
is_quarter_end : Similar property for indicating the quarter start.
Examples
--------
This method is available on Series with datetime values under
the ``.dt`` accessor, and directly on DatetimeIndex.
>>> df = pd.DataFrame({'dates': pd.date_range("2017-03-30",
... periods=4)})
>>> df.assign(quarter=df.dates.dt.quarter,
... is_quarter_start=df.dates.dt.is_quarter_start)
dates quarter is_quarter_start
0 2017-03-30 1 False
1 2017-03-31 1 False
2 2017-04-01 2 True
3 2017-04-02 2 False
>>> idx = pd.date_range('2017-03-30', periods=4)
>>> idx
DatetimeIndex(['2017-03-30', '2017-03-31', '2017-04-01', '2017-04-02'],
dtype='datetime64[ns]', freq='D')
>>> idx.is_quarter_start
array([False, False, True, False])
""",
)
is_quarter_end = _field_accessor(
"is_quarter_end",
"is_quarter_end",
"""
Indicator for whether the date is the last day of a quarter.
Returns
-------
is_quarter_end : Series or DatetimeIndex
The same type as the original data with boolean values. Series will
have the same name and index. DatetimeIndex will have the same
name.
See Also
--------
quarter : Return the quarter of the date.
is_quarter_start : Similar property indicating the quarter start.
Examples
--------
This method is available on Series with datetime values under
the ``.dt`` accessor, and directly on DatetimeIndex.
>>> df = pd.DataFrame({'dates': pd.date_range("2017-03-30",
... periods=4)})
>>> df.assign(quarter=df.dates.dt.quarter,
... is_quarter_end=df.dates.dt.is_quarter_end)
dates quarter is_quarter_end
0 2017-03-30 1 False
1 2017-03-31 1 True
2 2017-04-01 2 False
3 2017-04-02 2 False
>>> idx = pd.date_range('2017-03-30', periods=4)
>>> idx
DatetimeIndex(['2017-03-30', '2017-03-31', '2017-04-01', '2017-04-02'],
dtype='datetime64[ns]', freq='D')
>>> idx.is_quarter_end
array([False, True, False, False])
""",
)
is_year_start = _field_accessor(
"is_year_start",
"is_year_start",
"""
Indicate whether the date is the first day of a year.
Returns
-------
Series or DatetimeIndex
The same type as the original data with boolean values. Series will
have the same name and index. DatetimeIndex will have the same
name.
See Also
--------
is_year_end : Similar property indicating the last day of the year.
Examples
--------
This method is available on Series with datetime values under
the ``.dt`` accessor, and directly on DatetimeIndex.
>>> dates = pd.Series(pd.date_range("2017-12-30", periods=3))
>>> dates
0 2017-12-30
1 2017-12-31
2 2018-01-01
dtype: datetime64[ns]
>>> dates.dt.is_year_start
0 False
1 False
2 True
dtype: bool
>>> idx = pd.date_range("2017-12-30", periods=3)
>>> idx
DatetimeIndex(['2017-12-30', '2017-12-31', '2018-01-01'],
dtype='datetime64[ns]', freq='D')
>>> idx.is_year_start
array([False, False, True])
""",
)
is_year_end = _field_accessor(
"is_year_end",
"is_year_end",
"""
Indicate whether the date is the last day of the year.
Returns
-------
Series or DatetimeIndex
The same type as the original data with boolean values. Series will
have the same name and index. DatetimeIndex will have the same
name.
See Also
--------
is_year_start : Similar property indicating the start of the year.
Examples
--------
This method is available on Series with datetime values under
the ``.dt`` accessor, and directly on DatetimeIndex.
>>> dates = pd.Series(pd.date_range("2017-12-30", periods=3))
>>> dates
0 2017-12-30
1 2017-12-31
2 2018-01-01
dtype: datetime64[ns]
>>> dates.dt.is_year_end
0 False
1 True
2 False
dtype: bool
>>> idx = pd.date_range("2017-12-30", periods=3)
>>> idx
DatetimeIndex(['2017-12-30', '2017-12-31', '2018-01-01'],
dtype='datetime64[ns]', freq='D')
>>> idx.is_year_end
array([False, True, False])
""",
)
is_leap_year = _field_accessor(
"is_leap_year",
"is_leap_year",
"""
Boolean indicator if the date belongs to a leap year.
A leap year is a year, which has 366 days (instead of 365) including
29th of February as an intercalary day.
Leap years are years which are multiples of four with the exception
of years divisible by 100 but not by 400.
Returns
-------
Series or ndarray
Booleans indicating if dates belong to a leap year.
Examples
--------
This method is available on Series with datetime values under
the ``.dt`` accessor, and directly on DatetimeIndex.
>>> idx = pd.date_range("2012-01-01", "2015-01-01", freq="Y")
>>> idx
DatetimeIndex(['2012-12-31', '2013-12-31', '2014-12-31'],
dtype='datetime64[ns]', freq='A-DEC')
>>> idx.is_leap_year
array([ True, False, False], dtype=bool)
        >>> dates_series = pd.Series(idx)
>>> dates_series
0 2012-12-31
1 2013-12-31
2 2014-12-31
dtype: datetime64[ns]
>>> dates_series.dt.is_leap_year
0 True
1 False
2 False
dtype: bool
""",
)
def to_julian_date(self):
"""
Convert Datetime Array to float64 ndarray of Julian Dates.
0 Julian date is noon January 1, 4713 BC.
http://en.wikipedia.org/wiki/Julian_day
"""
# http://mysite.verizon.net/aesir_research/date/jdalg2.htm
year = np.asarray(self.year)
month = np.asarray(self.month)
day = np.asarray(self.day)
testarr = month < 3
year[testarr] -= 1
month[testarr] += 12
return (
day
+ np.fix((153 * month - 457) / 5)
+ 365 * year
+ np.floor(year / 4)
- np.floor(year / 100)
+ np.floor(year / 400)
+ 1721118.5
+ (
self.hour
+ self.minute / 60.0
+ self.second / 3600.0
+ self.microsecond / 3600.0 / 1e6
+ self.nanosecond / 3600.0 / 1e9
)
/ 24.0
)
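    # Sanity check for the formula above (hypothetical usage): the J2000.0
    # epoch, 2000-01-01 12:00, corresponds to Julian Date 2451545.0, i.e.
    #   DatetimeArray._from_sequence(["2000-01-01 12:00"]).to_julian_date()
    # evaluates to array([2451545.]).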
DatetimeArray._add_comparison_ops()
# -------------------------------------------------------------------
# Constructor Helpers
def sequence_to_dt64ns(
data,
dtype=None,
copy=False,
tz=None,
dayfirst=False,
yearfirst=False,
ambiguous="raise",
int_as_wall_time=False,
):
"""
Parameters
----------
data : list-like
dtype : dtype, str, or None, default None
copy : bool, default False
tz : tzinfo, str, or None, default None
dayfirst : bool, default False
yearfirst : bool, default False
ambiguous : str, bool, or arraylike, default 'raise'
See pandas._libs.tslibs.conversion.tz_localize_to_utc
int_as_wall_time : bool, default False
Whether to treat ints as wall time in specified timezone, or as
nanosecond-precision UNIX epoch (wall time in UTC).
This is used in DatetimeIndex.__init__ to deprecate the wall-time
behaviour.
        .. versionadded:: 0.24.0
Returns
-------
result : numpy.ndarray
The sequence converted to a numpy array with dtype ``datetime64[ns]``.
tz : tzinfo or None
Either the user-provided tzinfo or one inferred from the data.
inferred_freq : Tick or None
The inferred frequency of the sequence.
Raises
------
    TypeError : PeriodDtype data is passed
"""
inferred_freq = None
dtype = _validate_dt64_dtype(dtype)
if not hasattr(data, "dtype"):
# e.g. list, tuple
if np.ndim(data) == 0:
# i.e. generator
data = list(data)
data = np.asarray(data)
copy = False
elif isinstance(data, ABCSeries):
data = data._values
if isinstance(data, ABCPandasArray):
data = data.to_numpy()
if hasattr(data, "freq"):
# i.e. DatetimeArray/Index
inferred_freq = data.freq
# if dtype has an embedded tz, capture it
tz = validate_tz_from_dtype(dtype, tz)
if isinstance(data, ABCIndexClass):
data = data._data
# By this point we are assured to have either a numpy array or Index
data, copy = maybe_convert_dtype(data, copy)
if is_object_dtype(data) or is_string_dtype(data):
# TODO: We do not have tests specific to string-dtypes,
# also complex or categorical or other extension
copy = False
if lib.infer_dtype(data, skipna=False) == "integer":
data = data.astype(np.int64)
else:
# data comes back here as either i8 to denote UTC timestamps
# or M8[ns] to denote wall times
data, inferred_tz = objects_to_datetime64ns(
data, dayfirst=dayfirst, yearfirst=yearfirst
)
tz = maybe_infer_tz(tz, inferred_tz)
# When a sequence of timestamp objects is passed, we always
# want to treat the (now i8-valued) data as UTC timestamps,
# not wall times.
int_as_wall_time = False
# `data` may have originally been a Categorical[datetime64[ns, tz]],
# so we need to handle these types.
if is_datetime64tz_dtype(data):
# DatetimeArray -> ndarray
tz = maybe_infer_tz(tz, data.tz)
result = data._data
elif is_datetime64_dtype(data):
# tz-naive DatetimeArray or ndarray[datetime64]
data = getattr(data, "_data", data)
if data.dtype != _NS_DTYPE:
data = conversion.ensure_datetime64ns(data)
if tz is not None:
# Convert tz-naive to UTC
tz = timezones.maybe_get_tz(tz)
data = conversion.tz_localize_to_utc(
data.view("i8"), tz, ambiguous=ambiguous
)
data = data.view(_NS_DTYPE)
assert data.dtype == _NS_DTYPE, data.dtype
result = data
else:
# must be integer dtype otherwise
# assume this data are epoch timestamps
if tz:
tz = timezones.maybe_get_tz(tz)
if data.dtype != _INT64_DTYPE:
data = data.astype(np.int64, copy=False)
if int_as_wall_time and tz is not None and not timezones.is_utc(tz):
warnings.warn(_i8_message, FutureWarning, stacklevel=4)
data = conversion.tz_localize_to_utc(
data.view("i8"), tz, ambiguous=ambiguous
)
data = data.view(_NS_DTYPE)
result = data.view(_NS_DTYPE)
if copy:
# TODO: should this be deepcopy?
result = result.copy()
assert isinstance(result, np.ndarray), type(result)
assert result.dtype == "M8[ns]", result.dtype
# We have to call this again after possibly inferring a tz above
validate_tz_from_dtype(dtype, tz)
return result, tz, inferred_freq
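# Example (sketch): ISO strings are parsed to wall times and then localized,
# so
#   arr, tz, freq = sequence_to_dt64ns(["2019-01-01", "2019-01-02"], tz="UTC")
# returns an M8[ns] ndarray of the two UTC midnights, tz=UTC, and
# inferred_freq=None (frequency inference is left to the caller).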
def objects_to_datetime64ns(
data,
dayfirst,
yearfirst,
utc=False,
errors="raise",
require_iso8601=False,
allow_object=False,
):
"""
Convert data to array of timestamps.
Parameters
----------
data : np.ndarray[object]
dayfirst : bool
yearfirst : bool
utc : bool, default False
Whether to convert timezone-aware timestamps to UTC
errors : {'raise', 'ignore', 'coerce'}
allow_object : bool
Whether to return an object-dtype ndarray instead of raising if the
data contains more than one timezone.
Returns
-------
result : ndarray
np.int64 dtype if returned values represent UTC timestamps
np.datetime64[ns] if returned values represent wall times
object if mixed timezones
inferred_tz : tzinfo or None
Raises
------
ValueError : if data cannot be converted to datetimes
"""
assert errors in ["raise", "ignore", "coerce"]
# if str-dtype, convert
data = np.array(data, copy=False, dtype=np.object_)
try:
result, tz_parsed = tslib.array_to_datetime(
data,
errors=errors,
utc=utc,
dayfirst=dayfirst,
yearfirst=yearfirst,
require_iso8601=require_iso8601,
)
except ValueError as e:
try:
values, tz_parsed = conversion.datetime_to_datetime64(data)
# If tzaware, these values represent unix timestamps, so we
# return them as i8 to distinguish from wall times
return values.view("i8"), tz_parsed
except (ValueError, TypeError):
raise e
if tz_parsed is not None:
# We can take a shortcut since the datetime64 numpy array
# is in UTC
# Return i8 values to denote unix timestamps
return result.view("i8"), tz_parsed
elif is_datetime64_dtype(result):
# returning M8[ns] denotes wall-times; since tz is None
# the distinction is a thin one
return result, tz_parsed
elif is_object_dtype(result):
# GH#23675 when called via `pd.to_datetime`, returning an object-dtype
# array is allowed. When called via `pd.DatetimeIndex`, we can
# only accept datetime64 dtype, so raise TypeError if object-dtype
# is returned, as that indicates the values can be recognized as
# datetimes but they have conflicting timezones/awareness
if allow_object:
return result, tz_parsed
raise TypeError(result)
else: # pragma: no cover
# GH#23675 this TypeError should never be hit, whereas the TypeError
# in the object-dtype branch above is reachable.
raise TypeError(result)
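# Sketch of the return conventions above (inputs assumed for illustration):
#   objects_to_datetime64ns(np.array(["2019-01-01"], dtype=object), False, False)
#       -> (M8[ns] wall times, None)
#   objects_to_datetime64ns(np.array([Timestamp("2019-01-01", tz="UTC")], dtype=object), False, False)
#       -> (i8 UTC timestamps, tzinfo)
#   mixed-timezone values with allow_object=True -> (object-dtype ndarray, parsed tz or None)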
def maybe_convert_dtype(data, copy):
"""
Convert data based on dtype conventions, issuing deprecation warnings
or errors where appropriate.
Parameters
----------
data : np.ndarray or pd.Index
copy : bool
Returns
-------
data : np.ndarray or pd.Index
copy : bool
Raises
------
TypeError : PeriodDType data is passed
"""
if is_float_dtype(data):
# Note: we must cast to datetime64[ns] here in order to treat these
# as wall-times instead of UTC timestamps.
data = data.astype(_NS_DTYPE)
copy = False
# TODO: deprecate this behavior to instead treat symmetrically
# with integer dtypes. See discussion in GH#23675
elif is_timedelta64_dtype(data):
warnings.warn(
"Passing timedelta64-dtype data is deprecated, will "
"raise a TypeError in a future version",
FutureWarning,
stacklevel=5,
)
data = data.view(_NS_DTYPE)
elif is_period_dtype(data):
# Note: without explicitly raising here, PeriodIndex
# test_setops.test_join_does_not_recur fails
raise TypeError(
"Passing PeriodDtype data is invalid. " "Use `data.to_timestamp()` instead"
)
elif is_categorical_dtype(data):
# GH#18664 preserve tz in going DTI->Categorical->DTI
# TODO: cases where we need to do another pass through this func,
# e.g. the categories are timedelta64s
data = data.categories.take(data.codes, fill_value=NaT)._values
copy = False
elif is_extension_type(data) and not is_datetime64tz_dtype(data):
# Includes categorical
# TODO: We have no tests for these
data = np.array(data, dtype=np.object_)
copy = False
return data, copy
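# Dtype conventions handled above, restated as a sketch (assumed inputs):
#   float64     -> cast to M8[ns] and treated as wall times
#   timedelta64 -> viewed as M8[ns], with a FutureWarning
#   Period      -> TypeError (callers should use data.to_timestamp())
#   Categorical -> unboxed into its categories' values (may need a second pass)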
# -------------------------------------------------------------------
# Validation and Inference
def maybe_infer_tz(tz, inferred_tz):
"""
If a timezone is inferred from data, check that it is compatible with
the user-provided timezone, if any.
Parameters
----------
tz : tzinfo or None
inferred_tz : tzinfo or None
Returns
-------
tz : tzinfo or None
Raises
------
TypeError : if both timezones are present but do not match
"""
if tz is None:
tz = inferred_tz
elif inferred_tz is None:
pass
elif not timezones.tz_compare(tz, inferred_tz):
raise TypeError(
"data is already tz-aware {inferred_tz}, unable to "
"set specified tz: {tz}".format(inferred_tz=inferred_tz, tz=tz)
)
return tz
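# Behaviour sketch (hypothetical tzinfo objects utc/tokyo):
#   maybe_infer_tz(None, utc)  -> utc        # inferred wins when tz is None
#   maybe_infer_tz(utc, None)  -> utc        # explicit tz is kept
#   maybe_infer_tz(utc, tokyo) -> TypeError  # conflicting timezones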
def _validate_dt64_dtype(dtype):
"""
Check that a dtype, if passed, represents either a numpy datetime64[ns]
dtype or a pandas DatetimeTZDtype.
Parameters
----------
dtype : object
Returns
-------
dtype : None, numpy.dtype, or DatetimeTZDtype
Raises
------
ValueError : invalid dtype
Notes
-----
Unlike validate_tz_from_dtype, this does _not_ allow non-existent
tz errors to go through
"""
if dtype is not None:
dtype = pandas_dtype(dtype)
if is_dtype_equal(dtype, np.dtype("M8")):
# no precision, warn
dtype = _NS_DTYPE
msg = textwrap.dedent(
"""\
Passing in 'datetime64' dtype with no precision is deprecated
and will raise in a future version. Please pass in
'datetime64[ns]' instead."""
)
warnings.warn(msg, FutureWarning, stacklevel=5)
if (isinstance(dtype, np.dtype) and dtype != _NS_DTYPE) or not isinstance(
dtype, (np.dtype, DatetimeTZDtype)
):
raise ValueError(
"Unexpected value for 'dtype': '{dtype}'. "
"Must be 'datetime64[ns]' or DatetimeTZDtype'.".format(dtype=dtype)
)
return dtype
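# Sketch (assumed inputs): _validate_dt64_dtype("datetime64[ns]") returns the
# numpy dtype unchanged; _validate_dt64_dtype("M8") warns about the missing
# precision and upcasts to "M8[ns]"; _validate_dt64_dtype("int64") raises
# ValueError.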
def validate_tz_from_dtype(dtype, tz):
"""
If the given dtype is a DatetimeTZDtype, extract the implied
tzinfo object from it and check that it does not conflict with the given
tz.
Parameters
----------
dtype : dtype, str
tz : None, tzinfo
Returns
-------
tz : consensus tzinfo
Raises
------
ValueError : on tzinfo mismatch
"""
if dtype is not None:
if isinstance(dtype, str):
try:
dtype = DatetimeTZDtype.construct_from_string(dtype)
except TypeError:
# Things like `datetime64[ns]`, which is OK for the
# constructors, but also nonsense, which should be validated
# but not by us. We *do* allow non-existent tz errors to
# go through
pass
dtz = getattr(dtype, "tz", None)
if dtz is not None:
if tz is not None and not timezones.tz_compare(tz, dtz):
raise ValueError("cannot supply both a tz and a dtype" " with a tz")
tz = dtz
if tz is not None and is_datetime64_dtype(dtype):
# We also need to check for the case where the user passed a
# tz-naive dtype (i.e. datetime64[ns])
if tz is not None and not timezones.tz_compare(tz, dtz):
raise ValueError(
"cannot supply both a tz and a "
"timezone-naive dtype (i.e. datetime64[ns])"
)
return tz
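# Sketch (assumed inputs, tokyo being some non-UTC tzinfo):
#   validate_tz_from_dtype(DatetimeTZDtype(tz="UTC"), None)  -> UTC
#   validate_tz_from_dtype(DatetimeTZDtype(tz="UTC"), tokyo) -> ValueError
#   validate_tz_from_dtype(np.dtype("M8[ns]"), tokyo)        -> ValueError (naive dtype plus tz)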
def _infer_tz_from_endpoints(start, end, tz):
"""
If a timezone is not explicitly given via `tz`, see if one can
be inferred from the `start` and `end` endpoints. If more than one
of these inputs provides a timezone, require that they all agree.
Parameters
----------
start : Timestamp
end : Timestamp
tz : tzinfo or None
Returns
-------
tz : tzinfo or None
Raises
------
TypeError : if start and end timezones do not agree
"""
try:
inferred_tz = timezones.infer_tzinfo(start, end)
except Exception:
raise TypeError(
"Start and end cannot both be tz-aware with " "different timezones"
)
inferred_tz = timezones.maybe_get_tz(inferred_tz)
tz = timezones.maybe_get_tz(tz)
if tz is not None and inferred_tz is not None:
if not timezones.tz_compare(inferred_tz, tz):
raise AssertionError("Inferred time zone not equal to passed " "time zone")
elif inferred_tz is not None:
tz = inferred_tz
return tz
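# Sketch (assumed Timestamp endpoints):
#   both naive, tz=None              -> None
#   both tz-aware with the same tz   -> that tz
#   tz-aware with different tzs      -> TypeError
#   explicit tz != inferred tz       -> AssertionError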
def _maybe_normalize_endpoints(start, end, normalize):
_normalized = True
if start is not None:
if normalize:
start = normalize_date(start)
_normalized = True
else:
_normalized = _normalized and start.time() == _midnight
if end is not None:
if normalize:
end = normalize_date(end)
_normalized = True
else:
_normalized = _normalized and end.time() == _midnight
return start, end, _normalized
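# Sketch: with normalize=True both endpoints are snapped to midnight and
# _normalized is True; with normalize=False, _normalized stays True only when
# every provided endpoint already falls exactly on midnight.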
def _maybe_localize_point(ts, is_none, is_not_none, freq, tz, ambiguous, nonexistent):
"""
Localize a start or end Timestamp to the timezone of the corresponding
start or end Timestamp
Parameters
----------
ts : start or end Timestamp to potentially localize
is_none : argument that should be None
is_not_none : argument that should not be None
freq : Tick, DateOffset, or None
tz : str, timezone object or None
ambiguous: str, localization behavior for ambiguous times
nonexistent: str, localization behavior for nonexistent times
Returns
-------
ts : Timestamp
"""
# Make sure start and end are timezone localized if:
# 1) freq = a Timedelta-like frequency (Tick)
# 2) freq = None i.e. generating a linspaced range
if is_none is None and is_not_none is not None:
# Note: We can't ambiguous='infer' a singular ambiguous time; however,
# we have historically defaulted ambiguous=False
ambiguous = ambiguous if ambiguous != "infer" else False
localize_args = {"ambiguous": ambiguous, "nonexistent": nonexistent, "tz": None}
if isinstance(freq, Tick) or freq is None:
localize_args["tz"] = tz
ts = ts.tz_localize(**localize_args)
return ts
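# Sketch (assumed call pattern, mirroring pandas' date_range internals): a
# naive `start` alongside a tz would be localized via something like
#   start = _maybe_localize_point(start, getattr(start, "tz", None), start,
#                                 freq, tz, ambiguous, nonexistent)
# i.e. localization happens only when `is_none` is None and `is_not_none` is set.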
| [
"[email protected]"
] | |
e963b984443ec4d68c597960486998c74e5281de | be6e135014a7553b8f13b99435369e3d53b58585 | /course_python/Python/student_oop.py | 43d9d213d2b47e37a632402e8d7e6afe58f5f69c | [] | no_license | ankitsoni5/python | a5555a6371e12b170703b8c16a4e8aab5988a373 | a5fcf618a476cb1745095f038b9118ce724c0b7e | refs/heads/master | 2020-09-10T23:35:45.001477 | 2019-11-23T13:41:37 | 2019-11-23T13:41:37 | 221,866,000 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,290 | py | from com.ankit.collage.student import Student
#s1 = Student() #student object
#internally
# 1. get address - 4001 Student Object
# 2. Student : __init__(4001)
print(Student.count) # prints how many Student objects have been created so far
s1 = Student('Abhishek', 'Male', 1, 90)
#internally
# 1. get address - 4001 Student Object
# 2. Student : __init__(4001,'Abhishek', 'Male', 1, 90)
# create an attribute in an object
'''s1.name = 'Ankit'
s1.gender = 'M'
s1.roll = 21
s1.marks = 80
s2 = Student() # 2nd student object
s2.name = 'Soni'
s2.gender = 'M'
s2.roll = 22
s2.marks = 54
'''
print(Student.count)
s2 = Student('Soni','M',22,54) # 2nd student object
print(s1.getdetails())
print(s1.get_grades())
print(s1.get_name_and_roll()) #using tuple as a return data.
tu = s1.get_name_and_roll()
name, roll = tu[0], tu[1] # getting separate values from the returned tuple
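# A more idiomatic unpacking sketch, equivalent to the two lines above:
# name, roll = s1.get_name_and_roll()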
# internally
#print(Student.getdetails(s1))
print(s2.getdetails())
print(s2.get_grades())
print(Student.count)
s3 = Student() # for this to run, give __init__ default argument values so a call with no arguments still works
print(s3.getdetails())
print(s3.get_grades())
print(Student.count)
print(Student.get_min_attendence())
| [
"[email protected]"
] | |
3f9a0b1d182a5ddd38813da6721ae1a290403895 | b4bc5fb10b0d498cb0d3e5ee2ce3473b10b553e5 | /fast_transformers/recurrent/attention/self_attention/adamax_attention.py | 1cac02af4c80484f39d1bc654fd0d6ccdea11efe | [] | no_license | minhtannguyen/momentum-transformer-code-submission | 2f0005028ab7e32957612f642330acd802bded8e | 68b11ce5564a8212cd91cb2093b457a00d511046 | refs/heads/master | 2023-05-31T19:20:57.380490 | 2021-06-04T15:08:26 | 2021-06-04T15:08:26 | 373,784,396 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,429 | py | """Implement the causally masked linear attention as a recurrent model."""
import torch
from torch.nn import Module
from ....attention_registry import RecurrentAttentionRegistry, Optional, Float, Int, \
Callable, EventDispatcherInstance
from ....events import EventDispatcher
from ....feature_maps import elu_feature_map
from ..._utils import check_state
class RecurrentAdamaxAttention(Module):
"""Implement fast_transformers.attention.causal_linear_attention as a
fixed-dimensional state recurrent model.
See fast_transformers.attention.linear_attention and
fast_transformers.attention.causal_linear_attention for the general concept
of replacing the softmax with feature maps.
Arguments
---------
feature_map: callable, a callable that applies the feature map to the
last dimension of a tensor (default: elu(x)+1)
eps: float, a small number to ensure the numerical stability of the
denominator (default: 1e-6)
event_dispatcher: str or EventDispatcher instance to be used by this
module for dispatching events (default: the default
global dispatcher)
"""
def __init__(self, query_dimensions, mu, stepsize, beta, feature_map=None, eps=1e-6,
event_dispatcher=""):
super(RecurrentAdamaxAttention, self).__init__()
self.feature_map = (
feature_map(query_dimensions) if feature_map else
elu_feature_map(query_dimensions)
)
self.eps = eps
self.event_dispatcher = EventDispatcher.get(event_dispatcher)
        # hyperparameters for the Adamax-style update (momentum, step size, decay)
self.mu = mu
self.stepsize = stepsize
self.beta = beta
def forward(self, query, key, value, state=None, memory=None):
# Normalize state/memory
state = check_state(state, memory)
# If this is a new sequence reinitialize the feature map
if state is None:
self.feature_map.new_feature_map()
# Apply the feature map to the query and key
Q = self.feature_map.forward_queries(query)
K = self.feature_map.forward_keys(key)
# Extract some shapes
N, H, D = Q.shape
_, _, M = value.shape
# Extract the memory or initialize it
if state is None:
Si = query.new_zeros((N, H, D, M))
Zi = query.new_zeros((N, H, D))
Pi = query.new_zeros((N, H, D, M))
Mi = query.new_zeros((N, H, D, M))
else:
Si, Zi, Pi, Mi, _ = state
# Ensure the batch size did not change
if len(Si) != N:
raise ValueError("The batch size changed during iteration")
# Update the internal state
#
# NOTE: The if clause is added due to GitHub PR #10. Simply using the
# following two lines does not perform the operation in place which
# means it is slower for inference.
if K.grad_fn is not None or value.grad_fn is not None:
Zi = Zi + K
Ui = torch.einsum("nhd,nhm->nhdm", K, value)
Pi = self.mu * Pi - self.stepsize * Ui
Mi = torch.max(self.beta * Mi, torch.abs(Ui))
Si = Si - Pi/torch.sqrt(Mi + 1e-16)
else:
Zi += K
Ui = torch.einsum("nhd,nhm->nhdm", K, value)
Pi *= self.mu
Pi -= self.stepsize * Ui
Mi = torch.max(self.beta * Mi, torch.abs(Ui))
Si -= Pi/torch.sqrt(Mi + 1e-16)
# Compute the output
Z = 1. / (torch.einsum("nhd,nhd->nh", Q, Zi) + self.eps)
V = torch.einsum("nhd,nhdm,nh->nhm", Q, Si, Z)
return V, [Si, Zi, Pi, Mi, Ui]
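# Recurrence sketch, restating forward() above for one step i (notation only):
#   Z_i = Z_{i-1} + phi(k_i)                      # normalizer state
#   U_i = phi(k_i) v_i^T                          # rank-1 update term
#   P_i = mu * P_{i-1} - stepsize * U_i           # momentum buffer
#   M_i = max(beta * M_{i-1}, |U_i|)              # Adamax infinity-norm buffer
#   S_i = S_{i-1} - P_i / sqrt(M_i + 1e-16)       # attention memory
#   out_i = (phi(q_i)^T S_i) / (phi(q_i)^T Z_i + eps)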
# Register the attention implementation so that it becomes available in our
# builders
# RecurrentAttentionRegistry.register(
# "momentum-linear", RecurrentMomentumAttention,
# [
# ("query_dimensions", Int),
# ("feature_map", Optional(Callable)),
# ("event_dispatcher", Optional(EventDispatcherInstance, ""))
# ]
# )
RecurrentAttentionRegistry.register(
"adamax-linear", RecurrentAdamaxAttention,
[
("query_dimensions", Int),
("mu", Float),
("stepsize", Float),
("beta", Float),
("feature_map", Optional(Callable)),
("event_dispatcher", Optional(EventDispatcherInstance, ""))
]
)
| [
"[email protected]"
] | |
c3d6c4f46612b7f7c5ec6f9758883ee9cf8a0f4a | d051f3fe9fda31b72fa0ddce67aa1f4293c7c37c | /infer/local_gibbs_move.py | e8d91c772ccb38d12b759ff202759798f9ddefa4 | [
"BSD-3-Clause"
] | permissive | davmre/sigvisa | 4e535215b6623310d8f5da64258f6fa9a378f9fd | 91a1f163b8f3a258dfb78d88a07f2a11da41bd04 | refs/heads/master | 2021-03-24T10:24:52.307389 | 2018-01-05T19:33:23 | 2018-01-05T19:33:23 | 2,321,239 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,990 | py | import numpy as np
from sigvisa.models.distributions import PiecewiseLinear
from sigvisa.utils.array import index_to_time, time_to_index
"""
Methods to propose a param value from a piecewise linear approximation
to the Gibbs conditional. This can be based on either the full model
posterior (proxylp_full) or a cheap proxy based on the noise model
applied to a local region of the unexplained envelope (proxylp_localenv).
"""
def proxylp_full(sg, wn, node):
def proxylp(candidate):
node.set_value(candidate)
if node.model is not None:
nlp = node.log_p()
else:
cd = node.joint_conditional_dist()
nlp = cd.log_p(candidate)
lp = nlp + wn.log_p()
return float(lp)
return proxylp
def proxylp_localenv(sg, wn, eid, phase, param):
    tmnodes = sg.get_template_nodes(eid, wn.sta, phase, wn.band, wn.chan)
    tg = sg.template_generator(phase)  # was missing; abstract_logenv_raw below needs it
k, node = tmnodes[param]
tmvals = dict([(p, n.get_value(key=k)) for (p, (k, n)) in tmnodes.items()])
atime = tmvals['arrival_time']
peak_time = tmvals['arrival_time'] + np.exp(tmvals['peak_offset'])
unexplained = wn.unexplained_env(eid, phase)
peak_idx = time_to_index(peak_time, wn.st, wn.srate)
start_idx_true = time_to_index(atime, wn.st, wn.srate)
end_idx_true = int(peak_idx + 60*wn.srate)
start_idx = max(0, start_idx_true)
end_idx = min(wn.npts, end_idx_true)
start_offset = start_idx - start_idx_true
if end_idx-start_idx < wn.srate:
# if less than 1s of available signal, don't even bother
return None
unexplained_local = unexplained[start_idx:end_idx]
n = len(unexplained_local)
def proxylp(candidate):
tmvals[param] = candidate
l = tg.abstract_logenv_raw(tmvals, srate=wn.srate, fixedlen=n+start_offset)
diff = unexplained_local - np.exp(l[start_offset:])
lp = wn.nm_env.log_p(diff) + node.model.log_p(candidate, cond=node._pv_cache)
return float(lp)
return proxylp
def approximate_scalar_gibbs_distribution(sg, wn, eid, phase, param,
node, proxylp, prior_weight = 0.0):
class priormodel(object):
def __init__(self, node):
if node.model is not None:
self.model = node.model
else:
self.model = node.joint_conditional_dist()
self.pv = node._pv_cache
def log_p(self, x, **kwargs):
return float(self.model.log_p(x, cond=self.pv, **kwargs))
def sample(self, **kwargs):
return float(self.model.sample(cond=self.pv, **kwargs))
assert (not node.deterministic())
tg = sg.template_generator(phase)
lbounds, hbounds = tg.low_bounds(), tg.high_bounds()
# generate a range of plausible values based on the prior,
# and on the current value v (which should already be adapted
# somewhat to the data).
pv = node._pv_cache
v = float(node.get_value())
if node.model is not None:
pred = node.model.predict(cond=pv)
std = np.sqrt(node.model.variance(cond=pv, include_obs=True))
else:
cd = node.joint_conditional_dist()
pred = cd.predict()
std = np.sqrt(cd.variance())
if param=="tt_residual":
prior_min, prior_max = -25, 25
elif param=="mult_wiggle_std":
prior_min = 0.1
prior_max = 0.99
else:
prior_min, prior_max = pred-4*std, pred+4*std
prior_min = min(prior_min, v-4*std)
prior_max = max(prior_max, v + 4*std)
if param in lbounds:
prior_min = max(prior_min, lbounds[param])
prior_max = min(prior_max, hbounds[param])
candidates = np.linspace(prior_min, prior_max, 20)
candidates = np.array(sorted(list(candidates) + [v,]))
# compute the logp at each of these candidates
lps = np.array([proxylp(candidate) for candidate in candidates])
# now refine the approximation in regions of high probability
def bad_indices(lps, candidates):
best_idx = np.argmax(lps)
best_lp = np.max(lps)
lp_diff = np.abs(np.diff(lps))
# an lp is "significant" if it or its neighbor is above the threshold
thresh = best_lp - 3
significant_lps = ( lps[:-1] > thresh ) + ( lps[1:] > thresh )
# a "bad step" is where we have a sharp boundary next to a significant lp.
# that is, the significant lps are the areas where it's important to
# approximate the posterior well, and a large difference in lp between adjacent
# candidates means we're not doing that.
badsteps = significant_lps * (lp_diff > 1)
bad_idxs = np.arange(len(lps)-1)[badsteps]
# if we've already refined a lot at a particular bad idx,
# just give up since there's probably a genuine discontinuity there
c_diff = np.abs(np.diff(candidates))
hopeless = c_diff < 1e-3
bad_idxs = [idx for idx in bad_idxs if not hopeless[idx]]
return bad_idxs
bad_idxs = bad_indices(lps, candidates)
while len(bad_idxs) > 0:
new_candidates = []
new_lps = []
for idx in bad_idxs:
c1 = candidates[idx]
c2 = candidates[idx+1]
c = c1 + (c2-c1)/2.0
new_candidates.append(c)
new_lps.append( proxylp(c))
# merge the new candidates into their sorted positions in
# the existing list
full_c = np.concatenate((candidates, new_candidates))
full_lps = np.concatenate((lps, new_lps))
perm = sorted(np.arange(len(full_c)), key = lambda i : full_c[i])
candidates = np.array(full_c[perm])
lps = np.array(full_lps[perm])
bad_idxs = bad_indices(lps, candidates)
node.set_value(v)
p = PiecewiseLinear(candidates, np.array(lps), mix_weight = prior_weight, mix_dist = priormodel(node))
return p
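# Algorithm sketch for the function above: seed ~20 candidates around the prior
# prediction and the current value, evaluate proxylp at each, then repeatedly
# bisect any interval whose endpoint log-probs differ by more than 1 while
# lying within 3 nats of the maximum (giving up below 1e-3 spacing), and wrap
# the resulting (candidate, lp) grid in a PiecewiseLinear proposal mixed with
# the prior at prior_weight.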
| [
"[email protected]"
] | |
33e0ba3e0a69e34cf7ebd41107f6f66e2889c636 | c9293ab68d0235a1830a3634a41a5b65b4eb5d6a | /Lessons/Section-03/lesson_0087/main.py | 525fed4f815ccabfb1476b840d227c9ecfdc9c6d | [] | no_license | lipegomes/python-django-udemy-studies | 4f836497ee10ece7ee5b40af1b636bb1c03deb75 | 938fa6a05f9505b8eaf6e7e6bc1c5e199b670432 | refs/heads/master | 2023-01-07T01:22:16.855346 | 2020-11-03T13:49:54 | 2020-11-03T13:49:54 | 283,852,942 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,200 | py | """
Make a to-do list with the following options:
- Add a task
- List the tasks
- Undo option (each time we call it, it undoes the last action)
- Redo option (each time we call it, it redoes the last action)
"""
def show_op(todo_list):
print()
print("Tarefas: ")
print(todo_list)
print()
def do_undo(todo_list, redo_list):
if not todo_list:
print("Nada a desfazer")
return
last_todo = todo_list.pop()
redo_list.append(last_todo)
def do_redo(todo_list, redo_list):
if not redo_list:
print("Nada a refazer")
return
last_redo = redo_list.pop()
todo_list.append(last_redo)
def do_add(todo, todo_list):
todo_list.append(todo)
if __name__ == "__main__":
todo_list = []
redo_list = []
while True:
todo = input("Digite uma tarefa ou ls,undo, redo: ")
if todo == "ls":
show_op(todo_list)
continue
elif todo == "undo":
do_undo(todo_list, redo_list)
continue
elif todo == "redo":
do_redo(todo_list, redo_list)
continue
do_add(todo, todo_list)
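# Example session sketch (illustrative input/output):
#   buy milk   -> task added
#   ls         -> ['buy milk']
#   undo       -> 'buy milk' moved to the redo stack
#   redo       -> 'buy milk' restored to the task list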
| [
"[email protected]"
] | |
a6967f5aeb2b2541339e96e0ff361039c1c4a1ef | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_5753053697277952_1/Python/wiz4rd/solve.py | 71ffac888475eabb0c81b27c4c0114a8c34376e6 | [] | no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 1,255 | py | # http://code.google.com/codejam/contest/4314486/dashboard#s=p0
from numpy import argsort
def biggest(a, N): return argsort(a)[::-1][:N]  # indices of the N largest entries
def absolute(ls): s = int(sum(ls) / 2); return any(e > s or e < 0 for e in ls)  # True when infeasible: a count is negative or exceeds half the total
def read_file(fname):
res = []
with open(fname,"r") as f:
data = [l.strip() for l in f.readlines()][1:]
for N, Ps in zip(data[::2],data[1::2]):
res.append(list(map(int, Ps.split(" "))))
return res
def solve_all(fname):
problems = read_file("%s.in" % fname)
case = 1
text = ""
for p in problems:
print("Solving Case #%s" % case)
res = solve(p)
text += "Case #%s: %s\n" % (case, res)
case+=1
with open("%s.out" % fname, "w") as out:
out.write(text)
def solve(Ps):
return bt(Ps, [])
mask = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
def bt(Ps, steps):
# generate alternatives
big1, big2 = biggest(Ps, 2)
res = Ps[:]
res[big1]-=1
res[big2]-=1
if absolute(res):
res[big2]+=1
steps.append(mask[big1])
else:
steps.append(mask[big1]+mask[big2])
if all(e==0 for e in res):
return " ".join(steps)
else:
return bt(res,steps)
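# Greedy sketch of bt() above: at each step dismiss the two largest parties
# together, unless that would leave some party with more than half of the
# remaining total (checked by absolute()), in which case dismiss only the
# largest; letters from `mask` record each step until all counts reach zero.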
solve_all("large") | [
"[email protected]"
] | |
4620c177df3478e05cc6d84a76cbc7bc79c80768 | 7ce479cac0a14d924159db9c784e3325b8f0bce7 | /schemaorgschemas/Thing/Intangible/StructuredValue/PriceSpecification/PaymentChargeSpecification/__init__.py | cd82c29e455e399753f86c2537f0d702dd8cd6b2 | [] | no_license | EvelineAndreea/AGRe | 1f0c27237eb047a60bbcfb8d73e3157035406409 | b952125896a82741f6617c259dd4060954583180 | refs/heads/master | 2020-04-08T16:08:11.517166 | 2018-11-28T07:15:56 | 2018-11-28T07:15:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,227 | py | # -*- coding: utf-8 -*-
from schemaorgschemas.Thing import potentialActionProp, descriptionProp, sameAsProp, imageProp, urlProp, mainEntityOfPageProp, additionalTypeProp, alternateNameProp, nameProp
from schemaorgschemas.Thing.Intangible.StructuredValue.PriceSpecification import validFromProp, priceCurrencyProp, priceProp, maxPriceProp, eligibleTransactionVolumeProp, valueAddedTaxIncludedProp, eligibleQuantityProp, validThroughProp, minPriceProp
from schemaorgschemas.djangoschema import SchemaObject, SchemaProperty, SchemaEnumProperty, SCHEMA_ORG
from django.conf import settings
class PaymentChargeSpecificationSchema(SchemaObject):
"""Schema Mixin for PaymentChargeSpecification
Usage: place after django model in class definition, schema will return the schema.org url for the object
The costs of settling the payment using a particular payment method.
"""
def __init__(self):
self.schema = 'PaymentChargeSpecification'
class appliesToDeliveryMethodProp(SchemaProperty):
"""
SchemaField for appliesToDeliveryMethod
Usage: Include in SchemaObject SchemaFields as your_django_field = appliesToDeliveryMethodProp()
schema.org description:The delivery method(s) to which the delivery charge or payment charge specification applies.
prop_schema returns just the property without url#
format_as is used by app templatetags based upon schema.org datatype
used to reference DeliveryMethod"""
_prop_schema = 'appliesToDeliveryMethod'
_expected_schema = 'DeliveryMethod'
_enum = False
_format_as = "ForeignKey"
class appliesToPaymentMethodProp(SchemaProperty):
"""
SchemaField for appliesToPaymentMethod
Usage: Include in SchemaObject SchemaFields as your_django_field = appliesToPaymentMethodProp()
schema.org description:The payment method(s) to which the payment charge specification applies.
prop_schema returns just the property without url#
format_as is used by app templatetags based upon schema.org datatype
used to reference PaymentMethod"""
_prop_schema = 'appliesToPaymentMethod'
_expected_schema = 'PaymentMethod'
_enum = False
_format_as = "ForeignKey"
# schema.org version 2.0
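# Hedged usage sketch (model and field names hypothetical, following the
# mixin pattern described in the class docstrings above):
#   class PaymentCharge(PaymentChargeSpecificationSchema, models.Model):
#       applies_to_payment_method = appliesToPaymentMethodProp()
#       applies_to_delivery_method = appliesToDeliveryMethodProp()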
| [
"[email protected]"
] | |
2ddc5ca7f522f8159ec1ca17599d40bf2c4eca88 | 912c4445e7041869d1c8535a493b78d7ee35424b | /status/api/tests.py | fc4d45246e3fead061e60c2bd54215da6084c563 | [] | no_license | maltezc/Udemy-DjangoRestAPI | 3f243ec97ea5e8e9d6ddc2005986b6a05aa11097 | de6f885cf0cddaf22fb6fd72d18fc805b9ce48d2 | refs/heads/master | 2022-12-14T06:04:43.011691 | 2018-08-05T01:10:17 | 2018-08-05T01:10:17 | 140,590,753 | 0 | 0 | null | 2022-11-22T02:48:04 | 2018-07-11T14:56:08 | Python | UTF-8 | Python | false | false | 7,199 | py | import os
import shutil # shell utility method
import tempfile
from PIL import Image # pip install pillow
from django.urls import reverse
from rest_framework import status
from rest_framework.reverse import reverse as api_reverse
from rest_framework.test import APITestCase
from django.contrib.auth import get_user_model
from django.conf import settings
from rest_framework_jwt.settings import api_settings
from status.models import Status
User = get_user_model()
jwt_payload_handler = api_settings.JWT_PAYLOAD_HANDLER
jwt_encode_handler = api_settings.JWT_ENCODE_HANDLER
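# Sketch: these two handlers mint a JWT for an arbitrary user without hitting
# the login endpoint, e.g. token = jwt_encode_handler(jwt_payload_handler(user)),
# which test_other_user_permissions_api below relies on.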
class StatusAPITestCase(APITestCase):
def setUp(self):
user = User.objects.create(username='testcfeuser', email='[email protected]')
user.set_password("yeahhhcfe")
user.save()
status_obj = Status.objects.create(user=user, content='Hello There!')
def _test_statuses(self):
self.assertEqual(Status.objects.count(), 1)
def status_user_token(self):
auth_url = api_reverse('api-auth:login')
auth_data = {
'username': 'testcfeuser',
'password': 'yeahhhcfe',
}
auth_response = self.client.post(auth_url, auth_data, format='json')
token = auth_response.data.get("token", 0)
self.client.credentials(HTTP_AUTHORIZATION='JWT ' + token)
def create_item(self):
        self.status_user_token() # reuses status_user_token() above instead of repeating the login flow
url = api_reverse('api-status:list')
data = {
'content': 'some cool test content'
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(Status.objects.count(), 2)
return response.data
def test_empty_create_item(self):
        self.status_user_token() # reuses status_user_token() above instead of repeating the login flow
url = api_reverse('api-status:list')
data = {
'content': None,
'image': None,
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
return response.data
def test_status_create_with_image(self):
self.status_user_token()
url = api_reverse('api-status:list')
        # (w, h) = (800, 1280)
# (255, 255, 255)
image_item = Image.new('RGB', (800, 1280), (0, 124, 174))
tmp_file = tempfile.NamedTemporaryFile(suffix='.jpg')
image_item.save(tmp_file, format='JPEG')
with open(tmp_file.name, 'rb') as file_obj:
data = {
'content': "come cool test content",
'image': file_obj
}
response = self.client.post(url, data, format='multipart') # multipart allows you to handle data coming through
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(Status.objects.count(), 2)
temp_img_dir = os.path.join(settings.MEDIA_ROOT, 'status', 'testcfeuser')
if os.path.exists(temp_img_dir):
shutil.rmtree(temp_img_dir)
def test_status_create_with_image_and_desc(self):
self.status_user_token()
url = api_reverse('api-status:list')
        # (w, h) = (800, 1280)
# (255, 255, 255)
image_item = Image.new('RGB', (800, 1280), (0, 124, 174))
tmp_file = tempfile.NamedTemporaryFile(suffix='.jpg')
image_item.save(tmp_file, format='JPEG')
with open(tmp_file.name, 'rb') as file_obj:
data = {
'content': None,
'image': file_obj
}
response = self.client.post(url, data, format='multipart') # multipart allows you to handle data coming through
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
print(response.data)
img_data = response.data.get('image')
self.assertNotEqual(img_data, None)
self.assertEqual(Status.objects.count(), 2)
temp_img_dir = os.path.join(settings.MEDIA_ROOT, 'status', 'testcfeuser')
if os.path.exists(temp_img_dir):
shutil.rmtree(temp_img_dir)
def test_status_create(self):
data = self.create_item()
data_id = data.get("id")
rud_url = api_reverse('api-status:detail', kwargs={"id": data_id})
rud_data = {
'content': 'another new content'
}
'''
get method / retrieve
'''
get_response = self.client.get(rud_url, format='json')
self.assertEqual(get_response.status_code, status.HTTP_200_OK)
def test_status_update(self):
data = self.create_item()
data_id = data.get("id")
rud_url = api_reverse('api-status:detail', kwargs={"id": data_id})
rud_data = {
'content': 'another new content'
}
'''
put / update
'''
put_response = self.client.put(rud_url, rud_data, format='json')
self.assertEqual(put_response.status_code, status.HTTP_200_OK)
rud_response_data = put_response.data
self.assertEqual(rud_response_data['content'], rud_data['content'])
def test_status_delete(self):
data = self.create_item()
data_id = data.get("id")
rud_url = api_reverse('api-status:detail', kwargs={"id": data_id})
rud_data = {
'content': 'another new content'
}
'''
delete method
'''
del_response = self.client.delete(rud_url, format='json')
self.assertEqual(del_response.status_code, status.HTTP_204_NO_CONTENT)
'''
Not Found
'''
get_response = self.client.delete(rud_url, format='json')
self.assertEqual(get_response.status_code, status.HTTP_404_NOT_FOUND)
def test_status_no_token_create(self):
url = api_reverse('api-status:list')
data = {
'content': 'some cool test content'
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_other_user_permissions_api(self):
data = self.create_item()
data_id = data.get("id")
user = User.objects.create(username='userjmitch')
payload = jwt_payload_handler(user)
token = jwt_encode_handler(payload)
self.client.credentials(HTTP_AUTHORIZATION='JWT ' + token)
rud_url = api_reverse('api-status:detail', kwargs={'id':data_id})
rud_data = {
'content':"smashing"
}
get_ = self.client.get(rud_url, format='json')
put_ = self.client.put(rud_url, rud_data, format='json')
delete_ = self.client.delete(rud_url, format='json')
self.assertEqual(get_.status_code, status.HTTP_200_OK)
self.assertEqual(put_.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(delete_.status_code, status.HTTP_403_FORBIDDEN)
| [
"[email protected]"
] | |
918ad7bb0117b30c6486fbc80d1fd1b193eca18c | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_393/ch118_2020_10_04_20_23_11_549205.py | 0e12279bb35457c3ba0953ad701ea15292ec51a7 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 187 | py | import math
def reflexao_total_interna(n1,n2,teta2):
y= math.radians((((n1/n2)*math.sin(math.radians(teta2)))))
if y > 1:
return True
else:
return False
| [
"[email protected]"
] | |
04d560051f784a7adef1e2450c98a8917ecc9863 | 297efd4afeb46c0b56d9a975d76665caef213acc | /src/multiplicity/migrations/0044_referencespacelocation_active.py | c685d927bce6613caf9aa25e774e3ac226361258 | [
"MIT"
] | permissive | metabolism-of-cities/metabolism-of-cities-platform-v3 | 67716c3daae86a0fe527c18aef26ce29e069cbcc | c754d3b1b401906a21640b8eacb6b724a448b31c | refs/heads/master | 2022-12-06T22:56:22.207853 | 2020-08-25T09:53:51 | 2020-08-25T09:53:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 422 | py | # Generated by Django 2.1.3 on 2019-03-04 07:33
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('multiplicity', '0043_auto_20190209_1723'),
]
operations = [
migrations.AddField(
model_name='referencespacelocation',
name='active',
field=models.BooleanField(db_index=True, default=True),
),
]
| [
"[email protected]"
] | |
a02e962f1c5d82a41e748de8fb1d38b119166c0c | 2f557f60fc609c03fbb42badf2c4f41ef2e60227 | /CondTools/Ecal/python/copyTrivialAlignEB_cfg.py | c690529c0fa086e7e9df506f16670a72c30d21b9 | [
"Apache-2.0"
] | permissive | CMS-TMTT/cmssw | 91d70fc40a7110832a2ceb2dc08c15b5a299bd3b | 80cb3a25c0d63594fe6455b837f7c3cbe3cf42d7 | refs/heads/TMTT_1060 | 2020-03-24T07:49:39.440996 | 2020-03-04T17:21:36 | 2020-03-04T17:21:36 | 142,576,342 | 3 | 5 | Apache-2.0 | 2019-12-05T21:16:34 | 2018-07-27T12:48:13 | C++ | UTF-8 | Python | false | false | 1,416 | py | import FWCore.ParameterSet.Config as cms
process = cms.Process("TEST")
process.load("CalibCalorimetry.EcalTrivialCondModules.EcalTrivialCondRetriever_cfi")
process.load("CondCore.DBCommon.CondDBCommon_cfi")
#process.CondDBCommon.connect = 'oracle://cms_orcoff_prep/CMS_COND_ECAL'
#process.CondDBCommon.DBParameters.authenticationPath = '/afs/cern.ch/cms/DB/conddb/'
process.CondDBCommon.connect = 'sqlite_file:EBAlign.db'
process.MessageLogger = cms.Service("MessageLogger",
debugModules = cms.untracked.vstring('*'),
destinations = cms.untracked.vstring('cout')
)
process.source = cms.Source("EmptyIOVSource",
firstValue = cms.uint64(1),
lastValue = cms.uint64(1),
timetype = cms.string('runnumber'),
interval = cms.uint64(1)
)
process.PoolDBOutputService = cms.Service("PoolDBOutputService",
process.CondDBCommon,
timetype = cms.untracked.string('runnumber'),
toPut = cms.VPSet(
cms.PSet(
record = cms.string('EBAlignmentRcd'),
tag = cms.string('EBAlignment_zero_offline')
)
)
)
process.dbCopy = cms.EDAnalyzer("EcalDBCopy",
timetype = cms.string('runnumber'),
toCopy = cms.VPSet(
cms.PSet(
record = cms.string('EBAlignmentRcd'),
container = cms.string('EBAlignment')
)
)
)
process.prod = cms.EDAnalyzer("EcalTrivialObjectAnalyzer")
process.p = cms.Path(process.prod*process.dbCopy)
| [
"[email protected]"
] | |
3c7381700dd51e46681693cb02fba56bbe79e2f3 | cea30aead7f0b529ee072c1bcab2896777e1408d | /PreprocessingCropsData/venv/Lib/site-packages/mpl_toolkits/tests/__init__.py | d5bd2047c484dceb46dbd677b7d1236edf7ab7ae | [] | no_license | pgj9702/FarmSolution | 3730ab3ca983b335ed48a60935c5fa6e3983cbb1 | a8cacc45b8519e79b51ab65b9539a01f5006e64f | refs/heads/master | 2023-03-30T15:41:10.312044 | 2021-03-31T08:47:23 | 2021-03-31T08:47:23 | 334,019,778 | 0 | 1 | null | 2021-02-22T09:32:57 | 2021-01-29T02:52:46 | Python | UTF-8 | Python | false | false | 375 | py | from pathlib import Path
# Check that the test directories exist
if not (Path(__file__).parent / "baseline_images").exists():
raise IOError(
'The baseline image directory does not exist. '
        'This is most likely because the test data is not installed. '
'You may need to install matplotlib from source to get the '
        'test data.')
| [
"[email protected]"
] | |
603882c35d1658fbb70d32cc0a4adab196eec5d2 | 531c47c15b97cbcb263ec86821d7f258c81c0aaf | /sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_04_01/operations/_express_route_circuits_operations.py | aa23a5fe60e0f1801b53387edc44271fb622cf79 | [
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later",
"MIT"
] | permissive | YijunXieMS/azure-sdk-for-python | be364d3b88204fd3c7d223df23756386ff7a3361 | f779de8e53dbec033f98f976284e6d9491fd60b3 | refs/heads/master | 2021-07-15T18:06:28.748507 | 2020-09-04T15:48:52 | 2020-09-04T15:48:52 | 205,457,088 | 1 | 2 | MIT | 2020-06-16T16:38:15 | 2019-08-30T21:08:55 | Python | UTF-8 | Python | false | false | 52,312 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class ExpressRouteCircuitsOperations(object):
"""ExpressRouteCircuitsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_04_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _delete_initial(
self,
resource_group_name, # type: str
circuit_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
circuit_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller
"""Deletes the specified express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'} # type: ignore
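    # Hedged usage sketch (client variable name assumed):
    #   poller = network_client.express_route_circuits.begin_delete("my_rg", "my_circuit")
    #   poller.result()  # blocks until the long-running delete finishes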
def get(
self,
resource_group_name, # type: str
circuit_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "models.ExpressRouteCircuit"
"""Gets information about the specified express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
        :param circuit_name: The name of the express route circuit.
:type circuit_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExpressRouteCircuit, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuit
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ExpressRouteCircuit"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRouteCircuit', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
circuit_name, # type: str
parameters, # type: "models.ExpressRouteCircuit"
**kwargs # type: Any
):
# type: (...) -> "models.ExpressRouteCircuit"
cls = kwargs.pop('cls', None) # type: ClsType["models.ExpressRouteCircuit"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = 'application/json'
# Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'ExpressRouteCircuit')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuit', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ExpressRouteCircuit', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
circuit_name, # type: str
parameters, # type: "models.ExpressRouteCircuit"
**kwargs # type: Any
):
# type: (...) -> LROPoller
"""Creates or updates an express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the circuit.
:type circuit_name: str
:param parameters: Parameters supplied to the create or update express route circuit operation.
:type parameters: ~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuit
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ExpressRouteCircuit or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuit]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.ExpressRouteCircuit"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ExpressRouteCircuit', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'} # type: ignore
def _update_tags_initial(
self,
resource_group_name, # type: str
circuit_name, # type: str
parameters, # type: "models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> "models.ExpressRouteCircuit"
cls = kwargs.pop('cls', None) # type: ClsType["models.ExpressRouteCircuit"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self._update_tags_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = 'application/json'
# Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRouteCircuit', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'} # type: ignore
def begin_update_tags(
self,
resource_group_name, # type: str
circuit_name, # type: str
parameters, # type: "models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> LROPoller
"""Updates an express route circuit tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the circuit.
:type circuit_name: str
:param parameters: Parameters supplied to update express route circuit tags.
:type parameters: ~azure.mgmt.network.v2019_04_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ExpressRouteCircuit or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuit]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.ExpressRouteCircuit"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._update_tags_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ExpressRouteCircuit', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'} # type: ignore
def _list_arp_table_initial(
self,
resource_group_name, # type: str
circuit_name, # type: str
peering_name, # type: str
device_path, # type: str
**kwargs # type: Any
):
# type: (...) -> "models.ExpressRouteCircuitsArpTableListResult"
cls = kwargs.pop('cls', None) # type: ClsType["models.ExpressRouteCircuitsArpTableListResult"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
# Construct URL
url = self._list_arp_table_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'devicePath': self._serialize.url("device_path", device_path, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitsArpTableListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_list_arp_table_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/arpTables/{devicePath}'} # type: ignore
def begin_list_arp_table(
self,
resource_group_name, # type: str
circuit_name, # type: str
peering_name, # type: str
device_path, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller
"""Gets the currently advertised ARP table associated with the express route circuit in a resource
group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param device_path: The path of the device.
:type device_path: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ExpressRouteCircuitsArpTableListResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitsArpTableListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.ExpressRouteCircuitsArpTableListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._list_arp_table_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
device_path=device_path,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ExpressRouteCircuitsArpTableListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_arp_table.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/arpTables/{devicePath}'} # type: ignore
def _list_routes_table_initial(
self,
resource_group_name, # type: str
circuit_name, # type: str
peering_name, # type: str
device_path, # type: str
**kwargs # type: Any
):
# type: (...) -> "models.ExpressRouteCircuitsRoutesTableListResult"
cls = kwargs.pop('cls', None) # type: ClsType["models.ExpressRouteCircuitsRoutesTableListResult"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
# Construct URL
url = self._list_routes_table_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'devicePath': self._serialize.url("device_path", device_path, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_list_routes_table_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/routeTables/{devicePath}'} # type: ignore
def begin_list_routes_table(
self,
resource_group_name, # type: str
circuit_name, # type: str
peering_name, # type: str
device_path, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller
"""Gets the currently advertised routes table associated with the express route circuit in a
resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param device_path: The path of the device.
:type device_path: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ExpressRouteCircuitsRoutesTableListResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitsRoutesTableListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.ExpressRouteCircuitsRoutesTableListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._list_routes_table_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
device_path=device_path,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_routes_table.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/routeTables/{devicePath}'} # type: ignore
def _list_routes_table_summary_initial(
self,
resource_group_name, # type: str
circuit_name, # type: str
peering_name, # type: str
device_path, # type: str
**kwargs # type: Any
):
# type: (...) -> "models.ExpressRouteCircuitsRoutesTableSummaryListResult"
cls = kwargs.pop('cls', None) # type: ClsType["models.ExpressRouteCircuitsRoutesTableSummaryListResult"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
# Construct URL
url = self._list_routes_table_summary_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'devicePath': self._serialize.url("device_path", device_path, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableSummaryListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_list_routes_table_summary_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/routeTablesSummary/{devicePath}'} # type: ignore
def begin_list_routes_table_summary(
self,
resource_group_name, # type: str
circuit_name, # type: str
peering_name, # type: str
device_path, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller
"""Gets the currently advertised routes table summary associated with the express route circuit in
a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param device_path: The path of the device.
:type device_path: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ExpressRouteCircuitsRoutesTableSummaryListResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitsRoutesTableSummaryListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.ExpressRouteCircuitsRoutesTableSummaryListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._list_routes_table_summary_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
device_path=device_path,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableSummaryListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_routes_table_summary.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/routeTablesSummary/{devicePath}'} # type: ignore
def get_stats(
self,
resource_group_name, # type: str
circuit_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "models.ExpressRouteCircuitStats"
"""Gets all the stats from an express route circuit in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExpressRouteCircuitStats, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitStats
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ExpressRouteCircuitStats"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
# Construct URL
url = self.get_stats.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRouteCircuitStats', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_stats.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/stats'} # type: ignore
def get_peering_stats(
self,
resource_group_name, # type: str
circuit_name, # type: str
peering_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "models.ExpressRouteCircuitStats"
"""Gets all stats from an express route circuit in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExpressRouteCircuitStats, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitStats
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ExpressRouteCircuitStats"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
# Construct URL
url = self.get_peering_stats.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRouteCircuitStats', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_peering_stats.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/stats'} # type: ignore
def list(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["models.ExpressRouteCircuitListResult"]
"""Gets all the express route circuits in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ExpressRouteCircuitListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ExpressRouteCircuitListResult"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
def prepare_request(next_link=None):
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ExpressRouteCircuitListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits'} # type: ignore
def list_all(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["models.ExpressRouteCircuitListResult"]
"""Gets all the express route circuits in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ExpressRouteCircuitListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_04_01.models.ExpressRouteCircuitListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ExpressRouteCircuitListResult"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
def prepare_request(next_link=None):
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ExpressRouteCircuitListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/expressRouteCircuits'} # type: ignore
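
# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the generated operations class).
# The credential wiring and resource names below are placeholder assumptions;
# they presume azure-identity and azure-mgmt-network are installed.
def _example_express_route_usage():
    from azure.identity import DefaultAzureCredential
    from azure.mgmt.network import NetworkManagementClient

    client = NetworkManagementClient(DefaultAzureCredential(), "<subscription-id>")
    # begin_list_arp_table is a long-running operation: it returns an LROPoller,
    # and .result() blocks until the service reaches a terminal state.
    poller = client.express_route_circuits.begin_list_arp_table(
        "my-rg", "my-circuit", "AzurePrivatePeering", "primary"
    )
    return poller.result()  # ExpressRouteCircuitsArpTableListResult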
# File: c_py_extention/Lib/site-packages/pip/_vendor/urllib3/response.py
from __future__ import absolute_import
from contextlib import contextmanager
import zlib
import io
import logging
from socket import timeout as SocketTimeout
from socket import error as SocketError
try:
import brotli
except ImportError:
brotli = None
from ._collections import HTTPHeaderDict
from .exceptions import (
BodyNotHttplibCompatible,
ProtocolError,
DecodeError,
ReadTimeoutError,
ResponseNotChunked,
IncompleteRead,
InvalidHeader,
HTTPError,
)
from .packages.six import string_types as basestring, PY3
from .packages.six.moves import http_client as httplib
from .connection import HTTPException, BaseSSLError
from .util.response import is_fp_closed, is_response_to_head
log = logging.getLogger(__name__)
class DeflateDecoder(object):
def __init__(self):
self._first_try = True
self._data = b""
self._obj = zlib.decompressobj()
def __getattr__(self, name):
return getattr(self._obj, name)
def decompress(self, data):
if not data:
return data
if not self._first_try:
return self._obj.decompress(data)
self._data += data
try:
decompressed = self._obj.decompress(data)
if decompressed:
self._first_try = False
self._data = None
return decompressed
except zlib.error:
self._first_try = False
self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
try:
return self.decompress(self._data)
finally:
self._data = None
class GzipDecoderState(object):
FIRST_MEMBER = 0
OTHER_MEMBERS = 1
SWALLOW_DATA = 2
class GzipDecoder(object):
def __init__(self):
self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
self._state = GzipDecoderState.FIRST_MEMBER
def __getattr__(self, name):
return getattr(self._obj, name)
def decompress(self, data):
ret = bytearray()
if self._state == GzipDecoderState.SWALLOW_DATA or not data:
return bytes(ret)
while True:
try:
ret += self._obj.decompress(data)
except zlib.error:
previous_state = self._state
# Ignore data after the first error
self._state = GzipDecoderState.SWALLOW_DATA
if previous_state == GzipDecoderState.OTHER_MEMBERS:
# Allow trailing garbage acceptable in other gzip clients
return bytes(ret)
raise
data = self._obj.unused_data
if not data:
return bytes(ret)
self._state = GzipDecoderState.OTHER_MEMBERS
self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
if brotli is not None:
class BrotliDecoder(object):
# Supports both 'brotlipy' and 'Brotli' packages
# since they share an import name. The top branches
# are for 'brotlipy' and bottom branches for 'Brotli'
def __init__(self):
self._obj = brotli.Decompressor()
def decompress(self, data):
if hasattr(self._obj, "decompress"):
return self._obj.decompress(data)
return self._obj.process(data)
def flush(self):
if hasattr(self._obj, "flush"):
return self._obj.flush()
return b""
class MultiDecoder(object):
"""
From RFC7231:
If one or more encodings have been applied to a representation, the
sender that applied the encodings MUST generate a Content-Encoding
header field that lists the content codings in the order in which
they were applied.
"""
def __init__(self, modes):
self._decoders = [_get_decoder(m.strip()) for m in modes.split(",")]
def flush(self):
return self._decoders[0].flush()
def decompress(self, data):
for d in reversed(self._decoders):
data = d.decompress(data)
return data
def _get_decoder(mode):
if "," in mode:
return MultiDecoder(mode)
if mode == "gzip":
return GzipDecoder()
if brotli is not None and mode == "br":
return BrotliDecoder()
return DeflateDecoder()
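
# Round-trip sanity check (illustrative sketch): feed a gzip-compressed payload
# through the decoder selected by _get_decoder(). The stdlib gzip import is an
# assumption made only for this demo.
def _demo_decoder_roundtrip():
    import gzip

    decoder = _get_decoder("gzip")
    compressed = gzip.compress(b"hello world")
    # GzipDecoder.flush() is delegated to the underlying zlib decompressobj.
    return decoder.decompress(compressed) + decoder.flush()  # b"hello world"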
class HTTPResponse(io.IOBase):
"""
HTTP Response container.
Backwards-compatible to httplib's HTTPResponse but the response ``body`` is
loaded and decoded on-demand when the ``data`` property is accessed. This
class is also compatible with the Python standard library's :mod:`io`
module, and can hence be treated as a readable object in the context of that
framework.
Extra parameters for behaviour not present in httplib.HTTPResponse:
:param preload_content:
If True, the response's body will be preloaded during construction.
:param decode_content:
If True, will attempt to decode the body based on the
'content-encoding' header.
:param original_response:
When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
object, it's convenient to include the original for debug purposes. It's
otherwise unused.
:param retries:
The retries contains the last :class:`~urllib3.util.retry.Retry` that
was used during the request.
:param enforce_content_length:
Enforce content length checking. Body returned by server must match
value of Content-Length header, if present. Otherwise, raise error.
"""
CONTENT_DECODERS = ["gzip", "deflate"]
if brotli is not None:
CONTENT_DECODERS += ["br"]
REDIRECT_STATUSES = [301, 302, 303, 307, 308]
def __init__(
self,
body="",
headers=None,
status=0,
version=0,
reason=None,
strict=0,
preload_content=True,
decode_content=True,
original_response=None,
pool=None,
connection=None,
msg=None,
retries=None,
enforce_content_length=False,
request_method=None,
request_url=None,
auto_close=True,
):
if isinstance(headers, HTTPHeaderDict):
self.headers = headers
else:
self.headers = HTTPHeaderDict(headers)
self.status = status
self.version = version
self.reason = reason
self.strict = strict
self.decode_content = decode_content
self.retries = retries
self.enforce_content_length = enforce_content_length
self.auto_close = auto_close
self._decoder = None
self._body = None
self._fp = None
self._original_response = original_response
self._fp_bytes_read = 0
self.msg = msg
self._request_url = request_url
if body and isinstance(body, (basestring, bytes)):
self._body = body
self._pool = pool
self._connection = connection
if hasattr(body, "read"):
self._fp = body
# Are we using the chunked-style of transfer encoding?
self.chunked = False
self.chunk_left = None
tr_enc = self.headers.get("transfer-encoding", "").lower()
# Don't incur the penalty of creating a list and then discarding it
encodings = (enc.strip() for enc in tr_enc.split(","))
if "chunked" in encodings:
self.chunked = True
# Determine length of response
self.length_remaining = self._init_length(request_method)
# If requested, preload the body.
if preload_content and not self._body:
self._body = self.read(decode_content=decode_content)
def get_redirect_location(self):
"""
Should we redirect and where to?
:returns: Truthy redirect location string if we got a redirect status
code and valid location. ``None`` if redirect status and no
location. ``False`` if not a redirect status code.
"""
if self.status in self.REDIRECT_STATUSES:
return self.headers.get("location")
return False
def release_conn(self):
if not self._pool or not self._connection:
return
self._pool._put_conn(self._connection)
self._connection = None
def drain_conn(self):
"""
Read and discard any remaining HTTP response data in the response connection.
Unread data in the HTTPResponse connection blocks the connection from being released back to the pool.
"""
try:
self.read()
except (HTTPError, SocketError, BaseSSLError, HTTPException):
pass
@property
def data(self):
        # For backwards-compat with urllib3 0.4 and earlier.
if self._body:
return self._body
if self._fp:
return self.read(cache_content=True)
@property
def connection(self):
return self._connection
def isclosed(self):
return is_fp_closed(self._fp)
def tell(self):
"""
Obtain the number of bytes pulled over the wire so far. May differ from
        the amount of content returned by :meth:`HTTPResponse.read` if bytes
        are encoded on the wire (e.g., compressed).
"""
return self._fp_bytes_read
def _init_length(self, request_method):
"""
Set initial length value for Response content if available.
"""
length = self.headers.get("content-length")
if length is not None:
if self.chunked:
# This Response will fail with an IncompleteRead if it can't be
# received as chunked. This method falls back to attempt reading
# the response before raising an exception.
log.warning(
"Received response with both Content-Length and "
"Transfer-Encoding set. This is expressly forbidden "
"by RFC 7230 sec 3.3.2. Ignoring Content-Length and "
"attempting to process response as Transfer-Encoding: "
"chunked."
)
return None
try:
# RFC 7230 section 3.3.2 specifies multiple content lengths can
# be sent in a single Content-Length header
# (e.g. Content-Length: 42, 42). This line ensures the values
# are all valid ints and that as long as the `set` length is 1,
# all values are the same. Otherwise, the header is invalid.
lengths = set([int(val) for val in length.split(",")])
if len(lengths) > 1:
raise InvalidHeader(
"Content-Length contained multiple "
"unmatching values (%s)" % length
)
length = lengths.pop()
except ValueError:
length = None
else:
if length < 0:
length = None
# Convert status to int for comparison
# In some cases, httplib returns a status of "_UNKNOWN"
try:
status = int(self.status)
except ValueError:
status = 0
# Check for responses that shouldn't include a body
if status in (204, 304) or 100 <= status < 200 or request_method == "HEAD":
length = 0
return length
def _init_decoder(self):
"""
Set-up the _decoder attribute if necessary.
"""
# Note: content-encoding value should be case-insensitive, per RFC 7230
# Section 3.2
content_encoding = self.headers.get("content-encoding", "").lower()
if self._decoder is None:
if content_encoding in self.CONTENT_DECODERS:
self._decoder = _get_decoder(content_encoding)
elif "," in content_encoding:
encodings = [
e.strip()
for e in content_encoding.split(",")
if e.strip() in self.CONTENT_DECODERS
]
if len(encodings):
self._decoder = _get_decoder(content_encoding)
DECODER_ERROR_CLASSES = (IOError, zlib.error)
if brotli is not None:
DECODER_ERROR_CLASSES += (brotli.error,)
def _decode(self, data, decode_content, flush_decoder):
"""
Decode the data passed in and potentially flush the decoder.
"""
if not decode_content:
return data
try:
if self._decoder:
data = self._decoder.decompress(data)
except self.DECODER_ERROR_CLASSES as e:
content_encoding = self.headers.get("content-encoding", "").lower()
raise DecodeError(
"Received response with content-encoding: %s, but "
"failed to decode it." % content_encoding,
e,
)
if flush_decoder:
data += self._flush_decoder()
return data
def _flush_decoder(self):
"""
Flushes the decoder. Should only be called if the decoder is actually
being used.
"""
if self._decoder:
buf = self._decoder.decompress(b"")
return buf + self._decoder.flush()
return b""
@contextmanager
def _error_catcher(self):
"""
Catch low-level python exceptions, instead re-raising urllib3
variants, so that low-level exceptions are not leaked in the
high-level api.
On exit, release the connection back to the pool.
"""
clean_exit = False
try:
try:
yield
except SocketTimeout:
# FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
# there is yet no clean way to get at it from this context.
raise ReadTimeoutError(self._pool, None, "Read timed out.")
except BaseSSLError as e:
# FIXME: Is there a better way to differentiate between SSLErrors?
if "read operation timed out" not in str(e): # Defensive:
# This shouldn't happen but just in case we're missing an edge
# case, let's avoid swallowing SSL errors.
raise
raise ReadTimeoutError(self._pool, None, "Read timed out.")
except (HTTPException, SocketError) as e:
# This includes IncompleteRead.
raise ProtocolError("Connection broken: %r" % e, e)
# If no exception is thrown, we should avoid cleaning up
# unnecessarily.
clean_exit = True
finally:
# If we didn't terminate cleanly, we need to throw away our
# connection.
if not clean_exit:
# The response may not be closed but we're not going to use it
# anymore so close it now to ensure that the connection is
# released back to the pool.
if self._original_response:
self._original_response.close()
# Closing the response may not actually be sufficient to close
# everything, so if we have a hold of the connection close that
# too.
if self._connection:
self._connection.close()
# If we hold the original response but it's closed now, we should
# return the connection back to the pool.
if self._original_response and self._original_response.isclosed():
self.release_conn()
def read(self, amt=None, decode_content=None, cache_content=False):
"""
Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
parameters: ``decode_content`` and ``cache_content``.
:param amt:
How much of the content to read. If specified, caching is skipped
because it doesn't make sense to cache partial content as the full
response.
:param decode_content:
If True, will attempt to decode the body based on the
'content-encoding' header.
:param cache_content:
If True, will save the returned data such that the same result is
            returned regardless of the state of the underlying file object. This
is useful if you want the ``.data`` property to continue working
after having ``.read()`` the file object. (Overridden if ``amt`` is
set.)
"""
self._init_decoder()
if decode_content is None:
decode_content = self.decode_content
if self._fp is None:
return
flush_decoder = False
fp_closed = getattr(self._fp, "closed", False)
with self._error_catcher():
if amt is None:
# cStringIO doesn't like amt=None
data = self._fp.read() if not fp_closed else b""
flush_decoder = True
else:
cache_content = False
data = self._fp.read(amt) if not fp_closed else b""
if (
amt != 0 and not data
): # Platform-specific: Buggy versions of Python.
# Close the connection when no data is returned
#
# This is redundant to what httplib/http.client _should_
# already do. However, versions of python released before
# December 15, 2012 (http://bugs.python.org/issue16298) do
# not properly close the connection in all cases. There is
# no harm in redundantly calling close.
self._fp.close()
flush_decoder = True
if self.enforce_content_length and self.length_remaining not in (
0,
None,
):
# This is an edge case that httplib failed to cover due
# to concerns of backward compatibility. We're
# addressing it here to make sure IncompleteRead is
# raised during streaming, so all calls with incorrect
# Content-Length are caught.
raise IncompleteRead(self._fp_bytes_read, self.length_remaining)
if data:
self._fp_bytes_read += len(data)
if self.length_remaining is not None:
self.length_remaining -= len(data)
data = self._decode(data, decode_content, flush_decoder)
if cache_content:
self._body = data
return data
def stream(self, amt=2 ** 16, decode_content=None):
"""
A generator wrapper for the read() method. A call will block until
``amt`` bytes have been read from the connection or until the
connection is closed.
:param amt:
How much of the content to read. The generator will return up to
            this much data per iteration, but may return less. This is particularly
likely when using compressed data. However, the empty string will
never be returned.
:param decode_content:
If True, will attempt to decode the body based on the
'content-encoding' header.
"""
if self.chunked and self.supports_chunked_reads():
for line in self.read_chunked(amt, decode_content=decode_content):
yield line
else:
while not is_fp_closed(self._fp):
data = self.read(amt=amt, decode_content=decode_content)
if data:
yield data
@classmethod
def from_httplib(ResponseCls, r, **response_kw):
"""
Given an :class:`httplib.HTTPResponse` instance ``r``, return a
corresponding :class:`urllib3.response.HTTPResponse` object.
Remaining parameters are passed to the HTTPResponse constructor, along
with ``original_response=r``.
"""
headers = r.msg
if not isinstance(headers, HTTPHeaderDict):
if PY3:
headers = HTTPHeaderDict(headers.items())
else:
# Python 2.7
headers = HTTPHeaderDict.from_httplib(headers)
# HTTPResponse objects in Python 3 don't have a .strict attribute
strict = getattr(r, "strict", 0)
resp = ResponseCls(
body=r,
headers=headers,
status=r.status,
version=r.version,
reason=r.reason,
strict=strict,
original_response=r,
**response_kw
)
return resp
# Backwards-compatibility methods for httplib.HTTPResponse
def getheaders(self):
return self.headers
def getheader(self, name, default=None):
return self.headers.get(name, default)
# Backwards compatibility for http.cookiejar
def info(self):
return self.headers
# Overrides from io.IOBase
def close(self):
if not self.closed:
self._fp.close()
if self._connection:
self._connection.close()
if not self.auto_close:
io.IOBase.close(self)
@property
def closed(self):
if not self.auto_close:
return io.IOBase.closed.__get__(self)
elif self._fp is None:
return True
elif hasattr(self._fp, "isclosed"):
return self._fp.isclosed()
elif hasattr(self._fp, "closed"):
return self._fp.closed
else:
return True
def fileno(self):
if self._fp is None:
raise IOError("HTTPResponse has no file to get a fileno from")
elif hasattr(self._fp, "fileno"):
return self._fp.fileno()
else:
raise IOError(
"The file-like object this HTTPResponse is wrapped "
"around has no file descriptor"
)
def flush(self):
if (
self._fp is not None
and hasattr(self._fp, "flush")
and not getattr(self._fp, "closed", False)
):
return self._fp.flush()
def readable(self):
# This method is required for `io` module compatibility.
return True
def readinto(self, b):
# This method is required for `io` module compatibility.
temp = self.read(len(b))
if len(temp) == 0:
return 0
else:
b[: len(temp)] = temp
return len(temp)
def supports_chunked_reads(self):
"""
Checks if the underlying file-like object looks like a
httplib.HTTPResponse object. We do this by testing for the fp
attribute. If it is present we assume it returns raw chunks as
processed by read_chunked().
"""
return hasattr(self._fp, "fp")
def _update_chunk_length(self):
# First, we'll figure out length of a chunk and then
# we'll try to read it from socket.
if self.chunk_left is not None:
return
line = self._fp.fp.readline()
line = line.split(b";", 1)[0]
try:
self.chunk_left = int(line, 16)
except ValueError:
# Invalid chunked protocol response, abort.
self.close()
raise httplib.IncompleteRead(line)
def _handle_chunk(self, amt):
returned_chunk = None
if amt is None:
chunk = self._fp._safe_read(self.chunk_left)
returned_chunk = chunk
self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
self.chunk_left = None
elif amt < self.chunk_left:
value = self._fp._safe_read(amt)
self.chunk_left = self.chunk_left - amt
returned_chunk = value
elif amt == self.chunk_left:
value = self._fp._safe_read(amt)
self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
self.chunk_left = None
returned_chunk = value
else: # amt > self.chunk_left
returned_chunk = self._fp._safe_read(self.chunk_left)
self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
self.chunk_left = None
return returned_chunk
def read_chunked(self, amt=None, decode_content=None):
"""
Similar to :meth:`HTTPResponse.read`, but with an additional
parameter: ``decode_content``.
:param amt:
How much of the content to read. If specified, caching is skipped
because it doesn't make sense to cache partial content as the full
response.
:param decode_content:
If True, will attempt to decode the body based on the
'content-encoding' header.
"""
self._init_decoder()
# FIXME: Rewrite this method and make it a class with a better structured logic.
if not self.chunked:
raise ResponseNotChunked(
"Response is not chunked. "
"Header 'transfer-encoding: chunked' is missing."
)
if not self.supports_chunked_reads():
raise BodyNotHttplibCompatible(
"Body should be httplib.HTTPResponse like. "
"It should have have an fp attribute which returns raw chunks."
)
with self._error_catcher():
# Don't bother reading the body of a HEAD request.
if self._original_response and is_response_to_head(self._original_response):
self._original_response.close()
return
# If a response is already read and closed
# then return immediately.
if self._fp.fp is None:
return
while True:
self._update_chunk_length()
if self.chunk_left == 0:
break
chunk = self._handle_chunk(amt)
decoded = self._decode(
chunk, decode_content=decode_content, flush_decoder=False
)
if decoded:
yield decoded
if decode_content:
# On CPython and PyPy, we should never need to flush the
# decoder. However, on Jython we *might* need to, so
# lets defensively do it anyway.
decoded = self._flush_decoder()
if decoded: # Platform-specific: Jython.
yield decoded
# Chunk content ends with \r\n: discard it.
while True:
line = self._fp.fp.readline()
if not line:
# Some sites may not end with '\r\n'.
break
if line == b"\r\n":
break
# We read everything; close the "file".
if self._original_response:
self._original_response.close()
def geturl(self):
"""
Returns the URL that was the source of this response.
If the request that generated this response redirected, this method
will return the final redirect location.
"""
if self.retries is not None and len(self.retries.history):
return self.retries.history[-1].redirect_location
else:
return self._request_url
def __iter__(self):
buffer = []
for chunk in self.stream(decode_content=True):
if b"\n" in chunk:
chunk = chunk.split(b"\n")
yield b"".join(buffer) + chunk[0] + b"\n"
for x in chunk[1:-1]:
yield x + b"\n"
if chunk[-1]:
buffer = [chunk[-1]]
else:
buffer = []
else:
buffer.append(chunk)
if buffer:
yield b"".join(buffer)
#!/usr/bin/env python
# File: displays/ws2801/rainbow.py
import time
import math
import sys
from LedStrip_WS2801 import LedStrip_WS2801
amp = 10
f = 10
shift = 3
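# Each colour channel in rainbow() below follows
#   amp * (sin(2*pi*f*(i - phase - k*shift) / nrOfleds) + 1) + 1
# with k = 0, 1, 2 for R, G, B: the same sine wave, offset by `shift` LEDs per
# channel, which is what spreads the hues into a moving rainbow.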
def fillAll(ledStrip, color, sleep):
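    # Note: the `sleep` argument is accepted but currently unused; this helper
    # just writes one colour to every pixel and pushes the frame to the strip.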
for i in range(0, ledStrip.nLeds):
ledStrip.setPixel(i, color)
ledStrip.update()
def rainbow(ledStrip, nrOfleds):
phase = 0
skip = 60
for i in range(nrOfleds-skip, nrOfleds):
ledStrip.setPixel(i, [0, 0, 0])
while True:
for i in range(0, nrOfleds-skip):
r = int((amp * (math.sin(2*math.pi*f*(i-phase-0*shift)/nrOfleds) + 1)) + 1)
g = int((amp * (math.sin(2*math.pi*f*(i-phase-1*shift)/nrOfleds) + 1)) + 1)
b = int((amp * (math.sin(2*math.pi*f*(i-phase-2*shift)/nrOfleds) + 1)) + 1)
ledStrip.setPixel(i, [r, g, b])
ledStrip.update()
phase = phase + 0.5
time.sleep(0.050)
if __name__ == '__main__':
if len(sys.argv) == 1:
nrOfleds = 240
else:
nrOfleds = int(sys.argv[1])
delayTime = 0.0
ledStrip = LedStrip_WS2801(nrOfleds)
rainbow(ledStrip, nrOfleds)
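    # Run with an optional LED count, e.g. `python rainbow.py 160`
    # (defaults to 240; requires the LedStrip_WS2801 SPI driver module).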