blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
afa46d68ecf6d61c6df7864fbb08ae004dd62027 | 47a3a59288792f654309bfc9ceb6cbfa890720ef | /ramda/pick_all_test.py | c1dcb75e4e093273553227d190646a1b2ddff6d4 | [
"MIT"
]
| permissive | jakobkolb/ramda.py | 9531d32b9036908df09107d2cc19c04bf9544564 | 982b2172f4bb95b9a5b09eff8077362d6f2f0920 | refs/heads/master | 2023-06-23T00:46:24.347144 | 2021-02-01T16:47:51 | 2021-02-01T16:48:25 | 388,051,418 | 0 | 0 | MIT | 2021-07-21T16:31:45 | 2021-07-21T08:40:22 | null | UTF-8 | Python | false | false | 317 | py | from ramda import *
from ramda.private.asserts import *
def pick_all_test():
    """pick_all keeps every requested key; keys absent from the source map to None."""
    source = {"a": 1, "b": 2, "c": 3, "d": 4}
    cases = [
        (["a", "d"], {"a": 1, "d": 4}),
        (["a", "e", "f"], {"a": 1, "e": None, "f": None}),
    ]
    for keys, expected in cases:
        assert_equal(pick_all(keys, source), expected)
| [
"[email protected]"
]
| |
e185236b6376cf931550d58de7dbc40d13c29ad2 | 1e0e610166b36e5c73e7ff82c4c0b8b1288990bf | /scrapy/scrapy28.py | 6f5341d26d3402dea78b84c8a0d584f884360945 | []
| no_license | PythonOpen/PyhonProjects | 4ef1e70a971b9ebd0eb6a09e63e22581ad302534 | ede93314009564c31aa586d2f89ed8b1e4751c1b | refs/heads/master | 2022-05-20T23:21:03.536846 | 2020-04-27T00:59:32 | 2020-04-27T00:59:32 | 250,142,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 141 | py | import re
# findall demo: extract the runs of CJK ideographs from a string.
hello = u'您好,世界!'
# one or more characters in the CJK Unified Ideographs block
s = r'[\u4e00-\u9fa5]+'
pattern = re.compile(s)
m = pattern.findall(hello)
print(m)
| [
"[email protected]"
]
| |
c40ac47c517727668db2d5ecdab88a29f78e49cd | df7f13ec34591fe1ce2d9aeebd5fd183e012711a | /hata/discord/application_command/application_command_option_metadata/tests/test__ApplicationCommandOptionMetadataFloat__magic.py | 5f59bf9f28ed267aba57cdf79310534c0cbe8d27 | [
"LicenseRef-scancode-warranty-disclaimer"
]
| permissive | HuyaneMatsu/hata | 63e2f6a2d7a7539fd8f18498852d9d3fe5c41d2e | 53f24fdb38459dc5a4fd04f11bdbfee8295b76a4 | refs/heads/master | 2023-08-20T15:58:09.343044 | 2023-08-20T13:09:03 | 2023-08-20T13:09:03 | 163,677,173 | 3 | 3 | Apache-2.0 | 2019-12-18T03:46:12 | 2018-12-31T14:59:47 | Python | UTF-8 | Python | false | false | 2,571 | py | import vampytest
from ...application_command_option_choice import ApplicationCommandOptionChoice
from ..float import ApplicationCommandOptionMetadataFloat
def test__ApplicationCommandOptionMetadataFloat__repr():
    """
    Tests whether ``ApplicationCommandOptionMetadataFloat.__repr__`` works as intended.
    """
    option_metadata = ApplicationCommandOptionMetadataFloat(
        required = True,
        autocomplete = True,
        choices = [ApplicationCommandOptionChoice('19', 19.0), ApplicationCommandOptionChoice('18', 18.0)],
        max_value = 10.0,
        min_value = 20.0,
    )
    
    # Building the representation should succeed and yield a string.
    vampytest.assert_instance(repr(option_metadata), str)
def test__ApplicationCommandOptionMetadataFloat__hash():
    """
    Tests whether ``ApplicationCommandOptionMetadataFloat.__hash__`` works as intended.
    """
    option_metadata = ApplicationCommandOptionMetadataFloat(
        required = True,
        autocomplete = True,
        choices = [ApplicationCommandOptionChoice('19', 19.0), ApplicationCommandOptionChoice('18', 18.0)],
        max_value = 10.0,
        min_value = 20.0,
    )
    
    # Hashing should succeed and yield an integer.
    vampytest.assert_instance(hash(option_metadata), int)
def test__ApplicationCommandOptionMetadataFloat__eq():
    """
    Tests whether ``ApplicationCommandOptionMetadataFloat.__eq__`` works as intended.
    """
    keyword_parameters = {
        'required': True,
        'autocomplete': True,
        'choices': [ApplicationCommandOptionChoice('19', 19.0), ApplicationCommandOptionChoice('18', 18.0)],
        'max_value': 10.0,
        'min_value': 20.0,
    }
    
    option_metadata = ApplicationCommandOptionMetadataFloat(**keyword_parameters)
    
    # Equal to itself, never equal to a foreign type.
    vampytest.assert_eq(option_metadata, option_metadata)
    vampytest.assert_ne(option_metadata, object())
    
    # Altering any single field must break equality.
    for field_name, field_value in (
        ('required', False),
        ('autocomplete', False),
        ('choices', None),
        ('max_value', 11.0),
        ('min_value', 12.0),
    ):
        altered_metadata = ApplicationCommandOptionMetadataFloat(**{**keyword_parameters, field_name: field_value})
        vampytest.assert_ne(option_metadata, altered_metadata)
| [
"[email protected]"
]
| |
1cb3e56193bf9836e2e816dd830b90e36338db8b | 490ffe1023a601760ae7288e86723f0c6e366bba | /kolla-docker/python-zunclient/zunclient/osc/v1/images.py | 4af97886c8048a0d97904f5372e7928a39594407 | [
"Apache-2.0"
]
| permissive | bopopescu/Cloud-User-Management | 89696a5ea5d2f95191327fbeab6c3e400bbfb2b8 | 390988bf4915a276c7bf8d96b62c3051c17d9e6e | refs/heads/master | 2022-11-19T10:09:36.662906 | 2018-11-07T20:28:31 | 2018-11-07T20:28:31 | 281,786,345 | 0 | 0 | null | 2020-07-22T21:26:07 | 2020-07-22T21:26:06 | null | UTF-8 | Python | false | false | 5,747 | py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from osc_lib.command import command
from osc_lib import utils
from zunclient.common import utils as zun_utils
def _image_columns(image):
    # Column names for display: every attribute the API returned for this image.
    return image._info.keys()
def _get_client(obj, parsed_args):
    # Log the command hand-off, then return the container-service API client.
    obj.log.debug("take_action(%s)" % parsed_args)
    return obj.app.client_manager.container
class ListImage(command.Lister):
    """List available images"""

    log = logging.getLogger(__name__ + ".ListImage")

    def get_parser(self, prog_name):
        """Extend the base parser with paging and sorting options."""
        parser = super(ListImage, self).get_parser(prog_name)
        parser.add_argument(
            '--marker', metavar='<marker>', default=None,
            help='The last image UUID of the previous page; '
                 'displays list of images after "marker".')
        parser.add_argument(
            '--limit', metavar='<limit>', type=int,
            help='Maximum number of images to return')
        parser.add_argument(
            '--sort-key', metavar='<sort-key>',
            help='Column to sort results by')
        parser.add_argument(
            '--sort-dir', metavar='<sort-dir>', choices=['desc', 'asc'],
            help='Direction to sort. "asc" or "desc".')
        return parser

    def take_action(self, parsed_args):
        """Fetch the image list and shape it for Lister output."""
        client = _get_client(self, parsed_args)
        # Drop unset options before handing them to the API.
        opts = zun_utils.remove_null_parms(
            marker=parsed_args.marker,
            limit=parsed_args.limit,
            sort_key=parsed_args.sort_key,
            sort_dir=parsed_args.sort_dir,
        )
        images = client.images.list(**opts)
        columns = ('uuid', 'image_id', 'repo', 'tag', 'size')
        rows = (utils.get_item_properties(image, columns) for image in images)
        return (columns, rows)
class PullImage(command.ShowOne):
    """Pull specified image"""

    log = logging.getLogger(__name__ + ".PullImage")

    def get_parser(self, prog_name):
        """Add the positional image-name argument."""
        parser = super(PullImage, self).get_parser(prog_name)
        parser.add_argument(
            'image', metavar='<image>',
            help='Name of the image')
        return parser

    def take_action(self, parsed_args):
        """Ask the service to pull the image, then show its attributes."""
        client = _get_client(self, parsed_args)
        image = client.images.create(repo=parsed_args.image)
        columns = _image_columns(image)
        return columns, utils.get_item_properties(image, columns)
class SearchImage(command.Lister):
    """Search specified image"""

    log = logging.getLogger(__name__ + ".SearchImage")

    def get_parser(self, prog_name):
        """Add search filters: image driver, image name and exact matching."""
        parser = super(SearchImage, self).get_parser(prog_name)
        parser.add_argument(
            '--image-driver', metavar='<image-driver>',
            help='Name of the image driver')
        parser.add_argument(
            'image_name', metavar='<image_name>',
            help='Name of the image')
        parser.add_argument(
            '--exact-match', default=False, action='store_true',
            help='exact match image name')
        return parser

    def take_action(self, parsed_args):
        """Run the search and shape the hits for Lister output."""
        client = _get_client(self, parsed_args)
        # Drop unset options before handing them to the API.
        opts = zun_utils.remove_null_parms(
            image_driver=parsed_args.image_driver,
            image=parsed_args.image_name,
            exact_match=parsed_args.exact_match,
        )
        images = client.images.search_image(**opts)
        columns = ('ID', 'Name', 'Tags', 'Status', 'Size', 'Metadata')
        rows = (utils.get_item_properties(image, columns) for image in images)
        return (columns, rows)
class ShowImage(command.ShowOne):
    """Describe a specific image"""

    log = logging.getLogger(__name__ + ".ShowImage")

    def get_parser(self, prog_name):
        """Add the positional image UUID argument."""
        parser = super(ShowImage, self).get_parser(prog_name)
        parser.add_argument(
            'uuid', metavar='<uuid>',
            help='UUID of image to describe')
        return parser

    def take_action(self, parsed_args):
        """Fetch one image by UUID and show all of its fields."""
        client = _get_client(self, parsed_args)
        image = client.images.get(id=parsed_args.uuid)
        columns = _image_columns(image)
        return columns, utils.get_item_properties(image, columns)
class DeleteImage(command.Command):
    """Delete specified image"""

    log = logging.getLogger(__name__ + ".DeleteImage")

    def get_parser(self, prog_name):
        parser = super(DeleteImage, self).get_parser(prog_name)
        parser.add_argument(
            'uuid',
            metavar='<uuid>',
            # Fixed copy-pasted help text ("describe" -> "delete").
            help='UUID of image to delete')
        return parser

    def take_action(self, parsed_args):
        """Request deletion of the image and report the outcome.

        Bug fix: the success message previously went through an undefined
        gettext alias ``_``; the resulting NameError was swallowed by the
        broad except clause, so a successful delete was reported as a
        failure. The message is now printed directly and the try body is
        narrowed to the API call.
        """
        client = _get_client(self, parsed_args)
        img_id = parsed_args.uuid
        try:
            client.images.delete(img_id)
        except Exception as e:
            # Best-effort CLI behaviour: report the error, do not re-raise.
            print("Delete for image %(image)s failed: %(e)s" %
                  {'image': img_id, 'e': e})
        else:
            print('Request to delete image %s has been accepted.' % img_id)
| [
"[email protected]"
]
| |
605b9fe4d1a7957cb2c55e30fa0363e03ff3f7eb | 8bde826917476ba95bd3e9b4c33d4b28284c1774 | /bin/fasta2phylip.py | 20c4f9be477fe93b9186aee62a7a87c516416368 | []
| no_license | rpetit3-education/ibs594-phylogenetics | 2935d2ea3ba0ab41967cb0ddf42a2850328034e4 | f39048354d636300ba1a2067a0bdc5f0c6bddc95 | refs/heads/master | 2020-05-29T12:27:21.973218 | 2014-10-08T01:42:56 | 2014-10-08T01:42:56 | 24,832,930 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 294 | py | #! /usr/bin/env python
'''
'''
import sys
from Bio import AlignIO
# Convert a FASTA alignment (argv[1]) into PHYLIP format (argv[2]).
# Bug fix: the handles were closed manually, so an exception during
# parsing/writing leaked them; context managers now guarantee closure.
with open(sys.argv[1], "rU") as input_handle:
    with open(sys.argv[2], "w") as output_handle:
        alignments = AlignIO.parse(input_handle, "fasta")
        AlignIO.write(alignments, output_handle, "phylip")
| [
"[email protected]"
]
| |
a8a8b14820fcddd5bd55fd019f52ee57c7ff3a51 | de9bd97adcbe4d278a1bf1d5f9107e87b94366e1 | /coding_solutions/Day9(28-05-2020)/Program2.py | 31f78a9bb4008fe640973829d9b4d7bc8af22e25 | []
| no_license | alvas-education-foundation/ANUSHA_4AL17CS007 | 6ed2957d6d1b1b1f2172de3c8ba6bfd3f886aab9 | 031849369448dd60f56769abb630fc7cf22fe325 | refs/heads/master | 2022-10-29T10:49:23.188604 | 2020-06-15T15:50:00 | 2020-06-15T15:50:00 | 267,532,875 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 366 | py | #Python program to find digital root of a #number
n = int(input("Enter the digit\n"))
def digital_root(n):
    """Print each successive digit sum of ``n`` and return the final
    single-digit value (the digital root).

    Bug fix: the original fell through without a return in the base case,
    so the function yielded ``None`` and the caller's ``print`` emitted
    ``None`` after the root.
    """
    digit_count = len(str(n))
    s = 0
    for _ in range(digit_count):
        s = s + n % 10
        n = n // 10
    print(s)
    if len(str(s)) > 1:
        # More than one digit left: keep reducing.
        return digital_root(s)
    return s
print(digital_root(n))
Output:
Enter the digit
162536738292
54
9
Enter the digit
0
0
| [
"[email protected]"
]
| |
6841b5f8f2844439542581821019d5fd11329764 | 07f837d8c5236fe5e75ef510cd296814452370ce | /py/four_hour_cloud.py | 220c212f5dfc9d69fd0d4fccfd0d8b1a5659a04f | [
"Apache-2.0"
]
| permissive | vkuznet/h2o | 6f9006a5186b964bac266981d9082aec7bc1067c | e08f7014f228cbaecfb21f57379970e6a3ac0756 | refs/heads/master | 2021-08-28T11:37:52.099953 | 2021-08-10T22:43:34 | 2021-08-10T22:43:34 | 20,032,996 | 0 | 0 | Apache-2.0 | 2021-08-10T22:43:35 | 2014-05-21T18:46:27 | Java | UTF-8 | Python | false | false | 3,693 | py | #!/usr/bin/python
import unittest, time, sys, random, datetime
sys.path.extend(['.','..','py','../h2o/py','../../h2o/py'])
import h2o, h2o_hosts, h2o_cmd, h2o_browse as h2b
import h2o_print as h2p
beginning = time.time()
def log(msg):
    """Print msg to stdout behind a green "[0xdata]" tag (ANSI escape codes)."""
    print "\033[92m[0xdata] \033[0m", msg
CHECK_WHILE_SLEEPING = False
print "Don't start a test yet..."
class Basic(unittest.TestCase):
    """Builds an h2o cloud and keeps it alive for up to 4 hours so other
    tests can clone it (via the generated h2o-nodes.json)."""

    def tearDown(self):
        # After each test, scan the sandbox log files for errors/stack traces.
        h2o.check_sandbox_for_errors()

    @classmethod
    def setUpClass(cls):
        """Build the cloud once per TestCase: 3 local JVMs, or multi-host."""
        global SEED, localhost
        SEED = h2o.setup_random_seed()
        localhost = h2o.decide_if_localhost()
        if (localhost):
            # h2o.nodes[0].delete_keys_at_teardown should cause the testdir_release
            # tests to delete keys after each test completion (not cloud teardown)
            h2o.build_cloud(3, create_json=True, java_heap_GB=4, delete_keys_at_teardown=False)
        else:
            h2o_hosts.build_cloud_with_hosts(create_json=True, delete_keys_at_teardown=False)

    @classmethod
    def tearDownClass(cls):
        # Shut the whole cloud down once every test in the class has run.
        h2o.tear_down_cloud()

    def test_build_for_clone(self):
        """Print cloning instructions, then poll cloud health every minute
        for up to 4 hours while other (cloned) tests run against it."""
        # python gets confused about which 'start' if I used start here
        elapsed = time.time() - beginning
        print "\n%0.2f seconds to get here from start" % elapsed
        # might as well open a browser on it? (because the ip/port will vary
        # maybe just print the ip/port for now
        ## h2b.browseTheCloud()
        maxTime = 4*3600
        totalTime = 0
        incrTime = 60
        h2p.purple_print("\nSleeping for total of", (maxTime+0.0)/3600, "hours.")
        print "Will check h2o logs every", incrTime, "seconds"
        print "Should be able to run another test using h2o-nodes.json to clone cloud"
        print "i.e. h2o.build_cloud_with_json()"
        print "Bad test if a running test shuts down the cloud. I'm supposed to!\n"
        h2p.green_print("To watch cloud in browser follow address:")
        h2p.green_print("    http://{0}:{1}/Cloud.html".format(h2o.nodes[0].http_addr, h2o.nodes[0].port))
        h2p.blue_print("You can start a test (or tests) now!")
        h2p.blue_print("Will Check cloud status every %s secs and kill cloud if wrong or no answer" % incrTime)
        if CHECK_WHILE_SLEEPING:
            h2p.blue_print("Will also look at redirected stdout/stderr logs in sandbox every %s secs" % incrTime)
        h2p.red_print("No checking of logs while sleeping, or check of cloud status")
        h2p.yellow_print("So if H2O stack traces, it's up to you to kill me if 4 hours is too long")
        h2p.yellow_print("ctrl-c will cause all jvms to die(thru psutil terminate, paramiko channel death or h2o shutdown...")
        while (totalTime<maxTime): # die after 4 hours
            h2o.sleep(incrTime)
            totalTime += incrTime
            # good to touch all the nodes to see if they're still responsive
            # give them up to 120 secs to respond (each individually)
            h2o.verify_cloud_size(timeoutSecs=120)
            if CHECK_WHILE_SLEEPING:
                print "Checking sandbox log files"
                h2o.check_sandbox_for_errors(cloudShutdownIsError=True)
            else:
                print str(datetime.datetime.now()), h2o.python_cmd_line, "still here", totalTime, maxTime, incrTime
        # don't do this, as the cloud may be hung?
        if 1==0:
            print "Shutting down cloud, but first delete all keys"
            start = time.time()
            h2i.delete_keys_at_all_nodes()
            elapsed = time.time() - start
            print "delete_keys_at_all_nodes(): took", elapsed, "secs"
if __name__ == '__main__':
h2o.unit_main()
| [
"[email protected]"
]
| |
51d94bb10630aafec0640620432c3ebe407246a3 | f5b5a6e3f844d849a05ff56c497638e607f940e0 | /capitulo 06/06.32 - Programa 6.10 Transformacao de range em uma lista.py | 51cd535815fbd14cb32451d366d9badde5012cb3 | []
| no_license | alexrogeriodj/Caixa-Eletronico-em-Python | 9237fa2f7f8fab5f17b7dd008af215fb0aaed29f | 96b5238437c88e89aed7a7b9c34b303e1e7d61e5 | refs/heads/master | 2020-09-06T21:47:36.169855 | 2019-11-09T00:22:14 | 2019-11-09T00:22:14 | 220,563,960 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 770 | py | ##############################################################################
# Parte do livro Introdução à Programação com Python
# Autor: Nilo Ney Coutinho Menezes
# Editora Novatec (c) 2010-2019
# Primeira edição - Novembro/2010 - ISBN 978-85-7522-250-8
# Segunda edição - Junho/2014 - ISBN 978-85-7522-408-3
# Terceira edição - Janeiro/2019 - ISBN 978-85-7522-718-3
# Site: http://python.nilo.pro.br/
#
# Arquivo: listagem3\capítulo 06\06.32 - Programa 6.10 – Transformação de range em uma lista.py
# Descrição: Programa 6.10 – Transformação de range em uma lista
##############################################################################
# Program 6.10 - Turning a range into a list
# Multiples of 50 starting at 100, stopping before 1100.
L = [n for n in range(100, 1100, 50)]
print(L)
| [
"[email protected]"
]
| |
0f8f10c674666f80a8dfb7dc011afc0af5ca45d6 | 41e69a518ff146ef299e9b807a7a96428effd958 | /test/test_full_operations_operation.py | ef25b6c9c5cb0a0793bc2948de26f0953af9ea2b | []
| no_license | daxslab/enzona-payment-python | 40797a8aea7d7185ad04fe401c4f699cb1d93309 | 9a7721445cc1331e14687374df872f911a565305 | refs/heads/master | 2022-07-13T15:34:53.171246 | 2020-05-16T04:18:51 | 2020-05-16T04:18:51 | 264,357,269 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,007 | py | # coding: utf-8
"""
PaymentAPI
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import enzona_payment
from enzona_payment.models.full_operations_operation import FullOperationsOperation # noqa: E501
from enzona_payment.rest import ApiException
class TestFullOperationsOperation(unittest.TestCase):
    """FullOperationsOperation unit test stubs"""

    def setUp(self):
        # No per-test fixtures are needed yet for these generated stubs.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testFullOperationsOperation(self):
        """Test FullOperationsOperation"""
        # FIXME: construct object with mandatory attributes with example values
        # model = enzona_payment.models.full_operations_operation.FullOperationsOperation()  # noqa: E501
        pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
247f95d2dd1ba8c30dc05f432d9df8ab67e809ea | cd995dbf44647858e092f22fd99228b3aadc138a | /16 트라이/56 트라이 구현.py | a9e65658e8ed722ea191413bbcd928d182b72dc5 | []
| no_license | Wooyongjeong/python-algorithm-interview | ed970ae046bd65ac46fd4f42aaa386f353f97233 | c134dbb1aaff5f4c7aa930be79fcdf1dfad62cec | refs/heads/master | 2023-08-17T04:51:19.727919 | 2021-10-07T14:29:24 | 2021-10-07T14:29:24 | 398,216,040 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,336 | py | import collections
# Solution 1. A concise trie implementation using dictionaries
# A node of the trie
class TrieNode:
    """Single trie node: an end-of-word flag plus child links."""

    def __init__(self):
        # True when some inserted word ends exactly at this node.
        self.word = False
        # Missing children are created on demand during insertion.
        self.children = collections.defaultdict(TrieNode)


class Trie:
    """Prefix tree supporting insertion, exact-word search and prefix search.

    Improvement over the original: search() and startsWith() duplicated the
    same traversal loop; it is now factored into the _find() helper.
    """

    def __init__(self):
        """
        Initialize your data structure here.
        """
        self.root = TrieNode()

    def _find(self, prefix: str):
        """Follow *prefix* down from the root; return the final node, or
        None if the path does not exist (no nodes are created)."""
        node = self.root
        for char in prefix:
            if char not in node.children:
                return None
            node = node.children[char]
        return node

    # Insert a word
    def insert(self, word: str) -> None:
        """
        Inserts a word into the trie.
        """
        node = self.root
        for char in word:
            node = node.children[char]
        node.word = True

    # Exact-word membership
    def search(self, word: str) -> bool:
        """
        Returns if the word is in the trie.
        """
        node = self._find(word)
        return node is not None and node.word

    # Prefix membership
    def startsWith(self, prefix: str) -> bool:
        """
        Returns if there is any word in the trie that starts with the given prefix.
        """
        return self._find(prefix) is not None
| [
"[email protected]"
]
| |
2f1752c2bb239fc015ffd2b4a91ad7011d473660 | 177c2393fb86c6fbcc1e995a60805e4a6b901aae | /Sesion-02/Banco/tarjeta/views.py | d3de642d321bb86fe5f489875cbfc164f93fcf53 | []
| no_license | rctorr/TECP0008FUPYCMX2001 | bc6a11e33486be2ccb5d2b79a9a64ba5e48a24f4 | 419635f02de189b91a06a9b366950320dfb0e00e | refs/heads/master | 2023-05-20T22:37:25.194835 | 2021-06-08T03:12:37 | 2021-06-08T03:12:37 | 370,964,281 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 164 | py | from django.shortcuts import render
# Create your views here.
def index(request):
    """Serve GET / by rendering the card app's landing template."""
    template = "tarjeta/index.html"
    return render(request, template)
"[email protected]"
]
| |
eda95050e4aab8b87d5ef0e01e7b3dd35478760f | d0bd9c3c5539141c74e0eeae2fa6b7b38af84ce2 | /tests/test_data/test_molecular_weight.py | 6743fdd25cc4c253454be447b8c4c52f01c76f3a | [
"BSD-3-Clause"
]
| permissive | KaneWh1te/cogent3 | 150c72e2f80a6439de0413b39c4c37c09c9966e3 | 115e9eb5700627fdb24be61441a7e3e155c02c61 | refs/heads/master | 2023-07-29T00:32:03.742351 | 2021-04-20T04:32:00 | 2021-04-20T04:32:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,793 | py | #!/usr/bin/env python
"""Tests for molecular weight.
"""
from unittest import TestCase, main
from cogent3.data.molecular_weight import ProteinMW, RnaMW
__author__ = "Rob Knight"
__copyright__ = "Copyright 2007-2021, The Cogent Project"
__credits__ = ["Rob Knight"]
__license__ = "BSD-3"
__version__ = "2021.04.20a"
__maintainer__ = "Gavin Huttley"
__email__ = "[email protected]"
__status__ = "Production"
from numpy.testing import assert_allclose
class WeightCalculatorTests(TestCase):
    """Tests for WeightCalculator, which should calculate molecular weights."""

    def test_call(self):
        """WeightCalculator should return correct molecular weight"""
        r = RnaMW
        p = ProteinMW
        # Empty sequences weigh nothing.
        self.assertEqual(p(""), 0)
        self.assertEqual(r(""), 0)
        # Single residues, then short homopolymers.
        assert_allclose(p("A"), 89.09)
        assert_allclose(r("A"), 375.17)
        assert_allclose(p("AAA"), 231.27)
        assert_allclose(r("AAA"), 1001.59)
        assert_allclose(r("AAACCCA"), 2182.37)
        # A full-length protein sequence (backslashes continue the literal
        # without inserting newlines; continuation lines are unindented so
        # no spaces leak into the string).
        assert_allclose(
            p(
                "MVQQAESLEAESNLPREALDTEEGEFMACSPVALDESDPDWCKTASGHIKRPMNAFMVWSKIERRKIMEQSPDMHNAEISKRLGKR\
WKMLKDSEKIPFIREAERLRLKHMADYPDYKYRPRKKPKMDPSAKPSASQSPEKSAAGGGGGSAGGGAGGAKTSKGSSKKCGKLKA\
PAAAGAKAGAGKAAQSGDYGGAGDDYVLGSLRVSGSGGGGAGKTVKCVFLDEDDDDDDDDDELQLQIKQEPDEEDEEPPHQQLLQP\
PGQQPSQLLRRYNVAKVPASPTLSSSAESPEGASLYDEVRAGATSGAGGGSRLYYSFKNITKQHPPPLAQPALSPASSRSVSTSSS\
SSSGSSSGSSGEDADDLMFDLSLNFSQSAHSASEQQLGGGAAAGNLSLSLVDKDLDSFSEGSLGSHFEFPDYCTPELSEMIAGDWL\
EANFSDLVFTY"
            ),
            46685.97,
        )
# run if called from command-line
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
687ceb831757b55f605ca7582082dad5862e647f | aab50ee31f6107fd08d87bd4a93ded216eebb6be | /com/baizhi/杜亚博作业/杜亚博_9.5/杜亚博_9.24.py | e22353a598b8ddc8afb188119acb08d6855be8a8 | []
| no_license | dyb-py/pk | deaf50988694475bdfcda6f2535ba0e728d79931 | b571b67a98fa0be6d73cccb48b66386bc4dfd191 | refs/heads/master | 2020-12-29T15:06:20.294353 | 2020-02-06T09:05:08 | 2020-02-06T09:05:08 | 238,646,794 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,139 | py | #1
# SyntaxError 语法错误
#2
#下标越界 IndexError
#3
#属性错误 AttributeError
#4
#KeyError
#5
# SyntaxError
#6
# NameError
#7
#
# UnboundLocalError
#8
# try-except 之类的语句来处理异常
#9
# 可以, 因为异常有多种 多个except可以捕捉多个异常
#10
#自定义异常类 直接或间接继承BaseException
#except捕捉一切异常
#except(异常1,异常2.。。)
#11
#不建议 可读性极差 出现异常后 不清楚是那种异常 不好维护
#12
#用finally关闭文件
#13
# try:
# for i in range(3):
# for j in range(3):
# if i==2:
# raise KeyboardInterrupt
# print(i,j)
# except KeyboardInterrupt:
# print('退出了')
#14
# def in_input():
# a=input('输入整数')
# try:
# a=int(a)
# print(a)
# except:
# print('输入的不是整数')
# in_input()
#15
#出现NameError
# try:
# f=open('my.txt')
# print(f.read())
# except OSError as reason:
# print('出错了'+str(reason))
# finally:
# try:
# f.close()
# except NameError as e :
# print(e) | [
"[email protected]"
]
| |
4a109b25566b48449fcf3d5d6534a0e68f5e82a2 | 4341e83359d5dddbfb67998e456d4d053fb73b71 | /web_deployment/admin/catlog/apps.py | 662d3e5de0afd4a124f9e2f58e8bf8838c155df9 | []
| no_license | ArulFranklin08/django-deployments | 1c1ef80356f37b33d9ee86bcd65f032b4d1d31ee | 74938f5d946af646d7a54be0766b6b1dde59ff3d | refs/heads/main | 2023-02-01T17:40:39.045299 | 2020-12-15T07:46:20 | 2020-12-15T07:46:20 | 321,590,952 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 109 | py | #!/usr/bin/env python
from django.apps import AppConfig
class CatlogConfig(AppConfig):
    """Django application configuration for the 'catlog' app."""
    name = 'catlog'
| [
"[email protected]"
]
| |
0d13290dc44ec8593cb073a1ca94340e77ff9ea9 | 146db8cd0f775e0c4d051ea675bd147f1cb08503 | /old/md-service/SMDS/timestamp.py | e0bd0bdf1daac67b56a62833456e03a8e2b03702 | [
"Apache-2.0"
]
| permissive | syndicate-storage/syndicate | 3cb4a22a4ae0d92859f57ed4634b03f1665791e4 | 4837265be3e0aa18cdf4ee50316dbfc2d1f06e5b | refs/heads/master | 2020-04-14T19:59:42.849428 | 2016-03-24T15:47:49 | 2016-03-24T15:47:49 | 10,639,326 | 16 | 6 | null | null | null | null | UTF-8 | Python | false | false | 5,873 | py | #
# Utilities to handle timestamps / durations from/to integers and strings
#
# $Id: Timestamp.py 18344 2010-06-22 18:56:38Z caglar $
# $URL: http://svn.planet-lab.org/svn/PLCAPI/trunk/PLC/Timestamp.py $
#
#
# datetime.{datetime,timedelta} are powerful tools, but these objects are not
# natively marshalled over xmlrpc
#
from types import StringTypes
import time, calendar
import datetime
from SMDS.faults import *
from SMDS.parameter import Parameter, Mixed
# a dummy class mostly used as a namespace
class Timestamp:
debug=False
# debug=True
# this is how we expose times to SQL
sql_format = "%Y-%m-%d %H:%M:%S"
sql_format_utc = "%Y-%m-%d %H:%M:%S UTC"
# this one (datetime.isoformat) would work too but that's less readable - we support this input though
iso_format = "%Y-%m-%dT%H:%M:%S"
# sometimes it's convenient to understand more formats
input_formats = [ sql_format,
sql_format_utc,
iso_format,
"%Y-%m-%d %H:%M",
"%Y-%m-%d %H:%M UTC",
]
# for timestamps we usually accept either an int, or an ISO string,
# the datetime.datetime stuff can in general be used locally,
# but not sure it can be marshalled over xmlrpc though
@staticmethod
def Parameter (doc):
return Mixed (Parameter (int, doc + " (unix timestamp)"),
Parameter (str, doc + " (formatted as %s)"%Timestamp.sql_format),
)
@staticmethod
def sql_validate (input, timezone=False, check_future = False):
"""
Validates the specified GMT timestamp, returns a
standardized string suitable for SQL input.
Input may be a number (seconds since UNIX epoch back in 1970,
or a string (in one of the supported input formats).
If timezone is True, the resulting string contains
timezone information, which is hard-wired as 'UTC'
If check_future is True, raises an exception if timestamp is in
the past.
Returns a GMT timestamp string suitable to feed SQL.
"""
if not timezone: output_format = Timestamp.sql_format
else: output_format = Timestamp.sql_format_utc
if Timestamp.debug: print 'sql_validate, in:',input,
if isinstance(input, StringTypes):
sql=''
# calendar.timegm() is the inverse of time.gmtime()
for time_format in Timestamp.input_formats:
try:
timestamp = calendar.timegm(time.strptime(input, time_format))
sql = time.strftime(output_format, time.gmtime(timestamp))
break
# wrong format: ignore
except ValueError: pass
# could not parse it
if not sql:
raise MDInvalidArgument, "Cannot parse timestamp %r - not in any of %r formats"%(input,Timestamp.input_formats)
elif isinstance (input,(int,long,float)):
try:
timestamp = long(input)
sql = time.strftime(output_format, time.gmtime(timestamp))
except Exception,e:
raise MDInvalidArgument, "Timestamp %r not recognized -- %r"%(input,e)
else:
raise MDInvalidArgument, "Timestamp %r - unsupported type %r"%(input,type(input))
if check_future and input < time.time():
raise MDInvalidArgument, "'%s' not in the future" % sql
if Timestamp.debug: print 'sql_validate, out:',sql
return sql
@staticmethod
def sql_validate_utc (timestamp):
"For convenience, return sql_validate(intput, timezone=True, check_future=False)"
return Timestamp.sql_validate (timestamp, timezone=True, check_future=False)
@staticmethod
def cast_long (input):
"""
Translates input timestamp as a unix timestamp.
Input may be a number (seconds since UNIX epoch, i.e., 1970-01-01
00:00:00 GMT), a string (in one of the supported input formats above).
"""
if Timestamp.debug: print 'cast_long, in:',input,
if isinstance(input, StringTypes):
timestamp=0
for time_format in Timestamp.input_formats:
try:
result=calendar.timegm(time.strptime(input, time_format))
if Timestamp.debug: print 'out:',result
return result
# wrong format: ignore
except ValueError: pass
raise MDInvalidArgument, "Cannot parse timestamp %r - not in any of %r formats"%(input,Timestamp.input_formats)
elif isinstance (input,(int,long,float)):
result=long(input)
if Timestamp.debug: print 'out:',result
return result
else:
raise MDInvalidArgument, "Timestamp %r - unsupported type %r"%(input,type(input))
# utility for displaying durations
# be consistent in avoiding the datetime stuff
class Duration:
    """Utility for rendering and validating durations expressed in seconds."""
    MINUTE = 60
    HOUR = 3600
    DAY = 3600*24

    @staticmethod
    def to_string(duration):
        """Render *duration* (seconds) as e.g. '1 d-2 h-3 m-4 s', or 'void' for 0.

        Bug fix: the days branch referenced an undefined name ``td.days``
        (a NameError for any duration of a day or more) and had a stray
        ')' inside the format string; it now formats the computed ``days``.
        """
        result=[]
        left=duration
        (days,left) = divmod(left,Duration.DAY)
        if days:    result.append("%d d"%days)
        (hours,left) = divmod (left,Duration.HOUR)
        if hours:   result.append("%d h"%hours)
        (minutes, seconds) = divmod (left, Duration.MINUTE)
        if minutes: result.append("%d m"%minutes)
        if seconds: result.append("%d s"%seconds)
        if not result: result = ['void']
        return "-".join(result)

    @staticmethod
    def validate (duration):
        # support seconds only for now, works for int/long/str
        try:
            return long (duration)
        except Exception:
            # call-style raise is identical in Python 2 and stays valid in 3
            raise MDInvalidArgument("Could not parse duration %r"%duration)
| [
"[email protected]"
]
| |
93f760d29e6465c846af2a2e81bf5e01ba327359 | 4c67533d6d5183ed985288a55631fe1c99b5ae21 | /448.py | 9f64bb067dba432831726c47da96cfac37b25b7d | []
| no_license | Jiongxiao/Leetcode | 546f789a0d892fe56d7f53a41aa97ccb2a8e1813 | 641775f750a1197f9aaa23e5122b0add2ae064ee | refs/heads/master | 2021-01-17T01:11:35.970423 | 2017-09-21T07:04:52 | 2017-09-21T07:04:52 | 56,422,836 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 302 | py | class Solution(object):
def findDisappearedNumbers(self, nums):
"""
:type nums: List[int]
:rtype: List[int]
"""
for i in range(len(nums)):
nums[abs(nums[i])-1]=-abs(nums[abs(nums[i])-1])
return [j+1 for j in range(len(nums)) if nums[j]>0] | [
"[email protected]"
]
| |
2c478fb713f273ebcc755c97aa70348d66391179 | fea44d5ca4e6c9b2c7950234718a4531d453849e | /sktime/forecasting/tests/test_sarimax.py | 17fdd9984ebd6c8806270391fc1ea39884dc931e | [
"BSD-3-Clause"
]
| permissive | mlgig/sktime | 288069ab8c9b0743113877032dfca8cf1c2db3fb | 19618df351a27b77e3979efc191e53987dbd99ae | refs/heads/master | 2023-03-07T20:22:48.553615 | 2023-02-19T18:09:12 | 2023-02-19T18:09:12 | 234,604,691 | 1 | 0 | BSD-3-Clause | 2020-01-17T17:50:12 | 2020-01-17T17:50:11 | null | UTF-8 | Python | false | false | 1,049 | py | # -*- coding: utf-8 -*-
"""Tests the SARIMAX model."""
__author__ = ["TNTran92"]
import pytest
from numpy.testing import assert_allclose
from sktime.forecasting.sarimax import SARIMAX
from sktime.utils._testing.forecasting import make_forecasting_problem
from sktime.utils.validation._dependencies import _check_soft_dependencies
df = make_forecasting_problem()
@pytest.mark.skipif(
    not _check_soft_dependencies("statsmodels", severity="none"),
    reason="skip test if required soft dependency not available",
)
def test_SARIMAX_against_statsmodels():
    """Compares Sktime's and Statsmodel's SARIMAX."""
    from statsmodels.tsa.api import SARIMAX as _SARIMAX

    # Identical model configuration for both implementations.
    order = (1, 0, 0)
    seasonal_order = (1, 0, 0, 6)
    trend = "t"

    sktime_model = SARIMAX(order=order, trend=trend, seasonal_order=seasonal_order)
    sktime_model.fit(df)
    y_pred = sktime_model.predict(df.index)

    reference = _SARIMAX(endog=df, order=order, trend=trend, seasonal_order=seasonal_order)
    reference_pred = reference.fit().predict(df.index[0])

    assert_allclose(y_pred.tolist(), reference_pred.tolist())
| [
"[email protected]"
]
| |
8bbeda72f37a50488eb26bdbe34f50cb14917af4 | fcc88521f63a3c22c81a9242ae3b203f2ea888fd | /Python3/0459-Repeated-Substring-Pattern/soln.py | 4ebac002c8929e3bbc180c53514482b5e1dc4e61 | [
"MIT"
]
| permissive | wyaadarsh/LeetCode-Solutions | b5963e3427aa547d485d3a2cb24e6cedc72804fd | 3719f5cb059eefd66b83eb8ae990652f4b7fd124 | refs/heads/master | 2022-12-06T15:50:37.930987 | 2020-08-30T15:49:27 | 2020-08-30T15:49:27 | 291,811,790 | 0 | 1 | MIT | 2020-08-31T19:57:35 | 2020-08-31T19:57:34 | null | UTF-8 | Python | false | false | 158 | py | class Solution:
    def repeatedSubstringPattern(self, s):
        """
        Return True if s can be formed by repeating a proper substring of itself.

        Classic trick: s occurs inside (s + s) with the first and last
        characters removed exactly when s is a repetition of a shorter block;
        stripping those two characters rules out the trivial full-string match.

        :type s: str
        :rtype: bool
        """
        return s in (s + s)[1:-1] | [
"[email protected]"
]
| |
947b2fcbdca5e1cac34017bc1c75d05e106a4700 | 9f79c4f9a8a9154fc3dc9202ab8ed2547a722b5f | /Dictionaries/char_count.py | 6e73cf6fa167e94d9894e7ca32f38daa5956f09f | []
| no_license | grigor-stoyanov/PythonFundamentals | 31b6da00bd8294e8e802174dca4e62b231134090 | 5ae5f1f1b9ca9500d10e95318a731d3b29950a30 | refs/heads/main | 2023-02-11T12:17:19.010596 | 2021-01-14T22:14:54 | 2021-01-14T22:14:54 | 321,658,096 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 287 | py | line = input().split()
# Count how often each character occurs across all whitespace-separated
# words read from stdin (`line`), then print the counts in first-seen order
# (dicts preserve insertion order in Python 3.7+).
char_dic = {}
for word in line:
    for char in word:  # iterate characters directly instead of indexing by position
        char_dic[char] = char_dic.get(char, 0) + 1
for key, value in char_dic.items():
    print(f'{key} -> {value}')
| [
"[email protected]"
]
| |
8c13f655d8ae98bb3e994fd2181dfbed06df9aa9 | f82e67dd5f496d9e6d42b4fad4fb92b6bfb7bf3e | /scripts/client/gui/scaleform/locale/quests.py | d092b4f53be4bc89915ed7d17202ae9982bd837c | []
| no_license | webiumsk/WOT0.10.0 | 4e4413ed4e7b00e22fb85d25fdae9400cbb4e76b | a84f536c73f86d9e8fab559e97f88f99f2ad7e95 | refs/heads/master | 2021-01-09T21:55:00.662437 | 2015-10-23T20:46:45 | 2015-10-23T20:46:45 | 44,835,654 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 66,835 | py | # Embedded file name: scripts/client/gui/Scaleform/locale/QUESTS.py
class QUESTS(object):
IGR_TOOLTIP_BATTLESLABEL = '#quests:igr/tooltip/battlesLabel'
IGR_TOOLTIP_WINSLABEL = '#quests:igr/tooltip/winsLabel'
POSTBATTLE_PROGRESSRESET = '#quests:postBattle/progressReset'
TOOLTIP_PROGRESS_GROUPBY_HEADER = '#quests:tooltip/progress/groupBy/header'
TOOLTIP_PROGRESS_GROUPBY_BODY = '#quests:tooltip/progress/groupBy/body'
TOOLTIP_PROGRESS_GROUPBY_NOTE = '#quests:tooltip/progress/groupBy/note'
TOOLTIP_PROGRESS_GROUPBY_NOTE_LEVEL = '#quests:tooltip/progress/groupBy/note/level'
TOOLTIP_VEHTABLE_NATION_HEADER = '#quests:tooltip/vehTable/nation/header'
TOOLTIP_VEHTABLE_NATION_BODY = '#quests:tooltip/vehTable/nation/body'
TOOLTIP_VEHTABLE_CLASS_HEADER = '#quests:tooltip/vehTable/class/header'
TOOLTIP_VEHTABLE_CLASS_BODY = '#quests:tooltip/vehTable/class/body'
TOOLTIP_VEHTABLE_LEVEL_HEADER = '#quests:tooltip/vehTable/level/header'
TOOLTIP_VEHTABLE_LEVEL_BODY = '#quests:tooltip/vehTable/level/body'
TOOLTIP_VEHTABLE_NAME_HEADER = '#quests:tooltip/vehTable/name/header'
TOOLTIP_VEHTABLE_NAME_BODY = '#quests:tooltip/vehTable/name/body'
TOOLTIP_VEHTABLE_AVAILABILITY_HEADER = '#quests:tooltip/vehTable/availability/header'
TOOLTIP_VEHTABLE_AVAILABILITY_BODY = '#quests:tooltip/vehTable/availability/body'
TOOLTIP_VEHTABLE_DISCOUNT_HEADER = '#quests:tooltip/vehTable/discount/header'
TOOLTIP_VEHTABLE_DISCOUNT_BODY = '#quests:tooltip/vehTable/discount/body'
BONUSES_ITEMS_NAME = '#quests:bonuses/items/name'
BONUSES_BOOSTERS_NAME = '#quests:bonuses/boosters/name'
BONUSES_CUSTOMIZATION_VALUE = '#quests:bonuses/customization/value'
BONUSES_VEHICLES_NAME = '#quests:bonuses/vehicles/name'
BONUSES_VEHICLES_CREWLVL = '#quests:bonuses/vehicles/crewLvl'
BONUSES_VEHICLES_RENTDAYS = '#quests:bonuses/vehicles/rentDays'
BONUSES_VEHICLES_DESCRIPTION = '#quests:bonuses/vehicles/description'
BONUSES_ITEM_TANKMENXP = '#quests:bonuses/item/tankmenXP'
BONUSES_ITEM_XPFACTOR = '#quests:bonuses/item/xpFactor'
BONUSES_ITEM_CREDITSFACTOR = '#quests:bonuses/item/creditsFactor'
BONUSES_ITEM_FREEXPFACTOR = '#quests:bonuses/item/freeXPFactor'
BONUSES_ITEM_TANKMENXPFACTOR = '#quests:bonuses/item/tankmenXPFactor'
BONUSES_ITEM_SLOTS = '#quests:bonuses/item/slots'
BONUSES_ITEM_BERTHS = '#quests:bonuses/item/berths'
BONUSES_ITEM_PREMIUM = '#quests:bonuses/item/premium'
BONUSES_ITEM_TASK = '#quests:bonuses/item/task'
BONUSES_CREDITS_DESCRIPTION = '#quests:bonuses/credits/description'
BONUSES_GOLD_DESCRIPTION = '#quests:bonuses/gold/description'
BONUSES_TANKMEN_DESCRIPTION = '#quests:bonuses/tankmen/description'
BONUSES_ITEM_TANKWOMAN = '#quests:bonuses/item/tankwoman'
BONUSES_ITEM_ADDITIONBONUS = '#quests:bonuses/item/additionBonus'
BONUSES_ITEM_TANKMEN_NO_SKILLS = '#quests:bonuses/item/tankmen/no_skills'
BONUSES_ITEM_TANKMEN_WITH_SKILLS = '#quests:bonuses/item/tankmen/with_skills'
QUESTS_TITLE = '#quests:quests/title'
QUESTS_TITLE_MANEUVERSQUESTS = '#quests:quests/title/maneuversQuests'
QUESTS_TITLE_UNGOUPEDQUESTS = '#quests:quests/title/ungoupedQuests'
QUESTS_TITLE_LADDERQUESTS = '#quests:quests/title/ladderQuests'
QUESTS_TITLE_UNGOUPEDACTIONS = '#quests:quests/title/ungoupedActions'
QUESTS_CONDITIONS = '#quests:quests/conditions'
QUESTS_REQUIREMENTS = '#quests:quests/requirements'
QUESTS_CURRENT_NODATA = '#quests:quests/current/nodata'
QUESTS_FUTURE_NODATA = '#quests:quests/future/nodata'
QUESTS_CONTENT_NOQUESTSINROAMING = '#quests:quests/content/noQuestsInRoaming'
QUESTS_TABS_PERSONAL = '#quests:quests/tabs/personal'
QUESTS_TABS_CURRENT = '#quests:quests/tabs/current'
QUESTS_TABS_LADDER = '#quests:quests/tabs/ladder'
QUESTS_TABS_BEGINNER = '#quests:quests/tabs/beginner'
QUESTS_TABS_FUTURE = '#quests:quests/tabs/future'
QUESTSCONTROL_TITLE = '#quests:questsControl/title'
QUESTSCONTROL_ADDITIONALTITLE_NEEDRECEIVEDAWARD = '#quests:questsControl/additionalTitle/needReceivedAward'
QUESTSCONTROL_ADDITIONALTITLE_FREESLOTSANDFREEQUESTS = '#quests:questsControl/additionalTitle/freeSlotsAndFreeQuests'
QUESTSCONTROL_ADDITIONALTITLE_FIRSTRUN = '#quests:questsControl/additionalTitle/firstRun'
QUESTSCONTROL_ADDITIONALTITLE_EMPTY = '#quests:questsControl/additionalTitle/empty'
QUESTS_STATUS_DONE = '#quests:quests/status/done'
QUESTS_STATUS_NOTAVAILABLE = '#quests:quests/status/notAvailable'
QUESTS_STATUS_NOTDONE = '#quests:quests/status/notDone'
QUESTS_TABLE_NOVEHICLES = '#quests:quests/table/noVehicles'
QUESTS_TABLE_AMOUNT = '#quests:quests/table/amount'
QUESTS_TABLE_BATTLESLEFT = '#quests:quests/table/battlesLeft'
QUESTS_TABLE_INHANGAR = '#quests:quests/table/inHangar'
QUESTS_TABLE_NOTINHANGAR = '#quests:quests/table/notInHangar'
QUESTS_CURRENTTAB_HEADER_SORT = '#quests:quests/currentTab/header/sort'
QUESTS_CURRENTTAB_HEADER_CHECKBOX_TEXT = '#quests:quests/currentTab/header/checkBox/text'
QUESTS_CURRENTTAB_HEADER_DROPDOWN_DATE = '#quests:quests/currentTab/header/dropdown/date'
QUESTS_CURRENTTAB_HEADER_DROPDOWN_TIME = '#quests:quests/currentTab/header/dropdown/time'
QUESTS_CURRENTTAB_HEADER_DROPDOWN_ALL = '#quests:quests/currentTab/header/dropdown/all'
QUESTS_CURRENTTAB_HEADER_DROPDOWN_ACTION = '#quests:quests/currentTab/header/dropdown/action'
QUESTS_CURRENTTAB_HEADER_DROPDOWN_QUESTS = '#quests:quests/currentTab/header/dropdown/quests'
QUESTS_CURRENTTAB_HEADER_DROPDOWN_SPECIALMISSION = '#quests:quests/currentTab/header/dropdown/specialMission'
QUESTS_LIST_COMPLETE = '#quests:quests/list/complete'
QUESTS_LIST_CURRENT_NOALL = '#quests:quests/list/current/noAll'
QUESTS_LIST_CURRENT_NOQUESTS = '#quests:quests/list/current/noQuests'
QUESTS_LIST_CURRENT_NOACTIONS = '#quests:quests/list/current/noActions'
QUESTS_LIST_FUTURE_NOALL = '#quests:quests/list/future/noAll'
QUESTS_LIST_FUTURE_NOQUESTS = '#quests:quests/list/future/noQuests'
QUESTS_LIST_FUTURE_NOACTIONS = '#quests:quests/list/future/noActions'
QUESTS_LIST_CLICKCHECKBOX = '#quests:quests/list/clickCheckbox'
QUESTS_TABS_NOSELECTED_TEXT = '#quests:quests/tabs/noselected/text'
QUESTS_TABS_AWARD_TEXT = '#quests:quests/tabs/award/text'
QUESTS_TABS_ADDAWARD_TEXT = '#quests:quests/tabs/addAward/text'
ITEM_TYPE_ACTION = '#quests:item/type/action'
ITEM_TYPE_QUEST = '#quests:item/type/quest'
ITEM_TYPE_QUESTDAILY = '#quests:item/type/questDaily'
ITEM_TYPE_SPECIALMISSION = '#quests:item/type/specialMission'
ITEM_TYPE_QUESTSTRATEGIC = '#quests:item/type/questStrategic'
ITEM_TYPE_POTAPOV = '#quests:item/type/potapov'
ITEM_TYPE_LADDER = '#quests:item/type/ladder'
ITEM_TIMER_TILLSTART = '#quests:item/timer/tillStart'
ITEM_TIMER_TILLSTART_DAYS = '#quests:item/timer/tillStart/days'
ITEM_TIMER_TILLSTART_HOURS = '#quests:item/timer/tillStart/hours'
ITEM_TIMER_TILLSTART_MIN = '#quests:item/timer/tillStart/min'
ITEM_TIMER_TILLFINISH = '#quests:item/timer/tillFinish'
ITEM_TIMER_TILLFINISH_DAYS = '#quests:item/timer/tillFinish/days'
ITEM_TIMER_TILLFINISH_HOURS = '#quests:item/timer/tillFinish/hours'
ITEM_TIMER_TILLFINISH_LONGFORMAT = '#quests:item/timer/tillFinish/longFormat'
ITEM_TIMER_TILLFINISH_ONLYHOURS = '#quests:item/timer/tillFinish/onlyHours'
ITEM_TIMER_TILLFINISH_LESSTHANHOUR = '#quests:item/timer/tillFinish/lessThanHour'
ITEM_TIMER_TILLFINISH_SHORTFORMAT = '#quests:item/timer/tillFinish/shortFormat'
ITEM_TIMER_TILLFINISH_SHORTFULLFORMAT = '#quests:item/timer/tillFinish/shortFullFormat'
ITEM_TIMER_TILLFINISH_LONGFULLFORMAT = '#quests:item/timer/tillFinish/longFullFormat'
PERSONAL_SEASONS_AWARDSBUTTON = '#quests:personal/seasons/awardsButton'
PERSONAL_SEASONS_TAB_RANDOM = '#quests:personal/seasons/tab/random'
PERSONAL_SEASONS_TAB_FALLOUT = '#quests:personal/seasons/tab/fallout'
PERSONAL_SEASONS_ITEMTITLE = '#quests:personal/seasons/itemTitle'
PERSONAL_SEASONS_SHORTSEASONNAME = '#quests:personal/seasons/shortSeasonName'
PERSONAL_SEASONS_TILELABEL = '#quests:personal/seasons/tileLabel'
PERSONAL_SEASONS_TILEPROGRESS = '#quests:personal/seasons/tileProgress'
PERSONAL_SEASONS_SLOTS_NODATA = '#quests:personal/seasons/slots/noData'
PERSONAL_SEASONS_SLOTS_GETAWARD = '#quests:personal/seasons/slots/getAward'
PERSONAL_SEASONS_SLOTS_TITLE = '#quests:personal/seasons/slots/title'
PERSONAL_SEASONS_SLOTS_NOACTIVESLOTS_HEADER = '#quests:personal/seasons/slots/noActiveSlots/header'
PERSONAL_SEASONS_SLOTS_NOACTIVESLOTS_BODY = '#quests:personal/seasons/slots/noActiveSlots/body'
DETAILS_HEADER_INFO_TITLE = '#quests:details/header/info/title'
DETAILS_HEADER_INFO_DESCR_PARALLEL = '#quests:details/header/info/descr_parallel'
DETAILS_HEADER_INFO_DESCR_SERIAL = '#quests:details/header/info/descr_serial'
DETAILS_HEADER_TILLDATE = '#quests:details/header/tillDate'
DETAILS_HEADER_TILLDATETIMES = '#quests:details/header/tillDateTimes'
DETAILS_HEADER_TILLDATEDAYS = '#quests:details/header/tillDateDays'
DETAILS_HEADER_TILLDATEDAYSTIMES = '#quests:details/header/tillDateDaysTimes'
DETAILS_HEADER_ACTIVEDURATION = '#quests:details/header/activeDuration'
DETAILS_HEADER_ACTIVEDURATIONTIMES = '#quests:details/header/activeDurationTimes'
DETAILS_HEADER_ACTIVEDURATIONDAYS = '#quests:details/header/activeDurationDays'
DETAILS_HEADER_ACTIVEDURATIONDAYSTIMES = '#quests:details/header/activeDurationDaysTimes'
DETAILS_HEADER_SCHEDULEDAYS = '#quests:details/header/scheduleDays'
DETAILS_HEADER_SCHEDULETIMES = '#quests:details/header/scheduleTimes'
DETAILS_HEADER_SCHEDULEDAYSTIMES = '#quests:details/header/scheduleDaysTimes'
DETAILS_HEADER_HASNOVEHICLES = '#quests:details/header/hasNoVehicles'
DETAILS_HEADER_COMPLETION_DAILY = '#quests:details/header/completion/daily'
DETAILS_HEADER_COMPLETION_DAILY_GROUPBYVEHICLE = '#quests:details/header/completion/daily/groupByVehicle'
DETAILS_HEADER_COMPLETION_DAILY_GROUPBYNATION = '#quests:details/header/completion/daily/groupByNation'
DETAILS_HEADER_COMPLETION_DAILY_GROUPBYLEVEL = '#quests:details/header/completion/daily/groupByLevel'
DETAILS_HEADER_COMPLETION_DAILY_GROUPBYCLASS = '#quests:details/header/completion/daily/groupByClass'
DETAILS_HEADER_COMPLETION_UNLIMITED = '#quests:details/header/completion/unlimited'
DETAILS_HEADER_COMPLETION_SINGLE = '#quests:details/header/completion/single'
DETAILS_HEADER_COMPLETION_SINGLE_GROUPBYVEHICLE = '#quests:details/header/completion/single/groupByVehicle'
DETAILS_HEADER_COMPLETION_SINGLE_GROUPBYNATION = '#quests:details/header/completion/single/groupByNation'
DETAILS_HEADER_COMPLETION_SINGLE_GROUPBYLEVEL = '#quests:details/header/completion/single/groupByLevel'
DETAILS_HEADER_COMPLETION_SINGLE_GROUPBYCLASS = '#quests:details/header/completion/single/groupByClass'
DETAILS_HEADER_COMETOEND = '#quests:details/header/comeToEnd'
DETAILS_HEADER_COMETOENDINMINUTES = '#quests:details/header/comeToEndInMinutes'
DETAILS_TASKS_SUBTASK = '#quests:details/tasks/subTask'
DETAILS_TASKS_NEXTTASK = '#quests:details/tasks/nextTask'
DETAILS_TASKS_STRATEGIC = '#quests:details/tasks/strategic'
DETAILS_TASKS_REQUIREMENTS_ACCOUNTLABEL = '#quests:details/tasks/requirements/accountLabel'
DETAILS_TASKS_REQUIREMENTS_VEHICLELABEL = '#quests:details/tasks/requirements/vehicleLabel'
DETAILS_TASKS_REQUIREMENTS_VEHICLELABEL_SUITABLE = '#quests:details/tasks/requirements/vehicleLabel/suitable'
DETAILS_TASKS_REQUIREMENTS_VEHICLELABEL_FROM = '#quests:details/tasks/requirements/vehicleLabel/from'
DETAILS_REQUIREMENTS_VEHICLESTABLE_NAME = '#quests:details/requirements/vehiclesTable/name'
DETAILS_REQUIREMENTS_VEHICLESTABLE_DISCOUNT = '#quests:details/requirements/vehiclesTable/discount'
DETAILS_REQUIREMENTS_VEHICLESTABLE_COUNT = '#quests:details/requirements/vehiclesTable/count'
DETAILS_CONDITIONS_LABEL = '#quests:details/conditions/label'
DETAILS_STATUS_COMPLETED = '#quests:details/status/completed'
DETAILS_STATUS_COMPLETED_DAILY = '#quests:details/status/completed/daily'
DETAILS_STATUS_NOTAVAILABLE_IN_FUTURE = '#quests:details/status/notAvailable/in_future'
DETAILS_STATUS_NOTAVAILABLE_INVALID_WEEKDAY = '#quests:details/status/notAvailable/invalid_weekday'
DETAILS_STATUS_NOTAVAILABLE_INVALID_TIME_INTERVAL = '#quests:details/status/notAvailable/invalid_time_interval'
DETAILS_STATUS_NOTAVAILABLE_OUT_OF_DATE = '#quests:details/status/notAvailable/out_of_date'
DETAILS_STATUS_NOTAVAILABLE_REQUIREMENTS = '#quests:details/status/notAvailable/requirements'
DETAILS_RELATIONS1_GREATER = '#quests:details/relations1/greater'
DETAILS_RELATIONS1_LESS = '#quests:details/relations1/less'
DETAILS_RELATIONS1_EQUAL = '#quests:details/relations1/equal'
DETAILS_RELATIONS1_NOTEQUAL = '#quests:details/relations1/notEqual'
DETAILS_RELATIONS1_LESSOREQUAL = '#quests:details/relations1/lessOrEqual'
DETAILS_RELATIONS1_GREATEROREQUAL = '#quests:details/relations1/greaterOrEqual'
DETAILS_RELATIONS2_GREATER = '#quests:details/relations2/greater'
DETAILS_RELATIONS2_LESS = '#quests:details/relations2/less'
DETAILS_RELATIONS2_EQUAL = '#quests:details/relations2/equal'
DETAILS_RELATIONS2_NOTEQUAL = '#quests:details/relations2/notEqual'
DETAILS_RELATIONS2_LESSOREQUAL = '#quests:details/relations2/lessOrEqual'
DETAILS_RELATIONS2_GREATEROREQUAL = '#quests:details/relations2/greaterOrEqual'
DETAILS_GROUPS_OR = '#quests:details/groups/or'
DETAILS_REQUIREMENTS_IGR = '#quests:details/requirements/igr'
DETAILS_REQUIREMENTS_IGRBASIC = '#quests:details/requirements/igrBasic'
DETAILS_REQUIREMENTS_IGRPREMIUM = '#quests:details/requirements/igrPremium'
DETAILS_REQUIREMENTS_TOKEN = '#quests:details/requirements/token'
DETAILS_REQUIREMENTS_TOKEN_N = '#quests:details/requirements/token/N'
DETAILS_REQUIREMENTS_GROUP_TOKEN_N = '#quests:details/requirements/group/token/N'
DETAILS_REQUIREMENTS_PREMIUMACCOUNT = '#quests:details/requirements/premiumAccount'
DETAILS_REQUIREMENTS_NOTPREMIUMACCOUNT = '#quests:details/requirements/notPremiumAccount'
DETAILS_REQUIREMENTS_INCLAN = '#quests:details/requirements/inClan'
DETAILS_REQUIREMENTS_NOTINCLAN = '#quests:details/requirements/notInClan'
DETAILS_REQUIREMENTS_INANYCLAN = '#quests:details/requirements/inAnyClan'
DETAILS_REQUIREMENTS_NOTINANYCLAN = '#quests:details/requirements/notInAnyClan'
DETAILS_REQUIREMENTS_FORCURRENTCLAN = '#quests:details/requirements/forCurrentClan'
DETAILS_REQUIREMENTS_NOTFORCURRENTCLAN = '#quests:details/requirements/notForCurrentClan'
DETAILS_REQUIREMENTS_GLOBALRATING = '#quests:details/requirements/globalRating'
DETAILS_REQUIREMENTS_DOSSIERVALUE = '#quests:details/requirements/dossierValue'
DETAILS_REQUIREMENTS_DOSSIERAVGVALUE = '#quests:details/requirements/dossierAvgValue'
DETAILS_REQUIREMENTS_VEHICLESUNLOCKED = '#quests:details/requirements/vehiclesUnlocked'
DETAILS_REQUIREMENTS_VEHICLESUNLOCKED_NOT = '#quests:details/requirements/vehiclesUnlocked/not'
DETAILS_REQUIREMENTS_VEHICLESUNLOCKED_ALL = '#quests:details/requirements/vehiclesUnlocked/all'
DETAILS_REQUIREMENTS_VEHICLESUNLOCKED_NATION = '#quests:details/requirements/vehiclesUnlocked/nation'
DETAILS_REQUIREMENTS_VEHICLESUNLOCKED_NATION_NOT = '#quests:details/requirements/vehiclesUnlocked/nation/not'
DETAILS_REQUIREMENTS_VEHICLESUNLOCKED_TYPE = '#quests:details/requirements/vehiclesUnlocked/type'
DETAILS_REQUIREMENTS_VEHICLESUNLOCKED_TYPE_NOT = '#quests:details/requirements/vehiclesUnlocked/type/not'
DETAILS_REQUIREMENTS_VEHICLESUNLOCKED_LEVEL = '#quests:details/requirements/vehiclesUnlocked/level'
DETAILS_REQUIREMENTS_VEHICLESUNLOCKED_LEVEL_NOT = '#quests:details/requirements/vehiclesUnlocked/level/not'
DETAILS_REQUIREMENTS_VEHICLESUNLOCKED_NATION_TYPE = '#quests:details/requirements/vehiclesUnlocked/nation_type'
DETAILS_REQUIREMENTS_VEHICLESUNLOCKED_NATION_TYPE_NOT = '#quests:details/requirements/vehiclesUnlocked/nation_type/not'
DETAILS_REQUIREMENTS_VEHICLESUNLOCKED_NATION_LEVEL = '#quests:details/requirements/vehiclesUnlocked/nation_level'
DETAILS_REQUIREMENTS_VEHICLESUNLOCKED_NATION_LEVEL_NOT = '#quests:details/requirements/vehiclesUnlocked/nation_level/not'
DETAILS_REQUIREMENTS_VEHICLESUNLOCKED_TYPE_LEVEL = '#quests:details/requirements/vehiclesUnlocked/type_level'
DETAILS_REQUIREMENTS_VEHICLESUNLOCKED_TYPE_LEVEL_NOT = '#quests:details/requirements/vehiclesUnlocked/type_level/not'
DETAILS_REQUIREMENTS_VEHICLESUNLOCKED_NATION_TYPE_LEVEL = '#quests:details/requirements/vehiclesUnlocked/nation_type_level'
DETAILS_REQUIREMENTS_VEHICLESUNLOCKED_NATION_TYPE_LEVEL_NOT = '#quests:details/requirements/vehiclesUnlocked/nation_type_level/not'
DETAILS_REQUIREMENTS_VEHICLESOWNED = '#quests:details/requirements/vehiclesOwned'
DETAILS_REQUIREMENTS_VEHICLESOWNED_NOT = '#quests:details/requirements/vehiclesOwned/not'
DETAILS_REQUIREMENTS_VEHICLESOWNED_ALL = '#quests:details/requirements/vehiclesOwned/all'
DETAILS_REQUIREMENTS_VEHICLESOWNED_NATION = '#quests:details/requirements/vehiclesOwned/nation'
DETAILS_REQUIREMENTS_VEHICLESOWNED_NATION_NOT = '#quests:details/requirements/vehiclesOwned/nation/not'
DETAILS_REQUIREMENTS_VEHICLESOWNED_TYPE = '#quests:details/requirements/vehiclesOwned/type'
DETAILS_REQUIREMENTS_VEHICLESOWNED_TYPE_NOT = '#quests:details/requirements/vehiclesOwned/type/not'
DETAILS_REQUIREMENTS_VEHICLESOWNED_LEVEL = '#quests:details/requirements/vehiclesOwned/level'
DETAILS_REQUIREMENTS_VEHICLESOWNED_LEVEL_NOT = '#quests:details/requirements/vehiclesOwned/level/not'
DETAILS_REQUIREMENTS_VEHICLESOWNED_NATION_TYPE = '#quests:details/requirements/vehiclesOwned/nation_type'
DETAILS_REQUIREMENTS_VEHICLESOWNED_NATION_TYPE_NOT = '#quests:details/requirements/vehiclesOwned/nation_type/not'
DETAILS_REQUIREMENTS_VEHICLESOWNED_NATION_LEVEL = '#quests:details/requirements/vehiclesOwned/nation_level'
DETAILS_REQUIREMENTS_VEHICLESOWNED_NATION_LEVEL_NOT = '#quests:details/requirements/vehiclesOwned/nation_level/not'
DETAILS_REQUIREMENTS_VEHICLESOWNED_TYPE_LEVEL = '#quests:details/requirements/vehiclesOwned/type_level'
DETAILS_REQUIREMENTS_VEHICLESOWNED_TYPE_LEVEL_NOT = '#quests:details/requirements/vehiclesOwned/type_level/not'
DETAILS_REQUIREMENTS_VEHICLESOWNED_NATION_TYPE_LEVEL = '#quests:details/requirements/vehiclesOwned/nation_type_level'
DETAILS_REQUIREMENTS_VEHICLESOWNED_NATION_TYPE_LEVEL_NOT = '#quests:details/requirements/vehiclesOwned/nation_type_level/not'
DETAILS_REQUIREMENTS_VEHICLE_RECEIVEDMULTXP = '#quests:details/requirements/vehicle/receivedMultXp'
DETAILS_REQUIREMENTS_VEHICLE_NOTRECEIVEDMULTXP = '#quests:details/requirements/vehicle/notReceivedMultXp'
DETAILS_REQUIREMENTS_VEHICLE_ANY = '#quests:details/requirements/vehicle/any'
DETAILS_REQUIREMENTS_LADDER = '#quests:details/requirements/ladder'
DETAILS_CONDITIONS_TITLE = '#quests:details/conditions/title'
DETAILS_CONDITIONS_VEHICLEDESCR = '#quests:details/conditions/vehicleDescr'
DETAILS_CONDITIONS_VEHICLE = '#quests:details/conditions/vehicle'
DETAILS_CONDITIONS_VEHICLEKILLS = '#quests:details/conditions/vehicleKills'
DETAILS_CONDITIONS_VEHICLESUNLOCKED = '#quests:details/conditions/vehiclesUnlocked'
DETAILS_CONDITIONS_VEHICLESOWNED = '#quests:details/conditions/vehiclesOwned'
DETAILS_CONDITIONS_BATTLEBONUSTYPE = '#quests:details/conditions/battleBonusType'
DETAILS_CONDITIONS_FORMATION = '#quests:details/conditions/formation'
DETAILS_CONDITIONS_MAP = '#quests:details/conditions/map'
DETAILS_CONDITIONS_MAP_NOT = '#quests:details/conditions/map/not'
DETAILS_CONDITIONS_MAPS = '#quests:details/conditions/maps'
DETAILS_CONDITIONS_MAPS_NOT = '#quests:details/conditions/maps/not'
DETAILS_CONDITIONS_MAPSTYPE = '#quests:details/conditions/mapsType'
DETAILS_CONDITIONS_MAPSTYPE_SUMMER = '#quests:details/conditions/mapsType/summer'
DETAILS_CONDITIONS_MAPSTYPE_DESERT = '#quests:details/conditions/mapsType/desert'
DETAILS_CONDITIONS_MAPSTYPE_WINTER = '#quests:details/conditions/mapsType/winter'
DETAILS_CONDITIONS_FORMATION_SQUAD = '#quests:details/conditions/formation/squad'
DETAILS_CONDITIONS_NOTSQUAD = '#quests:details/conditions/notSquad'
DETAILS_CONDITIONS_CLANMEMBERSHIP_ANY_FORMATION = '#quests:details/conditions/clanMembership/any/formation'
DETAILS_CONDITIONS_CLANMEMBERSHIP_ANY_SQUAD = '#quests:details/conditions/clanMembership/any/squad'
DETAILS_CONDITIONS_CLANMEMBERSHIP_ANY_TEAM7X7 = '#quests:details/conditions/clanMembership/any/team7x7'
DETAILS_CONDITIONS_CLANMEMBERSHIP_ANY_COMPANY = '#quests:details/conditions/clanMembership/any/company'
DETAILS_CONDITIONS_CLANMEMBERSHIP_SAME_FORMATION = '#quests:details/conditions/clanMembership/same/formation'
DETAILS_CONDITIONS_CLANMEMBERSHIP_SAME_SQUAD = '#quests:details/conditions/clanMembership/same/squad'
DETAILS_CONDITIONS_CLANMEMBERSHIP_SAME_TEAM7X7 = '#quests:details/conditions/clanMembership/same/team7x7'
DETAILS_CONDITIONS_CLANMEMBERSHIP_SAME_COMPANY = '#quests:details/conditions/clanMembership/same/company'
DETAILS_CONDITIONS_FORMATION_CLAN = '#quests:details/conditions/formation/clan'
DETAILS_CONDITIONS_HISTORICALBATTLES = '#quests:details/conditions/historicalBattles'
DETAILS_CONDITIONS_BATTLES = '#quests:details/conditions/battles'
DETAILS_CONDITIONS_BATTLESINROW = '#quests:details/conditions/battlesInRow'
DETAILS_CONDITIONS_ACHIEVEMENTS = '#quests:details/conditions/achievements'
DETAILS_CONDITIONS_ACHIEVEMENTS_NOT = '#quests:details/conditions/achievements/not'
DETAILS_CONDITIONS_CLANKILLS = '#quests:details/conditions/clanKills'
DETAILS_CONDITIONS_CLANKILLS_NOT = '#quests:details/conditions/clanKills/not'
DETAILS_CONDITIONS_CLANKILLS_CAMO_RED = '#quests:details/conditions/clanKills/camo/red'
DETAILS_CONDITIONS_CLANKILLS_CAMO_SILVER = '#quests:details/conditions/clanKills/camo/silver'
DETAILS_CONDITIONS_CLANKILLS_CAMO_GOLD = '#quests:details/conditions/clanKills/camo/gold'
DETAILS_CONDITIONS_CLANKILLS_CAMO_BLACK = '#quests:details/conditions/clanKills/camo/black'
DETAILS_CONDITIONS_ONEACHIEVEMENT = '#quests:details/conditions/oneAchievement'
DETAILS_CONDITIONS_ONEACHIEVEMENT_NOT = '#quests:details/conditions/oneAchievement/not'
DETAILS_CONDITIONS_WIN = '#quests:details/conditions/win'
DETAILS_CONDITIONS_NOTWIN = '#quests:details/conditions/notWin'
DETAILS_CONDITIONS_SURVIVE = '#quests:details/conditions/survive'
DETAILS_CONDITIONS_NOTSURVIVE = '#quests:details/conditions/notSurvive'
DETAILS_CONDITIONS_RESULTS_SINGLE_SIMPLE = '#quests:details/conditions/results/single/simple'
DETAILS_CONDITIONS_RESULTS_SINGLE_AVG = '#quests:details/conditions/results/single/avg'
DETAILS_CONDITIONS_RESULTS_SINGLE_HALFTEAM_TOP = '#quests:details/conditions/results/single/halfTeam/top'
DETAILS_CONDITIONS_RESULTS_SINGLE_HALFTEAM_TOP_NOT = '#quests:details/conditions/results/single/halfTeam/top/not'
DETAILS_CONDITIONS_RESULTS_SINGLE_BOTHTEAMS_TOP = '#quests:details/conditions/results/single/bothTeams/top'
DETAILS_CONDITIONS_RESULTS_SINGLE_BOTHTEAMS_TOP_NOT = '#quests:details/conditions/results/single/bothTeams/top/not'
DETAILS_CONDITIONS_RESULTS_SINGLE_HALFTEAM_RANGE = '#quests:details/conditions/results/single/halfTeam/range'
DETAILS_CONDITIONS_RESULTS_SINGLE_HALFTEAM_RANGE_NOT = '#quests:details/conditions/results/single/halfTeam/range/not'
DETAILS_CONDITIONS_RESULTS_SINGLE_BOTHTEAMS_RANGE = '#quests:details/conditions/results/single/bothTeams/range'
DETAILS_CONDITIONS_RESULTS_SINGLE_BOTHTEAMS_RANGE_NOT = '#quests:details/conditions/results/single/bothTeams/range/not'
DETAILS_CONDITIONS_RESULTS_SINGLE_HALFTEAM_POSITION = '#quests:details/conditions/results/single/halfTeam/position'
DETAILS_CONDITIONS_RESULTS_SINGLE_HALFTEAM_POSITION_NOT = '#quests:details/conditions/results/single/halfTeam/position/not'
DETAILS_CONDITIONS_RESULTS_SINGLE_BOTHTEAMS_POSITION = '#quests:details/conditions/results/single/bothTeams/position'
DETAILS_CONDITIONS_RESULTS_SINGLE_BOTHTEAMS_POSITION_NOT = '#quests:details/conditions/results/single/bothTeams/position/not'
DETAILS_CONDITIONS_RESULTS_FORMATION_ALIVE = '#quests:details/conditions/results/formation/alive'
DETAILS_CONDITIONS_RESULTS_FORMATION_ALIVE_NOT = '#quests:details/conditions/results/formation/alive/not'
DETAILS_CONDITIONS_RESULTS_FORMATION_SIMPLE = '#quests:details/conditions/results/formation/simple'
DETAILS_CONDITIONS_RESULTS_FORMATION_AVG = '#quests:details/conditions/results/formation/avg'
DETAILS_CONDITIONS_RESULTS_SQUAD_ALIVE = '#quests:details/conditions/results/squad/alive'
DETAILS_CONDITIONS_RESULTS_SQUAD_ALIVE_NOT = '#quests:details/conditions/results/squad/alive/not'
DETAILS_CONDITIONS_RESULTS_SQUAD_SIMPLE = '#quests:details/conditions/results/squad/simple'
DETAILS_CONDITIONS_RESULTS_SQUAD_AVG = '#quests:details/conditions/results/squad/avg'
DETAILS_CONDITIONS_RESULTS_COMPANY_ALIVE = '#quests:details/conditions/results/company/alive'
DETAILS_CONDITIONS_RESULTS_COMPANY_ALIVE_NOT = '#quests:details/conditions/results/company/alive/not'
DETAILS_CONDITIONS_RESULTS_COMPANY_SIMPLE = '#quests:details/conditions/results/company/simple'
DETAILS_CONDITIONS_RESULTS_COMPANY_AVG = '#quests:details/conditions/results/company/avg'
DETAILS_CONDITIONS_RESULTS_TEAM7X7_ALIVE = '#quests:details/conditions/results/team7x7/alive'
DETAILS_CONDITIONS_RESULTS_TEAM7X7_ALIVE_NOT = '#quests:details/conditions/results/team7x7/alive/not'
DETAILS_CONDITIONS_RESULTS_TEAM7X7_SIMPLE = '#quests:details/conditions/results/team7x7/simple'
DETAILS_CONDITIONS_RESULTS_TEAM7X7_AVG = '#quests:details/conditions/results/team7x7/avg'
DETAILS_CONDITIONS_CUMULATIVE_SINGLE = '#quests:details/conditions/cumulative/single'
DETAILS_CONDITIONS_CUMULATIVE_FORMATION = '#quests:details/conditions/cumulative/formation'
DETAILS_CONDITIONS_CUMULATIVE_SQUAD = '#quests:details/conditions/cumulative/squad'
DETAILS_CONDITIONS_CUMULATIVE_COMPANY = '#quests:details/conditions/cumulative/company'
DETAILS_CONDITIONS_CUMULATIVE_TEAM7X7 = '#quests:details/conditions/cumulative/team7x7'
DETAILS_CONDITIONS_VEHICLESKILLS = '#quests:details/conditions/vehiclesKills'
DETAILS_CONDITIONS_VEHICLESKILLS_NOT = '#quests:details/conditions/vehiclesKills/not'
DETAILS_CONDITIONS_VEHICLESKILLS_ALL = '#quests:details/conditions/vehiclesKills/all'
DETAILS_CONDITIONS_VEHICLESKILLS_NATION = '#quests:details/conditions/vehiclesKills/nation'
DETAILS_CONDITIONS_VEHICLESKILLS_NATION_NOT = '#quests:details/conditions/vehiclesKills/nation/not'
DETAILS_CONDITIONS_VEHICLESKILLS_TYPE = '#quests:details/conditions/vehiclesKills/type'
DETAILS_CONDITIONS_VEHICLESKILLS_TYPE_NOT = '#quests:details/conditions/vehiclesKills/type/not'
DETAILS_CONDITIONS_VEHICLESKILLS_LEVEL = '#quests:details/conditions/vehiclesKills/level'
DETAILS_CONDITIONS_VEHICLESKILLS_LEVEL_NOT = '#quests:details/conditions/vehiclesKills/level/not'
DETAILS_CONDITIONS_VEHICLESKILLS_NATION_TYPE = '#quests:details/conditions/vehiclesKills/nation_type'
DETAILS_CONDITIONS_VEHICLESKILLS_NATION_TYPE_NOT = '#quests:details/conditions/vehiclesKills/nation_type/not'
DETAILS_CONDITIONS_VEHICLESKILLS_NATION_LEVEL = '#quests:details/conditions/vehiclesKills/nation_level'
DETAILS_CONDITIONS_VEHICLESKILLS_NATION_LEVEL_NOT = '#quests:details/conditions/vehiclesKills/nation_level/not'
DETAILS_CONDITIONS_VEHICLESKILLS_TYPE_LEVEL = '#quests:details/conditions/vehiclesKills/type_level'
DETAILS_CONDITIONS_VEHICLESKILLS_TYPE_LEVEL_NOT = '#quests:details/conditions/vehiclesKills/type_level/not'
DETAILS_CONDITIONS_VEHICLESKILLS_NATION_TYPE_LEVEL = '#quests:details/conditions/vehiclesKills/nation_type_level'
DETAILS_CONDITIONS_VEHICLESKILLS_NATION_TYPE_LEVEL_NOT = '#quests:details/conditions/vehiclesKills/nation_type_level/not'
DETAILS_CONDITIONS_GROUPBY_NATION = '#quests:details/conditions/groupBy/nation'
DETAILS_CONDITIONS_GROUPBY_CLASS = '#quests:details/conditions/groupBy/class'
DETAILS_CONDITIONS_GROUPBY_LEVEL = '#quests:details/conditions/groupBy/level'
DETAILS_CONDITIONS_GROUPBY_VEHICLE = '#quests:details/conditions/groupBy/vehicle'
DETAILS_CONDITIONS_GROUPBY_LEVELLABEL = '#quests:details/conditions/groupBy/levelLabel'
DETAILS_CONDITIONS_POSTBATTLE_SEPARATOR = '#quests:details/conditions/postBattle/separator'
DETAILS_CONDITIONS_POSTBATTLE_DAILYRESET_TIMEFMT = '#quests:details/conditions/postBattle/dailyReset/timeFmt'
# Locale-catalog keys for quest condition labels ('#quests:...' ids are
# resolved by the game's i18n layer; values are identifiers, not display text).
# Post-battle / cumulative (accumulated-over-battles) condition labels.
DETAILS_CONDITIONS_POSTBATTLE_DAILYRESET = '#quests:details/conditions/postBattle/dailyReset'
DETAILS_CONDITIONS_CUMULATIVE_HEALTH = '#quests:details/conditions/cumulative/health'
DETAILS_CONDITIONS_CUMULATIVE_XP = '#quests:details/conditions/cumulative/xp'
DETAILS_CONDITIONS_CUMULATIVE_DIRECTHITS = '#quests:details/conditions/cumulative/directHits'
DETAILS_CONDITIONS_CUMULATIVE_DIRECTTEAMHITS = '#quests:details/conditions/cumulative/directTeamHits'
DETAILS_CONDITIONS_CUMULATIVE_EXPLOSIONHITS = '#quests:details/conditions/cumulative/explosionHits'
DETAILS_CONDITIONS_CUMULATIVE_PIERCINGS = '#quests:details/conditions/cumulative/piercings'
DETAILS_CONDITIONS_CUMULATIVE_SHOTS = '#quests:details/conditions/cumulative/shots'
DETAILS_CONDITIONS_CUMULATIVE_DAMAGEDEALT = '#quests:details/conditions/cumulative/damageDealt'
DETAILS_CONDITIONS_CUMULATIVE_MAXDAMAGE = '#quests:details/conditions/cumulative/maxDamage'
DETAILS_CONDITIONS_CUMULATIVE_DAMAGEASSISTED = '#quests:details/conditions/cumulative/damageAssisted'
DETAILS_CONDITIONS_CUMULATIVE_DAMAGERECEIVED = '#quests:details/conditions/cumulative/damageReceived'
DETAILS_CONDITIONS_CUMULATIVE_DIRECTHITSRECEIVED = '#quests:details/conditions/cumulative/directHitsReceived'
DETAILS_CONDITIONS_CUMULATIVE_NODAMAGEDIRECTHITSRECEIVED = '#quests:details/conditions/cumulative/noDamageDirectHitsReceived'
DETAILS_CONDITIONS_CUMULATIVE_EXPLOSIONHITSRECEIVED = '#quests:details/conditions/cumulative/explosionHitsReceived'
DETAILS_CONDITIONS_CUMULATIVE_PIERCINGSRECEIVED = '#quests:details/conditions/cumulative/piercingsReceived'
DETAILS_CONDITIONS_CUMULATIVE_SPOTTED = '#quests:details/conditions/cumulative/spotted'
DETAILS_CONDITIONS_CUMULATIVE_DAMAGED = '#quests:details/conditions/cumulative/damaged'
DETAILS_CONDITIONS_CUMULATIVE_KILLS = '#quests:details/conditions/cumulative/kills'
DETAILS_CONDITIONS_CUMULATIVE_TDAMAGEDEALT = '#quests:details/conditions/cumulative/tdamageDealt'
DETAILS_CONDITIONS_CUMULATIVE_TKILLS = '#quests:details/conditions/cumulative/tkills'
DETAILS_CONDITIONS_CUMULATIVE_CAPTUREPOINTS = '#quests:details/conditions/cumulative/capturePoints'
DETAILS_CONDITIONS_CUMULATIVE_DROPPEDCAPTUREPOINTS = '#quests:details/conditions/cumulative/droppedCapturePoints'
DETAILS_CONDITIONS_CUMULATIVE_ACHIEVEMENTS = '#quests:details/conditions/cumulative/achievements'
DETAILS_CONDITIONS_CUMULATIVE_POTENTIALDAMAGERECEIVED = '#quests:details/conditions/cumulative/potentialDamageReceived'
DETAILS_CONDITIONS_CUMULATIVE_DAMAGEBLOCKEDBYARMOR = '#quests:details/conditions/cumulative/damageBlockedByArmor'
DETAILS_CONDITIONS_CUMULATIVE_FREEXP = '#quests:details/conditions/cumulative/freeXP'
DETAILS_CONDITIONS_CUMULATIVE_POTENTIALDAMAGEDEALT = '#quests:details/conditions/cumulative/potentialDamageDealt'
DETAILS_CONDITIONS_CUMULATIVE_SOLOHITSASSISTED = '#quests:details/conditions/cumulative/soloHitsAssisted'
DETAILS_CONDITIONS_CUMULATIVE_ORIGINALXP = '#quests:details/conditions/cumulative/originalXP'
DETAILS_CONDITIONS_CUMULATIVE_DAMAGEASSISTEDTRACK = '#quests:details/conditions/cumulative/damageAssistedTrack'
DETAILS_CONDITIONS_CUMULATIVE_DAMAGEASSISTEDRADIO = '#quests:details/conditions/cumulative/damageAssistedRadio'
DETAILS_CONDITIONS_CUMULATIVE_MARKOFMASTERY = '#quests:details/conditions/cumulative/markOfMastery'
DETAILS_CONDITIONS_CUMULATIVE_MARKOFMASTERY0 = '#quests:details/conditions/cumulative/markOfMastery0'
DETAILS_CONDITIONS_CUMULATIVE_MARKOFMASTERY0_NOT = '#quests:details/conditions/cumulative/markOfMastery0/not'
DETAILS_CONDITIONS_CUMULATIVE_MARKOFMASTERY1 = '#quests:details/conditions/cumulative/markOfMastery1'
DETAILS_CONDITIONS_CUMULATIVE_MARKOFMASTERY2 = '#quests:details/conditions/cumulative/markOfMastery2'
DETAILS_CONDITIONS_CUMULATIVE_MARKOFMASTERY3 = '#quests:details/conditions/cumulative/markOfMastery3'
DETAILS_CONDITIONS_CUMULATIVE_MARKOFMASTERY4 = '#quests:details/conditions/cumulative/markOfMastery4'
# Tank-company ("clubs") specific condition labels.
DETAILS_CONDITIONS_CLUBS_GETDIVISION = '#quests:details/conditions/clubs/getDivision'
DETAILS_CONDITIONS_CLUBS_BATTLES = '#quests:details/conditions/clubs/battles'
DETAILS_CONDITIONS_CLUBS_HASCLUB = '#quests:details/conditions/clubs/hasClub'
# Dossier-based quest condition labels: achievement series, medals and
# epic-achievement identifiers read from the player's dossier.
DETAILS_DOSSIER_SNIPERSERIES = '#quests:details/dossier/sniperSeries'
DETAILS_DOSSIER_MAXSNIPERSERIES = '#quests:details/dossier/maxSniperSeries'
DETAILS_DOSSIER_INVINCIBLESERIES = '#quests:details/dossier/invincibleSeries'
DETAILS_DOSSIER_MAXINVINCIBLESERIES = '#quests:details/dossier/maxInvincibleSeries'
DETAILS_DOSSIER_DIEHARDSERIES = '#quests:details/dossier/diehardSeries'
DETAILS_DOSSIER_MAXDIEHARDSERIES = '#quests:details/dossier/maxDiehardSeries'
DETAILS_DOSSIER_KILLINGSERIES = '#quests:details/dossier/killingSeries'
DETAILS_DOSSIER_MAXKILLINGSERIES = '#quests:details/dossier/maxKillingSeries'
DETAILS_DOSSIER_PIERCINGSERIES = '#quests:details/dossier/piercingSeries'
DETAILS_DOSSIER_MAXPIERCINGSERIES = '#quests:details/dossier/maxPiercingSeries'
DETAILS_DOSSIER_MAXXP = '#quests:details/dossier/maxXP'
DETAILS_DOSSIER_MAXXPVEHICLE = '#quests:details/dossier/maxXPVehicle'
DETAILS_DOSSIER_MAXFRAGS = '#quests:details/dossier/maxFrags'
DETAILS_DOSSIER_MAXFRAGSVEHICLE = '#quests:details/dossier/maxFragsVehicle'
DETAILS_DOSSIER_MAXDAMAGE = '#quests:details/dossier/maxDamage'
DETAILS_DOSSIER_MAXDAMAGEVEHICLE = '#quests:details/dossier/maxDamageVehicle'
DETAILS_DOSSIER_MARKOFMASTERY = '#quests:details/dossier/markOfMastery'
DETAILS_DOSSIER_WARRIOR = '#quests:details/dossier/warrior'
DETAILS_DOSSIER_INVADER = '#quests:details/dossier/invader'
DETAILS_DOSSIER_SNIPER = '#quests:details/dossier/sniper'
DETAILS_DOSSIER_SNIPER2 = '#quests:details/dossier/sniper2'
DETAILS_DOSSIER_MAINGUN = '#quests:details/dossier/mainGun'
DETAILS_DOSSIER_ARMOREDFIST = '#quests:details/dossier/armoredFist'
DETAILS_DOSSIER_GENIUSFORWARMEDAL = '#quests:details/dossier/geniusForWarMedal'
DETAILS_DOSSIER_WOLFAMONGSHEEPMEDAL = '#quests:details/dossier/wolfAmongSheepMedal'
DETAILS_DOSSIER_TACTICALBREAKTHROUGH = '#quests:details/dossier/tacticalBreakthrough'
DETAILS_DOSSIER_KINGOFTHEHILL = '#quests:details/dossier/kingOfTheHill'
DETAILS_DOSSIER_DEFENDER = '#quests:details/dossier/defender'
DETAILS_DOSSIER_STEELWALL = '#quests:details/dossier/steelwall'
DETAILS_DOSSIER_SUPPORTER = '#quests:details/dossier/supporter'
DETAILS_DOSSIER_SCOUT = '#quests:details/dossier/scout'
DETAILS_DOSSIER_MEDALKAY = '#quests:details/dossier/medalKay'
DETAILS_DOSSIER_MEDALCARIUS = '#quests:details/dossier/medalCarius'
DETAILS_DOSSIER_MEDALKNISPEL = '#quests:details/dossier/medalKnispel'
DETAILS_DOSSIER_MEDALPOPPEL = '#quests:details/dossier/medalPoppel'
DETAILS_DOSSIER_MEDALABRAMS = '#quests:details/dossier/medalAbrams'
DETAILS_DOSSIER_MEDALLECLERC = '#quests:details/dossier/medalLeClerc'
DETAILS_DOSSIER_MEDALLAVRINENKO = '#quests:details/dossier/medalLavrinenko'
DETAILS_DOSSIER_MEDALEKINS = '#quests:details/dossier/medalEkins'
DETAILS_DOSSIER_MEDALWITTMANN = '#quests:details/dossier/medalWittmann'
DETAILS_DOSSIER_MEDALORLIK = '#quests:details/dossier/medalOrlik'
DETAILS_DOSSIER_MEDALOSKIN = '#quests:details/dossier/medalOskin'
DETAILS_DOSSIER_MEDALHALONEN = '#quests:details/dossier/medalHalonen'
DETAILS_DOSSIER_MEDALBURDA = '#quests:details/dossier/medalBurda'
DETAILS_DOSSIER_MEDALBILLOTTE = '#quests:details/dossier/medalBillotte'
DETAILS_DOSSIER_MEDALKOLOBANOV = '#quests:details/dossier/medalKolobanov'
DETAILS_DOSSIER_MEDALFADIN = '#quests:details/dossier/medalFadin'
DETAILS_DOSSIER_TITLESNIPER = '#quests:details/dossier/titleSniper'
DETAILS_DOSSIER_INVINCIBLE = '#quests:details/dossier/invincible'
DETAILS_DOSSIER_DIEHARD = '#quests:details/dossier/diehard'
DETAILS_DOSSIER_RAIDER = '#quests:details/dossier/raider'
DETAILS_DOSSIER_HANDOFDEATH = '#quests:details/dossier/handOfDeath'
DETAILS_DOSSIER_ARMORPIERCER = '#quests:details/dossier/armorPiercer'
DETAILS_DOSSIER_KAMIKAZE = '#quests:details/dossier/kamikaze'
DETAILS_DOSSIER_BEASTHUNTER = '#quests:details/dossier/beasthunter'
DETAILS_DOSSIER_MOUSEBANE = '#quests:details/dossier/mousebane'
DETAILS_DOSSIER_EVILEYE = '#quests:details/dossier/evileye'
DETAILS_DOSSIER_BATTLECITIZEN = '#quests:details/dossier/battleCitizen'
DETAILS_DOSSIER_MEDALRADLEYWALTERS = '#quests:details/dossier/medalRadleyWalters'
DETAILS_DOSSIER_MEDALLAFAYETTEPOOL = '#quests:details/dossier/medalLafayettePool'
DETAILS_DOSSIER_MEDALBRUNOPIETRO = '#quests:details/dossier/medalBrunoPietro'
DETAILS_DOSSIER_MEDALTARCZAY = '#quests:details/dossier/medalTarczay'
DETAILS_DOSSIER_MEDALPASCUCCI = '#quests:details/dossier/medalPascucci'
DETAILS_DOSSIER_MEDALDUMITRU = '#quests:details/dossier/medalDumitru'
DETAILS_DOSSIER_MEDALLEHVASLAIHO = '#quests:details/dossier/medalLehvaslaiho'
DETAILS_DOSSIER_MEDALNIKOLAS = '#quests:details/dossier/medalNikolas'
DETAILS_DOSSIER_FRAGSSINAI = '#quests:details/dossier/fragsSinai'
DETAILS_DOSSIER_SINAI = '#quests:details/dossier/sinai'
DETAILS_DOSSIER_HEROESOFRASSENAY = '#quests:details/dossier/heroesOfRassenay'
DETAILS_DOSSIER_MEDALBROTHERSINARMS = '#quests:details/dossier/medalBrothersInArms'
DETAILS_DOSSIER_MEDALCRUCIALCONTRIBUTION = '#quests:details/dossier/medalCrucialContribution'
DETAILS_DOSSIER_MEDALDELANGLADE = '#quests:details/dossier/medalDeLanglade'
DETAILS_DOSSIER_MEDALTAMADAYOSHIO = '#quests:details/dossier/medalTamadaYoshio'
DETAILS_DOSSIER_BOMBARDIER = '#quests:details/dossier/bombardier'
DETAILS_DOSSIER_HUNTSMAN = '#quests:details/dossier/huntsman'
DETAILS_DOSSIER_STURDY = '#quests:details/dossier/sturdy'
DETAILS_DOSSIER_IRONMAN = '#quests:details/dossier/ironMan'
DETAILS_DOSSIER_FRAGSPATTON = '#quests:details/dossier/fragsPatton'
DETAILS_DOSSIER_PATTONVALLEY = '#quests:details/dossier/pattonValley'
# Dossier statistics labels scoped to random battles.
DETAILS_DOSSIER_RANDOM_XP = '#quests:details/dossier/random/xp'
DETAILS_DOSSIER_RANDOM_MAXXP = '#quests:details/dossier/random/maxXP'
DETAILS_DOSSIER_RANDOM_BATTLESCOUNT = '#quests:details/dossier/random/battlesCount'
DETAILS_DOSSIER_RANDOM_WINS = '#quests:details/dossier/random/wins'
DETAILS_DOSSIER_RANDOM_LOSSES = '#quests:details/dossier/random/losses'
DETAILS_DOSSIER_RANDOM_SURVIVEDBATTLES = '#quests:details/dossier/random/survivedBattles'
DETAILS_DOSSIER_RANDOM_LASTBATTLETIME = '#quests:details/dossier/random/lastBattleTime'
DETAILS_DOSSIER_RANDOM_WINANDSURVIVED = '#quests:details/dossier/random/winAndSurvived'
DETAILS_DOSSIER_RANDOM_BATTLEHEROES = '#quests:details/dossier/random/battleHeroes'
DETAILS_DOSSIER_RANDOM_FRAGS = '#quests:details/dossier/random/frags'
DETAILS_DOSSIER_RANDOM_MAXFRAGS = '#quests:details/dossier/random/maxFrags'
DETAILS_DOSSIER_RANDOM_MAXDAMAGE = '#quests:details/dossier/random/maxDamage'
DETAILS_DOSSIER_RANDOM_FRAGS8P = '#quests:details/dossier/random/frags8p'
DETAILS_DOSSIER_RANDOM_FRAGSBEAST = '#quests:details/dossier/random/fragsBeast'
DETAILS_DOSSIER_RANDOM_DIRECTHITS = '#quests:details/dossier/random/directHits'
DETAILS_DOSSIER_RANDOM_SPOTTED = '#quests:details/dossier/random/spotted'
DETAILS_DOSSIER_RANDOM_DAMAGEDEALT = '#quests:details/dossier/random/damageDealt'
DETAILS_DOSSIER_RANDOM_DAMAGERECEIVED = '#quests:details/dossier/random/damageReceived'
DETAILS_DOSSIER_RANDOM_DIRECTHITSRECEIVED = '#quests:details/dossier/random/directHitsReceived'
DETAILS_DOSSIER_RANDOM_CAPTUREPOINTS = '#quests:details/dossier/random/capturePoints'
DETAILS_DOSSIER_RANDOM_DROPPEDCAPTUREPOINTS = '#quests:details/dossier/random/droppedCapturePoints'
DETAILS_DOSSIER_RANDOM_PIERCINGS = '#quests:details/dossier/random/piercings'
DETAILS_DOSSIER_RANDOM_NODAMAGEDIRECTHITSRECEIVED = '#quests:details/dossier/random/noDamageDirectHitsReceived'
DETAILS_DOSSIER_RANDOM_PIERCINGSRECEIVED = '#quests:details/dossier/random/piercingsReceived'
DETAILS_DOSSIER_RANDOM_POTENTIALDAMAGERECEIVED = '#quests:details/dossier/random/potentialDamageReceived'
DETAILS_DOSSIER_RANDOM_DAMAGEBLOCKEDBYARMOR = '#quests:details/dossier/random/damageBlockedByArmor'
DETAILS_DOSSIER_RANDOM_ORIGINALXP = '#quests:details/dossier/random/originalXP'
DETAILS_DOSSIER_RANDOM_DAMAGEASSISTEDTRACK = '#quests:details/dossier/random/damageAssistedTrack'
DETAILS_DOSSIER_RANDOM_DAMAGEASSISTEDRADIO = '#quests:details/dossier/random/damageAssistedRadio'
DETAILS_DOSSIER_RANDOM_SHOTS = '#quests:details/dossier/random/shots'
DETAILS_DOSSIER_RANDOM_EXPLOSIONHITSRECEIVED = '#quests:details/dossier/random/explosionHitsReceived'
# Dossier statistics labels scoped to tank-company battles.
DETAILS_DOSSIER_COMPANY_XP = '#quests:details/dossier/company/xp'
DETAILS_DOSSIER_COMPANY_BATTLESCOUNT = '#quests:details/dossier/company/battlesCount'
DETAILS_DOSSIER_COMPANY_WINS = '#quests:details/dossier/company/wins'
DETAILS_DOSSIER_COMPANY_LOSSES = '#quests:details/dossier/company/losses'
DETAILS_DOSSIER_COMPANY_SURVIVEDBATTLES = '#quests:details/dossier/company/survivedBattles'
DETAILS_DOSSIER_COMPANY_FRAGS = '#quests:details/dossier/company/frags'
DETAILS_DOSSIER_COMPANY_DIRECTHITS = '#quests:details/dossier/company/directHits'
DETAILS_DOSSIER_COMPANY_SPOTTED = '#quests:details/dossier/company/spotted'
DETAILS_DOSSIER_COMPANY_DAMAGEDEALT = '#quests:details/dossier/company/damageDealt'
DETAILS_DOSSIER_COMPANY_MAXDAMAGE = '#quests:details/dossier/company/maxDamage'
DETAILS_DOSSIER_COMPANY_DAMAGERECEIVED = '#quests:details/dossier/company/damageReceived'
DETAILS_DOSSIER_COMPANY_CAPTUREPOINTS = '#quests:details/dossier/company/capturePoints'
DETAILS_DOSSIER_COMPANY_DROPPEDCAPTUREPOINTS = '#quests:details/dossier/company/droppedCapturePoints'
DETAILS_DOSSIER_COMPANY_PIERCINGS = '#quests:details/dossier/company/piercings'
DETAILS_DOSSIER_COMPANY_NODAMAGEDIRECTHITSRECEIVED = '#quests:details/dossier/company/noDamageDirectHitsReceived'
DETAILS_DOSSIER_COMPANY_PIERCINGSRECEIVED = '#quests:details/dossier/company/piercingsReceived'
DETAILS_DOSSIER_COMPANY_POTENTIALDAMAGERECEIVED = '#quests:details/dossier/company/potentialDamageReceived'
DETAILS_DOSSIER_COMPANY_DAMAGEBLOCKEDBYARMOR = '#quests:details/dossier/company/damageBlockedByArmor'
DETAILS_DOSSIER_COMPANY_ORIGINALXP = '#quests:details/dossier/company/originalXP'
DETAILS_DOSSIER_COMPANY_DAMAGEASSISTEDTRACK = '#quests:details/dossier/company/damageAssistedTrack'
DETAILS_DOSSIER_COMPANY_DAMAGEASSISTEDRADIO = '#quests:details/dossier/company/damageAssistedRadio'
# Dossier statistics labels scoped to clan battles.
DETAILS_DOSSIER_CLAN_XP = '#quests:details/dossier/clan/xp'
DETAILS_DOSSIER_CLAN_BATTLESCOUNT = '#quests:details/dossier/clan/battlesCount'
DETAILS_DOSSIER_CLAN_WINS = '#quests:details/dossier/clan/wins'
DETAILS_DOSSIER_CLAN_LOSSES = '#quests:details/dossier/clan/losses'
DETAILS_DOSSIER_CLAN_SURVIVEDBATTLES = '#quests:details/dossier/clan/survivedBattles'
DETAILS_DOSSIER_CLAN_FRAGS = '#quests:details/dossier/clan/frags'
DETAILS_DOSSIER_CLAN_DIRECTHITS = '#quests:details/dossier/clan/directHits'
DETAILS_DOSSIER_CLAN_SPOTTED = '#quests:details/dossier/clan/spotted'
DETAILS_DOSSIER_CLAN_DAMAGEDEALT = '#quests:details/dossier/clan/damageDealt'
DETAILS_DOSSIER_CLAN_MAXDAMAGE = '#quests:details/dossier/clan/maxDamage'
DETAILS_DOSSIER_CLAN_DAMAGERECEIVED = '#quests:details/dossier/clan/damageReceived'
DETAILS_DOSSIER_CLAN_CAPTUREPOINTS = '#quests:details/dossier/clan/capturePoints'
DETAILS_DOSSIER_CLAN_DROPPEDCAPTUREPOINTS = '#quests:details/dossier/clan/droppedCapturePoints'
DETAILS_DOSSIER_CLAN_PIERCINGS = '#quests:details/dossier/clan/piercings'
DETAILS_DOSSIER_CLAN_NODAMAGEDIRECTHITSRECEIVED = '#quests:details/dossier/clan/noDamageDirectHitsReceived'
DETAILS_DOSSIER_CLAN_PIERCINGSRECEIVED = '#quests:details/dossier/clan/piercingsReceived'
DETAILS_DOSSIER_CLAN_POTENTIALDAMAGERECEIVED = '#quests:details/dossier/clan/potentialDamageReceived'
DETAILS_DOSSIER_CLAN_DAMAGEBLOCKEDBYARMOR = '#quests:details/dossier/clan/damageBlockedByArmor'
DETAILS_DOSSIER_CLAN_ORIGINALXP = '#quests:details/dossier/clan/originalXP'
DETAILS_DOSSIER_CLAN_DAMAGEASSISTEDTRACK = '#quests:details/dossier/clan/damageAssistedTrack'
DETAILS_DOSSIER_CLAN_DAMAGEASSISTEDRADIO = '#quests:details/dossier/clan/damageAssistedRadio'
# Dossier statistics labels scoped to historical battles.
DETAILS_DOSSIER_HISTORICAL_XP = '#quests:details/dossier/historical/xp'
DETAILS_DOSSIER_HISTORICAL_BATTLESCOUNT = '#quests:details/dossier/historical/battlesCount'
DETAILS_DOSSIER_HISTORICAL_WINS = '#quests:details/dossier/historical/wins'
DETAILS_DOSSIER_HISTORICAL_LOSSES = '#quests:details/dossier/historical/losses'
DETAILS_DOSSIER_HISTORICAL_SURVIVEDBATTLES = '#quests:details/dossier/historical/survivedBattles'
DETAILS_DOSSIER_HISTORICAL_FRAGS = '#quests:details/dossier/historical/frags'
DETAILS_DOSSIER_HISTORICAL_DIRECTHITS = '#quests:details/dossier/historical/directHits'
DETAILS_DOSSIER_HISTORICAL_SPOTTED = '#quests:details/dossier/historical/spotted'
DETAILS_DOSSIER_HISTORICAL_DAMAGEDEALT = '#quests:details/dossier/historical/damageDealt'
DETAILS_DOSSIER_HISTORICAL_MAXDAMAGE = '#quests:details/dossier/historical/maxDamage'
DETAILS_DOSSIER_HISTORICAL_DAMAGERECEIVED = '#quests:details/dossier/historical/damageReceived'
DETAILS_DOSSIER_HISTORICAL_CAPTUREPOINTS = '#quests:details/dossier/historical/capturePoints'
DETAILS_DOSSIER_HISTORICAL_DROPPEDCAPTUREPOINTS = '#quests:details/dossier/historical/droppedCapturePoints'
DETAILS_DOSSIER_HISTORICAL_PIERCINGS = '#quests:details/dossier/historical/piercings'
DETAILS_DOSSIER_HISTORICAL_NODAMAGEDIRECTHITSRECEIVED = '#quests:details/dossier/historical/noDamageDirectHitsReceived'
DETAILS_DOSSIER_HISTORICAL_PIERCINGSRECEIVED = '#quests:details/dossier/historical/piercingsReceived'
DETAILS_DOSSIER_HISTORICAL_POTENTIALDAMAGERECEIVED = '#quests:details/dossier/historical/potentialDamageReceived'
DETAILS_DOSSIER_HISTORICAL_DAMAGEBLOCKEDBYARMOR = '#quests:details/dossier/historical/damageBlockedByArmor'
DETAILS_DOSSIER_HISTORICAL_ORIGINALXP = '#quests:details/dossier/historical/originalXP'
DETAILS_DOSSIER_HISTORICAL_DAMAGEASSISTEDTRACK = '#quests:details/dossier/historical/damageAssistedTrack'
DETAILS_DOSSIER_HISTORICAL_DAMAGEASSISTEDRADIO = '#quests:details/dossier/historical/damageAssistedRadio'
# Dossier statistics labels scoped to team battles.
DETAILS_DOSSIER_TEAM_XP = '#quests:details/dossier/team/xp'
DETAILS_DOSSIER_TEAM_BATTLESCOUNT = '#quests:details/dossier/team/battlesCount'
DETAILS_DOSSIER_TEAM_WINS = '#quests:details/dossier/team/wins'
DETAILS_DOSSIER_TEAM_LOSSES = '#quests:details/dossier/team/losses'
DETAILS_DOSSIER_TEAM_SURVIVEDBATTLES = '#quests:details/dossier/team/survivedBattles'
DETAILS_DOSSIER_TEAM_FRAGS = '#quests:details/dossier/team/frags'
DETAILS_DOSSIER_TEAM_DIRECTHITS = '#quests:details/dossier/team/directHits'
DETAILS_DOSSIER_TEAM_SPOTTED = '#quests:details/dossier/team/spotted'
DETAILS_DOSSIER_TEAM_DAMAGEDEALT = '#quests:details/dossier/team/damageDealt'
DETAILS_DOSSIER_TEAM_MAXDAMAGE = '#quests:details/dossier/team/maxDamage'
DETAILS_DOSSIER_TEAM_DAMAGERECEIVED = '#quests:details/dossier/team/damageReceived'
DETAILS_DOSSIER_TEAM_CAPTUREPOINTS = '#quests:details/dossier/team/capturePoints'
DETAILS_DOSSIER_TEAM_DROPPEDCAPTUREPOINTS = '#quests:details/dossier/team/droppedCapturePoints'
DETAILS_DOSSIER_TEAM_PIERCINGS = '#quests:details/dossier/team/piercings'
DETAILS_DOSSIER_TEAM_NODAMAGEDIRECTHITSRECEIVED = '#quests:details/dossier/team/noDamageDirectHitsReceived'
DETAILS_DOSSIER_TEAM_PIERCINGSRECEIVED = '#quests:details/dossier/team/piercingsReceived'
DETAILS_DOSSIER_TEAM_POTENTIALDAMAGERECEIVED = '#quests:details/dossier/team/potentialDamageReceived'
DETAILS_DOSSIER_TEAM_DAMAGEBLOCKEDBYARMOR = '#quests:details/dossier/team/damageBlockedByArmor'
DETAILS_DOSSIER_TEAM_ORIGINALXP = '#quests:details/dossier/team/originalXP'
DETAILS_DOSSIER_TEAM_DAMAGEASSISTEDTRACK = '#quests:details/dossier/team/damageAssistedTrack'
DETAILS_DOSSIER_TEAM_DAMAGEASSISTEDRADIO = '#quests:details/dossier/team/damageAssistedRadio'
# Dossier statistics labels scoped to ladder (rated team) battles.
DETAILS_DOSSIER_LADDER_XP = '#quests:details/dossier/ladder/xp'
DETAILS_DOSSIER_LADDER_BATTLESCOUNT = '#quests:details/dossier/ladder/battlesCount'
DETAILS_DOSSIER_LADDER_WINS = '#quests:details/dossier/ladder/wins'
DETAILS_DOSSIER_LADDER_LOSSES = '#quests:details/dossier/ladder/losses'
DETAILS_DOSSIER_LADDER_SURVIVEDBATTLES = '#quests:details/dossier/ladder/survivedBattles'
DETAILS_DOSSIER_LADDER_FRAGS = '#quests:details/dossier/ladder/frags'
DETAILS_DOSSIER_LADDER_DIRECTHITS = '#quests:details/dossier/ladder/directHits'
DETAILS_DOSSIER_LADDER_SPOTTED = '#quests:details/dossier/ladder/spotted'
DETAILS_DOSSIER_LADDER_DAMAGEDEALT = '#quests:details/dossier/ladder/damageDealt'
DETAILS_DOSSIER_LADDER_MAXDAMAGE = '#quests:details/dossier/ladder/maxDamage'
DETAILS_DOSSIER_LADDER_DAMAGERECEIVED = '#quests:details/dossier/ladder/damageReceived'
DETAILS_DOSSIER_LADDER_CAPTUREPOINTS = '#quests:details/dossier/ladder/capturePoints'
DETAILS_DOSSIER_LADDER_DROPPEDCAPTUREPOINTS = '#quests:details/dossier/ladder/droppedCapturePoints'
DETAILS_DOSSIER_LADDER_PIERCINGS = '#quests:details/dossier/ladder/piercings'
DETAILS_DOSSIER_LADDER_NODAMAGEDIRECTHITSRECEIVED = '#quests:details/dossier/ladder/noDamageDirectHitsReceived'
DETAILS_DOSSIER_LADDER_PIERCINGSRECEIVED = '#quests:details/dossier/ladder/piercingsReceived'
DETAILS_DOSSIER_LADDER_POTENTIALDAMAGERECEIVED = '#quests:details/dossier/ladder/potentialDamageReceived'
DETAILS_DOSSIER_LADDER_DAMAGEBLOCKEDBYARMOR = '#quests:details/dossier/ladder/damageBlockedByArmor'
DETAILS_DOSSIER_LADDER_ORIGINALXP = '#quests:details/dossier/ladder/originalXP'
DETAILS_DOSSIER_LADDER_DAMAGEASSISTEDTRACK = '#quests:details/dossier/ladder/damageAssistedTrack'
DETAILS_DOSSIER_LADDER_DAMAGEASSISTEDRADIO = '#quests:details/dossier/ladder/damageAssistedRadio'
# Labels for quest reward "modifiers" (discounts, price multipliers and
# availability changes applied to the in-game economy while a quest runs).
DETAILS_MODIFIERS_TITLE_DISCOUNT = '#quests:details/modifiers/title/discount'
DETAILS_MODIFIERS_TITLE_SELLING = '#quests:details/modifiers/title/selling'
DETAILS_MODIFIERS_TITLE_AVAILABILITY = '#quests:details/modifiers/title/availability'
DETAILS_MODIFIERS_ECONOMICS_SLOTSPRICES = '#quests:details/modifiers/economics/slotsPrices'
DETAILS_MODIFIERS_ECONOMICS_BERTHSPRICES = '#quests:details/modifiers/economics/berthsPrices'
DETAILS_MODIFIERS_ECONOMICS_CREDITSTANKMANCOST = '#quests:details/modifiers/economics/creditsTankmanCost'
DETAILS_MODIFIERS_ECONOMICS_GOLDTANKMANCOST = '#quests:details/modifiers/economics/goldTankmanCost'
DETAILS_MODIFIERS_ECONOMICS_CREDITSDROPSKILLSCOST = '#quests:details/modifiers/economics/creditsDropSkillsCost'
DETAILS_MODIFIERS_ECONOMICS_GOLDDROPSKILLSCOST = '#quests:details/modifiers/economics/goldDropSkillsCost'
DETAILS_MODIFIERS_ECONOMICS_EXCHANGERATE = '#quests:details/modifiers/economics/exchangeRate'
DETAILS_MODIFIERS_ECONOMICS_EXCHANGERATEFORSHELLSANDEQS = '#quests:details/modifiers/economics/exchangeRateForShellsAndEqs'
DETAILS_MODIFIERS_ECONOMICS_PAIDREMOVALCOST = '#quests:details/modifiers/economics/paidRemovalCost'
DETAILS_MODIFIERS_ECONOMICS_PASSPORTCHANGECOST = '#quests:details/modifiers/economics/passportChangeCost'
DETAILS_MODIFIERS_ECONOMICS_FEMALEPASSPORTCHANGECOST = '#quests:details/modifiers/economics/femalePassportChangeCost'
DETAILS_MODIFIERS_ECONOMICS_CLANCREATIONCOST = '#quests:details/modifiers/economics/clanCreationCost'
DETAILS_MODIFIERS_ECONOMICS_FREEXPCONVERSIONDISCRECITY = '#quests:details/modifiers/economics/freeXPConversionDiscrecity'
DETAILS_MODIFIERS_ECONOMICS_FREEXPTOTMANXPRATE = '#quests:details/modifiers/economics/freeXPToTManXPRate'
DETAILS_MODIFIERS_ECONOMICS_AVAILABLE_FREEXPTOTMANXPRATE = '#quests:details/modifiers/economics/available/freeXPToTManXPRate'
DETAILS_MODIFIERS_ECONOMICS_PREMIUMPACKET1 = '#quests:details/modifiers/economics/premiumPacket1'
DETAILS_MODIFIERS_ECONOMICS_PREMIUMPACKET3 = '#quests:details/modifiers/economics/premiumPacket3'
DETAILS_MODIFIERS_ECONOMICS_PREMIUMPACKET7 = '#quests:details/modifiers/economics/premiumPacket7'
DETAILS_MODIFIERS_ECONOMICS_PREMIUMPACKET30 = '#quests:details/modifiers/economics/premiumPacket30'
DETAILS_MODIFIERS_ECONOMICS_PREMIUMPACKET180 = '#quests:details/modifiers/economics/premiumPacket180'
DETAILS_MODIFIERS_ECONOMICS_PREMIUMPACKET360 = '#quests:details/modifiers/economics/premiumPacket360'
DETAILS_MODIFIERS_ECONOMICS_CAMOUFLAGEPACKETINFCOST = '#quests:details/modifiers/economics/camouflagePacketInfCost'
DETAILS_MODIFIERS_ECONOMICS_CAMOUFLAGEPACKET7COST = '#quests:details/modifiers/economics/camouflagePacket7Cost'
DETAILS_MODIFIERS_ECONOMICS_CAMOUFLAGEPACKET30COST = '#quests:details/modifiers/economics/camouflagePacket30Cost'
DETAILS_MODIFIERS_ECONOMICS_INSCRIPTIONPACKETINFCOST = '#quests:details/modifiers/economics/inscriptionPacketInfCost'
DETAILS_MODIFIERS_ECONOMICS_INSCRIPTIONPACKET7COST = '#quests:details/modifiers/economics/inscriptionPacket7Cost'
DETAILS_MODIFIERS_ECONOMICS_INSCRIPTIONPACKET30COST = '#quests:details/modifiers/economics/inscriptionPacket30Cost'
DETAILS_MODIFIERS_ECONOMICS_EMBLEMPACKETINFCOST = '#quests:details/modifiers/economics/emblemPacketInfCost'
DETAILS_MODIFIERS_ECONOMICS_EMBLEMPACKET7COST = '#quests:details/modifiers/economics/emblemPacket7Cost'
DETAILS_MODIFIERS_ECONOMICS_EMBLEMPACKET30COST = '#quests:details/modifiers/economics/emblemPacket30Cost'
DETAILS_MODIFIERS_EQUIPMENT_GOLD = '#quests:details/modifiers/equipment/gold'
DETAILS_MODIFIERS_EQUIPMENT_CREDITS = '#quests:details/modifiers/equipment/credits'
DETAILS_MODIFIERS_EQUIPMENT_GOLDPRICEMULTIPLIER = '#quests:details/modifiers/equipment/goldPriceMultiplier'
DETAILS_MODIFIERS_EQUIPMENT_CREDITSPRICEMULTIPLIER = '#quests:details/modifiers/equipment/creditsPriceMultiplier'
DETAILS_MODIFIERS_OPTDEVICE = '#quests:details/modifiers/optDevice'
DETAILS_MODIFIERS_OPTDEVICE_GOLDPRICEMULTIPLIER = '#quests:details/modifiers/optDevice/goldPriceMultiplier'
DETAILS_MODIFIERS_OPTDEVICE_CREDITSPRICEMULTIPLIER = '#quests:details/modifiers/optDevice/creditsPriceMultiplier'
DETAILS_MODIFIERS_SHELL_GOLD = '#quests:details/modifiers/shell/gold'
DETAILS_MODIFIERS_SHELL_CREDITS = '#quests:details/modifiers/shell/credits'
DETAILS_MODIFIERS_SHELL_GOLDPRICEMULTIPLIER = '#quests:details/modifiers/shell/goldPriceMultiplier'
DETAILS_MODIFIERS_SHELL_CREDITSPRICEMULTIPLIER = '#quests:details/modifiers/shell/creditsPriceMultiplier'
DETAILS_MODIFIERS_SHELL_NATION_GOLDPRICEMULTIPLIER = '#quests:details/modifiers/shell/nation/goldPriceMultiplier'
DETAILS_MODIFIERS_SHELL_NATION_CREDITSPRICEMULTIPLIER = '#quests:details/modifiers/shell/nation/creditsPriceMultiplier'
DETAILS_MODIFIERS_VEHICLE = '#quests:details/modifiers/vehicle'
DETAILS_MODIFIERS_RENTVEHICLE = '#quests:details/modifiers/rentVehicle'
DETAILS_MODIFIERS_VEHRENTPACKAGE = '#quests:details/modifiers/vehRentPackage'
DETAILS_MODIFIERS_VEHICLE_SELL = '#quests:details/modifiers/vehicle/sell'
DETAILS_MODIFIERS_VEHICLE_GOLDPRICEMULTIPLIER = '#quests:details/modifiers/vehicle/goldPriceMultiplier'
DETAILS_MODIFIERS_VEHICLE_CREDITSPRICEMULTIPLIER = '#quests:details/modifiers/vehicle/creditsPriceMultiplier'
DETAILS_MODIFIERS_VEHICLE_RENT_GOLDPRICEMULTIPLIER = '#quests:details/modifiers/vehicle/rent/goldPriceMultiplier'
DETAILS_MODIFIERS_VEHICLE_RENT_CREDITSPRICEMULTIPLIER = '#quests:details/modifiers/vehicle/rent/creditsPriceMultiplier'
DETAILS_MODIFIERS_VEHICLE_NATION_GOLDPRICEMULTIPLIER = '#quests:details/modifiers/vehicle/nation/goldPriceMultiplier'
DETAILS_MODIFIERS_VEHICLE_NATION_CREDITSPRICEMULTIPLIER = '#quests:details/modifiers/vehicle/nation/creditsPriceMultiplier'
DETAILS_MODIFIERS_VEHICLE_RENT_NATION_GOLDPRICEMULTIPLIER = '#quests:details/modifiers/vehicle/rent/nation/goldPriceMultiplier'
DETAILS_MODIFIERS_VEHICLE_RENT_NATION_CREDITSPRICEMULTIPLIER = '#quests:details/modifiers/vehicle/rent/nation/creditsPriceMultiplier'
DETAILS_MODIFIERS_CUSTOMIZATION_VEHCAMOUFLAGE = '#quests:details/modifiers/customization/vehCamouflage'
DETAILS_MODIFIERS_CUSTOMIZATION_GROUPEMBLEM = '#quests:details/modifiers/customization/groupEmblem'
# Vehicle-class display labels.
CLASSES_LIGHTTANK = '#quests:classes/lightTank'
CLASSES_MEDIUMTANK = '#quests:classes/mediumTank'
CLASSES_HEAVYTANK = '#quests:classes/heavyTank'
CLASSES_SPG = '#quests:classes/SPG'
CLASSES_AT_SPG = '#quests:classes/AT-SPG'
# Season awards window labels.
SEASONAWARDSWINDOW_TITLE = '#quests:seasonAwardsWindow/title'
SEASONAWARDSWINDOW_VEHICLEAWARD_BUTTONABOUT_TEXT = '#quests:seasonAwardsWindow/vehicleAward/buttonAbout/text'
SEASONAWARDSWINDOW_TILEAWARDSTITLE = '#quests:seasonAwardsWindow/tileAwardsTitle'
SEASONAWARDSWINDOW_VEHICLENAME = '#quests:seasonAwardsWindow/vehicleName'
SEASONAWARDSWINDOW_TOKENSCOUNT = '#quests:seasonAwardsWindow/tokensCount'
SEASONAWARDSWINDOW_FEMALETANKMANAWARD_TITLE = '#quests:seasonAwardsWindow/femaleTankmanAward/title'
SEASONAWARDSWINDOW_FEMALETANKMANAWARD_DESCRIPTION = '#quests:seasonAwardsWindow/femaleTankmanAward/description'
SEASONAWARDSWINDOW_COMMENDATIONLISTSAWARD_TITLE = '#quests:seasonAwardsWindow/commendationListsAward/title'
SEASONAWARDSWINDOW_COMMENDATIONLISTSAWARD_DESCRIPTION = '#quests:seasonAwardsWindow/commendationListsAward/description'
# Personal-missions welcome view labels.
QUESTSPERSONALWELCOMEVIEW_MAINTITLE_TEXTLABEL = '#quests:QuestsPersonalWelcomeView/mainTitle/textLabel'
QUESTSPERSONALWELCOMEVIEW_BTNLABEL = '#quests:QuestsPersonalWelcomeView/btnLabel'
QUESTSPERSONALWELCOMEVIEW_TEXTBLOCK_HEADER_BLOCK1 = '#quests:QuestsPersonalWelcomeView/textBlock/header/block1'
QUESTSPERSONALWELCOMEVIEW_TEXTBLOCK_BODY_BLOCK1 = '#quests:QuestsPersonalWelcomeView/textBlock/body/block1'
QUESTSPERSONALWELCOMEVIEW_TEXTBLOCK_HEADER_BLOCK2 = '#quests:QuestsPersonalWelcomeView/textBlock/header/block2'
QUESTSPERSONALWELCOMEVIEW_TEXTBLOCK_BODY_BLOCK2 = '#quests:QuestsPersonalWelcomeView/textBlock/body/block2'
QUESTSPERSONALWELCOMEVIEW_TEXTBLOCK_HEADER_BLOCK3 = '#quests:QuestsPersonalWelcomeView/textBlock/header/block3'
QUESTSPERSONALWELCOMEVIEW_TEXTBLOCK_BODY_BLOCK3 = '#quests:QuestsPersonalWelcomeView/textBlock/body/block3'
QUESTSPERSONALWELCOMEVIEW_ANNOUNCEMENTTEXT = '#quests:QuestsPersonalWelcomeView/announcementText'
# Tile-chains (personal missions) view labels and task-state strings.
TILECHAINSVIEW_BUTTONBACK_TEXT = '#quests:tileChainsView/buttonBack/text'
TILECHAINSVIEW_TITLE = '#quests:tileChainsView/title'
TILECHAINSVIEW_FILTERSLABEL_TEXT = '#quests:tileChainsView/filtersLabel/text'
TILECHAINSVIEW_NOTASKSLABEL_TEXT = '#quests:tileChainsView/noTasksLabel/text'
TILECHAINSVIEW_TASKTYPEFILTER_ALLITEMS_TEXT = '#quests:tileChainsView/taskTypeFilter/allItems/text'
TILECHAINSVIEW_TASKTYPEFILTER_COMPLETEDITEMS_TEXT = '#quests:tileChainsView/taskTypeFilter/completedItems/text'
TILECHAINSVIEW_TASKTYPEFILTER_ITEMSINPROGRESS_TEXT = '#quests:tileChainsView/taskTypeFilter/itemsInProgress/text'
TILECHAINSVIEW_TASKTYPEFILTER_AWARDSNOTRECEIVEDITEMS_TEXT = '#quests:tileChainsView/taskTypeFilter/awardsNotReceivedItems/text'
TILECHAINSVIEW_TASKLIST_TEXT = '#quests:tileChainsView/taskList/text'
TILECHAINSVIEW_TASKSPROGRESS_TEXT = '#quests:tileChainsView/tasksProgress/text'
TILECHAINSVIEW_TASKTYPE_COMPLETED_TEXT = '#quests:tileChainsView/taskType/completed/text'
TILECHAINSVIEW_TASKTYPE_FULLCOMPLETED_TEXT = '#quests:tileChainsView/taskType/fullCompleted/text'
TILECHAINSVIEW_TASKTYPE_INPROGRESS_TEXT = '#quests:tileChainsView/taskType/inProgress/text'
TILECHAINSVIEW_TASKTYPE_AVAILABLE_TEXT = '#quests:tileChainsView/taskType/available/text'
TILECHAINSVIEW_TASKTYPE_UNAVAILABLE_TEXT = '#quests:tileChainsView/taskType/unavailable/text'
TILECHAINSVIEW_TASKTYPE_AWARDNOTRECEIVED_TEXT = '#quests:tileChainsView/taskType/awardNotReceived/text'
TILECHAINSVIEW_STATISTICSLABEL_TEXT = '#quests:tileChainsView/statisticsLabel/text'
TILECHAINSVIEW_CHAINNAME_LIGHTTANK = '#quests:tileChainsView/chainName/lightTank'
TILECHAINSVIEW_CHAINNAME_MEDIUMTANK = '#quests:tileChainsView/chainName/mediumTank'
TILECHAINSVIEW_CHAINNAME_HEAVYTANK = '#quests:tileChainsView/chainName/heavyTank'
TILECHAINSVIEW_CHAINNAME_AT_SPG = '#quests:tileChainsView/chainName/AT-SPG'
TILECHAINSVIEW_CHAINNAME_SPG = '#quests:tileChainsView/chainName/SPG'
# Chain-progress view labels.
QUESTSCHAINPROGRESSVIEW_HEADER = '#quests:QuestsChainProgressView/header'
QUESTSCHAINPROGRESSVIEW_MAINAWARD = '#quests:QuestsChainProgressView/mainAward'
QUESTSCHAINPROGRESSVIEW_CHAINSPROGRESS = '#quests:QuestsChainProgressView/chainsProgress'
QUESTSCHAINPROGRESSVIEW_ABOUTTANKBTNLABEL = '#quests:QuestsChainProgressView/aboutTankBtnLabel'
QUESTSCHAINPROGRESSVIEW_SHOWINHANGARBTNLABEL = '#quests:QuestsChainProgressView/showInHangarBtnLabel'
QUESTSCHAINPROGRESSVIEW_CHAINPROGRESSCOUNT = '#quests:QuestsChainProgressView/chainProgressCount'
QUESTSCHAINPROGRESSVIEW_FALLOUTCHAINPROGRESSCOUNT = '#quests:QuestsChainProgressView/falloutChainProgressCount'
# Task-details view labels.
QUESTTASKDETAILSVIEW_HEADER = '#quests:QuestTaskDetailsView/header'
QUESTTASKDETAILSVIEW_MAINCONDITIONS = '#quests:QuestTaskDetailsView/mainConditions'
QUESTTASKDETAILSVIEW_ADDITIONALCONDITIONS = '#quests:QuestTaskDetailsView/additionalConditions'
QUESTTASKDETAILSVIEW_REQUIREMENTS = '#quests:QuestTaskDetailsView/requirements'
QUESTTASKDETAILSVIEW_REQUIREMENTS_TEXT = '#quests:QuestTaskDetailsView/requirements/text'
QUESTTASKDETAILSVIEW_REQUIREMENTS_MORE8LVL = '#quests:QuestTaskDetailsView/requirements/more8lvl'
QUESTTASKDETAILSVIEW_REQUIREMENTS_ONLY10LVL = '#quests:QuestTaskDetailsView/requirements/only10lvl'
QUESTTASKDETAILSVIEW_DESCRIPTION = '#quests:QuestTaskDetailsView/description'
QUESTTASKDETAILSVIEW_BTNLABEL_BEGIN = '#quests:QuestTaskDetailsView/btnLabel/begin'
QUESTTASKDETAILSVIEW_BTNLABEL_CANCEL = '#quests:QuestTaskDetailsView/btnLabel/cancel'
QUESTTASKDETAILSVIEW_BTNLABEL_TAKEAWARD = '#quests:QuestTaskDetailsView/btnLabel/takeAward'
QUESTTASKDETAILSVIEW_BTNLABEL_REPEAT = '#quests:QuestTaskDetailsView/btnLabel/repeat'
QUESTTASKDETAILSVIEW_TASKDESCRIPTION_DONE = '#quests:QuestTaskDetailsView/taskDescription/done'
QUESTTASKDETAILSVIEW_TASKDESCRIPTION_EXCELLENTDONE = '#quests:QuestTaskDetailsView/taskDescription/excellentDone'
QUESTTASKDETAILSVIEW_TASKDESCRIPTION_DOPREVTASKS = '#quests:QuestTaskDetailsView/taskDescription/doPrevTasks'
QUESTTASKDETAILSVIEW_FALLOUT_TASKDESCRIPTION_DOPREVTASKS = '#quests:QuestTaskDetailsView/fallout/taskDescription/doPrevTasks'
QUESTTASKDETAILSVIEW_TASKDESCRIPTION_AVAILABLE = '#quests:QuestTaskDetailsView/taskDescription/available'
QUESTTASKDETAILSVIEW_TASKDESCRIPTION_INPROGRESS = '#quests:QuestTaskDetailsView/taskDescription/inProgress'
QUESTTASKDETAILSVIEW_TASKDESCRIPTION_TAKEAWARD = '#quests:QuestTaskDetailsView/taskDescription/takeAward'
QUESTTASKDETAILSVIEW_FORADDITIONALAWARD = '#quests:QuestTaskDetailsView/forAdditionalAward'
TILECHAINSVIEW_CHAINNAME_MULTITEAM = '#quests:tileChainsView/chainName/multiteam'
TILECHAINSVIEW_CHAINNAME_CLASSIC = '#quests:tileChainsView/chainName/classic'
# Beginner-quests detail view labels.
BEGINNERQUESTS_DETAILS_CONDITIONSTITLE = '#quests:beginnerQuests/details/conditionsTitle'
BEGINNERQUESTS_DETAILS_DESCRIPTIONTITLE = '#quests:beginnerQuests/details/descriptionTitle'
BEGINNERQUESTS_DETAILS_BUTTONSHOWVIDEOTIP = '#quests:beginnerQuests/details/buttonShowVideoTip'
BEGINNERQUESTS_DETAILS_BUTTONSTARTQUEST = '#quests:beginnerQuests/details/buttonStartQuest'
BEGINNERQUESTS_DETAILS_NOAWARD = '#quests:beginnerQuests/details/noAward'
# Enumerations of the valid locale keys for the lookup helpers below; the
# *_ENUM tuples are used for membership validation (tuple members are the
# constants defined earlier in this class, above this chunk).
QUESTSCONTROL_ADDITIONALTITLE_ENUM = (QUESTSCONTROL_ADDITIONALTITLE_NEEDRECEIVEDAWARD,
 QUESTSCONTROL_ADDITIONALTITLE_FREESLOTSANDFREEQUESTS,
 QUESTSCONTROL_ADDITIONALTITLE_FIRSTRUN,
 QUESTSCONTROL_ADDITIONALTITLE_EMPTY)
QUESTSPERSONALWELCOMEVIEW_TEXTBLOCK_BODY_ENUM = (QUESTSPERSONALWELCOMEVIEW_TEXTBLOCK_BODY_BLOCK1, QUESTSPERSONALWELCOMEVIEW_TEXTBLOCK_BODY_BLOCK2, QUESTSPERSONALWELCOMEVIEW_TEXTBLOCK_BODY_BLOCK3)
QUESTSPERSONALWELCOMEVIEW_TEXTBLOCK_HEADER_ENUM = (QUESTSPERSONALWELCOMEVIEW_TEXTBLOCK_HEADER_BLOCK1, QUESTSPERSONALWELCOMEVIEW_TEXTBLOCK_HEADER_BLOCK2, QUESTSPERSONALWELCOMEVIEW_TEXTBLOCK_HEADER_BLOCK3)
@staticmethod
def questscontrol_additionaltitle(key):
    """Build the '#quests:questsControl/additionalTitle/<key>' locale key.

    Raises Exception when the resulting key is not one of the generated
    QUESTSCONTROL_ADDITIONALTITLE_ENUM values.
    """
    outcome = '#quests:questsControl/additionalTitle/%s' % key
    if outcome not in QUESTS.QUESTSCONTROL_ADDITIONALTITLE_ENUM:
        # Py2-only "raise Exception, msg" syntax replaced with the call
        # form, which behaves identically and is valid on Python 2 and 3.
        raise Exception('locale key "' + outcome + '" was not found')
    return outcome
@staticmethod
def questspersonalwelcomeview_textblock_body(key):
    """Build the '#quests:QuestsPersonalWelcomeView/textBlock/body/<key>' locale key.

    Raises Exception when the key is not a known body block.
    """
    outcome = '#quests:QuestsPersonalWelcomeView/textBlock/body/%s' % key
    if outcome not in QUESTS.QUESTSPERSONALWELCOMEVIEW_TEXTBLOCK_BODY_ENUM:
        # Py2-only raise-statement syntax replaced with the call form.
        raise Exception('locale key "' + outcome + '" was not found')
    return outcome
@staticmethod
def questspersonalwelcomeview_textblock_header(key):
    """Build the '#quests:QuestsPersonalWelcomeView/textBlock/header/<key>' locale key.

    Raises Exception when the key is not a known header block.
    """
    outcome = '#quests:QuestsPersonalWelcomeView/textBlock/header/%s' % key
    if outcome not in QUESTS.QUESTSPERSONALWELCOMEVIEW_TEXTBLOCK_HEADER_ENUM:
        # Py2-only raise-statement syntax replaced with the call form.
        raise Exception('locale key "' + outcome + '" was not found')
    return outcome
| [
"[email protected]"
]
| |
0bb6bbb5cb9dab50c91c9d10e108fdd734f381b9 | c77b2f06a971d5e77a3dc71e972ef27fc85475a5 | /algo_ds/_general/pattern_matching_naive.py | b1c431aa83f479f3a7c749328a24bc2fee5b1372 | []
| no_license | thefr33radical/codeblue | f25520ea85110ed09b09ae38e7db92bab8285b2f | 86bf4a4ba693b1797564dca66b645487973dafa4 | refs/heads/master | 2022-08-01T19:05:09.486567 | 2022-07-18T22:56:05 | 2022-07-18T22:56:05 | 110,525,490 | 3 | 6 | null | null | null | null | UTF-8 | Python | false | false | 321 | py | '''
txt[] = "THIS IS A TEST TEXT"
pat[] = "TEST"
'''
import re
def compute(txt="THIS IS A TEST TEXT TEST", pat="TEST"):
    """Search for regex *pat* in *txt* and report the first match.

    Prints the match span, prints "yes" when *pat* also occurs as a literal
    substring, and returns the (start, end) span of the first match, or
    None when there is no match.  The defaults reproduce the original
    hard-coded demo behaviour.
    """
    m = re.search(pat, txt)
    if m is None:
        # The original called m.span() unconditionally and would crash
        # with AttributeError when the pattern did not match.
        return None
    print(m.span())
    if pat in txt:
        print("yes")
    return m.span()
if __name__ == "__main__":
    compute()
"[email protected]"
]
| |
0da0f290f94f52c7dd23b74744834633d0fd949c | e5135867a8f2f5923b21523489c8f246d9c5a13a | /kaleo/management/commands/infinite_invites.py | b69ea50f7cd1935b122e67b8b0c4cb9bd4126e13 | [
"BSD-3-Clause"
]
| permissive | exitio/kaleo | 01574cc0675211a586995e08a4e19b6a1c9063ee | 53e73e0acf3429d83b45e6b22b1a6ec76ac69c12 | refs/heads/master | 2021-01-21T01:27:44.609605 | 2013-01-31T15:50:10 | 2013-01-31T15:50:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 471 | py | import sys
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from kaleo.models import InvitationStat
class Command(BaseCommand):
    """Management command that grants every user unlimited invites."""

    help = "Sets invites_allocated to -1 to represent infinite invites."

    def handle(self, *args, **kwargs):
        # -1 is the sentinel meaning "no allocation limit"; make sure every
        # account has an InvitationStat row carrying it.
        for account in User.objects.all():
            invitation_stat, _created = InvitationStat.objects.get_or_create(user=account)
            invitation_stat.invites_allocated = -1
            invitation_stat.save()
| [
"[email protected]"
]
| |
d924c27a884790f3eccceeacebb5b4ef409f3586 | da5ef82554c6c0413193b7c99192edd70fed58dd | /mozdns/soa/tests.py | 529c22573446ed774dbfbfa53a43c6d989673fa2 | []
| no_license | rtucker-mozilla/mozilla_inventory | d643c7713c65aa870e732e18aaf19ce677e277b7 | bf9154b0d77705d8c0fe1a9a35ce9c1bd60fcbea | refs/heads/master | 2020-12-24T17:17:37.621418 | 2013-04-11T10:39:41 | 2013-04-11T10:39:41 | 2,709,399 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 5,757 | py | from django.test import TestCase
from django.core.exceptions import ValidationError
from mozdns.soa.models import SOA
from mozdns.domain.models import Domain
class SOATests(TestCase):
    """Model-level tests for the SOA record: create, delete, uniqueness,
    domain linkage and the serial/dirty-bit bookkeeping."""
    def setUp(self):
        pass
    def do_generic_add(self, primary, contact, retry, refresh, description):
        """Create and save an SOA, assert exactly one row matches, return it.

        The double save() checks that re-saving an unchanged object does
        not create a duplicate.
        """
        soa = SOA(primary=primary, contact=contact,
                  retry=retry, refresh=refresh, description=description)
        soa.save()
        soa.save()
        rsoa = SOA.objects.filter(primary=primary, contact=contact,
                                  retry=retry, refresh=refresh)
        self.assertTrue(len(rsoa) == 1)
        return soa
    def test_add_soa(self):
        """SOAs can be added (with or without explicit timers) and expose
        their detail/url helpers."""
        primary = "ns1.oregonstate.edu"
        contact = "admin.oregonstate.edu"
        retry = 1234
        refresh = 1234123
        description = "1"
        self.do_generic_add(
            primary, contact, retry, refresh, description=description)
        soa = SOA.objects.filter(primary=primary, contact=contact,
                                 retry=retry, refresh=refresh)
        soa[0].save()
        self.assertTrue(soa)
        soa[0].__repr__()
        soa = soa[0]
        self.assertTrue(soa.details())
        self.assertTrue(soa.get_absolute_url())
        self.assertTrue(soa.get_edit_url())
        self.assertTrue(soa.get_delete_url())
        primary = "do.com"
        contact = "admf.asdf"
        retry = 432152
        refresh = 1235146134
        description = "2"
        self.do_generic_add(
            primary, contact, retry, refresh, description=description)
        soa = SOA.objects.filter(primary=primary, contact=contact,
                                 retry=retry, refresh=refresh)
        self.assertTrue(soa)
        soa = soa[0]
        self.assertTrue(soa.details())
        self.assertTrue(soa.get_absolute_url())
        self.assertTrue(soa.get_edit_url())
        self.assertTrue(soa.get_delete_url())
        primary = "ns1.derp.com"
        contact = "admf.asdf"
        soa = SOA(primary=primary, contact=contact)
        soa.save()
        self.assertTrue(
            soa.serial and soa.expire and soa.retry and soa.refresh)
        self.assertTrue(soa.details())
        self.assertTrue(soa.get_absolute_url())
        self.assertTrue(soa.get_edit_url())
        self.assertTrue(soa.get_delete_url())
    def test_add_remove(self):
        """Deleting an SOA removes its row; re-adding an identical SOA is a
        duplicate and must raise ValidationError."""
        primary = "ns2.oregonstate.edu"
        contact = "admin.oregonstate.edu"
        retry = 1234
        refresh = 1234123
        description = "3"
        soa = self.do_generic_add(
            primary, contact, retry, refresh, description=description)
        soa.delete()
        soa = SOA.objects.filter(primary=primary, contact=contact,
                                 retry=retry, refresh=refresh)
        self.assertTrue(len(soa) == 0)
        primary = "dddo.com"
        contact = "admf.asdf"
        retry = 432152
        refresh = 1235146134
        description = "4"
        soa = self.do_generic_add(
            primary, contact, retry, refresh, description=description)
        soa.delete()
        soa = SOA.objects.filter(primary=primary, contact=contact, retry=retry,
                                 refresh=refresh, description=description)
        self.assertTrue(len(soa) == 0)
        # Add dup
        description = "4"
        soa = self.do_generic_add(
            primary, contact, retry, refresh, description=description)
        soa.save()
        self.assertRaises(ValidationError, self.do_generic_add, *(
            primary, contact, retry, refresh, description))
    def test_add_invalid(self):
        """Malformed primary/contact names must fail model validation."""
        data = {'primary': "daf..fff", 'contact': "foo.com"}
        soa = SOA(**data)
        self.assertRaises(ValidationError, soa.save)
        data = {'primary': 'foo.com', 'contact': 'dkfa..'}
        soa = SOA(**data)
        self.assertRaises(ValidationError, soa.save)
        data = {'primary': 'adf', 'contact': '*@#$;'}
        soa = SOA(**data)
        self.assertRaises(ValidationError, soa.save)
    def test_delete_with_domains(self):
        """An SOA that still backs a domain cannot be deleted."""
        data = {'primary': "ns1asfdadsf.foo.com", 'contact': "email.foo.com"}
        soa = SOA(**data)
        soa.save()
        d0 = Domain(name='com')
        d0.save()
        d1 = Domain(name='foo.com', soa=soa)
        d1.soa = soa
        d1.save()
        self.assertRaises(ValidationError, soa.delete)
    def test_chain_soa_domain_add(self):
        """A chain of nested domains all keep the SOA they were created
        with, even after deleting leaf domains."""
        data = {'primary': "ns1.foo.com", 'contact': "email.foo.com"}
        soa = SOA(**data)
        soa.save()
        d0 = Domain(name='com')
        d0.save()
        d1 = Domain(name='foo.com', soa=soa)
        d1.save()
        self.assertTrue(soa == d1.soa)
        d2 = Domain(name='bar.foo.com', soa=soa)
        d2.save()
        self.assertTrue(soa == d2.soa)
        d3 = Domain(name='new.foo.com', soa=soa)
        d3.save()
        self.assertTrue(soa == d3.soa)
        d4 = Domain(name='far.bar.foo.com', soa=soa)
        d4.save()
        self.assertTrue(soa == d4.soa)
        d5 = Domain(name='tee.new.foo.com', soa=soa)
        d5.save()
        self.assertTrue(soa == d5.soa)
        d5.delete()
        d4.delete()
        self.assertTrue(soa == d1.soa)
        self.assertTrue(soa == d2.soa)
        self.assertTrue(soa == d3.soa)
    def test_update_serial_no_dirty(self):
        """Bumping only the serial must not set the dirty flag."""
        # If we update the serial, the dirty bit shouldn't change.
        data = {'primary': "fakey.ns1.asdffoo.com", 'contact':
                "adsffoopy.email.foo.com"}
        soa = SOA(**data)
        soa.save()  # new soa's are always dirty
        soa.dirty = False
        soa.save()
        soa.serial = soa.serial + 9
        soa.save()
        same_soa = SOA.objects.get(pk=soa.pk)
        self.assertFalse(same_soa.dirty)
| [
"[email protected]"
]
| |
a2556ca3423248387c4752f977fd8c4b52bd63ec | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/135/usersdata/224/45859/submittedfiles/OBI.py | 5fea3ee2b17d98501bf3732ea5e8dc6b5487efd9 | []
| no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 278 | py | # -*- coding: utf-8 -*-
def conta_aprovados(notas, p):
    """Return how many (nota1, nota2) pairs reach at least *p* points in total.

    notas -- iterable of two-number tuples (scores of the two phases)
    p     -- minimum combined score required to qualify
    """
    return sum(1 for n1, n2 in notas if n1 + n2 >= p)


if __name__ == "__main__":
    # Original script was broken: `somA=o` (NameError), `range(1.n+1,1)`
    # (syntax error), the counter was never initialized, and the comparison
    # used == even though P is described as a *minimum* score.
    N = int(input('Digite o numero de competidores: '))
    P = int(input('Digite o numero mínimo de ponto para as duas fases: '))
    notas = []
    for _ in range(N):
        x = int(input('Nota1: '))
        y = int(input('Nota2: '))
        notas.append((x, y))
    print(conta_aprovados(notas, P))
"[email protected]"
]
| |
b13cac275f608ebea67b00f70a7d5e7ec19afebe | fa247fbb7755cf21fa262b02fa6c36a19adae55a | /manage.py | d344b73c096cab159e05d4a868f32b29a1888f88 | []
| no_license | crowdbotics-apps/csantosmachadogmailco-897 | 9ea0f851badc9678a8148ea051131eaa711b26de | b76954f678212ece02777d3deebb3e571bfb578c | refs/heads/master | 2022-12-13T12:51:23.337978 | 2019-02-07T13:01:07 | 2019-02-07T13:01:07 | 169,572,262 | 0 | 0 | null | 2022-12-08T01:37:36 | 2019-02-07T12:59:48 | Python | UTF-8 | Python | false | false | 823 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Point Django at this project's settings unless the caller already did.
    settings_module = "csantosmachadogmailco_897.settings"
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings_module)
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # Distinguish "Django itself is missing" from some other import
        # failure inside django.core.management.
        try:
            import django  # noqa: F401 -- probe only
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        raise
    execute_from_command_line(sys.argv)
| [
"[email protected]"
]
| |
d14ce4d4e3bde8dff86cf8596a0610aa6ce7e652 | 210e88536cd2a917fb66010ff69f6710b2261e8e | /games/migrations/0002_auto__add_field_game_is_over.py | ab9299348edd60d9b0e5600523b1bc7cd285f9ce | []
| no_license | tlam/multiverse_sidekick | e5ef1fa908c6fd3fee4d816aa1776b7243075e8c | 9211e4cb36611088420a79666f0c40ecb0a6b645 | refs/heads/master | 2020-04-17T08:30:28.396623 | 2015-08-27T03:36:47 | 2015-08-27T03:36:47 | 9,423,955 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,875 | py | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply the migration: add the boolean 'is_over' flag to Game."""
        # Adding field 'Game.is_over'
        db.add_column(u'games_game', 'is_over',
                      self.gf('django.db.models.fields.BooleanField')(default=False),
                      keep_default=False)
    def backwards(self, orm):
        """Revert the migration: drop the 'is_over' column again."""
        # Deleting field 'Game.is_over'
        db.delete_column(u'games_game', 'is_over')
models = {
u'environment.environment': {
'Meta': {'object_name': 'Environment'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'games.activehero': {
'Meta': {'object_name': 'ActiveHero'},
'game': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['games.Game']"}),
'hero': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['heroes.Hero']"}),
'hp': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'games.game': {
'Meta': {'object_name': 'Game'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['environment.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_over': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'profile': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['profiles.Profile']"}),
'villain': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['villains.Villain']"}),
'villain_hp': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'heroes.hero': {
'Meta': {'object_name': 'Hero'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'starting_hp': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'profiles.profile': {
'Meta': {'object_name': 'Profile'},
'date_of_birth': ('django.db.models.fields.DateField', [], {}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_admin': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
u'villains.villain': {
'Meta': {'object_name': 'Villain'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'starting_hp': ('django.db.models.fields.IntegerField', [], {'default': '0'})
}
}
complete_apps = ['games'] | [
"[email protected]"
]
| |
d1d2b859904e2d714def63d27148d911b68e70b9 | 43e900f11e2b230cdc0b2e48007d40294fefd87a | /Facebook/PhoneInterview/shortest_job_first.py | 02f0c808d6aad85a7f1028a46e84e57fdcc92298 | []
| no_license | DarkAlexWang/leetcode | 02f2ed993688c34d3ce8f95d81b3e36a53ca002f | 89142297559af20cf990a8e40975811b4be36955 | refs/heads/master | 2023-01-07T13:01:19.598427 | 2022-12-28T19:00:19 | 2022-12-28T19:00:19 | 232,729,581 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,058 | py | import heapq
class process:
    """A job with an arrival time and an execution (burst) time."""

    def __init__(self, arr, exe):
        # The original named this method "process", so instances could
        # never actually be constructed with arguments.
        self.arrTime = arr
        self.exeTime = exe


class Solution:
    def shortest_job_first(self, req, dur):
        """Average waiting time under non-preemptive Shortest-Job-First.

        req -- job arrival times, sorted ascending
        dur -- execution durations, same length as req
        Returns the average wait rounded to 2 decimals, or 0 for empty or
        mismatched input.
        """
        if not req or not dur or len(req) != len(dur):
            return 0
        index, length = 0, len(req)
        wait_time, cur_time = 0, 0
        # Min-heap of (exeTime, arrTime): shortest job first, ties broken
        # by earlier arrival.  The original called heappush/heappop as
        # list methods and never advanced `index`.
        pq = []
        while pq or index < length:
            if pq:
                exe, arr = heapq.heappop(pq)
                wait_time += cur_time - arr
                cur_time += exe
                # Admit every job that arrived while this one was running.
                while index < length and req[index] <= cur_time:
                    heapq.heappush(pq, (dur[index], req[index]))
                    index += 1
            else:
                # CPU idle: jump to the next arrival and admit it.
                heapq.heappush(pq, (dur[index], req[index]))
                cur_time = req[index]
                index += 1
        return round(wait_time / length, 2)


if __name__ == '__main__':
    solution = Solution()
    res = solution.shortest_job_first([1, 2, 3, 4], [1, 2, 3, 4])
    print(res)
| [
"[email protected]"
]
| |
ee5988f7474b4fc537308710e17b74a741581fd1 | d8ea695288010f7496c8661bfc3a7675477dcba0 | /django/ewm/file/admin.py | edbb70030fbb242451d77d39501bf657b1b2fdb1 | []
| no_license | dabolau/demo | de9c593dabca26144ef8098c437369492797edd6 | 212f4c2ec6b49baef0ef5fcdee6f178fa21c5713 | refs/heads/master | 2021-01-17T16:09:48.381642 | 2018-10-08T10:12:45 | 2018-10-08T10:12:45 | 90,009,236 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 660 | py | from django.contrib import admin
from file.models import *
###
# Register the attachment (File) model with the Django admin site and
# customize how it is listed and searched.
###
@admin.register(File)
class FileAdmin(admin.ModelAdmin):
    """Admin configuration for the File (attachment) model."""
    ###
    # Field names shown in the admin change-list
    ###
    list_display = [
        'file_name',
        # 'manufacturer_name',
        # 'model_specification',
        # 'equipment_location',
        # 'enable_date',
    ]
    ###
    # Field names searched by the admin search box
    ###
    search_fields = [
        'file_name',
        # 'manufacturer_name',
        # 'model_specification',
        # 'equipment_location',
        # 'enable_date',
    ]
| [
"[email protected]"
]
| |
04153bb5d37ce2d86223a0bd652aa1f3ce650c12 | 4a344071b0dc0e43073f5aa680dc9ed46074d7db | /azure/mgmt/network/models/network_management_client_enums.py | 5f0cd8bb7a6b49af179f7ff4a5221d9d397fba4a | []
| no_license | pexip/os-python-azure-mgmt-network | 59ee8859cda2a77e03c051d104b1e8d2f08c1fe3 | fa4f791818b8432888c398b3841da58e5aeb370b | refs/heads/master | 2023-08-28T05:02:29.843835 | 2017-02-28T11:38:16 | 2017-02-28T11:47:18 | 54,524,386 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,166 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from enum import Enum
# Auto-generated enum definitions (AutoRest; see file header).  The member
# values are the exact strings used on the wire by the Azure Network REST
# API, so they must not be edited by hand.
class TransportProtocol(Enum):
    udp = "Udp"
    tcp = "Tcp"
class IPAllocationMethod(Enum):
    static = "Static"
    dynamic = "Dynamic"
class IPVersion(Enum):
    ipv4 = "IPv4"
    ipv6 = "IPv6"
class SecurityRuleProtocol(Enum):
    tcp = "Tcp"
    udp = "Udp"
    asterisk = "*"
class SecurityRuleAccess(Enum):
    allow = "Allow"
    deny = "Deny"
class SecurityRuleDirection(Enum):
    inbound = "Inbound"
    outbound = "Outbound"
class RouteNextHopType(Enum):
    virtual_network_gateway = "VirtualNetworkGateway"
    vnet_local = "VnetLocal"
    internet = "Internet"
    virtual_appliance = "VirtualAppliance"
    none = "None"
class ApplicationGatewayProtocol(Enum):
    http = "Http"
    https = "Https"
class ApplicationGatewayCookieBasedAffinity(Enum):
    enabled = "Enabled"
    disabled = "Disabled"
class ApplicationGatewayBackendHealthServerHealth(Enum):
    unknown = "Unknown"
    healthy = "Healthy"
    unhealthy = "Unhealthy"
    partial = "Partial"
class ApplicationGatewaySkuName(Enum):
    standard_small = "Standard_Small"
    standard_medium = "Standard_Medium"
    standard_large = "Standard_Large"
    waf_medium = "WAF_Medium"
    waf_large = "WAF_Large"
class ApplicationGatewayTier(Enum):
    standard = "Standard"
    waf = "WAF"
class ApplicationGatewaySslProtocol(Enum):
    tl_sv1_0 = "TLSv1_0"
    tl_sv1_1 = "TLSv1_1"
    tl_sv1_2 = "TLSv1_2"
class ApplicationGatewayRequestRoutingRuleType(Enum):
    basic = "Basic"
    path_based_routing = "PathBasedRouting"
class ApplicationGatewayOperationalState(Enum):
    stopped = "Stopped"
    starting = "Starting"
    running = "Running"
    stopping = "Stopping"
class ApplicationGatewayFirewallMode(Enum):
    detection = "Detection"
    prevention = "Prevention"
class AuthorizationUseStatus(Enum):
    available = "Available"
    in_use = "InUse"
class ExpressRouteCircuitPeeringAdvertisedPublicPrefixState(Enum):
    not_configured = "NotConfigured"
    configuring = "Configuring"
    configured = "Configured"
    validation_needed = "ValidationNeeded"
class ExpressRouteCircuitPeeringType(Enum):
    azure_public_peering = "AzurePublicPeering"
    azure_private_peering = "AzurePrivatePeering"
    microsoft_peering = "MicrosoftPeering"
class ExpressRouteCircuitPeeringState(Enum):
    disabled = "Disabled"
    enabled = "Enabled"
class ExpressRouteCircuitSkuTier(Enum):
    standard = "Standard"
    premium = "Premium"
class ExpressRouteCircuitSkuFamily(Enum):
    unlimited_data = "UnlimitedData"
    metered_data = "MeteredData"
class ServiceProviderProvisioningState(Enum):
    not_provisioned = "NotProvisioned"
    provisioning = "Provisioning"
    provisioned = "Provisioned"
    deprovisioning = "Deprovisioning"
class LoadDistribution(Enum):
    default = "Default"
    source_ip = "SourceIP"
    source_ip_protocol = "SourceIPProtocol"
class ProbeProtocol(Enum):
    http = "Http"
    tcp = "Tcp"
class EffectiveRouteSource(Enum):
    unknown = "Unknown"
    user = "User"
    virtual_network_gateway = "VirtualNetworkGateway"
    default = "Default"
class EffectiveRouteState(Enum):
    active = "Active"
    invalid = "Invalid"
class VirtualNetworkPeeringState(Enum):
    initiated = "Initiated"
    connected = "Connected"
    disconnected = "Disconnected"
class VirtualNetworkGatewayType(Enum):
    vpn = "Vpn"
    express_route = "ExpressRoute"
class VpnType(Enum):
    policy_based = "PolicyBased"
    route_based = "RouteBased"
class VirtualNetworkGatewaySkuName(Enum):
    basic = "Basic"
    high_performance = "HighPerformance"
    standard = "Standard"
    ultra_performance = "UltraPerformance"
class VirtualNetworkGatewaySkuTier(Enum):
    basic = "Basic"
    high_performance = "HighPerformance"
    standard = "Standard"
    ultra_performance = "UltraPerformance"
class ProcessorArchitecture(Enum):
    amd64 = "Amd64"
    x86 = "X86"
class VirtualNetworkGatewayConnectionStatus(Enum):
    unknown = "Unknown"
    connecting = "Connecting"
    connected = "Connected"
    not_connected = "NotConnected"
class VirtualNetworkGatewayConnectionType(Enum):
    ipsec = "IPsec"
    vnet2_vnet = "Vnet2Vnet"
    express_route = "ExpressRoute"
    vpn_client = "VPNClient"
class NetworkOperationStatus(Enum):
    in_progress = "InProgress"
    succeeded = "Succeeded"
    failed = "Failed"
| [
"[email protected]"
]
| |
0fe2a3ee5bcbf151df129e38ff5051b969889aca | d9dcbd9f4dc60ab752670d2b7025c2da05f6d69d | /study_day12_flask01/15_hook.py | 61dfd8866cf5d20895859f744a5ca530dfbbfdc9 | []
| no_license | chenliang15405/python-learning | 14c7e60c794026b1f2dadbbbe82f63e4745b0c23 | 6de32a1c9b729e9528b45c080e861b3da352f858 | refs/heads/master | 2020-08-28T12:21:07.544254 | 2020-01-04T10:49:50 | 2020-01-04T10:49:50 | 217,696,366 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 763 | py | """
请求的钩子
"""
from flask import Flask, request
app = Flask(__name__)


@app.route("/hello")
def index():
    print("hello 执行")
    return "login success"


# Runs once, before the very first request after the server starts.
@app.before_first_request
def handle_before_first_request():
    print("第一次请求处理前执行")


# Runs before every request.
@app.before_request
def handle_before_request():
    print("每次请求之前都被执行")


# Runs after every request unless an unhandled exception occurred.
# An after_request hook must accept the response object and return it
# (or a replacement); the original omitted both, which made Flask raise
# a TypeError on every request.
@app.after_request
def handle_after_request(response):
    print("每次请求之后都执行,出现异常则不执行")
    return response


# Runs after every request, even for unknown routes or when an exception
# occurred.  Teardown callbacks are invoked with the exception (or None),
# so the parameter is required.  All hooks were previously named `index`,
# shadowing one another at module level.
@app.teardown_request
def handle_teardown_request(exc):
    print(request.path)
    print("每次请求之后执行,无论是否出现异常,都执行")


if __name__ == '__main__':
    app.run()
| [
"[email protected]"
]
| |
4d3956aca08cd84cabb584e92a9c96a95ef34502 | 11cf40946c55b47886cfe8777916a17db82c2309 | /ex8.py | 717dbf1941b429dd3f54602f98d5e4661d89267e | []
| no_license | dalalsunil1986/python_the_hard_way_exercises | fc669bf2f823a4886f0de717d5f1ca0d0233f6af | bc329999490dedad842e23e8447623fd0321ffe0 | refs/heads/master | 2023-05-03T01:35:24.097087 | 2021-05-16T00:43:56 | 2021-05-16T00:43:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 281 | py | formatter = "{} {} {} {}"
# The positional arguments can be any type -- format() converts them.
print(formatter.format(1, 2, 3, 4))
print(formatter.format("one", "two", "three", "four"))
# The template can even format copies of itself.
print(formatter.format(formatter, formatter, formatter, formatter))
print(formatter.format(
    "Try your",
    "Own text",
    "Maybe a",
    "Or some bs"
))
| [
"[email protected]"
]
| |
bf8de6800a12c7d01677b1d90a2bcfdcc875683f | c68c841c67f03ab8794027ff8d64d29356e21bf1 | /Sort Letters by Case.py | df083f821551643b58d7653b6921c678258e3104 | []
| no_license | jke-zq/my_lintcode | 430e482bae5b18b59eb0e9b5b577606e93c4c961 | 64ce451a7f7be9ec42474f0b1164243838077a6f | refs/heads/master | 2020-05-21T20:29:11.236967 | 2018-06-14T15:14:55 | 2018-06-14T15:14:55 | 37,583,264 | 8 | 2 | null | null | null | null | UTF-8 | Python | false | false | 593 | py | class Solution:
"""
@param chars: The letters array you should sort.
"""
def sortLetters(self, chars):
# write your code here
if not chars:
return None
length = len(chars)
left, pivot, right = 0, 0, length - 1
while left <= right:
if chars[left].islower():
chars[pivot], chars[left] = chars[left], chars[pivot]
pivot += 1
left += 1
else:
chars[left], chars[right] = chars[right], chars[left]
right -= 1
| [
"[email protected]"
]
| |
787c2274a31c79dfdceb6628ae8aab2f7590a368 | 630e5fa4fec4cee4b6936eec74a726550406c11f | /test/functional/rpc_bip38.py | 93dfb7a50712e55599f4be7defa4821459115c3a | [
"MIT"
]
| permissive | crypTuron/PengolinCoin-Core | 4d815d25de927d42dc890379d15738ee728c525e | 3d6c66dd930110075ff44ee6f5a4364c533becd7 | refs/heads/master | 2022-11-24T21:17:56.271853 | 2020-07-23T13:49:52 | 2020-07-23T13:49:52 | 282,408,670 | 0 | 0 | MIT | 2020-07-25T09:04:22 | 2020-07-25T09:04:21 | null | UTF-8 | Python | false | false | 1,016 | py | #!/usr/bin/env python3
# Copyright (c) 2018 The PENGOLINCOIN developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test RPC commands for BIP38 encrypting and decrypting addresses."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
class Bip38Test(BitcoinTestFramework):
    """Round-trip check: a key bip38-encrypted on node0 must decrypt back
    to the same address on node1."""
    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 2
    def run_test(self):
        password = 'test'
        address = self.nodes[0].getnewaddress()
        # NOTE(review): privkey is fetched but never used below.
        privkey = self.nodes[0].dumpprivkey(address)
        self.log.info('encrypt address %s' % (address))
        bip38key = self.nodes[0].bip38encrypt(address, password)['Encrypted Key']
        self.log.info('decrypt bip38 key %s' % (bip38key))
        # Decrypting on a *different* node proves the key is self-contained.
        assert_equal(self.nodes[1].bip38decrypt(bip38key, password)['Address'], address)
if __name__ == '__main__':
    Bip38Test().main()
| [
"[email protected]"
]
| |
8c24d77f85ac64da80cb0bd41fae46738e54e830 | b085f79c4dbc07557e4c82f71628f44d1a2a3c55 | /accounts/migrations/0003_auto_20160922_0338.py | 0027cb116306145f49ab57259bd0c0c132400216 | []
| no_license | HugOoOguH/ctlTwynco | 6c1fd856871243cacc5668cd2ff619130d82c460 | 6d88b4a1a35ce585f6ee2df0372aac948a6b2955 | refs/heads/master | 2020-09-27T16:59:36.790096 | 2016-09-22T05:25:04 | 2016-09-22T05:25:04 | 66,982,996 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 596 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-09-22 03:38
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration (see header): removes Profile.sexM and
    # re-declares Profile.sexH as a boolean with Man/Woman display choices.
    dependencies = [
        ('accounts', '0002_auto_20160920_2010'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='profile',
            name='sexM',
        ),
        migrations.AlterField(
            model_name='profile',
            name='sexH',
            field=models.BooleanField(choices=[(True, 'Man'), (False, 'Woman')], default=True),
        ),
    ]
| [
"[email protected]"
]
| |
030e52259d44d34774c35c42d1d85544d7bbead2 | 0f7e18a483a44352dfac27137b8d351416f1d1bb | /tools/extract_gif.py | 2954d57a2760b14fc8ed5596e03e11684e3c682c | []
| no_license | rinoshinme/slim_finetune | b5ec4ed53a2d6c15dfa5b4cfb73677ccb58a4aa6 | 1e465e3faff668e65cc873828057365114d4cfb1 | refs/heads/master | 2022-11-07T21:02:38.253001 | 2022-11-02T14:48:45 | 2022-11-02T14:48:45 | 199,089,723 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 676 | py | """
Extract image frames from gif files.
"""
import cv2
import os
def read_gif(gif_file, write_folder, name_prefix):
    """Decode *gif_file* and write every frame as a JPEG into *write_folder*.

    Frames are named '<name_prefix>_<index>.jpg' with a 1-based,
    zero-padded index.
    """
    capture = cv2.VideoCapture(gif_file)
    try:
        cnt = 1
        while True:
            ret, frame = capture.read()
            if not ret:
                break
            save_name = os.path.join(write_folder, '%s_%06d.jpg' % (name_prefix, cnt))
            cv2.imwrite(save_name, frame)
            cnt += 1
    finally:
        # The original leaked the capture handle; release it even on error.
        capture.release()
if __name__ == '__main__':
    # Batch-convert every file in this (Windows) folder; extracted frames
    # are written back into the same directory.
    gif_folder = r'D:\data\21cn_baokong\bad_format'
    fnames = os.listdir(gif_folder)
    for name in fnames:
        gif_path = os.path.join(gif_folder, name)
        # Everything before the first dot becomes the frame-name prefix.
        prefix = name.split('.')[0]
        read_gif(gif_path, gif_folder, prefix)
| [
"[email protected]"
]
| |
9a303d5a18b1917d5e0ff10d6430ce6a4b6bd086 | 3b9b4049a8e7d38b49e07bb752780b2f1d792851 | /src/tools/perf/page_sets/blink_memory_mobile.py | 40b5f134886c79490da62c0fdd3e8fdc1c01ea0c | [
"BSD-3-Clause",
"Apache-2.0",
"LGPL-2.0-or-later",
"MIT",
"GPL-2.0-only",
"LicenseRef-scancode-unknown",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | webosce/chromium53 | f8e745e91363586aee9620c609aacf15b3261540 | 9171447efcf0bb393d41d1dc877c7c13c46d8e38 | refs/heads/webosce | 2020-03-26T23:08:14.416858 | 2018-08-23T08:35:17 | 2018-09-20T14:25:18 | 145,513,343 | 0 | 2 | Apache-2.0 | 2019-08-21T22:44:55 | 2018-08-21T05:52:31 | null | UTF-8 | Python | false | false | 5,080 | py | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
from telemetry import story
from page_sets.login_helpers import google_login
# Seconds to let the page settle before and after forcing GC when dumping.
DUMP_WAIT_TIME = 3
class BlinkMemoryMobilePage(page_module.Page):
  """Base story: scroll the page, then capture a browser memory dump."""
  def __init__(self, url, page_set, name):
    super(BlinkMemoryMobilePage, self).__init__(
        url=url, page_set=page_set, name=name,
        shared_page_state_class=shared_page_state.SharedMobilePageState,
        credentials_path='data/credentials.json')
    self.archive_data_file = 'data/blink_memory_mobile.json'
  def _DumpMemory(self, action_runner, phase):
    """Force GC and memory pressure, then request a dump labelled *phase*."""
    with action_runner.CreateInteraction(phase):
      action_runner.Wait(DUMP_WAIT_TIME)
      action_runner.ForceGarbageCollection()
      action_runner.SimulateMemoryPressureNotification('critical')
      action_runner.Wait(DUMP_WAIT_TIME)
      if not action_runner.tab.browser.DumpMemory():
        logging.error('Unable to get a memory dump for %s.', self.name)
  def RunPageInteractions(self, action_runner):
    action_runner.ScrollPage()
    self._DumpMemory(action_runner, 'scrolled')
class TheVergePage(BlinkMemoryMobilePage):
  """The Verge article with the comment thread expanded before measuring."""
  COMMENT_LINK_SELECTOR = '.show_comments_link'
  def __init__(self, page_set):
    super(TheVergePage, self).__init__(
        'http://www.theverge.com/2015/8/11/9133883/taylor-swift-spotify-discover-weekly-what-is-going-on',
        page_set=page_set,
        name='TheVerge')
  def RunPageInteractions(self, action_runner):
    # Expand the comment thread and wait until it is actually populated
    # before running the base scroll-and-dump interaction.
    action_runner.WaitForElement(selector=TheVergePage.COMMENT_LINK_SELECTOR)
    action_runner.ExecuteJavaScript(
        'window.location.hash = "comments"')
    action_runner.TapElement(
        selector=TheVergePage.COMMENT_LINK_SELECTOR)
    action_runner.WaitForJavaScriptCondition(
        'window.Chorus.Comments.collection.length > 0')
    super(TheVergePage, self).RunPageInteractions(action_runner)
class FacebookPage(BlinkMemoryMobilePage):
  """Facebook profile page; waits until the feed has rendered past the fold."""
  def __init__(self, page_set):
    super(FacebookPage, self).__init__(
        'https://facebook.com/barackobama',
        page_set=page_set,
        name='Facebook')
  def RunNavigateSteps(self, action_runner):
    super(FacebookPage, self).RunNavigateSteps(action_runner)
    # "u_0_c" plus an overflowing body means enough feed content has
    # loaded that scrolling is meaningful.
    action_runner.WaitForJavaScriptCondition(
        'document.getElementById("u_0_c") !== null &&'
        'document.body.scrollHeight > window.innerHeight')
class GmailPage(BlinkMemoryMobilePage):
  """Gmail inbox; logs in first and waits for the app shell to finish loading."""
  def __init__(self, page_set):
    super(GmailPage, self).__init__(
        'https://mail.google.com/mail/',
        page_set=page_set,
        name='Gmail')
  def RunNavigateSteps(self, action_runner):
    google_login.LoginGoogleAccount(action_runner, 'google',
                                    self.credentials_path)
    super(GmailPage, self).RunNavigateSteps(action_runner)
    # Needs to wait for navigation to handle redirects.
    action_runner.WaitForNavigate()
    action_runner.WaitForElement(selector='#apploadingdiv')
    # The loading overlay fades to opacity "0" once the app is ready.
    action_runner.WaitForJavaScriptCondition(
        'document.querySelector("#apploadingdiv").style.opacity == "0"')
class BlinkMemoryMobilePageSet(story.StorySet):
  """Key mobile sites for Blink memory reduction."""
  def __init__(self):
    super(BlinkMemoryMobilePageSet, self).__init__(
        archive_data_file='data/blink_memory_mobile.json',
        cloud_storage_bucket=story.PARTNER_BUCKET)
    # Why: High rate of Blink's memory consumption rate.
    self.AddStory(BlinkMemoryMobilePage(
        'https://www.pinterest.com',
        page_set=self,
        name='Pinterest'))
    self.AddStory(FacebookPage(self))
    # TODO(bashi): Enable TheVergePage. http://crbug.com/522381
    # self.AddStory(TheVergePage(self))
    # Why: High rate of Blink's memory consumption on low-RAM devices.
    self.AddStory(BlinkMemoryMobilePage(
        'http://en.m.wikipedia.org/wiki/Wikipedia',
        page_set=self,
        name='Wikipedia (1 tab) - delayed scroll start',))
    self.AddStory(BlinkMemoryMobilePage(
        url='http://www.reddit.com/r/programming/comments/1g96ve',
        page_set=self,
        name='Reddit'))
    self.AddStory(BlinkMemoryMobilePage(
        'https://en.blog.wordpress.com/2012/09/04/freshly-pressed-editors-picks-for-august-2012/',
        page_set=self,
        name='Wordpress'))
    # Why: Renderer memory usage is high.
    self.AddStory(BlinkMemoryMobilePage(
        'http://worldjournal.com/',
        page_set=self,
        name='Worldjournal'))
    # Why: Key products.
    self.AddStory(GmailPage(page_set=self))
    self.AddStory(BlinkMemoryMobilePage(
        'http://googlewebmastercentral.blogspot.com/2015/04/rolling-out-mobile-friendly-update.html?m=1',
        page_set=self,
        name='Blogger'))
    self.AddStory(BlinkMemoryMobilePage(
        'https://plus.google.com/app/basic/110031535020051778989/posts?source=apppromo',
        page_set=self,
        name='GooglePlus'))
| [
"[email protected]"
]
| |
c3d90ec27fcc681bfa880229fd4e3c3d7269f3d8 | 9de3cd1f8d66e223b7f193776376147136321b30 | /tests/test_cookie_storage.py | 80d9dbd52236af389425acd1c41c63ce277620ad | [
"Apache-2.0"
]
| permissive | kolko/aiohttp_session | 223b5eaf8c291407f289e20efe692a0afef31bca | 8df78b3d7bd5623f82ff8171c9d947107421f4ce | refs/heads/master | 2021-01-18T09:43:10.928082 | 2015-08-06T16:15:51 | 2015-08-06T16:15:51 | 40,311,233 | 0 | 0 | null | 2015-08-06T15:06:58 | 2015-08-06T15:06:58 | null | UTF-8 | Python | false | false | 5,099 | py | import asyncio
import json
import socket
import unittest
from aiohttp import web, request
from aiohttp_session import (Session, session_middleware,
get_session, SimpleCookieStorage)
class TestSimleCookieStorage(unittest.TestCase):
    """Functional tests for aiohttp_session's SimpleCookieStorage.

    Each test starts a real aiohttp server (with the session middleware
    installed) on a free localhost port and exercises it over HTTP.
    NOTE(review): the 'Simle'/'sesssion' typos are kept as-is because they
    are part of the public test names.
    """
    def setUp(self):
        # Use a private event loop per test and clear the global one so any
        # code that implicitly relies on the default loop fails loudly.
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(None)
        self.srv = None
        self.handler = None
    def tearDown(self):
        # Drain open connections before closing the server and the loop.
        self.loop.run_until_complete(self.handler.finish_connections())
        self.srv.close()
        self.loop.close()
    def find_unused_port(self):
        # Bind to port 0 so the OS assigns a free port, then release it.
        # (Small race window: the port could be taken again before the
        # server binds, but that is acceptable for tests.)
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.bind(('127.0.0.1', 0))
        port = s.getsockname()[1]
        s.close()
        return port
    @asyncio.coroutine
    def create_server(self, method, path, handler=None):
        """Start an app with the session middleware; return (app, srv, url).

        The created handler and server are stored on self so tearDown can
        shut them down.
        """
        middleware = session_middleware(SimpleCookieStorage())
        app = web.Application(middlewares=[middleware], loop=self.loop)
        if handler:
            app.router.add_route(method, path, handler)
        port = self.find_unused_port()
        handler = app.make_handler()
        srv = yield from self.loop.create_server(
            handler, '127.0.0.1', port)
        url = "http://127.0.0.1:{}".format(port) + path
        self.handler = handler
        self.srv = srv
        return app, srv, url
    def make_cookie(self, data):
        # SimpleCookieStorage stores the session payload as plain JSON in
        # the AIOHTTP_SESSION cookie.
        value = json.dumps(data)
        return {'AIOHTTP_SESSION': value}
    def test_create_new_sesssion(self):
        # No incoming cookie -> the middleware hands out a fresh, empty,
        # unmodified session.
        @asyncio.coroutine
        def handler(request):
            session = yield from get_session(request)
            self.assertIsInstance(session, Session)
            self.assertTrue(session.new)
            self.assertFalse(session._changed)
            self.assertEqual({}, session)
            return web.Response(body=b'OK')
        @asyncio.coroutine
        def go():
            _, _, url = yield from self.create_server('GET', '/', handler)
            resp = yield from request('GET', url, loop=self.loop)
            self.assertEqual(200, resp.status)
        self.loop.run_until_complete(go())
    def test_load_existing_sesssion(self):
        # A valid cookie is parsed into a non-new session with its data.
        @asyncio.coroutine
        def handler(request):
            session = yield from get_session(request)
            self.assertIsInstance(session, Session)
            self.assertFalse(session.new)
            self.assertFalse(session._changed)
            self.assertEqual({'a': 1, 'b': 2}, session)
            return web.Response(body=b'OK')
        @asyncio.coroutine
        def go():
            _, _, url = yield from self.create_server('GET', '/', handler)
            resp = yield from request(
                'GET', url,
                cookies=self.make_cookie({'a': 1, 'b': 2}),
                loop=self.loop)
            self.assertEqual(200, resp.status)
        self.loop.run_until_complete(go())
    def test_change_sesssion(self):
        # Mutating the session makes the middleware send back an updated
        # cookie with httponly set and path '/'.
        @asyncio.coroutine
        def handler(request):
            session = yield from get_session(request)
            session['c'] = 3
            return web.Response(body=b'OK')
        @asyncio.coroutine
        def go():
            _, _, url = yield from self.create_server('GET', '/', handler)
            resp = yield from request(
                'GET', url,
                cookies=self.make_cookie({'a': 1, 'b': 2}),
                loop=self.loop)
            self.assertEqual(200, resp.status)
            morsel = resp.cookies['AIOHTTP_SESSION']
            # NOTE(review): eval() parses the cookie payload here; the data
            # is produced by this test, but json.loads would be safer.
            self.assertEqual({'a': 1, 'b': 2, 'c': 3}, eval(morsel.value))
            self.assertTrue(morsel['httponly'])
            self.assertEqual('/', morsel['path'])
        self.loop.run_until_complete(go())
    def test_clear_cookie_on_sesssion_invalidation(self):
        # Invalidating the session clears the cookie (empty "{}" payload).
        @asyncio.coroutine
        def handler(request):
            session = yield from get_session(request)
            session.invalidate()
            return web.Response(body=b'OK')
        @asyncio.coroutine
        def go():
            _, _, url = yield from self.create_server('GET', '/', handler)
            resp = yield from request(
                'GET', url,
                cookies=self.make_cookie({'a': 1, 'b': 2}),
                loop=self.loop)
            self.assertEqual(200, resp.status)
            self.assertEqual(
                'Set-Cookie: AIOHTTP_SESSION="{}"; httponly; Path=/'.upper(),
                resp.cookies['AIOHTTP_SESSION'].output().upper())
        self.loop.run_until_complete(go())
    def test_dont_save_not_requested_session(self):
        # A handler that never touches the session must not trigger a
        # Set-Cookie header.
        @asyncio.coroutine
        def handler(request):
            return web.Response(body=b'OK')
        @asyncio.coroutine
        def go():
            _, _, url = yield from self.create_server('GET', '/', handler)
            resp = yield from request(
                'GET', url,
                cookies=self.make_cookie({'a': 1, 'b': 2}),
                loop=self.loop)
            self.assertEqual(200, resp.status)
            self.assertNotIn('AIOHTTP_SESSION', resp.cookies)
        self.loop.run_until_complete(go())
| [
"[email protected]"
]
| |
64010d201f6a37108148ac511448e0bfea8f5f95 | 2d9a17e2b896d2f6a90913a4ba02d41f0ede5dd0 | /_gsinfo/qiyecxb/test.py | 8290bb75e875e96e55b8b937147c849a9ba5f19c | []
| no_license | wolfwhoami/xxxxx | 1cf2ed2c8ed78048d87cccf2953ca86c0871a783 | 670787ec71127bc05c1645cc3d8ef7c3a91fe84b | refs/heads/master | 2020-03-30T00:44:55.864817 | 2016-12-16T01:45:03 | 2016-12-16T01:45:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,842 | py | #!/usr/bin/env python
# -*- coding:utf8 -*-
import os
import sys
sys.path.append(sys.path[0]+"/..")
from spider.spider import Spider
from spider.httpreq import SessionRequests, CurlReq
import spider.util
import pycurl
import cutil
import json
import abc
from spider.runtime import Log
import time
import base64
from Crypto.Cipher import AES
class CCIQ_AES:
    """AES-CBC helper used by the app: PKCS#7 padding, all-zero IV, and
    base64 for the wire format.  Python 2 code (str keys/plaintext)."""

    def __init__(self, key=None):
        # Fall back to the application's hard-coded key when none is given.
        self.key = key if key is not None else "9BF70E91907CA25135B36B5328AF36C4"
        block = 16
        # PKCS#7: append N copies of chr(N), where N pads up to the block size.
        self.pad = lambda text: text + chr(block - len(text) % block) * (block - len(text) % block)
        # Strip the padding by reading the count from the last byte.
        self.unpad = lambda text: text[:-ord(text[-1:])]

    def encrypt(self, raw):
        """Pad *raw*, encrypt with AES-CBC (zero IV), return base64 text."""
        cipher = AES.new(self.key, AES.MODE_CBC, "\0" * 16)
        return base64.b64encode(cipher.encrypt(self.pad(raw)))

    def decrypt(self, enc):
        """Base64-decode *enc*, decrypt with AES-CBC (zero IV), unpad."""
        cipher = AES.new(self.key, AES.MODE_CBC, "\0" * 16)
        return self.unpad(cipher.decrypt(base64.b64decode(enc)))
#req = {"encryptedJson":"ZlWT15DsXFm0Y4QnYoK2ufXYi39Plo9\/yhwguqs9FWAHRqkKsKobDI+ai8+GR4NTJNeaHC7hDsivmsbOkOQ\/0lHsES3Wl5kF+pLW98YratGzlf4Tc5qnXiNDVUrc0WaqJD8obqeFhJLQsocfxB8REE6XpIbzthyB+CHX3TQpcJskJEZkJOyPxRdg9PTsCjTLPmgNHuWq3fSNyd3DpR6RIl\/AJNb+Ex70Uf0QDarg3koMErtDXwvcnEtxblp3kaMu2QmXxnDbkClaGASOP6ZsuKgVu6LXdW\/KOHk6cP+\/tEQ=","extJson":"Hoi6oX70l9whauZmjq8jVAmoe3UspXXhX9mPG+KAeqs1rKZVr\/uapICH92P\/CrrycI\/OjobbzuafHXthwGM38\/RMXUoOjROK+Psk7SCSv2\/vBYNK3RYrJk26Fgu1HxLDg9LWqdILYeoDE2G3IezMHPYyzrU1yEoFGenXS1U8gvc="}
req = {'encryptedJson': 'j4kzTnVqwSoHC8BLgjy0n5nZl1zDkMKA3NY4NVwFfL+65BzsXkmfBvobIYdT04u/Gnyzr5ku9XUk0lJLny6uOx9z0pYqJ7cqA+KQPlb5sjJP1aBtfQPKAwFe1KEe35yy3uk8dChwBgt2Gaefubx1qb3dNP7DIyiplJ2q3WEOuvqAmIZRKet6MSds5NRzb5Kugp0IUN2kRqO10jdv+ILkem8pU9ZNxRFaqlpG+d/x5c0ouiCroEudVE50RN03eXxI/+HSudcalTJ82iWGtK/3LC8vHYl8hqz+jUai3L37/VDN65BRAEezpd7zxibNjwMf', 'extJson': 'O3wNDhI0rBOcBnMa0PfzrrQ9CVUtEGW4+4ryKQpX3Q3LSTt2psf0V8gzYq5CObmvolnCfTl8Amfk6qUIRulQqbX7Vi/hzCI44sUJE/vx8XmDT2whbjcnJFx9mmf3lAkTmWe9lQO6AfC+pxWodDf8Og=='}
vv = {"c":"yfqNKTQ0trAStqJeTkG3hBc5zyK8fgp/q+Zmwq8EDUbFcom7Xy7UdSIVX/rClF/ZmsA3I4nJUHA+5o6iURC33vNfUj/tG1pdSESwmP7fSgcG62JDjw0c7qOWU0pNr8oJy5+Q+GziteEoJpASp9G439W4WqC6bwfDgP8Serd2KGoIV95jht1cqr8GofvWI9ipWXpKj6g3YW40jTZECZpO5pVE65y5akOlEjdqE35CUKoqhFFZkVooDWPLLQC8YlsAHp17NOW3aNJHtyRd35XGsQWLA79wqorrUIjCwQ6orJqPnuthJdtF37/a8rp7D1tGJUxteInnl4Dp4Gmqdkc5oobNuRg4MrE3g8ZMuWeq7xUGGCThKoDLdP2eDrMmuFaTNHwDktEfn/gixjyA7xUbUN21A9JdakvIfNQA9x6FVg6zZdH8zExWDhq6WGFGEFexcgJZYUZUclCGy4TkQELnMQbg0ZxPcu0YKkyf3FOaKN7815wln96Vht1G3B8F7TutV53S/9E4YpAiNR5vWp7kWfbU19jmEzWtBMv2YTUZQla4XHq12FMr8XvqkPKrh4KNCfOXkziwZ3sMHpr2658wRVSTgap9WPWf0DdZsaGXmO5wHyrVt6TXG5i4ao/ArWnTRANItUJziL7NOFEpffg7MYUXNJUQF8EUH1cRV5DW5mNS3lTj7v5Rb+OufvrB2DYKiQWrTjpZHAif2cLy1ZHPKqW3VZhjcwWT+VaJUGJqPHtp/qmZr/iwl3VJdfYxnabl9TrpZ7IcE6O6JRDRuMxfpptvNnxzucWlABITre1CzPr3cQvYatv7UOFY91TEHb852lIjFafRw/jhdP66jiAOKJF1Juz/A+2RIyaiIKNnAzBHmxy5vg7OfPnoM3e5Yd7/NvGuBt7oa4+T6+d9RJgCSsaJJRMN5+Uw/uZJ1aNNZeQCz8BeVGIdyp46Ub0aeyoybTl+pZFeuULqZSO/wwSNR2SQImpsJVLfveMcR96Zuv34eJwwrv6F0e1ono5G640uoxZXjDo0G78pkcyD1QrKxxSpTg06/uMr9ZFcmdb+wvrC78hQkyWLSOG3Llce+q25ps6iCsIALndu3gJxukokDi5ILp3FD2u+9UYeVV+1kep3PuK31/em4+bzKTFFZzG1Lw5wvKpkUfeZwB2GgnnS+YCX4bCK6JPRmCIC3J+DVf7NBNk25vfHNVmr3Erw7h0KWSwuRz6gVSHj3g9BJJuwd19Pc1oLJCsHY4U8fF0mrL2v4BqwZfubv75tqLYN9HtUb++HzgWso6RPvhFpxww2pvv91o23OgKQ7RtXPn4NL1WhUmnSP16StFAR4ifyqRQLcFVGxvzFRAX+PFyIXKdYvbwjhid1oLxUk3grandJAtdd8Dd03r5SK7GmEXyLQODxE5hCnIFq1qGhqAVYXJ6GRJHYepTDRQ8pYUC8QbXs/5J5ntSmZHzYJNTIrRriwzCDUI6HazJr8iTKqeliWdGh1WypgjClvpBeYyuC9ryk74twXtKyB5E0OwtDfKPekMJ8oeH3VKiWtNFHDkwWDZMoi546JHCok/ynzqXyIB73Gq9XHiXra6nZMvYVO2HMIljoC4lfAnzDprhZ1kMgkKxSJHoXJMle2Z2Hfmn25iMprfbIsIXNdz4SRrBmWqw0qkY/pOLDvACltpOrOkL//3wXBwQN2AP6zk6E6G0qMK0MBfFPQXJ1cdaZtTXRnbgEBPqNddBwO5xnb6o8vwp2veuZ0nTxCg+X5PrTQiGqJ47JqgbgaTCfo6NDftz2Ut3N9BgRRtPBY1oWDnISZp+5qFMQYno/PYhoQSac0E7Qv1Ls9hPAhea/l1preojLXZIzbtDi+2t8kd//KbgZDcajNejtGoaxdsnR/ZNuPGvv0c8ijWE+CRV47XWrMKKrFIu/ffqTfmbANXt6GOIWzKRomYD79kOGMeSreHbauf7QfwJ8jTcqk2g8sZFMYhZhLgiqQKTNhzpvN
VfzWv5clCpLOVWWft0xujuXI5YXCBvuVLAKn1DYJXcoBq6RfaQ2ZVVXaSCTKxyIPRR/26HqzSDoVx/8CHTtrtXOtsl8U/h1Qo1arncUKUfLX8htYZX+Y/APDWPd+vfVAlTB14VP56H5pPLwBNYPJ2ayqeq8tFCI2NK4eqiZ4Ts70RayYbt8brIQim1UoRLMH7tqXqfIbJqnfs6XlXioevRYYvWcStue4C2yzJZlFdksX5iYYHZ3T3y8K2LO8OZnqo7qKprd2E88IpILF2c26dpCFQg/YIhRLSm4faWwll7Y9QIkQ8PqXFjv/g2pQdux1l5qihtftbsjRh2xCtLsBMctGvtro+HtbRoEiEAomy/UC63pKnYQJgIsyIniuw1AJhjrbFs1Etc+RrYHtqysnmO2NJ+r8nY24RUL0KFt4RzjdbLUSkcpxpOAuenKB93f+qYpAsXmW01mNVo+u1bWl9OVI+WaYBJBdhrR7OQymvtF7bhJJUl+2oqEfsnSpMURm6btYPjNsCAHIsW/Erc3jUSnIGg99T2EYLfLfD2SqDcmuKwQSS1p9KUTx6B3T/vpG/4a6gdfmsyoBei7D7SVk7M4YMQH+HT3UqdCbv5Rikovvyz/U6UapYrsmRC/zMZJjnzpwi+iC0veSq1r+g5kOAoAJgwleLLpeRVes3jBql/tEws6F8zpS1TVTmqUp+Q++covQwwc/60PLPvfo5sQJYwEwNKsDEDhU9EOECyjgrEQjuDGrZApNOv9YXtS4JEpSwKb4Gayhg6m9iiOwVFPBJysugp5WQjgxFuSotX8NErqRsiWJ2Tub9jPcWd0ldjwDqRhnHEcIfmlS/jo3sjczUImB5uP4L6xtslTnq8LWEu+pSpUT4QGcxB1ynaOdGHXVSazesfdTIvz5MdBvIJXKenRUd5I8k4hkde8A3ejINiGcH0G7zIeDB/Aa0HQKvAsKmPZv4UQW4occ9zNkSBIYchDQJAG7jj+GJSDguYIslJwfRysBTZJK02bpEPccu6BF0hC0n2c83MB6ph9AYRrdoyYxDiHpS+J2sm0u5khpUhOQ3esJgQjPZ63pNnv5vPlSHeNtgbdNwdSBCncssSldU7TpBJm3ym5y7+D18DuOnOPj17nLfBwtWwXdfqsAnYwwETIZoJJi1NntU9HZ8BWKgQitxaAf6su/FgX+Khu26lzZFlcpAnHd9oCdec84kgMZiy0pM73AzqS8GJTpqYR6Qr1pPvCssu31ZlGZiJndctnRWZfZNR23IGrgsPZwYtgWtKpxNserPDCLuDayhc5T4Pm96J8EsgFC3chuTnVcxHiMewGFUNN9H2dQthrzH6NQZ6jiinqJxhqqnWR8p6E56c6CnW/qlTWlShdkkkgfBX6H11PIAAkkzTO41sToFtgiIzvZqAATpBxlTOKrVKFT9ui49bAGOqE7EZQRZJvI8jHWPzfGezVU9dH5cmxkvYH2t8Hp68YWrWIROdx9rieSstv9+uzVt1k0s69PUbCq7cB/RiPssdGlq367qNxAPq9SdV0nXb+iaT2dtMtMX/BjLP+72dJuL+iZUrMCNTxZxSKe63G036ScNtdVU6jfu8o6J9wz9WwQh2bH6xDu32CIrleLeCKVglqNEatE/k8xVSF6VPkXpo6h/rqsRqWOdZ4j/LudfabKOejIuCMCffifSePMF+YEhVtk+wxp7zYEWh4jg3prKtuQATCMzUxNty6JxN9CySxZPmi0ddVrQaakvnCFD7sznDb14xUi/I5RjqorCG6SrzoJXFEnFsrQOEJyhDwXC6XX7eWHWDf8Q4QlNU4yhq6uVufGMxySYQQTAutww2sU03ejJziYiBV0tv4upQF00MRlOtWlJGCLFj193R2PgEUaJW9I+CNK0C45rGrQ5TOGtO2BConxlAsaW4M8oo9gZHVYGOtWvVQGlgXxEQcfBrzBfh+UvUKVI8O3Jj3gJOkzf29NHYmyzHETYVGk6/XXAyYqcPmP8xWI
W6PRwZtj7IQhxAoXfStOBZwwR4akiM5B4ToIBxN8IN2twfPUHUo5girbW70AN3PCiz3ECTfprjBTNNsxBu93AsKBDulnQH0WPsdtvRqwWsVO427uZ+XGS4NKreYzAU7gRo+P7S/8msPboInAyRmhyi80UuUdZaKvUMx/6RZ1TolDtiyJNIuC5ufqLyFBNi1GaZYAzprc2jJxMgt07DbLJorEVADgpgULSgZOj2d42bxF6VnARnz/j0TaCKTPez4wkaORUHCY6y5Wxwdh4LLzYhvipWKD+668/vNpQLkoSHiVryB619uhMb2DO7OTPmzgvRSpz15OvlVGC2zeCv47jA260p8uygRBtzS4ZqmCxzsLnEhlUr3VSj4ospvCa0NGdwnWOx5FdifzmTpNQQd1bwZE9u2Tbznhpb6VHwAa4Jcwnkz9VzlkF/TYj9XLj6LQUBXI118ISkz+2q8jxF05jorsE8Ly/3jh2PBBQl+vNw/x7X4O5T9obEe0fldVyhEXcd2Wk1F91LSzKtpurtMZfwHDXO9xp7dY9NkQliHbGAj22/uvsfWtscPcEFjMESQJ+DAL42McZiBMU7P1yc404FJkTOTCLjJZEQHhiIH1E1O6Xv12rCiovf+ZUBKfHqwqc54Z/rVoy0j3TtZk/N2i9ftWeFE96bnrIo4za7CL9luZmJ7qDs522L798AAMk2wdCMjK2Vxjc0PMwtNIW6Y/SQBhPmf8dWOsa/bg8XOFpbUl6JgWgETzDDIqYojXJf8YXpE+X5GYHQlV4+HV34uotjHytlqO1HNyRgUNR567mDG27zZKvGuUxHqKRYV35iLzyX6VRYzcIvZSPeB5eDs6tibO+KJ70L3NNrD402+CwPmJjak1BLINEw8ZCYYD3TI8OEDhRIXuR480QoFFc9iDeXFfJ06f2RrR8uSGuqksC39yrZ5lb3dJPtxmCL/ioAzkeRt7MKfEp4ZLopep+48YVvc34Sz3r47I8mQdQ6BVh+numD7zHeVHegHC5YvjZw/7EwtQmym9uxDzuSHGJQa+Ryw03iPU2qO7D0D85dDIEN0Yw86Qed90pcYp+2M66DPK3SZkVWuZU9hbD0E7w3I/Ha8BEJN3A4czeOxsqxFCw0ZOPW6DARXQoOg4DjE8XhNhI6/ETHs1gQP3p/9q9HiNAFvba7YKSf29sBhLzPE4y6U4l4XPTwLF+ern5ntZgoMQg+oCs/GmAJYgteiw01BJ0EWeLCNNg3AsUJyJ3G9VlvwWFNX2CSodCupr8PT8h+6O37wZENI62xjz8wJgZMf+ixpgmtoMSOxhTjFbPCRJPSweap1j3dJ58xVaUT0yBW/Kxpxw7gVoN/KnNoRWxBc1TOMWbjZ4N6GkixZWtZDCrWeN2PUTWiJAbikRYEtN0QgBKoMs7ke6PTMXge/SnE56THF/dL7irrTC0c0Jz58di6zKfo3FZDp2M6UaosOZIPqJGS8rsCCCn6yUpQyFfzXUfXEVH7mmVIYYffqG7uciVYE8KOSvLvtsh0BM5niVsFdkPiaZ4fGKLakXiRH+RaHXWsR9LxLtNqjp5vubUoW5ZvV/27+CSrbTTRx1+ut8PN06pLJuWcUQrAZAXAh0sJGbOicll1GpCO3UK9o+QZBB2mHc7j37STuWQQ+3upgVuV3Qp4jogXQdmqrR/vIXlovIiGMRkRV5s6KqwADNrXLX9PtRdQgd7E3pHylHts55NEmap9mNFO1yBx+17Zrew+e9aHzInUM1oA5kt0SqiQzO2SUgm/e0mQW6z7cxZZVIMi96v8a8GBRhwRCcJ/1865O324gCFhkQLNyLiBVdKcM+IuxM4fKTuDA+nLjx/noTruN0RKaDXaJutTbrW4C0+A/xoy7XnOS6ne2RRCDFX9qNP6lAdSFobq6J3n3/89HZZB3wDlOQcyRT95WJmAFyCkGRKwGHtar+Fs0bYClVusJvG99FDfljsI2SJM6WrXoF/dkmlBX15QA/oI3KXoQc
BXZjDjA7DN3/2Fk+h9WteBp6xlmP11Idry4dS9ijgysH1befw9AbERSHLqGkzFNEUuuKJIYWQunNhjP/IJtDawMU8BFJE7XgUjofXkozgPfvZrsECq9hd9NQjXaUxsJfE/DrBIXPXBpZ+JcvLjUpQuRD49P+nFJIwRkWCUdNRtVOD3aSQtTElnFsbKXfdEDQP9nZwNpLJ7/FeVR3FakaS2TZV4rSB3PKD5X6lsaKjAoa4bZP7EJgziCHDV36l1HcPzJVlUiux71wASFNiBiPPvOWmN7wK5IadaODXXzqq59U51W+XnpHhrfluX9cYUmiZ5Om2ST2YH3x8R3WhwBrWUO/Zl2p6+Yjpzmq1zP27TKi2Uoaf6JeNA+HQaZrKlhVZvFZ/ZuXuT/99ID7pLQkX/WOx6lJGp8JYP/ndoZY8rwYRAec2zFAsp8iOc3v1xIfzJ58s95WckXE1i9ATxZpAeOXFD5QNILqj3eqaB0xL+NsJRlsQ8nX6/+qdD2Ow86MMU8knCE7oYGmCcA0C4WN/TXruiSQOmFm4lvDcv9FiF9wshvmua6zP3Gvz/y0cOScyn+WyCNpVrkGy766NYfR/jW93Gg61xZuw6PTBOXnZtHGGZsnr/+WFmLetAn30DYWdMM2fg7LagO7OZrLKvMXkpF6Q831lT0SL93Ts8rgq3Tng+livAiDHkFRRcUOU0sgPt823LjKRD2PSCjjNw6xYytOk2EQfscuKmMvV0GI5sPq1Oq11ljsbBap9qR2CyPrcuSyLoDsZPQsPQ603YBTDIQ9unqFLkXXoezs8q3OvdV82PsKTpOXdEtWH6twlcpen+6Fe789Fdck4G0glqbrEpas9fxS77qEN+GPAazBloFDFv5GMJU+LXJchzo9cQ8aFMHryVeXnhedToM81nIP1apsDRFghYddsu8LWK3FU7QTg1iNQ+wiU7aheCxDoOVH5/BEgasY5EGXG5bmurVYFCQFUlVjapUtisWcL7fcIuP/GNVaV+sp/K1CT02BRUyNJ4BideKVHTrTLHkgDF1K2GaXR8wOWow5aNfBMXdi2ucdNjhWirmvpm9bm5JNoAHXcG7m+sKVBj1jNM3HkMnuO8WVin7rwxi9QUa3g0JIBfsXi7/4sikvR2lwujLYkicrfLSBW5h87n7n4jLOvZ1LtQ9dD+C+Q8Ahmb1JqDF6ztZBZvZrNpWoe2ohLiJLNf6ailhpXVdvt2mYMdLfA6nkmETKMQ4h6PbVZsew9BBnZ1MXMc7k4zqDddgCxQb1WCs6a1XhcTDSFhjRcEqdTirxIjEiFCAzHL/uPr0k2GdleuJ4gREQ9bKejfBsGWPejoTW6SnLZMZXrUJNFS7dhxMBUw0btFXSHc7+IusWQAk5DbKqRufJpVn4md0qsqgVp5y7mLIeR0v3KJ+yzntddMihSsrIGowf6SeERXDiw6In0Z9pvw2XcM83ympShUL34iAKiVbWzI4v4i7mN1ieFyKr+pufIE/l/Cns/kycneSYHOMIQD+FGEMGDXBBMI4ygQdVKP0R8xuByEs+/Cw7vS0zdbEtSS2cNrzkmhSkzQrQdC+T1oWt727tebRk6Yf+bwL7bQuA4muw/jOfnTFXBPu1Z5m0pH9ns2agEq/Ik8CyftP7PIuAQx3r2KT9ohyBm7c5tMarPTF6KHFP8w8iandB/Mujf18oX/KBkp4QORAwKcRPP/ymIgQZDfs1PUtJo9huswe+XV0ujFdr6r+C6WmvWGGb3m2PZ40KJ2ad1OB0CTMkPQ6FiROQbiqyrO7WNFkdFGi0WQ55BWzGNNDs6wJiFDTAf7WnzI0m7EUmlBegPYiFqXYPAjqUutcFUWiukl9w+5Org/rYMaTGTr2fXrSMzjKg9NSku+/lj4PikjYMASoYAKCbYSFjKeXlKmKo0yoANHiPuNYvZOBndIuOGcam9dnkGWzKAI6n9EaRh6CTPCmNCAUSmb/O7LlKoUTUaJPIkxtBb1JU8++0bYm3
Bv2SGHcnVLKpWmxRkIfiFOfdiNl4qdNbm++iEs9fT2YDP9dcNgPXrJe4b0P+60Gj0FtfqnEC5bL46zMOzb3FyH5193g32yeb9SEoUFUzxTarR18Gv3FhNaDqlRKhrqdQrgB38FN/hjWw4gHhG8g/pxz+TVWMuq6G8zdj9jdW0Htf9oUQQ1XGF6vIKju+dJeiO+0Jn0g5Lry9ocByr+2C3ZHQmZ3uPcgAJRmZgZ4HItOD+Mb7ySLOFgqf/78Sl27b+q827HCdCzSrA4N51Scwvz4nUDaSA9bgaZfqTW1a1co3pkRQJVx20NUWZ5H3dPQ4oLzlSQR28NirMkDPQJlTIyT3+7Tax+FuUU4SNe5Xayb5agqPlXfJ3y05ru3fiXbY0J0uEdE+clPJkPcs5rm9jp6f5G5bs0aNsJf854jHW/8MihrZqswB/wPKsvLSfn07slOwS5ioMZ2Mil8fPIkGp/XAU0cO0hFQDgPsscalkmfdWKL5QjmG/7Oge1kKfs0fbMkXSi/O30d7NDW3tDBKJqhRspHCxIAfs2lfF4raiELZTnhPoWBq17P6sF/fNXejq5+0gEZSmSn6n1er5mo3c5nX+5xw91bPidwI+hURhTt3BHfsfL7Qc73wgVtHmFIrVqih2YfuztVgZsucIkml91oUTuZlVXOClqKMgWPZ276uHpLI6elBaSeS901BV8U3oTtdD50mWrk9REbBqxuLwcQ6WtYAy7sgELz1igHACAbOOQtuEgEaLJ53gIjAnyUcWwrdmOOGr0B+Yv47gDTX0xN+r9Jrkz9XQtPPVUicMw5vk9S6J8SS3vyIvFV+3qwqzs/S0g4Oh3GDGKRAtvnsgBdrI+Stg3fW1kpyXPTcKEOk9AA/0mJBznUAZPHzpUsK1YVH9e84rfv1eAsrqSpyrkOU58jHI0gFo9l+0nS3PUaowHqdAIGBmo1bQgtPP6vACsjy5/a/edQNacrp87wlTxbj60RyRMKt4p8NlJNFqPRSjJ91lsgv5kYZWNtrDuA77RQTz8IvVPOgxvSx3qCj0/ktJoMyJC/Wg6WsuQG768ikq8SBkZ1BoQB1aLTUDWcJCg64nVWDoFL0oelpE82us3BAJn5wOX7jLOcVTXb3fT6UrhvAqc40xaHFNhTH1rtcvpWpbBl4tvXgAgeWmxqP1JEzhGXd2Sr34Beg+hKoq25dvidX6ThR8QPzppG/PjYzSJsuWPGlMnLw1ev1zn3vUdoWjtm8ZPVlIodpEq8e0Y5dyKi6/r1ee0O/78yYkKXr5WIz4u037aTmQ2Ow7iW8B+jKwzTB9xJkgpnnNxc6kF1Cij0zyrzjURxDu3dp7CKDxGu0RqJXdn9wFGHm0aLjF77h4Wy+UCn6PUJzxbooVP1VlMoaDUlrq11JSB/wlYSA6O1JCWpXsACaQ8OuBqBzuKbuBlCSVHlzMiuUlbGjbC+8gdw6yknPiaXuKQ1txIgv5RsXr6M0SaNUZpfkVB8smOIw2o63hp6iVKFHyBZm70peR7MAIHfQls/mBlkVKWoDS5gm5WuUyFLYVbUdYyuIWOp4MtGclJ6SIguPP5M5WfN9JjvZasS4MMhkU5NGZZyO5EHJEffJgXI5Ee+oWNYRNp/B7W6hCKFrilxxZNQTiiah2ew3iX9qpxjtOjKq25kEqEli6AgCIB49AMK6c7slZU+ef3FaR0X+vbKbxXV/RBI337LRQSd1qyvt+4gBgRKozL6TAkkI3utWZLpqKRQfTVNp33yWts8l2zwSYYX++XoCVsAVrSfK61eRETiV+JSSEiD5kxk+omaMBWheynEkBZbeoPiEBRKjWR1Onqt9caQeLmgTBtJgqfqCOBmWSEmGeF7zFTMjtu1EGF0vQPQCSMSt6gpttLoiOUsu8Dtm9UqBGiJefsEhwnLSk3mEVIOtdt8rCyGteOgQviRMYsXz7P7FvLWjTOfOE6puxOquC51gtZYPFQppOCdFwWhk3r52I/hpF82aoZoisYoO
IjVQxiAe0BKTw8xcm/SmI5p7wy9NHmUlb5gVmJmOvc50CvRkJiADUXZITSNO6EhXMG0Ua+j02/V78B7LiK4FYXeegAYcpzVutBuFkkF2RJbL1WGh4BeO3/PzPi8U866haQAaeZ9tkxKprNSS1D3Hl4XJp6AUQXDB4FPYWUpQIIy3ojjDLakIYRcEzUIrsKKFWClPHOjKQefmfXP+BZ9v3RizjPjWPr4ydTJ3NrCGZgDnsjE1efpWK1rTVkMpmceOb+HPt/WnBiE+Bf2YjEAu+zokJFcgAbJrgWsCC0kVz0u8draXGzXmbWcvkh/BBezejXsNMok7MOkN3/ueeKzcljifXiJtZhbp6+hCY6vIWBVM/p+6x2oZzn4eGkpzMb15bGT8/EpcK3Vo9P1vviuJgnR+PkTR9OqiHofQ6EQeU9NG/LgwzR658+1Eai3+Xz22QLAcRAk2VQMgB05YG7hTbhwGaOZVuJ6d3sGKVFfGKL6hC9DC1w//m+eK8knSaKWb3f+DZZXWN31UUOWLVR0pikCq2T0+X9aZ8blhXJs6l+VkbNoiLvm/tcioolt7Mel/HNYZAZZ1Q5E3MSMQgfEjuOF78AgHMmvJozmWut2y04ut/7arLeFEIGQt75pK5oRcLT+Wthe2/3mvwap7425TwNCIDuNreDNvGBHEz3biAuzyS9Zixbrvdn0KoadprQlt6jUVBtcMs+nRQ1HTVcM5Q12cTvvWAAembi/B3s1MtqrbBoK+zb5sej0Eygz8AaIr+Sy673WaPsRwxCyLklKTVa0EPlxiPIOXYbbWESjbS5KJooYetyXTp3OOoII32iH9mmMk0X8Xtx96JPRC9EPsf2nTRpxDuB9DT5EwY0DxmAQbpx/dtzfQsxD2ZKG6j2wALTPIOvm/L9G2sul01QfMsqps2NIq2CCPCtLse8qdVLITxGpfGsos7FLGTzTw6/mQ0vhGlIKYLgKMaBnOMlBAKutvHjwBZcGPisl6UNT5LIRy5r6tNvBBQogjr98iTL2ERRipbgE0KZiVqhYx4aeasNl7NqxnzlHGeRabW/aDLhxtLOS+zCm3X9Po7ePHBaYXDrc0eWUeU+GAHA5nw56iACVKxd7tyuGQfgLeOnvlZg0LDio8ApjqgZTGtfkCDfuWOXdCsf6jr7X+eyMWMn1X0QQztrbIPO9Zc2bEG3ubAjVQjN/mPZaerPof65/oe/IYe0Fqd9nFuZUIbLIIzSaug0qMkbDo+4VlvgLJtAmAgKdX3yBWHajorH7GqQOTo5X0gmENbn0JUS8kyfRU61+BDQmO3U4Ma9+382c7kqhNHuFd3WE2iafYW/DjSz17Tny2i31fXo9hLwL84pCqiuBQjvbxj5rQTGwsgDDdNW55KuAIcby0d8IFMIFsLDKqrvpVKPxwsRdpDqlkoQF6WsseUNIte2/yjn+o+oGx5+sJQsjQCYu+9msVN7SILOGuElcxYS5RyjEqkSyEoWF1xCaasEYGbGZ/yzpg4L/saKyv5jnQ065FQkcjW40WuYaVAxM/sLBfW1WM8mOATFOgwur/XjF+Bv5Cv0IvKeQLt1Gbw0EbQnOGvFsEoDudHauzWM0Io+figqs65yaD+F3EjRVu6NB82k0RAt2DGImYHZYCTl32Fmm5ChdAsGJtBZNuc7mbKSUAoBldDnm1Cc0nvg3/J1TTZxB7skYn/MS0rEC9x0LO7sd5H8kLvIM+X0TSwX4li506UQh3nuPgvQLjYCiDV13PJ/1bNuWrUeetxilSxDz5Kp7A1mviNLbZeGcm794OoeE24QyHEKA0V4FAakR/OIqhCH2MVZ7BCLStcUnRl10ctDXwDx9s1VG65us9v7mkhCbInL9pm2wOGItMu1ANgpnwQZ/4rlShBpzDPTG/4fjPB6owi58V4erJAPJu46xhNaN6QO8r1h0KH4bO6DbU10brVfG9O+7VgZ0r2B7OUZ1Fojw/e6Wsx4eX70ECJwkVGKOPHZBJSoiUEMRm
A5Aydzp57tpGs0+hyPIplZvUAdsG9vB/NQDhszTY9ovMuriKk7lWxoiFplZfqgD1Tgd7fQYgfa9ezXcxsTf2ge0520pEopdOvG1vQZarUNL46T2en8DJg6RBcI53fZ+n2es4Yq1oA2Vm7RLKhb/RYw8/XOCw3GQ+dPq9YR1S8AGrSuhSzKy23G19q8fBXnef2JW7mzUomwonKSIy5Ee5CndoCwJA0dRQvKAi9pRqkyp+hAACTbQBGKvJm/aUvtGsABkmSKh2YAzpB7JjU1zfWitQNy/6r4ERf9u18WTx5dr7BHsd2l9P2PEjpo7k1dO+ViK9X91vIZWFpDxfhSfh5q47c4fFUefKbQdFksFzfPx0AZ/HS5e+o7SVs1UKzO0FXi4V0kG7zjAJBCOyt9dJhC3xbLYEkx7x5fySIOCPzK7eFpn3Z/Y9g3e+mMaYP2pDH2CRVjZWIdxKrvdNY84+y4zYbuLv+bPKdor+jxhK9uJoAemNW4L0tcJHG8DdNu1izNmIRuHVxQN419AVQMSqzoU6jtH4JLVy9BW6TIOJONnrzwVcvt0O3rAb6CefgDHQLq6+WndWa0VLlk68/venjDe54pVXgMqLdzMjFKk/GG8jrZLRq9l9M2QDbHh7DjUGtDf02P80TDo/78bxH5AOt56mmurAWdZSgBtjnWUxBfrrP7ygB5TEoGU4Di1yet7/TLOWAXC8php2UYFt9ujRKH/RaQkWw7e1pj2us8O7kZI00+Eb/vvrmBdVsyBEr7iuaMB6sxCkuYW+gWC1hWnIWaXK9Kf+4zV0vI9k+RsT2wPzJbEKxYJt18HSgUWCeQWnRqHQu7w/rh5P+Qh/a+3D6om2LmsnGG8roMHcag3YP1z0jaOxMoA97B5VYS+ROGsNkQVPyZdaOo9B0p5Kjr87EA7Vm59JEK/b4BUQVoQouIz1FcnABZdyU8i6bfc0jMIWXzjgGggdzM5dehUFB3VX7dAWpGqB0fJcqeDRBDrXQa1qTRGC4xYX0fwnUgrewb7YeIhuE4eDMbwKLioNYdx5vXhFpp02pb06bP+xQiFaZsb81HUdmgw9+Han0IEKRSqkMKjWV3LFxBH8FA49QXEXV6a0zlxoZxCdY3mnIFRSAo+0N1tDw45dHVf650YuF+1dxHH1Er3STEBFGgEUmqFehilFFTxl0SuYjDD4KRk6Kj4smrXL3mDPXiOrR3239fr22ExwWJ6v3fds56LuB1p5zefN2Hy53tYynzn7ub9yjrIaldDeBpvu/JV8k4cwZKeiET7WFKKjP59vTcz0FcjZSW5ADdvLh+HMn90tQKP5lfUspX/lNMAvCJgv16yNsjTfTsefYLboKe4WAC2pGSEUCxiyw/UR7SF0VWHJi3j0v0trp1Y0vuZ6xn8a8Azrx3gI6SS/iNuiBA6SOA045UFtwGxsuzCx3viCFIKUmujhAtv/cgKqgp1DfYDILUvISr+Jn4I60fDHYMw4A4W26CWTyf1FdaTdm3qa0/i44pBgZI0YL6CQ0hRctnQd7NNC15Dm+uDIX8SFgBqZ7T6AcqD+2HQ7ctN4hgDo3TsvnDAC4uYBxLMB0baY5tSKbSSOIggGTNKNXZ8LzC1xhV8hJ7UIAY4KDDSad6M3N/QdHVsuIsmq3y/Bsnz1k4y2C1LeKilxX1ifSeOdp5PcOuj7EuBNSrsu2LuwpbEyfi0rJMf9MXXEcn2HLLZS/oJFkj7fa80vwgBh7afy6WtVTrcNorBG7yuj6oqcpF7Zgq2OESTfzNR91hqN1YafLvVe9QHjohBPUBT9nMUgie8cB0jOFWcgb9TlfxHwy3C2Me/z2uhV7qJ1KmNml6c8MNeUZHoYN/CkMwYDogUyEhtEgaDnC/qojxr1QgjNVA5lYZEoOTboAFyC67cNfJvDRqDXfN2gIPZXmrsDpcxI1uvD2fIU1OKrAVQP+xcW3xb+4lfV3xCe9ZS2ZxxtxDT47HmeNUl5qXTV81hc1xjx55wajlU2
K9ozNaQS6ImSLfzU2lJB8p4ZimnULPf5j0jbq1zXdAHEkOKO2LCELBWNeBKqYdV8R1+zd38Qnh6ilq3h9/rkcwlXPwAUvgaKjk9iux7AtRFvTR+5JJwMCJzv4cDeqnmqtZt2O3/z6coz6zveWxlziO2PY0MS7ZSfYGvl4WqA9jQpinujjdE6Lh7BKl1dCWX3YfqffozfH3DjjIgAxyl2fh5NYUAJVYxge4x171maX9G4XyQAsbYZXyrIL3KdL4m9rC5R14OH9nQj9SpXBLp8p/KcewuMCXpV55OHW+udLSnEqFO1A0/wcOxWr4342t9id6aZvRvPKbs2blTxS0CZg5tDc8Q4s/HwTeQOnTUsG8pVRC4DF/r9156Sl6yyemRUfYq25xAgwcs+TYHbK2lN17CIYpVk2R5TBCUFrsqW7Bdd+7IdcWECI0heY8Z2JcQIzJVNOwGkJK6yrue1HV5RJTTCWsG6CeZUF1/TcwMnDvAennA9XO8x114tqDuDOHSqwzqBEwbVgJl5NcAHDtFcSa/C2K9fg0D7Mo1mJHflQpCpNhNLJdV/GQ1eZxXz5RM2o6kf5b05C9VVSVXluQMXnPbFFJo0fuMpvzuv3eulA/L5Cv1mkxG2j7EoRIYXXeVd0f8cm7FdQWiC7Tl5gDxr4e7yht/nRuR5FyeztnwhrqFWxQ1eroXdJVC+MUyCTG42zpWO4E=","v":"m2JSqrBtIXdQ4Vq6LTC1kw==","m":"BsRioKDdMi3oPdq2Klc/Bw=="}
if __name__ == '__main__':
    # Decryption experiments kept for reference: they dump the captured
    # payloads 'req'/'vv' with the default key and with an alternate key.
    # for n, v in req.items():
    #     print n, "=", CCIQ_AES().decrypt(v)
    # print '----------------------------------'
    # for n in ["v", "m"]: #, "c"
    #     o = CCIQ_AES().decrypt(vv[n])
    #     print n, "=", len(o), o
    # print '----------------------------------'
    # o = CCIQ_AES("BB1856A312580D41256311147089E0CC").decrypt(vv['c'])
    # print len(o), o
    # Build a company-search request body (paging, ordering, search term)
    # exactly as the mobile client would send it.
    encryptedJson = {
        "pagesize" : "20",
        "page" : "1",
        "od_orderBy" : "0",
        "sh_searchType" : "一般搜索",
        "od_statusFilter" : "0",
        "v1" : "QZOrgV004",
        "oc_name" : "腾讯",
        "sh_u_uid" : "",
        "sh_u_name" : ""
    }
    # Client metadata that accompanies every request.
    extJson = {
        "cl_screenSize" : "640x960",
        "cl_cookieId" : "16923697-D73E-485A-BDCF-68FAD456AC02",
        "Org_iOS_Version" : "2.0.1"
    }
    # Both sections are AES-encrypted with the app's fixed default key.
    # NOTE(review): __str__() produces a Python repr, not JSON — presumably
    # that is what the server expects; confirm against captured traffic.
    param = {"encryptedJson": CCIQ_AES().encrypt(encryptedJson.__str__()), "extJson": CCIQ_AES().encrypt(extJson.__str__())}
print param | [
"[email protected]"
]
| |
9d4365826dd9af614ce9b518bc6de82921605311 | 621a865f772ccbab32471fb388e20b620ebba6b0 | /compile/gameserver/data/scripts/quests/378_MagnificentFeast/__init__.py | b971073fd517a5f3915abefa119d21a16ab52a08 | []
| no_license | BloodyDawn/Scions_of_Destiny | 5257de035cdb7fe5ef92bc991119b464cba6790c | e03ef8117b57a1188ba80a381faff6f2e97d6c41 | refs/heads/master | 2021-01-12T00:02:47.744333 | 2017-04-24T06:30:39 | 2017-04-24T06:30:39 | 78,662,280 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,714 | py | # Magnificent Feast - v0.1 by DrLecter (adapted for L2JLisvus by roko91)
import sys
from net.sf.l2j.gameserver.model.quest import State
from net.sf.l2j.gameserver.model.quest import QuestState
from net.sf.l2j.gameserver.model.quest.jython import QuestJython as JQuest
# Quest name used for registration/lookup.
qn = "378_MagnificentFeast"
# NPC: Ranspo, quest giver and turn-in NPC.
RANSPO = 7594
# ITEMS handed in during the quest: three wine qualities, the music score,
# and the recipe items.
WINE_15,WINE_30,WINE_60 = range(5956,5959)
SCORE = 4421
RP_SALAD,RP_SAUCE,RP_STEAK = range(1455,1458)
RP_DESSERT = 5959
# REWARDS: keyed by the accumulated 'score' value (wine adds 1/2/4 and
# recipe adds 8/16/32 in onEvent below, so the key encodes the exact
# combination chosen); value is [item_id, quantity, bonus_adena].
REWARDS={
 9:[847,1,5700],
 10:[846,2,0],
 12:[909,1,25400],
 17:[846,2,1200],
 18:[879,1,6900],
 20:[890,2,8500],
 33:[879,1,8100],
 34:[910,1,0],
 36:[848,1,2200],
 }
class Quest (JQuest) :
 """Quest 378 'Magnificent Feast' (NPC Ranspo).

 The player's choices are accumulated in the quest variable 'score':
 the wine handed in adds 1/2/4 and the recipe adds 8/16/32, so the
 final reward (see REWARDS) depends on the exact combination chosen.
 """
 def __init__(self,id,name,descr): JQuest.__init__(self,id,name,descr)
 def onEvent (self,event,st) :
   """Handle a dialog reply: consume the handed-in item, advance 'cond'
   and fold the player's choice into 'score'."""
   htmltext = event
   score = st.getInt("score")
   cond = st.getInt("cond")
   # Quest accepted.
   if event == "7594-2.htm" and cond == 0 :
     st.set("cond","1")
     st.setState(STARTED)
     st.playSound("ItemSound.quest_accept")
   # Stage 1: hand in one of the three wines (score += 1 / 2 / 4).
   elif event == "7594-4a.htm" :
     if st.getQuestItemsCount(WINE_15) and cond == 1 :
        st.takeItems(WINE_15,1)
        st.set("cond","2")
        st.set("score",str(score+1))
     else :
        htmltext = "7594-4.htm"
   elif event == "7594-4b.htm" :
     if st.getQuestItemsCount(WINE_30) and cond == 1 :
        st.takeItems(WINE_30,1)
        st.set("cond","2")
        st.set("score",str(score+2))
     else :
        htmltext = "7594-4.htm"
   elif event == "7594-4c.htm" :
     if st.getQuestItemsCount(WINE_60) and cond == 1 :
        st.takeItems(WINE_60,1)
        st.set("cond","2")
        st.set("score",str(score+4))
     else :
        htmltext = "7594-4.htm"
   # Stage 2: hand in the music score.
   elif event == "7594-6.htm" :
     if st.getQuestItemsCount(SCORE) and cond == 2 :
        st.takeItems(SCORE,1)
        st.set("cond","3")
     else :
        htmltext = "7594-5.htm"
   # Stage 3: hand in one of the three recipes (score += 8 / 16 / 32).
   elif event == "7594-8a.htm" :
     if st.getQuestItemsCount(RP_SALAD) and cond == 3 :
        st.takeItems(RP_SALAD,1)
        st.set("cond","4")
        st.set("score",str(score+8))
     else :
        htmltext = "7594-8.htm"
   elif event == "7594-8b.htm" :
     if st.getQuestItemsCount(RP_SAUCE) and cond == 3 :
        st.takeItems(RP_SAUCE,1)
        st.set("cond","4")
        st.set("score",str(score+16))
     else :
        htmltext = "7594-8.htm"
   elif event == "7594-8c.htm" :
     if st.getQuestItemsCount(RP_STEAK) and cond == 3 :
        st.takeItems(RP_STEAK,1)
        st.set("cond","4")
        st.set("score",str(score+32))
     else :
        htmltext = "7594-8.htm"
   return htmltext
 def onTalk (self,npc,st):
   """Pick the dialog page shown when talking to Ranspo, by stage 'cond'."""
   htmltext = "no-quest.htm"
   npcId = npc.getNpcId()
   # NOTE(review): 'id' shadows the builtin and is unused below.
   id = st.getState()
   cond=st.getInt("cond")
   # Not started yet: requires level 20.
   if cond == 0 :
     if st.getPlayer().getLevel() >= 20 :
       htmltext = "7594-1.htm"
     else:
       htmltext = "7594-0.htm"
       st.exitQuest(1)
   elif cond == 1 :
     htmltext = "7594-3.htm"
   elif cond == 2 :
     if st.getQuestItemsCount(SCORE) :
       htmltext = "7594-5a.htm"
     else :
       htmltext = "7594-5.htm"
   elif cond == 3 :
     htmltext = "7594-7.htm"
   # Final turn-in: dessert recipe for a score-dependent reward.
   elif cond == 4 :
     score = st.getInt("score")
     if st.getQuestItemsCount(RP_DESSERT) and score in REWARDS.keys() :
       item,qty,adena=REWARDS[score]
       st.giveItems(item,qty)
       if adena :
          st.giveItems(57,adena)
       st.takeItems(RP_DESSERT,1)
       st.playSound("ItemSound.quest_finish")
       htmltext = "7594-10.htm"
       st.exitQuest(1)
     else :
       htmltext = "7594-9.htm"
   return htmltext
# Register the quest and its states, and wire Ranspo as both the NPC that
# starts the quest and the NPC the player talks to during it.
QUEST = Quest(378,qn,"Magnificent Feast")
CREATED = State('Start', QUEST)
STARTED = State('Started', QUEST)
QUEST.setInitialState(CREATED)
QUEST.addStartNpc(RANSPO)
QUEST.addTalkId(RANSPO)
"[email protected]"
]
| |
ad1bf68a660e4ab9ea3514eb7a9d1c009dd6e56d | e6dab5aa1754ff13755a1f74a28a201681ab7e1c | /.parts/lib/django-1.3/django/contrib/gis/gdal/base.py | 642a7c541e681e266343d47f00fbee2db2507210 | []
| no_license | ronkagan/Euler_1 | 67679203a9510147320f7c6513eefd391630703e | 022633cc298475c4f3fd0c6e2bde4f4728713995 | refs/heads/master | 2021-01-06T20:45:52.901025 | 2014-09-06T22:34:16 | 2014-09-06T22:34:16 | 23,744,842 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 97 | py | /home/action/.parts/packages/googleappengine/1.9.4/lib/django-1.3/django/contrib/gis/gdal/base.py | [
"[email protected]"
]
| |
551fbca4a6614d91516633237c0818a95fb45e7d | 986630b72263dc5db7acb2d617d989111bc23649 | /urbanizze/map/migrations/0006_terreno_setor.py | 29d7b9ccb761a532bcf7f9d2fab2d7b746bb9e42 | []
| no_license | marcellobenigno/urbanizze | afbef6d45077ed93e0edd3abf21d167561659914 | ca11fa55846030a75e8d4815e13dcd1df89ff421 | refs/heads/master | 2022-08-13T03:55:32.269185 | 2019-08-08T10:33:42 | 2019-08-08T10:33:42 | 201,235,674 | 1 | 2 | null | 2021-12-13T20:07:21 | 2019-08-08T10:31:30 | JavaScript | UTF-8 | Python | false | false | 510 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-20 01:55
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Runs on top of the migration that introduced the Setor model.
    dependencies = [
        ('map', '0005_setor'),
    ]
    # Add the required 'setor' CharField to Terreno.  Existing rows are
    # backfilled with 'Setor 04'; preserve_default=False means that default
    # is used only for this migration and is not kept on the model field.
    operations = [
        migrations.AddField(
            model_name='terreno',
            name='setor',
            field=models.CharField(default='Setor 04', max_length=50, verbose_name='setor'),
            preserve_default=False,
        ),
    ]
| [
"[email protected]"
]
| |
56104be7b888315ad95f9167b9242fd41724b8e4 | c93c88d7e45cfcf05822c701a1c1dafee8153347 | /projects/cs102/circle_of_squares.py | fd97d939c1fb1f0196448c0b8b062ffd70ff92aa | []
| no_license | timisenman/python | b7c09f6377e9a28787fce7b0ade6cab499691524 | 9ea6b6605bd78b11b981ca26a4b9b43abe449713 | refs/heads/master | 2020-05-21T22:46:50.502488 | 2017-01-18T22:54:23 | 2017-01-18T22:54:23 | 63,548,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 983 | py | #A Circle of Squares
#Either creates a draw square function,
#and rotate it 10 degrees 36 times.
import turtle
def draw_square(a_turtle):
    """Trace one 100-unit square with *a_turtle*, turning clockwise."""
    for _ in range(4):
        a_turtle.forward(100)
        a_turtle.right(90)
def draw_art():
    """Render a 'circle of squares' on a blue background.

    Draws a white 100-unit square, rotates the turtle 10 degrees, and
    repeats 36 times (36 * 10 = 360), so the squares fan out into a full
    circle.  The window stays open until it is clicked.
    """
    window = turtle.Screen()
    window.bgcolor("blue")

    brad = turtle.Turtle()
    brad.shape("turtle")
    brad.color("white")
    brad.speed(10)
    for _ in range(36):
        draw_square(brad)
        brad.right(10)
    # Kept from the original: a final net 270-degree turn of the heading.
    brad.right(630)
    print("Square done")

    window.exitonclick()
draw_art()
| [
"[email protected]"
]
| |
5d02379f98ac637617a3f89c3ac250ecf7787fbd | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/knapsack_20200708153620.py | dd426fbe9706b3c9e39d8c8e2e1af773190273a4 | []
| no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 656 | py | def Knap(a,b,w):
# declare an empty dictionary
newArr = []
for i,j in zip(a,b):
smallArr = []
smallArr.append(i)
smallArr.append(j)
newArr.append(smallArr)
i = 0
# at position 0 is the weight and at position 1 is the value
# goal is to find highest value but not greater than W
while i < len(newArr):
if (newArr[i][0] + newArr[i+1][0]) <= w:
value = newArr[i][1] + newArr[i+1][1]
print('value',value ,'>',)
if value > newArr[i][1] + newArr[i+1][1]:
print(value)
i +=1
Knap([10,20,30],[60,100,120],220) | [
"[email protected]"
]
| |
27f38e0cf34c7a458c77862ed021f69b1081d219 | 9adea4131921ae4b8c94e6e20c8dcd5efa8f5f4a | /src/group_by.py | 585b07ad5374f384b3db8715b75f8af318fac5fe | [
"BSD-3-Clause"
]
| permissive | ferhatcicek/minifold | 33f447133601c299c9ddf6e7bfaa888f43c999fd | 00c5e912e18a713b0496bcb869f5f6af4f3d40c9 | refs/heads/master | 2022-12-15T05:58:04.541226 | 2020-09-25T16:55:52 | 2020-09-25T16:55:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,689 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# This file is part of the minifold project.
# https://github.com/nokia/minifold
__author__ = "Marc-Olivier Buob"
__maintainer__ = "Marc-Olivier Buob"
__email__ = "[email protected]"
__copyright__ = "Copyright (C) 2018, Nokia"
__license__ = "BSD-3"
from .connector import Connector
from .hash import to_hashable
from .query import Query, ACTION_READ
from .values_from_dict import ValuesFromDictFonctor
def group_by_impl(fonctor :ValuesFromDictFonctor, entries :list) -> dict:
    """Group *entries* by the key values extracted by *fonctor*.

    Args:
        fonctor: Functor pulling the grouping values out of each entry.
        entries: The entries (dicts) to group.

    Returns:
        A dict mapping each hashable key to the list of matching entries,
        preserving their original relative order.
    """
    ret = dict()
    for entry in entries:
        key = fonctor(entry)
        # A single-attribute key is unwrapped so callers index by the bare value.
        if len(key) == 1:
            (key,) = key
        key = to_hashable(key)
        # setdefault replaces the former "if key not in ret.keys()" guard:
        # one lookup instead of a membership test plus an index.
        ret.setdefault(key, []).append(entry)
    return ret
def group_by(attributes :list, entries :list) -> dict:
    """Group *entries* by the values of the given *attributes*.

    Convenience wrapper around group_by_impl.  Note the return annotation
    is 'dict' (key -> list of entries); it was previously mis-annotated
    as 'list'.
    """
    fonctor = ValuesFromDictFonctor(attributes)
    return group_by_impl(fonctor, entries)
class GroupByConnector(Connector):
    """Minifold connector that wraps a child connector and groups the
    entries it returns by a fixed set of attributes (SQL-like GROUP BY)."""
    def __init__(self, attributes :list, child):
        # attributes: attribute names used to build the group keys.
        # child: the wrapped Connector that is queried for entries.
        super().__init__()
        self.m_fonctor = ValuesFromDictFonctor(attributes)
        self.m_child = child
    @property
    def child(self):
        """Return the wrapped child connector."""
        return self.m_child
    def attributes(self, object :str):
        # NOTE(review): 'object' shadows the builtin and is unused — the
        # grouping attributes are returned regardless of the object name.
        return set(self.m_fonctor.attributes)
    def query(self, q :Query) -> list:
        """Run *q* against the child connector and answer with its results
        grouped by this connector's attributes."""
        super().query(q)
        return self.answer(
            q,
            group_by_impl(
                self.m_fonctor,
                self.m_child.query(q)
            )
        )
    def __str__(self) -> str:
        return "GROUP BY %s" % ", ".join(self.m_fonctor.attributes)
| [
"[email protected]"
]
| |
d202ffe04150297400e04709fcc09d24982baf93 | 8d402df39c18eba7e1c86c762f205c944357c5df | /www/gallery/sidebar.py | 40830ea74ad141050c9cf4d414e8d9b5a387a0f1 | [
"BSD-3-Clause"
]
| permissive | brython-dev/brython | 87cc023e25550dec9ce459ba68774189f33712b6 | b33958bff0e8c7a280babc30232dc389a2500a7a | refs/heads/master | 2023-09-04T04:49:29.156209 | 2023-09-01T06:36:08 | 2023-09-01T06:36:08 | 24,046,239 | 6,569 | 625 | BSD-3-Clause | 2023-07-05T06:13:32 | 2014-09-15T06:58:21 | Python | UTF-8 | Python | false | false | 236 | py | x=0
from browser.html import A,B,BR
print('A',A)
from os import *
def menu(title,links):
# links is a list of tuples (name,href)
res = B(title)
for _name,href in links:
res += BR()+A(_name,href=href)
return res
| [
"[email protected]"
]
| |
53e62f8d8703c493130f1759cf7ee0c205c4f15c | d396aa3495407069935ebab3faf870ad87025014 | /python-leetcode/剑指offer/tree/剑指 Offer 55 - I. 二叉树的深度.py | 36b475c95dd52bc934243fe9103e8b69f9f96341 | []
| no_license | qwjaskzxl/For-OJ | 4d3b2f873cf11c5f9ddb74c4b91b4b78cccc3afa | 1fe6e5099d95fff2dcfc503951ff6e311202919b | refs/heads/master | 2021-12-25T21:17:44.232567 | 2021-08-05T14:01:59 | 2021-08-05T14:01:59 | 175,966,016 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,156 | py | pass
'''
输入一棵二叉树的根节点,求该树的深度。从根节点到叶节点依次经过的节点(含根、叶节点)形成树的一条路径,最长路径的长度为树的深度。
例如:
给定二叉树 [3,9,20,null,null,15,7],
3
/ \
9 20
/ \
15 7
返回它的最大深度 3 。
提示:
节点总数 <= 10000
'''
# Definition for a binary tree node.
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
def maxDepth(self, root: TreeNode) -> int:
if not root: return 0
MAX = 0
def dfs(node, depth):
nonlocal MAX
if not node:
return
if depth > MAX:
MAX = depth
if node.left: dfs(node.left, depth + 1)
if node.right: dfs(node.right, depth + 1)
dfs(root, 1)
return MAX
class Solution_666:
def maxDepth(self, root: TreeNode) -> int:
if not root: return 0
return max(self.maxDepth(root.left),
self.maxDepth(root.right)) \
+ 1
| [
"[email protected]"
]
| |
6950bb2514c10a37f69d8268d2a1bcae8b0f65e0 | f0257428fed2a5f10950ee52e88a0f6811120323 | /book_study/book_chapter4/tuple.py | 8b984479ddcedf9425ce141e5f6f5ccf6b3c92c8 | []
| no_license | tata-LY/python | 454d42cc8f6db9a1450966aba4af6894e1b59b78 | 55d13b7f61cbb87ff3f272f596cd5b8c53b807c5 | refs/heads/main | 2023-04-08T19:31:57.945506 | 2021-04-19T05:39:17 | 2021-04-19T05:39:17 | 328,880,464 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 284 | py | #!/usr/bin/env python3
# _*_coding:utf-8_*_
# by liuyang
tuple1 = (1,100)
print(tuple1)
print(tuple1[0])
#tuple1[0] = 10 #元组不允许修改
#print(tuple1)
for i in tuple1:
print(i)
tuple1 = ('liuyang', 'zhangjuan') # 不允许修改,可以重新定义
print(tuple1)
| [
"[email protected]"
]
| |
22f0a214ee1dca65b2464ec11b94e429d66e79cc | 78eb766321c7ed3236fb87bb6ac8547c99d0d1a4 | /oneYou2/pages/migrations/0001_initial.py | 8aaa7b7e731d48ae75dd6fe64a29f8b282817fdc | []
| no_license | danmorley/nhs-example | 9d7be76116ed962248e1f7e287355a6870534f5d | ae4b5f395d3518ee17ef89348ed756c817e0c08c | refs/heads/master | 2022-12-13T02:13:18.484448 | 2019-02-28T11:05:31 | 2019-02-28T11:05:31 | 203,353,840 | 1 | 0 | null | 2022-12-07T04:29:46 | 2019-08-20T10:30:15 | Python | UTF-8 | Python | false | false | 1,087 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2018-02-06 14:59
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import wagtail.core.blocks
import wagtail.core.fields
import wagtail.images.blocks
class Migration(migrations.Migration):
initial = True
dependencies = [
('wagtailcore', '0040_page_draft_title'),
]
operations = [
migrations.CreateModel(
name='OneYou2Page',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('body', wagtail.core.fields.StreamField((('heading', wagtail.core.blocks.CharBlock(classname='full title')), ('paragraph', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock())))),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
]
| [
"[email protected]"
]
| |
97e25af591a27b65414cb6af1325caf83381bf30 | ac7435b0b3faa6b6cf51d0d6b43984b77b70a37c | /nova/tests/unit/network/test_neutronv2.py | 7424cbb63fba3e40671c004d21b540ff5b69b930 | [
"Apache-2.0"
]
| permissive | gokrokvertskhov/nova-mesos-driver | 04688cd51cad9790cf5460b44ba527b51080760d | fdb9c8468f6a8680c19095a81bf77884ae61e170 | refs/heads/master | 2021-01-10T10:51:07.096729 | 2016-03-25T01:45:10 | 2016-03-25T01:45:10 | 54,685,199 | 0 | 1 | Apache-2.0 | 2020-07-24T01:00:58 | 2016-03-25T01:22:06 | Python | UTF-8 | Python | false | false | 181,927 | py | # Copyright 2012 OpenStack Foundation
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import collections
import contextlib
import copy
import uuid
import mock
from mox3 import mox
from neutronclient.common import exceptions
from neutronclient.v2_0 import client
from oslo_config import cfg
from oslo_serialization import jsonutils
from oslo_utils import timeutils
import six
from six.moves import range
from nova.compute import flavors
from nova import context
from nova import exception
from nova.network import model
from nova.network.neutronv2 import api as neutronapi
from nova.network.neutronv2 import constants
from nova import objects
from nova.openstack.common import policy as common_policy
from nova.pci import manager as pci_manager
from nova.pci import whitelist as pci_whitelist
from nova import policy
from nova import test
from nova.tests.unit import fake_instance
CONF = cfg.CONF
# NOTE: Neutron client raises Exception which is discouraged by HACKING.
# We set this variable here and use it for assertions below to avoid
# the hacking checks until we can make neutron client throw a custom
# exception class instead.
NEUTRON_CLIENT_EXCEPTION = Exception
fake_info_cache = {
'created_at': None,
'updated_at': None,
'deleted_at': None,
'deleted': False,
'instance_uuid': 'fake-uuid',
'network_info': '[]',
}
class MyComparator(mox.Comparator):
def __init__(self, lhs):
self.lhs = lhs
def _com_dict(self, lhs, rhs):
if len(lhs) != len(rhs):
return False
for key, value in six.iteritems(lhs):
if key not in rhs:
return False
rhs_value = rhs[key]
if not self._com(value, rhs_value):
return False
return True
def _com_list(self, lhs, rhs):
if len(lhs) != len(rhs):
return False
for lhs_value in lhs:
if lhs_value not in rhs:
return False
return True
def _com(self, lhs, rhs):
if lhs is None:
return rhs is None
if isinstance(lhs, dict):
if not isinstance(rhs, dict):
return False
return self._com_dict(lhs, rhs)
if isinstance(lhs, list):
if not isinstance(rhs, list):
return False
return self._com_list(lhs, rhs)
if isinstance(lhs, tuple):
if not isinstance(rhs, tuple):
return False
return self._com_list(lhs, rhs)
return lhs == rhs
def equals(self, rhs):
return self._com(self.lhs, rhs)
def __repr__(self):
return str(self.lhs)
class TestNeutronClient(test.NoDBTestCase):
def setUp(self):
super(TestNeutronClient, self).setUp()
neutronapi.reset_state()
def test_withtoken(self):
self.flags(url='http://anyhost/', group='neutron')
self.flags(timeout=30, group='neutron')
my_context = context.RequestContext('userid',
'my_tenantid',
auth_token='token')
cl = neutronapi.get_client(my_context)
self.assertEqual(CONF.neutron.url, cl.httpclient.endpoint_override)
self.assertEqual(my_context.auth_token,
cl.httpclient.auth.auth_token)
self.assertEqual(CONF.neutron.timeout, cl.httpclient.session.timeout)
def test_withouttoken(self):
my_context = context.RequestContext('userid', 'my_tenantid')
self.assertRaises(exceptions.Unauthorized,
neutronapi.get_client,
my_context)
def test_withtoken_context_is_admin(self):
self.flags(url='http://anyhost/', group='neutron')
self.flags(timeout=30, group='neutron')
my_context = context.RequestContext('userid',
'my_tenantid',
auth_token='token',
is_admin=True)
cl = neutronapi.get_client(my_context)
self.assertEqual(CONF.neutron.url, cl.httpclient.endpoint_override)
self.assertEqual(my_context.auth_token,
cl.httpclient.auth.auth_token)
self.assertEqual(CONF.neutron.timeout, cl.httpclient.session.timeout)
def test_withouttoken_keystone_connection_error(self):
self.flags(auth_strategy='keystone', group='neutron')
self.flags(url='http://anyhost/', group='neutron')
my_context = context.RequestContext('userid', 'my_tenantid')
self.assertRaises(NEUTRON_CLIENT_EXCEPTION,
neutronapi.get_client,
my_context)
@mock.patch('nova.network.neutronv2.api._ADMIN_AUTH')
@mock.patch.object(client.Client, "list_networks", new=mock.Mock())
def test_reuse_admin_token(self, m):
self.flags(url='http://anyhost/', group='neutron')
my_context = context.RequestContext('userid', 'my_tenantid',
auth_token='token')
tokens = ['new_token2', 'new_token1']
def token_vals(*args, **kwargs):
return tokens.pop()
m.get_token.side_effect = token_vals
client1 = neutronapi.get_client(my_context, True)
client1.list_networks(retrieve_all=False)
self.assertEqual('new_token1', client1.httpclient.auth.get_token(None))
client1 = neutronapi.get_client(my_context, True)
client1.list_networks(retrieve_all=False)
self.assertEqual('new_token2', client1.httpclient.auth.get_token(None))
class TestNeutronv2Base(test.TestCase):
def setUp(self):
super(TestNeutronv2Base, self).setUp()
self.context = context.RequestContext('userid', 'my_tenantid')
setattr(self.context,
'auth_token',
'bff4a5a6b9eb4ea2a6efec6eefb77936')
self.tenant_id = '9d049e4b60b64716978ab415e6fbd5c0'
self.instance = {'project_id': self.tenant_id,
'uuid': str(uuid.uuid4()),
'display_name': 'test_instance',
'availability_zone': 'nova',
'host': 'some_host',
'info_cache': {'network_info': []},
'security_groups': []}
self.instance2 = {'project_id': self.tenant_id,
'uuid': str(uuid.uuid4()),
'display_name': 'test_instance2',
'availability_zone': 'nova',
'info_cache': {'network_info': []},
'security_groups': []}
self.nets1 = [{'id': 'my_netid1',
'name': 'my_netname1',
'subnets': ['mysubnid1'],
'tenant_id': 'my_tenantid'}]
self.nets2 = []
self.nets2.append(self.nets1[0])
self.nets2.append({'id': 'my_netid2',
'name': 'my_netname2',
'subnets': ['mysubnid2'],
'tenant_id': 'my_tenantid'})
self.nets3 = self.nets2 + [{'id': 'my_netid3',
'name': 'my_netname3',
'tenant_id': 'my_tenantid'}]
self.nets4 = [{'id': 'his_netid4',
'name': 'his_netname4',
'tenant_id': 'his_tenantid'}]
# A network request with external networks
self.nets5 = self.nets1 + [{'id': 'the-external-one',
'name': 'out-of-this-world',
'router:external': True,
'tenant_id': 'should-be-an-admin'}]
# A network request with a duplicate
self.nets6 = []
self.nets6.append(self.nets1[0])
self.nets6.append(self.nets1[0])
# A network request with a combo
self.nets7 = []
self.nets7.append(self.nets2[1])
self.nets7.append(self.nets1[0])
self.nets7.append(self.nets2[1])
self.nets7.append(self.nets1[0])
# A network request with only external network
self.nets8 = [self.nets5[1]]
# An empty network
self.nets9 = []
# A network that is both shared and external
self.nets10 = [{'id': 'net_id', 'name': 'net_name',
'router:external': True, 'shared': True}]
self.nets = [self.nets1, self.nets2, self.nets3, self.nets4,
self.nets5, self.nets6, self.nets7, self.nets8,
self.nets9, self.nets10]
self.port_address = '10.0.1.2'
self.port_data1 = [{'network_id': 'my_netid1',
'device_id': self.instance2['uuid'],
'tenant_id': self.tenant_id,
'device_owner': 'compute:nova',
'id': 'my_portid1',
'binding:vnic_type': model.VNIC_TYPE_NORMAL,
'status': 'DOWN',
'admin_state_up': True,
'fixed_ips': [{'ip_address': self.port_address,
'subnet_id': 'my_subid1'}],
'mac_address': 'my_mac1', }]
self.float_data1 = [{'port_id': 'my_portid1',
'fixed_ip_address': self.port_address,
'floating_ip_address': '172.0.1.2'}]
self.dhcp_port_data1 = [{'fixed_ips': [{'ip_address': '10.0.1.9',
'subnet_id': 'my_subid1'}],
'status': 'ACTIVE',
'admin_state_up': True}]
self.port_address2 = '10.0.2.2'
self.port_data2 = []
self.port_data2.append(self.port_data1[0])
self.port_data2.append({'network_id': 'my_netid2',
'device_id': self.instance['uuid'],
'tenant_id': self.tenant_id,
'admin_state_up': True,
'status': 'ACTIVE',
'device_owner': 'compute:nova',
'id': 'my_portid2',
'binding:vnic_type': model.VNIC_TYPE_NORMAL,
'fixed_ips':
[{'ip_address': self.port_address2,
'subnet_id': 'my_subid2'}],
'mac_address': 'my_mac2', })
self.float_data2 = []
self.float_data2.append(self.float_data1[0])
self.float_data2.append({'port_id': 'my_portid2',
'fixed_ip_address': '10.0.2.2',
'floating_ip_address': '172.0.2.2'})
self.port_data3 = [{'network_id': 'my_netid1',
'device_id': 'device_id3',
'tenant_id': self.tenant_id,
'status': 'DOWN',
'admin_state_up': True,
'device_owner': 'compute:nova',
'id': 'my_portid3',
'binding:vnic_type': model.VNIC_TYPE_NORMAL,
'fixed_ips': [], # no fixed ip
'mac_address': 'my_mac3', }]
self.subnet_data1 = [{'id': 'my_subid1',
'cidr': '10.0.1.0/24',
'network_id': 'my_netid1',
'gateway_ip': '10.0.1.1',
'dns_nameservers': ['8.8.1.1', '8.8.1.2']}]
self.subnet_data2 = []
self.subnet_data_n = [{'id': 'my_subid1',
'cidr': '10.0.1.0/24',
'network_id': 'my_netid1',
'gateway_ip': '10.0.1.1',
'dns_nameservers': ['8.8.1.1', '8.8.1.2']},
{'id': 'my_subid2',
'cidr': '20.0.1.0/24',
'network_id': 'my_netid2',
'gateway_ip': '20.0.1.1',
'dns_nameservers': ['8.8.1.1', '8.8.1.2']}]
self.subnet_data2.append({'id': 'my_subid2',
'cidr': '10.0.2.0/24',
'network_id': 'my_netid2',
'gateway_ip': '10.0.2.1',
'dns_nameservers': ['8.8.2.1', '8.8.2.2']})
self.fip_pool = {'id': '4fdbfd74-eaf8-4884-90d9-00bd6f10c2d3',
'name': 'ext_net',
'router:external': True,
'tenant_id': 'admin_tenantid'}
self.fip_pool_nova = {'id': '435e20c3-d9f1-4f1b-bee5-4611a1dd07db',
'name': 'nova',
'router:external': True,
'tenant_id': 'admin_tenantid'}
self.fip_unassociated = {'tenant_id': 'my_tenantid',
'id': 'fip_id1',
'floating_ip_address': '172.24.4.227',
'floating_network_id': self.fip_pool['id'],
'port_id': None,
'fixed_ip_address': None,
'router_id': None}
fixed_ip_address = self.port_data2[1]['fixed_ips'][0]['ip_address']
self.fip_associated = {'tenant_id': 'my_tenantid',
'id': 'fip_id2',
'floating_ip_address': '172.24.4.228',
'floating_network_id': self.fip_pool['id'],
'port_id': self.port_data2[1]['id'],
'fixed_ip_address': fixed_ip_address,
'router_id': 'router_id1'}
self._returned_nw_info = []
self.mox.StubOutWithMock(neutronapi, 'get_client')
self.moxed_client = self.mox.CreateMock(client.Client)
self.addCleanup(CONF.reset)
self.addCleanup(self.mox.VerifyAll)
self.addCleanup(self.mox.UnsetStubs)
self.addCleanup(self.stubs.UnsetAll)
def _fake_instance_object(self, instance):
return fake_instance.fake_instance_obj(self.context, **instance)
def _fake_instance_info_cache(self, nw_info, instance_uuid=None):
info_cache = {}
if instance_uuid is None:
info_cache['instance_uuid'] = str(uuid.uuid4())
else:
info_cache['instance_uuid'] = instance_uuid
info_cache['deleted'] = False
info_cache['created_at'] = timeutils.utcnow()
info_cache['deleted_at'] = timeutils.utcnow()
info_cache['updated_at'] = timeutils.utcnow()
info_cache['network_info'] = model.NetworkInfo.hydrate(six.text_type(
jsonutils.dumps(nw_info)))
return info_cache
def _fake_instance_object_with_info_cache(self, instance):
expected_attrs = ['info_cache']
instance = objects.Instance._from_db_object(self.context,
objects.Instance(), fake_instance.fake_db_instance(**instance),
expected_attrs=expected_attrs)
return instance
def _stub_allocate_for_instance(self, net_idx=1, **kwargs):
self.instance = self._fake_instance_object(self.instance)
self.instance2 = self._fake_instance_object(self.instance2)
api = neutronapi.API()
self.mox.StubOutWithMock(api, 'get_instance_nw_info')
has_portbinding = False
has_extra_dhcp_opts = False
dhcp_options = kwargs.get('dhcp_options')
if dhcp_options is not None:
has_extra_dhcp_opts = True
if kwargs.get('portbinding'):
has_portbinding = True
api.extensions[constants.PORTBINDING_EXT] = 1
self.mox.StubOutWithMock(api, '_refresh_neutron_extensions_cache')
neutronapi.get_client(mox.IgnoreArg()).AndReturn(
self.moxed_client)
neutronapi.get_client(
mox.IgnoreArg(), admin=True).AndReturn(
self.moxed_client)
api._refresh_neutron_extensions_cache(mox.IgnoreArg(),
neutron=self.moxed_client)
self.mox.StubOutWithMock(api, '_has_port_binding_extension')
api._has_port_binding_extension(mox.IgnoreArg(),
neutron=self.moxed_client,
refresh_cache=True).AndReturn(has_portbinding)
else:
self.mox.StubOutWithMock(api, '_refresh_neutron_extensions_cache')
api._refresh_neutron_extensions_cache(mox.IgnoreArg(),
neutron=self.moxed_client)
self.mox.StubOutWithMock(api, '_populate_neutron_extension_values')
# Net idx is 1-based for compatibility with existing unit tests
nets = self.nets[net_idx - 1]
ports = {}
fixed_ips = {}
macs = kwargs.get('macs')
if macs:
macs = set(macs)
req_net_ids = []
ordered_networks = []
if 'requested_networks' in kwargs:
for request in kwargs['requested_networks']:
if request.port_id:
if request.port_id == 'my_portid3':
self.moxed_client.show_port(request.port_id
).AndReturn(
{'port': {'id': 'my_portid3',
'network_id': 'my_netid1',
'tenant_id': self.tenant_id,
'mac_address': 'my_mac1',
'device_id': kwargs.get('_device') and
self.instance2.uuid or
''}})
ports['my_netid1'] = [self.port_data1[0],
self.port_data3[0]]
ports[request.port_id] = self.port_data3[0]
request.network_id = 'my_netid1'
if macs is not None:
macs.discard('my_mac1')
elif request.port_id == 'invalid_id':
PortNotFound = exceptions.PortNotFoundClient(
status_code=404)
self.moxed_client.show_port(request.port_id
).AndRaise(PortNotFound)
else:
self.moxed_client.show_port(request.port_id).AndReturn(
{'port': {'id': 'my_portid1',
'network_id': 'my_netid1',
'tenant_id': self.tenant_id,
'mac_address': 'my_mac1',
'device_id': kwargs.get('_device') and
self.instance2.uuid or
''}})
ports[request.port_id] = self.port_data1[0]
request.network_id = 'my_netid1'
if macs is not None:
macs.discard('my_mac1')
else:
fixed_ips[request.network_id] = request.address
req_net_ids.append(request.network_id)
ordered_networks.append(request)
else:
for n in nets:
ordered_networks.append(
objects.NetworkRequest(network_id=n['id']))
if kwargs.get('_break') == 'pre_list_networks':
self.mox.ReplayAll()
return api
# search all req_net_ids as in api.py
search_ids = req_net_ids
if search_ids:
mox_list_params = {'id': mox.SameElementsAs(search_ids)}
self.moxed_client.list_networks(
**mox_list_params).AndReturn({'networks': nets})
else:
mox_list_params = {'tenant_id': self.instance.project_id,
'shared': False}
self.moxed_client.list_networks(
**mox_list_params).AndReturn({'networks': nets})
mox_list_params = {'shared': True}
self.moxed_client.list_networks(
**mox_list_params).AndReturn({'networks': []})
if kwargs.get('_break') == 'post_list_networks':
self.mox.ReplayAll()
return api
if (('requested_networks' not in kwargs or
kwargs['requested_networks'].as_tuples() == [(None, None, None)])
and len(nets) > 1):
self.mox.ReplayAll()
return api
preexisting_port_ids = []
ports_in_requested_net_order = []
nets_in_requested_net_order = []
for request in ordered_networks:
port_req_body = {
'port': {
'device_id': self.instance.uuid,
'device_owner': 'compute:nova',
},
}
# Network lookup for available network_id
network = None
for net in nets:
if net['id'] == request.network_id:
network = net
break
# if net_id did not pass validate_networks() and not available
# here then skip it safely not continuing with a None Network
else:
continue
if has_portbinding:
port_req_body['port']['binding:host_id'] = (
self.instance.get('host'))
if not has_portbinding:
api._populate_neutron_extension_values(mox.IgnoreArg(),
self.instance, mox.IgnoreArg(),
mox.IgnoreArg(), neutron=self.moxed_client).AndReturn(None)
else:
# since _populate_neutron_extension_values() will call
# _has_port_binding_extension()
api._has_port_binding_extension(mox.IgnoreArg(),
neutron=self.moxed_client).\
AndReturn(has_portbinding)
if request.port_id:
port = ports[request.port_id]
self.moxed_client.update_port(request.port_id,
MyComparator(port_req_body)
).AndReturn(
{'port': port})
ports_in_requested_net_order.append(request.port_id)
preexisting_port_ids.append(request.port_id)
else:
request.address = fixed_ips.get(request.network_id)
if request.address:
port_req_body['port']['fixed_ips'] = [
{'ip_address': str(request.address)}]
port_req_body['port']['network_id'] = request.network_id
port_req_body['port']['admin_state_up'] = True
port_req_body['port']['tenant_id'] = \
self.instance.project_id
if macs:
port_req_body['port']['mac_address'] = macs.pop()
if has_portbinding:
port_req_body['port']['binding:host_id'] = (
self.instance.get('host'))
res_port = {'port': {'id': 'fake'}}
if has_extra_dhcp_opts:
port_req_body['port']['extra_dhcp_opts'] = dhcp_options
if kwargs.get('_break') == 'mac' + request.network_id:
self.mox.ReplayAll()
return api
self.moxed_client.create_port(
MyComparator(port_req_body)).AndReturn(res_port)
ports_in_requested_net_order.append(res_port['port']['id'])
nets_in_requested_net_order.append(network)
api.get_instance_nw_info(mox.IgnoreArg(),
self.instance,
networks=nets_in_requested_net_order,
port_ids=ports_in_requested_net_order,
admin_client=None,
preexisting_port_ids=preexisting_port_ids
).AndReturn(self._returned_nw_info)
self.mox.ReplayAll()
return api
def _verify_nw_info(self, nw_inf, index=0):
id_suffix = index + 1
self.assertEqual('10.0.%s.2' % id_suffix,
nw_inf.fixed_ips()[index]['address'])
self.assertEqual('172.0.%s.2' % id_suffix,
nw_inf.fixed_ips()[index].floating_ip_addresses()[0])
self.assertEqual('my_netname%s' % id_suffix,
nw_inf[index]['network']['label'])
self.assertEqual('my_portid%s' % id_suffix, nw_inf[index]['id'])
self.assertEqual('my_mac%s' % id_suffix, nw_inf[index]['address'])
self.assertEqual('10.0.%s.0/24' % id_suffix,
nw_inf[index]['network']['subnets'][0]['cidr'])
ip_addr = model.IP(address='8.8.%s.1' % id_suffix,
version=4, type='dns')
self.assertIn(ip_addr, nw_inf[index]['network']['subnets'][0]['dns'])
def _get_instance_nw_info(self, number):
api = neutronapi.API()
self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
api.db.instance_info_cache_update(mox.IgnoreArg(),
self.instance['uuid'],
mox.IgnoreArg()).AndReturn(
fake_info_cache)
port_data = number == 1 and self.port_data1 or self.port_data2
net_info_cache = []
for port in port_data:
net_info_cache.append({"network": {"id": port['network_id']},
"id": port['id']})
self.moxed_client.list_ports(
tenant_id=self.instance['project_id'],
device_id=self.instance['uuid']).AndReturn(
{'ports': port_data})
net_ids = [port['network_id'] for port in port_data]
nets = number == 1 and self.nets1 or self.nets2
self.moxed_client.list_networks(
id=net_ids).AndReturn({'networks': nets})
for i in range(1, number + 1):
float_data = number == 1 and self.float_data1 or self.float_data2
for ip in port_data[i - 1]['fixed_ips']:
float_data = [x for x in float_data
if x['fixed_ip_address'] == ip['ip_address']]
self.moxed_client.list_floatingips(
fixed_ip_address=ip['ip_address'],
port_id=port_data[i - 1]['id']).AndReturn(
{'floatingips': float_data})
subnet_data = i == 1 and self.subnet_data1 or self.subnet_data2
self.moxed_client.list_subnets(
id=mox.SameElementsAs(['my_subid%s' % i])).AndReturn(
{'subnets': subnet_data})
self.moxed_client.list_ports(
network_id=subnet_data[0]['network_id'],
device_owner='network:dhcp').AndReturn(
{'ports': []})
self.instance['info_cache'] = self._fake_instance_info_cache(
net_info_cache, self.instance['uuid'])
self.mox.StubOutWithMock(api.db, 'instance_info_cache_get')
api.db.instance_info_cache_get(mox.IgnoreArg(),
self.instance['uuid']).AndReturn(
self.instance['info_cache'])
self.mox.ReplayAll()
instance = self._fake_instance_object_with_info_cache(self.instance)
nw_inf = api.get_instance_nw_info(self.context, instance)
for i in range(0, number):
self._verify_nw_info(nw_inf, i)
def _allocate_for_instance(self, net_idx=1, **kwargs):
api = self._stub_allocate_for_instance(net_idx, **kwargs)
return api.allocate_for_instance(self.context, self.instance, **kwargs)
class TestNeutronv2(TestNeutronv2Base):
def setUp(self):
super(TestNeutronv2, self).setUp()
neutronapi.get_client(mox.IgnoreArg()).MultipleTimes().AndReturn(
self.moxed_client)
def test_get_instance_nw_info_1(self):
# Test to get one port in one network and subnet.
neutronapi.get_client(mox.IgnoreArg(),
admin=True).MultipleTimes().AndReturn(
self.moxed_client)
self._get_instance_nw_info(1)
def test_get_instance_nw_info_2(self):
# Test to get one port in each of two networks and subnets.
neutronapi.get_client(mox.IgnoreArg(),
admin=True).MultipleTimes().AndReturn(
self.moxed_client)
self._get_instance_nw_info(2)
def test_get_instance_nw_info_with_nets_add_interface(self):
# This tests that adding an interface to an instance does not
# remove the first instance from the instance.
network_model = model.Network(id='network_id',
bridge='br-int',
injected='injected',
label='fake_network',
tenant_id='fake_tenant')
network_cache = {'info_cache': {
'network_info': [{'id': self.port_data2[0]['id'],
'address': 'mac_address',
'network': network_model,
'type': 'ovs',
'ovs_interfaceid': 'ovs_interfaceid',
'devname': 'devname'}]}}
self._fake_get_instance_nw_info_helper(network_cache,
self.port_data2,
self.nets2,
[self.port_data2[1]['id']])
def test_get_instance_nw_info_remove_ports_from_neutron(self):
# This tests that when a port is removed in neutron it
# is also removed from the nova.
network_model = model.Network(id=self.port_data2[0]['network_id'],
bridge='br-int',
injected='injected',
label='fake_network',
tenant_id='fake_tenant')
network_cache = {'info_cache': {
'network_info': [{'id': 'network_id',
'address': 'mac_address',
'network': network_model,
'type': 'ovs',
'ovs_interfaceid': 'ovs_interfaceid',
'devname': 'devname'}]}}
self._fake_get_instance_nw_info_helper(network_cache,
self.port_data2,
None,
None)
def test_get_instance_nw_info_ignores_neutron_ports(self):
# Tests that only ports in the network_cache are updated
# and ports returned from neutron that match the same
# instance_id/device_id are ignored.
port_data2 = copy.copy(self.port_data2)
# set device_id on the ports to be the same.
port_data2[1]['device_id'] = port_data2[0]['device_id']
network_model = model.Network(id='network_id',
bridge='br-int',
injected='injected',
label='fake_network',
tenant_id='fake_tenant')
network_cache = {'info_cache': {
'network_info': [{'id': 'network_id',
'address': 'mac_address',
'network': network_model,
'type': 'ovs',
'ovs_interfaceid': 'ovs_interfaceid',
'devname': 'devname'}]}}
self._fake_get_instance_nw_info_helper(network_cache,
port_data2,
None,
None)
def _fake_get_instance_nw_info_helper(self, network_cache,
current_neutron_ports,
networks=None, port_ids=None):
"""Helper function to test get_instance_nw_info.
:param network_cache - data already in the nova network cache.
:param current_neutron_ports - updated list of ports from neutron.
:param networks - networks of ports being added to instance.
:param port_ids - new ports being added to instance.
"""
# keep a copy of the original ports/networks to pass to
# get_instance_nw_info() as the code below changes them.
original_port_ids = copy.copy(port_ids)
original_networks = copy.copy(networks)
api = neutronapi.API()
self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
api.db.instance_info_cache_update(
mox.IgnoreArg(),
self.instance['uuid'], mox.IgnoreArg()).AndReturn(fake_info_cache)
neutronapi.get_client(mox.IgnoreArg(),
admin=True).MultipleTimes().AndReturn(
self.moxed_client)
self.moxed_client.list_ports(
tenant_id=self.instance['project_id'],
device_id=self.instance['uuid']).AndReturn(
{'ports': current_neutron_ports})
ifaces = network_cache['info_cache']['network_info']
if port_ids is None:
port_ids = [iface['id'] for iface in ifaces]
net_ids = [iface['network']['id'] for iface in ifaces]
nets = [{'id': iface['network']['id'],
'name': iface['network']['label'],
'tenant_id': iface['network']['meta']['tenant_id']}
for iface in ifaces]
if networks is None:
self.moxed_client.list_networks(
id=net_ids).AndReturn({'networks': nets})
else:
networks = networks + [
dict(id=iface['network']['id'],
name=iface['network']['label'],
tenant_id=iface['network']['meta']['tenant_id'])
for iface in ifaces]
port_ids = [iface['id'] for iface in ifaces] + port_ids
index = 0
current_neutron_port_map = {}
for current_neutron_port in current_neutron_ports:
current_neutron_port_map[current_neutron_port['id']] = (
current_neutron_port)
for port_id in port_ids:
current_neutron_port = current_neutron_port_map.get(port_id)
if current_neutron_port:
for ip in current_neutron_port['fixed_ips']:
self.moxed_client.list_floatingips(
fixed_ip_address=ip['ip_address'],
port_id=current_neutron_port['id']).AndReturn(
{'floatingips': [self.float_data2[index]]})
self.moxed_client.list_subnets(
id=mox.SameElementsAs([ip['subnet_id']])
).AndReturn(
{'subnets': [self.subnet_data_n[index]]})
self.moxed_client.list_ports(
network_id=current_neutron_port['network_id'],
device_owner='network:dhcp').AndReturn(
{'ports': self.dhcp_port_data1})
index += 1
self.instance['info_cache'] = self._fake_instance_info_cache(
network_cache['info_cache']['network_info'], self.instance['uuid'])
self.mox.StubOutWithMock(api.db, 'instance_info_cache_get')
api.db.instance_info_cache_get(
mox.IgnoreArg(),
self.instance['uuid']).MultipleTimes().AndReturn(
self.instance['info_cache'])
self.mox.ReplayAll()
instance = self._fake_instance_object_with_info_cache(self.instance)
nw_infs = api.get_instance_nw_info(self.context,
instance,
networks=original_networks,
port_ids=original_port_ids)
self.assertEqual(index, len(nw_infs))
# ensure that nic ordering is preserved
for iface_index in range(index):
self.assertEqual(nw_infs[iface_index]['id'],
port_ids[iface_index])
def test_get_instance_nw_info_without_subnet(self):
# Test get instance_nw_info for a port without subnet.
api = neutronapi.API()
self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
api.db.instance_info_cache_update(
mox.IgnoreArg(),
self.instance['uuid'], mox.IgnoreArg()).AndReturn(fake_info_cache)
self.moxed_client.list_ports(
tenant_id=self.instance['project_id'],
device_id=self.instance['uuid']).AndReturn(
{'ports': self.port_data3})
self.moxed_client.list_networks(
id=[self.port_data1[0]['network_id']]).AndReturn(
{'networks': self.nets1})
neutronapi.get_client(mox.IgnoreArg(),
admin=True).MultipleTimes().AndReturn(
self.moxed_client)
net_info_cache = []
for port in self.port_data3:
net_info_cache.append({"network": {"id": port['network_id']},
"id": port['id']})
self.instance['info_cache'] = self._fake_instance_info_cache(
net_info_cache, self.instance['uuid'])
self.mox.StubOutWithMock(api.db, 'instance_info_cache_get')
api.db.instance_info_cache_get(
mox.IgnoreArg(),
self.instance['uuid']).AndReturn(self.instance['info_cache'])
self.mox.ReplayAll()
instance = self._fake_instance_object_with_info_cache(self.instance)
nw_inf = api.get_instance_nw_info(self.context,
instance)
id_suffix = 3
self.assertEqual(0, len(nw_inf.fixed_ips()))
self.assertEqual('my_netname1', nw_inf[0]['network']['label'])
self.assertEqual('my_portid%s' % id_suffix, nw_inf[0]['id'])
self.assertEqual('my_mac%s' % id_suffix, nw_inf[0]['address'])
self.assertEqual(0, len(nw_inf[0]['network']['subnets']))
def test_refresh_neutron_extensions_cache(self):
api = neutronapi.API()
# Note: Don't want the default get_client from setUp()
self.mox.ResetAll()
neutronapi.get_client(mox.IgnoreArg()).AndReturn(
self.moxed_client)
self.moxed_client.list_extensions().AndReturn(
{'extensions': [{'name': constants.QOS_QUEUE}]})
self.mox.ReplayAll()
api._refresh_neutron_extensions_cache(mox.IgnoreArg())
self.assertEqual(
{constants.QOS_QUEUE: {'name': constants.QOS_QUEUE}},
api.extensions)
    def test_populate_neutron_extension_values_rxtx_factor(self):
        # When the qos-queue extension is advertised, the flavor's
        # rxtx_factor must be copied into the port request body.
        api = neutronapi.API()
        # Note: Don't want the default get_client from setUp()
        self.mox.ResetAll()
        neutronapi.get_client(mox.IgnoreArg()).AndReturn(
            self.moxed_client)
        self.moxed_client.list_extensions().AndReturn(
            {'extensions': [{'name': constants.QOS_QUEUE}]})
        self.mox.ReplayAll()
        flavor = flavors.get_default_flavor()
        flavor['rxtx_factor'] = 1
        instance = objects.Instance(system_metadata={})
        # Patch save so set_flavor() does not hit the database.
        with mock.patch.object(instance, 'save'):
            instance.set_flavor(flavor)
        port_req_body = {'port': {}}
        api._populate_neutron_extension_values(self.context, instance,
                                               None, port_req_body)
        self.assertEqual(port_req_body['port']['rxtx_factor'], 1)
def test_allocate_for_instance_1(self):
# Allocate one port in one network env.
self._allocate_for_instance(1)
def test_allocate_for_instance_2(self):
# Allocate one port in two networks env.
api = self._stub_allocate_for_instance(net_idx=2)
self.assertRaises(exception.NetworkAmbiguous,
api.allocate_for_instance,
self.context, self.instance)
def test_allocate_for_instance_accepts_macs_kwargs_None(self):
# The macs kwarg should be accepted as None.
self._allocate_for_instance(1, macs=None)
def test_allocate_for_instance_accepts_macs_kwargs_set(self):
# The macs kwarg should be accepted, as a set, the
# _allocate_for_instance helper checks that the mac is used to create a
# port.
self._allocate_for_instance(1, macs=set(['ab:cd:ef:01:23:45']))
def test_allocate_for_instance_accepts_only_portid(self):
# Make sure allocate_for_instance works when only a portid is provided
self._returned_nw_info = self.port_data1
result = self._allocate_for_instance(
requested_networks=objects.NetworkRequestList(
objects=[objects.NetworkRequest(port_id='my_portid1')]))
self.assertEqual(self.port_data1, result)
    @mock.patch('nova.network.neutronv2.api.API._unbind_ports')
    def test_allocate_for_instance_not_enough_macs_via_ports(self,
                                                             mock_unbind):
        # using a hypervisor MAC via a pre-created port will stop it being
        # used to dynamically create a port on a network. We put the network
        # first in requested_networks so that if the code were to not pre-check
        # requested ports, it would incorrectly assign the mac and not fail.
        requested_networks = objects.NetworkRequestList(
            objects = [
                objects.NetworkRequest(network_id=self.nets2[1]['id']),
                objects.NetworkRequest(port_id='my_portid1')])
        api = self._stub_allocate_for_instance(
            net_idx=2, requested_networks=requested_networks,
            macs=set(['my_mac1']),
            _break='mac' + self.nets2[1]['id'])
        self.assertRaises(exception.PortNotFree,
                          api.allocate_for_instance, self.context,
                          self.instance, requested_networks=requested_networks,
                          macs=set(['my_mac1']))
        # Nothing was created before the failure, so unbind sees no ports.
        mock_unbind.assert_called_once_with(self.context, [],
                                            self.moxed_client, mock.ANY)
    @mock.patch('nova.network.neutronv2.api.API._unbind_ports')
    def test_allocate_for_instance_not_enough_macs(self, mock_unbind):
        # If not enough MAC addresses are available to allocate to networks, an
        # error should be raised.
        # We could pass in macs=set(), but that wouldn't tell us that
        # allocate_for_instance tracks used macs properly, so we pass in one
        # mac, and ask for two networks.
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id=self.nets2[1]['id']),
                     objects.NetworkRequest(network_id=self.nets2[0]['id'])])
        api = self._stub_allocate_for_instance(
            net_idx=2, requested_networks=requested_networks,
            macs=set(['my_mac2']),
            _break='mac' + self.nets2[0]['id'])
        with mock.patch.object(api, '_delete_ports'):
            self.assertRaises(exception.PortNotFree,
                              api.allocate_for_instance, self.context,
                              self.instance,
                              requested_networks=requested_networks,
                              macs=set(['my_mac2']))
        # No ports were successfully allocated, so unbind sees an empty list.
        mock_unbind.assert_called_once_with(self.context, [],
                                            self.moxed_client, mock.ANY)
def test_allocate_for_instance_two_macs_two_networks(self):
# If two MACs are available and two networks requested, two new ports
# get made and no exceptions raised.
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(network_id=self.nets2[1]['id']),
objects.NetworkRequest(network_id=self.nets2[0]['id'])])
self._allocate_for_instance(
net_idx=2, requested_networks=requested_networks,
macs=set(['my_mac2', 'my_mac1']))
def test_allocate_for_instance_mac_conflicting_requested_port(self):
# specify only first and last network
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(port_id='my_portid1')])
api = self._stub_allocate_for_instance(
net_idx=1, requested_networks=requested_networks,
macs=set(['unknown:mac']),
_break='pre_list_networks')
self.assertRaises(exception.PortNotUsable,
api.allocate_for_instance, self.context,
self.instance, requested_networks=requested_networks,
macs=set(['unknown:mac']))
def test_allocate_for_instance_without_requested_networks(self):
api = self._stub_allocate_for_instance(net_idx=3)
self.assertRaises(exception.NetworkAmbiguous,
api.allocate_for_instance,
self.context, self.instance)
def test_allocate_for_instance_with_requested_non_available_network(self):
"""verify that a non available network is ignored.
self.nets2 (net_idx=2) is composed of self.nets3[0] and self.nets3[1]
Do not create a port on a non available network self.nets3[2].
"""
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(network_id=net['id'])
for net in (self.nets3[0], self.nets3[2], self.nets3[1])])
self._allocate_for_instance(net_idx=2,
requested_networks=requested_networks)
def test_allocate_for_instance_with_requested_networks(self):
# specify only first and last network
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(network_id=net['id'])
for net in (self.nets3[1], self.nets3[0], self.nets3[2])])
self._allocate_for_instance(net_idx=3,
requested_networks=requested_networks)
def test_allocate_for_instance_with_invalid_network_id(self):
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(network_id='invalid_id')])
api = self._stub_allocate_for_instance(net_idx=9,
requested_networks=requested_networks,
_break='post_list_networks')
self.assertRaises(exception.NetworkNotFound,
api.allocate_for_instance,
self.context, self.instance,
requested_networks=requested_networks)
def test_allocate_for_instance_with_requested_networks_with_fixedip(self):
# specify only first and last network
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(network_id=self.nets1[0]['id'],
address='10.0.1.0')])
self._allocate_for_instance(net_idx=1,
requested_networks=requested_networks)
def test_allocate_for_instance_with_requested_networks_with_port(self):
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(port_id='my_portid1')])
self._allocate_for_instance(net_idx=1,
requested_networks=requested_networks)
    def test_allocate_for_instance_no_networks(self):
        """Verify an empty NetworkInfo is returned when no networks exist."""
        # NOTE(review): the old docstring claimed an exception is raised,
        # but the assertion below checks for an empty result instead.
        self.instance = fake_instance.fake_instance_obj(self.context,
                                                        **self.instance)
        api = neutronapi.API()
        self.moxed_client.list_extensions().AndReturn({'extensions': []})
        # Neither tenant-owned nor shared networks are available.
        self.moxed_client.list_networks(
            tenant_id=self.instance.project_id,
            shared=False).AndReturn(
                {'networks': model.NetworkInfo([])})
        self.moxed_client.list_networks(shared=True).AndReturn(
            {'networks': model.NetworkInfo([])})
        self.mox.ReplayAll()
        nwinfo = api.allocate_for_instance(self.context, self.instance)
        self.assertEqual(len(nwinfo), 0)
    @mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
    @mock.patch('nova.network.neutronv2.api.API._unbind_ports')
    def test_allocate_for_instance_ex1(self,
                                       mock_unbind,
                                       mock_preexisting):
        """verify we will delete created ports
        if we fail to allocate all net resources.

        Mox to raise exception when creating a second port.
        In this case, the code should delete the first created port.
        """
        self.instance = fake_instance.fake_instance_obj(self.context,
                                                        **self.instance)
        mock_preexisting.return_value = []
        api = neutronapi.API()
        self.mox.StubOutWithMock(api, '_populate_neutron_extension_values')
        self.mox.StubOutWithMock(api, '_has_port_binding_extension')
        api._has_port_binding_extension(mox.IgnoreArg(),
                                        neutron=self.moxed_client,
                                        refresh_cache=True).AndReturn(False)
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id=net['id'])
                     for net in (self.nets2[0], self.nets2[1])])
        self.moxed_client.list_networks(
            id=['my_netid1', 'my_netid2']).AndReturn({'networks': self.nets2})
        # Record one create_port per network: the first succeeds, the
        # second raises an over-quota error from neutron.
        index = 0
        for network in self.nets2:
            binding_port_req_body = {
                'port': {
                    'device_id': self.instance.uuid,
                    'device_owner': 'compute:nova',
                },
            }
            port_req_body = {
                'port': {
                    'network_id': network['id'],
                    'admin_state_up': True,
                    'tenant_id': self.instance.project_id,
                },
            }
            port_req_body['port'].update(binding_port_req_body['port'])
            port = {'id': 'portid_' + network['id']}
            api._populate_neutron_extension_values(self.context,
                self.instance, None, binding_port_req_body,
                neutron=self.moxed_client).AndReturn(None)
            if index == 0:
                self.moxed_client.create_port(
                    MyComparator(port_req_body)).AndReturn({'port': port})
            else:
                NeutronOverQuota = exceptions.OverQuotaClient()
                self.moxed_client.create_port(
                    MyComparator(port_req_body)).AndRaise(NeutronOverQuota)
            index += 1
        # Cleanup must delete the port that *was* created.
        self.moxed_client.delete_port('portid_' + self.nets2[0]['id'])
        self.mox.ReplayAll()
        self.assertRaises(exception.PortLimitExceeded,
                          api.allocate_for_instance,
                          self.context, self.instance,
                          requested_networks=requested_networks)
        mock_unbind.assert_called_once_with(self.context, [],
                                            self.moxed_client, mock.ANY)
    def test_allocate_for_instance_ex2(self):
        """verify we have no port to delete
        if we fail to allocate the first net resource.

        Mox to raise exception when creating the first port.
        In this case, the code should not delete any ports.
        """
        self.instance = fake_instance.fake_instance_obj(self.context,
                                                        **self.instance)
        api = neutronapi.API()
        self.mox.StubOutWithMock(api, '_populate_neutron_extension_values')
        self.mox.StubOutWithMock(api, '_has_port_binding_extension')
        api._has_port_binding_extension(mox.IgnoreArg(),
                                        neutron=self.moxed_client,
                                        refresh_cache=True).AndReturn(False)
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id=net['id'])
                     for net in (self.nets2[0], self.nets2[1])])
        self.moxed_client.list_networks(
            id=['my_netid1', 'my_netid2']).AndReturn({'networks': self.nets2})
        binding_port_req_body = {
            'port': {
                'device_id': self.instance.uuid,
                'device_owner': 'compute:nova',
            },
        }
        port_req_body = {
            'port': {
                'network_id': self.nets2[0]['id'],
                'admin_state_up': True,
                'device_id': self.instance.uuid,
                'tenant_id': self.instance.project_id,
            },
        }
        api._populate_neutron_extension_values(self.context,
            self.instance, None, binding_port_req_body,
            neutron=self.moxed_client).AndReturn(None)
        # The very first create_port fails, so there is nothing to clean
        # up and no delete_port expectation is recorded.
        self.moxed_client.create_port(
            MyComparator(port_req_body)).AndRaise(
                Exception("fail to create port"))
        self.mox.ReplayAll()
        self.assertRaises(NEUTRON_CLIENT_EXCEPTION, api.allocate_for_instance,
                          self.context, self.instance,
                          requested_networks=requested_networks)
    def test_allocate_for_instance_no_port_or_network(self):
        # A NetworkRequest with neither a port id nor a network id makes
        # allocate_for_instance consult _get_available_networks with an
        # empty id list; raise a sentinel there to end the test early.
        class BailOutEarly(Exception):
            pass
        self.instance = fake_instance.fake_instance_obj(self.context,
                                                        **self.instance)
        api = neutronapi.API()
        self.moxed_client.list_extensions().AndReturn({'extensions': []})
        self.mox.StubOutWithMock(api, '_get_available_networks')
        # Make sure we get an empty list and then bail out of the rest
        # of the function
        api._get_available_networks(self.context, self.instance.project_id,
                                    [],
                                    neutron=self.moxed_client).\
            AndRaise(BailOutEarly)
        self.mox.ReplayAll()
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest()])
        self.assertRaises(BailOutEarly,
                          api.allocate_for_instance,
                          self.context, self.instance,
                          requested_networks=requested_networks)
def test_allocate_for_instance_second_time(self):
# Make sure that allocate_for_instance only returns ports that it
# allocated during _that_ run.
new_port = {'id': 'fake'}
self._returned_nw_info = self.port_data1 + [new_port]
nw_info = self._allocate_for_instance()
self.assertEqual(nw_info, [new_port])
def test_allocate_for_instance_port_in_use(self):
# If a port is already in use, an exception should be raised.
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(port_id='my_portid1')])
api = self._stub_allocate_for_instance(
requested_networks=requested_networks,
_break='pre_list_networks',
_device=True)
self.assertRaises(exception.PortInUse,
api.allocate_for_instance, self.context,
self.instance, requested_networks=requested_networks)
def test_allocate_for_instance_port_not_found(self):
# If a port is not found, an exception should be raised.
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(port_id='invalid_id')])
api = self._stub_allocate_for_instance(
requested_networks=requested_networks,
_break='pre_list_networks')
self.assertRaises(exception.PortNotFound,
api.allocate_for_instance, self.context,
self.instance, requested_networks=requested_networks)
def test_allocate_for_instance_port_invalid_tenantid(self):
self.tenant_id = 'invalid_id'
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(port_id='my_portid1')])
api = self._stub_allocate_for_instance(
requested_networks=requested_networks,
_break='pre_list_networks')
self.assertRaises(exception.PortNotUsable,
api.allocate_for_instance, self.context,
self.instance, requested_networks=requested_networks)
def test_allocate_for_instance_with_externalnet_forbidden(self):
"""Only one network is available, it's external, and the client
is unauthorized to use it.
"""
self.instance = fake_instance.fake_instance_obj(self.context,
**self.instance)
self.moxed_client.list_extensions().AndReturn({'extensions': []})
# no networks in the tenant
self.moxed_client.list_networks(
tenant_id=self.instance.project_id,
shared=False).AndReturn(
{'networks': model.NetworkInfo([])})
# external network is shared
self.moxed_client.list_networks(shared=True).AndReturn(
{'networks': self.nets8})
self.mox.ReplayAll()
api = neutronapi.API()
self.assertRaises(exception.ExternalNetworkAttachForbidden,
api.allocate_for_instance,
self.context, self.instance)
def test_allocate_for_instance_with_externalnet_multiple(self):
"""Multiple networks are available, one the client is authorized
to use, and an external one the client is unauthorized to use.
"""
self.instance = fake_instance.fake_instance_obj(self.context,
**self.instance)
self.moxed_client.list_extensions().AndReturn({'extensions': []})
# network found in the tenant
self.moxed_client.list_networks(
tenant_id=self.instance.project_id,
shared=False).AndReturn(
{'networks': self.nets1})
# external network is shared
self.moxed_client.list_networks(shared=True).AndReturn(
{'networks': self.nets8})
self.mox.ReplayAll()
api = neutronapi.API()
self.assertRaises(
exception.NetworkAmbiguous,
api.allocate_for_instance,
self.context, self.instance)
def test_allocate_for_instance_with_externalnet_admin_ctx(self):
"""Only one network is available, it's external, and the client
is authorized.
"""
admin_ctx = context.RequestContext('userid', 'my_tenantid',
is_admin=True)
api = self._stub_allocate_for_instance(net_idx=8)
api.allocate_for_instance(admin_ctx, self.instance)
def test_allocate_for_instance_with_external_shared_net(self):
"""Only one network is available, it's external and shared."""
ctx = context.RequestContext('userid', 'my_tenantid')
api = self._stub_allocate_for_instance(net_idx=10)
api.allocate_for_instance(ctx, self.instance)
    def _deallocate_for_instance(self, number, requested_networks=None):
        """Record expectations for, and run, deallocate_for_instance.

        :param number: 1 or 2 -- selects port_data1 or port_data2 as the
            instance's existing ports.
        :param requested_networks: optional requests whose ports are
            treated as pre-existing (updated/unbound, not deleted).
        """
        # TODO(mriedem): Remove this conversion when all neutronv2 APIs are
        # converted to handling instance objects.
        self.instance = fake_instance.fake_instance_obj(self.context,
                                                        **self.instance)
        api = neutronapi.API()
        port_data = number == 1 and self.port_data1 or self.port_data2
        ports = {port['id'] for port in port_data}
        ret_data = copy.deepcopy(port_data)
        if requested_networks:
            if isinstance(requested_networks, objects.NetworkRequestList):
                # NOTE(danms): Temporary and transitional
                with mock.patch('nova.utils.is_neutron', return_value=True):
                    requested_networks = requested_networks.as_tuples()
            # Append a fake pre-existing port per requested network.
            for net, fip, port, request_id in requested_networks:
                ret_data.append({'network_id': net,
                                 'device_id': self.instance.uuid,
                                 'device_owner': 'compute:nova',
                                 'id': port,
                                 'status': 'DOWN',
                                 'admin_state_up': True,
                                 'fixed_ips': [],
                                 'mac_address': 'fake_mac', })
        self.moxed_client.list_ports(
            device_id=self.instance.uuid).AndReturn(
                {'ports': ret_data})
        self.moxed_client.list_extensions().AndReturn({'extensions': []})
        if requested_networks:
            # Pre-existing (requested) ports are unbound via update_port,
            for net, fip, port, request_id in requested_networks:
                self.moxed_client.update_port(port)
        # while nova-created ports are deleted outright.
        for port in ports:
            self.moxed_client.delete_port(port)
        self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
        api.db.instance_info_cache_update(self.context,
                                          self.instance.uuid,
                                          {'network_info': '[]'}).AndReturn(
                                              fake_info_cache)
        self.mox.ReplayAll()
        api = neutronapi.API()
        api.deallocate_for_instance(self.context, self.instance,
                                    requested_networks=requested_networks)
@mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
def test_deallocate_for_instance_1_with_requested(self, mock_preexisting):
mock_preexisting.return_value = []
requested = objects.NetworkRequestList(
objects=[objects.NetworkRequest(network_id='fake-net',
address='1.2.3.4',
port_id='fake-port')])
# Test to deallocate in one port env.
self._deallocate_for_instance(1, requested_networks=requested)
@mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
def test_deallocate_for_instance_2_with_requested(self, mock_preexisting):
mock_preexisting.return_value = []
requested = objects.NetworkRequestList(
objects=[objects.NetworkRequest(network_id='fake-net',
address='1.2.3.4',
port_id='fake-port')])
# Test to deallocate in one port env.
self._deallocate_for_instance(2, requested_networks=requested)
@mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
def test_deallocate_for_instance_1(self, mock_preexisting):
mock_preexisting.return_value = []
# Test to deallocate in one port env.
self._deallocate_for_instance(1)
@mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
def test_deallocate_for_instance_2(self, mock_preexisting):
mock_preexisting.return_value = []
# Test to deallocate in two ports env.
self._deallocate_for_instance(2)
    @mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
    def test_deallocate_for_instance_port_not_found(self,
                                                    mock_preexisting):
        # A 404 from neutron while deleting a port must be tolerated:
        # deallocate_for_instance should swallow it and carry on.
        # TODO(mriedem): Remove this conversion when all neutronv2 APIs are
        # converted to handling instance objects.
        self.instance = fake_instance.fake_instance_obj(self.context,
                                                        **self.instance)
        mock_preexisting.return_value = []
        port_data = self.port_data1
        self.moxed_client.list_ports(
            device_id=self.instance.uuid).AndReturn(
                {'ports': port_data})
        self.moxed_client.list_extensions().AndReturn({'extensions': []})
        NeutronNotFound = exceptions.NeutronClientException(status_code=404)
        for port in reversed(port_data):
            self.moxed_client.delete_port(port['id']).AndRaise(
                NeutronNotFound)
        self.mox.ReplayAll()
        api = neutronapi.API()
        api.deallocate_for_instance(self.context, self.instance)
    def _test_deallocate_port_for_instance(self, number):
        """Deallocate the first port and verify the remaining nw_info.

        :param number: 1 or 2 -- selects the one-port or two-port fixture.
        """
        port_data = number == 1 and self.port_data1 or self.port_data2
        nets = number == 1 and self.nets1 or self.nets2
        self.moxed_client.delete_port(port_data[0]['id'])
        # Seed the info cache with every port so the refresh has a baseline.
        net_info_cache = []
        for port in port_data:
            net_info_cache.append({"network": {"id": port['network_id']},
                                   "id": port['id']})
        self.instance['info_cache'] = self._fake_instance_info_cache(
            net_info_cache, self.instance['uuid'])
        api = neutronapi.API()
        neutronapi.get_client(mox.IgnoreArg(), admin=True).AndReturn(
            self.moxed_client)
        # After deletion only the remaining ports are listed.
        self.moxed_client.list_ports(
            tenant_id=self.instance['project_id'],
            device_id=self.instance['uuid']).AndReturn(
                {'ports': port_data[1:]})
        neutronapi.get_client(mox.IgnoreArg()).MultipleTimes().AndReturn(
            self.moxed_client)
        net_ids = [port['network_id'] for port in port_data]
        self.moxed_client.list_networks(id=net_ids).AndReturn(
            {'networks': nets})
        float_data = number == 1 and self.float_data1 or self.float_data2
        for data in port_data[1:]:
            for ip in data['fixed_ips']:
                self.moxed_client.list_floatingips(
                    fixed_ip_address=ip['ip_address'],
                    port_id=data['id']).AndReturn(
                        {'floatingips': float_data[1:]})
        for port in port_data[1:]:
            self.moxed_client.list_subnets(id=['my_subid2']).AndReturn({})
        self.mox.StubOutWithMock(api.db, 'instance_info_cache_get')
        api.db.instance_info_cache_get(mox.IgnoreArg(),
                                       self.instance['uuid']).AndReturn(
                                           self.instance['info_cache'])
        self.mox.ReplayAll()
        instance = self._fake_instance_object_with_info_cache(self.instance)
        nwinfo = api.deallocate_port_for_instance(self.context, instance,
                                                  port_data[0]['id'])
        # The returned network info must no longer include the deleted port.
        self.assertEqual(len(nwinfo), len(port_data[1:]))
        if len(port_data) > 1:
            self.assertEqual(nwinfo[0]['network']['id'], 'my_netid2')
def test_deallocate_port_for_instance_1(self):
# Test to deallocate the first and only port
self._test_deallocate_port_for_instance(1)
def test_deallocate_port_for_instance_2(self):
# Test to deallocate the first port of two
self._test_deallocate_port_for_instance(2)
def test_list_ports(self):
search_opts = {'parm': 'value'}
self.moxed_client.list_ports(**search_opts)
self.mox.ReplayAll()
neutronapi.API().list_ports(self.context, **search_opts)
def test_show_port(self):
self.moxed_client.show_port('foo').AndReturn(
{'port': self.port_data1[0]})
self.mox.ReplayAll()
neutronapi.API().show_port(self.context, 'foo')
def test_validate_networks(self):
requested_networks = [('my_netid1', None, None, None),
('my_netid2', None, None, None)]
ids = ['my_netid1', 'my_netid2']
self.moxed_client.list_networks(
id=mox.SameElementsAs(ids)).AndReturn(
{'networks': self.nets2})
self.moxed_client.list_ports(tenant_id='my_tenantid').AndReturn(
{'ports': []})
self.moxed_client.show_quota(
tenant_id='my_tenantid').AndReturn(
{'quota': {'port': 50}})
self.mox.ReplayAll()
api = neutronapi.API()
api.validate_networks(self.context, requested_networks, 1)
def test_validate_networks_without_port_quota_on_network_side(self):
requested_networks = [('my_netid1', None, None, None),
('my_netid2', None, None, None)]
ids = ['my_netid1', 'my_netid2']
self.moxed_client.list_networks(
id=mox.SameElementsAs(ids)).AndReturn(
{'networks': self.nets2})
self.moxed_client.list_ports(tenant_id='my_tenantid').AndReturn(
{'ports': []})
self.moxed_client.show_quota(
tenant_id='my_tenantid').AndReturn(
{'quota': {}})
self.mox.ReplayAll()
api = neutronapi.API()
api.validate_networks(self.context, requested_networks, 1)
def test_validate_networks_ex_1(self):
requested_networks = [('my_netid1', None, None, None)]
self.moxed_client.list_networks(
id=mox.SameElementsAs(['my_netid1'])).AndReturn(
{'networks': self.nets1})
self.moxed_client.list_ports(tenant_id='my_tenantid').AndReturn(
{'ports': []})
self.moxed_client.show_quota(
tenant_id='my_tenantid').AndReturn(
{'quota': {'port': 50}})
self.mox.ReplayAll()
api = neutronapi.API()
try:
api.validate_networks(self.context, requested_networks, 1)
except exception.NetworkNotFound as ex:
self.assertIn("my_netid2", six.text_type(ex))
def test_validate_networks_ex_2(self):
requested_networks = [('my_netid1', None, None, None),
('my_netid2', None, None, None),
('my_netid3', None, None, None)]
ids = ['my_netid1', 'my_netid2', 'my_netid3']
self.moxed_client.list_networks(
id=mox.SameElementsAs(ids)).AndReturn(
{'networks': self.nets1})
self.mox.ReplayAll()
api = neutronapi.API()
try:
api.validate_networks(self.context, requested_networks, 1)
except exception.NetworkNotFound as ex:
self.assertIn("my_netid2", six.text_type(ex))
self.assertIn("my_netid3", six.text_type(ex))
def test_validate_networks_duplicate_enable(self):
# Verify that no duplicateNetworks exception is thrown when duplicate
# network ids are passed to validate_networks.
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(network_id='my_netid1'),
objects.NetworkRequest(network_id='my_netid1')])
ids = ['my_netid1', 'my_netid1']
self.moxed_client.list_networks(
id=mox.SameElementsAs(ids)).AndReturn(
{'networks': self.nets1})
self.moxed_client.list_ports(tenant_id='my_tenantid').AndReturn(
{'ports': []})
self.moxed_client.show_quota(
tenant_id='my_tenantid').AndReturn(
{'quota': {'port': 50}})
self.mox.ReplayAll()
api = neutronapi.API()
api.validate_networks(self.context, requested_networks, 1)
def test_allocate_for_instance_with_requested_networks_duplicates(self):
# specify a duplicate network to allocate to instance
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(network_id=net['id'])
for net in (self.nets6[0], self.nets6[1])])
self._allocate_for_instance(net_idx=6,
requested_networks=requested_networks)
def test_allocate_for_instance_requested_networks_duplicates_port(self):
# specify first port and last port that are in same network
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(port_id=port['id'])
for port in (self.port_data1[0], self.port_data3[0])])
self._allocate_for_instance(net_idx=6,
requested_networks=requested_networks)
def test_allocate_for_instance_requested_networks_duplicates_combo(self):
# specify a combo net_idx=7 : net2, port in net1, net2, port in net1
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(network_id='my_netid2'),
objects.NetworkRequest(port_id=self.port_data1[0]['id']),
objects.NetworkRequest(network_id='my_netid2'),
objects.NetworkRequest(port_id=self.port_data3[0]['id'])])
self._allocate_for_instance(net_idx=7,
requested_networks=requested_networks)
def test_validate_networks_not_specified(self):
requested_networks = objects.NetworkRequestList(objects=[])
self.moxed_client.list_networks(
tenant_id=self.context.project_id,
shared=False).AndReturn(
{'networks': self.nets1})
self.moxed_client.list_networks(
shared=True).AndReturn(
{'networks': self.nets2})
self.mox.ReplayAll()
api = neutronapi.API()
self.assertRaises(exception.NetworkAmbiguous,
api.validate_networks,
self.context, requested_networks, 1)
def test_validate_networks_port_not_found(self):
# Verify that the correct exception is thrown when a non existent
# port is passed to validate_networks.
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(
network_id='my_netid1',
port_id='3123-ad34-bc43-32332ca33e')])
PortNotFound = exceptions.PortNotFoundClient()
self.moxed_client.show_port(requested_networks[0].port_id).AndRaise(
PortNotFound)
self.mox.ReplayAll()
# Expected call from setUp.
neutronapi.get_client(None)
api = neutronapi.API()
self.assertRaises(exception.PortNotFound,
api.validate_networks,
self.context, requested_networks, 1)
def test_validate_networks_port_show_raises_non404(self):
# Verify that the correct exception is thrown when a non existent
# port is passed to validate_networks.
fake_port_id = '3123-ad34-bc43-32332ca33e'
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(
network_id='my_netid1',
port_id=fake_port_id)])
NeutronNotFound = exceptions.NeutronClientException(status_code=0)
self.moxed_client.show_port(requested_networks[0].port_id).AndRaise(
NeutronNotFound)
self.mox.ReplayAll()
# Expected call from setUp.
neutronapi.get_client(None)
api = neutronapi.API()
exc = self.assertRaises(exception.NovaException,
api.validate_networks,
self.context, requested_networks, 1)
expected_exception_message = ('Failed to access port %(port_id)s: '
'An unknown exception occurred.' %
{'port_id': fake_port_id})
self.assertEqual(expected_exception_message, str(exc))
def test_validate_networks_port_in_use(self):
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(port_id=self.port_data3[0]['id'])])
self.moxed_client.show_port(self.port_data3[0]['id']).\
AndReturn({'port': self.port_data3[0]})
self.mox.ReplayAll()
api = neutronapi.API()
self.assertRaises(exception.PortInUse,
api.validate_networks,
self.context, requested_networks, 1)
def test_validate_networks_port_no_subnet_id(self):
port_a = self.port_data3[0]
port_a['device_id'] = None
port_a['device_owner'] = None
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(port_id=port_a['id'])])
self.moxed_client.show_port(port_a['id']).AndReturn({'port': port_a})
self.mox.ReplayAll()
api = neutronapi.API()
self.assertRaises(exception.PortRequiresFixedIP,
api.validate_networks,
self.context, requested_networks, 1)
def test_validate_networks_no_subnet_id(self):
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(network_id='his_netid4')])
ids = ['his_netid4']
self.moxed_client.list_networks(
id=mox.SameElementsAs(ids)).AndReturn(
{'networks': self.nets4})
self.mox.ReplayAll()
api = neutronapi.API()
self.assertRaises(exception.NetworkRequiresSubnet,
api.validate_networks,
self.context, requested_networks, 1)
def test_validate_networks_ports_in_same_network_enable(self):
# Verify that duplicateNetworks exception is not thrown when ports
# on same duplicate network are passed to validate_networks.
port_a = self.port_data3[0]
port_a['fixed_ips'] = {'ip_address': '10.0.0.2',
'subnet_id': 'subnet_id'}
port_b = self.port_data1[0]
self.assertEqual(port_a['network_id'], port_b['network_id'])
for port in [port_a, port_b]:
port['device_id'] = None
port['device_owner'] = None
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(port_id=port_a['id']),
objects.NetworkRequest(port_id=port_b['id'])])
self.moxed_client.show_port(port_a['id']).AndReturn(
{'port': port_a})
self.moxed_client.show_port(port_b['id']).AndReturn(
{'port': port_b})
self.mox.ReplayAll()
api = neutronapi.API()
api.validate_networks(self.context, requested_networks, 1)
def test_validate_networks_ports_not_in_same_network(self):
port_a = self.port_data3[0]
port_a['fixed_ips'] = {'ip_address': '10.0.0.2',
'subnet_id': 'subnet_id'}
port_b = self.port_data2[1]
self.assertNotEqual(port_a['network_id'], port_b['network_id'])
for port in [port_a, port_b]:
port['device_id'] = None
port['device_owner'] = None
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(port_id=port_a['id']),
objects.NetworkRequest(port_id=port_b['id'])])
self.moxed_client.show_port(port_a['id']).AndReturn({'port': port_a})
self.moxed_client.show_port(port_b['id']).AndReturn({'port': port_b})
self.mox.ReplayAll()
api = neutronapi.API()
api.validate_networks(self.context, requested_networks, 1)
def test_validate_networks_no_quota(self):
# Test validation for a request for one instance needing
# two ports, where the quota is 2 and 2 ports are in use
# => instances which can be created = 0
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(network_id='my_netid1'),
objects.NetworkRequest(network_id='my_netid2')])
ids = ['my_netid1', 'my_netid2']
self.moxed_client.list_networks(
id=mox.SameElementsAs(ids)).AndReturn(
{'networks': self.nets2})
self.moxed_client.list_ports(tenant_id='my_tenantid').AndReturn(
{'ports': self.port_data2})
self.moxed_client.show_quota(
tenant_id='my_tenantid').AndReturn(
{'quota': {'port': 2}})
self.mox.ReplayAll()
api = neutronapi.API()
max_count = api.validate_networks(self.context,
requested_networks, 1)
self.assertEqual(max_count, 0)
def test_validate_networks_with_ports_and_networks(self):
# Test validation for a request for one instance needing
# one port allocated via nova with another port being passed in.
port_b = self.port_data2[1]
port_b['device_id'] = None
port_b['device_owner'] = None
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(network_id='my_netid1'),
objects.NetworkRequest(port_id=port_b['id'])])
self.moxed_client.show_port(port_b['id']).AndReturn({'port': port_b})
ids = ['my_netid1']
self.moxed_client.list_networks(
id=mox.SameElementsAs(ids)).AndReturn(
{'networks': self.nets1})
self.moxed_client.list_ports(tenant_id='my_tenantid').AndReturn(
{'ports': self.port_data2})
self.moxed_client.show_quota(
tenant_id='my_tenantid').AndReturn(
{'quota': {'port': 5}})
self.mox.ReplayAll()
api = neutronapi.API()
max_count = api.validate_networks(self.context,
requested_networks, 1)
self.assertEqual(max_count, 1)
    def test_validate_networks_one_port_and_no_networks(self):
        """Quota is not consulted when only pre-created ports are requested.

        No show_quota expectation is recorded on the mocked client, so the
        mox verify phase would fail if validate_networks called it.
        """
        # Test that show quota is not called if no networks are
        # passed in and only ports.
        port_b = self.port_data2[1]
        port_b['device_id'] = None
        port_b['device_owner'] = None
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(port_id=port_b['id'])])
        self.moxed_client.show_port(port_b['id']).AndReturn({'port': port_b})
        self.mox.ReplayAll()
        api = neutronapi.API()
        max_count = api.validate_networks(self.context,
                                          requested_networks, 1)
        self.assertEqual(max_count, 1)
    def test_validate_networks_some_quota(self):
        """Partial quota limits how many of the requested instances fit."""
        # Test validation for a request for two instance needing
        # two ports each, where the quota is 5 and 2 ports are in use
        # => instances which can be created = 1
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id='my_netid1'),
                     objects.NetworkRequest(network_id='my_netid2')])
        ids = ['my_netid1', 'my_netid2']
        self.moxed_client.list_networks(
            id=mox.SameElementsAs(ids)).AndReturn(
                {'networks': self.nets2})
        self.moxed_client.list_ports(tenant_id='my_tenantid').AndReturn(
                    {'ports': self.port_data2})
        self.moxed_client.show_quota(
            tenant_id='my_tenantid').AndReturn(
                    {'quota': {'port': 5}})
        self.mox.ReplayAll()
        api = neutronapi.API()
        max_count = api.validate_networks(self.context,
                                          requested_networks, 2)
        self.assertEqual(max_count, 1)
    def test_validate_networks_unlimited_quota(self):
        """A port quota of -1 (unlimited) never reduces the instance count."""
        # Test validation for a request for two instance needing
        # two ports each, where the quota is -1 (unlimited)
        # => instances which can be created = 1
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id='my_netid1'),
                     objects.NetworkRequest(network_id='my_netid2')])
        ids = ['my_netid1', 'my_netid2']
        self.moxed_client.list_networks(
            id=mox.SameElementsAs(ids)).AndReturn(
                {'networks': self.nets2})
        self.moxed_client.list_ports(tenant_id='my_tenantid').AndReturn(
                    {'ports': self.port_data2})
        self.moxed_client.show_quota(
            tenant_id='my_tenantid').AndReturn(
                    {'quota': {'port': -1}})
        self.mox.ReplayAll()
        api = neutronapi.API()
        max_count = api.validate_networks(self.context,
                                          requested_networks, 2)
        self.assertEqual(max_count, 2)
    def test_validate_networks_no_quota_but_ports_supplied(self):
        """Two pre-created ports on different networks validate for one
        instance without any quota lookup.
        """
        port_a = self.port_data3[0]
        port_a['fixed_ips'] = {'ip_address': '10.0.0.2',
                               'subnet_id': 'subnet_id'}
        port_b = self.port_data2[1]
        # Sanity check: the two ports must be on distinct networks.
        self.assertNotEqual(port_a['network_id'], port_b['network_id'])
        for port in [port_a, port_b]:
            port['device_id'] = None
            port['device_owner'] = None
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(port_id=port_a['id']),
                     objects.NetworkRequest(port_id=port_b['id'])])
        self.moxed_client.show_port(port_a['id']).AndReturn({'port': port_a})
        self.moxed_client.show_port(port_b['id']).AndReturn({'port': port_b})
        self.mox.ReplayAll()
        api = neutronapi.API()
        max_count = api.validate_networks(self.context,
                                          requested_networks, 1)
        self.assertEqual(max_count, 1)
    def _mock_list_ports(self, port_data=None):
        """Record a list_ports expectation filtered by fixed IP and enter
        replay mode.

        :param port_data: ports the mocked client returns; defaults to
                          ``self.port_data2``.
        :returns: the fixed IP address used in the filter.
        """
        if port_data is None:
            port_data = self.port_data2
        address = self.port_address
        self.moxed_client.list_ports(
            fixed_ips=MyComparator('ip_address=%s' % address)).AndReturn(
                {'ports': port_data})
        self.mox.ReplayAll()
        return address
    def test_get_fixed_ip_by_address_fails_for_no_ports(self):
        """No port owning the address raises FixedIpNotFoundForAddress."""
        address = self._mock_list_ports(port_data=[])
        api = neutronapi.API()
        self.assertRaises(exception.FixedIpNotFoundForAddress,
                          api.get_fixed_ip_by_address,
                          self.context, address)
    def test_get_fixed_ip_by_address_succeeds_for_1_port(self):
        """Exactly one matching port maps the address to its instance."""
        address = self._mock_list_ports(port_data=self.port_data1)
        api = neutronapi.API()
        result = api.get_fixed_ip_by_address(self.context, address)
        self.assertEqual(self.instance2['uuid'], result['instance_uuid'])
    def test_get_fixed_ip_by_address_fails_for_more_than_1_port(self):
        """Multiple ports for one address raise
        FixedIpAssociatedWithMultipleInstances.
        """
        address = self._mock_list_ports()
        api = neutronapi.API()
        self.assertRaises(exception.FixedIpAssociatedWithMultipleInstances,
                          api.get_fixed_ip_by_address,
                          self.context, address)
    def _get_available_networks(self, prv_nets, pub_nets,
                                req_ids=None, context=None):
        """Drive API._get_available_networks and assert it returns the
        combined private + shared network list.

        When ``req_ids`` is given a single filtered list_networks call is
        expected; otherwise one tenant-scoped and one shared-scoped call
        are recorded.
        """
        api = neutronapi.API()
        nets = prv_nets + pub_nets
        if req_ids:
            mox_list_params = {'id': req_ids}
            self.moxed_client.list_networks(
                **mox_list_params).AndReturn({'networks': nets})
        else:
            mox_list_params = {'tenant_id': self.instance['project_id'],
                               'shared': False}
            self.moxed_client.list_networks(
                **mox_list_params).AndReturn({'networks': prv_nets})
            mox_list_params = {'shared': True}
            self.moxed_client.list_networks(
                **mox_list_params).AndReturn({'networks': pub_nets})
        self.mox.ReplayAll()
        rets = api._get_available_networks(
            context if context else self.context,
            self.instance['project_id'],
            req_ids)
        self.assertEqual(rets, nets)
    def test_get_available_networks_all_private(self):
        """Only tenant-private networks are available."""
        self._get_available_networks(prv_nets=self.nets2, pub_nets=[])
    def test_get_available_networks_all_public(self):
        """Only shared (public) networks are available."""
        self._get_available_networks(prv_nets=[], pub_nets=self.nets2)
    def test_get_available_networks_private_and_public(self):
        """Private and shared networks are both returned."""
        self._get_available_networks(prv_nets=self.nets1, pub_nets=self.nets4)
    def test_get_available_networks_with_network_ids(self):
        """Explicit network ids restrict the lookup to those networks."""
        prv_nets = [self.nets3[0]]
        pub_nets = [self.nets3[-1]]
        # specify only first and last network
        req_ids = [net['id'] for net in (self.nets3[0], self.nets3[-1])]
        self._get_available_networks(prv_nets, pub_nets, req_ids)
    def test_get_available_networks_with_custom_policy(self):
        """A permissive attach_external_network policy exposes external
        networks to the tenant.
        """
        rules = {'network:attach_external_network':
                 common_policy.parse_rule('')}
        policy.set_rules(rules)
        req_ids = [net['id'] for net in self.nets5]
        self._get_available_networks(self.nets5, pub_nets=[], req_ids=req_ids)
    def test_get_floating_ip_pools(self):
        """Pools are the names of router:external networks."""
        api = neutronapi.API()
        search_opts = {'router:external': True}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool, self.fip_pool_nova]})
        self.mox.ReplayAll()
        pools = api.get_floating_ip_pools(self.context)
        expected = [self.fip_pool['name'], self.fip_pool_nova['name']]
        self.assertEqual(expected, pools)
def _get_expected_fip_model(self, fip_data, idx=0):
expected = {'id': fip_data['id'],
'address': fip_data['floating_ip_address'],
'pool': self.fip_pool['name'],
'project_id': fip_data['tenant_id'],
'fixed_ip_id': fip_data['port_id'],
'fixed_ip':
{'address': fip_data['fixed_ip_address']},
'instance': ({'uuid': self.port_data2[idx]['device_id']}
if fip_data['port_id']
else None)}
if expected['instance'] is not None:
expected['fixed_ip']['instance_uuid'] = \
expected['instance']['uuid']
return expected
    def _test_get_floating_ip(self, fip_data, idx=0, by_address=False):
        """Exercise get_floating_ip / get_floating_ip_by_address and check
        the returned model against _get_expected_fip_model.

        :param fip_data: neutron floatingip fixture to return from the mock.
        :param idx: index into ``self.port_data2`` for the attached port.
        :param by_address: lookup by IP address instead of by id.
        """
        api = neutronapi.API()
        fip_id = fip_data['id']
        net_id = fip_data['floating_network_id']
        address = fip_data['floating_ip_address']
        if by_address:
            self.moxed_client.list_floatingips(floating_ip_address=address).\
                AndReturn({'floatingips': [fip_data]})
        else:
            self.moxed_client.show_floatingip(fip_id).\
                AndReturn({'floatingip': fip_data})
        self.moxed_client.show_network(net_id).\
            AndReturn({'network': self.fip_pool})
        if fip_data['port_id']:
            self.moxed_client.show_port(fip_data['port_id']).\
                AndReturn({'port': self.port_data2[idx]})
        self.mox.ReplayAll()

        expected = self._get_expected_fip_model(fip_data, idx)

        if by_address:
            fip = api.get_floating_ip_by_address(self.context, address)
        else:
            fip = api.get_floating_ip(self.context, fip_id)
        self.assertEqual(expected, fip)
    def test_get_floating_ip_unassociated(self):
        """Lookup by id of a floating IP with no port attached."""
        self._test_get_floating_ip(self.fip_unassociated, idx=0)
    def test_get_floating_ip_associated(self):
        """Lookup by id of a floating IP attached to a port."""
        self._test_get_floating_ip(self.fip_associated, idx=1)
    def test_get_floating_ip_by_address(self):
        """Lookup by address of an unassociated floating IP."""
        self._test_get_floating_ip(self.fip_unassociated, idx=0,
                                   by_address=True)
    def test_get_floating_ip_by_address_associated(self):
        """Lookup by address of an associated floating IP."""
        self._test_get_floating_ip(self.fip_associated, idx=1,
                                   by_address=True)
    def test_get_floating_ip_by_address_not_found(self):
        """An empty list_floatingips result raises
        FloatingIpNotFoundForAddress.
        """
        api = neutronapi.API()
        address = self.fip_unassociated['floating_ip_address']
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': []})
        self.mox.ReplayAll()
        self.assertRaises(exception.FloatingIpNotFoundForAddress,
                          api.get_floating_ip_by_address,
                          self.context, address)
    def test_get_floating_ip_by_id_not_found(self):
        """A neutron 404 on show_floatingip maps to FloatingIpNotFound."""
        api = neutronapi.API()
        NeutronNotFound = exceptions.NeutronClientException(status_code=404)
        floating_ip_id = self.fip_unassociated['id']
        self.moxed_client.show_floatingip(floating_ip_id).\
            AndRaise(NeutronNotFound)
        self.mox.ReplayAll()
        self.assertRaises(exception.FloatingIpNotFound,
                          api.get_floating_ip,
                          self.context, floating_ip_id)
    def test_get_floating_ip_raises_non404(self):
        """Non-404 neutron client errors propagate unchanged."""
        api = neutronapi.API()
        # status_code=0 stands in for "any non-404 client error".
        NeutronNotFound = exceptions.NeutronClientException(status_code=0)
        floating_ip_id = self.fip_unassociated['id']
        self.moxed_client.show_floatingip(floating_ip_id).\
            AndRaise(NeutronNotFound)
        self.mox.ReplayAll()
        self.assertRaises(exceptions.NeutronClientException,
                          api.get_floating_ip,
                          self.context, floating_ip_id)
    def test_get_floating_ip_by_address_multiple_found(self):
        """More than one floatingip for an address raises
        FloatingIpMultipleFoundForAddress.
        """
        api = neutronapi.API()
        address = self.fip_unassociated['floating_ip_address']
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_unassociated] * 2})
        self.mox.ReplayAll()
        self.assertRaises(exception.FloatingIpMultipleFoundForAddress,
                          api.get_floating_ip_by_address,
                          self.context, address)
    def test_get_floating_ips_by_project(self):
        """All of the project's floating IPs are returned as nova models."""
        api = neutronapi.API()
        project_id = self.context.project_id
        self.moxed_client.list_floatingips(tenant_id=project_id).\
            AndReturn({'floatingips': [self.fip_unassociated,
                                       self.fip_associated]})
        search_opts = {'router:external': True}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool, self.fip_pool_nova]})
        self.moxed_client.list_ports(tenant_id=project_id).\
                AndReturn({'ports': self.port_data2})
        self.mox.ReplayAll()

        expected = [self._get_expected_fip_model(self.fip_unassociated),
                    self._get_expected_fip_model(self.fip_associated, idx=1)]
        fips = api.get_floating_ips_by_project(self.context)
        self.assertEqual(expected, fips)
    def _test_get_instance_id_by_floating_address(self, fip_data,
                                                  associated=False):
        """Check get_instance_id_by_floating_address.

        Returns the device_id of the attached port when ``associated`` is
        True, otherwise None.
        """
        api = neutronapi.API()
        address = fip_data['floating_ip_address']
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [fip_data]})
        if associated:
            self.moxed_client.show_port(fip_data['port_id']).\
                AndReturn({'port': self.port_data2[1]})
        self.mox.ReplayAll()

        if associated:
            expected = self.port_data2[1]['device_id']
        else:
            expected = None
        fip = api.get_instance_id_by_floating_address(self.context, address)
        self.assertEqual(expected, fip)
    def test_get_instance_id_by_floating_address(self):
        """Unassociated floating IP maps to no instance."""
        self._test_get_instance_id_by_floating_address(self.fip_unassociated)
    def test_get_instance_id_by_floating_address_associated(self):
        """Associated floating IP maps to the port's device_id."""
        self._test_get_instance_id_by_floating_address(self.fip_associated,
                                                       associated=True)
    def test_allocate_floating_ip(self):
        """Allocation by pool name creates a floatingip on that network."""
        api = neutronapi.API()
        pool_name = self.fip_pool['name']
        pool_id = self.fip_pool['id']
        search_opts = {'router:external': True,
                       'fields': 'id',
                       'name': pool_name}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool]})
        self.moxed_client.create_floatingip(
            {'floatingip': {'floating_network_id': pool_id}}).\
            AndReturn({'floatingip': self.fip_unassociated})
        self.mox.ReplayAll()
        fip = api.allocate_floating_ip(self.context, 'ext_net')
        self.assertEqual(fip, self.fip_unassociated['floating_ip_address'])
    def test_allocate_floating_ip_addr_gen_fail(self):
        """IpAddressGenerationFailureClient maps to NoMoreFloatingIps."""
        api = neutronapi.API()
        pool_name = self.fip_pool['name']
        pool_id = self.fip_pool['id']
        search_opts = {'router:external': True,
                       'fields': 'id',
                       'name': pool_name}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool]})
        self.moxed_client.create_floatingip(
            {'floatingip': {'floating_network_id': pool_id}}).\
            AndRaise(exceptions.IpAddressGenerationFailureClient)
        self.mox.ReplayAll()
        self.assertRaises(exception.NoMoreFloatingIps,
                          api.allocate_floating_ip, self.context, 'ext_net')
    def test_allocate_floating_ip_exhausted_fail(self):
        """ExternalIpAddressExhaustedClient maps to NoMoreFloatingIps."""
        api = neutronapi.API()
        pool_name = self.fip_pool['name']
        pool_id = self.fip_pool['id']
        search_opts = {'router:external': True,
                       'fields': 'id',
                       'name': pool_name}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool]})
        self.moxed_client.create_floatingip(
            {'floatingip': {'floating_network_id': pool_id}}).\
            AndRaise(exceptions.ExternalIpAddressExhaustedClient)
        self.mox.ReplayAll()
        self.assertRaises(exception.NoMoreFloatingIps,
                          api.allocate_floating_ip, self.context, 'ext_net')
    def test_allocate_floating_ip_with_pool_id(self):
        """Allocation works when the pool is given by network id."""
        api = neutronapi.API()
        pool_id = self.fip_pool['id']
        search_opts = {'router:external': True,
                       'fields': 'id',
                       'id': pool_id}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool]})
        self.moxed_client.create_floatingip(
            {'floatingip': {'floating_network_id': pool_id}}).\
            AndReturn({'floatingip': self.fip_unassociated})
        self.mox.ReplayAll()
        fip = api.allocate_floating_ip(self.context, pool_id)
        self.assertEqual(fip, self.fip_unassociated['floating_ip_address'])
    def test_allocate_floating_ip_with_default_pool(self):
        """Omitting the pool argument falls back to the configured default
        pool.
        """
        api = neutronapi.API()
        pool_name = self.fip_pool_nova['name']
        pool_id = self.fip_pool_nova['id']
        search_opts = {'router:external': True,
                       'fields': 'id',
                       'name': pool_name}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool_nova]})
        self.moxed_client.create_floatingip(
            {'floatingip': {'floating_network_id': pool_id}}).\
            AndReturn({'floatingip': self.fip_unassociated})
        self.mox.ReplayAll()
        fip = api.allocate_floating_ip(self.context)
        self.assertEqual(fip, self.fip_unassociated['floating_ip_address'])
    def test_release_floating_ip(self):
        """Releasing an unassociated floating IP deletes the floatingip."""
        api = neutronapi.API()
        address = self.fip_unassociated['floating_ip_address']
        fip_id = self.fip_unassociated['id']

        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_unassociated]})
        self.moxed_client.delete_floatingip(fip_id)
        self.mox.ReplayAll()
        api.release_floating_ip(self.context, address)
    def test_disassociate_and_release_floating_ip(self):
        """disassociate_and_release deletes the floatingip; with neutron the
        delete implicitly disassociates, so no separate update is expected.
        """
        api = neutronapi.API()
        address = self.fip_unassociated['floating_ip_address']
        fip_id = self.fip_unassociated['id']
        floating_ip = {'address': address}

        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_unassociated]})
        self.moxed_client.delete_floatingip(fip_id)
        self.mox.ReplayAll()
        api.disassociate_and_release_floating_ip(self.context, None,
                                               floating_ip)
    def test_release_floating_ip_associated(self):
        """Releasing an IP still attached to a port raises
        FloatingIpAssociated.
        """
        api = neutronapi.API()
        address = self.fip_associated['floating_ip_address']

        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_associated]})
        self.mox.ReplayAll()
        self.assertRaises(exception.FloatingIpAssociated,
                          api.release_floating_ip, self.context, address)
    def _setup_mock_for_refresh_cache(self, api, instances):
        """Stub out the info-cache refresh path for each instance.

        Replaces _get_instance_nw_info and the DB cache update so the tests
        only verify the refresh is triggered, not its contents.
        """
        nw_info = model.NetworkInfo()
        self.mox.StubOutWithMock(api, '_get_instance_nw_info')
        self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
        for instance in instances:
            api._get_instance_nw_info(mox.IgnoreArg(), instance).\
                AndReturn(nw_info)
            api.db.instance_info_cache_update(mox.IgnoreArg(),
                                              instance['uuid'],
                                              mox.IgnoreArg()).AndReturn(
                                                  fake_info_cache)
    def test_associate_floating_ip(self):
        """Associating a floating IP updates the floatingip with the
        instance's port and fixed address, then refreshes the info cache.
        """
        api = neutronapi.API()
        address = self.fip_unassociated['floating_ip_address']
        fixed_address = self.port_address2
        fip_id = self.fip_unassociated['id']
        instance = self._fake_instance_object(self.instance)

        search_opts = {'device_owner': 'compute:nova',
                       'device_id': instance.uuid}
        self.moxed_client.list_ports(**search_opts).\
            AndReturn({'ports': [self.port_data2[1]]})
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_unassociated]})
        self.moxed_client.update_floatingip(
            fip_id, {'floatingip': {'port_id': self.fip_associated['port_id'],
                                    'fixed_ip_address': fixed_address}})
        self._setup_mock_for_refresh_cache(api, [instance])

        self.mox.ReplayAll()
        api.associate_floating_ip(self.context, instance,
                                  address, fixed_address)
    @mock.patch('nova.objects.Instance.get_by_uuid')
    def test_reassociate_floating_ip(self, mock_get):
        """Re-associating an already-attached floating IP moves it to the
        new port and refreshes the caches of both the old and new instance.
        """
        api = neutronapi.API()
        address = self.fip_associated['floating_ip_address']
        new_fixed_address = self.port_address
        fip_id = self.fip_associated['id']

        search_opts = {'device_owner': 'compute:nova',
                       'device_id': self.instance2['uuid']}
        self.moxed_client.list_ports(**search_opts).\
            AndReturn({'ports': [self.port_data2[0]]})
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_associated]})
        self.moxed_client.update_floatingip(
            fip_id, {'floatingip': {'port_id': 'my_portid1',
                                    'fixed_ip_address': new_fixed_address}})
        self.moxed_client.show_port(self.fip_associated['port_id']).\
                AndReturn({'port': self.port_data2[1]})

        mock_get.return_value = fake_instance.fake_instance_obj(
            self.context, **self.instance)
        instance2 = self._fake_instance_object(self.instance2)
        self._setup_mock_for_refresh_cache(api, [mock_get.return_value,
                                                 instance2])

        self.mox.ReplayAll()
        api.associate_floating_ip(self.context, instance2,
                                  address, new_fixed_address)
    def test_associate_floating_ip_not_found_fixed_ip(self):
        """Association fails with FixedIpNotFoundForAddress when the given
        fixed IP is not on any of the instance's ports.
        """
        instance = self._fake_instance_object(self.instance)
        api = neutronapi.API()
        address = self.fip_associated['floating_ip_address']
        fixed_address = self.fip_associated['fixed_ip_address']

        search_opts = {'device_owner': 'compute:nova',
                       'device_id': self.instance['uuid']}
        self.moxed_client.list_ports(**search_opts).\
            AndReturn({'ports': [self.port_data2[0]]})

        self.mox.ReplayAll()
        self.assertRaises(exception.FixedIpNotFoundForAddress,
                          api.associate_floating_ip, self.context,
                          instance, address, fixed_address)
    def test_disassociate_floating_ip(self):
        """Disassociation clears the floatingip's port_id and refreshes the
        instance's info cache.
        """
        instance = self._fake_instance_object(self.instance)
        api = neutronapi.API()
        address = self.fip_associated['floating_ip_address']
        fip_id = self.fip_associated['id']

        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_associated]})
        self.moxed_client.update_floatingip(
            fip_id, {'floatingip': {'port_id': None}})
        self._setup_mock_for_refresh_cache(api, [instance])

        self.mox.ReplayAll()
        api.disassociate_floating_ip(self.context, instance, address)
    def test_add_fixed_ip_to_instance(self):
        """Adding a fixed IP appends a fixed_ips entry to the instance's
        port on the target network and refreshes the info cache.
        """
        instance = self._fake_instance_object(self.instance)
        api = neutronapi.API()
        self._setup_mock_for_refresh_cache(api, [instance])
        network_id = 'my_netid1'
        search_opts = {'network_id': network_id}
        self.moxed_client.list_subnets(
            **search_opts).AndReturn({'subnets': self.subnet_data_n})

        search_opts = {'device_id': instance.uuid,
                       'device_owner': 'compute:nova',
                       'network_id': network_id}
        self.moxed_client.list_ports(
            **search_opts).AndReturn({'ports': self.port_data1})
        port_req_body = {
            'port': {
                'fixed_ips': [{'subnet_id': 'my_subid1'},
                              {'subnet_id': 'my_subid1'}],
            },
        }
        port = self.port_data1[0]
        port['fixed_ips'] = [{'subnet_id': 'my_subid1'}]
        self.moxed_client.update_port('my_portid1',
            MyComparator(port_req_body)).AndReturn({'port': port})

        self.mox.ReplayAll()
        api.add_fixed_ip_to_instance(self.context,
                                     instance,
                                     network_id)
    def test_remove_fixed_ip_from_instance(self):
        """Removing a fixed IP clears the matching port's fixed_ips and
        refreshes the info cache.
        """
        instance = self._fake_instance_object(self.instance)
        api = neutronapi.API()
        self._setup_mock_for_refresh_cache(api, [instance])
        address = '10.0.0.3'
        zone = 'compute:%s' % self.instance['availability_zone']
        search_opts = {'device_id': self.instance['uuid'],
                       'device_owner': zone,
                       'fixed_ips': 'ip_address=%s' % address}
        self.moxed_client.list_ports(
            **search_opts).AndReturn({'ports': self.port_data1})
        port_req_body = {
            'port': {
                'fixed_ips': [],
            },
        }
        port = self.port_data1[0]
        port['fixed_ips'] = []
        self.moxed_client.update_port('my_portid1',
            MyComparator(port_req_body)).AndReturn({'port': port})

        self.mox.ReplayAll()
        api.remove_fixed_ip_from_instance(self.context, instance,
                                          address)
    def test_list_floating_ips_without_l3_support(self):
        """A 404 from list_floatingips (no L3 extension) yields an empty
        list instead of an error.
        """
        api = neutronapi.API()
        NeutronNotFound = exceptions.NeutronClientException(
            status_code=404)
        self.moxed_client.list_floatingips(
            fixed_ip_address='1.1.1.1', port_id=1).AndRaise(NeutronNotFound)
        self.mox.ReplayAll()
        neutronapi.get_client('fake')
        floatingips = api._get_floating_ips_by_fixed_and_port(
            self.moxed_client, '1.1.1.1', 1)
        self.assertEqual(floatingips, [])
    def test_nw_info_get_ips(self):
        """_nw_info_get_ips attaches each fixed IP's floating IPs."""
        fake_port = {
            'fixed_ips': [
                {'ip_address': '1.1.1.1'}],
            'id': 'port-id',
            }
        api = neutronapi.API()
        self.mox.StubOutWithMock(api, '_get_floating_ips_by_fixed_and_port')
        api._get_floating_ips_by_fixed_and_port(
            self.moxed_client, '1.1.1.1', 'port-id').AndReturn(
                [{'floating_ip_address': '10.0.0.1'}])
        self.mox.ReplayAll()
        neutronapi.get_client('fake')
        result = api._nw_info_get_ips(self.moxed_client, fake_port)
        self.assertEqual(len(result), 1)
        self.assertEqual(result[0]['address'], '1.1.1.1')
        self.assertEqual(result[0]['floating_ips'][0]['address'], '10.0.0.1')
    def test_nw_info_get_subnets(self):
        """_nw_info_get_subnets keeps only the fixed IPs that fall inside
        each subnet's CIDR (2.2.2.2 is outside 1.0.0.0/8).
        """
        fake_port = {
            'fixed_ips': [
                {'ip_address': '1.1.1.1'},
                {'ip_address': '2.2.2.2'}],
            'id': 'port-id',
            }
        fake_subnet = model.Subnet(cidr='1.0.0.0/8')
        fake_ips = [model.IP(x['ip_address']) for x in fake_port['fixed_ips']]
        api = neutronapi.API()
        self.mox.StubOutWithMock(api, '_get_subnets_from_port')
        api._get_subnets_from_port(self.context, fake_port).AndReturn(
            [fake_subnet])
        self.mox.ReplayAll()
        neutronapi.get_client('fake')
        subnets = api._nw_info_get_subnets(self.context, fake_port, fake_ips)
        self.assertEqual(len(subnets), 1)
        self.assertEqual(len(subnets[0]['ips']), 1)
        self.assertEqual(subnets[0]['ips'][0]['address'], '1.1.1.1')
    def _test_nw_info_build_network(self, vif_type):
        """Run _nw_info_build_network for a port with the given vif_type.

        Asserts the common network fields, then returns (net, iid) so the
        per-vif-type tests can check bridge/interface-id behavior.
        """
        fake_port = {
            'fixed_ips': [{'ip_address': '1.1.1.1'}],
            'id': 'port-id',
            'network_id': 'net-id',
            'binding:vif_type': vif_type,
            }
        fake_subnets = [model.Subnet(cidr='1.0.0.0/8')]
        fake_nets = [{'id': 'net-id', 'name': 'foo', 'tenant_id': 'tenant'}]
        api = neutronapi.API()
        self.mox.ReplayAll()
        neutronapi.get_client('fake')
        net, iid = api._nw_info_build_network(fake_port, fake_nets,
                                              fake_subnets)
        self.assertEqual(net['subnets'], fake_subnets)
        self.assertEqual(net['id'], 'net-id')
        self.assertEqual(net['label'], 'foo')
        self.assertEqual(net.get_meta('tenant_id'), 'tenant')
        self.assertEqual(net.get_meta('injected'), CONF.flat_injected)
        return net, iid
    def test_nw_info_build_network_ovs(self):
        """OVS vif: bridge is the configured OVS bridge, iid is the port."""
        net, iid = self._test_nw_info_build_network(model.VIF_TYPE_OVS)
        self.assertEqual(net['bridge'], CONF.neutron.ovs_bridge)
        self.assertNotIn('should_create_bridge', net)
        self.assertEqual(iid, 'port-id')
    def test_nw_info_build_network_dvs(self):
        """DVS vif: bridge is the network id, no OVS interface id."""
        net, iid = self._test_nw_info_build_network(model.VIF_TYPE_DVS)
        self.assertEqual('net-id', net['bridge'])
        self.assertNotIn('should_create_bridge', net)
        self.assertNotIn('ovs_interfaceid', net)
        self.assertIsNone(iid)
    def test_nw_info_build_network_bridge(self):
        """Linux-bridge vif: 'brq' + net id, and nova must create it."""
        net, iid = self._test_nw_info_build_network(model.VIF_TYPE_BRIDGE)
        self.assertEqual(net['bridge'], 'brqnet-id')
        self.assertTrue(net['should_create_bridge'])
        self.assertIsNone(iid)
    def test_nw_info_build_network_tap(self):
        """TAP vif: no bridge at all."""
        net, iid = self._test_nw_info_build_network(model.VIF_TYPE_TAP)
        self.assertIsNone(net['bridge'])
        self.assertNotIn('should_create_bridge', net)
        self.assertIsNone(iid)
    def test_nw_info_build_network_other(self):
        """Unknown vif type: behaves like TAP (no bridge, no iid)."""
        net, iid = self._test_nw_info_build_network(None)
        self.assertIsNone(net['bridge'])
        self.assertNotIn('should_create_bridge', net)
        self.assertIsNone(iid)
def test_nw_info_build_no_match(self):
fake_port = {
'fixed_ips': [{'ip_address': '1.1.1.1'}],
'id': 'port-id',
'network_id': 'net-id1',
'tenant_id': 'tenant',
'binding:vif_type': model.VIF_TYPE_OVS,
}
fake_subnets = [model.Subnet(cidr='1.0.0.0/8')]
fake_nets = [{'id': 'net-id2', 'name': 'foo', 'tenant_id': 'tenant'}]
api = neutronapi.API()
self.mox.ReplayAll()
neutronapi.get_client('fake')
net, iid = api._nw_info_build_network(fake_port, fake_nets,
fake_subnets)
self.assertEqual(fake_subnets, net['subnets'])
self.assertEqual('net-id1', net['id'])
self.assertEqual('net-id1', net['id'])
self.assertEqual('tenant', net['meta']['tenant_id'])
    def test_nw_info_build_network_vhostuser(self):
        """vhost-user vif with OVS plug: treated like an OVS port (OVS
        bridge, port id as interface id).
        """
        fake_port = {
            'fixed_ips': [{'ip_address': '1.1.1.1'}],
            'id': 'port-id',
            'network_id': 'net-id',
            'binding:vif_type': model.VIF_TYPE_VHOSTUSER,
            'binding:vif_details': {
                    model.VIF_DETAILS_VHOSTUSER_OVS_PLUG: True
                                    }
            }
        fake_subnets = [model.Subnet(cidr='1.0.0.0/8')]
        fake_nets = [{'id': 'net-id', 'name': 'foo', 'tenant_id': 'tenant'}]
        api = neutronapi.API()
        self.mox.ReplayAll()
        neutronapi.get_client('fake')
        net, iid = api._nw_info_build_network(fake_port, fake_nets,
                                              fake_subnets)
        self.assertEqual(net['subnets'], fake_subnets)
        self.assertEqual(net['id'], 'net-id')
        self.assertEqual(net['label'], 'foo')
        self.assertEqual(net.get_meta('tenant_id'), 'tenant')
        self.assertEqual(net.get_meta('injected'), CONF.flat_injected)
        self.assertEqual(net['bridge'], CONF.neutron.ovs_bridge)
        self.assertNotIn('should_create_bridge', net)
        self.assertEqual(iid, 'port-id')
    def test_build_network_info_model(self):
        """End-to-end check of _build_network_info_model.

        Feeds six matching ports (plus one on an unknown network that must
        be ignored) and verifies per-VIF ordering, active state, vnic type,
        binding details/profile, and preserve_on_delete for both
        caller-supplied and API-detected preexisting ports.
        """
        api = neutronapi.API()
        fake_inst = objects.Instance()
        fake_inst.project_id = 'fake'
        fake_inst.uuid = 'uuid'
        fake_inst.info_cache = objects.InstanceInfoCache()
        fake_inst.info_cache.network_info = model.NetworkInfo()
        fake_ports = [
            # admin_state_up=True and status='ACTIVE' thus vif.active=True
            {'id': 'port1',
             'network_id': 'net-id',
             'admin_state_up': True,
             'status': 'ACTIVE',
             'fixed_ips': [{'ip_address': '1.1.1.1'}],
             'mac_address': 'de:ad:be:ef:00:01',
             'binding:vif_type': model.VIF_TYPE_BRIDGE,
             'binding:vnic_type': model.VNIC_TYPE_NORMAL,
             'binding:vif_details': {},
             },
            # admin_state_up=False and status='DOWN' thus vif.active=True
            {'id': 'port2',
             'network_id': 'net-id',
             'admin_state_up': False,
             'status': 'DOWN',
             'fixed_ips': [{'ip_address': '1.1.1.1'}],
             'mac_address': 'de:ad:be:ef:00:02',
             'binding:vif_type': model.VIF_TYPE_BRIDGE,
             'binding:vnic_type': model.VNIC_TYPE_NORMAL,
             'binding:vif_details': {},
             },
            # admin_state_up=True and status='DOWN' thus vif.active=False
             {'id': 'port0',
             'network_id': 'net-id',
             'admin_state_up': True,
             'status': 'DOWN',
             'fixed_ips': [{'ip_address': '1.1.1.1'}],
             'mac_address': 'de:ad:be:ef:00:03',
             'binding:vif_type': model.VIF_TYPE_BRIDGE,
             'binding:vnic_type': model.VNIC_TYPE_NORMAL,
             'binding:vif_details': {},
             },
            # admin_state_up=True and status='ACTIVE' thus vif.active=True
            {'id': 'port3',
             'network_id': 'net-id',
             'admin_state_up': True,
             'status': 'ACTIVE',
             'fixed_ips': [{'ip_address': '1.1.1.1'}],
             'mac_address': 'de:ad:be:ef:00:04',
             'binding:vif_type': model.VIF_TYPE_HW_VEB,
             'binding:vnic_type': model.VNIC_TYPE_DIRECT,
             'binding:profile': {'pci_vendor_info': '1137:0047',
                                 'pci_slot': '0000:0a:00.1',
                                 'physical_network': 'phynet1'},
             'binding:vif_details': {model.VIF_DETAILS_PROFILEID: 'pfid'},
             },
            # admin_state_up=True and status='ACTIVE' thus vif.active=True
            {'id': 'port4',
             'network_id': 'net-id',
             'admin_state_up': True,
             'status': 'ACTIVE',
             'fixed_ips': [{'ip_address': '1.1.1.1'}],
             'mac_address': 'de:ad:be:ef:00:05',
             'binding:vif_type': model.VIF_TYPE_802_QBH,
             'binding:vnic_type': model.VNIC_TYPE_MACVTAP,
             'binding:profile': {'pci_vendor_info': '1137:0047',
                                 'pci_slot': '0000:0a:00.2',
                                 'physical_network': 'phynet1'},
             'binding:vif_details': {model.VIF_DETAILS_PROFILEID: 'pfid'},
             },
            # admin_state_up=True and status='ACTIVE' thus vif.active=True
            # This port has no binding:vnic_type to verify default is assumed
            {'id': 'port5',
             'network_id': 'net-id',
             'admin_state_up': True,
             'status': 'ACTIVE',
             'fixed_ips': [{'ip_address': '1.1.1.1'}],
             'mac_address': 'de:ad:be:ef:00:06',
             'binding:vif_type': model.VIF_TYPE_BRIDGE,
             # No binding:vnic_type
             'binding:vif_details': {},
             },
            # This does not match the networks we provide below,
            # so it should be ignored (and is here to verify that)
            {'id': 'port6',
             'network_id': 'other-net-id',
             'admin_state_up': True,
             'status': 'DOWN',
             'binding:vnic_type': model.VNIC_TYPE_NORMAL,
             },
            ]
        fake_subnets = [model.Subnet(cidr='1.0.0.0/8')]
        fake_nets = [
            {'id': 'net-id',
             'name': 'foo',
             'tenant_id': 'fake',
             }
            ]
        neutronapi.get_client(mox.IgnoreArg(), admin=True).MultipleTimes(
            ).AndReturn(self.moxed_client)
        self.moxed_client.list_ports(
            tenant_id='fake', device_id='uuid').AndReturn(
                {'ports': fake_ports})

        self.mox.StubOutWithMock(api, '_get_floating_ips_by_fixed_and_port')
        self.mox.StubOutWithMock(api, '_get_subnets_from_port')
        # Requested order intentionally differs from fake_ports order to
        # prove the result follows the requested port-id ordering.
        requested_ports = [fake_ports[2], fake_ports[0], fake_ports[1],
                           fake_ports[3], fake_ports[4], fake_ports[5]]
        for requested_port in requested_ports:
            api._get_floating_ips_by_fixed_and_port(
                self.moxed_client, '1.1.1.1', requested_port['id']).AndReturn(
                    [{'floating_ip_address': '10.0.0.1'}])
        for requested_port in requested_ports:
            api._get_subnets_from_port(self.context, requested_port
                                       ).AndReturn(fake_subnets)

        self.mox.StubOutWithMock(api, '_get_preexisting_port_ids')
        api._get_preexisting_port_ids(fake_inst).AndReturn(['port5'])
        self.mox.ReplayAll()
        neutronapi.get_client('fake')
        fake_inst.info_cache = objects.InstanceInfoCache.new(
            self.context, 'fake-uuid')
        fake_inst.info_cache.network_info = model.NetworkInfo.hydrate([])
        nw_infos = api._build_network_info_model(
            self.context, fake_inst,
            fake_nets,
            [fake_ports[2]['id'],
             fake_ports[0]['id'],
             fake_ports[1]['id'],
             fake_ports[3]['id'],
             fake_ports[4]['id'],
             fake_ports[5]['id']],
            preexisting_port_ids=['port3'])

        self.assertEqual(len(nw_infos), 6)
        index = 0
        for nw_info in nw_infos:
            self.assertEqual(nw_info['address'],
                             requested_ports[index]['mac_address'])
            self.assertEqual(nw_info['devname'], 'tapport' + str(index))
            self.assertIsNone(nw_info['ovs_interfaceid'])
            self.assertEqual(nw_info['type'],
                             requested_ports[index]['binding:vif_type'])
            if nw_info['type'] == model.VIF_TYPE_BRIDGE:
                self.assertEqual(nw_info['network']['bridge'], 'brqnet-id')
            self.assertEqual(nw_info['vnic_type'],
                             requested_ports[index].get('binding:vnic_type',
                                 model.VNIC_TYPE_NORMAL))
            self.assertEqual(nw_info.get('details'),
                             requested_ports[index].get('binding:vif_details'))
            self.assertEqual(nw_info.get('profile'),
                             requested_ports[index].get('binding:profile'))
            index += 1

        self.assertEqual(nw_infos[0]['active'], False)
        self.assertEqual(nw_infos[1]['active'], True)
        self.assertEqual(nw_infos[2]['active'], True)
        self.assertEqual(nw_infos[3]['active'], True)
        self.assertEqual(nw_infos[4]['active'], True)
        self.assertEqual(nw_infos[5]['active'], True)

        self.assertEqual(nw_infos[0]['id'], 'port0')
        self.assertEqual(nw_infos[1]['id'], 'port1')
        self.assertEqual(nw_infos[2]['id'], 'port2')
        self.assertEqual(nw_infos[3]['id'], 'port3')
        self.assertEqual(nw_infos[4]['id'], 'port4')
        self.assertEqual(nw_infos[5]['id'], 'port5')

        # port3 was passed in by the caller, port5 was detected by the API;
        # both must survive instance deletion.
        self.assertFalse(nw_infos[0]['preserve_on_delete'])
        self.assertFalse(nw_infos[1]['preserve_on_delete'])
        self.assertFalse(nw_infos[2]['preserve_on_delete'])
        self.assertTrue(nw_infos[3]['preserve_on_delete'])
        self.assertFalse(nw_infos[4]['preserve_on_delete'])
        self.assertTrue(nw_infos[5]['preserve_on_delete'])
    @mock.patch('nova.network.neutronv2.api.API._nw_info_get_subnets')
    @mock.patch('nova.network.neutronv2.api.API._nw_info_get_ips')
    @mock.patch('nova.network.neutronv2.api.API._nw_info_build_network')
    @mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
    @mock.patch('nova.network.neutronv2.api.API._gather_port_ids_and_networks')
    def test_build_network_info_model_empty(
            self, mock_gather_port_ids_and_networks,
            mock_get_preexisting_port_ids,
            mock_nw_info_build_network,
            mock_nw_info_get_ips,
            mock_nw_info_get_subnets):
        """With no networks/port_ids arguments, the model is built from the
        ports neutron reports for the instance (one VIF here).
        """
        api = neutronapi.API()

        fake_inst = objects.Instance()
        fake_inst.project_id = 'fake'
        fake_inst.uuid = 'uuid'
        fake_inst.info_cache = objects.InstanceInfoCache()
        fake_inst.info_cache.network_info = model.NetworkInfo()
        fake_ports = [
            # admin_state_up=True and status='ACTIVE' thus vif.active=True
            {'id': 'port1',
             'network_id': 'net-id',
             'admin_state_up': True,
             'status': 'ACTIVE',
             'fixed_ips': [{'ip_address': '1.1.1.1'}],
             'mac_address': 'de:ad:be:ef:00:01',
             'binding:vif_type': model.VIF_TYPE_BRIDGE,
             'binding:vnic_type': model.VNIC_TYPE_NORMAL,
             'binding:vif_details': {},
             },
            ]
        fake_subnets = [model.Subnet(cidr='1.0.0.0/8')]

        neutronapi.get_client(mox.IgnoreArg(), admin=True).MultipleTimes(
            ).AndReturn(self.moxed_client)
        self.moxed_client.list_ports(
            tenant_id='fake', device_id='uuid').AndReturn(
                {'ports': fake_ports})

        mock_gather_port_ids_and_networks.return_value = (None, None)
        mock_get_preexisting_port_ids.return_value = []
        mock_nw_info_build_network.return_value = (None, None)
        mock_nw_info_get_ips.return_value = []
        mock_nw_info_get_subnets.return_value = fake_subnets

        self.mox.ReplayAll()
        neutronapi.get_client('fake')

        nw_infos = api._build_network_info_model(
            self.context, fake_inst)
        self.assertEqual(len(nw_infos), 1)
    def test_get_subnets_from_port(self):
        """Subnet host_routes are converted into subnet 'routes' entries."""
        api = neutronapi.API()
        port_data = copy.copy(self.port_data1[0])
        subnet_data1 = copy.copy(self.subnet_data1)
        # Give the subnet a single static host route to verify conversion.
        subnet_data1[0]['host_routes'] = [
            {'destination': '192.168.0.0/24', 'nexthop': '1.0.0.10'}
        ]
        self.moxed_client.list_subnets(
            id=[port_data['fixed_ips'][0]['subnet_id']]
        ).AndReturn({'subnets': subnet_data1})
        self.moxed_client.list_ports(
            network_id=subnet_data1[0]['network_id'],
            device_owner='network:dhcp').AndReturn({'ports': []})
        self.mox.ReplayAll()
        subnets = api._get_subnets_from_port(self.context, port_data)
        self.assertEqual(len(subnets), 1)
        self.assertEqual(len(subnets[0]['routes']), 1)
        # destination -> route cidr, nexthop -> route gateway address
        self.assertEqual(subnets[0]['routes'][0]['cidr'],
                         subnet_data1[0]['host_routes'][0]['destination'])
        self.assertEqual(subnets[0]['routes'][0]['gateway']['address'],
                         subnet_data1[0]['host_routes'][0]['nexthop'])
    def test_get_all_empty_list_networks(self):
        """get_all returns an empty NetworkList when neutron has no nets."""
        api = neutronapi.API()
        self.moxed_client.list_networks().AndReturn({'networks': []})
        self.mox.ReplayAll()
        networks = api.get_all(self.context)
        self.assertIsInstance(networks, objects.NetworkList)
        self.assertEqual(0, len(networks))
    @mock.patch.object(neutronapi, 'get_client', return_value=mock.Mock())
    def test_get_port_vnic_info_1(self, mock_get_client):
        """A direct-vnic port reports its network's physical network."""
        api = neutronapi.API()
        self.mox.ResetAll()
        test_port = {
            'port': {'id': 'my_port_id1',
                     'network_id': 'net-id',
                     'binding:vnic_type': model.VNIC_TYPE_DIRECT,
                     },
            }
        test_net = {'network': {'provider:physical_network': 'phynet1'}}
        mock_client = mock_get_client()
        mock_client.show_port.return_value = test_port
        mock_client.show_network.return_value = test_net
        vnic_type, phynet_name = api._get_port_vnic_info(
            self.context, mock_client, test_port['port']['id'])
        mock_client.show_port.assert_called_once_with(test_port['port']['id'],
            fields=['binding:vnic_type', 'network_id'])
        # The network is only queried for its physical network name.
        mock_client.show_network.assert_called_once_with(
            test_port['port']['network_id'],
            fields='provider:physical_network')
        self.assertEqual(model.VNIC_TYPE_DIRECT, vnic_type)
        self.assertEqual(phynet_name, 'phynet1')
    def _test_get_port_vnic_info(self, mock_get_client,
                                 binding_vnic_type=None):
        """Helper: a port with no (or a normal) vnic type yields
        VNIC_TYPE_NORMAL and an empty physical network name.
        """
        api = neutronapi.API()
        self.mox.ResetAll()
        test_port = {
            'port': {'id': 'my_port_id2',
                     'network_id': 'net-id',
                     },
            }
        if binding_vnic_type:
            test_port['port']['binding:vnic_type'] = binding_vnic_type
        mock_get_client.reset_mock()
        mock_client = mock_get_client()
        mock_client.show_port.return_value = test_port
        vnic_type, phynet_name = api._get_port_vnic_info(
            self.context, mock_client, test_port['port']['id'])
        mock_client.show_port.assert_called_once_with(test_port['port']['id'],
            fields=['binding:vnic_type', 'network_id'])
        self.assertEqual(model.VNIC_TYPE_NORMAL, vnic_type)
        self.assertFalse(phynet_name)
    @mock.patch.object(neutronapi, 'get_client', return_value=mock.Mock())
    def test_get_port_vnic_info_2(self, mock_get_client):
        # Explicit VNIC_TYPE_NORMAL gives the same result as no type at all.
        self._test_get_port_vnic_info(mock_get_client,
                                      binding_vnic_type=model.VNIC_TYPE_NORMAL)
    @mock.patch.object(neutronapi, 'get_client', return_value=mock.Mock())
    def test_get_port_vnic_info_3(self, mock_get_client):
        # No binding:vnic_type key on the port at all.
        self._test_get_port_vnic_info(mock_get_client)
    @mock.patch.object(neutronapi.API, "_get_port_vnic_info")
    @mock.patch.object(neutronapi, 'get_client', return_value=mock.Mock())
    def test_create_pci_requests_for_sriov_ports(self, mock_get_client,
                                                 mock_get_port_vnic_info):
        """Only direct/macvtap ports generate PCI requests; normal ports
        and plain network requests are skipped.
        """
        api = neutronapi.API()
        self.mox.ResetAll()
        requested_networks = objects.NetworkRequestList(
            objects = [
                objects.NetworkRequest(port_id='my_portid1'),
                objects.NetworkRequest(network_id='net1'),
                objects.NetworkRequest(port_id='my_portid2'),
                objects.NetworkRequest(port_id='my_portid3'),
                objects.NetworkRequest(port_id='my_portid4')])
        pci_requests = objects.InstancePCIRequests(requests=[])
        # vnic info per port lookup, in request order (the plain network
        # request does not trigger a lookup).
        mock_get_port_vnic_info.side_effect = [
                (model.VNIC_TYPE_DIRECT, 'phynet1'),
                (model.VNIC_TYPE_NORMAL, ''),
                (model.VNIC_TYPE_MACVTAP, 'phynet1'),
                (model.VNIC_TYPE_MACVTAP, 'phynet2')
            ]
        api.create_pci_requests_for_sriov_ports(
            None, pci_requests, requested_networks)
        self.assertEqual(3, len(pci_requests.requests))
        has_pci_request_id = [net.pci_request_id is not None for net in
                              requested_networks.objects]
        expected_results = [True, False, False, True, True]
        self.assertEqual(expected_results, has_pci_request_id)
class TestNeutronv2WithMock(test.TestCase):
"""Used to test Neutron V2 API with mock."""
    def setUp(self):
        super(TestNeutronv2WithMock, self).setUp()
        self.api = neutronapi.API()
        # Real RequestContext (not a mock) so context plumbing is exercised.
        self.context = context.RequestContext(
            'fake-user', 'fake-project',
            auth_token='bff4a5a6b9eb4ea2a6efec6eefb77936')
@mock.patch('oslo_concurrency.lockutils.lock')
def test_get_instance_nw_info_locks_per_instance(self, mock_lock):
instance = objects.Instance(uuid=uuid.uuid4())
api = neutronapi.API()
mock_lock.side_effect = test.TestingException
self.assertRaises(test.TestingException,
api.get_instance_nw_info, 'context', instance)
mock_lock.assert_called_once_with('refresh_cache-%s' % instance.uuid)
    @mock.patch('nova.network.neutronv2.api.LOG')
    def test_get_instance_nw_info_verify_duplicates_ignored(self, mock_log):
        """test that the returned networks & port_ids from
        _gather_port_ids_and_networks doesn't contain any duplicates
        The test fakes an instance with two ports connected to two networks.
        The _gather_port_ids_and_networks method will be called with the
        instance and a list of port ids of which one port id is configured
        already to the instance (== duplicate #1) and a list of
        networks that already contains a network to which an instance port
        is connected (== duplicate #2).
        All-in-all, we expect the resulting port ids list to contain 3 items
        (["instance_port_1", "port_1", "port_2"]) and the resulting networks
        list to contain 3 items (["net_1", "net_2", "instance_network_1"])
        while the warning message for duplicate items was executed twice
        (due to "duplicate #1" & "duplicate #2")
        """
        networks = [model.Network(id="net_1"),
                    model.Network(id="net_2")]
        port_ids = ["port_1", "port_2"]
        instance_networks = [{"id": "instance_network_1",
                              "name": "fake_network",
                              "tenant_id": "fake_tenant_id"}]
        instance_port_ids = ["instance_port_1"]
        # Cached nw info already contains "port_1"/"net_1" (the duplicates)
        # plus the instance's own port and network.
        network_info = model.NetworkInfo(
            [{'id': port_ids[0],
              'network': networks[0]},
             {'id': instance_port_ids[0],
              'network': model.Network(
                  id=instance_networks[0]["id"],
                  label=instance_networks[0]["name"],
                  meta={"tenant_id": instance_networks[0]["tenant_id"]})}]
        )
        instance_uuid = uuid.uuid4()
        instance = objects.Instance(uuid=instance_uuid,
                                    info_cache=objects.InstanceInfoCache(
                                        context=self.context,
                                        instance_uuid=instance_uuid,
                                        network_info=network_info))
        new_networks, new_port_ids = self.api._gather_port_ids_and_networks(
            self.context, instance, networks, port_ids)
        self.assertEqual(new_networks, networks + instance_networks)
        self.assertEqual(new_port_ids, instance_port_ids + port_ids)
        # One warning per duplicate.
        self.assertEqual(2, mock_log.warning.call_count)
@mock.patch('oslo_concurrency.lockutils.lock')
@mock.patch.object(neutronapi.API, '_get_instance_nw_info')
@mock.patch('nova.network.base_api.update_instance_cache_with_nw_info')
def test_get_instance_nw_info(self, mock_update, mock_get, mock_lock):
fake_result = mock.sentinel.get_nw_info_result
mock_get.return_value = fake_result
instance = fake_instance.fake_instance_obj(self.context)
result = self.api.get_instance_nw_info(self.context, instance)
mock_get.assert_called_once_with(self.context, instance)
mock_update.assert_called_once_with(self.api, self.context, instance,
nw_info=fake_result,
update_cells=False)
self.assertEqual(fake_result, result)
    def _test_validate_networks_fixed_ip_no_dup(self, nets, requested_networks,
                                                ids, list_port_values):
        """Helper: validate_networks succeeds when the requested fixed IPs
        are free.

        list_port_values is a list of (expected list_ports kwargs, canned
        response) pairs; an unexpected list_ports call fails the test.
        """
        def _fake_list_ports(**search_opts):
            for args, return_value in list_port_values:
                if args == search_opts:
                    return return_value
            self.fail('Unexpected call to list_ports %s' % search_opts)
        with contextlib.nested(
            mock.patch.object(client.Client, 'list_ports',
                              side_effect=_fake_list_ports),
            mock.patch.object(client.Client, 'list_networks',
                              return_value={'networks': nets}),
            mock.patch.object(client.Client, 'show_quota',
                              return_value={'quota': {'port': 50}})) as (
                list_ports_mock, list_networks_mock, show_quota_mock):
            self.api.validate_networks(self.context, requested_networks, 1)
            # Every expected list_ports query must have been issued.
            self.assertEqual(len(list_port_values),
                             len(list_ports_mock.call_args_list))
            list_networks_mock.assert_called_once_with(id=ids)
            show_quota_mock.assert_called_once_with(tenant_id='fake-project')
    def test_validate_networks_over_limit_quota(self):
        """Test validates that a relevant exception is being raised when
        there are more ports defined, than there is a quota for it.
        """
        requested_networks = [('my_netid1', '10.0.1.2', None, None),
                              ('my_netid2', '10.0.1.3', None, None)]
        list_port_values = [({'network_id': 'my_netid1',
                              'fixed_ips': 'ip_address=10.0.1.2',
                              'fields': 'device_id'},
                             {'ports': []}),
                            ({'network_id': 'my_netid2',
                              'fixed_ips': 'ip_address=10.0.1.3',
                              'fields': 'device_id'},
                             {'ports': []}),
                            # tenant-wide listing: 5 ports already exist,
                            # but the quota below only allows 1.
                            ({'tenant_id': 'fake-project'},
                             {'ports': [1, 2, 3, 4, 5]})]
        nets = [{'subnets': '1'}, {'subnets': '2'}]
        def _fake_list_ports(**search_opts):
            for args, return_value in list_port_values:
                if args == search_opts:
                    return return_value
        with contextlib.nested(
            mock.patch.object(self.api, '_get_available_networks',
                              return_value=nets),
            mock.patch.object(client.Client, 'list_ports',
                              side_effect=_fake_list_ports),
            mock.patch.object(client.Client, 'show_quota',
                              return_value={'quota': {'port': 1}})):
            exc = self.assertRaises(exception.PortLimitExceeded,
                                    self.api.validate_networks,
                                    self.context, requested_networks, 1)
            expected_exception_msg = ('The number of defined ports: '
                                      '%(ports)d is over the limit: '
                                      '%(quota)d' %
                                      {'ports': 5,
                                       'quota': 1})
            self.assertEqual(expected_exception_msg, str(exc))
def test_validate_networks_fixed_ip_no_dup1(self):
# Test validation for a request for a network with a
# fixed ip that is not already in use because no fixed ips in use
nets1 = [{'id': 'my_netid1',
'name': 'my_netname1',
'subnets': ['mysubnid1'],
'tenant_id': 'fake-project'}]
requested_networks = [('my_netid1', '10.0.1.2', None, None)]
ids = ['my_netid1']
list_port_values = [({'network_id': 'my_netid1',
'fixed_ips': 'ip_address=10.0.1.2',
'fields': 'device_id'},
{'ports': []}),
({'tenant_id': 'fake-project'},
{'ports': []})]
self._test_validate_networks_fixed_ip_no_dup(nets1, requested_networks,
ids, list_port_values)
def test_validate_networks_fixed_ip_no_dup2(self):
# Test validation for a request for a network with a
# fixed ip that is not already in use because not used on this net id
nets2 = [{'id': 'my_netid1',
'name': 'my_netname1',
'subnets': ['mysubnid1'],
'tenant_id': 'fake-project'},
{'id': 'my_netid2',
'name': 'my_netname2',
'subnets': ['mysubnid2'],
'tenant_id': 'fake-project'}]
requested_networks = [('my_netid1', '10.0.1.2', None, None),
('my_netid2', '10.0.1.3', None, None)]
ids = ['my_netid1', 'my_netid2']
list_port_values = [({'network_id': 'my_netid1',
'fixed_ips': 'ip_address=10.0.1.2',
'fields': 'device_id'},
{'ports': []}),
({'network_id': 'my_netid2',
'fixed_ips': 'ip_address=10.0.1.3',
'fields': 'device_id'},
{'ports': []}),
({'tenant_id': 'fake-project'},
{'ports': []})]
self._test_validate_networks_fixed_ip_no_dup(nets2, requested_networks,
ids, list_port_values)
def test_validate_networks_fixed_ip_dup(self):
# Test validation for a request for a network with a
# fixed ip that is already in use
requested_networks = [('my_netid1', '10.0.1.2', None, None)]
list_port_mock_params = {'network_id': 'my_netid1',
'fixed_ips': 'ip_address=10.0.1.2',
'fields': 'device_id'}
list_port_mock_return = {'ports': [({'device_id': 'my_deviceid'})]}
with mock.patch.object(client.Client, 'list_ports',
return_value=list_port_mock_return) as (
list_ports_mock):
self.assertRaises(exception.FixedIpAlreadyInUse,
self.api.validate_networks,
self.context, requested_networks, 1)
list_ports_mock.assert_called_once_with(**list_port_mock_params)
    def test_allocate_floating_ip_exceed_limit(self):
        # Verify that the correct exception is thrown when quota exceed
        pool_name = 'dummy'
        api = neutronapi.API()
        with contextlib.nested(
            mock.patch.object(client.Client, 'create_floatingip'),
            mock.patch.object(api,
                '_get_floating_ip_pool_id_by_name_or_id')) as (
            create_mock, get_mock):
            # Neutron's over-quota error must surface as the nova-level
            # FloatingIpLimitExceeded.
            create_mock.side_effect = exceptions.OverQuotaClient()
            self.assertRaises(exception.FloatingIpLimitExceeded,
                          api.allocate_floating_ip,
                          self.context, pool_name)
    def test_allocate_floating_ip_no_ipv4_subnet(self):
        """A BadRequest for a pool with no IPv4 subnet is translated into
        FloatingIpBadRequest.
        """
        api = neutronapi.API()
        net_id = uuid.uuid4()
        error_msg = ('Bad floatingip request: Network %s does not contain '
                     'any IPv4 subnet' % net_id)
        with contextlib.nested(
            mock.patch.object(client.Client, 'create_floatingip'),
            mock.patch.object(api,
                '_get_floating_ip_pool_id_by_name_or_id')) as (
            create_mock, get_mock):
            create_mock.side_effect = exceptions.BadRequest(error_msg)
            self.assertRaises(exception.FloatingIpBadRequest,
                              api.allocate_floating_ip, self.context,
                              'ext_net')
    def test_create_port_for_instance_no_more_ip(self):
        """IpAddressGenerationFailure from neutron maps to NoMoreFixedIps."""
        instance = fake_instance.fake_instance_obj(self.context)
        net = {'id': 'my_netid1',
               'name': 'my_netname1',
               'subnets': ['mysubnid1'],
               'tenant_id': instance['project_id']}
        with mock.patch.object(client.Client, 'create_port',
            side_effect=exceptions.IpAddressGenerationFailureClient()) as (
            create_port_mock):
            zone = 'compute:%s' % instance['availability_zone']
            port_req_body = {'port': {'device_id': instance['uuid'],
                                      'device_owner': zone}}
            self.assertRaises(exception.NoMoreFixedIps,
                              self.api._create_port,
                              neutronapi.get_client(self.context),
                              instance, net['id'], port_req_body)
            create_port_mock.assert_called_once_with(port_req_body)
    @mock.patch.object(client.Client, 'create_port',
                       side_effect=exceptions.MacAddressInUseClient())
    def test_create_port_for_instance_mac_address_in_use(self,
                                                         create_port_mock):
        """A MAC-address conflict from neutron maps to PortInUse."""
        # Create fake data.
        instance = fake_instance.fake_instance_obj(self.context)
        net = {'id': 'my_netid1',
               'name': 'my_netname1',
               'subnets': ['mysubnid1'],
               'tenant_id': instance['project_id']}
        zone = 'compute:%s' % instance['availability_zone']
        port_req_body = {'port': {'device_id': instance['uuid'],
                                  'device_owner': zone,
                                  'mac_address': 'XX:XX:XX:XX:XX:XX'}}
        available_macs = set(['XX:XX:XX:XX:XX:XX'])
        # Run the code.
        self.assertRaises(exception.PortInUse,
                          self.api._create_port,
                          neutronapi.get_client(self.context),
                          instance, net['id'], port_req_body,
                          available_macs=available_macs)
        # Assert the calls.
        create_port_mock.assert_called_once_with(port_req_body)
    @mock.patch.object(client.Client, 'create_port',
                       side_effect=exceptions.IpAddressInUseClient())
    def test_create_port_for_fixed_ip_in_use(self, create_port_mock):
        """IpAddressInUse from neutron maps to FixedIpAlreadyInUse."""
        # Create fake data.
        instance = fake_instance.fake_instance_obj(self.context)
        net = {'id': 'my_netid1',
               'name': 'my_netname1',
               'subnets': ['mysubnid1'],
               'tenant_id': instance['project_id']}
        zone = 'compute:%s' % instance['availability_zone']
        port_req_body = {'port': {'device_id': instance['uuid'],
                                  'device_owner': zone,
                                  'mac_address': 'XX:XX:XX:XX:XX:XX'}}
        fake_ip = '1.1.1.1'
        # Run the code.
        self.assertRaises(exception.FixedIpAlreadyInUse,
                          self.api._create_port,
                          neutronapi.get_client(self.context),
                          instance, net['id'], port_req_body,
                          fixed_ip=fake_ip)
        # Assert the calls.
        create_port_mock.assert_called_once_with(port_req_body)
    @mock.patch.object(client.Client, 'create_port',
                       side_effect=exceptions.InvalidIpForNetworkClient())
    def test_create_port_with_invalid_ip_for_network(self, create_port_mock):
        """An IP outside the network's subnets maps to InvalidInput with a
        descriptive message.
        """
        # Create fake data.
        instance = fake_instance.fake_instance_obj(self.context)
        net = {'id': 'my_netid1',
               'name': 'my_netname1',
               'subnets': ['mysubnid1'],
               'tenant_id': instance['project_id']}
        zone = 'compute:%s' % instance['availability_zone']
        port_req_body = {'port': {'device_id': instance['uuid'],
                                  'device_owner': zone,
                                  'mac_address': 'XX:XX:XX:XX:XX:XX'}}
        fake_ip = '1.1.1.1'
        # Run the code.
        exc = self.assertRaises(exception.InvalidInput,
                                self.api._create_port,
                                neutronapi.get_client(self.context),
                                instance, net['id'], port_req_body,
                                fixed_ip=fake_ip)
        # Assert the exception message
        expected_exception_msg = ('Invalid input received: Fixed IP %(ip)s is '
                                  'not a valid ip address for network '
                                  '%(net_id)s.' %
                                  {'ip': fake_ip, 'net_id': net['id']})
        self.assertEqual(expected_exception_msg, str(exc))
        # Assert the calls.
        create_port_mock.assert_called_once_with(port_req_body)
def test_get_network_detail_not_found(self):
api = neutronapi.API()
expected_exc = exceptions.NetworkNotFoundClient()
network_uuid = '02cacbca-7d48-4a2c-8011-43eecf8a9786'
with mock.patch.object(client.Client, 'show_network',
side_effect=expected_exc) as (
fake_show_network):
self.assertRaises(exception.NetworkNotFound,
api.get,
self.context,
network_uuid)
fake_show_network.assert_called_once_with(network_uuid)
    @mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
    @mock.patch('nova.network.neutronv2.api.API.'
                '_refresh_neutron_extensions_cache')
    def test_deallocate_for_instance_uses_delete_helper(self,
                                                        mock_refresh,
                                                        mock_preexisting):
        """With no preexisting ports, all of the instance's ports are
        removed through _delete_ports with raise_if_fail=True.
        """
        # setup fake data
        instance = fake_instance.fake_instance_obj(self.context)
        mock_preexisting.return_value = []
        port_data = {'ports': [{'id': str(uuid.uuid4())}]}
        ports = set([port['id'] for port in port_data.get('ports')])
        api = neutronapi.API()
        # setup mocks
        mock_client = mock.Mock()
        mock_client.list_ports.return_value = port_data
        with contextlib.nested(
            mock.patch.object(neutronapi, 'get_client',
                              return_value=mock_client),
            mock.patch.object(api, '_delete_ports')
        ) as (
            mock_get_client, mock_delete
        ):
            # run the code
            api.deallocate_for_instance(self.context, instance)
        # assert the calls
        mock_client.list_ports.assert_called_once_with(
            device_id=instance.uuid)
        mock_delete.assert_called_once_with(
            mock_client, instance, ports, raise_if_fail=True)
def _test_delete_ports(self, expect_raise):
results = [exceptions.NeutronClientException, None]
mock_client = mock.Mock()
with mock.patch.object(mock_client, 'delete_port',
side_effect=results):
api = neutronapi.API()
api._delete_ports(mock_client, {'uuid': 'foo'}, ['port1', 'port2'],
raise_if_fail=expect_raise)
    def test_delete_ports_raise(self):
        # raise_if_fail=True propagates the neutron client error.
        self.assertRaises(exceptions.NeutronClientException,
                          self._test_delete_ports, True)
    def test_delete_ports_no_raise(self):
        # raise_if_fail=False swallows the delete failure.
        self._test_delete_ports(False)
def test_delete_ports_never_raise_404(self):
mock_client = mock.Mock()
mock_client.delete_port.side_effect = exceptions.PortNotFoundClient
api = neutronapi.API()
api._delete_ports(mock_client, {'uuid': 'foo'}, ['port1'],
raise_if_fail=True)
mock_client.delete_port.assert_called_once_with('port1')
    @mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
    def test_deallocate_port_for_instance_fails(self, mock_preexisting):
        """If port deletion fails, the error propagates and the network
        info cache is not refreshed.
        """
        mock_preexisting.return_value = []
        mock_client = mock.Mock()
        api = neutronapi.API()
        with contextlib.nested(
            mock.patch.object(neutronapi, 'get_client',
                              return_value=mock_client),
            mock.patch.object(api, '_delete_ports',
                              side_effect=exceptions.Unauthorized),
            mock.patch.object(api, 'get_instance_nw_info')
        ) as (
            get_client, delete_ports, get_nw_info
        ):
            self.assertRaises(exceptions.Unauthorized,
                              api.deallocate_port_for_instance,
                              self.context, instance={'uuid': 'fake'},
                              port_id='fake')
        # make sure that we didn't try to reload nw info
        self.assertFalse(get_nw_info.called)
@mock.patch.object(neutronapi, 'get_client', return_value=mock.Mock())
def _test_show_port_exceptions(self, client_exc, expected_nova_exc,
get_client_mock):
show_port_mock = mock.Mock(side_effect=client_exc)
get_client_mock.return_value.show_port = show_port_mock
self.assertRaises(expected_nova_exc, self.api.show_port,
self.context, 'fake_port_id')
    def test_show_port_not_found(self):
        # neutron 404 -> nova PortNotFound
        self._test_show_port_exceptions(exceptions.PortNotFoundClient,
                                        exception.PortNotFound)
    def test_show_port_forbidden(self):
        # neutron 401/403 -> nova Forbidden
        self._test_show_port_exceptions(exceptions.Unauthorized,
                                        exception.Forbidden)
    def test_show_port_unknown_exception(self):
        # any other neutron client error -> generic NovaException
        self._test_show_port_exceptions(exceptions.NeutronClientException,
                                        exception.NovaException)
def test_get_network(self):
api = neutronapi.API()
with mock.patch.object(client.Client, 'show_network') as mock_show:
mock_show.return_value = {
'network': {'id': 'fake-uuid', 'name': 'fake-network'}
}
net_obj = api.get(self.context, 'fake-uuid')
self.assertEqual('fake-network', net_obj.label)
self.assertEqual('fake-network', net_obj.name)
self.assertEqual('fake-uuid', net_obj.uuid)
def test_get_all_networks(self):
api = neutronapi.API()
with mock.patch.object(client.Client, 'list_networks') as mock_list:
mock_list.return_value = {
'networks': [
{'id': 'fake-uuid1', 'name': 'fake-network1'},
{'id': 'fake-uuid2', 'name': 'fake-network2'},
]}
net_objs = api.get_all(self.context)
self.assertIsInstance(net_objs, objects.NetworkList)
self.assertEqual(2, len(net_objs))
self.assertEqual(('fake-uuid1', 'fake-network1'),
(net_objs[0].uuid, net_objs[0].name))
self.assertEqual(('fake-uuid2', 'fake-network2'),
(net_objs[1].uuid, net_objs[1].name))
    @mock.patch.object(neutronapi.API, "_refresh_neutron_extensions_cache")
    @mock.patch.object(neutronapi, 'get_client', return_value=mock.Mock())
    def test_update_instance_vnic_index(self, mock_get_client,
                                        mock_refresh_extensions):
        """With the vnic-index extension enabled, update_instance_vnic_index
        writes the index onto the VIF's port.
        """
        api = neutronapi.API()
        api.extensions = set([constants.VNIC_INDEX_EXT])
        mock_client = mock_get_client()
        mock_client.update_port.return_value = 'port'
        instance = {'project_id': '9d049e4b60b64716978ab415e6fbd5c0',
                    'uuid': str(uuid.uuid4()),
                    'display_name': 'test_instance',
                    'availability_zone': 'nova',
                    'host': 'some_host'}
        instance = objects.Instance(**instance)
        vif = {'id': 'fake-port-id'}
        api.update_instance_vnic_index(self.context, instance, vif, 7)
        port_req_body = {'port': {'vnic_index': 7}}
        mock_client.update_port.assert_called_once_with('fake-port-id',
                                                        port_req_body)
    @mock.patch.object(neutronapi, 'get_client', return_value=mock.Mock())
    def test_update_port_bindings_for_instance_same_host(self,
                                                         get_client_mock):
        """Only ports whose binding:host_id differs (or is unset) get their
        host binding rewritten.
        """
        instance = fake_instance.fake_instance_obj(self.context)
        self.api._has_port_binding_extension = mock.Mock(return_value=True)
        # We test two ports, one with the same host as the host passed in and
        # one where binding:host_id isn't set, so we update that port.
        fake_ports = {'ports': [
                        {'id': 'fake-port-1',
                         'binding:host_id': instance.host},
                        {'id': 'fake-port-2'}]}
        list_ports_mock = mock.Mock(return_value=fake_ports)
        get_client_mock.return_value.list_ports = list_ports_mock
        update_port_mock = mock.Mock()
        get_client_mock.return_value.update_port = update_port_mock
        self.api._update_port_binding_for_instance(self.context, instance,
                                                   instance.host)
        # Assert that update_port was only called on the port without a host.
        update_port_mock.assert_called_once_with(
            'fake-port-2', {'port': {'binding:host_id': instance.host}})
@mock.patch('nova.network.neutronv2.api.compute_utils')
def test_get_preexisting_port_ids(self, mocked_comp_utils):
mocked_comp_utils.get_nw_info_for_instance.return_value = [model.VIF(
id='1', preserve_on_delete=False), model.VIF(
id='2', preserve_on_delete=True), model.VIF(
id='3', preserve_on_delete=True)]
result = self.api._get_preexisting_port_ids(None)
self.assertEqual(['2', '3'], result, "Invalid preexisting ports")
    def _test_unbind_ports_get_client(self, mock_neutron,
                                      mock_has_ext, has_ext=False):
        """With the port-binding extension, _unbind_ports fetches an admin
        client; without it, no extra client is requested.
        """
        mock_ctx = mock.Mock(is_admin=False)
        mock_has_ext.return_value = has_ext
        ports = ["1", "2", "3"]
        self.api._unbind_ports(mock_ctx, ports, mock_neutron)
        get_client_calls = []
        get_client_calls.append(mock.call(mock_ctx)
                                if not has_ext else
                                mock.call(mock_ctx, admin=True))
        if has_ext:
            self.assertEqual(1, mock_neutron.call_count)
            # second positional arg True == any_order for assert_has_calls
            mock_neutron.assert_has_calls(get_client_calls, True)
        else:
            self.assertEqual(0, mock_neutron.call_count)
    @mock.patch('nova.network.neutronv2.api.API._has_port_binding_extension')
    @mock.patch('nova.network.neutronv2.api.get_client')
    def test_unbind_ports_get_client_binding_extension(self,
                                                       mock_neutron,
                                                       mock_has_ext):
        # Extension present -> admin client is fetched.
        self._test_unbind_ports_get_client(mock_neutron, mock_has_ext, True)
    @mock.patch('nova.network.neutronv2.api.API._has_port_binding_extension')
    @mock.patch('nova.network.neutronv2.api.get_client')
    def test_unbind_ports_get_client(self, mock_neutron, mock_has_ext):
        # Extension absent -> no extra client is fetched.
        self._test_unbind_ports_get_client(mock_neutron, mock_has_ext)
def _test_unbind_ports(self, mock_neutron, mock_has_ext, has_ext=False):
mock_client = mock.Mock()
mock_update_port = mock.Mock()
mock_client.update_port = mock_update_port
mock_ctx = mock.Mock(is_admin=False)
mock_has_ext.return_value = has_ext
mock_neutron.return_value = mock_client
ports = ["1", "2", "3"]
api = neutronapi.API()
api._unbind_ports(mock_ctx, ports, mock_client)
body = {'port': {'device_id': '', 'device_owner': ''}}
if has_ext:
body['port']['binding:host_id'] = None
update_port_calls = []
for p in ports:
update_port_calls.append(mock.call(p, body))
self.assertEqual(3, mock_update_port.call_count)
mock_update_port.assert_has_calls(update_port_calls)
    @mock.patch('nova.network.neutronv2.api.API._has_port_binding_extension')
    @mock.patch('nova.network.neutronv2.api.get_client')
    def test_unbind_ports_binding_ext(self, mock_neutron, mock_has_ext):
        # With the port-binding extension binding:host_id is also cleared.
        self._test_unbind_ports(mock_neutron, mock_has_ext, True)
    @mock.patch('nova.network.neutronv2.api.API._has_port_binding_extension')
    @mock.patch('nova.network.neutronv2.api.get_client')
    def test_unbind_ports(self, mock_neutron, mock_has_ext):
        # Without the extension only the device fields are cleared.
        self._test_unbind_ports(mock_neutron, mock_has_ext, False)
@mock.patch('nova.network.neutronv2.api.API._has_port_binding_extension')
def test_unbind_ports_no_port_ids(self, mock_has_ext):
# Tests that None entries in the ports list are filtered out.
mock_client = mock.Mock()
mock_update_port = mock.Mock()
mock_client.update_port = mock_update_port
mock_ctx = mock.Mock(is_admin=False)
mock_has_ext.return_value = True
api = neutronapi.API()
api._unbind_ports(mock_ctx, [None], mock_client, mock_client)
self.assertFalse(mock_update_port.called)
    @mock.patch('nova.network.neutronv2.api.API.get_instance_nw_info')
    @mock.patch('nova.network.neutronv2.api.excutils')
    @mock.patch('nova.network.neutronv2.api.API._delete_ports')
    @mock.patch('nova.network.neutronv2.api.API.'
                '_check_external_network_attach')
    @mock.patch('nova.network.neutronv2.api.LOG')
    @mock.patch('nova.network.neutronv2.api.API._unbind_ports')
    @mock.patch('nova.network.neutronv2.api.API._has_port_binding_extension')
    @mock.patch('nova.network.neutronv2.api.API.'
                '_populate_neutron_extension_values')
    @mock.patch('nova.network.neutronv2.api.API._get_available_networks')
    @mock.patch('nova.network.neutronv2.api.get_client')
    def test_allocate_for_instance_unbind(self, mock_ntrn,
                                          mock_avail_nets,
                                          mock_ext_vals,
                                          mock_has_pbe,
                                          mock_unbind,
                                          mock_log,
                                          mock_cena,
                                          mock_del_ports,
                                          mock_exeu,
                                          mock_giwn):
        """If updating one requested port fails mid-allocation, the ports
        already updated are unbound again.
        """
        mock_nc = mock.Mock()
        def show_port(port_id):
            return {'port': {'network_id': 'net-1', 'id': port_id,
                             'tenant_id': 'proj-1'}}
        mock_nc.show_port = show_port
        mock_ntrn.return_value = mock_nc
        # Third update_port call ('fail-port') blows up.
        mock_nc.update_port.side_effect = [True, True, Exception]
        mock_inst = mock.Mock(project_id="proj-1",
                              availability_zone='zone-1',
                              uuid='inst-1')
        mock_has_pbe.return_value = False
        nw_req = objects.NetworkRequestList(
            objects = [objects.NetworkRequest(port_id='fake-port1'),
                       objects.NetworkRequest(port_id='fake-port2'),
                       objects.NetworkRequest(port_id='fail-port')])
        mock_avail_nets.return_value = [{'id': 'net-1'}]
        self.api.allocate_for_instance(mock.sentinel.ctx,
                                       mock_inst,
                                       requested_networks=nw_req)
        # Only the two successfully updated ports are unbound.
        mock_unbind.assert_called_once_with(mock.sentinel.ctx,
                                            ['fake-port1', 'fake-port2'],
                                            mock.ANY,
                                            mock.ANY)
    @mock.patch('nova.objects.network_request.utils')
    @mock.patch('nova.network.neutronv2.api.LOG')
    @mock.patch('nova.network.neutronv2.api.base_api')
    @mock.patch('nova.network.neutronv2.api.API._delete_ports')
    @mock.patch('nova.network.neutronv2.api.API._unbind_ports')
    @mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
    @mock.patch('nova.network.neutronv2.api.get_client')
    def test_preexisting_deallocate_for_instance(self, mock_ntrn,
                                                 mock_gppids,
                                                 mock_unbind,
                                                 mock_deletep,
                                                 mock_baseapi,
                                                 mock_log,
                                                 req_utils):
        """Requested (port-1) and preexisting (port-3) ports are unbound,
        while nova-created ports (port-2) are deleted.
        """
        req_utils.is_neutron.return_value = True
        mock_inst = mock.Mock(project_id="proj-1",
                              availability_zone='zone-1',
                              uuid='inst-1')
        mock_nc = mock.Mock()
        mock_ntrn.return_value = mock_nc
        mock_nc.list_ports.return_value = {'ports': [
            {'id': 'port-1'}, {'id': 'port-2'}, {'id': 'port-3'}
        ]}
        nw_req = objects.NetworkRequestList(
            objects = [objects.NetworkRequest(network_id='net-1',
                                              address='192.168.0.3',
                                              port_id='port-1',
                                              pci_request_id='pci-1')])
        mock_gppids.return_value = ['port-3']
        self.api.deallocate_for_instance(mock.sentinel.ctx, mock_inst,
                                    requested_networks=nw_req)
        mock_unbind.assert_called_once_with(mock.sentinel.ctx,
                                            set(['port-1', 'port-3']),
                                            mock.ANY)
        mock_deletep.assert_called_once_with(mock_nc,
                                             mock_inst,
                                             set(['port-2']),
                                             raise_if_fail=True)
    @mock.patch('nova.network.neutronv2.api.API.get_instance_nw_info')
    @mock.patch('nova.network.neutronv2.api.API._unbind_ports')
    @mock.patch('nova.network.neutronv2.api.compute_utils')
    @mock.patch('nova.network.neutronv2.api.get_client')
    def test_preexisting_deallocate_port_for_instance(self,
                                                      mock_ntrn,
                                                      mock_comp_utils,
                                                      mock_unbind,
                                                      mock_netinfo):
        """Deallocating a preserve_on_delete port unbinds it rather than
        deleting it.
        """
        mock_comp_utils.get_nw_info_for_instance.return_value = [model.VIF(
            id='1', preserve_on_delete=False), model.VIF(
            id='2', preserve_on_delete=True), model.VIF(
            id='3', preserve_on_delete=True)]
        mock_inst = mock.Mock(project_id="proj-1",
                              availability_zone='zone-1',
                              uuid='inst-1')
        mock_client = mock.Mock()
        mock_ntrn.return_value = mock_client
        self.api.deallocate_port_for_instance(mock.sentinel.ctx,
                                              mock_inst, '2')
        mock_unbind.assert_called_once_with(mock.sentinel.ctx, ['2'],
                                            mock_client)
    @mock.patch('nova.network.neutronv2.api.API.'
                '_check_external_network_attach')
    @mock.patch('nova.network.neutronv2.api.API._has_port_binding_extension')
    @mock.patch('nova.network.neutronv2.api.API.'
                '_populate_neutron_extension_values')
    @mock.patch('nova.network.neutronv2.api.API._get_available_networks')
    @mock.patch('nova.network.neutronv2.api.get_client')
    def test_port_binding_failed_created_port(self, mock_ntrn,
                                          mock_avail_nets,
                                          mock_ext_vals,
                                          mock_has_pbe,
                                          mock_cena):
        """A nova-created port whose binding failed raises
        PortBindingFailed and is cleaned up (deleted).
        """
        mock_has_pbe.return_value = True
        mock_nc = mock.Mock()
        mock_ntrn.return_value = mock_nc
        mock_inst = mock.Mock(project_id="proj-1",
                              availability_zone='zone-1',
                              uuid='inst-1')
        mock_avail_nets.return_value = [{'id': 'net-1'}]
        mock_nc.create_port.return_value = {'port': {'id': 'fake_id',
                                        'tenant_id': mock_inst.project_id,
                                        'binding:vif_type': 'binding_failed'}}
        self.assertRaises(exception.PortBindingFailed,
                          self.api.allocate_for_instance,
                          mock.sentinel.ctx,
                          mock_inst)
        mock_nc.delete_port.assert_called_once_with('fake_id')
    @mock.patch('nova.network.neutronv2.api.API._show_port')
    @mock.patch('nova.network.neutronv2.api.API._has_port_binding_extension')
    @mock.patch('nova.network.neutronv2.api.get_client')
    def test_port_binding_failed_with_request(self, mock_ntrn,
                                          mock_has_pbe,
                                          mock_show_port):
        """A requested (preexisting) port whose binding already failed makes
        allocate_for_instance raise PortBindingFailed.
        """
        mock_has_pbe.return_value = True
        mock_nc = mock.Mock()
        mock_ntrn.return_value = mock_nc
        mock_inst = mock.Mock(project_id="proj-1",
                              availability_zone='zone-1',
                              uuid='inst-1')
        mock_show_port.return_value = {
                            'tenant_id': mock_inst.project_id,
                            'binding:vif_type': 'binding_failed'}
        nw_req = objects.NetworkRequestList(
            objects = [objects.NetworkRequest(port_id='fake_id')])
        self.assertRaises(exception.PortBindingFailed,
                          self.api.allocate_for_instance,
                          mock.sentinel.ctx, mock_inst,
                          requested_networks=nw_req)
class TestNeutronv2ModuleMethods(test.NoDBTestCase):
    """Tests for module-level helpers in nova.network.neutronv2.api."""

    def test_gather_port_ids_and_networks_wrong_params(self):
        """networks and port_ids must be passed together or not at all."""
        api = neutronapi.API()
        # networks given without port_ids:
        self.assertRaises(exception.NovaException,
                          api._gather_port_ids_and_networks,
                          'fake_context', 'fake_instance',
                          [{'network': {'name': 'foo'}}], None)
        # port_ids given without networks:
        self.assertRaises(exception.NovaException,
                          api._gather_port_ids_and_networks,
                          'fake_context', 'fake_instance',
                          None, ['list', 'of', 'port_ids'])

    def test_ensure_requested_network_ordering_no_preference_ids(self):
        """No preference list: a plain id list passes through untouched."""
        networks = [1, 2, 3]
        neutronapi._ensure_requested_network_ordering(
            lambda net: net,
            networks,
            None)

    def test_ensure_requested_network_ordering_no_preference_hashes(self):
        """No preference list: dict entries keep their original order."""
        networks = [{'id': 3}, {'id': 1}, {'id': 2}]
        neutronapi._ensure_requested_network_ordering(
            lambda net: net['id'],
            networks,
            None)
        self.assertEqual(networks, [{'id': 3}, {'id': 1}, {'id': 2}])

    def test_ensure_requested_network_ordering_with_preference(self):
        """A preference list reorders the networks in place to match it."""
        networks = [{'id': 3}, {'id': 1}, {'id': 2}]
        neutronapi._ensure_requested_network_ordering(
            lambda net: net['id'],
            networks,
            [1, 2, 3])
        self.assertEqual(networks, [{'id': 1}, {'id': 2}, {'id': 3}])
class TestNeutronv2Portbinding(TestNeutronv2Base):
    """Tests for the Neutron port-binding extension handling.

    NOTE(review): several tests below use mox record/replay; the order of
    the recorded calls is significant and must mirror the production code.
    """

    def test_allocate_for_instance_portbinding(self):
        # Exercise the allocation path with the port-binding extension on.
        self._allocate_for_instance(1, portbinding=True)

    def test_populate_neutron_extension_values_binding(self):
        # When the port-binding extension is reported by neutron, the host id
        # is written into the port request body (and no profile is added).
        api = neutronapi.API()
        neutronapi.get_client(mox.IgnoreArg()).AndReturn(
            self.moxed_client)
        self.moxed_client.list_extensions().AndReturn(
            {'extensions': [{'name': constants.PORTBINDING_EXT}]})
        self.mox.ReplayAll()
        host_id = 'my_host_id'
        instance = {'host': host_id}
        port_req_body = {'port': {}}
        api._populate_neutron_extension_values(self.context, instance,
                                               None, port_req_body)
        self.assertEqual(port_req_body['port']['binding:host_id'], host_id)
        self.assertFalse(port_req_body['port'].get('binding:profile'))

    @mock.patch.object(pci_whitelist, 'get_pci_device_devspec')
    @mock.patch.object(pci_manager, 'get_instance_pci_devs')
    def test_populate_neutron_extension_values_binding_sriov(self,
                                         mock_get_instance_pci_devs,
                                         mock_get_pci_device_devspec):
        # For an SR-IOV PCI request, the binding profile must carry the
        # vendor/product pair, the PCI slot and the physical network tag.
        api = neutronapi.API()
        host_id = 'my_host_id'
        instance = {'host': host_id}
        port_req_body = {'port': {}}
        pci_req_id = 'my_req_id'
        pci_dev = {'vendor_id': '1377',
                   'product_id': '0047',
                   'address': '0000:0a:00.1',
                   }
        # A lightweight stand-in for a nova PCI device object.
        PciDevice = collections.namedtuple('PciDevice',
                                           ['vendor_id', 'product_id',
                                            'address'])
        mydev = PciDevice(**pci_dev)
        profile = {'pci_vendor_info': '1377:0047',
                   'pci_slot': '0000:0a:00.1',
                   'physical_network': 'phynet1',
                   }
        mock_get_instance_pci_devs.return_value = [mydev]
        devspec = mock.Mock()
        devspec.get_tags.return_value = {'physical_network': 'phynet1'}
        mock_get_pci_device_devspec.return_value = devspec
        api._populate_neutron_binding_profile(instance,
                                              pci_req_id, port_req_body)
        self.assertEqual(port_req_body['port']['binding:profile'], profile)

    def _test_update_port_binding_false(self, func_name, *args):
        # Helper: with the binding extension absent, the named API method
        # must return without touching any ports.
        api = neutronapi.API()
        func = getattr(api, func_name)
        self.mox.StubOutWithMock(api, '_has_port_binding_extension')
        api._has_port_binding_extension(mox.IgnoreArg(),
                                        refresh_cache=True).AndReturn(False)
        self.mox.ReplayAll()
        func(*args)

    def _test_update_port_binding_true(self, expected_bind_host,
                                       func_name, *args):
        # Helper: with the binding extension present, the named API method
        # must rebind the instance's port to `expected_bind_host`.
        api = neutronapi.API()
        func = getattr(api, func_name)
        self.mox.StubOutWithMock(api, '_has_port_binding_extension')
        api._has_port_binding_extension(mox.IgnoreArg(),
                                        refresh_cache=True).AndReturn(True)
        neutronapi.get_client(mox.IgnoreArg(), admin=True).AndReturn(
            self.moxed_client)
        search_opts = {'device_id': self.instance['uuid'],
                       'tenant_id': self.instance['project_id']}
        ports = {'ports': [{'id': 'test1'}]}
        self.moxed_client.list_ports(**search_opts).AndReturn(ports)
        port_req_body = {'port':
                         {'binding:host_id': expected_bind_host}}
        self.moxed_client.update_port('test1',
                                      port_req_body).AndReturn(None)
        self.mox.ReplayAll()
        func(*args)

    def _test_update_port_true_exception(self, expected_bind_host,
                                         func_name, *args):
        # Helper: a neutron failure while updating the port must surface as
        # the neutron client exception rather than being swallowed.
        api = neutronapi.API()
        func = getattr(api, func_name)
        self.mox.StubOutWithMock(api, '_has_port_binding_extension')
        api._has_port_binding_extension(mox.IgnoreArg(),
                                        refresh_cache=True).AndReturn(True)
        neutronapi.get_client(mox.IgnoreArg(), admin=True).AndReturn(
            self.moxed_client)
        search_opts = {'device_id': self.instance['uuid'],
                       'tenant_id': self.instance['project_id']}
        ports = {'ports': [{'id': 'test1'}]}
        self.moxed_client.list_ports(**search_opts).AndReturn(ports)
        port_req_body = {'port':
                         {'binding:host_id': expected_bind_host}}
        self.moxed_client.update_port('test1',
                                      port_req_body).AndRaise(
            Exception("fail to update port"))
        self.mox.ReplayAll()
        self.assertRaises(NEUTRON_CLIENT_EXCEPTION,
                          func,
                          *args)

    def test_migrate_instance_finish_binding_false(self):
        self._test_update_port_binding_false('migrate_instance_finish',
                                             self.context, None,
                                             {'dest_compute': 'fake'})

    def test_migrate_instance_finish_binding_true(self):
        migration = {'source_compute': self.instance.get('host'),
                     'dest_compute': 'dest_host'}
        instance = self._fake_instance_object(self.instance)
        self._test_update_port_binding_true('dest_host',
                                            'migrate_instance_finish',
                                            self.context,
                                            instance,
                                            migration)

    def test_migrate_instance_finish_binding_true_exception(self):
        migration = {'source_compute': self.instance.get('host'),
                     'dest_compute': 'dest_host'}
        instance = self._fake_instance_object(self.instance)
        self._test_update_port_true_exception('dest_host',
                                              'migrate_instance_finish',
                                              self.context,
                                              instance,
                                              migration)

    def test_setup_instance_network_on_host_false(self):
        self._test_update_port_binding_false(
            'setup_instance_network_on_host', self.context, None,
            'fake_host')

    def test_setup_instance_network_on_host_true(self):
        instance = self._fake_instance_object(self.instance)
        self._test_update_port_binding_true('fake_host',
                                            'setup_instance_network_on_host',
                                            self.context,
                                            instance,
                                            'fake_host')

    def test_setup_instance_network_on_host_exception(self):
        instance = self._fake_instance_object(self.instance)
        self._test_update_port_true_exception(
            'fake_host', 'setup_instance_network_on_host',
            self.context, instance, 'fake_host')

    def test_associate_not_implemented(self):
        # The neutron backend does not implement floating-ip associate here.
        api = neutronapi.API()
        self.assertRaises(NotImplementedError,
                          api.associate,
                          self.context, 'id')
class TestNeutronv2ExtraDhcpOpts(TestNeutronv2Base):
    """Allocation tests covering the extra-DHCP-options feature."""

    def setUp(self):
        super(TestNeutronv2ExtraDhcpOpts, self).setUp()
        # Every test in this class talks to the shared mocked neutron client.
        neutronapi.get_client(mox.IgnoreArg()).MultipleTimes().AndReturn(
            self.moxed_client)

    def test_allocate_for_instance_1_with_extra_dhcp_opts_turned_off(self):
        self._allocate_for_instance(1, extra_dhcp_opts=False)

    def test_allocate_for_instance_extradhcpopts(self):
        # Three representative DHCP options are threaded through allocation.
        opts = [
            {'opt_name': 'bootfile-name', 'opt_value': 'pxelinux.0'},
            {'opt_name': 'tftp-server', 'opt_value': '123.123.123.123'},
            {'opt_name': 'server-ip-address', 'opt_value': '123.123.123.456'},
        ]
        self._allocate_for_instance(1, dhcp_options=opts)
class TestNeutronClientForAdminScenarios(test.NoDBTestCase):
    """Verify admin-credential client creation for the neutron API."""

    @mock.patch('keystoneclient.auth.identity.v2.Password.get_token')
    def _test_get_client_for_admin(self, auth_mock,
                                   use_id=False, admin_context=False):
        # The keystone auth plugin is patched, so no real token request
        # is made; the client should end up holding this token.
        expected_token = uuid.uuid4().hex
        auth_mock.return_value = expected_token

        self.flags(auth_strategy=None, group='neutron')
        self.flags(url='http://anyhost/', group='neutron')
        self.flags(timeout=30, group='neutron')
        if use_id:
            self.flags(admin_tenant_id='admin_tenant_id', group='neutron')
            self.flags(admin_user_id='admin_user_id', group='neutron')

        if admin_context:
            my_context = context.get_admin_context()
        else:
            my_context = context.RequestContext('userid', 'my_tenantid',
                                                auth_token='token')

        # Drop any cached session/auth state left over from other tests.
        neutronapi.reset_state()

        if admin_context:
            # An admin context carries no token, which forces an elevation
            # to admin credentials.
            context_client = neutronapi.get_client(my_context)
        else:
            # The context is not elevated, but admin=True forces the
            # elevation even though the context has an auth_token.
            context_client = neutronapi.get_client(my_context, True)

        auth = neutronapi._ADMIN_AUTH
        self.assertEqual(CONF.neutron.admin_auth_url, auth.auth_url)
        self.assertEqual(CONF.neutron.admin_password, auth.password)
        if use_id:
            # Id-based config: names must not be populated.
            self.assertEqual(CONF.neutron.admin_tenant_id, auth.tenant_id)
            self.assertEqual(CONF.neutron.admin_user_id, auth.user_id)
            self.assertIsNone(auth.tenant_name)
            self.assertIsNone(auth.username)
        else:
            # Name-based config: ids must not be populated.
            self.assertEqual(CONF.neutron.admin_tenant_name, auth.tenant_name)
            self.assertEqual(CONF.neutron.admin_username, auth.username)
            self.assertIsNone(auth.tenant_id)
            self.assertIsNone(auth.user_id)
        self.assertEqual(CONF.neutron.timeout, neutronapi._SESSION.timeout)
        self.assertEqual(expected_token,
                         context_client.httpclient.auth.token)
        self.assertEqual(CONF.neutron.url,
                         context_client.httpclient.auth.endpoint)

    def test_get_client_for_admin(self):
        self._test_get_client_for_admin()

    def test_get_client_for_admin_with_id(self):
        self._test_get_client_for_admin(use_id=True)

    def test_get_client_for_admin_context(self):
        self._test_get_client_for_admin(admin_context=True)

    def test_get_client_for_admin_context_with_id(self):
        self._test_get_client_for_admin(use_id=True, admin_context=True)
| [
"[email protected]"
]
| |
6148f29660770166eb2b88bd95268e8ebb855c58 | eff5cd25fa442b70491262bada0584eaaf8add46 | /tfx/tools/cli/testdata/test_pipeline_airflow_2.py | 324d6b0cfbc2ed928677e8a5bbc8c1e5c375d2ac | [
"Apache-2.0"
]
| permissive | fsx950223/tfx | c58e58a85e6de6e9abcb8790acbf36424b5b2029 | 527fe2bab6e4f62febfe1a2029358fabe55f418c | refs/heads/master | 2021-01-04T12:12:51.010090 | 2020-01-26T04:43:14 | 2020-01-26T04:43:14 | 240,543,231 | 1 | 0 | Apache-2.0 | 2020-02-14T15:48:12 | 2020-02-14T15:48:11 | null | UTF-8 | Python | false | false | 3,007 | py | # Lint as: python2, python3
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Pipeline for testing CLI."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import datetime
import os
from tfx.components.example_gen.csv_example_gen.component import CsvExampleGen
from tfx.components.schema_gen.component import SchemaGen
from tfx.components.statistics_gen.component import StatisticsGen
from tfx.orchestration import pipeline
from tfx.orchestration.airflow.airflow_dag_runner import AirflowDagRunner
from tfx.orchestration.airflow.airflow_dag_runner import AirflowPipelineConfig
from tfx.utils.dsl_utils import csv_input
# This example assumes that the taxi data is stored in ~/taxi/data and the
# taxi utility function is in ~/taxi. Feel free to customize this as needed.
_taxi_root = os.path.join(os.environ['HOME'], 'taxi')
_data_root = os.path.join(_taxi_root, 'data/simple')
# Directory and data locations. This example assumes all of the chicago taxi
# example code and metadata library is relative to $HOME, but you can store
# these files anywhere on your local filesystem.
_tfx_root = os.path.join(os.environ['HOME'], 'tfx')
_pipeline_root = os.path.join(_tfx_root, 'pipelines')   # pipeline artifacts
_metadata_db_root = os.path.join(_tfx_root, 'metadata')  # ML Metadata store
_log_root = os.path.join(_tfx_root, 'logs')
# Airflow-specific configs; these will be passed directly to airflow.
# schedule_interval=None means the DAG only runs when triggered manually.
_airflow_config = {
    'schedule_interval': None,
    'start_date': datetime.datetime(2019, 1, 1),
}
def _create_pipeline():
    """Build the simple Chicago taxi TFX pipeline.

    The pipeline ingests CSV data, computes statistics over it, and infers
    a schema from those statistics.
    """
    # Bring the raw CSV taxi data into the pipeline.
    example_gen = CsvExampleGen(input=csv_input(_data_root))
    # Compute statistics for visualization and example validation.
    statistics_gen = StatisticsGen(examples=example_gen.outputs['examples'])
    # Infer a schema from the computed statistics.
    infer_schema = SchemaGen(statistics=statistics_gen.outputs['statistics'])

    components = [example_gen, statistics_gen, infer_schema]
    return pipeline.Pipeline(
        pipeline_name='chicago_taxi_simple',
        pipeline_root=_pipeline_root,
        components=components,
        enable_cache=True,
        metadata_db_root=_metadata_db_root,
    )
# Airflow checks 'DAG' keyword for finding the dag.
# Running the pipeline at import time registers the resulting DAG object
# so the Airflow scheduler can discover it in this module.
airflow_pipeline = AirflowDagRunner(AirflowPipelineConfig(_airflow_config)).run(
    _create_pipeline())
| [
"[email protected]"
]
| |
a703dc2eb5ad305e5bd2196c3484053910cf36c6 | a838d4bed14d5df5314000b41f8318c4ebe0974e | /sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_07_01/aio/operations/_network_security_groups_operations.py | 55371021d9e09323425a87dad705ef9be90f1746 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
]
| permissive | scbedd/azure-sdk-for-python | ee7cbd6a8725ddd4a6edfde5f40a2a589808daea | cc8bdfceb23e5ae9f78323edc2a4e66e348bb17a | refs/heads/master | 2023-09-01T08:38:56.188954 | 2021-06-17T22:52:28 | 2021-06-17T22:52:28 | 159,568,218 | 2 | 0 | MIT | 2019-08-11T21:16:01 | 2018-11-28T21:34:49 | Python | UTF-8 | Python | false | false | 27,723 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')  # Generic return type produced by a caller-supplied `cls` callback.
# Signature of the optional `cls` response-transformer callback accepted by
# every operation in this module: (pipeline_response, deserialized, headers).
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class NetworkSecurityGroupsOperations:
"""NetworkSecurityGroupsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_07_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
    def __init__(self, client, config, serializer, deserializer) -> None:
        """Store the pipeline client, configuration and (de)serializers.

        Called by the service client; not intended for direct construction.
        """
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    async def _delete_initial(
        self,
        resource_group_name: str,
        network_security_group_name: str,
        **kwargs
    ) -> None:
        """Issue the initial DELETE request that starts the long-running
        delete operation; polling is handled by :meth:`begin_delete`."""
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        # Default mapping of HTTP status codes to exceptions, which callers
        # may extend/override via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-07-01"
        accept = "application/json"

        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200/202/204 are all valid initial responses for an async delete.
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}'}  # type: ignore
    async def begin_delete(
        self,
        resource_group_name: str,
        network_security_group_name: str,
        **kwargs
    ) -> AsyncLROPoller[None]:
        """Deletes the specified network security group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param network_security_group_name: The name of the network security group.
        :type network_security_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
         False for no polling, or your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only fire the initial request when not resuming from a saved state.
        if cont_token is None:
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                network_security_group_name=network_security_group_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Delete returns no body; only invoke the caller's callback.
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        # The delete LRO reports its final state via the Location header.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}'}  # type: ignore
    async def get(
        self,
        resource_group_name: str,
        network_security_group_name: str,
        expand: Optional[str] = None,
        **kwargs
    ) -> "_models.NetworkSecurityGroup":
        """Gets the specified network security group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param network_security_group_name: The name of the network security group.
        :type network_security_group_name: str
        :param expand: Expands referenced resources.
        :type expand: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: NetworkSecurityGroup, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2020_07_01.models.NetworkSecurityGroup
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.NetworkSecurityGroup"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-07-01"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # $expand is only sent when the caller asked for referenced resources.
        if expand is not None:
            query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('NetworkSecurityGroup', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}'}  # type: ignore
    async def _create_or_update_initial(
        self,
        resource_group_name: str,
        network_security_group_name: str,
        parameters: "_models.NetworkSecurityGroup",
        **kwargs
    ) -> "_models.NetworkSecurityGroup":
        """Issue the initial PUT request that starts the long-running
        create-or-update operation; polling is handled by
        :meth:`begin_create_or_update`."""
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.NetworkSecurityGroup"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-07-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'NetworkSecurityGroup')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        # 200 = updated existing resource, 201 = created new resource;
        # both carry the resource body.
        if response.status_code == 200:
            deserialized = self._deserialize('NetworkSecurityGroup', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('NetworkSecurityGroup', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}'}  # type: ignore
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        network_security_group_name: str,
        parameters: "_models.NetworkSecurityGroup",
        **kwargs
    ) -> AsyncLROPoller["_models.NetworkSecurityGroup"]:
        """Creates or updates a network security group in the specified resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param network_security_group_name: The name of the network security group.
        :type network_security_group_name: str
        :param parameters: Parameters supplied to the create or update network security group
         operation.
        :type parameters: ~azure.mgmt.network.v2020_07_01.models.NetworkSecurityGroup
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
         False for no polling, or your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either NetworkSecurityGroup or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_07_01.models.NetworkSecurityGroup]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.NetworkSecurityGroup"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only fire the initial PUT when not resuming from a saved state.
        if cont_token is None:
            raw_result = await self._create_or_update_initial(
                resource_group_name=resource_group_name,
                network_security_group_name=network_security_group_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # The final response body is the created/updated resource.
            deserialized = self._deserialize('NetworkSecurityGroup', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        # Create/update reports its final state via the Azure-AsyncOperation
        # header (unlike delete, which uses Location).
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}'}  # type: ignore
    async def update_tags(
        self,
        resource_group_name: str,
        network_security_group_name: str,
        parameters: "_models.TagsObject",
        **kwargs
    ) -> "_models.NetworkSecurityGroup":
        """Updates a network security group tags.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param network_security_group_name: The name of the network security group.
        :type network_security_group_name: str
        :param parameters: Parameters supplied to update network security group tags.
        :type parameters: ~azure.mgmt.network.v2020_07_01.models.TagsObject
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: NetworkSecurityGroup, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2020_07_01.models.NetworkSecurityGroup
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.NetworkSecurityGroup"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-07-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.update_tags.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'TagsObject')
        body_content_kwargs['content'] = body_content
        # Tags-only update is a synchronous PATCH (no LRO involved).
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('NetworkSecurityGroup', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}'}  # type: ignore
    def list_all(
        self,
        **kwargs
    ) -> AsyncIterable["_models.NetworkSecurityGroupListResult"]:
        """Gets all network security groups in a subscription.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either NetworkSecurityGroupListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_07_01.models.NetworkSecurityGroupListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.NetworkSecurityGroupListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-07-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the GET request for either the first page (operation URL)
            # or a continuation page (server-provided next_link).
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list_all.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link already embeds its query string; send it as-is.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and hand back (next_link, items).
            deserialized = self._deserialize('NetworkSecurityGroupListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/networkSecurityGroups'}  # type: ignore
    def list(
        self,
        resource_group_name: str,
        **kwargs
    ) -> AsyncIterable["_models.NetworkSecurityGroupListResult"]:
        """Gets all network security groups in a resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either NetworkSecurityGroupListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_07_01.models.NetworkSecurityGroupListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        # NOTE: auto-generated Azure SDK paging method; identical structure to
        # list_all, scoped to a single resource group.
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.NetworkSecurityGroupListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-07-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the initial GET for the first page, or a bare GET against
            # the service-provided next_link for subsequent pages.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and return (next_link, iterable of items).
            deserialized = self._deserialize('NetworkSecurityGroupListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch one page, mapping HTTP errors to typed exceptions.
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups'}  # type: ignore
| [
"[email protected]"
]
| |
d6cdeb85c2c728cec647a482d4b7f3bb21bb0366 | a80047f86b96367c75fd24a43be67bb8202645aa | /tailor/models.py | 00f95b7fe8b02f9d3e439941c5eebce2bbfdddc3 | [
"MIT"
]
| permissive | Albert-Byrone/Tailor_Hub | 84deaee04809c56bb4d7638dfa800e434936c82a | 0ab4402302d03f261ff00af79f84b62cbd241a27 | refs/heads/master | 2022-12-09T14:18:01.392545 | 2019-12-23T10:26:44 | 2019-12-23T10:26:44 | 224,847,452 | 0 | 0 | MIT | 2022-12-08T03:18:46 | 2019-11-29T12:10:07 | Python | UTF-8 | Python | false | false | 6,060 | py | from django.db import models
from django.contrib.auth.models import User
from django.shortcuts import reverse
from pyuploadcare.dj.models import ImageField
from django.db.models.signals import post_save
from django.dispatch import receiver
import datetime as datetime
from django_countries.fields import CountryField
from django.db.models import Q
# (code, human-readable label) pairs for Item.category.
CATEGORY_CHOICES=(
    ('SU','Suits'),
    ('TR','Trousers'),
    ('CO','Coats'),
    ('DR','Dresses')
)

# (code, label) pairs for Item.label; labels mirror Bootstrap CSS classes.
LABEL_CHOICES=(
    ('P','primary'),
    ('S','secondary'),
    ('D','danger')
)
class Profile(models.Model):
    """Per-user profile record, created automatically when a User is saved."""
    user = models.OneToOneField(User,on_delete=models.CASCADE,related_name='profile')
    prof_pic = models.ImageField(upload_to='images/',default='./img/avator.png')
    bio = models.TextField(max_length=50,default='this is my bio')
    name = models.CharField(blank=True,max_length=120)
    contact =models.PositiveIntegerField(null=True,blank=True)
    email = models.EmailField()
    location = models.CharField(max_length=60, blank=True)
    stripe_customer_id = models.CharField(max_length=50, blank=True, null=True)
    one_click_purchasing = models.BooleanField(default=False)

    def __str__(self):
        return f'{self.user.username} Profile'

    # NOTE(review): these signal receivers are defined inside the class body.
    # @receiver still registers them when the class statement executes, but
    # module level would be the conventional placement — confirm intent.
    @receiver(post_save,sender=User)
    def create_user_profile(sender,instance,created,**kwargs):
        # Create a Profile the first time a User row is inserted.
        if created:
            userprofile = Profile.objects.create(user=instance)

    @receiver(post_save,sender=User)
    def save_user_profile(sender,instance,created,**kwargs):
        # Keep the related Profile persisted whenever the User is saved.
        instance.profile.save()

    def save_profile(self):
        return self.save()

    def delete_profile(self):
        return self.delete()

    @classmethod
    def search_profile(cls,name):
        # Case-insensitive substring match against the owning username.
        return cls.objects.filter(user__username__icontains=name).all()
class Item(models.Model):
    """A product offered for sale (suit, trousers, coat or dress)."""
    title = models.CharField(max_length=200)
    price = models.FloatField()
    # NOTE(review): string default on a FloatField — presumably meant 0;
    # verify against existing rows/migrations before changing.
    discount_price = models.FloatField(null=True,blank=True,default="0")
    category = models.CharField(choices=CATEGORY_CHOICES,max_length=2,default="SU")
    label = models.CharField(choices=LABEL_CHOICES,max_length=1,default="P")
    photo = models.ImageField(null=True,blank=True)
    description = models.TextField(null=True,blank=True)
    user = models.ForeignKey(Profile, on_delete=models.CASCADE, related_name='posts')
    created = models.DateTimeField(auto_now_add=True, null=True)

    def get_all_comments(self):
        # All Comment rows attached to this item (related_name='comments').
        return self.comments.all()

    @classmethod
    def search_term(cls,searchterm):
        # Case-insensitive match against title, description or category code.
        search = Item.objects.filter(Q(title__icontains=searchterm)|Q(description__icontains=searchterm)|Q(category__icontains=searchterm))
        return search

    def __str__(self):
        return f"{self.title}"
class Comment(models.Model):
    """A user comment attached to an Item."""
    comment = models.TextField()
    item = models.ForeignKey(Item, on_delete=models.CASCADE, related_name='comments')
    user = models.ForeignKey(Profile, on_delete=models.CASCADE, related_name='comments')
    created = models.DateTimeField(auto_now_add=True, null=True)
class OrderItem(models.Model):
    """A single cart line: an Item plus a quantity for one user."""
    user = models.ForeignKey(User,on_delete=models.CASCADE)
    item = models.ForeignKey(Item,on_delete=models.CASCADE)
    is_ordered = models.BooleanField(default=False)
    quantity = models.IntegerField(default=1)

    def __str__(self):
        return f"{self.quantity} of {self.item.title}"

    def get_total_price(self):
        # Line total at the full (undiscounted) unit price.
        return self.quantity * self.item.price

    def get_total_discount_price(self):
        # Line total at the discounted unit price.
        return self.quantity * self.item.discount_price

    def get_amount_saved(self):
        # Difference between the full and discounted line totals.
        return self.get_total_price() - self.get_total_discount_price()

    def get_final_price(self):
        # Prefer the discounted total when a (truthy, i.e. non-zero)
        # discount price is set on the item.
        if self.item.discount_price:
            return self.get_total_discount_price()
        return self.get_total_price()
class Order(models.Model):
    """A user's order: a set of OrderItems plus billing/payment/refund state."""
    user = models.ForeignKey(User,on_delete=models.CASCADE)
    ref_code = models.CharField(max_length=30)
    is_ordered = models.BooleanField(default=False)
    items = models.ManyToManyField(OrderItem)
    start_date = models.DateTimeField(auto_now_add=True)
    ordered_date = models.DateTimeField()
    billing_address = models.ForeignKey('BillingAddress',on_delete=models.SET_NULL,null=True,blank=True)
    payment = models.ForeignKey('Payment',on_delete=models.SET_NULL,null=True,blank=True)
    coupon = models.ForeignKey('Coupon',on_delete=models.SET_NULL,null=True,blank=True)
    being_delivered = models.BooleanField(default=False)
    received = models.BooleanField(default=False)
    refund_requested = models.BooleanField(default=False)
    refund_approved = models.BooleanField(default=False)

    def __str__(self):
        return f"{ self.user.username }"

    def get_total(self):
        # Sum the final (possibly discounted) price of every line item,
        # then subtract the coupon amount if one is attached.
        total = 0
        for order_item in self.items.all():
            total += order_item.get_final_price()
        if self.coupon:
            total -= self.coupon.amount
        return total
class BillingAddress(models.Model):
    """Billing address captured during checkout."""
    user = models.ForeignKey(User,on_delete=models.CASCADE)
    street_address = models.CharField(max_length=50)
    apartment_address=models.CharField(max_length=50)
    country = CountryField(multiple=False,default="Kenya")
    zipcode = models.CharField(max_length=50)

    def __str__(self):
        return f"{self.user.username }"
class Payment(models.Model):
    """Record of a Stripe charge made for an order."""
    stripe_charge_id = models.CharField(max_length=50)
    user = models.ForeignKey(User,on_delete=models.SET_NULL,blank=True,null=True)
    amount= models.FloatField()
    timestamp = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return f"{ self.user.username }"
class Coupon(models.Model):
    """A discount code worth a fixed amount off an order total."""
    code = models.CharField(max_length=15)
    amount = models.FloatField(default=50)

    def __str__(self):
        return f"{self.code }"
class Refund(models.Model):
    """A refund request filed against an order."""
    order = models.ForeignKey(Order,on_delete=models.CASCADE)
    reason = models.TextField()
    accepted = models.BooleanField(default=False)
    email = models.EmailField()

    def __str__(self):
        return f"{ self.pk }"
"[email protected]"
]
| |
c16d87aad5c12e73552775d7063b510229c7d2b4 | 781e2692049e87a4256320c76e82a19be257a05d | /all_data/exercism_data/python/word-count/c9dbb4f567ab4afa840dedb35209b154.py | 3ac8cb50e479178794591a5bbba3d8776dab6ff1 | []
| no_license | itsolutionscorp/AutoStyle-Clustering | 54bde86fe6dbad35b568b38cfcb14c5ffaab51b0 | be0e2f635a7558f56c61bc0b36c6146b01d1e6e6 | refs/heads/master | 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null | UTF-8 | Python | false | false | 220 | py | """
Solution for "Word Count" Exercise
"""
def word_count(input):
    """Count occurrences of each space-separated word in *input*.

    Newlines are treated as spaces; empty tokens produced by consecutive
    separators are ignored. Returns a dict mapping word -> count.
    """
    counts = {}
    for token in input.replace('\n', ' ').split(' '):
        if token:
            counts[token] = counts.get(token, 0) + 1
    return counts
| [
"[email protected]"
]
| |
60fcc7583e7a4666ee84df1b89aa0de7c824794d | 5c724d6e03e4194680c793718a4f72a58ca66bb1 | /app/migrations/0015_auto_20180903_2010.py | 75587dd9dfbf783469020f2e266a255f975a8bff | []
| no_license | tigrezhito1/bat | 26002de4540bb4eac2751a31171adc45687f4293 | 0ea6b9b85e130a201c21eb6cbf09bc21988d6443 | refs/heads/master | 2020-05-02T07:13:06.936015 | 2019-03-26T15:04:17 | 2019-03-26T15:04:17 | 177,812,144 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 604 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-09-03 20:10
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django 1.11.15. The hard-coded datetime default below
    # is a snapshot taken when the migration was generated, which is normal
    # for generated migrations and only affects historical state.

    dependencies = [
        ('app', '0014_auto_20180903_1946'),
    ]

    operations = [
        migrations.AlterField(
            model_name='produccion',
            name='fecha',
            field=models.DateTimeField(default=datetime.datetime(2018, 9, 3, 20, 10, 30, 268737), editable=False, help_text='Fecha de recepci\xf3n de la llamada (No se puede modificar)'),
        ),
    ]
| [
"[email protected]"
]
| |
f4cb702bb909488b44205a7c3c85860429401cc4 | 0737b583899d7d5ddcbdda49731046036d65d998 | /sven/settings.py | 746ef6ac71a5df61f46113c3869cf2e519f9fb90 | []
| no_license | martolini/sven_admin | 71b8b6e84e0eae3d7e971a62e3715efac708b160 | 610183b2d995b1d1f6b1946c13e9b4c9070ef26f | refs/heads/master | 2021-01-25T04:58:00.067439 | 2014-02-25T22:04:25 | 2014-02-25T22:04:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,190 | py | # Django settings for sven project.
import os
import socket
# Debug mode is on everywhere except the host named "cauchy" (production).
# NOTE(review): hostname-based toggling is fragile — an environment variable
# would be more explicit.
DEBUG = 'cauchy' not in socket.gethostname()
TEMPLATE_DEBUG = DEBUG

ADMINS = (
    # ('Your Name', '[email protected]'),
)

MANAGERS = ADMINS

BASE_DIR = os.path.dirname(os.path.abspath(__file__))

if DEBUG:
    # Development: local SQLite database and in-tree media uploads.
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
            'NAME': 'dev.db', # Or path to database file if using sqlite3.
            # The following settings are not used with sqlite3:
            'USER': '',
            'PASSWORD': '',
            'HOST': '', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
            'PORT': '', # Set to empty string for default.
        }
    }
    STATIC_ROOT = ''
    MEDIA_ROOT = os.path.join(BASE_DIR, 'files/uploads')
else:
    # Production: PostgreSQL plus fixed deployment paths.
    # SECURITY(review): database credentials are committed to source control —
    # consider loading them from the environment.
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
            'NAME': 'svenquiz', # Or path to database file if using sqlite3.
            # The following settings are not used with sqlite3:
            'USER': 'sven',
            'PASSWORD': 'svenadmin',
            'HOST': 'localhost', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
            'PORT': '', # Set to empty string for default.
        }
    }
    STATIC_ROOT = '/opt/svenv/static'
    MEDIA_ROOT = '/opt/svenv/uploads'
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ['*']
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_URL = '/uploads/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
# SECURITY(review): this key is committed to source control; move it to an
# environment variable and rotate it before deploying.
SECRET_KEY = 'ald-9%v)kc*#$@hip$kpr6&tv_1h*$snl)p67_lvpag+1+phq&'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'sven.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'sven.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'sven.app',
'sven.app.quiz',
'south',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
| [
"[email protected]"
]
| |
9fdef289488f09abcc2b7641aa8a537ae0231dd7 | e70d0bcc547d63b338ff51c253aa95d78ea99992 | /xdl/test/python/unit_test/ps_ops/test_ps_pull_op.py | dfa1f106fecb5690866aa4d7d8629b0e15c3b2d8 | [
"Apache-2.0"
]
| permissive | niumeng07/x-deeplearning | 2513f7ba823521c40e0346284f5dd0aca5562e40 | 6d3bc3ad4996ab8938c56d8a834af07a04dc2f67 | refs/heads/master | 2020-04-12T23:06:24.447833 | 2019-07-06T16:06:16 | 2019-07-06T16:06:16 | 162,808,758 | 2 | 0 | Apache-2.0 | 2018-12-22T12:18:01 | 2018-12-22T12:17:59 | null | UTF-8 | Python | false | false | 1,377 | py | # Copyright (C) 2016-2018 Alibaba Group Holding Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import xdl
import unittest
import numpy as np
from xdl.python.lib.datatype import *
from xdl.python.lib.graph import execute
class TestPsPullOp(unittest.TestCase):
    """Unit test for XDL's ps_pull_op operator."""

    def test_all(self):
        # Register a 4-element int32 variable initialised to ones on the
        # parameter server, then pull it back and verify the values.
        var = xdl.Variable(name="w", dtype=DataType.int32, shape=[4], initializer=xdl.Ones())
        execute(xdl.variable_registers())
        execute(xdl.global_initializers())
        op = xdl.ps_pull_op(var_name="w", var_type="index", dtype=DataType.int32)
        ret = execute(op)
        self.assertTrue((ret == np.array([1,1,1,1])).all())
def suite():
    """Build the TestSuite containing every TestPsPullOp case."""
    loader = unittest.TestLoader()
    return loader.loadTestsFromTestCase(TestPsPullOp)
if __name__ == '__main__':
    # Run the suite directly when this file is executed as a script.
    unittest.TextTestRunner().run(suite())
| [
"[email protected]"
]
| |
95b462c1589ad5fcc2afbb3b460ed427d4606ddf | ef8d6ba2afe24cd981eeb92624e838b6acc98166 | /src/ckanext-delineate/setup.py | 4b2815902ac1b31eff768239e15632773f5b8dd6 | [
"BSD-3-Clause"
]
| permissive | CI-WATER/portal | cd68cd8f514d10092faea3e78d4421819c9714c3 | c61660c8389c7af82517cbd0154bc83f9737c4d1 | refs/heads/master | 2016-09-05T10:40:10.721184 | 2014-07-01T17:43:06 | 2014-07-01T17:43:06 | 20,502,671 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 751 | py | from setuptools import setup, find_packages
version = '0.1'
setup(
name='ckanext-delineate',
version=version,
description="Watershed delineation extension",
long_description="""\
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Pabitra Dash',
author_email='[email protected]',
url='',
license='',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=['ckanext', 'ckanext.delineate'],
include_package_data=True,
zip_safe=False,
install_requires=[
# -*- Extra requirements: -*-
],
entry_points=\
"""
[ckan.plugins]
# Add plugins here, eg
watershed_delineate=ckanext.delineate.plugin:DelineateWatershedPlugins
""",
)
| [
"[email protected]"
]
| |
29ade0802ba7f753dde50457d3d040b9d9b4e45a | a16feb303b7599afac19a89945fc2a9603ae2477 | /Simple_Python/standard/traceback/traceback_6.py | 98c71ab419e8207fcac4ff823ee242f3b6713f45 | []
| no_license | yafeile/Simple_Study | d75874745ce388b3d0f9acfa9ebc5606a5745d78 | c3c554f14b378b487c632e11f22e5e3118be940c | refs/heads/master | 2021-01-10T22:08:34.636123 | 2015-06-10T11:58:59 | 2015-06-10T11:58:59 | 24,746,770 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 263 | py | #! /usr/bin/env/python
# -*- coding:utf-8 -*-
import traceback
import sys
from traceback_1 import call_function
def f():
    # Print the current call stack to stdout (default would be stderr).
    traceback.print_stack(file=sys.stdout)
# Python 2 demo script: show how the printed stack grows with call depth.
print 'Calling f() directly:'
f()

print
print 'Calling f() from 3 levels deep:'
call_function(f)
"[email protected]"
]
| |
d7af9a0c88e28ef76bd358174e5b026502fcb910 | 956d2056e7cc56cc5ef2c6224a52a5e5c2987785 | /utils/pyrrd/backend/external.py | 69b49adb1b75d3f9cf03e1a3dd935cd127612bc0 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
]
| permissive | AndriyZabavskyy/taf | 60f7ff7476d6a7443a1e5a752778277f6629e65b | 2007bf3fe66edfe704e485141c55caed54fe13aa | refs/heads/master | 2021-05-20T01:01:23.835257 | 2020-04-10T08:31:15 | 2020-04-10T08:31:15 | 252,119,009 | 0 | 0 | Apache-2.0 | 2020-04-01T08:40:24 | 2020-04-01T08:40:23 | null | UTF-8 | Python | false | false | 11,722 | py | """
Copyright (c) 2004-2008, AdytumSolutions, Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of AdytumSolutions, Inc. nor the names of
its contributors may be used to endorse or promote products
derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""
import sys
from subprocess import Popen, PIPE
from . import common
from ..exceptions import ExternalCommandError
from ..util import XML
def _cmd(command, args=""):
    """Run ``rrdtool <command> <args>`` in a subprocess and return its stdout.

    Raises ExternalCommandError if the tool writes anything to stderr or
    exits with a non-zero return code.
    """
    # File descriptors cannot be closed on exec when using a shell on win32.
    if sys.platform == 'win32':
        close_fds = False
    else:
        close_fds = True
    # SECURITY(review): shell=True with string interpolation is
    # shell-injection prone if filenames/args ever come from untrusted
    # input — verify callers before exposing this externally.
    command = "rrdtool %s %s" % (command, args)
    process = Popen(command, shell=True, stdout=PIPE, stderr=PIPE,
        close_fds=close_fds)
    (stdout, stderr) = process.communicate()
    # Any stderr output is treated as a failure, even with exit status 0.
    if stderr:
        print(command)
        raise ExternalCommandError(stderr.strip())
    if process.returncode != 0:
        errmsg = "Return code from '%s' was %s." % (
            command, process.returncode)
        raise ExternalCommandError(errmsg)
    return stdout
def concat(args):
    """Return *args* unchanged unless it is a list, in which case join the
    elements into one space-separated string."""
    if isinstance(args, list):
        return " ".join(args)
    return args
def create(filename, parameters):
    """Create a new RRD file via ``rrdtool create``.

    filename -- path of the RRD file to create
    parameters -- rrdtool create arguments, as a string or list of strings

    >>> import tempfile
    >>> rrdfile = tempfile.NamedTemporaryFile()
    >>> parameters = ' --start 920804400'
    >>> parameters += ' DS:speed:COUNTER:600:U:U'
    >>> parameters += ' RRA:AVERAGE:0.5:1:24'
    >>> parameters += ' RRA:AVERAGE:0.5:6:10'
    >>> create(rrdfile.name, parameters)
    >>> import os
    >>> os.path.exists(rrdfile.name)
    True
    """
    parameters = "%s %s" % (filename, concat(parameters))
    # 'create' produces no useful stdout (the previously bound-but-unused
    # `output` local has been dropped); failures surface as exceptions.
    _cmd('create', parameters)
def update(filename, data, debug=False):
    """Feed new samples into an existing RRD file.

    When *debug* is true the verbose ``updatev`` subcommand is used instead
    of ``update``.

    >>> import tempfile
    >>> rrdfile = tempfile.NamedTemporaryFile()
    >>> parameters = ' --start 920804400'
    >>> parameters += ' DS:speed:COUNTER:600:U:U'
    >>> parameters += ' RRA:AVERAGE:0.5:1:24'
    >>> parameters += ' RRA:AVERAGE:0.5:6:10'
    >>> create(rrdfile.name, parameters)
    >>> import os
    >>> os.path.exists(rrdfile.name)
    True
    >>> update(rrdfile.name,
    ...     '920804700:12345 920805000:12357 920805300:12363')
    >>> update(rrdfile.name,
    ...     '920805600:12363 920805900:12363 920806200:12373')
    >>> update(rrdfile.name,
    ...     '920806500:12383 920806800:12393 920807100:12399')
    >>> update(rrdfile.name,
    ...     '920807400:12405 920807700:12411 920808000:12415')
    >>> update(rrdfile.name,
    ...     '920808300:12420 920808600:12422 920808900:12423')
    """
    cli_args = "%s %s" % (filename, concat(data))
    subcommand = 'updatev' if debug else 'update'
    _cmd(subcommand, cli_args)
def fetchRaw(filename, query):
    """Run ``rrdtool fetch`` and return its raw textual output, stripped."""
    cli_args = "%s %s" % (filename, concat(query))
    return _cmd('fetch', cli_args).strip()
def fetch(filename, query):
    """
    >>> import tempfile
    >>> rrdfile = tempfile.NamedTemporaryFile()
    >>> parameters = ' --start 920804400'
    >>> parameters += ' DS:speed:COUNTER:600:U:U'
    >>> parameters += ' RRA:AVERAGE:0.5:1:24'
    >>> parameters += ' RRA:AVERAGE:0.5:6:10'
    >>> create(rrdfile.name, parameters)
    >>> import os
    >>> os.path.exists(rrdfile.name)
    True
    >>> update(rrdfile.name, '920804700:12345 920805000:12357 920805300:12363')
    >>> update(rrdfile.name, '920805600:12363 920805900:12363 920806200:12373')
    >>> update(rrdfile.name, '920806500:12383 920806800:12393 920807100:12399')
    >>> update(rrdfile.name, '920807400:12405 920807700:12411 920808000:12415')
    >>> update(rrdfile.name, '920808300:12420 920808600:12422 920808900:12423')
    >>> results = fetch(rrdfile.name, 'AVERAGE --start 920804400 --end 920809200')

    # Results are provided in two ways, one of which is by the data source
    # name:
    >>> sorted(results["ds"].keys())
    ['speed']

    # accessing a DS entry like this gives of a (time, data) tuple:
    >>> results["ds"]["speed"][0]
    (920805000, 0.04)

    # The other way of accessing the results data is by data source time
    # entries:
    >>> keys = sorted(results["time"].keys())
    >>> len(keys)
    16
    >>> keys[0:6]
    [920805000, 920805300, 920805600, 920805900, 920806200, 920806500]
    >>> results["time"][920805000]
    {'speed': 0.04}

    The benefits of using an approach like this become obvious when the RRD
    file has multiple DSs and RRAs.
    """
    output = fetchRaw(filename, concat(query))
    # rrdtool output: first line lists DS names, then blank line, then rows
    # of "<timestamp>: <value> [<value> ...]".
    lines = [line for line in output.split('\n') if line]
    dsNames = lines[0].split()
    # Indexed both by DS name ("ds") and by timestamp ("time").
    results = {
        "ds": {},
        "time": {},
    }
    for line in lines[2:]:
        time, data = line.split(":")
        data = [common.coerce(datum) for datum in data.split()]
        # One dict of {dsName: value} per timestamp...
        results["time"][int(time)] = dict(list(zip(dsNames, data)))
        # ...and one list of (timestamp, value) pairs per DS name.
        for dsName, datum in zip(dsNames, data):
            results["ds"].setdefault(dsName, [])
            results["ds"][dsName].append((int(time), common.coerce(datum)))
    return results
def dump(filename, outfile="", parameters=""):
    """
    >>> import tempfile
    >>> rrdfile = tempfile.NamedTemporaryFile()
    >>> parameters = ' --start 920804400'
    >>> parameters += ' DS:speed:COUNTER:600:U:U'
    >>> parameters += ' RRA:AVERAGE:0.5:1:24'
    >>> parameters += ' RRA:AVERAGE:0.5:6:10'
    >>> create(rrdfile.name, parameters)
    >>> xml = dump(rrdfile.name)
    >>> xmlBytes = len(xml)
    >>> 3300 < xmlBytes < 4000
    True
    >>> xmlCommentCheck = '<!-- Round Robin Database Dump'
    >>> xmlCommentCheck in xml[0:200]
    True

    >>> xmlfile = tempfile.NamedTemporaryFile()
    >>> dump(rrdfile.name, xmlfile.name)
    >>> import os
    >>> os.path.exists(xmlfile.name)
    True
    """
    parameters = "%s %s %s" % (filename, outfile, concat(parameters))
    output = _cmd('dump', parameters)
    # With no outfile, rrdtool writes the XML to stdout — return it.
    # Otherwise rrdtool wrote the file itself and this returns None.
    if not outfile:
        return output.strip()
def load(filename):
    """
    Load RRD data via the RRDtool XML dump into an ElementTree.

    >>> import tempfile
    >>> rrdfile = tempfile.NamedTemporaryFile()
    >>> parameters = ' --start 920804400'
    >>> parameters += ' DS:speed:COUNTER:600:U:U'
    >>> parameters += ' RRA:AVERAGE:0.5:1:24'
    >>> parameters += ' RRA:AVERAGE:0.5:6:10'
    >>> create(rrdfile.name, parameters)
    >>> tree = load(rrdfile.name)
    >>> [x.tag for x in tree]
    ['version', 'step', 'lastupdate', 'ds', 'rra', 'rra']
    """
    xml_text = dump(filename)
    return XML(xml_text)
def info(filename, obj, **kwargs):
    """Print info for the given pyrrd object.

    NOTE(review): *filename* and **kwargs are accepted for interface
    symmetry with the other backend functions but are unused here; the work
    is delegated entirely to obj.printInfo().
    """
    obj.printInfo()
def graph(filename, parameters):
    """
    >>> import tempfile
    >>> rrdfile = tempfile.NamedTemporaryFile()
    >>> graphfile = tempfile.NamedTemporaryFile()
    >>> parameters = ' --start 920804400'
    >>> parameters += ' DS:speed:COUNTER:600:U:U'
    >>> parameters += ' RRA:AVERAGE:0.5:1:24'
    >>> parameters += ' RRA:AVERAGE:0.5:6:10'
    >>> create(rrdfile.name, parameters)
    >>> import os
    >>> os.path.exists(rrdfile.name)
    True
    >>> update(rrdfile.name, '920804700:12345 920805000:12357 920805300:12363')
    >>> update(rrdfile.name, '920805600:12363 920805900:12363 920806200:12373')
    >>> update(rrdfile.name, '920806500:12383 920806800:12393 920807100:12399')
    >>> update(rrdfile.name, '920807400:12405 920807700:12411 920808000:12415')
    >>> update(rrdfile.name, '920808300:12420 920808600:12422 920808900:12423')
    >>> parameters = ' --start 920804400 --end 920808000'
    >>> parameters += ' --vertical-label km/h'
    >>> parameters += ' DEF:myspeed=%s:speed:AVERAGE' % rrdfile.name
    >>> parameters += ' CDEF:realspeed=myspeed,1000,*'
    >>> parameters += ' CDEF:kmh=myspeed,3600,*'
    >>> parameters += ' CDEF:fast=kmh,100,GT,kmh,0,IF'
    >>> parameters += ' CDEF:good=kmh,100,GT,0,kmh,IF'
    >>> parameters += ' HRULE:100#0000FF:"Maximum allowed"'
    >>> parameters += ' AREA:good#00FF00:"Good speed"'
    >>> parameters += ' AREA:fast#00FFFF:"Too fast"'
    >>> parameters += ' LINE2:realspeed#FF0000:Unadjusted'
    >>> graph(graphfile.name, parameters)
    >>> os.path.exists(graphfile.name)
    True
    """
    # Here *filename* is the image file rrdtool writes, not an RRD file.
    parameters = "%s %s" % (filename, concat(parameters))
    _cmd('graph', parameters)
def prepareObject(function, obj):
    """
    Serve as a single translation point between the pyrrd wrapper
    objects and the backend functions that call out to rrdtool.

    For all of the rrdtool-methods in this module, we need a filename
    and then parameters -- both as strings.  This function extracts
    both from *obj* for the given *function* name ('create', 'update',
    'fetch', 'info' or 'graph') and returns them as a tuple.  An
    unrecognised *function* falls through and returns None.

    This function will get called by methods in the pyrrd wrapper
    objects.  For instance, most of the methods of pyrrd.rrd.RRD
    will call this function.  In graph, pretty much only the method
    pyrrd.graph.Graph.write() will call this function.
    """
    if function == 'create':
        validParams = ['start', 'step']
        params = common.buildParameters(obj, validParams)
        # Data sources first, then archives, in definition order.
        data = [str(x) for x in obj.ds]
        data += [str(x) for x in obj.rra]
        return (obj.filename, params + data)
    elif function == 'update':
        validParams = ['template']
        params = common.buildParameters(obj, validParams)
        # Each entry of obj.values is a (time, data) pair.  If the first
        # entry carries data, emit 'time:data' strings; otherwise the
        # first slot already holds a pre-formatted update string.
        FIRST_VALUE = 0
        DATA = 1
        if obj.values[FIRST_VALUE][DATA]:
            data = ['%s:%s' % (time, values)
                    for time, values in obj.values]
        else:
            data = [data for data, nil in obj.values]
        return (obj.filename, params + data)
    elif function == 'fetch':
        validParams = ['resolution', 'start', 'end']
        params = common.buildParameters(obj, validParams)
        return (obj.filename, obj.cf, params)
    elif function == 'info':
        return (obj.filename, obj)
    elif function == 'graph':
        validParams = ['start', 'end', 'step', 'title',
            'vertical_label', 'width', 'height', 'only_graph',
            'upper_limit', 'lower_limit', 'rigid', 'alt_autoscale',
            'alt_autoscale_max', 'no_gridfit', 'x_grid', 'y_grid',
            'alt_y_grid', 'logarithmic', 'units_exponent', 'zoom',
            'font', 'font_render_mode', 'interlaced', 'no_legend',
            'force_rules_legend', 'tabwidth', 'base', 'color', 'imgformat',
            'slope_mode']
        params = common.buildParameters(obj, validParams)
        data = [str(x) for x in obj.data]
        return (obj.filename, params + data)
if __name__ == "__main__":
    # Running this module directly executes the doctests embedded above.
    import doctest
    doctest.testmod()
| [
"[email protected]"
]
| |
22df3a45ebed1aaa000d917da4cf9f6d77c8ad8e | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p04001/s037589622.py | 245a5174bd7db200bf673123ca5d544e43c7f7dc | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 332 | py | #!/usr/bin python3
# -*- coding: utf-8 -*-
def main():
    """Read a digit string from stdin and print the total of every
    expression formed by inserting '+' between some adjacent digits.

    Each of the 2**(len-1) gap subsets is encoded as a bitmask; bit j
    set means a '+' goes after digit j.
    """
    digits = input()
    gaps = len(digits) - 1
    total = 0
    for mask in range(2 ** gaps):
        expr = digits[0]
        for pos in range(gaps):
            if mask >> pos & 1:
                expr += '+'
            expr += digits[pos + 1]
        total += eval(expr)
    print(total)
if __name__ == '__main__':
    # Script entry point: read the puzzle input and print the answer.
    main()
"[email protected]"
]
| |
0150ee4b951d7aa98d33671a60f4da5d924e1da8 | 2de33ba731066a63352080dd19da1e4582bb00c4 | /collective.project/collective/project/browser/create_iteration.py | d3c03e2dcb7ff08becb20002dd3249c618cfcb4f | []
| no_license | adam139/plonesrc | 58f48e7cdfc8fbed7398011c40649f095df10066 | cbf20045d31d13cf09d0a0b2a4fb78b96c464d20 | refs/heads/master | 2021-01-10T21:36:44.014240 | 2014-09-09T04:28:04 | 2014-09-09T04:28:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,139 | py | from zope import interface, schema
from z3c.form import form, field, button
from plone.app.z3cform.layout import wrap_form
from collective.project import projectMessageFactory as _
from collective.project.bbb import getSite
from zope.schema import vocabulary
from zope.interface import directlyProvides
from zope.schema.interfaces import IVocabularyFactory
import datetime
import calendar
# Module-level Choice field reused by CreateIterationSchema below; its
# values come from the named 'projects_vocab' vocabulary registered at
# the bottom of this module.
projectsField = schema.Choice(
    title=u'Projects',
    description=u'Projects field.',
    vocabulary='projects_vocab')
def breadcrumbs(context, project):
    """Return ' > '-joined Titles for *project* and each of its parents.

    The site-relative physical path is rendered deepest-first, then
    each successively shorter prefix of the same path.
    """
    segments = list(project.getObject().getPhysicalPath())[2:]
    titles = []
    while segments:
        titles.append(context.restrictedTraverse('/'.join(segments)).Title())
        segments = segments[:-1]
    return ' > '.join(titles)
class CreateIterationSchema(interface.Interface):
    """Form schema for the iteration-creation tool: the iteration title
    plus the set of projects to create it in."""
    # NOTE(review): this default is evaluated once at module import
    # time, so a long-running process keeps offering the month it was
    # started in rather than the current month — confirm intent.
    iteration = schema.TextLine(
        title=_(u"Iteration"),
        default=_(unicode(datetime.datetime.today().strftime('%B %Y'))))
    projects = schema.Set(
        title=u'Project(s)',
        value_type=projectsField,
    )
class CreateIterationForm(form.Form):
    """z3c.form form that creates a new iteration inside each selected
    project and then deactivates every other active iteration."""

    fields = field.Fields(CreateIterationSchema)
    ignoreContext = True  # don't use context to get widget data
    label = _(u"Iteration tool")
    description = _(u"Create iteration for selected projects")

    def deactivate_iteration(self, action, data):
        """Deactivate every active iteration except the one named in *data*."""
        wftool = self.context.portal_workflow
        iterations = find_iterations()
        for iteration in iterations:
            if data['iteration']:
                data_iter = data['iteration']
                new_iter = data_iter.lower().replace(' ', '-')
                # 'obj' was previously named 'iter', shadowing the builtin.
                obj = iteration.getObject()
                if not unicode(obj.id) == new_iter:  # don't deactivate the current iteration
                    if wftool.getInfoFor(obj, 'review_state') == 'active':
                        wftool.doActionFor(obj, 'deactivate')

    def create_iteration(self, action, data):
        """Create an iteration (id derived from the title) in each
        selected project, stamping it with this month's date range."""
        if data['iteration']:
            data_iter = data['iteration']
            new_iter = data_iter.lower().replace(' ', '-')
            projects = find_projects()
            for project in projects:
                proj = project.getObject()
                if proj in data['projects']:
                    try:
                        proj.invokeFactory('iteration', new_iter)
                    # Was a bare 'except:'; narrowed so SystemExit and
                    # KeyboardInterrupt are no longer swallowed.
                    except Exception:
                        print("Cannot create iteration %s for project %s." % (new_iter, proj.absolute_url()))
                    try:
                        iter_new = proj[new_iter]
                        iter_new.setTitle(data_iter)
                        iter_new.start = startDate()
                        iter_new.stop = stopDate()
                        iter_new.reindexObject()
                    except Exception:
                        print("Cannot create iteration %s for project %s." % (new_iter, proj.absolute_url()))

    @button.buttonAndHandler(u'Create')
    def handleApply(self, action):
        """Handle the Create button: validate, then create + deactivate."""
        data, errors = self.extractData()
        if errors:
            self.status = form.EditForm.formErrorsMessage
        else:
            self.create_iteration(action, data)
            self.deactivate_iteration(action, data)
def projectsDict(context):
    """Map breadcrumb labels to project objects for every project in the site."""
    return dict((breadcrumbs(context, brain), brain.getObject())
                for brain in find_projects())
def projectsVocab(context):
    """Build a SimpleVocabulary of projects keyed by breadcrumb label,
    sorted alphabetically."""
    return vocabulary.SimpleVocabulary.fromItems(
        sorted(projectsDict(context).items()))
def find_iterations():
    """Return catalog brains for every iteration object in the site."""
    return getSite().portal_catalog(portal_type='iteration')
def find_projects():
    """Return catalog brains for every project object in the site."""
    return getSite().portal_catalog(portal_type='project')
def startDate():
    """Return midnight on the first day of the current month."""
    today = datetime.datetime.now()
    return today.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
def stopDate():
    """Return midnight on the last day of the current month."""
    today = datetime.datetime.now()
    final_day = calendar.monthrange(today.year, today.month)[1]
    return datetime.datetime(today.year, today.month, final_day)
# Register projectsVocab as a named vocabulary factory and expose the
# form wrapped in the standard Plone page layout.
directlyProvides(projectsVocab, IVocabularyFactory)
CreateIteration = wrap_form(CreateIterationForm)
| [
"[email protected]"
]
| |
c6b2508756ca483742fc681caa459b0b1fe7fff9 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/verbs/_wounder.py | b0d6cd04f474aad1a04a22fbffe4505de907b214 | [
"MIT"
]
| permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 233 | py |
from xai.brain.wordbase.verbs._wound import _WOUND
# class header
class _WOUNDER(_WOUND, ):
  """Word entry for "wounder", modelled as a variant of the base verb
  "wound" (see the `basic` attribute)."""
  def __init__(self,):
    # Inherit all base-word state, then override the identity fields.
    _WOUND.__init__(self)
    self.name = "WOUNDER"
    self.specie = 'verbs'
    self.basic = "wound"
    self.jsondata = {}
| [
"[email protected]"
]
| |
73b384d0b1fb7908b2521c4f150f93c076b807c7 | 10d17864a685c025bb77959545f74b797f1d6077 | /capitulo 07/07.22.py | d7274f669aa67ce98579dc96a66e3b781eec34c2 | []
| no_license | jcicerof/IntroducaoPython | 02178d2dfcaa014587edbd3090c517089ccef7c2 | 02e619c7c17e74acdc3268fbfae9ab624a3601dd | refs/heads/master | 2020-04-24T18:12:21.422079 | 2019-02-23T05:14:43 | 2019-02-23T05:14:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 623 | py | ##############################################################################
# Parte do livro Introdução à Programação com Python
# Autor: Nilo Ney Coutinho Menezes
# Editora Novatec (c) 2010-2019
# Primeira edição - Novembro/2010 - ISBN 978-85-7522-250-8
# Segunda edição - Junho/2014 - ISBN 978-85-7522-408-3
# Terceira edição - Janeiro/2019 - ISBN 978-85-7522-718-3
# Site: http://python.nilo.pro.br/
#
# Arquivo: listagem3\capítulo 07\07.22.py
# Descrição:
##############################################################################
"771".isdigit()
"10.4".isdigit()
"+10".isdigit()
"-5".isdigit()
| [
"[email protected]"
]
| |
a15ba967a5a20d5c51a3fe4bd4b1b7ee046e37de | 350db570521d3fc43f07df645addb9d6e648c17e | /0779_K-th_Symbol_in_Grammar/solution_test.py | 954e62fd402ddafba1bf568523536c189f2aedf3 | []
| no_license | benjaminhuanghuang/ben-leetcode | 2efcc9185459a1dd881c6e2ded96c42c5715560a | a2cd0dc5e098080df87c4fb57d16877d21ca47a3 | refs/heads/master | 2022-12-10T02:30:06.744566 | 2022-11-27T04:06:52 | 2022-11-27T04:06:52 | 236,252,145 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 378 | py |
'''
779. K-th Symbol in Grammar
Level: Medium
https://leetcode.com/problems/k-th-symbol-in-grammar
'''
import unittest
class TestSum(unittest.TestCase):
    """Boilerplate sanity tests for the built-in sum()."""

    def test_sum(self):
        """sum over a list."""
        self.assertEqual(sum([1, 2, 3]), 6, "Should be 6")

    def test_sum_tuple(self):
        """sum over a tuple.

        Fixed: the tuple used to be (1, 2, 2), which sums to 5 and made
        this assertion (and the whole suite) fail.
        """
        self.assertEqual(sum((1, 2, 3)), 6, "Should be 6")
if __name__ == '__main__':
    # Run the suite via the unittest CLI when executed directly.
    unittest.main()
"[email protected]"
]
| |
64899c99b053884b463b2aca741431e307f7b480 | 1919fc2555dbcb6b865fdef0cc44c56c6c47e2f0 | /chapter_7/demo_7_2.py | 8b2b373f88eeeae54cc30d598f1dd6d869f9e330 | []
| no_license | ender8848/the_fluent_python | 10f8dd98fcf206b04ea6d34f47ad5e35f896a3ac | 058d59f6a11da34e23deb228e24a160d907f7643 | refs/heads/master | 2022-12-19T07:51:51.908127 | 2019-01-17T09:53:33 | 2019-01-17T09:53:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,781 | py | '''
装饰器的一个关键特性是,它们在被装饰的函数定义之后立即运行。
这通常是在导入时(即 Python 加载模块时) ,
如示例 7-2 中的 demo_7_2.py 模块所示。
'''
# Every function passed through @register is recorded here.
registry = []


def register(func):
    """Decorator that appends *func* to ``registry`` (announcing the
    registration on stdout) and hands the function back unchanged."""
    print('running register(%s)' % func)
    registry.append(func)
    return func
# f1 and f2 pass through register() the moment this module is loaded,
# which is why 'running register(...)' prints at import time.
@register
def f1():
    print('running f1()')
@register
def f2():
    print('running f2()')
# f3 is deliberately NOT decorated, so it never enters the registry.
def f3():
    print('running f3()')
def main():
    """Demonstrate that decoration already happened at import time:
    dump the registry, then invoke each demo function explicitly."""
    print('running main()')
    print('registry ->', registry)
    for fn in (f1, f2, f3):
        fn()
if __name__ == '__main__':
    # Only run the demo when executed as a script, not on import.
    main()
'''
running register(<function f1 at 0x1081ad268>)
running register(<function f2 at 0x1095c99d8>)
running main()
registry -> [<function f1 at 0x1081ad268>, <function f2 at 0x1095c99d8>]
running f1()
running f2()
running f3()
'''
'''
注意,register 在模块中其他函数之前运行(两次) 。
调用 register 时,传给它的参数是被装饰的函数,例如 <function f1 at 0x100631bf8>。
加载模块后,registry 中有两个被装饰函数的引用:f1 和 f2。
这两个函数,以及 f3,只在 main 明确调用它们时才执行。
如果导入 demo_7_2.py 模块(不作为脚本运行) ,
输出如下:
>>> import registration
running register(<function f1 at 0x10063b1e0>)
running register(<function f2 at 0x10063b268>)
此时查看 registry 的值,得到的输出如下:
>>> registration.registry
[<function f1 at 0x10063b1e0>, <function f2 at 0x10063b268>]
示例 7-2 主要想强调,函数装饰器在导入模块时立即执行,而被装饰的函数只在明确调用时运行。
这突出了 Python 程序员所说的导入时和运行时之间的区别
''' | [
"[email protected]"
]
| |
abee031f9dd8d4d4d9ab634022ebb1f4a6c78b3d | 14f4d045750f7cf45252838d625b2a761d5dee38 | /argo/argo/models/io_argoproj_workflow_v1alpha1_workflow.py | 9004cddc944622ff5565d97235c12f3fc64c9b23 | []
| no_license | nfillot/argo_client | cf8d7413d728edb4623de403e03d119fe3699ee9 | c8cf80842f9eebbf4569f3d67b9d8eff4ba405fa | refs/heads/master | 2020-07-11T13:06:35.518331 | 2019-08-26T20:54:07 | 2019-08-26T20:54:07 | 204,546,868 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,259 | py | # coding: utf-8
"""
Argo
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v2.3.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from argo.models.io_argoproj_workflow_v1alpha1_workflow_spec import IoArgoprojWorkflowV1alpha1WorkflowSpec # noqa: F401,E501
from argo.models.io_argoproj_workflow_v1alpha1_workflow_status import IoArgoprojWorkflowV1alpha1WorkflowStatus # noqa: F401,E501
from argo.models.io_k8s_apimachinery_pkg_apis_meta_v1_object_meta import IoK8sApimachineryPkgApisMetaV1ObjectMeta # noqa: F401,E501
class IoArgoprojWorkflowV1alpha1Workflow(object):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Maps python attribute name -> declared swagger type (drives to_dict).
    swagger_types = {
        'api_version': 'str',
        'kind': 'str',
        'metadata': 'IoK8sApimachineryPkgApisMetaV1ObjectMeta',
        'spec': 'IoArgoprojWorkflowV1alpha1WorkflowSpec',
        'status': 'IoArgoprojWorkflowV1alpha1WorkflowStatus'
    }

    # Maps python attribute name -> JSON key used on the wire.
    attribute_map = {
        'api_version': 'apiVersion',
        'kind': 'kind',
        'metadata': 'metadata',
        'spec': 'spec',
        'status': 'status'
    }

    def __init__(self, api_version=None, kind=None, metadata=None, spec=None, status=None):  # noqa: E501
        """IoArgoprojWorkflowV1alpha1Workflow - a model defined in Swagger"""  # noqa: E501
        self._api_version = None
        self._kind = None
        self._metadata = None
        self._spec = None
        self._status = None
        self.discriminator = None
        if api_version is not None:
            self.api_version = api_version
        if kind is not None:
            self.kind = kind
        # metadata, spec and status are required: the property setters
        # below raise ValueError if these are left as None.
        self.metadata = metadata
        self.spec = spec
        self.status = status

    @property
    def api_version(self):
        """Gets the api_version of this IoArgoprojWorkflowV1alpha1Workflow.  # noqa: E501

        APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources  # noqa: E501

        :return: The api_version of this IoArgoprojWorkflowV1alpha1Workflow.  # noqa: E501
        :rtype: str
        """
        return self._api_version

    @api_version.setter
    def api_version(self, api_version):
        """Sets the api_version of this IoArgoprojWorkflowV1alpha1Workflow.

        APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources  # noqa: E501

        :param api_version: The api_version of this IoArgoprojWorkflowV1alpha1Workflow.  # noqa: E501
        :type: str
        """
        self._api_version = api_version

    @property
    def kind(self):
        """Gets the kind of this IoArgoprojWorkflowV1alpha1Workflow.  # noqa: E501

        Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds  # noqa: E501

        :return: The kind of this IoArgoprojWorkflowV1alpha1Workflow.  # noqa: E501
        :rtype: str
        """
        return self._kind

    @kind.setter
    def kind(self, kind):
        """Sets the kind of this IoArgoprojWorkflowV1alpha1Workflow.

        Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds  # noqa: E501

        :param kind: The kind of this IoArgoprojWorkflowV1alpha1Workflow.  # noqa: E501
        :type: str
        """
        self._kind = kind

    @property
    def metadata(self):
        """Gets the metadata of this IoArgoprojWorkflowV1alpha1Workflow.  # noqa: E501

        :return: The metadata of this IoArgoprojWorkflowV1alpha1Workflow.  # noqa: E501
        :rtype: IoK8sApimachineryPkgApisMetaV1ObjectMeta
        """
        return self._metadata

    @metadata.setter
    def metadata(self, metadata):
        """Sets the metadata of this IoArgoprojWorkflowV1alpha1Workflow.

        :param metadata: The metadata of this IoArgoprojWorkflowV1alpha1Workflow.  # noqa: E501
        :type: IoK8sApimachineryPkgApisMetaV1ObjectMeta
        """
        # Required field: reject None.
        if metadata is None:
            raise ValueError("Invalid value for `metadata`, must not be `None`")  # noqa: E501
        self._metadata = metadata

    @property
    def spec(self):
        """Gets the spec of this IoArgoprojWorkflowV1alpha1Workflow.  # noqa: E501

        :return: The spec of this IoArgoprojWorkflowV1alpha1Workflow.  # noqa: E501
        :rtype: IoArgoprojWorkflowV1alpha1WorkflowSpec
        """
        return self._spec

    @spec.setter
    def spec(self, spec):
        """Sets the spec of this IoArgoprojWorkflowV1alpha1Workflow.

        :param spec: The spec of this IoArgoprojWorkflowV1alpha1Workflow.  # noqa: E501
        :type: IoArgoprojWorkflowV1alpha1WorkflowSpec
        """
        # Required field: reject None.
        if spec is None:
            raise ValueError("Invalid value for `spec`, must not be `None`")  # noqa: E501
        self._spec = spec

    @property
    def status(self):
        """Gets the status of this IoArgoprojWorkflowV1alpha1Workflow.  # noqa: E501

        :return: The status of this IoArgoprojWorkflowV1alpha1Workflow.  # noqa: E501
        :rtype: IoArgoprojWorkflowV1alpha1WorkflowStatus
        """
        return self._status

    @status.setter
    def status(self, status):
        """Sets the status of this IoArgoprojWorkflowV1alpha1Workflow.

        :param status: The status of this IoArgoprojWorkflowV1alpha1Workflow.  # noqa: E501
        :type: IoArgoprojWorkflowV1alpha1WorkflowStatus
        """
        # Required field: reject None.
        if status is None:
            raise ValueError("Invalid value for `status`, must not be `None`")  # noqa: E501
        self._status = status

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        # Serialize every declared attribute, recursing into nested
        # models (anything exposing a to_dict) and into lists/dicts of them.
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(IoArgoprojWorkflowV1alpha1Workflow, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, IoArgoprojWorkflowV1alpha1Workflow):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"[email protected]"
]
| |
de9f1199dc5aaa7e1cc8da79229dc61e059a54e2 | 25e2d5bb03c5b2534880ab41fb7de82d815f83da | /menpo/landmark/labels/__init__.py | 36cab907dbfd7bbb95e2c5c10a432ae9e3e2fea5 | [
"BSD-3-Clause"
]
| permissive | justusschock/menpo | f4c4a15b62eab17f06387c5772af2699d52a9419 | d915bec26de64a5711b96be75cd145661a32290e | refs/heads/master | 2020-04-06T18:08:26.397844 | 2019-03-09T20:54:07 | 2019-03-09T20:54:07 | 157,687,031 | 0 | 0 | NOASSERTION | 2018-11-15T09:39:03 | 2018-11-15T09:39:03 | null | UTF-8 | Python | false | false | 612 | py | from .base import labeller
from .human import *
from .car import (
car_streetscene_20_to_car_streetscene_view_0_8,
car_streetscene_20_to_car_streetscene_view_1_14,
car_streetscene_20_to_car_streetscene_view_2_10,
car_streetscene_20_to_car_streetscene_view_3_14,
car_streetscene_20_to_car_streetscene_view_4_14,
car_streetscene_20_to_car_streetscene_view_5_10,
car_streetscene_20_to_car_streetscene_view_6_14,
car_streetscene_20_to_car_streetscene_view_7_8)
from .bounding_box import (bounding_box_to_bounding_box,
bounding_box_mirrored_to_bounding_box)
| [
"[email protected]"
]
| |
27a2982791e485a8d4a48f5bf1b30eb87d869ecc | d45b08745ab3f1e951485fa6b9f905be65f71da4 | /programmers/kakao20_경주로건설.py | 9bc2872919783233501f1b27d53f832c1fc7caf3 | []
| no_license | HYUNMIN-HWANG/Algorithm_practice | d0a82e73a735e22a70c98fd4882b66929061bd6c | 96b70fbea42ca11881546531fa6a9486e9a5289f | refs/heads/main | 2023-04-25T05:40:24.126757 | 2021-05-08T13:42:56 | 2021-05-08T13:42:56 | 324,109,498 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,983 | py | from collections import deque
def solution(board):
    """Cheapest race track from the top-left to the bottom-right cell of
    *board* (0 = free, 1 = wall): every straight move costs 100 and a
    change of direction makes that move cost 600 (100 + a 500 corner).

    Relaxation BFS keeping, per cell, the cheapest cost seen so far.
    """
    size = len(board)
    best = [[1000000] * size for _ in range(size)]
    best[0][0] = 0
    moves = ((-1, 0), (1, 0), (0, 1), (0, -1))
    queue = deque([(0, 0, 0, -1)])  # row, col, cost so far, last direction
    while queue:
        row, col, cost, last = queue.popleft()
        for direction, (dr, dc) in enumerate(moves):
            nr, nc = row + dr, col + dc
            if not (0 <= nr < size and 0 <= nc < size) or board[nr][nc]:
                continue
            # Same direction (or first move) is straight, otherwise a corner.
            step = 100 if last in (direction, -1) else 600
            if best[nr][nc] >= cost + step:
                best[nr][nc] = cost + step
                queue.append((nr, nc, cost + step, direction))
    return best[size - 1][size - 1]
# 위에 꺼 참고한 나의 풀이
from collections import deque
def solution(board):
    """Same race-track problem, rewritten after the solution above:
    BFS with per-cell cheapest-cost relaxation; a straight move costs
    100 and a turning move costs 600."""
    queue = deque()
    queue.append((0,0,0,-1)) # x, y, cost, direction
    n = len(board[0])
    visited = [[1000000] * n for _ in range(n)]
    visited[0][0] = 0
    dx = [-1, 1, 0, 0]
    dy = [0, 0, 1, -1]
    while queue :
        x, y, c, t = queue.popleft()
        for i in range(4) :
            nx = x + dx[i]
            ny = y + dy[i]
            if 0 <= nx < n and 0 <= ny < n and not board[nx][ny] :
                if t == i or t == -1 : # same direction: straight segment
                    if visited[nx][ny] >= c + 100 : # keep only the cheapest cost seen
                        visited[nx][ny] = c + 100
                        queue.append((nx, ny, c + 100, i))
                elif t != i : # direction changed: corner segment
                    if visited[nx][ny] >= c + 600 :
                        visited[nx][ny] = c + 600
                        queue.append((nx, ny, c + 600, i))
    answer = visited[n-1][n-1]
    return answer
| [
"[email protected]"
]
| |
3248d8abf873f771774003dc8d0226a2dff074f3 | 599069eeeae294950aab730ca8d4858ac1929a5c | /bemani/client/iidx/tricoro.py | 1ea4a005f356b6586ed3b5e9f75655973ecc8b36 | []
| no_license | ByteFun/bemaniutils | 232d057d4b548f929af4da4f145565ad51482113 | bd467a9b732a25a1c8aba75106dc459fbdff61b0 | refs/heads/master | 2020-12-04T07:45:45.503620 | 2019-12-08T21:57:08 | 2019-12-08T21:57:08 | 231,683,196 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 31,071 | py | import random
import time
from typing import Any, Dict, Optional, Tuple
from bemani.client.base import BaseClient
from bemani.protocol import Node
class IIDXTricoroClient(BaseClient):
NAME = 'TEST'
    def verify_shop_getname(self, lid: str) -> str:
        """Issue a shop.getname request for machine *lid* and return the
        operator name the server echoes back."""
        call = self.call_node()
        # Construct node
        IIDX21shop = Node.void('shop')
        call.add_child(IIDX21shop)
        IIDX21shop.set_attribute('method', 'getname')
        IIDX21shop.set_attribute('lid', lid)
        # Swap with server
        resp = self.exchange('', call)
        # Verify that response is correct
        self.assert_path(resp, "response/shop/@opname")
        self.assert_path(resp, "response/shop/@pid")
        self.assert_path(resp, "response/shop/@cls_opt")
        return resp.child('shop').attribute('opname')
    def verify_shop_savename(self, lid: str, name: str) -> None:
        """Save operator name *name* for machine *lid* via shop.savename
        and verify the server acknowledges it."""
        call = self.call_node()
        # Construct node
        IIDX21shop = Node.void('shop')
        IIDX21shop.set_attribute('lid', lid)
        IIDX21shop.set_attribute('pid', '51')
        IIDX21shop.set_attribute('method', 'savename')
        IIDX21shop.set_attribute('cls_opt', '0')
        IIDX21shop.set_attribute('ccode', 'US')
        IIDX21shop.set_attribute('opname', name)
        IIDX21shop.set_attribute('rcode', '.')
        call.add_child(IIDX21shop)
        # Swap with server
        resp = self.exchange('', call)
        # Verify that response is correct
        self.assert_path(resp, "response/shop")
    def verify_pc_common(self) -> None:
        """Fetch the global game settings via pc.common and verify the
        expected event/phase attributes are present."""
        call = self.call_node()
        # Construct node
        IIDX21pc = Node.void('pc')
        call.add_child(IIDX21pc)
        IIDX21pc.set_attribute('method', 'common')
        # Swap with server
        resp = self.exchange('', call)
        # Verify that response is correct
        self.assert_path(resp, "response/pc/ir/@beat")
        self.assert_path(resp, "response/pc/limit/@phase")
        self.assert_path(resp, "response/pc/boss/@phase")
        self.assert_path(resp, "response/pc/red/@phase")
        self.assert_path(resp, "response/pc/yellow/@phase")
        self.assert_path(resp, "response/pc/medal/@phase")
        self.assert_path(resp, "response/pc/tricolettepark/@open")
        self.assert_path(resp, "response/pc/cafe/@open")
    def verify_music_crate(self) -> None:
        """Fetch global clear-rate data via music.crate and sanity-check
        every returned entry (node name, length, value range)."""
        call = self.call_node()
        # Construct node
        IIDX21pc = Node.void('music')
        call.add_child(IIDX21pc)
        IIDX21pc.set_attribute('method', 'crate')
        # Swap with server
        resp = self.exchange('', call)
        self.assert_path(resp, "response/music")
        for child in resp.child("music").children:
            if child.name != 'c':
                raise Exception('Invalid node {} in clear rate response!'.format(child))
            if len(child.value) != 12:
                raise Exception('Invalid node data {} in clear rate response!'.format(child))
            # Values are clear percentages; 101 is presumably a
            # "no plays" sentinel — confirm against the server code.
            for v in child.value:
                if v < 0 or v > 101:
                    raise Exception('Invalid clear percent {} in clear rate response!'.format(child))
    def verify_shop_getconvention(self, lid: str) -> None:
        """Fetch the shop's court/convention settings for *lid* and
        verify the four selected songs are present."""
        call = self.call_node()
        # Construct node
        IIDX21pc = Node.void('shop')
        call.add_child(IIDX21pc)
        IIDX21pc.set_attribute('method', 'getconvention')
        IIDX21pc.set_attribute('lid', lid)
        # Swap with server
        resp = self.exchange('', call)
        # Verify that response is correct
        self.assert_path(resp, "response/shop/valid")
        self.assert_path(resp, "response/shop/@music_0")
        self.assert_path(resp, "response/shop/@music_1")
        self.assert_path(resp, "response/shop/@music_2")
        self.assert_path(resp, "response/shop/@music_3")
    def verify_pc_visit(self, extid: int, lid: str) -> None:
        """Register a profile visit (pc.visit) for profile *extid* at
        machine *lid* and verify the counter attributes come back."""
        call = self.call_node()
        # Construct node
        IIDX21pc = Node.void('pc')
        call.add_child(IIDX21pc)
        IIDX21pc.set_attribute('iidxid', str(extid))
        IIDX21pc.set_attribute('lid', lid)
        IIDX21pc.set_attribute('method', 'visit')
        IIDX21pc.set_attribute('pid', '51')
        # Swap with server
        resp = self.exchange('', call)
        # Verify that response is correct
        self.assert_path(resp, "response/pc/@aflg")
        self.assert_path(resp, "response/pc/@anum")
        self.assert_path(resp, "response/pc/@pflg")
        self.assert_path(resp, "response/pc/@pnum")
        self.assert_path(resp, "response/pc/@sflg")
        self.assert_path(resp, "response/pc/@snum")
    def verify_ranking_getranker(self, lid: str) -> None:
        """Request machine rankings (ranking.getranker) for each chart
        type 0-6 and verify a ranking element is returned every time."""
        for clid in [0, 1, 2, 3, 4, 5, 6]:
            call = self.call_node()
            # Construct node
            IIDX21pc = Node.void('ranking')
            call.add_child(IIDX21pc)
            IIDX21pc.set_attribute('method', 'getranker')
            IIDX21pc.set_attribute('lid', lid)
            IIDX21pc.set_attribute('clid', str(clid))
            # Swap with server
            resp = self.exchange('', call)
            # Verify that response is correct
            self.assert_path(resp, "response/ranking")
    def verify_shop_sentinfo(self, lid: str) -> None:
        """Send machine info (shop.sentinfo) for *lid* and verify the
        server acknowledges it."""
        call = self.call_node()
        # Construct node
        IIDX21pc = Node.void('shop')
        call.add_child(IIDX21pc)
        IIDX21pc.set_attribute('method', 'sentinfo')
        IIDX21pc.set_attribute('lid', lid)
        IIDX21pc.set_attribute('bflg', '1')
        IIDX21pc.set_attribute('bnum', '2')
        IIDX21pc.set_attribute('ioid', '0')
        IIDX21pc.set_attribute('tax_phase', '0')
        # Swap with server
        resp = self.exchange('', call)
        # Verify that response is correct
        self.assert_path(resp, "response/shop")
    def verify_pc_get(self, ref_id: str, card_id: str, lid: str) -> Dict[str, Any]:
        """Fetch the full profile (pc.get) for *ref_id*/*card_id*.

        Verifies the expected response structure, checks the profile
        name matches self.NAME (raising otherwise), and returns a dict
        with 'extid', 'sp_dan', 'dp_dan' and 'deller'.
        """
        call = self.call_node()
        # Construct node
        IIDX21pc = Node.void('pc')
        call.add_child(IIDX21pc)
        IIDX21pc.set_attribute('rid', ref_id)
        IIDX21pc.set_attribute('did', ref_id)
        IIDX21pc.set_attribute('pid', '51')
        IIDX21pc.set_attribute('lid', lid)
        IIDX21pc.set_attribute('cid', card_id)
        IIDX21pc.set_attribute('method', 'get')
        IIDX21pc.set_attribute('ctype', '1')
        # Swap with server
        resp = self.exchange('', call)
        # Verify that the response is correct
        self.assert_path(resp, "response/pc/pcdata/@name")
        self.assert_path(resp, "response/pc/pcdata/@pid")
        self.assert_path(resp, "response/pc/pcdata/@id")
        self.assert_path(resp, "response/pc/pcdata/@idstr")
        self.assert_path(resp, "response/pc/packinfo")
        self.assert_path(resp, "response/pc/commonboss/@deller")
        self.assert_path(resp, "response/pc/commonboss/@orb")
        self.assert_path(resp, "response/pc/commonboss/@baron")
        self.assert_path(resp, "response/pc/secret/flg1")
        self.assert_path(resp, "response/pc/secret/flg2")
        self.assert_path(resp, "response/pc/secret/flg3")
        self.assert_path(resp, "response/pc/achievements/trophy")
        self.assert_path(resp, "response/pc/skin")
        self.assert_path(resp, "response/pc/grade")
        self.assert_path(resp, "response/pc/rlist")
        self.assert_path(resp, "response/pc/step")
        name = resp.child('pc/pcdata').attribute('name')
        if name != self.NAME:
            raise Exception('Invalid name \'{}\' returned for Ref ID \'{}\''.format(name, ref_id))
        return {
            'extid': int(resp.child('pc/pcdata').attribute('id')),
            'sp_dan': int(resp.child('pc/grade').attribute('sgid')),
            'dp_dan': int(resp.child('pc/grade').attribute('dgid')),
            'deller': int(resp.child('pc/commonboss').attribute('deller')),
        }
def verify_music_getrank(self, extid: int) -> Dict[int, Dict[int, Dict[str, int]]]:
scores: Dict[int, Dict[int, Dict[str, int]]] = {}
for cltype in [0, 1]: # singles, doubles
call = self.call_node()
# Construct node
IIDX21music = Node.void('music')
call.add_child(IIDX21music)
IIDX21music.set_attribute('method', 'getrank')
IIDX21music.set_attribute('iidxid', str(extid))
IIDX21music.set_attribute('cltype', str(cltype))
# Swap with server
resp = self.exchange('', call)
self.assert_path(resp, "response/music/style")
if int(resp.child('music/style').attribute('type')) != cltype:
raise Exception('Returned wrong clear type for IIDX21music.getrank!')
for child in resp.child('music').children:
if child.name == 'm':
if child.value[0] != -1:
raise Exception('Got non-self score back when requesting only our scores!')
music_id = child.value[1]
normal_clear_status = child.value[2]
hyper_clear_status = child.value[3]
another_clear_status = child.value[4]
normal_ex_score = child.value[5]
hyper_ex_score = child.value[6]
another_ex_score = child.value[7]
normal_miss_count = child.value[8]
hyper_miss_count = child.value[9]
another_miss_count = child.value[10]
if cltype == 0:
normal = 0
hyper = 1
another = 2
else:
normal = 3
hyper = 4
another = 5
if music_id not in scores:
scores[music_id] = {}
scores[music_id][normal] = {
'clear_status': normal_clear_status,
'ex_score': normal_ex_score,
'miss_count': normal_miss_count,
}
scores[music_id][hyper] = {
'clear_status': hyper_clear_status,
'ex_score': hyper_ex_score,
'miss_count': hyper_miss_count,
}
scores[music_id][another] = {
'clear_status': another_clear_status,
'ex_score': another_ex_score,
'miss_count': another_miss_count,
}
elif child.name == 'b':
music_id = child.value[0]
clear_status = child.value[1]
scores[music_id][6] = {
'clear_status': clear_status,
'ex_score': -1,
'miss_count': -1,
}
return scores
    def verify_pc_save(self, extid: int, card: str, lid: str) -> None:
        """Save profile settings (pc.save) for *extid* with a fixed set
        of option values plus event-point and deller sub-nodes, and
        verify the server acknowledges the save."""
        call = self.call_node()
        # Construct node
        IIDX21pc = Node.void('pc')
        call.add_child(IIDX21pc)
        IIDX21pc.set_attribute('achi', '449')
        IIDX21pc.set_attribute('opt', '8208')
        IIDX21pc.set_attribute('gpos', '0')
        IIDX21pc.set_attribute('gno', '8')
        IIDX21pc.set_attribute('timing', '0')
        IIDX21pc.set_attribute('help', '0')
        IIDX21pc.set_attribute('sdhd', '0')
        IIDX21pc.set_attribute('sdtype', '0')
        IIDX21pc.set_attribute('notes', '31.484070')
        IIDX21pc.set_attribute('pase', '0')
        IIDX21pc.set_attribute('judge', '0')
        IIDX21pc.set_attribute('opstyle', '1')
        IIDX21pc.set_attribute('hispeed', '5.771802')
        IIDX21pc.set_attribute('mode', '6')
        IIDX21pc.set_attribute('pmode', '0')
        IIDX21pc.set_attribute('lift', '60')
        IIDX21pc.set_attribute('judgeAdj', '0')
        IIDX21pc.set_attribute('method', 'save')
        IIDX21pc.set_attribute('iidxid', str(extid))
        IIDX21pc.set_attribute('lid', lid)
        IIDX21pc.set_attribute('cid', card)
        IIDX21pc.set_attribute('cltype', '0')
        IIDX21pc.set_attribute('ctype', '1')
        # Event point totals for the various in-game events.
        pyramid = Node.void('pyramid')
        IIDX21pc.add_child(pyramid)
        pyramid.set_attribute('point', '290')
        destiny_catharsis = Node.void('destiny_catharsis')
        IIDX21pc.add_child(destiny_catharsis)
        destiny_catharsis.set_attribute('point', '290')
        bemani_summer_collabo = Node.void('bemani_summer_collabo')
        IIDX21pc.add_child(bemani_summer_collabo)
        bemani_summer_collabo.set_attribute('point', '290')
        deller = Node.void('deller')
        IIDX21pc.add_child(deller)
        deller.set_attribute('deller', '150')
        # Swap with server
        resp = self.exchange('', call)
        self.assert_path(resp, "response/pc")
def verify_music_reg(self, extid: int, lid: str, score: Dict[str, Any]) -> None:
call = self.call_node()
# Construct node
IIDX21music = Node.void('music')
call.add_child(IIDX21music)
IIDX21music.set_attribute('convid', '-1')
IIDX21music.set_attribute('iidxid', str(extid))
IIDX21music.set_attribute('pgnum', str(score['pgnum']))
IIDX21music.set_attribute('pid', '51')
IIDX21music.set_attribute('rankside', '1')
IIDX21music.set_attribute('cflg', str(score['clear_status']))
IIDX21music.set_attribute('method', 'reg')
IIDX21music.set_attribute('gnum', str(score['gnum']))
IIDX21music.set_attribute('clid', str(score['chart']))
IIDX21music.set_attribute('mnum', str(score['mnum']))
IIDX21music.set_attribute('is_death', '0')
IIDX21music.set_attribute('theory', '0')
IIDX21music.set_attribute('shopconvid', lid)
IIDX21music.set_attribute('mid', str(score['id']))
IIDX21music.set_attribute('shopflg', '1')
IIDX21music.add_child(Node.binary('ghost', bytes([1] * 64)))
# Swap with server
resp = self.exchange('', call)
self.assert_path(resp, "response/music/shopdata/@rank")
self.assert_path(resp, "response/music/ranklist/data")
def verify_music_appoint(self, extid: int, musicid: int, chart: int) -> Tuple[int, bytes]:
call = self.call_node()
# Construct node
IIDX21music = Node.void('music')
call.add_child(IIDX21music)
IIDX21music.set_attribute('clid', str(chart))
IIDX21music.set_attribute('method', 'appoint')
IIDX21music.set_attribute('ctype', '0')
IIDX21music.set_attribute('iidxid', str(extid))
IIDX21music.set_attribute('subtype', '')
IIDX21music.set_attribute('mid', str(musicid))
# Swap with server
resp = self.exchange('', call)
self.assert_path(resp, "response/music/mydata/@score")
return (
int(resp.child('music/mydata').attribute('score')),
resp.child_value('music/mydata'),
)
def verify_pc_reg(self, ref_id: str, card_id: str, lid: str) -> int:
call = self.call_node()
# Construct node
IIDX21pc = Node.void('pc')
call.add_child(IIDX21pc)
IIDX21pc.set_attribute('lid', lid)
IIDX21pc.set_attribute('pid', '51')
IIDX21pc.set_attribute('method', 'reg')
IIDX21pc.set_attribute('cid', card_id)
IIDX21pc.set_attribute('did', ref_id)
IIDX21pc.set_attribute('rid', ref_id)
IIDX21pc.set_attribute('name', self.NAME)
# Swap with server
resp = self.exchange('', call)
# Verify nodes that cause crashes if they don't exist
self.assert_path(resp, "response/pc/@id")
self.assert_path(resp, "response/pc/@id_str")
return int(resp.child('pc').attribute('id'))
def verify_pc_playstart(self) -> None:
call = self.call_node()
# Construct node
IIDX21pc = Node.void('pc')
IIDX21pc.set_attribute('method', 'playstart')
IIDX21pc.set_attribute('side', '1')
call.add_child(IIDX21pc)
# Swap with server
resp = self.exchange('', call)
# Verify nodes that cause crashes if they don't exist
self.assert_path(resp, "response/pc")
def verify_music_play(self, score: Dict[str, int]) -> None:
call = self.call_node()
# Construct node
IIDX21music = Node.void('music')
IIDX21music.set_attribute('opt', '64')
IIDX21music.set_attribute('clid', str(score['chart']))
IIDX21music.set_attribute('mid', str(score['id']))
IIDX21music.set_attribute('gnum', str(score['gnum']))
IIDX21music.set_attribute('cflg', str(score['clear_status']))
IIDX21music.set_attribute('pgnum', str(score['pgnum']))
IIDX21music.set_attribute('pid', '51')
IIDX21music.set_attribute('method', 'play')
call.add_child(IIDX21music)
# Swap with server
resp = self.exchange('', call)
# Verify nodes that cause crashes if they don't exist
self.assert_path(resp, "response/music/@clid")
self.assert_path(resp, "response/music/@crate")
self.assert_path(resp, "response/music/@frate")
self.assert_path(resp, "response/music/@mid")
def verify_pc_playend(self) -> None:
call = self.call_node()
# Construct node
IIDX21pc = Node.void('pc')
IIDX21pc.set_attribute('cltype', '0')
IIDX21pc.set_attribute('bookkeep', '0')
IIDX21pc.set_attribute('mode', '1')
IIDX21pc.set_attribute('method', 'playend')
call.add_child(IIDX21pc)
# Swap with server
resp = self.exchange('', call)
# Verify nodes that cause crashes if they don't exist
self.assert_path(resp, "response/pc")
def verify_music_breg(self, iidxid: int, score: Dict[str, int]) -> None:
call = self.call_node()
# Construct node
IIDX21music = Node.void('music')
IIDX21music.set_attribute('gnum', str(score['gnum']))
IIDX21music.set_attribute('iidxid', str(iidxid))
IIDX21music.set_attribute('mid', str(score['id']))
IIDX21music.set_attribute('method', 'breg')
IIDX21music.set_attribute('pgnum', str(score['pgnum']))
IIDX21music.set_attribute('cflg', str(score['clear_status']))
call.add_child(IIDX21music)
# Swap with server
resp = self.exchange('', call)
# Verify nodes that cause crashes if they don't exist
self.assert_path(resp, "response/music")
def verify_grade_raised(self, iidxid: int, shop_name: str, dantype: str) -> None:
call = self.call_node()
# Construct node
IIDX21grade = Node.void('grade')
IIDX21grade.set_attribute('opname', shop_name)
IIDX21grade.set_attribute('is_mirror', '0')
IIDX21grade.set_attribute('oppid', '51')
IIDX21grade.set_attribute('achi', '50')
IIDX21grade.set_attribute('cflg', '4' if dantype == 'sp' else '3')
IIDX21grade.set_attribute('gid', '5')
IIDX21grade.set_attribute('iidxid', str(iidxid))
IIDX21grade.set_attribute('gtype', '0' if dantype == 'sp' else '1')
IIDX21grade.set_attribute('is_ex', '0')
IIDX21grade.set_attribute('pside', '0')
IIDX21grade.set_attribute('method', 'raised')
call.add_child(IIDX21grade)
# Swap with server
resp = self.exchange('', call)
# Verify nodes that cause crashes if they don't exist
self.assert_path(resp, "response/grade/@pnum")
    def verify(self, cardid: Optional[str]) -> None:
        """End-to-end smoke test of the game's network protocol.

        Exercises boot, card registration/lookup, profile creation, score
        submission and merge semantics, anonymous play, shop settings,
        beginner scores, DAN grades, and PASELI flows in strict protocol
        order. If `cardid` is given, an existing card is used and the
        new-profile/score checks are skipped.
        """
        # Verify boot sequence is okay
        self.verify_services_get(
            expected_services=[
                'pcbtracker',
                'pcbevent',
                'local',
                'message',
                'facility',
                'cardmng',
                'package',
                'posevent',
                'pkglist',
                'dlstatus',
                'eacoin',
                'lobby',
                'ntp',
                'keepalive'
            ]
        )
        paseli_enabled = self.verify_pcbtracker_alive()
        self.verify_package_list()
        self.verify_message_get()
        lid = self.verify_facility_get()
        self.verify_pcbevent_put()
        self.verify_shop_getname(lid)
        self.verify_pc_common()
        self.verify_music_crate()
        self.verify_shop_getconvention(lid)
        self.verify_ranking_getranker(lid)
        self.verify_shop_sentinfo(lid)

        # Verify card registration and profile lookup
        if cardid is not None:
            card = cardid
        else:
            card = self.random_card()
            print("Generated random card ID {} for use.".format(card))

        if cardid is None:
            # New card: walk the full registration handshake and create a profile.
            self.verify_cardmng_inquire(card, msg_type='unregistered', paseli_enabled=paseli_enabled)
            ref_id = self.verify_cardmng_getrefid(card)
            if len(ref_id) != 16:
                raise Exception('Invalid refid \'{}\' returned when registering card'.format(ref_id))
            if ref_id != self.verify_cardmng_inquire(card, msg_type='new', paseli_enabled=paseli_enabled):
                raise Exception('Invalid refid \'{}\' returned when querying card'.format(ref_id))
            self.verify_pc_reg(ref_id, card, lid)
            self.verify_pc_get(ref_id, card, lid)
        else:
            print("Skipping new card checks for existing card")
            ref_id = self.verify_cardmng_inquire(card, msg_type='query', paseli_enabled=paseli_enabled)

        # Verify pin handling and return card handling
        self.verify_cardmng_authpass(ref_id, correct=True)
        self.verify_cardmng_authpass(ref_id, correct=False)
        if ref_id != self.verify_cardmng_inquire(card, msg_type='query', paseli_enabled=paseli_enabled):
            raise Exception('Invalid refid \'{}\' returned when querying card'.format(ref_id))

        if cardid is None:
            # Verify score handling
            profile = self.verify_pc_get(ref_id, card, lid)
            if profile['sp_dan'] != -1:
                raise Exception('Somehow has SP DAN ranking on new profile!')
            if profile['dp_dan'] != -1:
                raise Exception('Somehow has DP DAN ranking on new profile!')
            if profile['deller'] != 0:
                raise Exception('Somehow has deller on new profile!')
            scores = self.verify_music_getrank(profile['extid'])
            if len(scores.keys()) > 0:
                raise Exception('Somehow have scores on a new profile!')

            # Phase 1 writes a first batch of scores; phase 2 submits better and
            # worse results to verify the server keeps per-field maxima/minima.
            for phase in [1, 2]:
                if phase == 1:
                    dummyscores = [
                        # An okay score on a chart
                        {
                            'id': 1000,
                            'chart': 2,
                            'clear_status': 4,
                            'pgnum': 123,
                            'gnum': 123,
                            'mnum': 5,
                        },
                        # A good score on an easier chart of the same song
                        {
                            'id': 1000,
                            'chart': 0,
                            'clear_status': 7,
                            'pgnum': 246,
                            'gnum': 0,
                            'mnum': 0,
                        },
                        # A bad score on a hard chart
                        {
                            'id': 1003,
                            'chart': 2,
                            'clear_status': 1,
                            'pgnum': 10,
                            'gnum': 20,
                            'mnum': 50,
                        },
                        # A terrible score on an easy chart
                        {
                            'id': 1003,
                            'chart': 0,
                            'clear_status': 1,
                            'pgnum': 2,
                            'gnum': 5,
                            'mnum': 75,
                        },
                    ]
                if phase == 2:
                    dummyscores = [
                        # A better score on the same chart
                        {
                            'id': 1000,
                            'chart': 2,
                            'clear_status': 5,
                            'pgnum': 234,
                            'gnum': 234,
                            'mnum': 3,
                        },
                        # A worse score on another same chart
                        {
                            'id': 1000,
                            'chart': 0,
                            'clear_status': 4,
                            'pgnum': 123,
                            'gnum': 123,
                            'mnum': 35,
                            'expected_clear_status': 7,
                            'expected_ex_score': 492,
                            'expected_miss_count': 0,
                        },
                    ]

                for dummyscore in dummyscores:
                    self.verify_music_reg(profile['extid'], lid, dummyscore)
                self.verify_pc_visit(profile['extid'], lid)
                self.verify_pc_save(profile['extid'], card, lid)
                scores = self.verify_music_getrank(profile['extid'])
                for score in dummyscores:
                    data = scores.get(score['id'], {}).get(score['chart'], None)
                    if data is None:
                        raise Exception('Expected to get score back for song {} chart {}!'.format(score['id'], score['chart']))

                    # 'expected_*' keys override the submitted values when a
                    # worse submission must NOT lower the stored best.
                    if 'expected_ex_score' in score:
                        expected_score = score['expected_ex_score']
                    else:
                        expected_score = (score['pgnum'] * 2) + score['gnum']
                    if 'expected_clear_status' in score:
                        expected_clear_status = score['expected_clear_status']
                    else:
                        expected_clear_status = score['clear_status']
                    if 'expected_miss_count' in score:
                        expected_miss_count = score['expected_miss_count']
                    else:
                        expected_miss_count = score['mnum']

                    if data['ex_score'] != expected_score:
                        raise Exception('Expected a score of \'{}\' for song \'{}\' chart \'{}\' but got score \'{}\''.format(
                            expected_score, score['id'], score['chart'], data['ex_score'],
                        ))
                    if data['clear_status'] != expected_clear_status:
                        raise Exception('Expected a clear status of \'{}\' for song \'{}\' chart \'{}\' but got clear status \'{}\''.format(
                            expected_clear_status, score['id'], score['chart'], data['clear_status'],
                        ))
                    if data['miss_count'] != expected_miss_count:
                        raise Exception('Expected a miss count of \'{}\' for song \'{}\' chart \'{}\' but got miss count \'{}\''.format(
                            expected_miss_count, score['id'], score['chart'], data['miss_count'],
                        ))

                    # Verify we can fetch our own ghost
                    ex_score, ghost = self.verify_music_appoint(profile['extid'], score['id'], score['chart'])
                    if ex_score != expected_score:
                        # NOTE(review): message reports data['ex_score'] rather than
                        # the fetched ex_score — confirm which was intended.
                        raise Exception('Expected a score of \'{}\' for song \'{}\' chart \'{}\' but got score \'{}\''.format(
                            expected_score, score['id'], score['chart'], data['ex_score'],
                        ))
                    if len(ghost) != 64:
                        raise Exception('Wrong ghost length {} for ghost!'.format(len(ghost)))
                    for g in ghost:
                        if g != 0x01:
                            raise Exception('Got back wrong ghost data for song \'{}\' chart \'{}\''.format(score['id'], score['chart']))

                # Sleep so we don't end up putting in score history on the same second
                time.sleep(1)

            # Verify that a player without a card can play
            self.verify_pc_playstart()
            self.verify_music_play({
                'id': 1000,
                'chart': 2,
                'clear_status': 4,
                'pgnum': 123,
                'gnum': 123,
            })
            self.verify_pc_playend()

            # Verify shop name change setting
            self.verify_shop_savename(lid, 'newname1')
            newname = self.verify_shop_getname(lid)
            if newname != 'newname1':
                raise Exception('Invalid shop name returned after change!')
            self.verify_shop_savename(lid, 'newname2')
            newname = self.verify_shop_getname(lid)
            if newname != 'newname2':
                raise Exception('Invalid shop name returned after change!')

            # Verify beginner score saving
            self.verify_music_breg(profile['extid'], {
                'id': 1000,
                'clear_status': 4,
                'pgnum': 123,
                'gnum': 123,
            })
            scores = self.verify_music_getrank(profile['extid'])
            if 1000 not in scores:
                raise Exception('Didn\'t get expected scores back for song {} beginner chart!'.format(1000))
            if 6 not in scores[1000]:
                raise Exception('Didn\'t get beginner score back for song {}!'.format(1000))
            if scores[1000][6] != {'clear_status': 4, 'ex_score': -1, 'miss_count': -1}:
                raise Exception('Didn\'t get correct status back from beginner save!')

            # Verify DAN score saving and loading
            self.verify_grade_raised(profile['extid'], newname, 'sp')
            self.verify_grade_raised(profile['extid'], newname, 'dp')
            profile = self.verify_pc_get(ref_id, card, lid)
            if profile['sp_dan'] != 5:
                raise Exception('Got wrong DAN score back for SP!')
            if profile['dp_dan'] != 5:
                raise Exception('Got wrong DAN score back for DP!')
        else:
            print("Skipping score checks for existing card")

        # Verify paseli handling
        if paseli_enabled:
            print("PASELI enabled for this PCBID, executing PASELI checks")
        else:
            print("PASELI disabled for this PCBID, skipping PASELI checks")
            return

        sessid, balance = self.verify_eacoin_checkin(card)
        if balance == 0:
            print("Skipping PASELI consume check because card has 0 balance")
        else:
            self.verify_eacoin_consume(sessid, balance, random.randint(0, balance))
        self.verify_eacoin_checkout(sessid)
| [
"[email protected]"
]
| |
81b071b194eaf7cf41f099b4b2ae69ce0fdb01f6 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03168/s984146914.py | d29e8d5cf735e05031e772be32479e5b3e032425 | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 271 | py | n = int(input())
ns = list(map(float, input().split()))
dp = [[0 for j in range(n+1)] for i in range(n+1)]
dp[0][0] = 1
for i in range(1,n+1):
for j in range(0, i + 1):
dp[i][j] = dp[i-1][j] * (1-ns[i-1]) + dp[i-1][j-1] * ns[i-1]
print(sum(dp[-1][(n+1)//2:])) | [
"[email protected]"
]
| |
37cb1a55bf2943e3cb7f46d64c52de8169b903f0 | 11459f6d05c7091537570a63bb2654b3d3a5440c | /mlprodict/plotting/plotting_validate_graph.py | f30922b8ca4cda3cd369f447e8fb26caa3a256dd | [
"MIT"
]
| permissive | xhochy/mlprodict | 663e6ac21ae913d5dff59cfab938fc62f59d1c79 | 22ab05af3ff4a67f4390af1c44e349efa454f0d5 | refs/heads/master | 2023-08-11T00:53:24.133020 | 2021-09-24T15:52:46 | 2021-09-24T15:52:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,540 | py | """
@file
@brief Functions to help visualizing performances.
"""
import numpy
import pandas
def _model_name(name):
"""
Extracts the main component of a model, removes
suffixes such ``Classifier``, ``Regressor``, ``CV``.
@param name string
@return shorter string
"""
if name.startswith("Select"):
return "Select"
if name.startswith("Nu"):
return "Nu"
modif = 1
while modif > 0:
modif = 0
for suf in ['Classifier', 'Regressor', 'CV', 'IC',
'Transformer']:
if name.endswith(suf):
name = name[:-len(suf)]
modif += 1
return name
def plot_validate_benchmark(df):
    """
    Plots a graph which summarizes the performances of a benchmark
    validating a runtime for :epkg:`ONNX`.

    @param      df      output of function @see fn summary_report
    @return             fig, ax

    .. plot::

        from logging import getLogger
        from pandas import DataFrame
        import matplotlib.pyplot as plt
        from mlprodict.onnxrt.validate import enumerate_validated_operator_opsets, summary_report
        from mlprodict.tools.plotting import plot_validate_benchmark

        logger = getLogger('skl2onnx')
        logger.disabled = True

        rows = list(enumerate_validated_operator_opsets(
            verbose=0, models={"LinearRegression"}, opset_min=11,
            runtime=['python', 'onnxruntime1'], debug=False,
            benchmark=True, n_features=[None, 10]))

        df = DataFrame(rows)
        piv = summary_report(df)
        fig, ax = plot_validate_benchmark(piv)
        plt.show()
    """
    import matplotlib.pyplot as plt

    # Default missing benchmark dimensions so the label format below works.
    if 'n_features' not in df.columns:
        df["n_features"] = numpy.nan  # pragma: no cover
    if 'runtime' not in df.columns:
        df['runtime'] = '?'  # pragma: no cover

    # One label per (model, problem, scenario, optim, n_features) combination.
    fmt = "{} [{}-{}|{}] D{}"
    df["label"] = df.apply(
        lambda row: fmt.format(
            row["name"], row["problem"], row["scenario"],
            row['optim'], row["n_features"]).replace("-default|", "-**]"), axis=1)
    df = df.sort_values(["name", "problem", "scenario", "optim",
                         "n_features", "runtime"],
                        ascending=False).reset_index(drop=True).copy()
    indices = ['label', 'runtime']
    # Keep only the ratio columns ("...N=...") without their min/max variants.
    values = [c for c in df.columns
              if 'N=' in c and '-min' not in c and '-max' not in c]
    try:
        df = df[indices + values]
    except KeyError as e:  # pragma: no cover
        raise RuntimeError(
            "Unable to find the following columns {}\nin {}".format(
                indices + values, df.columns)) from e

    if 'RT/SKL-N=1' not in df.columns:
        raise RuntimeError(  # pragma: no cover
            "Column 'RT/SKL-N=1' is missing, benchmark was probably not run.")
    na = df["RT/SKL-N=1"].isnull()
    dfp = df[~na]
    runtimes = list(sorted(set(dfp['runtime'])))

    # Pivot so each runtime contributes its own suffixed set of columns.
    final = None
    for rt in runtimes:
        sub = dfp[dfp.runtime == rt].drop('runtime', axis=1).copy()
        col = list(sub.columns)
        for i in range(1, len(col)):
            col[i] += "__" + rt
        sub.columns = col

        if final is None:
            final = sub
        else:
            final = final.merge(sub, on='label', how='outer')

    # let's add average and median
    ncol = (final.shape[1] - 1) // len(runtimes)
    if len(runtimes) + 1 > final.shape[0]:
        dfp_legend = final.iloc[:len(runtimes) + 1, :].copy()
        while dfp_legend.shape[0] < len(runtimes) + 1:
            dfp_legend = pandas.concat([dfp_legend, dfp_legend[:1]])
    else:
        dfp_legend = final.iloc[:len(runtimes) + 1, :].copy()
    rleg = dfp_legend.copy()
    dfp_legend.iloc[:, 1:] = numpy.nan
    rleg.iloc[:, 1:] = numpy.nan

    for r, runt in enumerate(runtimes):
        sli = slice(1 + ncol * r, 1 + ncol * r + ncol)
        cm = final.iloc[:, sli].mean().values
        dfp_legend.iloc[r + 1, sli] = cm
        rleg.iloc[r, sli] = final.iloc[:, sli].median()
        dfp_legend.iloc[r + 1, 0] = "avg_" + runt
        rleg.iloc[r, 0] = "med_" + runt
    dfp_legend.iloc[0, 0] = "------"
    rleg.iloc[-1, 0] = "------"

    # sort
    final = final.sort_values('label', ascending=False).copy()

    # add global statistics
    final = pandas.concat([rleg, final, dfp_legend]).reset_index(drop=True)

    # graph beginning
    total = final.shape[0] * 0.45
    fig, ax = plt.subplots(1, len(values), figsize=(14, total),
                           sharex=False, sharey=True)
    x = numpy.arange(final.shape[0])
    subh = 1.0 / len(runtimes)
    height = total / final.shape[0] * (subh + 0.1)
    decrt = {rt: height * i for i, rt in enumerate(runtimes)}
    colors = {rt: c for rt, c in zip(
        runtimes, ['blue', 'orange', 'cyan', 'yellow'])}

    # draw lines between models
    vals = final.iloc[:, 1:].values.ravel()
    xlim = [min(0.5, min(vals)), max(2, max(vals))]
    # BUG FIX: `i` was used without initialization (NameError); start at 0.
    i = 0
    while i < final.shape[0] - 1:
        i += 1
        label = final.iloc[i, 0]
        if '[' not in label:
            continue
        prev = final.iloc[i - 1, 0]
        # BUG FIX: this guard previously re-tested `label` (always false after
        # the check above); it must skip separator/statistic rows in `prev`.
        if '[' not in prev:
            continue  # pragma: no cover
        label = label.split()[0]
        prev = prev.split()[0]
        if _model_name(label) == _model_name(prev):
            continue
        # Insert a blank separator row between two different model families.
        blank = final.iloc[:1, :].copy()
        blank.iloc[0, 0] = '------'
        blank.iloc[0, 1:] = xlim[0]
        final = pandas.concat([final[:i], blank, final[i:]])
        i += 1
    final = final.reset_index(drop=True).copy()
    x = numpy.arange(final.shape[0])

    # One horizontal bar series per (measurement place, runtime) pair.
    done = set()
    for c in final.columns[1:]:
        place, runtime = c.split('__')
        if hasattr(ax, 'shape'):
            index = values.index(place)
            if (index, runtime) in done:
                raise RuntimeError(  # pragma: no cover
                    "Issue with column '{}'\nlabels={}\nruntimes={}\ncolumns="
                    "{}\nvalues={}\n{}".format(
                        c, list(final.label), runtimes, final.columns, values, final))
            axi = ax[index]
            done.add((index, runtime))
        else:
            if (0, runtime) in done:  # pragma: no cover
                raise RuntimeError(
                    "Issue with column '{}'\nlabels={}\nruntimes={}\ncolumns="
                    "{}\nvalues={}\n{}".format(
                        c, final.label, runtimes, final.columns, values, final))
            done.add((0, runtime))  # pragma: no cover
            axi = ax  # pragma: no cover
        if c in final.columns:
            yl = final.loc[:, c]
            xl = x + decrt[runtime] / 2
            axi.barh(xl, yl, label=runtime, height=height,
                     color=colors[runtime])
            axi.set_title(place)

    def _plot_axis(axi, x, xlim):
        # Reference lines: ratio 1 (equal) in green, ratio 2 in dashed red.
        axi.plot([1, 1], [0, max(x)], 'g-')
        axi.plot([2, 2], [0, max(x)], 'r--')
        axi.set_xlim(xlim)
        axi.set_xscale('log')
        axi.set_ylim([min(x) - 2, max(x) + 1])

    def _plot_final(axi, x, final):
        axi.set_yticks(x)
        axi.set_yticklabels(final['label'])

    if hasattr(ax, 'shape'):
        for i in range(len(ax)):  # pylint: disable=C0200
            _plot_axis(ax[i], x, xlim)
        ax[min(ax.shape[0] - 1, 2)].legend()
        _plot_final(ax[0], x, final)
    else:  # pragma: no cover
        _plot_axis(ax, x, xlim)
        _plot_final(ax, x, final)
        ax.legend()

    fig.subplots_adjust(left=0.25)
    return fig, ax
return fig, ax
| [
"[email protected]"
]
| |
ab25cc9d4cab4c0e131e57540685f03f68c4b4f0 | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/built-in/nlp/Scaling-nmt_for_Pytorch/fairseq/criterions/label_smoothed_cross_entropy_with_ctc.py | f2e8cdf3bfe0caea99125c6f9607dff9495891cf | [
"GPL-1.0-or-later",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 3,403 | py | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import math
from dataclasses import dataclass, field
import torch
import torch.nn.functional as F
from fairseq import utils
from fairseq.logging import metrics
from fairseq.criterions import register_criterion
from fairseq.criterions.label_smoothed_cross_entropy import (
LabelSmoothedCrossEntropyCriterion,
LabelSmoothedCrossEntropyCriterionConfig,
)
from fairseq.data.data_utils import lengths_to_mask
@dataclass
class LabelSmoothedCrossEntropyWithCtcCriterionConfig(
    LabelSmoothedCrossEntropyCriterionConfig
):
    """Config for the CTC-augmented label-smoothed cross-entropy criterion."""

    # Scale factor applied to the auxiliary CTC loss before adding it to the
    # label-smoothed cross-entropy loss.
    ctc_weight: float = field(default=1.0, metadata={"help": "weight for CTC loss"})
@register_criterion(
    "label_smoothed_cross_entropy_with_ctc",
    dataclass=LabelSmoothedCrossEntropyWithCtcCriterionConfig,
)
class LabelSmoothedCrossEntropyWithCtcCriterion(LabelSmoothedCrossEntropyCriterion):
    """Label-smoothed cross-entropy combined with a weighted auxiliary CTC loss.

    The CTC targets/lengths are obtained from the model via
    ``get_ctc_output``/``get_ctc_target``, so the model must implement both.
    """

    def __init__(
        self,
        task,
        sentence_avg,
        label_smoothing,
        ignore_prefix_size,
        report_accuracy,
        ctc_weight,
    ):
        super().__init__(
            task, sentence_avg, label_smoothing, ignore_prefix_size, report_accuracy
        )
        # Weight for the auxiliary CTC term; 0 disables the CTC computation.
        self.ctc_weight = ctc_weight

    def forward(self, model, sample, reduce=True):
        """Compute the combined loss for one batch.

        Returns ``(loss, sample_size, logging_output)`` where ``loss`` is the
        label-smoothed cross-entropy plus ``ctc_weight`` times the CTC loss.
        """
        net_output = model(**sample["net_input"])
        loss, nll_loss = self.compute_loss(model, net_output, sample, reduce=reduce)

        ctc_loss = torch.tensor(0.0).type_as(loss)
        if self.ctc_weight > 0.0:
            ctc_lprobs, ctc_lens = model.get_ctc_output(net_output, sample)
            ctc_tgt, ctc_tgt_lens = model.get_ctc_target(sample)
            # Flatten padded targets into the 1-D form F.ctc_loss expects.
            ctc_tgt_mask = lengths_to_mask(ctc_tgt_lens)
            ctc_tgt_flat = ctc_tgt.masked_select(ctc_tgt_mask)
            # NOTE(review): with reduce=False this produces a per-sequence
            # tensor added to `loss` — confirm shapes match in that mode.
            reduction = "sum" if reduce else "none"
            ctc_loss = (
                F.ctc_loss(
                    ctc_lprobs,
                    ctc_tgt_flat,
                    ctc_lens,
                    ctc_tgt_lens,
                    reduction=reduction,
                    zero_infinity=True,
                )
                * self.ctc_weight
            )
            loss += ctc_loss

        # Normalization denominator: sentences or tokens, per configuration.
        sample_size = (
            sample["target"].size(0) if self.sentence_avg else sample["ntokens"]
        )
        logging_output = {
            "loss": utils.item(loss.data),
            "nll_loss": utils.item(nll_loss.data),
            "ctc_loss": utils.item(ctc_loss.data),
            "ntokens": sample["ntokens"],
            "nsentences": sample["target"].size(0),
            "sample_size": sample_size,
        }
        if self.report_accuracy:
            n_correct, total = self.compute_accuracy(model, net_output, sample)
            logging_output["n_correct"] = utils.item(n_correct.data)
            logging_output["total"] = utils.item(total.data)
        return loss, sample_size, logging_output

    @classmethod
    def reduce_metrics(cls, logging_outputs) -> None:
        """Aggregate per-worker logging outputs; adds the 'ctc_loss' scalar."""
        super().reduce_metrics(logging_outputs)
        loss_sum = sum(log.get("ctc_loss", 0) for log in logging_outputs)
        sample_size = sum(log.get("sample_size", 0) for log in logging_outputs)
        # Report in bits (divide by log 2), matching fairseq conventions.
        metrics.log_scalar(
            "ctc_loss", loss_sum / sample_size / math.log(2), sample_size, round=3
        )
| [
"[email protected]"
]
| |
d1f546681794e6a313737c2b6c0ecc037fcc1921 | a79c9afff9afea0c64ce8efac5683bde4ef7eeb8 | /apps/app_two/urls.py | 46e912dd7bc1de957131647bfdebb35d948d591d | []
| no_license | kswelch53/login_registration | 82e8a865125e903efbc624a3b7e3c5a4b6f98a60 | 364cfae780fe9f7e39c11dbe9e53e684d0798f5d | refs/heads/master | 2021-05-12T08:48:04.707930 | 2018-01-13T00:11:50 | 2018-01-13T00:11:50 | 117,300,189 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 165 | py | from django.conf.urls import url, include
from . import views
urlpatterns = [
# root route to index method in app_two
url(r'^$', views.index, name='index'),
]
| [
"[email protected]"
]
| |
1367392cb579fabd31cd0a289627c57077a7bda2 | a53998e56ee06a96d59d97b2601fd6ec1e4124d7 | /基础课/jichu/day19/multiple_inherit.py | 97a196a49966ef3c0d0f854d88f992a637e776cf | []
| no_license | zh-en520/aid1901 | f0ec0ec54e3fd616a2a85883da16670f34d4f873 | a56f82d0ea60b2395deacc57c4bdf3b6bc73bd2e | refs/heads/master | 2020-06-28T21:16:22.259665 | 2019-08-03T07:09:29 | 2019-08-03T07:09:29 | 200,344,127 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 308 | py | class Car:
def run(self,speed):
print('汽车以',speed,'公里/小时的速度行驶')
class Plane:
def fly(self,height):
print('飞机以海拔',height,'米的高度飞行')
class PlaneCar(Car,Plane):
'''PlaneCar为飞行汽车类'''
p1 = PlaneCar()
p1.fly(10000)
p1.run(300) | [
"[email protected]"
]
| |
e7e90eb396092cbfdd8804bdc98e246f49e58d27 | f2e70de60adb32014fc718647e00e221687c4055 | /openacademy/models/models.py | 18977c7d29a89790deb59ee8d4506deae8805181 | []
| no_license | fawad4bros/Odoo13_employe_info | 6a28f929cda1a3cb2c1ba1b665ba9565df3d2e37 | 40dd51c011726f49b331f29ffc2f7445877e5624 | refs/heads/master | 2023-04-05T10:01:42.591943 | 2021-04-04T06:58:47 | 2021-04-04T06:58:47 | 352,701,402 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,569 | py | # -*- coding: utf-8 -*-
from odoo import models, fields, api, exceptions
class Course(models.Model):
    """An OpenAcademy course: a titled topic with sessions and a responsible user."""

    _name = 'openacademy.course'
    _description = 'OpenAcademy Courses'

    name = fields.Char(string="Title", required=True, help="Name of the Course")
    description = fields.Text()
    # User in charge of the course; kept (set to null) if the user is deleted.
    responsible_id = fields.Many2one('res.users', ondelete='set null', string="Responsible", index=True)
    session_ids = fields.One2many('openacademy.session','course_id',string="Sessions")

    # SQL-level constraints: title must differ from description and be unique.
    _sql_constraints = [
        ('name_description_check',
         'CHECK(name != description)',
         "The title of the course should not be the description"),

        ('name_unique',
         'UNIQUE(name)',
         "The course title must be unique"),
    ]
class Session(models.Model):
    """A scheduled occurrence of a course, with an instructor and attendees."""

    _name = 'openacademy.session'
    _description = "OpenAcademy Sessions"

    name = fields.Char(required=True)
    start_date = fields.Date(default=fields.Date.today)
    duration = fields.Float(digits=(6, 2), help="Duration in days")
    seats = fields.Integer(string="Number of seats")
    active = fields.Boolean(default=True)

    instructor_id = fields.Many2one('res.partner', string="Instructor")
    # Deleting the course cascades and removes its sessions.
    course_id = fields.Many2one('openacademy.course', ondelete='cascade', string="Course", required=True)
    attendee_ids = fields.Many2many('res.partner',string="Attendees")

    # Percentage of seats currently taken; recomputed from seats/attendees.
    taken_seats = fields.Float(string="Taken seats", compute='_taken_seats')

    @api.depends('seats', 'attendee_ids')
    def _taken_seats(self):
        """Compute occupancy as a percentage; 0 when no seats are defined."""
        for r in self:
            if not r.seats:
                r.taken_seats= 0.0
            else:
                r.taken_seats = 100.0 * len(r.attendee_ids) / r.seats

    @api.onchange('seats', 'attendee_ids')
    def _verify_valid_seats(self):
        """UI warning when seats go negative or attendees exceed capacity."""
        if self.seats < 0:
            return {
                'warning': {
                    'title': "Incorrect 'seats' value",
                    'message': "The number of available seats may not be negative",
                },
            }
        if self.seats < len(self.attendee_ids):
            return {
                'warning': {
                    'title': "Too many attendees",
                    'message': "Increase seats or remove excess attendees",
                },
            }

    @api.constrains('instructor_id','attendee_ids')
    def _check_instructor_not_in_attendees(self):
        """Hard constraint: the instructor may not also be an attendee."""
        for r in self:
            if r.instructor_id and r.instructor_id in r.attendee_ids:
                raise exceptions.ValidationError("A session's instructor can't be an attendee")
"[email protected]"
]
| |
b5c67701e84dfa41e36333c8a985bf9e72b01b6a | 5ad6b1b7fead932860d7a1f5d1281c339ca4d487 | /papermerge/contrib/admin/migrations/0001_initial.py | d389247d6f2852eb8cd071971030e581fc0c895c | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"GPL-3.0-only"
]
| permissive | zhiliangpersonal/papermerge | da51fc7c6674a53e1c007e2eb70e5cc05de5da9e | 56c10c889e1db4760a3c47f2374a63ec12fcec3b | refs/heads/master | 2022-12-18T15:20:42.747194 | 2020-09-28T05:09:32 | 2020-09-28T05:09:32 | 299,205,072 | 1 | 0 | Apache-2.0 | 2020-09-28T06:04:35 | 2020-09-28T06:04:34 | null | UTF-8 | Python | false | false | 1,256 | py | # Generated by Django 3.0.8 on 2020-08-28 11:48
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial migration: creates the LogEntry model for the admin log."""

    initial = True

    dependencies = [
        # LogEntry.user points at the configured user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='LogEntry',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('message', models.TextField()),
                # Numeric levels mirror the stdlib logging module's constants.
                ('level', models.PositiveIntegerField(choices=[(10, 'Debug'), (20, 'Info'), (30, 'Warning'), (40, 'Error'), (50, 'Critical')], default=20)),
                ('action_time', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='action time')),
                # Nullable: entries survive deletion of the acting user.
                ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'log entry',
                'verbose_name_plural': 'log entries',
                'ordering': ('-action_time',),
            },
        ),
    ]
| [
"[email protected]"
]
| |
34757da4411d0823effcccd1a88b96cea9eb401a | 1ef536d93c6616f9793e57a9ebc6b44248d50202 | /Unit of Measure/models/res_partner.py | 6d1e384ef6259b088b57ea48681fdce6824c20e1 | []
| no_license | mohamed4185/Express | 157f21f8eba2b76042f4dbe09e4071e4411342ac | 604aa39a68bfb41165549d605d40a27b9251d742 | refs/heads/master | 2022-04-12T17:04:05.407820 | 2020-03-09T14:02:17 | 2020-03-09T14:02:17 | 246,014,712 | 1 | 3 | null | null | null | null | UTF-8 | Python | false | false | 461 | py | # -*- coding: utf-8 -*-
from odoo import api, fields ,models
from odoo.exceptions import ValidationError
import logging
_logger = logging.getLogger(__name__)
class ResPartner(models.Model):
    """Extends unit-of-measure categories with a measurement-type selection.

    NOTE(review): the class is named ``ResPartner`` but inherits
    ``uom.category`` — confirm whether the name is a leftover from a template.
    """
    _inherit="uom.category"

    # Field label is Arabic for "unit of measure type" (user-facing string
    # intentionally left untranslated).
    measure_type = fields.Selection([
        ('unit', 'Units'),
        ('weight', 'Weight'),
        ('time', 'Time'),
        ('length', 'Length'),
        ('volume', 'Volume'),
    ], string="نوع وحدة القياس")
| [
"[email protected]"
]
| |
401502d8d8b40bd00eb29a1d486ea9d4465a8eb3 | a0cc82bef6e39617f88d5b64ad960593615bfa59 | /wsgi.py | ed8a8c9bc5854819e56be7b2f9007832c9f63486 | []
| no_license | RevengeComing/RatSnake | 624b2563234daf83f62667160483bde5eb7e56a0 | e8fbc3b58c2c1fb9e029da979baa51b5fae38a85 | refs/heads/master | 2021-01-11T17:43:46.816389 | 2018-03-05T16:04:24 | 2018-03-05T16:04:24 | 79,827,634 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 104 | py | from ratsnake.app import create_app
app = create_app()
if __name__ == "__main__":
app.run(debug=True) | [
"[email protected]"
]
| |
c38a872e6e6ae5b1d7edc0e1731ec393f49a556d | a0241ddd6feb1236740c4b57f86b4e2b9c812ffe | /userdata/models/beirat.py | ee4cada6ca3852e198c2996ce9b2dd941e739f94 | []
| no_license | patta42/website | 0a08d4b11c49020acefaf8f8cca30982ca2afa50 | 72d2e136272e0ed23f74080697d16eb9bc692ac3 | refs/heads/master | 2021-06-10T16:30:51.292679 | 2021-04-24T06:27:02 | 2021-04-24T06:27:02 | 151,085,343 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,722 | py | from django.db import models
from django.utils.translation import gettext as _
from website.models import TranslatedField
from userdata.models import StaffUser
class BeiratGroups(models.Model):
    """A group within the Beirat (advisory board), with bilingual titles."""

    title_de = models.CharField(
        max_length = 128,
        verbose_name = 'Gruppe innerhalb des Beirats (deutsch)'
    )
    title_en = models.CharField(
        max_length = 128,
        verbose_name = 'Gruppe innerhalb des Beirats (english)'
    )
    # Position when listing groups on the Beirat page.
    order = models.IntegerField(
        verbose_name = 'Anzeigeposition in der Beiratsliste'
    )
    has_sub_groups = models.BooleanField(
        default = False,
        help_text = _('Ist diese Gruppe in Untergruppen nach Bereichen unterteilt?')
    )
    # Language-aware accessor that resolves to title_en or title_de.
    title = TranslatedField('title_en', 'title_de')

    def __str__(self):
        return self.title
class Beirat2StaffRelation(models.Model):
    """Membership of a StaffUser in a Beirat group.

    Records whether the member is a surrogate, whether they head the
    Beirat, and (optionally) which faculty group they belong to.
    """
    beirat_group = models.ForeignKey(
        BeiratGroups,
        on_delete = models.CASCADE
    )
    # Optional: a relation row may exist without an assigned member.
    member = models.ForeignKey(
        StaffUser,
        on_delete = models.CASCADE,
        null = True,
        blank = True
    )
    is_surrogate = models.BooleanField(
        default = False,
        help_text = _('Is this Beitrat member surrogate?')
    )
    is_head = models.BooleanField(
        default = False,
        help_text = _('Is the member head of the Beirat')
    )
    # Faculty affiliation; only meaningful for groups with sub-groups
    # (see BeiratGroups.has_sub_groups -- presumably, confirm).
    faculty_group = models.CharField(
        max_length = 64,
        choices = (
            ('natural', _('Natural Sciences')),
            ('engineering', _('Engineering')),
            ('medicine', _('Medicine'))
        ),
        blank = True,
        null = True
    )
    def __str__(self):
        return '{} ({})'.format(str(self.member), str(self.beirat_group))
| [
"[email protected]"
]
| |
7bc22ff7aaf4908fbac56962aad200eb123b3427 | 59522e46a73630181f19251b8bfef90e497c2f82 | /coop_cms/management/commands/create_db_password.py | 5e1a63ab5f5a28fa874530f70fdb8939143577be | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | ljean/coop_cms | 9befe74edda007686007f8566cd2555856099ae8 | 9e6c70afb61b57dc0326fbb64f9d6b19c04f48a1 | refs/heads/master | 2023-07-11T16:02:35.945029 | 2023-06-30T12:16:26 | 2023-06-30T12:16:26 | 5,846,409 | 3 | 5 | NOASSERTION | 2019-08-30T10:55:02 | 2012-09-17T19:53:56 | Python | UTF-8 | Python | false | false | 409 | py | # -*- coding: utf-8 -*-
from getpass import getpass
from django.core.management.base import BaseCommand
from django.conf import settings
from ...secrets import set_db_password
class Command(BaseCommand):
    """Management command that prompts for and stores a database password.

    The password is read without echoing (getpass) and handed to
    set_db_password together with BASE_DIR and SECRET_KEY, which
    presumably encrypts it with the secret key -- confirm in secrets.py.
    """
    help = 'Generates a password DB'
    def handle(self, *args, **options):
        # Interactive, hidden prompt; the literal prompt text is user-facing.
        db_password = getpass("password?")
        set_db_password(settings.BASE_DIR, settings.SECRET_KEY, db_password)
| [
"[email protected]"
]
| |
a3abb973df7c49c23f085d85ce66b4a6df1246d2 | 781e2692049e87a4256320c76e82a19be257a05d | /all_data/exercism_data/python/gigasecond/f02e9fb37a7446a1af9ca9e795c77365.py | e490967c96d68ba8fa7ac4318031d539f832a649 | []
| no_license | itsolutionscorp/AutoStyle-Clustering | 54bde86fe6dbad35b568b38cfcb14c5ffaab51b0 | be0e2f635a7558f56c61bc0b36c6146b01d1e6e6 | refs/heads/master | 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null | UTF-8 | Python | false | false | 135 | py | from datetime import timedelta
def add_gigasecond(dateTimeObj):
    """Return the moment exactly one gigasecond (10**9 seconds) after *dateTimeObj*."""
    return dateTimeObj + timedelta(seconds=10 ** 9)
| [
"[email protected]"
]
| |
a2bfd5334ad964b8fe2f5c3addb9d88084f31e5b | 0d9b75fee49b37038a10e467c39cf75c9cf4d5ae | /OpenCV_learn/code_030/opencv_030.py | d4195aea4d2a004c17e47d6166072a6ffa2427bc | []
| no_license | MachineLP/OpenCV- | 743a5fcfc3f300ccb135f869e2f048cb5fdcd02a | f3da4feb71c20d2e8bc426eb5a4e2e61a2fd4a75 | refs/heads/master | 2023-03-23T15:31:22.985413 | 2023-03-08T09:33:28 | 2023-03-08T09:33:28 | 178,887,816 | 104 | 51 | null | null | null | null | UTF-8 | Python | false | false | 657 | py | import cv2 as cv
import numpy as np
# Demonstrate cv.filter2D with three hand-built kernels: a 5x5 box blur,
# a 3x3 sharpening kernel, and a 2x2 diagonal difference kernel.
image = cv.imread("./test.png")
cv.namedWindow("input", cv.WINDOW_AUTOSIZE)
cv.imshow("input", image)
# Normalized 5x5 averaging (box blur) kernel.
box_kernel = np.ones([5, 5], dtype=np.float32) / 25.
# Center-weighted sharpening kernel.
sharpen_kernel = np.array([[0, -1, 0],
                           [-1, 5, -1],
                           [0, -1, 0]], np.float32)
# 2x2 diagonal difference kernel for a simple gradient response.
gradient_kernel = np.array([[1, 0], [0, -1]], dtype=np.float32)
blurred = cv.filter2D(image, -1, box_kernel)
sharpened = cv.filter2D(image, -1, sharpen_kernel)
# Filter in float32 precision, then map back to displayable 8-bit magnitudes.
gradient = cv.convertScaleAbs(cv.filter2D(image, cv.CV_32F, gradient_kernel))
cv.imshow("blur=5x5", blurred)
cv.imshow("shape=3x3", sharpened)
cv.imshow("gradient=2x2", gradient)
cv.waitKey(0)
cv.destroyAllWindows()
"[email protected]"
]
| |
02bf8fdde14c3aac9ce51f353f4387eeb79001e6 | e627d47d5102bd68c2012501aa120833b9271da7 | /aws_api/core/models.py | 89508d58bf7297df0d5b42ee5c50cbd82b4d8508 | []
| no_license | aayushgupta97/django-km | 5ba275d1f85eaaf8bc052e47d2b6b6f1a5e4cf90 | d34cd4f8637718044832d9baeecee86df5e821a5 | refs/heads/master | 2023-01-02T18:12:31.384634 | 2020-10-24T09:21:50 | 2020-10-24T09:21:50 | 298,391,389 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 912 | py | from django.db import models
from django.contrib.auth.models import User
from jsonfield import JSONField
# Create your models here.
class AWSCredentials(models.Model):
    """AWS account credentials belonging to a Django user.

    NOTE(review): access/secret keys are stored as plain CharFields --
    consider encrypting them at rest.
    """
    access_key = models.CharField(max_length=128)
    secret_key = models.CharField(max_length=512)
    # AWS account identifier, stored as text.
    account_id = models.CharField(max_length=40)
    default_region = models.CharField(max_length=32, default='us-east-1')
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    def __str__(self):
        return self.account_id
class EC2(models.Model):
    """Record of an EC2 instance associated with a set of AWS credentials."""
    instance_id = models.CharField(max_length=255, blank=False)
    instance_type = models.CharField(max_length=255, blank=False)
    # Boolean instance state; exact semantics (e.g. running vs stopped)
    # are defined by the code that populates it -- confirm.
    state = models.BooleanField()
    # Raw instance description stored as JSON (presumably the API
    # describe-instances payload -- verify against the populating code).
    instance_data = JSONField(null=True)
    credentials = models.ForeignKey(AWSCredentials, null=False, on_delete=models.CASCADE)
    def __str__(self):
        return f"{self.instance_id} {self.credentials}"
| [
"[email protected]"
]
| |
81ea85c94df6be769b91ca66567cb99c500486e8 | ae231d793f80966513e225dafd127c25772c2e6b | /nuitka/tree/ReformulationClasses.py | 95e73f0da69e65ac532dc10c0ba890fe779bd1d6 | [
"LicenseRef-scancode-warranty-disclaimer",
"Apache-2.0"
]
| permissive | awesome-python/Nuitka | 41bbc3cbd8a0e025ac5f59e6d3f7bcca3efe1522 | ad0ed473eb4a919d758c72cfc6cfbd5977998c5b | refs/heads/master | 2021-01-18T06:23:06.493913 | 2016-05-26T12:58:27 | 2016-05-26T12:58:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 31,489 | py | # Copyright 2016, Kay Hayen, mailto:[email protected]
#
# Part of "Nuitka", an optimizing Python compiler that is compatible and
# integrates with CPython, but also works on its own.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Reformulation of class statements.
Consult the developer manual for information. TODO: Add ability to sync
source code comments with developer manual sections.
"""
from nuitka.nodes.AssignNodes import (
ExpressionTargetTempVariableRef,
ExpressionTargetVariableRef,
StatementAssignmentVariable,
StatementReleaseVariable
)
from nuitka.nodes.AttributeNodes import (
ExpressionAttributeLookup,
ExpressionBuiltinHasattr
)
from nuitka.nodes.BuiltinRefNodes import ExpressionBuiltinRef
from nuitka.nodes.CallNodes import ExpressionCall, ExpressionCallNoKeywords
from nuitka.nodes.ClassNodes import (
ExpressionClassBody,
ExpressionSelectMetaclass
)
from nuitka.nodes.CodeObjectSpecs import CodeObjectSpec
from nuitka.nodes.ComparisonNodes import ExpressionComparisonIn
from nuitka.nodes.ConditionalNodes import (
ExpressionConditional,
StatementConditional
)
from nuitka.nodes.ConstantRefNodes import ExpressionConstantRef
from nuitka.nodes.ContainerMakingNodes import ExpressionMakeTuple
from nuitka.nodes.DictionaryNodes import (
ExpressionDictOperationGet,
StatementDictOperationRemove
)
from nuitka.nodes.FunctionNodes import (
ExpressionFunctionCall,
ExpressionFunctionCreation,
ExpressionFunctionQualnameRef,
ExpressionFunctionRef
)
from nuitka.nodes.GlobalsLocalsNodes import (
ExpressionBuiltinLocals,
StatementSetLocals
)
from nuitka.nodes.ReturnNodes import StatementReturn
from nuitka.nodes.SubscriptNodes import ExpressionSubscriptLookup
from nuitka.nodes.TypeNodes import ExpressionBuiltinType1
from nuitka.nodes.VariableRefNodes import (
ExpressionTempVariableRef,
ExpressionVariableRef
)
from nuitka.PythonVersions import python_version
from .Helpers import (
buildNode,
buildNodeList,
buildStatementsNode,
extractDocFromBody,
getKind,
makeDictCreationOrConstant,
makeSequenceCreationOrConstant,
makeStatementsSequence,
makeStatementsSequenceFromStatement
)
from .ReformulationTryFinallyStatements import makeTryFinallyStatement
def _buildClassNode3(provider, node, source_ref):
    # Reformulate a Python3 "class" statement into explicit nodes: compute
    # the bases tuple and class keyword arguments, select the metaclass,
    # call its "__prepare__", run the class body as a function, and call
    # the metaclass to produce the class object.
    # Many variables, due to the huge re-formulation that is going on here,
    # which just has the complexity, pylint: disable=R0914
    # This function is the Python3 special case with special re-formulation as
    # according to developer manual.
    class_statement_nodes, class_doc = extractDocFromBody(node)
    # We need a scope for the temporary variables, and they might be closured.
    temp_scope = provider.allocateTempScope(
        name = "class_creation",
        allow_closure = True
    )
    # Temporaries for: the bases tuple, the class statement keyword dict,
    # the selected metaclass, and the "__prepare__" result mapping.
    tmp_bases = provider.allocateTempVariable(
        temp_scope = temp_scope,
        name = "bases"
    )
    tmp_class_decl_dict = provider.allocateTempVariable(
        temp_scope = temp_scope,
        name = "class_decl_dict"
    )
    tmp_metaclass = provider.allocateTempVariable(
        temp_scope = temp_scope,
        name = "metaclass"
    )
    tmp_prepared = provider.allocateTempVariable(
        temp_scope = temp_scope,
        name = "prepared"
    )
    class_creation_function = ExpressionClassBody(
        provider = provider,
        name = node.name,
        doc = class_doc,
        flags = set(),
        source_ref = source_ref
    )
    # Dead branch ("and False"): the Python3.4 "__class__" handling via a
    # temporary variable is currently reverted, see the TODO.
    if python_version >= 340 and False: # TODO: Temporarily reverted:
        tmp_class = class_creation_function.allocateTempVariable(
            temp_scope = None,
            name = "__class__"
        )
        class_target_variable_ref = ExpressionTargetTempVariableRef(
            variable = tmp_class,
            source_ref = source_ref
        )
        class_variable_ref = ExpressionTempVariableRef(
            variable = tmp_class,
            source_ref = source_ref
        )
    else:
        class_variable = class_creation_function.getVariableForAssignment(
            "__class__"
        )
        class_target_variable_ref = ExpressionTargetVariableRef(
            variable_name = "__class__",
            variable = class_variable,
            source_ref = source_ref
        )
        class_variable_ref = ExpressionVariableRef(
            variable_name = "__class__",
            variable = class_variable,
            source_ref = source_ref
        )
    code_object = CodeObjectSpec(
        code_name = node.name,
        code_kind = "Class",
        arg_names = (),
        kw_only_count = 0,
        has_starlist = False,
        has_stardict = False
    )
    # Build the user statements of the class body within the body function.
    body = buildStatementsNode(
        provider = class_creation_function,
        nodes = class_statement_nodes,
        code_object = code_object,
        source_ref = source_ref
    )
    source_ref_orig = source_ref
    if body is not None:
        # The frame guard has nothing to tell its line number to.
        body.source_ref = source_ref
    module_variable = class_creation_function.getVariableForAssignment(
        "__module__"
    )
    # Prelude of the class body function: switch locals to the prepared
    # mapping and assign "__module__" (plus "__doc__"/"__qualname__" below).
    statements = [
        StatementSetLocals(
            new_locals = ExpressionTempVariableRef(
                variable = tmp_prepared,
                source_ref = source_ref
            ),
            source_ref = source_ref
        ),
        StatementAssignmentVariable(
            variable_ref = ExpressionTargetVariableRef(
                variable_name = "__module__",
                variable = module_variable,
                source_ref = source_ref
            ),
            source = ExpressionConstantRef(
                constant = provider.getParentModule().getFullName(),
                source_ref = source_ref,
                user_provided = True
            ),
            source_ref = source_ref
        )
    ]
    if class_doc is not None:
        doc_variable = class_creation_function.getVariableForAssignment(
            "__doc__"
        )
        statements.append(
            StatementAssignmentVariable(
                variable_ref = ExpressionTargetVariableRef(
                    variable_name = "__doc__",
                    variable = doc_variable,
                    source_ref = source_ref
                ),
                source = ExpressionConstantRef(
                    constant = class_doc,
                    source_ref = source_ref,
                    user_provided = True
                ),
                source_ref = source_ref
            )
        )
    # The "__qualname__" attribute is new in Python 3.3.
    if python_version >= 330:
        qualname = class_creation_function.getFunctionQualname()
        qualname_variable = class_creation_function.getVariableForAssignment(
            "__qualname__"
        )
        # Before 3.4 the qualname is a plain constant; from 3.4 on it is a
        # dedicated reference node that resolves late.
        if python_version < 340:
            qualname_ref = ExpressionConstantRef(
                constant = qualname,
                source_ref = source_ref,
                user_provided = True
            )
        else:
            qualname_ref = ExpressionFunctionQualnameRef(
                function_body = class_creation_function,
                source_ref = source_ref,
            )
        statements.append(
            StatementAssignmentVariable(
                variable_ref = ExpressionTargetVariableRef(
                    variable_name = "__qualname__",
                    variable = qualname_variable,
                    source_ref = source_ref
                ),
                source = qualname_ref,
                source_ref = source_ref
            )
        )
        if python_version >= 340:
            # Remembered below for qualname_setup of the body function.
            qualname_assign = statements[-1]
    # Append the user code, then create the class by calling the metaclass
    # with (name, bases, locals()) plus the keyword dict, and return it.
    statements += [
        body,
        StatementAssignmentVariable(
            variable_ref = class_target_variable_ref,
            source = ExpressionCall(
                called = ExpressionTempVariableRef(
                    variable = tmp_metaclass,
                    source_ref = source_ref
                ),
                args = makeSequenceCreationOrConstant(
                    sequence_kind = "tuple",
                    elements = (
                        ExpressionConstantRef(
                            constant = node.name,
                            source_ref = source_ref,
                            user_provided = True
                        ),
                        ExpressionTempVariableRef(
                            variable = tmp_bases,
                            source_ref = source_ref
                        ),
                        ExpressionBuiltinLocals(
                            source_ref = source_ref
                        )
                    ),
                    source_ref = source_ref
                ),
                kw = ExpressionTempVariableRef(
                    variable = tmp_class_decl_dict,
                    source_ref = source_ref
                ),
                source_ref = source_ref
            ),
            source_ref = source_ref
        ),
        StatementReturn(
            expression = class_variable_ref,
            source_ref = source_ref
        )
    ]
    body = makeStatementsSequence(
        statements = statements,
        allow_none = True,
        source_ref = source_ref
    )
    # The class body is basically a function that implicitly, at the end
    # returns its locals and cannot have other return statements contained.
    class_creation_function.setBody(body)
    class_creation_function.registerProvidedVariable(tmp_bases)
    class_creation_function.registerProvidedVariable(tmp_class_decl_dict)
    class_creation_function.registerProvidedVariable(tmp_metaclass)
    class_creation_function.registerProvidedVariable(tmp_prepared)
    # The class body is basically a function that implicitly, at the end
    # returns its created class and cannot have other return statements
    # contained.
    decorated_body = ExpressionFunctionCall(
        function = ExpressionFunctionCreation(
            function_ref = ExpressionFunctionRef(
                function_body = class_creation_function,
                source_ref = source_ref
            ),
            code_object = code_object,
            defaults = (),
            kw_defaults = None,
            annotations = None,
            source_ref = source_ref
        ),
        values = (),
        source_ref = source_ref
    )
    # Apply decorators innermost first, therefore the reversed order.
    for decorator in buildNodeList(
        provider,
        reversed(node.decorator_list),
        source_ref
    ):
        decorated_body = ExpressionCallNoKeywords(
            called = decorator,
            args = ExpressionMakeTuple(
                elements = (
                    decorated_body,
                ),
                source_ref = source_ref
            ),
            source_ref = decorator.getSourceReference()
        )
    # Outside the class body function: compute bases, keyword dict, the
    # metaclass, the "__prepare__" mapping, then bind the (decorated) class
    # creation result to the class name.
    statements = (
        StatementAssignmentVariable(
            variable_ref = ExpressionTargetTempVariableRef(
                variable = tmp_bases,
                source_ref = source_ref
            ),
            source = makeSequenceCreationOrConstant(
                sequence_kind = "tuple",
                elements = buildNodeList(
                    provider, node.bases, source_ref
                ),
                source_ref = source_ref
            ),
            source_ref = source_ref
        ),
        StatementAssignmentVariable(
            variable_ref = ExpressionTargetTempVariableRef(
                variable = tmp_class_decl_dict,
                source_ref = source_ref
            ),
            source = makeDictCreationOrConstant(
                keys = [
                    ExpressionConstantRef(
                        constant = keyword.arg,
                        source_ref = source_ref,
                        user_provided = True
                    )
                    for keyword in
                    node.keywords
                ],
                values = [
                    buildNode(provider, keyword.value, source_ref)
                    for keyword in
                    node.keywords
                ],
                source_ref = source_ref
            ),
            source_ref = source_ref
        ),
        StatementAssignmentVariable(
            variable_ref = ExpressionTargetTempVariableRef(
                variable = tmp_metaclass,
                source_ref = source_ref
            ),
            # Metaclass selection: an explicit "metaclass" keyword wins,
            # otherwise type of the first base, otherwise builtin "type".
            source = ExpressionSelectMetaclass(
                metaclass = ExpressionConditional(
                    condition = ExpressionComparisonIn(
                        left = ExpressionConstantRef(
                            constant = "metaclass",
                            source_ref = source_ref,
                            user_provided = True
                        ),
                        right = ExpressionTempVariableRef(
                            variable = tmp_class_decl_dict,
                            source_ref = source_ref
                        ),
                        source_ref = source_ref
                    ),
                    expression_yes = ExpressionDictOperationGet(
                        dict_arg = ExpressionTempVariableRef(
                            variable = tmp_class_decl_dict,
                            source_ref = source_ref
                        ),
                        key = ExpressionConstantRef(
                            constant = "metaclass",
                            source_ref = source_ref,
                            user_provided = True
                        ),
                        source_ref = source_ref
                    ),
                    expression_no = ExpressionConditional(
                        condition = ExpressionTempVariableRef(
                            variable = tmp_bases,
                            source_ref = source_ref
                        ),
                        expression_no = ExpressionBuiltinRef(
                            builtin_name = "type",
                            source_ref = source_ref
                        ),
                        expression_yes = ExpressionBuiltinType1(
                            value = ExpressionSubscriptLookup(
                                subscribed = ExpressionTempVariableRef(
                                    variable = tmp_bases,
                                    source_ref = source_ref
                                ),
                                subscript = ExpressionConstantRef(
                                    constant = 0,
                                    source_ref = source_ref,
                                    user_provided = True
                                ),
                                source_ref = source_ref
                            ),
                            source_ref = source_ref
                        ),
                        source_ref = source_ref
                    ),
                    source_ref = source_ref
                ),
                bases = ExpressionTempVariableRef(
                    variable = tmp_bases,
                    source_ref = source_ref
                ),
                source_ref = source_ref
            ),
            source_ref = source_ref_orig
        ),
        # An explicit "metaclass" keyword must not be passed on to the
        # metaclass call itself, so remove it from the keyword dict.
        StatementConditional(
            condition = ExpressionComparisonIn(
                left = ExpressionConstantRef(
                    constant = "metaclass",
                    source_ref = source_ref,
                    user_provided = True
                ),
                right = ExpressionTempVariableRef(
                    variable = tmp_class_decl_dict,
                    source_ref = source_ref
                ),
                source_ref = source_ref
            ),
            no_branch = None,
            yes_branch = makeStatementsSequenceFromStatement(
                statement = StatementDictOperationRemove(
                    dict_arg = ExpressionTempVariableRef(
                        variable = tmp_class_decl_dict,
                        source_ref = source_ref
                    ),
                    key = ExpressionConstantRef(
                        constant = "metaclass",
                        source_ref = source_ref,
                        user_provided = True
                    ),
                    source_ref = source_ref
                )
            ),
            source_ref = source_ref
        ),
        # Call metaclass.__prepare__(name, bases, **kwargs) when present,
        # else use an empty dict as the class body locals mapping.
        StatementAssignmentVariable(
            variable_ref = ExpressionTargetTempVariableRef(
                variable = tmp_prepared,
                source_ref = source_ref
            ),
            source = ExpressionConditional(
                condition = ExpressionBuiltinHasattr( # pylint: disable=E1120,E1123
                    object = ExpressionTempVariableRef(
                        variable = tmp_metaclass,
                        source_ref = source_ref
                    ),
                    name = ExpressionConstantRef(
                        constant = "__prepare__",
                        source_ref = source_ref,
                        user_provided = True
                    ),
                    source_ref = source_ref
                ),
                expression_no = ExpressionConstantRef(
                    constant = {},
                    source_ref = source_ref,
                    user_provided = True
                ),
                expression_yes = ExpressionCall(
                    called = ExpressionAttributeLookup(
                        source = ExpressionTempVariableRef(
                            variable = tmp_metaclass,
                            source_ref = source_ref
                        ),
                        attribute_name = "__prepare__",
                        source_ref = source_ref
                    ),
                    args = ExpressionMakeTuple(
                        elements = (
                            ExpressionConstantRef(
                                constant = node.name,
                                source_ref = source_ref,
                                user_provided = True
                            ),
                            ExpressionTempVariableRef(
                                variable = tmp_bases,
                                source_ref = source_ref
                            )
                        ),
                        source_ref = source_ref
                    ),
                    kw = ExpressionTempVariableRef(
                        variable = tmp_class_decl_dict,
                        source_ref = source_ref
                    ),
                    source_ref = source_ref
                ),
                source_ref = source_ref
            ),
            source_ref = source_ref
        ),
        StatementAssignmentVariable(
            variable_ref = ExpressionTargetVariableRef(
                variable_name = node.name,
                source_ref = source_ref
            ),
            source = decorated_body,
            source_ref = source_ref
        ),
    )
    if python_version >= 340:
        class_assign = statements[-1]
        class_creation_function.qualname_setup = class_assign, qualname_assign
    # Ensure the temporary variables are released in all cases.
    final = (
        StatementReleaseVariable(
            variable = tmp_bases,
            source_ref = source_ref
        ),
        StatementReleaseVariable(
            variable = tmp_class_decl_dict,
            source_ref = source_ref
        ),
        StatementReleaseVariable(
            variable = tmp_metaclass,
            source_ref = source_ref
        ),
        StatementReleaseVariable(
            variable = tmp_prepared,
            source_ref = source_ref
        )
    )
    return makeTryFinallyStatement(
        provider = provider,
        tried = statements,
        final = final,
        source_ref = source_ref
    )
def _buildClassNode2(provider, node, source_ref):
    # Reformulate a Python2 "class" statement: run the class body function
    # to collect a locals dict, pick the metaclass ("__metaclass__" key or
    # derived from the bases), and call it with (name, bases, dict).
    # This function is the Python2 special case with special re-formulation as
    # according to developer manual, and it's very detailed, pylint: disable=R0914
    class_statement_nodes, class_doc = extractDocFromBody(node)
    function_body = ExpressionClassBody(
        provider = provider,
        name = node.name,
        doc = class_doc,
        flags = set(),
        source_ref = source_ref
    )
    code_object = CodeObjectSpec(
        code_name = node.name,
        code_kind = "Class",
        arg_names = (),
        kw_only_count = 0,
        has_starlist = False,
        has_stardict = False
    )
    body = buildStatementsNode(
        provider = function_body,
        nodes = class_statement_nodes,
        code_object = code_object,
        source_ref = source_ref
    )
    if body is not None:
        # The frame guard has nothing to tell its line number to.
        body.source_ref = source_ref.atInternal()
    # The class body is basically a function that implicitly, at the end
    # returns its locals and cannot have other return statements contained, and
    # starts out with a variables "__module__" and potentially "__doc__" set.
    statements = [
        StatementAssignmentVariable(
            variable_ref = ExpressionTargetVariableRef(
                variable_name = "__module__",
                source_ref = source_ref
            ),
            source = ExpressionConstantRef(
                constant = provider.getParentModule().getFullName(),
                source_ref = source_ref,
                user_provided = True
            ),
            source_ref = source_ref.atInternal()
        )
    ]
    if class_doc is not None:
        statements.append(
            StatementAssignmentVariable(
                variable_ref = ExpressionTargetVariableRef(
                    variable_name = "__doc__",
                    source_ref = source_ref
                ),
                source = ExpressionConstantRef(
                    constant = class_doc,
                    source_ref = source_ref,
                    user_provided = True
                ),
                source_ref = source_ref.atInternal()
            )
        )
    # User statements of the class body, then implicitly return locals().
    statements += [
        body,
        StatementReturn(
            expression = ExpressionBuiltinLocals(
                source_ref = source_ref
            ),
            source_ref = source_ref.atInternal()
        )
    ]
    body = makeStatementsSequence(
        statements = statements,
        allow_none = True,
        source_ref = source_ref
    )
    # The class body is basically a function that implicitly, at the end
    # returns its locals and cannot have other return statements contained.
    function_body.setBody(body)
    # Temporaries for the bases tuple, the collected class dictionary, the
    # chosen metaclass, and the created (possibly decorated) class object.
    temp_scope = provider.allocateTempScope("class_creation")
    tmp_bases = provider.allocateTempVariable(temp_scope, "bases")
    tmp_class_dict = provider.allocateTempVariable(temp_scope, "class_dict")
    tmp_metaclass = provider.allocateTempVariable(temp_scope, "metaclass")
    tmp_class = provider.allocateTempVariable(temp_scope, "class")
    statements = [
        StatementAssignmentVariable(
            variable_ref = ExpressionTargetTempVariableRef(
                variable = tmp_bases,
                source_ref = source_ref
            ),
            source = makeSequenceCreationOrConstant(
                sequence_kind = "tuple",
                elements = buildNodeList(
                    provider, node.bases, source_ref
                ),
                source_ref = source_ref
            ),
            source_ref = source_ref
        ),
        StatementAssignmentVariable(
            variable_ref = ExpressionTargetTempVariableRef(
                variable = tmp_class_dict,
                source_ref = source_ref
            ),
            # Execute the class body function; its return value is the
            # class dictionary.
            source = ExpressionFunctionCall(
                function = ExpressionFunctionCreation(
                    function_ref = ExpressionFunctionRef(
                        function_body = function_body,
                        source_ref = source_ref
                    ),
                    code_object = None,
                    defaults = (),
                    kw_defaults = None,
                    annotations = None,
                    source_ref = source_ref
                ),
                values = (),
                source_ref = source_ref
            ),
            source_ref = source_ref
        ),
        StatementAssignmentVariable(
            variable_ref = ExpressionTargetTempVariableRef(
                variable = tmp_metaclass,
                source_ref = source_ref
            ),
            # Metaclass: an explicit "__metaclass__" entry of the class
            # dictionary wins, otherwise it is selected from the bases.
            source = ExpressionConditional(
                condition = ExpressionComparisonIn(
                    left = ExpressionConstantRef(
                        constant = "__metaclass__",
                        source_ref = source_ref,
                        user_provided = True
                    ),
                    right = ExpressionTempVariableRef(
                        variable = tmp_class_dict,
                        source_ref = source_ref
                    ),
                    source_ref = source_ref
                ),
                expression_yes = ExpressionDictOperationGet(
                    dict_arg = ExpressionTempVariableRef(
                        variable = tmp_class_dict,
                        source_ref = source_ref
                    ),
                    key = ExpressionConstantRef(
                        constant = "__metaclass__",
                        source_ref = source_ref,
                        user_provided = True
                    ),
                    source_ref = source_ref
                ),
                expression_no = ExpressionSelectMetaclass(
                    metaclass = None,
                    bases = ExpressionTempVariableRef(
                        variable = tmp_bases,
                        source_ref = source_ref
                    ),
                    source_ref = source_ref
                ),
                source_ref = source_ref
            ),
            source_ref = source_ref
        ),
        StatementAssignmentVariable(
            variable_ref = ExpressionTargetTempVariableRef(
                variable = tmp_class,
                source_ref = source_ref
            ),
            source = ExpressionCallNoKeywords(
                called = ExpressionTempVariableRef(
                    variable = tmp_metaclass,
                    source_ref = source_ref
                ),
                args = ExpressionMakeTuple(
                    elements = (
                        ExpressionConstantRef(
                            constant = node.name,
                            source_ref = source_ref,
                            user_provided = True
                        ),
                        ExpressionTempVariableRef(
                            variable = tmp_bases,
                            source_ref = source_ref
                        ),
                        ExpressionTempVariableRef(
                            variable = tmp_class_dict,
                            source_ref = source_ref
                        )
                    ),
                    source_ref = source_ref
                ),
                source_ref = source_ref
            ),
            source_ref = source_ref
        ),
    ]
    # Apply decorators innermost first, therefore the reversed order.
    for decorator in buildNodeList(
        provider,
        reversed(node.decorator_list),
        source_ref
    ):
        statements.append(
            StatementAssignmentVariable(
                variable_ref = ExpressionTargetTempVariableRef(
                    variable = tmp_class,
                    source_ref = source_ref
                ),
                source = ExpressionCallNoKeywords(
                    called = decorator,
                    args = ExpressionMakeTuple(
                        elements = (
                            ExpressionTempVariableRef(
                                variable = tmp_class,
                                source_ref = source_ref
                            ),
                        ),
                        source_ref = source_ref
                    ),
                    source_ref = decorator.getSourceReference()
                ),
                source_ref = decorator.getSourceReference()
            )
        )
    # Finally bind the finished class object to the class name.
    statements.append(
        StatementAssignmentVariable(
            variable_ref = ExpressionTargetVariableRef(
                variable_name = node.name,
                source_ref = source_ref
            ),
            source = ExpressionTempVariableRef(
                variable = tmp_class,
                source_ref = source_ref
            ),
            source_ref = source_ref
        )
    )
    # Ensure the temporary variables are released in all cases.
    final = (
        StatementReleaseVariable(
            variable = tmp_class,
            source_ref = source_ref
        ),
        StatementReleaseVariable(
            variable = tmp_bases,
            source_ref = source_ref
        ),
        StatementReleaseVariable(
            variable = tmp_class_dict,
            source_ref = source_ref
        ),
        StatementReleaseVariable(
            variable = tmp_metaclass,
            source_ref = source_ref
        )
    )
    return makeTryFinallyStatement(
        provider = function_body,
        tried = statements,
        final = final,
        source_ref = source_ref
    )
def buildClassNode(provider, node, source_ref):
    # Entry point for "ClassDef" AST nodes: dispatch to the Python2 or
    # Python3 specific re-formulation above.
    assert getKind(node) == "ClassDef"
    # There appears to be a inconsistency with the top level line number
    # not being the one really the class has, if there are bases, and a
    # decorator.
    if node.bases:
        source_ref = source_ref.atLineNumber(node.bases[-1].lineno)
    # Python2 and Python3 are similar, but fundamentally different, so handle
    # them in dedicated code.
    if python_version < 300:
        return _buildClassNode2(provider, node, source_ref)
    else:
        return _buildClassNode3(provider, node, source_ref)
| [
"[email protected]"
]
| |
d17ce8802fa93c39c6b0c878618591c8e9e54804 | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-eihealth/huaweicloudsdkeihealth/v1/model/list_workflow_statistic_request.py | 15c6ff0b1d99414e34d6ff1f3f4a86a7a12c7a08 | [
"Apache-2.0"
]
| permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 3,669 | py | # coding: utf-8
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ListWorkflowStatisticRequest:
    """Generated request model for listing workflow statistics (EIHealth).
    Attributes:
        openapi_types (dict): The key is attribute name
                            and the value is attribute type.
        attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Attribute names whose values are masked as "****" in to_dict output.
    sensitive_list = []
    openapi_types = {
        'eihealth_project_id': 'str'
    }
    attribute_map = {
        'eihealth_project_id': 'eihealth_project_id'
    }
    def __init__(self, eihealth_project_id=None):
        """ListWorkflowStatisticRequest
        The model defined in huaweicloud sdk
        :param eihealth_project_id: EIHealth project ID; click the desired project name on the EIHealth platform and open the project settings page to view it.
        :type eihealth_project_id: str
        """
        self._eihealth_project_id = None
        self.discriminator = None
        self.eihealth_project_id = eihealth_project_id
    @property
    def eihealth_project_id(self):
        """Gets the eihealth_project_id of this ListWorkflowStatisticRequest.
        EIHealth project ID; click the desired project name on the EIHealth platform and open the project settings page to view it.
        :return: The eihealth_project_id of this ListWorkflowStatisticRequest.
        :rtype: str
        """
        return self._eihealth_project_id
    @eihealth_project_id.setter
    def eihealth_project_id(self, eihealth_project_id):
        """Sets the eihealth_project_id of this ListWorkflowStatisticRequest.
        EIHealth project ID; click the desired project name on the EIHealth platform and open the project settings page to view it.
        :param eihealth_project_id: The eihealth_project_id of this ListWorkflowStatisticRequest.
        :type eihealth_project_id: str
        """
        self._eihealth_project_id = eihealth_project_id
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                # Mask attributes flagged as sensitive.
                if attr in self.sensitive_list:
                    result[attr] = "****"
                else:
                    result[attr] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        import simplejson as json
        if six.PY2:
            # Python2 only: force UTF-8 default encoding before dumping.
            import sys
            reload(sys)
            sys.setdefaultencoding("utf-8")
        return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
    def __repr__(self):
        """For `print`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, ListWorkflowStatisticRequest):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"[email protected]"
]
| |
9e8d73095336a9aec74a9c50d0d0e8418c3ee1fa | 72863e7278f4be8b5d63d999144f9eaec3e7ec48 | /venv/lib/python2.7/site-packages/libmproxy/console/window.py | 69d5e242cd0f379d95e9b2ce08e66ce6ea7d6a88 | [
"MIT"
]
| permissive | sravani-m/Web-Application-Security-Framework | 6e484b6c8642f47dac94e67b657a92fd0dbb6412 | d9f71538f5cba6fe1d8eabcb26c557565472f6a6 | refs/heads/master | 2020-04-26T11:54:01.334566 | 2019-05-03T19:17:30 | 2019-05-03T19:17:30 | 173,532,718 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,189 | py | import urwid
from . import signals
class Window(urwid.Frame):
    """Top-level urwid frame of the console UI.

    Wraps body/header/footer in the "background" display attribute and
    translates global key presses and mouse events into master actions.
    """
    def __init__(self, master, body, header, footer, helpctx):
        urwid.Frame.__init__(
            self,
            urwid.AttrWrap(body, "background"),
            header = urwid.AttrWrap(header, "background") if header else None,
            footer = urwid.AttrWrap(footer, "background") if footer else None
        )
        self.master = master
        self.helpctx = helpctx
        # Refocus a frame section whenever the global focus signal fires.
        signals.focus.connect(self.sig_focus)
    def sig_focus(self, sender, section):
        self.focus_position = section
    def mouse_event(self, *args, **kwargs):
        # args: (size, event, button, col, row)
        # NOTE(review): super(self.__class__, ...) recurses infinitely if
        # this class is ever subclassed; should be super(Window, self).
        k = super(self.__class__, self).mouse_event(*args, **kwargs)
        if not k:
            if args[1] == "mouse drag":
                signals.status_message.send(
                    message = "Hold down shift, alt or ctrl to select text.",
                    expire = 1
                )
            elif args[1] == "mouse press" and args[2] == 4:
                # Scroll wheel up (button 4) / down (button 5) map to
                # cursor movement keys.
                self.keypress(args[0], "up")
            elif args[1] == "mouse press" and args[2] == 5:
                self.keypress(args[0], "down")
            else:
                return False
            return True
    def keypress(self, size, k):
        # Let the focused widget consume the key first; unhandled keys fall
        # through to the global bindings: "?" help, "c" client replay,
        # "i" intercept filter, "o" options, "Q" quit, "q" back,
        # "S" server replay.
        k = super(self.__class__, self).keypress(size, k)
        if k == "?":
            self.master.view_help(self.helpctx)
        elif k == "c":
            if not self.master.client_playback:
                signals.status_prompt_path.send(
                    self,
                    prompt = "Client replay",
                    callback = self.master.client_playback_path
                )
            else:
                signals.status_prompt_onekey.send(
                    self,
                    prompt = "Stop current client replay?",
                    keys = (
                        ("yes", "y"),
                        ("no", "n"),
                    ),
                    callback = self.master.stop_client_playback_prompt,
                )
        elif k == "i":
            signals.status_prompt.send(
                self,
                prompt = "Intercept filter",
                text = self.master.state.intercept_txt,
                callback = self.master.set_intercept
            )
        elif k == "o":
            self.master.view_options()
        elif k == "Q":
            raise urwid.ExitMainLoop
        elif k == "q":
            signals.pop_view_state.send(self)
        elif k == "S":
            if not self.master.server_playback:
                signals.status_prompt_path.send(
                    self,
                    prompt = "Server replay path",
                    callback = self.master.server_playback_path
                )
            else:
                signals.status_prompt_onekey.send(
                    self,
                    prompt = "Stop current server replay?",
                    keys = (
                        ("yes", "y"),
                        ("no", "n"),
                    ),
                    callback = self.master.stop_server_playback_prompt,
                )
        else:
            return k
| [
"[email protected]"
]
| |
72a814435d4159ba19a2c548b890a43020e942c8 | f68cd225b050d11616ad9542dda60288f6eeccff | /testscripts/RDKB/component/PAM/TS_PAM_GetProcessNumberOfEntries.py | 173f02eb2b3d59f3694789afc506cc744abebd56 | [
"Apache-2.0"
]
| permissive | cablelabs/tools-tdkb | 18fb98fadcd169fa9000db8865285fbf6ff8dc9d | 1fd5af0f6b23ce6614a4cfcbbaec4dde430fad69 | refs/heads/master | 2020-03-28T03:06:50.595160 | 2018-09-04T11:11:00 | 2018-09-05T00:24:38 | 147,621,410 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,419 | py | ##########################################################################
# If not stated otherwise in this file or this component's Licenses.txt
# file the following copyright and licenses apply:
#
# Copyright 2016 RDK Management
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
'''
<?xml version="1.0" encoding="UTF-8"?><xml>
<id/>
<version>5</version>
<name>TS_PAM_GetProcessNumberOfEntries</name>
<primitive_test_id/>
<primitive_test_name>pam_GetParameterValues</primitive_test_name>
<primitive_test_version>1</primitive_test_version>
<status>FREE</status>
<synopsis>This testcase returns the no: of processes running in the device</synopsis>
<groups_id/>
<execution_time>1</execution_time>
<long_duration>false</long_duration>
<remarks/>
<skip>false</skip>
<box_types>
<box_type>RPI</box_type>
<box_type>Emulator</box_type>
<box_type>Broadband</box_type>
</box_types>
<rdk_versions>
<rdk_version>RDKB</rdk_version>
</rdk_versions>
<test_cases>
<test_case_id>TC_PAM_89</test_case_id>
<test_objective>To get the no: of processes running in the device</test_objective>
<test_type>Positive</test_type>
<test_setup>Emulator,XB3</test_setup>
<pre_requisite>1.Ccsp Components in DUT should be in a running state that includes component under test Cable Modem
2.TDK Agent should be in running state or invoke it through StartTdk.sh script</pre_requisite>
<api_or_interface_used>None</api_or_interface_used>
<input_parameters>Json Interface:
API Name
pam_GetParameterValues
Input:
ParamName -
Device.DeviceInfo.ProcessStatus.ProcessNumberOfEntries</input_parameters>
<automation_approch>1.Function which needs to be tested will be configured in Test Manager GUI.
2.Python Script will be generated by Test Manager with provided arguments in configure page.
3.TM will load the PAM library via Test agent
4.From python script, invoke pam_GetParameterValues() stub function to get the number of processes running.
5.pam stub function will call the ssp_getParameterValue() function of tdk component.
6.Responses from the pam stub function will be logged in Agent Console log.
7.pam stub will validate the actual result with the expected result and send the result status to Test Manager.
8.Test Manager will publish the result in GUI as PASS/FAILURE based on the response from pam stub.</automation_approch>
<except_output>CheckPoint 1:
The output should be logged in the Agent console/Component log
CheckPoint 2:
Stub function result should be success and should see corresponding log in the agent console log
CheckPoint 3:
TestManager GUI will publish the result as PASS in Execution/Console page of Test Manager</except_output>
<priority>High</priority>
<test_stub_interface>None</test_stub_interface>
<test_script>TS_PAM_GetProcessNumberOfEntries</test_script>
<skipped>No</skipped>
<release_version/>
<remarks/>
</test_cases>
<script_tags/>
</xml>
'''
#import statement
import tdklib;
#Test component to be tested
obj = tdklib.TDKScriptingLibrary("pam","RDKB");
#IP and Port of box, No need to change,
#This will be replaced with correspoing Box Ip and port while executing script
ip = <ipaddress>
port = <port>
obj.configureTestCase(ip,port,'TS_PAM_GetProcessNumberOfEntries');
#Get the result of connection with test component and STB
loadmodulestatus =obj.getLoadModuleResult();
print "[LIB LOAD STATUS] : %s" %loadmodulestatus ;
if "SUCCESS" in loadmodulestatus.upper():
#Set the result status of execution
obj.setLoadModuleStatus("SUCCESS");
tdkTestObj = obj.createTestStep('pam_GetParameterValues');
tdkTestObj.addParameter("ParamName","Device.DeviceInfo.ProcessStatus.ProcessNumberOfEntries");
expectedresult="SUCCESS";
#Execute the test case in STB
tdkTestObj.executeTestCase(expectedresult);
actualresult = tdkTestObj.getResult();
details = tdkTestObj.getResultDetails();
if expectedresult in actualresult:
#Set the result status of execution
tdkTestObj.setResultStatus("SUCCESS");
print "TEST STEP 1: Get the number of processes";
print "EXPECTED RESULT 1: Should get the number of processes";
print "ACTUAL RESULT 1: No of Processes %s" %details;
#Get the result of execution
print "[TEST EXECUTION RESULT] : SUCCESS"
else:
tdkTestObj.setResultStatus("FAILURE");
print "TEST STEP 1: Get the number of processes";
print "EXPECTED RESULT 1: Should get the number of processes";
print "ACTUAL RESULT 1: Failure in getting the number of processes. Details : %s" %details;
print "[TEST EXECUTION RESULT] : FAILURE";
obj.unloadModule("pam");
else:
print "Failed to load pam module";
obj.setLoadModuleStatus("FAILURE");
print "Module loading failed";
| [
"[email protected]"
]
| |
a3896f595a20d91502cbd6e39182b02c88162ce0 | c8a84cdced4d408a273a99a82432e41fc6a9ab55 | /src/neprojects/settings/prod.py | f0bd684ef104bfaafcf743399bf9f4088f514eba | []
| no_license | poudel/goodmandu | 584bb652ef4f51ee4ca336f674e37d38a782e3c9 | 64177d7fd95002dd06ad9b99817fff6095a9166c | refs/heads/master | 2021-06-12T14:56:18.802948 | 2019-10-09T19:15:34 | 2019-10-09T19:47:16 | 128,678,514 | 1 | 0 | null | 2020-06-05T18:07:43 | 2018-04-08T20:18:13 | Python | UTF-8 | Python | false | false | 96 | py | from .base import *
DEBUG = False
try:
from .local import *
except ImportError:
pass
| [
"[email protected]"
]
| |
1e3202d244ebfff9dd6bcecadaa71c32fb40e2fd | 709bd5f2ecc69a340da85f6aed67af4d0603177e | /saleor/product/migrations/0073_auto_20181010_0729.py | b6435a6d48c72a8b3723f876f1995998d63fc51b | [
"BSD-3-Clause"
]
| permissive | Kenstogram/opensale | 41c869ee004d195bd191a1a28bf582cc6fbb3c00 | 5102f461fa90f2eeb13b9a0a94ef9cb86bd3a3ba | refs/heads/master | 2022-12-15T02:48:48.810025 | 2020-03-10T02:55:10 | 2020-03-10T02:55:10 | 163,656,395 | 8 | 0 | BSD-3-Clause | 2022-12-08T01:31:09 | 2018-12-31T09:30:41 | Python | UTF-8 | Python | false | false | 1,129 | py | # Generated by Django 2.1.2 on 2018-10-10 12:29
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('product', '0072_auto_20180925_1048'),
]
operations = [
migrations.AddField(
model_name='attribute',
name='product_type',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='temp_product_attributes', to='product.ProductType'),
),
migrations.AddField(
model_name='attribute',
name='product_variant_type',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='temp_variant_attributes', to='product.ProductType'),
),
migrations.AlterField(
model_name='attribute',
name='name',
field=models.CharField(max_length=50),
),
migrations.AlterField(
model_name='attribute',
name='slug',
field=models.SlugField(),
),
]
| [
"[email protected]"
]
| |
8b0c025a56778453f144197d06f980ed00737458 | af8f0d50bb11279c9ff0b81fae97f754df98c350 | /src/book/api/views/category.py | 3037b5486bcde77e4a473f50e746cb978cc99220 | [
"Apache-2.0"
]
| permissive | DmytroKaminskiy/ltt | 592ed061efe3cae169a4e01f21d2e112e58714a1 | d08df4d102e678651cd42928e2343733c3308d71 | refs/heads/master | 2022-12-18T09:56:36.077545 | 2020-09-20T15:57:35 | 2020-09-20T15:57:35 | 292,520,616 | 0 | 0 | Apache-2.0 | 2020-09-20T15:49:58 | 2020-09-03T09:09:26 | HTML | UTF-8 | Python | false | false | 513 | py | from book.api.serializers.category import CategorySerializer
from book.models import Category
from rest_framework import generics
__all__ = [
'ListCreateCategoryView',
'RetrieveCategoryView',
]
class ListCreateCategoryView(generics.ListCreateAPIView):
serializer_class = CategorySerializer
queryset = Category.objects.all().order_by('-id')
class RetrieveCategoryView(generics.RetrieveAPIView):
queryset = Category.objects.all().order_by('-id')
serializer_class = CategorySerializer
| [
"[email protected]"
]
| |
3eebded3e51926fff9c1f76a81b7786c011c7547 | 8aa1b94626402c0c614128d6061edb771dad05cf | /e100/e017.py | b24dd8c62dd7fb877ccffbdbd147ff7d80e27ed6 | []
| no_license | netfj/Project_Stu02 | 31e76c1b656ee74c54cae2185821dec7ccf50401 | afc1b26b7c586fd6979ab574c7d357a6b9ef4d29 | refs/heads/master | 2023-03-13T22:24:40.364167 | 2021-02-23T09:53:31 | 2021-02-23T09:53:31 | 341,506,093 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 631 | py | #coding:utf-8
"""
@info: 题目:输入一行字符,分别统计出其中英文字母、空格、数字和其它字符的个数。
@author:NetFj @software:PyCharm @file:e017.py @time:2018/11/1.16:45
"""
# c='0'
# while c != '':
# c = input('Input a string:')
c='abc1 2 3 4 5 6[(@#$)]'
y,k,s,q =0,0,0,0
for x in c:
if x.isalpha():y+=1
elif x.isspace():k+=1
elif x.isdigit(): s += 1
else: q+=1
print(y,k,s,q)
y,k,s,q =0,0,0,0
for n in range(0,len(c)):
if c[n].isalpha():
y += 1
elif c[n].isspace():
k += 1
elif c[n].isdigit():
s += 1
else:
q += 1
print(y, k, s, q) | [
"[email protected]"
]
| |
511334a97959c166add09f07d30f551acd4b2aeb | 268b0441a5fd45da501f5e50e155a86043de2472 | /test/test_puchikarui.py | 6495727bd08686c549c8fb266e4650e6d8b26d27 | [
"MIT"
]
| permissive | cliffpham/puchikarui | c837e4b487cd4c3a8fcfe0c46d69ea2804bdfc9a | c6cf9292685f2a5c93f9feb206e2dc1371ce20f2 | refs/heads/master | 2020-07-31T16:28:01.674364 | 2019-07-03T03:36:54 | 2019-07-03T03:36:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,282 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
Script for testing puchikarui library
Latest version can be found at https://github.com/letuananh/puchikarui
References:
Python documentation:
https://docs.python.org/
Python unittest
https://docs.python.org/3/library/unittest.html
@author: Le Tuan Anh <[email protected]>
@license: MIT
'''
# Copyright (c) 2014-2017, Le Tuan Anh <[email protected]>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
########################################################################
import os
import unittest
import logging
from puchikarui import DataSource
from puchikarui import Schema, with_ctx
from puchikarui import escape_like, head_like, tail_like, contain_like
# ----------------------------------------------------------------------
# Configuration
# ----------------------------------------------------------------------
TEST_DIR = os.path.abspath(os.path.dirname(__file__))
TEST_DATA = os.path.join(TEST_DIR, 'data')
SETUP_FILE = os.path.join(TEST_DATA, 'init_script.sql')
SETUP_SCRIPT = "INSERT INTO person (name, age) VALUES ('Chun', 78)"
TEST_DB = os.path.join(TEST_DIR, 'data', 'test.db')
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
# ------------------------------------------------------------------------------
# Test cases
# ------------------------------------------------------------------------------
class SchemaDemo(Schema):
def __init__(self, data_source=':memory:', setup_script=SETUP_SCRIPT, setup_file=SETUP_FILE):
Schema.__init__(self, data_source=data_source, setup_script=setup_script, setup_file=setup_file)
self.add_table('person', ['ID', 'name', 'age'], proto=Person, id_cols=('ID',))
self.add_table('hobby').add_fields('pid', 'hobby')
self.add_table('diary', ['ID', 'pid', 'text'], proto=Diary).set_id('ID').field_map(pid='ownerID', text='content')
class Diary(object):
def __init__(self, content='', owner=None):
"""
"""
self.ID = None
if owner:
self.owner = owner
self.ownerID = owner.ID
else:
self.owner = None
self.ownerID = None
self.content = content
def __str__(self):
return "{per} wrote `{txt}`".format(per=self.owner.name if self.owner else '#{}'.format(self.ownerID), txt=self.content)
class Person(object):
def __init__(self, name='', age=-1):
self.ID = None
self.name = name
self.age = age
def __str__(self):
return "#{}: {}/{}".format(self.ID, self.name, self.age)
########################################################################
class TestUtilClass(unittest.TestCase):
def test_path(self):
my_home = os.path.expanduser('~')
expected_loc = os.path.join(my_home, 'tmp', 'test.db')
ds = DataSource('~/tmp/test.db')
self.assertEqual(expected_loc, ds.path)
class TestDemoLib(unittest.TestCase):
@classmethod
def setUpClass(cls):
print("Setting up tests ...")
if os.path.isfile(TEST_DB):
logger.info("Test DB exists, removing it now")
os.unlink(TEST_DB)
def test_sqlite_methods(self):
db = SchemaDemo()
num = db.ds.select_scalar('SELECT 2')
self.assertEqual(num, 2)
nums = db.ds.select_single('SELECT 2, 3, 4')
self.assertEqual(tuple(nums), (2, 3, 4))
matrix = db.ds.select('SELECT 1, 2, 3 UNION SELECT 4, 5, 6')
self.assertEqual(tuple(tuple(row) for row in matrix), ((1, 2, 3), (4, 5, 6)))
def test_basic(self):
print("Testing basic database actions")
db = SchemaDemo(TEST_DB, setup_file=SETUP_FILE, setup_script=SETUP_SCRIPT)
# We can excute SQLite script as usual ...
db.ds.execute("INSERT INTO person (name, age) VALUES ('Chen', 15);")
# Or use this ORM-like method
# Test insert
db.person.insert('Kent', 42)
# Test select data
persons = db.person.select(where='age > ?', values=[25], orderby='age', limit=10)
expected = [('Ji', 28), ('Ka', 32), ('Vi', 33), ('Kent', 42), ('Chun', 78)]
actual = [(person.name, person.age) for person in persons]
self.assertEqual(expected, actual)
# Test select single
ji = db.person.select_single('name=?', ('Ji',))
self.assertIsNotNone(ji)
self.assertEqual(ji.age, 28)
# Test delete
db.person.delete(where='age > ?', values=(70,))
chun = db.person.select_single('name=?', ('Chun',))
self.assertIsNone(chun)
def test_execution_context(self):
db = SchemaDemo(":memory:")
with db.ctx() as ctx:
# test select
ppl = ctx.person.select()
self.assertEqual(len(ppl), 6)
# test insert
ctx.person.insert('Totoro', columns=('name',)) # insert partial data
ctx.person.insert('Shizuka', 10) # full record
p = ctx.person.select_single(where='name=?', values=('Dunno',))
self.assertIsNone(p)
# Test update data & select single
ctx.person.update((10,), "name=?", ("Totoro",), columns=('age',))
totoro = ctx.person.select_single(where='name=?', values=('Totoro',))
self.assertEqual(totoro.age, 10)
# test updated
ppl = ctx.person.select()
self.assertEqual(len(ppl), 8)
# test delete
ctx.person.delete('age > ?', (70,))
ppl = ctx.person.select()
# done!
expected = [(1, 'Ji', 28), (2, 'Zen', 25), (3, 'Ka', 32), (4, 'Anh', 15), (5, 'Vi', 33), (7, 'Totoro', 10), (8, 'Shizuka', 10)]
actual = [(person.ID, person.name, person.age) for person in ppl]
self.assertEqual(expected, actual)
def test_selective_select(self):
db = SchemaDemo() # create a new DB in RAM
pers = db.person.select(columns=('name',))
names = [x.name for x in pers]
self.assertEqual(names, ['Ji', 'Zen', 'Ka', 'Anh', 'Vi', 'Chun'])
def test_orm_persistent(self):
db = SchemaDemo(TEST_DB)
bid = db.person.save(Person('Buu', 1000))
buu = db.person.by_id(bid)
self.assertIsNotNone(buu)
self.assertEqual(buu.name, 'Buu')
# insert more stuff
db.hobby.insert(buu.ID, 'candies')
db.hobby.insert(buu.ID, 'chocolate')
db.hobby.insert(buu.ID, 'santa')
hobbies = db.hobby.select('pid=?', (buu.ID,))
self.assertEqual({x.hobby for x in hobbies}, {'candies', 'chocolate', 'santa'})
db.hobby.delete('hobby=?', ('chocolate',))
hobbies = db.hobby.select('pid=?', (buu.ID,))
self.assertEqual({x.hobby for x in hobbies}, {'candies', 'santa'})
def test_orm_with_context(self):
db = SchemaDemo() # create a new DB in RAM
with db.ctx() as ctx:
p = ctx.person.select_single('name=?', ('Anh',))
# There is no prototype class for hobby, so a namedtuple will be generated
hobbies = ctx.hobby.select('pid=?', (p.ID,))
self.assertIsInstance(p, Person)
self.assertIsInstance(hobbies[0], tuple)
self.assertEqual(hobbies[0].hobby, 'coding')
# insert hobby
ctx.hobby.insert(p.ID, 'reading')
hobbies = [x.hobby for x in ctx.hobby.select('pid=?', (p.ID,), columns=('hobby',))]
self.assertEqual(hobbies, ['coding', 'reading'])
# now only select the name and not the age
p2 = ctx.person.select_single('name=?', ('Vi',), columns=('ID', 'name',))
self.assertEqual(p2.name, 'Vi')
self.assertEqual(p2.age, -1)
# test updating object
p2.name = 'Vee'
ctx.update_object(db.person, p2, ('name',))
p2.age = 29
ctx.update_object(db.person, p2)
# ensure that data was updated
p2n = ctx.person.by_id(p2.ID)
self.assertEqual(p2n.name, 'Vee')
self.assertEqual(p2n.age, 29)
self.assertEqual(p2n.ID, p2.ID)
def test_field_mapping(self):
content = 'I am better than Emacs'
new_content = 'I am NOT better than Emacs'
db = SchemaDemo()
with db.ctx() as ctx:
vi = ctx.person.select_single('name=?', ('Vi',))
diary = Diary(content, owner=vi)
ctx.diary.save(diary)
diaries = ctx.diary.select('pid=?', (vi.ID,))
for d in diaries:
d.owner = ctx.person.by_id(d.ownerID)
print(d)
# test update
d.content = new_content
ctx.diary.save(d)
diary = ctx.diary.by_id(d.ID)
self.assertEqual(diary.content, new_content)
print(diary)
class SchemaA(Schema):
SETUP_FILE = os.path.join(TEST_DATA, 'schemaA.sql')
def __init__(self, data_source=':memory:', setup_script=None, setup_file=None):
super().__init__(data_source=data_source, setup_script=setup_script, setup_file=setup_file)
# setup scripts & files
self.add_file(SchemaA.SETUP_FILE)
self.add_script("INSERT INTO person (name, age) VALUES ('potter', 10)")
# Table definitions
self.add_table('person', ['ID', 'name', 'age'], proto=Person, id_cols=('ID',))
class SchemaB(Schema):
SETUP_FILE = os.path.join(TEST_DATA, 'schemaB.sql')
def __init__(self, data_source=':memory:', setup_script=None, setup_file=None):
super().__init__(data_source=data_source, setup_script=setup_script, setup_file=setup_file)
# setup scripts & files
self.add_file(SchemaB.SETUP_FILE)
self.add_script("INSERT INTO hobby (name) VALUES ('magic')")
# Table definitions
self.add_table('hobby', ['ID', 'name'], proto=Hobby, id_cols=('ID',))
self.add_table("person_hobby", ["hid", "pid"])
class Hobby(object):
def __init__(self, name=None):
self.name = name
def __repr__(self):
return "Hobby: {}".format(self.name)
class SchemaAB(SchemaB, SchemaA):
''' Execution order: setup_files > setup_scripts
Schema's file > SchemaA's file > SchemaB's file >
Schema's script > SchemaA's script > SchemaB's script
Note: The first class in inheritance list will be executed last
'''
def __init__(self, data_source=":memory:", setup_script=None, setup_file=None):
super().__init__(data_source=data_source, setup_script=setup_script, setup_file=setup_file)
self.add_script('''INSERT INTO person_hobby VALUES ((SELECT ID FROM hobby WHERE name='magic'), (SELECT ID FROM person WHERE name='potter'));''')
@with_ctx
def all_hobby(self, ctx=None):
return ctx.hobby.select()
@with_ctx
def find_hobby(self, name, ctx=None):
return ctx.hobby.select("name = ?", (name,))
class TestMultipleSchema(unittest.TestCase):
def test_ms(self):
db = SchemaAB()
with db.ctx() as ctx:
potter = ctx.person.select_single()
magic = ctx.hobby.select_single()
link = ctx.person_hobby.select_single()
self.assertEqual(potter.name, 'potter')
self.assertEqual(magic.name, 'magic')
self.assertEqual(link.hid, magic.ID)
self.assertEqual(link.pid, potter.ID)
# access schema function from context
self.assertEqual(len(ctx.all_hobby()), 1)
self.assertEqual(ctx.find_hobby('magic')[0].name, 'magic')
print
pass
class AdvancedDemo(SchemaDemo):
@with_ctx
def demo(self, ctx=None):
p = Person("Buu", 1000)
p.ID = ctx.person.save(p)
return ctx.person.by_id(p.ID)
class TestWithContext(unittest.TestCase):
def test_ms(self):
db = AdvancedDemo()
print(db.demo().age)
with db.ctx() as ctx:
print(db.demo(ctx=ctx))
class TestHelpers(unittest.TestCase):
def test_escape(self):
actual = escape_like('_')
expect = '@_'
self.assertEqual(actual, expect)
actual = escape_like('%')
expect = '@%'
self.assertEqual(actual, expect)
actual = escape_like('@')
expect = '@@'
self.assertEqual(actual, expect)
actual = escape_like('')
expect = ''
self.assertEqual(actual, expect)
actual = escape_like('usual')
expect = 'usual'
self.assertEqual(actual, expect)
self.assertRaises(Exception, lambda: escape_like(None))
actual = escape_like('%_%@')
expect = '@%@_@%@@'
self.assertEqual(actual, expect)
actual = head_like('a@b')
expect = 'a@@b%'
self.assertEqual(actual, expect)
actual = tail_like('a@b')
expect = '%a@@b'
self.assertEqual(actual, expect)
actual = contain_like('a_@_b')
expect = '%a@_@@@_b%'
self.assertEqual(actual, expect)
# ------------------------------------------------------------------------------
# Main
# ------------------------------------------------------------------------------
if __name__ == "__main__":
unittest.main()
| [
"[email protected]"
]
| |
ab6b6c52579d74e382ce37f2ebe8f535a24dbc3f | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_59/455.py | 3e6cea92b28872ec185a3a22fdbde19106c357b6 | []
| no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,472 | py | #! /usr/bin/env python
import sys
f = file(sys.argv[1])
lines = [ln.strip() for ln in f.readlines()]
T = int(lines[0])
print('%s contains %i (T) test cases' % (sys.argv[1],T))
cases = []
ind = 1
for i in range(T):
#print(lines[ind], lines[ind].split(' '))
n,m = [int(k) for k in lines[ind].split(' ')]
#print(n,m)
ind = ind + 1
dirsExisting = lines[ind:ind+n]
ind = ind + n
dirsToBeCreated = lines[ind:ind+m]
ind = ind + m
cases.append([dirsExisting, dirsToBeCreated])
print(cases)
class directoryNode:
def __init__(self, name, parent, level):
self.name = name
self.parent = parent
self.level = level
self.folders = []
def has_folder(self, fname):
return any([folder.name == fname for folder in self.folders])
def create_folder(self, fname):
self.folders.append(directoryNode(fname,self,self.level+1))
def get_folder(self, fname):
return self.folders[[folder.name == fname for folder in self.folders].index(True)]
def __repr__(self):
return repr(self.parent) + '/' + self.name
def directoryProblem(iDirs, mDirs):
directoryRoot = directoryNode('',None,0)
def mkdirs(dirsClean):
creations = 0
currentDir = directoryRoot
dirs = sorted(dirsClean)
currentFolders = []
for d in dirs:
folders = d.split('/')[1:]
#print('d,folders',d,folders)
j = 0
while j < min(len(folders),len(currentFolders)) and folders[j] == currentFolders[j]:
j = j + 1
# rolling back required dirs
while len(currentFolders) > j:
currentDir = currentDir.parent
del currentFolders[-1]
#print('currentDir, currentFolders',currentDir, currentFolders)
for fold in folders[j:]:
if not currentDir.has_folder(fold):
currentDir.create_folder(fold)
creations = creations + 1
currentDir = currentDir.get_folder(fold)
currentFolders = folders
return creations
c1 = mkdirs(iDirs)
c2 = mkdirs(mDirs)
return c2
results = []
for t in range(T):
print('case %i of %i' % (t+1,T))
print(cases[t])
res = directoryProblem(*cases[t])
results.append('Case #%i: %i' % (t+1,res))
print(results[-1])
f = file(sys.argv[1].replace('.in','.out'),'w')
f.write('\n'.join(results))
f.close()
| [
"[email protected]"
]
| |
dc56ce3d672acadfd6e6b640ae3c4b0cf870703f | c28b15dfca3212f0f28727c7ad9d6193936598dc | /tasks/_iblrig_tasks_trainingChoiceWorld/_iblrig_tasks_trainingChoiceWorld.py | 7e7cf1d44f7c01a2c0531d5126b05a0bfb48d72f | [
"MIT"
]
| permissive | justinplittle/iblrig | 78d0703cda0b8623015f77939220128f20a997e6 | 0ee17a14633f0dc7b87f268f433a027e73fd6d57 | refs/heads/master | 2022-11-30T07:20:20.637002 | 2020-08-12T17:04:41 | 2020-08-12T17:04:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,435 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: Niccolò Bonacchi
# @Date: 2018-02-02 12:31:13
import logging
import matplotlib.pyplot as plt
from pybpodapi.protocol import Bpod, StateMachine
import online_plots as op
import task_settings
import user_settings
from iblrig.bpod_helper import BpodMessageCreator
from session_params import SessionParamHandler
from trial_params import TrialParamHandler
log = logging.getLogger("iblrig")
log.setLevel(logging.INFO)
global sph
sph = SessionParamHandler(task_settings, user_settings)
def bpod_loop_handler():
f.canvas.flush_events() # 100µs
def softcode_handler(data):
"""
Soft codes should work with resasonable latency considering our limiting
factor is the refresh rate of the screen which should be 16.667ms @ a frame
rate of 60Hz
1 : go_tone
2 : white_noise
"""
global sph
if data == 0:
sph.stop_sound()
elif data == 1:
sph.play_tone()
elif data == 2:
sph.play_noise()
elif data == 3:
sph.start_camera_recording()
# sph.OSC_CLIENT.send_message("/e", data)
# =============================================================================
# CONNECT TO BPOD
# =============================================================================
bpod = Bpod()
# Loop handler function is used to flush events for the online plotting
bpod.loop_handler = bpod_loop_handler
# Soft code handler function can run arbitrary code from within state machine
bpod.softcode_handler_function = softcode_handler
# Bpod message creator
msg = BpodMessageCreator(bpod)
re_reset = msg.rotary_encoder_reset()
bonsai_hide_stim = msg.bonsai_hide_stim()
bonsai_show_stim = msg.bonsai_show_stim()
bonsai_close_loop = msg.bonsai_close_loop()
bonsai_freeze_stim = msg.bonsai_freeze_stim()
sc_play_tone = msg.sound_card_play_idx(sph.GO_TONE_IDX)
sc_play_noise = msg.sound_card_play_idx(sph.WHITE_NOISE_IDX)
bpod = msg.return_bpod()
# =============================================================================
# TRIAL PARAMETERS AND STATE MACHINE
# =============================================================================
global tph
tph = TrialParamHandler(sph)
f, axes = op.make_fig(sph)
plt.pause(1)
for i in range(sph.NTRIALS): # Main loop
tph.next_trial()
log.info(f"Starting trial: {i + 1}")
# =============================================================================
# Start state machine definition
# =============================================================================
sma = StateMachine(bpod)
if i == 0: # First trial exception start camera
log.info("Waiting for camera pulses...")
sma.add_state(
state_name="trial_start",
state_timer=0,
state_change_conditions={"Port1In": "reset_rotary_encoder"},
output_actions=[("SoftCode", 3)],
) # sart camera
else:
sma.add_state(
state_name="trial_start",
state_timer=0, # ~100µs hardware irreducible delay
state_change_conditions={"Tup": "reset_rotary_encoder"},
output_actions=[tph.out_stop_sound],
) # stop all sounds
sma.add_state(
state_name="reset_rotary_encoder",
state_timer=0,
state_change_conditions={"Tup": "quiescent_period"},
output_actions=[("Serial1", re_reset)],
)
sma.add_state( # '>back' | '>reset_timer'
state_name="quiescent_period",
state_timer=tph.quiescent_period,
state_change_conditions={
"Tup": "stim_on",
tph.movement_left: "reset_rotary_encoder",
tph.movement_right: "reset_rotary_encoder",
},
output_actions=[],
)
sma.add_state(
state_name="stim_on",
state_timer=0.1,
state_change_conditions={
"Tup": "interactive_delay",
"BNC1High": "interactive_delay",
"BNC1Low": "interactive_delay",
},
output_actions=[("Serial1", bonsai_show_stim)],
)
sma.add_state(
state_name="interactive_delay",
state_timer=tph.interactive_delay,
state_change_conditions={"Tup": "play_tone"},
output_actions=[],
)
sma.add_state(
state_name="play_tone",
state_timer=0.1,
state_change_conditions={
"Tup": "reset2_rotary_encoder",
"BNC2High": "reset2_rotary_encoder",
},
output_actions=[tph.out_tone],
)
sma.add_state(
state_name="reset2_rotary_encoder",
state_timer=0,
state_change_conditions={"Tup": "closed_loop"},
output_actions=[("Serial1", re_reset)],
)
sma.add_state(
state_name="closed_loop",
state_timer=tph.response_window,
state_change_conditions={
"Tup": "no_go",
tph.event_error: "freeze_error",
tph.event_reward: "freeze_reward",
},
output_actions=[("Serial1", bonsai_close_loop)],
)
sma.add_state(
state_name="no_go",
state_timer=tph.iti_error,
state_change_conditions={"Tup": "exit_state"},
output_actions=[("Serial1", bonsai_hide_stim), tph.out_noise],
)
sma.add_state(
state_name="freeze_error",
state_timer=0,
state_change_conditions={"Tup": "error"},
output_actions=[("Serial1", bonsai_freeze_stim)],
)
sma.add_state(
state_name="error",
state_timer=tph.iti_error,
state_change_conditions={"Tup": "hide_stim"},
output_actions=[tph.out_noise],
)
sma.add_state(
state_name="freeze_reward",
state_timer=0,
state_change_conditions={"Tup": "reward"},
output_actions=[("Serial1", bonsai_freeze_stim)],
)
sma.add_state(
state_name="reward",
state_timer=tph.reward_valve_time,
state_change_conditions={"Tup": "correct"},
output_actions=[("Valve1", 255)],
)
sma.add_state(
state_name="correct",
state_timer=tph.iti_correct,
state_change_conditions={"Tup": "hide_stim"},
output_actions=[],
)
sma.add_state(
state_name="hide_stim",
state_timer=0.1,
state_change_conditions={
"Tup": "exit_state",
"BNC1High": "exit_state",
"BNC1Low": "exit_state",
},
output_actions=[("Serial1", bonsai_hide_stim)],
)
sma.add_state(
state_name="exit_state",
state_timer=0.5,
state_change_conditions={"Tup": "exit"},
output_actions=[],
)
# Send state machine description to Bpod device
bpod.send_state_machine(sma)
# Run state machine
if not bpod.run_state_machine(sma): # Locks until state machine 'exit' is reached
break
tph = tph.trial_completed(bpod.session.current_trial.export())
as_data = tph.save_ambient_sensor_data(bpod, sph.SESSION_RAW_DATA_FOLDER)
tph.show_trial_log()
# Update online plots
op.update_fig(f, axes, tph)
tph.check_sync_pulses()
stop_crit = tph.check_stop_criterions()
if stop_crit and sph.USE_AUTOMATIC_STOPPING_CRITERIONS:
if stop_crit == 1:
msg = "STOPPING CRITERIA Nº1: PLEASE STOP TASK AND REMOVE MOUSE\
\n< 400 trials in 45min"
f.patch.set_facecolor("xkcd:mint green")
elif stop_crit == 2:
msg = "STOPPING CRITERIA Nº2: PLEASE STOP TASK AND REMOVE MOUSE\
\nMouse seems to be inactive"
f.patch.set_facecolor("xkcd:yellow")
elif stop_crit == 3:
msg = "STOPPING CRITERIA Nº3: PLEASE STOP TASK AND REMOVE MOUSE\
\n> 90 minutes have passed since session start"
f.patch.set_facecolor("xkcd:red")
if not sph.SUBJECT_DISENGAGED_TRIGGERED and stop_crit:
patch = {
"SUBJECT_DISENGAGED_TRIGGERED": stop_crit,
"SUBJECT_DISENGAGED_TRIALNUM": i + 1,
}
sph.patch_settings_file(patch)
[log.warning(msg) for x in range(5)]
bpod.close()
# NOTE(review): direct execution only prints a marker string; the session
# logic above appears to be driven from elsewhere — confirm this stub is intentional.
if __name__ == "__main__":
    print("main")
| [
"[email protected]"
]
| |
043e9fa6f520efc3819ea417696518a68ba03ca1 | 7769cb512623c8d3ba96c68556b2cea5547df5fd | /configs/cascade_mask_rcnn_x101_64x4d_fpn_1x.py | 80a5ed6a2b31ff06816ce74d04570d82517229a0 | [
"MIT"
]
| permissive | JialeCao001/D2Det | 0e49f4c76e539d574e46b02f278242ca912c31ea | a76781ab624a1304f9c15679852a73b4b6770950 | refs/heads/master | 2022-12-05T01:00:08.498629 | 2020-09-04T11:33:26 | 2020-09-04T11:33:26 | 270,723,372 | 312 | 88 | MIT | 2020-07-08T23:53:23 | 2020-06-08T15:37:35 | Python | UTF-8 | Python | false | false | 8,061 | py | # model settings
# Cascade (Mask) R-CNN with a ResNeXt-101 64x4d backbone and FPN neck.
# Three cascade stages: each stage has its own bbox head below, with
# progressively tighter regression target_stds (0.1 -> 0.05 -> 0.033).
model = dict(
    type='CascadeRCNN',
    num_stages=3,  # matches the three entries in bbox_head and train_cfg.rcnn
    pretrained='open-mmlab://resnext101_64x4d',
    backbone=dict(
        type='ResNeXt',
        depth=101,
        groups=64,
        base_width=4,
        num_stages=4,
        out_indices=(0, 1, 2, 3),  # expose all four backbone stages to the FPN
        frozen_stages=1,
        style='pytorch'),
    neck=dict(
        type='FPN',
        in_channels=[256, 512, 1024, 2048],  # channel widths of the four backbone outputs
        out_channels=256,
        num_outs=5),
    rpn_head=dict(
        type='RPNHead',
        in_channels=256,
        feat_channels=256,
        anchor_scales=[8],
        anchor_ratios=[0.5, 1.0, 2.0],
        anchor_strides=[4, 8, 16, 32, 64],  # one stride per FPN level (num_outs=5)
        target_means=[.0, .0, .0, .0],
        target_stds=[1.0, 1.0, 1.0, 1.0],
        loss_cls=dict(
            type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0),
        loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)),
    bbox_roi_extractor=dict(
        type='SingleRoIExtractor',
        roi_layer=dict(type='RoIAlign', out_size=7, sample_num=2),
        out_channels=256,
        featmap_strides=[4, 8, 16, 32]),
    # One bbox head per cascade stage; all share the same architecture and
    # differ only in regression target_stds.
    bbox_head=[
        dict(
            type='SharedFCBBoxHead',
            num_fcs=2,
            in_channels=256,
            fc_out_channels=1024,
            roi_feat_size=7,
            num_classes=81,  # presumably COCO's 80 classes + background (mmdet 1.x convention) — confirm
            target_means=[0., 0., 0., 0.],
            target_stds=[0.1, 0.1, 0.2, 0.2],
            reg_class_agnostic=True,
            loss_cls=dict(
                type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
            loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)),
        dict(
            type='SharedFCBBoxHead',
            num_fcs=2,
            in_channels=256,
            fc_out_channels=1024,
            roi_feat_size=7,
            num_classes=81,
            target_means=[0., 0., 0., 0.],
            target_stds=[0.05, 0.05, 0.1, 0.1],
            reg_class_agnostic=True,
            loss_cls=dict(
                type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
            loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)),
        dict(
            type='SharedFCBBoxHead',
            num_fcs=2,
            in_channels=256,
            fc_out_channels=1024,
            roi_feat_size=7,
            num_classes=81,
            target_means=[0., 0., 0., 0.],
            target_stds=[0.033, 0.033, 0.067, 0.067],
            reg_class_agnostic=True,
            loss_cls=dict(
                type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
            loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))
    ],
    mask_roi_extractor=dict(
        type='SingleRoIExtractor',
        roi_layer=dict(type='RoIAlign', out_size=14, sample_num=2),  # larger RoI (14x14) for masks than for bboxes (7x7)
        out_channels=256,
        featmap_strides=[4, 8, 16, 32]),
    mask_head=dict(
        type='FCNMaskHead',
        num_convs=4,
        in_channels=256,
        conv_out_channels=256,
        num_classes=81,
        loss_mask=dict(
            type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)))
# model training and testing settings
train_cfg = dict(
    rpn=dict(
        assigner=dict(
            type='MaxIoUAssigner',
            pos_iou_thr=0.7,
            neg_iou_thr=0.3,
            min_pos_iou=0.3,
            ignore_iof_thr=-1),  # -1 disables the ignore-overlap threshold
        sampler=dict(
            type='RandomSampler',
            num=256,
            pos_fraction=0.5,
            neg_pos_ub=-1,
            add_gt_as_proposals=False),
        allowed_border=0,
        pos_weight=-1,  # -1: use the default positive-sample weight
        debug=False),
    rpn_proposal=dict(
        nms_across_levels=False,
        nms_pre=2000,
        nms_post=2000,
        max_num=2000,
        nms_thr=0.7,
        min_bbox_size=0),
    # One entry per cascade stage; the assigner IoU thresholds tighten
    # stage by stage: 0.5 -> 0.6 -> 0.7.
    rcnn=[
        dict(
            assigner=dict(
                type='MaxIoUAssigner',
                pos_iou_thr=0.5,
                neg_iou_thr=0.5,
                min_pos_iou=0.5,
                ignore_iof_thr=-1),
            sampler=dict(
                type='RandomSampler',
                num=512,
                pos_fraction=0.25,
                neg_pos_ub=-1,
                add_gt_as_proposals=True),
            mask_size=28,
            pos_weight=-1,
            debug=False),
        dict(
            assigner=dict(
                type='MaxIoUAssigner',
                pos_iou_thr=0.6,
                neg_iou_thr=0.6,
                min_pos_iou=0.6,
                ignore_iof_thr=-1),
            sampler=dict(
                type='RandomSampler',
                num=512,
                pos_fraction=0.25,
                neg_pos_ub=-1,
                add_gt_as_proposals=True),
            mask_size=28,
            pos_weight=-1,
            debug=False),
        dict(
            assigner=dict(
                type='MaxIoUAssigner',
                pos_iou_thr=0.7,
                neg_iou_thr=0.7,
                min_pos_iou=0.7,
                ignore_iof_thr=-1),
            sampler=dict(
                type='RandomSampler',
                num=512,
                pos_fraction=0.25,
                neg_pos_ub=-1,
                add_gt_as_proposals=True),
            mask_size=28,
            pos_weight=-1,
            debug=False)
    ],
    stage_loss_weights=[1, 0.5, 0.25])  # later cascade stages contribute less to the total loss
test_cfg = dict(
    rpn=dict(
        nms_across_levels=False,
        nms_pre=1000,  # fewer proposals kept at test time than during training (2000)
        nms_post=1000,
        max_num=1000,
        nms_thr=0.7,
        min_bbox_size=0),
    rcnn=dict(
        score_thr=0.05,
        nms=dict(type='nms', iou_thr=0.5),
        max_per_img=100,
        mask_thr_binary=0.5))
# dataset settings
dataset_type = 'CocoDataset'
data_root = 'data/coco/'
# Per-channel normalization constants consumed by the 'Normalize' pipeline
# steps below via **img_norm_cfg.
img_norm_cfg = dict(
    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(type='LoadAnnotations', with_bbox=True, with_mask=True),  # masks needed for the mask head
    dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
    dict(type='RandomFlip', flip_ratio=0.5),
    dict(type='Normalize', **img_norm_cfg),
    dict(type='Pad', size_divisor=32),  # pad to a multiple of 32 for the FPN strides
    dict(type='DefaultFormatBundle'),
    dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']),
]
test_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(
        type='MultiScaleFlipAug',
        img_scale=(1333, 800),
        flip=False,  # single scale, no flip augmentation at test time
        transforms=[
            dict(type='Resize', keep_ratio=True),
            dict(type='RandomFlip'),
            dict(type='Normalize', **img_norm_cfg),
            dict(type='Pad', size_divisor=32),
            dict(type='ImageToTensor', keys=['img']),
            dict(type='Collect', keys=['img']),
        ])
]
data = dict(
    imgs_per_gpu=2,
    workers_per_gpu=2,
    train=dict(
        type=dataset_type,
        ann_file=data_root + 'annotations/instances_train2017.json',
        img_prefix=data_root + 'train2017/',
        pipeline=train_pipeline),
    # val and test both point at val2017 annotations.
    val=dict(
        type=dataset_type,
        ann_file=data_root + 'annotations/instances_val2017.json',
        img_prefix=data_root + 'val2017/',
        pipeline=test_pipeline),
    test=dict(
        type=dataset_type,
        ann_file=data_root + 'annotations/instances_val2017.json',
        img_prefix=data_root + 'val2017/',
        pipeline=test_pipeline))
evaluation = dict(interval=1, metric=['bbox', 'segm'])  # evaluate boxes and masks every epoch
# optimizer
optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))  # L2 gradient clipping
# learning policy
lr_config = dict(
    policy='step',
    warmup='linear',
    warmup_iters=500,
    warmup_ratio=1.0 / 3,  # start warmup at lr/3
    step=[8, 11])  # decay milestones (units follow mmdet's 'step' policy; total_epochs=12 below)
checkpoint_config = dict(interval=1)
# yapf:disable
log_config = dict(
    interval=50,
    hooks=[
        dict(type='TextLoggerHook'),
        # dict(type='TensorboardLoggerHook')
    ])
# yapf:enable
# runtime settings
total_epochs = 12
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = './work_dirs/cascade_mask_rcnn_x101_64x4d_fpn_1x'
load_from = None
resume_from = None
workflow = [('train', 1)]
| [
"[email protected]"
]
| |
dba1a03904559ee7acc59f6c910257a5156bf9d0 | 52c4444b7a8e1a313d847ba5f0474f5a429be4bd | /celescope/fusion/multi_fusion.py | b82cd4bece7083905eb8b0e0063018ff9117df0c | [
"MIT"
]
| permissive | JING-XINXING/CeleScope | 98d0d018f3689dbe355679c1b8c06f8d796c296d | d401e01bdf15c8eeb71bddede484ed8d4f189dcd | refs/heads/master | 2023-05-07T11:47:04.133216 | 2021-05-28T10:14:53 | 2021-05-28T10:14:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 999 | py | from celescope.fusion.__init__ import __ASSAY__
from celescope.tools.multi import Multi
class Multi_fusion(Multi):
    """Multi-sample driver for the fusion assay: assembles and dispatches
    the per-sample shell commands for each pipeline step."""
    def star_fusion(self, sample):
        """Assemble the star_fusion command for *sample* and hand it to process_cmd."""
        step = 'star_fusion'
        base_cmd = self.get_cmd_line(step, sample)
        # Clean read-2 fastq produced by the upstream cutadapt step.
        fastq = f'{self.outdir_dic[sample]["cutadapt"]}/{sample}_clean_2.fq{self.fq_suffix}'
        full_cmd = f'{base_cmd} --fq {fastq} '
        self.process_cmd(full_cmd, step, sample, m=self.args.starMem, x=self.args.thread)
    def count_fusion(self, sample):
        """Assemble the count_fusion command (sorted BAM from star_fusion plus the
        sample's match directory) and hand it to process_cmd."""
        step = 'count_fusion'
        base_cmd = self.get_cmd_line(step, sample)
        sorted_bam = f'{self.outdir_dic[sample]["star_fusion"]}/{sample}_Aligned.sortedByCoord.out.bam'
        full_cmd = f'{base_cmd} --bam {sorted_bam} --match_dir {self.col4_dict[sample]} '
        self.process_cmd(full_cmd, step, sample, m=15, x=1)
def main():
    """Entry point: construct the fusion multi-sample runner and execute it."""
    Multi_fusion(__ASSAY__).run()
# Run the pipeline when the module is executed directly.
if __name__ == '__main__':
    main()
| [
"[email protected]"
]
| |
ad0f463e50fc0f5b7824e19aca588087c8539085 | 80b7f2a10506f70477d8720e229d7530da2eff5d | /uhd_restpy/testplatform/sessions/ixnetwork/topology/bfdv6interface_b9a91920db1b70c8c6410d2de0b438d3.py | 949dbb8037a1e6340226858d1613dfa36d82a596 | [
"MIT"
]
| permissive | OpenIxia/ixnetwork_restpy | 00fdc305901aa7e4b26e4000b133655e2d0e346a | c8ecc779421bffbc27c906c1ea51af3756d83398 | refs/heads/master | 2023-08-10T02:21:38.207252 | 2023-07-19T14:14:57 | 2023-07-19T14:14:57 | 174,170,555 | 26 | 16 | MIT | 2023-02-02T07:02:43 | 2019-03-06T15:27:20 | Python | UTF-8 | Python | false | false | 52,891 | py | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import sys
from uhd_restpy.base import Base
from uhd_restpy.files import Files
if sys.version_info >= (3, 5):
from typing import List, Any, Union
class Bfdv6Interface(Base):
    """BFDv6 Interface level Configuration

    The Bfdv6Interface class encapsulates a list of bfdv6Interface resources that are managed by the user.
    A list of resources can be retrieved from the server using the Bfdv6Interface.find() method.
    The list can be managed by using the Bfdv6Interface.add() and Bfdv6Interface.remove() methods.
    """
    # No per-instance __dict__; all state lives on the Base machinery.
    __slots__ = ()
    # SDM resource name for this node.
    _SDM_NAME = 'bfdv6Interface'
    # Maps Python-facing attribute names to the server-side SDM attribute names.
    _SDM_ATT_MAP = {
        'Active': 'active',
        'AggregateBfdSession': 'aggregateBfdSession',
        'ConfigureEchoSourceIp': 'configureEchoSourceIp',
        'ConnectedVia': 'connectedVia',
        'Count': 'count',
        'DescriptiveName': 'descriptiveName',
        'EchoRxInterval': 'echoRxInterval',
        'EchoTimeOut': 'echoTimeOut',
        'EchoTxInterval': 'echoTxInterval',
        'EnableControlPlaneIndependent': 'enableControlPlaneIndependent',
        'EnableDemandMode': 'enableDemandMode',
        'Errors': 'errors',
        'FlapTxIntervals': 'flapTxIntervals',
        'IpDiffServ': 'ipDiffServ',
        'LocalRouterId': 'localRouterId',
        'MinRxInterval': 'minRxInterval',
        'Multiplier': 'multiplier',
        'Name': 'name',
        'NoOfSessions': 'noOfSessions',
        'PollInterval': 'pollInterval',
        'SessionStatus': 'sessionStatus',
        'SourceIp6': 'sourceIp6',
        'StackedLayers': 'stackedLayers',
        'StateCounts': 'stateCounts',
        'Status': 'status',
        'TimeoutMultiplier': 'timeoutMultiplier',
        'TxInterval': 'txInterval',
        'Vni': 'vni',
    }
    # Legal values for enum-typed attributes, keyed by SDM attribute name.
    _SDM_ENUM_MAP = {
        'status': ['configured', 'error', 'mixed', 'notStarted', 'started', 'starting', 'stopping'],
    }
    def __init__(self, parent, list_op=False):
        """Attach this resource container under *parent*; *list_op* is forwarded to Base unchanged."""
        super(Bfdv6Interface, self).__init__(parent, list_op)
@property
def Bfdv6Session(self):
"""
Returns
-------
- obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.bfdv6session_0227b1efa1d435dd43ed809b84abf3ba.Bfdv6Session): An instance of the Bfdv6Session class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from uhd_restpy.testplatform.sessions.ixnetwork.topology.bfdv6session_0227b1efa1d435dd43ed809b84abf3ba import Bfdv6Session
if len(self._object_properties) > 0:
if self._properties.get('Bfdv6Session', None) is not None:
return self._properties.get('Bfdv6Session')
return Bfdv6Session(self)._select()
@property
def LearnedInfo(self):
"""
Returns
-------
- obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.learnedinfo.learnedinfo_ff4d5e5643a63bccb40b6cf64fc58100.LearnedInfo): An instance of the LearnedInfo class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from uhd_restpy.testplatform.sessions.ixnetwork.topology.learnedinfo.learnedinfo_ff4d5e5643a63bccb40b6cf64fc58100 import LearnedInfo
if len(self._object_properties) > 0:
if self._properties.get('LearnedInfo', None) is not None:
return self._properties.get('LearnedInfo')
return LearnedInfo(self)
@property
def Active(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Activate/Deactivate Configuration
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Active']))
@property
def AggregateBfdSession(self):
# type: () -> bool
"""
Returns
-------
- bool: If enabled, all interfaces except on VNI 0 will be disabled and grayed-out.
"""
return self._get_attribute(self._SDM_ATT_MAP['AggregateBfdSession'])
@AggregateBfdSession.setter
def AggregateBfdSession(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['AggregateBfdSession'], value)
@property
def ConfigureEchoSourceIp(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Selecting this check box enables the ability to configure the source address IP of echo message
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ConfigureEchoSourceIp']))
@property
def ConnectedVia(self):
# type: () -> List[str]
"""DEPRECATED
Returns
-------
- list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*]): List of layers this layer is used to connect with to the wire.
"""
return self._get_attribute(self._SDM_ATT_MAP['ConnectedVia'])
@ConnectedVia.setter
def ConnectedVia(self, value):
# type: (List[str]) -> None
self._set_attribute(self._SDM_ATT_MAP['ConnectedVia'], value)
@property
def Count(self):
# type: () -> int
"""
Returns
-------
- number: Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group.
"""
return self._get_attribute(self._SDM_ATT_MAP['Count'])
@property
def DescriptiveName(self):
# type: () -> str
"""
Returns
-------
- str: Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offer more context.
"""
return self._get_attribute(self._SDM_ATT_MAP['DescriptiveName'])
@property
def EchoRxInterval(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): The minimum interval, in milliseconds, between received BFD Echo packets that this interface is capable of supporting. If this value is zero, the transmitting system does not support the receipt of BFD Echo packets
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EchoRxInterval']))
@property
def EchoTimeOut(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): The interval, in milliseconds, that the interface waits for a response to the last Echo packet sent out
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EchoTimeOut']))
@property
def EchoTxInterval(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): The minimum interval, in milliseconds, that the interface would like to use when transmitting BFD Echo packets
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EchoTxInterval']))
@property
def EnableControlPlaneIndependent(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): This check box enables Control Plane Independent Mode. If set, the interface's BFD is implemented in the forwarding plane and can continue to function through disruptions in the control plane
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EnableControlPlaneIndependent']))
@property
def EnableDemandMode(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): This check box enables Demand Mode. In this mode, it is assumed the interface has an independent way of verifying it has connectivity to the other system. Once a BFD session is established, the systems stop sending BFD Control packets, except when either system feels the need to verify connectivity explicitly. In this case, a short sequence of BFD Control packets is sent
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EnableDemandMode']))
@property
def Errors(self):
"""
Returns
-------
- list(dict(arg1:str[None | /api/v1/sessions/1/ixnetwork//.../*],arg2:list[str])): A list of errors that have occurred
"""
return self._get_attribute(self._SDM_ATT_MAP['Errors'])
@property
def FlapTxIntervals(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): The number of Tx packets sent from device after which session flaps for BFD. A value of zero means no flapping
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['FlapTxIntervals']))
@property
def IpDiffServ(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): IP DiffServ/TOSByte (Dec)
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['IpDiffServ']))
@property
def LocalRouterId(self):
# type: () -> List[str]
"""
Returns
-------
- list(str): The BFD Router ID value, in IPv4 format.
"""
return self._get_attribute(self._SDM_ATT_MAP['LocalRouterId'])
@property
def MinRxInterval(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): The minimum interval, in milliseconds, between received BFD Control packets that this interface is capable of supporting
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['MinRxInterval']))
@property
def Multiplier(self):
# type: () -> int
"""
Returns
-------
- number: Number of layer instances per parent instance (multiplier)
"""
return self._get_attribute(self._SDM_ATT_MAP['Multiplier'])
@Multiplier.setter
def Multiplier(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP['Multiplier'], value)
@property
def Name(self):
# type: () -> str
"""
Returns
-------
- str: Name of NGPF element, guaranteed to be unique in Scenario
"""
return self._get_attribute(self._SDM_ATT_MAP['Name'])
@Name.setter
def Name(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['Name'], value)
@property
def NoOfSessions(self):
# type: () -> int
"""
Returns
-------
- number: The number of configured BFD sessions
"""
return self._get_attribute(self._SDM_ATT_MAP['NoOfSessions'])
@NoOfSessions.setter
def NoOfSessions(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP['NoOfSessions'], value)
@property
def PollInterval(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): The interval, in milliseconds, between exchanges of Control Messages in Demand Mode
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['PollInterval']))
@property
def SessionStatus(self):
# type: () -> List[str]
"""
Returns
-------
- list(str[down | notStarted | up]): Current state of protocol session: Not Started - session negotiation not started, the session is not active yet. Down - actively trying to bring up a protocol session, but negotiation is didn't successfully complete (yet). Up - session came up successfully.
"""
return self._get_attribute(self._SDM_ATT_MAP['SessionStatus'])
@property
def SourceIp6(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): If Configure Echo Source-IP is selected, the IPv6 source address of the Echo Message
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['SourceIp6']))
@property
def StackedLayers(self):
# type: () -> List[str]
"""
Returns
-------
- list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*]): List of secondary (many to one) child layer protocols
"""
return self._get_attribute(self._SDM_ATT_MAP['StackedLayers'])
@StackedLayers.setter
def StackedLayers(self, value):
# type: (List[str]) -> None
self._set_attribute(self._SDM_ATT_MAP['StackedLayers'], value)
@property
def StateCounts(self):
"""
Returns
-------
- dict(total:number,notStarted:number,down:number,up:number): A list of values that indicates the total number of sessions, the number of sessions not started, the number of sessions down and the number of sessions that are up
"""
return self._get_attribute(self._SDM_ATT_MAP['StateCounts'])
@property
def Status(self):
# type: () -> str
"""
Returns
-------
- str(configured | error | mixed | notStarted | started | starting | stopping): Running status of associated network element. Once in Started state, protocol sessions will begin to negotiate.
"""
return self._get_attribute(self._SDM_ATT_MAP['Status'])
@property
def TimeoutMultiplier(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): The negotiated transmit interval, multiplied by this value, provides the detection time for the interface
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['TimeoutMultiplier']))
@property
def TxInterval(self):
# type: () -> 'Multivalue'
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): The minimum interval, in milliseconds, that the interface would like to use when transmitting BFD Control packets
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['TxInterval']))
@property
def Vni(self):
# type: () -> List[int]
"""
Returns
-------
- list(number): Corresponding VXLAN Protocol VNI.
"""
return self._get_attribute(self._SDM_ATT_MAP['Vni'])
def update(self, AggregateBfdSession=None, ConnectedVia=None, Multiplier=None, Name=None, NoOfSessions=None, StackedLayers=None):
# type: (bool, List[str], int, str, int, List[str]) -> Bfdv6Interface
"""Updates bfdv6Interface resource on the server.
This method has some named parameters with a type: obj (Multivalue).
The Multivalue class has documentation that details the possible values for those named parameters.
Args
----
- AggregateBfdSession (bool): If enabled, all interfaces except on VNI 0 will be disabled and grayed-out.
- ConnectedVia (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of layers this layer is used to connect with to the wire.
- Multiplier (number): Number of layer instances per parent instance (multiplier)
- Name (str): Name of NGPF element, guaranteed to be unique in Scenario
- NoOfSessions (number): The number of configured BFD sessions
- StackedLayers (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of secondary (many to one) child layer protocols
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def add(self, AggregateBfdSession=None, ConnectedVia=None, Multiplier=None, Name=None, NoOfSessions=None, StackedLayers=None):
# type: (bool, List[str], int, str, int, List[str]) -> Bfdv6Interface
"""Adds a new bfdv6Interface resource on the server and adds it to the container.
Args
----
- AggregateBfdSession (bool): If enabled, all interfaces except on VNI 0 will be disabled and grayed-out.
- ConnectedVia (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of layers this layer is used to connect with to the wire.
- Multiplier (number): Number of layer instances per parent instance (multiplier)
- Name (str): Name of NGPF element, guaranteed to be unique in Scenario
- NoOfSessions (number): The number of configured BFD sessions
- StackedLayers (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of secondary (many to one) child layer protocols
Returns
-------
- self: This instance with all currently retrieved bfdv6Interface resources using find and the newly added bfdv6Interface resources available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._create(self._map_locals(self._SDM_ATT_MAP, locals()))
def remove(self):
"""Deletes all the contained bfdv6Interface resources in this instance from the server.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
self._delete()
def find(self, AggregateBfdSession=None, ConnectedVia=None, Count=None, DescriptiveName=None, Errors=None, LocalRouterId=None, Multiplier=None, Name=None, NoOfSessions=None, SessionStatus=None, StackedLayers=None, StateCounts=None, Status=None, Vni=None):
"""Finds and retrieves bfdv6Interface resources from the server.
All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve bfdv6Interface resources from the server.
To retrieve an exact match ensure the parameter value starts with ^ and ends with $
By default the find method takes no parameters and will retrieve all bfdv6Interface resources from the server.
Args
----
- AggregateBfdSession (bool): If enabled, all interfaces except on VNI 0 will be disabled and grayed-out.
- ConnectedVia (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of layers this layer is used to connect with to the wire.
- Count (number): Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group.
- DescriptiveName (str): Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offer more context.
- Errors (list(dict(arg1:str[None | /api/v1/sessions/1/ixnetwork//.../*],arg2:list[str]))): A list of errors that have occurred
- LocalRouterId (list(str)): The BFD Router ID value, in IPv4 format.
- Multiplier (number): Number of layer instances per parent instance (multiplier)
- Name (str): Name of NGPF element, guaranteed to be unique in Scenario
- NoOfSessions (number): The number of configured BFD sessions
- SessionStatus (list(str[down | notStarted | up])): Current state of protocol session: Not Started - session negotiation not started, the session is not active yet. Down - actively trying to bring up a protocol session, but negotiation is didn't successfully complete (yet). Up - session came up successfully.
- StackedLayers (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of secondary (many to one) child layer protocols
- StateCounts (dict(total:number,notStarted:number,down:number,up:number)): A list of values that indicates the total number of sessions, the number of sessions not started, the number of sessions down and the number of sessions that are up
- Status (str(configured | error | mixed | notStarted | started | starting | stopping)): Running status of associated network element. Once in Started state, protocol sessions will begin to negotiate.
- Vni (list(number)): Corresponding VXLAN Protocol VNI.
Returns
-------
- self: This instance with matching bfdv6Interface resources retrieved from the server available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
def read(self, href):
"""Retrieves a single instance of bfdv6Interface data from the server.
Args
----
- href (str): An href to the instance to be retrieved
Returns
-------
- self: This instance with the bfdv6Interface resources from the server available through an iterator or index
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
return self._read(href)
def Abort(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Executes the abort operation on the server.
Abort CPF control plane (equals to demote to kUnconfigured state).
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
abort(async_operation=bool)
---------------------------
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
abort(SessionIndices=list, async_operation=bool)
------------------------------------------------
- SessionIndices (list(number)): This parameter requires an array of session numbers 1 2 3
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
abort(SessionIndices=string, async_operation=bool)
--------------------------------------------------
- SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('abort', payload=payload, response_object=None)
def ClearLearnedInfo(self, *args, **kwargs):
    # type: (*Any, **Any) -> Union[List[str], None]
    """Executes the clearLearnedInfo operation on the server.

    Clear Learned Info.

    The IxNetwork model allows multiple signatures with the same name
    while Python does not:

    clearLearnedInfo(async_operation=bool)
    clearLearnedInfo(SessionIndices=list, async_operation=bool)
    clearLearnedInfo(SessionIndices=string, async_operation=bool)
    clearLearnedInfo(Arg2=list, async_operation=bool) -> list

    - SessionIndices: session numbers, either as a list (1 2 3) or as a
      range string (1-4;6;7-12)
    - Arg2 (list(number)): indices into the protocol plugin; an empty
      list selects every instance in the plugin
    - async_operation (bool=False): True to execute the operation
      asynchronously; any subsequent REST API calls made through the
      Connection class will block until the operation is complete.
    - Returns list(str): ID to associate each async action invocation

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {'Arg1': self}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('clearLearnedInfo', payload=payload, response_object=None)
def DisableDemandMode(self, *args, **kwargs):
    # type: (*Any, **Any) -> Union[List[str], None]
    """Executes the disableDemandMode operation on the server.

    Disable Demand Mode.

    The IxNetwork model allows multiple signatures with the same name
    while Python does not:

    disableDemandMode(Arg2=list, Arg3=enum, async_operation=bool) -> list
    disableDemandMode(Arg2=enum, async_operation=bool) -> list

    - Arg2 (list(number)): indices into the protocol plugin; an empty
      list selects every instance in the plugin
    - Arg2/Arg3 (str(ospf | ospfv3 | bgp | ldp | rsvp | isis | pim | bfd)):
      session used by protocol
    - async_operation (bool=False): True to execute the operation
      asynchronously; any subsequent REST API calls made through the
      Connection class will block until the operation is complete.
    - Returns list(str): ID to associate each async action invocation

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {'Arg1': self.href}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('disableDemandMode', payload=payload, response_object=None)
def EnableDemandMode(self, *args, **kwargs):
    # type: (*Any, **Any) -> Union[List[str], None]
    """Executes the enableDemandMode operation on the server.

    Enable Demand Mode.

    The IxNetwork model allows multiple signatures with the same name
    while Python does not:

    enableDemandMode(Arg2=list, Arg3=enum, async_operation=bool) -> list
    enableDemandMode(Arg2=enum, async_operation=bool) -> list

    - Arg2 (list(number)): indices into the protocol plugin; an empty
      list selects every instance in the plugin
    - Arg2/Arg3 (str(ospf | ospfv3 | bgp | ldp | rsvp | isis | pim | bfd)):
      session used by protocol
    - async_operation (bool=False): True to execute the operation
      asynchronously; any subsequent REST API calls made through the
      Connection class will block until the operation is complete.
    - Returns list(str): ID to associate each async action invocation

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {'Arg1': self.href}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('enableDemandMode', payload=payload, response_object=None)
def GetLearnedInfo(self, *args, **kwargs):
    # type: (*Any, **Any) -> Union[List[str], None]
    """Executes the getLearnedInfo operation on the server.

    Get Learned Info.

    The IxNetwork model allows multiple signatures with the same name
    while Python does not:

    getLearnedInfo(async_operation=bool)
    getLearnedInfo(SessionIndices=list, async_operation=bool)
    getLearnedInfo(SessionIndices=string, async_operation=bool)
    getLearnedInfo(Arg2=list, async_operation=bool) -> list

    - SessionIndices: session numbers, either as a list (1 2 3) or as a
      range string (1-4;6;7-12)
    - Arg2 (list(number)): indices into the protocol plugin; an empty
      list selects every instance in the plugin
    - async_operation (bool=False): True to execute the operation
      asynchronously; any subsequent REST API calls made through the
      Connection class will block until the operation is complete.
    - Returns list(str): ID to associate each async action invocation

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {'Arg1': self}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('getLearnedInfo', payload=payload, response_object=None)
def InitiatePoll(self, *args, **kwargs):
    # type: (*Any, **Any) -> Union[List[str], None]
    """Executes the initiatePoll operation on the server.

    Initiate Poll.

    The IxNetwork model allows multiple signatures with the same name
    while Python does not:

    initiatePoll(Arg2=list, Arg3=enum, async_operation=bool) -> list
    initiatePoll(Arg2=enum, async_operation=bool) -> list

    - Arg2 (list(number)): indices into the protocol plugin; an empty
      list selects every instance in the plugin
    - Arg2/Arg3 (str(ospf | ospfv3 | bgp | ldp | rsvp | isis | pim | bfd)):
      session used by protocol
    - async_operation (bool=False): True to execute the operation
      asynchronously; any subsequent REST API calls made through the
      Connection class will block until the operation is complete.
    - Returns list(str): ID to associate each async action invocation

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {'Arg1': self.href}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('initiatePoll', payload=payload, response_object=None)
def RestartDown(self, *args, **kwargs):
    # type: (*Any, **Any) -> None
    """Executes the restartDown operation on the server.

    Stop and start interfaces and sessions that are in Down state.

    The IxNetwork model allows multiple signatures with the same name
    while Python does not:

    restartDown(async_operation=bool)
    restartDown(SessionIndices=list, async_operation=bool)
    restartDown(SessionIndices=string, async_operation=bool)

    - SessionIndices: session numbers, either as a list (1 2 3) or as a
      range string (1-4;6;7-12)
    - async_operation (bool=False): True to execute the operation
      asynchronously; any subsequent REST API calls made through the
      Connection class will block until the operation is complete.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {'Arg1': self}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('restartDown', payload=payload, response_object=None)
def ResumePDU(self, *args, **kwargs):
    # type: (*Any, **Any) -> Union[List[str], None]
    """Executes the resumePDU operation on the server.

    Resume PDU.

    The IxNetwork model allows multiple signatures with the same name
    while Python does not:

    resumePDU(Arg2=list, Arg3=enum, async_operation=bool) -> list
    resumePDU(Arg2=enum, async_operation=bool) -> list

    - Arg2 (list(number)): indices into the protocol plugin; an empty
      list selects every instance in the plugin
    - Arg2/Arg3 (str(ospf | ospfv3 | bgp | ldp | rsvp | isis | pim | bfd)):
      session used by protocol
    - async_operation (bool=False): True to execute the operation
      asynchronously; any subsequent REST API calls made through the
      Connection class will block until the operation is complete.
    - Returns list(str): ID to associate each async action invocation

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {'Arg1': self.href}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('resumePDU', payload=payload, response_object=None)
def SetAdminDown(self, *args, **kwargs):
    # type: (*Any, **Any) -> Union[List[str], None]
    """Executes the setAdminDown operation on the server.

    Set Admin Down.

    The IxNetwork model allows multiple signatures with the same name
    while Python does not:

    setAdminDown(Arg2=list, Arg3=enum, async_operation=bool) -> list
    setAdminDown(Arg2=enum, async_operation=bool) -> list

    - Arg2 (list(number)): indices into the protocol plugin; an empty
      list selects every instance in the plugin
    - Arg2/Arg3 (str(ospf | ospfv3 | bgp | ldp | rsvp | isis | pim | bfd)):
      session used by protocol
    - async_operation (bool=False): True to execute the operation
      asynchronously; any subsequent REST API calls made through the
      Connection class will block until the operation is complete.
    - Returns list(str): ID to associate each async action invocation

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {'Arg1': self.href}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('setAdminDown', payload=payload, response_object=None)
def SetAdminUp(self, *args, **kwargs):
    # type: (*Any, **Any) -> Union[List[str], None]
    """Executes the setAdminUp operation on the server.

    Set Admin Up.

    The IxNetwork model allows multiple signatures with the same name
    while Python does not:

    setAdminUp(Arg2=list, Arg3=enum, async_operation=bool) -> list
    setAdminUp(Arg2=enum, async_operation=bool) -> list

    - Arg2 (list(number)): indices into the protocol plugin; an empty
      list selects every instance in the plugin
    - Arg2/Arg3 (str(ospf | ospfv3 | bgp | ldp | rsvp | isis | pim | bfd)):
      session used by protocol
    - async_operation (bool=False): True to execute the operation
      asynchronously; any subsequent REST API calls made through the
      Connection class will block until the operation is complete.
    - Returns list(str): ID to associate each async action invocation

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {'Arg1': self.href}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('setAdminUp', payload=payload, response_object=None)
def SetDiagnosticState(self, *args, **kwargs):
    # type: (*Any, **Any) -> Union[List[str], None]
    """Executes the setDiagnosticState operation on the server.

    Set Diagnostic State.

    The IxNetwork model allows multiple signatures with the same name
    while Python does not:

    setDiagnosticState(Arg2=list, Arg3=enum, Arg4=enum, async_operation=bool) -> list
    setDiagnosticState(Arg2=enum, Arg3=enum, async_operation=bool) -> list

    - Arg2 (list(number)): indices into the protocol plugin; an empty
      list selects every instance in the plugin
    - Arg2/Arg3 (str(ospf | ospfv3 | bgp | ldp | rsvp | isis | pim | bfd)):
      session used by protocol
    - Arg3/Arg4 (str(controlDetectionTimeExpired | echoFunctionFailed |
      neighbourSignaledSessionDown | forwardingPlaneReset | pathDown |
      concatenatedPathDown | administrativelyDown |
      reverseConcatenatedPathDown | reserved)): diagnostic code
    - async_operation (bool=False): True to execute the operation
      asynchronously; any subsequent REST API calls made through the
      Connection class will block until the operation is complete.
    - Returns list(str): ID to associate each async action invocation

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {'Arg1': self.href}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('setDiagnosticState', payload=payload, response_object=None)
def Start(self, *args, **kwargs):
    # type: (*Any, **Any) -> None
    """Executes the start operation on the server.

    Start CPF control plane (equals to promote to negotiated state).

    The IxNetwork model allows multiple signatures with the same name
    while Python does not:

    start(async_operation=bool)
    start(SessionIndices=list, async_operation=bool)
    start(SessionIndices=string, async_operation=bool)

    - SessionIndices: session numbers, either as a list (1 2 3) or as a
      range string (1-4;6;7-12)
    - async_operation (bool=False): True to execute the operation
      asynchronously; any subsequent REST API calls made through the
      Connection class will block until the operation is complete.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {'Arg1': self}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('start', payload=payload, response_object=None)
def Stop(self, *args, **kwargs):
    # type: (*Any, **Any) -> None
    """Executes the stop operation on the server.

    Stop CPF control plane (equals to demote to PreValidated-DoDDone state).

    The IxNetwork model allows multiple signatures with the same name
    while Python does not:

    stop(async_operation=bool)
    stop(SessionIndices=list, async_operation=bool)
    stop(SessionIndices=string, async_operation=bool)

    - SessionIndices: session numbers, either as a list (1 2 3) or as a
      range string (1-4;6;7-12)
    - async_operation (bool=False): True to execute the operation
      asynchronously; any subsequent REST API calls made through the
      Connection class will block until the operation is complete.

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {'Arg1': self}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('stop', payload=payload, response_object=None)
def StopPDU(self, *args, **kwargs):
    # type: (*Any, **Any) -> Union[List[str], None]
    """Executes the stopPDU operation on the server.

    Stop PDU.

    The IxNetwork model allows multiple signatures with the same name
    while Python does not:

    stopPDU(Arg2=list, Arg3=enum, async_operation=bool) -> list
    stopPDU(Arg2=enum, async_operation=bool) -> list

    - Arg2 (list(number)): indices into the protocol plugin; an empty
      list selects every instance in the plugin
    - Arg2/Arg3 (str(ospf | ospfv3 | bgp | ldp | rsvp | isis | pim | bfd)):
      session used by protocol
    - async_operation (bool=False): True to execute the operation
      asynchronously; any subsequent REST API calls made through the
      Connection class will block until the operation is complete.
    - Returns list(str): ID to associate each async action invocation

    Raises
    ------
    - NotFoundError: The requested resource does not exist on the server
    - ServerError: The server has encountered an uncategorized error condition
    """
    payload = {'Arg1': self.href}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('stopPDU', payload=payload, response_object=None)
def get_device_ids(self, PortNames=None, Active=None, ConfigureEchoSourceIp=None, EchoRxInterval=None, EchoTimeOut=None, EchoTxInterval=None, EnableControlPlaneIndependent=None, EnableDemandMode=None, FlapTxIntervals=None, IpDiffServ=None, MinRxInterval=None, PollInterval=None, SourceIp6=None, TimeoutMultiplier=None, TxInterval=None):
    """Return the list of bfdv6Interface device ids encapsulated by this object.

    Every optional parameter is a regex string that filters device ids on
    the attribute of the same name (PortNames, Active,
    ConfigureEchoSourceIp, EchoRxInterval, EchoTimeOut, EchoTxInterval,
    EnableControlPlaneIndependent, EnableDemandMode, FlapTxIntervals,
    IpDiffServ, MinRxInterval, PollInterval, SourceIp6,
    TimeoutMultiplier, TxInterval). Leaving all of them as None returns
    every device id encapsulated by this object.

    Returns
    -------
    - list(int): device ids matching every regex criterion supplied

    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    # locals() must be evaluated before any other local is bound so that it
    # captures exactly the parameter names/values expected by the base class.
    return self._get_ngpf_device_ids(locals())
| [
"[email protected]"
]
| |
85f6ba445f50885725cedabb538b5da28dee1645 | 90c6262664d013d47e9a3a9194aa7a366d1cabc4 | /tests/opcodes/cases/test_pexec_253.py | 18a5dd1216b31de8c9699b42ce6666fe9b9aae87 | [
"MIT"
]
| permissive | tqtezos/pytezos | 3942fdab7aa7851e9ea81350fa360180229ec082 | a4ac0b022d35d4c9f3062609d8ce09d584b5faa8 | refs/heads/master | 2021-07-10T12:24:24.069256 | 2020-04-04T12:46:24 | 2020-04-04T12:46:24 | 227,664,211 | 1 | 0 | MIT | 2020-12-30T16:44:56 | 2019-12-12T17:47:53 | Python | UTF-8 | Python | false | false | 820 | py | from unittest import TestCase
from tests import abspath
from pytezos.repl.interpreter import Interpreter
from pytezos.michelson.converter import michelson_to_micheline
from pytezos.repl.parser import parse_expression
class OpcodeTestpexec_253(TestCase):
    """Regression test: run the pexec.tz contract through the REPL interpreter."""

    def setUp(self):
        self.maxDiff = None
        self.i = Interpreter(debug=True)

    def test_opcode_pexec_253(self):
        # Load the contract source into the interpreter session.
        include_res = self.i.execute(f'INCLUDE "{abspath("opcodes/contracts/pexec.tz")}"')
        self.assertTrue(include_res['success'])

        # Execute it with parameter 38 and initial storage 14.
        run_res = self.i.execute('RUN 38 14')
        self.assertTrue(run_res['success'])

        # Resulting storage must equal the Michelson literal 52.
        storage = run_res['result']['storage']
        expected = parse_expression(michelson_to_micheline('52'), storage.type_expr)
        self.assertEqual(expected, storage._val)
| [
"[email protected]"
]
| |
9cc60c5abdd36edbd7a873ba397ed2815867ad34 | 66cff6c4ad4c5fd6ecdfb723614f0475e27a5b38 | /akshare/air/air_hebei.py | 7954e6d79e7f2c969e9da72997e8aedbf6ef83fa | [
"MIT"
]
| permissive | ifzz/akshare | a862501b314f2b5aeab22af86771dbeee34cfdb8 | 70cf20680b580c8bacab55a0b7d792d06e299628 | refs/heads/master | 2022-12-02T18:36:33.754645 | 2020-08-24T05:16:42 | 2020-08-24T05:16:42 | 289,834,570 | 1 | 0 | MIT | 2020-08-24T05:17:09 | 2020-08-24T05:17:09 | null | UTF-8 | Python | false | false | 3,461 | py | # -*- coding:utf-8 -*-
# /usr/bin/env python
"""
Date: 2020/4/29 12:33
Desc: 河北省空气质量预报信息发布系统
http://110.249.223.67/publish/
每日 17 时发布
等级划分
1. 空气污染指数为0-50,空气质量级别为一级,空气质量状况属于优。此时,空气质量令人满意,基本无空气污染,各类人群可正常活动。
2. 空气污染指数为51-100,空气质量级别为二级,空气质量状况属于良。此时空气质量可接受,但某些污染物可能对极少数异常敏感人群健康有较弱影响,建议极少数异常敏感人群应减少户外活动。
3. 空气污染指数为101-150,空气质量级别为三级,空气质量状况属于轻度污染。此时,易感人群症状有轻度加剧,健康人群出现刺激症状。建议儿童、老年人及心脏病、呼吸系统疾病患者应减少长时间、高强度的户外锻炼。
4. 空气污染指数为151-200,空气质量级别为四级,空气质量状况属于中度污染。此时,进一步加剧易感人群症状,可能对健康人群心脏、呼吸系统有影响,建议疾病患者避免长时间、高强度的户外锻练,一般人群适量减少户外运动。
5. 空气污染指数为201-300,空气质量级别为五级,空气质量状况属于重度污染。此时,心脏病和肺病患者症状显著加剧,运动耐受力降低,健康人群普遍出现症状,建议儿童、老年人和心脏病、肺病患者应停留在室内,停止户外运动,一般人群减少户外运动。
6. 空气污染指数大于300,空气质量级别为六级,空气质量状况属于严重污染。此时,健康人群运动耐受力降低,有明显强烈症状,提前出现某些疾病,建议儿童、老年人和病人应当留在室内,避免体力消耗,一般人群应避免户外活动。
发布单位:河北省环境应急与重污染天气预警中心 技术支持:中国科学院大气物理研究所 中科三清科技有限公司
"""
from datetime import datetime
import pandas as pd
import requests
def air_quality_hebei(city: str = "唐山市") -> pd.DataFrame:
    """Hebei Province 6-day air quality forecast.

    Fetches the province-wide forecast from the Hebei air quality forecast
    publishing system (河北省空气质量预报信息发布系统,
    http://110.249.223.67/publish/), which publishes daily at 17:00.

    :param city: one of ['石家庄市', '唐山市', '秦皇岛市', '邯郸市', '邢台市',
        '保定市', '张家口市', '承德市', '沧州市', '廊坊市', '衡水市',
        '辛集市', '定州市']; pass "" to return every city
    :type city: str
    :return: forecast rows for the next 6 days, indexed by city name
    :rtype: pandas.DataFrame
    """
    url = "http://110.249.223.67/publishNewServer/api/CityPublishInfo/GetProvinceAndCityPublishData"
    params = {
        "publishDate": f"{datetime.today().strftime('%Y-%m-%d')} 16:00:00"
    }
    r = requests.get(url, params=params)
    json_data = r.json()
    city_datas = json_data["cityPublishDatas"]
    city_list = pd.DataFrame.from_dict(city_datas, orient="columns")["CityName"].tolist()
    # Build one frame per forecast day (Date1..Date6) and concatenate once;
    # DataFrame.append was deprecated in pandas 1.4 and removed in 2.0.
    daily_frames = [
        pd.DataFrame([item[f"Date{i}"] for item in city_datas], index=city_list)
        for i in range(1, 7)
    ]
    outer_df = pd.concat(daily_frames)
    if city == "":
        return outer_df
    return outer_df[outer_df.index == city]
if __name__ == "__main__":
    # Manual smoke test: fetch and display the 6-day forecast for one city.
    air_quality_hebei_df = air_quality_hebei(city="石家庄市")
    print(air_quality_hebei_df)
| [
"[email protected]"
]
| |
62c23bc35e09fd885d7dd599ac35f30f777a5148 | 4c19eac6e53b2c1230257508370ad60c8d83d6a7 | /dxm/lib/DxAlgorithm/alg_worker.py | 75fcc4c87123d87ea14ca078b5a002fd729f7811 | [
"Apache-2.0"
]
| permissive | rakesh-roshan/dxm-toolkit | 2c7741c8a02952de1c23715eadb515d84fcaf954 | 2c6e6ebf8615526501767844edf06fb74d878f25 | refs/heads/master | 2020-04-27T19:05:11.293818 | 2019-03-01T13:49:34 | 2019-03-01T13:49:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,246 | py | #
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Copyright (c) 2018 by Delphix. All rights reserved.
#
# Author : Marcin Przepiorowski
# Date : April 2018
from dxm.lib.DxEngine.DxMaskingEngine import DxMaskingEngine
import logging
from dxm.lib.DxLogging import print_error
from dxm.lib.DxLogging import print_message
from dxm.lib.Output.DataFormatter import DataFormatter
from dxm.lib.DxTools.DxTools import get_list_of_engines
from dxm.lib.DxAlgorithm.DxAlgorithmList import DxAlgorithmList
from dxm.lib.DxAlgorithm.DxAlgorithm import DxAlgorithm
from dxm.lib.DxDomain.DxDomainList import DxDomainList
import sys
def algorithm_list(p_engine, format, algname):
    """
    Print a table of algorithms known to the masking engine(s).

    :param p_engine: engine name from configuration
    :param format: output format for the table
    :param algname: algorithm name to list; list all when None
    :return: 0 if the algorithm name was found
    """
    ret = 0

    data = DataFormatter()
    data.create_header([
        ("Engine name", 30),
        ("Algorithm name", 30),
        ("Domain name", 32),
        ("Syncable", 9),
        ("Algorithm type", 30),
    ])
    data.format_type = format

    enginelist = get_list_of_engines(p_engine)
    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple[0], engine_tuple[1],
                                     engine_tuple[2], engine_tuple[3])
        # Skip engines we cannot open a session to.
        if engine_obj.get_session():
            continue

        domainlist = DxDomainList()
        domainlist.LoadDomains()

        alglist = DxAlgorithmList()
        alglist.LoadAlgorithms()

        if algname:
            # Restrict the listing to the single requested algorithm.
            algref_list = []
            algobj = alglist.get_by_ref(algname)
            if algobj:
                algref_list.append(algobj.algorithm_name)
        else:
            algref_list = alglist.get_allref()

        for algref in algref_list:
            algobj = alglist.get_by_ref(algref)
            syncable = 'Y' if algobj.sync else 'N'
            data.data_insert(
                engine_tuple[0],
                algobj.algorithm_name,
                algobj.domain_name,
                syncable,
                algobj.algorithm_type
            )

    print("")
    print(data.data_output(False))
    print("")
    return ret
def algorithm_worker(p_engine, algname, **kwargs):
    """
    Select an algorithm on each configured engine and run an action on it.

    :param p_engine: engine name from configuration
    :param algname: algorithm name
    :param kwargs: parameters forwarded to the action, including
                   'function_to_call' naming the module-level function to run
    :return: 0 if the algorithm was found and every action succeeded
    """
    ret = 0
    function_to_call = kwargs.get('function_to_call')

    enginelist = get_list_of_engines(p_engine)
    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple[0], engine_tuple[1],
                                     engine_tuple[2], engine_tuple[3])
        if engine_obj.get_session():
            continue

        domainlist = DxDomainList()
        domainlist.LoadDomains()

        alglist = DxAlgorithmList()
        algobj = alglist.get_by_ref(algname)
        if algobj is None:
            ret = ret + 1
            continue

        # Dispatch to the requested module-level function by name.
        dynfunc = globals()[function_to_call]
        if dynfunc(algobj=algobj, engine_obj=engine_obj, **kwargs):
            ret = ret + 1

    return ret
def algorithm_export(p_engine, algname, outputfile):
    """
    Save an algorithm definition to a file.

    :param p_engine: engine name from configuration
    :param algname: algorithm name to export
    :param outputfile: output file
    :return: 0 if OK
    """
    return algorithm_worker(
        p_engine,
        algname,
        outputfile=outputfile,
        function_to_call='do_export',
    )
def do_export(**kwargs):
    """Action callback for algorithm_worker: export the selected algorithm."""
    kwargs.get('algobj').export()
def algorithm_import(p_engine, inputfile):
    """
    Load an algorithm from a file into the configured engine(s).

    :param p_engine: engine name from configuration
    :param inputfile: input file
    :return: 0 if OK, 1 if no engine list could be built
    """
    ret = 0

    enginelist = get_list_of_engines(p_engine)
    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple[0], engine_tuple[1],
                                     engine_tuple[2], engine_tuple[3])
        if engine_obj.get_session():
            continue
        algobj = DxAlgorithm(engine_obj)
        # NOTE(review): importalg is called with None rather than inputfile;
        # presumably importalg reads the file itself — confirm against
        # DxAlgorithm.importalg before changing.
        algobj.importalg(None)

    # Fix: the function previously fell off the end and returned None even
    # though its contract is to return 0 on success.
    return ret
| [
"[email protected]"
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.