Dataset schema, one row per source file:

| column | type | range / distinct values |
|---|---|---|
| blob_id | string | length 40–40 |
| directory_id | string | length 40–40 |
| path | string | length 3–616 |
| content_id | string | length 40–40 |
| detected_licenses | list | length 0–112 |
| license_type | string | 2 classes |
| repo_name | string | length 5–115 |
| snapshot_id | string | length 40–40 |
| revision_id | string | length 40–40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 – 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 – 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 – 2023-09-06 01:08:06 |
| github_id | int64, nullable | 4.92k – 681M |
| star_events_count | int64 | 0 – 209k |
| fork_events_count | int64 | 0 – 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us], nullable | 2012-06-04 01:52:49 – 2023-09-14 21:59:50 |
| gha_created_at | timestamp[us], nullable | 2008-05-22 07:58:19 – 2023-08-21 12:35:19 |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 – 10.2M |
| extension | string | 188 classes |
| content | string | length 3 – 10.2M |
| authors | list | length 1–1 |
| author_id | string | length 1–132 |
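The rows below follow this schema. For orientation, here is a minimal sketch of iterating over rows of such a dataset with the Hugging Face `datasets` library; the dataset identifier is a placeholder, since the actual name is not given here:

```python
# Sketch only: "org/python-files-corpus" is a hypothetical dataset id.
from datasets import load_dataset

ds = load_dataset("org/python-files-corpus", split="train", streaming=True)
for row in ds:
    # Provenance columns identify the file; `content` holds the raw text.
    print(row["repo_name"], row["path"], row["license_type"], row["length_bytes"])
    print(row["content"][:200])
    break
```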
dc8c9b4ea75f3e8b25d8678b445eb24ee988d0e3 | c03c8d7363a3626b1178d241af3aa93a7b0b15e2 | /unidesign/transients/__init__.py | 3d50473e4a0abd9fdb247dd1784c2cc73cd211c5 | [] | no_license | luntan365/unidesign | 0c9aa82df215fcff6be32840709ea51588cad805 | ee24a7152d937d1b95c2bb67b3f050966850d50f | refs/heads/master | 2020-12-01T07:50:18.444676 | 2011-09-11T10:54:41 | 2011-09-11T10:54:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,236 | py |
"""
Transient Dynamics for Neural Processing

Building blocks:
- A network N for the neuroanatomy, with nodes representing neurons and edges representing connection weights
- A set of input patterns I extended in time, each representing a spatio-temporal entity, thought of as the
  activation of the subset of neurons that would be driven by sensory signal transduction
- A mapping of e.g. odor identity and concentration to an activation pattern I

Experimental physiological data:
- Activity patterns, spike time events on the network nodes

Temporal propagation function (model):
- Fit the experimental activity pattern evolution given the input pattern and anatomical connectivity
- Generate transients T using the fitted model for given input patterns

Algorithm:
- Extract the stable heteroclinic channels, i.e. the metastable saddle states
- Implement the Lotka-Volterra equation as the model to generate the transients using ne
  http://www.scipy.org/Cookbook/LoktaVolterraTutorial?action=show&redirect=LoktaVolterraTutorial

Try:
- Extract space-time objects with a sliding time-window approach, existing only in a topological
  space (no distances), using open sets to define nearness, and use them for clustering into
  metastable states.
"""
| ["[email protected]"] | |
523a6572bc48a5326a93e982133be5b30218128c | acf8fe77e599f8372adf4fc971012394715795d6 | /flask/EGG-INFO/enstaller/post_egginst.py | e91b753a7dca7f316b1ea217b327a33c095849ad | [] | no_license | shaheershantk/Blog-Engine-Using-Flask | 3e2f1457a59f282c336bbb63ff48171f938f5108 | 450e76a8bde0bd702d995fa7bb746ed920917f98 | refs/heads/master | 2021-01-01T19:42:03.401554 | 2014-11-10T15:01:08 | 2014-11-10T15:01:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,380 | py |
"""
converts the old "__egginst__.txt" files to "egginst.json"
and "_info.json" files
"""
import os
import re
import sys
import json
import time
from os.path import abspath, isdir, isfile, join
def read_old(path):
d1 = {'installed_size': -1}
execfile(path, d1)
d2 = {}
for name in ['egg_name', 'prefix', 'installed_size', 'rel_files']:
d2[name] = d1[name]
return d2
def write_egginst(path, d):
d['files'] = []
for f in d['rel_files'] + [path]:
p = abspath(join(sys.prefix, f))
d['files'].append(p.replace(sys.prefix, '.').replace('\\', '/')
if p.startswith(sys.prefix) else p)
del d['rel_files']
d['prefix'] = sys.prefix
with open(path, 'w') as f:
json.dump(d, f, indent=2, sort_keys=True)
egg_pat = re.compile(r'([\w.]+)-([\w.]+)-(\d+)\.egg$')
def write_info(path, eggname):
m = egg_pat.match(eggname)
if m is None:
return
n, v, b = m.group(1), m.group(2), int(m.group(3))
info = dict(
key = eggname,
name = n.lower(),
version = v,
build = b,
ctime = time.ctime(),
hook = False,
)
with open(path, 'w') as f:
json.dump(info, f, indent=2, sort_keys=True)
def get_eggname():
from enstaller import __version__
return 'enstaller-%s-1.egg' % __version__
def main():
egg_info_dir = join(sys.prefix, 'EGG-INFO')
for fn in os.listdir(egg_info_dir):
meta_dir = join(egg_info_dir, fn)
if not isdir(meta_dir):
continue
path1 = join(meta_dir, '__egginst__.txt')
if not isfile(path1):
continue
path2 = join(meta_dir, 'egginst.json')
path3 = join(meta_dir, '_info.json')
if isfile(path2) and isfile(path3):
continue
data = read_old(path1)
write_egginst(path2, data)
write_info(path3, data['egg_name'])
# create files for enstaller itself if necessary
meta_dir = join(egg_info_dir, 'enstaller')
path2 = join(meta_dir, 'egginst.json')
if not isfile(path2):
write_egginst(path2, dict(
egg_name=get_eggname(), prefix=sys.prefix,
installed_size=-1, rel_files=[]))
path3 = join(meta_dir, '_info.json')
if not isfile(path3):
write_info(path3, get_eggname())
if __name__ == '__main__':
main()
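The shape of the generated `egginst.json` follows directly from `write_egginst` above; as an illustration only (the egg name, prefix, and file paths here are invented):

```python
# Illustrative shape of the generated egginst.json; values are placeholders.
example_egginst = {
    "egg_name": "enstaller-4.5.0-1.egg",  # hypothetical egg file name
    "prefix": "/usr/local",               # sys.prefix at install time
    "installed_size": -1,                 # old files carried no real size
    "files": ["./EGG-INFO/enstaller/egginst.json"],  # prefix-relative paths
}
```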
| ["[email protected]"] | |
fda9be22f30a154d5abe56754b9f02059d1a50ff | 753f729f33a1b00a0a7f5c78d217cc4c609aee6f | /n17_ViewSetAndRouters/n17/wsgi.py | 8e8c18729c4e259cdf8ed2fa83301e9ba3ae37dc | [] | no_license | nayan-gujju/DRF-Code | 874114a861042d558112f1a8ec95daf1356d5493 | 6fb3fdd5dde352e7b6e3a7363da0e7a3057b1ede | refs/heads/master | 2023-08-06T12:42:23.551603 | 2021-10-06T11:34:54 | 2021-10-06T11:34:54 | 404,650,413 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 383 | py |
"""
WSGI config for n17 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'n17.settings')
application = get_wsgi_application()
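The file above only exposes the `application` callable. As a quick local smoke test (not a deployment recipe), it can be served with the standard library's wsgiref, assuming the Django settings import cleanly; the port is arbitrary:

```python
# Minimal local check of the WSGI callable; sketch only.
from wsgiref.simple_server import make_server
from n17.wsgi import application

with make_server("127.0.0.1", 8000, application) as httpd:
    httpd.serve_forever()
```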
| ["[email protected]"] | |
a41def6a23f404b4cc471342fface1cd38551c6b | bbd65a48e9fb340b29f39082483680969d6e2571 | /python/misc/double_letters.py | 3c887a4d9ac46b613773398829e3c96f93bf0139 | ["MIT"] | permissive | christopher-burke/warmups | 2784eef3b959bca5c270b3e642b505f3b4c0b790 | 140c96ada87ec5e9faa4622504ddee18840dce4a | refs/heads/master | 2022-05-24T11:26:40.046650 | 2022-03-28T16:47:16 | 2022-03-28T16:47:16 | 152,440,792 | 0 | 0 | MIT | 2022-03-13T03:25:43 | 2018-10-10T14:51:43 | Python | UTF-8 | Python | false | false | 712 | py |
#!/usr/bin/env python3
"""Double Letters.
Create a function that takes a word and returns true if the word
as two consecutive identical letters.
Source:
https://edabit.com/challenge/q3JMk2yqXfNyHWE9c
"""
import re
def double_letters(text: str) -> bool:
"""Determine if text contains two consecutive identical letters.
Uses `re` module to search for a text.
"""
if re.search(r"(\w)\1+", text):
return True
return False
def main():
"""Run sample double_letters functions. Do not import."""
print(double_letters("loop"))
print(double_letters("yummy"))
print(double_letters("orange"))
print(double_letters("munchkin"))
if __name__ == "__main__":
main()
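The regex `(\w)\1+` is the whole trick above. For comparison, an equivalent regex-free formulation of the same check, a sketch that behaves identically for ordinary word input (the regex limits matches to `\w` characters, so punctuation-only repeats would differ):

```python
# Equivalent check without `re`: compare each character to its successor.
def double_letters_zip(text: str) -> bool:
    return any(a == b for a, b in zip(text, text[1:]))

assert double_letters_zip("loop") and not double_letters_zip("orange")
```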
| ["[email protected]"] | |
df9444bb29a93e0916ada2e355c0a1121aed5edc | 38ecb8003cfa1f7b418d8c4ccd30d2a6262965c5 | /tfx_bsl/coders/sequence_example_coder_test.py | 45f5624302061620dc45922e576efd776832810a | ["MIT", "Apache-2.0"] | permissive | dhruvesh09/tfx-bsl | ff5c89051516577ef76f9949c18b6e33071eaeff | 52f7aeab4a528ee81ae308534c9fcc1ce431d569 | refs/heads/master | 2021-12-04T11:20:53.374965 | 2021-08-27T22:53:20 | 2021-08-27T22:53:52 | 214,256,743 | 1 | 0 | Apache-2.0 | 2019-10-10T18:28:46 | 2019-10-10T18:28:45 | null | UTF-8 | Python | false | false | 27,863 | py |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tfx_bsl.coders.sequence_example_coder."""
import pyarrow as pa
import tensorflow as tf
from tfx_bsl.coders import sequence_example_coder
from google.protobuf import text_format
from absl.testing import absltest
from absl.testing import parameterized
from tensorflow_metadata.proto.v0 import schema_pb2
_TEST_SEQUENCE_COLUMN_NAME = "##SEQUENCE##"
_TYPED_SEQUENCE_EXAMPLE = """
context {
feature {
key: 'context_a'
value {
int64_list {
value: [1]
}
}
}
feature {
key: "context_b"
value {
float_list {
value: [1.0, 2.0]
}
}
}
feature {
key: 'context_c'
value {
bytes_list {
value: ['a', 'b', 'c']
}
}
}
}
feature_lists {
feature_list {
key: 'sequence_x'
value {
feature {
int64_list {
value: [1, 2]
}
}
feature {
int64_list {
value: [3]
}
}
}
}
feature_list {
key: "sequence_y"
value {
feature {
float_list {
value: [3.0, 4.0]
}
}
feature {
float_list {
value: [1.0, 2.0]
}
}
}
}
feature_list {
key: 'sequence_z'
value {
feature {
bytes_list {
value: ['a', 'b']
}
}
feature {
bytes_list {
value: ['c']
}
}
}
}
}
"""
_UNTYPED_SEQUENCE_EXAMPLE = """
context {
feature {
key: 'context_a'
value {}
}
feature {
key: "context_b"
value {}
}
feature {
key: 'context_c'
value {}
}
feature {
key: 'context_d'
value {}
}
}
feature_lists {
feature_list {
key: 'sequence_x'
value {}
}
feature_list {
key: "sequence_y"
value {}
}
feature_list {
key: 'sequence_z'
value {}
}
}
"""
_SOME_FEATURES_TYPED_SEQUENCE_EXAMPLE = """
context {
feature {
key: 'context_a'
value {}
}
feature {
key: "context_b"
value {}
}
feature {
key: 'context_c'
value {}
}
feature {
key: 'context_d'
value {}
}
feature {
key: 'context_e'
value {
float_list {
value: [1.0]
}
}
}
}
feature_lists {
feature_list {
key: 'sequence_v'
value {
feature {
float_list {
value: [1.0]
}
}
}
}
feature_list {
key: 'sequence_x'
value {
feature {}
feature {}
feature {}
}
}
feature_list {
key: "sequence_y"
value {
feature {}
}
}
feature_list {
key: 'sequence_z'
value {
feature {}
}
}
}
"""
_EMPTY_VALUES_LIST_SEQUENCE_EXAMPLE = """
context {
feature {
key: 'context_a'
value {
int64_list {
value: []
}
}
}
feature {
key: "context_b"
value {
float_list {
value: []
}
}
}
feature {
key: 'context_c'
value {
bytes_list {
value: []
}
}
}
}
feature_lists {
feature_list {
key: 'sequence_x'
value {
feature {
int64_list {
value: []
}
}
feature {
int64_list {
value: []
}
}
}
}
feature_list {
key: "sequence_y"
value {
feature {
float_list {
value: []
}
}
}
}
feature_list {
key: 'sequence_z'
value {
feature {
bytes_list {
value: []
}
}
}
}
}
"""
_TEST_SEQUENCE_EXAMPLES_NONE_TYPED = [
"""
context {
feature {
key: 'context_a'
value {}
}
feature {
key: "context_b"
value {}
}
feature {
key: 'context_c'
value {}
}
feature {
key: 'context_d'
value {}
}
}
feature_lists {
feature_list {
key: 'sequence_x'
value {}
}
}
""",
"""
context {
feature {
key: 'context_a'
value {}
}
feature {
key: "context_b"
value {}
}
feature {
key: 'context_c'
value {}
}
feature {
key: 'context_d'
value {}
}
}
feature_lists {
feature_list {
key: 'sequence_w'
value {
feature {}
}
}
feature_list {
key: 'sequence_x'
value {
feature {}
}
}
}
""",
]
_DECODE_CASES = [
dict(
testcase_name="without_schema_first_example_typed",
schema_text_proto=None,
sequence_examples_text_proto=[
_TYPED_SEQUENCE_EXAMPLE, _UNTYPED_SEQUENCE_EXAMPLE,
_SOME_FEATURES_TYPED_SEQUENCE_EXAMPLE,
_EMPTY_VALUES_LIST_SEQUENCE_EXAMPLE
],
expected=pa.RecordBatch.from_arrays([
pa.array([[1], None, None, []], type=pa.large_list(pa.int64())),
pa.array([[1.0, 2.0], None, None, []],
type=pa.large_list(pa.float32())),
pa.array([[b"a", b"b", b"c"], None, None, []],
type=pa.large_list(pa.large_binary())),
pa.array([None, None, None, None], pa.null()),
pa.array([None, None, [1.0], None],
type=pa.large_list(pa.float32())),
pa.StructArray.from_arrays([
pa.array([None, None, [[1.0]], None],
type=pa.large_list(pa.large_list(pa.float32()))),
pa.array([[[1, 2], [3]], [], [None, None, None], [[], []]],
type=pa.large_list(pa.large_list(pa.int64()))),
pa.array([[[3.0, 4.0], [1.0, 2.0]], [], [None], [[]]],
type=pa.large_list(pa.large_list(pa.float32()))),
pa.array([[[b"a", b"b"], [b"c"]], [], [None], [[]]],
type=pa.large_list(pa.large_list(pa.large_binary())))
],
names=[
"sequence_v", "sequence_x",
"sequence_y", "sequence_z"
])
], [
"context_a", "context_b", "context_c", "context_d", "context_e",
_TEST_SEQUENCE_COLUMN_NAME
])),
dict(
testcase_name="with_schema_first_example_typed",
schema_text_proto="""
feature {
name: "context_a"
type: INT
}
feature {
name: "context_b"
type: FLOAT
}
feature {
name: "context_c"
type: BYTES
}
feature {
name: "##SEQUENCE##"
type: STRUCT
struct_domain {
feature {
name: "sequence_x"
type: INT
}
feature {
name: "sequence_y"
type: FLOAT
}
feature {
name: "sequence_z"
type: BYTES
}
}
}""",
sequence_examples_text_proto=[
_TYPED_SEQUENCE_EXAMPLE, _UNTYPED_SEQUENCE_EXAMPLE,
_SOME_FEATURES_TYPED_SEQUENCE_EXAMPLE,
_EMPTY_VALUES_LIST_SEQUENCE_EXAMPLE
],
expected=pa.RecordBatch.from_arrays([
pa.array([[1], None, None, []], type=pa.large_list(pa.int64())),
pa.array([[1.0, 2.0], None, None, []],
type=pa.large_list(pa.float32())),
pa.array([[b"a", b"b", b"c"], None, None, []],
type=pa.large_list(pa.large_binary())),
pa.StructArray.from_arrays([
pa.array([[[1, 2], [3]], [], [None, None, None], [[], []]],
type=pa.large_list(pa.large_list(pa.int64()))),
pa.array([[[3.0, 4.0], [1.0, 2.0]], [], [None], [[]]],
type=pa.large_list(pa.large_list(pa.float32()))),
pa.array([[[b"a", b"b"], [b"c"]], [], [None], [[]]],
type=pa.large_list(pa.large_list(pa.large_binary())))
],
names=[
"sequence_x", "sequence_y",
"sequence_z"
])
], ["context_a", "context_b", "context_c", _TEST_SEQUENCE_COLUMN_NAME
])),
dict(
testcase_name="without_schema_untyped_then_typed_examples",
schema_text_proto=None,
sequence_examples_text_proto=[
_UNTYPED_SEQUENCE_EXAMPLE, _SOME_FEATURES_TYPED_SEQUENCE_EXAMPLE,
_EMPTY_VALUES_LIST_SEQUENCE_EXAMPLE, _TYPED_SEQUENCE_EXAMPLE
],
expected=pa.RecordBatch.from_arrays([
pa.array([None, None, [], [1]], type=pa.large_list(pa.int64())),
pa.array([None, None, [], [1.0, 2.0]],
type=pa.large_list(pa.float32())),
pa.array([None, None, [], [b"a", b"b", b"c"]],
type=pa.large_list(pa.large_binary())),
pa.array([None, None, None, None], pa.null()),
pa.array([None, [1.0], None, None],
type=pa.large_list(pa.float32())),
pa.StructArray.from_arrays([
pa.array([None, [[1.0]], None, None],
type=pa.large_list(pa.large_list(pa.float32()))),
pa.array([[], [None, None, None], [[], []], [[1, 2], [3]]],
type=pa.large_list(pa.large_list(pa.int64()))),
pa.array([[], [None], [[]], [[3.0, 4.0], [1.0, 2.0]]],
type=pa.large_list(pa.large_list(pa.float32()))),
pa.array([[], [None], [[]], [[b"a", b"b"], [b"c"]]],
type=pa.large_list(pa.large_list(pa.large_binary())))
],
names=[
"sequence_v", "sequence_x",
"sequence_y", "sequence_z"
])
], [
"context_a", "context_b", "context_c", "context_d", "context_e",
_TEST_SEQUENCE_COLUMN_NAME
])),
dict(
testcase_name="with_schema_untyped_then_typed_examples",
schema_text_proto="""
feature {
name: "context_a"
type: INT
}
feature {
name: "context_b"
type: FLOAT
}
feature {
name: "context_c"
type: BYTES
}
feature {
name: "##SEQUENCE##"
type: STRUCT
struct_domain {
feature {
name: "sequence_x"
type: INT
}
feature {
name: "sequence_y"
type: FLOAT
}
feature {
name: "sequence_z"
type: BYTES
}
}
}""",
sequence_examples_text_proto=[
_UNTYPED_SEQUENCE_EXAMPLE, _SOME_FEATURES_TYPED_SEQUENCE_EXAMPLE,
_EMPTY_VALUES_LIST_SEQUENCE_EXAMPLE, _TYPED_SEQUENCE_EXAMPLE
],
expected=pa.RecordBatch.from_arrays([
pa.array([None, None, [], [1]], type=pa.large_list(pa.int64())),
pa.array([None, None, [], [1.0, 2.0]],
type=pa.large_list(pa.float32())),
pa.array([None, None, [], [b"a", b"b", b"c"]],
type=pa.large_list(pa.large_binary())),
pa.StructArray.from_arrays([
pa.array([[], [None, None, None], [[], []], [[1, 2], [3]]],
type=pa.large_list(pa.large_list(pa.int64()))),
pa.array([[], [None], [[]], [[3.0, 4.0], [1.0, 2.0]]],
type=pa.large_list(pa.large_list(pa.float32()))),
pa.array([[], [None], [[]], [[b"a", b"b"], [b"c"]]],
type=pa.large_list(pa.large_list(pa.large_binary())))
],
names=[
"sequence_x", "sequence_y",
"sequence_z"
])
], ["context_a", "context_b", "context_c", _TEST_SEQUENCE_COLUMN_NAME
])),
dict(
testcase_name="without_schema_no_typed_examples",
schema_text_proto=None,
sequence_examples_text_proto=_TEST_SEQUENCE_EXAMPLES_NONE_TYPED,
expected=pa.RecordBatch.from_arrays([
pa.array([None, None], type=pa.null()),
pa.array([None, None], type=pa.null()),
pa.array([None, None], type=pa.null()),
pa.array([None, None], type=pa.null()),
pa.StructArray.from_arrays([
pa.array([None, [None]], type=pa.large_list(pa.null())),
pa.array([[], [None]], type=pa.large_list(pa.null())),
],
names=[
"sequence_w",
"sequence_x",
])
], [
"context_a", "context_b", "context_c", "context_d",
_TEST_SEQUENCE_COLUMN_NAME
])),
dict(
testcase_name="with_schema_no_typed_examples",
schema_text_proto="""
feature {
name: "context_a"
type: INT
}
feature {
name: "context_b"
type: FLOAT
}
feature {
name: "context_c"
type: BYTES
}
feature {
name: "##SEQUENCE##"
type: STRUCT
struct_domain {
feature {
name: "sequence_x"
type: INT
}
feature {
name: "sequence_y"
type: FLOAT
}
feature {
name: "sequence_z"
type: BYTES
}
}
}""",
sequence_examples_text_proto=_TEST_SEQUENCE_EXAMPLES_NONE_TYPED,
expected=pa.RecordBatch.from_arrays([
pa.array([None, None], type=pa.large_list(pa.int64())),
pa.array([None, None], type=pa.large_list(pa.float32())),
pa.array([None, None], type=pa.large_list(pa.large_binary())),
pa.StructArray.from_arrays([
pa.array([[], [None]],
type=pa.large_list(pa.large_list(pa.int64()))),
pa.array([None, None],
type=pa.large_list(pa.large_list(pa.float32()))),
pa.array([None, None],
type=pa.large_list(pa.large_list(pa.large_binary())))
],
names=[
"sequence_x", "sequence_y",
"sequence_z"
])
], ["context_a", "context_b", "context_c", _TEST_SEQUENCE_COLUMN_NAME
])),
dict(
testcase_name="build_nulls_for_unseen_feature",
schema_text_proto="""
feature {
name: "context_u"
type: BYTES
}
feature {
name: "##SEQUENCE##"
type: STRUCT
struct_domain {
feature {
name: "sequence_u"
type: INT
}
}
}
""",
sequence_examples_text_proto=[
_TYPED_SEQUENCE_EXAMPLE, _UNTYPED_SEQUENCE_EXAMPLE,
_SOME_FEATURES_TYPED_SEQUENCE_EXAMPLE,
_EMPTY_VALUES_LIST_SEQUENCE_EXAMPLE
],
expected=pa.RecordBatch.from_arrays([
pa.array([None, None, None, None],
type=pa.large_list(pa.large_binary())),
pa.StructArray.from_arrays([
pa.array([None, None, None, None],
type=pa.large_list(pa.large_list(pa.int64())))
],
names=["sequence_u"]),
], ["context_u", _TEST_SEQUENCE_COLUMN_NAME])),
dict(
testcase_name="build_null_for_unset_kind",
schema_text_proto="""
feature {
name: "context_a"
type: BYTES
}
feature {
name: "##SEQUENCE##"
type: STRUCT
struct_domain {
feature {
name: "sequence_a"
type: INT
}
}
}
""",
sequence_examples_text_proto=[
"""
context { feature { key: "context_a" value { } } }
feature_lists {
feature_list { key: 'sequence_a' value { } }
}
"""
],
expected=pa.RecordBatch.from_arrays([
pa.array([None], type=pa.large_list(pa.large_binary())),
pa.StructArray.from_arrays(
[pa.array([[]], type=pa.large_list(pa.large_list(pa.int64())))],
names=["sequence_a"]),
], ["context_a", _TEST_SEQUENCE_COLUMN_NAME])),
dict(
testcase_name="schema_does_not_contain_sequence_feature",
schema_text_proto="""
feature {
name: "context_a"
type: BYTES
}
""",
sequence_examples_text_proto=[
"""
context { feature { key: "context_a" value { } } }
feature_lists {
feature_list { key: 'sequence_a' value { } }
}
"""
],
expected=pa.RecordBatch.from_arrays([
pa.array([None], type=pa.large_list(pa.large_binary())),
], ["context_a"])),
dict(
testcase_name="duplicate_context_feature_names_in_schema",
schema_text_proto="""
feature {
name: "context_a"
type: BYTES
}
# Note that the second feature "context_a" will be ignored.
feature {
name: "context_a"
type: INT
}
""",
sequence_examples_text_proto=[
"""
context { feature { key: "context_a" value { } } }
feature_lists {
feature_list { key: 'sequence_a' value { } }
}
"""
],
expected=pa.RecordBatch.from_arrays([
pa.array([None], type=pa.large_list(pa.large_binary())),
], ["context_a"])),
dict(
testcase_name="duplicate_sequence_feature_names_in_schema",
schema_text_proto="""
feature {
name: "##SEQUENCE##"
type: STRUCT
struct_domain {
feature {
name: "sequence_a"
type: INT
}
# Note that the second feature "sequence_a" will be ignored.
feature {
name: "sequence_a"
type: BYTES
}
}
}
""",
sequence_examples_text_proto=[
"""
feature_lists {
feature_list { key: 'sequence_a' value { } }
}
"""
],
expected=pa.RecordBatch.from_arrays([
pa.StructArray.from_arrays(
[pa.array([[]], type=pa.large_list(pa.large_list(pa.int64())))],
names=["sequence_a"]),
], [_TEST_SEQUENCE_COLUMN_NAME])),
dict(
testcase_name="feature_lists_with_no_sequence_features",
schema_text_proto=None,
sequence_examples_text_proto=["""
feature_lists {}
"""],
expected=pa.RecordBatch.from_arrays([
pa.StructArray.from_buffers(pa.struct([]), 1, [None]),
], [_TEST_SEQUENCE_COLUMN_NAME])),
dict(
testcase_name="without_schema_only_context_features",
schema_text_proto=None,
sequence_examples_text_proto=[
"""
context {
feature {
key: 'context_a'
value {
int64_list {
value: [1, 2]
}
}
}
}
"""
],
expected=pa.RecordBatch.from_arrays([
pa.array([[1, 2]], type=pa.large_list(pa.int64())),
], ["context_a"])),
dict(
testcase_name="without_schema_only_sequence_features",
schema_text_proto=None,
sequence_examples_text_proto=[
"""
feature_lists {
feature_list {
key: 'sequence_x'
value {
feature {
int64_list {
value: [1, 2]
}
}
}
}
}
"""
],
expected=pa.RecordBatch.from_arrays([
pa.StructArray.from_arrays([
pa.array([[[1, 2]]],
type=pa.large_list(pa.large_list(pa.int64()))),
],
names=["sequence_x"])
], [_TEST_SEQUENCE_COLUMN_NAME])),
]
_INVALID_INPUT_CASES = [
dict(
testcase_name="context_feature_actual_type_mismatches_schema_type",
schema_text_proto="""
feature {
name: "a"
type: BYTES
}
""",
sequence_examples_text_proto=[
"""
context { feature { key: "a" value { float_list { value: [] } } } }
"""
],
error=RuntimeError,
error_msg_regex=(
"Feature had wrong type, expected bytes_list, found float_list "
"for feature \"a\""),
),
dict(
testcase_name="sequence_feature_actual_type_mismatches_schema_type",
schema_text_proto="""
feature {
name: "##SEQUENCE##"
type: STRUCT
struct_domain {
feature {
name: "a"
type: BYTES
}
}
}
""",
sequence_examples_text_proto=[
"""
feature_lists {
feature_list {
key: 'a'
value {
feature { float_list { value: [] } }
}
}
}
"""
],
error=RuntimeError,
error_msg_regex=(
"Feature had wrong type, expected bytes_list, found float_list "
"for sequence feature \"a\""),
),
dict(
testcase_name="context_feature_no_schema_mixed_type",
schema_text_proto=None,
sequence_examples_text_proto=[
"""
context { feature { key: "a" value { float_list { value: [] } } } }
""", """
context { feature { key: "a" value { int64_list { value: [] } } } }
"""
],
error=RuntimeError,
error_msg_regex=(
"Feature had wrong type, expected float_list, found int64_list"
" for feature \"a\""),
),
dict(
testcase_name="sequence_feature_no_schema_mixed_type",
schema_text_proto=None,
sequence_examples_text_proto=[
"""
feature_lists {
feature_list {
key: 'a'
value {
feature { float_list { value: [] } }
}
}
}
""", """
feature_lists {
feature_list {
key: 'a'
value {
feature { int64_list { value: [] } }
}
}
}
"""
],
error=RuntimeError,
error_msg_regex=(
"Feature had wrong type, expected float_list, found int64_list"
" for sequence feature \"a\""),
),
]
class SequenceExamplesToRecordBatchDecoderTest(parameterized.TestCase):
@parameterized.named_parameters(*_DECODE_CASES)
def test_decode(self, schema_text_proto, sequence_examples_text_proto,
expected):
serialized_sequence_examples = [
text_format.Parse(pbtxt,
tf.train.SequenceExample()).SerializeToString()
for pbtxt in sequence_examples_text_proto
]
serialized_schema = None
if schema_text_proto is not None:
serialized_schema = text_format.Parse(
schema_text_proto, schema_pb2.Schema()).SerializeToString()
if serialized_schema:
coder = sequence_example_coder.SequenceExamplesToRecordBatchDecoder(
_TEST_SEQUENCE_COLUMN_NAME,
serialized_schema)
else:
coder = sequence_example_coder.SequenceExamplesToRecordBatchDecoder(
_TEST_SEQUENCE_COLUMN_NAME)
result = coder.DecodeBatch(serialized_sequence_examples)
self.assertIsInstance(result, pa.RecordBatch)
self.assertTrue(
result.equals(expected),
"actual: {}\n expected:{}".format(result, expected))
if serialized_schema is not None:
self.assertTrue(coder.ArrowSchema().equals(result.schema))
@parameterized.named_parameters(*_INVALID_INPUT_CASES)
def test_invalid_input(self, schema_text_proto, sequence_examples_text_proto,
error, error_msg_regex):
serialized_sequence_examples = [
text_format.Parse(pbtxt,
tf.train.SequenceExample()).SerializeToString()
for pbtxt in sequence_examples_text_proto
]
serialized_schema = None
if schema_text_proto is not None:
serialized_schema = text_format.Parse(
schema_text_proto, schema_pb2.Schema()).SerializeToString()
if serialized_schema:
coder = sequence_example_coder.SequenceExamplesToRecordBatchDecoder(
_TEST_SEQUENCE_COLUMN_NAME, serialized_schema)
else:
coder = sequence_example_coder.SequenceExamplesToRecordBatchDecoder(
_TEST_SEQUENCE_COLUMN_NAME)
with self.assertRaisesRegex(error, error_msg_regex):
coder.DecodeBatch(serialized_sequence_examples)
def test_sequence_feature_column_name_not_struct_in_schema(self):
schema_text_proto = """
feature {
name: "##SEQUENCE##"
type: INT
}
"""
serialized_schema = text_format.Parse(
schema_text_proto, schema_pb2.Schema()).SerializeToString()
error_msg_regex = (
"Found a feature in the schema with the sequence_feature_column_name "
r"\(i.e., ##SEQUENCE##\) that is not a struct.*")
with self.assertRaisesRegex(RuntimeError, error_msg_regex):
sequence_example_coder.SequenceExamplesToRecordBatchDecoder(
_TEST_SEQUENCE_COLUMN_NAME, serialized_schema)
if __name__ == "__main__":
absltest.main()
| ["[email protected]"] | |
75ad74f6d506bc120e99ee767e0028402f3c0bfa | 786027545626c24486753351d6e19093b261cd7d | /ghidra9.2.1_pyi/ghidra/app/util/bin/format/elf/ElfStringTable.pyi | eb2f888abcb4090c19e2aa10f46f1b875549534a | [
"MIT"
] | permissive | kohnakagawa/ghidra_scripts | 51cede1874ef2b1fed901b802316449b4bf25661 | 5afed1234a7266c0624ec445133280993077c376 | refs/heads/main | 2023-02-22T21:59:02.703005 | 2021-03-18T13:31:40 | 2021-03-18T13:31:40 | 338,577,905 | 14 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,508 | pyi |
import ghidra.app.util.bin
import ghidra.app.util.bin.format
import ghidra.app.util.bin.format.elf
import ghidra.program.model.data
import java.lang
class ElfStringTable(object, ghidra.app.util.bin.format.elf.ElfFileSection):
ASCII: ghidra.program.model.data.DataType = char
BYTE: ghidra.program.model.data.DataType = byte
DWORD: ghidra.program.model.data.DataType = dword
IBO32: ghidra.program.model.data.DataType = ImageBaseOffset32
POINTER: ghidra.program.model.data.DataType = pointer
QWORD: ghidra.program.model.data.DataType = qword
STRING: ghidra.program.model.data.DataType = string
UTF16: ghidra.program.model.data.DataType = unicode
UTF8: ghidra.program.model.data.DataType = string-utf8
VOID: ghidra.program.model.data.DataType = void
WORD: ghidra.program.model.data.DataType = word
def __init__(self):
"""
DO NOT USE THIS CONSTRUCTOR, USE create*(GenericFactory ...) FACTORY METHODS INSTEAD.
"""
...
@staticmethod
def createElfStringTable(reader: ghidra.app.util.bin.format.FactoryBundledWithBinaryReader, header: ghidra.app.util.bin.format.elf.ElfHeader, stringTableSection: ghidra.app.util.bin.format.elf.ElfSectionHeader, fileOffset: long, addrOffset: long, length: long) -> ghidra.app.util.bin.format.elf.ElfStringTable:
"""
Create and parse an Elf string table
@param reader the binary reader containing the elf string table
@param header elf header
@param stringTableSection string table section header or null if associated with a dynamic table entry
@param fileOffset symbol table file offset
@param addrOffset memory address of symbol table (should already be adjusted for prelink)
@param length length of symbol table in bytes of -1 if unknown
@return Elf string table object
@throws IOException
"""
...
def equals(self, __a0: object) -> bool: ...
def getAddressOffset(self) -> long: ...
def getClass(self) -> java.lang.Class: ...
def getEntrySize(self) -> int: ...
def getFileOffset(self) -> long: ...
def getLength(self) -> long: ...
def getTableSectionHeader(self) -> ghidra.app.util.bin.format.elf.ElfSectionHeader:
"""
Get section header which corresponds to this table, or null
if only associated with a dynamic table entry
@return string table section header or null
"""
...
def hashCode(self) -> int: ...
def notify(self) -> None: ...
def notifyAll(self) -> None: ...
def readString(self, reader: ghidra.app.util.bin.BinaryReader, stringOffset: long) -> unicode:
"""
Read string from table at specified relative table offset
@param reader
@param stringOffset table relative string offset
@return string or null on error
"""
...
def toDataType(self) -> ghidra.program.model.data.DataType: ...
def toString(self) -> unicode: ...
@overload
def wait(self) -> None: ...
@overload
def wait(self, __a0: long) -> None: ...
@overload
def wait(self, __a0: long, __a1: int) -> None: ...
@property
def addressOffset(self) -> long: ...
@property
def entrySize(self) -> int: ...
@property
def fileOffset(self) -> long: ...
@property
def length(self) -> long: ...
@property
def tableSectionHeader(self) -> ghidra.app.util.bin.format.elf.ElfSectionHeader: ...
| ["[email protected]"] | |
b082b3c7ad5a4453378668650eaa7575368881ca | bbbc0e8660cf32334ff5156d974c59e9936b5fa2 | /aliyun-python-sdk-workbench-inner/aliyunsdkworkbench_inner/request/v20210121/InnerGetSolutionRequest.py | d5c15d8ea09248a0672e2c3c739550bddb972c1d | [
"Apache-2.0"
] | permissive | stevenQiang/aliyun-openapi-python-sdk | a201062ec1df0df44e4f540e1ae11ef35ce5eb12 | a7e33abb4af88674be42d45db821a173c3a9dc60 | refs/heads/master | 2023-06-21T07:29:18.142415 | 2021-07-13T07:52:54 | 2021-07-13T07:52:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,514 | py |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class InnerGetSolutionRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Workbench-inner', '2021-01-21', 'InnerGetSolution')
self.set_method('POST')
def get_CurrentOrgId(self):
return self.get_query_params().get('CurrentOrgId')
def set_CurrentOrgId(self,CurrentOrgId):
self.add_query_param('CurrentOrgId',CurrentOrgId)
def get_SolutionId(self):
return self.get_query_params().get('SolutionId')
def set_SolutionId(self,SolutionId):
self.add_query_param('SolutionId',SolutionId)
def get_UserId(self):
return self.get_query_params().get('UserId')
def set_UserId(self,UserId):
        self.add_query_param('UserId',UserId)
| ["[email protected]"] | |
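A hedged usage sketch for the request class above, following the usual aliyun-python-sdk-core round trip; the credentials, region, and IDs are placeholders:

```python
# Sketch: standard AcsClient call pattern; all values are placeholders.
from aliyunsdkcore.client import AcsClient

client = AcsClient("<access-key-id>", "<access-key-secret>", "cn-hangzhou")
request = InnerGetSolutionRequest()
request.set_CurrentOrgId("org-123")   # hypothetical ids
request.set_SolutionId("sol-456")
request.set_UserId("user-789")
response = client.do_action_with_exception(request)
print(response)
```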
ad4b2dc8435b7049cde996a489f1a5cd634e4b65 | 61050d0d7f0c0a60474e4e85d30be4e5ea7c6b04 | /vnf/scripting/itask.py | 7ce7e28ed7a4fa2376510dad889f520aecf393de | [] | no_license | danse-inelastic/vnf | 8173f06f32b4a2fa2b71fddfe0fecf9c19e05e9a | be989448577f14f424aca4ce852c7198304ca57b | refs/heads/master | 2021-01-22T01:06:00.294100 | 2015-05-02T23:25:45 | 2015-05-02T23:25:45 | 34,947,878 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,037 | py | # -*- Python -*-
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Jiao Lin
# California Institute of Technology
# (C) 2006-2011 All Rights Reserved
#
# {LicenseText}
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
def waitForITaskToFinish(taskid, credential):
from . import run
import time
while 1:
taskstatus = run(
actor='itask',
routine='getStatus',
id = taskid,
credential = credential,
)
# eval to a dictionary
taskstatus = eval(taskstatus)
# check status
if taskstatus['state'] in ['finished', 'failed', 'cancelled']:
break
time.sleep(5)
continue
if taskstatus['state'] != 'finished':
raise RuntimeError, "itask %s %s" % (taskid, taskstatus['state'])
return
# version
__id__ = "$Id$"
# End of file
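A minimal usage sketch for the polling helper above; the task id and the credential structure are placeholders, since the real shape of a vnf credential is not shown here:

```python
# Sketch only: 'credential' is whatever vnf.scripting.run expects; values invented.
from vnf.scripting.itask import waitForITaskToFinish

waitForITaskToFinish("12345", credential={"username": "demo", "ticket": "<ticket>"})
```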
| [
"[email protected]"
] | |
6d78cb4e690e54c41470accfc57a37d54d557d9b | 7002919119e429faac734099bb18d75047e49673 | /data/course/migrations/0002_auto_20201012_1724.py | 7a34d9c9167def4aad8410dc81a0960926d33509 | [] | no_license | Polydelta-ai/competency_model_prototype | 47f86353a4157f68f8e3a6fd961223bd81fa3c23 | 8162ec6df5679143e3f3165a3779c1619f9c925f | refs/heads/main | 2022-12-30T19:21:41.044344 | 2020-10-14T11:09:37 | 2020-10-14T11:09:37 | 301,813,276 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,300 | py | # Generated by Django 3.0 on 2020-10-12 21:24
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('course', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='course',
name='groups',
),
migrations.RemoveField(
model_name='course',
name='objectives',
),
migrations.RemoveField(
model_name='course',
name='overview',
),
migrations.RemoveField(
model_name='course',
name='target_audience',
),
migrations.AddField(
model_name='course',
name='bureau',
field=models.CharField(max_length=256, null=True),
),
migrations.AddField(
model_name='course',
name='category',
field=models.CharField(max_length=256, null=True),
),
migrations.AddField(
model_name='course',
name='duration',
field=models.CharField(max_length=256, null=True),
),
migrations.AddField(
model_name='course',
name='type',
field=models.CharField(max_length=256, null=True),
),
]
| [
"[email protected]"
] | |
b7044d0662504d59b77b29979428fa9a0beec204 | fbbe424559f64e9a94116a07eaaa555a01b0a7bb | /Tensorflow_Pandas_Numpy/source3.6/tensorflow/contrib/rnn/python/ops/gru_ops.py | 75536e3f5f8cbe44231f19d4d455537e654f7a08 | [
"MIT"
] | permissive | ryfeus/lambda-packs | 6544adb4dec19b8e71d75c24d8ed789b785b0369 | cabf6e4f1970dc14302f87414f170de19944bac2 | refs/heads/master | 2022-12-07T16:18:52.475504 | 2022-11-29T13:35:35 | 2022-11-29T13:35:35 | 71,386,735 | 1,283 | 263 | MIT | 2022-11-26T05:02:14 | 2016-10-19T18:22:39 | Python | UTF-8 | Python | false | false | 7,762 | py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Python wrapper for the Block GRU Op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.rnn.ops import gen_gru_ops
from tensorflow.contrib.util import loader
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import rnn_cell_impl
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.platform import resource_loader
from tensorflow.python.util.deprecation import deprecated_args
_gru_ops_so = loader.load_op_library(
resource_loader.get_path_to_datafile("_gru_ops.so"))
@ops.RegisterGradient("GRUBlockCell")
def _GRUBlockCellGrad(op, *grad):
r"""Gradient for GRUBlockCell.
Args:
op: Op for which the gradient is defined.
*grad: Gradients of the optimization function wrt output
for the Op.
Returns:
d_x: Gradients wrt to x
d_h: Gradients wrt to h
d_w_ru: Gradients wrt to w_ru
d_w_c: Gradients wrt to w_c
d_b_ru: Gradients wrt to b_ru
d_b_c: Gradients wrt to b_c
Mathematics behind the Gradients below:
```
d_c_bar = d_h \circ (1-u) \circ (1-c \circ c)
d_u_bar = d_h \circ (h-c) \circ u \circ (1-u)
d_r_bar_u_bar = [d_r_bar d_u_bar]
[d_x_component_1 d_h_prev_component_1] = d_r_bar_u_bar * w_ru^T
[d_x_component_2 d_h_prevr] = d_c_bar * w_c^T
d_x = d_x_component_1 + d_x_component_2
d_h_prev = d_h_prev_component_1 + d_h_prevr \circ r + u
```
Below calculation is performed in the python wrapper for the Gradients
(not in the gradient kernel.)
```
d_w_ru = x_h_prevr^T * d_c_bar
d_w_c = x_h_prev^T * d_r_bar_u_bar
d_b_ru = sum of d_r_bar_u_bar along axis = 0
d_b_c = sum of d_c_bar along axis = 0
```
"""
x, h_prev, w_ru, w_c, b_ru, b_c = op.inputs
r, u, c, _ = op.outputs
_, _, _, d_h = grad
d_x, d_h_prev, d_c_bar, d_r_bar_u_bar = gen_gru_ops.gru_block_cell_grad(
x, h_prev, w_ru, w_c, b_ru, b_c, r, u, c, d_h)
x_h_prev = array_ops.concat([x, h_prev], 1)
d_w_ru = math_ops.matmul(x_h_prev, d_r_bar_u_bar, transpose_a=True)
d_b_ru = nn_ops.bias_add_grad(d_r_bar_u_bar)
x_h_prevr = array_ops.concat([x, h_prev * r], 1)
d_w_c = math_ops.matmul(x_h_prevr, d_c_bar, transpose_a=True)
d_b_c = nn_ops.bias_add_grad(d_c_bar)
return d_x, d_h_prev, d_w_ru, d_w_c, d_b_ru, d_b_c
class GRUBlockCell(rnn_cell_impl.RNNCell):
r"""Block GRU cell implementation.
Deprecated: use GRUBlockCellV2 instead.
The implementation is based on: http://arxiv.org/abs/1406.1078
Computes the GRU cell forward propagation for 1 time step.
This kernel op implements the following mathematical equations:
Biases are initialized with:
* `b_ru` - constant_initializer(1.0)
* `b_c` - constant_initializer(0.0)
```
x_h_prev = [x, h_prev]
[r_bar u_bar] = x_h_prev * w_ru + b_ru
r = sigmoid(r_bar)
u = sigmoid(u_bar)
h_prevr = h_prev \circ r
x_h_prevr = [x h_prevr]
c_bar = x_h_prevr * w_c + b_c
c = tanh(c_bar)
h = (1-u) \circ c + u \circ h_prev
```
"""
@deprecated_args(None, "cell_size is deprecated, use num_units instead",
"cell_size")
def __init__(self, num_units=None, cell_size=None):
"""Initialize the Block GRU cell.
Args:
num_units: int, The number of units in the GRU cell.
cell_size: int, The old (deprecated) name for `num_units`.
Raises:
ValueError: if both cell_size and num_units are not None;
or both are None.
"""
if (cell_size is None) == (num_units is None):
raise ValueError("Exactly one of num_units or cell_size must be provided.")
if num_units is None:
num_units = cell_size
self._cell_size = num_units
@property
def state_size(self):
return self._cell_size
@property
def output_size(self):
return self._cell_size
def __call__(self, x, h_prev, scope=None):
"""GRU cell."""
with vs.variable_scope(scope or type(self).__name__):
input_size = x.get_shape().with_rank(2)[1]
# Check if the input size exist.
if input_size is None:
raise ValueError("Expecting input_size to be set.")
# Check cell_size == state_size from h_prev.
cell_size = h_prev.get_shape().with_rank(2)[1]
if cell_size != self._cell_size:
raise ValueError("Shape of h_prev[1] incorrect: cell_size %i vs %s" %
(self._cell_size, cell_size))
if cell_size is None:
raise ValueError("cell_size from `h_prev` should not be None.")
w_ru = vs.get_variable("w_ru", [input_size + self._cell_size,
self._cell_size * 2])
b_ru = vs.get_variable(
"b_ru", [self._cell_size * 2],
initializer=init_ops.constant_initializer(1.0))
w_c = vs.get_variable("w_c",
[input_size + self._cell_size, self._cell_size])
b_c = vs.get_variable(
"b_c", [self._cell_size],
initializer=init_ops.constant_initializer(0.0))
_gru_block_cell = gen_gru_ops.gru_block_cell # pylint: disable=invalid-name
_, _, _, new_h = _gru_block_cell(
x=x, h_prev=h_prev, w_ru=w_ru, w_c=w_c, b_ru=b_ru, b_c=b_c)
return new_h, new_h
class GRUBlockCellV2(GRUBlockCell):
"""Temporary GRUBlockCell impl with a different variable naming scheme.
Only differs from GRUBlockCell by variable names.
"""
def __call__(self, x, h_prev, scope=None):
"""GRU cell."""
with vs.variable_scope(scope or type(self).__name__):
input_size = x.get_shape().with_rank(2)[1]
# Check if the input size exist.
if input_size is None:
raise ValueError("Expecting input_size to be set.")
# Check cell_size == state_size from h_prev.
cell_size = h_prev.get_shape().with_rank(2)[1]
if cell_size != self._cell_size:
raise ValueError("Shape of h_prev[1] incorrect: cell_size %i vs %s" %
(self._cell_size, cell_size))
if cell_size is None:
raise ValueError("cell_size from `h_prev` should not be None.")
with vs.variable_scope("gates"):
w_ru = vs.get_variable("kernel", [input_size + self._cell_size,
self._cell_size * 2])
b_ru = vs.get_variable(
"bias", [self._cell_size * 2],
initializer=init_ops.constant_initializer(1.0))
with vs.variable_scope("candidate"):
w_c = vs.get_variable("kernel",
[input_size + self._cell_size, self._cell_size])
b_c = vs.get_variable(
"bias", [self._cell_size],
initializer=init_ops.constant_initializer(0.0))
_gru_block_cell = gen_gru_ops.gru_block_cell # pylint: disable=invalid-name
_, _, _, new_h = _gru_block_cell(
x=x, h_prev=h_prev, w_ru=w_ru, w_c=w_c, b_ru=b_ru, b_c=b_c)
return new_h, new_h
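A hedged usage sketch for the cells above, assuming a TensorFlow 1.x graph environment (this is contrib-era code); shapes and sizes are arbitrary:

```python
# Sketch, assuming TF 1.x: build a dynamic RNN over the block GRU cell.
import tensorflow as tf
from tensorflow.contrib.rnn.python.ops.gru_ops import GRUBlockCell

inputs = tf.placeholder(tf.float32, [None, 20, 8])  # [batch, time, features]
cell = GRUBlockCell(num_units=16)
outputs, state = tf.nn.dynamic_rnn(cell, inputs, dtype=tf.float32)
```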
| ["[email protected]"] | |
8d61d262ed2d32d78b76380c88292faa89d7ea96 | d125c002a6447c3f14022b786b07712a7f5b4974 | /tests/bugs/core_0859_test.py | e3bbec9126ee000bf0fd91d4a9fcfd15bf56912b | [
"MIT"
] | permissive | FirebirdSQL/firebird-qa | 89d5b0035071f9f69d1c869997afff60c005fca9 | cae18186f8c31511a7f68248b20f03be2f0b97c6 | refs/heads/master | 2023-08-03T02:14:36.302876 | 2023-07-31T23:02:56 | 2023-07-31T23:02:56 | 295,681,819 | 3 | 2 | MIT | 2023-06-16T10:05:55 | 2020-09-15T09:41:22 | Python | UTF-8 | Python | false | false | 1,291 | py |
#coding:utf-8
"""
ID: issue-1249
ISSUE: 1249
TITLE: Sorting is allowed for blobs and arrays
DESCRIPTION:
NOTES:
For now we test that such operations raise an exception, as we restored the legacy
behavior until we're able to implement DISTINCT for blobs properly,
JIRA: CORE-859
FBTEST: bugs.core_0859
"""
import pytest
from firebird.qa import *
from firebird.driver import DatabaseError
init_script = """create table t (i integer, b blob sub_type text, a integer [5]);
"""
db = db_factory(init=init_script)
act = python_act('db')
@pytest.mark.version('>=3')
def test_1(act: Action):
with act.db.connect() as con:
c = con.cursor()
# Use with to free the Statement immediately
with c.prepare('select * from t order by b'):
pass
with pytest.raises(DatabaseError, match='.*Datatype ARRAY is not supported for sorting operation.*'):
c.prepare('select * from t order by a')
# Use with to free the Statement immediately
with c.prepare('select b, count(*) from t group by b'):
pass
with pytest.raises(DatabaseError, match='.*Datatype ARRAY is not supported for sorting operation.*'):
c.prepare('select a, count(*) from t group by a')
# Passed.
| ["[email protected]"] | |
9eab2697aa461aa64c7d4d36bb6a56ea7a48521d | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_brands.py | 764cf25e586cb39ea7275ee9a182cbf0ea77c8e2 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 218 | py |
#calss header
class _BRANDS():
def __init__(self,):
self.name = "BRANDS"
self.definitions = brand
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['brand']
| [
"[email protected]"
] | |
f953387192ad4b6709f38021023a8739e849e939 | 3ed70536d4d06b2ac43b64976ddc43a5d7025b31 | /uri2253.py | 06119c5d13870da281433a9120affdffd464344a | [] | no_license | LuisHenrique01/Questoes_URI | 7f1d397e3cd055349939184603eb86cb4bf43d65 | 35c8e77eb7cd9da96df4268b5d71f3ad87446c89 | refs/heads/master | 2020-07-22T08:12:12.700484 | 2020-04-12T17:39:29 | 2020-04-12T17:39:29 | 207,126,339 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 924 | py | def main():
while True:
try:
senha = input()
num = False
ma = False
mi = False
eh_l = True
if len(senha) < 6 or len(senha) > 32:
print('Senha invalida.')
else:
for i in range(len(senha)):
if ord(senha[i]) > 47 and ord(senha[i]) < 58:
num = True
elif ord(senha[i]) > 64 and ord(senha[i]) < 91:
ma = True
elif ord(senha[i]) > 96 and ord(senha[i]) < 123:
mi = True
else:
eh_l = False
if num and mi and ma and eh_l:
print('Senha valida.')
else:
print('Senha invalida.')
except EOFError:
break
if __name__ == "__main__":
main() | [
"[email protected]"
] | |
32e255b02a850c3797fb6ce4ce7aaf522117092a | 61bc31419d212e9be092e4b8f25c86126023b1bf | /main.py | 69bfe62d27a639f241de78013b655a034d75df48 | [] | no_license | jmosbacher/SRMControl | 9054981de22cce4158c4d9da5083064e5cb43646 | 08738eb07035def95dbd3da75e7d4f0df07614d9 | refs/heads/master | 2020-03-23T15:01:42.449894 | 2019-03-01T13:05:19 | 2019-03-01T13:05:19 | 141,715,681 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 8,762 | py | from traits.api import *
from traitsui.api import *
import os
import cfg
from devices import BaseDevice
from experiments import BaseExperiment
from manual_controls import BaseManualControl
from global_states import BaseGlobalState
from saving import CanSaveMixin, SaveHandler
from microscopes import SuperResolutionMicroscope, BaseMicroscope
#from experiments import BaseExperiment
from experiment_manager import ExperimentManager
from device_manager import DeviceManager
from control_manager import ControlManager
from global_state_manager import GlobalStateManager
from log_viewer import LogStream
from traitsui.qt4.tree_editor \
import NewAction, CopyAction, CutAction, \
PasteAction, DeleteAction, RenameAction
try:
import cPickle as pickle
except:
import pickle
from pyface.api import ImageResource
import cfg
import random
import logging
GLOBALS = cfg.Globals()
class MainApp(CanSaveMixin):
microscope = Instance(BaseMicroscope)
selected = Instance(HasTraits,transient=True)
log = Instance(LogStream,transient=True)
status_str = Str(GLOBALS.STATUS,transient=True)
save_load_message = Str('')
dirty = Property()
#dirty = Constant(cfg.status.dirty,transient=True)
save_action = Action(name='Save', action='save')
toggle_autosave = Action(name='Autosave', action='toggle_autosave',style='toggle', checked_when='handler.autosave', )
save_as_action = Action(name='Save as', action='saveAs')
load_action = Action(name='Load file', action='load')
traits_view = View(
HSplit(
VSplit(
Item(name='microscope', show_label=False,editor=TreeEditor(
nodes=[
TreeNode(node_for=[SuperResolutionMicroscope],
auto_open=True,
children='',
label='',
# add=[Project],
menu = Menu(RenameAction,)
),
TreeNode(node_for=[SuperResolutionMicroscope],
auto_open=True,
children='managers',
label='name',
icon_group='microscope100.png',
icon_open='microscope100.png',
icon_path=GLOBALS.ICON_DIR,
# add=[Project],
menu=Menu(RenameAction,)
),
TreeNode(node_for=[ExperimentManager],
auto_open=True,
children='experiments',
icon_group='experiments.png',
icon_open='experiments.png',
icon_path=GLOBALS.ICON_DIR,
label='=Experiment Manager',
# add=[Project],
menu=Menu(RenameAction, )
),
TreeNode(node_for=[BaseExperiment],
auto_open=True,
children='',
label='name',
icon_item='graph_node.png',
icon_path=GLOBALS.ICON_DIR,
# add=[Project],
menu=Menu(RenameAction, )
),
TreeNode(node_for=[ControlManager],
auto_open=True,
children='controls',
label='name',
icon_group='controls.png',
icon_open='controls.png',
add=[],
menu=Menu(RenameAction,)
),
TreeNode(node_for=[BaseManualControl],
auto_open=True,
children='',
label='name',
icon_item='graph_node.png',
icon_path=GLOBALS.ICON_DIR,
# add=[Project],
menu=Menu(RenameAction,)
),
TreeNode(node_for=[DeviceManager],
auto_open=True,
children='devices',
label='name',
icon_open='devices.png',
icon_group='devices.png',
icon_path=GLOBALS.ICON_DIR,
# add=[Project],
menu=Menu(RenameAction,)
),
TreeNode(node_for=[BaseDevice],
auto_open=True,
children='',
label='name',
icon_item='graph_node.png',
icon_path=GLOBALS.ICON_DIR,
# add=[Project],
menu=Menu(RenameAction,)
),
TreeNode(node_for=[GlobalStateManager],
auto_open=True,
children='global_states',
label='name',
icon_open='global.png',
icon_group='global.png',
icon_path=GLOBALS.ICON_DIR,
# add=[Project],
menu=Menu(RenameAction, )
),
TreeNode(node_for=[BaseGlobalState],
auto_open=True,
children='',
label='name',
icon_item='graph_node.png',
icon_path=GLOBALS.ICON_DIR,
# add=[Project],
menu=Menu(RenameAction, )
),
],
hide_root=True,
editable=False,
selected='selected')
, height=0.7),
Item(name='log', show_label=False, style='custom', height=0.3),
),
Item(name='selected', show_label=False,editor=InstanceEditor(),
style='custom',width=0.85),
),
width=1200,
height=700,
icon=ImageResource(os.path.join(GLOBALS.ICON_DIR,'microscope100.png')),
resizable=True,
title='Super Resolution Microscope',
handler = SaveHandler,
menubar=MenuBar(
Menu(save_action, save_as_action, load_action, toggle_autosave,
name='File'),
# Menu(exp_int_tool, comp_tool, comp_int_tool, plot_tool, fit_tool,
# name='Tools'),
),
statusbar=[StatusItem(name='save_load_message', width=0.5),
StatusItem(name='blank', width=0.35),
#StatusItem(name='label', width=0.05),
StatusItem(name='status_str', width=0.15),
],
)
blank = Str(' ')
label = Str('Status: ')
def _get_dirty(self):
return GLOBALS.DIRTY
def _set_dirty(self, val):
GLOBALS.DIRTY = val
def _microscope_default(self):
return SuperResolutionMicroscope()
def _log_default(self):
log = LogStream()
log.config_logger(__name__)
return log
def validate(self):
#if os.path.isfile(self.filepath):
#return True, ''
#else:
#return False, 'No such file {}'.format(self.filepath)
return True, ''
def save(self):
self.dirty = True
self.save_load_message = 'Saving to {}...'.format(self.filepath)
with open(self.filepath,'wb') as f:
pickle.dump(self.microscope,f)
self.dirty = False
self.save_load_message = 'File saved successfully.'
if self.dirty:
self.save_load_message = 'Could not save file.'
def load(self):
self.dirty = True
self.save_load_message = 'Loading from {}...'.format(self.filepath)
with open(self.filepath,'rb') as f:
self.microscope = pickle.load(f)
self.dirty = False
self.save_load_message = 'Successfully loaded file.'
if self.dirty:
self.save_load_message = 'Could not load file.'
if __name__ == '__main__':
app = MainApp()
app.configure_traits()
| ["[email protected]"] | |
056a5a7fd69c9b3926985847bd9c38c278137345 | c885e1ca19bb24f167f5284c020673246a0fc260 | /synthesis/experiments/cnf_batch.py | f60be95380335a8023939af372da650722d5b95b | [] | no_license | suomela/counting | 6c800a2c9c56c44d0b8aac8117c79c78e6b92d8b | 5a9f96f6cabdbb54d2cd24da014aff31f4baf05d | refs/heads/master | 2022-02-25T23:07:03.078165 | 2022-01-27T19:08:49 | 2022-01-27T19:08:49 | 9,413,189 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 680 | py | #! /usr/bin/env python
import os.path
import sys
PATH = sys.argv[1]
SOLVER = sys.argv[5]
SEED = int(sys.argv[4])
RANDOM_SEEDS = int(sys.argv[3])
PREFIX_PATH = sys.argv[2]
from batch import *
def cmd_out(**params):
prefix = "{path}/n{nodes}-s{states}-f{faulty}-t{time}-{cyclicity}-{id}-seed{seed}".format(path=PREFIX_PATH, **params)
cmd = "{path} -solver={solver} -outdir={prefix} -nodes={nodes} -states={states} -time={time} -faulty={faulty} -{cyclicity} -seed={seed}".format(path=PATH, solver=SOLVER, prefix=prefix, **params)
return cmd
import sys
params_list = read_list(sys.stdin)
output_batch(params_list, cmd_out, sys.stdout, RANDOM_SEEDS, master_seed=SEED)
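Given the argv layout above (binary path, output prefix, random-seed count, master seed, solver name), an invocation would look roughly like the following; the paths and solver name are invented:

```python
# Example invocation (placeholders), reading parameter sets from stdin and
# writing one generated command line per job to stdout:
#
#   ./cnf_batch.py ./verify-bin ./out 10 12345 my-sat-solver < params.txt > jobs.txt
```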
| ["[email protected]"] | |
34d2fc4a840a02f8646dcc1a23a0e36187588c06 | ffd5e689f88c49ab7af3554c22dc0c36301084fa | /longest_common_subsequence.py | aa5fed409ca1f399f6f93be3e79b73ae04292dfd | [] | no_license | ellismckenzielee/codewars-python | 1710e6f0499047139479de386927c7dbd5f1cdf6 | af3f4b4534798a58115d0565730aae28ce87437e | refs/heads/master | 2023-08-09T13:38:40.964141 | 2023-08-01T14:45:22 | 2023-08-01T14:45:22 | 168,981,376 | 45 | 18 | null | null | null | null | UTF-8 | Python | false | false | 743 | py | #longest common subsequence kata
#https://www.codewars.com/kata/52756e5ad454534f220001ef
def lcs(x, y):
    # Keep only the characters that also occur in the other string.
    x = list(filter(lambda x1: x1 in y, x))
    y = list(filter(lambda x1: x1 in x, y))
    if [] in (x, y):
        return ''
    elif x == y:
        return ''.join(x)
    else:
        both = [x, y]
        lens = [len(x), len(y)]
        mindex = lens.index(min(lens[0], lens[1]))  # index of the shorter list
        maxdex = lens.index(max(lens[0], lens[1]))  # index of the longer list
        output = ''
        # Greedily scan the shorter list, consuming matches from the longer one.
        for letter in both[mindex]:
            try:
                index = both[maxdex].index(letter)
                both[maxdex] = both[maxdex][index:]
                output += letter
            except ValueError:
                continue
        return output
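Note that the solution above is a greedy heuristic. For comparison, the textbook dynamic-programming LCS, which is not part of the original kata submission:

```python
# Reference DP implementation of longest common subsequence.
def lcs_dp(x: str, y: str) -> str:
    m, n = len(x), len(y)
    # dp[i][j] holds the LCS of x[:i] and y[:j].
    dp = [[""] * (n + 1) for _ in range(m + 1)]
    for i in range(1, m + 1):
        for j in range(1, n + 1):
            if x[i - 1] == y[j - 1]:
                dp[i][j] = dp[i - 1][j - 1] + x[i - 1]
            else:
                dp[i][j] = max(dp[i][j - 1], dp[i - 1][j], key=len)
    return dp[m][n]

assert lcs_dp("abcdef", "abc") == "abc"
```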
| [
"[email protected]"
] | |
e5b3b507e9fa57753b9a2e6d8e08d05cc8cbe038 | ce20062fedae07a1f0ea00a7e5ab0b86e05ebe69 | /pytests_stale/cbas/cbas_sql++_composability.py | 0c58a774b01ed031c02d263c49c8a3c878c82beb | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | AnithaKuberan/TAF | 256f9ee850275be5461e4d19a671c9dd19edb9f5 | 9824c6a4f1680c320ab065e23c720ffa92d530d9 | refs/heads/master | 2022-03-18T16:17:08.876692 | 2022-03-03T09:57:52 | 2022-03-03T09:57:52 | 206,016,123 | 0 | 0 | Apache-2.0 | 2019-09-03T07:30:18 | 2019-09-03T07:30:18 | null | UTF-8 | Python | false | false | 3,323 | py | '''
Created on Mar 8, 2018
@author: riteshagarwal
'''
'''
Created on Jan 4, 2018
@author: riteshagarwal
'''
import json
import os
import time
from cbas_base import CBASBaseTest, TestInputSingleton
from lib.memcached.helper.data_helper import MemcachedClientHelper
from membase.api.rest_client import RestConnection
from remote.remote_util import RemoteMachineShellConnection
class SQLPP_Composability_CBAS(CBASBaseTest):
def setUp(self):
self.input = TestInputSingleton.input
self.input.test_params.update({"default_bucket":False})
super(SQLPP_Composability_CBAS, self).setUp()
if "add_all_cbas_nodes" in self.input.test_params and self.input.test_params["add_all_cbas_nodes"] and len(self.cluster.cbas_nodes) > 0:
self.otpNodes.append(self.add_all_nodes_then_rebalance(self.cluster.cbas_nodes))
self.shell = RemoteMachineShellConnection(self.cbas_node)
def tearDown(self):
super(SQLPP_Composability_CBAS, self).tearDown()
def test_composability(self):
bucket_username = "cbadminbucket"
bucket_password = "password"
url = 'http://{0}:8095/analytics/service'.format(self.cbas_node.ip)
files_dict={'union':['non_unary_subplan_01_1_ddl.sqlpp',
'non_unary_subplan_01.2.update.sqlpp',
'non_unary_subplan_01.3.query.sqlpp',
'non_unary_subplan_01.4.query.sqlpp',
'non_unary_subplan_01.5.query.sqlpp',
'non_unary_subplan_01.6.query.sqlpp'],
'inner-join':['non_unary_subplan_02.1.ddl.sqlpp',
'non_unary_subplan_02.2.update.sqlpp',
'non_unary_subplan_02.3.query.sqlpp',
'non_unary_subplan_02.4.query.sqlpp',
'non_unary_subplan_02.5.query.sqlpp',
'non_unary_subplan_02.6.query.sqlpp'],
'outer-join':[
]
}
for key in files_dict.keys():
for query_file in files_dict[key]:
cmd = 'curl -s --data pretty=true --data-urlencode "statement@'+os.getcwd()+'/b/resources/non_unary_subplan_01/%s" '%query_file + url + " -u " + bucket_username + ":" + bucket_password
output, error = self.shell.execute_command(cmd)
response = ""
for line in output:
response = response + line
response = json.loads(response)
self.log.info(response)
if "errors" in response:
errors = response["errors"]
else:
errors = None
if "results" in response:
results = response["results"]
else:
results = None
if "handle" in response:
handle = response["handle"]
else:
handle = None
self.assertTrue(response["status"] == "success")
        self.shell.disconnect()
| ["[email protected]"] | |
eef1da102ec3c444d29d4f92f426ecbc184f2898 | 1d928c3f90d4a0a9a3919a804597aa0a4aab19a3 | /python/core/2015/12/hue.py | 77672c9aaf56db21fbbfaa6f7afeb2d80e53470e | [] | no_license | rosoareslv/SED99 | d8b2ff5811e7f0ffc59be066a5a0349a92cbb845 | a062c118f12b93172e31e8ca115ce3f871b64461 | refs/heads/main | 2023-02-22T21:59:02.703005 | 2021-01-28T19:40:51 | 2021-01-28T19:40:51 | 306,497,459 | 1 | 1 | null | 2020-11-24T20:56:18 | 2020-10-23T01:18:07 | null | UTF-8 | Python | false | false | 7,899 | py | """
homeassistant.components.light.hue
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Support for Hue lights.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/light.hue/
"""
import json
import logging
import os
import socket
import random
from datetime import timedelta
from urllib.parse import urlparse
from homeassistant.loader import get_component
import homeassistant.util as util
import homeassistant.util.color as color_util
from homeassistant.const import CONF_HOST, DEVICE_DEFAULT_NAME
from homeassistant.components.light import (
Light, ATTR_BRIGHTNESS, ATTR_XY_COLOR, ATTR_COLOR_TEMP,
ATTR_TRANSITION, ATTR_FLASH, FLASH_LONG, FLASH_SHORT,
ATTR_EFFECT, EFFECT_COLORLOOP, EFFECT_RANDOM, ATTR_RGB_COLOR)
REQUIREMENTS = ['phue==0.8']
MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10)
MIN_TIME_BETWEEN_FORCED_SCANS = timedelta(milliseconds=100)
PHUE_CONFIG_FILE = "phue.conf"
# Map ip to request id for configuring
_CONFIGURING = {}
_LOGGER = logging.getLogger(__name__)
def _find_host_from_config(hass):
""" Attempt to detect host based on existing configuration. """
path = hass.config.path(PHUE_CONFIG_FILE)
if not os.path.isfile(path):
return None
try:
with open(path) as inp:
return next(json.loads(''.join(inp)).keys().__iter__())
except (ValueError, AttributeError, StopIteration):
# ValueError if can't parse as JSON
# AttributeError if JSON value is not a dict
# StopIteration if no keys
return None
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
""" Gets the Hue lights. """
if discovery_info is not None:
host = urlparse(discovery_info[1]).hostname
else:
host = config.get(CONF_HOST, None)
if host is None:
host = _find_host_from_config(hass)
if host is None:
_LOGGER.error('No host found in configuration')
return False
# Only act if we are not already configuring this host
if host in _CONFIGURING:
return
setup_bridge(host, hass, add_devices_callback)
def setup_bridge(host, hass, add_devices_callback):
""" Setup a phue bridge based on host parameter. """
import phue
try:
bridge = phue.Bridge(
host,
config_file_path=hass.config.path(PHUE_CONFIG_FILE))
except ConnectionRefusedError: # Wrong host was given
_LOGGER.exception("Error connecting to the Hue bridge at %s", host)
return
except phue.PhueRegistrationException:
_LOGGER.warning("Connected to Hue at %s but not registered.", host)
request_configuration(host, hass, add_devices_callback)
return
# If we came here and configuring this host, mark as done
if host in _CONFIGURING:
request_id = _CONFIGURING.pop(host)
configurator = get_component('configurator')
configurator.request_done(request_id)
lights = {}
@util.Throttle(MIN_TIME_BETWEEN_SCANS, MIN_TIME_BETWEEN_FORCED_SCANS)
def update_lights():
""" Updates the Hue light objects with latest info from the bridge. """
try:
api = bridge.get_api()
except socket.error:
# socket.error when we cannot reach Hue
_LOGGER.exception("Cannot reach the bridge")
return
api_states = api.get('lights')
if not isinstance(api_states, dict):
_LOGGER.error("Got unexpected result from Hue API")
return
new_lights = []
for light_id, info in api_states.items():
if light_id not in lights:
lights[light_id] = HueLight(int(light_id), info,
bridge, update_lights)
new_lights.append(lights[light_id])
else:
lights[light_id].info = info
if new_lights:
add_devices_callback(new_lights)
update_lights()
def request_configuration(host, hass, add_devices_callback):
""" Request configuration steps from the user. """
configurator = get_component('configurator')
# We got an error if this method is called while we are configuring
if host in _CONFIGURING:
configurator.notify_errors(
_CONFIGURING[host], "Failed to register, please try again.")
return
# pylint: disable=unused-argument
def hue_configuration_callback(data):
""" Actions to do when our configuration callback is called. """
setup_bridge(host, hass, add_devices_callback)
_CONFIGURING[host] = configurator.request_config(
hass, "Philips Hue", hue_configuration_callback,
description=("Press the button on the bridge to register Philips Hue "
"with Home Assistant."),
description_image="/static/images/config_philips_hue.jpg",
submit_caption="I have pressed the button"
)
class HueLight(Light):
""" Represents a Hue light """
def __init__(self, light_id, info, bridge, update_lights):
self.light_id = light_id
self.info = info
self.bridge = bridge
self.update_lights = update_lights
@property
def unique_id(self):
""" Returns the id of this Hue light """
return "{}.{}".format(
self.__class__, self.info.get('uniqueid', self.name))
@property
def name(self):
""" Get the mame of the Hue light. """
return self.info.get('name', DEVICE_DEFAULT_NAME)
@property
def brightness(self):
""" Brightness of this light between 0..255. """
return self.info['state']['bri']
@property
def xy_color(self):
""" XY color value. """
return self.info['state'].get('xy')
@property
def color_temp(self):
""" CT color value. """
return self.info['state'].get('ct')
@property
def is_on(self):
""" True if device is on. """
self.update_lights()
return self.info['state']['reachable'] and self.info['state']['on']
def turn_on(self, **kwargs):
""" Turn the specified or all lights on. """
command = {'on': True}
if ATTR_TRANSITION in kwargs:
command['transitiontime'] = kwargs[ATTR_TRANSITION] * 10
if ATTR_BRIGHTNESS in kwargs:
command['bri'] = kwargs[ATTR_BRIGHTNESS]
if ATTR_XY_COLOR in kwargs:
command['xy'] = kwargs[ATTR_XY_COLOR]
elif ATTR_RGB_COLOR in kwargs:
command['xy'] = color_util.color_RGB_to_xy(
*(int(val) for val in kwargs[ATTR_RGB_COLOR]))
if ATTR_COLOR_TEMP in kwargs:
command['ct'] = kwargs[ATTR_COLOR_TEMP]
flash = kwargs.get(ATTR_FLASH)
if flash == FLASH_LONG:
command['alert'] = 'lselect'
elif flash == FLASH_SHORT:
command['alert'] = 'select'
else:
command['alert'] = 'none'
effect = kwargs.get(ATTR_EFFECT)
if effect == EFFECT_COLORLOOP:
command['effect'] = 'colorloop'
elif effect == EFFECT_RANDOM:
command['hue'] = random.randrange(0, 65535)
command['sat'] = random.randrange(150, 254)
else:
command['effect'] = 'none'
self.bridge.set_light(self.light_id, command)
def turn_off(self, **kwargs):
""" Turn the specified or all lights off. """
command = {'on': False}
if ATTR_TRANSITION in kwargs:
# Transition time is in 1/10th seconds and cannot exceed
# 900 seconds.
command['transitiontime'] = min(9000, kwargs[ATTR_TRANSITION] * 10)
self.bridge.set_light(self.light_id, command)
def update(self):
""" Synchronize state with bridge. """
self.update_lights(no_throttle=True)
| [
"[email protected]"
] | |
d6a628d30cd50796fad7346cedd8050cae518409 | f87f51ec4d9353bc3836e22ac4a944951f9c45c0 | /.history/HW03_20210706185704.py | 4de89390673cdc7ea96c6258834fddd35d0d6aae | [] | no_license | sanjayMamidipaka/cs1301 | deaffee3847519eb85030d1bd82ae11e734bc1b7 | 9ddb66596497382d807673eba96853a17884d67b | refs/heads/main | 2023-06-25T04:52:28.153535 | 2021-07-26T16:42:44 | 2021-07-26T16:42:44 | 389,703,530 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,061 | py | """
Georgia Institute of Technology - CS1301
HW03 - Strings and Lists
Collaboration Statement:
"""
#########################################
"""
Function Name: movieNight()
Parameters: subtitle (str)
Returns: fixed subtitle (str)
"""
#########################################
########## WRITE FUNCTION HERE ##########
#########################################
def movieNight(subtitle):
newSubtitle = ''
for i in subtitle:
if not i.isdigit():
newSubtitle += i
return newSubtitle
"""
Function Name: longestWord()
Parameters: sentence (str)
Returns: longest word (str)
"""
#########################################
########## WRITE FUNCTION HERE ##########
#########################################
def longestWord(sentence):
newSentence = ''
for i in sentence:
if not i == ',':
newSentence += i
list1 = newSentence.split(' ')
length = 0
longestWord = ''
for i in list1:
if len(i) >= length:
length = len(i)
longestWord = i
return longestWord
"""
Function Name: tennisMatch()
Parameters: player1 (str), player2 (str), matchRecord (str)
Returns: game statement (str)
"""
#########################################
########## WRITE FUNCTION HERE ##########
#########################################
def tennisMatch(player1, player2, matchRecord):
player1Points = 0
player2Points = 0
matchesWonPlayer1 = 0
matchesWonPlayer2 = 0
for i in matchRecord:
if i == '1':
player1Points += 1
elif i == '2':
player2Points += 1
elif i == '-':
if player1Points > player2Points:
matchesWonPlayer1 += 1
elif player2Points > player1Points:
matchesWonPlayer2 += 1
player1Points = 0
player2Points = 0
if matchesWonPlayer1 > matchesWonPlayer2:
            return player1 + ' won! The score was ' + str(matchesWonPlayer1) + '-' + str(matchesWonPlayer2)
        elif matchesWonPlayer2 > matchesWonPlayer1:
            return player2 + ' won! The score was ' + str(matchesWonPlayer2) + '-' + str(matchesWonPlayer1)
else:
return
"""
Function Name: freshFruit()
Parameters: barcodes (list), startIndex (int), stopIndex (int)
Returns: freshest barcode (int)
"""
#########################################
########## WRITE FUNCTION HERE ##########
#########################################
"""
Function Name: highestSum()
Parameters: stringList (list)
Returns: highest sum index (int)
"""
#########################################
########## WRITE FUNCTION HERE ##########
#########################################
# subtitle = "Mr. and M4rs. Dursley of nu28mber four, Privet Drive, wer903e proud to say th6at they we6re perfectly norm3al, tha894nk you ve89ry much."
# print(movieNight(subtitle))
# sentence = " abc def ghi jkl mno "
# print(longestWord(sentence)) | [
"[email protected]"
] | |
6ffb5a7913c10d585f3deb2048cc818c0cfc1870 | e0c8662a56d89730043146ddc340e9e0b9f7de72 | /plugin/117c19ce-1596.py | c48b541558f6dace7b69689c04f13acc0ba485e2 | [] | no_license | izj007/bugscan_poc | f2ef5903b30b15c230b292a1ff2dc6cea6836940 | 4490f3c36d4033bdef380577333722deed7bc758 | refs/heads/master | 2020-09-22T17:20:50.408078 | 2019-01-18T09:42:47 | 2019-01-18T09:42:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 517 | py | #coding:utf-8
from lib.curl import *
# -*- coding: utf-8 -*-
# FE5.5
# http://www.wooyun.org/bugs/wooyun-2010-086697
def assign(service, arg):
if service == "yongyou_fe":
return True, arg
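# Probe the FE treeXml.jsp endpoint with an injected single quote (%27);
# a "bad SQL grammar" message in the response body indicates SQL injection.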
def audit(arg):
url = arg + '/common/treeXml.jsp?type=sort&lx=3&code=1%27'
_, head, body, _, _ = curl.curl(url)
if body and body.find('bad SQL grammar [];') != -1:
security_hole(url)
if __name__ == '__main__':
from dummy import *
audit(assign('yongyou_fe', 'http://www.example.com/')[1]) | [
"[email protected]"
] | |
ed530119e7417142f6eadadd59dea9e964ffb5dd | 535f6b7a0635233dead58a4d8ef203b32c43ff2a | /实验/AI2019_SA19225404_吴语港_Lab4_TF1.x/量化个人教学/假量化/generate.py | 0d79b3a5b26dbcb9fe2aa08daee608ec403dafc4 | [] | no_license | jessenmeng/USTC_SSE_AI | 6cff7c6a1671a4e503727eea4ef9b455d41b166d | fa7891d69edfa719b509d1af894f7b6aa23e0b7e | refs/heads/master | 2022-12-25T19:32:39.320723 | 2020-10-10T06:08:19 | 2020-10-10T06:08:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,053 | py | from captcha.image import ImageCaptcha # pip install captcha
import numpy as np
import matplotlib.pyplot as plt
from PIL import Image
import random
import cv2
#生成字符对应的验证码
class generateCaptcha():
alphabet = ['a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z']
def random_captcha_text(self,char_set=alphabet, captcha_size=4):
captcha_text = []
for i in range(captcha_size):
c = random.choice(char_set)
captcha_text.append(c)
return captcha_text
def gen_captcha_text_and_image(self):
image = ImageCaptcha(width = 160,height = 60)
captcha_text = self.random_captcha_text()
        captcha_text = ''.join(captcha_text)  # join the characters into one string
captcha = image.generate(captcha_text)
captcha_image = Image.open(captcha)
captcha_image = np.array(captcha_image)
return captcha_text, captcha_image
def vec2text(self,char_pos):
text = []
for i, c in enumerate(char_pos):
char_idx = c % 52
if char_idx < 26:
char_code = char_idx + ord('A')
elif char_idx < 52:
char_code = char_idx - 26 + ord('a')
text.append(chr(char_code))
return "".join(text)
def text2vec(self,text):
vector = np.zeros(4 * 52)
def char2pos(c):
k = ord(c) - 65
if k > 25:
k = ord(c) - 71
return k
for i, c in enumerate(text):
idx = i * 52 + char2pos(c)
vector[idx] = 1
return vector
def get_imgs(self,num):
        # build num flattened grayscale images and their one-hot labels
train_imgs = np.zeros(num*160*60).reshape(num,160*60)
test_labels = np.zeros(num*52*4).reshape(num,52*4)
for i in range(num):
text, image = self.gen_captcha_text_and_image()
train_imgs[i,:] = np.mean(image,-1).flatten()/255
test_labels[i,:] = self.text2vec(text)
return train_imgs, test_labels
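# Usage sketch (names as defined above): each image is flattened to
# 160*60 = 9600 grayscale values in [0, 1] and each label is a
# 4 chars x 52 classes = 208-dim one-hot vector.
#   gen = generateCaptcha()
#   imgs, labels = gen.get_imgs(32)  # imgs: (32, 9600), labels: (32, 208)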
| [
"[email protected]"
] | |
56f4aa1fac68ae607b647d5e6fbc8b7e2273b221 | 393a393bb593ec5813aa16a96384a62128eed643 | /ocr/src/processor/utility/common.py | a2e4d435f20872df95db8efa7de86379206bf01a | [] | no_license | normanyahq/kejinyan | 4b0d40559b0f6b715107aa38fe800539ba485f27 | 486403fcf393077fefb441cb64c217a2289aaf3e | refs/heads/master | 2023-06-24T13:43:52.740419 | 2017-10-09T04:29:50 | 2017-10-09T04:29:50 | 84,788,246 | 7 | 6 | null | null | null | null | UTF-8 | Python | false | false | 662 | py | import random
import datetime
import string
import time
def getSquareDist(p1, p2):
return (int(p1[0])-int(p2[0])) ** 2 + (int(p1[1])-int(p2[1]))**2
def generateFileName():
return getToken() + ".png"
def getToken():
return datetime.datetime.now().strftime("%Y%m%d%H%M%S") \
+ "".join([random.choice(string.uppercase + string.lowercase + string.digits)
for i in range(0, 10)])
def timeit(f):
def timed(*args, **kw):
ts = time.time()
result = f(*args, **kw)
te = time.time()
print 'func:%r took: %2.4f sec' % \
(f.__name__, te-ts)
return result
return timed
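# Usage sketch: decorate any function to log its wall-clock time, e.g.
#   @timeit
#   def convert(img): ...
# calling convert(img) then prints something like "func:'convert' took: 0.0123 sec"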
| [
"[email protected]"
] | |
eb3fc4058efee853d8bc2aa246422e6bdbc51da6 | d2b81eacad849b66066b3b5d244b59f0144e8a0e | /python_do_sth/cv_3.py | aeda29e7cdd4d57cd02874d675fcffcae122bafc | [] | no_license | Kaiping23/Automatic_operation_and_maintenance | ba88d7c2590fb6a23c20167d7273e06b037dfc52 | 4a659c91c44a0ef7c4701199f1ee49072bad98dd | refs/heads/master | 2021-07-13T23:28:20.890603 | 2020-12-01T07:23:47 | 2020-12-01T07:23:47 | 222,401,688 | 0 | 2 | null | 2020-09-14T03:18:31 | 2019-11-18T08:42:26 | Shell | UTF-8 | Python | false | false | 860 | py | #!/usr/bin/python3
# _*_coding=utf-8 _*_
# @author lkp
# @date 2020/3/31 17:50
from PIL import Image, ImageDraw, ImageFont
import cv2
import os
def draw(pic):
img = cv2.imread('pic/' + pic)
img = img[:, :, (2, 1, 0)]
blank = Image.new("RGB", [len(img[0]), len(img), "white"])
drawObj = ImageDraw.Draw(blank)
n = 10
font = ImageFont.truetype('C:\Windows\Fonts\Microsoft YaHer UI/msgothic.ttc', size=n - 1)
for i in range(0, len(img), n):
for j in range(0, len(img[i], n)):
text = '武汉加油'
drawObj.ink = img[i][j][0] + img[i][j][1] * 256 + img[i][j][2] * 256 * 256
drawObj.text([j, i], text[int(j / n) % len(text)], font=font)
print('完成处理-----', i, j)
blank.save('new/new_' + pic, 'jpeg')
filelist = os.listdir('pic')
for file in filelist:
draw(file)
| [
"[email protected]"
] | |
1b73266fec9ad8adc485a161f7ef19f92c9e3161 | 536bce6ca78a9a151247b51acb8c375c9db7445f | /chapter2/2.33-fact.py | f39322d3d7ba4b95c64460e082de81bd82d43e19 | [] | no_license | clicianaldoni/aprimeronpython | 57de34313f4fd2a0c69637fefd60b0fb5861f859 | a917b62bec669765a238c4b310cc52b79c7df0c9 | refs/heads/master | 2023-01-28T18:02:31.175511 | 2023-01-23T08:14:57 | 2023-01-23T08:14:57 | 112,872,454 | 0 | 0 | null | 2017-12-02T19:55:40 | 2017-12-02T19:55:40 | null | UTF-8 | Python | false | false | 189 | py | """
Implement the factorial function
"""
def fact(n):
if n == 1 or n == 0:
return n
sum = 1
while n > 1:
sum *= n
n -= 1
return sum
print fact(4)
| [
"[email protected]"
] | |
f9f3b4e9ec54097a3c5c5636f4d1e0303adf3559 | e5e2b7da41fda915cb849f031a0223e2ac354066 | /sdk/python/pulumi_azure_native/recoveryservices/v20210210/replication_policy.py | 35cc2c0021962454d10fce98f1f070dafc5a861b | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | johnbirdau/pulumi-azure-native | b7d3bdddeb7c4b319a7e43a892ddc6e25e3bfb25 | d676cc331caa0694d8be99cb90b93fa231e3c705 | refs/heads/master | 2023-05-06T06:48:05.040357 | 2021-06-01T20:42:38 | 2021-06-01T20:42:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,838 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['ReplicationPolicyArgs', 'ReplicationPolicy']
@pulumi.input_type
class ReplicationPolicyArgs:
def __init__(__self__, *,
resource_group_name: pulumi.Input[str],
resource_name: pulumi.Input[str],
policy_name: Optional[pulumi.Input[str]] = None,
properties: Optional[pulumi.Input['CreatePolicyInputPropertiesArgs']] = None):
"""
The set of arguments for constructing a ReplicationPolicy resource.
:param pulumi.Input[str] resource_group_name: The name of the resource group where the recovery services vault is present.
:param pulumi.Input[str] resource_name: The name of the recovery services vault.
:param pulumi.Input[str] policy_name: Replication policy name.
:param pulumi.Input['CreatePolicyInputPropertiesArgs'] properties: Policy creation properties.
"""
pulumi.set(__self__, "resource_group_name", resource_group_name)
pulumi.set(__self__, "resource_name", resource_name)
if policy_name is not None:
pulumi.set(__self__, "policy_name", policy_name)
if properties is not None:
pulumi.set(__self__, "properties", properties)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group where the recovery services vault is present.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="resourceName")
def resource_name(self) -> pulumi.Input[str]:
"""
The name of the recovery services vault.
"""
return pulumi.get(self, "resource_name")
@resource_name.setter
def resource_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_name", value)
@property
@pulumi.getter(name="policyName")
def policy_name(self) -> Optional[pulumi.Input[str]]:
"""
Replication policy name.
"""
return pulumi.get(self, "policy_name")
@policy_name.setter
def policy_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "policy_name", value)
@property
@pulumi.getter
def properties(self) -> Optional[pulumi.Input['CreatePolicyInputPropertiesArgs']]:
"""
Policy creation properties.
"""
return pulumi.get(self, "properties")
@properties.setter
def properties(self, value: Optional[pulumi.Input['CreatePolicyInputPropertiesArgs']]):
pulumi.set(self, "properties", value)
class ReplicationPolicy(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
policy_name: Optional[pulumi.Input[str]] = None,
properties: Optional[pulumi.Input[pulumi.InputType['CreatePolicyInputPropertiesArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
resource_name_: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Protection profile details.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] policy_name: Replication policy name.
:param pulumi.Input[pulumi.InputType['CreatePolicyInputPropertiesArgs']] properties: Policy creation properties.
:param pulumi.Input[str] resource_group_name: The name of the resource group where the recovery services vault is present.
:param pulumi.Input[str] resource_name_: The name of the recovery services vault.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: ReplicationPolicyArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Protection profile details.
:param str resource_name: The name of the resource.
:param ReplicationPolicyArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(ReplicationPolicyArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
policy_name: Optional[pulumi.Input[str]] = None,
properties: Optional[pulumi.Input[pulumi.InputType['CreatePolicyInputPropertiesArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
resource_name_: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = ReplicationPolicyArgs.__new__(ReplicationPolicyArgs)
__props__.__dict__["policy_name"] = policy_name
__props__.__dict__["properties"] = properties
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
if resource_name_ is None and not opts.urn:
raise TypeError("Missing required property 'resource_name_'")
__props__.__dict__["resource_name"] = resource_name_
__props__.__dict__["location"] = None
__props__.__dict__["name"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:recoveryservices/v20210210:ReplicationPolicy"), pulumi.Alias(type_="azure-native:recoveryservices:ReplicationPolicy"), pulumi.Alias(type_="azure-nextgen:recoveryservices:ReplicationPolicy"), pulumi.Alias(type_="azure-native:recoveryservices/v20160810:ReplicationPolicy"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20160810:ReplicationPolicy"), pulumi.Alias(type_="azure-native:recoveryservices/v20180110:ReplicationPolicy"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20180110:ReplicationPolicy"), pulumi.Alias(type_="azure-native:recoveryservices/v20180710:ReplicationPolicy"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20180710:ReplicationPolicy"), pulumi.Alias(type_="azure-native:recoveryservices/v20210301:ReplicationPolicy"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20210301:ReplicationPolicy")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(ReplicationPolicy, __self__).__init__(
'azure-native:recoveryservices/v20210210:ReplicationPolicy',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'ReplicationPolicy':
"""
Get an existing ReplicationPolicy resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = ReplicationPolicyArgs.__new__(ReplicationPolicyArgs)
__props__.__dict__["location"] = None
__props__.__dict__["name"] = None
__props__.__dict__["properties"] = None
__props__.__dict__["type"] = None
return ReplicationPolicy(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def location(self) -> pulumi.Output[Optional[str]]:
"""
Resource Location
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource Name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def properties(self) -> pulumi.Output['outputs.PolicyPropertiesResponse']:
"""
The custom data.
"""
return pulumi.get(self, "properties")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource Type
"""
return pulumi.get(self, "type")
| [
"[email protected]"
] | |
f5beb8c7f2fe29d2b3875d2c777fa120da672849 | 3af6960c805e9903eb27c09d8bc7ebc77f5928fe | /problems/0095_Unique_Binary_Search_Trees_II/__init__.py | 9d3b0685feaf5e1088c97cabc9af771655025d5e | [] | no_license | romain-li/leetcode | b3c8d9d4473eebd039af16ad2d4d99abc2768bdd | 5e82b69bd041c2c168d75cb9179a8cbd7bf0173e | refs/heads/master | 2020-06-04T20:05:03.592558 | 2015-06-08T18:05:03 | 2015-06-08T18:05:03 | 27,431,664 | 2 | 1 | null | 2015-06-08T18:05:04 | 2014-12-02T12:31:58 | Python | UTF-8 | Python | false | false | 1,040 | py | ID = '95'
TITLE = 'Unique Binary Search Trees II'
DIFFICULTY = 'Medium'
URL = 'https://oj.leetcode.com/problems/unique-binary-search-trees-ii/'
BOOK = False
PROBLEM = r"""Given _n_, generate all structurally unique **BST's** (binary search trees)
that store values 1..._n_.
For example,
Given _n_ = 3, your program should return all 5 unique BST's shown below.
1 3 3 2 1
\ / / / \ \
3 2 1 1 3 2
/ / \ \
2 1 2 3
Confused what `"{1,#,2,3}"` means? Read more below on how a binary tree
is serialized on OJ.
**OJ's Binary Tree Serialization:**
The serialization of a binary tree follows a level order traversal, where '#'
signifies a path terminator where no node exists below.
Here's an example:
1
/ \
2 3
/
4
\
5
The above binary tree is serialized as `"{1,2,3,#,#,4,#,#,5}"`.
"""
| [
"[email protected]"
] | |
b0b3f0dbcdb5f3398cb9d799cf00fb967cd0f87f | 466912406272829982f75854cf0104c6ce8c9814 | /data/spider2/migrate/domain_2_beian.py | 85b21c4c99d1c2c7c4d92cd71141911c01a78701 | [] | no_license | logonmy/Codes | 9631fa103fc499663361fa7eeccd7cedb9bb08e4 | 92723efdeccfc193f9ee5d0ab77203c254f34bc2 | refs/heads/master | 2021-09-21T18:07:22.985184 | 2018-08-30T05:53:26 | 2018-08-30T05:53:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,596 | py | # -*- coding: utf-8 -*-
import os, sys
from pymongo import MongoClient
import pymongo
reload(sys)
sys.setdefaultencoding("utf-8")
sys.path.append(os.path.join(os.path.split(os.path.realpath(__file__))[0], '../../util'))
import loghelper, config
import db
import name_helper
#logger
loghelper.init_logger("domain_2_beian", stream=True)
logger = loghelper.get_logger("domain_2_beian")
#mongo
mongo = db.connect_mongo()
collection = mongo.info.beian
if __name__ == '__main__':
logger.info("Begin...")
conn = db.connect_torndb()
domains = conn.query("select * from domain")
for domain in domains:
if domain["beianhao"] is None:
continue
logger.info(domain["domain"])
domain.pop("id")
domain.pop("companyId")
domain.pop("createUser")
domain.pop("modifyUser")
domain.pop("confidence")
domain.pop("verify")
domain.pop("active")
beian = collection.find_one({"domain":domain["domain"],"organizer":domain["organizer"]})
if beian is None:
collection.insert(domain)
#break
domains = conn.query("select * from source_domain")
for domain in domains:
if domain["beianhao"] is None:
continue
logger.info(domain["domain"])
domain.pop("id")
domain.pop("sourceCompanyId")
domain.pop("verify")
beian = collection.find_one({"domain":domain["domain"],"organizer":domain["organizer"]})
if beian is None:
collection.insert(domain)
#break
conn.close()
logger.info("End.") | [
"[email protected]"
] | |
274d97a41930b485884225a410ffc761c904177b | 254ef44b90485767a3aea8cbe77dc6bf77dddaeb | /605种花问题.py | d57acf9ac517c69f15cf6872c969cf4d6f949943 | [] | no_license | XinZhaoFu/leetcode_moyu | fae00d52a52c090901021717df87b78d78192bdb | e80489923c60ed716d54c1bdeaaf52133d4e1209 | refs/heads/main | 2023-06-19T02:50:05.256149 | 2021-07-09T00:50:41 | 2021-07-09T00:50:41 | 331,243,022 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 996 | py | """
Suppose you have a long flowerbed in which some plots are planted and some
are not. Flowers cannot be planted in adjacent plots: they would compete
for water and both would die.
Given an integer array flowerbed of 0s and 1s, where 0 means the plot is
empty and 1 means it is planted, and a number n, return true if n new
flowers can be planted without breaking the adjacency rule, and false
otherwise.
Example 1:
Input: flowerbed = [1,0,0,0,1], n = 1
Output: true
"""
class Solution(object):
def canPlaceFlowers(self, flowerbed, n):
"""
:type flowerbed: List[int]
:type n: int
:rtype: bool
"""
res = 0
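        # Pad both ends with a virtual empty plot so the boundary positions
        # get the same three-neighbor check as the interior ones.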
flowerbed.append(0)
flowerbed.insert(0, 0)
for index in range(1, len(flowerbed)-1):
if flowerbed[index-1] == 0 and flowerbed[index] == 0 and flowerbed[index+1] == 0:
flowerbed[index] = 1
res += 1
return res >= n
| [
"[email protected]"
] | |
f9e56da2e6260cf5095bb824487edc63654639a4 | 8dbbcb4775dc930d56ba46963c106eb0e801ad41 | /venv/Scripts/pip3-script.py | 4d3d09e7d661ce7a6d8bfbf6f5adb8bf7a9d4cb1 | [] | no_license | harmansehmbi/Project29 | 345d88d122eae636c28910db80beb8f67b8eaf7a | 3150c0590eb5c8abe44301747b9e2643cc8d8d16 | refs/heads/master | 2022-01-21T00:05:28.300590 | 2019-07-21T08:29:50 | 2019-07-21T08:29:50 | 198,032,046 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 421 | py | #!C:\Users\HARMANPREET\PycharmProjects\Project29\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3'
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==19.0.3', 'console_scripts', 'pip3')()
)
| [
"[email protected]"
] | |
1bc65433c162da913b092d7c9fdf5baab419a75a | 8853462a79608b7e5b7af94dbfa6c0a63c1f6b6a | /2. Crawlers/CASNET_policy/Generated_data/temp.py | f77dd43e3977f455dd80c26bb735b579e07b95d0 | [] | no_license | Ashish017/CASNET | eaae2552f8d56413f756c7d3839cd6f548a6e1ef | 73ec542c4c3fa1f97686796f0c385c71cad3e8d5 | refs/heads/master | 2023-02-06T06:53:27.362356 | 2020-12-27T04:43:34 | 2020-12-27T04:43:34 | 270,657,078 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 383 | py | import pandas as pd
import matplotlib.pyplot as plt
t1 = pd.read_csv("test_1.csv")
t2 = pd.read_csv("test_2.csv")
t3 = pd.read_csv("test_3.csv")
files = [t1,t2,t3]
di = {}
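# Merge the three per-seed test CSVs into one frame: every column not
# starting with "U" (presumably pandas' "Unnamed: 0" index column) is
# kept under a name suffixed with its seed number.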
for i, file in enumerate(files):
for col in file.columns:
if col[0] != "U":
name = col + "_seed_{}".format(i+1)
di[name] = file[col]
frame = pd.DataFrame(di)
frame.to_csv("Crawler_CASNET_test.csv") | [
"[email protected]"
] | |
383d7a4ec52c2853b77bfe31e940ffda2d8eb356 | 82b50cebff69927d4394378522ae96524950fff4 | /pointnet_ops/python/ops/interpolate_ops_test.py | f6a478248088b8f9c2a5e4775093df4d606239aa | [
"MIT"
] | permissive | jackd/pointnet_ops | 1b8a44bccbb2d9d3d5763b725f8972ad096a8a14 | 26d9b85ce4b503fac7547b965e233442aa243430 | refs/heads/master | 2020-04-29T01:35:10.644187 | 2019-11-07T00:08:01 | 2019-11-07T00:08:01 | 175,735,495 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,836 | py | # Copyright 2018 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for interpolate ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from interpolate_ops import three_interpolate
from interpolate_ops import three_nn
import tensorflow as tf
class GroupPointTest(tf.test.TestCase):
def test_grad(self):
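        # Compare analytic vs. numeric gradients of three_interpolate with
        # respect to the source features; each query point takes the mean
        # of its 3 nearest neighbors (uniform 1/3 weights).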
with self.test_session():
points = tf.constant(np.random.random((1, 8, 16)).astype('float32'))
# print points
xyz1 = tf.constant(np.random.random((1, 128, 3)).astype('float32'))
xyz2 = tf.constant(np.random.random((1, 8, 3)).astype('float32'))
dist, idx = three_nn(xyz1, xyz2)
weight = tf.ones_like(dist) / 3.0
interpolated_points = three_interpolate(points, idx, weight)
# print interpolated_points
err = tf.test.compute_gradient_error(points, (1, 8, 16),
interpolated_points,
(1, 128, 16))
# print err
self.assertLess(err, 1e-4)
if __name__ == '__main__':
tf.test.main()
| [
"[email protected]"
] | |
12d524b0c3be40cfe8c7280cdf150f9712f2cacc | 1b5802806cdf2c3b6f57a7b826c3e064aac51d98 | /tensorrt-basic-1.10-3rd-plugin/TensorRT-main/demo/HuggingFace/GPT2/frameworks.py | 2baca65d15581a13288d52200aa0c29f2ea38e53 | [
"MIT",
"BSD-3-Clause",
"Apache-2.0",
"ISC",
"BSD-2-Clause"
] | permissive | jinmin527/learning-cuda-trt | def70b3b1b23b421ab7844237ce39ca1f176b297 | 81438d602344c977ef3cab71bd04995c1834e51c | refs/heads/main | 2023-05-23T08:56:09.205628 | 2022-07-24T02:48:24 | 2022-07-24T02:48:24 | 517,213,903 | 36 | 18 | null | 2022-07-24T03:05:05 | 2022-07-24T03:05:05 | null | UTF-8 | Python | false | false | 9,165 | py | #
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import sys
import argparse
from typing import List
# huggingface
from transformers import (
GPT2LMHeadModel,
GPT2Tokenizer,
GPT2Config,
)
# Add syspath for custom library
if __name__ == "__main__":
filepath = os.path.dirname(os.path.abspath(__file__))
project_root = os.path.join(filepath, os.pardir)
sys.path.append(project_root)
# helpers
from NNDF.interface import FrameworkCommand
from NNDF.general_utils import confirm_folder_delete, NNFolderWorkspace
from NNDF.networks import (
NetworkResult,
NetworkMetadata,
NetworkRuntime,
Precision,
NetworkModel,
NetworkModels,
TimingProfile,
)
from GPT2.export import GPT2TorchFile
from GPT2.GPT2ModelConfig import GPT2ModelTRTConfig
from GPT2.measurements import gpt2_inference, full_inference_greedy
class GPT2HuggingFace(FrameworkCommand):
def __init__(self):
super().__init__(
GPT2ModelTRTConfig, description="Runs framework results for GPT2 model."
)
# Default inference input used during inference stage
self.onnx_gpt2 = None
self.torch_gpt2_dir = None
def generate_and_download_framework(
self, metadata: NetworkMetadata, workspace: NNFolderWorkspace
) -> NetworkModels:
cache_variant = False
if metadata.other.kv_cache:
cache_variant = True
trt_gpt2_config = self.config
metadata_serialized = trt_gpt2_config.get_metadata_string(metadata)
workspace_dir = workspace.get_path()
pytorch_model_dir = os.path.join(workspace_dir, metadata_serialized)
# We keep track of the generated torch location for cleanup later
self.torch_gpt2_dir = pytorch_model_dir
model = None
tfm_config = GPT2Config(use_cache=cache_variant)
if not os.path.exists(pytorch_model_dir):
# Generate the pre-trained weights
model = GPT2LMHeadModel(tfm_config).from_pretrained(metadata.variant)
model.save_pretrained(pytorch_model_dir)
print("Pytorch Model saved to {}".format(pytorch_model_dir))
else:
print(
"Frameworks file already exists, skipping generation and loading from file instead."
)
model = GPT2LMHeadModel(tfm_config).from_pretrained(pytorch_model_dir)
root_onnx_model_name = "{}.onnx".format(metadata_serialized)
root_onnx_model_fpath = os.path.join(
os.getcwd(), workspace_dir, root_onnx_model_name
)
onnx_model_fpath = root_onnx_model_fpath
gpt2 = GPT2TorchFile(model, metadata)
self.onnx_gpt2 = gpt2.as_onnx_model(onnx_model_fpath, force_overwrite=False)
onnx_models = [
NetworkModel(
name=GPT2ModelTRTConfig.NETWORK_DECODER_SEGMENT_NAME,
fpath=self.onnx_gpt2.fpath,
)
]
torch_models = [
NetworkModel(
name=GPT2ModelTRTConfig.NETWORK_DECODER_SEGMENT_NAME,
fpath=pytorch_model_dir,
)
]
return NetworkModels(torch=torch_models, onnx=onnx_models, trt=None)
def cleanup(
self,
workspace: NNFolderWorkspace,
save_onnx_model: bool = True,
keep_pytorch_model: bool = True,
) -> None:
"""
Cleans up the working directory and leaves models if available.
        Should not assume any functions from the framework class have been called.
Returns:
None
"""
# Clean-up generated files
if not save_onnx_model and self.onnx_gpt2 is not None:
self.onnx_gpt2.cleanup()
# Remove any onnx external files by removing integer named values and weight files
workspace_path = workspace.get_path()
for d in os.listdir(workspace_path):
fpath = os.path.join(workspace_path, d)
if os.path.isfile(fpath) and os.path.splitext(d)[1] == ".weight":
os.remove(fpath)
elif d.isnumeric():
os.remove(fpath)
if not keep_pytorch_model:
# Using rmtree can be dangerous, have user confirm before deleting.
confirm_folder_delete(
self.torch_gpt2_dir,
prompt="Confirm you want to delete downloaded pytorch model folder?",
)
if not keep_pytorch_model and not save_onnx_model:
workspace.cleanup(force_remove=False)
def execute_inference(
self,
metadata: NetworkMetadata,
network_fpaths: NetworkModels,
inference_input: str,
timing_profile: TimingProfile,
use_cpu: bool,
batch_size: int = 1
) -> NetworkResult:
# Execute some tests
tokenizer = GPT2Tokenizer.from_pretrained(metadata.variant)
# GPT2 has no proper token set. Use custom token. Only "generate()" will auto
# replace with EOS token when using generating mode
tokenizer.add_special_tokens({"pad_token": "[PAD]"})
input_ids = tokenizer([inference_input] * batch_size, padding=True, return_tensors="pt").input_ids
# By default, HuggingFace model structure is one giant file.
gpt2_torch_fpath = network_fpaths.torch[0].fpath
config = GPT2Config(use_cache=metadata.other.kv_cache)
gpt2_model = GPT2LMHeadModel(config).from_pretrained(gpt2_torch_fpath)
gpt2_torch = GPT2TorchFile.TorchModule(
gpt2_model.transformer, gpt2_model.lm_head, gpt2_model.config
)
greedy_output = gpt2_torch.generate(input_ids) #greedy search
# get single decoder iteration inference timing profile
_, decoder_e2e_median_time = gpt2_inference(
gpt2_torch, input_ids, timing_profile, use_cuda=(not use_cpu)
)
# get complete decoder inference result and its timing profile
sample_output, full_e2e_median_runtime = full_inference_greedy(
gpt2_torch,
input_ids,
timing_profile,
max_length=GPT2ModelTRTConfig.MAX_SEQUENCE_LENGTH[metadata.variant],
use_cuda=(not use_cpu),
batch_size=batch_size
)
# Remove the padding and end tokens.
semantic_outputs = tokenizer.decode(
sample_output[-1, :], skip_special_tokens=True
)
if isinstance(semantic_outputs, list):
semantic_outputs = " ".join(semantic_outputs).strip()
return NetworkResult(
input=inference_input,
output_tensor=greedy_output,
semantic_output=semantic_outputs,
median_runtime=[
NetworkRuntime(
name=GPT2ModelTRTConfig.NETWORK_DECODER_SEGMENT_NAME,
runtime=decoder_e2e_median_time,
),
NetworkRuntime(
name=GPT2ModelTRTConfig.NETWORK_FULL_NAME,
runtime=full_e2e_median_runtime,
),
],
models=network_fpaths,
)
def run_framework(
self,
metadata: NetworkMetadata,
network_input: List[str],
working_directory: str,
keep_onnx_model: bool,
keep_pytorch_model: bool,
timing_profile: TimingProfile,
use_cpu: bool = False,
batch_size: int = 1
) -> List[NetworkResult]:
"""
        Main entry point: compiles the model and generates our result data.
"""
results = []
workspace = NNFolderWorkspace(
self.config.network_name, metadata, working_directory
)
try:
network_fpaths = self.generate_and_download_framework(metadata, workspace)
for ninput in network_input:
results.append(
self.execute_inference(
metadata, network_fpaths, ninput, timing_profile, use_cpu
)
)
finally:
self.cleanup(workspace, keep_onnx_model, keep_pytorch_model)
return results
def args_to_network_metadata(self, args: argparse.Namespace) -> NetworkMetadata:
return NetworkMetadata(
variant=args.variant,
precision=Precision(fp16=False),
other=self.config.MetadataClass(kv_cache=args.enable_kv_cache),
)
# Entry point
RUN_CMD = GPT2HuggingFace()
if __name__ == "__main__":
result = RUN_CMD()
print("Results: {}".format(result))
| [
"[email protected]"
] | |
7fd7d2d3435692240151409412192953b899d325 | 2ff7e53d5e512cd762217ca54317982e07a2bb0c | /cherrypy/test/test_wsgi_vhost.py | a7cbc1e110cbfd03a130821cba176cd2cc2bc53c | [] | no_license | nanxijw/Clara-Pretty-One-Dick | 66d3d69426642b79e8fd4cc8e0bec23adeeca6d6 | 50de3488a2140343c364efc2615cf6e67f152be0 | refs/heads/master | 2021-01-19T09:25:07.555284 | 2015-02-17T21:49:33 | 2015-02-17T21:49:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,102 | py | #Embedded file name: cherrypy/test\test_wsgi_vhost.py
import cherrypy
from cherrypy.test import helper
class WSGI_VirtualHost_Test(helper.CPWebCase):
def setup_server():
class ClassOfRoot(object):
def __init__(self, name):
self.name = name
def index(self):
return 'Welcome to the %s website!' % self.name
index.exposed = True
default = cherrypy.Application(None)
domains = {}
for year in range(1997, 2008):
app = cherrypy.Application(ClassOfRoot('Class of %s' % year))
domains['www.classof%s.example' % year] = app
cherrypy.tree.graft(cherrypy._cpwsgi.VirtualHost(default, domains))
setup_server = staticmethod(setup_server)
def test_welcome(self):
if not cherrypy.server.using_wsgi:
return self.skip('skipped (not using WSGI)... ')
for year in range(1997, 2008):
self.getPage('/', headers=[('Host', 'www.classof%s.example' % year)])
self.assertBody('Welcome to the Class of %s website!' % year)
| [
"[email protected]"
] | |
891131987930169977089ef3876a01ab735f4942 | a3e34daf7cf75f98d54b7d183f68e50277a6479b | /sources/XML projects/IBN EZRA/parse.py | 71b51b7d8f32ca018977270a23b497b3c19866af | [] | no_license | TomaszWaszczyk/Sefaria-Data | 2d0f1544c071340bffd87585e6883ef77df41495 | db2e136a14c6d08d98925da3afdf6373fec92de1 | refs/heads/master | 2023-06-07T07:56:24.332834 | 2021-06-20T15:53:56 | 2021-06-20T15:53:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,952 | py | # -*- coding: utf-8 -*-
__author__ = 'stevenkaplan'
from sources.functions import *
from sefaria.model import *
from data_utilities.XML_to_JaggedArray import XML_to_JaggedArray
from sefaria.helper.schema import *
import bleach
SERVER = "https://ste.cauldron.sefaria.org"
def reorder_modify(text):
return bleach.clean(text, strip=True)
def get_dict_of_names(file):
import csv
reader = csv.reader(open(file))
dict = {}
for row in reader:
dict[row[0]] = row[1]
return dict
def change_priority(dict_of_names):
pass
def tester(x):
return x.tag == "h1"
if __name__ == "__main__":
#add_term("Preface", u"פתח דבר", "pseudo_toc_categories", "http://localhost:8000")
#create_schema("Responsa to Chaplains", u"משהו", ["Halakhah"])
post_info = {}
volume = 2
post_info["language"] = "en"
post_info["server"] = SERVER
allowed_tags = ["volume", "book", "ack", "intro", "preface", "bibl", "part", "chapter", "p", "ftnote", "title", "ol", "footnotes", "appendix"]
allowed_attributes = ["id"]
p = re.compile("\d+a?\.")
post_info["versionTitle"] = "Ibn Ezra's commentary on the Pentateuch, tran. and annot. by H. Norman Strickman and Arthur M. Silver. Menorah Pub., 1988-2004"
post_info["versionSource"] = "https://www.nli.org.il/he/books/NNL_ALEPH001102376/NLI"
title = "Ibn Ezra"
for file in os.listdir("."):
print(file)
if file.endswith("xml") and "IBN" in file:
with open(file) as f:
contents = f.read()
title = "Ibn Ezra on {}".format(file.split("_")[-1].replace(".xml", ""))
parser = XML_to_JaggedArray(title, contents, allowed_tags, allowed_attributes, post_info, change_name=True, image_dir="./images",
titled=True, print_bool=True)
parser.set_funcs(reorder_modify=reorder_modify, reorder_test=tester)
parser.run()
| [
"[email protected]"
] | |
d7f8127681b7485ec6e79e90de2be2b3cb6c79eb | 663305e4fc1cc44ce032cfbff001d7beb21d6e92 | /QuantEconpy_quantitative economic modeling/quantecon/game_theory/vertex_enumeration.py | 7e573f1d8fd8824cd15f0be8cdccbbb18bf44eab | [
"BSD-3-Clause"
] | permissive | yangkedc1984/Source_Codes_Collected | 19e1c5887c6d5654bc76bae364a304964433a59c | 668467e74301a42eebf3085add344a5ac1323120 | refs/heads/master | 2023-01-09T15:14:43.355589 | 2020-11-08T10:12:31 | 2020-11-08T10:12:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,587 | py | """
Compute all mixed Nash equilibria of a 2-player normal form game by
vertex enumeration.
References
----------
B. von Stengel, "Equilibrium Computation for Two-Player Games in
Strategic and Extensive Form," Chapter 3, N. Nisan, T. Roughgarden, E.
Tardos, and V. Vazirani eds., Algorithmic Game Theory, 2007.
"""
import numpy as np
import scipy.spatial
from numba import jit, guvectorize
def vertex_enumeration(g):
"""
Compute mixed-action Nash equilibria of a 2-player normal form game
by enumeration and matching of vertices of the best response
polytopes. For a non-degenerate game input, these are all the Nash
equilibria.
Internally, `scipy.spatial.ConvexHull` is used to compute vertex
enumeration of the best response polytopes, or equivalently, facet
enumeration of their polar polytopes. Then, for each vertex of the
polytope for player 0, vertices of the polytope for player 1 are
searched to find a completely labeled pair.
Parameters
----------
g : NormalFormGame
NormalFormGame instance with 2 players.
Returns
-------
list(tuple(ndarray(float, ndim=1)))
List containing tuples of Nash equilibrium mixed actions.
"""
return list(vertex_enumeration_gen(g))
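# Usage sketch (Matching Pennies; its unique equilibrium mixes 50:50):
#     >>> import numpy as np
#     >>> from quantecon.game_theory import NormalFormGame, vertex_enumeration
#     >>> bimatrix = [[(1, -1), (-1, 1)],
#     ...             [(-1, 1), (1, -1)]]
#     >>> vertex_enumeration(NormalFormGame(bimatrix))
#     [(array([0.5, 0.5]), array([0.5, 0.5]))]
# (Exact array formatting may differ across NumPy versions.)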
def vertex_enumeration_gen(g):
"""
Generator version of `vertex_enumeration`.
Parameters
----------
g : NormalFormGame
NormalFormGame instance with 2 players.
Yields
-------
tuple(ndarray(float, ndim=1))
Tuple of Nash equilibrium mixed actions.
"""
try:
N = g.N
except AttributeError:
raise TypeError('input must be a 2-player NormalFormGame')
if N != 2:
raise NotImplementedError('Implemented only for 2-player games')
brps = [_BestResponsePolytope(g.players[1-i], idx=i) for i in range(N)]
labelings_bits_tup = \
tuple(_ints_arr_to_bits(brps[i].labelings) for i in range(N))
equations_tup = tuple(brps[i].equations for i in range(N))
trans_recips = tuple(brps[i].trans_recip for i in range(N))
return _vertex_enumeration_gen(labelings_bits_tup, equations_tup,
trans_recips)
@jit(nopython=True)
def _vertex_enumeration_gen(labelings_bits_tup, equations_tup, trans_recips):
"""
Main body of `vertex_enumeration_gen`.
Parameters
----------
labelings_bits_tup : tuple(ndarray(np.uint64, ndim=1))
Tuple of ndarrays of integers representing labelings of the
vertices of the best response polytopes.
equations_tup : tuple(ndarray(float, ndim=2))
Tuple of ndarrays containing the hyperplane equations of the
polar polytopes.
trans_recips : tuple(scalar(float))
Tuple of the reciprocals of the translations.
"""
m, n = equations_tup[0].shape[1] - 1, equations_tup[1].shape[1] - 1
num_vertices0, num_vertices1 = \
equations_tup[0].shape[0], equations_tup[1].shape[0]
ZERO_LABELING0_BITS = (np.uint64(1) << np.uint64(m)) - np.uint64(1)
COMPLETE_LABELING_BITS = (np.uint64(1) << np.uint64(m+n)) - np.uint64(1)
for i in range(num_vertices0):
if labelings_bits_tup[0][i] == ZERO_LABELING0_BITS:
continue
for j in range(num_vertices1):
xor = labelings_bits_tup[0][i] ^ labelings_bits_tup[1][j]
if xor == COMPLETE_LABELING_BITS:
yield _get_mixed_actions(
labelings_bits_tup[0][i],
(equations_tup[0][i], equations_tup[1][j]),
trans_recips
)
break
class _BestResponsePolytope:
"""
Class that represents a best response polytope for a player in a
two-player normal form game.
Let :math:`A` and :math:`B` be the m x n and n x m payoff matrices
of players 0 and 1, respectively, where the payoffs are assumed to
have been shifted in such a way that :math:`A` and :math:`B` are
    nonnegative and have no zero column. In von Stengel (2007), the best
    response polytope for player 0 is defined by
.. math::
P = \{x \in \mathbb{R}^m \mid x \geq 0,\ B x \leq 1\},
and that for player 1 by
.. math::
Q = \{y \in \mathbb{R}^n \mid A y \leq 1,\ y \geq 0\}.
Here, by translation we represent these in the form
.. math::
\hat{P} = \{z \in \mathbb{R}^m \mid D z \leq 1\},
and
.. math::
\hat{Q} = \{w \in \mathbb{R}^n \mid C w \leq 1\},
where :math:`D` and :math:`C` are (m+n) x m and (m+n) x n matrices,
respectively. The 2d array of matrix :math:`D` for player 0 (or
:math:`C` for player 1) is passed as its `points` argument to
`scipy.spatial.ConvexHull`, which then computes, by the Qhull
library, convex hull (or facet enumeration). By polar duality, this
is equivalent to vertex enumeration of the polytope :math:`\hat{P}`,
where its k-th vertex is obtained by `-equations[k, :-1]/
equations[k, -1]`, and the indices of the corresponding binding
inequalities by `labelings[k]`, while the vertex of the original
polytope :math:`P` can be obtained by `-equations[k, :-1]/
equations[k, -1] + 1/trans_recip`.
Parameters
----------
opponent_player : Player
Instance of Player with one opponent.
idx : scalar(int), optional(default=0)
Player index in the normal form game, either 0 or 1.
Attributes
----------
ndim : scalar(int)
Dimension of the polytope.
hull : scipy.spatial.ConvexHull
        `ConvexHull` instance representing the polar polytope.
num_vertices : scalar(int)
Number of the vertices identified by `ConvexHull`.
equations : ndarray(float, ndim=2)
Output of `ConvexHull.equations`. The k-th vertex is obtained
by `-equations[k, :-1]/equations[k, -1]`.
labelings : ndarray(int32, ndim=2)
Output of `ConvexHull.simplices`. `labelings[k]` stores the
indices of the binding inequalities for the k-th vertex.
trans_recip : scalar(float)
Reciprocal of the translation; the k-th vertex of the original
polytope before translation can be computed by
`-equations[k, :-1]/equations[k, -1] + 1/trans_recip`.
"""
def __init__(self, opponent_player, idx=0):
try:
num_opponents = opponent_player.num_opponents
except AttributeError:
raise TypeError('input must be a Player instance')
if num_opponents != 1:
raise NotImplementedError(
'Implemented only for Player in a 2-player game'
)
B = opponent_player.payoff_array
n, m = B.shape
self.ndim = m
D = np.empty((m+n, m))
nonneg_cond_start, payoff_cond_start = (0, m) if idx == 0 else (n, 0)
# Shift the payoffs to be nonnegative and have no zero column
col_mins = B.min(axis=0)
col_maxs = B.max(axis=0)
neg_cols = (col_mins < 0)
nonpos_const_cols = (col_maxs == col_mins) * (col_mins <= 0)
shifts = np.zeros(m)
shifts[col_mins < 0] = -col_mins[col_mins < 0]
shifts[nonpos_const_cols] += 1
D[payoff_cond_start:payoff_cond_start+n, :] = B + shifts
# Construct matrix D for player 0 (or matrix C for player 1)
# by translation z = x - 1/trans_recip
row_sums = D[payoff_cond_start:payoff_cond_start+n, :].sum(axis=1)
trans_recip = row_sums.max() * 2
D[payoff_cond_start:payoff_cond_start+n, :] *= trans_recip
D[payoff_cond_start:payoff_cond_start+n, :] /= \
(trans_recip - row_sums).reshape(n, 1)
D[nonneg_cond_start:nonneg_cond_start+m, :] = 0
np.fill_diagonal(
D[nonneg_cond_start:nonneg_cond_start+m, :], -trans_recip
)
# Create scipy.spatial.ConvexHull
self.hull = scipy.spatial.ConvexHull(D)
self.equations = self.hull.equations
self.labelings = self.hull.simplices
self.num_vertices = self.hull.equations.shape[0]
self.trans_recip = trans_recip
@guvectorize(['(i4[:], u8[:])'], '(m)->()', nopython=True, cache=True)
def _ints_arr_to_bits(ints_arr, out):
"""
Convert an array of integers representing the set bits into the
corresponding integer.
Compiled as a ufunc by Numba's `@guvectorize`: if the input is a
2-dim array with shape[0]=K, the function returns a 1-dim array of
K converted integers.
Parameters
----------
ints_arr : ndarray(int32, ndim=1)
Array of distinct integers from 0, ..., 63.
Returns
-------
np.uint64
Integer with set bits represented by the input integers.
Examples
--------
>>> ints_arr = np.array([0, 1, 2], dtype=np.int32)
>>> _ints_arr_to_bits(ints_arr)
7
>>> ints_arr2d = np.array([[0, 1, 2], [3, 0, 1]], dtype=np.int32)
>>> _ints_arr_to_bits(ints_arr2d)
array([ 7, 11], dtype=uint64)
"""
m = ints_arr.shape[0]
out[0] = 0
for i in range(m):
out[0] |= np.uint64(1) << np.uint64(ints_arr[i])
@jit(nopython=True, cache=True)
def _get_mixed_actions(labeling_bits, equation_tup, trans_recips):
"""
From a labeling for player 0, a tuple of hyperplane equations of the
polar polytopes, and a tuple of the reciprocals of the translations,
return a tuple of the corresponding, normalized mixed actions.
Parameters
----------
labeling_bits : scalar(np.uint64)
Integer with set bits representing a labeling of a mixed action
of player 0.
equation_tup : tuple(ndarray(float, ndim=1))
Tuple of hyperplane equations of the polar polytopes.
trans_recips : tuple(scalar(float))
Tuple of the reciprocals of the translations.
Returns
-------
tuple(ndarray(float, ndim=1))
Tuple of mixed actions.
"""
m, n = equation_tup[0].shape[0] - 1, equation_tup[1].shape[0] - 1
out = np.empty(m+n)
for pl, (start, stop, skip) in enumerate([(0, m, np.uint64(1)),
(m, m+n, np.uint64(0))]):
sum_ = 0.
for i in range(start, stop):
if (labeling_bits & np.uint64(1)) == skip:
out[i] = 0
else:
out[i] = equation_tup[pl][i-start] * trans_recips[pl] - \
equation_tup[pl][-1]
sum_ += out[i]
labeling_bits = labeling_bits >> np.uint64(1)
if sum_ != 0:
out[start:stop] /= sum_
return out[:m], out[m:]
| [
"[email protected]"
] | |
75f4ebcb13a4e90f149b593c0fd91725a3698c42 | 993ef8924418866f932396a58e3ad0c2a940ddd3 | /Production/python/PrivateSamples/EMJ_UL18_mMed-1800_mDark-6_kappa-0p95_aligned-down_cff.py | 9529dca572a022c1535c9eb4f19ce9b052141394 | [] | no_license | TreeMaker/TreeMaker | 48d81f6c95a17828dbb599d29c15137cd6ef009a | 15dd7fe9e9e6f97d9e52614c900c27d200a6c45f | refs/heads/Run2_UL | 2023-07-07T15:04:56.672709 | 2023-07-03T16:43:17 | 2023-07-03T16:43:17 | 29,192,343 | 16 | 92 | null | 2023-07-03T16:43:28 | 2015-01-13T13:59:30 | Python | UTF-8 | Python | false | false | 1,961 | py | import FWCore.ParameterSet.Config as cms
maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
readFiles = cms.untracked.vstring()
secFiles = cms.untracked.vstring()
source = cms.Source ("PoolSource",fileNames = readFiles, secondaryFileNames = secFiles)
readFiles.extend( [
'root://cmseos.fnal.gov///store/group/lpcsusyhad/ExoEMJAnalysis2020/Signal.Oct.2021/UL18/step4_MINIAODv2_mMed-1800_mDark-6_kappa-0p95_aligned-down_n-500_part-1.root',
'root://cmseos.fnal.gov///store/group/lpcsusyhad/ExoEMJAnalysis2020/Signal.Oct.2021/UL18/step4_MINIAODv2_mMed-1800_mDark-6_kappa-0p95_aligned-down_n-500_part-10.root',
'root://cmseos.fnal.gov///store/group/lpcsusyhad/ExoEMJAnalysis2020/Signal.Oct.2021/UL18/step4_MINIAODv2_mMed-1800_mDark-6_kappa-0p95_aligned-down_n-500_part-2.root',
'root://cmseos.fnal.gov///store/group/lpcsusyhad/ExoEMJAnalysis2020/Signal.Oct.2021/UL18/step4_MINIAODv2_mMed-1800_mDark-6_kappa-0p95_aligned-down_n-500_part-3.root',
'root://cmseos.fnal.gov///store/group/lpcsusyhad/ExoEMJAnalysis2020/Signal.Oct.2021/UL18/step4_MINIAODv2_mMed-1800_mDark-6_kappa-0p95_aligned-down_n-500_part-4.root',
'root://cmseos.fnal.gov///store/group/lpcsusyhad/ExoEMJAnalysis2020/Signal.Oct.2021/UL18/step4_MINIAODv2_mMed-1800_mDark-6_kappa-0p95_aligned-down_n-500_part-5.root',
'root://cmseos.fnal.gov///store/group/lpcsusyhad/ExoEMJAnalysis2020/Signal.Oct.2021/UL18/step4_MINIAODv2_mMed-1800_mDark-6_kappa-0p95_aligned-down_n-500_part-6.root',
'root://cmseos.fnal.gov///store/group/lpcsusyhad/ExoEMJAnalysis2020/Signal.Oct.2021/UL18/step4_MINIAODv2_mMed-1800_mDark-6_kappa-0p95_aligned-down_n-500_part-7.root',
'root://cmseos.fnal.gov///store/group/lpcsusyhad/ExoEMJAnalysis2020/Signal.Oct.2021/UL18/step4_MINIAODv2_mMed-1800_mDark-6_kappa-0p95_aligned-down_n-500_part-8.root',
'root://cmseos.fnal.gov///store/group/lpcsusyhad/ExoEMJAnalysis2020/Signal.Oct.2021/UL18/step4_MINIAODv2_mMed-1800_mDark-6_kappa-0p95_aligned-down_n-500_part-9.root',
] )
| [
"[email protected]"
] | |
1278478f1e40f5357bc58ac0949409e318c8679f | 8364e4d23191ee535c163debffafa8418d705843 | /aiokubernetes/models/v1beta2_replica_set.py | d891f61f5234e0fbcff0c24e5b8bd28d2e713e6d | [
"Apache-2.0"
] | permissive | olitheolix/aiokubernetes | 2bb6499030e2e6e9b7ca0db63c4441293d70a09b | 266718b210dff2a9b2212183261ea89adf89115e | refs/heads/master | 2020-03-21T23:02:30.484410 | 2018-10-20T19:33:01 | 2018-10-22T05:52:42 | 139,162,905 | 28 | 3 | Apache-2.0 | 2018-10-22T05:52:51 | 2018-06-29T15:02:59 | Python | UTF-8 | Python | false | false | 8,595 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v1.10.6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
from aiokubernetes.models.v1_object_meta import V1ObjectMeta # noqa: F401,E501
from aiokubernetes.models.v1beta2_replica_set_spec import V1beta2ReplicaSetSpec # noqa: F401,E501
from aiokubernetes.models.v1beta2_replica_set_status import V1beta2ReplicaSetStatus # noqa: F401,E501
class V1beta2ReplicaSet(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'api_version': 'str',
'kind': 'str',
'metadata': 'V1ObjectMeta',
'spec': 'V1beta2ReplicaSetSpec',
'status': 'V1beta2ReplicaSetStatus'
}
attribute_map = {
'api_version': 'apiVersion',
'kind': 'kind',
'metadata': 'metadata',
'spec': 'spec',
'status': 'status'
}
def __init__(self, api_version=None, kind=None, metadata=None, spec=None, status=None): # noqa: E501
"""V1beta2ReplicaSet - a model defined in Swagger""" # noqa: E501
self._api_version = None
self._kind = None
self._metadata = None
self._spec = None
self._status = None
self.discriminator = None
if api_version is not None:
self.api_version = api_version
if kind is not None:
self.kind = kind
if metadata is not None:
self.metadata = metadata
if spec is not None:
self.spec = spec
if status is not None:
self.status = status
@property
def api_version(self):
"""Gets the api_version of this V1beta2ReplicaSet. # noqa: E501
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources # noqa: E501
:return: The api_version of this V1beta2ReplicaSet. # noqa: E501
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""Sets the api_version of this V1beta2ReplicaSet.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources # noqa: E501
:param api_version: The api_version of this V1beta2ReplicaSet. # noqa: E501
:type: str
"""
self._api_version = api_version
@property
def kind(self):
"""Gets the kind of this V1beta2ReplicaSet. # noqa: E501
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds # noqa: E501
:return: The kind of this V1beta2ReplicaSet. # noqa: E501
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""Sets the kind of this V1beta2ReplicaSet.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds # noqa: E501
:param kind: The kind of this V1beta2ReplicaSet. # noqa: E501
:type: str
"""
self._kind = kind
@property
def metadata(self):
"""Gets the metadata of this V1beta2ReplicaSet. # noqa: E501
If the Labels of a ReplicaSet are empty, they are defaulted to be the same as the Pod(s) that the ReplicaSet manages. Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#metadata # noqa: E501
:return: The metadata of this V1beta2ReplicaSet. # noqa: E501
:rtype: V1ObjectMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""Sets the metadata of this V1beta2ReplicaSet.
If the Labels of a ReplicaSet are empty, they are defaulted to be the same as the Pod(s) that the ReplicaSet manages. Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#metadata # noqa: E501
:param metadata: The metadata of this V1beta2ReplicaSet. # noqa: E501
:type: V1ObjectMeta
"""
self._metadata = metadata
@property
def spec(self):
"""Gets the spec of this V1beta2ReplicaSet. # noqa: E501
Spec defines the specification of the desired behavior of the ReplicaSet. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#spec-and-status # noqa: E501
:return: The spec of this V1beta2ReplicaSet. # noqa: E501
:rtype: V1beta2ReplicaSetSpec
"""
return self._spec
@spec.setter
def spec(self, spec):
"""Sets the spec of this V1beta2ReplicaSet.
Spec defines the specification of the desired behavior of the ReplicaSet. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#spec-and-status # noqa: E501
:param spec: The spec of this V1beta2ReplicaSet. # noqa: E501
:type: V1beta2ReplicaSetSpec
"""
self._spec = spec
@property
def status(self):
"""Gets the status of this V1beta2ReplicaSet. # noqa: E501
Status is the most recently observed status of the ReplicaSet. This data may be out of date by some window of time. Populated by the system. Read-only. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#spec-and-status # noqa: E501
:return: The status of this V1beta2ReplicaSet. # noqa: E501
:rtype: V1beta2ReplicaSetStatus
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this V1beta2ReplicaSet.
Status is the most recently observed status of the ReplicaSet. This data may be out of date by some window of time. Populated by the system. Read-only. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#spec-and-status # noqa: E501
:param status: The status of this V1beta2ReplicaSet. # noqa: E501
:type: V1beta2ReplicaSetStatus
"""
self._status = status
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in self.swagger_types.items():
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1beta2ReplicaSet):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
] | |
fbe5b3bc53d4ca5e5279b5e32b1207f84aea771a | 00c6ded41b84008489a126a36657a8dc773626a5 | /.history/Sizing_Method/ConstrainsAnalysis/DesignPointSelectStrategy_20210714191430.py | 04f5745a8803874ada846022a476fba426fada70 | [] | no_license | 12libao/DEA | 85f5f4274edf72c7f030a356bae9c499e3afc2ed | 1c6f8109bbc18c4451a50eacad9b4dedd29682bd | refs/heads/master | 2023-06-17T02:10:40.184423 | 2021-07-16T19:05:18 | 2021-07-16T19:05:18 | 346,111,158 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,729 | py | # author: Bao Li #
# Georgia Institute of Technology #
import sys
import os
sys.path.insert(0, os.getcwd())
import numpy as np
import matplotlib.pylab as plt
import Sizing_Method.Other.US_Standard_Atmosphere_1976 as atm
import Sizing_Method.Aerodynamics.ThrustLapse as thrust_lapse
import Sizing_Method.Aerodynamics.Aerodynamics as ad
import Sizing_Method.ConstrainsAnalysis.ConstrainsAnalysis as ca
import Sizing_Method.ConstrainsAnalysis.ConstrainsAnalysisPD as ca_pd
import Sizing_Method.ConstrainsAnalysis.ConstrainsAnalysisPDP1P2 as ca_pd_12
from scipy.optimize import curve_fit
"""
The units used are SI standard
"""
class Design_Point_Select_Strategy:
"""This is a design point select strategy from constrains analysis"""
def __init__(self, altitude, velocity, beta, method, p_turbofan_max, p_motorfun_max, n=12):
"""
:param altitude: m x 1 matrix
:param velocity: m x 1 matrix
:param beta: P_motor/P_total m x 1 matrix
:param p_turbofan_max: maximum propulsion power for turbofan (threshold value)
:param p_motorfun_max: maximum propulsion power for motorfun (threshold value)
        :param n: number of motors
        the first group of conditions is the stall-speed constraint;
        the stall-speed condition has to use the motor, hence the PD variant
:return:
power load: design point p/w and w/s
"""
self.h = altitude
self.v = velocity
self.beta = beta
self.n_motor = n
self.p_turbofan_max = p_turbofan_max
self.p_motorfun_max = p_motorfun_max
        # initialize n, m, the hp grid, and the power-split threshold
self.n = 100
self.m = len(self.h)
self.hp = np.linspace(0, 1, self.n)
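        # hp: candidate grid of power-split (hybridisation) ratios from 0 to 1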
self.hp_threshold = self.p_motorfun_max / (self.p_motorfun_max + self.p_turbofan_max)
        # method == 1 selects the Mattingly formulation, otherwise Gudmundsson;
        # in either case method1 is the turbofan variant and method2 the electric variant
if method == 1:
self.method1 = ca_pd_12.ConstrainsAnalysis_Mattingly_Method_with_DP_turbofun
self.method2 = ca_pd_12.ConstrainsAnalysis_Mattingly_Method_with_DP_electric
else:
self.method1 = ca_pd_12.ConstrainsAnalysis_Gudmundsson_Method_with_DP_turbofun
self.method2 = ca_pd_12.ConstrainsAnalysis_Gudmundsson_Method_with_DP_electric
        # the stall-speed condition is evaluated with the electric (motor)
        # variant, per the note in the docstring above
        problem = self.method2(self.h[0], self.v[0], self.beta[0], 6000, self.hp_threshold)
self.w_s = problem.allFuncs[0](problem)
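        # wing loading w_s is fixed once from the stall-speed constraint
        # before sweeping the hp grid in p_w_compute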
def p_w_compute(self):
p_w = np.zeros([self.m, self.n]) # m x n matrix
for i in range(1, 8):
for j in range(self.n):
problem1 = self.method1(self.h[i], self.v[i],
self.beta[i], self.w_s, self.hp[j])
problem2 = self.method2(self.h[i], self.v[i],
self.beta[i], self.w_s, self.hp[j])
if i >= 5:
p_w_1 = problem1.allFuncs[-1](problem1, roc=15 - 5 * (i - 5))
p_w_2 = problem2.allFuncs[-1](problem2, roc=15 - 5 * (i - 5))
else:
p_w_1 = problem1.allFuncs[i](problem1)
p_w_2 = problem2.allFuncs[i](problem2)
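                # penalise infeasible combinations: a power loading beyond a
                # propulsor's threshold is replaced with a large sentinel value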
if p_w_1 > self.p_turbofan_max:
p_w_1 = 100000
elif p_w_2 > self.p_motorfun_max:
p_w_2 = 100000
                p_w[i, j] = p_w_1 + p_w_2
return p_w
def strategy(self):
        p_w = self.p_w_compute()
        # find the minimum total p_w across the hp grid for each flight condition:
        p_w_min = np.amin(p_w, axis=1)
        hp_p_w_min = np.array(np.where(p_w == p_w_min[:, None]))
design_p
| [
"[email protected]"
] | |
a65e513b2a58d300144f7c80b24d882cad985edc | a0ca6e78f0500e6bfc874cdb4ad79869c0fc4e81 | /plot_admitted.py | 027c0de8432efea3e6c2e5c09fc897a98567b1f0 | [] | no_license | kshedden/micovid | e25fc834e4fe9f55fdd80a40d68c134a9912bc3c | 1f85d84645707d4bb5e6bc913e667d666ce10a85 | refs/heads/master | 2022-11-23T23:46:09.794802 | 2020-07-20T02:57:57 | 2020-07-20T02:57:57 | 275,910,018 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,281 | py | import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
import matplotlib.dates as mdates
import json
import gzip
months = mdates.MonthLocator()
months_fmt = mdates.DateFormatter("%b")
with gzip.open("/nfs/kshedden/Daniel_Keyes/ratio_results.json.gz") as gid:
dm = json.load(gid)
pdf = PdfPages("admit_ratios.pdf")
px = []
for ky in dm.keys():
px.append(":".join(ky.split(":")[0:-1]))
px = list(set(px))
px.sort()
for kp in px:
plt.clf()
plt.figure(figsize=(7, 5))
plt.axes([0.1, 0.11, 0.8, 0.8])
plt.grid(True)
try:
dun = dm[kp + ":Admit"]
dud = dm[kp + ":Total"]
except KeyError:
continue
dun = np.asarray(dun).T
dud = np.asarray(dud).T
dd = pd.to_datetime("2020-01-01") + pd.to_timedelta(dun[:, 0], 'd')
xt = ky.split(":")[-1]
plt.plot(dd, np.exp(dun[:, 1] - dud[:, 1]), label=xt, color='black', alpha=0.6)
plt.gca().xaxis.set_major_locator(months)
plt.gca().xaxis.set_major_formatter(months_fmt)
plt.xlabel("Date (2020)", size=14)
plt.ylabel("Ratio relative to 2019", size=14)
plt.ylim(ymin=0)
ti = kp.split(":")
ti[1] = ti[1].lower()
plt.title(" ".join(ti) + "s")
pdf.savefig()
pdf.close()
| [
"[email protected]"
] | |
344e59c82179984a3b83cc3a808abee6d080f458 | 9c1ebbe76c525f1f63481232ebfb61cde37994e3 | /second/main (1).py | effb9dbc42da8b3ad7671faddfea0490a28c4044 | [] | no_license | georgeiniesta/Python-Tests-2 | f279f59046fcdf3a33d9320b05d12de7d6a12f97 | 54544888069edda0f42c8277f5e1a20fd4c4f82c | refs/heads/main | 2023-08-03T10:42:00.056297 | 2021-09-13T19:42:10 | 2021-09-13T19:42:10 | 406,106,532 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 34 | py | print ('I\'m Monty \"Python\"') | [
"[email protected]"
] | |
d1de944f71e0ff37476071f7eb2400ef4bf5ca6e | d4e9fd6dd51d29ad374b460a2cfbd467502ede7d | /ros2doctor/test/test_qos_compatibility.py | cb21bedba5e8d26abe78afc768015c3139e5efed | [
"Apache-2.0"
] | permissive | ros2/ros2cli | 3f7b93ff44d18b2292a50d3b6ff119494142328b | 351ef3c7442f49013d84084dea23fe399517690f | refs/heads/rolling | 2023-08-07T03:53:23.635067 | 2023-08-03T19:50:28 | 2023-08-03T19:50:28 | 93,568,427 | 142 | 157 | Apache-2.0 | 2023-09-14T07:36:46 | 2017-06-06T22:13:14 | Python | UTF-8 | Python | false | false | 6,588 | py | # Copyright 2021 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
import os
import sys
import unittest
from launch import LaunchDescription
from launch.actions import ExecuteProcess
from launch_ros.actions import Node
import launch_testing
import launch_testing.actions
import launch_testing.asserts
import launch_testing.markers
import launch_testing.tools
import launch_testing_ros.tools
import pytest
from rclpy.utilities import get_available_rmw_implementations
# Skip cli tests on Windows while they exhibit pathological behavior
# https://github.com/ros2/build_farmer/issues/248
if sys.platform.startswith('win'):
pytest.skip(
'CLI tests can block for a pathological amount of time on Windows.',
allow_module_level=True)
@pytest.mark.rostest
@launch_testing.parametrize('rmw_implementation', get_available_rmw_implementations())
def generate_test_description(rmw_implementation):
path_to_fixtures = os.path.join(os.path.dirname(__file__), 'fixtures')
additional_env = {
'RMW_IMPLEMENTATION': rmw_implementation, 'PYTHONUNBUFFERED': '1'
}
path_to_incompatible_talker_node_script = os.path.join(
path_to_fixtures, 'talker_node_with_best_effort_qos.py')
path_to_compatible_talker_node_script = os.path.join(
path_to_fixtures, 'talker_node_with_reliable_qos.py')
path_to_listener_node_script = os.path.join(
path_to_fixtures, 'listener_node_with_reliable_qos.py')
talker_node_compatible = Node(
executable=sys.executable,
arguments=[path_to_compatible_talker_node_script],
remappings=[('chatter', 'compatible_chatter')],
additional_env=additional_env
)
listener_node_compatible = Node(
executable=sys.executable,
arguments=[path_to_listener_node_script],
remappings=[('chatter', 'compatible_chatter')],
additional_env=additional_env
)
talker_node_incompatible = Node(
executable=sys.executable,
arguments=[path_to_incompatible_talker_node_script],
remappings=[('chatter', 'incompatible_chatter')],
additional_env=additional_env
)
listener_node_incompatible = Node(
executable=sys.executable,
arguments=[path_to_listener_node_script],
remappings=[('chatter', 'incompatible_chatter')],
additional_env=additional_env
)
return LaunchDescription([
# Always restart daemon to isolate tests.
ExecuteProcess(
cmd=['ros2', 'daemon', 'stop'],
name='daemon-stop',
on_exit=[
ExecuteProcess(
cmd=['ros2', 'daemon', 'start'],
name='daemon-start',
on_exit=[
# Add incompatible talker/listener pair.
talker_node_incompatible,
listener_node_incompatible,
talker_node_compatible,
listener_node_compatible,
launch_testing.actions.ReadyToTest()
],
additional_env=additional_env
)
]
),
]), locals()
class TestROS2DoctorQoSCompatibility(unittest.TestCase):
@classmethod
def setUpClass(
cls,
launch_service,
proc_info,
proc_output,
rmw_implementation,
):
rmw_implementation_filter = launch_testing_ros.tools.basic_output_filter(
filtered_patterns=['WARNING: topic .* does not appear to be published yet'],
filtered_rmw_implementation=rmw_implementation
)
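        # the filter normalises RMW-specific output and drops the transient
        # "does not appear to be published yet" warning so assertions stay stable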
@contextlib.contextmanager
def launch_doctor_command(self, arguments):
doctor_command_action = ExecuteProcess(
cmd=['ros2', 'doctor', *arguments],
additional_env={
'RMW_IMPLEMENTATION': rmw_implementation,
'PYTHONUNBUFFERED': '1'
},
name='ros2doctor-cli',
output='screen'
)
with launch_testing.tools.launch_process(
launch_service, doctor_command_action, proc_info, proc_output,
output_filter=rmw_implementation_filter
) as doctor_command:
yield doctor_command
cls.launch_doctor_command = launch_doctor_command
@launch_testing.markers.retry_on_failure(times=5, delay=1)
def test_check(self):
with self.launch_doctor_command(
arguments=[]
) as doctor_command:
assert doctor_command.wait_for_shutdown(timeout=10)
assert doctor_command.exit_code == launch_testing.asserts.EXIT_OK
assert doctor_command.output
lines_list = [line for line in doctor_command.output.splitlines() if line]
assert 'Failed modules' in lines_list[-1]
assert 'middleware' in lines_list[-1]
@launch_testing.markers.retry_on_failure(times=5, delay=1)
def test_report(self):
for argument in ['-r', '--report']:
with self.launch_doctor_command(
arguments=[argument]
) as doctor_command:
assert doctor_command.wait_for_shutdown(timeout=10)
assert doctor_command.exit_code == launch_testing.asserts.EXIT_OK
assert ('topic [type] : /compatible_chatter [std_msgs/msg/String]\n'
'publisher node : talker_node\n'
'subscriber node : listener\n'
'compatibility status : OK') in doctor_command.output
assert ('topic [type] : /incompatible_chatter [std_msgs/msg/String]\n'
'publisher node : talker_node\n'
'subscriber node : listener\n'
'compatibility status : '
'ERROR: Best effort publisher and reliable subscription;') \
in doctor_command.output
| [
"[email protected]"
] | |
5c8bec34b3455da38460275936f45236a1356b09 | 921aa4d0dddc868b61fe1ea3805cf15c36853bc0 | /client/k8s_client/models/v1beta1_ingress_list.py | 597dd9d514517fbd656459a07921c530ef9f056d | [] | no_license | mbohlool/k8s-python-client | 56d569071326820ebb5e11319ca230a272282a56 | e083a1991e03f05d69a39f43b02b9bd5ede0a2b7 | refs/heads/master | 2021-01-09T20:29:17.339112 | 2016-08-18T23:16:49 | 2016-08-18T23:16:49 | 65,772,328 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,235 | py | # coding: utf-8
"""
Copyright 2016 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ref: https://github.com/swagger-api/swagger-codegen
"""
from pprint import pformat
from six import iteritems
class V1beta1IngressList(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self):
"""
V1beta1IngressList - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'api_version': 'str',
'items': 'list[V1beta1Ingress]',
'kind': 'str',
'metadata': 'UnversionedListMeta'
}
self.attribute_map = {
'api_version': 'apiVersion',
'items': 'items',
'kind': 'kind',
'metadata': 'metadata'
}
self._api_version = None
self._items = None
self._kind = None
self._metadata = None
@property
def api_version(self):
"""
Gets the api_version of this V1beta1IngressList.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#resources
:return: The api_version of this V1beta1IngressList.
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""
Sets the api_version of this V1beta1IngressList.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#resources
:param api_version: The api_version of this V1beta1IngressList.
:type: str
"""
self._api_version = api_version
@property
def items(self):
"""
Gets the items of this V1beta1IngressList.
Items is the list of Ingress.
:return: The items of this V1beta1IngressList.
:rtype: list[V1beta1Ingress]
"""
return self._items
@items.setter
def items(self, items):
"""
Sets the items of this V1beta1IngressList.
Items is the list of Ingress.
:param items: The items of this V1beta1IngressList.
:type: list[V1beta1Ingress]
"""
self._items = items
@property
def kind(self):
"""
Gets the kind of this V1beta1IngressList.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds
:return: The kind of this V1beta1IngressList.
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""
Sets the kind of this V1beta1IngressList.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds
:param kind: The kind of this V1beta1IngressList.
:type: str
"""
self._kind = kind
@property
def metadata(self):
"""
Gets the metadata of this V1beta1IngressList.
:return: The metadata of this V1beta1IngressList.
:rtype: UnversionedListMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""
Sets the metadata of this V1beta1IngressList.
:param metadata: The metadata of this V1beta1IngressList.
:type: UnversionedListMeta
"""
self._metadata = metadata
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| [
"[email protected]"
] | |
3d9fabb20612a4a28c9dd6172949c6e3068e7b92 | c004fbafbc8c78e4c825f8183d9d70597f9bac8e | /venv/Scripts/pip3-script.py | 61c922b63ba5ee3ce34d43445af6a44533602957 | [] | no_license | vectorhuztt/backstage_back | 3de4d69003d4a8b161d20f3e35a2e0bd10bdf7e6 | ca704e75d5069bc4d1d8ea5a3f13a94fe91c820b | refs/heads/master | 2022-12-13T02:34:46.617296 | 2020-04-23T03:12:37 | 2020-04-23T03:12:37 | 254,113,925 | 0 | 0 | null | 2022-12-08T09:38:25 | 2020-04-08T14:39:19 | Python | UTF-8 | Python | false | false | 415 | py | #!E:\MyStudyPro\django_file\backstage_back\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3'
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==19.0.3', 'console_scripts', 'pip3')()
)
| [
"[email protected]"
] | |
e6265d295615293256a409e7286b429a02cc5609 | 61747f324eaa757f3365fd7bf5ddd53ea0db47d1 | /casepro/contacts/migrations/0001_initial.py | 6d0a371e168bc8f557ce07d0e5145cb99d59cf58 | [
"BSD-3-Clause"
] | permissive | BlueRidgeLabs/casepro | f8b0eefa8f961dd2fdb5da26a48b619ebc1f8c12 | 8ef509326f3dfa80bb44beae00b60cc6c4ac7a24 | refs/heads/master | 2022-01-24T09:01:18.881548 | 2017-12-05T18:46:05 | 2017-12-05T18:49:42 | 113,502,588 | 0 | 0 | null | 2017-12-07T21:57:37 | 2017-12-07T21:57:37 | null | UTF-8 | Python | false | false | 3,306 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('orgs', '0014_auto_20150722_1419'),
]
operations = [
migrations.CreateModel(
name='Contact',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('uuid', models.CharField(unique=True, max_length=36)),
('name', models.CharField(help_text='The name of this contact', max_length=128, null=True, verbose_name='Full name', blank=True)),
('language', models.CharField(help_text='Language for this contact', max_length=3, null=True, verbose_name='Language', blank=True)),
('is_active', models.BooleanField(default=True, help_text='Whether this contact is active')),
('created_on', models.DateTimeField(help_text='When this contact was created', auto_now_add=True)),
],
),
migrations.CreateModel(
name='Field',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('key', models.CharField(max_length=36, verbose_name='Key')),
('label', models.CharField(max_length=36, null=True, verbose_name='Label')),
('org', models.ForeignKey(related_name='fields', verbose_name='Organization', to='orgs.Org')),
],
),
migrations.CreateModel(
name='Group',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('uuid', models.CharField(unique=True, max_length=36)),
('name', models.CharField(max_length=64)),
('is_active', models.BooleanField(default=True, help_text='Whether this group is active')),
('created_on', models.DateTimeField(help_text='When this group was created', auto_now_add=True)),
('org', models.ForeignKey(related_name='new_groups', verbose_name='Organization', to='orgs.Org')),
],
),
migrations.CreateModel(
name='Value',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('string_value', models.TextField(help_text='The string value or string representation of this value', max_length=640, null=True)),
('contact', models.ForeignKey(related_name='values', to='contacts.Contact')),
('field', models.ForeignKey(to='contacts.Field')),
],
),
migrations.AddField(
model_name='contact',
name='groups',
field=models.ManyToManyField(related_name='contacts', to='contacts.Group'),
),
migrations.AddField(
model_name='contact',
name='org',
field=models.ForeignKey(related_name='new_contacts', verbose_name='Organization', to='orgs.Org'),
),
migrations.AlterUniqueTogether(
name='field',
unique_together=set([('org', 'key')]),
),
]
| [
"[email protected]"
] | |
796b55262c5939d604def2ffdc5807697e8ce051 | 26eb818572061109b55e498ab4f123a4ff9b9499 | /Stochastic_Evolutionary_Game/Stochastic_Game_Nowak_Code/Prisoners_Dilemma_Game/Matlab_Version/data.py | 80b1c7a3ca6a564a7c4dd0b6db60554e709bee0a | [] | no_license | Dcomplexity/Researches | 550e49b5a5951dca11df062aae1f86e2c12945c5 | 4eb55e2550970223c2f4006d289d8f4ba70a611a | refs/heads/master | 2022-04-04T02:13:56.976901 | 2020-02-01T14:34:44 | 2020-02-01T14:34:44 | 147,739,403 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 339 | py | import pylab
import scipy.io as scio
data = scio.loadmat('Coop_Time')
coopS = data['ans'][0]
coop1 = data['ans'][1]
coop2 = data['ans'][2]
pylab.figure()
pylab.title('Cooperation Rate with Time')
pylab.xlabel('Time')
pylab.ylabel('Cooperation Fraction')
pylab.plot(coopS, 'k')
pylab.plot(coop1, 'r')
pylab.plot(coop2, 'g')
pylab.show()
| [
"[email protected]"
] | |
4b6cbd25b8649a1363aba2a0743f9bef3b0bf588 | 6b6bd222658ab11b51afc27d507abf4ef7d8e57b | /chapter6/kmeans.py | 72b173ecb14ac98ea75d4c245148844914a34c87 | [] | no_license | JessicaFeng0926/classic_computer_science_problems | 2347851f051e6e2afb2340258d5bf2d0a20e1571 | 68e44f4569a774553b763685050101357124d4a5 | refs/heads/master | 2022-11-13T16:29:45.396010 | 2020-07-08T02:11:04 | 2020-07-08T02:11:04 | 274,041,806 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,792 | py | from __future__ import annotations
from typing import TypeVar, Generic, List, Sequence, Optional
from copy import deepcopy
from functools import partial
from random import uniform
from statistics import mean, pstdev
from dataclasses import dataclass
from data_point import DataPoint
def zscores(original: Sequence[float]) -> List[float]:
avg: float = mean(original)
std: float = pstdev(original)
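    # a zero standard deviation means all values are identical; return zeros
    # to avoid dividing by zero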
if std == 0:
return [0]*len(original)
return [(x-avg)/std for x in original]
Point = TypeVar('Point', bound=DataPoint)
class KMeans(Generic[Point]):
def __init__(self, k: int, points: List[Point], initial_centroids: Optional[List[Point]]=None) -> None:
# 簇的数量不能为0,也不能为负数
if k<1 :
raise ValueError('k must be >= 1')
self._points: List[Point] = points
self._zscore_normalize()
        # initialize empty clusters
self._clusters: List[KMeans.Cluster] = []
if initial_centroids is None:
for _ in range(k):
                # pick a random centroid
                rand_point: DataPoint = self._random_point()
                # seed a cluster around this random centroid;
                # for now it holds only the centroid, no member points
                cluster: KMeans.Cluster = KMeans.Cluster([], rand_point)
self._clusters.append(cluster)
else:
if len(initial_centroids) != k:
raise ValueError('The number of centroids must be k')
for i in range(k):
# 选择对应的中心点
centroid: DataPoint = initial_centroids[i]
# 用给定的这个中心点聚集起一个簇来
# 目前簇只有中心点,没有其他的成员点
cluster: KMeans.Cluster = KMeans.Cluster([],centroid)
self._clusters.append(cluster)
@property
def _centroids(self) -> List[DataPoint]:
return [x.centroid for x in self._clusters]
def _dimension_slice(self,dimension: int) -> List[float]:
return [x.dimensions[dimension] for x in self._points]
def _zscore_normalize(self) -> None:
zscored: List[List[float]] = [[] for _ in range(len(self._points))]
for dimension in range(self._points[0].num_dimensions):
dimension_slice: List[float] = self._dimension_slice(dimension)
for index, zscore in enumerate(zscores(dimension_slice)):
zscored[index].append(zscore)
for i in range(len(self._points)):
self._points[i].dimensions = tuple(zscored[i])
def _random_point(self) -> DataPoint:
        '''Build a random point by sampling each dimension uniformly within
        the range spanned by the existing data points.'''
rand_dimensions: List[float] = []
for dimension in range(self._points[0].num_dimensions):
values: List[float] = self._dimension_slice(dimension)
rand_value: float = uniform(min(values),max(values))
rand_dimensions.append(rand_value)
return DataPoint(rand_dimensions)
def _assign_clusters(self) -> None:
        '''Assign each data point to the cluster whose centroid is nearest.'''
for point in self._points:
closest: DataPoint = min(self._centroids,
key=partial(DataPoint.distance,point))
idx: int = self._centroids.index(closest)
cluster: KMeans.Cluster = self._clusters[idx]
cluster.points.append(point)
def _generate_centroids(self) -> None:
        '''Compute each cluster's mean point and make it the new centroid.'''
for cluster in self._clusters:
            # skip clusters that have no points yet
            if len(cluster.points) == 0:
                continue
            # collect the mean of the member points along each dimension
means: List[float] = []
for dimension in range(cluster.points[0].num_dimensions):
dimension_slice: List[float] = [p.dimensions[dimension] for p in cluster.points]
means.append(mean(dimension_slice))
            # build the new centroid from these per-dimension means
cluster.centroid = DataPoint(means)
def run(self,max_iterations:int = 100) -> List[KMeans.Cluster]:
for iteration in range(max_iterations):
for cluster in self._clusters:
                # clear each cluster's points from the previous round
                cluster.points.clear()
            # re-assign every point to its nearest cluster
            self._assign_clusters()
            # remember the current centroids
            old_centroids: List[DataPoint] = deepcopy(self._centroids)
            # generate new centroids
            self._generate_centroids()
            # if the centroids did not move between rounds, the clustering has
            # converged and the current centroids are final; return the clusters
if old_centroids == self._centroids:
print(f'Converged after {iteration} iterations')
return self._clusters
        # iterations exhausted; return the current clusters
return self._clusters
@dataclass
class Cluster:
points: List[Point]
centroid: DataPoint
if __name__ == '__main__':
point1: DataPoint = DataPoint([2.0,1.0,1.0])
point2: DataPoint = DataPoint([2.0,2.0,5.0])
point3: DataPoint = DataPoint([3.0,1.5,2.5])
kmeans_test: KMeans[DataPoint] = KMeans(2, [point1,point2,point3])
test_clusters: List[KMeans.Cluster] = kmeans_test.run()
for index, cluster in enumerate(test_clusters):
print(f'Cluster {index}: {cluster.points}')
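    # note: with random initial centroids the resulting split (and which
    # index each cluster gets) can vary between runs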
| [
"[email protected]"
] | |
71cfbf8a60c7c3ba6f029a3dbb93bebf0062f36d | 99a83749bc5976d78acb2eaa43139662a52629d4 | /msked/placements/utils_old.py | 73ea061533d9d83cf90c722c21b3c623905a9774 | [] | no_license | tommydangerous/msked | 16856ca484a98f0aa5785bc37355c33b436c6c37 | 681f48c386da17da64abbb24565efcce4cc0f10d | refs/heads/master | 2020-04-16T15:15:57.992175 | 2013-11-07T06:37:16 | 2013-11-07T06:37:16 | 8,272,061 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,445 | py | from collections import defaultdict
from django.conf import settings
from django.db.models import Q
from employees.models import Employee
from employees.utils import tier_lab_sum, tier_lab_balance_check
from random import shuffle
from tasks.utils import task_check
from undos.models import Undo
from works.utils import work_check
def set_placements(schedule):
employees = list(schedule.employees())
# the tier lab sum of all employees
total_tier = sum([e.tier_lab for e in employees])
# work locations for this schedule
locations = sorted(schedule.locations(),
key=lambda l: l.occupancy, reverse=True)
# create dictionary with empty list for each location
location_dict = defaultdict(list)
for location in locations:
location_dict[location] = []
# if schedule has at least 1 location with an occupancy number
if locations and locations[0].occupancy:
first_loc = locations[0]
second_loc = locations[1]
# separate location exclusive employees
exl_emp = first_loc.exclusive_employees()
exl_pks = [e.pk for e in exl_emp]
employees = [e for e in employees if e.pk not in exl_pks]
# minimum tier level required for first work location
min_tier = first_loc.occupancy/float(len(employees)) * total_tier
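        # min_tier is the tier share a perfectly proportional split would
        # give the first location; the shuffle loop below instead relies on
        # task_check to validate each candidate split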
loop_counter = 0
loop_max = settings.LOOP_MAX
work_check(schedule)
# keep shuffling until the tier levels are balanced in all locations
# or until the script has looped over itself 1000 times
while not location_dict[first_loc] and loop_counter < loop_max:
shuffle(employees)
needed = first_loc.occupancy - len(exl_emp)
first_emp = employees[:needed]
second_emp = employees[needed:]
temp = task_check(schedule, first_emp, second_emp)
if temp:
location_dict[first_loc] = temp[0] + exl_emp
location_dict[second_loc] = temp[1]
else:
loop_counter = loop_max
break
loop_counter += 1
print 'Set placement loop counter: %s' % loop_counter
if loop_counter < loop_max:
for location in locations:
for employee in location_dict[location]:
# create employee placements for location
employee.placement_set.create(location=location)
Undo.objects.create(location=location)
return loop_counter
else:
return False
def switch_placements(schedule):
all_employees = Employee.objects.exclude(vacation=True)
excludes = schedule.exclude_set.all()
if excludes:
# exclude employees on certain teams
for exclude in excludes:
all_employees = all_employees.exclude(team=exclude.team)
all_employees = list(all_employees)
# work locations for this schedule
locations = sorted(schedule.locations(),
key=lambda l: l.occupancy, reverse=True)
if len(locations) >= 2:
# check to see if employees are placed at both locations
first_loc = locations[0]
second_loc = locations[1]
if not first_loc.current_employees() and not (
second_loc.current_employees()):
return set_placements(schedule)
else:
# create dictionary with empty list for each location
location_dict = defaultdict(list)
# previous location dictionary
prev_dict = defaultdict(list)
for location in locations:
location_dict[location] = []
# store the location's previous employees
prev_dict[location] = (
[e for e in location.current_employees() if not (
e.vacation)])
employees = prev_dict[first_loc] + prev_dict[second_loc]
# check to see if any employees came back from vacation
new_extra = [e for e in all_employees if e not in employees]
# the tier lab sum of all employees
total_tier = sum([e.tier_lab for e in employees])
for employee in new_extra:
if employee.current_location():
# place them at their last worked location
                    prev_dict[employee.current_location()].append(employee)
else:
# place them in the second location
prev_dict[second_loc].append(employee)
if first_loc.occupancy:
# minimum tier level required for first work location
min_tier = first_loc.occupancy/float(len(
employees)) * total_tier
loop_counter = 0
loop_max = settings.LOOP_MAX
# check to see if there are enough employees left to
# work at each job for the week
work_check(schedule)
# separate location exclusive employees
exl_emp = first_loc.exclusive_employees()
exl_pks = [e.pk for e in exl_emp]
while not location_dict[first_loc] and loop_counter < loop_max:
prev_femp = prev_dict[first_loc]
prev_femp = [e for e in prev_femp if e.pk not in exl_pks]
prev_semp = prev_dict[second_loc]
temp = task_check(schedule, prev_femp, prev_semp)
if temp:
location_dict[first_loc] = temp[0] + exl_emp
location_dict[second_loc] = temp[1]
else:
loop_counter = loop_max
break
loop_counter += 1
print 'Switch placement loop counter: %s' % loop_counter
if loop_counter < loop_max:
for location in locations:
for employee in location_dict[location]:
# create employee placements for location
employee.placement_set.create(location=location)
Undo.objects.create(location=location)
return loop_counter
else:
return False | [
"[email protected]"
] | |
c484c984aa678a25c17ee8984e4c160b9abfbc25 | feea5b7e71dbcc491dc9fe9b1686b5c13949369e | /mods/channels/__init__.py | 21139372dfe1438fafc7a060196c47bd3261c615 | [] | no_license | IsmaelRLG/UserBot | 232660971f98db1838263f821f40f0c879f00030 | 1ed21d0c0274c022b0de66ecc48547d9dab8be2b | refs/heads/master | 2021-01-17T07:08:13.342496 | 2016-03-11T19:08:30 | 2016-03-11T19:08:30 | 39,799,613 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,260 | py | # -*- coding: utf-8 -*-
"""
UserBot module
Copyright 2015, Ismael R. Lugo G.
"""
import channels
reload(channels)
from sysb import commands
from channels import lang
from channels import _
commands.addHandler('channels', 'chan register( (?P<channel>[^ ]+))?', {
'sintax': 'chan register <channel>?',
'example': 'chan register #Foo',
    'desc': _('registers a channel with the bot', lang)},
registered=True,
logged=True,
channels=True,
chan_reqs='channel')(channels.register)
commands.addHandler('channels', 'chan flags( (?P<channel>#[^ ]+))? (?P<target>['
'^ ]+) (?P<flags>[^ ]+)', {
'sintax': 'chan flags <channel>? <target> <flags>',
'example': 'chan flags #Foo-chan foo-user OP',
    'desc': _('(adds / removes / edits / shows) the flags', lang)},
registered=True,
logged=True,
channels=True,
chn_registered=True,
privs='s',
chan_reqs='channel')(channels.flags)
commands.addHandler('channels', 'chan drop( (?P<channel>#[^ ]+))?', {
'sintax': 'chan drop <channel>?',
'example': 'chan drop #foo',
    'desc': _('removes a channel from the bot', lang)},
registered=True,
logged=True,
channels=True,
chn_registered=True,
privs='F',
chan_reqs='channel')(channels.drop) | [
"[email protected]"
] | |
42262be31f9c9c5961d6f718490d0a8e36264f3f | 52f8ac63714421e1930d7b90cb8200bb24d6ac42 | /milove/shop/migrations/0011_auto_20170914_1954.py | a24d6b8e68833b9780ce5f3322a7f656ff9f7926 | [] | no_license | richardchien/milove-backend | ccc7afa17e88b17199ad3878b9315e12c2af8ef1 | 0310f2a60ebcbc3143d0aedcc6cb5842ba264a43 | refs/heads/master | 2023-07-06T12:45:13.622032 | 2017-10-02T04:40:00 | 2017-10-02T04:40:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 876 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-09-14 11:54
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('shop', '0010_auto_20170911_1831'),
]
operations = [
migrations.AddField(
model_name='payment',
name='type',
field=models.CharField(choices=[('standard', 'PaymentType|standard'), ('recharge', 'PaymentType|recharge')], default='standard', max_length=20, verbose_name='type'),
),
migrations.AlterField(
model_name='payment',
name='order',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='payments', to='shop.Order', verbose_name='order'),
),
]
| [
"[email protected]"
] | |
d2ff44c7f4f8ddc697ff528d785f404eda63cd11 | 64c8d431c751b1b7a7cb7224107ee40f67fbc982 | /code/python/echomesh/base/Path.py | f72ecbc1483d9983735ec38c6681592f9d32e01b | [
"MIT"
] | permissive | silky/echomesh | 6ac4755e4ff5ea3aa2b2b671c0979068c7605116 | 2fe5a00a79c215b4aca4083e5252fcdcbd0507aa | refs/heads/master | 2021-01-12T20:26:59.294649 | 2013-11-16T23:29:05 | 2013-11-16T23:29:05 | 14,458,268 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,625 | py | from __future__ import absolute_import, division, print_function, unicode_literals
from echomesh.base import MakeEmptyProject
from echomesh.base import Platform
import getpass
import os
import os.path
import sys
ECHOMESH_EXTERNALS_OVERRIDE_SYSTEM_PACKAGES = True
# If this is True, you want Echomesh to use its own external packages in
# preference to any you might have installed in your system path.
CODE_PATH = os.path.abspath(sys.path[0])
EXTERNAL_CODE_PATH = os.path.join(CODE_PATH, 'external')
ECHOMESH_PATH = os.path.dirname(os.path.dirname(CODE_PATH))
BINARY_PATH = os.path.join(ECHOMESH_PATH, 'bin', Platform.PLATFORM)
PROJECT_PATH = None
COMMAND_PATH = None
ASSET_PATH = None
_REQUIRED_DIRECTORIES = 'asset', 'cache', 'command', 'log'
def _possible_project(path):
for d in _REQUIRED_DIRECTORIES:
if not os.path.exists(os.path.join(path, d)):
return False
return True
def set_project_path(project_path=None, show_error=True, prompt=True):
original_path = os.path.abspath(os.path.expanduser(project_path or os.curdir))
path = original_path
global PROJECT_PATH, COMMAND_PATH, ASSET_PATH
while not _possible_project(path):
p = os.path.dirname(path)
if p != path:
path = p
continue
if prompt:
if MakeEmptyProject.ask_to_make_empty_project(original_path):
path = original_path
break
else:
PROJECT_PATH = None
return False
if show_error:
print("\nYour path %s isn't in an echomesh project." % original_path)
print("Defaulting to the echomesh path %s." % ECHOMESH_PATH)
path = ECHOMESH_PATH
break
PROJECT_PATH = path
COMMAND_PATH = os.path.join(path, 'command')
ASSET_PATH = os.path.join(path, 'asset')
os.chdir(path)
return True
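# resolve the project path once at import time so PROJECT_PATH, COMMAND_PATH
# and ASSET_PATH are populated for importers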
set_project_path()
def info():
return {
'Asset path': ASSET_PATH,
'Code path': CODE_PATH,
'Command path': COMMAND_PATH,
'External code path': EXTERNAL_CODE_PATH,
'Project path': PROJECT_PATH,
'echomesh path': ECHOMESH_PATH,
}
def fix_sys_path():
for path in EXTERNAL_CODE_PATH, BINARY_PATH:
if path not in sys.path:
if ECHOMESH_EXTERNALS_OVERRIDE_SYSTEM_PACKAGES:
sys.path.insert(1, path)
else:
sys.path.append(path)
_HOME_VARIABLE_FIXED = False
# HACK!
def fix_home_directory_environment_variable():
if Platform.PLATFORM == Platform.DEBIAN:
global _HOME_VARIABLE_FIXED
if not _HOME_VARIABLE_FIXED:
# If running as root, export user pi's home directory as $HOME.
if getpass.getuser() == 'root':
os.environ['HOME'] = '/home/pi'
_HOME_VARIABLE_FIXED = True
| [
"[email protected]"
] | |
07ffac3ce1e2a9ef29d17775d50bc2c6979df431 | 3f6c16ea158a8fb4318b8f069156f1c8d5cff576 | /.PyCharm2019.1/system/python_stubs/1707563220/resource.py | cfb221481b898c344ab21274ba2bd7f905e9409f | [] | no_license | sarthak-patidar/dotfiles | 08494170d2c0fedc0bbe719cc7c60263ce6fd095 | b62cd46f3491fd3f50c704f0255730af682d1f80 | refs/heads/master | 2020-06-28T23:42:17.236273 | 2019-10-01T13:56:27 | 2019-10-01T13:56:27 | 200,369,900 | 0 | 0 | null | 2019-08-03T12:56:33 | 2019-08-03T11:53:29 | Shell | UTF-8 | Python | false | false | 5,485 | py | # encoding: utf-8
# module resource
# from /usr/lib/python3.6/lib-dynload/resource.cpython-36m-x86_64-linux-gnu.so
# by generator 1.147
# no doc
# no imports
# Variables with simple values
RLIMIT_AS = 9
RLIMIT_CORE = 4
RLIMIT_CPU = 0
RLIMIT_DATA = 2
RLIMIT_FSIZE = 1
RLIMIT_MEMLOCK = 8
RLIMIT_MSGQUEUE = 12
RLIMIT_NICE = 13
RLIMIT_NOFILE = 7
RLIMIT_NPROC = 6
RLIMIT_OFILE = 7
RLIMIT_RSS = 5
RLIMIT_RTPRIO = 14
RLIMIT_RTTIME = 15
RLIMIT_SIGPENDING = 11
RLIMIT_STACK = 3
RLIM_INFINITY = -1
RUSAGE_CHILDREN = -1
RUSAGE_SELF = 0
RUSAGE_THREAD = 1
# functions
def getpagesize(*args, **kwargs): # real signature unknown
pass
def getrlimit(*args, **kwargs): # real signature unknown
pass
def getrusage(*args, **kwargs): # real signature unknown
pass
def prlimit(*args, **kwargs): # real signature unknown
pass
def setrlimit(*args, **kwargs): # real signature unknown
pass
# classes
class error(Exception):
""" Base class for I/O related errors. """
def __init__(self, *args, **kwargs): # real signature unknown
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
def __reduce__(self, *args, **kwargs): # real signature unknown
pass
def __str__(self, *args, **kwargs): # real signature unknown
""" Return str(self). """
pass
characters_written = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
errno = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""POSIX exception code"""
filename = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""exception filename"""
filename2 = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""second exception filename"""
strerror = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""exception strerror"""
class struct_rusage(tuple):
"""
struct_rusage: Result from getrusage.
This object may be accessed either as a tuple of
(utime,stime,maxrss,ixrss,idrss,isrss,minflt,majflt,
nswap,inblock,oublock,msgsnd,msgrcv,nsignals,nvcsw,nivcsw)
or via the attributes ru_utime, ru_stime, ru_maxrss, and so on.
"""
def __init__(self, *args, **kwargs): # real signature unknown
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
def __reduce__(self, *args, **kwargs): # real signature unknown
pass
def __repr__(self, *args, **kwargs): # real signature unknown
""" Return repr(self). """
pass
ru_idrss = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""unshared data size"""
ru_inblock = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""block input operations"""
ru_isrss = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""unshared stack size"""
ru_ixrss = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""shared memory size"""
ru_majflt = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""page faults requiring I/O"""
ru_maxrss = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""max. resident set size"""
ru_minflt = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""page faults not requiring I/O"""
ru_msgrcv = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""IPC messages received"""
ru_msgsnd = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""IPC messages sent"""
ru_nivcsw = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""involuntary context switches"""
ru_nsignals = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""signals received"""
ru_nswap = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""number of swap outs"""
ru_nvcsw = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""voluntary context switches"""
ru_oublock = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""block output operations"""
ru_stime = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""system time used"""
ru_utime = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""user time used"""
n_fields = 16
n_sequence_fields = 16
n_unnamed_fields = 0
# variables with complex values
__loader__ = None # (!) real value is '<_frozen_importlib_external.ExtensionFileLoader object at 0x7fdd7a975898>'
__spec__ = None # (!) real value is "ModuleSpec(name='resource', loader=<_frozen_importlib_external.ExtensionFileLoader object at 0x7fdd7a975898>, origin='/usr/lib/python3.6/lib-dynload/resource.cpython-36m-x86_64-linux-gnu.so')"
| [
"[email protected]"
] | |
38b4d90fe79dceeac78c45f1b1931845c7c16df1 | f62fd455e593a7ad203a5c268e23129473d968b6 | /python-barbicanclient-4.2.0/functionaltests/cli/v1/behaviors/secret_behaviors.py | 783db8bcdceadd3bb8c49600d77926bb52b65eca | [
"Apache-2.0"
] | permissive | MinbinGong/OpenStack-Ocata | 5d17bcd47a46d48ff9e71e2055f667836174242f | 8b7650128cfd2fdf5d6c8bc4613ac2e396fb2fb3 | refs/heads/master | 2021-06-23T05:24:37.799927 | 2017-08-14T04:33:05 | 2017-08-14T04:33:05 | 99,709,985 | 0 | 2 | null | 2020-07-22T22:06:22 | 2017-08-08T15:48:44 | Python | UTF-8 | Python | false | false | 4,527 | py | # Copyright (c) 2015 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import base_behaviors
class SecretBehaviors(base_behaviors.BaseBehaviors):
def __init__(self):
super(SecretBehaviors, self).__init__()
self.LOG = logging.getLogger(type(self).__name__)
self.secret_hrefs_to_delete = []
def update_secret(self,
secret_href,
payload):
"""Update a secret
:param secret_href the href to the secret to update.
:param payload the payload to put into the secret.
:param payload_content_type the payload content type.
"""
argv = ['secret', 'update']
self.add_auth_and_endpoint(argv)
argv.extend([secret_href])
argv.extend([payload])
stdout, stderr = self.issue_barbican_command(argv)
def delete_secret(self, secret_href):
"""Delete a secret
:param secret_href the href to the secret to delete
"""
argv = ['secret', 'delete']
self.add_auth_and_endpoint(argv)
argv.extend([secret_href])
stdout, stderr = self.issue_barbican_command(argv)
self.secret_hrefs_to_delete.remove(secret_href)
def store_secret(self, payload="Payload for testing", store_argv=[]):
"""Store (aka create) a secret
The store_argv parameter allows additional command line parameters for
the store operation to be specified. This can be used to specify -a for
algorithm as an example.
:param payload The payload to use when storing the secret
:param store_argv The store command line parameters
:return: the href to the newly created secret
"""
argv = ['secret', 'store']
self.add_auth_and_endpoint(argv)
argv.extend(['--payload', payload])
argv.extend(store_argv)
stdout, stderr = self.issue_barbican_command(argv)
secret_data = self._prettytable_to_dict(stdout)
secret_href = secret_data['Secret href']
self.secret_hrefs_to_delete.append(secret_href)
return secret_href
def get_secret(self, secret_href):
"""Get a secret
:param: the href to a secret
:return dict of secret values, or an empty dict if the secret
is not found.
"""
argv = ['secret', 'get']
self.add_auth_and_endpoint(argv)
argv.extend([secret_href])
stdout, stderr = self.issue_barbican_command(argv)
if '4xx Client error: Not Found' in stderr:
return {}
secret_data = self._prettytable_to_dict(stdout)
return secret_data
def get_secret_payload(self, secret_href, raw=False):
"""Get a secret
:param: the href to a secret
:param raw if True then add "-f value" to get raw payload (ie not
within a PrettyTable). If False then omit -f.
:return string representing the secret payload.
"""
argv = ['secret', 'get']
self.add_auth_and_endpoint(argv)
argv.extend([secret_href])
argv.extend(['--payload'])
if raw:
argv.extend(['-f', 'value'])
stdout, stderr = self.issue_barbican_command(argv)
if '4xx Client error: Not Found' in stderr:
return {}
if raw:
secret = stdout.rstrip()
else:
secret_data = self._prettytable_to_dict(stdout)
secret = secret_data['Payload']
return secret
def list_secrets(self):
"""List secrets
:return: a list of secrets
"""
argv = ['secret', 'list']
self.add_auth_and_endpoint(argv)
stdout, stderr = self.issue_barbican_command(argv)
secret_list = self._prettytable_to_list(stdout)
return secret_list
def delete_all_created_secrets(self):
"""Delete all secrets that we created"""
for href in self.secret_hrefs_to_delete:
self.delete_secret(href)
| [
"[email protected]"
] | |
63e3cad549c027b04f4f2aeabb6948e59f1936fe | 15de7f67ac019324d99076906e7864e2b3d52218 | /Part 3/Ch1.py | c6c9fd87d0c1035be6d4c4b6f26a3055489bf39b | [] | no_license | ankiwoong/Web_Scraping_in_Python | df97aebc36b43d125f5d4ff55ab47cd114656c51 | dcc0950ee7fb5e4b9acaec581a2fcd590d27bb4a | refs/heads/master | 2022-12-03T07:58:38.626717 | 2020-08-30T02:37:13 | 2020-08-30T02:37:13 | 289,597,273 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 588 | py | import mechanicalsoup
browser = mechanicalsoup.Browser()
# browser() 객체는 head가 없는 웹 브라우저를 나타낸다.
# URL을 .get() 메소드에 전달하여 인터넷에서 페이지를 요청할 수 있다.
url = "http://olympus.realpython.org/login"
page = browser.get(url)
# 출력
print(page)
# MechanicalSoup은 요청에서 HTML을 구문 분석하기 위해 Beautiful Soup을 사용한다.
# 페이지에는 BeautifulSoup 개체를 나타내는 .soup 속성이 있다
print(type(page.soup))
# .soup 속성을 검사하여 HTML을 볼 수 있다.
print(page.soup)
| [
"[email protected]"
] | |
0cce371b26cc503d2072a6754493cd4e3c5ce7cb | 4749d3cf395522d90cb74d1842087d2f5671fa87 | /alice/LC686.py | 67bed14402b09716dbc8ef161fcd6f9c04fb99e2 | [] | no_license | AliceTTXu/LeetCode | c1ad763c3fa229362350ce3227498dfb1f022ab0 | ed15eb27936b39980d4cb5fb61cd937ec7ddcb6a | refs/heads/master | 2021-01-23T11:49:49.903285 | 2018-08-03T06:00:16 | 2018-08-03T06:00:16 | 33,470,003 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 625 | py | class Solution(object):
def repeatedStringMatch(self, A, B):
"""
:type A: str
:type B: str
:rtype: int
"""
temp_out = len(B) / len(A)
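        # at least len(B) // len(A) copies of A are needed; two extra copies
        # cover a match that starts partway through a copy of A,
        # e.g. A = "abcd", B = "cdabcdab" -> 3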
end = self.find_substring_ending(A * (temp_out + 2), B)
if end == -1:
return -1
else:
return temp_out + ((end + 1) - len(A) * temp_out) / len(A) + (((end + 1) - len(A) * temp_out) % len(A) > 0)
def find_substring_ending(self, A, B):
for i in xrange(len(A) - len(B) + 1):
if A[i:i + len(B)] == B:
return i + len(B) - 1
return -1
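# Note: this solution targets Python 2 (`xrange`, integer `/` division).
# Sanity check: Solution().repeatedStringMatch("abcd", "cdabcdab") returns 3,
# since "abcd" * 3 == "abcdabcdabcd" contains "cdabcdab" but "abcd" * 2 does not.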
| [
"[email protected]"
] | |
3bcdcd1ec3d8551f1937844d1b5049ba7f40e0de | 21bbcc4898cc653d2a81b91f500293f7e6932fc8 | /users/views/bet.py | 44bac74410fd01c5813a9e733895dec785afbeca | [] | no_license | ewgen19892/auction | 7e116354008349bbde147b42ee1a909cac7fc00b | 1449156a4985ca7757e342613e0762e11ed9aa30 | refs/heads/master | 2023-05-31T23:02:14.539923 | 2021-06-30T13:21:13 | 2021-06-30T13:21:13 | 381,708,643 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,256 | py | """Bets views."""
from rest_framework.generics import GenericAPIView
from rest_framework.mixins import (
CreateModelMixin,
ListModelMixin,
RetrieveModelMixin,
)
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from users.models.bet import Bet
from users.serializers.bet import BetSerializer
class BetList(GenericAPIView, ListModelMixin, CreateModelMixin):
"""Bet list view."""
queryset = Bet.objects.all()
serializer_class = BetSerializer
permission_classes = (IsAuthenticated,)
def get(self, request, *args, **kwargs) -> Response:
"""
Get Bets list.
"""
return self.list(request, *args, **kwargs)
def post(self, request, *args, **kwargs) -> Response:
"""
Create a new bet.
"""
return self.create(request, *args, **kwargs)
class BetDetail(
GenericAPIView,
RetrieveModelMixin,
):
"""Bet detail view."""
serializer_class = BetSerializer
queryset = Bet.objects.all()
permission_classes = (IsAuthenticated,)
def get(self, request, *args, **kwargs) -> Response:
"""
Get bet by ID.
"""
return self.retrieve(request, *args, **kwargs)
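# Minimal URLconf sketch for wiring these views (illustrative only; route
# names and module layout are assumptions, not part of this file):
#     from django.urls import path
#     from users.views.bet import BetList, BetDetail
#     urlpatterns = [
#         path('bets/', BetList.as_view(), name='bet-list'),
#         path('bets/<int:pk>/', BetDetail.as_view(), name='bet-detail'),
#     ]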
| [
"[email protected]"
] | |
5b3f7af6254318f0492a9a4656e243ea1a650008 | 18f776553a59a89faf05144ed1a69dc563dc4e9e | /Algorithm/insert_sort.py | cbdb6501102014039571662e8f5b9136479ee6a9 | [] | no_license | g-lyc/LeetCode | 274feff3b6c61da0ec8726deac0b298baed5cf10 | fa45cd44c3d4e7b0205833efcdc708d1638cbbe4 | refs/heads/master | 2022-12-13T14:50:02.976951 | 2020-09-03T09:39:58 | 2020-09-03T09:39:58 | 259,212,315 | 15 | 1 | null | null | null | null | UTF-8 | Python | false | false | 706 | py | # coding:utf-8
def insert_sort(alist):
"""插入排序"""
n = len(alist)
# 从右边的无序序列中取出多少个元素执行这样的过程
for j in range(1, n):
# j = [1, 2, 3, n-1]
# i 代表内层循环起始值
i = j
# 执行从右边的无序序列中取出第一个元素,即i位置的元素,然后将其插入到前面的正确位置中
while i > 0:
if alist[i] < alist[i-1]:
alist[i], alist[i-1] = alist[i-1], alist[i]
i -= 1
else:
break
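# Complexity note: O(n^2) comparisons/swaps in the worst case (reverse-sorted
# input) and O(n) in the best case, because the inner loop breaks immediately
# on already-ordered pairs. The sort is in place:
#     data = [3, 1, 2]; insert_sort(data)  # data becomes [1, 2, 3]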
if __name__ == "__main__":
li = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print(li)
insert_sort(li)
print(li) | [
"[email protected]"
] | |
6aca02bb5efcef065a9dbd7d46d07f6bb6937bf1 | 76e8afe527e191e6291562c6140606c16d7385df | /wsgi.py | 75ad6b25013956a7af77660d24fbd9058b5cb79c | [] | no_license | socek/tklive2013 | dfa896800713832d3f1b4a11f35aecf723e09328 | 2fa8c6fd099a66b7f84fc5df94d0a2b542a44f75 | refs/heads/master | 2021-01-22T05:16:34.001073 | 2013-02-25T13:14:43 | 2013-02-25T13:14:43 | 42,201,372 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 345 | py | #!/home/socek/www/2013.turniejkosza.pl/venv/bin/python
import os, sys
sys.path.append('/home/socek/www/2013.turniejkosza.pl/tklive')
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' # this is your settings.py file
os.environ['PYTHON_EGG_CACHE'] = '/tmp'
import django.core.handlers.wsgi
application = django.core.handlers.wsgi.WSGIHandler()
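# Note: this is the legacy (pre-Django 1.4) WSGI entry point. On modern
# Django the equivalent would be (sketch, assuming the same settings module):
#     from django.core.wsgi import get_wsgi_application
#     application = get_wsgi_application()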
| [
"[email protected]"
] | |
253c9632016a2b5d45b61ace6d16cc791687be0b | 20f2f9c4728cc8380062c557b838af45d65b909b | /tests/basetypes.py | ad5a3e8fe487b3e4dd897fef64492948ff945397 | [] | no_license | dankamongmen/everpad | 5cea529cab55f2c4a8f4fc5588807c35b2a61aef | 4eea1d6c9b322cbad021453f3a0dc57582fb5c6a | refs/heads/master | 2021-01-16T19:51:27.598790 | 2012-12-06T09:28:28 | 2012-12-06T09:37:13 | 7,032,105 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,317 | py | import sys
sys.path.append('..')
from everpad.basetypes import Tag, DbusSendable
import unittest
class TestBaseTypes(unittest.TestCase):
def test_signature(self):
class Fake(DbusSendable):
fields = (
('id', 'i'),
('name', 's'),
)
self.assertEqual(
Fake.signature, '(is)',
'generate signature',
)
def test_serialise(self):
class Fake(object):
id = 0
name = '123'
tag = Tag.from_obj(Fake())
self.assertEqual(
tag.struct, (0, '123'),
'serialise to struct',
)
def test_load(self):
tag = Tag.from_tuple((0, '123'))
self.assertEqual(
tag.name, '123',
'load from struct',
)
def test_give(self):
class Fake(object):
id = 0
@property
def id_dbus(self):
return self.id
@id_dbus.setter
def id_dbus(self, val):
self.id = val + 12
tag = Tag.from_tuple((0, '123'))
obj = Fake()
tag.give_to_obj(obj)
self.assertEqual(
obj.id, 12,
'give data to object',
)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
e2001593d3dace8fffbd229e25b81b7d1a5932c2 | e54c04a919a21af0564c6d3f77c92c84a097e58b | /src/front-door/azext_front_door/vendored_sdks/models/frontend_endpoint_link_py3.py | 2777bd8e09b27cfb2d1604822a29b8ac26415ee0 | [
"LicenseRef-scancode-generic-cla",
"MIT"
] | permissive | rlrossiter/azure-cli-extensions | b8b4acad5394edbff545f4fd145cf24255db7453 | e8a640e6623e69e21fa7118eceb1ae28d134bb9a | refs/heads/master | 2021-08-06T18:17:29.141297 | 2019-09-22T13:37:04 | 2019-09-22T13:37:04 | 210,704,389 | 1 | 0 | MIT | 2019-09-24T21:56:44 | 2019-09-24T21:56:43 | null | UTF-8 | Python | false | false | 905 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class FrontendEndpointLink(Model):
"""Defines the Resource ID for a Frontend Endpoint.
:param id: Resource ID.
:type id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(self, *, id: str=None, **kwargs) -> None:
super(FrontendEndpointLink, self).__init__(**kwargs)
self.id = id
| [
"[email protected]"
] | |
e165f515f5a8171ea465a2a3904e19bda27ebe4a | ad2090cc9591d38456621951d4901276481b55fd | /python编程技巧提高/day03/ex3_实现反向迭代.py | 389dd1920cf2aee18456d949fadf7f3918a3afae | [] | no_license | GuangGuangLi-Artist/LearningPython | 9d17366c4b64f5b3d53b885b71f1cf9bd4d2f53f | 0810ff6d0cc557f4d5ed8c024ce413a93183a6da | refs/heads/master | 2023-08-18T16:32:03.595418 | 2023-07-30T09:47:48 | 2023-07-30T09:47:48 | 201,511,027 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 614 | py | #coding=utf-8
class FloatRange():
def __init__(self,start,end,step=0.1):
self.start = start
self.end = end
self.step = step
def __iter__(self):
t = self.start
while t <= self.end:
yield t
t += self.step
def __reversed__(self):
t = self.end
while t >= self.start:
yield t
t -= self.step
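# How this works: the built-in reversed() looks for a __reversed__ method and
# uses it instead of index-based iteration; both methods here are generator
# functions, so values are produced lazily. Beware that repeated float
# addition can accumulate error, so the endpoint may be skipped for some steps.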
if __name__ == '__main__':
'''Forward iteration'''
# for x in FloatRange(1.0,4.0,0.5):
# print(x)
'''Reverse iteration'''
for x in reversed(FloatRange(1.0, 4.0, 0.5)):
print(x) | [
"[email protected]"
] | |
43fcbd83f3f339c662561f29b5c5cd79d393716b | 3c000380cbb7e8deb6abf9c6f3e29e8e89784830 | /venv/Lib/site-packages/cobra/modelimpl/fv/rttoeptask.py | f93376a7a4574d9dc1dc35e1a6c378196f119b89 | [] | no_license | bkhoward/aciDOM | 91b0406f00da7aac413a81c8db2129b4bfc5497b | f2674456ecb19cf7299ef0c5a0887560b8b315d0 | refs/heads/master | 2023-03-27T23:37:02.836904 | 2021-03-26T22:07:54 | 2021-03-26T22:07:54 | 351,855,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,689 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2020 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class RtToEpTask(Mo):
"""
Mo doc not defined in techpub!!!
"""
meta = ClassMeta("cobra.model.fv.RtToEpTask")
meta.moClassName = "fvRtToEpTask"
meta.rnFormat = "fvRtToEpTask-%(id)s"
meta.category = MoCategory.TASK
meta.label = "None"
meta.writeAccessMask = 0x1
meta.readAccessMask = 0x1
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = False
meta.parentClasses.add("cobra.model.action.LicensemgrSubj")
meta.parentClasses.add("cobra.model.action.StreamelemSubj")
meta.parentClasses.add("cobra.model.action.ObserverSubj")
meta.parentClasses.add("cobra.model.action.SnmpdSubj")
meta.parentClasses.add("cobra.model.action.VmmmgrSubj")
meta.parentClasses.add("cobra.model.action.AnalyticsSubj")
meta.parentClasses.add("cobra.model.action.ScripthandlerSubj")
meta.parentClasses.add("cobra.model.action.ConfelemSubj")
meta.parentClasses.add("cobra.model.action.GoobserverelemSubj")
meta.parentClasses.add("cobra.model.action.EventmgrSubj")
meta.parentClasses.add("cobra.model.action.OspaelemSubj")
meta.parentClasses.add("cobra.model.action.VtapSubj")
meta.parentClasses.add("cobra.model.action.GohealthelemSubj")
meta.parentClasses.add("cobra.model.action.OshSubj")
meta.parentClasses.add("cobra.model.action.DhcpdSubj")
meta.parentClasses.add("cobra.model.action.OpflexelemSubj")
meta.parentClasses.add("cobra.model.action.DomainmgrSubj")
meta.parentClasses.add("cobra.model.action.DbgrelemSubj")
meta.parentClasses.add("cobra.model.action.CloudpeSubj")
meta.parentClasses.add("cobra.model.action.PlgnhandlerSubj")
meta.parentClasses.add("cobra.model.action.TopomgrSubj")
meta.parentClasses.add("cobra.model.action.VleafelemSubj")
meta.parentClasses.add("cobra.model.action.NxosmockSubj")
meta.parentClasses.add("cobra.model.action.DbgrSubj")
meta.parentClasses.add("cobra.model.action.PlatformmgrSubj")
meta.parentClasses.add("cobra.model.action.AppliancedirectorSubj")
meta.parentClasses.add("cobra.model.action.OpflexpSubj")
meta.parentClasses.add("cobra.model.action.BootmgrSubj")
meta.parentClasses.add("cobra.model.action.AeSubj")
meta.parentClasses.add("cobra.model.action.GoeventelemSubj")
meta.parentClasses.add("cobra.model.action.GoconnectorSubj")
meta.parentClasses.add("cobra.model.action.PolicymgrSubj")
meta.parentClasses.add("cobra.model.action.ExtXMLApiSubj")
meta.parentClasses.add("cobra.model.action.ObserverelemSubj")
meta.parentClasses.add("cobra.model.action.PolicyelemSubj")
meta.parentClasses.add("cobra.model.action.PolicydistSubj")
meta.parentClasses.add("cobra.model.action.IdmgrSubj")
meta.parentClasses.add("cobra.model.action.EdmgrSubj")
meta.superClasses.add("cobra.model.action.RInst")
meta.superClasses.add("cobra.model.pol.ComplElem")
meta.superClasses.add("cobra.model.task.Inst")
meta.superClasses.add("cobra.model.action.Inst")
meta.rnPrefixes = [
('fvRtToEpTask-', True),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "data", "data", 52, PropCategory.REGULAR)
prop.label = "Data"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(0, 512)]
meta.props.add("data", prop)
prop = PropMeta("str", "descr", "descr", 33, PropCategory.REGULAR)
prop.label = "Description"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(0, 128)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("descr", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "endTs", "endTs", 15575, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("endTs", prop)
prop = PropMeta("str", "fail", "fail", 46, PropCategory.REGULAR)
prop.label = "Fail"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("fail", prop)
prop = PropMeta("str", "flags", "flags", 30392, PropCategory.REGULAR)
prop.label = "Flags"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("flags", prop)
prop = PropMeta("str", "id", "id", 24125, PropCategory.REGULAR)
prop.label = "ID"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.isNaming = True
prop.defaultValue = 0
prop.defaultValueStr = "none"
prop._addConstant("DbgacToEp", "dbgactoep", 2126)
prop._addConstant("none", "none", 0)
meta.props.add("id", prop)
prop = PropMeta("str", "invErrCode", "invErrCode", 49, PropCategory.REGULAR)
prop.label = "Remote Error Code"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("ERR-FILTER-illegal-format", None, 1140)
prop._addConstant("ERR-FSM-no-such-state", None, 1160)
prop._addConstant("ERR-HTTP-set-error", None, 1551)
prop._addConstant("ERR-HTTPS-set-error", None, 1552)
prop._addConstant("ERR-MO-CONFIG-child-object-cant-be-configured", None, 1130)
prop._addConstant("ERR-MO-META-no-such-object-class", None, 1122)
prop._addConstant("ERR-MO-PROPERTY-no-such-property", None, 1121)
prop._addConstant("ERR-MO-PROPERTY-value-out-of-range", None, 1120)
prop._addConstant("ERR-MO-access-denied", None, 1170)
prop._addConstant("ERR-MO-deletion-rule-violation", None, 1107)
prop._addConstant("ERR-MO-duplicate-object", None, 1103)
prop._addConstant("ERR-MO-illegal-containment", None, 1106)
prop._addConstant("ERR-MO-illegal-creation", None, 1105)
prop._addConstant("ERR-MO-illegal-iterator-state", None, 1100)
prop._addConstant("ERR-MO-illegal-object-lifecycle-transition", None, 1101)
prop._addConstant("ERR-MO-naming-rule-violation", None, 1104)
prop._addConstant("ERR-MO-object-not-found", None, 1102)
prop._addConstant("ERR-MO-resource-allocation", None, 1150)
prop._addConstant("ERR-aaa-config-modify-error", None, 1520)
prop._addConstant("ERR-acct-realm-set-error", None, 1513)
prop._addConstant("ERR-add-ctrlr", None, 1574)
prop._addConstant("ERR-admin-passwd-set", None, 1522)
prop._addConstant("ERR-api", None, 1571)
prop._addConstant("ERR-auth-issue", None, 1548)
prop._addConstant("ERR-auth-realm-set-error", None, 1514)
prop._addConstant("ERR-authentication", None, 1534)
prop._addConstant("ERR-authorization-required", None, 1535)
prop._addConstant("ERR-connect", None, 1572)
prop._addConstant("ERR-create-domain", None, 1562)
prop._addConstant("ERR-create-keyring", None, 1560)
prop._addConstant("ERR-create-role", None, 1526)
prop._addConstant("ERR-create-user", None, 1524)
prop._addConstant("ERR-delete-domain", None, 1564)
prop._addConstant("ERR-delete-role", None, 1528)
prop._addConstant("ERR-delete-user", None, 1523)
prop._addConstant("ERR-domain-set-error", None, 1561)
prop._addConstant("ERR-http-initializing", None, 1549)
prop._addConstant("ERR-incompat-ctrlr-version", None, 1568)
prop._addConstant("ERR-internal-error", None, 1540)
prop._addConstant("ERR-invalid-args", None, 1569)
prop._addConstant("ERR-invalid-delimiter", None, 1589)
prop._addConstant("ERR-invalid-domain", None, 1588)
prop._addConstant("ERR-invalid-domain-name", None, 1582)
prop._addConstant("ERR-ldap-delete-error", None, 1510)
prop._addConstant("ERR-ldap-get-error", None, 1509)
prop._addConstant("ERR-ldap-group-modify-error", None, 1518)
prop._addConstant("ERR-ldap-group-set-error", None, 1502)
prop._addConstant("ERR-ldap-set-error", None, 1511)
prop._addConstant("ERR-missing-method", None, 1546)
prop._addConstant("ERR-modify-ctrlr-access", None, 1567)
prop._addConstant("ERR-modify-ctrlr-dvs-version", None, 1576)
prop._addConstant("ERR-modify-ctrlr-rootcont", None, 1575)
prop._addConstant("ERR-modify-ctrlr-scope", None, 1573)
prop._addConstant("ERR-modify-ctrlr-trig-inventory", None, 1577)
prop._addConstant("ERR-modify-domain", None, 1563)
prop._addConstant("ERR-modify-domain-encapmode", None, 1581)
prop._addConstant("ERR-modify-domain-enfpref", None, 1578)
prop._addConstant("ERR-modify-domain-mcastpool", None, 1579)
prop._addConstant("ERR-modify-domain-mode", None, 1580)
prop._addConstant("ERR-modify-domain-prefencapmode", None, 1584)
prop._addConstant("ERR-modify-role", None, 1527)
prop._addConstant("ERR-modify-user", None, 1525)
prop._addConstant("ERR-modify-user-domain", None, 1565)
prop._addConstant("ERR-modify-user-role", None, 1532)
prop._addConstant("ERR-no-buf", None, 1570)
prop._addConstant("ERR-passwd-set-failure", None, 1566)
prop._addConstant("ERR-provider-group-modify-error", None, 1519)
prop._addConstant("ERR-provider-group-set-error", None, 1512)
prop._addConstant("ERR-radius-global-set-error", None, 1505)
prop._addConstant("ERR-radius-group-set-error", None, 1501)
prop._addConstant("ERR-radius-set-error", None, 1504)
prop._addConstant("ERR-request-timeout", None, 1545)
prop._addConstant("ERR-role-set-error", None, 1515)
prop._addConstant("ERR-rsa-global-set-error", None, 1587)
prop._addConstant("ERR-rsa-group-set-error", None, 1585)
prop._addConstant("ERR-rsa-set-error", None, 1586)
prop._addConstant("ERR-secondary-node", None, 1550)
prop._addConstant("ERR-service-not-ready", None, 1539)
prop._addConstant("ERR-set-password-strength-check", None, 1543)
prop._addConstant("ERR-store-pre-login-banner-msg", None, 1521)
prop._addConstant("ERR-tacacs-enable-error", None, 1508)
prop._addConstant("ERR-tacacs-global-set-error", None, 1507)
prop._addConstant("ERR-tacacs-group-set-error", None, 1503)
prop._addConstant("ERR-tacacs-set-error", None, 1506)
prop._addConstant("ERR-user-account-expired", None, 1536)
prop._addConstant("ERR-user-set-error", None, 1517)
prop._addConstant("ERR-xml-parse-error", None, 1547)
prop._addConstant("communication-error", "communication-error", 1)
prop._addConstant("none", "none", 0)
meta.props.add("invErrCode", prop)
prop = PropMeta("str", "invErrDescr", "invErrDescr", 50, PropCategory.REGULAR)
prop.label = "Remote Error Description"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(0, 128)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("invErrDescr", prop)
prop = PropMeta("str", "invRslt", "invRslt", 48, PropCategory.REGULAR)
prop.label = "Remote Result"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "not-applicable"
prop._addConstant("capability-not-implemented-failure", "capability-not-implemented-failure", 16384)
prop._addConstant("capability-not-implemented-ignore", "capability-not-implemented-ignore", 8192)
prop._addConstant("capability-not-supported", "capability-not-supported", 32768)
prop._addConstant("capability-unavailable", "capability-unavailable", 65536)
prop._addConstant("end-point-failed", "end-point-failed", 32)
prop._addConstant("end-point-protocol-error", "end-point-protocol-error", 64)
prop._addConstant("end-point-unavailable", "end-point-unavailable", 16)
prop._addConstant("extend-timeout", "extend-timeout", 134217728)
prop._addConstant("failure", "failure", 1)
prop._addConstant("fru-identity-indeterminate", "fru-identity-indeterminate", 4194304)
prop._addConstant("fru-info-malformed", "fru-info-malformed", 8388608)
prop._addConstant("fru-not-ready", "fru-not-ready", 67108864)
prop._addConstant("fru-not-supported", "fru-not-supported", 536870912)
prop._addConstant("fru-state-indeterminate", "fru-state-indeterminate", 33554432)
prop._addConstant("fw-defect", "fw-defect", 256)
prop._addConstant("hw-defect", "hw-defect", 512)
prop._addConstant("illegal-fru", "illegal-fru", 16777216)
prop._addConstant("intermittent-error", "intermittent-error", 1073741824)
prop._addConstant("internal-error", "internal-error", 4)
prop._addConstant("not-applicable", "not-applicable", 0)
prop._addConstant("resource-capacity-exceeded", "resource-capacity-exceeded", 2048)
prop._addConstant("resource-dependency", "resource-dependency", 4096)
prop._addConstant("resource-unavailable", "resource-unavailable", 1024)
prop._addConstant("service-not-implemented-fail", "service-not-implemented-fail", 262144)
prop._addConstant("service-not-implemented-ignore", "service-not-implemented-ignore", 131072)
prop._addConstant("service-not-supported", "service-not-supported", 524288)
prop._addConstant("service-protocol-error", "service-protocol-error", 2097152)
prop._addConstant("service-unavailable", "service-unavailable", 1048576)
prop._addConstant("sw-defect", "sw-defect", 128)
prop._addConstant("task-reset", "task-reset", 268435456)
prop._addConstant("timeout", "timeout", 8)
prop._addConstant("unidentified-fail", "unidentified-fail", 2)
meta.props.add("invRslt", prop)
prop = PropMeta("str", "lcOwn", "lcOwn", 9, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "local"
prop._addConstant("implicit", "implicit", 4)
prop._addConstant("local", "local", 0)
prop._addConstant("policy", "policy", 1)
prop._addConstant("replica", "replica", 2)
prop._addConstant("resolveOnBehalf", "resolvedonbehalf", 3)
meta.props.add("lcOwn", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "oDn", "oDn", 51, PropCategory.REGULAR)
prop.label = "Subject DN"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("oDn", prop)
prop = PropMeta("str", "operSt", "operSt", 15674, PropCategory.REGULAR)
prop.label = "Completion"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "scheduled"
prop._addConstant("cancelled", "cancelled", 3)
prop._addConstant("completed", "completed", 2)
prop._addConstant("crashsuspect", "crash-suspect", 7)
prop._addConstant("failed", "failed", 4)
prop._addConstant("indeterminate", "indeterminate", 5)
prop._addConstant("processing", "processing", 1)
prop._addConstant("ready", "ready", 8)
prop._addConstant("scheduled", "scheduled", 0)
prop._addConstant("suspended", "suspended", 6)
meta.props.add("operSt", prop)
prop = PropMeta("str", "originMinority", "originMinority", 54, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = False
prop.defaultValueStr = "no"
prop._addConstant("no", None, False)
prop._addConstant("yes", None, True)
meta.props.add("originMinority", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "runId", "runId", 45, PropCategory.REGULAR)
prop.label = "ID"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("runId", prop)
prop = PropMeta("str", "startTs", "startTs", 36, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("startTs", prop)
prop = PropMeta("str", "startTx", "startTx", 36895, PropCategory.REGULAR)
prop.label = "startTxId"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "none"
prop._addConstant("none", "none", 0)
meta.props.add("startTx", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
prop = PropMeta("str", "try", "try", 15574, PropCategory.REGULAR)
prop.label = "Try"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("try", prop)
prop = PropMeta("str", "ts", "ts", 47, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("ts", prop)
meta.namingProps.append(getattr(meta.props, "id"))
def __init__(self, parentMoOrDn, id, markDirty=True, **creationProps):
namingVals = [id]
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"[email protected]"
] | |
6033d53dbc557f177f5618eb13bead953214860f | 39d4504ec1da8975fac526d6801b94f4348b6b61 | /official/utils/logs/hooks_helper_test.py | 5adb5fdc343ad00cfab2557436de927e68501743 | [
"Apache-2.0"
] | permissive | vincentcheny/models | fe0ff5888e6ee00a0d4fa5ee14154acdbeebe7ad | afb1a59fc1bc792ac72d1a3e22e2469020529788 | refs/heads/master | 2020-07-23T21:38:24.559521 | 2019-11-15T07:50:11 | 2019-11-15T07:50:11 | 207,712,649 | 1 | 0 | Apache-2.0 | 2019-09-11T03:12:31 | 2019-09-11T03:12:31 | null | UTF-8 | Python | false | false | 2,777 | py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for hooks_helper."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import unittest
import tensorflow as tf # pylint: disable=g-bad-import-order
from official.utils.logs import hooks_helper
from official.utils.misc import keras_utils
class BaseTest(unittest.TestCase):
def setUp(self):
super(BaseTest, self).setUp()
if keras_utils.is_v2_0:
tf.compat.v1.disable_eager_execution()
def test_raise_in_non_list_names(self):
with self.assertRaises(ValueError):
hooks_helper.get_train_hooks(
'LoggingTensorHook, ProfilerHook', model_dir="", batch_size=256)
def test_raise_in_invalid_names(self):
invalid_names = ['StepCounterHook', 'StopAtStepHook']
with self.assertRaises(ValueError):
hooks_helper.get_train_hooks(invalid_names, model_dir="", batch_size=256)
def validate_train_hook_name(self,
test_hook_name,
expected_hook_name,
**kwargs):
returned_hook = hooks_helper.get_train_hooks(
[test_hook_name], model_dir="", **kwargs)
self.assertEqual(len(returned_hook), 1)
self.assertIsInstance(returned_hook[0], tf.estimator.SessionRunHook)
self.assertEqual(returned_hook[0].__class__.__name__.lower(),
expected_hook_name)
def test_get_train_hooks_logging_tensor_hook(self):
self.validate_train_hook_name('LoggingTensorHook', 'loggingtensorhook')
def test_get_train_hooks_profiler_hook(self):
self.validate_train_hook_name('ProfilerHook', 'profilerhook')
def test_get_train_hooks_examples_per_second_hook(self):
self.validate_train_hook_name('ExamplesPerSecondHook',
'examplespersecondhook')
def test_get_logging_metric_hook(self):
test_hook_name = 'LoggingMetricHook'
self.validate_train_hook_name(test_hook_name, 'loggingmetrichook')
if __name__ == '__main__':
tf.test.main()
| [
"[email protected]"
] | |
ad0e22e38a0cabad285892fabed6f049c4e102a0 | ba9fc166ea9adb64e522471a4fc96e1810862516 | /examples/simple_rpc_server.py | db4cdd58130f340bb9dc83a573bde92b1e461b2d | [
"MIT"
] | permissive | martincolladodotcom/amqpstorm | 30ca347234f86b653ea6c0327ba93b7d40b8dee6 | 8c320601b92482472f3e5fe366221fa276c49004 | refs/heads/master | 2021-08-07T13:52:53.570087 | 2017-11-08T07:54:59 | 2017-11-08T07:54:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,021 | py | """
RPC Server example based on code from the official RabbitMQ Tutorial.
http://www.rabbitmq.com/tutorials/tutorial-six-python.html
"""
import amqpstorm
from amqpstorm import Message
CONNECTION = amqpstorm.Connection('127.0.0.1', 'guest', 'guest')
CHANNEL = CONNECTION.channel()
CHANNEL.queue.declare(queue='rpc_queue')
def fib(number):
if number == 0:
return 0
elif number == 1:
return 1
else:
return fib(number - 1) + fib(number - 2)
def on_request(message):
number = int(message.body)
print(" [.] fib(%s)" % (number,))
response = str(fib(number))
properties = {
'correlation_id': message.correlation_id
}
response = Message.create(message.channel, response, properties)
response.publish(message.reply_to)
message.ack()
if __name__ == '__main__':
CHANNEL.basic.qos(prefetch_count=1)
CHANNEL.basic.consume(on_request, queue='rpc_queue')
print(" [x] Awaiting RPC requests")
CHANNEL.start_consuming(to_tuple=False)
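# Sketch of the matching RPC client from the same tutorial (illustrative
# only; treating queue.declare()'s return value as a dict with a 'queue' key
# is an assumption about amqpstorm):
#     connection = amqpstorm.Connection('127.0.0.1', 'guest', 'guest')
#     channel = connection.channel()
#     result = channel.queue.declare(exclusive=True)  # server-named reply queue
#     props = {'reply_to': result['queue'], 'correlation_id': '1'}
#     Message.create(channel, '30', props).publish('rpc_queue')
#     # ...then consume the reply queue and match replies on correlation_id.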
| [
"[email protected]"
] | |
9af698eb1b9e10126a05c34812ef09e3ca26db5f | c4079336265fcaa2eb8be72d5a755d2dd3c95b1c | /bin/service.py | 114b11a3ee9f2e3e925d41730aab7de2ddb60201 | [
"MIT"
] | permissive | alexhsamuel/absence | 3bdd5e9b6f20f995b4b71ffc6ad94c52c7757c59 | 2dcb1e14f4ec0d90b1dad3ca03ebba7fa5a3d3ff | refs/heads/master | 2020-03-11T06:39:26.328341 | 2018-04-17T03:37:55 | 2018-04-17T03:37:55 | 129,835,861 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,848 | py | #!/usr/bin/env python
import argparse
import flask
import logging
from pathlib import Path
import absence.api
import absence.api.service
from absence.db import SqliteDB
#-------------------------------------------------------------------------------
app = flask.Flask(__name__)
app.register_blueprint(absence.api.service.API, url_prefix="/api/v1")
logging.basicConfig(
format ="%(asctime)s [%(levelname)-7s] %(name)s: %(message)s",
datefmt ="%Y-%m-%dT%H:%M:%S",
)
parser = argparse.ArgumentParser()
parser.add_argument(
"--host", metavar="ADDR", default="localhost",
help="serve on ADDR [def: localhost]")
parser.add_argument(
"--port", metavar="PORT", type=int, default=absence.api.DEFAULT_PORT,
help="serve on PORT [def: {}]".format(absence.api.DEFAULT_PORT))
parser.add_argument(
"--repo", metavar="PATH", type=Path, default=Path("./repo"),
help="use repo dir at PATH")
parser.add_argument(
"--initialize", action="store_true", default=False,
help="initialize repo if missing")
parser.add_argument(
"--debug", action="store_true", default=False,
help="run Werkzeug in debug mode")
parser.add_argument(
"--log", metavar="LEVEL", default="INFO",
help="log at LEVEL [def: INFO]")
parser.add_argument(
"--db", metavar="FILE", default="./absence.sqlite",
help="path to database")
parser.add_argument(
"--create-db", action="store_true", default=False,
help="create the database")
args = parser.parse_args()
logging.getLogger().setLevel(getattr(logging, args.log.upper()))
# We don't cache the database as SQLite connections are thead-specific.
# But either create or check it up front.
if args.create_db:
SqliteDB.create(args.db)
else:
SqliteDB.open(args.db)
app.db_path = args.db
app.run(host=args.host, port=args.port, debug=args.debug, threaded=False)
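# Example invocations (derived from the argparse flags above; paths are
# illustrative):
#     python service.py --create-db --db ./absence.sqlite
#     python service.py --host 0.0.0.0 --port 8080 --log DEBUG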
| [
"[email protected]"
] | |
9851e0e373c9fa6897b1f6abde21d140a495945e | b7620d0f1a90390224c8ab71774b9c906ab3e8e9 | /aliyun-python-sdk-gpdb/aliyunsdkgpdb/request/v20160503/DescribeSQLLogsOnSliceRequest.py | 6c001777dbc8db958ef3c54d9c3f181732da9bde | [
"Apache-2.0"
] | permissive | YaoYinYing/aliyun-openapi-python-sdk | e9c62940baee1a35b9ec4a9fbd1e4eb0aaf93b2f | e9a93cc94bd8290d1b1a391a9cb0fad2e6c64627 | refs/heads/master | 2022-10-17T16:39:04.515562 | 2022-10-10T15:18:34 | 2022-10-10T15:18:34 | 117,057,304 | 0 | 0 | null | 2018-01-11T06:03:02 | 2018-01-11T06:03:01 | null | UTF-8 | Python | false | false | 2,838 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkgpdb.endpoint import endpoint_data
class DescribeSQLLogsOnSliceRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'gpdb', '2016-05-03', 'DescribeSQLLogsOnSlice')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_SliceId(self): # String
return self.get_query_params().get('SliceId')
def set_SliceId(self, SliceId): # String
self.add_query_param('SliceId', SliceId)
def get_PageNumber(self): # Integer
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Integer
self.add_query_param('PageNumber', PageNumber)
def get_MinExecuteCost(self): # String
return self.get_query_params().get('MinExecuteCost')
def set_MinExecuteCost(self, MinExecuteCost): # String
self.add_query_param('MinExecuteCost', MinExecuteCost)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def get_DBInstanceId(self): # String
return self.get_query_params().get('DBInstanceId')
def set_DBInstanceId(self, DBInstanceId): # String
self.add_query_param('DBInstanceId', DBInstanceId)
def get_MaxExecuteCost(self): # String
return self.get_query_params().get('MaxExecuteCost')
def set_MaxExecuteCost(self, MaxExecuteCost): # String
self.add_query_param('MaxExecuteCost', MaxExecuteCost)
def get_ExecuteState(self): # String
return self.get_query_params().get('ExecuteState')
def set_ExecuteState(self, ExecuteState): # String
self.add_query_param('ExecuteState', ExecuteState)
def get_QueryId(self): # String
return self.get_query_params().get('QueryId')
def set_QueryId(self, QueryId): # String
self.add_query_param('QueryId', QueryId)
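# Illustrative usage sketch (standard aliyun-python-sdk-core boilerplate, not
# part of this generated file; the IDs below are placeholders):
#     from aliyunsdkcore.client import AcsClient
#     client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
#     request = DescribeSQLLogsOnSliceRequest()
#     request.set_DBInstanceId('gp-xxxxxxxx')
#     request.set_QueryId('12345')
#     request.set_SliceId('0')
#     response = client.do_action_with_exception(request)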
| [
"[email protected]"
] | |
06c1b7128e4e03525c80978f40c0045334e11190 | c5d68f58c9523257a8b41954553f5cff2cd5f487 | /Secao_08_Lista_Ex_73e/ex_67.py | b48841401026d069e655abe6f4ea6e4a504e6079 | [] | no_license | SouzaCadu/guppe | 04bfcde82d4404eb9ec795006c6931ba07dc72b6 | 1f8a672230c5c27712f522e1e34516591c012453 | refs/heads/master | 2023-03-13T01:32:51.019871 | 2021-02-25T17:02:59 | 2021-02-25T17:02:59 | 320,908,119 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,256 | py | """
Write a routine that receives a character vector and its size as parameters.
The function must read a string from the keyboard, character by character,
using the getchar() function, until the user presses Enter or the maximum
size of the vector is reached.
"""
def getchar():
"""
Function that returns the character entered by the user while it runs
:return: Returns the character entered by the user during execution.
If the user types more than one character, a value of type None is returned
"""
caractere = input("Informe um caractere: ")
if len(caractere) <= 1:
return caractere
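# Note: if more than one character is typed, the function falls through and
# implicitly returns None; rotina() below would then append None, because
# None != "". Testing `if valor:` instead would treat both cases as a stop.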
def rotina(args, tamanho):
"""
Function that receives a vector and fills it in a loop until it reaches
the size requested by the user
:param args: Receives a vector
:param tamanho: Receives the number of entries the user should provide
:return: Returns the vector filled in by the user
"""
for _ in range(tamanho):
valor = getchar()
if valor != "":
args.append(valor)
else:
break
return args
vetor = []
tam = 8
print(f"{rotina(vetor, tam)}")
| [
"[email protected]"
] | |
f2d735b073eda95981eb74d263efe7b8b08b0939 | 181af10fcf40b824fe92d3b8f72fd15d6d1490c2 | /Contests/101-200/week 178/1368. Minimum Cost to Make at Least One Valid Path in a Grid/Minimum Cost to Make at Least One Valid Path in a Grid.py | c0f974e882a945b86410b30ef80cb3fbd07b4071 | [] | no_license | wangyendt/LeetCode | 402c59a0b7b7f5b3a672231ea5dad8056ade36af | 4a3ba15284c45b2d8bf38306c8c8526ae174615c | refs/heads/master | 2023-08-10T06:27:54.995152 | 2023-08-10T02:22:27 | 2023-08-10T02:22:27 | 176,651,399 | 6 | 0 | null | null | null | null | UTF-8 | Python | false | false | 824 | py | #!/usr/bin/env python
# encoding: utf-8
"""
@author: Wayne
@contact: [email protected]
@software: PyCharm
@file: Minimum Cost to Make at Least One Valid Path in a Grid
@time: 2020/3/5 14:47
"""
class Solution:
def minCost(self, A: list) -> int:
n, m, inf, k = len(A), len(A[0]), 10 ** 9, 0
dp = [[inf] * m for i in range(n)]
dirt = [[0, 1], [0, -1], [1, 0], [-1, 0]]
bfs = []
def dfs(x, y):
if not (0 <= x < n and 0 <= y < m and dp[x][y] == inf): return
dp[x][y] = k
bfs.append([x, y])
dfs(x + dirt[A[x][y] - 1][0], y + dirt[A[x][y] - 1][1])
dfs(0, 0)
while bfs:
k += 1
bfs, bfs2 = [], bfs
[dfs(x + i, y + j) for x, y in bfs2 for i, j in dirt]
return dp[-1][-1]
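# Example (LeetCode 1368, sample 1): arrows are 1=right, 2=left, 3=down,
# 4=up, and the answer is the minimum number of arrows to change:
#     Solution().minCost([[1,1,1,1],[2,2,2,2],[1,1,1,1],[2,2,2,2]])  # -> 3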
| [
"[email protected]"
] | |
70cfde39fc00d4c6dbdb49f823c1d203e72aeaa0 | d09ad52c0911a83a5e5a03850bb5371d22446226 | /metrics.py | c0c926ae23018f66d249ffebc51658a841faf430 | [] | no_license | hzfmer/pyawp | 88d0ea24b47aa02a8ef4a89e59335b636a5c205b | aff564d59c1f00ada2755ab239980fbfa0a8d9cb | refs/heads/main | 2023-05-08T07:37:53.603933 | 2021-05-27T18:51:56 | 2021-05-27T18:51:56 | 371,476,689 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,102 | py | import sympy as sp
def covariant_basis(x, r):
"""
Compute covariant basis vectors.
Arguments:
x : List of mappings of physical coordinates to parameter coordinates
r : List of parameter coordinates.
Returns:
a : Covariant basis (list of vectors). For example, `a[0]` gives the
first covariant basis vector.
"""
a1 = [sp.diff(xi,r[0]) for xi in x]
a2 = [sp.diff(xi,r[1]) for xi in x]
a3 = [sp.diff(xi,r[2]) for xi in x]
return a1, a2, a3
def jacobian_matrix(a1,a2,a3):
"""
Return the Jacobian matrix.
"""
J = sp.Matrix([a1,a2,a3])
return J
def contravariant_basis(J, a):
"""
Compute contravariant basis vectors
Arguments:
J : Determinant of Jacobian
a : Covariant basis (list of basis vectors)
Returns:
b : Contravariant basis (list of vectors). For example, `b[0]` gives the
first contravariant basis vector.
"""
return [a[1].cross(a[2])/J, a[2].cross(a[0])/J, a[0].cross(a[1])/J]
def metric_tensor(a):
"""
Compute the metric tensor. Whether it is covariant or contravariant metric
tensor depends which basis vectors are passed into this function.
Arguments:
a : Either the covariant or contravariant basis vectors.
Returns:
G : A symmetric and positive definite matrix `G[i,j]`.
"""
m = len(a)
G = sp.zeros(m,m)
for i in range(m):
for j in range(m):
G[i,j] = a[i].dot(a[j])
return G
def metrics(x, r, eval_J=0):
"""
Compute metric coefficients for a mapping given physical coordinates `x` and
parameter coordinates `r`.
To use, specify x = [x1, x2, x3] for each xi, as a
function of each ri, i.e., `x1 = f(r1,r2,r3)`, etc.
Example:
>> import sympy as sp
>> f = sp.Function('f')(r1,r2)
>> x1 = f
Returns:
a : Covariant basis vectors
b : Contravariant basis vectors
Jmat : Jacobian matrix
J : Determinant of Jacobian matrix
"""
a1, a2, a3 = covariant_basis(x, r)
Jmat = jacobian_matrix(a1,a2,a3)
J = Jmat.det()
a = [sp.Matrix(ai) for ai in [a1, a2, a3]]
if eval_J:
b = contravariant_basis(J, a)
else:
b = contravariant_basis(sp.symbols('J'), a)
Ga = metric_tensor(a)
Gb = metric_tensor(b)
return a, b, Ga, Gb, Jmat, J
def christoffel(a, b, r):
"""
Compute the Christoffel symbols:
\Gamma^k_{ij} = a^k \cdot \frac{\partial a_i}{\partial r^j}
Input arguments:
a : Covariant basis vectors
b : Contravariant basis vectors
r : Coordinates
Returns:
Gam : Christoffel symbols as an array of matrices. Symbol is defined as
`Gam[k][i,j]`.
"""
m = len(a)
Gam = [0]*m
for k in range(m):
Gam[k] = sp.zeros(m)
for i in range(m):
for j in range(m):
Gam[k][i,j] = b[k].dot([sp.diff(a[i][l], r[j]) \
for l in range(m)])
return Gam
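# Usage sketch (cylindrical coordinates; an illustrative example, not part of
# the original module API):
#     r = sp.symbols('r1 r2 r3')
#     x = [r[0]*sp.cos(r[1]), r[0]*sp.sin(r[1]), r[2]]
#     a, b, Ga, Gb, Jmat, J = metrics(x, r, eval_J=1)
#     Gam = christoffel(a, b, r)
#     sp.simplify(Gam[0][1,1])  # Gamma^r_{theta,theta}, simplifies to -r1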
| [
"[email protected]"
] | |
94547a2d5945424fe6d0f40934e69eedc44a5cba | 86a017dd4c8d4d77c511cc598190aaa9dc0ae3e8 | /data structure/mine_tree.py | 50c7743ae9b425ddc25d7aea392e1c7f56c1aff7 | [] | no_license | sungguenja/studying | fd7459eb9faa6488d7b63bf3884a92513daf3c54 | 719f4dfbda211c34de2a0c8cf3b9d3001f29fcec | refs/heads/master | 2023-08-17T13:46:44.343780 | 2023-08-10T11:55:15 | 2023-08-10T11:55:15 | 232,306,053 | 0 | 0 | null | 2022-12-16T10:53:26 | 2020-01-07T11:00:28 | Python | UTF-8 | Python | false | false | 1,713 | py | import mine_node
from collections import deque
def preorder(n):
if n is not None:
print(n.data)
preorder(n.left)
preorder(n.right)
def inorder(n):
if n is not None:
inorder(n.left)
print(n.data)
inorder(n.right)
def postorder(n):
if n is not None:
postorder(n.left)
postorder(n.right)
print(n.data)
def levelorder(n):
Que = deque()
Que.append(n)
while Que:
node = Que.popleft()
if node is not None:
print(node.data)
Que.append(node.left)
Que.append(node.right)
# Find the lowest common ancestor (LCA)
checked = [False]*21
depth = [0]*21
atree = [
[1,2],
[3,4],
[5,6],
[7,8],
[9,10,11],
[],
[],
[],
[12,13],
[14],
[15],
[],
[],
[16,17],
[18],
[19],
[],
[20],
[],
[],
[]
]
parent = [[] for i in range(21)]
log = 11
def dfs(x,dep):
checked[x] = True
depth[x] = dep
for i in atree[x]:
if checked[i]:
continue
parent[i].append(x)
dfs(i,dep+1)
def setParent():
dfs(0,0)
for i in range(20,-1,-1):
j = parent[i]
while len(j) > 0:
j = parent[j[0]]
if len(j) == 0:
break
parent[i].append(j[0])
setParent()
def setSameDepth(A,B):
while depth[A] > depth[B]:
A = parent[A][0]
while depth[A] < depth[B]:
B = parent[B][0]
return A,B
def findSameParent(A,B):
value1,value2 = setSameDepth(A,B)
while value1 != value2:
value1 = parent[value1][0]
value2 = parent[value2][0]
return value1
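# For the hard-coded tree above, node 20 (path 0-1-3-8-13-17-20) and node 15
# (path 0-1-4-10-15) first share ancestor 1, so the call below prints 1.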
print(findSameParent(20,15)) | [
"[email protected]"
] | |
d066657a658a5b4bcac8e5964f2e917273576be4 | a9a8931d6877d6e0f4f11cbd7b50322819e0fe45 | /hpc/REBAGG-SMOTER_43.py | b5ee6bdf31673ae698cb31a019f9f71d1437d5cd | [] | no_license | jafetgado/tomerdesign | 8517f9f8266bcf1db64fdf00d12294f682cd412d | a0d0961a11d7d84be5343d374198ab0f5084c2b3 | refs/heads/master | 2022-05-31T18:15:55.045419 | 2020-04-25T05:49:32 | 2020-04-25T05:49:32 | 258,499,679 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 9,399 | py | """
Template script for hyperparameter tuning with HPC
Evaluates the performance of a strategy for a single
set of hyperparameter combinations.
"""
# Imports
#============#
import numpy as np
import pandas as pd
import joblib
import itertools
from sklearn.tree import DecisionTreeRegressor
from sklearn.ensemble import RandomForestRegressor
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import r2_score
from sklearn.metrics import mean_squared_error
import resreg
import warnings
warnings.filterwarnings("ignore")
# Get dataset and features
#==============================#
aalist = list('ACDEFGHIKLMNPQRSTVWY')
def getAAC(seq):
aac = np.array([seq.count(x) for x in aalist])/len(seq)
return aac
data = pd.read_excel('sequence_ogt_topt.xlsx', index_col=0)
aac = np.array([getAAC(seq) for seq in data['sequence']])
ogt = data['ogt'].values.reshape((data.shape[0],1))
X = np.append(aac, ogt, axis=1)
sc = StandardScaler()
X = sc.fit_transform(X)
y = data['topt'].values
# Strategies and hyperparameters
#======================================#
# Hyperparameter range
cl_vals = [25.0, 30.0, None]
ch_vals = [72.2, 60.0]
ks = [5, 10, 15]
deltas = [0.1, 0.5, 1.0]
overs = [0.5, 0.75]
unders = [0.5, 0.75]
sizes = [300, 600]
sample_methods = ['balance', 'extreme', 'average']
size_methods = ['balance', 'variation']
all_params = {}
# Hyperparameter combinations (grid search)
all_params['RO'] = list(itertools.product(cl_vals, ch_vals, sample_methods))
all_params['SMOTER'] = list(itertools.product(cl_vals, ch_vals, sample_methods, ks))
all_params['GN'] = list(itertools.product(cl_vals, ch_vals, sample_methods, deltas))
all_params['WERCS'] = list(itertools.product(cl_vals, ch_vals, overs, unders))
all_params['WERCS-GN'] = list(itertools.product(cl_vals, ch_vals, overs, unders, deltas))
all_params['REBAGG-RO'] = list(itertools.product(cl_vals, ch_vals, size_methods,
sizes))
all_params['REBAGG-SMOTER'] = list(itertools.product(cl_vals, ch_vals, size_methods,
sizes, ks))
all_params['REBAGG-GN'] = list(itertools.product(cl_vals, ch_vals, size_methods,
sizes, deltas))
all_params['REBAGG-WERCS'] = list(itertools.product(cl_vals, ch_vals, sizes, overs,
unders))
all_params['REBAGG-WERCS-GN'] = list(itertools.product(cl_vals, ch_vals, sizes, overs,
unders, deltas))
strategies = list(all_params.keys())
# Evaluate performance for a single strategy and hyperparameter combination
#===========================================================================#
bins = [30, 50, 65, 85] # For splitting target values into bins
m = 100 # Number of regressors in REBAGG ensemble
# Specify strategy and param (instead of a lengthy for loop of combinations)
strategy = 'REBAGG-SMOTER' # Replace REBAGG-SMOTER for this calculation
params = all_params[strategy]
param = params[43] # Replace 43 for this calculation
# Implement calculation for only specified strategy and param
r2_store, mse_store, mcc_store, f1_store = [], [], [], [] # Empty lists for storing results
mse_bins_store = []
# Monte Carlo cross validation (MCCV) loop
for rrr in range(50):
# Resample validation set (uniform distribution)
train_indices, test_indices = resreg.uniform_test_split(X, y, bins=bins,
bin_test_size=70, verbose=False,
random_state=rrr)
X_train, y_train = X[train_indices,:], y[train_indices]
X_test, y_test = X[test_indices,:], y[test_indices]
# Unpack hyperparameters, resample training data, and fit regressors
reg = DecisionTreeRegressor(random_state=rrr) if 'REBAGG' in strategy else \
RandomForestRegressor(n_estimators=10, n_jobs=-1, random_state=rrr)
if strategy=='RO':
cl, ch, sample_method = param
relevance = resreg.sigmoid_relevance(y_train, cl=cl, ch=ch)
X_train, y_train = resreg.random_oversample(X_train, y_train, relevance,
relevance_threshold=0.5, over=sample_method,
random_state=rrr)
reg.fit(X_train, y_train)
elif strategy=='SMOTER':
cl, ch, sample_method, k = param
relevance = resreg.sigmoid_relevance(y_train, cl=cl, ch=ch)
X_train, y_train = resreg.smoter(X_train, y_train, relevance,
relevance_threshold=0.5, k=k, over=sample_method,
random_state=rrr)
reg.fit(X_train, y_train)
elif strategy=='GN':
cl, ch, sample_method, delta = param
relevance = resreg.sigmoid_relevance(y_train, cl=cl, ch=ch)
X_train, y_train = resreg.gaussian_noise(X_train, y_train, relevance,
relevance_threshold=0.5, delta=delta, over=sample_method,
random_state=rrr)
reg.fit(X_train, y_train)
elif strategy=='WERCS':
cl, ch, over, under = param
relevance = resreg.sigmoid_relevance(y_train, cl=cl, ch=ch)
X_train, y_train = resreg.wercs(X_train, y_train, relevance, over=over,
under=under, noise=False, random_state=rrr)
reg.fit(X_train, y_train)
elif strategy=='WERCS-GN':
cl, ch, over, under, delta = param
relevance = resreg.sigmoid_relevance(y_train, cl=cl, ch=ch)
X_train, y_train = resreg.wercs(X_train, y_train, relevance, over=over,
under=under, noise=True, delta=delta, random_state=rrr)
reg.fit(X_train, y_train)
elif strategy=='REBAGG-RO':
cl, ch, size_method, s = param
relevance = resreg.sigmoid_relevance(y_train, cl=cl, ch=ch)
rebagg = resreg.Rebagg(m=m, s=s, base_reg=reg)
rebagg.fit(X_train, y_train, relevance, relevance_threshold=0.5,
sample_method='random_oversample', size_method=size_method,
random_state=rrr)
elif strategy=='REBAGG-SMOTER':
cl, ch, size_method, s, k = param
relevance = resreg.sigmoid_relevance(y_train, cl=cl, ch=ch)
rebagg = resreg.Rebagg(m=m, s=s, base_reg=reg)
rebagg.fit(X_train, y_train, relevance, relevance_threshold=0.5,
sample_method='smoter', size_method=size_method, k=k,
random_state=rrr)
elif strategy=='REBAGG-GN':
cl, ch, size_method, s, delta = param
relevance = resreg.sigmoid_relevance(y_train, cl=cl, ch=ch)
rebagg = resreg.Rebagg(m=m, s=s, base_reg=reg)
rebagg.fit(X_train, y_train, relevance, relevance_threshold=0.5,
sample_method='gaussian', size_method=size_method, delta=delta,
random_state=rrr)
elif strategy=='REBAGG-WERCS':
cl, ch, s, over, under = param
relevance = resreg.sigmoid_relevance(y_train, cl=cl, ch=ch)
rebagg = resreg.Rebagg(m=m, s=s, base_reg=reg)
rebagg.fit(X_train, y_train, relevance=relevance, sample_method='wercs',
over=over, under=under, random_state=rrr)
elif strategy=='REBAGG-WERCS-GN':
cl, ch, s, over, under, delta = param
relevance = resreg.sigmoid_relevance(y_train, cl=cl, ch=ch)
rebagg = resreg.Rebagg(m=m, s=s, base_reg=reg)
rebagg.fit(X_train, y_train, relevance=relevance, sample_method='wercs-gn',
over=over, under=under, delta=delta, random_state=rrr)
# Validate fitted regressors on uniform validation set
if 'REBAGG' in strategy:
y_pred = rebagg.predict(X_test)
else:
y_pred = reg.predict(X_test)
# Evaluate regressor performance on validation set
r2 = r2_score(y_test, y_pred)
mse = mean_squared_error(y_test, y_pred)
mcc = resreg.matthews_corrcoef(y_test, y_pred, bins)
relevance_true = resreg.sigmoid_relevance(y_test, cl=None, ch=65)
relevance_pred = resreg.sigmoid_relevance(y_pred, cl=None, ch=65)
f1 = resreg.f1_score(y_test, y_pred, error_threshold=5,
relevance_true=relevance_true, relevance_pred=relevance_pred,
relevance_threshold=0.5, k=1e4)
mse_bins = resreg.bin_performance(y_test, y_pred, bins, metric='MSE')
# Store performance results
r2_store.append(r2)
mse_store.append(mse)
mcc_store.append(mcc)
f1_store.append(f1)
mse_bins_store.append(mse_bins)
# Performance statistics
r2_mean, r2_std = np.mean(r2_store), np.std(r2_store)
mse_mean, mse_std = np.mean(mse_store), np.std(mse_store)
f1_mean, f1_std = np.mean(f1_store), np.std(f1_store)
mcc_mean, mcc_std = np.mean(mcc_store), np.std(mcc_store)
mse_bins_store = pd.DataFrame(mse_bins_store)
mse_bins_mean, mse_bins_std = np.mean(mse_bins_store, axis=0), np.std(mse_bins_store, axis=0)
# Combine all performance data and write to excel spreadsheet
means = [r2_mean, mse_mean, f1_mean, mcc_mean] + list(mse_bins_mean)
stds = [r2_std, mse_std, f1_std, mcc_std] + list(mse_bins_std)
store = [param] + means + stds
# Save performance results as a binary file (to be read and analyzed later)
joblib.dump(store, f'hpc/joblib_files/{strategy}_{43}.pkl')
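# The saved results can be read back later for analysis, e.g.:
#     store = joblib.load('hpc/joblib_files/REBAGG-SMOTER_43.pkl')
#     param = store[0]  # the hyperparameter tuple evaluated by this run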
| [
"[email protected]"
] | |
ffe05317dc06d9997163e1418a54c24f60a49031 | fb0f6646b2a7972454453907fbdc656b7471f55f | /p260_file_read.py | 8c4892ed1674aecd1e9a19996f1fc807b08cae99 | [] | no_license | woojin97318/python_basic | 6497d5c85369746edfe8ca79ad7f3f47c871ee66 | 97e9a322a08f1483bf35dc03507ac36af2bf1ddb | refs/heads/master | 2023-07-15T03:06:05.716623 | 2021-08-25T03:46:48 | 2021-08-25T03:46:48 | 399,681,125 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 142 | py | # Open the file.
with open("basic.txt", "r") as file:
# Read the file and print its contents.
contents = file.read()
print(contents) | [
"[email protected]"
] | |
7f08de5fc9d940f912450c7c8a1c200a3d404b56 | 21f38f1a9f6d4edfa3b233697e17d86f30b168ce | /janeway/migrations/0002_add_releases_and_credits.py | 8046d7841d026120c8fed38e386d74af867f80ff | [] | no_license | m100bit/demozoo | 3734d0126a6f0bd9ff98128a4350e40b22cdd8a3 | 93918da57e7cb96a0d2f724e5a876406d3477891 | refs/heads/master | 2023-03-14T05:34:20.323613 | 2023-01-27T23:10:32 | 2023-01-27T23:23:52 | 232,933,218 | 0 | 0 | null | 2020-01-10T00:32:53 | 2020-01-10T00:32:53 | null | UTF-8 | Python | false | false | 2,068 | py | # Generated by Django 1.11.8 on 2019-07-11 16:07
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('janeway', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Credit',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('janeway_id', models.IntegerField()),
('category', models.CharField(max_length=50)),
('description', models.CharField(blank=True, max_length=255)),
('name', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='credits', to='janeway.Name')),
],
),
migrations.CreateModel(
name='Release',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('janeway_id', models.IntegerField()),
('title', models.CharField(max_length=255)),
('supertype', models.CharField(choices=[(b'production', b'Production'), (b'graphics', b'Graphics'), (b'music', b'Music')], max_length=20)),
('author_names', models.ManyToManyField(related_name='authored_releases', to='janeway.Name')),
],
),
migrations.CreateModel(
name='ReleaseType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('type_name', models.CharField(max_length=255)),
('release', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='types', to='janeway.Release')),
],
),
migrations.AddField(
model_name='credit',
name='release',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='credits', to='janeway.Release'),
),
]
| [
"[email protected]"
] | |
35950c8356581390bba8855f424cf6e26e3c6bc5 | 7dc65b6d2e857c807bd2f75e2586af5f8e933fe5 | /scripts/securitygroup/test_regression.py | 574e5de1f23065be99d681f0425ed5fae7b1fe47 | [
"LicenseRef-scancode-warranty-disclaimer",
"Apache-2.0"
] | permissive | vkolli/contrail-test-perf | d6fdc20f4a2004066c5a6316afd915ecdc9366c2 | db04b8924a2c330baabe3059788b149d957a7d67 | refs/heads/master | 2021-01-18T15:36:18.120487 | 2017-03-30T19:19:30 | 2017-03-30T19:19:30 | 86,661,522 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 111,564 | py | import unittest
from tcutils.wrappers import preposttest_wrapper
from vnc_api.vnc_api import NoIdError
from verify import VerifySecGroup
from policy_test import PolicyFixture
from vn_test import MultipleVNFixture
from vm_test import MultipleVMFixture
from base import BaseSGTest
from common.policy.config import ConfigPolicy
from security_group import SecurityGroupFixture,get_secgrp_id_from_name
from vn_test import VNFixture
from vm_test import VMFixture
from tcutils.topo.topo_helper import *
import os
import sys
sys.path.append(os.path.realpath('scripts/flow_tests'))
from tcutils.topo.sdn_topo_setup import *
import test
import sdn_sg_test_topo
from tcutils.tcpdump_utils import *
from time import sleep
from tcutils.util import get_random_name
from base_traffic import *
from tcutils.util import skip_because
import test_regression_basic
class SecurityGroupRegressionTests2(BaseSGTest, VerifySecGroup, ConfigPolicy):
@classmethod
def setUpClass(cls):
super(SecurityGroupRegressionTests2, cls).setUpClass()
cls.option = 'openstack'
def setUp(self):
super(SecurityGroupRegressionTests2, self).setUp()
self.create_sg_test_resources()
def tearDown(self):
self.logger.debug("Tearing down SecurityGroupRegressionTests2.")
super(SecurityGroupRegressionTests2, self).tearDown()
def runTest(self):
pass
@preposttest_wrapper
def test_sec_group_with_proto(self):
"""
Description: Verify security group with allow specific protocol on all ports and policy with allow all between VNs
Steps:
1. create the resources VN,VM,policy,SG
2. update the SG rules with proto tcp(for sg1) and udp(sg2)
3. verify that the traffic allowed matches the protocol allowed in the SG rules
Pass criteria: step 3 should pass
"""
self.logger.info("Configure the policy with allow any")
rules = [
{
'direction': '<>',
'protocol': 'any',
'source_network': self.vn1_name,
'src_ports': [0, -1],
'dest_network': self.vn2_name,
'dst_ports': [0, -1],
'simple_action': 'pass',
},
]
self.config_policy_and_attach_to_vn(rules)
rule = [{'direction': '<>',
'protocol': 'tcp',
'dst_addresses': [{'subnet': {'ip_prefix': '10.1.1.0', 'ip_prefix_len': 24}},
{'subnet': {'ip_prefix': '20.1.1.0', 'ip_prefix_len': 24}}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'src_addresses': [{'security_group': 'local'}],
},
{'direction': '<>',
'protocol': 'tcp',
'src_addresses': [{'subnet': {'ip_prefix': '10.1.1.0', 'ip_prefix_len': 24}},
{'subnet': {'ip_prefix': '20.1.1.0', 'ip_prefix_len': 24}}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'dst_addresses': [{'security_group': 'local'}],
}]
self.sg1_fix.replace_rules(rule)
rule = [{'direction': '<>',
'protocol': 'udp',
'dst_addresses': [{'subnet': {'ip_prefix': '10.1.1.0', 'ip_prefix_len': 24}},
{'subnet': {'ip_prefix': '20.1.1.0', 'ip_prefix_len': 24}}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'src_addresses': [{'security_group': 'local'}],
},
{'direction': '<>',
'protocol': 'udp',
'src_addresses': [{'subnet': {'ip_prefix': '10.1.1.0', 'ip_prefix_len': 24}},
{'subnet': {'ip_prefix': '20.1.1.0', 'ip_prefix_len': 24}}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'dst_addresses': [{'security_group': 'local'}],
}]
self.sg2_fix.replace_rules(rule)
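        # At this point sg1 allows only TCP and sg2 only UDP on all ports;
        # the verification below checks that traffic is passed/dropped accordingly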
self.verify_sec_group_port_proto()
return True
@preposttest_wrapper
def test_sec_group_with_port(self):
"""
Description: Verify security group with allow specific protocol/port and policy with allow all between VN's
Steps:
1. create the resources VN,VM,policy,SG
        2. update the SG rules with proto tcp (for sg1) and udp (for sg2), opening ports 8000-9000
3. verify if traffic allowed is as per the proto/port allowed in SG rule
Pass criteria: step 3 should pass
"""
self.logger.info("Configure the policy with allow any")
rules = [
{
'direction': '<>',
'protocol': 'any',
'source_network': self.vn1_name,
'src_ports': [0, -1],
'dest_network': self.vn2_name,
'dst_ports': [0, -1],
'simple_action': 'pass',
},
]
self.config_policy_and_attach_to_vn(rules)
rule = [{'direction': '<>',
'protocol': 'tcp',
'dst_addresses': [{'subnet': {'ip_prefix': '10.1.1.0', 'ip_prefix_len': 24}},
{'subnet': {'ip_prefix': '20.1.1.0', 'ip_prefix_len': 24}}],
'dst_ports': [{'start_port': 8000, 'end_port': 9000}],
'src_ports': [{'start_port': 8000, 'end_port': 9000}],
'src_addresses': [{'security_group': 'local'}],
},
{'direction': '<>',
'protocol': 'tcp',
'src_addresses': [{'subnet': {'ip_prefix': '10.1.1.0', 'ip_prefix_len': 24}},
{'subnet': {'ip_prefix': '20.1.1.0', 'ip_prefix_len': 24}}],
'src_ports': [{'start_port': 8000, 'end_port': 9000}],
'dst_ports': [{'start_port': 8000, 'end_port': 9000}],
'dst_addresses': [{'security_group': 'local'}],
}]
self.sg1_fix.replace_rules(rule)
rule = [{'direction': '<>',
'protocol': 'udp',
'dst_addresses': [{'subnet': {'ip_prefix': '10.1.1.0', 'ip_prefix_len': 24}},
{'subnet': {'ip_prefix': '20.1.1.0', 'ip_prefix_len': 24}}],
'dst_ports': [{'start_port': 8000, 'end_port': 9000}],
'src_ports': [{'start_port': 8000, 'end_port': 9000}],
'src_addresses': [{'security_group': 'local'}],
},
{'direction': '<>',
'protocol': 'udp',
'src_addresses': [{'subnet': {'ip_prefix': '10.1.1.0', 'ip_prefix_len': 24}},
{'subnet': {'ip_prefix': '20.1.1.0', 'ip_prefix_len': 24}}],
'src_ports': [{'start_port': 8000, 'end_port': 9000}],
'dst_ports': [{'start_port': 8000, 'end_port': 9000}],
'dst_addresses': [{'security_group': 'local'}],
}]
self.sg2_fix.replace_rules(rule)
self.verify_sec_group_port_proto(port_test=True)
return True
#end class SecurityGroupRegressionTests2
class SecurityGroupRegressionTests3(BaseSGTest, VerifySecGroup, ConfigPolicy):
@classmethod
def setUpClass(cls):
super(SecurityGroupRegressionTests3, cls).setUpClass()
cls.option = 'openstack'
def setUp(self):
super(SecurityGroupRegressionTests3, self).setUp()
self.create_sg_test_resources()
def tearDown(self):
self.logger.debug("Tearing down SecurityGroupRegressionTests3.")
super(SecurityGroupRegressionTests3, self).tearDown()
def runTest(self):
pass
@preposttest_wrapper
def test_sec_group_with_proto_and_policy_to_allow_only_tcp(self):
"""
Description: Verify security group with allow specific protocol on all ports and policy with allow only TCP between VN's
Steps:
1. create the resources VN,VM,policy,SG
        2. update the SG rules with proto tcp (for sg1) and udp (for sg2)
3. verify if traffic allowed is as per the proto allowed in SG rule and policy
Pass criteria: step 3 should pass
"""
self.logger.info("Configure the policy with allow TCP only rule.")
rules = [
{
'direction': '<>',
'protocol': 'tcp',
'source_network': self.vn1_name,
'src_ports': [0, -1],
'dest_network': self.vn2_name,
'dst_ports': [0, -1],
'simple_action': 'pass',
},
]
self.config_policy_and_attach_to_vn(rules)
rule = [{'direction': '<>',
'protocol': 'tcp',
'dst_addresses': [{'subnet': {'ip_prefix': '10.1.1.0', 'ip_prefix_len': 24}},
{'subnet': {'ip_prefix': '20.1.1.0', 'ip_prefix_len': 24}}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'src_addresses': [{'security_group': 'local'}],
},
{'direction': '<>',
'protocol': 'tcp',
'src_addresses': [{'subnet': {'ip_prefix': '10.1.1.0', 'ip_prefix_len': 24}},
{'subnet': {'ip_prefix': '20.1.1.0', 'ip_prefix_len': 24}}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'dst_addresses': [{'security_group': 'local'}],
}]
self.sg1_fix.replace_rules(rule)
rule = [{'direction': '<>',
'protocol': 'udp',
'dst_addresses': [{'subnet': {'ip_prefix': '10.1.1.0', 'ip_prefix_len': 24}},
{'subnet': {'ip_prefix': '20.1.1.0', 'ip_prefix_len': 24}}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'src_addresses': [{'security_group': 'local'}],
},
{'direction': '<>',
'protocol': 'udp',
'src_addresses': [{'subnet': {'ip_prefix': '10.1.1.0', 'ip_prefix_len': 24}},
{'subnet': {'ip_prefix': '20.1.1.0', 'ip_prefix_len': 24}}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'dst_addresses': [{'security_group': 'local'}],
}]
self.sg2_fix.replace_rules(rule)
self.verify_sec_group_with_udp_and_policy_with_tcp()
return True
@preposttest_wrapper
def test_sec_group_with_proto_and_policy_to_allow_only_tcp_ports(self):
"""
        Description: Verify security group with allow specific protocol on all ports and policy with allow only TCP on specific ports between VN's
Steps:
1. create the resources VN,VM,policy,SG
        2. update the SG rules with proto tcp (for sg1) and udp (for sg2)
3. verify if traffic allowed is as per the proto allowed in SG rule and port in policy
Pass criteria: step 3 should pass
"""
self.logger.info(
"Configure the policy with allow TCP port 8000/9000 only rule.")
rules = [
{
'direction': '<>',
'protocol': 'tcp',
'source_network': self.vn1_name,
'src_ports': [8000, 8000],
'dest_network': self.vn2_name,
'dst_ports': [9000, 9000],
'simple_action': 'pass',
},
]
self.config_policy_and_attach_to_vn(rules)
rule = [{'direction': '<>',
'protocol': 'tcp',
'dst_addresses': [{'subnet': {'ip_prefix': '10.1.1.0', 'ip_prefix_len': 24}},
{'subnet': {'ip_prefix': '20.1.1.0', 'ip_prefix_len': 24}}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'src_addresses': [{'security_group': 'local'}],
},
{'direction': '<>',
'protocol': 'tcp',
'src_addresses': [{'subnet': {'ip_prefix': '10.1.1.0', 'ip_prefix_len': 24}},
{'subnet': {'ip_prefix': '20.1.1.0', 'ip_prefix_len': 24}}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'dst_addresses': [{'security_group': 'local'}],
}]
self.sg1_fix.replace_rules(rule)
rule = [{'direction': '<>',
'protocol': 'udp',
'dst_addresses': [{'subnet': {'ip_prefix': '10.1.1.0', 'ip_prefix_len': 24}},
{'subnet': {'ip_prefix': '20.1.1.0', 'ip_prefix_len': 24}}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'src_addresses': [{'security_group': 'local'}],
},
{'direction': '<>',
'protocol': 'udp',
'src_addresses': [{'subnet': {'ip_prefix': '10.1.1.0', 'ip_prefix_len': 24}},
{'subnet': {'ip_prefix': '20.1.1.0', 'ip_prefix_len': 24}}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'dst_addresses': [{'security_group': 'local'}],
}]
self.sg2_fix.replace_rules(rule)
self.verify_sec_group_with_udp_and_policy_with_tcp_port()
return True
#end class SecurityGroupRegressionTests3
class SecurityGroupRegressionTests4(BaseSGTest, VerifySecGroup, ConfigPolicy):
@classmethod
def setUpClass(cls):
super(SecurityGroupRegressionTests4, cls).setUpClass()
cls.option = 'openstack'
def runTest(self):
pass
@preposttest_wrapper
@skip_because(feature='multi-subnet')
def test_vn_compute_sg_comb(self):
"""
        Description: Verify traffic between intra/inter VN, intra/inter compute and same/diff default/user-defined SG
        Steps:
        1. define the topology for intra/inter VN, intra/inter compute and same/diff default/user-defined SG
2. create the resources as defined in the topo
3. verify the traffic
Pass criteria: step 3 should pass
"""
topology_class_name = None
#
# Get config for test from topology
result = True
msg = []
if not topology_class_name:
topology_class_name = sdn_sg_test_topo.sdn_4vn_xvm_config
self.logger.info("Scenario for the test used is: %s" %
(topology_class_name))
try:
# provided by wrapper module if run in parallel test env
topo = topology_class_name(
project=self.project.project_name,
username=self.project.username,
password=self.project.password, compute_node_list=self.connections.orch.get_hosts(),config_option=self.option)
except (AttributeError,NameError):
topo = topology_class_name(compute_node_list=self.connections.orch.get_hosts(),config_option=self.option)
#
# Test setup: Configure policy, VN, & VM
# return {'result':result, 'msg': err_msg, 'data': [self.topo, config_topo]}
# Returned topo is of following format:
# config_topo= {'policy': policy_fixt, 'vn': vn_fixture, 'vm': vm_fixture}
setup_obj = self.useFixture(
sdnTopoSetupFixture(self.connections, topo))
out = setup_obj.topo_setup(VmToNodeMapping=topo.vm_node_map,config_option=self.option)
self.logger.info("Setup completed with result %s" % (out['result']))
self.assertEqual(out['result'], True, out['msg'])
if out['result']:
topo_obj, config_topo = out['data']
self.start_traffic_and_verify_negative_cases(topo_obj, config_topo)
return True
#end test_vn_compute_sg_comb
#end class SecurityGroupRegressionTests4
class SecurityGroupRegressionTests5(BaseSGTest, VerifySecGroup, ConfigPolicy):
@classmethod
def setUpClass(cls):
super(SecurityGroupRegressionTests5, cls).setUpClass()
cls.option = 'openstack'
def setUp(self):
super(SecurityGroupRegressionTests5, self).setUp()
self.create_sg_test_resources()
def tearDown(self):
self.logger.debug("Tearing down SecurityGroupRegressionTests2.")
super(SecurityGroupRegressionTests5, self).tearDown()
def runTest(self):
pass
@preposttest_wrapper
def test_sec_group_with_proto_double_rules_sg1(self):
"""
Description: Verify security group with allow tcp/udp protocol on all ports and policy with allow all between VN's
Steps:
1. create the resources VN,VM,policy,SG
2. update the SG rules with proto tcp/udp
3. verify if traffic allowed is as per the proto allowed in SG rule
Pass criteria: step 3 should pass
"""
self.logger.info("Configure the policy with allow any")
rules = [
{
'direction': '<>',
'protocol': 'any',
'source_network': self.vn1_name,
'src_ports': [0, -1],
'dest_network': self.vn2_name,
'dst_ports': [0, -1],
'simple_action': 'pass',
},
]
self.config_policy_and_attach_to_vn(rules)
rule = [{'direction': '<>',
'protocol': 'tcp',
'dst_addresses': [{'subnet': {'ip_prefix': '10.1.1.0', 'ip_prefix_len': 24}},
{'subnet': {'ip_prefix': '20.1.1.0', 'ip_prefix_len': 24}}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'src_addresses': [{'security_group': 'local'}],
},
{'direction': '<>',
'protocol': 'tcp',
'src_addresses': [{'subnet': {'ip_prefix': '10.1.1.0', 'ip_prefix_len': 24}},
{'subnet': {'ip_prefix': '20.1.1.0', 'ip_prefix_len': 24}}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'dst_addresses': [{'security_group': 'local'}],
},
{'direction': '<>',
'protocol': 'udp',
'dst_addresses': [{'subnet': {'ip_prefix': '10.1.1.0', 'ip_prefix_len': 24}},
{'subnet': {'ip_prefix': '20.1.1.0', 'ip_prefix_len': 24}}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'src_addresses': [{'security_group': 'local'}],
},
{'direction': '<>',
'protocol': 'udp',
'src_addresses': [{'subnet': {'ip_prefix': '10.1.1.0', 'ip_prefix_len': 24}},
{'subnet': {'ip_prefix': '20.1.1.0', 'ip_prefix_len': 24}}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'dst_addresses': [{'security_group': 'local'}],
}]
self.sg1_fix.replace_rules(rule)
rule = [{'direction': '<>',
'protocol': 'udp',
'dst_addresses': [{'subnet': {'ip_prefix': '10.1.1.0', 'ip_prefix_len': 24}},
{'subnet': {'ip_prefix': '20.1.1.0', 'ip_prefix_len': 24}}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'src_addresses': [{'security_group': 'local'}],
},
{'direction': '<>',
'protocol': 'udp',
'src_addresses': [{'subnet': {'ip_prefix': '10.1.1.0', 'ip_prefix_len': 24}},
{'subnet': {'ip_prefix': '20.1.1.0', 'ip_prefix_len': 24}}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'dst_addresses': [{'security_group': 'local'}],
}]
self.sg2_fix.replace_rules(rule)
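        # sg1 now carries both TCP and UDP rules while sg2 allows only UDP;
        # double_rule=True flags the extra TCP rule pair on sg1 for the verification below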
self.verify_sec_group_port_proto(double_rule=True)
return True
#end test_sec_group_with_proto_double_rules_sg1
@preposttest_wrapper
def test_default_sg(self):
"""
Description: test default security group
Steps:
1. try to delete default sg, should fail
2. add/delete rules and verify the rules with traffic
Pass criteria: step 1 and 2 should pass
"""
self.logger.info("Configure the policy with allow any")
rules = [
{
'direction': '<>',
'protocol': 'any',
'source_network': self.vn1_name,
'src_ports': [0, -1],
'dest_network': self.vn2_name,
'dst_ports': [0, -1],
'simple_action': 'pass',
},
]
self.config_policy_and_attach_to_vn(rules)
#try to delete default sg
secgrp_fq_name = ':'.join(['default-domain',
self.inputs.project_name,
'default'])
sg_id = get_secgrp_id_from_name(
self.connections,
secgrp_fq_name)
try:
self.orch.delete_security_group(sg_id)
        except Exception as msg:
self.logger.info(msg)
self.logger.info(
"Not able to delete the default security group as expected")
else:
try:
secgroup = self.vnc_lib.security_group_read(
fq_name=secgrp_fq_name)
self.logger.info(
"Not able to delete the default security group as expected")
except NoIdError:
errmsg = "default Security group deleted"
self.logger.error(errmsg)
assert False, errmsg
#delete egress rule and add new rules and verify with traffic
self.sg1_fix.delete_all_rules(sg_id)
rule = [{'direction': '<>',
'protocol': 'udp',
'dst_addresses': [{'subnet': {'ip_prefix': '10.1.1.0', 'ip_prefix_len': 24}},
{'subnet': {'ip_prefix': '20.1.1.0', 'ip_prefix_len': 24}}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'src_addresses': [{'security_group': 'local'}],
},
{'direction': '<>',
'protocol': 'udp',
'src_addresses': [{'subnet': {'ip_prefix': '10.1.1.0', 'ip_prefix_len': 24}},
{'subnet': {'ip_prefix': '20.1.1.0', 'ip_prefix_len': 24}}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'dst_addresses': [{'security_group': 'local'}],
}]
secgrp_rules = self.sg1_fix.create_sg_rule(sg_id,secgrp_rules=rule)
assert secgrp_rules
sender = (self.vm1_fix, self.sg2_fix.secgrp_name)
receiver = (self.vm6_fix, 'default')
self.assert_traffic(sender, receiver, 'udp', 8000, 9000, 'pass')
#revert back default sg
self.sg1_fix.delete_all_rules(sg_id)
rule = [{'direction': '<>',
'protocol': 'any',
'dst_addresses': [{'subnet': {'ip_prefix': '0.0.0.0', 'ip_prefix_len': 0}}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'src_addresses': [{'security_group': 'local'}],
},
{'direction': '<>',
'protocol': 'any',
'src_addresses': [{'security_group':secgrp_fq_name}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'dst_addresses': [{'security_group': 'local'}],
}]
secgrp_rules = self.sg1_fix.create_sg_rule(sg_id,secgrp_rules=rule)
assert secgrp_rules
return True
#end test_default_sg
#end class SecurityGroupRegressionTests5
class SecurityGroupRegressionTests6(BaseSGTest, VerifySecGroup, ConfigPolicy):
@classmethod
def setUpClass(cls):
super(SecurityGroupRegressionTests6, cls).setUpClass()
cls.option = 'openstack'
def runTest(self):
pass
@preposttest_wrapper
@skip_because(feature='multi-subnet')
def test_sg_stateful(self):
"""
Description: Test if SG is stateful
        1. test that return (inbound) traffic is allowed even without a matching ingress rule
        2. test that return (outbound) traffic is allowed even without a matching egress rule
        3. test traffic between SGs with only an ingress/egress rule
Steps:
1. define the topology for the test
2. create the resources as defined in the topo
3. verify the traffic
Pass criteria: step 3 should pass
"""
topology_class_name = None
#
# Get config for test from topology
result = True
msg = []
if not topology_class_name:
topology_class_name = sdn_sg_test_topo.sdn_topo_config
self.logger.info("Scenario for the test used is: %s" %
(topology_class_name))
topo = topology_class_name()
try:
# provided by wrapper module if run in parallel test env
topo.build_topo_sg_stateful(
project=self.project.project_name,
username=self.project.username,
password=self.project.password,config_option=self.option)
except (AttributeError,NameError):
topo.build_topo_sg_stateful(config_option=self.option)
#
# Test setup: Configure policy, VN, & VM
# return {'result':result, 'msg': err_msg, 'data': [self.topo, config_topo]}
# Returned topo is of following format:
# config_topo= {'policy': policy_fixt, 'vn': vn_fixture, 'vm': vm_fixture}
setup_obj = self.useFixture(
sdnTopoSetupFixture(self.connections, topo))
out = setup_obj.topo_setup(config_option=self.option)
self.logger.info("Setup completed with result %s" % (out['result']))
self.assertEqual(out['result'], True, out['msg'])
if out['result']:
topo_obj, config_topo = out['data']
self.start_traffic_and_verify(topo_obj, config_topo, traffic_reverse=False)
return True
#end test_sg_stateful
@preposttest_wrapper
@skip_because(feature='multi-tenant')
def test_sg_multiproject(self):
"""
Description: Test SG across projects
Steps:
1. define the topology for the test
2. create the resources as defined in the topo
3. verify the traffic
Pass criteria: step 3 should pass
"""
topology_class_name = None
result = True
msg = []
if not topology_class_name:
topology_class_name = sdn_sg_test_topo.sdn_topo_config_multiproject
self.logger.info("Scenario for the test used is: %s" %
(topology_class_name))
topo = topology_class_name()
self.topo = topo
#
# Test setup: Configure policy, VN, & VM
# return {'result':result, 'msg': err_msg, 'data': [self.topo, config_topo]}
# Returned topo is of following format:
# config_topo= {'policy': policy_fixt, 'vn': vn_fixture, 'vm': vm_fixture}
topo_objs = {}
config_topo = {}
setup_obj = self.useFixture(
sdnTopoSetupFixture(self.connections, topo))
out = setup_obj.sdn_topo_setup(config_option=self.option)
self.assertEqual(out['result'], True, out['msg'])
if out['result'] == True:
topo_objs, config_topo, vm_fip_info = out['data']
self.start_traffic_and_verify_multiproject(topo_objs, config_topo, traffic_reverse=False)
return True
#end test_sg_multiproject
@preposttest_wrapper
@skip_because(feature='multi-subnet')
def test_sg_no_rule(self):
"""
Description: Test SG without any rule, it should deny all traffic
Steps:
1. define the topology for the test
2. create the resources as defined in the topo
3. verify the traffic denied
Pass criteria: step 3 should pass
"""
topology_class_name = None
#
# Get config for test from topology
result = True
msg = []
if not topology_class_name:
topology_class_name = sdn_sg_test_topo.sdn_topo_1vn_2vm_config
self.logger.info("Scenario for the test used is: %s" %
(topology_class_name))
topo = topology_class_name()
try:
# provided by wrapper module if run in parallel test env
topo.build_topo(
project=self.project.project_name,
username=self.project.username,
password=self.project.password,config_option=self.option)
except (AttributeError,NameError):
topo.build_topo(config_option=self.option)
#
# Test setup: Configure policy, VN, & VM
# return {'result':result, 'msg': err_msg, 'data': [self.topo, config_topo]}
# Returned topo is of following format:
# config_topo= {'policy': policy_fixt, 'vn': vn_fixture, 'vm': vm_fixture}
setup_obj = self.useFixture(
sdnTopoSetupFixture(self.connections, topo))
out = setup_obj.topo_setup(config_option=self.option)
self.logger.info("Setup completed with result %s" % (out['result']))
self.assertEqual(out['result'], True, out['msg'])
if out['result']:
topo_obj, config_topo = out['data']
self.start_traffic_and_verify(topo_obj, config_topo, traffic_reverse=True)
return True
#end test_sg_no_rule
#end class SecurityGroupRegressionTests6
class SecurityGroupRegressionTests7(BaseSGTest, VerifySecGroup, ConfigPolicy):
@classmethod
def setUpClass(cls):
super(SecurityGroupRegressionTests7, cls).setUpClass()
cls.option = 'openstack'
def runTest(self):
pass
@preposttest_wrapper
def test_icmp_error_handling1(self):
"""
Description: Test ICMP error handling
1. ingress-udp from same SG, egress-all
2. Test with SG rule, ingress-egress-udp only
3. Test with SG rule, ingress-egress-all
Steps:
1. define the topology for the test
2. create the resources as defined in the topo
3. verify the traffic for each of the cases mentioned in description
Pass criteria: step 3 should pass
"""
topology_class_name = None
#
# Get config for test from topology
result = True
msg = []
if not topology_class_name:
topology_class_name = sdn_sg_test_topo.sdn_topo_icmp_error_handling
self.logger.info("Scenario for the test used is: %s" %
(topology_class_name))
topo = topology_class_name()
try:
# provided by wrapper module if run in parallel test env
topo.build_topo(
project=self.project.project_name,
username=self.project.username,
password=self.project.password,config_option=self.option)
except (AttributeError,NameError):
topo.build_topo(config_option=self.option)
#
# Test setup: Configure policy, VN, & VM
# return {'result':result, 'msg': err_msg, 'data': [self.topo, config_topo]}
# Returned topo is of following format:
# config_topo= {'policy': policy_fixt, 'vn': vn_fixture, 'vm': vm_fixture}
setup_obj = self.useFixture(
sdnTopoSetupFixture(self.connections, topo))
out = setup_obj.topo_setup(config_option=self.option)
self.logger.info("Setup completed with result %s" % (out['result']))
self.assertEqual(out['result'], True, out['msg'])
if out['result']:
topo_obj, config_topo = out['data']
#Test SG rule, ingress-udp same SG, egress-all
port = 10000
pkt_cnt = 10
src_vm_name = 'vm1'
dst_vm_name = 'vm3'
src_vm_fix = config_topo['vm'][src_vm_name]
dst_vm_fix = config_topo['vm'][dst_vm_name]
src_vn_fix = config_topo['vn'][topo_obj.vn_of_vm[src_vm_name]]
if self.option == 'openstack':
src_vn_fq_name = src_vn_fix.vn_fq_name
else:
src_vn_fq_name = ':'.join(src_vn_fix._obj.get_fq_name())
#start tcpdump on src VM
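        # icmp[0]=3 and icmp[1]=3 matches ICMP type 3/code 3 (destination port
        # unreachable), the error the receiver should generate for the UDP probes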
filters = '\'(icmp[0]=3 and icmp[1]=3 and src host %s and dst host %s)\'' % (dst_vm_fix.vm_ip, src_vm_fix.vm_ip)
session, pcap = start_tcpdump_for_vm_intf(self, src_vm_fix, src_vn_fq_name, filters = filters)
#start traffic
sender, receiver = self.start_traffic_scapy(src_vm_fix, dst_vm_fix, 'udp',
port, port,recvr=False)
#verify packet count and stop tcpdump
assert verify_tcpdump_count(self, session, pcap)
#stop traffic
sent, recv = self.stop_traffic_scapy(sender, receiver,recvr=False)
#Test with SG rule, ingress-egress-udp only
rule = [{'direction': '>',
'protocol': 'udp',
'dst_addresses': [{'subnet': {'ip_prefix': '0.0.0.0', 'ip_prefix_len': 0}}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'src_addresses': [{'security_group': 'local'}],
},
{'direction': '>',
'protocol': 'udp',
'src_addresses': [{'subnet': {'ip_prefix': '0.0.0.0', 'ip_prefix_len': 0}}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'dst_addresses': [{'security_group': 'local'}],
}]
config_topo['sec_grp'][topo_obj.sg_list[0]].replace_rules(rule)
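        # Even though the SG now permits only UDP, ICMP errors belonging to the
        # established UDP flow should still be let through (stateful error handling)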
#start tcpdump on src VM
filters = '\'(icmp[0]=3 and icmp[1]=3 and src host %s and dst host %s)\'' % (dst_vm_fix.vm_ip, src_vm_fix.vm_ip)
session, pcap = start_tcpdump_for_vm_intf(self, src_vm_fix, src_vn_fq_name, filters = filters)
#start traffic
sender, receiver = self.start_traffic_scapy(src_vm_fix, dst_vm_fix, 'udp',
port, port,recvr=False)
#verify packet count and stop tcpdump
assert verify_tcpdump_count(self, session, pcap)
#stop traffic
sent, recv = self.stop_traffic_scapy(sender, receiver,recvr=False)
#Test with SG rule, ingress-egress-all
dst_vm_fix = config_topo['vm']['vm2']
rule = [{'direction': '>',
'protocol': 'any',
'dst_addresses': [{'subnet': {'ip_prefix': '0.0.0.0', 'ip_prefix_len': 0}}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'src_addresses': [{'security_group': 'local'}],
},
{'direction': '>',
'protocol': 'any',
'src_addresses': [{'subnet': {'ip_prefix': '0.0.0.0', 'ip_prefix_len': 0}}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'dst_addresses': [{'security_group': 'local'}],
}]
config_topo['sec_grp'][topo_obj.sg_list[0]].replace_rules(rule)
#start tcpdump on src VM
filters = '\'(icmp[0]=3 and icmp[1]=3 and src host %s and dst host %s)\'' % (dst_vm_fix.vm_ip, src_vm_fix.vm_ip)
session, pcap = start_tcpdump_for_vm_intf(self, src_vm_fix, src_vn_fq_name, filters = filters)
#start traffic
sender, receiver = self.start_traffic_scapy(src_vm_fix, dst_vm_fix, 'udp',
port, port,recvr=False)
#verify packet count and stop tcpdump
assert verify_tcpdump_count(self, session, pcap)
#stop traffic
sent, recv = self.stop_traffic_scapy(sender, receiver,recvr=False)
return True
#end test_icmp_error_handling1
@preposttest_wrapper
def test_icmp_error_handling2(self):
"""
Description:
1. Test ICMP error handling with SG rules egress-udp only
2. Test ICMP error from agent
Steps:
1. define the topology for the test
2. create the resources as defined in the topo
3. verify the traffic for each of the cases mentioned in description
Pass criteria: step 3 should pass
"""
topology_class_name = None
#
# Get config for test from topology
result = True
msg = []
if not topology_class_name:
topology_class_name = sdn_sg_test_topo.sdn_topo_icmp_error_handling
self.logger.info("Scenario for the test used is: %s" %
(topology_class_name))
topo = topology_class_name()
try:
# provided by wrapper module if run in parallel test env
topo.build_topo2(
project=self.project.project_name,
username=self.project.username,
password=self.project.password,
compute_node_list=self.connections.orch.get_hosts(),config_option=self.option)
except (AttributeError,NameError):
topo.build_topo2(compute_node_list=self.connections.orch.get_hosts(),config_option=self.option)
#
# Test setup: Configure policy, VN, & VM
# return {'result':result, 'msg': err_msg, 'data': [self.topo, config_topo]}
# Returned topo is of following format:
# config_topo= {'policy': policy_fixt, 'vn': vn_fixture, 'vm': vm_fixture}
setup_obj = self.useFixture(
sdnTopoSetupFixture(self.connections, topo))
out = setup_obj.topo_setup(VmToNodeMapping=topo.vm_node_map,config_option=self.option)
self.logger.info("Setup completed with result %s" % (out['result']))
self.assertEqual(out['result'], True, out['msg'])
if out['result']:
topo_obj, config_topo = out['data']
#Test with SG rule, egress-udp only
port = 10000
pkt_cnt = 10
src_vm_name = 'vm1'
dst_vm_name = 'vm2'
src_vm_fix = config_topo['vm'][src_vm_name]
dst_vm_fix = config_topo['vm'][dst_vm_name]
src_vn_fix = config_topo['vn'][topo_obj.vn_of_vm[src_vm_name]]
if self.option == 'openstack':
src_vn_fq_name = src_vn_fix.vn_fq_name
else:
src_vn_fq_name = ':'.join(src_vn_fix._obj.get_fq_name())
#start tcpdump on src VM
filters = '\'(icmp[0]=3 and icmp[1]=3 and src host %s and dst host %s)\'' % (dst_vm_fix.vm_ip, src_vm_fix.vm_ip)
session, pcap = start_tcpdump_for_vm_intf(self, src_vm_fix, src_vn_fq_name, filters = filters)
#start traffic
sender, receiver = self.start_traffic_scapy(src_vm_fix, dst_vm_fix, 'udp',
port, port,recvr=False)
#verify packet count and stop tcpdump
assert verify_tcpdump_count(self, session, pcap)
#stop traffic
sent, recv = self.stop_traffic_scapy(sender, receiver,recvr=False)
#Test ICMP error from agent
if len(self.connections.orch.get_hosts()) < 2:
self.logger.info("Skipping second case(Test ICMP error from agent), \
this test needs atleast 2 compute nodes")
raise self.skipTest("Skipping second case(Test ICMP error from agent), \
this test needs atleast 2 compute nodes")
return True
rule = [{'direction': '>',
'protocol': 'icmp',
'dst_addresses': [{'subnet': {'ip_prefix': '0.0.0.0', 'ip_prefix_len': 0}}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'src_addresses': [{'security_group': 'local'}],
},
{'direction': '>',
'protocol': 'icmp',
'src_addresses': [{'subnet': {'ip_prefix': '0.0.0.0', 'ip_prefix_len': 0}}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'dst_addresses': [{'security_group': 'local'}],
}]
config_topo['sec_grp'][topo_obj.sg_list[0]].replace_rules(rule)
vn1_name = "test_vnv6sr"
vn1_net = ['2001::101:0/120']
#vn1_fixture = self.config_vn(vn1_name, vn1_net)
vn1_fixture = self.useFixture(VNFixture(
project_name=self.inputs.project_name, connections=self.connections,
vn_name=vn1_name, inputs=self.inputs, subnets=vn1_net))
assert vn1_fixture.verify_on_setup()
vn2_name = "test_vnv6dn"
vn2_net = ['2001::201:0/120']
#vn2_fixture = self.config_vn(vn2_name, vn2_net)
vn2_fixture = self.useFixture(VNFixture(
project_name=self.inputs.project_name, connections=self.connections,
vn_name=vn2_name, inputs=self.inputs, subnets=vn2_net))
assert vn2_fixture.verify_on_setup()
vm1_name = 'source_vm'
vm2_name = 'dest_vm'
#vm1_fixture = self.config_vm(vn1_fixture, vm1_name)
#vm2_fixture = self.config_vm(vn2_fixture, vm2_name)
self.inputs.set_af('dual')
vm1_fixture = self.useFixture(VMFixture(
project_name=self.inputs.project_name, connections=self.connections,
vn_obj=vn1_fixture.obj, vm_name=vm1_name, node_name=None,
image_name='ubuntu-traffic', flavor='contrail_flavor_small'))
vm2_fixture = self.useFixture(VMFixture(
project_name=self.inputs.project_name, connections=self.connections,
vn_obj=vn2_fixture.obj, vm_name=vm2_name, node_name=None,
image_name='ubuntu-traffic', flavor='contrail_flavor_small'))
assert vm1_fixture.verify_on_setup()
assert vm2_fixture.verify_on_setup()
vm1_fixture.wait_till_vm_is_up()
vm2_fixture.wait_till_vm_is_up()
rule = [
{
'direction': '<>',
'protocol': 'any',
'source_network': vn1_name,
'src_ports': [0, -1],
'dest_network': vn2_name,
'dst_ports': [0, -1],
'simple_action': 'pass',
},
]
policy_name = 'allow_all'
policy_fixture = self.config_policy(policy_name, rule)
vn1_policy_fix = self.attach_policy_to_vn(
policy_fixture, vn1_fixture)
vn2_policy_fix = self.attach_policy_to_vn(
policy_fixture, vn2_fixture)
if self.option == 'openstack':
src_vn_fq_name = src_vn_fix.vn_fq_name
else:
src_vn_fq_name = ':'.join(src_vn_fix._obj.get_fq_name())
self.logger.info("Increasing MTU on src VM and ping with bigger size and reverting MTU")
cmd_ping = ('ping -M want -s 2500 -c 10 %s | grep \"Frag needed and DF set\"' %
(dst_vm_fix.vm_ip))
# cmd_tcpdump = 'tcpdump -vvv -c 5 -ni eth0 -v icmp > /tmp/op1.log'
output = src_vm_fix.run_cmd_on_vm(cmds=['''netstat -anr |grep ^0.0.0.0 | awk '{ print $2 }' '''], as_sudo=True)
gw = output.values()[0].split('\r\n')[-1]
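        # gw now holds the default-route gateway parsed from the netstat output;
        # the "need to frag" ICMP error is expected to come from this address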
filters = 'icmp'
session, pcap = start_tcpdump_for_vm_intf(self, src_vm_fix, src_vn_fq_name, filters = filters)
cmds = ['ifconfig eth0 mtu 3000', cmd_ping,
'ifconfig eth0 mtu 1500']
output = src_vm_fix.run_cmd_on_vm(cmds=cmds, as_sudo=True, as_daemon=True)
cmd = 'tcpdump -r %s' % pcap
cmd_check_icmp, err = execute_cmd_out(session, cmd, self.logger)
cmd_df = re.search('need to frag', cmd_check_icmp)
self.logger.debug("output for ping cmd: %s" % output[cmd_ping])
cmd_next_icmp = re.search('.+ seq 2, length (\d\d\d\d).*', cmd_check_icmp)
icmpmatch = ("%s > %s: ICMP %s unreachable - need to frag" %
(gw, src_vm_fix.vm_ip, dst_vm_fix.vm_ip))
if not ((icmpmatch in cmd_check_icmp) and ("need to frag" in cmd_df.group(0))
and (cmd_next_icmp.group(1) < '1500')
and ("Frag needed and DF set" in output[cmd_ping])):
self.logger.error("expected ICMP error for type 3 code 4 not found")
stop_tcpdump_for_vm_intf(self, session, pcap)
return False
stop_tcpdump_for_vm_intf(self, session, pcap)
self.logger.info("increasing MTU on src VM and ping6 with bigger size and reverting MTU")
cmd_ping = 'ping6 -s 2500 -c 10 %s | grep \"Packet too big\"' % (vm2_fixture.vm_ip)
src_vn_fq_name = vn1_fixture.vn_fq_name
gw = vm1_fixture.vm_ip
gw = gw.split(':')
gw[-1] = '1'
gw = ':'.join(gw)
filters = 'icmp6'
session, pcap = start_tcpdump_for_vm_intf(self, vm1_fixture, src_vn_fq_name, filters = filters)
cmds = ['ifconfig eth0 mtu 3000', cmd_ping,
'ifconfig eth0 mtu 1500']
output = vm1_fixture.run_cmd_on_vm(cmds=cmds, as_sudo=True, as_daemon=True)
cmd = 'tcpdump -r %s' % pcap
cmd_check_icmp, err = execute_cmd_out(session, cmd, self.logger)
self.logger.debug("output for ping cmd: %s" % output[cmd_ping])
cmd_next_icmp = re.search('.+ ICMP6, packet too big, mtu (\d\d\d\d).*', cmd_check_icmp)
icmpmatch = ("ICMP6, packet too big")
if not ((icmpmatch in cmd_check_icmp) and (cmd_next_icmp.group(1) < '1500')
and ("Packet too big" in output[cmd_ping])):
self.logger.error("expected ICMP6 error for type 2 packet too big message not found")
stop_tcpdump_for_vm_intf(self, session, pcap)
# output = vm1_fixture.run_cmd_on_vm(cmds='rm /tmp/op.log', as_sudo=True)
return False
stop_tcpdump_for_vm_intf(self, session, pcap)
return True
#end test_icmp_error_handling2
@preposttest_wrapper
@skip_because(feature='service-instance')
def test_icmp_error_handling_from_mx_with_si(self):
"""
Description: Test ICMP error handling from MX with SI in the middle
1. uses traceroute util on the VM
Steps:
1. define the topology for the test
2. create the resources as defined in the topo
3. copy the traceroute pkg to VM and install
4. run the traceroute to 8.8.8.8
        5. verify through tcpdump that the ICMP error is received on the VM
Pass criteria: step 5 should pass
"""
if ('MX_GW_TEST' not in os.environ) or (('MX_GW_TEST' in os.environ) and (os.environ.get('MX_GW_TEST') != '1')):
self.logger.info(
"Skipping Test. Env variable MX_GW_TEST is not set. Skipping the test")
raise self.skipTest(
"Skipping Test. Env variable MX_GW_TEST is not set. Skipping the test")
return True
public_vn_info = {'subnet':[self.inputs.fip_pool], 'router_asn':self.inputs.router_asn, 'rt_number':self.inputs.mx_rt}
topology_class_name = None
#
# Get config for test from topology
result = True
msg = []
if not topology_class_name:
topology_class_name = sdn_sg_test_topo.sdn_topo_mx_with_si
self.logger.info("Scenario for the test used is: %s" %
(topology_class_name))
topo = topology_class_name()
try:
# provided by wrapper module if run in parallel test env
topo.build_topo(
project=self.project.project_name,
username=self.project.username,
password=self.project.password,
public_vn_info=public_vn_info,config_option=self.option)
except (AttributeError,NameError):
topo.build_topo(public_vn_info=public_vn_info,config_option=self.option)
#
# Test setup: Configure policy, VN, & VM
# return {'result':result, 'msg': err_msg, 'data': [self.topo, config_topo]}
# Returned topo is of following format:
# config_topo= {'policy': policy_fixt, 'vn': vn_fixture, 'vm': vm_fixture}
setup_obj = self.useFixture(
sdnTopoSetupFixture(self.connections, topo))
out = setup_obj.topo_setup(skip_verify='no',config_option=self.option)
self.logger.info("Setup completed with result %s" % (out['result']))
self.assertEqual(out['result'], True, out['msg'])
if out['result']:
topo_obj, config_topo = out['data']
pol_fix = config_topo['policy'][topo_obj.policy_list[0]]
if self.option == 'openstack':
policy_id = pol_fix.policy_obj['policy']['id']
new_policy_entries = config_topo['policy'][topo_obj.policy_list[1]].policy_obj['policy']['entries']
data = {'policy': {'entries': new_policy_entries}}
pol_fix.update_policy(policy_id, data)
else:
policy_name = topo_obj.policy_list[0]
proj_obj = pol_fix._conn_drv.project_read(['default-domain',self.project.project_name])
new_policy_entries = pol_fix._conn_drv.network_policy_read(['default-domain',
self.project.project_name,
topo_obj.policy_list[1]]).network_policy_entries
net_policy_obj = NetworkPolicy(
policy_name, network_policy_entries=new_policy_entries,
parent_obj=proj_obj)
pol_fix._conn_drv.network_policy_update(net_policy_obj)
src_vm_name = 'vm2'
src_vm_fix = config_topo['vm'][src_vm_name]
src_vn_fix = config_topo['vn'][topo_obj.vn_of_vm[src_vm_name]]
if self.option == 'openstack':
src_vn_fq_name = src_vn_fix.vn_fq_name
else:
src_vn_fq_name = ':'.join(src_vn_fix._obj.get_fq_name())
pkg = 'traceroute_2.0.18-1_amd64.deb'
self.logger.info("copying traceroute pkg to the compute node.")
path = os.getcwd() + '/tcutils/pkgs/' + pkg
host_compute = {'username': self.inputs.username, 'password': self.inputs.password, 'ip': src_vm_fix.vm_node_ip}
copy_file_to_server(host_compute,path, '/tmp',pkg)
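        # the package is staged on the compute node first, then pushed into the
        # VM over fab using the VM's local_ip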
self.logger.info("copying traceroute from compute node to VM")
with settings(host_string='%s@%s' % (self.inputs.username, src_vm_fix.vm_node_ip),
password=self.inputs.password, warn_only=True, abort_on_prompts=False):
path = '/tmp/' + pkg
output = fab_put_file_to_vm(
host_string='%s@%s' %
(src_vm_fix.vm_username,
src_vm_fix.local_ip),
password=src_vm_fix.vm_password,
src=path,
dest='/tmp')
self.logger.info("installing traceroute")
cmd = 'dpkg -i /tmp/' + pkg
output_cmd_dict = src_vm_fix.run_cmd_on_vm(cmds=[cmd], as_sudo=True)
assert "Setting up traceroute" in output_cmd_dict[cmd], "traceroute pkg installation error, output:%s" % output_cmd_dict[cmd]
self.logger.info("starting tcpdump on src VM")
filters = '\'(icmp[0]=11 and icmp[1]=0)\''
session, pcap = start_tcpdump_for_vm_intf(self, src_vm_fix, src_vn_fq_name, filters = filters)
self.logger.info("starting traceroute to out of cluster, 8.8.8.8")
cmd = 'traceroute 8.8.8.8'
for i in range(0,4):
output_cmd_dict = src_vm_fix.run_cmd_on_vm(cmds=[cmd], as_sudo=True)
self.logger.info(output_cmd_dict[cmd])
if verify_tcpdump_count(self, session, pcap):
return True
return False
#end test_icmp_error_handling_from_mx_with_si
@preposttest_wrapper
def test_icmp_error_payload_matching(self):
"""
Description: Test ICMP error handling with payload diff. from original packet
        1. ICMP packets whose payload matches an existing flow should be accepted and others should be denied
Steps:
1. define the topology for the test
2. create the resources as defined in the topo
        3. send traffic from the sender to an unreachable port on the receiver side (port 10000 used here); the receiver will send an ICMP error to the sender for "destination port unreachable"
        4. from the receiver side, send many other ICMP error types in a loop
        5. the sender should receive only the ICMP error mentioned in step 3 and should NOT receive the errors mentioned in step 4
Pass criteria: step 5 should pass
"""
topology_class_name = None
#
# Get config for test from topology
result = True
msg = []
if not topology_class_name:
topology_class_name = sdn_sg_test_topo.sdn_topo_icmp_error_handling
self.logger.info("Scenario for the test used is: %s" %
(topology_class_name))
topo = topology_class_name()
try:
# provided by wrapper module if run in parallel test env
topo.build_topo2(
project=self.project.project_name,
username=self.project.username,
password=self.project.password,
compute_node_list=self.connections.orch.get_hosts(),config_option=self.option)
except (AttributeError,NameError):
topo.build_topo2(compute_node_list=self.connections.orch.get_hosts(),config_option=self.option)
#
# Test setup: Configure policy, VN, & VM
# return {'result':result, 'msg': err_msg, 'data': [self.topo, config_topo]}
# Returned topo is of following format:
# config_topo= {'policy': policy_fixt, 'vn': vn_fixture, 'vm': vm_fixture}
setup_obj = self.useFixture(
sdnTopoSetupFixture(self.connections, topo))
out = setup_obj.topo_setup(VmToNodeMapping=topo.vm_node_map,config_option=self.option)
self.logger.info("Setup completed with result %s" % (out['result']))
self.assertEqual(out['result'], True, out['msg'])
if out['result']:
topo_obj, config_topo = out['data']
#Test with SG rule, egress-udp only and also send diff ICMP error with diff payload
port = 10000
pkt_cnt = 2
src_vm_name = 'vm1'
dst_vm_name = 'vm2'
src_vm_fix = config_topo['vm'][src_vm_name]
dst_vm_fix = config_topo['vm'][dst_vm_name]
src_vn_fix = config_topo['vn'][topo_obj.vn_of_vm[src_vm_name]]
if self.option == 'openstack':
src_vn_fq_name = src_vn_fix.vn_fq_name
else:
src_vn_fq_name = ':'.join(src_vn_fix._obj.get_fq_name())
#start tcpdump on src VM
filters = '\'(icmp[0]=3 and icmp[1]=3)\''
session1, pcap1 = start_tcpdump_for_vm_intf(self, src_vm_fix, src_vn_fq_name, filters = filters)
#start traffic
sender1, receiver1 = self.start_traffic_scapy(src_vm_fix, dst_vm_fix, 'udp',
port, port,recvr=False)
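        # the loops below craft ICMP errors of assorted types/codes whose payload
        # does not match any active flow; all of them should be dropped
        # (exp_count=0), while the genuine port-unreachable errors for the UDP
        # flow above must still be received (checked after the loops)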
icmp_code = 0
for icmp_type in xrange(0,3):
#start tcpdump on src VM
filters = '\'(icmp[0] = %s and icmp[1] = %s)\'' % (icmp_type, icmp_code)
session, pcap = start_tcpdump_for_vm_intf(self, src_vm_fix, src_vn_fq_name, filters = filters)
sender, receiver = self.start_traffic_scapy(dst_vm_fix, src_vm_fix, 'icmp',
port, port, payload="payload",
icmp_type=icmp_type, icmp_code=icmp_code,count=pkt_cnt)
sent, recv = self.stop_traffic_scapy(sender, receiver)
assert sent != 0, "sent count is ZERO for icmp type %s and code %s" % (icmp_type, icmp_code)
#verify packet count and stop tcpdump
assert verify_tcpdump_count(self, session, pcap, exp_count=0), "pkt count in tcpdump is not ZERO for icmp type %s and code %s" % (icmp_type, icmp_code)
#type 3 , code (0,3)
icmp_type = 3
for icmp_code in xrange(0,3):
#start tcpdump on src VM
filters = '\'(icmp[0] = %s and icmp[1] = %s)\'' % (icmp_type, icmp_code)
session, pcap = start_tcpdump_for_vm_intf(self, src_vm_fix, src_vn_fq_name, filters = filters)
sender, receiver = self.start_traffic_scapy(dst_vm_fix, src_vm_fix, 'icmp',
port, port, payload="payload",
icmp_type=icmp_type, icmp_code=icmp_code,count=pkt_cnt)
sent, recv = self.stop_traffic_scapy(sender, receiver)
assert sent != 0, "sent count is ZERO for icmp type %s and code %s" % (icmp_type, icmp_code)
#verify packet count and stop tcpdump
assert verify_tcpdump_count(self, session, pcap, exp_count=0), "pkt count in tcpdump is not ZERO for icmp type %s and code %s" % (icmp_type, icmp_code)
#type 3 , code (4,15)
icmp_type = 3
for icmp_code in xrange(4,16):
#start tcpdump on src VM
filters = '\'(icmp[0] = %s and icmp[1] = %s)\'' % (icmp_type, icmp_code)
session, pcap = start_tcpdump_for_vm_intf(self, src_vm_fix, src_vn_fq_name, filters = filters)
sender, receiver = self.start_traffic_scapy(dst_vm_fix, src_vm_fix, 'icmp',
port, port, payload="payload",
icmp_type=icmp_type, icmp_code=icmp_code,count=pkt_cnt)
sent, recv = self.stop_traffic_scapy(sender, receiver)
assert sent != 0, "sent count is ZERO for icmp type %s and code %s" % (icmp_type, icmp_code)
#verify packet count and stop tcpdump
assert verify_tcpdump_count(self, session, pcap, exp_count=0), "pkt count in tcpdump is not ZERO for icmp type %s and code %s" % (icmp_type, icmp_code)
#type (4,11), code 0
icmp_code = 0
for icmp_type in xrange(4,12):
#start tcpdump on src VM
filters = '\'(icmp[0] = %s and icmp[1] = %s)\'' % (icmp_type, icmp_code)
session, pcap = start_tcpdump_for_vm_intf(self, src_vm_fix, src_vn_fq_name, filters = filters)
sender, receiver = self.start_traffic_scapy(dst_vm_fix, src_vm_fix, 'icmp',
port, port, payload="payload",
icmp_type=icmp_type, icmp_code=icmp_code,count=pkt_cnt)
sent, recv = self.stop_traffic_scapy(sender, receiver)
assert sent != 0, "sent count is ZERO for icmp type %s and code %s" % (icmp_type, icmp_code)
#verify packet count and stop tcpdump
assert verify_tcpdump_count(self, session, pcap, exp_count=0), "pkt count in tcpdump is not ZERO for icmp type %s and code %s" % (icmp_type, icmp_code)
#verify packet count and stop tcpdump
assert verify_tcpdump_count(self, session1, pcap1)
#stop traffic
sent, recv = self.stop_traffic_scapy(sender1, receiver1,recvr=False)
return True
#end test_icmp_error_payload_matching
#end class SecurityGroupRegressionTests7
class SecurityGroupRegressionTests8(BaseSGTest, VerifySecGroup, ConfigPolicy):
@classmethod
def setUpClass(cls):
super(SecurityGroupRegressionTests8, cls).setUpClass()
cls.option = 'openstack'
def runTest(self):
pass
@preposttest_wrapper
def test_flow_to_sg_rule_mapping(self):
"""
Description: test flow to security group rule uuid mapping for
1. default SG
2. user-defined SG
Steps:
1. create resources as defined in topology
2. start traffic for specific protocol which matches with specific security group rule
3. get flow records from agent and verify if sg rule uuid matches with corresponding ingress/egress rule id
Pass criteria:
step 3 should PASS
"""
topology_class_name = sdn_sg_test_topo.sdn_topo_flow_to_sg_rule_mapping
topo = topology_class_name()
try:
# provided by wrapper module if run in parallel test env
topo.build_topo(
project=self.project.project_name,
username=self.project.username,
password=self.project.password,
compute_node_list=self.inputs.compute_ips,
config_option=self.option)
except (AttributeError, NameError):
topo.build_topo(compute_node_list=self.inputs.compute_ips,
config_option=self.option)
setup_obj = self.useFixture(
sdnTopoSetupFixture(self.connections, topo))
out = setup_obj.topo_setup(VmToNodeMapping=topo.vm_node_map,
config_option=self.option)
self.logger.info("Setup completed with result %s" % (out['result']))
self.assertEqual(out['result'], True, out['msg'])
if out['result']:
topo_obj, config_topo = out['data']
proto = 'udp'
port = 10000
src_vm_name = 'vm1'
dst_vm_name = 'vm2'
src_vm_fix = config_topo['vm'][src_vm_name]
dst_vm_fix = config_topo['vm'][dst_vm_name]
src_vn_fix = config_topo['vn'][topo_obj.vn_of_vm[src_vm_name]]
dst_vn_fix = config_topo['vn'][topo_obj.vn_of_vm[dst_vm_name]]
default_secgrp_id = get_secgrp_id_from_name(
self.connections,
':'.join([self.inputs.domain_name,
self.inputs.project_name,
'default']))
# test with default SG
traffic_obj = BaseTraffic.factory(proto=proto)
assert traffic_obj
assert traffic_obj.start(src_vm_fix, dst_vm_fix,
proto, port, port)
assert self.verify_flow_to_sg_rule_mapping(
src_vm_fix,
dst_vm_fix,
src_vn_fix,
dst_vn_fix,
default_secgrp_id,
proto,
port)
sent, recv = traffic_obj.stop()
# test with user-defined SG
sg_name = topo_obj.sg_list[0]
secgrp_id = get_secgrp_id_from_name(
self.connections,
':'.join([self.inputs.domain_name,
self.inputs.project_name,
sg_name]))
src_vm_fix.remove_security_group(secgrp=default_secgrp_id)
dst_vm_fix.remove_security_group(secgrp=default_secgrp_id)
src_vm_fix.add_security_group(secgrp=secgrp_id)
dst_vm_fix.add_security_group(secgrp=secgrp_id)
traffic_obj = BaseTraffic.factory(proto=proto)
assert traffic_obj
assert traffic_obj.start(src_vm_fix, dst_vm_fix,
proto, port, port)
assert self.verify_flow_to_sg_rule_mapping(
src_vm_fix,
dst_vm_fix,
src_vn_fix,
dst_vn_fix,
secgrp_id,
proto,
port)
sent, recv = traffic_obj.stop()
return True
# end test_flow_to_sg_rule_mapping
@preposttest_wrapper
def test_flow_to_sg_rule_mapping_multiple_rules(self):
"""
Description: test flow to security group rule uuid mapping for
1. SG with multiple rules and diff active flows matching diff. rules
2. Multiple SG attached to VMs and diff active flows matching diff. SG
Steps:
1. create resources as defined in topology
2. start traffic for specific protocol which matches with specific security group rule
3. get flow records from agent and verify if sg rule uuid matches with corresponding ingress/egress rule id
Pass criteria:
step 3 should PASS
"""
topology_class_name = sdn_sg_test_topo.sdn_topo_flow_to_sg_rule_mapping
topo = topology_class_name()
try:
# provided by wrapper module if run in parallel test env
topo.build_topo2(
project=self.project.project_name,
username=self.project.username,
password=self.project.password,
compute_node_list=self.inputs.compute_ips,
config_option=self.option)
except (AttributeError, NameError):
topo.build_topo2(compute_node_list=self.inputs.compute_ips,
config_option=self.option)
setup_obj = self.useFixture(
sdnTopoSetupFixture(self.connections, topo))
out = setup_obj.topo_setup(VmToNodeMapping=topo.vm_node_map,
config_option=self.option)
self.logger.info("Setup completed with result %s" % (out['result']))
self.assertEqual(out['result'], True, out['msg'])
if out['result']:
topo_obj, config_topo = out['data']
port = 10000
src_vm_name = 'vm1'
dst_vm_name = 'vm2'
src_vm_fix = config_topo['vm'][src_vm_name]
dst_vm_fix = config_topo['vm'][dst_vm_name]
src_vn_fix = config_topo['vn'][topo_obj.vn_of_vm[src_vm_name]]
dst_vn_fix = config_topo['vn'][topo_obj.vn_of_vm[dst_vm_name]]
# start traffic
traffic_obj_udp = BaseTraffic.factory(proto='udp')
assert traffic_obj_udp
assert traffic_obj_udp.start(src_vm_fix, dst_vm_fix,
'udp', port, port)
traffic_obj_tcp = BaseTraffic.factory(proto='tcp')
assert traffic_obj_tcp
assert traffic_obj_tcp.start(src_vm_fix, dst_vm_fix,
'tcp', port, port)
sender_icmp, receiver_icmp = self.start_traffic_scapy(
src_vm_fix, dst_vm_fix, 'icmp', port, port, payload="payload")
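        # three concurrent flows (UDP, TCP, ICMP); each flow record should carry
        # the uuid of the specific SG rule it matched, verified per protocol below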
sg_name = topo_obj.sg_list[0]
secgrp_id = get_secgrp_id_from_name(
self.connections,
':'.join([self.inputs.domain_name,
self.inputs.project_name,
sg_name]))
assert self.verify_flow_to_sg_rule_mapping(
src_vm_fix,
dst_vm_fix,
src_vn_fix,
dst_vn_fix,
secgrp_id,
'udp',
port)
sg_name = topo_obj.sg_list[1]
secgrp_id = get_secgrp_id_from_name(
self.connections,
':'.join([self.inputs.domain_name,
self.inputs.project_name,
sg_name]))
assert self.verify_flow_to_sg_rule_mapping(
src_vm_fix,
dst_vm_fix,
src_vn_fix,
dst_vn_fix,
secgrp_id,
'tcp',
port)
port = 0
sg_name = topo_obj.sg_list[0]
secgrp_id = get_secgrp_id_from_name(
self.connections,
':'.join([self.inputs.domain_name,
self.inputs.project_name,
sg_name]))
assert self.verify_flow_to_sg_rule_mapping(
src_vm_fix,
dst_vm_fix,
src_vn_fix,
dst_vn_fix,
secgrp_id,
'icmp',
port)
# stop traffic
sent, recv = traffic_obj_udp.stop()
sent, recv = traffic_obj_tcp.stop()
sent, recv = self.stop_traffic_scapy(sender_icmp, receiver_icmp)
return True
#end test_flow_to_sg_rule_mapping_multiple_rules
@preposttest_wrapper
def test_flow_to_sg_rule_mapping_intra_vn(self):
"""
Description: test flow to security group rule uuid mapping for
1. intra VN traffic with diff SG in src and dst VM
Steps:
1. create resources as defined in topology
2. start traffic for specific protocol which matches with specific security group rule
3. get flow records from agent and verify if sg rule uuid matches with corresponding ingress/egress rule id
Pass criteria:
step 3 should PASS
"""
topology_class_name = sdn_sg_test_topo.sdn_topo_icmp_error_handling
topo = topology_class_name()
try:
# provided by wrapper module if run in parallel test env
topo.build_topo2(
project=self.project.project_name,
username=self.project.username,
password=self.project.password, config_option=self.option)
except (AttributeError, NameError):
topo.build_topo2(config_option=self.option)
setup_obj = self.useFixture(
sdnTopoSetupFixture(self.connections, topo))
out = setup_obj.topo_setup(config_option=self.option)
self.logger.info("Setup completed with result %s" % (out['result']))
self.assertEqual(out['result'], True, out['msg'])
if out['result']:
topo_obj, config_topo = out['data']
rule = [{'direction': '>',
'protocol': 'udp',
'dst_addresses': [{'subnet': {'ip_prefix': '0.0.0.0', 'ip_prefix_len': 0}}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'src_addresses': [{'security_group': 'local'}],
},
{'direction': '>',
'protocol': 'udp',
'src_addresses': [{'subnet': {'ip_prefix': '0.0.0.0', 'ip_prefix_len': 0}}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'dst_addresses': [{'security_group': 'local'}],
}]
config_topo['sec_grp'][topo_obj.sg_list[0]].replace_rules(rule)
proto = 'udp'
port = 10000
src_vm_name = 'vm1'
dst_vm_name = 'vm2'
src_vm_fix = config_topo['vm'][src_vm_name]
dst_vm_fix = config_topo['vm'][dst_vm_name]
src_vn_fix = config_topo['vn'][topo_obj.vn_of_vm[src_vm_name]]
dst_vn_fix = config_topo['vn'][topo_obj.vn_of_vm[dst_vm_name]]
src_sg_name = topo_obj.sg_list[0]
dst_sg_name = topo_obj.sg_list[1]
if self.option == 'openstack':
src_vn_fq_name = src_vn_fix.vn_fq_name
dst_vn_fq_name = dst_vn_fix.vn_fq_name
else:
src_vn_fq_name = ':'.join(src_vn_fix._obj.get_fq_name())
dst_vn_fq_name = ':'.join(dst_vn_fix._obj.get_fq_name())
secgrp_id = get_secgrp_id_from_name(
self.connections,
':'.join([self.inputs.domain_name,
self.inputs.project_name,
src_sg_name]))
# start traffic
traffic_obj = BaseTraffic.factory(proto=proto)
assert traffic_obj
assert traffic_obj.start(src_vm_fix, dst_vm_fix,
proto, port, port)
# get the egress rule uuid
rule_uuid = None
rules = list_sg_rules(self.connections, secgrp_id)
for rule in rules:
if rule['direction'] == 'egress' and (rule['ethertype'] == 'IPv4' or \
rule['remote_ip_prefix'] == '0.0.0.0/0') and \
(rule['protocol'] == 'any' or rule['protocol'] == proto):
rule_uuid = rule['id']
break
assert rule_uuid, "Egress rule id could not be found"
test_result = True
nh_dst = dst_vm_fix.tap_intf[dst_vn_fq_name]['flow_key_idx']
nh = src_vm_fix.tap_intf[src_vn_fq_name]['flow_key_idx']
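        # flows are fetched by the nexthop index (flow_key_idx) of the VM's tap
        # interface; the forward flow from the source VM is expected to carry the
        # uuid of the egress rule of the SG applied on the source side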
# verify forward flow on src compute node
if not self.fetch_flow_verify_sg_uuid(
nh, src_vm_fix, dst_vm_fix, port, port, '17',
rule_uuid, src_vm_fix.vm_node_ip):
test_result = False
# verify reverse flow on src compute node
if src_vm_fix.vm_node_ip == dst_vm_fix.vm_node_ip:
nh = nh_dst
if not self.fetch_flow_verify_sg_uuid(
nh, dst_vm_fix, src_vm_fix, port, port, '17',
rule_uuid, src_vm_fix.vm_node_ip):
test_result = False
if src_vm_fix.vm_node_ip != dst_vm_fix.vm_node_ip:
secgrp_id = get_secgrp_id_from_name(
self.connections,
':'.join([self.inputs.domain_name,
self.inputs.project_name,
dst_sg_name]))
# get the ingress rule uuid
rule_uuid = None
rules = list_sg_rules(self.connections, secgrp_id)
for rule in rules:
if rule['direction'] == 'ingress' and \
(rule['protocol'] == 'any' or rule['protocol'] == proto):
rule_uuid = rule['id']
break
assert rule_uuid, "Ingress rule id could not be found"
# verify forward flow on dst compute node
if not self.fetch_flow_verify_sg_uuid(
nh_dst, src_vm_fix, dst_vm_fix, port, port, '17',
rule_uuid, dst_vm_fix.vm_node_ip):
test_result = False
# verify reverse flow on dst compute node
if not self.fetch_flow_verify_sg_uuid(
nh_dst, dst_vm_fix, src_vm_fix, port, port, '17',
rule_uuid, dst_vm_fix.vm_node_ip):
test_result = False
# stop traffic
sent, recv = traffic_obj.stop()
assert test_result
return True
#end test_flow_to_sg_rule_mapping_intra_vn
@preposttest_wrapper
def test_verify_sg_rule_uuid_in_control_api(self):
"""
1. Verify uuid for each sg rule in api/control introspect and neutron cli"""
topology_class_name = None
#
# Get config for test from topology
result = True
msg = []
if not topology_class_name:
topology_class_name = sdn_sg_test_topo.sdn_topo_icmp_error_handling
self.logger.info("Scenario for the test used is: %s" %
(topology_class_name))
topo = topology_class_name()
try:
# provided by wrapper module if run in parallel test env
topo.build_topo2(
project=self.project.project_name,
username=self.project.username,
password=self.project.password,
config_option=self.option)
except (AttributeError, NameError):
topo.build_topo2(config_option=self.option)
setup_obj = self.useFixture(
sdnTopoSetupFixture(self.connections, topo))
out = setup_obj.topo_setup(config_option=self.option)
self.logger.info("Setup completed with result %s" % (out['result']))
self.assertEqual(out['result'], True, out['msg'])
if out['result']:
topo_obj, config_topo = out['data']
rule = [{'direction': '>',
'protocol': 'udp',
'dst_addresses': [{'subnet': {'ip_prefix': '0.0.0.0', 'ip_prefix_len': 0}}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'src_addresses': [{'security_group': 'local'}],
'ethertype': 'IPv4'
},
{'direction': '>',
'protocol': 'udp',
'src_addresses': [{'subnet': {'ip_prefix': '0.0.0.0', 'ip_prefix_len': 0}}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'dst_addresses': [{'security_group': 'local'}],
'ethertype': 'IPv4'
}]
config_topo['sec_grp'][topo_obj.sg_list[0]].replace_rules(rule)
sg_list = ['default', topo_obj.sg_list[0]]
proto = 'udp'
try:
prj_name = self.project.project_name
except (AttributeError, NameError):
prj_name = 'admin'
for sg_name in sg_list:
secgrp_id = get_secgrp_id_from_name(
self.connections,
':'.join([self.inputs.domain_name,
self.inputs.project_name,
sg_name]))
# get the egress and ingress rule uuid
egress_ipv4_id = None
egress_ipv6_id = None
ingress_ipv4_id = None
ingress_ipv6_id = None
rules = list_sg_rules(self.connections, secgrp_id)
for rule in rules:
if rule['direction'] == 'egress' and rule['ethertype'] == 'IPv4':
egress_ipv4_id = rule['id']
elif rule['direction'] == 'ingress' and rule['ethertype'] == 'IPv4':
ingress_ipv4_id = rule['id']
elif rule['direction'] == 'ingress' and rule['ethertype'] == 'IPv6':
ingress_ipv6_id = rule['id']
elif rule['direction'] == 'egress' and rule['ethertype'] == 'IPv6':
egress_ipv6_id = rule['id']
assert egress_ipv4_id, "Egress rule id could not be found"
assert ingress_ipv4_id, "Ingress rule id could not be found"
# get SG rule uuid from api and match with neutron uuid
api_secgrp_obj = self.api_s_inspect.get_cs_secgrp(
project=prj_name,
secgrp=sg_name,
refresh=True)
uuid_egress_ipv4 = None
uuid_ingress_ipv4 = None
uuid_egress_ipv6 = None
uuid_ingress_ipv6 = None
for rule in api_secgrp_obj['security-group']['security_group_entries']['policy_rule']:
if rule['src_addresses'][0]['security_group'] == "local" and rule['ethertype'] == 'IPv4':
uuid_egress_ipv4 = rule['rule_uuid']
elif rule['dst_addresses'][0]['security_group'] == "local" and rule['ethertype'] == 'IPv4':
uuid_ingress_ipv4 = rule['rule_uuid']
elif rule['src_addresses'][0]['security_group'] == "local" and rule['ethertype'] == 'IPv6':
uuid_egress_ipv6 = rule['rule_uuid']
elif rule['dst_addresses'][0]['security_group'] == "local" and rule['ethertype'] == 'IPv6':
uuid_ingress_ipv6 = rule['rule_uuid']
assert uuid_egress_ipv4 == egress_ipv4_id, "egress IPv4 rule uuid is not same in API and \
neutron for SG:%s" % (sg_name)
assert uuid_ingress_ipv4 == ingress_ipv4_id, "ingress IPv4 rule uuid is not same in API \
and neutron for SG:%s" % (sg_name)
if ingress_ipv6_id:
assert ingress_ipv6_id == uuid_ingress_ipv6, "ingress IPv6 rule uuid is not same in API \
and neutron for SG:%s" % (sg_name)
if egress_ipv6_id:
assert egress_ipv6_id == uuid_egress_ipv6, "egress IPv6 rule uuid is not same in API \
and neutron for SG:%s" % (sg_name)
self.logger.info("%s security group rule uuid matches in API with neutron" % (sg_name))
# get SG rule uuid from control node and match with neutron uuid
for cn in self.inputs.bgp_ips:
uuid_egress_ipv4 = None
uuid_ingress_ipv4 = None
cn_secgrp_obj = self.cn_inspect[cn].get_cn_sec_grp(
project=prj_name,
secgrp=sg_name)
for rule in cn_secgrp_obj['obj_info'][0]['data']['security-group-entries']:
if rule['src-addresses']['security-group'] == 'local' and rule['ethertype'] == 'IPv4':
uuid_egress_ipv4 = rule['rule-uuid']
elif rule['dst-addresses']['security-group'] == 'local' and rule['ethertype'] == 'IPv4':
uuid_ingress_ipv4 = rule['rule-uuid']
elif rule['src-addresses']['security-group'] == 'local' and rule['ethertype'] == 'IPv6':
uuid_egress_ipv6 = rule['rule-uuid']
elif rule['dst-addresses']['security-group'] == 'local' and rule['ethertype'] == 'IPv6':
uuid_ingress_ipv6 = rule['rule-uuid']
assert uuid_egress_ipv4 == egress_ipv4_id, "egress rule uuid are not same in control \
and neutron for SG:%s" % (sg_name)
assert uuid_ingress_ipv4 == ingress_ipv4_id, "ingress rule uuid are not same in control \
and neutron for SG:%s" % (sg_name)
if ingress_ipv6_id:
assert ingress_ipv6_id == uuid_ingress_ipv6, "ingress IPv6 rule uuid is not same in control \
and neutron for SG:%s" % (sg_name)
if egress_ipv6_id:
assert egress_ipv6_id == uuid_egress_ipv6, "egress IPv6 rule uuid is not same in control \
and neutron for SG:%s" % (sg_name)
self.logger.info("%s security group rule uuid matches in control with neutron" % (sg_name))
return True
# end test_verify_sg_rule_uuid_in_control_api
#end class SecurityGroupRegressionTests8
class SecurityGroupRegressionTests9(BaseSGTest, VerifySecGroup, ConfigPolicy):
@classmethod
def setUpClass(cls):
super(SecurityGroupRegressionTests9, cls).setUpClass()
cls.option = 'openstack'
def runTest(self):
pass
@preposttest_wrapper
def test_add_remove_default_sg_active_flow(self):
""" add/remove default SG from VM when flow is active and traffic from both ends"""
topology_class_name = sdn_sg_test_topo.sdn_topo_flow_to_sg_rule_mapping
topo_obj, config_topo = self.create_topo_setup(topology_class_name, "build_topo")
port = 10000
src_vm_name = 'vm1'
dst_vm_name = 'vm2'
src_vm_fix = config_topo['vm'][src_vm_name]
dst_vm_fix = config_topo['vm'][dst_vm_name]
src_vn_fix = config_topo['vn'][topo_obj.vn_of_vm[src_vm_name]]
dst_vn_fix = config_topo['vn'][topo_obj.vn_of_vm[dst_vm_name]]
sg_name = 'default'
secgrp_id = get_secgrp_id_from_name(
self.connections,
':'.join([self.inputs.domain_name,
self.inputs.project_name,
sg_name]))
filters1 = '\'(udp and src host %s and dst host %s)\'' % (
src_vm_fix.vm_ip, dst_vm_fix.vm_ip)
filters2 = '\'(tcp and src host %s and dst host %s)\'' % (
dst_vm_fix.vm_ip, src_vm_fix.vm_ip)
sender1, receiver1 = self.start_traffic_scapy(
src_vm_fix, dst_vm_fix, 'udp', port, port, payload="payload")
sender2, receiver2 = self.start_traffic_scapy(
dst_vm_fix, src_vm_fix, 'tcp', port, port, payload="payload")
assert self.verify_traffic_on_vms(src_vm_fix, src_vn_fix,
dst_vm_fix, dst_vn_fix,
filters2, filters1)
src_vm_fix.remove_security_group(secgrp=secgrp_id)
assert self.verify_traffic_on_vms(src_vm_fix, src_vn_fix,
dst_vm_fix, dst_vn_fix,
filters2, filters1,
src_exp_count=0, dst_exp_count=0)
src_vm_fix.add_security_group(secgrp=secgrp_id)
assert self.verify_traffic_on_vms(src_vm_fix, src_vn_fix,
dst_vm_fix, dst_vn_fix,
filters2, filters1)
# stop traffic
sent, recv = self.stop_traffic_scapy(sender1, receiver1)
sent, recv = self.stop_traffic_scapy(sender2, receiver2)
return True
# end test_add_remove_default_sg_active_flow
@preposttest_wrapper
def test_add_remove_sg_active_flow1(self):
""" add/remove SG from VM when flow is active
1.Traffic from both ends
2.Test for SG with rule with remote as sg for both ingress-egress"""
topology_class_name = sdn_sg_test_topo.sdn_topo_flow_to_sg_rule_mapping
topo_obj, config_topo = self.create_topo_setup(topology_class_name, "build_topo")
sg_allow_all = self.create_sec_group_allow_all()
port = 10000
src_vm_name = 'vm1'
dst_vm_name = 'vm2'
src_vm_fix = config_topo['vm'][src_vm_name]
dst_vm_fix = config_topo['vm'][dst_vm_name]
src_vn_fix = config_topo['vn'][topo_obj.vn_of_vm[src_vm_name]]
dst_vn_fix = config_topo['vn'][topo_obj.vn_of_vm[dst_vm_name]]
sg_name = topo_obj.sg_list[0]
secgrp_id = get_secgrp_id_from_name(
self.connections,
':'.join([self.inputs.domain_name,
self.inputs.project_name,
sg_name]))
default_sg_id = get_secgrp_id_from_name(
self.connections,
':'.join([self.inputs.domain_name,
self.inputs.project_name,
'default']))
src_vm_fix.remove_security_group(secgrp=default_sg_id)
dst_vm_fix.remove_security_group(secgrp=default_sg_id)
src_vm_fix.add_security_group(secgrp=secgrp_id)
dst_vm_fix.add_security_group(secgrp=secgrp_id)
# ingress-egress from same sg
rule = [{'direction': '>',
'protocol': 'udp',
'dst_addresses': [{'security_group': topo_obj.domain + ':' + topo_obj.project + ':' + sg_name}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'src_addresses': [{'security_group': 'local'}],
},
{'direction': '>',
'protocol': 'udp',
'src_addresses': [{'security_group': topo_obj.domain + ':' + topo_obj.project + ':' + sg_name}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'dst_addresses': [{'security_group': 'local'}],
}]
config_topo['sec_grp'][sg_name].replace_rules(rule)
filters1 = '\'(udp and src host %s and dst host %s)\'' % (
src_vm_fix.vm_ip, dst_vm_fix.vm_ip)
filters2 = '\'(udp and src host %s and dst host %s)\'' % (
dst_vm_fix.vm_ip, src_vm_fix.vm_ip)
sender1, receiver1 = self.start_traffic_scapy(
src_vm_fix, dst_vm_fix, 'udp', port, port, payload="payload")
sender2, receiver2 = self.start_traffic_scapy(
dst_vm_fix, src_vm_fix, 'udp', port, port, payload="payload")
assert self.verify_traffic_on_vms(src_vm_fix, src_vn_fix,
dst_vm_fix, dst_vn_fix,
filters2, filters1)
src_vm_fix.remove_security_group(secgrp=secgrp_id)
src_vm_fix.add_security_group(secgrp=sg_allow_all)
assert self.verify_traffic_on_vms(src_vm_fix, src_vn_fix,
dst_vm_fix, dst_vn_fix,
filters2, filters1,
src_exp_count=0, dst_exp_count=0)
# stop traffic
sent, recv = self.stop_traffic_scapy(sender1, receiver1)
sent, recv = self.stop_traffic_scapy(sender2, receiver2)
src_vm_fix.remove_security_group(secgrp=sg_allow_all)
return True
# end test_add_remove_sg_active_flow1
@preposttest_wrapper
def test_add_remove_sg_active_flow2(self):
""" add/remove SG from VM when flow is active
1.Traffic from both ends
2.Test for SG with egress cidr rule,ingress sg"""
topology_class_name = sdn_sg_test_topo.sdn_topo_flow_to_sg_rule_mapping
topo_obj, config_topo = self.create_topo_setup(topology_class_name, "build_topo")
sg_allow_all = self.create_sec_group_allow_all()
port = 10000
port2 = 11000
src_vm_name = 'vm1'
dst_vm_name = 'vm2'
src_vm_fix = config_topo['vm'][src_vm_name]
dst_vm_fix = config_topo['vm'][dst_vm_name]
src_vn_fix = config_topo['vn'][topo_obj.vn_of_vm[src_vm_name]]
dst_vn_fix = config_topo['vn'][topo_obj.vn_of_vm[dst_vm_name]]
sg_name = topo_obj.sg_list[0]
secgrp_id = get_secgrp_id_from_name(
self.connections,
':'.join([self.inputs.domain_name,
self.inputs.project_name,
sg_name]))
default_sg_id = get_secgrp_id_from_name(
self.connections,
':'.join([self.inputs.domain_name,
self.inputs.project_name,
'default']))
src_vm_fix.remove_security_group(secgrp=default_sg_id)
dst_vm_fix.remove_security_group(secgrp=default_sg_id)
src_vm_fix.add_security_group(secgrp=secgrp_id)
dst_vm_fix.add_security_group(secgrp=secgrp_id)
# start the traffic from src VM
sender1, receiver1 = self.start_traffic_scapy(
src_vm_fix, dst_vm_fix, 'udp', port, port, payload="payload")
# start the traffic from dst VM
sender2, receiver2 = self.start_traffic_scapy(
dst_vm_fix, src_vm_fix, 'udp', port2, port2, payload="payload")
# ingress from same sg and egress to all
rule = [{'direction': '>',
'protocol': 'udp',
'dst_addresses': [{'subnet': {'ip_prefix': '0.0.0.0', 'ip_prefix_len': 0}}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'src_addresses': [{'security_group': 'local'}],
},
{'direction': '>',
'protocol': 'udp',
'src_addresses': [{'security_group': topo_obj.domain + ':' + topo_obj.project + ':' + sg_name}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'dst_addresses': [{'security_group': 'local'}],
}]
config_topo['sec_grp'][sg_name].replace_rules(rule)
filters1 = '\'(udp and src host %s and dst host %s)\'' % (
src_vm_fix.vm_ip, dst_vm_fix.vm_ip)
filters2 = '\'(udp and src host %s and dst host %s)\'' % (
dst_vm_fix.vm_ip, src_vm_fix.vm_ip)
assert self.verify_traffic_on_vms(src_vm_fix, src_vn_fix,
dst_vm_fix, dst_vn_fix,
filters2, filters1)
src_vm_fix.remove_security_group(secgrp=secgrp_id)
src_vm_fix.add_security_group(secgrp=sg_allow_all)
assert self.verify_traffic_on_vms(src_vm_fix, src_vn_fix,
dst_vm_fix, dst_vn_fix,
filters2, filters1,
dst_exp_count=0)
# stop traffic
sent, recv = self.stop_traffic_scapy(sender1, receiver1)
sent, recv = self.stop_traffic_scapy(sender2, receiver2)
src_vm_fix.remove_security_group(secgrp=sg_allow_all)
return True
# end test_add_remove_sg_active_flow2
@preposttest_wrapper
def test_add_remove_sg_active_flow3(self):
""" add/remove SG from VM when flow is active
1. Traffic from both ends
2. Test for SG with ingress cidr and egress sg"""
topology_class_name = sdn_sg_test_topo.sdn_topo_flow_to_sg_rule_mapping
topo_obj, config_topo = self.create_topo_setup(topology_class_name, "build_topo")
sg_allow_all = self.create_sec_group_allow_all()
port = 10000
port2 = 11000
src_vm_name = 'vm1'
dst_vm_name = 'vm2'
src_vm_fix = config_topo['vm'][src_vm_name]
dst_vm_fix = config_topo['vm'][dst_vm_name]
src_vn_fix = config_topo['vn'][topo_obj.vn_of_vm[src_vm_name]]
dst_vn_fix = config_topo['vn'][topo_obj.vn_of_vm[dst_vm_name]]
sg_name = topo_obj.sg_list[0]
secgrp_id = get_secgrp_id_from_name(
self.connections,
':'.join([self.inputs.domain_name,
self.inputs.project_name,
sg_name]))
default_sg_id = get_secgrp_id_from_name(
self.connections,
':'.join([self.inputs.domain_name,
self.inputs.project_name,
'default']))
src_vm_fix.remove_security_group(secgrp=default_sg_id)
dst_vm_fix.remove_security_group(secgrp=default_sg_id)
src_vm_fix.add_security_group(secgrp=secgrp_id)
dst_vm_fix.add_security_group(secgrp=secgrp_id)
# start the traffic from src VM
sender1, receiver1 = self.start_traffic_scapy(
src_vm_fix, dst_vm_fix, 'udp', port, port, payload="payload")
# start the traffic from dst VM
sender2, receiver2 = self.start_traffic_scapy(
dst_vm_fix, src_vm_fix, 'udp', port2, port2, payload="payload")
# egress to same sg and ingress from all
rule = [{'direction': '>',
'protocol': 'udp',
'dst_addresses': [{'security_group': topo_obj.domain + ':' + topo_obj.project + ':' + sg_name}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'src_addresses': [{'security_group': 'local'}],
},
{'direction': '>',
'protocol': 'udp',
'src_addresses': [{'subnet': {'ip_prefix': '0.0.0.0', 'ip_prefix_len': 0}}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'dst_addresses': [{'security_group': 'local'}],
}]
config_topo['sec_grp'][sg_name].replace_rules(rule)
filters1 = '\'(udp and src host %s and dst host %s)\'' % (
src_vm_fix.vm_ip, dst_vm_fix.vm_ip)
filters2 = '\'(udp and src host %s and dst host %s)\'' % (
dst_vm_fix.vm_ip, src_vm_fix.vm_ip)
assert self.verify_traffic_on_vms(src_vm_fix, src_vn_fix,
dst_vm_fix, dst_vn_fix,
filters2, filters1)
src_vm_fix.remove_security_group(secgrp=secgrp_id)
src_vm_fix.add_security_group(secgrp=sg_allow_all)
assert self.verify_traffic_on_vms(src_vm_fix, src_vn_fix,
dst_vm_fix, dst_vn_fix,
filters2, filters1,
src_exp_count=0)
# stop traffic
sent, recv = self.stop_traffic_scapy(sender1, receiver1)
sent, recv = self.stop_traffic_scapy(sender2, receiver2)
src_vm_fix.remove_security_group(secgrp=sg_allow_all)
return True
# end test_add_remove_sg_active_flow3
@preposttest_wrapper
def test_add_remove_sg_active_flow4(self):
""" add/remove SG from VM when flow is active
1. Traffic from both ends
2. Test for SG with cidr both ingress-egress"""
topology_class_name = sdn_sg_test_topo.sdn_topo_flow_to_sg_rule_mapping
topo_obj, config_topo = self.create_topo_setup(topology_class_name, "build_topo")
port = 10000
src_vm_name = 'vm1'
dst_vm_name = 'vm2'
src_vm_fix = config_topo['vm'][src_vm_name]
dst_vm_fix = config_topo['vm'][dst_vm_name]
src_vn_fix = config_topo['vn'][topo_obj.vn_of_vm[src_vm_name]]
dst_vn_fix = config_topo['vn'][topo_obj.vn_of_vm[dst_vm_name]]
sg_name = topo_obj.sg_list[0]
secgrp_id = get_secgrp_id_from_name(
self.connections,
':'.join([self.inputs.domain_name,
self.inputs.project_name,
sg_name]))
default_sg_id = get_secgrp_id_from_name(
self.connections,
':'.join([self.inputs.domain_name,
self.inputs.project_name,
'default']))
src_vm_fix.remove_security_group(secgrp=default_sg_id)
dst_vm_fix.remove_security_group(secgrp=default_sg_id)
src_vm_fix.add_security_group(secgrp=secgrp_id)
dst_vm_fix.add_security_group(secgrp=secgrp_id)
# start the traffic from src VM
sender1, receiver1 = self.start_traffic_scapy(
src_vm_fix, dst_vm_fix, 'udp', port, port, payload="payload")
# start the traffic from dst VM
sender2, receiver2 = self.start_traffic_scapy(
dst_vm_fix, src_vm_fix, 'udp', port, port, payload="payload")
# ingress-egress from all
rule = [{'direction': '>',
'protocol': 'udp',
'dst_addresses': [{'subnet': {'ip_prefix': '0.0.0.0', 'ip_prefix_len': 0}}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'src_addresses': [{'security_group': 'local'}],
},
{'direction': '>',
'protocol': 'udp',
'src_addresses': [{'subnet': {'ip_prefix': '0.0.0.0', 'ip_prefix_len': 0}}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'dst_addresses': [{'security_group': 'local'}],
}]
config_topo['sec_grp'][sg_name].replace_rules(rule)
filters1 = '\'(udp and src host %s and dst host %s)\'' % (
src_vm_fix.vm_ip, dst_vm_fix.vm_ip)
filters2 = '\'(udp and src host %s and dst host %s)\'' % (
dst_vm_fix.vm_ip, src_vm_fix.vm_ip)
assert self.verify_traffic_on_vms(src_vm_fix, src_vn_fix,
dst_vm_fix, dst_vn_fix,
filters2, filters1)
src_vm_fix.remove_security_group(secgrp=secgrp_id)
src_vm_fix.add_security_group(secgrp=secgrp_id)
assert self.verify_traffic_on_vms(src_vm_fix, src_vn_fix,
dst_vm_fix, dst_vn_fix,
filters2, filters1)
# stop traffic
sent, recv = self.stop_traffic_scapy(sender1, receiver1)
sent, recv = self.stop_traffic_scapy(sender2, receiver2)
return True
# end test_add_remove_sg_active_flow4
#end class SecurityGroupRegressionTests9
class SecurityGroupSynAckTest(BaseSGTest, VerifySecGroup, ConfigPolicy):
@classmethod
def setUpClass(cls):
super(SecurityGroupSynAckTest, cls).setUpClass()
cls.option = 'openstack'
def runTest(self):
pass
@preposttest_wrapper
def test_syn_ack_create_flow(self):
"""
Description:
verify if SYN ack is allowed and flow is created again after flow is expired
Steps:
1. configure secgroupA with egress rule
2. configure secgroupB with ingress/egress rule
3. Make sure traffic from VM(secgrpB) to VM(secgrpA) fails as the VM(secgrpA) doesn't allow ingress traffic
4. Send traffic from VM(secgrpA) to VM(secgrpB), expected to pass through
5. Send SYN from VM(secgrpA) to VM(secgrpB).
        6. recv SYN at VM(secgrpB) and wait for flow to expire (180 sec)
        7. Send SYN+ACK from VM(secgrpB) to VM(secgrpA); though the flow is expired and VM(secgrpA) denies ingress traffic, the SYN+ACK packet of the initial SYN should go through.
Pass criteria:
step 7 should PASS
"""
topology_class_name = sdn_sg_test_topo.sdn_topo_icmp_error_handling
topo = topology_class_name()
try:
# provided by wrapper module if run in parallel test env
topo.build_topo2(
project=self.project.project_name,
username=self.project.username,
password=self.project.password,
compute_node_list=self.inputs.compute_ips)
        except (AttributeError, NameError):
topo.build_topo2(compute_node_list=self.inputs.compute_ips)
topo.sg_rules[topo.sg_list[0]] = [
{'direction': '>',
'protocol': 'any',
'dst_addresses': [{'subnet': {'ip_prefix': '0.0.0.0',
'ip_prefix_len': 0}}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'src_addresses': [{'security_group': 'local'}],
}]
topo.sg_rules[topo.sg_list[1]] = [
{'direction': '>',
'protocol': 'any',
'dst_addresses': [{'subnet': {'ip_prefix': '0.0.0.0',
'ip_prefix_len': 0}}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'src_addresses': [{'security_group': 'local'}],
},
{'direction': '>',
'protocol': 'any',
'src_addresses': [{'subnet': {'ip_prefix': '0.0.0.0',
'ip_prefix_len': 0}}],
'dst_ports': [{'start_port': 0, 'end_port': -1}],
'src_ports': [{'start_port': 0, 'end_port': -1}],
'dst_addresses': [{'security_group': 'local'}],
}]
setup_obj = self.useFixture(
sdnTopoSetupFixture(self.connections, topo))
out = setup_obj.topo_setup(vms_on_single_compute=True)
self.logger.info("Setup completed with result %s" % (out['result']))
self.assertEqual(out['result'], True, out['msg'])
if out['result']:
topo_obj, config_topo = out['data']
src_vm_name = 'vm1'
src_vm_fix = config_topo['vm'][src_vm_name]
src_vn_fix = config_topo['vn'][topo_obj.vn_of_vm[src_vm_name]]
dst_vm_name = 'vm2'
dst_vm_fix = config_topo['vm'][dst_vm_name]
dst_vn_fix = config_topo['vn'][topo_obj.vn_of_vm[dst_vm_name]]
pkg = 'syn_client.py'
self.logger.info("copying syn client to the compute node.")
path = os.getcwd() + '/tcutils/pkgs/syn_ack_test/' + pkg
host_compute = {
'username': self.inputs.host_data[src_vm_fix.vm_node_ip]['username'],
'password': self.inputs.host_data[src_vm_fix.vm_node_ip]['password'],
'ip': src_vm_fix.vm_node_ip}
copy_file_to_server(host_compute, path, '/tmp', pkg)
self.logger.info("copying syn client from compute node to VM")
with settings(host_string='%s@%s' % (self.inputs.username,
src_vm_fix.vm_node_ip),
password=self.inputs.password, warn_only=True,
abort_on_prompts=False):
path = '/tmp/' + pkg
output = fab_put_file_to_vm(
host_string='%s@%s' %
(src_vm_fix.vm_username,
src_vm_fix.local_ip),
password=src_vm_fix.vm_password,
src=path,
dest='/tmp')
pkg = 'syn_server.py'
self.logger.info("copying syn server to the compute node.")
path = os.getcwd() + '/tcutils/pkgs/syn_ack_test/' + pkg
host_compute = {
'username': self.inputs.username,
'password': self.inputs.password,
'ip': dst_vm_fix.vm_node_ip}
copy_file_to_server(host_compute, path, '/tmp', pkg)
self.logger.info("copying syn server from compute node to VM")
with settings(host_string='%s@%s' % (self.inputs.username,
dst_vm_fix.vm_node_ip),
password=self.inputs.password, warn_only=True,
abort_on_prompts=False):
path = '/tmp/' + pkg
output = fab_put_file_to_vm(
host_string='%s@%s' %
(dst_vm_fix.vm_username,
dst_vm_fix.local_ip),
password=dst_vm_fix.vm_password,
src=path,
dest='/tmp')
cmd1 = 'chmod +x /tmp/syn_server.py;/tmp/syn_server.py %s %s \
2>/tmp/server.log 1>/tmp/server.log' \
% (src_vm_fix.vm_ip, dst_vm_fix.vm_ip)
cmd2 = 'chmod +x /tmp/syn_client.py;/tmp/syn_client.py %s %s \
2>/tmp/client.log 1>/tmp/client.log' \
% (dst_vm_fix.vm_ip, src_vm_fix.vm_ip)
output_cmd_dict = dst_vm_fix.run_cmd_on_vm(cmds=[cmd1],
as_sudo=True, as_daemon=True)
output_cmd_dict = src_vm_fix.run_cmd_on_vm(cmds=[cmd2],
as_sudo=True, as_daemon=True)
sleep(1)
#verify flow created
inspect_h1 = self.agent_inspect[src_vm_fix.vm_node_ip]
flow_rec1 = None
sport = '8100'
dport = '8000'
vn_fq_name=src_vm_fix.vn_fq_name
flow_timeout = 180
flow_rec1 = inspect_h1.get_vna_fetchflowrecord(
nh=src_vm_fix.tap_intf[vn_fq_name]['flow_key_idx'],
sip=src_vm_fix.vm_ip,
dip=dst_vm_fix.vm_ip,
sport=sport,
dport=dport,
protocol='6')
assert flow_rec1
#wait for flow to expire
sleep(flow_timeout+2)
#verify flow created again
flow_rec1 = inspect_h1.get_vna_fetchflowrecord(
nh=src_vm_fix.tap_intf[vn_fq_name]['flow_key_idx'],
sip=src_vm_fix.vm_ip,
dip=dst_vm_fix.vm_ip,
sport=sport,
dport=dport,
protocol='6')
assert flow_rec1
flow_rec1 = inspect_h1.get_vna_fetchflowrecord(
nh=dst_vm_fix.tap_intf[vn_fq_name]['flow_key_idx'],
sip=dst_vm_fix.vm_ip,
dip=src_vm_fix.vm_ip,
sport=dport,
dport=sport,
protocol='6')
assert flow_rec1
return True
#end test_syn_ack_create_flow
# end class SecurityGroupSynAckTest
#creating new classes to run all tests with contrail apis
class SecurityGroupBasicRegressionTests1_contrail(test_regression_basic.SecurityGroupBasicRegressionTests1):
@classmethod
def setUpClass(cls):
super(SecurityGroupBasicRegressionTests1_contrail, cls).setUpClass()
cls.option = 'contrail'
class SecurityGroupRegressionTests2_contrail(SecurityGroupRegressionTests2):
@classmethod
def setUpClass(cls):
super(SecurityGroupRegressionTests2, cls).setUpClass()
cls.option = 'contrail'
class SecurityGroupRegressionTests3_contrail(SecurityGroupRegressionTests3):
@classmethod
def setUpClass(cls):
super(SecurityGroupRegressionTests3, cls).setUpClass()
cls.option = 'contrail'
class SecurityGroupRegressionTests4_contrail(SecurityGroupRegressionTests4):
@classmethod
def setUpClass(cls):
super(SecurityGroupRegressionTests4, cls).setUpClass()
cls.option = 'contrail'
class SecurityGroupRegressionTests5_contrail(SecurityGroupRegressionTests5):
@classmethod
def setUpClass(cls):
super(SecurityGroupRegressionTests5, cls).setUpClass()
cls.option = 'contrail'
class SecurityGroupRegressionTests6_contrail(SecurityGroupRegressionTests6):
@classmethod
def setUpClass(cls):
super(SecurityGroupRegressionTests6, cls).setUpClass()
cls.option = 'contrail'
class SecurityGroupRegressionTests7_contrail(SecurityGroupRegressionTests7):
@classmethod
def setUpClass(cls):
super(SecurityGroupRegressionTests7, cls).setUpClass()
cls.option = 'contrail'
class SecurityGroupRegressionTests8_contrail(SecurityGroupRegressionTests8):
@classmethod
def setUpClass(cls):
super(SecurityGroupRegressionTests8, cls).setUpClass()
cls.option = 'contrail'
class SecurityGroupRegressionTests9_contrail(SecurityGroupRegressionTests9):
@classmethod
def setUpClass(cls):
super(SecurityGroupRegressionTests9, cls).setUpClass()
cls.option = 'contrail'
| [
"[email protected]"
] | |
93cdae512cf458fe26263ccfd8795b2587eed450 | e074be8c042a872c3c97abe6e01ccaad101f0564 | /appr/models/kv/etcd/blob.py | 17855f9cedc685bea9cfb1616fc8beca065f1a7e | [
"Apache-2.0"
] | permissive | quay/appr | 1a8f80a4a3fcaf92403cae3cba27a5fc29aeea8f | 8e3dc3417c3b43eacb6ebe9543155b3586a75146 | refs/heads/master | 2022-07-13T22:59:24.023665 | 2022-07-05T20:54:56 | 2022-07-05T20:54:56 | 233,668,674 | 1 | 2 | Apache-2.0 | 2022-07-05T20:54:56 | 2020-01-13T18:50:19 | Python | UTF-8 | Python | false | false | 238 | py | from __future__ import absolute_import, division, print_function
from appr.models.kv.blob_kv_base import BlobKvBase
from appr.models.kv.etcd.models_index import ModelsIndexEtcd
class Blob(BlobKvBase):
index_class = ModelsIndexEtcd
| [
"[email protected]"
] | |
68717c72dc0d4f2c33bd500983a9437f5e933a2b | b05b89e1f6378905bbb62e2a2bf2d4f8e3187932 | /reverseLinkedListRecursive.py | 99ef5fe1734f0dd94fb91ef0d068505c0e4d5a81 | [
"MIT"
] | permissive | anishmo99/Daily-Interview-Pro | c959cd336209132aebad67a409df685e654cfdfc | d8724e8feec558ab1882d22c9ca63b850b767753 | refs/heads/master | 2023-04-10T08:09:46.089227 | 2021-04-27T07:27:38 | 2021-04-27T07:27:38 | 269,157,996 | 1 | 1 | MIT | 2020-06-08T07:09:19 | 2020-06-03T17:57:21 | C++ | UTF-8 | Python | false | false | 471 | py | # Definition for singly-linked list.
# class ListNode:
# def __init__(self, val=0, next=None):
# self.val = val
# self.next = next
class Solution:
def reverseList(self, head: ListNode) -> ListNode:
if not head or not head.next:
return head
else:
new_head = self.reverseList(head.next)
new_tail = head.next
new_tail.next = head
head.next = None
return new_head | [
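# Minimal usage sketch (assumed driver, not part of the original submission;
# it relies on the ListNode constructor shown in the comment above):
# head = ListNode(1, ListNode(2, ListNode(3)))
# node = Solution().reverseList(head)
# while node:              # prints 3, 2, 1
#     print(node.val)
#     node = node.next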
"[email protected]"
] | |
9e567a091572b54744008d9eb043669ee4cda949 | 2aebde5f6ad67fbfed26427b5463036cc752eee4 | /model/news_crawler/sina_news/sina_news_daily.py | 0176b67554b8f743be47ea51d239af7657f4437d | [] | no_license | L-S-G/duck_prophet | be46d4d8fc63eb9030f55947626ffe056415eb8c | 23decf840d1431f0df1bcc8bd647c1ae01adec4e | refs/heads/master | 2023-02-23T11:51:32.262971 | 2021-01-08T10:25:18 | 2021-01-08T10:25:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 261 | py | # -*- coding:utf-8 -*-
import sys
from config.news_crawler.sina_news.sina_news_conf import entrance_list
import requests
import re
import Queue
reload(sys)
sys.setdefaultencoding('utf-8')
class SinaNewsDaily():
    # Crawl Sina news articles daily
pass
| [
"[email protected]"
] | |
30fc81e3450db9c9effaf08cd8ee119875de164a | f5627a74bb6b8923b639fad71033b18c047cd32e | /telemetry/telemetry/internal/platform/tracing_agent/battor_tracing_agent.py | d973bb13b2066ad7063a3f8a711ebea404827206 | [
"BSD-3-Clause"
] | permissive | fanarm/catapult | 49c02ffede981ef07b13069f5ce7483e17e4c3e8 | 26750ce5fe46882abb4e72dd488a1a08a2352b0f | refs/heads/master | 2021-01-15T13:14:38.301954 | 2016-09-20T10:28:20 | 2016-09-20T10:28:20 | 66,614,707 | 0 | 0 | null | 2016-08-26T03:48:41 | 2016-08-26T03:48:40 | null | UTF-8 | Python | false | false | 3,845 | py | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
from battor import battor_error
from battor import battor_wrapper
from catapult_base import cloud_storage
from devil.android import battery_utils
from py_trace_event import trace_time
from telemetry.internal.platform import tracing_agent
from telemetry.internal.util import atexit_with_log
from telemetry.timeline import trace_data
def _ReenableChargingIfNeeded(battery):
if not battery.GetCharging():
battery.SetCharging(True)
logging.info('Charging status checked at exit.')
class BattOrTracingAgent(tracing_agent.TracingAgent):
"""A tracing agent for getting power data from a BattOr device.
BattOrTracingAgent allows Telemetry to issue high-level tracing commands
(StartTracing, StopTracing, RecordClockSyncMarker) to BattOrs, which are
high-frequency power monitors used for battery testing.
"""
def __init__(self, platform_backend):
super(BattOrTracingAgent, self).__init__(platform_backend)
self._platform_backend = platform_backend
android_device = (
platform_backend.device if platform_backend.GetOSName() == 'android'
else None)
self._battery = (
battery_utils.BatteryUtils(platform_backend.device)
if platform_backend.GetOSName() == 'android' else None)
self._battor = battor_wrapper.BattorWrapper(
platform_backend.GetOSName(), android_device=android_device,
serial_log_bucket=cloud_storage.TELEMETRY_OUTPUT)
@classmethod
def IsSupported(cls, platform_backend):
"""Returns True if BattOr tracing is available."""
if platform_backend.GetOSName() == 'android':
# TODO(rnephew): When we pass BattOr device map into Telemetry, change
# this to reflect that.
return battor_wrapper.IsBattOrConnected(
'android', android_device=platform_backend.device)
return battor_wrapper.IsBattOrConnected(platform_backend.GetOSName())
def StartAgentTracing(self, config, timeout):
"""Start tracing on the BattOr.
Args:
config: A TracingConfig instance.
timeout: number of seconds that this tracing agent should try to start
tracing until timing out.
Returns:
True if the tracing agent started successfully.
"""
if not config.enable_battor_trace:
return False
try:
if self._battery:
self._battery.SetCharging(False)
atexit_with_log.Register(_ReenableChargingIfNeeded, self._battery)
self._battor.StartShell()
self._battor.StartTracing()
return True
except battor_error.BattorError:
if self._battery:
self._battery.SetCharging(True)
raise
def StopAgentTracing(self):
"""Stops tracing on the BattOr."""
try:
self._battor.StopTracing()
finally:
if self._battery:
self._battery.SetCharging(True)
def SupportsExplicitClockSync(self):
return self._battor.SupportsExplicitClockSync()
def RecordClockSyncMarker(self, sync_id,
record_controller_clock_sync_marker_callback):
"""Records a clock sync marker in the BattOr trace.
Args:
sync_id: Unique id for sync event.
record_controller_clock_sync_marker_callback: Function that takes a sync
ID and a timestamp as arguments. This function typically will record the
tracing controller clock sync marker.
"""
timestamp = trace_time.Now()
self._battor.RecordClockSyncMarker(sync_id)
record_controller_clock_sync_marker_callback(sync_id, timestamp)
def CollectAgentTraceData(self, trace_data_builder, timeout=None):
data = self._battor.CollectTraceData(timeout=timeout)
trace_data_builder.SetTraceFor(trace_data.BATTOR_TRACE_PART, data)
| [
"[email protected]"
] | |
9b7d7ff0fef6c8d28943e6e8b4c7ba57787ce5a9 | ebc7607785e8bcd6825df9e8daccd38adc26ba7b | /python/baekjoon/2.algorithm/implementation/백준_웰컴.py | 4ead3ab821977dbf627364ad7c5fb431c2ca1181 | [] | no_license | galid1/Algorithm | 18d1b72b0d5225f99b193e8892d8b513a853d53a | 5bd69e73332f4dd61656ccdecd59c40a2fedb4b2 | refs/heads/master | 2022-02-12T07:38:14.032073 | 2022-02-05T08:34:46 | 2022-02-05T08:34:46 | 179,923,655 | 3 | 0 | null | 2019-06-14T07:18:14 | 2019-04-07T05:49:06 | Python | UTF-8 | Python | false | false | 141 | py | # . . .
# | | _ | _. _ ._ _ _
# |/\|(/.|(_.(_)[ | )(/.
print('. . .')
print('| | _ | _. _ ._ _ _')
print('|/\|(/.|(_.(_)[ | )(/.') | [
"[email protected]"
] | |
05a6dc50b44a0fa741f092b8ddad671c62f8a292 | 5ee05e771a97d465048c50a9f9490b8b0eb7c00d | /adminsavingcategory/urls.py | 06811381e6f63dddde32d3c2e9e21b9a74276420 | [] | no_license | ninsgosai/budget | 614bb01ef4a629d3a7e41205e653acadb23984fd | 722ae7f957913422f1b77247cec7d3b06156ad43 | refs/heads/master | 2023-03-26T14:52:30.242868 | 2021-03-21T07:33:49 | 2021-03-21T07:33:49 | 349,928,494 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 935 | py | from django.urls import path
from . import views
urlpatterns = [
path('', views.adminsavingcategory, name='adminsavingcategory'),
path('addsavingcategory', views.addsavingcategory, name='addsavingcategory'),
path('delete_saving_category', views.delete_saving_category, name='delete_saving_category'),
path('editsavingcategory', views.editsavingcategory, name='editsavingcategory'),
path('updatesaving_category', views.updatesaving_category, name='updatesaving_category'),
path('update_income_category', views.update_income_category, name='update_income_category'),
path('incomecategory', views.incomecategory, name='incomecategory'),
path('addincomecategory', views.addincomecategory, name='addincomecategory'),
path('editincomecategory', views.editincomecategory, name='editincomecategory'),
path('delete_income_category', views.delete_income_category, name='delete_income_category'),
]
| [
"[email protected]"
] | |
041fdadfc0f82c1828a1f8516e41fa0410f8185a | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /4me7LifXBwj5rhL4n_2.py | 65b6056baa34628bd261b66044b68f8b16ed99ad | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 45 | py |
circle_or_square=lambda r,a:a**.5*4<6.28*r
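# Reads as: is the perimeter of a square of area a (side a**0.5, perimeter
# 4*a**0.5) smaller than the circumference of a circle of radius r (~6.28*r)?
# Illustrative checks (assumed semantics of the exercise):
# circle_or_square(16, 1)  -> True   (4 < 100.48)
# circle_or_square(1, 16)  -> False  (16 < 6.28 fails)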
| [
"[email protected]"
] | |
03432933f40125fba98a10caa76b8817f552cba1 | 48a1c21f3b1596acb1e987b73677af65e9f9416a | /packagehashtable.py | 8030cf73f09e9a08a4d646c5ba71761d9687cc63 | [] | no_license | earthafire/c950 | fe982685d91453b157461e2eec385a6b16aeb990 | af1116d6ab834a63c9496fa054385970f33b4c1f | refs/heads/master | 2023-06-04T07:03:18.427522 | 2021-06-21T02:54:07 | 2021-06-21T02:54:07 | 354,667,745 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,800 | py | class PackageHashTable:
# big O: O(1)
def __init__(self):
self.current_size = 25
self.filled_slots = 0
self.data = [None] * self.current_size # list of packages
def get_filled_slots(self):
return self.filled_slots
def get_hash(self, value):
return value % self.current_size
def is_full(self): # check if array is close to full
if self.filled_slots > self.current_size - 10:
return True
else:
return False
    def double_table_capacity(self):  # double the array and rehash existing packages
        # get_hash() uses current_size as the modulus, so entries must be re-inserted
        old_data = self.data
        self.current_size *= 2
        self.data = [None] * self.current_size
        for package in old_data:
            if package is not None:
                self.data[self.get_hash(package.package_id)] = package
# big O: O(1)
def add_package(self, new_package):
package_num = self.get_hash(new_package.package_id)
if self.data[package_num] is None:
self.filled_slots += 1 # keeps track of how many full slots in order to prevent filling it up
self.data[package_num] = new_package # set index to new package
if self.is_full(): # if the hashtable is getting full, add another 40 slots
self.double_table_capacity()
# big O(1) time to retrieve package
def get_package(self, int_id):
package = self.data[self.get_hash(int_id)]
if package is None:
print("No such package found!")
return package
# big O(1) time to edit package
def edit_package_status(self, int_key, status):
package_to_edit = self.get_package(int_key)
if package_to_edit is None:
print("No such package found!")
else:
package_to_edit.status = status
def print_all_packages_status(self):
for item in self.data:
if item is not None:
item.print()
| [
"[email protected]"
] | |
d335e8ebc38e765fc6771cbf64c0c0dfd9c8c353 | b7dd07413c05a13207988535b755b7d28dbc5663 | /Chapter_7/7-3_multiples_10.py | 895c7529af5c2a587ee9e35165d9f19b3504417c | [] | no_license | GrnTeaLatte/AlienInvasion | b671a87cd730c3d4b31a8e8d760d2d02d576cfb3 | d60e8e65adb79e54a1e1c579825827355a7e85ea | refs/heads/main | 2023-02-26T03:55:26.799446 | 2020-11-03T00:42:06 | 2020-11-03T00:42:06 | 336,111,408 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 186 | py | number = input("Pick a number from 1 to 100.")
number = int(number)
if number %10 == 0:
print("This number is a multiple of ten.")
else:
print("This number is not a multiple of ten.") | [
"[email protected]"
] | |
cdc568f6f8b729c0cb3256a847415ecb9daf8769 | 78e60a7d8a67ed76244004e8a3ed573fbf396e41 | /samples/del_admin_role.py | ad52a172a1488dd2d1f632616ef30fbe40844ca8 | [
"MIT"
] | permissive | Crivez/apiclient-python | 837a9f7cc0453ccd3121311adc7920b5fe6b3e33 | 860fc054f546152a101e29b1af388c381075ac47 | refs/heads/master | 2023-06-08T13:24:09.249704 | 2021-06-17T12:16:35 | 2021-06-17T12:16:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 386 | py | from voximplant.apiclient import VoximplantAPI, VoximplantException
if __name__ == "__main__":
voxapi = VoximplantAPI("credentials.json")
# Delete the admin role.
ADMIN_ROLE_ID = 10
try:
res = voxapi.del_admin_role(admin_role_id=ADMIN_ROLE_ID)
print(res)
except VoximplantException as e:
print("Error: {}".format(e.message))
| [
"[email protected]"
] | |
ca9fd7db7af76cf7e25b5898992310a5733c1001 | 88e06bab1989c81a2dd649bb09b144fa7c958f89 | /leet_simplify_path.py | 4bcdad7a72949aab1ed4bd7e8b422c4511b91450 | [] | no_license | VaibhavD143/Coding | 4499526b22ee4ef13f66c3abcea671c80a8f748a | 5de3bae8891c7d174cbc847a37c3afb00dd28f0e | refs/heads/master | 2023-08-06T21:56:44.934954 | 2021-10-09T18:31:29 | 2021-10-09T18:31:29 | 263,890,286 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 352 | py | class Solution:
def simplifyPath(self, path: str) -> str:
ss = []
        for s in path.split('/'):
            if s and s != '.':
                if s == "..":
                    if ss:
                        ss.pop()
                else:
                    ss.append(s)
return '/' + '/'.join(ss)
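# Worked trace (illustrative): "/a/./b/../../c/".split('/') gives
# ['', 'a', '.', 'b', '..', '..', 'c', ''];
# the stack evolves [a] -> [a, b] -> [a] -> [] -> [c], so the result is "/c".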
| [
"[email protected]"
] | |
7565fccefc1f94b90892e24e8bc4c59ad706f8ce | 601c2b26b115e59002fd0d105daef714aa6803a2 | /Problem68.py | de6e4c370c5364a8f9fd371e752b2672507cabb7 | [] | no_license | madrury/euler | 4be0f28edbfdf5a10de1b3f68336f73dcc73455d | 04ad94fe67af09e48bd795a2cc9f229f7c752397 | refs/heads/master | 2021-01-17T07:40:46.930178 | 2018-01-21T23:49:21 | 2018-01-21T23:49:21 | 15,242,518 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,555 | py | from itertools import permutations, chain
def magic_3_ring():
S = set([1, 2, 3, 4, 5, 6])
for a_0, a_1, a_2 in permutations(S, 3):
b_0 = b_1 = b_2 = c_0 = c_1 = c_2 = None
s = a_0 + a_1 + a_2
b_1 = a_2
for b_0 in S - set([a_0, a_1, a_2]):
b_2 = c_1 = s - b_1 - b_0
if b_2 in S - set([a_0, a_1, a_2]) and b_2 != b_0:
for c_0 in S - set([a_0, a_1, a_2, b_0, b_1, b_2]):
c_2 = s - c_1 - c_0
if c_2 == a_1 and a_0 < b_0 and a_0 < c_0:
yield (a_0, a_1, a_2, b_0, b_1, b_2, c_0, c_1, c_2)
def magic_5_ring():
S = set([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
for A in permutations(S, 3):
A = list(A)
B, C, D, E = [None]*3, [None]*3, [None]*3, [None]*3
s = sum(A)
B[1] = A[2]
for b_0 in S - set(A):
B[0] = b_0
B[2] = C[1] = s - B[1] - B[0]
if B[2] in S - set(A) and B[2] != B[0]:
for c_0 in S - (set(A) | set(B)):
C[0] = c_0
C[2] = D[1] = s - C[1] - C[0]
if C[2] in S - (set(A) | set(B)) and C[2] != C[0]:
for d_0 in S - (set(A) | set(B) | set(C)):
D[0] = d_0
D[2] = E[1] = s - D[0] - D[1]
if D[2] in S - (set(A) | set(B) | set(C)) and D[2] != D[0]:
for e_0 in S - (set(A) | set(B) | set(C) | set(D)):
E[0] = e_0
E[2] = s - E[1] - E[0]
if E[2] == A[1] and E[2] != E[0] and A[0] < B[0] and A[0] < C[0] and A[0] < D[0] and A[0] < E[0]:
yield (A, B, C, D, E)
for ring in magic_5_ring():
print int("".join([str(i) for i in list(chain.from_iterable(ring))]))
| [
"[email protected]"
] | |
47eb91749a631a8839cbadb2b1ae5950f56d3da1 | 2834e05cfb56e16fb4a20a46d713ee3e7393a30a | /mzitu_spider.py | 0716848d851bd48611516e87812f40ec2161a5b2 | [] | no_license | jayhebe/Python_Data_Analysis | 29680f1a13ca5756b8fed96c75f7bf4ee432851e | 2bd39ce2688ef7fe4340810529f35799eff7de7a | refs/heads/master | 2020-07-04T16:29:59.470195 | 2020-01-14T15:12:42 | 2020-01-14T15:12:42 | 202,339,479 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,246 | py | from bs4 import BeautifulSoup
import requests
import os
import time
mzitu_folder_name = "mzitu_images"
mzitu_base_url = "https://www.mzitu.com"
mzitu_headers = {
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) \
Chrome/77.0.3865.120 Safari/537.36",
"Request": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
"Referer": "https://www.mzitu.com"
}
def get_page_info(url):
page_res = requests.get(url, headers=mzitu_headers)
page_res.encoding = page_res.apparent_encoding
page_bs = BeautifulSoup(page_res.text, "html.parser")
page_info = page_bs.find("ul", id="pins").find_all("span", class_="")
page_next = page_bs.find("a", class_="next page-numbers")
return page_info, page_next
def get_pic_url(page_url):
pic_res = requests.get(page_url, headers=mzitu_headers)
pic_bs = BeautifulSoup(pic_res.text, "html.parser")
pic_url = pic_bs.find("div", class_="main-image").find("img")["src"]
pic_pages = pic_bs.find("div", class_="pagenavi").find_all("a")
pic_next_page = ""
for pic_page in pic_pages:
if "下一页" in pic_page.text:
pic_next_page = pic_page["href"]
return pic_url, pic_next_page
def get_file_name(file_extension):
file_name = str(time.time()).replace(".", "") + file_extension
return os.path.sep.join([os.getcwd(), mzitu_folder_name, file_name])
def download_pic(pic_url):
print("Downloading: {}".format(pic_url))
with open(get_file_name(".jpg"), "wb") as pic_fp:
pic_content = requests.get(pic_url, headers=mzitu_headers).content
pic_fp.write(pic_content)
mzitu_page_info, mzitu_page_next = get_page_info(mzitu_base_url)
while mzitu_page_next:
for mzitu_pic_span in mzitu_page_info:
mzitu_pic_next_page = mzitu_pic_span.find("a")["href"]
# print(mzitu_pic_link["href"])
print("Parsing page: {}".format(mzitu_pic_next_page))
while True:
mzitu_pic_url, mzitu_pic_next_page = get_pic_url(mzitu_pic_next_page)
download_pic(mzitu_pic_url)
if not mzitu_pic_next_page:
break
mzitu_page_info, mzitu_page_next = get_page_info(mzitu_page_next["href"])
| [
"[email protected]"
] | |
0e042a6a418f78275e29bb9fe263d90fa38a2b3c | cf59d92614a3505aeed9455482ef327572578228 | /venv/lib/python3.6/site-packages/djoser/urls/authtoken.py | 87ed8962570463caffb44de8795bf7e87591014d | [
"MIT"
] | permissive | slarkjm0803/autobets | e1d1a3b00cf94ee90fd1fed7464431677b4f9e11 | f92a5d999acaf5d7c83ca2768a260c2282eabbee | refs/heads/master | 2020-09-23T21:40:46.057648 | 2019-11-29T11:42:37 | 2019-11-29T11:42:37 | 225,591,526 | 1 | 0 | MIT | 2019-12-03T10:22:21 | 2019-12-03T10:22:20 | null | UTF-8 | Python | false | false | 308 | py | from django.conf.urls import url
from djoser import views
urlpatterns = [
url(
r'^token/create/$',
views.TokenCreateView.as_view(),
name='token-create'
),
url(
r'^token/destroy/$',
views.TokenDestroyView.as_view(),
name='token-destroy'
),
]
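
# Typical project-level wiring (illustrative; this root urls.py snippet is an
# assumption, not part of djoser itself):
# from django.conf.urls import include, url
# urlpatterns = [url(r'^auth/', include('djoser.urls.authtoken'))]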
| [
"[email protected]"
] | |
ee80f003615f1943cc0c93612686b83872586dc6 | d52ee2f7ec5dcd8825f4e221a7f084d488d35634 | /new_scripts/baselines/dmass/tensor2tensor_old/models/aligned.py | a6eca3bab94b43e8af18bd325c617f3e1d66d3f7 | [] | no_license | rekriz11/sockeye-recipes | 9dbf96140e4d9d546210dd1c29801132e1b9201c | 644363b92e2f38311cc2b7e926b6558aa41900f3 | refs/heads/master | 2020-03-29T16:52:52.542574 | 2020-03-13T18:18:25 | 2020-03-13T18:18:25 | 150,131,769 | 5 | 3 | null | 2018-09-24T16:15:47 | 2018-09-24T16:15:46 | null | UTF-8 | Python | false | false | 17,800 | py | # coding=utf-8
# Copyright 2017 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single stack of transformations with no masking.
Produces output aligned with inputs.
Configurable using hyperparameters to use some combination of convolutions,
attention, mixtures of experts, etc.
A good problem for this model is languagemodel_wiki_scramble1k50.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
from tensor2tensor.layers import common_attention
from tensor2tensor.layers import common_hparams
from tensor2tensor.layers import common_layers
from tensor2tensor.utils import diet
from tensor2tensor.utils import expert_utils
from tensor2tensor.utils import registry
from tensor2tensor.utils import t2t_model
import tensorflow as tf
ModeKeys = tf.estimator.ModeKeys # pylint: disable=invalid-name
def _should_preprocess(layer_type):
return layer_type not in ["timing", "pos_emb", "att_memory_efficient"]
def _should_postprocess(layer_type):
return layer_type not in ["timing", "pos_emb"]
@registry.register_model
class Aligned(t2t_model.T2TModel):
"""Attention net. See file docstring."""
def model_fn_body_sharded(self, sharded_features):
# Remove dropout if not training
hparams = self._hparams
dp = self._data_parallelism
x = dp(tf.squeeze, sharded_features["inputs"], 2)
def preprocess(x):
return dp(common_layers.layer_preprocess, x, hparams)
def postprocess(x, y):
return dp(common_layers.layer_postprocess, x, y, hparams)
x = dp(tf.nn.dropout, x, 1.0 - hparams.layer_prepostprocess_dropout)
extra_loss = 0.0
ffn_hidden_sizes = [int(s) for s in hparams.ffn_hidden_sizes.split(",")]
moe_hidden_sizes = [int(s) for s in hparams.moe_hidden_sizes.split(",")]
if hparams.mask_right:
def _bias(x):
return common_attention.attention_bias_lower_triangle(
common_layers.shape_list(x)[1])
bias = dp(_bias, x)
else:
bias = tf.zeros([1, 1, 1, 1])
if hparams.diet_experts:
hsize, = moe_hidden_sizes
def _diet_expert(x):
return diet.diet_expert(x, hsize, diet.diet_adam_optimizer_params())
expert_fn = _diet_expert
else:
expert_fn = expert_utils.ffn_expert_fn(
hparams.hidden_size, moe_hidden_sizes, hparams.hidden_size)
batch_coordinate = dp(get_batch_coordinate, x)
layers = hparams.layers.strip(",").split(",")
for layer_num, layer_type in enumerate(layers):
with tf.variable_scope("%s_%d" % (layer_type, layer_num)):
if _should_preprocess(layer_type):
x = preprocess(x)
if layer_type == "timing":
y = dp(common_attention.add_timing_signal_nd, x)
elif layer_type == "pos_emb":
y = dp(
common_attention.add_positional_embedding_nd,
x,
hparams.max_length,
name="pos_emb")
elif layer_type == "att":
y = dp(
common_attention.multihead_attention,
x,
None,
bias, # bias
hparams.attention_key_channels or hparams.hidden_size,
hparams.attention_value_channels or hparams.hidden_size,
hparams.hidden_size,
hparams.num_heads,
hparams.attention_dropout)
elif layer_type == "att_grouped":
multiplicative_overhead = (
hparams.multiplicative_overhead if hparams.mode == ModeKeys.TRAIN
else hparams.multiplicative_overhead_eval)
y, loss = dp(
common_attention.grouped_attention_multihead,
x,
x,
hparams.attention_key_channels or hparams.hidden_size,
hparams.attention_value_channels or hparams.hidden_size,
hparams.hidden_size,
hparams.num_heads,
num_groups=hparams.attention_num_groups,
memory_target_density=hparams.memory_target_density,
multiplicative_overhead=multiplicative_overhead,
make_image_summary=hparams.attention_image_summary,
mask_right=hparams.mask_right,
)
extra_loss += tf.add_n(loss) / dp.n
elif layer_type == "att_memory_efficient":
assert hparams.layer_preprocess_sequence == "n"
y = dp(common_attention.multihead_self_attention_memory_efficient, x,
bias, hparams.num_heads)
elif layer_type == "att_local":
y = dp(
common_attention.multihead_attention,
x,
None,
None, # bias
hparams.attention_key_channels or hparams.hidden_size,
hparams.attention_value_channels or hparams.hidden_size,
hparams.hidden_size,
hparams.num_heads,
hparams.attention_dropout,
attention_type=("local_mask_right"
if hparams.mask_right else "local_unmasked"),
block_length=hparams.local_attention_window,
block_width=hparams.local_attention_window)
elif layer_type == "att_pseudolocal":
# This is an inefficient implementation of local attention, for the
# purpose of testing model quality.
def _pseudolocal_bias(x):
return common_attention.attention_bias_local(
common_layers.shape_list(x)[1], hparams.local_attention_window,
0 if hparams.mask_right else hparams.local_attention_window)
pseudolocal_bias = dp(_pseudolocal_bias, x)
y = dp(common_attention.multihead_attention, x, None,
pseudolocal_bias, hparams.attention_key_channels or
hparams.hidden_size, hparams.attention_value_channels or
hparams.hidden_size, hparams.hidden_size, hparams.num_heads,
hparams.attention_dropout)
elif layer_type == "att_local_expert":
y, loss = dp(
common_attention.local_expert_attention,
x,
k=hparams.attention_moe_k,
loss_coef=hparams.attention_load_balance,
attention_num_experts=hparams.attention_num_experts,
train=hparams.mode == ModeKeys.TRAIN,
batch_coordinate=batch_coordinate,
mask_right=hparams.mask_right,
split_batch=bool(hparams.attention_split_batch),
attention_kq_size=hparams.attention_kq_size,
attention_v_size=hparams.attention_v_size)
# TODO(avaswani, epot, noam): Do we need to divide by num shards ?
extra_loss += tf.add_n(loss) / dp.n
elif layer_type == "att_lsh":
if hparams.lsh_truncated:
attention_fn = common_attention.multihead_attention_sparse_truncated
else:
attention_fn = common_attention.multihead_attention_sparse_dot_prod
y, loss = dp(
attention_fn,
x,
None,
None, # Bias is computed inside
hparams.attention_key_channels or hparams.hidden_size,
hparams.attention_value_channels or hparams.hidden_size,
hparams.hidden_size,
hparams.num_heads,
hparams.attention_dropout,
# Additional parameters
bi=[
common_attention.BatchInfo(
coordinates=batch_coordinate[i],
order=None, # No future mask
) for i in range(dp.n)
],
use_map_fn=False,
experts_params=dict(nb_hyperplanes=4,))
extra_loss += tf.add_n(loss) / dp.n
elif layer_type == "moe":
y, loss = expert_utils.distributed_moe(
dp,
self._ps_devices,
x,
hparams.mode == ModeKeys.TRAIN,
input_size=hparams.hidden_size,
expert_fn=expert_fn,
num_experts=hparams.moe_num_experts,
k=hparams.moe_k,
loss_coef=hparams.moe_loss_coef)
extra_loss += loss
elif layer_type == "ffn":
y = dp(
expert_utils.ffn_expert_fn(hparams.hidden_size, ffn_hidden_sizes,
hparams.hidden_size),
dp(expert_utils.flatten_all_but_last, x))
y = dp(expert_utils.reshape_like, y, x)
elif layer_type == "conv":
y = dp(
common_layers.conv1d,
x,
hparams.hidden_size,
hparams.kernel_height,
activation=tf.nn.relu,
padding="SAME",
)
else:
assert False, "unknown sublayer %s" % layer_type
if _should_postprocess(layer_type):
x = postprocess(x, y)
else:
x = y
x = preprocess(x)
decoder_output = dp(tf.expand_dims, x, 2)
return decoder_output, extra_loss
def get_batch_coordinate(x):
"""Return a flat int32 tensor of shape [1, batch_size*length, 1]."""
# Compute the batch coordinate before flattening all batches
batch_coordinate = tf.expand_dims(
common_attention.coordinate_tensor(
common_layers.shape_list(x)[:-1], axis=0),
axis=-1)
return batch_coordinate
@registry.register_hparams
def aligned_base():
"""Set of hyperparameters.
languagemodel_wiki_scramble1k50, 1gpu, 7k steps (10min): log(ppl)_eval = 2.60
12.0 steps/sec on P100
8gpu (8x batch), 7k steps: log(ppl)_eval = 2.00
Returns:
a hparams object
"""
hparams = common_hparams.basic_params1()
hparams.hidden_size = 512
hparams.batch_size = 5000
hparams.max_length = 0
hparams.min_length_bucket = 1024
hparams.dropout = 0.0
hparams.layer_prepostprocess_dropout = 0.0
hparams.label_smoothing = 0.0
hparams.clip_grad_norm = 0. # i.e. no gradient clipping
hparams.optimizer_adam_epsilon = 1e-9
hparams.learning_rate_decay_scheme = "noam"
hparams.learning_rate = 0.1
hparams.learning_rate_warmup_steps = 2000
hparams.initializer_gain = 1.0
hparams.initializer = "uniform_unit_scaling"
hparams.weight_decay = 0.0
hparams.optimizer_adam_beta1 = 0.9
hparams.optimizer_adam_beta2 = 0.98
hparams.shared_embedding_and_softmax_weights = True
hparams.add_hparam("ffn_hidden_sizes", "2048") # Add new ones like this.
hparams.moe_num_experts = 32
hparams.layer_preprocess_sequence = "n"
hparams.layer_postprocess_sequence = "da"
hparams.add_hparam("layers", "timing," + "conv,att,ffn," * 2)
# attention-related flags
hparams.add_hparam("num_heads", 8)
hparams.add_hparam("attention_key_channels", 0)
hparams.add_hparam("attention_value_channels", 0)
# All hyperparameters ending in "dropout" are automatically set to 0.0
# when not in training mode.
hparams.add_hparam("attention_dropout", 0.0)
hparams.add_hparam("pos", "timing") # timing, none
# moe params. local attention moe.
hparams.add_hparam("attention_local", False)
hparams.add_hparam("attention_moe_k", 2)
hparams.add_hparam("attention_num_experts", 16)
hparams.add_hparam("attention_split_batch", False)
# Key, query and value dimensions for the attention
hparams.add_hparam("attention_kq_size", 128)
hparams.add_hparam("attention_v_size", 256)
# Loss coef for load balancing
hparams.add_hparam("attention_load_balance", 2e-2)
hparams.add_hparam("diet_experts", False)
hparams.add_hparam("memory_efficient_ffn", False)
hparams.add_hparam("local_attention_window", 128)
hparams.add_hparam("attention_num_groups", 8)
hparams.add_hparam("memory_target_density", 2.0)
hparams.add_hparam("multiplicative_overhead", 1.25)
hparams.add_hparam("multiplicative_overhead_eval", 2.0)
hparams.add_hparam("attention_image_summary", True)
# LSH params
hparams.add_hparam("lsh_truncated", True)
# For testing right-masking.
# This is not implemented in all layers.
hparams.add_hparam("mask_right", False)
return hparams
@registry.register_hparams
def aligned_memory_efficient():
"""Use multihead_self_attention_memory_efficient.
languagemodel_wiki_scramble1k50, 1gpu, 7k steps: log(ppl)_eval = 2.59
8.7 steps/sec on P100
8gpu (8x batch), 7k steps: log(ppl)_eval = 2.02
Returns:
a hparams object
"""
hparams = aligned_base()
hparams.layers = "timing," + "conv,att_memory_efficient,ffn," * 2
return hparams
@registry.register_hparams
def aligned_local_expert():
"""Use local_expert_attention.
languagemodel_wiki_scramble1k50, 1gpu, 7k steps: log(ppl)_eval = 2.72
10.2 steps/sec on P100
8gpu (8x batch), 7k steps: log(ppl)_eval = 2.27
Returns:
a hparams object
"""
hparams = aligned_base()
hparams.layers = "timing," + "conv,att_local_expert,ffn," * 2
return hparams
@registry.register_hparams
def aligned_grouped():
"""Use local_expert_attention.
languagemodel_wiki_scramble1k50, 1gpu, 7k steps: log(ppl)_eval = 2.63
10.2 steps/sec on P100
8gpu (8x batch), 7k steps: log(ppl)_eval = 2.04
Returns:
a hparams object
"""
hparams = aligned_base()
hparams.layers = "timing," + "conv,att_grouped,ffn," * 2
return hparams
@registry.register_hparams
def aligned_local():
"""Use local attention code.
languagemodel_wiki_scramble1k50, 1gpu, 7k steps: log(ppl)_eval = 2.57
12.8 steps/sec on P100
8gpu (8x batch), 7k steps: log(ppl)_eval = 2.08
Returns:
a hparams object
"""
hparams = aligned_base()
hparams.layers = "timing," + "conv,att_local,ffn," * 2
return hparams
@registry.register_hparams
def aligned_local_1k():
"""Use local attention code, attend to full sequence.
languagemodel_wiki_scramble1k50, 1gpu, 7k steps: log(ppl)_eval = 2.57
7.5 steps/sec on P100
8gpu (8x batch), 7k steps: log(ppl)_eval = 2.00
Returns:
a hparams object
"""
hparams = aligned_local()
hparams.local_attention_window = 1024
return hparams
@registry.register_hparams
def aligned_pseudolocal():
"""Use a bias to simulate local attention. attention radius 128.
languagemodel_wiki_scramble1k50, 1gpu, 7k steps: log(ppl)_eval = 2.57
12.0 steps/sec on P100
8gpu (8x batch), 7k steps: log(ppl)_eval = 2.06
Returns:
a hparams object
"""
hparams = aligned_base()
hparams.layers = "timing," + "conv,att_pseudolocal,ffn," * 2
return hparams
@registry.register_hparams
def aligned_pseudolocal_256():
"""Use a bias to simulate local attention. attentio radius 256.
languagemodel_wiki_scramble1k50, 1gpu, 7k steps: log(ppl)_eval = 2.56
12.0 steps/sec on P100
8gpu (8x batch), 7k steps: log(ppl)_eval = 2.05
Returns:
a hparams object
"""
hparams = aligned_pseudolocal()
hparams.local_attention_window = 256
return hparams
@registry.register_hparams
def aligned_no_timing():
"""No timing signal.
languagemodel_wiki_scramble1k50, 1gpu, 7k steps: log(ppl)_eval = 2.75
12.3 steps/sec on P100
8gpu (8x batch), 7k steps: log(ppl)_eval = 2.39
Returns:
a hparams object
"""
hparams = aligned_base()
hparams.layers = "conv,att,ffn," * 2
return hparams
@registry.register_hparams
def aligned_no_att():
"""No attention at all.
languagemodel_wiki_scramble1k50, 1gpu, 7k steps: log(ppl)_eval = 2.89
20.8 steps/sec on P100
8gpu (8x batch), 7k steps: log(ppl)_eval = 2.70
Returns:
a hparams object
"""
hparams = aligned_base()
hparams.layers = "conv,ffn," * 2
return hparams
@registry.register_hparams
def aligned_pos_emb():
"""positional embedding insead of timing signal.
languagemodel_wiki_scramble1k50, 1gpu, 7k steps: log(ppl)_eval = 2.67
12.1 steps/sec on P100
8gpu (8x batch), 7k steps: log(ppl)_eval = 2.00
Returns:
a hparams object
"""
hparams = aligned_base()
hparams.layers = "pos_emb," + "conv,att,ffn," * 2
return hparams
@registry.register_hparams
def aligned_moe():
"""mixture of experts instead of ffn.
languagemodel_wiki_scramble1k50, 1gpu, 7k steps: log(ppl)_eval = 2.62
6.7 steps/sec on P100
8gpu (8x batch), 7k steps: log(ppl)_eval = 1.94
Returns:
a hparams object
"""
hparams = aligned_base()
hparams.layers = "timing," + "conv,att,moe," * 2
return hparams
@registry.register_hparams
def aligned_lsh():
"""Use multihead_attention_sparse_dot_prod.
Returns:
a hparams object
"""
hparams = aligned_base()
hparams.layers = "timing," + "conv,att_lsh,ffn," * 2
return hparams
@registry.register_hparams
def aligned_8k():
"""version for languagemodel_wiki_scramble8k50.
languagemodel_wiki_scramble1k50, 1gpu, 7k steps: log(ppl)_eval = 2.93
1.5 steps/sec on P100
Returns:
a hparams object
"""
hparams = aligned_base()
hparams.batch_size = 8192
return hparams
@registry.register_hparams
def aligned_8k_grouped():
"""version for languagemodel_wiki_scramble8k50.
languagemodel_wiki_scramble1k50, 1gpu, 7k steps: log(ppl)_eval = 2.92
3.3 steps/sec on P100
8gpu (8x batch), 7k steps: log(ppl)_eval = 2.15
Returns:
a hparams object
"""
hparams = aligned_grouped()
hparams.batch_size = 8192
# hparams.attention_image_summary = False
hparams.num_groups = 16
hparams.multiplicative_overhead = 1.1
return hparams
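# Sketch (ours, not part of the original file): each "layers" value above is a
# comma-separated spec with a trailing comma, so a consumer recovers the layer
# sequence with a filtered split, e.g.
#   [name for name in hparams.layers.split(",") if name]
# turns "timing,conv,att_grouped,ffn,conv,att_grouped,ffn," into
#   ['timing', 'conv', 'att_grouped', 'ffn', 'conv', 'att_grouped', 'ffn']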
| [
"[email protected]"
] | |
95205781cf62e93ad3b0b25c8c12a11ba6a87ce3 | 462c56e7454c97e0541588b9be66a4e216ea20fd | /337.house-robber-iii.py | b7df910aa92e9bed203055651ced6ea30bbb2e43 | [] | no_license | LouisYLWang/leetcode_python | d5ac6289e33c5d027f248aa3e7dd66291354941c | 2ecaeed38178819480388b5742bc2ea12009ae16 | refs/heads/master | 2020-05-27T08:38:48.532000 | 2019-12-28T07:08:57 | 2019-12-28T07:08:57 | 188,549,256 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,498 | py | #
# @lc app=leetcode id=337 lang=python3
#
# [337] House Robber III
#
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
#
class Solution:
    def rob(self, root: TreeNode) -> int:
        # Post-order DP: each call returns a pair
        # [best take if this node is NOT robbed, best take if it IS robbed].
        def rob_sub(root):
            if not root:
                return [0, 0]
            left = rob_sub(root.left)
            right = rob_sub(root.right)
            # Skip this node: each child subtree contributes its own max.
            # Rob this node: both children must be skipped (index 0).
            return [max(left) + max(right), root.val + left[0] + right[0]]
        return max(rob_sub(root))
# The alternative below uses recursion with memoization, but it does not perform well.
'''class Solution:
rob_map = dict()
def rob(self, root: TreeNode) -> int:
def cur_out(root):
if root in self.rob_map:
if 0 in self.rob_map[root]:
return self.rob_map[root][0]
ans_i_i = 0
ans_o_o = 0
ans_i_o = 0
ans_o_i = 0
if root.left or root.right:
if root.left:
ans_i_i += cur_in(root.left)
ans_o_o += cur_out(root.left)
ans_i_o += cur_in(root.left)
ans_o_i += cur_out(root.left)
if root.right:
ans_i_i += cur_in(root.right)
ans_o_o += cur_out(root.right)
ans_o_i += cur_in(root.right)
ans_i_o += cur_out(root.right)
ans = max(ans_i_i, ans_o_o, ans_o_i, ans_i_o)
if root not in self.rob_map:
self.rob_map[root] = dict()
self.rob_map[root][0] = ans
return self.rob_map[root][0]
def cur_in(root):
if root in self.rob_map:
if 1 in self.rob_map[root]:
return self.rob_map[root][1]
ans = root.val
if root.left or root.right:
if root.left:
ans += cur_out(root.left)
if root.right:
ans += cur_out(root.right)
if root not in self.rob_map:
self.rob_map[root] = dict()
self.rob_map[root][1] = ans
return ans
if root:
return max(cur_out(root), cur_in(root))
return 0'''
| [
"[email protected]"
] | |
31356e0d99a19ee522cda865378a49b0ec1c5187 | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /benchmark/startCirq1623.py | debc4e59f6ac2116d11df4140a57d981a774db94 | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,252 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py
# qubit number=5
# total number=60
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
#thatsNoCode
from cirq.contrib.svg import SVGCircuit
# Symbols for the rotation angles in the QAOA circuit.
def make_circuit(n: int, input_qubit):
c = cirq.Circuit() # circuit begin
c.append(cirq.H.on(input_qubit[0])) # number=3
c.append(cirq.H.on(input_qubit[1])) # number=4
c.append(cirq.H.on(input_qubit[2])) # number=5
c.append(cirq.H.on(input_qubit[3])) # number=6
c.append(cirq.H.on(input_qubit[0])) # number=38
c.append(cirq.CZ.on(input_qubit[1],input_qubit[0])) # number=39
c.append(cirq.H.on(input_qubit[0])) # number=40
c.append(cirq.H.on(input_qubit[0])) # number=49
c.append(cirq.CZ.on(input_qubit[1],input_qubit[0])) # number=50
c.append(cirq.H.on(input_qubit[0])) # number=51
c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=52
c.append(cirq.Z.on(input_qubit[1])) # number=53
c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=54
c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=47
c.append(cirq.H.on(input_qubit[0])) # number=32
c.append(cirq.CZ.on(input_qubit[1],input_qubit[0])) # number=33
c.append(cirq.H.on(input_qubit[0])) # number=34
c.append(cirq.X.on(input_qubit[4])) # number=48
c.append(cirq.H.on(input_qubit[4])) # number=21
for i in range(2):
c.append(cirq.H.on(input_qubit[0])) # number=1
c.append(cirq.H.on(input_qubit[1])) # number=2
c.append(cirq.H.on(input_qubit[2])) # number=7
c.append(cirq.H.on(input_qubit[3])) # number=8
c.append(cirq.H.on(input_qubit[0])) # number=57
c.append(cirq.CZ.on(input_qubit[3],input_qubit[0])) # number=58
c.append(cirq.H.on(input_qubit[0])) # number=59
c.append(cirq.Z.on(input_qubit[3])) # number=42
c.append(cirq.CNOT.on(input_qubit[3],input_qubit[0])) # number=43
c.append(cirq.CNOT.on(input_qubit[1],input_qubit[3])) # number=44
c.append(cirq.H.on(input_qubit[0])) # number=17
c.append(cirq.H.on(input_qubit[1])) # number=18
c.append(cirq.H.on(input_qubit[2])) # number=19
c.append(cirq.H.on(input_qubit[3])) # number=20
c.append(cirq.X.on(input_qubit[0])) # number=9
c.append(cirq.H.on(input_qubit[1])) # number=56
c.append(cirq.X.on(input_qubit[1])) # number=10
c.append(cirq.X.on(input_qubit[2])) # number=11
c.append(cirq.rx(-2.9845130209103035).on(input_qubit[4])) # number=55
c.append(cirq.CNOT.on(input_qubit[0],input_qubit[3])) # number=35
c.append(cirq.X.on(input_qubit[3])) # number=36
c.append(cirq.CNOT.on(input_qubit[0],input_qubit[3])) # number=37
c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=24
c.append(cirq.X.on(input_qubit[0])) # number=25
c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=26
c.append(cirq.X.on(input_qubit[1])) # number=14
c.append(cirq.X.on(input_qubit[2])) # number=15
c.append(cirq.X.on(input_qubit[3])) # number=16
c.append(cirq.X.on(input_qubit[1])) # number=22
c.append(cirq.X.on(input_qubit[1])) # number=23
# circuit end
c.append(cirq.measure(*input_qubit, key='result'))
return c
def bitstring(bits):
return ''.join(str(int(b)) for b in bits)
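# Note (ours): passing fold_func=bitstring to result.histogram below collapses
# each 5-qubit measurement into a key such as '01011', so `frequencies` maps
# bitstrings to how often they were sampled.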
if __name__ == '__main__':
qubit_count = 5
input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
circuit = make_circuit(qubit_count,input_qubits)
circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')
circuit_sample_count =2000
simulator = cirq.Simulator()
result = simulator.run(circuit, repetitions=circuit_sample_count)
frequencies = result.histogram(key='result', fold_func=bitstring)
writefile = open("../data/startCirq1623.csv","w+")
print(format(frequencies),file=writefile)
print("results end", file=writefile)
print(circuit.__len__(), file=writefile)
print(circuit,file=writefile)
writefile.close() | [
"[email protected]"
] | |
448d240cac3a1caba7534ece90ad949bfc2df05b | 2342b8737b9ffeb9715158b8ec74a33c7a4947f6 | /koku/providers/test/azure/test_client.py | c77e19be39011df1ea5a3260c8323920ed3ce2e7 | [
"Apache-2.0"
] | permissive | project-koku/koku | 444d8df05da5416c9cee606c42481c99be45f13d | 0416e5216eb1ec4b41c8dd4999adde218b1ab2e1 | refs/heads/main | 2023-08-20T11:30:17.510182 | 2023-08-17T18:27:30 | 2023-08-17T18:27:30 | 126,496,611 | 225 | 94 | Apache-2.0 | 2023-09-14T17:38:08 | 2018-03-23T14:29:23 | Python | UTF-8 | Python | false | false | 5,298 | py | #
# Copyright 2021 Red Hat Inc.
# SPDX-License-Identifier: Apache-2.0
#
"""Test Azure Client Class."""
import random
from unittest.mock import patch
from azure.identity import ClientSecretCredential
from azure.mgmt.costmanagement import CostManagementClient
from azure.mgmt.resource import ResourceManagementClient
from azure.mgmt.storage import StorageManagementClient
from azure.storage.blob import BlobServiceClient
from django.test import TestCase
from faker import Faker
from providers.azure.client import AzureClientFactory
FAKE = Faker()
class AzureClientFactoryTestCase(TestCase):
"""Parent Class for AzureClientFactory test cases."""
def setUp(self):
"""Test case setup."""
self.clouds = ["china", "germany", "public", "usgov"]
@patch("providers.azure.client.ClientSecretCredential.get_token")
def test_constructor(self, mock_get_token):
"""Test that we can create an AzureClientFactory object."""
obj = AzureClientFactory(
subscription_id=FAKE.uuid4(),
tenant_id=FAKE.uuid4(),
client_id=FAKE.uuid4(),
client_secret=FAKE.word(),
cloud=random.choice(self.clouds),
)
self.assertTrue(isinstance(obj, AzureClientFactory))
@patch("providers.azure.client.ClientSecretCredential.get_token")
def test_costmanagement_client(self, mock_get_token):
"""Test the costmanagement_client property."""
obj = AzureClientFactory(
subscription_id=FAKE.uuid4(),
tenant_id=FAKE.uuid4(),
client_id=FAKE.uuid4(),
client_secret=FAKE.word(),
cloud=random.choice(self.clouds),
)
self.assertTrue(isinstance(obj.cost_management_client, CostManagementClient))
@patch("providers.azure.client.ClientSecretCredential.get_token")
def test_credentials(self, mock_get_token):
"""Test the credentials property."""
obj = AzureClientFactory(
subscription_id=FAKE.uuid4(),
tenant_id=FAKE.uuid4(),
client_id=FAKE.uuid4(),
client_secret=FAKE.word(),
cloud=random.choice(self.clouds),
)
self.assertTrue(isinstance(obj._credentials, ClientSecretCredential))
@patch("providers.azure.client.ClientSecretCredential.get_token")
def test_resource_client(self, mock_get_token):
"""Test the resource_client property."""
obj = AzureClientFactory(
subscription_id=FAKE.uuid4(),
tenant_id=FAKE.uuid4(),
client_id=FAKE.uuid4(),
client_secret=FAKE.word(),
cloud=random.choice(self.clouds),
)
self.assertTrue(isinstance(obj.resource_client, ResourceManagementClient))
@patch("providers.azure.client.ClientSecretCredential.get_token")
def test_storage_client(self, mock_get_token):
"""Test the storage_client property."""
obj = AzureClientFactory(
subscription_id=FAKE.uuid4(),
tenant_id=FAKE.uuid4(),
client_id=FAKE.uuid4(),
client_secret=FAKE.word(),
cloud=random.choice(self.clouds),
)
self.assertTrue(isinstance(obj.storage_client, StorageManagementClient))
@patch("providers.azure.client.ClientSecretCredential.get_token")
def test_subscription_id(self, mock_get_token):
"""Test the subscription_id property."""
subscription_id = FAKE.uuid4()
obj = AzureClientFactory(
subscription_id=subscription_id,
tenant_id=FAKE.uuid4(),
client_id=FAKE.uuid4(),
client_secret=FAKE.word(),
cloud=random.choice(self.clouds),
)
        self.assertEqual(obj.subscription_id, subscription_id)
@patch("providers.azure.client.ClientSecretCredential.get_token")
def test_cloud_storage_account(self, mock_get_token):
"""Test the cloud_storage_account method."""
subscription_id = FAKE.uuid4()
resource_group_name = FAKE.word()
storage_account_name = FAKE.word()
obj = AzureClientFactory(
subscription_id=subscription_id,
tenant_id=FAKE.uuid4(),
client_id=FAKE.uuid4(),
client_secret=FAKE.word(),
cloud=random.choice(self.clouds),
)
with patch.object(StorageManagementClient, "storage_accounts", return_value=None):
cloud_account = obj.cloud_storage_account(resource_group_name, storage_account_name)
self.assertTrue(isinstance(cloud_account, BlobServiceClient))
@patch("providers.azure.client.ClientSecretCredential.get_token")
def test_scope_and_export_name(self, mock_get_token):
"""Test the scope and export_name properties."""
subscription_id = FAKE.uuid4()
scope = f"/subscriptions/{subscription_id}"
export_name = "cost_export"
obj = AzureClientFactory(
subscription_id=subscription_id,
tenant_id=FAKE.uuid4(),
client_id=FAKE.uuid4(),
client_secret=FAKE.word(),
cloud=random.choice(self.clouds),
scope=scope,
export_name=export_name,
)
        self.assertEqual(obj.scope, scope)
        self.assertEqual(obj.export_name, export_name)
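# Note (ours): patching ClientSecretCredential.get_token keeps these tests
# offline -- no real Azure AD token request is made while the factory and its
# clients are constructed.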
| [
"[email protected]"
] | |
dd4233f9877fc797e7015caf2f2eea6ec7f0105b | 149baa65329d0e13ae3189b8127d2eff5f5fdf77 | /bot_ws/build/ur5_pkg/ur5/catkin_generated/pkg.installspace.context.pc.py | 285472e4a9fe5b9bac59158a6b63e020bc6e13d7 | [] | no_license | mtbthebest/imitation_learning | 20b990aa7396fecbe5433c7703f353bf99fa5f2c | 4c08192e31062f69056cc36efffb7a2ce0264244 | refs/heads/master | 2020-05-21T17:05:26.567273 | 2019-09-16T12:20:19 | 2019-09-16T12:20:19 | 186,111,790 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 368 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "ur5"
PROJECT_SPACE_DIR = "/home/mtb/sim_ws/bot_ws/install"
PROJECT_VERSION = "0.0.0"
| [
"[email protected]"
] | |
2bd53cdf3cf4e912f272d060d6e07a9650c5bf45 | 7ba5ec9aa9ddca3f9b3384fc4457b0a865c2a0a1 | /src/559.py | f063866e484fd03a1aa4d26b5e7b19a1d8e94cd2 | [] | no_license | ecurtin2/Project-Euler | 71f79ee90a9abd0943421677d78a6c087419e500 | 79479da7a45b3ae67c0c7ea24da5f7d43c6f25d3 | refs/heads/master | 2021-03-19T14:52:57.045443 | 2018-04-12T22:05:37 | 2018-04-12T22:05:37 | 100,059,180 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 629 | py | """
An ascent of a column j in a matrix occurs if the value of column j is smaller than the value of column j+1 in all rows.
Let P(k, r, n) be the number of r x n matrices with the following properties:
The rows are permutations of {1, 2, 3, ... , n}.
Numbering the first column as 1, a column ascent occurs at column j<n if and only if j is not a multiple of k.
For example, P(1, 2, 3) = 19, P(2, 4, 6) = 65508751 and P(7, 5, 30) mod 1000000123 = 161858102.
Let Q(n) = sum_{k=1}^{n} P(k, n, n).
For example, Q(5) = 21879393751 and Q(50) mod 1000000123 = 819573537.
Find Q(50000) mod 1000000123.
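A naive brute-force check of the definition on tiny inputs (an illustrative
sketch of ours -- the helper name brute_P is invented, and it is far too slow
for Q(50000)):

    from itertools import permutations, product

    def brute_P(k, r, n):
        count = 0
        for rows in product(permutations(range(1, n + 1)), repeat=r):
            ok = True
            for j in range(1, n):  # ascent at column j iff all rows increase
                ascent = all(row[j - 1] < row[j] for row in rows)
                if ascent != (j % k != 0):
                    ok = False
                    break
            count += ok
        return count

    print(brute_P(1, 2, 3))  # 19, matching the example above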
""" | [
"[email protected]"
] | |
2970d8227cccf50e65da0dda71c84de76531c099 | e510a82771967d8677ccb77be7d8fe199970ec39 | /setup.py | 94c41706ab048c6d1b4f753918410c242eff4509 | [] | no_license | mbr/ragstoriches | 4c1a7501a05f40aad13aa796105df5ce57beb9b9 | aa11405673dfd307915e38110145cede72892804 | refs/heads/master | 2023-06-06T20:49:07.700820 | 2015-12-23T10:53:40 | 2015-12-23T10:53:40 | 8,688,096 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 825 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='ragstoriches',
version='0.4.1dev',
description='Develop highly-concurrent web scrapers, easily.',
long_description=read('README.rst'),
author='Marc Brinkmann',
author_email='[email protected]',
url='http://github.com/mbr/ragstoriches',
license='MIT',
install_requires=['gevent', 'logbook', 'requests', 'requests_cache',
'stuf', 'colorama', 'python-dateutil'],
packages=find_packages(exclude=['test']),
entry_points={
'console_scripts': [
'ragstoriches = ragstoriches.apps:run_scraper',
],
})
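# Note (ours): after `pip install ragstoriches`, the console_scripts entry
# point above installs a `ragstoriches` command that invokes
# ragstoriches.apps:run_scraper.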
| [
"[email protected]"
] | |
75aa6901d1a170000bddbec4f1128620ace9611a | 1e5026118b9777f2274dc4fe9e7ff459a61430c4 | /redit/wsgi.py | d77c2f2ca8fe7560f62bf5191d2f0e9c54e7d36f | [] | no_license | Rubyroy12/reddit | 70a50980d02f1b10e8bae3b034327f73ab38be94 | 4d0e29bfa813c5ee42cc8b38647b3c5538e3c376 | refs/heads/master | 2023-06-18T06:58:02.900692 | 2021-07-16T00:09:06 | 2021-07-16T00:09:06 | 386,455,864 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 387 | py | """
WSGI config for redit project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'redit.settings')
application = get_wsgi_application()
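# Typical deployment sketch (ours; the server choice is an assumption):
#   gunicorn redit.wsgi:application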
| [
"[email protected]"
] | |
847e7826459e7dffae6d5bcde0376c4b7086d6d5 | 186f694b65b43cd56e746ce8538e4f1edad6129e | /1on1/BFS/547-friendcircle.py | 373d30f3c54ec2cd1aa12674be315d70fc5f22f8 | [] | no_license | zionhjs/algorithm_repo | 287486e0173e68cfa9e535490004c952192a54db | 26b4a770d5335abd738ae26c68d91f6af7b13749 | refs/heads/master | 2022-12-17T15:59:17.932490 | 2020-09-23T04:12:38 | 2020-09-23T04:12:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 205 | py | class Solution:
    def findCircleNum(self, M: List[List[int]]) -> int:
        # Friend circles = connected components of the friendship matrix M
        # (DFS here; a BFS queue works the same way).
        if not M:
            return 0
        seen = set()
        def dfs(i):
            seen.add(i)
            for j in range(len(M)):
                if M[i][j] and j not in seen:
                    dfs(j)
        # Each traversal from an unvisited student covers one whole circle.
        return sum(dfs(i) or 1 for i in range(len(M)) if i not in seen)
| [
"[email protected]"
] | |
1681cf3956952c3b2add6ceb0f3ae7b5d6d21f62 | dd31ec8f3f979b0339cf686ce9094def03ef003a | /myvenv/Lib/site-packages/pylint/reporters/json.py | be37b21102933ca7fe5d330945a71fcc8280180a | [
"MIT"
] | permissive | rvmoura96/projeto-almoxarifado | 872bb945b4057bdbf108776e2101e9966a23f4de | 4ca5e5d00f449a940f7c601479bb3fe14c54f012 | refs/heads/master | 2022-11-11T07:45:33.475443 | 2017-11-21T21:13:19 | 2017-11-21T21:13:19 | 106,044,249 | 1 | 1 | MIT | 2022-10-26T05:02:32 | 2017-10-06T19:48:08 | Python | UTF-8 | Python | false | false | 1,662 | py | # Copyright (c) 2015-2016 Claudiu Popa <[email protected]>
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
"""JSON reporter"""
from __future__ import absolute_import, print_function
import cgi
import json
import sys
from pylint.interfaces import IReporter
from pylint.reporters import BaseReporter
class JSONReporter(BaseReporter):
"""Report messages and layouts in JSON."""
__implements__ = IReporter
name = 'json'
extension = 'json'
def __init__(self, output=sys.stdout):
BaseReporter.__init__(self, output)
self.messages = []
def handle_message(self, msg):
"""Manage message of different type and in the context of path."""
self.messages.append({
'type': msg.category,
'module': msg.module,
'obj': msg.obj,
'line': msg.line,
'column': msg.column,
'path': msg.path,
'symbol': msg.symbol,
# pylint: disable=deprecated-method; deprecated since 3.2.
'message': cgi.escape(msg.msg or ''),
})
def display_messages(self, layout):
"""Launch layouts display"""
if self.messages:
print(json.dumps(self.messages, indent=4), file=self.out)
def display_reports(self, layout): # pylint: disable=arguments-differ
"""Don't do nothing in this reporter."""
def _display(self, layout):
"""Don't do nothing."""
def register(linter):
"""Register the reporter classes with the linter."""
linter.register_reporter(JSONReporter)
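# Usage note (ours): this reporter is selected with
# `pylint --output-format=json <module>`; register() above is the hook pylint
# calls when loading a reporter module.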
| [
"[email protected]"
] | |
219305932842bb0a951cda0aab9e9783eb969379 | f361126ee099303113b5ed3cc0e838bd01a9e41b | /Semana3/pattern.py | ce21f32f68915c1c4ac487f6d46600d7fa9a86c0 | [] | no_license | ju-c-lopes/Univesp_Algoritmos_II | e1ce5557d342ea75fe929cf7b207e633f9aa89cd | 5d4eec368be91c18f0ae5c17d342e6eb0f1c79be | refs/heads/master | 2023-06-05T11:09:25.415719 | 2021-07-07T22:26:53 | 2021-07-07T22:26:53 | 383,600,096 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 148 | py | def pattern(n):
if n == 0:
print(n, end=' ')
else:
pattern(n-1)
print(n, end=' ')
pattern(n-1)
# Recursive "ruler" pattern: pattern(3) prints 0 1 0 2 0 1 0 3 0 1 0 2 0 1 0
pattern(3)
| [
"[email protected]"
] | |
600b5d49487605e376e745ff50c7047d387368b4 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/303/usersdata/288/105862/submittedfiles/testes.py | 2d998344bd13ef5f84800052822c63b3268d44ad | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,713 | py | # -*- coding: utf-8 -*-
# START HERE BELOW
#from minha_bib import *
print("7 | 8 | 9")
import random
def solicitaSimboloDoHumano():
simbolo=0
while (simbolo)!= 'X' and (simbolo)!='O':
print ("Escolha um simbolo para jogar [X:O]: ")
simbolo=input()
if simbolo=='O':
return ['O','X']
else:
return ['X','O']
def sorteioPrimeiraJogada():
    # Coin flip to decide who plays first; 'voce' matches the turn checks below.
    if random.randint(0, 1) == 1:
        return 'Computador'
    return 'voce'
# a move is a board position (vector/matrix index):
def jogadaHumana(tabuleiro):
movimento = 0
while movimento not in '1 2 3 4 5 6 7 8 9'.split() or not vazio(tabuleiro, int(movimento)):
print('Qual a sua jogada, {}?'.format(nome))
movimento = input()
return int(movimento)
# function that works over the board matrix:
def jogadaComputador(tabuleiro, simboloComputador):
if simboloComputador == 'X':
simboloVoce = 'O'
else:
simboloVoce = 'X'
for i in range(1,10):
copy = mostraTabuleiro(tabuleiro)
if vazio(copy, i):
movimentacao(copy, simboloComputador, i)
if verificaVencedor(copy, simboloComputador):
return i
for i in range(1, 10):
copy = mostraTabuleiro(tabuleiro)
if vazio(copy, i):
movimentacao(copy, simboloVoce, i)
if verificaVencedor(copy, simboloVoce):
return i
movimento = movAleatoria(tabuleiro, [1, 3, 7, 9])
if movimento != None:
return movimento
if vazio(tabuleiro, 5):
return 5
return movAleatoria(tabuleiro, [2, 4, 6, 8])
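# Strategy used above (summary, ours): 1) take a winning move if available,
# 2) block the player's winning move, 3) prefer a corner [1, 3, 7, 9],
# 4) then the center (5), 5) otherwise a side [2, 4, 6, 8].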
#def validaJogada()
def mostraTabuleiro(tabuleiro):
dupeTabuleiro = []
for i in tabuleiro:
dupeTabuleiro.append(i)
return dupeTabuleiro
def verificaVencedor(tabuleiro, simbolo):
    return ((tabuleiro[1] == simbolo and tabuleiro[2] == simbolo and tabuleiro[3] == simbolo) or  # winning rows
            (tabuleiro[4] == simbolo and tabuleiro[5] == simbolo and tabuleiro[6] == simbolo) or
            (tabuleiro[7] == simbolo and tabuleiro[8] == simbolo and tabuleiro[9] == simbolo) or
            (tabuleiro[7] == simbolo and tabuleiro[4] == simbolo and tabuleiro[1] == simbolo) or  # winning columns
            (tabuleiro[8] == simbolo and tabuleiro[5] == simbolo and tabuleiro[2] == simbolo) or
            (tabuleiro[9] == simbolo and tabuleiro[6] == simbolo and tabuleiro[3] == simbolo) or
            (tabuleiro[7] == simbolo and tabuleiro[5] == simbolo and tabuleiro[3] == simbolo) or  # winning diagonals
            (tabuleiro[9] == simbolo and tabuleiro[5] == simbolo and tabuleiro[1] == simbolo))
#################################################################################
def vazio(tabuleiro, movimento):
return tabuleiro[movimento] == ' '
def desenhaTabuleiro(tabuleiro):
print(' ' + tabuleiro[7] + ' | ' + tabuleiro[8] + ' | ' + tabuleiro[9])
print(' ' + tabuleiro[4] + ' | ' + tabuleiro[5] + ' | ' + tabuleiro[6])
print(' ' + tabuleiro[1] + ' | ' + tabuleiro[2] + ' | ' + tabuleiro[3])
def jogarNovamente():
print('Você deseja jogar novamente? ')
return input().lower().startswith('sim')
def movimentacao(tabuleiro, simbolo, movimento):
tabuleiro[movimento] = simbolo
def movAleatoria(tabuleiro, movimentosList):
movPossiveis = []
for i in movimentosList:
if vazio(tabuleiro, i):
movPossiveis.append(i)
if len(movPossiveis) != 0:
return random.choice(movPossiveis)
else:
return None
def completo(tabuleiro):
for i in range(1, 10):
if vazio(tabuleiro, i):
return False
return True
print('Bem vindo ao JogoDaVelha do grupo X')
nome = input('Qual o seu nome (ou apelido)? ')
while True:
tabul = [' '] * 10
simboloVoce, simboloComputador = solicitaSimboloDoHumano()
turn = sorteioPrimeiraJogada()
print('Vencedor do sorteio para início do jogo: {}'.format(turn))
rodando = True
while rodando:
if turn == 'voce':
desenhaTabuleiro(tabul)
movimento = jogadaHumana(tabul)
movimentacao(tabul, simboloVoce, movimento)
if verificaVencedor(tabul, simboloVoce):
desenhaTabuleiro(tabul)
print('Vencedor: {}'.format(nome))
rodando = False
else:
if completo(tabul):
desenhaTabuleiro(tabul)
print('Deu Velha!')
break
else:
turn = 'Computador'
else:
movimento = jogadaComputador(tabul, simboloComputador)
movimentacao(tabul, simboloComputador, movimento)
if verificaVencedor(tabul, simboloComputador):
desenhaTabuleiro(tabul)
print('Vencedor: Computador')
rodando = False
else:
if completo(tabul):
desenhaTabuleiro(tabul)
print('Deu Velha!')
break
else:
turn = 'voce'
if not jogarNovamente():
break
"""
velha= Jogadas Posições jogáveis
| | 7 | 8 | 9
---+---+--- ---+---+---
| | 4 | 5 | 6
---+---+--- ---+---+---
| | 1 | 2 | 3
# Listagem de posicoes (horizontal e vertical) para as posicoes do jogo.
# Numeração das posicoes ira facilitar o entendimento para jogabilidade.
posicoes = [
None, # Indice
(5, 1), # 1
(5, 5), # 2
(5, 9), # 3
(3, 1), # 4
(3, 5), # 5
(3, 9), # 6
(1, 1), # 7
(1, 5), # 8
(1, 9), # 9
]
# Descrição das posicoes que ganham o jogo fazendo uma linha, um coluna, linha ou diagonal == win
# Os números representam as posicoes ganhadoras
win = [
[ 1, 2, 3], #linha
[ 4, 5, 6],
[ 7, 8, 9],
[ 7, 4, 1], #coluna
[ 8, 5, 2],
[ 9, 6, 3],
[ 7, 5, 3], #diag
[ 1, 5, 9]
]
# Tabuleiro é construido usndo string e gera lista
tabuleiro = []
for linha in velha.splitlines():
tabuleiro.append(list(linha))
jogador = "X" # Começa jogando com X
jogando = True
jogada = 0 # Contador de jogadas
while True:
for t in tabuleiro: # Mostra o tabuleiro
print("".join(t))
if not jogando: # Termina após mostrar o último tabuleiro
break
if jogada == 9: # Se 9 jogadas, todas as posicoes já foram preenchidas
print("Deu velha! Ninguém ganhou.")
break
jogada = int(input("Digite a posição a jogar 1-9 (jogador %s):" % jogador))
if jogada<1 or jogada>9:
print("Posição inválida")
continue
# Verifica posição livre
if tabuleiro[posicoes[jogada][0]][posicoes[jogada][1]] != " ":
print("Posição ja utilizada ocupada.")
continue
# Marca a jogada p/ o jogador
tabuleiro[posicoes[jogada][0]][posicoes[jogada][1]] = jogador
# Verfica se ganhou
for p in win:
for x in p:
if tabuleiro[posicoes[x][0]][posicoes[x][1]] != jogador:
break
else: # Se o for terminar sem break, todas as posicoes de p pertencem ao mesmo jogador
print("O jogador %s ganhou (%s): "%(jogador, p))
jogando = False
break
jogador = "X" if jogador == "O" else "O" # Alterna os jogador
jogada +=1 # Contador de jogadas
m=int(input("Digite a quantidade n-s: "))
while m<2 or m>1000:
m=int(input("Digite a quantidade n-s: "))
n=int(input("Digite a quantidade l-o: "))
while n<2 or n>1000:
n=int(input("Digite a quantidade l-o: "))
matriz=[]
for i in range (0,m,1):
linha=[]
for j in range (0,n,1):
a=(int(input("Digite o preço da quadra: ")))
while a <1 or a >100:
a=(int(input("Digite o preço da quadra: ")))
linha.append (a)
matriz.append(linha)
menor=100*m
for j in range (0,n,1):
soma=0
for i in range (0,m,1):
soma+=matriz[i][j]
if soma<menor:
menor=soma
print (menor)
n=int(input('Digite um valor >=2: '))
while n<2:
n=int(input('Digite um valor >=2: '))
a=[]
for i in range (0,n,1):
b=[]
for i in range (0,n,1):
b.append(int(input('Digite os valores da matriz: ')))
a.append(b)
print (a[i][i])
print (a[i][n-i])
lista1=[1,2,3,4]
print (lista1[len(lista1)+1])
n = 20
a = []
for i in range(0,n,1):
a.append(input('Digite um valor: '))
print (a)
x=int(input('digite um valor 1: '))
y=int(input('digite um valor 2: '))
print (maximo(x,y))
if par(2):
print ('sim')
else:
print ('nao')
from minha_bib import *
a=int(input('digite a: '))
b=int(input('digite b: '))
print (f1(x,y))
a=-100
valor=avaliar(a)
print (valor)
if funcao(2,3,10):
print ('S')
else:
print ('n')
cronometro(20)
#print (fatorial(5))
#print(multiplicacao(2,3))
b=0
a=100
for i in range (0,a,1):
if a%(i+1)!=0:
b=b+1
print (b)
###########################
for i in range (0,10,1):
print (i)
###########################
for i in rang (0,10,1):
print (i)
while(True):
while(True):
n=int(input("Digite um numero inteiro positivo: "))
if (n>=0):
break
f=1
for i in range (2,n+1,1):
f*=i
print("%d!= %d" %(n,f))
opt=input("Deseja continuar? [S ou N]")
if (opt =="N"):
print ("\n\nAte Breve")
break
i=0
while i<10:
print (i)
########################
n=int(input("digite um numero n: "))
i=1
cont = 0
while i < n:
if i%2==1:
cont=cont+1
i=i+1
print (cont)
##########################
for x in range (0,5,1):
for y in range (0,5,1):
print ("(%d,%d)" %(x,y))
##########################
n= int(input("Digite um numero: "))
i=2
contador=0
while (i<(n-1)):
if n%i==0:
comtador+=1
i+=1
if contador>0:
print ("%d Nao eh primo!" %n)
else:
print ("%d Eh primo!" %n)
##########################
a= 30
b= 20
z=int(a/b)
print z
###########################
a=80
b=50
c=10
if a<b:
print("comando 1")
else:
if a<c:
print ("comando 2")
else b<c:
print ("comando 3")
print("pronto")
#########################
a=30
b=5
c=10
if a<b<c:
print("comando 1")
else:
if a<c<b:
print("comando 2")
else:
print("comando 3")
print("comando 4")
print("pronto")
###########################
a=3
b=5
c=10
if a<b:
print ("comando 1")
else:
if a<c:
print("comando 2")
else:
if b<c:
print ("comando 3")
print("pronto")
#########################
a=30
b=5
c=10
if a<b:
print("comando 1")
else:
if a<c:
print("comando 2")
else:
if b<c:
print("comando 3")
print ("pronto")
###########################
a=int(input('Que horas são? [ 0-23 ]'))
if a < 0 or a > 23:
print('Hora invalida')
elif a >= 3 and a <12:
print('Bom dia')
elif a >= 12 and a< 18:
print('Boa tarde')
else:
print('boa noite')
############################
print("Seja bem vindo ao programa Quem é você!")
print("________________________________________")
nome= str(input("Me diga o seu nome: "))
print("\nOlá "+nome+". Farei algumas perguntas sobre você!")
idade=int(input("Me diga a sua idade: "))
print("\nok! Você tem %d anos." %idade)
altura=float(input("Me diga a sua altura em metros: "))
print("\nVocê tem %.2f metros de altura!" %altura)
print("\n\nIsso é tudo. Até a próxima, %s!" %nome)
###################################
a= float(input("digite o valor de a"))
a= a*10
print(a)
#####################################
float(input("digite um número em metros: "))
conversao=(unidade*100)
print("-----------------------------------")
print("o valor em centímetros é: %2.f" %conversao)
#################################
nota1 =float(input("digite sua nota (1): "))
nota2 =float(input("digite sua nota (2): "))
nota3 =float(input("digite sua nota (3): "))
nota4 =float(input("digite sua nota (4): "))
media=((nota1+nota2+nota3+nota4)/4)
print("sua média é: %2.f" %media)
"""
| [
"[email protected]"
] | |
a7c6f8636993116c7a66155ac7432348a4115dfa | b3b68efa404a7034f0d5a1c10b281ef721f8321a | /Scripts/simulation/date_and_time.py | 1ceb06b1195becb57c479f4372fe8044d6aba0a6 | [
"Apache-2.0"
] | permissive | velocist/TS4CheatsInfo | 62195f3333076c148b2a59f926c9fb5202f1c6fb | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | refs/heads/main | 2023-03-08T01:57:39.879485 | 2021-02-13T21:27:38 | 2021-02-13T21:27:38 | 337,543,310 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,479 | py | # uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: T:\InGame\Gameplay\Scripts\Server\date_and_time.py
# Compiled at: 2020-01-15 01:21:52
# Size of source mod 2**32: 19262 bytes
import math
from protocolbuffers.Localization_pb2 import LocalizedDateAndTimeData, LocalizedStringToken
from sims4.math import MAX_UINT64
import enum, sims4.commands, sims4.tuning.tunable
with sims4.reload.protected(globals()):
TICKS_PER_REAL_WORLD_SECOND = 1000
REAL_MILLISECONDS_PER_SIM_SECOND = 25
MILLISECONDS_PER_SECOND = 1000
SECONDS_PER_MINUTE = 60
MINUTES_PER_HOUR = 60
HOURS_PER_DAY = 24
DAYS_PER_WEEK = 7
SECONDS_PER_HOUR = SECONDS_PER_MINUTE * MINUTES_PER_HOUR
SECONDS_PER_DAY = SECONDS_PER_HOUR * HOURS_PER_DAY
SECONDS_PER_WEEK = SECONDS_PER_DAY * DAYS_PER_WEEK
INVALID_TIME = MAX_UINT64
class TimeUnit(enum.Int):
SECONDS = 0
MINUTES = 1
HOURS = 2
DAYS = 3
WEEKS = 4
def send_clock_tuning():
sims4.commands.execute('clock._set_milliseconds_per_sim_second {0}'.format(REAL_MILLISECONDS_PER_SIM_SECOND), None)
def sim_ticks_per_day():
return REAL_MILLISECONDS_PER_SIM_SECOND * SECONDS_PER_DAY
def sim_ticks_per_week():
return REAL_MILLISECONDS_PER_SIM_SECOND * SECONDS_PER_WEEK
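# Quick arithmetic check (ours): at 25 real ms per sim second, one sim day is
# 25 * 86400 = 2,160,000 ticks, i.e. 36 real-world minutes at 1000 ticks per
# real second.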
class DateAndTime(int):
__slots__ = ()
def __repr__(self):
return 'DateAndTime({0:d})'.format(self.absolute_ticks())
def __str__(self):
ms = int(self.absolute_seconds() * MILLISECONDS_PER_SECOND % MILLISECONDS_PER_SECOND)
return '{0:02}:{1:02}:{2:02}.{3:03} day:{4} week:{5}'.format(self.hour(), self.minute(), self.second(), ms, self.day(), self.week())
def __format__(self, format_spec):
if format_spec == 'h':
return '{:02}'.format(self.hour())
if format_spec == 'm':
return '{:02}'.format(self.minute())
if format_spec == 's':
return '{:02}'.format(self.second())
if format_spec == 'd':
return str(self.day())
if format_spec == 'D':
return ('Su', ' M', 'Tu', ' W', 'Th', ' F', 'Sa')[self.day()]
if format_spec == 'w':
return str(self.week())
if format_spec == 'as':
return str(self.absolute_seconds() * MILLISECONDS_PER_SECOND % MILLISECONDS_PER_SECOND)
return super().__format__(format_spec)
def second(self):
return int(self.absolute_seconds() % SECONDS_PER_MINUTE)
def minute(self):
return int(self.absolute_seconds() / SECONDS_PER_MINUTE % MINUTES_PER_HOUR)
def hour(self):
return int(self.absolute_seconds() / SECONDS_PER_HOUR % HOURS_PER_DAY)
def day(self):
return int(self.absolute_seconds() / SECONDS_PER_DAY % DAYS_PER_WEEK)
def week(self):
return int(self.absolute_seconds() / SECONDS_PER_WEEK)
def absolute_ticks(self):
return int(self)
def absolute_seconds(self):
return int(self) / REAL_MILLISECONDS_PER_SIM_SECOND
def absolute_minutes(self):
return self.absolute_seconds() / SECONDS_PER_MINUTE
def absolute_hours(self):
return self.absolute_seconds() / SECONDS_PER_HOUR
def absolute_days(self):
return self.absolute_seconds() / SECONDS_PER_DAY
def absolute_weeks(self):
return self.absolute_seconds() / SECONDS_PER_WEEK
def _ticks_in_day(self):
return int(self) % sim_ticks_per_day()
def _ticks_in_week(self):
return int(self) % sim_ticks_per_week()
def time_to_week_time(self, time_of_week):
tick_diff = time_of_week._ticks_in_week() - self._ticks_in_week()
if tick_diff < 0:
tick_diff += sim_ticks_per_week()
return TimeSpan(tick_diff)
def time_of_next_week_time(self, time_of_week):
return self + self.time_to_week_time(time_of_week)
def time_till_timespan_of_week(self, start_time_of_week, optional_end_time=None, min_duration_remaining=None):
ticks_in_week = self._ticks_in_week()
tick_diff = start_time_of_week.absolute_ticks() - ticks_in_week
if tick_diff > 0:
return TimeSpan(tick_diff)
if optional_end_time is not None:
tick_diff_before_end = optional_end_time.absolute_ticks() - ticks_in_week
if tick_diff_before_end > 0:
if min_duration_remaining is None or tick_diff_before_end >= min_duration_remaining.in_ticks():
return TimeSpan.ZERO
end_of_week = sim_ticks_per_week() - self._ticks_in_week()
return TimeSpan(end_of_week + start_time_of_week.absolute_ticks())
def time_between_day_times(self, start_time, end_time):
ticks_in_day = self._ticks_in_day()
start_ticks = start_time._ticks_in_day()
end_ticks = end_time._ticks_in_day()
if start_ticks > end_ticks:
if ticks_in_day >= start_ticks or ticks_in_day <= end_ticks:
return True
return False
if ticks_in_day >= start_ticks:
if ticks_in_day <= end_ticks:
return True
return False
def time_between_week_times(self, start_time, end_time):
ticks_in_week = self._ticks_in_week()
start_ticks = start_time._ticks_in_week()
end_ticks = end_time._ticks_in_week()
if start_ticks > end_ticks:
if ticks_in_week >= start_ticks or ticks_in_week <= end_ticks:
return True
return False
if ticks_in_week >= start_ticks:
if ticks_in_week <= end_ticks:
return True
return False
def time_till_next_day_time(self, day_time):
ticks_in_day = self._ticks_in_day()
ticks_in_day_time = day_time._ticks_in_day()
time_till_next_day_time = ticks_in_day_time - ticks_in_day
if time_till_next_day_time < 0:
time_till_next_day_time += sim_ticks_per_day()
return TimeSpan(time_till_next_day_time)
def time_of_next_day_time(self, day_time):
return self + self.time_till_next_day_time(day_time)
def time_since_beginning_of_week(self):
return self + TimeSpan(SECONDS_PER_WEEK * -self.week() * REAL_MILLISECONDS_PER_SIM_SECOND)
def start_of_week(self):
return DateAndTime(self.week() * sim_ticks_per_week())
def start_of_next_week(self):
return DateAndTime((self.week() + 1) * sim_ticks_per_week())
def __sub__(self, other):
return TimeSpan(int.__sub__(self, other))
def __add__(self, other):
return DateAndTime(int.__add__(self, other._delta))
def populate_localization_token(self, token):
loc_data = LocalizedDateAndTimeData()
loc_data.seconds = self.second()
loc_data.minutes = self.minute()
loc_data.hours = self.hour()
day_of_week = self.day()
if day_of_week == 0:
day_of_week = 7
loc_data.date = day_of_week
loc_data.month = 0
loc_data.full_year = 0
token.type = LocalizedStringToken.DATE_AND_TIME
token.date_and_time = loc_data
DATE_AND_TIME_ZERO = DateAndTime(0)
INVALID_DATE_AND_TIME = DateAndTime(INVALID_TIME)
class TimeSpan:
__slots__ = ('_delta', )
def __init__(self, delta):
self._delta = math.ceil(delta)
def populate_localization_token(self, token):
token.number = self.in_minutes()
token.type = LocalizedStringToken.NUMBER
def in_ticks(self):
return self._delta
def in_seconds(self):
return self._delta / REAL_MILLISECONDS_PER_SIM_SECOND
def in_minutes(self):
return self.in_seconds() / SECONDS_PER_MINUTE
def in_hours(self):
return self.in_seconds() / SECONDS_PER_HOUR
def in_days(self):
return self.in_seconds() / SECONDS_PER_DAY
def in_weeks(self):
return self.in_seconds() / SECONDS_PER_WEEK
def in_real_world_seconds(self):
return self._delta / TICKS_PER_REAL_WORLD_SECOND
def __add__(self, other):
return TimeSpan(self._delta + other._delta)
def __sub__(self, other):
return TimeSpan(self._delta - other._delta)
def __mul__(self, other):
return TimeSpan(self._delta * other)
def __truediv__(self, other):
return TimeSpan(self._delta / other)
def __cmp__(self, other):
return self._delta - other._delta
def __lt__(self, other):
return issubclass(type(other), type(self)) and self._delta < other._delta
def __le__(self, other):
return issubclass(type(other), type(self)) and self._delta <= other._delta
def __gt__(self, other):
return issubclass(type(other), type(self)) and self._delta > other._delta
def __ge__(self, other):
return issubclass(type(other), type(self)) and self._delta >= other._delta
def __eq__(self, other):
return issubclass(type(other), type(self)) and self._delta == other._delta
def __ne__(self, other):
return not (issubclass(type(other), type(self)) and self._delta == other._delta)
def __neg__(self):
return TimeSpan(-self._delta)
def __hash__(self):
return self._delta
def __repr__(self):
return 'TimeSpan({0})'.format(self._delta)
def __str__(self):
abs_delta = abs(self.in_seconds())
if abs_delta < SECONDS_PER_MINUTE:
return '{0:.2f} seconds'.format(self.in_seconds())
if abs_delta < SECONDS_PER_MINUTE * MINUTES_PER_HOUR:
return '{0:.2f} minutes'.format(self.in_minutes())
if abs_delta < SECONDS_PER_MINUTE * MINUTES_PER_HOUR * HOURS_PER_DAY:
return '{0:.2f} hours'.format(self.in_hours())
if abs_delta < SECONDS_PER_MINUTE * MINUTES_PER_HOUR * HOURS_PER_DAY * DAYS_PER_WEEK:
return '{0:.2f} days'.format(self.in_days())
return '{0:.2f} weeks'.format(self.in_weeks())
TimeSpan.ZERO = TimeSpan(0)
TimeSpan.ONE = TimeSpan(1)
TimeSpan.NEGATIVE_ONE = TimeSpan(-1)
def create_date_and_time(days=0, hours=0, minutes=0):
num_sim_seconds = days * SECONDS_PER_DAY + hours * SECONDS_PER_HOUR + minutes * SECONDS_PER_MINUTE
time_in_ticks = num_sim_seconds * REAL_MILLISECONDS_PER_SIM_SECOND
return DateAndTime(time_in_ticks)
def create_time_span(days=0, hours=0, minutes=0):
num_sim_seconds = days * SECONDS_PER_DAY + hours * SECONDS_PER_HOUR + minutes * SECONDS_PER_MINUTE
time_in_ticks = num_sim_seconds * REAL_MILLISECONDS_PER_SIM_SECOND
return TimeSpan(time_in_ticks)
def date_and_time_from_week_time(num_weeks, week_time):
total_ticks = num_weeks * sim_ticks_per_week() + week_time.absolute_ticks()
return DateAndTime(total_ticks)
def ticks_to_time_unit(ticks, time_unit, use_sim_time):
if use_sim_time:
seconds = ticks / REAL_MILLISECONDS_PER_SIM_SECOND
else:
seconds = ticks / TICKS_PER_REAL_WORLD_SECOND
if time_unit is TimeUnit.SECONDS:
return seconds
if time_unit is TimeUnit.MINUTES:
return seconds / SECONDS_PER_MINUTE
if time_unit is TimeUnit.HOURS:
return seconds / SECONDS_PER_HOUR
if time_unit is TimeUnit.DAYS:
return seconds / SECONDS_PER_DAY
if time_unit is TimeUnit.WEEKS:
return seconds / SECONDS_PER_WEEK
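# Minimal usage sketch (ours, not from the original module):
#   noon_plus_a_day = create_date_and_time(days=1, hours=12)
#   two_hours = create_time_span(hours=2)
#   later = noon_plus_a_day + two_hours        # DateAndTime + TimeSpan
#   assert (later - noon_plus_a_day).in_hours() == 2.0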
if __name__ == '__main__':
import doctest
doctest.testmod()
    # TunableClock.SetSimSecondsPerRealSecond(30)  # decompilation leftover; TunableClock is not defined in this module
 | [
"[email protected]"
] |