Dataset schema, one row per source file (ranges are the min–max observed by the dataset viewer; ⌀ marks nullable columns):

| column | type | range / distinct values |
|---|---|---|
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 3–616 |
| content_id | string | length 40 |
| detected_licenses | list | length 0–112 |
| license_type | string | 2 distinct values |
| repo_name | string | length 5–115 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | 777 distinct values |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 – 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 – 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 – 2023-09-06 01:08:06 |
| github_id | int64 ⌀ | 4.92k – 681M |
| star_events_count | int64 | 0 – 209k |
| fork_events_count | int64 | 0 – 110k |
| gha_license_id | string | 22 distinct values |
| gha_event_created_at | timestamp[us] ⌀ | 2012-06-04 01:52:49 – 2023-09-14 21:59:50 |
| gha_created_at | timestamp[us] ⌀ | 2008-05-22 07:58:19 – 2023-08-21 12:35:19 |
| gha_language | string | 149 distinct values |
| src_encoding | string | 26 distinct values |
| language | string | 1 distinct value |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 – 10.2M |
| extension | string | 188 distinct values |
| content | string | length 3 – 10.2M |
| authors | list | length 1 |
| author_id | string | length 1–132 |
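A minimal sketch of loading and inspecting rows with this schema via the Hugging Face `datasets` library; the dataset path below is a placeholder, not the actual dataset name:

```python
# Minimal sketch, assuming the rows below live in a Hub dataset with the
# schema above. "org/stack-like-code-dataset" is a placeholder path.
from datasets import load_dataset

ds = load_dataset("org/stack-like-code-dataset", split="train", streaming=True)

for row in ds.take(3):
    # Each row carries repo provenance plus the file contents themselves.
    print(row["repo_name"], row["path"], row["license_type"], row["length_bytes"])
    print(row["content"][:200])  # first 200 characters of the source file
```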
Row 1

- blob_id: 909213a9973c1d8f3eb65713c5dceb7f61293c51
- directory_id: c0d9fd9aaf65fff29aaf867ba3a1cd55d35788d1
- path: /thumb/fields/video_thumbnail_field/widget.py
- content_id: c40f72151968efe5907694cc1805813b1135176a
- detected_licenses: ["BSD-2-Clause"]
- license_type: permissive
- repo_name: AmrAnwar/django-thumb
- snapshot_id: bb76e5511edca3ae331e00767e5c302ce4f54bfe
- revision_id: ec22446cd1e1721a02dd3d101c3697cf0f309ded
- branch_name: refs/heads/master
- visit_date: 2022-11-29T01:47:38.944343
- revision_date: 2022-09-21T08:15:25
- committer_date: 2022-09-21T08:15:25
- github_id: 138,357,544
- star_events_count: 12
- fork_events_count: 0
- gha_license_id: BSD-2-Clause
- gha_event_created_at: 2022-11-22T07:53:26
- gha_created_at: 2018-06-22T23:53:27
- gha_language: Python
- src_encoding: UTF-8
- language: Python
- is_vendor: false
- is_generated: false
- length_bytes: 2,366
- extension: py
- content:
from django import forms
from ..colored_text_field import ColoredTextInput
from ..cascade_data import CASCADE_DATA
from ..image_thumnail_field import ImageThumbnailInput
CASCADE_CHOICES = (
    [("", "select a cascade case")] +
    [(str(cascade_index), cascade[0])
     for cascade_index, cascade in enumerate(CASCADE_DATA)]
)
class VideoThumbnailInput(ImageThumbnailInput):
def __init__(self, video_field_name, video_capture_help_text=None, attrs=None):
self.video_field_name = video_field_name or "video"
self.video_capture_help_text = (video_capture_help_text
or "if checked or entered data in more than 1 field, it will execute in order")
# for update case
self.video = None
widgets = [
# to get the image path
forms.HiddenInput(attrs=attrs),
# to get the video value
forms.HiddenInput(attrs=attrs),
# capture options
forms.Select(choices=CASCADE_CHOICES),
forms.TextInput(attrs={'placeholder': 'MM:SS'}),
forms.CheckboxInput(attrs={'label': "random", "note": "get random thumbnail"}, ),
# for manual input
forms.ClearableFileInput(attrs=attrs),
# color
ColoredTextInput(attrs=attrs)
]
super(VideoThumbnailInput, self).__init__(child_widgets=widgets, attrs=attrs)
self.template_name = "video_thumbnail.html"
def get_context(self, name, value, attrs):
context = super(VideoThumbnailInput, self).get_context(name, value, attrs)
context['widget']['video_capture_help_text'] = self.video_capture_help_text
return context
def decompress(self, value):
image_value = value
value = super(VideoThumbnailInput, self).decompress(value)
if any(value):
video = getattr(image_value.instance, self.video_field_name, None)
            try:
                # video may be None or have no file attached; fall back to
                # None in either case
                value[1] = video.path
            except (ValueError, AttributeError):
                value[1] = None
return value
def value_from_datadict(self, data, files, name):
value = super(VideoThumbnailInput, self).value_from_datadict(data, files, name)
submitted_video = files.get(self.video_field_name) or value['data'][1]
value['video'] = submitted_video
return value
- authors: ["[email protected]"]
- author_id:
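For context, the widget above builds on Django's MultiWidget protocol: `decompress()` splits one stored value across the child widgets, and `value_from_datadict()` reassembles the submitted pieces. A minimal, self-contained sketch of that protocol (`HourMinuteWidget` and its split are illustrative, not part of django-thumb, which layers a custom dict-based contract on top):

```python
from django import forms

class HourMinuteWidget(forms.MultiWidget):
    """Illustrative two-part widget: one stored value <-> two inputs."""

    def __init__(self, attrs=None):
        widgets = [forms.NumberInput(attrs=attrs), forms.NumberInput(attrs=attrs)]
        super().__init__(widgets, attrs)

    def decompress(self, value):
        # Split the single stored value ("HH:MM") into per-child values.
        if value:
            return value.split(":", 1)
        return [None, None]

    def value_from_datadict(self, data, files, name):
        # The base class collects one submitted value per child widget.
        hours, minutes = super().value_from_datadict(data, files, name)
        return f"{hours}:{minutes}"
```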
Row 2

- blob_id: 0c748b030025fe67ebf6fdf3a85cd7acb380914e
- directory_id: 5b4686ace41ebfcb2c694283b232761010cf31d7
- path: /commands/deviot_choose_display_mode.py
- content_id: ddd22cf4e81514b3e01948d627fc7200308de35c
- detected_licenses: ["Apache-2.0"]
- license_type: permissive
- repo_name: gepd/Deviot
- snapshot_id: bbf4d40fbecb8187255a4ab0f3e4dae7e5a7d985
- revision_id: 150caea06108369b30210eb287a580fcff4904af
- branch_name: refs/heads/develop
- visit_date: 2023-08-18T04:13:56.932126
- revision_date: 2020-07-13T18:02:23
- committer_date: 2020-07-13T18:02:23
- github_id: 47,856,861
- star_events_count: 335
- fork_events_count: 91
- gha_license_id: Apache-2.0
- gha_event_created_at: 2023-01-28T02:53:49
- gha_created_at: 2015-12-11T23:56:06
- gha_language: Python
- src_encoding: UTF-8
- language: Python
- is_vendor: false
- is_generated: false
- length_bytes: 507
- extension: py
- content:
from sublime_plugin import WindowCommand
from ..libraries.quick_menu import QuickMenu
class DeviotChooseDisplayModeCommand(WindowCommand):
"""
Stores the display mode option selected for the user and save it in
the preferences file.
Extends: sublime_plugin.WindowCommand
"""
def run(self):
Quick = QuickMenu()
items = Quick.display_mode_list()
callback = Quick.callback_display_mode
Quick.set_list(items)
Quick.show_quick_panel(callback)
- authors: ["[email protected]"]
- author_id:
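Sublime Text derives the command name from the class name (`DeviotChooseDisplayModeCommand` → `deviot_choose_display_mode`). A sketch of invoking it, assuming it runs inside Sublime Text with the Deviot plugin installed:

```python
import sublime  # only available inside Sublime Text's embedded Python

# Trigger the command above from another plugin or the console:
sublime.active_window().run_command("deviot_choose_display_mode")
```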
Row 3

- blob_id: 58be6c6b116aee998632f1b2feeee7dc7e4292cd
- directory_id: b5ed599b776c4c34e9bd985cf72834bbbcad25ac
- path: /Chapter4/SumOfListRecursion.py
- content_id: c0237838fc67cb2bd89ec780252282a1fc650504
- detected_licenses: []
- license_type: no_license
- repo_name: Kaushiksekar/DSAlgInteractivePython
- snapshot_id: 170c57db14583e1d4923fb6aefce84ec1fdd6772
- revision_id: 3560e5a8e564900c9d499504fa26d49dcdcb0784
- branch_name: refs/heads/master
- visit_date: 2021-04-12T11:15:54.976931
- revision_date: 2018-04-14T17:13:57
- committer_date: 2018-04-14T17:13:57
- github_id: 126,722,924
- star_events_count: 0
- fork_events_count: 0
- gha_license_id: null
- gha_event_created_at: null
- gha_created_at: null
- gha_language: null
- src_encoding: UTF-8
- language: Python
- is_vendor: false
- is_generated: false
- length_bytes: 153
- extension: py
- content:
def listSum(list1):
    # Base case: a single-element list sums to that element.
    if len(list1) == 1:
        return list1[0]
    else:
        # Recursive case: head plus the sum of the tail.
        return list1[0] + listSum(list1[1:])

print(listSum([1,2,3,4,5]))
- authors: ["[email protected]"]
- author_id:
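The recursion bottoms out at the single-element list and unwinds right-to-left, so the call above computes 1 + (2 + (3 + (4 + 5))). A quick equivalence check against the built-in:

```python
# listSum([1,2,3,4,5]) expands right-to-left:
assert 1 + (2 + (3 + (4 + 5))) == 15
assert sum([1, 2, 3, 4, 5]) == 15  # built-in equivalent for non-empty lists
```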
Row 4

- blob_id: 197a1d020d2cfe8dee692e976248b2ff20515a16
- directory_id: c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c
- path: /cases/synthetic/tree-big-1444.py
- content_id: 6d499c33b829c13b81501da777f3e18e64053658
- detected_licenses: []
- license_type: no_license
- repo_name: Virtlink/ccbench-chocopy
- snapshot_id: c3f7f6af6349aff6503196f727ef89f210a1eac8
- revision_id: c7efae43bf32696ee2b2ee781bdfe4f7730dec3f
- branch_name: refs/heads/main
- visit_date: 2023-04-07T15:07:12.464038
- revision_date: 2022-02-03T15:42:39
- committer_date: 2022-02-03T15:42:39
- github_id: 451,969,776
- star_events_count: 0
- fork_events_count: 0
- gha_license_id: null
- gha_event_created_at: null
- gha_created_at: null
- gha_language: null
- src_encoding: UTF-8
- language: Python
- is_vendor: false
- is_generated: false
- length_bytes: 23,289
- extension: py
- content:
# Binary-search trees
class TreeNode(object):
value:int = 0
left:"TreeNode" = None
right:"TreeNode" = None
def insert(self:"TreeNode", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode(x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode(x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode2(object):
value:int = 0
value2:int = 0
left:"TreeNode2" = None
left2:"TreeNode2" = None
right:"TreeNode2" = None
right2:"TreeNode2" = None
def insert(self:"TreeNode2", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode2(x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode2(x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode2", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode2(x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode2(x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode2", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode2", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode3(object):
value:int = 0
value2:int = 0
value3:int = 0
left:"TreeNode3" = None
left2:"TreeNode3" = None
left3:"TreeNode3" = None
right:"TreeNode3" = None
right2:"TreeNode3" = None
right3:"TreeNode3" = None
def insert(self:"TreeNode3", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode3(x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode3(x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode3", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode3(x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode3(x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode3(x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
            if self.right is None:
self.right = makeNode3(x, x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode3", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode3", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode4(object):
value:int = 0
value2:int = 0
value3:int = 0
value4:int = 0
left:"TreeNode4" = None
left2:"TreeNode4" = None
left3:"TreeNode4" = None
left4:"TreeNode4" = None
right:"TreeNode4" = None
right2:"TreeNode4" = None
right3:"TreeNode4" = None
right4:"TreeNode4" = None
def insert(self:"TreeNode4", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode4", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode4", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode4", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode5(object):
value:int = 0
value2:int = 0
value3:int = 0
value4:int = 0
value5:int = 0
left:"TreeNode5" = None
left2:"TreeNode5" = None
left3:"TreeNode5" = None
left4:"TreeNode5" = None
left5:"TreeNode5" = None
right:"TreeNode5" = None
right2:"TreeNode5" = None
right3:"TreeNode5" = None
right4:"TreeNode5" = None
right5:"TreeNode5" = None
def insert(self:"TreeNode5", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode5", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode5", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode5", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class Tree(object):
root:TreeNode = None
size:int = 0
def insert(self:"Tree", x:int) -> object:
if self.root is None:
self.root = makeNode(x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree2(object):
root:TreeNode2 = None
root2:TreeNode2 = None
size:int = 0
size2:int = 0
def insert(self:"Tree2", x:int) -> object:
if self.root is None:
self.root = makeNode2(x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree2", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode2(x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree2", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree2", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree3(object):
root:TreeNode3 = None
root2:TreeNode3 = None
root3:TreeNode3 = None
size:int = 0
size2:int = 0
size3:int = 0
def insert(self:"Tree3", x:int) -> object:
if self.root is None:
self.root = makeNode3(x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree3", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode3(x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert3(self:"Tree3", x:int, x2:int, x3:int) -> object:
if self.root is None:
self.root = makeNode3(x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree3", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree3", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains3(self:"Tree3", x:int, x2:int, x3:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree4(object):
root:TreeNode4 = None
root2:TreeNode4 = None
root3:TreeNode4 = None
root4:TreeNode4 = None
size:int = 0
size2:int = 0
size3:int = 0
size4:int = 0
def insert(self:"Tree4", x:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree4", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert3(self:"Tree4", x:int, x2:int, x3:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree4", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree4", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains3(self:"Tree4", x:int, x2:int, x3:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree5(object):
root:TreeNode5 = None
root2:TreeNode5 = None
root3:TreeNode5 = None
root4:TreeNode5 = None
root5:TreeNode5 = None
size:int = 0
size2:int = 0
size3:int = 0
size4:int = 0
size5:int = 0
def insert(self:"Tree5", x:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree5", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert3(self:"Tree5", x:int, x2:int, x3:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree5", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree5", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains3(self:"Tree5", x:int, x2:int, x3:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def makeNode(x: int) -> TreeNode:
b:TreeNode = None
b = TreeNode()
b.value = x
return b
def makeNode2(x: int, x2: int) -> TreeNode2:
b:TreeNode2 = None
b2:TreeNode2 = None
b = TreeNode2()
b.value = x
return b
def makeNode3(x: int, x2: int, x3: int) -> TreeNode3:
b:TreeNode3 = None
b2:TreeNode3 = None
b3:TreeNode3 = None
b = TreeNode3()
b.value = x
return b
def makeNode4(x: int, x2: int, x3: int, x4: int) -> TreeNode4:
b:TreeNode4 = None
b2:TreeNode4 = None
b3:TreeNode4 = None
b4:TreeNode4 = None
b = TreeNode4()
b.value = x
return b
def makeNode5(x: int, x2: int, x3: int, x4: int, x5: int) -> TreeNode5:
b:TreeNode5 = None
b2:TreeNode5 = None
b3:TreeNode5 = None
b4:TreeNode5 = None
b5:TreeNode5 = None
b = TreeNode5()
b.value = x
return b
# Input parameters
n:int = 100
n2:int = 100
n3:int = 100
n4:int = 100
n5:int = 100
c:int = 4
c2:int = 4
c3:int = 4
c4:int = 4
c5:int = 4
# Data
t:Tree = None
t2:Tree = None
t3:Tree = None
t4:Tree = None
t5:Tree = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
k:int = 37813
k2:int = 37813
k3:int = 37813
k4:int = 37813
k5:int = 37813
# Crunch: build the tree from pseudo-random keys, then probe a few values
t = Tree()
while i < n:
t.insert(k)
k = (k * 37813) % 37831
if i % c != 0:
t.insert(i)
i = i + 1
print(t.size)
for i in [4, 8, 15, 16, 23, 42]:
if t.contains(i):
print(i)
- authors: ["[email protected]"]
- author_id:
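Since ChocoPy programs are a syntactic subset of Python, the benchmark file above also runs as plain Python; a small sanity check of its Tree API, mirroring the driver loop at the end of the file:

```python
# Assumes Tree/makeNode from the benchmark file above are in scope.
t = Tree()
for v in [5, 3, 8, 3]:  # the duplicate 3 is rejected by insert()
    t.insert(v)
assert t.size == 3
assert t.contains(8) and not t.contains(7)
```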
Row 5

- blob_id: 393f1e1d4e2b22dc0dce8214a1c28bc18b874f4d
- directory_id: 14381f1c66a4027e1612a069d71e3080234fecfc
- path: /weatherman.py
- content_id: 0a1f947db1a8bf72c628d98b8a09efd481bd3f99
- detected_licenses: ["MIT"]
- license_type: permissive
- repo_name: yuto-moriizumi/Python
- snapshot_id: 1c55153781208f632ebb578d89252614724845d4
- revision_id: 2de2903179f187c3c7105e8cf2f9600dded21f25
- branch_name: refs/heads/master
- visit_date: 2023-03-24T06:19:02.842821
- revision_date: 2021-03-23T08:28:41
- committer_date: 2021-03-23T08:28:41
- github_id: 277,742,594
- star_events_count: 0
- fork_events_count: 0
- gha_license_id: null
- gha_event_created_at: null
- gha_created_at: null
- gha_language: null
- src_encoding: UTF-8
- language: Python
- is_vendor: false
- is_generated: false
- length_bytes: 105
- extension: py
- content:
from report import get_description as unchi
description = unchi()
print("Today's weather:", description)
- authors: ["[email protected]"]
- author_id:
Row 6

- blob_id: df48b74261ed6f6a18fe11a74ca7116ad6b128c3
- directory_id: 1a166165ab8287d01cbb377a13efdb5eff5dfef0
- path: /sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_12_01/operations/_network_watchers_operations.py
- content_id: ea0669c30cad73a8bbf74e5dd20d144d6c68177b
- detected_licenses: ["MIT", "LicenseRef-scancode-generic-cla", "LGPL-2.1-or-later"]
- license_type: permissive
- repo_name: manoj0806/azure-sdk-for-python
- snapshot_id: 7a14b202ff80f528abd068bf50334e91001a9686
- revision_id: aab999792db1132232b2f297c76800590a901142
- branch_name: refs/heads/master
- visit_date: 2023-04-19T16:11:31.984930
- revision_date: 2021-04-29T23:19:49
- committer_date: 2021-04-29T23:19:49
- github_id: 363,025,016
- star_events_count: 1
- fork_events_count: 0
- gha_license_id: MIT
- gha_event_created_at: 2021-04-30T04:23:35
- gha_created_at: 2021-04-30T04:23:35
- gha_language: null
- src_encoding: UTF-8
- language: Python
- is_vendor: false
- is_generated: false
- length_bytes: 105,706
- extension: py
- content:
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class NetworkWatchersOperations(object):
"""NetworkWatchersOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def create_or_update(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.NetworkWatcher"
**kwargs # type: Any
):
# type: (...) -> "_models.NetworkWatcher"
"""Creates or updates a network watcher in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param parameters: Parameters that define the network watcher resource.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.NetworkWatcher
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkWatcher, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_12_01.models.NetworkWatcher
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkWatcher"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_or_update.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'NetworkWatcher')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('NetworkWatcher', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('NetworkWatcher', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}'} # type: ignore
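    # Usage sketch (not part of the generated file; resource names are
    # placeholders): with a NetworkManagementClient from azure-mgmt-network,
    # this maps to e.g.
    #   watcher = client.network_watchers.create_or_update(
    #       "my-rg", "my-watcher", {"location": "westus"})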
def get(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.NetworkWatcher"
"""Gets the specified network watcher by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkWatcher, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_12_01.models.NetworkWatcher
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkWatcher"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkWatcher', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}'} # type: ignore
def _delete_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified network watcher resource.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}'} # type: ignore
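    # Usage sketch (not part of the generated file; names are placeholders):
    # begin_delete returns an LROPoller; .result() blocks until the
    # long-running delete completes.
    #   client.network_watchers.begin_delete("my-rg", "my-watcher").result()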
def update_tags(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> "_models.NetworkWatcher"
"""Updates a network watcher tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param parameters: Parameters supplied to update network watcher tags.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkWatcher, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_12_01.models.NetworkWatcher
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkWatcher"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_tags.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkWatcher', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}'} # type: ignore
def list(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.NetworkWatcherListResult"]
"""Gets all network watchers by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkWatcherListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_12_01.models.NetworkWatcherListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkWatcherListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkWatcherListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers'} # type: ignore
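    # Usage sketch (not part of the generated file; names are placeholders):
    # list() returns an ItemPaged iterable that yields deserialized
    # NetworkWatcher models, lazily following server-side paging.
    #   for watcher in client.network_watchers.list("my-rg"):
    #       print(watcher.name)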
def list_all(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.NetworkWatcherListResult"]
"""Gets all network watchers by subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkWatcherListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_12_01.models.NetworkWatcherListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkWatcherListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkWatcherListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/networkWatchers'} # type: ignore
def get_topology(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.TopologyParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.Topology"
"""Gets the current network topology by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param parameters: Parameters that define the representation of topology.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.TopologyParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Topology, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_12_01.models.Topology
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.Topology"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.get_topology.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TopologyParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('Topology', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_topology.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/topology'} # type: ignore
def _verify_ip_flow_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.VerificationIPFlowParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.VerificationIPFlowResult"
cls = kwargs.pop('cls', None) # type: ClsType["_models.VerificationIPFlowResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._verify_ip_flow_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VerificationIPFlowParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VerificationIPFlowResult', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('VerificationIPFlowResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_verify_ip_flow_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/ipFlowVerify'} # type: ignore
def begin_verify_ip_flow(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.VerificationIPFlowParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.VerificationIPFlowResult"]
"""Verify IP flow from the specified VM to a location given the currently configured NSG rules.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param parameters: Parameters that define the IP flow to be verified.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.VerificationIPFlowParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VerificationIPFlowResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_12_01.models.VerificationIPFlowResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VerificationIPFlowResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._verify_ip_flow_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VerificationIPFlowResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_verify_ip_flow.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/ipFlowVerify'} # type: ignore
def _get_next_hop_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.NextHopParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.NextHopResult"
cls = kwargs.pop('cls', None) # type: ClsType["_models.NextHopResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._get_next_hop_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'NextHopParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('NextHopResult', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('NextHopResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_next_hop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/nextHop'} # type: ignore
def begin_get_next_hop(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.NextHopParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.NextHopResult"]
"""Gets the next hop from the specified VM.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param parameters: Parameters that define the source and destination endpoint.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.NextHopParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either NextHopResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_12_01.models.NextHopResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.NextHopResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_next_hop_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('NextHopResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_next_hop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/nextHop'} # type: ignore
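    # Usage sketch (illustrative only, not part of the generated client): with a
    # hypothetical NetworkManagementClient instance `client`, the long-running
    # next-hop operation is typically driven like this:
    #
    #   poller = client.network_watchers.begin_get_next_hop(
    #       resource_group_name="my-rg",            # hypothetical names
    #       network_watcher_name="my-watcher",
    #       parameters=NextHopParameters(
    #           target_resource_id=vm_id,           # ID of the source VM
    #           source_ip_address="10.0.0.4",
    #           destination_ip_address="10.0.0.5",
    #       ),
    #   )
    #   next_hop = poller.result()  # blocks until the LRO reaches a terminal state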
def _get_vm_security_rules_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.SecurityGroupViewParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.SecurityGroupViewResult"
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecurityGroupViewResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._get_vm_security_rules_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'SecurityGroupViewParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('SecurityGroupViewResult', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('SecurityGroupViewResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_vm_security_rules_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/securityGroupView'} # type: ignore
def begin_get_vm_security_rules(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.SecurityGroupViewParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.SecurityGroupViewResult"]
"""Gets the configured and effective security group rules on the specified VM.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param parameters: Parameters that define the VM to check security groups for.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.SecurityGroupViewParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either SecurityGroupViewResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_12_01.models.SecurityGroupViewResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecurityGroupViewResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_vm_security_rules_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('SecurityGroupViewResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_vm_security_rules.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/securityGroupView'} # type: ignore
def _get_troubleshooting_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.TroubleshootingParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.TroubleshootingResult"
cls = kwargs.pop('cls', None) # type: ClsType["_models.TroubleshootingResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._get_troubleshooting_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TroubleshootingParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('TroubleshootingResult', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('TroubleshootingResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_troubleshooting_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/troubleshoot'} # type: ignore
def begin_get_troubleshooting(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.TroubleshootingParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.TroubleshootingResult"]
"""Initiate troubleshooting on a specified resource.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher resource.
:type network_watcher_name: str
:param parameters: Parameters that define the resource to troubleshoot.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.TroubleshootingParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either TroubleshootingResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_12_01.models.TroubleshootingResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.TroubleshootingResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_troubleshooting_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('TroubleshootingResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_troubleshooting.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/troubleshoot'} # type: ignore
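    # Sketch of the continuation_token flow documented above (assumption:
    # `client` is a NetworkManagementClient). The poller state can be saved and
    # later restored, e.g. across process restarts:
    #
    #   poller = client.network_watchers.begin_get_troubleshooting(rg, watcher, params)
    #   token = poller.continuation_token()     # persist this string
    #   ...
    #   restored = client.network_watchers.begin_get_troubleshooting(
    #       rg, watcher, params, continuation_token=token,
    #   )
    #   result = restored.result()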
def _get_troubleshooting_result_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.QueryTroubleshootingParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.TroubleshootingResult"
cls = kwargs.pop('cls', None) # type: ClsType["_models.TroubleshootingResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._get_troubleshooting_result_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'QueryTroubleshootingParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('TroubleshootingResult', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('TroubleshootingResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_troubleshooting_result_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/queryTroubleshootResult'} # type: ignore
def begin_get_troubleshooting_result(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.QueryTroubleshootingParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.TroubleshootingResult"]
"""Get the last completed troubleshooting result on a specified resource.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher resource.
:type network_watcher_name: str
:param parameters: Parameters that define the resource to query the troubleshooting result.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.QueryTroubleshootingParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either TroubleshootingResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_12_01.models.TroubleshootingResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.TroubleshootingResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_troubleshooting_result_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('TroubleshootingResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_troubleshooting_result.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/queryTroubleshootResult'} # type: ignore
def _set_flow_log_configuration_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.FlowLogInformation"
**kwargs # type: Any
):
# type: (...) -> "_models.FlowLogInformation"
cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowLogInformation"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._set_flow_log_configuration_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'FlowLogInformation')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('FlowLogInformation', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('FlowLogInformation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_set_flow_log_configuration_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/configureFlowLog'} # type: ignore
def begin_set_flow_log_configuration(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.FlowLogInformation"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.FlowLogInformation"]
"""Configures flow log and traffic analytics (optional) on a specified resource.
:param resource_group_name: The name of the network watcher resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher resource.
:type network_watcher_name: str
:param parameters: Parameters that define the configuration of flow log.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.FlowLogInformation
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either FlowLogInformation or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_12_01.models.FlowLogInformation]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowLogInformation"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._set_flow_log_configuration_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('FlowLogInformation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_set_flow_log_configuration.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/configureFlowLog'} # type: ignore
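    # Sketch of the expected payload (assumption: model fields as defined in
    # azure.mgmt.network.v2019_12_01.models; the resource IDs are placeholders):
    #
    #   flow_log = FlowLogInformation(
    #       target_resource_id=nsg_id,      # NSG whose flow logging is configured
    #       storage_id=storage_account_id,  # storage account receiving the logs
    #       enabled=True,
    #   )
    #   poller = client.network_watchers.begin_set_flow_log_configuration(
    #       "my-rg", "my-watcher", flow_log)
    #   info = poller.result()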
def _get_flow_log_status_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.FlowLogStatusParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.FlowLogInformation"
cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowLogInformation"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._get_flow_log_status_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'FlowLogStatusParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('FlowLogInformation', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('FlowLogInformation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_flow_log_status_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/queryFlowLogStatus'} # type: ignore
def begin_get_flow_log_status(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.FlowLogStatusParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.FlowLogInformation"]
"""Queries status of flow log and traffic analytics (optional) on a specified resource.
:param resource_group_name: The name of the network watcher resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher resource.
:type network_watcher_name: str
:param parameters: Parameters that define a resource to query flow log and traffic analytics
(optional) status.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.FlowLogStatusParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either FlowLogInformation or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_12_01.models.FlowLogInformation]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowLogInformation"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_flow_log_status_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('FlowLogInformation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_flow_log_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/queryFlowLogStatus'} # type: ignore
def _check_connectivity_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.ConnectivityParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.ConnectivityInformation"
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectivityInformation"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._check_connectivity_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'ConnectivityParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ConnectivityInformation', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('ConnectivityInformation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_check_connectivity_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectivityCheck'} # type: ignore
def begin_check_connectivity(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.ConnectivityParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.ConnectivityInformation"]
"""Verifies the possibility of establishing a direct TCP connection from a virtual machine to a
given endpoint including another VM or an arbitrary remote server.
:param resource_group_name: The name of the network watcher resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher resource.
:type network_watcher_name: str
:param parameters: Parameters that determine how the connectivity check will be performed.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.ConnectivityParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ConnectivityInformation or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_12_01.models.ConnectivityInformation]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectivityInformation"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._check_connectivity_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ConnectivityInformation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_check_connectivity.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectivityCheck'} # type: ignore
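    # Sketch of a connectivity-check payload (assumption: model shapes from
    # azure.mgmt.network.v2019_12_01.models; IDs and addresses are placeholders):
    #
    #   params = ConnectivityParameters(
    #       source=ConnectivitySource(resource_id=vm_id),
    #       destination=ConnectivityDestination(address="example.com", port=443),
    #   )
    #   result = client.network_watchers.begin_check_connectivity(
    #       "my-rg", "my-watcher", params).result()
    #   print(result.connection_status)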
def _get_azure_reachability_report_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.AzureReachabilityReportParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.AzureReachabilityReport"
cls = kwargs.pop('cls', None) # type: ClsType["_models.AzureReachabilityReport"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._get_azure_reachability_report_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'AzureReachabilityReportParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('AzureReachabilityReport', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('AzureReachabilityReport', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_azure_reachability_report_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/azureReachabilityReport'} # type: ignore
def begin_get_azure_reachability_report(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.AzureReachabilityReportParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.AzureReachabilityReport"]
"""NOTE: This feature is currently in preview and still being tested for stability. Gets the
relative latency score for internet service providers from a specified location to Azure
regions.
:param resource_group_name: The name of the network watcher resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher resource.
:type network_watcher_name: str
:param parameters: Parameters that determine Azure reachability report configuration.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.AzureReachabilityReportParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either AzureReachabilityReport or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_12_01.models.AzureReachabilityReport]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.AzureReachabilityReport"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_azure_reachability_report_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('AzureReachabilityReport', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_azure_reachability_report.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/azureReachabilityReport'} # type: ignore
def _list_available_providers_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.AvailableProvidersListParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.AvailableProvidersList"
cls = kwargs.pop('cls', None) # type: ClsType["_models.AvailableProvidersList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._list_available_providers_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'AvailableProvidersListParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('AvailableProvidersList', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('AvailableProvidersList', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_list_available_providers_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/availableProvidersList'} # type: ignore
def begin_list_available_providers(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.AvailableProvidersListParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.AvailableProvidersList"]
"""NOTE: This feature is currently in preview and still being tested for stability. Lists all
available internet service providers for a specified Azure region.
:param resource_group_name: The name of the network watcher resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher resource.
:type network_watcher_name: str
:param parameters: Parameters that scope the list of available providers.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.AvailableProvidersListParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either AvailableProvidersList or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_12_01.models.AvailableProvidersList]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.AvailableProvidersList"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._list_available_providers_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('AvailableProvidersList', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_available_providers.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/availableProvidersList'} # type: ignore
def _get_network_configuration_diagnostic_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.NetworkConfigurationDiagnosticParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.NetworkConfigurationDiagnosticResponse"
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkConfigurationDiagnosticResponse"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._get_network_configuration_diagnostic_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'NetworkConfigurationDiagnosticParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('NetworkConfigurationDiagnosticResponse', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('NetworkConfigurationDiagnosticResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_network_configuration_diagnostic_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/networkConfigurationDiagnostic'} # type: ignore
def begin_get_network_configuration_diagnostic(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.NetworkConfigurationDiagnosticParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.NetworkConfigurationDiagnosticResponse"]
"""Gets Network Configuration Diagnostic data to help customers understand and debug network
behavior. It provides detailed information on what security rules were applied to a specified
traffic flow and the result of evaluating these rules. Customers must provide details of a flow
like source, destination, protocol, etc. The API returns whether traffic was allowed or denied,
the rules evaluated for the specified flow and the evaluation results.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param parameters: Parameters to get network configuration diagnostic.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.NetworkConfigurationDiagnosticParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either NetworkConfigurationDiagnosticResponse or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_12_01.models.NetworkConfigurationDiagnosticResponse]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkConfigurationDiagnosticResponse"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_network_configuration_diagnostic_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('NetworkConfigurationDiagnosticResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_network_configuration_diagnostic.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/networkConfigurationDiagnostic'} # type: ignore
# ===========================================================================
# End of file (authors: [email protected])
# Next file: /crm/views.py — repo deepdik/toorishshop
# (Python, UTF-8, no license, 64,401 bytes)
# ===========================================================================
from __future__ import print_function
from django.shortcuts import render
from django.views.generic import TemplateView
from django.http import HttpResponseRedirect, HttpResponse, Http404
from googleapiclient.discovery import build
from httplib2 import Http
from oauth2client import file, client, tools
from datetime import datetime
from werkzeug.utils import secure_filename
from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST
from bunch import bunchify
import pyexcel
from django.db.models import Q
import os.path
import pprint
from .authtoken import *
from django.core.paginator import Paginator
import pandas as pd
import xlrd
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.generics import (
    CreateAPIView,
    ListAPIView,
    RetrieveAPIView,
)
from .models import *
from .serializers import *
from rest_framework import status
from django_filters import rest_framework as filters
import django_filters
from django_filters.rest_framework import DjangoFilterBackend
import psycopg2
import json
from psycopg2.extras import RealDictCursor
con = psycopg2.connect(dbname='touristshop', user='postgres', host='localhost', password='Bismillah@123')
#con = psycopg2.connect(dbname='LeadPolice', user='postgres', host='50.63.167.106', password='Modinagar@7')
token = 'zzzzz'
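# NOTE (assumption): the bare `open()` calls in the views below are presumed to
# be a cursor helper pulled in via `from .authtoken import *`, roughly:
#     def open():
#         return con.cursor(cursor_factory=RealDictCursor)
# Several views also call con.close() on this module-level connection, which
# would make later queries in the same process fail until it is reopened.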
class SupplierListAPIView(APIView):
def get(self, request, format=None):
agentid = request.GET.get('agentid')
supplier = Supplier.objects.filter(agentid=agentid)
serializer = SupplierListSerializer(supplier,many=True)
return Response(serializer.data)
def post(self, request, format=None):
serializer = SupplierAddSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(request.data, status=HTTP_200_OK)
return Response(serializer.errors, status=HTTP_400_BAD_REQUEST)
class AgentleadsAPIView(APIView):
def get(self, request, format=None):
statusid = request.GET.get('statusid')
agentid = request.GET.get('agentid')
leads = AssignedAgentLead.objects.filter(agentid=agentid,AgentLeadStatus = statusid)
serializer = AgentLeadsByStatusListSerializer(leads,many=True)
return Response(serializer.data)
class PackageFilter(django_filters.FilterSet):
price_per_persion = django_filters.RangeFilter(field_name='offeredpriceperperson')
package_days = django_filters.RangeFilter(field_name='packagedays')
# hotel__stars = django_filters.NumberFilter(field_name=hotel_package)
    class Meta:
        model = Package
        fields = ['category', 'package_days', 'price_per_persion',
                  'destination__destination_type']
class PackageListFilterAPIView(ListAPIView):
queryset = Package.objects.all()
serializer_class = PackageListSerializer
filter_backends = (DjangoFilterBackend,)
filterset_class = PackageFilter
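# Hypothetical request for the filter above. django-filter's RangeFilter
# typically exposes `<name>_min` / `<name>_max` query params (exact names
# depend on the installed django-filter version):
#   GET /packages/?price_per_persion_min=100&price_per_persion_max=500&package_days_min=3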
class PackageListAPIView(APIView):
def get(self, request, format=None):
agentid = request.GET.get('agentid')
package = Package.objects.filter(agentid=agentid)
serializer = PackageListSerializer(package,many=True)
return Response(serializer.data)
class PackageDetailAPIView(RetrieveAPIView):
queryset = Package.objects.all()
serializer_class = PackageDetailSerializer
lookup_field = "id"
class PackageAddAPIView(APIView):
def post(self, request, format=None):
serializer = PackageAddSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
serialized_data = serializer.validated_data
return Response(request.data, status=HTTP_200_OK)
return Response(serializer.errors, status=HTTP_400_BAD_REQUEST)
class QuotationAddAPIView(APIView):
def get(self, request, format=None):
leadid = request.GET.get('leadid')
agentid = request.GET.get('agentid')
if leadid and agentid:
quotobj = Quotation.objects.filter(leadid=leadid, agentid=agentid)
serializer = QuotationListSerializer(quotobj,many=True)
return Response(serializer.data,status=HTTP_200_OK)
if leadid:
QuerySet = Quotation.objects.filter(leadid=leadid)
serializer = QuotationListSerializer(QuerySet,many=True)
return Response(serializer.data)
return Response('pls provide leadid', status=status.HTTP_400_BAD_REQUEST)
def post(self, request, format=None):
serializer = QuotationAddSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
serialized_data = serializer.validated_data
leadid = serialized_data.get('leadid')
agentid = serialized_data.get('agentid')
obj = AskQuotation.objects.get(leadid=leadid,agentid=agentid)
obj.isquotsent = True
obj.save()
return Response(request.data, status=HTTP_200_OK)
return Response(serializer.errors, status=HTTP_400_BAD_REQUEST)
def put(self, request, format=None):
quotid = request.GET.get('id')
if quotid:
Quotation.objects.get(id=quotid).delete()
serializer = QuotationAddSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(request.data, status=HTTP_200_OK)
return Response(serializer.errors, status=HTTP_400_BAD_REQUEST)
        return Response('pls provide id', status=HTTP_400_BAD_REQUEST)
def delete(self, request, format=None):
quotid = request.GET.get('id')
if quotid:
obj = Quotation.objects.get(id=quotid)
leadid = obj.leadid
agentid = obj.agentid
obj.delete()
askquotobj = AskQuotation.objects.get(leadid=leadid,agentid=agentid)
askquotobj.isquotsent = False
askquotobj.save()
return Response(status=HTTP_200_OK)
        return Response('pls provide id', status=HTTP_400_BAD_REQUEST)
class DocumentsList(APIView):
def get(self,request):
records = Document.objects.all()
serializer = UploadDocSerializer(records,many=True)
return Response(serializer.data)
class Documents_of_project(APIView):
def get(self,request, project_id):
records = Document.objects.filter(projectid=project_id)
serializer = UploadDocSerializer(records,many=True)
return Response(serializer.data)
class Cretae_document(APIView):
def post(self,request):
serializer = UploadDocSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class Get_project_document(APIView):
def get(self,request,project_id):
record = Document.objects.filter(projectid=project_id)
serializer = UploadDocSerializer(record,many=True)
return Response(serializer.data)
class Profile_pics(APIView):
def post(self,request):
user_name = request.GET.get('username')
image = request.FILES['image']
Profilepics.objects.filter(username=user_name).update(pics=image)
# serializer = UploadProfilepics(data=request.data)
# if serializer.is_valid():
# serializer.save()
# return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response('success')
def get(self,request):
user_name= request.GET.get('username')
print('abc',user_name)
record = Profilepics.objects.get(username=user_name)
serializer = UploadProfilepics(record)
return Response(serializer.data)
class Document_with_projectid_documentid(APIView):
def get(self,request, project_id,document_id):
records = Document.objects.filter(projectid= project_id,id=document_id)
serializer = DocumentsSerializer(records,many=True)
return Response(serializer.data)
#@decorator
@api_view(['get'])
def getcmp(request):
#perm=Auth(request)
param = True
    if param:
        records = Company.objects.all()
        serializer = CompanySerializer(records, many=True)
        return Response(serializer.data)
    else:
        return Response("Wrong Token", status=status.HTTP_401_UNAUTHORIZED)
@api_view(['get'])
def getcmpWithPaging(request):
user_name = request.GET.get('username')
company_id = request.GET.get('companyId')
page_number = request.GET.get('pageNumber')
status_id = request.GET.get('statusId')
project_id = request.GET.get('projectId')
assignedTo = request.GET.get('assignedTo')
lead_name = request.GET.get('leadName')
lead_number = request.GET.get('leadNumber')
date_from = request.GET.get('DateFrom')
date_to = request.GET.get('DateTo')
'''
filter.companyid, filter.pageSize,
filter.pageNumber, filter.statusID,filter.projectId,filter.assignedTo,filter.leadName,
filter.leadNumber,filter.DateFrom,filter.DateTo
'''
param = True
    if param:
        records = Company.objects.all()
        serializer = CompanySerializer(records, many=True)
        return Response(serializer.data)
    else:
        return Response("Wrong Token", status=status.HTTP_401_UNAUTHORIZED)
@api_view(['post'])
def postcmp(request):
serializer = CompanySerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class CompanyList(APIView):
def get(self,request):
records = Company.objects.all()
serializer = CompanySerializer(records,many=True)
#my_header(request.META['HTTP_TOKEN'])
return Response(serializer.data)
def post(self, request):
serializer = CompanySerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class AgentList(APIView):
def get(self,request):
companyid= request.GET.get('companyid')
records = Agent.objects.filter(allocatedcid=companyid)
serializer = AgentsSerializer(records,many=True)
#my_header(request.META['HTTP_TOKEN'])
return Response(serializer.data)
def post(self, request):
serializer = AgentsSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data['agentid'], status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class AskQuotationAgent(APIView):
def get(self,request):
agent_id = request.GET.get('agentid')
user_name = request.GET.get('username')
if(agent_id):
records = AskQuotation.objects.filter(agentid=agent_id)
else:
agentid = Agent.objects.get(username = user_name).agentid
records = AskQuotation.objects.filter(agentid=agentid)
serializer = AskQuotationSerializer(records,many=True)
#my_header(request.META['HTTP_TOKEN'])
return Response(serializer.data)
def post(self, request):
print(request.data)
serializer = AskQuotationSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data['quotationid'], status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class Company_company_id_get_update_delete(APIView):
def get(self,request,company_id, Format=None):
records = Company.objects.get(pk=company_id)
serializer = CompanySerializer(records)
return Response(serializer.data)
def put(self,request,company_id, Format=None):
records = Company.objects.get(pk=company_id)
serializer = CompanySerializer(records,data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request, company_id, format=None):
user = Company.objects.get(pk=company_id)
user.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
class AspnetusersList(APIView):
def get(self,request):
documents = Aspnetusers.objects.all()
serializer = AspnetusersSerializer(documents,many=True)
return Response(serializer.data)
    def post(self, request):
data = request.data
print('create user',data)
now = datetime.now()
format_iso_now = now.isoformat()
newdate = now + timedelta(days=365)
data['createddatetime'] = format_iso_now
data['lockoutenddateutc']= newdate.isoformat()
data['twofactorenabled']= "1"
data['lockoutenabled']= "1"
data['accessfailedcount'] =1
data['securitystamp'] = "victor"
data['emailconfirmed'] = "1"
data['phonenumberconfirmed'] = "1"
data['token'] = "asd"
#print(data['lockoutenddateutc'])
serializer = AspnetusersSerializer(data = data)
#print(request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class Aspnetusers_update_user(APIView):
def put(self, request):
data = request.data
user = Aspnetusers.objects.get(pk=data['id'])
print(user.username)
serializer = AspnetusersSerializer(user, data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class Aspnetusers_get_update_delete(APIView):
def get(self, request, user_id, format=None):
user = Aspnetusers.objects.get(pk=user_id)
serializer = AspnetusersSerializer(user)
return Response(serializer.data)
def put(self, request, user_id, format=None):
user = Aspnetusers.objects.get(pk=user_id)
print(request.data)
serializer = AspnetusersSerializer(user, data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request, user_id, format=None):
user = Aspnetusers.objects.get(pk=user_id)
user.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
class Aspnetusers_of_company(APIView):
def get(self,request):
username = request.GET.get('username')
company_id = Aspnetusers.objects.get(username=username).companyid
cursor = open()
join_query = "Select * from AspNetUsers u join AspNetRoles r on CAST(u.roleid AS INTEGER) = r.id " \
"where u.companyid =" + str(company_id)
cursor.execute(join_query)
records = cursor.fetchall()
dump_records = json.dumps(records, sort_keys=True, default=str)
loaded_records = json.loads(dump_records)
return Response(loaded_records)
# documents = Aspnetusers.objects.filter(companyid=company_id)
# serializer = AspnetusersSerializer(documents,many=True)
# return Response(serializer.data)
class AspnetusersWrole(APIView):
def get(self,request):
documents = Aspnetusers.objects.all()
serializer = AspnetusersWrolesSerializer(documents,many=True)
return Response(serializer.data)
class AspnetusersWithrole(APIView):
def get(self,request):
my_dict = dict(request.GET)
for key in my_dict:
if str(key.lower()) == 'username':
username = my_dict[key][0]
#username = request.GET.get('username')
company_id = Aspnetusers.objects.get(username=username).companyid
documents = Aspnetusers.objects.filter(companyid=company_id)
serializer = AspnetusersWithrolesSerializer(documents,many=True)
r = json.dumps(serializer.data)
loaded_r = json.loads(r)
new_res = list()
for info in loaded_r:
mydict = info
mydict1=mydict['role'][0]
mydict['role']=mydict1
new_res.append(mydict)
#pprint.pprint(new_res, width=4)
#print(loaded_r[0]['role'][0])
return Response(new_res)
class users_by_projectid(APIView):
def get(self,request,project_id):
users = Aspnetusers.objects.filter(projectid=project_id)
serializer = AspnetusersSerializer(users, many=True)
return Response(serializer.data)
class Aspnetusers_of_username(APIView):
def get(self,request,user_name):
documents = Aspnetusers.objects.filter(username=user_name)
serializer = AspnetusersSerializer(documents,many=True)
return Response(serializer.data)
class Reporting(APIView):
def get(self, request, project_id, user_id):
cursor = open()
query = "Select y.* from (Select Row_Number() over (order by l.leadid desc) as RowNumber, u.Id as " \
"Id,(u.FirstName || ' ' || u.LastName) as username,li.BuilderInterest,li.cmpctlabel," \
"l.companyid,l.CreateDateTimeOffset,l.createuserid,l.EditDateTimeOffset,l.EditUser_ID," \
"l.Email,l.leadid,l.name,l.phonenumber,li.ProjName,li.QueryRemarks,li.RangeFrom," \
"li.RangeTo,li.receivedon,l.Status,li.StatusDate,li.statusid,li.TypeOfProperty,li.assignedto," \
"li.LeadItemId from Leads l join LeadItems li on li.leadid = l.leadid join AspNetUsers u " \
"on u.Id::varchar = li.assignedto join Project p on u.projectid = p.projectid " \
"join company c on p.projectid = c.companyid where p.projectid =COALESCE({}) and " \
"u.id = COALESCE({}))as y".format(project_id, user_id)
print(query)
cursor.execute(query)
records = cursor.fetchall()
print(records)
r = json.dumps(records, indent=4, sort_keys=True, default=str)
loaded_r = json.loads(r)
print(loaded_r)
con.close()
return Response(loaded_r)
class AspnetrolesList(APIView):
def get(self,request):
documents = Aspnetroles.objects.all()
serializer = AspnetrolesSerializer(documents,many=True)
return Response(serializer.data)
def post(self, request, Format=None):
serializer = AspnetrolesSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class ProjectList(APIView):
def get(self, request, user_name):
print(user_name)
records = Project.objects.all()
serializer = ProjectSerializer(records,many=True)
return Response(serializer.data)
def post(self, request, format=None):
serializer = ProjectSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class Project_project_id_get_update_delete(APIView):
def get(self,request,project_id,Format=None):
records = Project.objects.get(pk=project_id)
serializer = ProjectSerializer(records)
return Response(serializer.data)
def put(self,request,project_id, Format=None):
record = Project.objects.get(pk=project_id)
serializer = ProjectSerializer(record,data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request, project_id, format=None):
record = Project.objects.get(pk=project_id)
record.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
class Projects_of_company(APIView):
def get(self,request):
my_dict = dict(request.GET)
for key in my_dict:
if str(key.lower()) == 'username':
username = my_dict[key][0]
#username = request.GET.get('username')
company_id = Aspnetusers.objects.get(username=username).companyid
records = Project.objects.filter(companyid=company_id)
serializer = ProjectSerializer(records,many=True)
return Response(serializer.data)
class Projects_of_company_ById(APIView):
def get(self,request):
company_id = request.GET.get('companyid')
records = Project.objects.filter(companyid=company_id)
serializer = ProjectSerializer(records, many=True)
return Response(serializer.data)
class Projects_Create(APIView):
def post(self,request):
print(request.data)
serializer = ProjectSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class UsersJoinList(APIView):
def get(self,request):
cursor = open()
query = "SELECT documents.projectid FROM documents INNER JOIN project on documents.projectid = project.projectid"
cursor.execute(query)
records = cursor.fetchall()
r = json.dumps(records)
loaded_r = json.loads(r)
return Response(loaded_r)
class Aspnetusers_join_aspnetroles(APIView):
def get(self,request, user_name):
cursor = open()
join_query = "select r.name, U.* from (select * from aspnetusers where companyid in " \
"(select companyid from aspnetusers where username=" + "'" + user_name + "'" + "limit 1)) as U " \
"join aspnetroles r on CAST(U.roleid AS INTEGER)=r.id;"
cursor.execute(join_query)
records = cursor.fetchall()
dump_records = json.dumps(records,sort_keys=True, default=str)
loaded_records = json.loads(dump_records)
return Response(loaded_records)
class function_of_postgres(APIView):
def get(self,request):
cursor = open()
function_query = "select * from dataget1()"
cursor.execute(function_query)
records = cursor.fetchall()
dump_records = json.dumps(records,sort_keys=True, default=str)
loaded_records = json.loads(dump_records)
return Response(loaded_records)
class complex_join_of_postgres(APIView):
def get(self,request,company_id,status_id):
cursor = open()
function_query = "Select u.Id as Id,(u.FirstName || ' ' || u.LastName) as assignedto," \
"li.BuilderInterest,li.cmpctlabel,l.companyid, l.CreateDateTimeOffset," \
"l.createuserid,l.EditDateTimeOffset,l.EditUser_ID,l.Email,l.leadid," \
"l.name,l.phonenumber, li.ProjName,li.QueryRemarks,li.RangeFrom,li.RangeTo," \
"li.receivedon,l.Status, li.StatusDate,li.statusid,li.TypeOfProperty," \
"li.LeadItemId from Leads l join LeadItems li on li.leadid = l.leadid" \
" join AspNetUsers u on u.Id = li.assignedto" \
" where li.statusid =" + str(status_id) + " and u.companyId =" + str(company_id)
#print(function_query)
cursor.execute(function_query)
records = cursor.fetchall()
dump_records = json.dumps(records,sort_keys=True, default=str)
loaded_records = json.loads(dump_records)
return Response(loaded_records)
class LeadsListAll(APIView):
def get(self,request):
documents = Leads.objects.all()
serializer = LeadsSerializer(documents,many=True)
return Response(serializer.data)
def post(self, request, format=None):
serializer = LeadsSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class LeadsOneLeads(APIView):
def get(self,request,lead_id):
documents = Leads.objects.get(pk=lead_id)
serializer = LeadsSerializer(documents)
return Response(serializer.data)
class LeadsListAllCmp(APIView):
def get(self,request,company_id):
documents = Leads.objects.filter(companyid=company_id)
serializer = LeadsSerializer(documents,many=True)
return Response(serializer.data)
class OtherLeadsByUsernameStatusId(APIView):
def get(self,request):
username = request.GET.get('username')
status_id = request.GET.get('statusId')
#company_id = Aspnetusers.objects.get(username=username).companyid
cursor = open()
queryset = "Select y.* from (Select Row_Number() over (order by l.leadid desc) " \
"as RowNumber, u.Id as Id,(u.FirstName || ' ' || u.LastName) as username,li.BuilderInterest," \
"li.cmpctlabel,l.companyid,l.CreateDateTimeOffset,l.createuserid, l.EditDateTimeOffset,l.EditUser_ID," \
"l.Email,l.leadid,l.name,l.phonenumber,li.ProjName,li.QueryRemarks,li.RangeFrom,li.RangeTo," \
"li.receivedon,l.Status,li.StatusDate,li.statusid,li.TypeOfProperty,li.assignedto,li.LeadItemId" \
" from Leads l join LeadItems li on li.leadid = l.leadid join AspNetUsers u on u.Id::varchar = li.assignedto" \
" where u.UserName = '{}' and li.Statusid= {})as y".format(username,status_id)
cursor.execute(queryset)
records = cursor.fetchall()
r = json.dumps(records, indent=4, sort_keys=True, default=str)
loaded_r = json.loads(r)
result = get_other_leads(loaded_r)
return Response(result)
class OtherLeadsByUsername(APIView):
def get(self,request):
user_name = request.GET.get('userName')
status_id = request.GET.get('statusID')
project_id = request.GET.get('projectid')
lead_name = request.GET.get('leadname')
lead_id = request.GET.get('leadid')
date_from = request.GET.get('datefrom')
date_to = request.GET.get('dateto')
#company_id = Aspnetusers.objects.get(username=username).companyid
if ((date_from == '' and date_to !='') or (date_from!='' and date_to=='')):
errors = 'Must give both date'
print('error', errors)
return Response(errors, status=status.HTTP_400_BAD_REQUEST)
cursor = open()
additional_query= ""
if (user_name != None and status_id != None):
additional_query = " u.username=\'" + user_name + "\' and li.statusid= " + status_id
if (project_id != None and project_id != ''):
print('projectid', project_id)
additional_query = additional_query + " and u.projectid=" + project_id
if (lead_name != None and lead_name != ''):
additional_query = additional_query + " and l.name=\'" + lead_name + "\'"
if (lead_id != None and lead_id != ''):
additional_query = additional_query + " and l.leadid=" + lead_id
if (date_from != None and date_from != '' and date_to != None and date_to != ''):
additional_query = additional_query + " and li.statusdate > \'"\
+ date_from + "\' and li.statusdate < \'" + date_to + "\'"
print('xyz',additional_query)
queryset = "Select y.* from (Select Row_Number() over (order by l.leadid desc) " \
"as RowNumber, u.userid as id,(u.firstname || ' ' || u.lastname) as username,li.builderinterest," \
"li.cmpctlabel,l.companyid,l.createdatetimeoffset,l.createuserid, l.editdatetimeoffset,l.edituserid," \
"l.email,l.leadid,l.name,l.phonenumber,li.projname,li.queryremarks,li.rangefrom,li.rangeto," \
"li.receivedon,l.statusid,li.statusdate,li.statusid,li.typeofproperty,li.assignedto,li.leaditemid" \
" from Leads l join LeadItems li on li.leadid = l.leadid join Aspnetusers u on" \
" u.userid::varchar = li.assignedto" \
" where " + additional_query + " )as y"
# queryset = "select * from aspnetusers"
cursor.execute(queryset)
records = cursor.fetchall()
r = json.dumps(records, indent=4, sort_keys=True, default=str)
loaded_r = json.loads(r)
pprint.pprint(loaded_r, width=8)
#print('current leads',loaded_r)
result = get_other_leads(loaded_r)
return Response(result)
class OtherLeadsByCompanyId(APIView):
def get(self,request):
my_dict = dict(request.GET)
for key in my_dict:
if str(key.lower()) == 'companyid':
company_id = my_dict[key][0]
else:
status_id = my_dict[key][0]
cursor = open()
queryset = "Select u.userid as id,(u.firstname || ' ' || u.lastname) as assignedto,li.builderinterest,li.cmpctlabel," \
"l.companyid," \
"l.createdatetimeoffset,l.createuserid,l.editdatetimeoffset,l.edituserid," \
"l.email,l.leadid,l.name,l.phonenumber,li.projname,li.queryremarks,li.rangefrom," \
"li.rangeto,li.receivedon,l.statusid,li.statusdate,li.statusid,li.typeofproperty,li.leaditemid" \
" from leads l join leaditems li on li.leadid = l.leadid join aspnetusers u on " \
"u.userid::varchar = li.assignedto " \
"where li.statusid = {} and u.companyid ={} ".format(status_id,company_id)
cursor.execute(queryset)
records = cursor.fetchall()
r = json.dumps(records, indent=4, sort_keys=True, default=str)
loaded_r = json.loads(r)
result = get_other_leads(loaded_r)
return Response(result)
class OtherLeadsC(APIView):
def get(self,request):
company_id = request.GET.get('companyid')
result = other_leads_company_id(company_id)
return Response(result)
class Add_token(APIView):
def put(self,request,user_name,token):
#data = request.data['token']
new_token = token.replace("COLON",":")
Aspnetusers.objects.filter(username = user_name).update(token=new_token)
return Response('success')
class getattendance(APIView):
def get(self,request,date,username):
print(date)
cursor = open()
query = "Select (u.FirstName || '' || u.LastName) as name,a.DistanceIn,a.Attendence," \
"a.DateIn,a.DateOut,a.Date,a.DistanceOut,a.AttendanceId from Attendance a join AspNetUsers u" \
" on a.userid = u.id where a.Date = '{}' and u.UserName ='{}'".format(date,username)
print(query)
cursor.execute(query)
records = cursor.fetchall()
r = json.dumps(records, indent=4, sort_keys=True, default=str)
loaded_r = json.loads(r)
con.close()
return Response(loaded_r)
class Leadsitemss(APIView):
def get(self,request):
user_name = request.GET.get('userName')
print(user_name)
pagesize = request.GET.get('pageSize')
pagenumber = request.GET.get('pageNumber')
company_id = Aspnetusers.objects.get(username= user_name).companyid
status_id = 16
documents = Leads.objects.filter(companyid=company_id, statusid=status_id)
#serializer = Leads_with_itemsSerializer(documents,many=True)
# r = json.dumps(serializer.data)
# loaded_r = json.loads(r)
serializers = LeadsSerializer(documents, many=True)
leads_json = json.dumps(serializers.data)
final_json_leads= json.loads(leads_json)
user_documents = Aspnetusers.objects.filter(Q(roleid='3') | Q(roleid='4'),companyid= company_id)
user_serializer = AspnetusersSerializer(user_documents, many=True)
user_json = json.dumps(user_serializer.data)
user_json_list = json.loads(user_json)
print('items user',len(user_json_list))
#user_len = len(user_json_list)
#new_res = list(user_len)
items = list()
leads_list =list()
for info in final_json_leads:
#info = dict()
info['nextlink'] = ''
info['assignees'] = ''
info['leadSource']= 'raw'
info['pageNumber'] = '0'
info['pageSize'] = '0'
info['totalCount'] = '0'
info['assignedUsers'] = []
items_list = list()
for new_user in user_json_list:
item= dict()
item['leaditemid']= 0
item['leadid']= 0
item['queryremarks']= "testdata"
item['typeofproperty']= 2
item['status']= 10
item['rangefrom']= 2
item['rangeto']= 3
item['cmpctlabel']= "test"
item['receivedon']= "2018-10-10T00:00:00Z"
item['projname']= "Himan"
item['assignedto']= new_user['userid']
item['builderinterest']= "1"
item['statusid']= 10
item['statusdate']= "2018-10-10T00:00:00Z"
item['companyid']= 1
item['isassigned'] = False
item['token'] = ''
item['username'] = new_user['firstname'] + new_user['lastname']
items_list.append(item)
info['items']= items_list
leads_list.append(info)
paginator = Paginator(leads_list, pagesize)
page = paginator.page(pagenumber)
g = page.object_list
return Response(g)
class LeadsList(APIView):
def get(self,request,company_id, page_num, status):
documents = Leads.objects.filter(companyid=company_id, status=status)
serializer = Leads_with_itemsSerializer(documents,many=True)
r = json.dumps(serializer.data)
loaded_r = json.loads(r)
new_res = list()
for info in loaded_r:
#mydict = info
#mydict1 = mydict['role'][0]
info['nextlink'] = 'vc.com/abv'
info['assignees'] = ''
info['leadSource']= 'raw'
info['pageNumber'] = '0'
info['pageSize'] = '0'
info['totalCount'] = '0'
info['assignedUsers'] = []
new_info_item = list()
for item in info['items']:
item['companyId'] = 0
item['isAssigned'] = False
item['token'] = ''
item['username'] = 'Alok Kumar'
new_info_item.append(item)
new_res.append(info)
paginator = Paginator(new_res, 10)
page = paginator.page(page_num)
g = page.object_list
return Response(g)
def post(self, request, format=None):
serializer = LeadsSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class LeadsWithStatus(APIView):
def get(self, request, company_id, status_id):
records = Leads.objects.filter(companyid=company_id, status = status_id)
serializer = LeadsWithItemsSerializer(records, many=True)
return Response(serializer.data)
class Leads_items_List(APIView):
def get(self,request):
documents = Leaditems.objects.all()
serializer = LeaditemsSerializer(documents,many=True)
return Response(serializer.data)
def post(self, request, format=None):
serializer = LeaditemsSerializer(data=request.data)
print(request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class Upload_excel_file(APIView):
def post(self,request):
#print(request.GET.get('username'))
f = request.FILES['file']
#my_dict = dict(request.GET)
username = request.GET.get('userName')
# for key in my_dict:
# if str(key.lower()) == 'username':
# username = my_dict[key][0]
company_id = Aspnetusers.objects.get(username=username).companyid
print(f.name)
myfile = pd.read_excel(f)
leads = myfile.to_json(orient='records')
leads= json.loads(leads)
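        # Assumption: the uploaded sheet provides at least a 'remarks' column,
        # which is copied into cmpctlabel below.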
for lead in leads:
lead['companyid'] = company_id
lead['cmpctlabel'] = lead['remarks']
lead['statusid'] = 16
lead['isassigned'] = False
#print(leads)
        serializer = LeadsExcelSerializer(data=leads, many=True)
        if serializer.is_valid():
            serializer.save()
            return Response('success')
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class Integration_of_company(APIView):
    def get(self, request, company_id):
        records = Integrations.objects.filter(companyid=company_id)
serializer = IntegrationsSerializer(records,many=True)
return Response(serializer.data)
class Integration_by_integrationid(APIView):
def get(self, request ,integration_id):
records = Integrations.objects.filter(id=integration_id)
serializer = IntegrationsSerializer(records,many=True)
return Response(serializer.data)
class Recordingsleadid(APIView):
def get(self, request ,lead_id):
records = Recordings.objects.filter(id=lead_id)
serializer = RecordingsSerializer(records,many=True)
return Response(serializer.data)
    def post(self, request):
        pass
class Recordingsusername(APIView):
def get(self, request ,lead_id):
records = Recordings.objects.filter(id=lead_id)
serializer = RecordingsSerializer(records,many=True)
return Response(serializer.data)
    def post(self, request):
        pass
class AddRecordings(APIView):
def get(self, request):
records = Recordings.objects.all()
serializer = RecordingsSerializer(records,many=True)
return Response(serializer.data)
    def post(self, request):
serializer = RecordingsSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
else:
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
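# NOTE: the `getattendance` class below redefines (and therefore shadows) the
# `getattendance` view declared earlier in this module; only this later
# definition is visible after import.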
class getattendance(APIView):
def get(self,request):
username = request.GET.get('username')
date = request.GET.get('attendanceDate')
cursor = open()
query = "Select (u.FirstName + '' + u.LastName) as name,a.DistanceIn,a.Attendence,a.DateIn," \
"a.DateOut,a.Date,a.DistanceOut,a.AttendanceId from Attendence a" \
" join AspNetUsers u on a.userid = u.UserName where a.Date =" +(date) +" and u.UserName ="+(username)
cursor.execute(query)
records = cursor.fetchall()
r = json.dumps(records, indent=4, sort_keys=True, default=str)
loaded_r = json.loads(r)
con.close()
return Response(loaded_r)
class Token(APIView):
def post(self, request):
global token
token=login(request)
return HttpResponse(json.dumps(token))
def check_token(func):
def inner(received_token):
global token
print("I am going to check token")
if received_token != token:
print("Whoops! Not Authorized")
return
return func(token)
return inner
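# Hypothetical usage sketch for check_token: the wrapped function runs only
# when the caller's token matches the current global token, e.g.
#   @check_token
#   def protected(received_token): ...
#   protected(token)   # calls the original function with the global token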
@api_view(['get'])
def login_with_google(request):
#print(request.META)
google_token = request.GET.get('token')
return Response(google_token)
@api_view(['post'])
def login(request):
#print(request.META)
if (request.META['CONTENT_TYPE'] == 'application/x-www-form-urlencoded' and request.data['grant_type']=='password'):
user_name = request.data['username']
#print(user_name)
try:
user = Aspnetusers.objects.get(username=user_name)
except:
user = None
if user:
if(user.passwordhash==request.data['password']):
response = dict()
response['companyid'] = user.companyid
response['roleid'] = user.roleid
response['rolename'] = Aspnetroles.objects.get(pk=response['roleid']).name
response['username'] = user.username
response['token_type'] = 'bearer'
response['access_token'] = create_token(user.username,user.companyid,user.roleid)
print(response['access_token'])
else:
user_name = request.data['username']
user = Agent.objects.get(username=user_name)
if (user.passwordhash == request.data['password']):
response = dict()
response['companyname'] = user.companyname
response['roleid'] = user.roleid
response['rolename'] = 'agent'
response['agentid'] = user.agentid
response['username'] = user.username
response['token_type'] = 'bearer'
response['access_token'] = create_token(user.username, user.companyname, user.roleid)
print(response['access_token'])
return Response(response)
class LeadstatusCountAPIView(APIView):
def get(self,request):
my_dict = dict(request.GET)
for key in my_dict:
if str(key.lower()) == 'username':
user_name = my_dict[key][0]
#username= request.GET.get('username')
company_id = Aspnetusers.objects.get(username=user_name).companyid
cursor = open()
query = "Select n.phonenumbercount, s.name,s.statusid from(Select COUNT(li.statusid) " \
"as phonenumbercount,li.statusid from Leads l join LeadItems li on li.leadid = l.leadid where " \
"l.companyid in ({}) Group by li.statusid) as n join LeadStatus s on n.statusid = s.statusid UNION Select" \
" count(leadid) as phonenumbercount,'Raw Leads' as name,{} as statusid from Leads where companyid = {} and" \
" isassigned ={}".format(company_id, 16, company_id, False)
#print(query)
cursor.execute(query)
records = cursor.fetchall()
r = json.dumps(records, indent=4, sort_keys=True, default=str)
loaded_r = json.loads(r)
        # Map each statusid in the query result onto its response key instead
        # of repeating a long if/elif chain (behaviour unchanged).
        status_keys = {
            1: 'currentleadscount', 2: 'noworkcount', 3: 'notconnectedcount',
            4: 'followupscount', 5: 'visitoncounts', 6: 'visitdonecount',
            7: 'visitdeadcount', 8: 'otherprojectscount', 9: 'resalecount',
            10: 'alreadybookedcount', 11: 'bookeddone', 12: 'deadcount',
            13: 'rentcount', 14: 'plotcount', 15: 'duplicatecount',
            16: 'rawleadscount',
        }
        response = {key: 0 for key in status_keys.values()}
        for row in loaded_r:
            key = status_keys.get(row['statusid'])
            if key:
                response[key] = row['phonenumbercount']
        con.close()
return Response(response)
class MobileLeadstatusCountAPIView(APIView):
def get(self,request):
username= request.GET.get('username')
cursor = open()
query = "Select n.phonenumbercount, s.name,s.statusid from (Select COUNT(DISTINCT l.leadid) " \
"as phonenumbercount,statusid from Leads l join LeadItems li on li.leadid = l.leadid join " \
"AspNetUsers u on u.Id::varchar = li.assignedto where u.UserName = '{}' Group by statusid) as" \
" n join LeadStatus s on n.statusid = s.statusid".format(username)
print(query)
cursor.execute(query)
records = cursor.fetchall()
r = json.dumps(records, indent=4, sort_keys=True, default=str)
loaded_r = json.loads(r)
        # Same statusid-to-key mapping as above, replacing the if/elif chain.
        status_keys = {
            1: 'currentleadscount', 2: 'noworkcount', 3: 'notconnectedcount',
            4: 'followupscount', 5: 'visitoncounts', 6: 'visitdonecount',
            7: 'visitdeadcount', 8: 'otherprojectscount', 9: 'resalecount',
            10: 'alreadybookedcount', 11: 'bookeddone', 12: 'deadcount',
            13: 'rentcount', 14: 'plotcount', 15: 'duplicatecount',
            16: 'rawleadscount',
        }
        response = {key: 0 for key in status_keys.values()}
        for row in loaded_r:
            key = status_keys.get(row['statusid'])
            if key:
                response[key] = row['phonenumbercount']
        con.close()
return Response(response)
@api_view(['get'])
def lead_status_count(request,company_id):
cursor = open()
query = "Select n.phonenumbercount, s.name,s.statusid from(Select COUNT(DISTINCT l.leadid) " \
"as phonenumbercount,statusid from Leads l join LeadItems li on li.leadid = l.leadid where " \
"l.companyid in ({}) Group by statusid) as n join LeadStatus s on n.statusid = s.statusid UNION Select" \
" count(leadid) as phonenumbercount,'Raw Leads' as name,{} as statusid from Leads where companyid = {} and" \
" isassigned ={}".format(company_id, 16, company_id, False)
print(query)
cursor.execute(query)
records = cursor.fetchall()
r = json.dumps(records, indent=4, sort_keys=True, default=str)
loaded_r = json.loads(r)
    # Same statusid-to-key mapping as in the class-based views above.
    status_keys = {
        1: 'currentleadscount', 2: 'noworkcount', 3: 'notconnectedcount',
        4: 'followupscount', 5: 'visitoncounts', 6: 'visitdonecount',
        7: 'visitdeadcount', 8: 'otherprojectscount', 9: 'resalecount',
        10: 'alreadybookedcount', 11: 'bookeddone', 12: 'deadcount',
        13: 'rentcount', 14: 'plotcount', 15: 'duplicatecount',
        16: 'rawleadscount',
    }
    response = {key: 0 for key in status_keys.values()}
    for row in loaded_r:
        key = status_keys.get(row['statusid'])
        if key:
            response[key] = row['phonenumbercount']
    con.close()
return Response(response)
@api_view(['get'])
def mobile_status_count(request, company_id):
response = 'mobile_status_count: ' + str(company_id)
return Response(response)
class Leads_update_delete(APIView):
def put(self, request, lead_id, format=None):
        leads_serializer = LeadsUpdataSerializer(data=request.data)
if leads_serializer.is_valid():
leads_serializer.save()
return Response(leads_serializer.data)
return Response(leads_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@api_view(['post'])
def create_lead(request, company_id):
response = 'new lead: ' + str(company_id)
return Response(response)
@api_view(['put'])
def Leads_update(request, lead_id):
#print('type',type(request.data))
leads = json.dumps(request.data)
leads_json = json.loads(leads)
record = Leads.objects.get(pk=lead_id)
lead_serializer = LeadsSerializer(record, data=leads_json)
print('received items',leads_json['items'])
print('items length',len(leads_json['items']))
lead_items = leads_json['items']
#item_serializer = LeaditemsSerializer(data=leads_json['items'])
token = ''
for item in lead_items:
print('leaditems',item)
if(item['leadid'] != 0):
token = item['token']
item_serializer = LeaditemsSerializer(data=item)
if item_serializer.is_valid():
item_serializer.save()
print('success item inserted')
print('type=',type(leads_json))
if lead_serializer.is_valid():
lead_serializer.save()
send_firebase_push_notification = firebase_push_notification(token)
return Response(lead_serializer.data)
return Response(lead_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class locationsbyuser(APIView):
def get(self,request):
username = request.GET.get('username')
cursor = open()
function_query = "Select * from Location where companyid in " \
"(Select companyid from AspNetUsers where username = '{}' LIMIT 1 )".format(username)
cursor.execute(function_query)
records = cursor.fetchall()
dump_records = json.dumps(records,sort_keys=True, default=str)
loaded_records = json.loads(dump_records)
print(loaded_records)
final_output = list()
for item in loaded_records:
newd = dict()
newd['locationid'] = item['locationid']
newd['title'] = item['username']
newd['lat'] = item['lattitude']
newd['lng'] = item['longitude']
newd['companyID'] = item['companyid']
newd['description'] = item['username']
final_output.append(newd)
return Response(final_output)
class CreateLocation(APIView):
def post(self,request):
serializer = LocationSerializer(data=request.data)
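        # NOTE: this serializer is instantiated but never validated or saved,
        # so this POST currently only echoes back the user's stored location.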
username = request.data['username']
cursor = open()
queryset = "Select * from Location l join AspNetUsers u on u.userid::varchar = l.userid " \
"where u.username = '{}' LIMIT(1)".format(username)
cursor.execute(queryset)
records = cursor.fetchall()
r = json.dumps(records, indent=4, sort_keys=True, default=str)
loaded_r = json.loads(r)
con.close()
return Response(loaded_r)
@api_view(['get'])
def gtest(request, username):
response = 'mobile_status_count: ' + username
return Response(response)
class LeadSms(APIView):
def post(self, request, *args, **kwargs):
cursor = open()
data = request.body
data = data.decode("utf-8")
# print('data received',data.decode("utf-8"))
print('received data = ', data)
import re
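        # extract the first 10-digit phone number, e.g. (123) 456-7890 or 123-456-7890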
r1 = re.findall(r"\(?\d{3}\)?-? *\d{3}-? *-?\d{4}", data)
print(r1)
split = [x.strip() for x in data.split(',')]
username = request.GET.get('UserName')
print(username)
portalType = request.GET.get('portalType')
cid = Aspnetusers.objects.get(username=username).companyid
UserID = Aspnetusers.objects.get(username=username).userid
name = split[0]
print(name)
phonenumber = r1[0]
print(phonenumber)
cmpctlabel = data
print('sdds', phonenumber, cmpctlabel, name)
if (phonenumber):
if (portalType.lower() == "mgcbrk"):
Status = 1
elif (portalType.lower() == "nnacre"):
Status = 2
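        # NOTE: for any portalType other than the two handled above, `Status`
        # stays undefined and the query below raises a NameError.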
query = "Insert into Leads(createuserid,CreateDateTimeOffset,EditUser_ID,EditDateTimeOffset," \
"name,email,phonenumber,isassigned,companyid,cmpctlabel,receivedon,status) Values ({}," \
"CURRENT_TIMESTAMP,null,null,'{}',null,{},'False',{},'{}'," \
"CURRENT_TIMESTAMP,{})".format(UserID, name, phonenumber, cid, cmpctlabel, Status)
try:
cursor.execute(query)
print("queryexecuted")
except Exception as e:
print("Sorry someerror " + str(e))
#con.commit()
print(query)
con.close()
return Response('success')
class apiLeadsTest(APIView):
def get(self,request):
username = request.GET.get('username')
statusId = request.GET.get('statusId')
projectId = request.GET.get('projectid')
leadName = request.GET.get('leadname')
leadId = request.GET.get('lead_id')
dateFrom = request.GET.get('datefrom')
dateTo = request.GET.get('dateto')
#company_id = Aspnetusers.objects.get(username=username).companyid
cursor = open()
additional_query= "myleads:"
if(username!=None and statusId!=None):
additional_query = " u.UserName=\'" + username + "\' and li.Statusid= " + statusId
if(projectId!=None and projectId!=''):
print('projectid',projectId)
additional_query = additional_query + " and u.projectid="+projectId
if(leadName != None and leadName!=''):
additional_query = additional_query + " and l.name=\'" + leadName + "\' "
if(leadId != None and leadId!=''):
additional_query = additional_query + " and l.lead_id=" + leadId
if(dateFrom != None and dateFrom != '' and dateTo != None and dateTo !='' and dateTo > dateFrom ):
additional_query = additional_query + " and li.StatusDate > \'" + dateFrom + "\' and li.statusDate < \'" + dateTo +"\'"
print('xyz',additional_query)
return Response(additional_query)
def send_email_client():
SCOPES = 'https://mail.google.com/'
store = file.Storage('token.json')
creds = store.get()
if not creds or creds.invalid:
flow = client.flow_from_clientsecrets('credentials.json', SCOPES)
creds = tools.run_flow(flow, store)
service = build('gmail', 'v1', http=creds.authorize(Http()))
# Call the Gmail API
results = service.users().labels().list(userId='me').execute()
labels = results.get('labels', [])
if not labels:
print('No labels found.')
else:
print('Labels:')
for label in labels:
print(label['name'])
return Response('success')
| authors: ["[email protected]"] | author_id: |

| blob_id: c7e18e72d352a0e8e4db91ed53dbd9f44471b7ba | directory_id: fb5dd7410679bd28299cfe3841de6fe826d978cb | path: /src/core/migrations/0005_auto_20201207_1836.py | content_id: 64b6f100ce9b6c92c5e82a0f07135be96127816d | detected_licenses: [] | license_type: no_license | repo_name: IvanYukish/finance-manager | snapshot_id: 35202fde63a7f519b52d8e09f3f64dd547cccbc5 | revision_id: 9147d09cff7543361f5ccefa79ec334a58efc9a1 | branch_name: refs/heads/master | visit_date: 2023-07-11T14:39:17.536557 | revision_date: 2021-08-04T23:05:45 | committer_date: 2021-08-04T23:05:45 | github_id: 317,544,811 | star_events_count: 1 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: 2021-08-23T17:18:10 | gha_created_at: 2020-12-01T13:09:50 | gha_language: CSS | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 428 | extension: py | content:
# Generated by Django 3.1.3 on 2020-12-07 18:36
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0004_auto_20201207_1825'),
]
operations = [
migrations.AlterField(
model_name='debts',
name='description',
field=models.CharField(db_index=True, max_length=500, verbose_name='Опис'),
),
]
| authors: ["[email protected]"] | author_id: |

| blob_id: 8eff9c7439dde637d31627de4f899aa0551c24f1 | directory_id: a97f789530412fc1cb83170a11811f294b139ee8 | path: /疯狂Python讲义/codes/19/19.2/barh_test.py | content_id: 6d599b9a156299c3f8e0b59efbad2a80af7a7fb8 | detected_licenses: [] | license_type: no_license | repo_name: baidongbin/python | snapshot_id: 3cebf2cc342a15b38bf20c23f941e6887dac187a | revision_id: 1c1398bff1f1820afdd8ddfa0c95ccebb4ee836f | branch_name: refs/heads/master | visit_date: 2021-07-21T19:23:32.860444 | revision_date: 2020-03-07T11:55:30 | committer_date: 2020-03-07T11:55:30 | github_id: 195,909,272 | star_events_count: 0 | fork_events_count: 1 | gha_license_id: null | gha_event_created_at: 2020-07-21T00:51:24 | gha_created_at: 2019-07-09T01:24:31 | gha_language: Python | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 1,476 | extension: py | content:
import matplotlib.pyplot as plt
import numpy as np
# render Chinese labels correctly
plt.rcParams['font.sans-serif'] = ['SimHei']
# render minus signs correctly
plt.rcParams['axes.unicode_minus'] = False
# build the data
x_data = ['2011', '2012', '2013', '2014', '2015', '2016', '2017']
y_data = [58000, 60200, 63000, 71000, 84000, 90500, 107000]
y_data2 = [52000, 54200, 51500, 58300, 56800, 59500, 62700]
bar_width = 0.3
# the Y-axis positions use range(len(x_data)), i.e. 0, 1, 2...
plt.barh(y=range(len(x_data)), width=y_data, label='疯狂Java讲义',
         color='steelblue', alpha=0.8, height=bar_width)
# the second series uses np.arange(len(x_data)) + bar_width,
# i.e. bar_width, 1 + bar_width, 2 + bar_width..., so its bars sit beside the first series
plt.barh(y=np.arange(len(x_data)) + bar_width, width=y_data2,
         label='疯狂 Android 讲义', color='indianred', alpha=0.8, height=bar_width)
# show the exact value on each bar; ha controls horizontal alignment, va vertical alignment
for y, x in enumerate(y_data):
    plt.text(x + 5000, y - bar_width / 2, '%s' % x, ha='center', va='bottom')
for y, x in enumerate(y_data2):
    plt.text(x + 5000, y + bar_width / 2, '%s' % x, ha='center', va='bottom')
# set the Y-axis tick labels
plt.yticks(np.arange(len(x_data)) + bar_width / 2, x_data)
# set the title
plt.title("Java 与 Android 图书对比")
# label both axes
plt.xlabel("销量")
plt.ylabel("年份")
# show the legend
plt.legend()
plt.show()
| authors: ["[email protected]"] | author_id: |

| blob_id: d2a5a496985f700bb4960a47b15877953707e73d | directory_id: 98d832289b7437247ce03ea54ad3cb7b95451159 | path: /test/test_created_reference_scan_id_link.py | content_id: 94b61d6a427e85f2c8130ff8f6f93a0e64dac28b | detected_licenses: ["MIT"] | license_type: permissive | repo_name: rmehilli-r7/vm-console-client-python | snapshot_id: 7f02f13345dce4f4d4d85e18da7146daeefbceb9 | revision_id: 069041c1c7b53c6b3d8bfdd81b974141bfca3c0c | branch_name: refs/heads/master | visit_date: 2020-03-23T11:20:33.364442 | revision_date: 2018-08-10T20:06:37 | committer_date: 2018-08-10T20:06:37 | github_id: 141,498,444 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: MIT | gha_event_created_at: 2018-08-08T19:58:45 | gha_created_at: 2018-07-18T23:00:41 | gha_language: Python | src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 48,984 | extension: py | content:
# coding: utf-8
"""
InsightVM API
# Overview This guide documents the InsightVM Application Programming Interface (API) Version 3. This API supports the Representation State Transfer (REST) design pattern. Unless noted otherwise this API accepts and produces the `application/json` media type. This API uses Hypermedia as the Engine of Application State (HATEOAS) and is hypermedia friendly. All API connections must be made to the security console using HTTPS. ## Versioning Versioning is specified in the URL and the base path of this API is: `https://<host>:<port>/api/3/`. ## Specification An <a target=\"_blank\" href=\"https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md\">OpenAPI v2</a> specification (also known as Swagger 2) of this API is available. Tools such as <a target=\"_blank\" href=\"https://github.com/swagger-api/swagger-codegen\">swagger-codegen</a> can be used to generate an API client in the language of your choosing using this specification document. <p class=\"openapi\">Download the specification: <a class=\"openapi-button\" target=\"_blank\" download=\"\" href=\"/api/3/json\"> Download </a></p> ## Authentication Authorization to the API uses HTTP Basic Authorization (see <a target=\"_blank\" href=\"https://www.ietf.org/rfc/rfc2617.txt\">RFC 2617</a> for more information). Requests must supply authorization credentials in the `Authorization` header using a Base64 encoded hash of `\"username:password\"`. <!-- ReDoc-Inject: <security-definitions> --> ### 2FA This API supports two-factor authentication (2FA) by supplying an authentication token in addition to the Basic Authorization. The token is specified using the `Token` request header. To leverage two-factor authentication, this must be enabled on the console and be configured for the account accessing the API. ## Resources ### Naming Resource names represent nouns and identify the entity being manipulated or accessed. All collection resources are pluralized to indicate to the client they are interacting with a collection of multiple resources of the same type. Singular resource names are used when there exists only one resource available to interact with. The following naming conventions are used by this API: | Type | Case | | --------------------------------------------- | ------------------------ | | Resource names | `lower_snake_case` | | Header, body, and query parameters parameters | `camelCase` | | JSON fields and property names | `camelCase` | #### Collections A collection resource is a parent resource for instance resources, but can itself be retrieved and operated on independently. Collection resources use a pluralized resource name. The resource path for collection resources follow the convention: ``` /api/3/{resource_name} ``` #### Instances An instance resource is a \"leaf\" level resource that may be retrieved, optionally nested within a collection resource. Instance resources are usually retrievable with opaque identifiers. The resource path for instance resources follows the convention: ``` /api/3/{resource_name}/{instance_id}... ``` ## Verbs The following HTTP operations are supported throughout this API. The general usage of the operation and both its failure and success status codes are outlined below. | Verb | Usage | Success | Failure | | --------- | ------------------------------------------------------------------------------------- | ----------- | -------------------------------------------------------------- | | `GET` | Used to retrieve a resource by identifier, or a collection of resources by type. 
| `200` | `400`, `401`, `402`, `404`, `405`, `408`, `410`, `415`, `500` | | `POST` | Creates a resource with an application-specified identifier. | `201` | `400`, `401`, `404`, `405`, `408`, `413`, `415`, `500` | | `POST` | Performs a request to queue an asynchronous job. | `202` | `400`, `401`, `405`, `408`, `410`, `413`, `415`, `500` | | `PUT` | Creates a resource with a client-specified identifier. | `200` | `400`, `401`, `403`, `405`, `408`, `410`, `413`, `415`, `500` | | `PUT` | Performs a full update of a resource with a specified identifier. | `201` | `400`, `401`, `403`, `405`, `408`, `410`, `413`, `415`, `500` | | `DELETE` | Deletes a resource by identifier or an entire collection of resources. | `204` | `400`, `401`, `405`, `408`, `410`, `413`, `415`, `500` | | `OPTIONS` | Requests what operations are available on a resource. | `200` | `401`, `404`, `405`, `408`, `500` | ### Common Operations #### OPTIONS All resources respond to the `OPTIONS` request, which allows discoverability of available operations that are supported. The `OPTIONS` response returns the acceptable HTTP operations on that resource within the `Allow` header. The response is always a `200 OK` status. ### Collection Resources Collection resources can support the `GET`, `POST`, `PUT`, and `DELETE` operations. #### GET The `GET` operation invoked on a collection resource indicates a request to retrieve all, or some, of the entities contained within the collection. This also includes the optional capability to filter or search resources during the request. The response from a collection listing is a paginated document. See [hypermedia links](#section/Overview/Paging) for more information. #### POST The `POST` is a non-idempotent operation that allows for the creation of a new resource when the resource identifier is not provided by the system during the creation operation (i.e. the Security Console generates the identifier). The content of the `POST` request is sent in the request body. The response to a successful `POST` request should be a `201 CREATED` with a valid `Location` header field set to the URI that can be used to access to the newly created resource. The `POST` to a collection resource can also be used to interact with asynchronous resources. In this situation, instead of a `201 CREATED` response, the `202 ACCEPTED` response indicates that processing of the request is not fully complete but has been accepted for future processing. This request will respond similarly with a `Location` header with link to the job-oriented asynchronous resource that was created and/or queued. #### PUT The `PUT` is an idempotent operation that either performs a create with user-supplied identity, or a full replace or update of a resource by a known identifier. The response to a `PUT` operation to create an entity is a `201 Created` with a valid `Location` header field set to the URI that can be used to access to the newly created resource. `PUT` on a collection resource replaces all values in the collection. The typical response to a `PUT` operation that updates an entity is hypermedia links, which may link to related resources caused by the side-effects of the changes performed. #### DELETE The `DELETE` is an idempotent operation that physically deletes a resource, or removes an association between resources. The typical response to a `DELETE` operation is hypermedia links, which may link to related resources caused by the side-effects of the changes performed. 
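The sketch below is illustrative only: it uses the third-party Python `requests` package with a placeholder host, placeholder credentials, and `assets` as an example collection name, and shows how Basic authentication, the optional two-factor `Token` header, and a collection `GET` fit together. ```python import requests

BASE = 'https://console.example.com:3780/api/3'  # placeholder host and port
AUTH = ('username', 'password')                  # becomes the Basic Authorization header

# List a collection resource; the response body is a paginated document.
response = requests.get(
    BASE + '/assets',                 # example collection name
    auth=AUTH,
    headers={'Token': '123456'},      # only needed when 2FA is enabled on the console
    params={'page': 0, 'size': 100},  # paging parameters; see the Paging sections
)
response.raise_for_status()
for resource in response.json()['resources']:
    # Every resource carries hypermedia links, including a 'self' relation.
    self_href = next(l['href'] for l in resource['links'] if l['rel'] == 'self')
    print(self_href)
```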
### Instance Resources Instance resources can support the `GET`, `PUT`, `POST`, `PATCH` and `DELETE` operations. #### GET Retrieves the details of a specific resource by its identifier. The details retrieved can be controlled through property selection and property views. The content of the resource is returned within the body of the response in the acceptable media type. #### PUT Allows for and idempotent \"full update\" (complete replacement) on a specific resource. If the resource does not exist, it will be created; if it does exist, it is completely overwritten. Any omitted properties in the request are assumed to be undefined/null. For \"partial updates\" use `POST` or `PATCH` instead. The content of the `PUT` request is sent in the request body. The identifier of the resource is specified within the URL (not the request body). The response to a successful `PUT` request is a `201 CREATED` to represent the created status, with a valid `Location` header field set to the URI that can be used to access to the newly created (or fully replaced) resource. #### POST Performs a non-idempotent creation of a new resource. The `POST` of an instance resource most commonly occurs with the use of nested resources (e.g. searching on a parent collection resource). The response to a `POST` of an instance resource is typically a `200 OK` if the resource is non-persistent, and a `201 CREATED` if there is a resource created/persisted as a result of the operation. This varies by endpoint. #### PATCH The `PATCH` operation is used to perform a partial update of a resource. `PATCH` is a non-idempotent operation that enforces an atomic mutation of a resource. Only the properties specified in the request are to be overwritten on the resource it is applied to. If a property is missing, it is assumed to not have changed. #### DELETE Permanently removes the individual resource from the system. If the resource is an association between resources, only the association is removed, not the resources themselves. A successful deletion of the resource should return `204 NO CONTENT` with no response body. This operation is not fully idempotent, as follow-up requests to delete a non-existent resource should return a `404 NOT FOUND`. ## Requests Unless otherwise indicated, the default request body media type is `application/json`. ### Headers Commonly used request headers include: | Header | Example | Purpose | | ------------------ | --------------------------------------------- | ---------------------------------------------------------------------------------------------- | | `Accept` | `application/json` | Defines what acceptable content types are allowed by the client. For all types, use `*/*`. | | `Accept-Encoding` | `deflate, gzip` | Allows for the encoding to be specified (such as gzip). | | `Accept-Language` | `en-US` | Indicates to the server the client's locale (defaults `en-US`). | | `Authorization ` | `Basic Base64(\"username:password\")` | Basic authentication | | `Token ` | `123456` | Two-factor authentication token (if enabled) | ### Dates & Times Dates and/or times are specified as strings in the ISO 8601 format(s). 
The following formats are supported as input: | Value | Format | Notes | | --------------------------- | ------------------------------------------------------ | ----------------------------------------------------- | | Date | YYYY-MM-DD | Defaults to 12 am UTC (if used for a date & time | | Date & time only | YYYY-MM-DD'T'hh:mm:ss[.nnn] | Defaults to UTC | | Date & time in UTC | YYYY-MM-DD'T'hh:mm:ss[.nnn]Z | | | Date & time w/ offset | YYYY-MM-DD'T'hh:mm:ss[.nnn][+|-]hh:mm | | | Date & time w/ zone-offset | YYYY-MM-DD'T'hh:mm:ss[.nnn][+|-]hh:mm[<zone-id>] | | ### Timezones Timezones are specified in the regional zone format, such as `\"America/Los_Angeles\"`, `\"Asia/Tokyo\"`, or `\"GMT\"`. ### Paging Pagination is supported on certain collection resources using a combination of two query parameters, `page` and `size`. As these are control parameters, they are prefixed with the underscore character. The page parameter dictates the zero-based index of the page to retrieve, and the `size` indicates the size of the page. For example, `/resources?page=2&size=10` will return page 3, with 10 records per page, giving results 21-30. The maximum page size for a request is 500. ### Sorting Sorting is supported on paginated resources with the `sort` query parameter(s). The sort query parameter(s) supports identifying a single or multi-property sort with a single or multi-direction output. The format of the parameter is: ``` sort=property[,ASC|DESC]... ``` Therefore, the request `/resources?sort=name,title,DESC` would return the results sorted by the name and title descending, in that order. The sort directions are either ascending `ASC` or descending `DESC`. With single-order sorting, all properties are sorted in the same direction. To sort the results with varying orders by property, multiple sort parameters are passed. For example, the request `/resources?sort=name,ASC&sort=title,DESC` would sort by name ascending and title descending, in that order. ## Responses The following response statuses may be returned by this API. | Status | Meaning | Usage | | ------ | ------------------------ |------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `200` | OK | The operation performed without error according to the specification of the request, and no more specific 2xx code is suitable. | | `201` | Created | A create request has been fulfilled and a resource has been created. The resource is available as the URI specified in the response, including the `Location` header. | | `202` | Accepted | An asynchronous task has been accepted, but not guaranteed, to be processed in the future. | | `400` | Bad Request | The request was invalid or cannot be otherwise served. The request is not likely to succeed in the future without modifications. | | `401` | Unauthorized | The user is unauthorized to perform the operation requested, or does not maintain permissions to perform the operation on the resource specified. | | `403` | Forbidden | The resource exists to which the user has access, but the operating requested is not permitted. | | `404` | Not Found | The resource specified could not be located, does not exist, or an unauthenticated client does not have permissions to a resource. | | `405` | Method Not Allowed | The operations may not be performed on the specific resource. Allowed operations are returned and may be performed on the resource. 
| | `408` | Request Timeout | The client has failed to complete a request in a timely manner and the request has been discarded. | | `413` | Request Entity Too Large | The request being provided is too large for the server to accept processing. | | `415` | Unsupported Media Type | The media type is not supported for the requested resource. | | `500` | Internal Server Error | An internal and unexpected error has occurred on the server at no fault of the client. | ### Security The response statuses 401, 403 and 404 need special consideration for security purposes. As necessary, error statuses and messages may be obscured to strengthen security and prevent information exposure. The following is a guideline for privileged resource response statuses: | Use Case | Access | Resource | Permission | Status | | ------------------------------------------------------------------ | ------------------ |------------------- | ------------ | ------------ | | Unauthenticated access to an unauthenticated resource. | Unauthenticated | Unauthenticated | Yes | `20x` | | Unauthenticated access to an authenticated resource. | Unauthenticated | Authenticated | No | `401` | | Unauthenticated access to an authenticated resource. | Unauthenticated | Non-existent | No | `401` | | Authenticated access to a unauthenticated resource. | Authenticated | Unauthenticated | Yes | `20x` | | Authenticated access to an authenticated, unprivileged resource. | Authenticated | Authenticated | No | `404` | | Authenticated access to an authenticated, privileged resource. | Authenticated | Authenticated | Yes | `20x` | | Authenticated access to an authenticated, non-existent resource | Authenticated | Non-existent | Yes | `404` | ### Headers Commonly used response headers include: | Header | Example | Purpose | | -------------------------- | --------------------------------- | --------------------------------------------------------------- | | `Allow` | `OPTIONS, GET` | Defines the allowable HTTP operations on a resource. | | `Cache-Control` | `no-store, must-revalidate` | Disables caching of resources (as they are all dynamic). | | `Content-Encoding` | `gzip` | The encoding of the response body (if any). | | `Location` | | Refers to the URI of the resource created by a request. | | `Transfer-Encoding` | `chunked` | Specified the encoding used to transform response. | | `Retry-After` | 5000 | Indicates the time to wait before retrying a request. | | `X-Content-Type-Options` | `nosniff` | Disables MIME type sniffing. | | `X-XSS-Protection` | `1; mode=block` | Enables XSS filter protection. | | `X-Frame-Options` | `SAMEORIGIN` | Prevents rendering in a frame from a different origin. | | `X-UA-Compatible` | `IE=edge,chrome=1` | Specifies the browser mode to render in. | ### Format When `application/json` is returned in the response body it is always pretty-printed (indented, human readable output). Additionally, gzip compression/encoding is supported on all responses. #### Dates & Times Dates or times are returned as strings in the ISO 8601 'extended' format. When a date and time is returned (instant) the value is converted to UTC. For example: | Value | Format | Example | | --------------- | ------------------------------ | --------------------- | | Date | `YYYY-MM-DD` | 2017-12-03 | | Date & Time | `YYYY-MM-DD'T'hh:mm:ss[.nnn]Z` | 2017-12-03T10:15:30Z | #### Content In some resources a Content data type is used. This allows for multiple formats of representation to be returned within resource, specifically `\"html\"` and `\"text\"`. 
The `\"text\"` property returns a flattened representation suitable for output in textual displays. The `\"html\"` property returns an HTML fragment suitable for display within an HTML element. Note, the HTML returned is not a valid stand-alone HTML document. #### Paging The response to a paginated request follows the format: ```json { resources\": [ ... ], \"page\": { \"number\" : ..., \"size\" : ..., \"totalResources\" : ..., \"totalPages\" : ... }, \"links\": [ \"first\" : { \"href\" : \"...\" }, \"prev\" : { \"href\" : \"...\" }, \"self\" : { \"href\" : \"...\" }, \"next\" : { \"href\" : \"...\" }, \"last\" : { \"href\" : \"...\" } ] } ``` The `resources` property is an array of the resources being retrieved from the endpoint, each which should contain at minimum a \"self\" relation hypermedia link. The `page` property outlines the details of the current page and total possible pages. The object for the page includes the following properties: - number - The page number (zero-based) of the page returned. - size - The size of the pages, which is less than or equal to the maximum page size. - totalResources - The total amount of resources available across all pages. - totalPages - The total amount of pages. The last property of the paged response is the `links` array, which contains all available hypermedia links. For paginated responses, the \"self\", \"next\", \"previous\", \"first\", and \"last\" links are returned. The \"self\" link must always be returned and should contain a link to allow the client to replicate the original request against the collection resource in an identical manner to that in which it was invoked. The \"next\" and \"previous\" links are present if either or both there exists a previous or next page, respectively. The \"next\" and \"previous\" links have hrefs that allow \"natural movement\" to the next page, that is all parameters required to move the next page are provided in the link. The \"first\" and \"last\" links provide references to the first and last pages respectively. Requests outside the boundaries of the pageable will result in a `404 NOT FOUND`. Paginated requests do not provide a \"stateful cursor\" to the client, nor does it need to provide a read consistent view. Records in adjacent pages may change while pagination is being traversed, and the total number of pages and resources may change between requests within the same filtered/queries resource collection. #### Property Views The \"depth\" of the response of a resource can be configured using a \"view\". All endpoints supports two views that can tune the extent of the information returned in the resource. The supported views are `summary` and `details` (the default). View are specified using a query parameter, in this format: ```bash /<resource>?view={viewName} ``` #### Error Any error responses can provide a response body with a message to the client indicating more information (if applicable) to aid debugging of the error. All 40x and 50x responses will return an error response in the body. The format of the response is as follows: ```json { \"status\": <statusCode>, \"message\": <message>, \"links\" : [ { \"rel\" : \"...\", \"href\" : \"...\" } ] } ``` The `status` property is the same as the HTTP status returned in the response, to ease client parsing. The message property is a localized message in the request client's locale (if applicable) that articulates the nature of the error. The last property is the `links` property. 
This may contain additional [hypermedia links](#section/Overview/Authentication) to troubleshoot. #### Search Criteria <a section=\"section/Responses/SearchCriteria\"></a> Multiple resources make use of search criteria to match assets. Search criteria is an array of search filters. Each search filter has a generic format of: ```json { \"field\": \"<field-name>\", \"operator\": \"<operator>\", [\"value\": \"<value>\",] [\"lower\": \"<value>\",] [\"upper\": \"<value>\"] } ``` Every filter defines two required properties `field` and `operator`. The field is the name of an asset property that is being filtered on. The operator is a type and property-specific operating performed on the filtered property. The valid values for fields and operators are outlined in the table below. Every filter also defines one or more values that are supplied to the operator. The valid values vary by operator and are outlined below. ##### Fields The following table outlines the search criteria fields and the available operators: | Field | Operators | | --------------------------------- | ------------------------------------------------------------------------------------------------------------------------------ | | `alternate-address-type` | `in` | | `container-image` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` ` is like` ` not like` | | `container-status` | `is` ` is not` | | `containers` | `are` | | `criticality-tag` | `is` ` is not` ` is greater than` ` is less than` ` is applied` ` is not applied` | | `custom-tag` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` ` is applied` ` is not applied` | | `cve` | `is` ` is not` ` contains` ` does not contain` | | `cvss-access-complexity` | `is` ` is not` | | `cvss-authentication-required` | `is` ` is not` | | `cvss-access-vector` | `is` ` is not` | | `cvss-availability-impact` | `is` ` is not` | | `cvss-confidentiality-impact` | `is` ` is not` | | `cvss-integrity-impact` | `is` ` is not` | | `cvss-v3-confidentiality-impact` | `is` ` is not` | | `cvss-v3-integrity-impact` | `is` ` is not` | | `cvss-v3-availability-impact` | `is` ` is not` | | `cvss-v3-attack-vector` | `is` ` is not` | | `cvss-v3-attack-complexity` | `is` ` is not` | | `cvss-v3-user-interaction` | `is` ` is not` | | `cvss-v3-privileges-required` | `is` ` is not` | | `host-name` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` ` is empty` ` is not empty` ` is like` ` not like` | | `host-type` | `in` ` not in` | | `ip-address` | `is` ` is not` ` in range` ` not in range` ` is like` ` not like` | | `ip-address-type` | `in` ` not in` | | `last-scan-date` | `is-on-or-before` ` is on or after` ` is between` ` is earlier than` ` is within the last` | | `location-tag` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` ` is applied` ` is not applied` | | `mobile-device-last-sync-time` | `is-within-the-last` ` is earlier than` | | `open-ports` | `is` ` is not` ` in range` | | `operating-system` | `contains` ` does not contain` ` is empty` ` is not empty` | | `owner-tag` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` ` is applied` ` is not applied` | | `pci-compliance` | `is` | | `risk-score` | `is` ` is not` ` in range` ` greater than` ` less than` | | `service-name` | `contains` ` does not contain` | | `site-id` | `in` ` not in` | | `software` | `contains` ` does not contain` | | `vAsset-cluster` | `is` ` is not` ` contains` ` does not contain` ` starts with` | | 
`vAsset-datacenter` | `is` ` is not` | | `vAsset-host-name` | `is` ` is not` ` contains` ` does not contain` ` starts with` | | `vAsset-power-state` | `in` ` not in` | | `vAsset-resource-pool-path` | `contains` ` does not contain` | | `vulnerability-assessed` | `is-on-or-before` ` is on or after` ` is between` ` is earlier than` ` is within the last` | | `vulnerability-category` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` | | `vulnerability-cvss-v3-score` | `is` ` is not` | | `vulnerability-cvss-score` | `is` ` is not` ` in range` ` is greater than` ` is less than` | | `vulnerability-exposures` | `includes` ` does not include` | | `vulnerability-title` | `contains` ` does not contain` ` is` ` is not` ` starts with` ` ends with` | | `vulnerability-validated-status` | `are` | ##### Enumerated Properties The following fields have enumerated values: | Field | Acceptable Values | | ----------------------------------------- | ------------------------------------------------------------------------------------------------------------- | | `alternate-address-type` | 0=IPv4, 1=IPv6 | | `containers` | 0=present, 1=not present | | `container-status` | `created` `running` `paused` `restarting` `exited` `dead` `unknown` | | `cvss-access-complexity` | <ul><li><code>L</code> = Low</li><li><code>M</code> = Medium</li><li><code>H</code> = High</li></ul> | | `cvss-integrity-impact` | <ul><li><code>N</code> = None</li><li><code>P</code> = Partial</li><li><code>C</code> = Complete</li></ul> | | `cvss-confidentiality-impact` | <ul><li><code>N</code> = None</li><li><code>P</code> = Partial</li><li><code>C</code> = Complete</li></ul> | | `cvss-availability-impact` | <ul><li><code>N</code> = None</li><li><code>P</code> = Partial</li><li><code>C</code> = Complete</li></ul> | | `cvss-access-vector` | <ul><li><code>L</code> = Local</li><li><code>A</code> = Adjacent</li><li><code>N</code> = Network</li></ul> | | `cvss-authentication-required` | <ul><li><code>N</code> = None</li><li><code>S</code> = Single</li><li><code>M</code> = Multiple</li></ul> | | `cvss-v3-confidentiality-impact` | <ul><li><code>L</code> = Local</li><li><code>L</code> = Low</li><li><code>N</code> = None</li><li><code>H</code> = High</li></ul> | | `cvss-v3-integrity-impact` | <ul><li><code>L</code> = Local</li><li><code>L</code> = Low</li><li><code>N</code> = None</li><li><code>H</code> = High</li></ul> | | `cvss-v3-availability-impact` | <ul><li><code>N</code> = None</li><li><code>L</code> = Low</li><li><code>H</code> = High</li></ul> | | `cvss-v3-attack-vector` | <ul><li><code>N</code> = Network</li><li><code>A</code> = Adjacent</li><li><code>L</code> = Local</li><li><code>P</code> = Physical</li></ul> | | `cvss-v3-attack-complexity` | <ul><li><code>L</code> = Low</li><li><code>H</code> = High</li></ul> | | `cvss-v3-user-interaction` | <ul><li><code>N</code> = None</li><li><code>R</code> = Required</li></ul> | | `cvss-v3-privileges-required` | <ul><li><code>N</code> = None</li><li><code>L</code> = Low</li><li><code>H</code> = High</li></ul> | | `host-type` | 0=Unknown, 1=Guest, 2=Hypervisor, 3=Physical, 4=Mobile | | `ip-address-type` | 0=IPv4, 1=IPv6 | | `pci-compliance` | 0=fail, 1=pass | | `vulnerability-validated-status` | 0=present, 1=not present | ##### Operator Properties <a section=\"section/Responses/SearchCriteria/OperatorProperties\"></a> The following table outlines which properties are required for each operator and the appropriate data type(s): | Operator | `value` | `lower` | `upper` | | 
----------------------|-----------------------|-----------------------|-----------------------| | `are` | `string` | | | | `contains` | `string` | | | | `does-not-contain` | `string` | | | | `ends with` | `string` | | | | `in` | `Array[ string ]` | | | | `in-range` | | `numeric` | `numeric` | | `includes` | `Array[ string ]` | | | | `is` | `string` | | | | `is-applied` | | | | | `is-between` | | `numeric` | `numeric` | | `is-earlier-than` | `numeric` | | | | `is-empty` | | | | | `is-greater-than` | `numeric` | | | | `is-on-or-after` | `string` (yyyy-MM-dd) | | | | `is-on-or-before` | `string` (yyyy-MM-dd) | | | | `is-not` | `string` | | | | `is-not-applied` | | | | | `is-not-empty` | | | | | `is-within-the-last` | `string` | | | | `less-than` | `string` | | | | `like` | `string` | | | | `not-contains` | `string` | | | | `not-in` | `Array[ string ]` | | | | `not-in-range` | | `numeric` | `numeric` | | `not-like` | `string` | | | | `starts-with` | `string` | | | #### Discovery Connection Search Criteria <a section=\"section/Responses/DiscoverySearchCriteria\"></a> Dynamic sites make use of search criteria to match assets from a discovery connection. Search criteria is an array of search filters. Each search filter has a generic format of: ```json { \"field\": \"<field-name>\", \"operator\": \"<operator>\", [\"value\": \"<value>\",] [\"lower\": \"<value>\",] [\"upper\": \"<value>\"] } ``` Every filter defines two required properties `field` and `operator`. The field is the name of an asset property that is being filtered on. The list of supported fields vary depending on the type of discovery connection configured for the dynamic site (e.g vSphere, ActiveSync, etc.). The operator is a type and property-specific operating performed on the filtered property. The valid values for fields outlined in the tables below and are grouped by the type of connection. Every filter also defines one or more values that are supplied to the operator. See <a href=\"#section/Responses/SearchCriteria/OperatorProperties\">Search Criteria Operator Properties</a> for more information on the valid values for each operator. ##### Fields (ActiveSync) This section documents search criteria information for ActiveSync discovery connections. The discovery connections must be one of the following types: `\"activesync-ldap\"`, `\"activesync-office365\"`, or `\"activesync-powershell\"`. The following table outlines the search criteria fields and the available operators for ActiveSync connections: | Field | Operators | | --------------------------------- | ------------------------------------------------------------- | | `last-sync-time` | `is-within-the-last` ` is-earlier-than` | | `operating-system` | `contains` ` does-not-contain` | | `user` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | ##### Fields (AWS) This section documents search criteria information for AWS discovery connections. The discovery connections must be the type `\"aws\"`. 
The following table outlines the search criteria fields and the available operators for AWS connections: | Field | Operators | | ----------------------- | ------------------------------------------------------------- | | `availability-zone` | `contains` ` does-not-contain` | | `guest-os-family` | `contains` ` does-not-contain` | | `instance-id` | `contains` ` does-not-contain` | | `instance-name` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | | `instance-state` | `in` ` not-in` | | `instance-type` | `in` ` not-in` | | `ip-address` | `in-range` ` not-in-range` ` is` ` is-not` | | `region` | `in` ` not-in` | | `vpc-id` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | ##### Fields (DHCP) This section documents search criteria information for DHCP discovery connections. The discovery connections must be the type `\"dhcp\"`. The following table outlines the search criteria fields and the available operators for DHCP connections: | Field | Operators | | --------------- | ------------------------------------------------------------- | | `host-name` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | | `ip-address` | `in-range` ` not-in-range` ` is` ` is-not` | | `mac-address` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | ##### Fields (Sonar) This section documents search criteria information for Sonar discovery connections. The discovery connections must be the type `\"sonar\"`. The following table outlines the search criteria fields and the available operators for Sonar connections: | Field | Operators | | ------------------- | -------------------- | | `search-domain` | `contains` ` is` | | `ip-address` | `in-range` ` is` | | `sonar-scan-date` | `is-within-the-last` | ##### Fields (vSphere) This section documents search criteria information for vSphere discovery connections. The discovery connections must be the type `\"vsphere\"`. The following table outlines the search criteria fields and the available operators for vSphere connections: | Field | Operators | | -------------------- | ------------------------------------------------------------------------------------------ | | `cluster` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | | `data-center` | `is` ` is-not` | | `discovered-time` | `is-on-or-before` ` is-on-or-after` ` is-between` ` is-earlier-than` ` is-within-the-last` | | `guest-os-family` | `contains` ` does-not-contain` | | `host-name` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | | `ip-address` | `in-range` ` not-in-range` ` is` ` is-not` | | `power-state` | `in` ` not-in` | | `resource-pool-path` | `contains` ` does-not-contain` | | `last-time-seen` | `is-on-or-before` ` is-on-or-after` ` is-between` ` is-earlier-than` ` is-within-the-last` | | `vm` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | ##### Enumerated Properties (vSphere) The following fields have enumerated values: | Field | Acceptable Values | | ------------- | ------------------------------------ | | `power-state` | `poweredOn` `poweredOff` `suspended` | ## HATEOAS This API follows Hypermedia as the Engine of Application State (HATEOAS) principals and is therefore hypermedia friendly. Hyperlinks are returned in the `links` property of any given resource and contain a fully-qualified hyperlink to the corresponding resource. 
The format of the hypermedia link adheres to both the <a target=\"_blank\" href=\"http://jsonapi.org\">{json:api} v1</a> <a target=\"_blank\" href=\"http://jsonapi.org/format/#document-links\">\"Link Object\"</a> and <a target=\"_blank\" href=\"http://json-schema.org/latest/json-schema-hypermedia.html\">JSON Hyper-Schema</a> <a target=\"_blank\" href=\"http://json-schema.org/latest/json-schema-hypermedia.html#rfc.section.5.2\">\"Link Description Object\"</a> formats. For example: ```json \"links\": [{ \"rel\": \"<relation>\", \"href\": \"<href>\" ... }] ``` Where appropriate link objects may also contain additional properties than the `rel` and `href` properties, such as `id`, `type`, etc. See the [Root](#tag/Root) resources for the entry points into API discovery. # noqa: E501
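As a closing illustration tying these sections together, the sketch below performs a hypothetical search-criteria `POST` and then pages through the results by following the returned hypermedia links; the `/assets/search` path, the top-level `match` property, and the `id` field are assumptions made for the example and are not defined by this overview. ```python import requests

BASE = 'https://console.example.com:3780/api/3'  # placeholder host and port
AUTH = ('username', 'password')

# One search filter per field/operator pair, as described under Search Criteria.
criteria = {
    'match': 'all',  # assumed top-level property combining the filters
    'filters': [
        {'field': 'risk-score', 'operator': 'is-greater-than', 'value': 5000},
        {'field': 'host-name', 'operator': 'contains', 'value': 'db'},
    ],
}
body = requests.post(BASE + '/assets/search', json=criteria, auth=AUTH).json()

# Walk the paginated result set by following the HATEOAS 'next' links.
while True:
    for asset in body['resources']:
        print(asset['id'])  # process each matching asset here
    next_link = next((l['href'] for l in body['links'] if l['rel'] == 'next'), None)
    if next_link is None:
        break
    body = requests.get(next_link, auth=AUTH).json()
```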
OpenAPI spec version: 3
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.created_reference_scan_id_link import CreatedReferenceScanIDLink # noqa: E501
from swagger_client.rest import ApiException
class TestCreatedReferenceScanIDLink(unittest.TestCase):
"""CreatedReferenceScanIDLink unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testCreatedReferenceScanIDLink(self):
"""Test CreatedReferenceScanIDLink"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.created_reference_scan_id_link.CreatedReferenceScanIDLink() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
|
[
"[email protected]"
] | |
66211a39680ce1fb98ce072f4bcc2daf88b80087
|
51d8f003828d6ee6e6611f0e133b1e35cf400601
|
/ipaxi/ixbr_api/core/utils/makefake.py
|
6ef19665cfffdc430c7a59c022a27ffed711a7d1
|
[
"Apache-2.0"
] |
permissive
|
tatubola/xpto
|
23b5f7a42c13c7d39eb321e52b9b4b2d1ef76c4c
|
6ed8cec23b06bccb1edf57e6b67af017f9a162d3
|
refs/heads/master
| 2020-04-02T11:05:24.560009 | 2018-10-23T17:41:10 | 2018-10-23T17:41:10 | 154,370,519 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 18,879 |
py
|
from ixbr_api.core.models import *
from ixbr_api.core.tests.factories import *
CONTACTSBYIX = 15
class MakeFakeData(object):
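    """Populate the database with a self-consistent set of fake IX data.

    Implemented as a singleton so that repeated instantiation returns the
    same object and the fake topology is only built once per process.
    """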
def __new__(cls, *args, **kwargs):
if not hasattr(cls, '_instance'):
            cls._instance = super(MakeFakeData, cls).__new__(cls)
return cls._instance
def __init__(self):
self.ixs = []
self.user = UserFactory()
self.user.save()
self.ports_ext_pix1_sp = []
self.ports_pe_pix2_sp = []
self.ports_ext_pix1_cpv = []
self.channel_port_cisco_ext_pix1_sp = None
def createIX(self):
self.sp = IXFactory(code='sp',
shortname='saopaulo.sp',
fullname='São Paulo - SP',
create_ips=True,
tags_policy='distributed')
print('IX sp created')
self.cpv = IXFactory(code='cpv',
shortname='campinagrande.pb',
fullname='Campina Grande - PB',
create_ips=True,
tags_policy='ix_managed')
print('IX cpv created')
def createPIX(self):
self.pix_sp_1 = PIXFactory(ix=self.sp)
print('PIX ' + self.pix_sp_1.code + ' in sp created')
self.pix_sp_2 = PIXFactory(ix=self.sp)
print('PIX ' + self.pix_sp_2.code + ' in sp created')
self.pix_cpv_1 = PIXFactory(ix=self.cpv)
print('PIX ' + self.pix_cpv_1.code + ' in cpv created')
self.pix_cpv_2 = PIXFactory(ix=self.cpv)
print('PIX ' + self.pix_cpv_2.code + ' in cpv created')
def createSwitches(self):
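        # Topology built below: in SP, each PIX gets a Cisco PE plus an
        # Extreme access switch wired through downlink/uplink channel pairs,
        # and the two PEs are joined by a two-port core LAG. In CPV, both
        # PIXes are Extreme-only and joined by single-port core channels.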
self.cisco = SwitchModelFactory(model='ASR9922', vendor='CISCO')
print('Cisco switch model created')
self.ext = SwitchModelFactory(model='X670a-48x', vendor='EXTREME')
print('Extreme switch model created')
self.port_cisco = SwitchPortRangeFactory(name_format='TenGigE0/0/0/{0}',
capacity=1000, connector_type='SFP', switch_model=self.cisco)
print('Cisco port range created')
self.port_ext = SwitchPortRangeFactory(name_format='{0}',
capacity=1000, connector_type='SFP', switch_model=self.ext)
print('Extreme port range created')
self.cisco_pix1_sp = SwitchFactory(
model=self.cisco, create_ports=True, pix=self.pix_sp_1)
self.cisco_pix1_sp.is_pe = True
self.cisco_pix1_sp.save()
print('Pix1 SP Cisco PE created')
self.ext_pix1_sp = SwitchFactory(
model=self.ext, create_ports=True, pix=self.pix_sp_1)
print('Pix1 SP Extreme created')
self.cisco_pix2_sp = SwitchFactory(
model=self.cisco, create_ports=True, pix=self.pix_sp_2)
self.cisco_pix2_sp.is_pe = True
self.cisco_pix2_sp.save()
print('Pix2 SP Cisco PE created')
self.ext_pix2_sp = SwitchFactory(
model=self.ext, create_ports=True, pix=self.pix_sp_2)
print('Pix2 SP Extreme created')
self.ext_pix1_cpv = SwitchFactory(
model=self.ext, create_ports=True, pix=self.pix_cpv_1)
print('Pix1 CPV Extreme created')
self.ext_pix2_cpv = SwitchFactory(
model=self.ext, create_ports=True, pix=self.pix_cpv_2)
print('Pix2 CPV Extreme created')
self.ports_pe_pix1_sp = list(
Port.objects.filter(switch=self.cisco_pix1_sp))
self.ports_pe_pix2_sp = list(
Port.objects.filter(switch=self.cisco_pix2_sp))
self.ports_ext_pix1_sp = list(
Port.objects.filter(switch=self.ext_pix1_sp))
self.ports_ext_pix2_sp = list(
Port.objects.filter(switch=self.ext_pix2_sp))
self.channel_port_ext_cisco_pix1_sp = ChannelPortFactory(
create_tags=False, tags_type='Indirect-Bundle-Ether')
print('Channel Port extreme pix1 SP created')
self.channel_port_cisco_ext_pix1_sp = ChannelPortFactory(
create_tags=False, tags_type='Direct-Bundle-Ether')
print('Channel Port cisco pe pix1 SP created')
self.channel_port_cisco1_cisco2_sp = ChannelPortFactory(
create_tags=False, tags_type='Core')
print('Channel Port cisco pix1 cisco pix2 SP created')
self.channel_port_cisco2_cisco1_sp = ChannelPortFactory(
create_tags=False, tags_type='Core')
print('Channel Port cisco pix2 cisco pix1 SP created')
self.channel_port_ext_cisco_pix2_sp = ChannelPortFactory(
create_tags=False, tags_type='Indirect-Bundle-Ether')
print('Channel Port extreme pix2 SP created')
self.channel_port_cisco_ext_pix2_sp = ChannelPortFactory(
create_tags=False, tags_type='Direct-Bundle-Ether')
print('Channel Port cisco pe pix2 SP created')
        # extreme pix 1 to cisco pe pix 1
self.port_channel_port_cisco_ext_pix1_sp = self.ports_pe_pix1_sp.pop()
self.port_channel_port_cisco_ext_pix1_sp.channel_port = self.channel_port_cisco_ext_pix1_sp
self.port_channel_port_cisco_ext_pix1_sp.status = 'INFRASTRUCTURE'
self.port_channel_port_cisco_ext_pix1_sp.save()
self.downlink_cisco_ext_pix1_sp = DownlinkChannelFactory(
name='dl-BE1010', is_lag=False,
channel_port=self.channel_port_cisco_ext_pix1_sp)
print('Downlink channel cisco pe pix1 sp created')
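        # Assumption about the factory/model hooks: toggling create_tags
        # around save() makes the channel port allocate its tag range now
        # that the downlink channel exists.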
self.channel_port_cisco_ext_pix1_sp.create_tags = True
self.channel_port_cisco_ext_pix1_sp.save()
self.channel_port_cisco_ext_pix1_sp.create_tags = False
self.channel_port_cisco_ext_pix1_sp.save()
self.port_channel_port_ext_cisco_pix1_sp = self.ports_ext_pix1_sp.pop()
self.port_channel_port_ext_cisco_pix1_sp.channel_port = self.channel_port_ext_cisco_pix1_sp
self.port_channel_port_ext_cisco_pix1_sp.status = 'INFRASTRUCTURE'
self.port_channel_port_ext_cisco_pix1_sp.save()
self.uplink_ext_cisco_pix1_sp = UplinkChannelFactory(
name='ul-1', is_lag=False,
channel_port=self.channel_port_ext_cisco_pix1_sp,
downlink_channel=self.downlink_cisco_ext_pix1_sp)
print('Uplink channel extreme pix1 sp created')
# lag core pix 1 to pix 2
self.port1_channel_port_cisco2_cisco1_sp = self.ports_pe_pix2_sp.pop()
self.port1_channel_port_cisco2_cisco1_sp.channel_port = self.channel_port_cisco2_cisco1_sp
self.port1_channel_port_cisco2_cisco1_sp.status = 'INFRASTRUCTURE'
self.port1_channel_port_cisco2_cisco1_sp.save()
self.port2_channel_port_cisco2_cisco1_sp = self.ports_pe_pix2_sp.pop()
self.port2_channel_port_cisco2_cisco1_sp.channel_port = self.channel_port_cisco2_cisco1_sp
self.port2_channel_port_cisco2_cisco1_sp.status = 'INFRASTRUCTURE'
self.port2_channel_port_cisco2_cisco1_sp.save()
self.channel_port_cisco2_cisco1_sp.port_set.add(
self.port1_channel_port_cisco2_cisco1_sp)
self.channel_port_cisco2_cisco1_sp.port_set.add(
self.port2_channel_port_cisco2_cisco1_sp)
self.port2_channel_port_cisco2_cisco1_sp.save()
self.core_cisco2_cisco1 = CoreChannelFactory(
name='cc-BE1020', is_lag=True,
channel_port=self.channel_port_cisco2_cisco1_sp)
print('Core channel cisco pix2 to cisco pix1 sp created')
self.port1_channel_port_cisco1_cisco2_sp = self.ports_pe_pix1_sp.pop()
self.port1_channel_port_cisco1_cisco2_sp.channel_port = self.channel_port_cisco1_cisco2_sp
self.port1_channel_port_cisco1_cisco2_sp.status = 'INFRASTRUCTURE'
self.port1_channel_port_cisco1_cisco2_sp.save()
self.port2_channel_port_cisco1_cisco2_sp = self.ports_pe_pix1_sp.pop()
self.port2_channel_port_cisco1_cisco2_sp.channel_port = self.channel_port_cisco1_cisco2_sp
self.port2_channel_port_cisco1_cisco2_sp.status = 'INFRASTRUCTURE'
self.port2_channel_port_cisco1_cisco2_sp.save()
self.core_cisco1_cisco2 = CoreChannelFactory(
name='cc-BE1020', is_lag=True,
channel_port=self.channel_port_cisco1_cisco2_sp)
print('Core channel cisco pix1 to cisco pix2 sp created')
###########
# extreme pix 2 to cisco pe pix 2
self.port_channel_port_cisco_ext_pix2_sp = self.ports_pe_pix2_sp.pop()
self.port_channel_port_cisco_ext_pix2_sp.channel_port = self.channel_port_cisco_ext_pix2_sp
self.port_channel_port_cisco_ext_pix2_sp.status = 'INFRASTRUCTURE'
self.port_channel_port_cisco_ext_pix2_sp.save()
self.channel_port_cisco_ext_pix2_sp.port_set.add(
self.port_channel_port_cisco_ext_pix2_sp)
        self.downlink_cisco_ext_pix2_sp = DownlinkChannelFactory(
name='dl-BE1110', is_lag=False,
channel_port=self.channel_port_cisco_ext_pix2_sp)
print('Downlink channel cisco pe pix 2 sp created')
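        # Same create_tags toggle as for pix1: force tag allocation on save.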
self.channel_port_cisco_ext_pix2_sp.create_tags = True
self.channel_port_cisco_ext_pix2_sp.save()
self.channel_port_cisco_ext_pix2_sp.create_tags = False
self.channel_port_cisco_ext_pix2_sp.save()
self.port_channel_port_ext_cisco_pix2_sp = self.ports_ext_pix2_sp.pop()
self.port_channel_port_ext_cisco_pix2_sp.channel_port = self.channel_port_ext_cisco_pix2_sp
self.port_channel_port_ext_cisco_pix2_sp.status = 'INFRASTRUCTURE'
self.port_channel_port_ext_cisco_pix2_sp.save()
        self.uplink_ext_cisco_pix2_sp = UplinkChannelFactory(
            name='ul-1', is_lag=False,
            channel_port=self.channel_port_ext_cisco_pix2_sp,
            downlink_channel=self.downlink_cisco_ext_pix2_sp)
print('Uplink channel extreme pix2 sp created')
# EXTREMES CPV
self.ports_ext_pix1_cpv = list(
Port.objects.filter(switch=self.ext_pix1_cpv))
self.ports_ext_pix2_cpv = list(
Port.objects.filter(switch=self.ext_pix2_cpv))
self.ext_pix1_cpv_ext_pix2_cpv_channel_port = ChannelPortFactory(
create_tags=False, tags_type='Indirect-Bundle-Ether')
self.ext_pix2_cpv_ext_pix1_cpv_channel_port = ChannelPortFactory(
create_tags=False, tags_type='Indirect-Bundle-Ether')
self.port_pop_ext_pix1_cpv = self.ports_ext_pix1_cpv.pop()
self.port_pop_ext_pix1_cpv.channel_port = self.ext_pix1_cpv_ext_pix2_cpv_channel_port
self.port_pop_ext_pix1_cpv.status = 'INFRASTRUCTURE'
self.port_pop_ext_pix1_cpv.save()
self.core_pix1_pix2_cpv = CoreChannelFactory(
is_lag=False, channel_port=self.ext_pix1_cpv_ext_pix2_cpv_channel_port)
print('Core channel pix1 to pix2 cpv created')
self.port_pop_ext_pix2_cpv = self.ports_ext_pix2_cpv.pop()
self.port_pop_ext_pix2_cpv.channel_port = self.ext_pix2_cpv_ext_pix1_cpv_channel_port
self.port_pop_ext_pix2_cpv.status = 'INFRASTRUCTURE'
self.port_pop_ext_pix2_cpv.save()
self.core_pix2_pix1_cpv = CoreChannelFactory(
name='cc-1', is_lag=False,
channel_port=self.ext_pix2_cpv_ext_pix1_cpv_channel_port)
        print('Core channel pix2 to pix1 cpv created')
self.channel_port_ext_cisco_pix1_sp.save()
self.channel_port_cisco_ext_pix1_sp.save()
self.channel_port_cisco1_cisco2_sp.save()
self.channel_port_cisco2_cisco1_sp.save()
self.channel_port_ext_cisco_pix2_sp.save()
self.channel_port_cisco_ext_pix2_sp.save()
def createContacts(self):
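        # For every IX, create CONTACTSBYIX ASNs, each wired to an
        # organization, a phone and a NOC contact through a ContactsMap.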
for ix in IX.objects.all():
for i in range(CONTACTSBYIX):
self.asn = ASNFactory(modified_by=self.user)
self.org = OrganizationFactory(modified_by=self.user)
self.phone = PhoneFactory()
self.contact = self.phone.contact
self.map = ContactsMapFactory(organization=self.org,
asn=self.asn, ix=ix, noc_contact=self.contact,
modified_by=self.user)
self.asn.save()
self.phone.save()
self.contact.save()
self.map.save()
print("created contact " +
str(self.contact) + " in ix " + str(ix))
def createCustomerChannels(self):
self.sp_asns = list(ASN.objects.filter(contactsmap__ix='sp'))
self.cpv_asns = list(ASN.objects.filter(contactsmap__ix='cpv'))
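        # Pop from these lists so every customer channel below receives an
        # ASN (and port) that has not been used yet.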
self.channel_port_sp_ext = ChannelPortFactory(
create_tags=False, tags_type='Indirect-Bundle-Ether')
self.port_customer_ext_pix1_sp = self.ports_ext_pix1_sp.pop()
self.port_customer_ext_pix1_sp.channel_port = self.channel_port_sp_ext
self.port_customer_ext_pix1_sp.status = 'CUSTOMER'
self.channel_port_sp_ext.port_set.add(self.port_customer_ext_pix1_sp)
self.customer_channel_ext_pix1_sp = CustomerChannelFactoryVanilla(
channel_port=self.channel_port_sp_ext,
ix=IX.objects.get(pk='sp'),
is_lag=False,
asn=self.sp_asns.pop(),
cix_type=0)
print('Customer Channel as ' +
str(self.customer_channel_ext_pix1_sp.asn) + ' created')
self.port_customer_ext_pix1_sp.save()
self.channel_port_sp_cisco = ChannelPortFactory(
create_tags=False, tags_type='Direct-Bundle-Ether')
self.port_customer_cisco_pix2_sp = self.ports_pe_pix2_sp.pop()
self.port_customer_cisco_pix2_sp.channel_port = self.channel_port_sp_cisco
self.port_customer_cisco_pix2_sp.status = 'CUSTOMER'
self.channel_port_sp_cisco.port_set.add(self.port_customer_cisco_pix2_sp)
self.customer_channel_cisco_pix2_sp = CustomerChannelFactoryVanilla(
name='ct-BE2010',
channel_port=self.channel_port_sp_cisco,
ix=IX.objects.get(pk='sp'),
is_lag=False,
asn=self.sp_asns.pop(),
cix_type=0)
self.port_customer_cisco_pix2_sp.save()
print('Customer Channel as ' +
str(self.customer_channel_cisco_pix2_sp.asn) + ' created at cisco')
self.channel_port_cpv_ext = ChannelPortFactory(
create_tags=False, tags_type='Indirect-Bundle-Ether')
self.port_customer_ext_cpv = self.ports_ext_pix1_cpv.pop()
self.port_customer_ext_cpv.channel_port = self.channel_port_cpv_ext
self.port_customer_ext_cpv.status = 'CUSTOMER'
self.channel_port_cpv_ext.port_set.add(self.port_customer_ext_cpv)
self.customer_channel_ext_cpv = CustomerChannelFactoryVanilla(
channel_port=self.channel_port_cpv_ext,
ix=IX.objects.get(pk='cpv'),
is_lag=False,
asn=self.cpv_asns.pop(),
cix_type=0)
self.port_customer_ext_cpv.save()
print('Customer Channel as ' +
str(self.customer_channel_ext_cpv.asn) + ' created at cpv extreme')
self.ipv4s_sp = list(IPv4Address.objects.filter(ix='sp'))
self.ipv6s_sp = list(IPv6Address.objects.filter(ix='sp'))
self.ipv4s_cpv = list(IPv4Address.objects.filter(ix='cpv'))
self.ipv6s_cpv = list(IPv6Address.objects.filter(ix='cpv'))
self.tags_ext_pix1_sp = list(Tag.objects.filter(
tag_domain=self.channel_port_cisco_ext_pix1_sp))
        self.tag_mlpav4_extreme_sp = self.tags_ext_pix1_sp.pop()
        self.mlpav4_ext_sp = MLPAv4Factory(
            status='PRODUCTION',
            mlpav4_address=self.ipv4s_sp.pop(),
            asn=self.customer_channel_ext_pix1_sp.asn,
            customer_channel=self.customer_channel_ext_pix1_sp,
            tag=self.tag_mlpav4_extreme_sp)
        self.tag_mlpav4_extreme_sp.status = 'PRODUCTION'
        self.tag_mlpav4_extreme_sp.save()
print('created MLPAv4 extreme pix1 SP')
        self.tag_mlpav6_extreme_sp = self.tags_ext_pix1_sp.pop()
        self.mlpav6_ext_sp = MLPAv6Factory(
            status='PRODUCTION',
            mlpav6_address=self.ipv6s_sp.pop(),
            asn=self.customer_channel_ext_pix1_sp.asn,
            customer_channel=self.customer_channel_ext_pix1_sp,
            tag=self.tag_mlpav6_extreme_sp)
        self.tag_mlpav6_extreme_sp.status = 'PRODUCTION'
        self.tag_mlpav6_extreme_sp.save()
print('created MLPAv6 extreme pix1 SP')
self.tag_mlpav4_cisco_sp = TagFactory(
tag=1,
ix=IX.objects.get(pk='sp'),
tag_domain=self.channel_port_sp_cisco,
status='PRODUCTION')
        self.mlpav4_cisco_sp = MLPAv4Factory(
status='PRODUCTION',
mlpav4_address=self.ipv4s_sp.pop(),
asn=self.customer_channel_cisco_pix2_sp.asn,
customer_channel=self.customer_channel_cisco_pix2_sp,
tag=self.tag_mlpav4_cisco_sp)
        print('created MLPAv4 cisco pix2 SP')
self.tag_mlpav6_cisco_sp = TagFactory(
tag=2,
ix=IX.objects.get(pk='sp'),
tag_domain=self.channel_port_sp_cisco,
status='PRODUCTION')
self.mlpav6_cisco_sp = MLPAv6Factory(
status='PRODUCTION',
mlpav6_address=self.ipv6s_sp.pop(),
asn=self.customer_channel_cisco_pix2_sp.asn,
customer_channel=self.customer_channel_cisco_pix2_sp,
tag=self.tag_mlpav6_cisco_sp)
        print('created MLPAv6 cisco pix2 SP')
self.tags_ext_pix1_cpv = list(
Tag.objects.filter(ix=IX.objects.get(pk='cpv')))
        self.tag_mlpav4_extreme_cpv = self.tags_ext_pix1_cpv.pop()
        self.mlpav4_ext_cpv = MLPAv4Factory(
            status='PRODUCTION',
            mlpav4_address=self.ipv4s_cpv.pop(),
            asn=self.customer_channel_ext_cpv.asn,
            customer_channel=self.customer_channel_ext_cpv,
            tag=self.tag_mlpav4_extreme_cpv)
        self.tag_mlpav4_extreme_cpv.status = 'PRODUCTION'
        self.tag_mlpav4_extreme_cpv.save()
print('created MLPAv4 extreme pix1 CPV')
        self.tag_mlpav6_extreme_cpv = self.tags_ext_pix1_cpv.pop()
        self.mlpav6_ext_cpv = MLPAv6Factory(
            status='PRODUCTION',
            mlpav6_address=self.ipv6s_cpv.pop(),
            asn=self.customer_channel_ext_cpv.asn,
            customer_channel=self.customer_channel_ext_cpv,
            tag=self.tag_mlpav6_extreme_cpv)
        self.tag_mlpav6_extreme_cpv.status = 'PRODUCTION'
        self.tag_mlpav6_extreme_cpv.save()
print('created MLPAv6 extreme pix1 CPV')
def makeData(self):
self.createIX()
self.createPIX()
self.createSwitches()
self.createContacts()
self.createCustomerChannels()
|
[
"[email protected]"
] | |
61d80d6085e27dca1345de8bbdbe220d09436811
|
aee21094d4280862cc1c5b4ec4eb2e61a2c45f3e
|
/source/sagemaker-python-sdk/tests/unit/test_tfs.py
|
5bcdbfba8b1eb33e6a80ee22a50a8b94e53ce529
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
ggiallo28/aws-deepracer-local
|
fc63aa6fffaf5ce537e42268a28ea88195fe5489
|
e1e069d8f63c877a723715758910d0039edd7ec2
|
refs/heads/master
| 2022-10-30T23:01:42.797873 | 2019-11-04T09:48:54 | 2019-11-04T09:48:54 | 219,015,159 | 3 | 2 |
MIT
| 2022-10-16T04:46:59 | 2019-11-01T15:44:35 |
Python
|
UTF-8
|
Python
| false | false | 10,100 |
py
|
# Copyright 2017-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from __future__ import absolute_import
import io
import json
import logging
import pytest
from mock import Mock
from sagemaker.tensorflow import TensorFlow
from sagemaker.tensorflow.predictor import csv_serializer
from sagemaker.tensorflow.serving import Model, Predictor
JSON_CONTENT_TYPE = 'application/json'
CSV_CONTENT_TYPE = 'text/csv'
INSTANCE_COUNT = 1
INSTANCE_TYPE = 'ml.c4.4xlarge'
ACCELERATOR_TYPE = 'ml.eia1.medium'
ROLE = 'Dummy'
REGION = 'us-west-2'
PREDICT_INPUT = {'instances': [1.0, 2.0, 5.0]}
PREDICT_RESPONSE = {'predictions': [[3.5, 4.0, 5.5], [3.5, 4.0, 5.5]]}
CLASSIFY_INPUT = {
'signature_name': 'tensorflow/serving/classify',
'examples': [{'x': 1.0}, {'x': 2.0}]
}
CLASSIFY_RESPONSE = {'result': [[0.4, 0.6], [0.2, 0.8]]}
REGRESS_INPUT = {
'signature_name': 'tensorflow/serving/regress',
'examples': [{'x': 1.0}, {'x': 2.0}]
}
REGRESS_RESPONSE = {'results': [3.5, 4.0]}
ENDPOINT_DESC = {
'EndpointConfigName': 'test-endpoint'
}
ENDPOINT_CONFIG_DESC = {
'ProductionVariants': [{'ModelName': 'model-1'},
{'ModelName': 'model-2'}]
}
@pytest.fixture()
def sagemaker_session():
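    # Fully mocked SageMaker session: these unit tests never call real AWS APIs.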
boto_mock = Mock(name='boto_session', region_name=REGION)
session = Mock(name='sagemaker_session', boto_session=boto_mock,
boto_region_name=REGION, config=None, local_mode=False)
session.default_bucket = Mock(name='default_bucket', return_value='my_bucket')
session.expand_role = Mock(name="expand_role", return_value=ROLE)
describe = {'ModelArtifacts': {'S3ModelArtifacts': 's3://m/m.tar.gz'}}
session.sagemaker_client.describe_training_job = Mock(return_value=describe)
session.sagemaker_client.describe_endpoint = Mock(return_value=ENDPOINT_DESC)
session.sagemaker_client.describe_endpoint_config = Mock(return_value=ENDPOINT_CONFIG_DESC)
return session
def test_tfs_model(sagemaker_session, tf_version):
model = Model("s3://some/data.tar.gz", role=ROLE, framework_version=tf_version,
sagemaker_session=sagemaker_session)
cdef = model.prepare_container_def(INSTANCE_TYPE)
assert cdef['Image'].endswith('sagemaker-tensorflow-serving:{}-cpu'.format(tf_version))
assert cdef['Environment'] == {}
predictor = model.deploy(INSTANCE_COUNT, INSTANCE_TYPE)
assert isinstance(predictor, Predictor)
def test_tfs_model_image_accelerator(sagemaker_session, tf_version):
model = Model("s3://some/data.tar.gz", role=ROLE, framework_version=tf_version,
sagemaker_session=sagemaker_session)
cdef = model.prepare_container_def(INSTANCE_TYPE, accelerator_type=ACCELERATOR_TYPE)
assert cdef['Image'].endswith('sagemaker-tensorflow-serving-eia:{}-cpu'.format(tf_version))
predictor = model.deploy(INSTANCE_COUNT, INSTANCE_TYPE)
assert isinstance(predictor, Predictor)
def test_tfs_model_with_log_level(sagemaker_session, tf_version):
model = Model("s3://some/data.tar.gz", role=ROLE, framework_version=tf_version,
container_log_level=logging.INFO,
sagemaker_session=sagemaker_session)
cdef = model.prepare_container_def(INSTANCE_TYPE)
assert cdef['Environment'] == {Model.LOG_LEVEL_PARAM_NAME: 'info'}
def test_tfs_model_with_custom_image(sagemaker_session, tf_version):
model = Model("s3://some/data.tar.gz", role=ROLE, framework_version=tf_version,
image='my-image',
sagemaker_session=sagemaker_session)
cdef = model.prepare_container_def(INSTANCE_TYPE)
assert cdef['Image'] == 'my-image'
def test_estimator_deploy(sagemaker_session):
container_log_level = '"logging.INFO"'
source_dir = 's3://mybucket/source'
custom_image = 'custom:1.0'
tf = TensorFlow(entry_point='script.py', role=ROLE, sagemaker_session=sagemaker_session,
training_steps=1000, evaluation_steps=10, train_instance_count=INSTANCE_COUNT,
train_instance_type=INSTANCE_TYPE, image_name=custom_image,
container_log_level=container_log_level, base_job_name='job',
source_dir=source_dir)
job_name = 'doing something'
tf.fit(inputs='s3://mybucket/train', job_name=job_name)
predictor = tf.deploy(INSTANCE_COUNT, INSTANCE_TYPE, endpoint_name='endpoint',
endpoint_type='tensorflow-serving')
assert isinstance(predictor, Predictor)
def test_predictor(sagemaker_session):
predictor = Predictor('endpoint', sagemaker_session)
mock_response(json.dumps(PREDICT_RESPONSE).encode('utf-8'), sagemaker_session)
result = predictor.predict(PREDICT_INPUT)
assert_invoked(sagemaker_session,
EndpointName='endpoint',
ContentType=JSON_CONTENT_TYPE,
Accept=JSON_CONTENT_TYPE,
Body=json.dumps(PREDICT_INPUT))
assert PREDICT_RESPONSE == result
def test_predictor_jsons(sagemaker_session):
predictor = Predictor('endpoint', sagemaker_session, serializer=None,
content_type='application/jsons')
mock_response(json.dumps(PREDICT_RESPONSE).encode('utf-8'), sagemaker_session)
result = predictor.predict('[1.0, 2.0, 3.0]\n[4.0, 5.0, 6.0]')
assert_invoked(sagemaker_session,
EndpointName='endpoint',
ContentType='application/jsons',
Accept=JSON_CONTENT_TYPE,
Body='[1.0, 2.0, 3.0]\n[4.0, 5.0, 6.0]')
assert PREDICT_RESPONSE == result
def test_predictor_csv(sagemaker_session):
predictor = Predictor('endpoint', sagemaker_session, serializer=csv_serializer)
mock_response(json.dumps(PREDICT_RESPONSE).encode('utf-8'), sagemaker_session)
result = predictor.predict([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]])
assert_invoked(sagemaker_session,
EndpointName='endpoint',
ContentType=CSV_CONTENT_TYPE,
Accept=JSON_CONTENT_TYPE,
Body='1.0,2.0,3.0\n4.0,5.0,6.0')
assert PREDICT_RESPONSE == result
def test_predictor_model_attributes(sagemaker_session):
predictor = Predictor('endpoint', sagemaker_session, model_name='model', model_version='123')
mock_response(json.dumps(PREDICT_RESPONSE).encode('utf-8'), sagemaker_session)
result = predictor.predict(PREDICT_INPUT)
assert_invoked(sagemaker_session,
EndpointName='endpoint',
ContentType=JSON_CONTENT_TYPE,
Accept=JSON_CONTENT_TYPE,
CustomAttributes='tfs-model-name=model,tfs-model-version=123',
Body=json.dumps(PREDICT_INPUT))
assert PREDICT_RESPONSE == result
def test_predictor_classify(sagemaker_session):
predictor = Predictor('endpoint', sagemaker_session)
mock_response(json.dumps(CLASSIFY_RESPONSE).encode('utf-8'), sagemaker_session)
result = predictor.classify(CLASSIFY_INPUT)
assert_invoked_with_body_dict(sagemaker_session,
EndpointName='endpoint',
ContentType=JSON_CONTENT_TYPE,
Accept=JSON_CONTENT_TYPE,
CustomAttributes='tfs-method=classify',
Body=json.dumps(CLASSIFY_INPUT))
assert CLASSIFY_RESPONSE == result
def test_predictor_regress(sagemaker_session):
predictor = Predictor('endpoint', sagemaker_session, model_name='model', model_version='123')
mock_response(json.dumps(REGRESS_RESPONSE).encode('utf-8'), sagemaker_session)
result = predictor.regress(REGRESS_INPUT)
assert_invoked_with_body_dict(sagemaker_session,
EndpointName='endpoint',
ContentType=JSON_CONTENT_TYPE,
Accept=JSON_CONTENT_TYPE,
CustomAttributes='tfs-method=regress,tfs-model-name=model,tfs-model-version=123',
Body=json.dumps(REGRESS_INPUT))
assert REGRESS_RESPONSE == result
def test_predictor_regress_bad_content_type(sagemaker_session):
predictor = Predictor('endpoint', sagemaker_session, csv_serializer)
with pytest.raises(ValueError):
predictor.regress(REGRESS_INPUT)
def test_predictor_classify_bad_content_type(sagemaker_session):
predictor = Predictor('endpoint', sagemaker_session, csv_serializer)
with pytest.raises(ValueError):
predictor.classify(CLASSIFY_INPUT)
def assert_invoked(sagemaker_session, **kwargs):
sagemaker_session.sagemaker_runtime_client.invoke_endpoint.assert_called_once_with(**kwargs)
def assert_invoked_with_body_dict(sagemaker_session, **kwargs):
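    # Compare Body as parsed JSON so dict key ordering cannot break the assertion.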
call = sagemaker_session.sagemaker_runtime_client.invoke_endpoint.call_args
cargs, ckwargs = call
assert not cargs
assert len(kwargs) == len(ckwargs)
for k in ckwargs:
if k != 'Body':
assert kwargs[k] == ckwargs[k]
else:
actual_body = json.loads(ckwargs[k])
expected_body = json.loads(kwargs[k])
assert len(actual_body) == len(expected_body)
for k2 in actual_body:
assert actual_body[k2] == expected_body[k2]
def mock_response(expected_response, sagemaker_session, content_type=JSON_CONTENT_TYPE):
sagemaker_session.sagemaker_runtime_client.invoke_endpoint.return_value = {
'ContentType': content_type,
'Body': io.BytesIO(expected_response)
}
|
[
"[email protected]"
] | |
abfa065e0669f0ebca97cd0fcf096a691985a579
|
96d22b720aa724341afc9ecb60951340f18afc11
|
/bench_run_times/compare_gelsy.py
|
54510a751a137ff97c3b2c9bd1d2ef97fc650932
|
[
"MIT"
] |
permissive
|
cjekel/pwlf_scipy_tf_benchmarks
|
7d10ecd5437ab0a73ddf10cc307bb50950d47033
|
7fc3a92fc4bd58b53a9839fe139fe0c53528a894
|
refs/heads/master
| 2023-03-21T04:56:03.779462 | 2019-05-15T23:26:50 | 2019-05-15T23:26:50 | 180,461,541 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,176 |
py
|
import numpy as np
import matplotlib.pyplot as plt
# factor for 90% coverage with 90% confidence using Normal distribution
# with 10 samples from table XII in [1]
# [1] Montgomery, D. C., & Runger, G. C. (2014). Applied statistics and
# probability for engineers. Sixth edition. John Wiley & Sons.
k = 2.535
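# Illustrative reading of the error bars plotted below: for each problem size
# the interval mean +/- k * sample_std is a two-sided tolerance interval,
# intended to cover ~90% of run times with 90% confidence over the 10 repeats.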
run_times = np.load('intel_i5_6300u/6_break_times.npy')
n = np.load('intel_i5_6300u/n.npy')
run_times1 = np.load('intel_i5_6300u_gelsy/6_break_times.npy')
run_times_means = run_times.mean(axis=2)
run_times_stds = run_times.std(axis=2, ddof=1)
run_times_means1 = run_times1.mean(axis=2)
run_times_stds1 = run_times1.std(axis=2, ddof=1)
plt.figure()
plt.grid()
plt.errorbar(n, run_times_means[0], yerr=k*run_times_stds[0], capsize=2.0, label='Numpy')
plt.errorbar(n, run_times_means1[0], yerr=k*run_times_stds1[0], capsize=2.0, label='Scipy gelsy')
# plt.errorbar(n, run_times_means[1], yerr=k*run_times_stds[1], capsize=2.0, label='TF GPU')
plt.errorbar(n, run_times_means[1], yerr=k*run_times_stds[1], capsize=2.0, label='TF CPU')
plt.xlabel('Number of data points')
plt.ylabel('Run time (seconds; lower is better)')
plt.semilogx()
plt.legend()
plt.show()
|
[
"[email protected]"
] | |
d131c57a429d7bc9e3f7bcc03878e32f6db37d3b
|
32904d4841d104143ba0f41cc3aeb749e470f546
|
/backend/django/apps/memos/migrations/0007_memoattachment_memocomment.py
|
38b6513ecf0f5f3e2176e62cf023ff8cb3467139
|
[] |
no_license
|
aurthurm/dispatrace-api-vuejs
|
20ec5deee015e69bce7a64dc2d89ccae8941b800
|
56d122318af27ff64755fc515345974631d3026f
|
refs/heads/master
| 2023-01-23T23:03:15.438339 | 2020-10-20T22:09:29 | 2020-10-20T22:09:29 | 219,028,985 | 0 | 1 | null | 2022-12-22T18:31:38 | 2019-11-01T17:08:35 |
Vue
|
UTF-8
|
Python
| false | false | 2,053 |
py
|
# Generated by Django 2.2.6 on 2019-10-25 17:52
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import mptt.fields
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('memos', '0006_auto_20191024_1130'),
]
operations = [
migrations.CreateModel(
name='MemoComment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('comment', models.TextField()),
('timestamp', models.DateTimeField(default=django.utils.timezone.now)),
('lft', models.PositiveIntegerField(editable=False)),
('rght', models.PositiveIntegerField(editable=False)),
('tree_id', models.PositiveIntegerField(db_index=True, editable=False)),
('level', models.PositiveIntegerField(editable=False)),
('commenter', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL)),
('memo', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='memocomment_comment', to='memos.Memo')),
('parent', mptt.fields.TreeForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='memocomment_sub_comment', to='memos.MemoComment')),
],
options={
'verbose_name': 'memo comment',
'verbose_name_plural': 'memo comments',
},
),
migrations.CreateModel(
name='MemoAttachment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('memo', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='memos.Memo')),
],
),
]
|
[
"[email protected]"
] | |
7c75f12faf85cdd86400124d3913a6fab1f7255e
|
97f4a8bbd501e03cc0ff463b30cd031a905532fe
|
/custom_components/yandex_station/humidifier.py
|
1322fa524feba59f89e2e2ec8c55e654b39c8fbd
|
[] |
no_license
|
AlexxIT/YandexStation
|
57ced1544745a71d11e12c74a04afdae657019ad
|
9966a647d9f1d385ac6f0365b5e0ed0b516686a6
|
refs/heads/master
| 2023-08-28T18:50:10.197891 | 2023-06-15T09:08:23 | 2023-06-15T09:08:23 | 236,572,107 | 1,018 | 134 | null | 2023-06-15T08:59:28 | 2020-01-27T19:15:27 |
Python
|
UTF-8
|
Python
| false | false | 6,036 |
py
|
"""Support for Yandex Smart Home humidifier."""
import logging
from typing import Any
import homeassistant.helpers.config_validation as cv
import voluptuous as vol
from homeassistant.components.climate.const import SUPPORT_TARGET_HUMIDITY
from homeassistant.components.humidifier import HumidifierEntity
from homeassistant.const import ATTR_STATE
from homeassistant.helpers import entity_platform
from . import CONF_INCLUDE, DATA_CONFIG, DOMAIN, YandexQuasar
_LOGGER = logging.getLogger(__name__)
SERVICE_MUTE = "mute"
SERVICE_IONIZATION = "ionization"
SERVICE_BACKLIGHT = "backlight"
HUMIDIFIER_STATE_USER_SCHEMA = {vol.Required(ATTR_STATE): cv.boolean}
DEVICES = ["devices.types.humidifier"]
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up humidifier from a config entry."""
include = hass.data[DOMAIN][DATA_CONFIG][CONF_INCLUDE]
quasar = hass.data[DOMAIN][entry.unique_id]
devices = [
YandexHumidifier(quasar, device)
for device in quasar.devices
if device["name"] in include and device["type"] in DEVICES
]
platform = entity_platform.async_get_current_platform()
platform.async_register_entity_service(
SERVICE_MUTE, HUMIDIFIER_STATE_USER_SCHEMA, "mute"
)
platform.async_register_entity_service(
SERVICE_IONIZATION, HUMIDIFIER_STATE_USER_SCHEMA, "ionization"
)
platform.async_register_entity_service(
SERVICE_BACKLIGHT, HUMIDIFIER_STATE_USER_SCHEMA, "backlight"
)
async_add_entities(devices, True)
# noinspection PyAbstractClass
class YandexHumidifier(HumidifierEntity):
"""Yandex Home humidifier entity"""
_is_on = None
_min_humidity = None
_max_humidity = None
_target_humidity = None
_precision = None
_is_muted = None
_is_ionization_on = None
_is_backlight_on = None
_supported = 0
def __init__(self, quasar: YandexQuasar, device: dict) -> None:
"""Initialize entity."""
self.quasar = quasar
self.device = device
@property
def unique_id(self):
"""Return entity unique id."""
return self.device["id"].replace("-", "")
@property
def name(self):
"""Return entity name."""
return self.device["name"]
@property
def is_on(self) -> bool:
"""Return if device is turned on."""
return self._is_on
@property
def min_humidity(self) -> int:
"""Return min humidity."""
return self._min_humidity
@property
def max_humidity(self) -> int:
"""Return max humidity."""
return self._max_humidity
@property
def precision(self) -> int:
"""Return target humidity precision."""
return self._precision
@property
def target_humidity(self) -> int:
"""Return target humidity."""
return self._target_humidity
@property
def is_muted(self) -> bool:
"""Return if device is muted."""
return self._is_muted
@property
def is_ionization_on(self) -> bool:
"""Return if ionization is turned on."""
return self._is_ionization_on
@property
def is_backlight_on(self) -> bool:
"""Return if backlight is turned on."""
return self._is_backlight_on
@property
def supported_features(self):
"""Return supported features."""
return self._supported
@property
def extra_state_attributes(self) -> dict[str, Any]:
"""Return the device specific state attributes."""
attributes = {
"is_muted": self.is_muted,
"is_ionization_on": self.is_ionization_on,
"is_backlight_on": self.is_backlight_on,
}
return attributes
async def init_params(self, capabilities: dict):
"""Initialize parameters."""
for capability in capabilities:
parameters = capability["parameters"]
instance = parameters.get("instance")
if instance == "humidity":
self._supported |= SUPPORT_TARGET_HUMIDITY
range_ = parameters["range"]
self._min_humidity = range_["min"]
self._max_humidity = range_["max"]
self._precision = range_["precision"]
async def async_update(self):
"""Update the entity."""
data = await self.quasar.get_device(self.device["id"])
self._attr_available = data["state"] == "online"
if self._is_on is None:
await self.init_params(data["capabilities"])
for capability in data["capabilities"]:
if not capability["retrievable"]:
continue
instance = capability["state"]["instance"]
value = capability["state"]["value"]
if instance == "on":
self._is_on = value
if instance == "humidity":
self._target_humidity = value
if instance == "mute":
self._is_muted = value
if instance == "ionization":
self._is_ionization_on = value
if instance == "backlight":
self._is_backlight_on = value
async def async_turn_on(self, **kwargs):
"""Turn on."""
await self.quasar.device_action(self.device["id"], on=True)
async def async_turn_off(self, **kwargs):
"""Turn off."""
await self.quasar.device_action(self.device["id"], on=False)
async def async_set_humidity(self, humidity):
"""Set humidity."""
await self.quasar.device_action(self.device["id"], humidity=humidity)
async def mute(self, state):
"""Mute humidifier."""
await self.quasar.device_action(self.device["id"], mute=state)
async def ionization(self, state):
"""Turn on/off ionization."""
await self.quasar.device_action(self.device["id"], ionization=state)
async def backlight(self, state):
"""Turn on/off backlight."""
await self.quasar.device_action(self.device["id"], backlight=state)
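# Illustrative service call from Home Assistant YAML (the entity id is
# hypothetical and the integration domain is assumed to be yandex_station;
# the `state` field follows HUMIDIFIER_STATE_USER_SCHEMA above):
#   service: yandex_station.backlight
#   target:
#     entity_id: humidifier.bedroom
#   data:
#     state: true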
|
[
"[email protected]"
] | |
fec05f42f7b72c82ba38dc3428a5783483be17a6
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03853/s779896391.py
|
6f97398bb0b315850d271c06415041efea07b88f
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 87 |
py
|
a,b=map(int,input().split())
for i in range(a):
S=input()
print(S)
print(S)
|
[
"[email protected]"
] | |
7721dbf166413b9eeee07f742fda689f3e4e3158
|
e0980f704a573894350e285f66f4cf390837238e
|
/.history/streams/blocks_20201029154240.py
|
5d26c40dc248b24e75e50c0955ce1170f1062551
|
[] |
no_license
|
rucpata/WagtailWebsite
|
28008474ec779d12ef43bceb61827168274a8b61
|
5aa44f51592f49c9a708fc5515ad877c6a29dfd9
|
refs/heads/main
| 2023-02-09T15:30:02.133415 | 2021-01-05T14:55:45 | 2021-01-05T14:55:45 | 303,961,094 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,772 |
py
|
from django import forms
from wagtail.core import blocks
from wagtail.images.blocks import ImageChooserBlock
from wagtail.contrib.table_block.blocks import TableBlock
# Validation
from django.core.exceptions import ValidationError
class TitleBlock(blocks.StructBlock):
    text = blocks.CharBlock(
        required = True,
        help_text='Text to display',
    )
    class Meta:
        template = 'streams/title_block.html'
        icon = 'edit'
        label = 'Title'
        help_text = 'Centered text to display on the page.'
class LinkValue(blocks.StructValue):
"""Dodatkowao logika dla lików"""
def url(self) -> str:
internal_page = self.get('internal_page')
external_link = self.get('external_link')
if internal_page:
return internal_page.url
elif external_link:
return external_link
return ''
class Link(blocks.StructBlock):
link_text = blocks.CharBlock(
max_length=50,
        default='More details'
)
internal_page = blocks.PageChooserBlock(
required=False
)
external_link = blocks.URLBlock(
required=False
)
class Meta:
value_class = LinkValue
class Card(blocks.StructBlock):
title = blocks.CharBlock(
max_length=100,
        help_text = 'Bold title for this card. Maximum 100 characters.'
)
text = blocks.TextBlock(
max_length=255,
        help_text='Optional text for this card. Maximum 255 characters.'
)
image = ImageChooserBlock(
        help_text = 'The image will be automatically cropped to 570 by 370 pixels.'
)
    link = Link(help_text = 'Choose a link')
class CardsBlock(blocks.StructBlock):
cards = blocks.ListBlock(
Card()
)
class Meta:
template = 'streams/card_block.html'
icon = 'image'
        label = 'Standard cards'
class RadioSelectBlock(blocks.ChoiceBlock):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.field.widget = forms.RadioSelect(
choices=self.field.widget.choices
)
class ImageAndTextBlock(blocks.StructBlock):
    image = ImageChooserBlock(help_text='Image automatically cropped to 786 by 552 px.')
image_alignment = RadioSelectBlock(
choices = (
            ('left','Image on the left'),
            ('right', 'Image on the right'),
),
default = 'left',
        help_text = 'Image on the left with text on the right, or image on the right with text on the left.'
)
title = blocks.CharBlock(
max_length=60,
        help_text='Maximum length 60 characters.'
)
text = blocks.CharBlock(
max_length = 140,
required = False,
)
link = Link()
class Meta:
template = 'streams/image_and_text_block.html'
icon = 'image'
        label = 'Image & Text'
class CallToActionBlock(blocks.StructBlock):
    title = blocks.CharBlock(
        max_length = 200,
        help_text = 'Maximum 200 characters.'
)
link = Link()
class Meta:
template = 'streams/call_to_action_block.html'
icon = 'plus'
        label = 'Call to action'
class PricingTableBlock(TableBlock):
"""Blok tabeli cen."""
class Meta:
template = 'streams/pricing_table_block.html'
        label = 'Price table'
icon = 'table'
        help_text = 'Your pricing tables should always contain 4 columns.'
'''
class RichTextWithTitleBlock(blocks.StructBlock):
title = blocks.CharBlock(max_length=50)
context = blocks.RichTextBlock(features=[])
class Meta:
template = 'streams/simple_richtext_block.html'
'''
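# Illustrative wiring of the blocks above into a page model (the model, field
# name and stream block names are assumptions; the StreamField import path
# matches the wagtail.core package already used above):
#   from wagtail.core.fields import StreamField
#   body = StreamField([
#       ('title', TitleBlock()),
#       ('cards', CardsBlock()),
#       ('image_and_text', ImageAndTextBlock()),
#       ('cta', CallToActionBlock()),
#       ('pricing_table', PricingTableBlock()),
#   ])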
|
[
"[email protected]"
] | |
d84fb94a99b36b6de87d7c2df65643fe96c30f94
|
f40ff2ac9d25137230c2a80f74be8fd013e73aca
|
/utopiantree.py
|
e8e5b7f386649bb4777219e312191f0aa444d743
|
[] |
no_license
|
SurajPatil314/HackerRank-challenges
|
805020147e5227498b674b9da4c99a915627882d
|
921b1b1d7c0cd4beae54f8833063e16aa33883ea
|
refs/heads/master
| 2021-06-30T16:18:27.431389 | 2021-06-13T22:59:55 | 2021-06-13T22:59:55 | 239,815,221 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 979 |
py
|
'''
https://www.hackerrank.com/challenges/utopian-tree/problem
The Utopian Tree goes through 2 cycles of growth every year. Each spring, it doubles in height. Each summer, its height increases by 1 meter.
Laura plants a Utopian Tree sapling with a height of 1 meter at the onset of spring. How tall will her tree be after n growth cycles?
'''
#!/bin/python3
import math
import os
import random
import re
import sys
# Complete the utopianTree function below.
def utopianTree(n):
ans = 1
if n==0:
return ans
for i in range(1,n+1):
if i%2 == 0:
ans = ans + 1
else:
ans = ans*2
return ans
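# Worked example (illustrative): for n = 4 starting from height 1 the height
# goes spring x2 -> 2, summer +1 -> 3, spring x2 -> 6, summer +1 -> 7,
# so utopianTree(4) == 7, and utopianTree(0) == 1 via the early return.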
if __name__ == '__main__':
fptr = open(os.environ['OUTPUT_PATH'], 'w')
t = int(input())
for t_itr in range(t):
n = int(input())
result = utopianTree(n)
fptr.write(str(result) + '\n')
fptr.close()
|
[
"[email protected]"
] | |
51dc4dbd22833e715ae231caae8e175f4626a237
|
344e2956b4e2a30a8ef7532d951f96d995d1dd1e
|
/18_mmaction/lib/mmcv/mmcv/video/io.py
|
dfddcfd9d23b2760773cf6ca2fd2a352d4de62ee
|
[
"Apache-2.0",
"LGPL-3.0-only",
"MIT",
"LicenseRef-scancode-proprietary-license",
"BSD-3-Clause",
"GPL-3.0-only"
] |
permissive
|
karndeepsingh/Monk_Object_Detection
|
e64199705326e4cd65e4b29946cae210a4ef9649
|
425fa50a3236cb9097389646275da06bf9185f6b
|
refs/heads/master
| 2022-12-22T18:26:53.933397 | 2020-09-28T12:49:50 | 2020-09-28T12:49:50 | 299,307,843 | 1 | 1 |
Apache-2.0
| 2020-09-28T12:52:18 | 2020-09-28T12:52:17 | null |
UTF-8
|
Python
| false | false | 10,321 |
py
|
# Copyright (c) Open-MMLab. All rights reserved.
import os.path as osp
from collections import OrderedDict
import cv2
from cv2 import (CAP_PROP_FOURCC, CAP_PROP_FPS, CAP_PROP_FRAME_COUNT,
CAP_PROP_FRAME_HEIGHT, CAP_PROP_FRAME_WIDTH,
CAP_PROP_POS_FRAMES, VideoWriter_fourcc)
from mmcv.utils import (check_file_exist, mkdir_or_exist, scandir,
track_progress)
class Cache:
def __init__(self, capacity):
self._cache = OrderedDict()
self._capacity = int(capacity)
if capacity <= 0:
raise ValueError('capacity must be a positive integer')
@property
def capacity(self):
return self._capacity
@property
def size(self):
return len(self._cache)
def put(self, key, val):
if key in self._cache:
return
if len(self._cache) >= self.capacity:
self._cache.popitem(last=False)
self._cache[key] = val
def get(self, key, default=None):
val = self._cache[key] if key in self._cache else default
return val
class VideoReader:
"""Video class with similar usage to a list object.
    This video wrapper class provides convenient APIs to access frames.
    OpenCV's VideoCapture class has a known issue where jumping to a certain
    frame may be inaccurate. This class works around it by checking the
    position after each jump.
    A cache is used when decoding videos, so if the same frame is visited a
    second time it does not need to be decoded again, as long as it is still
    stored in the cache.
:Example:
>>> import mmcv
>>> v = mmcv.VideoReader('sample.mp4')
>>> len(v) # get the total frame number with `len()`
120
>>> for img in v: # v is iterable
>>> mmcv.imshow(img)
>>> v[5] # get the 6th frame
"""
def __init__(self, filename, cache_capacity=10):
check_file_exist(filename, 'Video file not found: ' + filename)
self._vcap = cv2.VideoCapture(filename)
assert cache_capacity > 0
self._cache = Cache(cache_capacity)
self._position = 0
# get basic info
self._width = int(self._vcap.get(CAP_PROP_FRAME_WIDTH))
self._height = int(self._vcap.get(CAP_PROP_FRAME_HEIGHT))
self._fps = self._vcap.get(CAP_PROP_FPS)
self._frame_cnt = int(self._vcap.get(CAP_PROP_FRAME_COUNT))
self._fourcc = self._vcap.get(CAP_PROP_FOURCC)
@property
def vcap(self):
""":obj:`cv2.VideoCapture`: The raw VideoCapture object."""
return self._vcap
@property
def opened(self):
"""bool: Indicate whether the video is opened."""
return self._vcap.isOpened()
@property
def width(self):
"""int: Width of video frames."""
return self._width
@property
def height(self):
"""int: Height of video frames."""
return self._height
@property
def resolution(self):
"""tuple: Video resolution (width, height)."""
return (self._width, self._height)
@property
def fps(self):
"""float: FPS of the video."""
return self._fps
@property
def frame_cnt(self):
"""int: Total frames of the video."""
return self._frame_cnt
@property
def fourcc(self):
"""str: "Four character code" of the video."""
return self._fourcc
@property
def position(self):
"""int: Current cursor position, indicating frame decoded."""
return self._position
def _get_real_position(self):
return int(round(self._vcap.get(CAP_PROP_POS_FRAMES)))
def _set_real_position(self, frame_id):
self._vcap.set(CAP_PROP_POS_FRAMES, frame_id)
pos = self._get_real_position()
for _ in range(frame_id - pos):
self._vcap.read()
self._position = frame_id
def read(self):
"""Read the next frame.
        If the next frame has been decoded before and is in the cache, it is
        returned directly; otherwise it is decoded, cached and returned.
Returns:
ndarray or None: Return the frame if successful, otherwise None.
"""
# pos = self._position
if self._cache:
img = self._cache.get(self._position)
if img is not None:
ret = True
else:
if self._position != self._get_real_position():
self._set_real_position(self._position)
ret, img = self._vcap.read()
if ret:
self._cache.put(self._position, img)
else:
ret, img = self._vcap.read()
if ret:
self._position += 1
return img
def get_frame(self, frame_id):
"""Get frame by index.
Args:
frame_id (int): Index of the expected frame, 0-based.
Returns:
ndarray or None: Return the frame if successful, otherwise None.
"""
if frame_id < 0 or frame_id >= self._frame_cnt:
raise IndexError(
f'"frame_id" must be between 0 and {self._frame_cnt - 1}')
if frame_id == self._position:
return self.read()
if self._cache:
img = self._cache.get(frame_id)
if img is not None:
self._position = frame_id + 1
return img
self._set_real_position(frame_id)
ret, img = self._vcap.read()
if ret:
if self._cache:
self._cache.put(self._position, img)
self._position += 1
return img
def current_frame(self):
"""Get the current frame (frame that is just visited).
Returns:
ndarray or None: If the video is fresh, return None, otherwise
return the frame.
"""
if self._position == 0:
return None
return self._cache.get(self._position - 1)
def cvt2frames(self,
frame_dir,
file_start=0,
filename_tmpl='{:06d}.jpg',
start=0,
max_num=0,
show_progress=True):
"""Convert a video to frame images.
Args:
frame_dir (str): Output directory to store all the frame images.
file_start (int): Filenames will start from the specified number.
filename_tmpl (str): Filename template with the index as the
placeholder.
start (int): The starting frame index.
max_num (int): Maximum number of frames to be written.
show_progress (bool): Whether to show a progress bar.
"""
mkdir_or_exist(frame_dir)
if max_num == 0:
task_num = self.frame_cnt - start
else:
task_num = min(self.frame_cnt - start, max_num)
if task_num <= 0:
raise ValueError('start must be less than total frame number')
if start > 0:
self._set_real_position(start)
def write_frame(file_idx):
img = self.read()
filename = osp.join(frame_dir, filename_tmpl.format(file_idx))
cv2.imwrite(filename, img)
if show_progress:
track_progress(write_frame, range(file_start,
file_start + task_num))
else:
for i in range(task_num):
img = self.read()
if img is None:
break
filename = osp.join(frame_dir,
filename_tmpl.format(i + file_start))
cv2.imwrite(filename, img)
def __len__(self):
return self.frame_cnt
def __getitem__(self, index):
if isinstance(index, slice):
return [
self.get_frame(i)
for i in range(*index.indices(self.frame_cnt))
]
# support negative indexing
if index < 0:
index += self.frame_cnt
if index < 0:
raise IndexError('index out of range')
return self.get_frame(index)
def __iter__(self):
self._set_real_position(0)
return self
def __next__(self):
img = self.read()
if img is not None:
return img
else:
raise StopIteration
next = __next__
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self._vcap.release()
def frames2video(frame_dir,
video_file,
fps=30,
fourcc='XVID',
filename_tmpl='{:06d}.jpg',
start=0,
end=0,
show_progress=True):
"""Read the frame images from a directory and join them as a video.
Args:
frame_dir (str): The directory containing video frames.
video_file (str): Output filename.
fps (float): FPS of the output video.
        fourcc (str): Fourcc of the output video; this should be compatible
with the output file type.
filename_tmpl (str): Filename template with the index as the variable.
start (int): Starting frame index.
end (int): Ending frame index.
show_progress (bool): Whether to show a progress bar.
"""
if end == 0:
ext = filename_tmpl.split('.')[-1]
end = len([name for name in scandir(frame_dir, ext)])
first_file = osp.join(frame_dir, filename_tmpl.format(start))
check_file_exist(first_file, 'The start frame not found: ' + first_file)
img = cv2.imread(first_file)
height, width = img.shape[:2]
resolution = (width, height)
vwriter = cv2.VideoWriter(video_file, VideoWriter_fourcc(*fourcc), fps,
resolution)
def write_frame(file_idx):
filename = osp.join(frame_dir, filename_tmpl.format(file_idx))
img = cv2.imread(filename)
vwriter.write(img)
if show_progress:
track_progress(write_frame, range(start, end))
else:
for i in range(start, end):
filename = osp.join(frame_dir, filename_tmpl.format(i))
img = cv2.imread(filename)
vwriter.write(img)
vwriter.release()
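# Minimal usage sketch (illustrative; the file and directory names are
# hypothetical):
#   v = VideoReader('sample.mp4')
#   v.cvt2frames('frames/')                 # writes 000000.jpg, 000001.jpg, ...
#   frames2video('frames/', 'copy.avi', fps=v.fps)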
|
[
"[email protected]"
] | |
a226024dcb719920028ef52f87364fe61ccdb0d3
|
235fb362b5af1f7dbd90dc3819fe63f18e074e9d
|
/learn_django/test_Create/test_Create/wsgi.py
|
a0497b8ae5db66fb70718326dbcd16b5b0ed7552
|
[] |
no_license
|
cener-1999/learn_about_python
|
74c9b8c6a546224261d5577183a946a78ca7e84f
|
86cfc0a5621f86fc8a1885a39847d40b33137c49
|
refs/heads/master
| 2023-04-30T06:38:34.459506 | 2021-05-18T14:20:29 | 2021-05-18T14:20:29 | 368,473,253 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 399 |
py
|
"""
WSGI config for test_Create project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'test_Create.settings')
application = get_wsgi_application()
|
[
"[email protected]"
] | |
9cbc729eee969f3724cdb8e89268bcf22b9fc0eb
|
a25f7829512f09d9a25679b2ccbf0a4d970e8a44
|
/restiro/generators/__init__.py
|
90572410a7aa5439101b9ab74141c7cfb5cd6034
|
[
"MIT"
] |
permissive
|
meyt/restiro
|
1adaa27e6818ed0de29529b4e76c3829e376e23c
|
016ffe386656eda9fea490f348e1d5408a1e9608
|
refs/heads/master
| 2022-01-17T01:11:51.943211 | 2019-07-24T06:43:35 | 2019-07-24T06:43:35 | 95,980,903 | 0 | 0 |
MIT
| 2019-04-22T07:28:56 | 2017-07-01T18:23:55 |
Python
|
UTF-8
|
Python
| false | false | 136 |
py
|
from .base import BaseGenerator
from .markdown import MarkdownGenerator
from .json import JSONGenerator
from .mock import MockGenerator
|
[
"[email protected]"
] | |
19f664fb896775e20e494f366d1f1424d2c39aa1
|
52b5773617a1b972a905de4d692540d26ff74926
|
/.history/scale_20200709203334.py
|
f0e58bcaf9e76b14a3ab0ae93d883663fbab5442
|
[] |
no_license
|
MaryanneNjeri/pythonModules
|
56f54bf098ae58ea069bf33f11ae94fa8eedcabc
|
f4e56b1e4dda2349267af634a46f6b9df6686020
|
refs/heads/master
| 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 790 |
py
|
def balancing(strArr):
    sides = strArr[0][1:-1].split(", ")
    left = int(sides[0])
    right = int(sides[1])
    # convert every element to an int, stripping the surrounding brackets
    # and splitting on ", "
    weights = [int(x) for x in strArr[1][1:-1].split(", ")]
    # first try a single weight on either pan
    for i in range(len(weights)):
        if (left + weights[i] == right) or (right + weights[i] == left):
            print('weights', weights[i])
    # then try a pair of weights added to one pan
    for i in range(len(weights)):
        for j in range(i + 1, len(weights)):
            if (left + weights[i] + weights[j] == right) \
                    or (right + weights[i] + weights[j] == left) \
                    or (left + weights[i] == right):
                print('weights', weights[i], weights[j])
balancing(["[5, 9]", "[1, 2, 6, 7]"])
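# For the call above nothing is printed: with left = 5 and right = 9, no
# single weight and no pair of weights added to one pan satisfies the checks.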
|
[
"[email protected]"
] | |
d966f966eff2bfe0046d1c8f0c333cd97e208c4b
|
4836c4349bd65944b48fef01e2d2b7149479573c
|
/bin/pip3.6
|
b991042ffd8e1d511edfe2717f2d425b404b7251
|
[] |
no_license
|
ogol254/M1
|
c0b634e2a735002497bdf0114f656d3c12c65194
|
8761a4228ce91961922f0e722dba8191fce73a9b
|
refs/heads/master
| 2020-03-18T17:59:20.979639 | 2018-05-27T16:46:07 | 2018-05-27T16:46:07 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 384 |
6
|
#!/Users/Mcogol/venv/M1/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==9.0.1','console_scripts','pip3.6'
__requires__ = 'pip==9.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==9.0.1', 'console_scripts', 'pip3.6')()
)
|
[
"[email protected]"
] | |
08bedf82fa1c96657922a3c2500f8c8b9ef6083c
|
e9032e64138d7b9dd90a330dfe4588e2c83f6667
|
/google/cloud/compute_v1/services/firewall_policies/transports/base.py
|
44b407fc53b599b3a5953117712676512b70881f
|
[
"Apache-2.0"
] |
permissive
|
Ctfbuster/python-compute
|
6cff2418969009794c3fadadc4c45e20d7b40509
|
7a9e8324e08c46a93050908760b2b5aca054a863
|
refs/heads/main
| 2023-08-26T12:37:52.363526 | 2021-10-04T15:34:37 | 2021-10-04T15:34:37 | 412,884,620 | 0 | 0 |
Apache-2.0
| 2021-10-02T18:49:05 | 2021-10-02T18:49:03 | null |
UTF-8
|
Python
| false | false | 13,664 |
py
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
import packaging.version
import pkg_resources
from requests import __version__ as requests_version
import google.auth # type: ignore
import google.api_core # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.compute_v1.types import compute
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version,
grpc_version=None,
rest_version=requests_version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
try:
# google.auth.__version__ was added in 1.26.0
_GOOGLE_AUTH_VERSION = google.auth.__version__
except AttributeError:
try: # try pkg_resources if it is available
_GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version
except pkg_resources.DistributionNotFound: # pragma: NO COVER
_GOOGLE_AUTH_VERSION = None
class FirewallPoliciesTransport(abc.ABC):
"""Abstract transport class for FirewallPolicies."""
AUTH_SCOPES = (
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/cloud-platform",
)
DEFAULT_HOST: str = "compute.googleapis.com"
def __init__(
self,
*,
host: str = DEFAULT_HOST,
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
**kwargs,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
host += ":443"
self._host = host
scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)
# Save the scopes.
self._scopes = scopes
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials and credentials_file:
raise core_exceptions.DuplicateCredentialArgs(
"'credentials_file' and 'credentials' are mutually exclusive"
)
if credentials_file is not None:
credentials, _ = google.auth.load_credentials_from_file(
credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
)
elif credentials is None:
credentials, _ = google.auth.default(
**scopes_kwargs, quota_project_id=quota_project_id
)
# If the credentials is service account credentials, then always try to use self signed JWT.
if (
always_use_jwt_access
and isinstance(credentials, service_account.Credentials)
and hasattr(service_account.Credentials, "with_always_use_jwt_access")
):
credentials = credentials.with_always_use_jwt_access(True)
# Save the credentials.
self._credentials = credentials
# TODO(busunkim): This method is in the base transport
# to avoid duplicating code across the transport classes. These functions
# should be deleted once the minimum required versions of google-auth is increased.
# TODO: Remove this function once google-auth >= 1.25.0 is required
@classmethod
def _get_scopes_kwargs(
cls, host: str, scopes: Optional[Sequence[str]]
) -> Dict[str, Optional[Sequence[str]]]:
"""Returns scopes kwargs to pass to google-auth methods depending on the google-auth version"""
scopes_kwargs = {}
if _GOOGLE_AUTH_VERSION and (
packaging.version.parse(_GOOGLE_AUTH_VERSION)
>= packaging.version.parse("1.25.0")
):
scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES}
else:
scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES}
return scopes_kwargs
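    # Illustrative shapes of the returned kwargs (read off the branches above,
    # not from google-auth documentation):
    #   google-auth >= 1.25.0: {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES}
    #   older google-auth:     {"scopes": scopes or cls.AUTH_SCOPES}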
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
self.add_association: gapic_v1.method.wrap_method(
self.add_association, default_timeout=None, client_info=client_info,
),
self.add_rule: gapic_v1.method.wrap_method(
self.add_rule, default_timeout=None, client_info=client_info,
),
self.clone_rules: gapic_v1.method.wrap_method(
self.clone_rules, default_timeout=None, client_info=client_info,
),
self.delete: gapic_v1.method.wrap_method(
self.delete, default_timeout=None, client_info=client_info,
),
self.get: gapic_v1.method.wrap_method(
self.get, default_timeout=None, client_info=client_info,
),
self.get_association: gapic_v1.method.wrap_method(
self.get_association, default_timeout=None, client_info=client_info,
),
self.get_iam_policy: gapic_v1.method.wrap_method(
self.get_iam_policy, default_timeout=None, client_info=client_info,
),
self.get_rule: gapic_v1.method.wrap_method(
self.get_rule, default_timeout=None, client_info=client_info,
),
self.insert: gapic_v1.method.wrap_method(
self.insert, default_timeout=None, client_info=client_info,
),
self.list: gapic_v1.method.wrap_method(
self.list, default_timeout=None, client_info=client_info,
),
self.list_associations: gapic_v1.method.wrap_method(
self.list_associations, default_timeout=None, client_info=client_info,
),
self.move: gapic_v1.method.wrap_method(
self.move, default_timeout=None, client_info=client_info,
),
self.patch: gapic_v1.method.wrap_method(
self.patch, default_timeout=None, client_info=client_info,
),
self.patch_rule: gapic_v1.method.wrap_method(
self.patch_rule, default_timeout=None, client_info=client_info,
),
self.remove_association: gapic_v1.method.wrap_method(
self.remove_association, default_timeout=None, client_info=client_info,
),
self.remove_rule: gapic_v1.method.wrap_method(
self.remove_rule, default_timeout=None, client_info=client_info,
),
self.set_iam_policy: gapic_v1.method.wrap_method(
self.set_iam_policy, default_timeout=None, client_info=client_info,
),
self.test_iam_permissions: gapic_v1.method.wrap_method(
self.test_iam_permissions,
default_timeout=None,
client_info=client_info,
),
}
@property
def add_association(
self,
) -> Callable[
[compute.AddAssociationFirewallPolicyRequest],
Union[compute.Operation, Awaitable[compute.Operation]],
]:
raise NotImplementedError()
@property
def add_rule(
self,
) -> Callable[
[compute.AddRuleFirewallPolicyRequest],
Union[compute.Operation, Awaitable[compute.Operation]],
]:
raise NotImplementedError()
@property
def clone_rules(
self,
) -> Callable[
[compute.CloneRulesFirewallPolicyRequest],
Union[compute.Operation, Awaitable[compute.Operation]],
]:
raise NotImplementedError()
@property
def delete(
self,
) -> Callable[
[compute.DeleteFirewallPolicyRequest],
Union[compute.Operation, Awaitable[compute.Operation]],
]:
raise NotImplementedError()
@property
def get(
self,
) -> Callable[
[compute.GetFirewallPolicyRequest],
Union[compute.FirewallPolicy, Awaitable[compute.FirewallPolicy]],
]:
raise NotImplementedError()
@property
def get_association(
self,
) -> Callable[
[compute.GetAssociationFirewallPolicyRequest],
Union[
compute.FirewallPolicyAssociation,
Awaitable[compute.FirewallPolicyAssociation],
],
]:
raise NotImplementedError()
@property
def get_iam_policy(
self,
) -> Callable[
[compute.GetIamPolicyFirewallPolicyRequest],
Union[compute.Policy, Awaitable[compute.Policy]],
]:
raise NotImplementedError()
@property
def get_rule(
self,
) -> Callable[
[compute.GetRuleFirewallPolicyRequest],
Union[compute.FirewallPolicyRule, Awaitable[compute.FirewallPolicyRule]],
]:
raise NotImplementedError()
@property
def insert(
self,
) -> Callable[
[compute.InsertFirewallPolicyRequest],
Union[compute.Operation, Awaitable[compute.Operation]],
]:
raise NotImplementedError()
@property
def list(
self,
) -> Callable[
[compute.ListFirewallPoliciesRequest],
Union[compute.FirewallPolicyList, Awaitable[compute.FirewallPolicyList]],
]:
raise NotImplementedError()
@property
def list_associations(
self,
) -> Callable[
[compute.ListAssociationsFirewallPolicyRequest],
Union[
compute.FirewallPoliciesListAssociationsResponse,
Awaitable[compute.FirewallPoliciesListAssociationsResponse],
],
]:
raise NotImplementedError()
@property
def move(
self,
) -> Callable[
[compute.MoveFirewallPolicyRequest],
Union[compute.Operation, Awaitable[compute.Operation]],
]:
raise NotImplementedError()
@property
def patch(
self,
) -> Callable[
[compute.PatchFirewallPolicyRequest],
Union[compute.Operation, Awaitable[compute.Operation]],
]:
raise NotImplementedError()
@property
def patch_rule(
self,
) -> Callable[
[compute.PatchRuleFirewallPolicyRequest],
Union[compute.Operation, Awaitable[compute.Operation]],
]:
raise NotImplementedError()
@property
def remove_association(
self,
) -> Callable[
[compute.RemoveAssociationFirewallPolicyRequest],
Union[compute.Operation, Awaitable[compute.Operation]],
]:
raise NotImplementedError()
@property
def remove_rule(
self,
) -> Callable[
[compute.RemoveRuleFirewallPolicyRequest],
Union[compute.Operation, Awaitable[compute.Operation]],
]:
raise NotImplementedError()
@property
def set_iam_policy(
self,
) -> Callable[
[compute.SetIamPolicyFirewallPolicyRequest],
Union[compute.Policy, Awaitable[compute.Policy]],
]:
raise NotImplementedError()
@property
def test_iam_permissions(
self,
) -> Callable[
[compute.TestIamPermissionsFirewallPolicyRequest],
Union[
compute.TestPermissionsResponse, Awaitable[compute.TestPermissionsResponse]
],
]:
raise NotImplementedError()
__all__ = ("FirewallPoliciesTransport",)
|
[
"[email protected]"
] | |
fd48fb9af25fb41caa09a6fdceb03bf2615e8c43
|
0f1b67ee77adab881409b9bea17dfbc6f8c15c27
|
/backend/restless_sunset_27759/settings.py
|
12d0ff62cfdea9c0787f83e5a3e5afd6a3e75538
|
[] |
no_license
|
crowdbotics-apps/restless-sunset-27759
|
e7d7279616d1528815c4e33ca55139f5ac5a150c
|
8ae02190c9fdb515ff75bd9e33523371d97bb3d8
|
refs/heads/master
| 2023-05-31T06:22:27.536695 | 2021-06-06T00:40:25 | 2021-06-06T00:40:25 | 374,238,654 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 7,129 |
py
|
"""
Django settings for restless_sunset_27759 project.
Generated by 'django-admin startproject' using Django 2.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import environ
import logging
env = environ.Env()
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool("DEBUG", default=False)
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env.str("SECRET_KEY")
ALLOWED_HOSTS = env.list("HOST", default=["*"])
SITE_ID = 1
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SECURE_SSL_REDIRECT = env.bool("SECURE_REDIRECT", default=False)
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites'
]
LOCAL_APPS = [
'home',
'modules',
'users.apps.UsersConfig',
]
THIRD_PARTY_APPS = [
'rest_framework',
'rest_framework.authtoken',
'rest_auth',
'rest_auth.registration',
'bootstrap4',
'allauth',
'allauth.account',
'allauth.socialaccount',
'allauth.socialaccount.providers.google',
'django_extensions',
'drf_yasg',
'storages',
# start fcm_django push notifications
'fcm_django',
# end fcm_django push notifications
]
INSTALLED_APPS += LOCAL_APPS + THIRD_PARTY_APPS
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'restless_sunset_27759.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'web_build')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'restless_sunset_27759.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
if env.str("DATABASE_URL", default=None):
DATABASES = {
'default': env.db()
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
MIDDLEWARE += ['whitenoise.middleware.WhiteNoiseMiddleware']
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend'
)
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static'), os.path.join(BASE_DIR, 'web_build/static')]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# allauth / users
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = 'email'
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = "optional"
ACCOUNT_CONFIRM_EMAIL_ON_GET = True
ACCOUNT_LOGIN_ON_EMAIL_CONFIRMATION = True
ACCOUNT_UNIQUE_EMAIL = True
LOGIN_REDIRECT_URL = "users:redirect"
ACCOUNT_ADAPTER = "users.adapters.AccountAdapter"
SOCIALACCOUNT_ADAPTER = "users.adapters.SocialAccountAdapter"
ACCOUNT_ALLOW_REGISTRATION = env.bool("ACCOUNT_ALLOW_REGISTRATION", True)
SOCIALACCOUNT_ALLOW_REGISTRATION = env.bool("SOCIALACCOUNT_ALLOW_REGISTRATION", True)
REST_AUTH_SERIALIZERS = {
# Replace password reset serializer to fix 500 error
"PASSWORD_RESET_SERIALIZER": "home.api.v1.serializers.PasswordSerializer",
}
REST_AUTH_REGISTER_SERIALIZERS = {
# Use custom serializer that has no username and matches web signup
"REGISTER_SERIALIZER": "home.api.v1.serializers.SignupSerializer",
}
# Custom user model
AUTH_USER_MODEL = "users.User"
EMAIL_HOST = env.str("EMAIL_HOST", "smtp.sendgrid.net")
EMAIL_HOST_USER = env.str("SENDGRID_USERNAME", "")
EMAIL_HOST_PASSWORD = env.str("SENDGRID_PASSWORD", "")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
# AWS S3 config
AWS_ACCESS_KEY_ID = env.str("AWS_ACCESS_KEY_ID", "")
AWS_SECRET_ACCESS_KEY = env.str("AWS_SECRET_ACCESS_KEY", "")
AWS_STORAGE_BUCKET_NAME = env.str("AWS_STORAGE_BUCKET_NAME", "")
AWS_STORAGE_REGION = env.str("AWS_STORAGE_REGION", "")
USE_S3 = (
AWS_ACCESS_KEY_ID and
AWS_SECRET_ACCESS_KEY and
AWS_STORAGE_BUCKET_NAME and
AWS_STORAGE_REGION
)
if USE_S3:
AWS_S3_CUSTOM_DOMAIN = env.str("AWS_S3_CUSTOM_DOMAIN", "")
AWS_S3_OBJECT_PARAMETERS = {"CacheControl": "max-age=86400"}
AWS_DEFAULT_ACL = env.str("AWS_DEFAULT_ACL", "public-read")
AWS_MEDIA_LOCATION = env.str("AWS_MEDIA_LOCATION", "media")
AWS_AUTO_CREATE_BUCKET = env.bool("AWS_AUTO_CREATE_BUCKET", True)
DEFAULT_FILE_STORAGE = env.str(
"DEFAULT_FILE_STORAGE", "home.storage_backends.MediaStorage"
)
MEDIA_URL = '/mediafiles/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'mediafiles')
# start fcm_django push notifications
FCM_DJANGO_SETTINGS = {
"FCM_SERVER_KEY": env.str("FCM_SERVER_KEY", "")
}
# end fcm_django push notifications
# Swagger settings for api docs
SWAGGER_SETTINGS = {
"DEFAULT_INFO": f"{ROOT_URLCONF}.api_info",
}
if DEBUG or not (EMAIL_HOST_USER and EMAIL_HOST_PASSWORD):
# output email to console instead of sending
if not DEBUG:
logging.warning("You should setup `SENDGRID_USERNAME` and `SENDGRID_PASSWORD` env vars to send emails.")
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
|
[
"[email protected]"
] | |
d010596cc59c1d09eea01fdb646b958bd1c2b051
|
c9fe05f893deff75232aabca4e877c144972249a
|
/arcpyenv/arcgispro-py3-clone/Lib/site-packages/osgeo/samples/tigerpoly.py
|
15a90c3c88116baf036cef900fe7a427f5323cb2
|
[
"Python-2.0"
] |
permissive
|
SherbazHashmi/HackathonServer
|
4d1dc7f0122a701a0f3a17787d32efe83bc67601
|
a874fe7e5c95196e4de68db2da0e2a05eb70e5d8
|
refs/heads/master
| 2022-12-26T06:46:33.893749 | 2019-11-03T10:49:47 | 2019-11-03T10:49:47 | 218,912,149 | 3 | 3 | null | 2022-12-11T11:52:37 | 2019-11-01T04:16:38 |
Python
|
UTF-8
|
Python
| false | false | 7,060 |
py
|
#!/usr/bin/env python
###############################################################################
# $Id$
#
# Project: OGR Python samples
# Purpose: Assemble TIGER Polygons.
# Author: Frank Warmerdam, [email protected]
#
###############################################################################
# Copyright (c) 2003, Frank Warmerdam <[email protected]>
# Copyright (c) 2009, Even Rouault <even dot rouault at mines-paris dot org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import sys
from osgeo import ogr
from osgeo import osr
#############################################################################
class Module:
def __init__(self):
self.lines = {}
self.poly_line_links = {}
#############################################################################
def Usage():
print('Usage: tigerpoly.py infile [outfile].shp')
print('')
sys.exit(1)
#############################################################################
# Argument processing.
infile = None
outfile = None
i = 1
while i < len(sys.argv):
arg = sys.argv[i]
if infile is None:
infile = arg
elif outfile is None:
outfile = arg
else:
Usage()
i = i + 1
if outfile is None:
outfile = 'poly.shp'
if infile is None:
Usage()
#############################################################################
# Open the datasource to operate on.
ds = ogr.Open(infile, update=0)
poly_layer = ds.GetLayerByName('Polygon')
#############################################################################
# Create output file for the composed polygons.
nad83 = osr.SpatialReference()
nad83.SetFromUserInput('NAD83')
shp_driver = ogr.GetDriverByName('ESRI Shapefile')
shp_driver.DeleteDataSource(outfile)
shp_ds = shp_driver.CreateDataSource(outfile)
shp_layer = shp_ds.CreateLayer('out', geom_type=ogr.wkbPolygon,
srs=nad83)
src_defn = poly_layer.GetLayerDefn()
poly_field_count = src_defn.GetFieldCount()
for fld_index in range(poly_field_count):
src_fd = src_defn.GetFieldDefn(fld_index)
fd = ogr.FieldDefn(src_fd.GetName(), src_fd.GetType())
fd.SetWidth(src_fd.GetWidth())
fd.SetPrecision(src_fd.GetPrecision())
shp_layer.CreateField(fd)
#############################################################################
# Read all features in the line layer, holding just the geometry in a hash
# for fast lookup by TLID.
line_layer = ds.GetLayerByName('CompleteChain')
line_count = 0
modules_hash = {}
feat = line_layer.GetNextFeature()
geom_id_field = feat.GetFieldIndex('TLID')
tile_ref_field = feat.GetFieldIndex('MODULE')
while feat is not None:
geom_id = feat.GetField(geom_id_field)
tile_ref = feat.GetField(tile_ref_field)
try:
module = modules_hash[tile_ref]
    except KeyError:
module = Module()
modules_hash[tile_ref] = module
module.lines[geom_id] = feat.GetGeometryRef().Clone()
line_count = line_count + 1
feat.Destroy()
feat = line_layer.GetNextFeature()
print('Got %d lines in %d modules.' % (line_count, len(modules_hash)))
#############################################################################
# Read all polygon/chain links and build a hash keyed by POLY_ID listing
# the chains (by TLID) attached to it.
link_layer = ds.GetLayerByName('PolyChainLink')
feat = link_layer.GetNextFeature()
geom_id_field = feat.GetFieldIndex('TLID')
tile_ref_field = feat.GetFieldIndex('MODULE')
lpoly_field = feat.GetFieldIndex('POLYIDL')
rpoly_field = feat.GetFieldIndex('POLYIDR')
link_count = 0
while feat is not None:
module = modules_hash[feat.GetField(tile_ref_field)]
tlid = feat.GetField(geom_id_field)
lpoly_id = feat.GetField(lpoly_field)
rpoly_id = feat.GetField(rpoly_field)
if lpoly_id == rpoly_id:
feat.Destroy()
feat = link_layer.GetNextFeature()
continue
try:
module.poly_line_links[lpoly_id].append(tlid)
    except KeyError:
module.poly_line_links[lpoly_id] = [tlid]
try:
module.poly_line_links[rpoly_id].append(tlid)
    except KeyError:
module.poly_line_links[rpoly_id] = [tlid]
link_count = link_count + 1
feat.Destroy()
feat = link_layer.GetNextFeature()
print('Processed %d links.' % link_count)
#############################################################################
# Process all polygon features.
feat = poly_layer.GetNextFeature()
tile_ref_field = feat.GetFieldIndex('MODULE')
polyid_field = feat.GetFieldIndex('POLYID')
poly_count = 0
degenerate_count = 0
while feat is not None:
module = modules_hash[feat.GetField(tile_ref_field)]
polyid = feat.GetField(polyid_field)
tlid_list = module.poly_line_links[polyid]
link_coll = ogr.Geometry(type=ogr.wkbGeometryCollection)
for tlid in tlid_list:
geom = module.lines[tlid]
link_coll.AddGeometry(geom)
try:
poly = ogr.BuildPolygonFromEdges(link_coll)
if poly.GetGeometryRef(0).GetPointCount() < 4:
degenerate_count = degenerate_count + 1
poly.Destroy()
feat.Destroy()
feat = poly_layer.GetNextFeature()
continue
# print poly.ExportToWkt()
# feat.SetGeometryDirectly( poly )
feat2 = ogr.Feature(feature_def=shp_layer.GetLayerDefn())
for fld_index in range(poly_field_count):
feat2.SetField(fld_index, feat.GetField(fld_index))
feat2.SetGeometryDirectly(poly)
shp_layer.CreateFeature(feat2)
feat2.Destroy()
poly_count = poly_count + 1
    except Exception:
print('BuildPolygonFromEdges failed.')
feat.Destroy()
feat = poly_layer.GetNextFeature()
if degenerate_count:
print('Discarded %d degenerate polygons.' % degenerate_count)
print('Built %d polygons.' % poly_count)
#############################################################################
# Cleanup
shp_ds.Destroy()
ds.Destroy()
|
[
"[email protected]"
] | |
532b32bc9e36d22b7092537e44b75a301a3bc920
|
17df5351498798ad348ee1ea3a26835f6ef7de49
|
/linak_dpg_bt/synchronized.py
|
8d5623594ea79015df0e03b4386414fcf7a03cd6
|
[
"MIT"
] |
permissive
|
anetczuk/linak_bt_desk
|
8e4c8f514d6671af18962b0e13ecda3210778421
|
7ac5fe1b69638976326842b27d76e52f0cc958fd
|
refs/heads/master
| 2023-05-28T04:52:09.859883 | 2023-05-12T19:58:12 | 2023-05-12T19:58:12 | 142,626,557 | 20 | 5 | null | 2018-07-27T21:52:13 | 2018-07-27T21:52:13 | null |
UTF-8
|
Python
| false | false | 2,547 |
py
|
'''
Implementation of the '@synchronized' method decorator. It mirrors the
functionality of the 'synchronized' keyword from the Java language.
It accepts one optional argument -- the name of the lock field declared
within the object.
Usage examples:
@synchronized
def send_dpg_write_command(self, dpgCommandType, data):
pass
@synchronized()
def send_dpg_write_command(self, dpgCommandType, data):
pass
@synchronized("myLock")
def send_dpg_write_command(self, dpgCommandType, data):
pass
'''
import threading
from functools import wraps
def dirprint(var):
names = dir(var)
for name in names:
if name == "__globals__":
print( name, ": --globals--" )
else:
value = getattr(var, name)
print( name, ":", value )
def extractSelf(func, decorator, *args):
params = args
if len(params) < 1:
return None
    ## 'self' always goes as the first parameter
firstParam = params[0]
fName = func.__name__
    if not hasattr(firstParam, fName):
return None
## object has method with the same name -- check if it has the same decorator
method = getattr(firstParam, fName)
if checkMethod(decorator, method):
return firstParam
return None
def checkMethod(func, method):
    return method.__func__ == func
##
## Definition of function decorator
##
def synchronized_with_arg(lock_name=None):
    if lock_name is None:
        lock_name = "_methods_lock"
    ## guards lazy creation of the per-instance lock: without it two threads
    ## could race on the hasattr/setattr pair below and hold different locks
    creation_guard = threading.RLock()
    def synced_method(func):
        ### every decorated method has its own instance of 'decorator()' function
        @wraps(func)
        def decorator(self, *args, **kws):
            # owner = extractSelf(func, decorator, *args)
            # if owner == None:
            #     return func(*args, **kws)
            ## create the instance lock exactly once, then synchronize on it
            with creation_guard:
                if not hasattr(self, lock_name):
                    setattr(self, lock_name, threading.RLock())
                lock = getattr(self, lock_name)
            with lock:
                return func(self, *args, **kws)
        return decorator
    return synced_method
def synchronized(lock_name = None):
if callable(lock_name):
### lock_name contains function to call
function = lock_name
synced = synchronized_with_arg()
return synced(function)
else:
### lock_name contains name of lock to handle
synced = synchronized_with_arg(lock_name)
return synced
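## Minimal usage sketch (illustrative): the decorator lazily creates an RLock
## on the instance, so concurrent calls on the same object serialize while
## calls on different objects proceed independently.
##
## class Worker:
##     @synchronized
##     def step(self):
##         ...  # at most one thread per Worker instance executes here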
|
[
"[email protected]"
] | |
f1a872bbd8dac19294b36e06e80294270286549d
|
c6e22a6901bc40ba92a0470c6323929368727bbb
|
/src/podcast/admin.py
|
c824c01263680eea3888bf711ff6318373ef4be6
|
[] |
no_license
|
iamgaddiel/learners_coner
|
bdc47c7caac9898ca3a8836f1ad972afa9f88cf8
|
fb3ea68de8c02d1f1db6177b7c267a743a0b5a32
|
refs/heads/main
| 2023-08-04T06:12:08.728355 | 2021-09-13T04:24:42 | 2021-09-13T04:24:42 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 92 |
py
|
from django.contrib import admin
from .models import Podcast
admin.site.register(Podcast)
|
[
"[email protected]"
] | |
4dde1224a57efff18e13fe73847ae88291ab6578
|
6ad700a44e2d99d5e66115f10d133451c0e860ee
|
/yyy.py
|
0dbff290fe1ee91807ec39a61e8807d0d131f900
|
[] |
no_license
|
suganthicj/yyy
|
992388db4a5184ba4779a5e39e8a751ef75333c8
|
f036e67f1874edc76c4df400275a13737a4e6694
|
refs/heads/master
| 2020-06-21T02:05:39.750322 | 2019-07-17T05:07:44 | 2019-07-17T05:07:44 | 197,318,890 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 586 |
py
|
def isComposite(n):
# Corner cases
if (n <= 1):
return False
if (n <= 3):
return False
# This is checked so that we can skip
# middle five numbers in below loop
if (n % 2 == 0 or n % 3 == 0):
return True
i = 5
while(i * i <= n):
if (n % i == 0 or n % (i + 2) == 0):
return True
i = i + 6
return False
# Driver Program to test above function
print("true") if(isComposite(11)) else print("false")
print("true") if(isComposite(15)) else print("false")
|
[
"[email protected]"
] | |
028ebf4e6b1845fbf22d97ccc8aada2ef4353edf
|
efb60258270c70d0b16581e8b6fe32001015b16b
|
/modules/services/dbservices/media/save_dir_audio_service.py
|
c7d5ffbcfc1efaa4699c5873b82a1c148fffe38e
|
[
"LicenseRef-scancode-public-domain"
] |
permissive
|
signeus/API-Web
|
bd48177201007e2091828458da3572fde4f24755
|
b9fe99465a178666299a079f99a42dafc80c5bb7
|
refs/heads/master
| 2021-01-11T20:30:50.964928 | 2017-05-19T09:31:01 | 2017-05-19T09:31:01 | 79,131,449 | 1 | 1 | null | 2017-04-07T08:42:55 | 2017-01-16T15:25:05 |
Python
|
UTF-8
|
Python
| false | false | 493 |
py
|
# -*- coding: utf-8 -*-
from services.interfaces.i_service import IService
class SaveDirAudioService(IService):
def __init__(self, core, parameters):
super(SaveDirAudioService, self).__init__(core, parameters)
def run(self):
path = self.parameters.get("path", "unknown/")
_id = self.parameters.get("id", "")
data = self.parameters.get("data", "")
return self.core.InternalOperation("saveAudio",{"path":path,"filename":str(_id), "data":data})
|
[
"[email protected]"
] | |
18ea2824a677d14685ff06a804a57f4f1f3a119c
|
9da47968b4a023c33119d040f4d12942047dccf7
|
/recipients/code/dashboard/dashboard/urls.py
|
42944346dd97d449ad24d0aec2979fe9c49c0230
|
[] |
no_license
|
ajgara/whoscorecards
|
05597de11793de24fcc6109ab1a75ebc49757693
|
8554e154f666fa4af24bf0782e244c1f3c179dd0
|
refs/heads/master
| 2021-01-24T22:28:05.225717 | 2015-10-22T21:44:44 | 2015-10-22T21:44:44 | 25,478,944 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 513 |
py
|
from django.conf.urls import patterns, include, url
from django.conf import settings
from django.views.static import serve
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'dashboard.views.home', name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^oda/', include('oda.urls')),
)
#if settings.DEBUG:
# urlpatterns += patterns('',
# (r'^site_media/(?P<path>.*)$', serve, {'document_root': '/path/to/media'}),
# )
|
[
"[email protected]"
] | |
37493394aee839e6a7f89c89857ef7d0914c18b4
|
2f5a18e24e1e14c2e778aa0370b9aa2944323415
|
/abupy/UtilBu/ABuStatsUtil.py
|
a94d7d0c5cd4c6062ad613a05f5ae7686e6ad96f
|
[
"MIT"
] |
permissive
|
luqin/firefly
|
b739ebd61d29f432d58f9fbe20f8886b4547c3e0
|
2e5ab17f2d20deb3c68c927f6208ea89db7c639d
|
refs/heads/master
| 2022-12-22T14:25:18.822625 | 2019-02-26T15:03:39 | 2019-02-26T15:03:39 | 165,602,881 | 1 | 0 |
MIT
| 2022-12-08T01:38:23 | 2019-01-14T05:51:34 |
Python
|
UTF-8
|
Python
| false | false | 30,628 |
py
|
# -*- encoding:utf-8 -*-
"""
    Statistics-related utility module.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
try:
from collections import OrderedDict
except ImportError:
from ..ExtBu.odict import OrderedDict
from collections import namedtuple
try:
    # collections.Iterable was removed in Python 3.10
    from collections.abc import Iterable
except ImportError:
    from collections import Iterable
import logging
import math
import functools
import numpy as np
import pandas as pd
import scipy.stats as scs
from numpy import linalg as la
from sklearn.metrics.pairwise import euclidean_distances, manhattan_distances, cosine_distances
from ..CoreBu import ABuEnv
from ..UtilBu import ABuScalerUtil
from ..CoreBu.ABuFixes import six
from ..CoreBu.ABuPdHelper import pd_rolling_mean
__author__ = '阿布'
__weixin__ = 'abu_quant'
g_euclidean_safe = True
# noinspection PyUnresolvedReferences
def _distance_matrix(distance_func, df, scale_end=True, to_similar=False):
"""
    Not a pairwise distance computation; restricted to a single matrix input, which must be a
    pd.DataFrame or np.array or nested iterable [[], []]
    :param distance_func: function used to compute the distance
    :param df: pd.DataFrame or np.array or nested iterable [[], []]; called df because it is
               converted to a pd.DataFrame internally
    :param scale_end: whether to normalize the resulting matrix
    :param to_similar: whether to convert the output to similarity values afterwards
    :return: distance_matrix, a pd.DataFrame object
"""
if not callable(distance_func):
        raise TypeError('distance_func must be callable!!!')
if isinstance(df, np.ndarray):
        # convert np.ndarray to DataFrame for uniform handling
df = pd.DataFrame(df)
if not isinstance(df, pd.DataFrame):
if all(isinstance(arr_item, Iterable) for arr_item in df):
            # if the sub-sequence elements are iterable too, convert to np.array first, then DataFrame
df = pd.DataFrame(np.array(df))
else:
            raise TypeError('df must be a pd.DataFrame object!!!')
    # distances are computed over columns, hence df.T
distance = distance_func(df.T)
if scale_end:
        # TODO: make the normalization method configurable; scaler_mm is used for now
distance = ABuScalerUtil.scaler_mm(distance)
if to_similar:
        # to_similar only takes effect when scale_end is applied; otherwise it is meaningless
distance = 1 - distance
    # wrap the resulting distances in a pd.DataFrame, using df.columns as both row and column index
distance_df = pd.DataFrame(distance, index=df.columns, columns=df.columns)
return distance_df
def _distance_xy(distance_func, x, y):
"""
    Pairwise distance computation with inputs x and y.
    :param distance_func: function used to compute the distance
    :param x: iterable sequence
    :param y: iterable sequence
    :return: float value
"""
if hasattr(x, 'shape') and len(x.shape) == 1:
        # has a shape, but e.g. (10,) is corrected to (1, 10)
x = x.reshape(1, -1)
if hasattr(y, 'shape') and len(y.shape) == 1:
        # has a shape, but e.g. (10,) is corrected to (1, 10)
y = y.reshape(1, -1)
distance = distance_func(x, y)
if isinstance(distance, float):
return distance
return distance_func(x, y)[0][0]
def euclidean_distance_xy(x, y, to_similar=False):
"""
    Euclidean distance (L2 norm) between two sequences. g_euclidean_safe controls whether the
    computation uses euclidean_distances or la.norm; the efficiency differs as follows:
    euclidean_distances: 10000 loops, best of 3: 128 µs per loop
    la.norm            : 10000 loops, best of 3: 89.6 µs per loop
    Switch modes only for batched, time-consuming workloads; otherwise do not switch.
    :param x: iterable sequence
    :param y: iterable sequence
    :param to_similar: whether to convert the output to a similarity value afterwards
    :return: float value
"""
if g_euclidean_safe:
euclidean = lambda a, b: euclidean_distances(a, b)
else:
euclidean = lambda a, b: la.norm(a - b)
distance = _distance_xy(euclidean, x, y)
if to_similar:
        # in practice the similarity values derived from L1/L2 are not intuitive; use them only comparatively
distance = 1.0 / (1.0 + distance)
return distance
def euclidean_distance_matrix(df, scale_end=True, to_similar=False):
"""
    Euclidean distance (L2 norm): differs from euclidean_distance_xy in that it is not a
    pairwise computation; it takes a single matrix input, which must be a pd.DataFrame or
    np.array or nested iterable [[], []]. Note that you must understand the purpose of
    measuring the data to decide on scale_start; results with and without scale_start differ
    completely, so choose according to your requirements and understanding of the data.
input:
tsla bidu noah sfun goog vips aapl
2014-07-25 223.57 226.50 15.32 12.110 589.02 21.349 97.67
2014-07-28 224.82 225.80 16.13 12.450 590.60 21.548 99.02
2014-07-29 225.01 220.00 16.75 12.220 585.61 21.190 98.38
... ... ... ... ... ... ... ...
2016-07-22 222.27 160.88 25.50 4.850 742.74 13.510 98.66
2016-07-25 230.01 160.25 25.57 4.790 739.77 13.390 97.34
2016-07-26 225.93 163.09 24.75 4.945 740.92 13.655 97.76
ABuStatsUtil.euclidean_distance_matrix(cc, scale_start=True)
output:
tsla bidu noah sfun goog vips aapl
tsla 0.0000 0.4086 0.7539 0.7942 0.4810 0.7638 0.3713
bidu 0.4086 0.0000 0.7732 0.7047 0.6185 0.6161 0.4184
noah 0.7539 0.7732 0.0000 0.7790 0.7174 0.6957 0.7425
sfun 0.7942 0.7047 0.7790 0.0000 0.9950 0.5422 0.9558
goog 0.4810 0.6185 0.7174 0.9950 0.0000 1.0000 0.5379
vips 0.7638 0.6161 0.6957 0.5422 1.0000 0.0000 0.7348
aapl 0.3713 0.4184 0.7425 0.9558 0.5379 0.7348 0.0000
ABuStatsUtil.euclidean_distance_matrix(cc, scale_start=False)
tsla bidu noah sfun goog vips aapl
tsla 0.0000 0.0781 0.3314 0.3573 0.6527 0.3386 0.1933
bidu 0.0781 0.0000 0.2764 0.3018 0.7112 0.2827 0.1392
noah 0.3314 0.2764 0.0000 0.0284 0.9732 0.0140 0.1408
sfun 0.3573 0.3018 0.0284 0.0000 1.0000 0.0203 0.1674
goog 0.6527 0.7112 0.9732 1.0000 0.0000 0.9820 0.8369
vips 0.3386 0.2827 0.0140 0.0203 0.9820 0.0000 0.1481
aapl 0.1933 0.1392 0.1408 0.1674 0.8369 0.1481 0.0000
    :param df: pd.DataFrame or np.array or nested iterable [[], []]; called df because it is
               converted to a pd.DataFrame internally
    :param scale_end: whether to normalize the resulting matrix
    :param to_similar: whether to convert the output to similarity values afterwards
    :return: distance_df, a pd.DataFrame object
"""
return _distance_matrix(euclidean_distances, df, scale_end, to_similar)
# noinspection PyUnresolvedReferences
def manhattan_distances_xy(x, y, to_similar=False):
"""
    Manhattan distance (L1 norm) between two sequences. Note that you must understand the
    purpose of measuring the data to decide on scale_start; results with and without it differ
    completely, so choose according to your requirements and understanding of the data.
    :param x: iterable sequence
    :param y: iterable sequence
    :param to_similar: whether to convert the output to a similarity value afterwards
    :return: float value
"""
distance = _distance_xy(manhattan_distances, x, y)
if to_similar:
        # in practice the similarity values derived from L1/L2 are not intuitive; use them only comparatively
distance = 1.0 / (1.0 + distance)
return distance
def manhattan_distance_matrix(df, scale_end=True, to_similar=False):
"""
    Manhattan distance (L1 norm): differs from manhattan_distances_xy in that it is not a
    pairwise computation; it takes a single matrix input, which must be a pd.DataFrame or
    np.array or nested iterable [[], []]. Note that you must understand the purpose of
    measuring the data to decide on scale_start; results with and without it differ
    completely, so choose according to your requirements and understanding of the data.
eg:
input:
tsla bidu noah sfun goog vips aapl
2014-07-25 223.57 226.50 15.32 12.110 589.02 21.349 97.67
2014-07-28 224.82 225.80 16.13 12.450 590.60 21.548 99.02
2014-07-29 225.01 220.00 16.75 12.220 585.61 21.190 98.38
... ... ... ... ... ... ... ...
2016-07-22 222.27 160.88 25.50 4.850 742.74 13.510 98.66
2016-07-25 230.01 160.25 25.57 4.790 739.77 13.390 97.34
2016-07-26 225.93 163.09 24.75 4.945 740.92 13.655 97.76
ABuStatsUtil.manhattan_distance_matrix(cc, scale_start=True)
output:
tsla bidu noah sfun goog vips aapl
tsla 0.0000 0.3698 0.6452 0.7917 0.4670 0.7426 0.3198
bidu 0.3698 0.0000 0.5969 0.7056 0.6495 0.5822 0.4000
noah 0.6452 0.5969 0.0000 0.7422 0.7441 0.6913 0.6896
sfun 0.7917 0.7056 0.7422 0.0000 0.9236 0.4489 1.0000
goog 0.4670 0.6495 0.7441 0.9236 0.0000 0.8925 0.5134
vips 0.7426 0.5822 0.6913 0.4489 0.8925 0.0000 0.7038
aapl 0.3198 0.4000 0.6896 1.0000 0.5134 0.7038 0.0000
ABuStatsUtil.manhattan_distance_matrix(cc, scale_start=False)
output:
tsla bidu noah sfun goog vips aapl
tsla 0.0000 0.0640 0.3318 0.3585 0.6415 0.3395 0.1906
bidu 0.0640 0.0000 0.2750 0.3018 0.6982 0.2827 0.1338
noah 0.3318 0.2750 0.0000 0.0267 0.9733 0.0124 0.1412
sfun 0.3585 0.3018 0.0267 0.0000 1.0000 0.0191 0.1680
goog 0.6415 0.6982 0.9733 1.0000 0.0000 0.9809 0.8320
vips 0.3395 0.2827 0.0124 0.0191 0.9809 0.0000 0.1489
aapl 0.1906 0.1338 0.1412 0.1680 0.8320 0.1489 0.000
    :param df: pd.DataFrame or np.array or nested iterable [[], []]; called df because it is
               converted to a pd.DataFrame internally
    :param scale_end: whether to normalize the resulting matrix
    :param to_similar: whether to convert the output to similarity values afterwards
    :return: distance_df, a pd.DataFrame object
"""
return _distance_matrix(manhattan_distances, df, scale_end, to_similar)
def cosine_distances_xy(x, y, to_similar=False):
"""
    Cosine distance between two sequences. Note that you must understand the purpose of
    measuring the data to decide on scale_start; results with and without it differ
    completely, so choose according to your requirements and understanding of the data.
    :param x: iterable sequence
    :param y: iterable sequence
    :param to_similar: whether to convert the output to a similarity value afterwards
    :return: float value
"""
distance = _distance_xy(cosine_distances, x, y)
if to_similar:
        # cosine distance converts to cosine similarity by direct subtraction
distance = 1.0 - distance
return distance
def cosine_distance_matrix(df, scale_end=True, to_similar=False):
"""
    Cosine distance: differs from cosine_distances_xy in that it is not a pairwise
    computation; it takes a single matrix input, which must be a pd.DataFrame or np.array or
    nested iterable [[], []]. Note that you must understand the purpose of measuring the data
    to decide on scale_start; results with and without it differ completely, so choose
    according to your requirements and understanding of the data.
eg:
input:
tsla bidu noah sfun goog vips aapl
2014-07-25 223.57 226.50 15.32 12.110 589.02 21.349 97.67
2014-07-28 224.82 225.80 16.13 12.450 590.60 21.548 99.02
2014-07-29 225.01 220.00 16.75 12.220 585.61 21.190 98.38
... ... ... ... ... ... ... ...
2016-07-22 222.27 160.88 25.50 4.850 742.74 13.510 98.66
2016-07-25 230.01 160.25 25.57 4.790 739.77 13.390 97.34
2016-07-26 225.93 163.09 24.75 4.945 740.92 13.655 97.76
ABuStatsUtil.cosine_distance_matrix(cc, scale_start=True)
output:
tsla bidu noah sfun goog vips aapl
tsla 0.0000 0.1743 0.4434 0.2945 0.2394 0.4763 0.1266
bidu 0.1743 0.0000 0.5808 0.2385 0.3986 0.3034 0.1470
noah 0.4434 0.5808 0.0000 1.0000 0.3411 0.7626 0.2632
sfun 0.2945 0.2385 1.0000 0.0000 0.7494 0.4448 0.4590
goog 0.2394 0.3986 0.3411 0.7494 0.0000 0.9717 0.2806
vips 0.4763 0.3034 0.7626 0.4448 0.9717 0.0000 0.2669
aapl 0.1266 0.1470 0.2632 0.4590 0.2806 0.2669 0.0000
ABuStatsUtil.cosine_distance_matrix(cc, scale_start=False)
output:
tsla bidu noah sfun goog vips aapl
tsla 0.0000 0.1743 0.4434 0.2945 0.2394 0.4763 0.1266
bidu 0.1743 0.0000 0.5808 0.2385 0.3986 0.3034 0.1470
noah 0.4434 0.5808 0.0000 1.0000 0.3411 0.7626 0.2632
sfun 0.2945 0.2385 1.0000 0.0000 0.7494 0.4448 0.4590
goog 0.2394 0.3986 0.3411 0.7494 0.0000 0.9717 0.2806
vips 0.4763 0.3034 0.7626 0.4448 0.9717 0.0000 0.2669
aapl 0.1266 0.1470 0.2632 0.4590 0.2806 0.2669 0.0000
    :param df: pd.DataFrame or np.array or nested iterable [[], []]; called df because it is
               converted to a pd.DataFrame internally
    :param scale_end: whether to normalize the resulting matrix
    :param to_similar: whether to convert the output to similarity values afterwards
    :return: distance_df, a pd.DataFrame object
"""
return _distance_matrix(cosine_distances, df, scale_end, to_similar)
# TODO: clarify the relationship between distance and similar, and where this functionality belongs
def arr_to_pandas(func):
"""
    Function decorator with fixed parameters; not generic -- for generic conversion use the
    decorators in ABuDTUtil. Converts the decorated function's arr argument into a
    pd.DataFrame or pd.Series.
"""
@functools.wraps(func)
    def wrapper(arr, *arg, **kwargs):
        # TODO: extract the Iterable / six.string_types check into a shared module for reuse
        if not isinstance(arr, Iterable) or isinstance(arr, six.string_types):
            # arr must be an iterable object
            raise TypeError('arr not isinstance of Iterable')
        if not isinstance(arr, (pd.DataFrame, pd.Series)):
            if isinstance(arr, np.ndarray) and len(arr.shape) > 1 and arr.shape[1] > 1:
                # np.ndarray with more than one column is converted to pd.DataFrame
                arr = pd.DataFrame(arr)
            elif isinstance(arr, dict):
                # dict input is converted to pd.DataFrame
                arr = pd.DataFrame(arr)
            elif all(isinstance(arr_item, Iterable) for arr_item in arr):
                # if the sub-sequence elements are iterable too, convert to np.array first, then DataFrame
                arr = pd.DataFrame(np.array(arr))
            else:
                # otherwise the sequence is converted to pd.Series
                arr = pd.Series(arr)
return func(arr, *arg, **kwargs)
return wrapper
def arr_to_numpy(func):
"""
    Function decorator with fixed parameters; not generic -- for generic conversion use the
    decorators in ABuDTUtil. Converts the decorated function's arr argument into an np.array.
"""
@functools.wraps(func)
    def wrapper(arr, *arg, **kwargs):
        # TODO: extract the Iterable / six.string_types check into a shared module for reuse
        if not isinstance(arr, Iterable) or isinstance(arr, six.string_types):
            # arr must be an iterable object
            raise TypeError('arr not isinstance of Iterable')
        if not isinstance(arr, np.ndarray):
            if isinstance(arr, pd.DataFrame) or isinstance(arr, pd.Series):
                # pandas objects: take .values directly
                arr = arr.values
            elif isinstance(arr, dict):
                # dict input is converted to np.array
                arr = np.array(list(arr.values())).T
            else:
                arr = np.array(arr)
return func(arr, *arg, **kwargs)
return wrapper
@arr_to_pandas
def demean(arr, rolling_window=0, show=False):
"""
    De-meaning post-processing (demean). np.array input is converted to pd.DataFrame;
    decorated with arr_to_pandas for uniform output, so arr.mean() needs no axis argument to
    distinguish np.array orientations.
eg:
cc.head()
tsla bidu noah sfun goog vips aapl
2014-07-25 223.57 226.50 15.32 12.11 589.02 21.349 97.67
2014-07-28 224.82 225.80 16.13 12.45 590.60 21.548 99.02
2014-07-29 225.01 220.00 16.75 12.22 585.61 21.190 98.38
2014-07-30 228.92 219.13 16.83 11.78 587.42 21.185 98.15
2014-07-31 223.30 216.05 16.06 11.47 571.60 20.550 95.60
ABuStatsUtil.demean(cc.head())
tsla bidu noah sfun goog vips aapl
2014-07-25 -1.554 5.004 -0.898 0.104 4.17 0.1846 -0.094
2014-07-28 -0.304 4.304 -0.088 0.444 5.75 0.3836 1.256
2014-07-29 -0.114 -1.496 0.532 0.214 0.76 0.0256 0.616
2014-07-30 3.796 -2.366 0.612 -0.226 2.57 0.0206 0.386
2014-07-31 -1.824 -5.446 -0.158 -0.536 -13.25 -0.6144 -2.164
ABuStatsUtil.demean(cc.head().values)
0 1 2 3 4 5 6
0 -1.554 5.004 -0.898 0.104 4.17 0.1846 -0.094
1 -0.304 4.304 -0.088 0.444 5.75 0.3836 1.256
2 -0.114 -1.496 0.532 0.214 0.76 0.0256 0.616
3 3.796 -2.366 0.612 -0.226 2.57 0.0206 0.386
4 -1.824 -5.446 -0.158 -0.536 -13.25 -0.6144 -2.164
tsla.head()
2014-07-25 223.57
2014-07-28 224.82
2014-07-29 225.01
2014-07-30 228.92
2014-07-31 223.30
ABuStatsUtil.demean(tsla.head())
2014-07-25 -1.554
2014-07-28 -0.304
2014-07-29 -0.114
2014-07-30 3.796
2014-07-31 -1.824
ABuStatsUtil.demean(tsla.head().values)
0
0 -1.554
1 -0.304
2 -0.114
3 3.796
4 -1.824
:param arr: pd.DataFrame or pd.Series or Iterable
    :param rolling_window: default 0, i.e. no moving average is used for de-meaning; takes
                           effect when rolling_window > 0. Note that a too-small rolling_window
                           leaves the de-meaned series discontinuous -- with 5 or 10 days the
                           result only resembles close pct_change. For smoothing, two months or
                           more of trading days is best; choose the parameter per your needs.
    :param show: whether to plot the de-meaned result, default False
:return:
"""
if rolling_window > 0:
        # the arr_to_pandas decorator guarantees the input is either pd.DataFrame or pd.Series
arr_mean = pd_rolling_mean(arr, window=rolling_window, min_periods=1)
# arr_mean.fillna(method='bfill', inplace=True)
else:
arr_mean = arr.mean()
demean_v = arr - arr_mean
if show:
demean_v.plot()
return demean_v
@arr_to_numpy
def print_stats_sm(arr):
"""
    Print statistics about arr, and additionally print the first, middle, and last 5 rows of
    the data. Decorated with arr_to_numpy for uniform output, so arr.max(), arr.min(), etc.
    need no axis argument.
eg:
input:
tsla bidu noah sfun goog vips aapl
2014-07-25 223.57 226.50 15.32 12.110 589.02 21.349 97.67
2014-07-28 224.82 225.80 16.13 12.450 590.60 21.548 99.02
2014-07-29 225.01 220.00 16.75 12.220 585.61 21.190 98.38
... ... ... ... ... ... ... ...
2016-07-22 222.27 160.88 25.50 4.850 742.74 13.510 98.66
2016-07-25 230.01 160.25 25.57 4.790 739.77 13.390 97.34
2016-07-26 225.93 163.09 24.75 4.945 740.92 13.655 97.76
output:
************************************************************************************************************
shape = (504, 7)
************************************************************************************************************
max = 776.6
************************************************************************************************************
min = 4.44
************************************************************************************************************
mean = 172.445713435
************************************************************************************************************
************************************************************************************************************
head =
[[ 223.57 226.5 15.32 12.11 589.02 21.349 97.67 ]
[ 224.82 225.8 16.13 12.45 590.6 21.548 99.02 ]
[ 225.01 220. 16.75 12.22 585.61 21.19 98.38 ]
[ 228.92 219.13 16.83 11.78 587.42 21.185 98.15 ]
[ 223.3 216.05 16.06 11.47 571.6 20.55 95.6 ]]
************************************************************************************************************
mid =
[[ 267.2 207.63 26.25 7.44 644.28 21.23 125.16]
[ 265.41 206.25 23.4 7.17 623.56 20.25 124.5 ]
[ 253.01 197.68 22.16 6.76 627.26 19.99 122.77]
[ 264.82 168.03 22.94 6.76 628. 20.2 123.38]
[ 263.82 170.01 23.35 6.79 631.93 20.26 122.99]]
************************************************************************************************************
tail =
[[ 228.36 160.65 25.11 4.9 741.19 13.63 99.96 ]
[ 220.5 161.4 25.51 4.79 738.63 13.69 99.43 ]
[ 222.27 160.88 25.5 4.85 742.74 13.51 98.66 ]
[ 230.01 160.25 25.57 4.79 739.77 13.39 97.34 ]
[ 225.93 163.09 24.75 4.945 740.92 13.655 97.76 ]]
:param arr:
"""
log_func = logging.info if ABuEnv.g_is_ipython else print
separator = format('*', '*^108s')
log_func(separator)
log_func('shape = ' + str(arr.shape))
log_func(separator)
log_func('max = ' + str(arr.max()))
log_func(separator)
log_func('min = ' + str(arr.min()))
log_func(separator)
log_func('mean = ' + str(arr.mean()))
log_func(separator)
if arr.shape[0] > 5 * 3:
        # only when there are enough rows to print three blocks of data
log_func(separator)
log_func('head = \n' + str(arr[:5]))
log_func(separator)
        # use floor to find the middle index
mid_ind = int(math.floor(arr.shape[0] / 2))
        # -2 / +3 because floor was used
log_func('mid = \n' + str(arr[mid_ind - 2:mid_ind + 3]))
log_func(separator)
log_func('tail = \n' + str(arr[-5:]))
def normality_stats(arr):
"""
    Statistical info: skewness, kurtosis, normality test, p-value.
eg:
input:
2014-07-25 223.57
2014-07-28 224.82
2014-07-29 225.01
...
2016-07-22 222.27
2016-07-25 230.01
2016-07-26 225.93
output:
array skew = -0.282635248604699
array skew p-value = 0.009884539532576725
array kurt = 0.009313464006726946
array kurt p-value = 0.8403947352953821
array norm = NormaltestResult(statistic=6.6961445106692237, pvalue=0.035152053009441256)
array norm p-value = 0.035152053009441256
input:
tsla bidu noah sfun goog vips aapl
2014-07-25 223.57 226.50 15.32 12.110 589.02 21.349 97.67
2014-07-28 224.82 225.80 16.13 12.450 590.60 21.548 99.02
2014-07-29 225.01 220.00 16.75 12.220 585.61 21.190 98.38
... ... ... ... ... ... ... ...
2016-07-22 222.27 160.88 25.50 4.850 742.74 13.510 98.66
2016-07-25 230.01 160.25 25.57 4.790 739.77 13.390 97.34
2016-07-26 225.93 163.09 24.75 4.945 740.92 13.655 97.76
output:
array skew = [-0.2826 -0.2544 0.1456 1.0322 0.2095 0.095 0.1719]
array skew p-value = [ 0.0099 0.0198 0.1779 0. 0.0539 0.3781 0.1124]
array kurt = [ 0.0093 -0.8414 -0.4205 0.4802 -1.547 -0.9203 -1.2104]
array kurt p-value = [ 0.8404 0. 0.0201 0.0461 1. 0. 0. ]
array norm = NormaltestResult(statistic=array([ 6.6961, 52.85 , 7.2163, 69.0119, 3.7161,
69.3468, 347.229 ]), pvalue=array([ 0.0352, 0. , 0.0271, 0. , 0.156 , 0. , 0. ]))
array norm p-value = [ 0.0352 0. 0.0271 0. 0.156 0. 0. ]
:param arr: pd.DataFrame or pd.Series or Iterable
"""
log_func = logging.info if ABuEnv.g_is_ipython else print
log_func('array skew = {}'.format(scs.skew(arr)))
log_func('array skew p-value = {}'.format(scs.skewtest(arr)[1]))
log_func('array kurt = {}'.format(scs.kurtosis(arr)))
log_func('array kurt p-value = {}'.format(scs.kurtosistest(arr)[1]))
log_func('array norm = {}'.format(scs.normaltest(arr)))
log_func('array norm p-value = {}'.format(scs.normaltest(arr)[1]))
def print_stats(arr):
"""
    Print statistics about arr.
eg:
input:
tsla bidu noah sfun goog vips aapl
2014-07-25 223.57 226.50 15.32 12.110 589.02 21.349 97.67
2014-07-28 224.82 225.80 16.13 12.450 590.60 21.548 99.02
2014-07-29 225.01 220.00 16.75 12.220 585.61 21.190 98.38
... ... ... ... ... ... ... ...
2016-07-22 222.27 160.88 25.50 4.850 742.74 13.510 98.66
2016-07-25 230.01 160.25 25.57 4.790 739.77 13.390 97.34
2016-07-26 225.93 163.09 24.75 4.945 740.92 13.655 97.76
output:
array size = 504
array min = [ 143.67 132.37 12.95 4.44 492.55 10.35 90.34]
array max = [ 286.04 250.34 37.32 12.52 776.6 30. 133. ]
array mean = [ 228.4885 193.4488 23.7362 7.2458 624.3016 19.0181 110.881 ]
array var = [ 653.509 752.7421 30.1604 3.3109 7310.2084 27.0994
135.156 ]
array std = [ 25.5638 27.4361 5.4919 1.8196 85.4998 5.2057 11.6257]
array skew = [-0.2826 -0.2544 0.1456 1.0322 0.2095 0.095 0.1719]
array kurt = [ 0.0093 -0.8414 -0.4205 0.4802 -1.547 -0.9203 -1.2104]
:param arr: pd.DataFrame or pd.Series or Iterable
"""
stats = scs.describe(arr)
log_func = logging.info if ABuEnv.g_is_ipython else print
log_func('array size = {}'.format(stats[0]))
log_func('array min = {}'.format(stats[1][0]))
log_func('array max = {}'.format(stats[1][1]))
log_func('array mean = {}'.format(stats[2]))
log_func('array var = {}'.format(stats[3]))
log_func('array std = {}'.format(np.sqrt(stats[3])))
log_func('array skew = {}'.format(stats[4]))
log_func('array kurt = {}'.format(stats[5]))
@arr_to_numpy
def stats_dict(arr):
"""
    Build a dict of basic statistics from the sequence. Decorated with arr_to_numpy for
    uniform output, so arr.max(), arr.min(), etc. need no axis argument.
    :param arr: pd.DataFrame or pd.Series or Iterable
    :return: stats_dict, a dict object
eg:
{'count': 504,
'kurtosis': 0.009313464006726946,
'max': 286.04000000000002,
'mean': 228.48845238095237,
'min': 143.66999999999999,
'skewness': -0.282635248604699,
'std': 25.538448192811927}
"""
count = arr.shape[0]
if len(arr.shape) > 1 and arr.shape[1] > 1:
count = arr.shape[0] * arr.shape[1]
return {'count': count, 'max': arr.max(), 'min': arr.min(), 'mean': arr.mean(),
'std': arr.std(), 'skewness': scs.skew(arr), 'kurtosis': scs.kurtosis(arr)
}
# noinspection PyClassHasNoInit
class AbuMomentsTuple(namedtuple('AbuMomentsTuple',
('count',
'max',
'min',
'mean',
'std',
'skewness',
'kurtosis'))):
__slots__ = ()
def __repr__(self):
return "count:{}\nmax:{}\nmin:{}\nmean:{}\nstd:{}\nskewness:{}\nkurtosis:{}".format(self.count, self.max,
self.min, self.mean,
self.std, self.skewness,
self.kurtosis)
@arr_to_numpy
def stats_namedtuple(arr):
"""
    Build basic statistics from the sequence. Decorated with arr_to_numpy for uniform output,
    so arr.max(), arr.min(), etc. need no axis argument. Differs from stats_dict only in
    returning a namedtuple.
    :param arr: pd.DataFrame or pd.Series or Iterable
    :return: an AbuMomentsTuple object
eg:
count:504
max:286.04
min:143.67
mean:228.48845238095237
std:25.538448192811927
skewness:-0.282635248604699
kurtosis:0.009313464006726946
"""
count = arr.shape[0]
if len(arr.shape) > 1 and arr.shape[1] > 1:
count = arr.shape[0] * arr.shape[1]
return AbuMomentsTuple(count, arr.max(), arr.min(), arr.mean(), arr.std(), scs.skew(arr), scs.kurtosis(arr))
def sigmoid(arr):
"""sigmoid序列处理,大的更大,小的更小,分辨不清晰的极值"""
return 1.0 / (1 + np.exp(-arr))
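# --- Hedged usage sketch (appended for illustration; not part of abupy) ---
# Demonstrates the pairwise helpers on two toy sequences; the values are
# made up and only show the call shapes.
if __name__ == '__main__':
    _x = np.array([1.0, 2.0, 3.0])
    _y = np.array([2.0, 4.0, 6.0])
    print(euclidean_distance_xy(_x, _y))                 # L2 distance
    print(manhattan_distances_xy(_x, _y))                # L1 distance
    print(cosine_distances_xy(_x, _y, to_similar=True))  # cosine similarity, 1.0 for collinear inputs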
|
[
"[email protected]"
] | |
b6ed6298710eb2c6a421aa64540abdca56d7c85b
|
9743d5fd24822f79c156ad112229e25adb9ed6f6
|
/xai/brain/wordbase/nouns/_galleons.py
|
8334bd189fb9d336e509df65b4441f348038d30e
|
[
"MIT"
] |
permissive
|
cash2one/xai
|
de7adad1758f50dd6786bf0111e71a903f039b64
|
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
|
refs/heads/master
| 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 245 |
py
|
from xai.brain.wordbase.nouns._galleon import _GALLEON
# class header
class _GALLEONS(_GALLEON):
def __init__(self,):
_GALLEON.__init__(self)
self.name = "GALLEONS"
self.specie = 'nouns'
self.basic = "galleon"
self.jsondata = {}
|
[
"[email protected]"
] | |
41415a7eec0ea40d6e56743ac77ede744174333b
|
8f5f0c3ef83fdd482387973149738f6178477a42
|
/medium/arithmetic/find_the_duplicate_number.py
|
a39dfb3b6a44397ae279a9a249f76971a33f567d
|
[] |
no_license
|
nicokuzak/leetcode
|
79a5771ad83786cc7dbfd790f8fffcf1ce58794e
|
39b0235dc429a97a7cba0689d44641a6af6d7a32
|
refs/heads/main
| 2023-04-06T21:02:09.553185 | 2021-04-14T22:21:20 | 2021-04-14T22:21:20 | 336,847,511 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 795 |
py
|
"""Given an array of integers nums containing n + 1 integers where each integer is in the range [1, n] inclusive.
There is only one repeated number in nums, return this repeated number.
Example 1:
Input: nums = [1,3,4,2,2]
Output: 2
Example 2:
Input: nums = [3,1,3,4,2]
Output: 3
Example 3:
Input: nums = [1,1]
Output: 1
Example 4:
Input: nums = [1,1,2]
Output: 1
Constraints:
2 <= n <= 3 * 10^4
nums.length == n + 1
1 <= nums[i] <= n
All the integers in nums appear only once except for precisely one integer which appears two or more times."""
from typing import List
class Solution:
def findDuplicate(self, nums: List[int]) -> int:
addl, l = sum(nums), len(nums)
st = set(nums)
adds, s = sum(st), len(st)
return int((addl-adds)/(l-s))
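# --- Hedged alternative (illustrative; not from the original submission) ---
# Floyd's tortoise-and-hare cycle detection solves the same problem in O(n)
# time and O(1) extra space by treating nums as a linked list where index i
# points to nums[i]; the duplicate value is the entry point of the cycle.
def find_duplicate_floyd(nums: List[int]) -> int:
    slow = fast = nums[0]
    while True:
        slow = nums[slow]        # one step
        fast = nums[nums[fast]]  # two steps
        if slow == fast:
            break
    slow = nums[0]
    while slow != fast:          # walk both pointers to the cycle entrance
        slow = nums[slow]
        fast = nums[fast]
    return slow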
|
[
"[email protected]"
] | |
0f9114ffc547c5c89058181b18e5f8eec218ea51
|
d99572b009c3c519cee6fcaf0ad3f9cd2d7a13ae
|
/deeplearn8.py
|
f460c0b3dcaedc855e2cbcd467b91ba9d08a8cc2
|
[] |
no_license
|
bhatnagaranshika02/Deep-Learning
|
10a3f6794bf1265222c8b78555398aea7bbca34e
|
de851c909fb40f17d07999a65cc269d6b5ee6ff5
|
refs/heads/master
| 2023-01-27T20:09:37.500589 | 2020-12-04T23:43:00 | 2020-12-04T23:43:00 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 211 |
py
|
import numpy as np
weights = np.array([1, 2])
input_data = np.array([3, 4])
target = 6
learning_rate = 0.01
preds = (weights*input_data).sum()
error = preds - target
slope = input_data * error * 2
print(slope)
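# --- Hedged continuation (not in the original file): one gradient-descent
# step using the slope computed above; learning_rate was defined but unused.
weights_updated = weights - learning_rate * slope
preds_updated = (weights_updated * input_data).sum()
print(error, preds_updated - target)  # the error should shrink after the update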
|
[
"[email protected]"
] | |
2cb541b98ffac5f54ae764f99dae824b9c55bf17
|
d0c4e3b53310c291ff1faf391b7240cb41ae2a31
|
/tensorflow_probability/python/internal/auto_batching/xla.py
|
575aea285a952da733aaa5254fe4bd391738433e
|
[
"Apache-2.0"
] |
permissive
|
harryprince/probability
|
0696c47d8f78a4343ebdf7a7a41280a08cec34ce
|
9439c3d04b4d5e60b8cf721cc5a1dbfac73605d2
|
refs/heads/master
| 2020-06-30T20:04:12.874945 | 2019-08-06T22:38:34 | 2019-08-06T22:39:36 | 200,938,780 | 0 | 1 |
Apache-2.0
| 2019-08-06T23:42:01 | 2019-08-06T23:42:01 | null |
UTF-8
|
Python
| false | false | 2,073 |
py
|
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""XLA utilities."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
__all__ = ['compile_nested_output']
def compile_nested_output(f, compile_fn=None):
"""Wraps f with a `tpu.rewrite` or `xla.compile`, propagates output structure.
`xla.compile` insists `f` output a flat list of `Tensor`s or `Op`s, but
tolerates nested input arguments. Here, we capture the output structure in
order to propagate it.
Args:
f: Callable to compile, may accept/return nested inputs/outputs.
compile_fn: The function to use to compile, i.e. `xla.compile` or
`tpu.rewrite`. Accepts two args, `f` and `inputs`.
Returns:
g: Callable wrapping `f` which returns XLA-compiled, nested outputs.
"""
def _wrapper(*inputs): # pylint:disable=missing-docstring
nest = tf.compat.v2.nest
struct = [None]
def _flattened(*inputs):
result = f(*inputs)
flat = nest.flatten(result)
# Ick: Side-effect. Ideally we could push output nest support into
# tpu.rewrite / xla.compile. b/121383831
struct[0] = nest.pack_sequence_as(result, [1] * len(flat))
return flat
res = compile_fn(_flattened, inputs)
if struct[0] is None:
raise ValueError('Expected nest structure in struct[0]')
return nest.pack_sequence_as(struct[0], res)
return _wrapper
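# --- Hedged usage sketch (not part of the original module) ---
# '_passthrough' stands in for xla.compile / tpu.rewrite so the nest
# propagation can be exercised without an accelerator: it simply calls
# the flattened function on the inputs.
def _passthrough(f, inputs):
  return f(*inputs)

def _stats(x):
  return {'double': x * 2., 'square': x * x}

# g = compile_nested_output(_stats, compile_fn=_passthrough)
# g(tf.constant(3.))  # -> {'double': <tf.Tensor 6.>, 'square': <tf.Tensor 9.>}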
|
[
"[email protected]"
] | |
7c8d823ff49cd459f646d18538c1285dd725138f
|
b41b996b4a14f11bb3d7676b4539725a93c2d586
|
/SourceCode/Codesignal-Selenium/SourceNew/Py3/squarePerimeter.py3
|
0c9af17adb32a6c0750f0dfc0c1849b97df3341d
|
[] |
no_license
|
mquandvr/selenium
|
ee2c0e7febb15f4db4d33a8575726f67c48bde05
|
d7bb4c95d4d0756c66cbf3c69387318fc07219f2
|
refs/heads/master
| 2020-06-05T05:49:57.476867 | 2019-06-18T10:55:27 | 2019-06-18T10:55:27 | 192,335,493 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 117 |
py3
|
def squarePerimeter(n):
    # A square has four equal sides, so the perimeter is the side length
    # summed four times.
    result = 0
    for i in range(4):
        result += n
    return result
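# Hedged usage example (not in the original file):
print(squarePerimeter(5))  # expected: 20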
|
[
"[email protected]"
] | |
e8c0b63502c01b7f908df1fb3f7f68e3397ca2c2
|
9743d5fd24822f79c156ad112229e25adb9ed6f6
|
/xai/brain/wordbase/verbs/_reshuffles.py
|
d4d2867c23fe73fde20033cb9e9460d4dcd34b5c
|
[
"MIT"
] |
permissive
|
cash2one/xai
|
de7adad1758f50dd6786bf0111e71a903f039b64
|
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
|
refs/heads/master
| 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 259 |
py
|
from xai.brain.wordbase.verbs._reshuffle import _RESHUFFLE
# class header
class _RESHUFFLES(_RESHUFFLE):
def __init__(self,):
_RESHUFFLE.__init__(self)
self.name = "RESHUFFLES"
self.specie = 'verbs'
self.basic = "reshuffle"
self.jsondata = {}
|
[
"[email protected]"
] | |
fa4131b83b9bc2e159026691e37231865d73e8ba
|
5930f323d96e7ed45c01fef63b100e1ad220f764
|
/catalyst/contrib/dl/callbacks/cutmix_callback.py
|
642279ee763e5e3b56a96d047a01709ac4c464d5
|
[
"Apache-2.0"
] |
permissive
|
saswat0/catalyst
|
8cb91c2392bccdbdd318544e6861e6fe6ac39b33
|
a35297ecab8d1a6c2f00b6435ea1d6d37ec9f441
|
refs/heads/master
| 2023-04-05T00:43:29.124864 | 2020-06-18T05:41:33 | 2020-06-18T05:41:33 | 272,268,902 | 2 | 0 |
Apache-2.0
| 2020-06-18T05:41:34 | 2020-06-14T19:24:04 | null |
UTF-8
|
Python
| false | false | 4,315 |
py
|
from typing import List
import numpy as np
import torch
from catalyst.core.callbacks import CriterionCallback
from catalyst.core.runner import IRunner
class CutmixCallback(CriterionCallback):
"""
Callback to do Cutmix augmentation that has been proposed in
`CutMix: Regularization Strategy to Train Strong Classifiers
with Localizable Features`_.
.. warning::
:class:`catalyst.contrib.dl.callbacks.CutmixCallback` is inherited from
:class:`catalyst.dl.CriterionCallback` and does its work.
You may not use them together.
.. _CutMix\: Regularization Strategy to Train Strong Classifiers
with Localizable Features: https://arxiv.org/abs/1905.04899
"""
def __init__(
self,
fields: List[str] = ("features",),
alpha=1.0,
on_train_only=True,
**kwargs
):
"""
Args:
fields (List[str]): list of features which must be affected.
alpha (float): beta distribution parameter.
on_train_only (bool): Apply to train only.
So, if on_train_only is True, use a standard output/metric
for validation.
"""
assert (
len(fields) > 0
), "At least one field for CutmixCallback is required"
assert alpha >= 0, "alpha must be >=0"
super().__init__(**kwargs)
self.on_train_only = on_train_only
self.fields = fields
self.alpha = alpha
self.lam = 1
self.index = None
self.is_needed = True
def _compute_loss(self, runner: IRunner, criterion):
"""Computes loss.
If self.is_needed is ``False`` then calls ``_compute_loss``
from ``CriterionCallback``, otherwise computes loss value.
Args:
runner (IRunner): current runner
criterion: that is used to compute loss
"""
if not self.is_needed:
return super()._compute_loss_value(runner, criterion)
pred = runner.output[self.output_key]
y_a = runner.input[self.input_key]
y_b = runner.input[self.input_key][self.index]
loss = self.lam * criterion(pred, y_a) + (1 - self.lam) * criterion(
pred, y_b
)
return loss
def _rand_bbox(self, size, lam):
"""
Generates top-left and bottom-right coordinates of the box
of the given size.
Args:
size: size of the box
lam: lambda parameter
Returns:
top-left and bottom-right coordinates of the box
"""
w = size[2]
h = size[3]
cut_rat = np.sqrt(1.0 - lam)
        cut_w = int(w * cut_rat)  # np.int was removed in NumPy 1.24; use the builtin int
        cut_h = int(h * cut_rat)
cx = np.random.randint(w)
cy = np.random.randint(h)
bbx1 = np.clip(cx - cut_w // 2, 0, w)
bby1 = np.clip(cy - cut_h // 2, 0, h)
bbx2 = np.clip(cx + cut_w // 2, 0, w)
bby2 = np.clip(cy + cut_h // 2, 0, h)
return bbx1, bby1, bbx2, bby2
def on_loader_start(self, runner: IRunner) -> None:
"""Checks if it is needed for the loader.
Args:
runner (IRunner): current runner
"""
self.is_needed = not self.on_train_only or runner.is_train_loader
def on_batch_start(self, runner: IRunner) -> None:
"""Mixes data according to Cutmix algorithm.
Args:
runner (IRunner): current runner
"""
if not self.is_needed:
return
if self.alpha > 0:
self.lam = np.random.beta(self.alpha, self.alpha)
else:
self.lam = 1
self.index = torch.randperm(runner.input[self.fields[0]].shape[0])
        self.index = self.index.to(runner.device)  # Tensor.to returns a new tensor; assign it back
bbx1, bby1, bbx2, bby2 = self._rand_bbox(
runner.input[self.fields[0]].shape, self.lam
)
for f in self.fields:
runner.input[f][:, :, bbx1:bbx2, bby1:bby2] = runner.input[f][
self.index, :, bbx1:bbx2, bby1:bby2
]
self.lam = 1 - (
(bbx2 - bbx1)
* (bby2 - bby1)
/ (
runner.input[self.fields[0]].shape[-1]
* runner.input[self.fields[0]].shape[-2]
)
)
__all__ = ["CutmixCallback"]
|
[
"[email protected]"
] | |
5fdb64d4234410647093b1bb7411e27ec879697f
|
1141cd4aeffafe496bb7d8a1399ca7c8445edd6e
|
/tests/ui_tests/test_ui_config_hell.py
|
876ce1e478d23dcec405f796a0f6e07036954323
|
[
"Apache-2.0"
] |
permissive
|
amleshkov/adcm
|
d338c3b7c51e38ffe9a0b2715c85e54bed0c4f46
|
e1c67e3041437ad9e17dccc6c95c5ac02184eddb
|
refs/heads/master
| 2020-11-30T15:35:57.456194 | 2019-12-16T20:27:06 | 2019-12-16T20:27:06 | 230,432,278 | 0 | 0 |
NOASSERTION
| 2019-12-27T11:30:23 | 2019-12-27T11:30:22 | null |
UTF-8
|
Python
| false | false | 1,776 |
py
|
import os
import pytest
from adcm_pytest_plugin.utils import get_data_dir
# pylint: disable=W0611, W0621
from tests.ui_tests.app.app import ADCMTest
from tests.ui_tests.app.pages import Configuration, LoginPage
DATADIR = get_data_dir(__file__)
BUNDLES = os.path.join(os.path.dirname(__file__), "../stack/")
@pytest.fixture(scope='function')
def ui_hell_fs(sdk_client_fs):
bundle = sdk_client_fs.upload_from_fs(DATADIR)
cluster = bundle.cluster_create(name='my cluster')
cluster.service_add(name='ui_config_hell')
service = cluster.service(name="ui_config_hell")
return service
@pytest.fixture()
def app(adcm_fs):
return ADCMTest(adcm_fs)
@pytest.fixture()
def login(app):
app.driver.get(app.adcm.url)
login = LoginPage(app.driver)
login.login("admin", "admin")
@pytest.fixture()
def prototype_display_names(ui_hell_fs):
display_header_name = ui_hell_fs.display_name
display_names = {config['display_name'] for config in ui_hell_fs.prototype().config}
return display_header_name, display_names
@pytest.fixture()
def ui_display_names(login, app, ui_hell_fs):
app.driver.get("{}/cluster/{}/service/{}/config".format
(app.adcm.url, ui_hell_fs.cluster_id, ui_hell_fs.service_id))
ui_config = Configuration(app.driver)
return ui_config.get_display_names()
def test_display_names(prototype_display_names, ui_display_names):
"""Scenario:
1. Get Service configuration
2. Get display names from UI
3. Check that config name in prototype is correct
4. Check that in UI we have full list of display names from prototype
"""
assert prototype_display_names[0] == "UI Config Hell"
for d_name in ui_display_names:
assert d_name in prototype_display_names[1]
|
[
"[email protected]"
] | |
3fa2c83c546500af96324893edd0add3698409b3
|
1b94aae63500b6ff94b0446d01c3c9bee385fad2
|
/.history/chandori/account/views_20210825231017.py
|
962f5084c5591398941e7b67df6a489f3776d996
|
[] |
no_license
|
miracle3070/chandori
|
71389c2a9df76c242a5895c2c23d4394220f9c8e
|
b01d1eaa1d9c0d12d7abdc8f164039bcd9c42925
|
refs/heads/master
| 2023-08-18T11:46:11.303934 | 2021-09-28T19:23:22 | 2021-09-28T19:23:22 | 393,949,742 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,292 |
py
|
from django.shortcuts import render, redirect
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.forms import UserChangeForm
from django.utils import timezone
from .models import *
from .models import BankAccount
from .forms import CustomUserChangeForm
from django.contrib import messages
from django.contrib.auth.decorators import login_required
@login_required
def edit(request):
if request.method == 'GET':
return render(request, 'edit.html')
elif request.method == 'POST':
        user_change_form = CustomUserChangeForm(request.POST, instance=request.user)
        # Setting attributes on the form object has no effect; the form reads
        # nickname/age/job/income from request.POST itself, so validate and save.
        if user_change_form.is_valid():
            user_change_form.save()
            messages.success(request, 'Your profile has been updated.')
        return render(request, 'edit.html')
def edit_bank(request):
if request.method == 'GET':
return render(request, 'add_Account.html')
elif request.method == 'POST':
add_Account = BankAccount()
add_Account.user = CustomUser.objects.get(pk=1)
add_Account.account_num = request.POST.get('account_num')
add_Account.bank = request.POST.get('bank')
add_Account.balance = request.POST.get('balance')
add_Account.save()
        return render(request, 'edit.html', {'add_Account': add_Account})  # key/value were swapped in the original
def login_view(request):
error_msg = ""
if request.method == "POST":
username = request.POST.get('username')
password = request.POST.get('password')
if username == "" or password == "":
error_msg = "아이디 또는 비밀번호를 입력해주세요."
else:
user = authenticate(request, username=username, password=password)
if user is not None:
login(request, user)
return redirect("accounting:home")
else:
error_msg = "아이디 또는 비밀번호가 틀렸습니다."
return render(request, "login.html", {"error_msg" : error_msg})
def logout_view(request):
logout(request)
return redirect("accounting:home")
def signup_view(request):
error_msg = ""
if request.method == "POST":
password1 = request.POST["password1"]
password2 = request.POST["password2"]
if password1 == password2:
username = request.POST["username"]
nickname = request.POST["nickname"]
age = int(request.POST['age'])
job = request.POST['job']
income = int(request.POST['income'])
signup_date = timezone.now()
user = CustomUser.objects.create_user(
username = username,
password = password1,
nickname = nickname,
age = age,
job = job,
income = income,
signup_date = signup_date,
)
return redirect("account:login")
else:
error_msg = "비밀번호가 일치하지 않습니다."
return render(request, "signup.html", {"error_msg" : error_msg})
|
[
"[email protected]"
] | |
941d7134dc48a6b37fcc70578c7ecebb24d49a90
|
939e9dc95a720fef1844d8b52890b9ca688754c2
|
/outliers/enron_outliers.py
|
310ba76c1f350da99da7fc011629782a37cbd0ee
|
[] |
no_license
|
ilyarudyak/ud120-machine-learning
|
2350d8d451b94106606a486d4ac4a78ff3c4fe63
|
c9ddce6599b278f40abfe5e15f92c02209dfacb4
|
refs/heads/master
| 2021-01-20T22:05:53.258319 | 2018-06-14T12:59:40 | 2018-06-14T12:59:40 | 101,799,504 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 686 |
py
|
#!/usr/bin/python
import pickle
import matplotlib.pyplot as plt
from tools.feature_format import featureFormat, targetFeatureSplit
import numpy as np
def get_data():
# read in data dictionary, convert to numpy array
data_dict = pickle.load(open('final_project_dataset.pkl','rb'))
del data_dict['TOTAL']
features = ['salary', 'bonus']
return featureFormat(data_dict, features)
def plot_data(salary_data):
salary, bonus = salary_data[:, 0], salary_data[:, 1]
plt.scatter(salary, bonus)
plt.xlabel('salary')
plt.ylabel('bonus')
plt.show()
# your code below
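# Hedged sketch of the exercise step (illustrative thresholds, not from the
# original file): list the points that remain extreme after dropping 'TOTAL'.
def find_outliers(salary_data, salary_cut=1e6, bonus_cut=5e6):
    return [(s, b) for s, b in salary_data if s > salary_cut and b > bonus_cut]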
if __name__ == '__main__':
salary_data = get_data()
plot_data(salary_data)
|
[
"[email protected]"
] | |
b77c353f1ad2cd60e82160dbf7587d168c2f0d15
|
08c73d76d4f933bae76b5f8519bc0883d2ba184a
|
/com/data/models.py
|
290c050bd632fb131842a63bbeac36e46c30efe5
|
[] |
no_license
|
palencia77/social-core
|
fa17df4d48d07d2f97041491599f08bcddfb4e20
|
f7a0812b70c476ce073f8bdb54bbde4d517658cf
|
refs/heads/master
| 2021-09-16T01:01:24.109023 | 2018-05-28T03:36:10 | 2018-05-28T03:36:10 | 85,777,596 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 11,007 |
py
|
'''
Created on 19/06/2014
@author: palencia77
'''
from datetime import datetime
from mongoengine import *
from com.__init__ import *
from mongoengine import queryset_manager
from com.tools.objects_status import *
class User(db.Document):
login = db.StringField()
facebook_id = db.StringField()
id_social_network = db.DictField()
image_social_network = db.DictField()
password = db.StringField()
email = db.EmailField(required=True)
full_name = db.StringField(max_length=50)
gender = db.StringField(max_length=50)
status = db.StringField(default=STATUS_OBJECT_PENDING)
status_date = db.DateTimeField(default=datetime.now)
parameters = db.DictField() #id_bee, avatar
birthday = db.DateTimeField()
phone = db.StringField(max_length=50, default='00000000')
created_date = db.DateTimeField(default=datetime.now)
type = db.StringField()
class Token(db.Document):
owner = db.ReferenceField(User)
access_token = db.StringField(max_length=500, required=True)
start_date = db.DateTimeField(default=datetime.now)
finish_date = db.DateTimeField()
type = db.StringField(required=True)
status = db.StringField(default=STATUS_TOKEN_VALID)
class Bee(db.Document):
name = db.StringField(required=True)
owner = db.ReferenceField(User)
    post_counter = db.IntField(default=0)  # post count
    love_counter = db.IntField(default=0)  # count of love actions
    love_refs = db.ListField()  # references to the bees that performed the love action
    resource_refs = db.ListField()  # references to resource ids
    created_date = db.DateTimeField(default=datetime.now)
    status = db.StringField(default=STATUS_OBJECT_PENDING)
    status_date = db.DateTimeField(default=datetime.now)
    bee_blocked_refs = db.ListField()  # blacklist
    administrators_refs = db.ListField()  # references to the bee's administrators
    parameters = db.DictField()  # avatar, cover, promotional_photo, promotional_video, document
short_url = db.StringField()
    current_status = db.StringField(default='My current status')  # current status shown on the social network
meta = {'allow_inheritance': True}
class Award(db.Document):
title = db.StringField()
text = db.StringField()
owner = db.ReferenceField(Bee)
    resource_refs = db.ListField()  # references to resource ids
    quantity = db.IntField(default=0)  # award quantity
created_date = db.DateTimeField(default=datetime.now)
amount_love = db.IntField(default=0)
fly_refs = db.ListField()
    fly_counter = db.IntField(default=0)  # count of flies received
status = db.StringField(default=STATUS_OBJECT_ACTIVE)
class Person(Bee):
    love_score = db.IntField(default=0)  # organic love available to donate on the network
    love_coin = db.IntField(default=0)
    bee_refs = db.ListField()  # references to the bees this person follows
    sub_scope_refs = db.ListField()  # references to the ids of the sub-scopes the bee follows
    awards_refs = db.ListField(db.ReferenceField(Award))  # ids of awards obtained (by redemption)
class Celebrity(Bee):
email = db.EmailField()
description = db.StringField()
telephone = db.StringField(max_length=50, default='00000000')
web_site = db.StringField()
facebook = db.StringField()
twitter = db.StringField()
google_plus = db.StringField()
address = db.StringField()
class Partner(Bee):
email = db.EmailField()
description = db.StringField()
telephone = db.StringField(max_length=50, default='00000000')
web_site = db.StringField()
address = db.StringField()
facebook = db.StringField()
twitter = db.StringField()
google_plus = db.StringField()
class Post(db.Document):
title = db.StringField()
text = db.StringField(required=True)
owner = db.ReferenceField(Bee)
    love_counter = db.IntField(default=0)  # count of love received
    love_refs = db.ListField()
    fly_refs = db.ListField()
    postcomment_refs = db.ListField()  # references to PostComment ids
    resource_refs = db.ListField()  # references to resource ids
    fly_counter = db.IntField(default=0)  # count of flies received
created_date = db.DateTimeField(default=datetime.now)
status = db.StringField(default=STATUS_OBJECT_ACTIVE)
class PostComment(db.Document):
text = db.StringField(required=True)
owner = db.ReferenceField(Bee)
    parent = db.ReferenceField(Post)
    love_counter = db.IntField(default=0)  # is this necessary for a comment?
love_refs = db.ListField()
created_date = db.DateTimeField(default=datetime.now)
status = db.StringField(default=STATUS_OBJECT_ACTIVE)
class Resource(db.Document):
name = db.StringField(required=True)
image = db.ImageField()
text = db.StringField(required=True)
binary_content = db.BinaryField()
content_type = db.StringField()
owner = db.ReferenceField(Bee)
post = db.ReferenceField(Post)
award = db.ReferenceField(Award)
    love_counter = db.IntField(default=0)  # count of love received
love_refs = db.ListField()
created_date = db.DateTimeField(default=datetime.now)
status = db.StringField(default=STATUS_OBJECT_ACTIVE)
class ResourceComment(db.Document):
text = db.StringField(required=True)
owner = db.ReferenceField(Bee)
resource = db.ReferenceField(Resource)
    love_counter = db.IntField(default=0)  # count of love received
love_refs = db.ListField()
created_date = db.DateTimeField(default=datetime.now)
status = db.StringField(default=STATUS_OBJECT_ACTIVE)
class Scope(db.Document):
name = db.StringField(required=True)
description = db.StringField()
_cls = db.StringField()
logo = db.ReferenceField(Resource)
creation_date = db.DateTimeField(default=datetime.now)
activation_date = db.DateTimeField(default=datetime.now)
closing_date = db.DateTimeField(default=datetime.now)
color = db.StringField()
status = db.StringField(default=STATUS_OBJECT_ACTIVE)
published = BooleanField()
meta = {'allow_inheritance': True}
@queryset_manager
def objects(self, queryset):
return queryset.filter(_cls = 'Scope')
class SubScope(Scope):
parent = db.ReferenceField(Scope)
class Hero(db.Document):
bee = db.ReferenceField(Person)
cause = db.ObjectIdField() #id_cause
date = db.DateTimeField(default=datetime.now)
class Contact(db.Document):
name = db.StringField()
email = db.StringField()
mobile_phone = db.StringField(max_length=50, default='00000000')
telephone = db.StringField(max_length=50, default='00000000')
organization = db.StringField()
address = db.StringField()
class Cause(Bee):
description = db.StringField()
goal = db.StringField()
sub_scope = db.ReferenceField(SubScope)
start_date = db.DateTimeField()
finish_date = db.DateTimeField()
geographic_location = db.GeoPointField()
closing_date = db.DateTimeField()
    fly_counter = db.IntField(default=0)  # count of flies received
    fly_refs = db.ListField()
    love_meter = db.IntField(default=0)  # love money given to the cause
    love_goal = db.IntField(default=0)
    ambassadors = db.ListField()  # TODO: remove (marked "BORRAR" in the original)
    hero = db.ReferenceField(Hero)
    beneficiary = db.StringField()  # TODO: clarify (marked "ACLARAR" in the original)
    risk_classification = db.StringField()  # TODO: clarify (marked "ACLARAR" in the original)
    responsible = db.ReferenceField(Person)  # responsible person shown on the social network
    contacts = db.ListField(db.ReferenceField(Contact))  # ids of the cause's legal contacts (not shown on the network)
    partners = db.ListField(db.ReferenceField(Partner))  # ids of the cause's partners
    celebrities = db.ListField(db.ReferenceField(Celebrity))  # ids of the cause's ambassadors
    awards = db.ListField(db.ReferenceField(Award))  # ids of the cause's awards
url_promotional_video = db.StringField()
class OperationType(db.Document):
codename = db.StringField(primary_key=True, required=True)
name = db.StringField(required=True)
created_date = db.DateTimeField(default=datetime.now)
class OperationLog(db.Document):
owner = db.ReferenceField(Bee)
operation_type = db.ReferenceField(OperationType)
created_date = db.DateTimeField(default=datetime.now)
status = db.StringField(default=STATUS_OBJECT_ACTIVE)
meta = {'allow_inheritance': True}
class OperationBee(OperationLog):
bee_destination = db.ReferenceField(Bee)
class OperationPost(OperationLog):
    post_destination = db.ReferenceField(Post)
class OperationComment(OperationLog):
    comment_destination = db.ReferenceField(PostComment)
class OperationAward(OperationLog):
award_destination = db.ReferenceField(Award)
class RequestFriendship(db.Document):
owner = db.ReferenceField(Bee)
destination = db.ReferenceField(Bee)
status = db.StringField(default=STATUS_FRIENDSHIP_PENDING)
created_date = db.DateTimeField(default=datetime.now)
class NotificationType(db.Document):
codename = db.StringField(primary_key=True, required=True)
name = db.StringField(required=True)
created_date = db.DateTimeField(default=datetime.now)
class Notification(db.Document):
owner = db.ReferenceField(Bee)
target = db.ReferenceField(Bee)
description = db.StringField()
notification_type = db.ReferenceField(NotificationType)
status = db.StringField(default=STATUS_NOTIFICATION_UNREAD)
created_date = db.DateTimeField(default=datetime.now)
meta = {'allow_inheritance': True}
class NotificationPost(Notification):
post_destination = db.ReferenceField(Post)
class NotificationComment (Notification):
comment_destination = db.ReferenceField(PostComment)
class InteractionType (db.Document):
codename = db.StringField(primary_key=True, required=True)
name = db.StringField(required=True)
status = db.StringField(default=STATUS_OBJECT_ACTIVE)
class Interaction (db.Document):
name = db.StringField(required=True)
value = db.IntField(default=0)
time_interval = db.IntField(default=0) # in minutes
status = db.StringField(default=STATUS_OBJECT_ACTIVE)
interaction_type = db.ReferenceField(InteractionType)
class InteractionLog(db.Document):
owner = db.ReferenceField(Bee)
created_date = db.DateTimeField(default=datetime.now)
value = db.IntField(default=0)
interaction_year_week = db.StringField() #compose year and week
interaction_name = db.StringField()
interaction_type = db.StringField()
status = db.StringField(default=STATUS_OBJECT_ACTIVE)
meta = {'allow_inheritance': True}
class InteractionBee(InteractionLog):
bee_destination = db.ReferenceField(Bee)
class InteractionPost(InteractionLog):
post_destination = db.ReferenceField(Post)
class InteractionComment(InteractionLog):
comment_destination = db.ReferenceField(PostComment)
class InteractionAward(InteractionLog):
award_destination = db.ReferenceField(Award)
|
[
"[email protected]"
] | |
34a6c530d071afbfe82a7bd521e4964a0e475056
|
9ae6ce54bf9a2a86201961fdbd5e7b0ec913ff56
|
/google/ads/googleads/v11/enums/types/customizer_attribute_status.py
|
a55a3f6d36c4ffb95a7f855ec2dc9f165968622a
|
[
"Apache-2.0"
] |
permissive
|
GerhardusM/google-ads-python
|
73b275a06e5401e6b951a6cd99af98c247e34aa3
|
676ac5fcb5bec0d9b5897f4c950049dac5647555
|
refs/heads/master
| 2022-07-06T19:05:50.932553 | 2022-06-17T20:41:17 | 2022-06-17T20:41:17 | 207,535,443 | 0 | 0 |
Apache-2.0
| 2019-09-10T10:58:55 | 2019-09-10T10:58:55 | null |
UTF-8
|
Python
| false | false | 1,194 |
py
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v11.enums",
marshal="google.ads.googleads.v11",
manifest={"CustomizerAttributeStatusEnum",},
)
class CustomizerAttributeStatusEnum(proto.Message):
r"""Container for enum describing possible statuses of a
customizer attribute.
"""
class CustomizerAttributeStatus(proto.Enum):
r"""The possible statuses of a customizer attribute."""
UNSPECIFIED = 0
UNKNOWN = 1
ENABLED = 2
REMOVED = 3
__all__ = tuple(sorted(__protobuf__.manifest))
|
[
"[email protected]"
] | |
6046d42a8be0edfd0e6f61ba8a5aa1359e0f0f75
|
ac5e52a3fc52dde58d208746cddabef2e378119e
|
/exps-mrsp.0/mrsp_ut=3.5_rd=0.8_rw=0.06_rn=4_u=0.075-0.325_p=harmonic-2/sched=RUN_trial=66/sched.py
|
24ca03c1050f53f56b98349570171dfcef06cc75
|
[] |
no_license
|
ricardobtxr/experiment-scripts
|
1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1
|
7bcebff7ac2f2822423f211f1162cd017a18babb
|
refs/heads/master
| 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 435 |
py
|
-S 0 -X RUN -Q 0 -L 4 97 300
-S 0 -X RUN -Q 0 -L 4 78 300
-S 0 -X RUN -Q 0 -L 4 73 300
-S 1 -X RUN -Q 1 -L 3 69 250
-S 1 -X RUN -Q 1 -L 3 66 300
-S 1 -X RUN -Q 1 -L 3 62 200
-S 2 -X RUN -Q 2 -L 2 48 150
-S 2 -X RUN -Q 2 -L 2 41 175
-S 2 -X RUN -Q 2 -L 2 40 150
-S 3 -X RUN -Q 3 -L 1 34 175
-S 3 -X RUN -Q 3 -L 1 29 100
-S 3 -X RUN -Q 3 -L 1 22 125
-S 4 21 150
-S 4 19 200
-S 4 15 100
|
[
"[email protected]"
] | |
549191a5508e7d23bc35f003a2772ba44440c3b9
|
41586d36dd07c06860b9808c760e2b0212ed846b
|
/system/base/inary/actions.py
|
51f407d19d443069040111eb40f7b1876b6bbf54
|
[] |
no_license
|
SulinOS/SulinRepository
|
4d5551861f57bc1f4bec6879dfe28ce68c7c125d
|
9686811a1e06080f63199233561a922fe1f78d67
|
refs/heads/master
| 2021-06-15T21:34:25.039979 | 2021-06-05T13:43:34 | 2021-06-05T13:43:34 | 207,672,864 | 6 | 3 | null | 2019-12-06T08:11:22 | 2019-09-10T22:16:17 |
Python
|
UTF-8
|
Python
| false | false | 605 |
py
|
# -*- coding: utf-8 -*-
#
# Licensed under the GNU General Public License, version 3.
# See the file http://www.gnu.org/licenses/gpl.txt
from inary.actionsapi import get
from inary.actionsapi import inarytools
from inary.actionsapi import shelltools
from inary.actionsapi import pythonmodules
def build():
pythonmodules.compile(pyVer='3')
def install():
shelltools.system("rm -r po/*")
pythonmodules.install("--install-lib=/usr/lib/sulin", pyVer='3')
inarytools.dosym("inary-cli", "/usr/bin/inary")
inarytools.dodir("/var/lib/inary/info/")
inarytools.dodir("/usr/lib/sulin")
|
[
"[email protected]"
] | |
a7b8ecc8600ae64fff27cc805a08157f9d474fb8
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03816/s472692535.py
|
0bf949a31b20b3367d9721e32c4c1197579ce541
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 281 |
py
|
n = int(input())
a = sorted(list(map(int, input().split())))
eat = 0
l = 0
r = n - 1
while l < r:
    # advance l to the left element of the leftmost duplicate pair
    while l < n - 1 and a[l + 1] != a[l]:
        l += 1
    # retreat r to the right element of the rightmost duplicate pair
    while r > 0 and a[r - 1] != a[r]:
        r -= 1
    if r <= l:
        break
    # one eat operation removes two cards in total
    eat += 1
    l += 1
    r -= 1
print(n - eat * 2)
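# --- Hedged note (not from the original submission) ---
# Each operation removes exactly two cards, so a common closed-form answer
# uses only the count of distinct values:
#   k = len(set(a))
#   print(k if k % 2 == 1 else k - 1)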
|
[
"[email protected]"
] | |
4c7315c59f37c2824822cae4701239e8f973c9a6
|
60a831fb3c92a9d2a2b52ff7f5a0f665d4692a24
|
/IronPythonStubs/release/stubs.min/System/Windows/Controls/__init___parts/ProgressBar.py
|
6b225c1974edd1cc5337d07575eb6877ff575bf0
|
[
"MIT"
] |
permissive
|
shnlmn/Rhino-Grasshopper-Scripts
|
a9411098c5d1bbc55feb782def565d535b27b709
|
0e43c3c1d09fb12cdbd86a3c4e2ba49982e0f823
|
refs/heads/master
| 2020-04-10T18:59:43.518140 | 2020-04-08T02:49:07 | 2020-04-08T02:49:07 | 161,219,695 | 11 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 57,945 |
py
|
class ProgressBar(RangeBase,IResource,IAnimatable,IInputElement,IFrameworkInputElement,ISupportInitialize,IHaveResources,IQueryAmbient):
"""
Indicates the progress of an operation.
ProgressBar()
"""
def AddLogicalChild(self,*args):
"""
AddLogicalChild(self: FrameworkElement,child: object)
Adds the provided object to the logical tree of this element.
child: Child element to be added.
"""
pass
def AddVisualChild(self,*args):
"""
AddVisualChild(self: Visual,child: Visual)
Defines the parent-child relationship between two visuals.
child: The child visual object to add to parent visual.
"""
pass
def ArrangeCore(self,*args):
"""
ArrangeCore(self: FrameworkElement,finalRect: Rect)
Implements System.Windows.UIElement.ArrangeCore(System.Windows.Rect) (defined as virtual in
System.Windows.UIElement) and seals the implementation.
finalRect: The final area within the parent that this element should use to arrange itself and its children.
"""
pass
def ArrangeOverride(self,*args):
"""
ArrangeOverride(self: Control,arrangeBounds: Size) -> Size
Called to arrange and size the content of a System.Windows.Controls.Control object.
arrangeBounds: The computed size that is used to arrange the content.
Returns: The size of the control.
"""
pass
def GetLayoutClip(self,*args):
"""
GetLayoutClip(self: FrameworkElement,layoutSlotSize: Size) -> Geometry
Returns a geometry for a clipping mask. The mask applies if the layout system attempts to
arrange an element that is larger than the available display space.
layoutSlotSize: The size of the part of the element that does visual presentation.
Returns: The clipping geometry.
"""
pass
def GetTemplateChild(self,*args):
"""
GetTemplateChild(self: FrameworkElement,childName: str) -> DependencyObject
Returns the named element in the visual tree of an instantiated
System.Windows.Controls.ControlTemplate.
childName: Name of the child to find.
Returns: The requested element. May be null if no element of the requested name exists.
"""
pass
def GetUIParentCore(self,*args):
"""
GetUIParentCore(self: FrameworkElement) -> DependencyObject
Returns an alternative logical parent for this element if there is no visual parent.
Returns: Returns something other than null whenever a WPF framework-level implementation of this method
has a non-visual parent connection.
"""
pass
def GetVisualChild(self,*args):
"""
GetVisualChild(self: FrameworkElement,index: int) -> Visual
Overrides System.Windows.Media.Visual.GetVisualChild(System.Int32),and returns a child at the
specified index from a collection of child elements.
index: The zero-based index of the requested child element in the collection.
Returns: The requested child element. This should not return null; if the provided index is out of range,
an exception is thrown.
"""
pass
def HitTestCore(self,*args):
"""
HitTestCore(self: UIElement,hitTestParameters: GeometryHitTestParameters) -> GeometryHitTestResult
Implements
System.Windows.Media.Visual.HitTestCore(System.Windows.Media.GeometryHitTestParameters) to
supply base element hit testing behavior (returning System.Windows.Media.GeometryHitTestResult).
hitTestParameters: Describes the hit test to perform,including the initial hit point.
Returns: Results of the test,including the evaluated geometry.
HitTestCore(self: UIElement,hitTestParameters: PointHitTestParameters) -> HitTestResult
Implements System.Windows.Media.Visual.HitTestCore(System.Windows.Media.PointHitTestParameters)
to supply base element hit testing behavior (returning System.Windows.Media.HitTestResult).
hitTestParameters: Describes the hit test to perform,including the initial hit point.
Returns: Results of the test,including the evaluated point.
"""
pass
def MeasureCore(self,*args):
"""
MeasureCore(self: FrameworkElement,availableSize: Size) -> Size
Implements basic measure-pass layout system behavior for System.Windows.FrameworkElement.
availableSize: The available size that the parent element can give to the child elements.
Returns: The desired size of this element in layout.
"""
pass
def MeasureOverride(self,*args):
"""
MeasureOverride(self: Control,constraint: Size) -> Size
Called to remeasure a control.
constraint: The maximum size that the method can return.
Returns: The size of the control,up to the maximum specified by constraint.
"""
pass
def OnAccessKey(self,*args):
"""
OnAccessKey(self: UIElement,e: AccessKeyEventArgs)
Provides class handling for when an access key that is meaningful for this element is invoked.
e: The event data to the access key event. The event data reports which key was invoked,and
indicate whether the System.Windows.Input.AccessKeyManager object that controls the sending of
these events also sent this access key invocation to other elements.
"""
pass
def OnApplyTemplate(self):
"""
OnApplyTemplate(self: ProgressBar)
Called when a template is applied to a System.Windows.Controls.ProgressBar.
"""
pass
def OnChildDesiredSizeChanged(self,*args):
"""
OnChildDesiredSizeChanged(self: UIElement,child: UIElement)
Supports layout behavior when a child element is resized.
child: The child element that is being resized.
"""
pass
def OnContextMenuClosing(self,*args):
"""
OnContextMenuClosing(self: FrameworkElement,e: ContextMenuEventArgs)
Invoked whenever an unhandled System.Windows.FrameworkElement.ContextMenuClosing routed event
reaches this class in its route. Implement this method to add class handling for this event.
e: Provides data about the event.
"""
pass
def OnContextMenuOpening(self,*args):
"""
OnContextMenuOpening(self: FrameworkElement,e: ContextMenuEventArgs)
Invoked whenever an unhandled System.Windows.FrameworkElement.ContextMenuOpening routed event
reaches this class in its route. Implement this method to add class handling for this event.
e: The System.Windows.RoutedEventArgs that contains the event data.
"""
pass
def OnCreateAutomationPeer(self,*args):
"""
OnCreateAutomationPeer(self: ProgressBar) -> AutomationPeer
Provides an appropriate System.Windows.Automation.Peers.ProgressBarAutomationPeer implementation
for this control,as part of the WPF automation infrastructure.
Returns: The type-specific System.Windows.Automation.Peers.AutomationPeer implementation.
"""
pass
def OnDpiChanged(self,*args):
""" OnDpiChanged(self: Visual,oldDpi: DpiScale,newDpi: DpiScale) """
pass
def OnDragEnter(self,*args):
"""
OnDragEnter(self: UIElement,e: DragEventArgs)
Invoked when an unhandled System.Windows.DragDrop.DragEnter attached event reaches an element in
its route that is derived from this class. Implement this method to add class handling for this
event.
e: The System.Windows.DragEventArgs that contains the event data.
"""
pass
def OnDragLeave(self,*args):
"""
OnDragLeave(self: UIElement,e: DragEventArgs)
Invoked when an unhandled System.Windows.DragDrop.DragLeave attached event reaches an element in
its route that is derived from this class. Implement this method to add class handling for this
event.
e: The System.Windows.DragEventArgs that contains the event data.
"""
pass
def OnDragOver(self,*args):
"""
OnDragOver(self: UIElement,e: DragEventArgs)
Invoked when an unhandled System.Windows.DragDrop.DragOver attached event reaches an element in
its route that is derived from this class. Implement this method to add class handling for this
event.
e: The System.Windows.DragEventArgs that contains the event data.
"""
pass
def OnDrop(self,*args):
"""
OnDrop(self: UIElement,e: DragEventArgs)
Invoked when an unhandled System.Windows.DragDrop.Drop attached event reaches an element in
its route that is derived from this class. Implement this method to add class handling for this
event.
e: The System.Windows.DragEventArgs that contains the event data.
"""
pass
def OnGiveFeedback(self,*args):
"""
OnGiveFeedback(self: UIElement,e: GiveFeedbackEventArgs)
Invoked when an unhandled System.Windows.DragDrop.GiveFeedback attached event reaches an element
in its route that is derived from this class. Implement this method to add class handling for
this event.
e: The System.Windows.GiveFeedbackEventArgs that contains the event data.
"""
pass
def OnGotFocus(self,*args):
"""
OnGotFocus(self: FrameworkElement,e: RoutedEventArgs)
Invoked whenever an unhandled System.Windows.UIElement.GotFocus event reaches this element in
its route.
e: The System.Windows.RoutedEventArgs that contains the event data.
"""
pass
def OnGotKeyboardFocus(self,*args):
"""
OnGotKeyboardFocus(self: UIElement,e: KeyboardFocusChangedEventArgs)
Invoked when an unhandled System.Windows.Input.Keyboard.GotKeyboardFocus attached event reaches
an element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.KeyboardFocusChangedEventArgs that contains the event data.
"""
pass
def OnGotMouseCapture(self,*args):
"""
OnGotMouseCapture(self: UIElement,e: MouseEventArgs)
Invoked when an unhandled System.Windows.Input.Mouse.GotMouseCapture attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.MouseEventArgs that contains the event data.
"""
pass
def OnGotStylusCapture(self,*args):
"""
OnGotStylusCapture(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.GotStylusCapture attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
"""
pass
def OnGotTouchCapture(self,*args):
"""
OnGotTouchCapture(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.GotTouchCapture routed event that
occurs when a touch is captured to this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
"""
pass
def OnInitialized(self,*args):
"""
OnInitialized(self: FrameworkElement,e: EventArgs)
Raises the System.Windows.FrameworkElement.Initialized event. This method is invoked whenever
System.Windows.FrameworkElement.IsInitialized is set to true internally.
e: The System.Windows.RoutedEventArgs that contains the event data.
"""
pass
def OnIsKeyboardFocusedChanged(self,*args):
"""
OnIsKeyboardFocusedChanged(self: UIElement,e: DependencyPropertyChangedEventArgs)
Invoked when an unhandled System.Windows.UIElement.IsKeyboardFocusedChanged event is raised on
this element. Implement this method to add class handling for this event.
e: The System.Windows.DependencyPropertyChangedEventArgs that contains the event data.
"""
pass
def OnIsKeyboardFocusWithinChanged(self,*args):
"""
OnIsKeyboardFocusWithinChanged(self: UIElement,e: DependencyPropertyChangedEventArgs)
Invoked just before the System.Windows.UIElement.IsKeyboardFocusWithinChanged event is raised by
this element. Implement this method to add class handling for this event.
e: A System.Windows.DependencyPropertyChangedEventArgs that contains the event data.
"""
pass
def OnIsMouseCapturedChanged(self,*args):
"""
OnIsMouseCapturedChanged(self: UIElement,e: DependencyPropertyChangedEventArgs)
Invoked when an unhandled System.Windows.UIElement.IsMouseCapturedChanged event is raised on
this element. Implement this method to add class handling for this event.
e: The System.Windows.DependencyPropertyChangedEventArgs that contains the event data.
"""
pass
def OnIsMouseCaptureWithinChanged(self,*args):
"""
OnIsMouseCaptureWithinChanged(self: UIElement,e: DependencyPropertyChangedEventArgs)
Invoked when an unhandled System.Windows.UIElement.IsMouseCaptureWithinChanged event is raised
on this element. Implement this method to add class handling for this event.
e: A System.Windows.DependencyPropertyChangedEventArgs that contains the event data.
"""
pass
def OnIsMouseDirectlyOverChanged(self,*args):
"""
OnIsMouseDirectlyOverChanged(self: UIElement,e: DependencyPropertyChangedEventArgs)
Invoked when an unhandled System.Windows.UIElement.IsMouseDirectlyOverChanged event is raised on
this element. Implement this method to add class handling for this event.
e: The System.Windows.DependencyPropertyChangedEventArgs that contains the event data.
"""
pass
def OnIsStylusCapturedChanged(self,*args):
"""
OnIsStylusCapturedChanged(self: UIElement,e: DependencyPropertyChangedEventArgs)
Invoked when an unhandled System.Windows.UIElement.IsStylusCapturedChanged event is raised on
this element. Implement this method to add class handling for this event.
e: A System.Windows.DependencyPropertyChangedEventArgs that contains the event data.
"""
pass
def OnIsStylusCaptureWithinChanged(self,*args):
"""
OnIsStylusCaptureWithinChanged(self: UIElement,e: DependencyPropertyChangedEventArgs)
Invoked when an unhandled System.Windows.UIElement.IsStylusCaptureWithinChanged event is raised
on this element. Implement this method to add class handling for this event.
e: The System.Windows.DependencyPropertyChangedEventArgs that contains the event data.
"""
pass
def OnIsStylusDirectlyOverChanged(self,*args):
"""
OnIsStylusDirectlyOverChanged(self: UIElement,e: DependencyPropertyChangedEventArgs)
Invoked when an unhandled System.Windows.UIElement.IsStylusDirectlyOverChanged event is raised
on this element. Implement this method to add class handling for this event.
e: The System.Windows.DependencyPropertyChangedEventArgs that contains the event data.
"""
pass
def OnKeyDown(self,*args):
"""
OnKeyDown(self: UIElement,e: KeyEventArgs)
Invoked when an unhandled System.Windows.Input.Keyboard.KeyDown attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.KeyEventArgs that contains the event data.
"""
pass
def OnKeyUp(self,*args):
"""
OnKeyUp(self: UIElement,e: KeyEventArgs)
Invoked when an unhandled System.Windows.Input.Keyboard.KeyUp attached event reaches an element
in its route that is derived from this class. Implement this method to add class handling for
this event.
e: The System.Windows.Input.KeyEventArgs that contains the event data.
"""
pass
def OnLostFocus(self,*args):
"""
OnLostFocus(self: UIElement,e: RoutedEventArgs)
Raises the System.Windows.UIElement.LostFocus routed event by using the event data that is
provided.
e: A System.Windows.RoutedEventArgs that contains event data. This event data must contain the
identifier for the System.Windows.UIElement.LostFocus event.
"""
pass
def OnLostKeyboardFocus(self,*args):
"""
OnLostKeyboardFocus(self: UIElement,e: KeyboardFocusChangedEventArgs)
Invoked when an unhandled System.Windows.Input.Keyboard.LostKeyboardFocus attached event reaches
an element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.KeyboardFocusChangedEventArgs that contains event data.
"""
pass
def OnLostMouseCapture(self,*args):
"""
OnLostMouseCapture(self: UIElement,e: MouseEventArgs)
Invoked when an unhandled System.Windows.Input.Mouse.LostMouseCapture attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.MouseEventArgs that contains event data.
"""
pass
def OnLostStylusCapture(self,*args):
"""
OnLostStylusCapture(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.LostStylusCapture attached event reaches
an element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains event data.
"""
pass
def OnLostTouchCapture(self,*args):
"""
OnLostTouchCapture(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.LostTouchCapture routed event that
occurs when this element loses a touch capture.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
"""
pass
def OnManipulationBoundaryFeedback(self,*args):
"""
OnManipulationBoundaryFeedback(self: UIElement,e: ManipulationBoundaryFeedbackEventArgs)
Called when the System.Windows.UIElement.ManipulationBoundaryFeedback event occurs.
e: The data for the event.
"""
pass
def OnManipulationCompleted(self,*args):
"""
OnManipulationCompleted(self: UIElement,e: ManipulationCompletedEventArgs)
Called when the System.Windows.UIElement.ManipulationCompleted event occurs.
e: The data for the event.
"""
pass
def OnManipulationDelta(self,*args):
"""
OnManipulationDelta(self: UIElement,e: ManipulationDeltaEventArgs)
Called when the System.Windows.UIElement.ManipulationDelta event occurs.
e: The data for the event.
"""
pass
def OnManipulationInertiaStarting(self,*args):
"""
OnManipulationInertiaStarting(self: UIElement,e: ManipulationInertiaStartingEventArgs)
Called when the System.Windows.UIElement.ManipulationInertiaStarting event occurs.
e: The data for the event.
"""
pass
def OnManipulationStarted(self,*args):
"""
OnManipulationStarted(self: UIElement,e: ManipulationStartedEventArgs)
Called when the System.Windows.UIElement.ManipulationStarted event occurs.
e: The data for the event.
"""
pass
def OnManipulationStarting(self,*args):
"""
OnManipulationStarting(self: UIElement,e: ManipulationStartingEventArgs)
Provides class handling for the System.Windows.UIElement.ManipulationStarting routed event that
occurs when the manipulation processor is first created.
e: A System.Windows.Input.ManipulationStartingEventArgs that contains the event data.
"""
pass
def OnMaximumChanged(self,*args):
"""
OnMaximumChanged(self: ProgressBar,oldMaximum: float,newMaximum: float)
Updates the current position of the System.Windows.Controls.ProgressBar when the
System.Windows.Controls.Primitives.RangeBase.Maximum property changes.
oldMaximum: Old value of the System.Windows.Controls.Primitives.RangeBase.Maximum property.
newMaximum: New value of the System.Windows.Controls.Primitives.RangeBase.Maximum property.
"""
pass
def OnMinimumChanged(self,*args):
"""
OnMinimumChanged(self: ProgressBar,oldMinimum: float,newMinimum: float)
Updates the current position of the System.Windows.Controls.ProgressBar when the
System.Windows.Controls.Primitives.RangeBase.Minimum property changes.
oldMinimum: Old value of the System.Windows.Controls.Primitives.RangeBase.Minimum property.
newMinimum: New value of the System.Windows.Controls.Primitives.RangeBase.Minimum property.
"""
pass
def OnMouseDoubleClick(self,*args):
"""
OnMouseDoubleClick(self: Control,e: MouseButtonEventArgs)
Raises the System.Windows.Controls.Control.MouseDoubleClick routed event.
e: The event data.
"""
pass
def OnMouseDown(self,*args):
"""
OnMouseDown(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.Input.Mouse.MouseDown attached event reaches an element
in its route that is derived from this class. Implement this method to add class handling for
this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. This event data
reports details about the mouse button that was pressed and the handled state.
"""
pass
def OnMouseEnter(self,*args):
"""
OnMouseEnter(self: UIElement,e: MouseEventArgs)
Invoked when an unhandled System.Windows.Input.Mouse.MouseEnter attached event is raised on this
element. Implement this method to add class handling for this event.
e: The System.Windows.Input.MouseEventArgs that contains the event data.
"""
pass
def OnMouseLeave(self,*args):
"""
OnMouseLeave(self: UIElement,e: MouseEventArgs)
Invoked when an unhandled System.Windows.Input.Mouse.MouseLeave attached event is raised on this
element. Implement this method to add class handling for this event.
e: The System.Windows.Input.MouseEventArgs that contains the event data.
"""
pass
def OnMouseLeftButtonDown(self,*args):
"""
OnMouseLeftButtonDown(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.UIElement.MouseLeftButtonDown routed event is raised on
this element. Implement this method to add class handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The event data
reports that the left mouse button was pressed.
"""
pass
def OnMouseLeftButtonUp(self,*args):
"""
OnMouseLeftButtonUp(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.UIElement.MouseLeftButtonUp routed event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The event data
reports that the left mouse button was released.
"""
pass
def OnMouseMove(self,*args):
"""
OnMouseMove(self: UIElement,e: MouseEventArgs)
Invoked when an unhandled System.Windows.Input.Mouse.MouseMove attached event reaches an element
in its route that is derived from this class. Implement this method to add class handling for
this event.
e: The System.Windows.Input.MouseEventArgs that contains the event data.
"""
pass
def OnMouseRightButtonDown(self,*args):
"""
OnMouseRightButtonDown(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.UIElement.MouseRightButtonDown routed event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The event data
reports that the right mouse button was pressed.
"""
pass
def OnMouseRightButtonUp(self,*args):
"""
OnMouseRightButtonUp(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.UIElement.MouseRightButtonUp routed event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The event data
reports that the right mouse button was released.
"""
pass
def OnMouseUp(self,*args):
"""
OnMouseUp(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.Input.Mouse.MouseUp routed event reaches an element in
its route that is derived from this class. Implement this method to add class handling for this
event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The event data
reports that the mouse button was released.
"""
pass
def OnMouseWheel(self,*args):
"""
OnMouseWheel(self: UIElement,e: MouseWheelEventArgs)
Invoked when an unhandled System.Windows.Input.Mouse.MouseWheel attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.MouseWheelEventArgs that contains the event data.
"""
pass
def OnPreviewDragEnter(self,*args):
"""
OnPreviewDragEnter(self: UIElement,e: DragEventArgs)
Invoked when an unhandled System.Windows.DragDrop.PreviewDragEnter attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.DragEventArgs that contains the event data.
"""
pass
def OnPreviewDragLeave(self,*args):
"""
OnPreviewDragLeave(self: UIElement,e: DragEventArgs)
Invoked when an unhandled System.Windows.DragDrop.PreviewDragLeave attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.DragEventArgs that contains the event data.
"""
pass
def OnPreviewDragOver(self,*args):
"""
OnPreviewDragOver(self: UIElement,e: DragEventArgs)
Invoked when an unhandled System.Windows.DragDrop.PreviewDragOver attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.DragEventArgs that contains the event data.
"""
pass
def OnPreviewDrop(self,*args):
"""
OnPreviewDrop(self: UIElement,e: DragEventArgs)
Invoked when an unhandled System.Windows.DragDrop.PreviewDrop attached event reaches an element
in its route that is derived from this class. Implement this method to add class handling for
this event.
e: The System.Windows.DragEventArgs that contains the event data.
"""
pass
def OnPreviewGiveFeedback(self,*args):
"""
OnPreviewGiveFeedback(self: UIElement,e: GiveFeedbackEventArgs)
Invoked when an unhandled System.Windows.DragDrop.PreviewGiveFeedback attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.GiveFeedbackEventArgs that contains the event data.
"""
pass
def OnPreviewGotKeyboardFocus(self,*args):
"""
OnPreviewGotKeyboardFocus(self: UIElement,e: KeyboardFocusChangedEventArgs)
Invoked when an unhandled System.Windows.Input.Keyboard.PreviewGotKeyboardFocus attached event
reaches an element in its route that is derived from this class. Implement this method to add
class handling for this event.
e: The System.Windows.Input.KeyboardFocusChangedEventArgs that contains the event data.
"""
pass
def OnPreviewKeyDown(self,*args):
"""
OnPreviewKeyDown(self: UIElement,e: KeyEventArgs)
Invoked when an unhandled System.Windows.Input.Keyboard.PreviewKeyDown attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.KeyEventArgs that contains the event data.
"""
pass
def OnPreviewKeyUp(self,*args):
"""
OnPreviewKeyUp(self: UIElement,e: KeyEventArgs)
Invoked when an unhandled System.Windows.Input.Keyboard.PreviewKeyUp attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.KeyEventArgs that contains the event data.
"""
pass
def OnPreviewLostKeyboardFocus(self,*args):
"""
OnPreviewLostKeyboardFocus(self: UIElement,e: KeyboardFocusChangedEventArgs)
Invoked when an unhandled System.Windows.Input.Keyboard.PreviewLostKeyboardFocus attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.KeyboardFocusChangedEventArgs that contains the event data.
"""
pass
def OnPreviewMouseDoubleClick(self,*args):
"""
OnPreviewMouseDoubleClick(self: Control,e: MouseButtonEventArgs)
Raises the System.Windows.Controls.Control.PreviewMouseDoubleClick routed event.
e: The event data.
"""
pass
def OnPreviewMouseDown(self,*args):
"""
OnPreviewMouseDown(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.Input.Mouse.PreviewMouseDown attached routed event
reaches an element in its route that is derived from this class. Implement this method to add
class handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The event data
reports that one or more mouse buttons were pressed.
"""
pass
def OnPreviewMouseLeftButtonDown(self,*args):
"""
OnPreviewMouseLeftButtonDown(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.UIElement.PreviewMouseLeftButtonDown routed event
reaches an element in its route that is derived from this class. Implement this method to add
class handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The event data
reports that the left mouse button was pressed.
"""
pass
def OnPreviewMouseLeftButtonUp(self,*args):
"""
OnPreviewMouseLeftButtonUp(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.UIElement.PreviewMouseLeftButtonUp routed event reaches
an element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The event data
reports that the left mouse button was released.
"""
pass
def OnPreviewMouseMove(self,*args):
"""
OnPreviewMouseMove(self: UIElement,e: MouseEventArgs)
Invoked when an unhandled System.Windows.Input.Mouse.PreviewMouseMove attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.MouseEventArgs that contains the event data.
"""
pass
def OnPreviewMouseRightButtonDown(self,*args):
"""
OnPreviewMouseRightButtonDown(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.UIElement.PreviewMouseRightButtonDown routed event
reaches an element in its route that is derived from this class. Implement this method to add
class handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The event data
reports that the right mouse button was pressed.
"""
pass
def OnPreviewMouseRightButtonUp(self,*args):
"""
OnPreviewMouseRightButtonUp(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.UIElement.PreviewMouseRightButtonUp routed event
reaches an element in its route that is derived from this class. Implement this method to add
class handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The event data
reports that the right mouse button was released.
"""
pass
def OnPreviewMouseUp(self,*args):
"""
OnPreviewMouseUp(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.Input.Mouse.PreviewMouseUp attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The event data
reports that one or more mouse buttons were released.
"""
pass
def OnPreviewMouseWheel(self,*args):
"""
OnPreviewMouseWheel(self: UIElement,e: MouseWheelEventArgs)
Invoked when an unhandled System.Windows.Input.Mouse.PreviewMouseWheel attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.MouseWheelEventArgs that contains the event data.
"""
pass
def OnPreviewQueryContinueDrag(self,*args):
"""
OnPreviewQueryContinueDrag(self: UIElement,e: QueryContinueDragEventArgs)
Invoked when an unhandled System.Windows.DragDrop.PreviewQueryContinueDrag attached event
reaches an element in its route that is derived from this class. Implement this method to add
class handling for this event.
e: The System.Windows.QueryContinueDragEventArgs that contains the event data.
"""
pass
def OnPreviewStylusButtonDown(self,*args):
"""
OnPreviewStylusButtonDown(self: UIElement,e: StylusButtonEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusButtonDown attached event
reaches an element in its route that is derived from this class. Implement this method to add
class handling for this event.
e: The System.Windows.Input.StylusButtonEventArgs that contains the event data.
"""
pass
def OnPreviewStylusButtonUp(self,*args):
"""
OnPreviewStylusButtonUp(self: UIElement,e: StylusButtonEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusButtonUp attached event
reaches an element in its route that is derived from this class. Implement this method to add
class handling for this event.
e: The System.Windows.Input.StylusButtonEventArgs that contains the event data.
"""
pass
def OnPreviewStylusDown(self,*args):
"""
OnPreviewStylusDown(self: UIElement,e: StylusDownEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusDown attached event reaches
an element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.StylusDownEventArgs that contains the event data.
"""
pass
def OnPreviewStylusInAirMove(self,*args):
"""
OnPreviewStylusInAirMove(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusInAirMove attached event
reaches an element in its route that is derived from this class. Implement this method to add
class handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
"""
pass
def OnPreviewStylusInRange(self,*args):
"""
OnPreviewStylusInRange(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusInRange attached event
reaches an element in its route that is derived from this class. Implement this method to add
class handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
"""
pass
def OnPreviewStylusMove(self,*args):
"""
OnPreviewStylusMove(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusMove attached event reaches
an element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
"""
pass
def OnPreviewStylusOutOfRange(self,*args):
"""
OnPreviewStylusOutOfRange(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusOutOfRange attached event
reaches an element in its route that is derived from this class. Implement this method to add
class handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
"""
pass
def OnPreviewStylusSystemGesture(self,*args):
"""
OnPreviewStylusSystemGesture(self: UIElement,e: StylusSystemGestureEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusSystemGesture attached event
reaches an element in its route that is derived from this class. Implement this method to add
class handling for this event.
e: The System.Windows.Input.StylusSystemGestureEventArgs that contains the event data.
"""
pass
def OnPreviewStylusUp(self,*args):
"""
OnPreviewStylusUp(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusUp attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
"""
pass
def OnPreviewTextInput(self,*args):
"""
OnPreviewTextInput(self: UIElement,e: TextCompositionEventArgs)
Invoked when an unhandled System.Windows.Input.TextCompositionManager.PreviewTextInput attached
event reaches an element in its route that is derived from this class. Implement this method to
add class handling for this event.
e: The System.Windows.Input.TextCompositionEventArgs that contains the event data.
"""
pass
def OnPreviewTouchDown(self,*args):
"""
OnPreviewTouchDown(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.PreviewTouchDown routed event that
occurs when a touch presses this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
"""
pass
def OnPreviewTouchMove(self,*args):
"""
OnPreviewTouchMove(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.PreviewTouchMove routed event that
occurs when a touch moves while inside this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
"""
pass
def OnPreviewTouchUp(self,*args):
"""
OnPreviewTouchUp(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.PreviewTouchUp routed event that occurs
when a touch is released inside this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
"""
pass
def OnPropertyChanged(self,*args):
"""
OnPropertyChanged(self: FrameworkElement,e: DependencyPropertyChangedEventArgs)
Invoked whenever the effective value of any dependency property on this
System.Windows.FrameworkElement has been updated. The specific dependency property that changed
is reported in the arguments parameter. Overrides
System.Windows.DependencyObject.OnPropertyChanged(System.Windows.DependencyPropertyChangedEventArgs).
e: The event data that describes the property that changed,as well as old and new values.
"""
pass
def OnQueryContinueDrag(self,*args):
"""
OnQueryContinueDrag(self: UIElement,e: QueryContinueDragEventArgs)
Invoked when an unhandled System.Windows.DragDrop.QueryContinueDrag attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.QueryContinueDragEventArgs that contains the event data.
"""
pass
def OnQueryCursor(self,*args):
"""
OnQueryCursor(self: UIElement,e: QueryCursorEventArgs)
Invoked when an unhandled System.Windows.Input.Mouse.QueryCursor attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.QueryCursorEventArgs that contains the event data.
"""
pass
def OnRender(self,*args):
"""
OnRender(self: UIElement,drawingContext: DrawingContext)
When overridden in a derived class,participates in rendering operations that are directed by
the layout system. The rendering instructions for this element are not used directly when this
method is invoked,and are instead preserved for later asynchronous use by layout and drawing.
drawingContext: The drawing instructions for a specific element. This context is provided to the layout system.
"""
pass
def OnRenderSizeChanged(self,*args):
"""
OnRenderSizeChanged(self: FrameworkElement,sizeInfo: SizeChangedInfo)
Raises the System.Windows.FrameworkElement.SizeChanged event,using the specified information as
part of the eventual event data.
sizeInfo: Details of the old and new size involved in the change.
"""
pass
def OnStyleChanged(self,*args):
"""
OnStyleChanged(self: FrameworkElement,oldStyle: Style,newStyle: Style)
Invoked when the style in use on this element changes,which will invalidate the layout.
oldStyle: The old style.
newStyle: The new style.
"""
pass
def OnStylusButtonDown(self,*args):
"""
OnStylusButtonDown(self: UIElement,e: StylusButtonEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusButtonDown attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.StylusButtonEventArgs that contains the event data.
"""
pass
def OnStylusButtonUp(self,*args):
"""
OnStylusButtonUp(self: UIElement,e: StylusButtonEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusButtonUp attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.StylusButtonEventArgs that contains the event data.
"""
pass
def OnStylusDown(self,*args):
"""
OnStylusDown(self: UIElement,e: StylusDownEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusDown attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.StylusDownEventArgs that contains the event data.
"""
pass
def OnStylusEnter(self,*args):
"""
OnStylusEnter(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusEnter attached event is raised by
this element. Implement this method to add class handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
"""
pass
def OnStylusInAirMove(self,*args):
"""
OnStylusInAirMove(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusInAirMove attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
"""
pass
def OnStylusInRange(self,*args):
"""
OnStylusInRange(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusInRange attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
"""
pass
def OnStylusLeave(self,*args):
"""
OnStylusLeave(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusLeave attached event is raised by
this element. Implement this method to add class handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
"""
pass
def OnStylusMove(self,*args):
"""
OnStylusMove(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusMove attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
"""
pass
def OnStylusOutOfRange(self,*args):
"""
OnStylusOutOfRange(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusOutOfRange attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
"""
pass
def OnStylusSystemGesture(self,*args):
"""
OnStylusSystemGesture(self: UIElement,e: StylusSystemGestureEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusSystemGesture attached event reaches
an element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.StylusSystemGestureEventArgs that contains the event data.
"""
pass
def OnStylusUp(self,*args):
"""
OnStylusUp(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusUp attached event reaches an element
in its route that is derived from this class. Implement this method to add class handling for
this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
"""
pass
def OnTemplateChanged(self,*args):
"""
OnTemplateChanged(self: Control,oldTemplate: ControlTemplate,newTemplate: ControlTemplate)
Called whenever the control's template changes.
oldTemplate: The old template.
newTemplate: The new template.
"""
pass
def OnTextInput(self,*args):
"""
OnTextInput(self: UIElement,e: TextCompositionEventArgs)
Invoked when an unhandled System.Windows.Input.TextCompositionManager.TextInput attached event
reaches an element in its route that is derived from this class. Implement this method to add
class handling for this event.
e: The System.Windows.Input.TextCompositionEventArgs that contains the event data.
"""
pass
def OnToolTipClosing(self,*args):
"""
OnToolTipClosing(self: FrameworkElement,e: ToolTipEventArgs)
Invoked whenever an unhandled System.Windows.FrameworkElement.ToolTipClosing routed event
reaches this class in its route. Implement this method to add class handling for this event.
e: Provides data about the event.
"""
pass
def OnToolTipOpening(self,*args):
"""
OnToolTipOpening(self: FrameworkElement,e: ToolTipEventArgs)
Invoked whenever the System.Windows.FrameworkElement.ToolTipOpening routed event reaches this
class in its route. Implement this method to add class handling for this event.
e: Provides data about the event.
"""
pass
def OnTouchDown(self,*args):
"""
OnTouchDown(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.TouchDown routed event that occurs when
a touch presses inside this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
"""
pass
def OnTouchEnter(self,*args):
"""
OnTouchEnter(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.TouchEnter routed event that occurs
when a touch moves from outside to inside the bounds of this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
"""
pass
def OnTouchLeave(self,*args):
"""
OnTouchLeave(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.TouchLeave routed event that occurs
when a touch moves from inside to outside the bounds of this System.Windows.UIElement.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
"""
pass
def OnTouchMove(self,*args):
"""
OnTouchMove(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.TouchMove routed event that occurs when
a touch moves while inside this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
"""
pass
def OnTouchUp(self,*args):
"""
OnTouchUp(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.TouchUp routed event that occurs when a
touch is released inside this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
"""
pass
def OnValueChanged(self,*args):
"""
OnValueChanged(self: ProgressBar,oldValue: float,newValue: float)
Updates the current position of the System.Windows.Controls.ProgressBar when the
System.Windows.Controls.Primitives.RangeBase.Value property changes.
oldValue: Old value of the System.Windows.Controls.Primitives.RangeBase.Value property.
newValue: New value of the System.Windows.Controls.Primitives.RangeBase.Value property.
"""
pass
def OnVisualChildrenChanged(self,*args):
"""
OnVisualChildrenChanged(self: Visual,visualAdded: DependencyObject,visualRemoved: DependencyObject)
Called when the System.Windows.Media.VisualCollection of the visual object is modified.
visualAdded: The System.Windows.Media.Visual that was added to the collection
visualRemoved: The System.Windows.Media.Visual that was removed from the collection
"""
pass
def OnVisualParentChanged(self,*args):
"""
OnVisualParentChanged(self: FrameworkElement,oldParent: DependencyObject)
Invoked when the parent of this element in the visual tree is changed. Overrides
System.Windows.UIElement.OnVisualParentChanged(System.Windows.DependencyObject).
oldParent: The old parent element. May be null to indicate that the element did not have a visual parent
previously.
"""
pass
def ParentLayoutInvalidated(self,*args):
"""
ParentLayoutInvalidated(self: FrameworkElement,child: UIElement)
Supports incremental layout implementations in specialized subclasses of
System.Windows.FrameworkElement.
System.Windows.FrameworkElement.ParentLayoutInvalidated(System.Windows.UIElement) is invoked
when a child element has invalidated a property that is marked in metadata as affecting the
parent's measure or arrange passes during layout.
child: The child element reporting the change.
"""
pass
def RemoveLogicalChild(self,*args):
"""
RemoveLogicalChild(self: FrameworkElement,child: object)
Removes the provided object from this element's logical tree. System.Windows.FrameworkElement
updates the affected logical tree parent pointers to keep in sync with this deletion.
child: The element to remove.
"""
pass
def RemoveVisualChild(self,*args):
"""
RemoveVisualChild(self: Visual,child: Visual)
Removes the parent-child relationship between two visuals.
child: The child visual object to remove from the parent visual.
"""
pass
def ShouldSerializeProperty(self,*args):
"""
ShouldSerializeProperty(self: DependencyObject,dp: DependencyProperty) -> bool
Returns a value that indicates whether serialization processes should serialize the value for
the provided dependency property.
dp: The identifier for the dependency property that should be serialized.
Returns: true if the dependency property that is supplied should be value-serialized; otherwise,false.
"""
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __str__(self,*args):
pass
DefaultStyleKey=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the key to use to reference the style for this control,when theme styles are used or defined.
"""
HandlesScrolling=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value that indicates whether a control supports scrolling.
"""
HasEffectiveKeyboardFocus=property(lambda self: object(),lambda self,v: None,lambda self: None)
InheritanceBehavior=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the scope limits for property value inheritance,resource key lookup,and RelativeSource FindAncestor lookup.
"""
IsEnabledCore=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value that becomes the return value of System.Windows.UIElement.IsEnabled in derived classes.
"""
IsIndeterminate=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets whether the System.Windows.Controls.ProgressBar shows actual values or generic,continuous progress feedback.
Get: IsIndeterminate(self: ProgressBar) -> bool
Set: IsIndeterminate(self: ProgressBar)=value
"""
LogicalChildren=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets an enumerator for logical child elements of this element.
"""
Orientation=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the orientation of a System.Windows.Controls.ProgressBar: horizontal or vertical.
Get: Orientation(self: ProgressBar) -> Orientation
Set: Orientation(self: ProgressBar)=value
"""
StylusPlugIns=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a collection of all stylus plug-in (customization) objects associated with this element.
"""
VisualBitmapEffect=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.Effects.BitmapEffect value for the System.Windows.Media.Visual.
"""
VisualBitmapEffectInput=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.Effects.BitmapEffectInput value for the System.Windows.Media.Visual.
"""
VisualBitmapScalingMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.BitmapScalingMode for the System.Windows.Media.Visual.
"""
VisualCacheMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a cached representation of the System.Windows.Media.Visual.
"""
VisualChildrenCount=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the number of visual child elements within this element.
"""
VisualClearTypeHint=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.ClearTypeHint that determines how ClearType is rendered in the System.Windows.Media.Visual.
"""
VisualClip=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the clip region of the System.Windows.Media.Visual as a System.Windows.Media.Geometry value.
"""
VisualEdgeMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the edge mode of the System.Windows.Media.Visual as an System.Windows.Media.EdgeMode value.
"""
VisualEffect=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the bitmap effect to apply to the System.Windows.Media.Visual.
"""
VisualOffset=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the offset value of the visual object.
"""
VisualOpacity=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the opacity of the System.Windows.Media.Visual.
"""
VisualOpacityMask=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.Brush value that represents the opacity mask of the System.Windows.Media.Visual.
"""
VisualParent=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the visual tree parent of the visual object.
"""
VisualScrollableAreaClip=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a clipped scrollable area for the System.Windows.Media.Visual.
"""
VisualTextHintingMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.TextHintingMode of the System.Windows.Media.Visual.
"""
VisualTextRenderingMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.TextRenderingMode of the System.Windows.Media.Visual.
"""
VisualTransform=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.Transform value for the System.Windows.Media.Visual.
"""
VisualXSnappingGuidelines=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the x-coordinate (vertical) guideline collection.
"""
VisualYSnappingGuidelines=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the y-coordinate (horizontal) guideline collection.
"""
IsIndeterminateProperty=None
OrientationProperty=None
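As a quick orientation for the API documented above, here is a minimal usage sketch. It assumes an IronPython runtime on Windows with the WPF assemblies available (it will not run under CPython); every property used is taken from the stub above:
# Minimal IronPython/WPF sketch exercising the ProgressBar API; assumes the
# WPF assemblies can be loaded.
import clr
clr.AddReference('PresentationFramework')
from System.Windows import Window, Application
from System.Windows.Controls import ProgressBar, Orientation
bar = ProgressBar()
bar.Minimum = 0.0
bar.Maximum = 100.0
bar.IsIndeterminate = True          # continuous feedback instead of a value
bar.Orientation = Orientation.Horizontal
bar.Height = 20
window = Window(Title='ProgressBar demo', Width=300, Height=80, Content=bar)
Application().Run(window)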
|
[
"[email protected]"
] | |
25edf5e1709e8c999cb8d8d26d28ee82133cf944
|
628ec414b7807fc50de67345361e41cc68ba3720
|
/mayan/apps/events/icons.py
|
8741af807574afd2c754b2d27e0efb1073aa1085
|
[
"Apache-2.0",
"BSD-3-Clause"
] |
permissive
|
TestingCodeReview/Mayan-EDMS
|
aafe144424ffa8128a4ff7cee24d91bf1e1f2750
|
d493ec34b2f93244e32e1a2a4e6cda4501d3cf4e
|
refs/heads/master
| 2020-05-27T23:34:44.118503 | 2019-04-05T02:04:18 | 2019-04-05T02:04:18 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 588 |
py
|
from __future__ import absolute_import, unicode_literals
from appearance.classes import Icon
icon_event_types_subscriptions_list = Icon(
driver_name='fontawesome', symbol='list-ol'
)
icon_events_list = Icon(driver_name='fontawesome', symbol='list-ol')
icon_events_for_object = Icon(driver_name='fontawesome', symbol='list-ol')
icon_events_user_list = Icon(driver_name='fontawesome', symbol='rss')
icon_object_event_types_user_subcriptions_list = Icon(
driver_name='fontawesome', symbol='rss'
)
icon_user_notifications_list = Icon(
driver_name='fontawesome', symbol='bell'
)
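The pattern above is uniform: one module-level Icon per view, parameterized only by driver and symbol. A hypothetical addition would follow the same shape (the name and 'calendar' symbol below are illustrative, not part of Mayan EDMS):
# Hypothetical new icon following the module's convention; illustrative only.
icon_events_calendar_view = Icon(
    driver_name='fontawesome', symbol='calendar'
)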
|
[
"[email protected]"
] | |
922ea7da8f1d8bb455f3da5f8e3aa7e842fb3ab4
|
19eafacbf77452f5059b8524ab1c72954fb6ecf6
|
/quant_engine/Factor/Analyst/ROE_FY1.py
|
abb7c47cd55f6bd89402eb415f6fbd7fdc09c69a
|
[] |
no_license
|
fagan2888/DFZQ
|
1893fe113428234f7f6f10408043e9b1683fb885
|
94730b31c8f53ca0ebecdea4327e55d92fb244b3
|
refs/heads/master
| 2022-11-17T17:57:55.608746 | 2020-07-14T10:01:28 | 2020-07-14T10:01:28 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,517 |
py
|
# Computation of the profitability factor ROE_FY1
# Aligned by report period
from factor_base import FactorBase
import pandas as pd
import numpy as np
import datetime
from global_constant import N_JOBS, FACTOR_DB
from joblib import Parallel, delayed, parallel_backend
from influxdb_data import influxdbData
from data_process import DataProcess
class ROE_FY1(FactorBase):
def __init__(self):
super().__init__()
self.db = 'DailyFactors_Gus'
@staticmethod
def JOB_cur_ROE_TTM(codes, df, db, measure):
influx = influxdbData()
save_res = []
for code in codes:
code_df = df.loc[df['code'] == code, :].copy()
conditions = [code_df['FY0_rp'].values == code_df['equity_last0Q_rp'].values,
code_df['FY0_rp'].values == code_df['equity_last1Q_rp'].values,
code_df['FY0_rp'].values == code_df['equity_last2Q_rp'].values,
code_df['FY0_rp'].values == code_df['equity_last3Q_rp'].values,
code_df['FY0_rp'].values == code_df['equity_last4Q_rp'].values,
code_df['FY0_rp'].values == code_df['equity_last5Q_rp'].values,
code_df['FY0_rp'].values == code_df['equity_last6Q_rp'].values]
choices = [code_df['net_equity'].values, code_df['net_equity_last1Q'].values,
code_df['net_equity_last2Q'].values, code_df['net_equity_last3Q'].values,
code_df['net_equity_last4Q'].values, code_df['net_equity_last5Q'].values,
code_df['net_equity_last6Q'].values]
code_df['ROE_equity'] = np.select(conditions, choices, default=np.nan)
# fill ROE_equity with the most recent non-NaN quarterly equity value
code_df[['net_equity_last6Q', 'net_equity_last5Q', 'net_equity_last4Q', 'net_equity_last3Q',
'net_equity_last2Q', 'net_equity_last1Q', 'net_equity', 'ROE_equity']] = \
code_df[['net_equity_last6Q', 'net_equity_last5Q', 'net_equity_last4Q', 'net_equity_last3Q',
'net_equity_last2Q', 'net_equity_last1Q', 'net_equity', 'ROE_equity']].fillna(
method='ffill', axis=1)
# compute ROE_FY1
code_df['ROE_FY1'] = code_df['net_profit_FY1'] / code_df['ROE_equity']
code_df.set_index('date', inplace=True)
code_df = code_df.loc[:, ['code', 'ROE_FY1', 'report_period']]
code_df = code_df.replace(np.inf, np.nan)
code_df = code_df.replace(-np.inf, np.nan)
code_df = code_df.dropna()
print('code: %s' % code)
if code_df.empty:
continue
r = influx.saveData(code_df, db, measure)
            if r != 'No error occurred...':
                save_res.append('ROE_FY1 Error: %s' % r)
return save_res
def cal_ROE_TTM(self):
save_measure = 'ROE_FY1'
# get profit
profit_df = self.influx.getDataMultiprocess(FACTOR_DB, 'AnalystNetProfit', self.start, self.end,
['code', 'net_profit_FY1', 'report_period'])
profit_df.index.names = ['date']
profit_df.reset_index(inplace=True)
# --------------------------------------------------------------------------------------
        # compute ROE_FY1: map each forecast report period back four quarters
        # (the FY0 period) so it can be matched to historical equity periods
cur_rps = []
former_rps = []
for rp in profit_df['report_period'].unique():
cur_rps.append(rp)
former_rps.append(DataProcess.get_former_RP(rp, 4))
rp_dict = dict(zip(cur_rps, former_rps))
profit_df['FY0_rp'] = profit_df['report_period'].map(rp_dict)
equity_df = self.net_equity.copy()
for i in range(1, 7):
cur_rps = []
former_rps = []
for rp in equity_df['report_period'].unique():
cur_rps.append(rp)
former_rps.append(DataProcess.get_former_RP(rp, i))
rp_dict = dict(zip(cur_rps, former_rps))
equity_df['equity_last{0}Q_rp'.format(i)] = equity_df['report_period'].map(rp_dict)
equity_df.rename(columns={'report_period': 'equity_last0Q_rp'}, inplace=True)
ROE_df = pd.merge(profit_df, equity_df, how='inner', on=['date', 'code'])
ROE_df = ROE_df.sort_values(['date', 'code', 'report_period'])
codes = ROE_df['code'].unique()
split_codes = np.array_split(codes, self.n_jobs)
with parallel_backend('multiprocessing', n_jobs=self.n_jobs):
res = Parallel()(delayed(ROE_FY1.JOB_cur_ROE_TTM)
(codes, ROE_df, self.db, save_measure) for codes in split_codes)
print('ROE_FY1 finish')
print('-' * 30)
for r in res:
self.fail_list.extend(r)
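    # The code universe is split into n_jobs chunks (np.array_split above) and
    # each chunk is handled by an independent worker that writes its results
    # to influx and returns only the save errors.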
def cal_factors(self, start, end, n_jobs):
pd.set_option('mode.use_inf_as_na', True)
self.start = start
self.end = end
self.n_jobs = n_jobs
self.fail_list = []
# get net equity
self.net_equity = \
self.influx.getDataMultiprocess('FinancialReport_Gus', 'net_equity', start, end)
self.net_equity.index.names = ['date']
self.net_equity.reset_index(inplace=True)
self.cal_ROE_TTM()
return self.fail_list
if __name__ == '__main__':
roe = ROE_FY1()
r = roe.cal_factors(20090101, 20200604, N_JOBS)
print('task finish')
print(r)
print(datetime.datetime.now())
|
[
"[email protected]"
] | |
13189888deeee3a661863fd34c894aa4787cd58e
|
4cd0631100e099e9b154b12b234715ddee0711d3
|
/model/BiSeNetV2A51.py
|
a82a22b71dcc648614459fce1abfa0e8638b1bca
|
[
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
Ethan-ye/Efficient-Segmentation-Networks
|
d6dd029c76cb46b89ac00ee2f6a49d9ddcd99a3a
|
27272e43126a507a6d93b21cd2372f5432f61237
|
refs/heads/master
| 2023-04-22T11:10:23.256349 | 2021-05-07T05:04:40 | 2021-05-07T05:12:38 | 281,823,847 | 0 | 0 |
MIT
| 2020-07-23T01:50:42 | 2020-07-23T01:50:41 | null |
UTF-8
|
Python
| false | false | 60,031 |
py
|
# -*- coding: utf-8 -*-
###########################################################################
# https://github.com/Soulempty/BiseNetv2-pytorch
import torch
import torch.nn as nn
from torch.nn import functional as F
from torchsummary import summary
from utils.activations import NON_LINEARITY
from fvcore.nn.flop_count import flop_count # https://github.com/facebookresearch/fvcore
from tools.flops_counter.ptflops import get_model_complexity_info
from thop import profile # https://github.com/Lyken17/pytorch-OpCounter
__all__ = ['BiSeNetV2A51']
class conv2d(nn.Module):
def __init__(self, in_dim, out_dim, k, pad, stride, groups=1, bias=False, use_bn=True, use_rl=True):
super(conv2d, self).__init__()
self.use_bn = use_bn
self.use_rl = use_rl
self.conv = nn.Conv2d(in_dim, out_dim, k, padding=pad, stride=stride, groups=groups, bias=bias)
self.bn = nn.BatchNorm2d(out_dim)
self.relu = nn.ReLU(inplace=True)
def forward(self, bottom):
if self.use_bn and self.use_rl:
return self.relu(self.bn(self.conv(bottom)))
elif self.use_bn:
return self.bn(self.conv(bottom))
else:
return self.conv(bottom)
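# For reference: conv2d(in_dim, out_dim, k, pad, stride) above builds
# Conv-BN-ReLU by default; use_rl=False drops the ReLU (Conv-BN only) and
# use_bn=False additionally drops the BatchNorm, which the blocks below rely
# on for their projection layers.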
class StemBlock(nn.Module):
def __init__(self):
super(StemBlock, self).__init__()
self.conv1 = conv2d(3, 16, 3, 1, 2)
self.conv_1x1 = conv2d(16, 8, 1, 0, 1)
self.conv_3x3 = conv2d(8, 16, 3, 1, 2)
self.mpooling = nn.MaxPool2d(3, 2, 1)
self.conv2 = conv2d(32, 16, 3, 1, 1)
def forward(self, bottom):
base = self.conv1(bottom)
conv_1 = self.conv_1x1(base)
conv_3 = self.conv_3x3(conv_1)
pool = self.mpooling(base)
cat = torch.cat([conv_3, pool], 1)
res = self.conv2(cat)
return res
class ContextEmbeddingBlock(nn.Module):
def __init__(self, in_dim):
super(ContextEmbeddingBlock, self).__init__()
        self.gap = nn.AdaptiveAvgPool2d(1)  # global average pooling to 1x1
self.bn1 = nn.BatchNorm2d(in_dim)
self.conv1 = conv2d(in_dim, in_dim, 1, 0, 1)
self.conv2 = conv2d(in_dim, in_dim, 3, 1, 1, use_bn=False, use_rl=False)
def forward(self, bottom):
gap = self.gap(bottom)
bn = self.bn1(gap)
conv1 = self.conv1(bn)
feat = bottom + conv1
res = self.conv2(feat)
return res
class GatherExpansion(nn.Module):
def __init__(self, in_dim, out_dim, stride=1, exp=6):
super(GatherExpansion, self).__init__()
exp_dim = in_dim * exp
self.stride = stride
self.conv1 = conv2d(in_dim, exp_dim, 3, 1, 1)
self.dwconv2 = conv2d(exp_dim, exp_dim, 3, 1, 1, exp_dim, use_rl=False)
self.conv_11 = conv2d(exp_dim, out_dim, 1, 0, 1, use_rl=False)
self.dwconv1 = conv2d(exp_dim, exp_dim, 3, 1, 2, exp_dim, use_rl=False)
self.dwconv3 = conv2d(in_dim, in_dim, 3, 1, 2, in_dim, use_rl=False)
self.conv_12 = conv2d(in_dim, out_dim, 1, 0, 1, use_rl=False)
self.relu = nn.ReLU(inplace=True)
def forward(self, bottom):
base = self.conv1(bottom)
if self.stride == 2:
base = self.dwconv1(base)
bottom = self.dwconv3(bottom)
bottom = self.conv_12(bottom)
x = self.dwconv2(base)
x = self.conv_11(x)
res = self.relu(x + bottom)
return res
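# Shape sketch for GatherExpansion (illustrative): with stride=1 the input is
# expanded by `exp`, depthwise-filtered, projected back and added to the
# identity, e.g. GatherExpansion(32, 32) keeps (B,32,H,W); with stride=2 both
# branches are downsampled before the residual add, e.g.
# GatherExpansion(16, 32, 2) maps (B,16,H,W) -> (B,32,H/2,W/2).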
class MA(nn.Module):
def __init__(self, in_dim, map_dim, matmul_norm):
super(MA, self).__init__()
self.in_dim = in_dim
self.map_dim = map_dim
self.matmul_norm = matmul_norm
self.query_project = conv2d(in_dim, map_dim, 1, 0, 1)
self.key_project = conv2d(in_dim, map_dim, 1, 0, 1)
self.value_project = conv2d(in_dim, map_dim, 1, 0, 1)
self.out_project = conv2d(map_dim, in_dim, 1, 0, 1)
def forward(self, query_feats, key_feats):
"""Forward function."""
batch_size = query_feats.size(0)
query = self.query_project(query_feats)
# if self.query_downsample is not None:
# query = self.query_downsample(query)
query = query.reshape(*query.shape[:2], -1) #B,C,H*W
query = query.permute(0, 2, 1).contiguous() #B,H*w,C
key = self.key_project(key_feats)
value = self.value_project(key_feats)
# if self.key_downsample is not None:
# key = self.key_downsample(key)
# value = self.key_downsample(value)
key = key.reshape(*key.shape[:2], -1) #B,C,h*w
value = value.reshape(*value.shape[:2], -1) #B,C,h*w
value = value.permute(0, 2, 1).contiguous() #B,h*w,C
sim_map = torch.matmul(query, key)#B,H*W,h*w
if self.matmul_norm:
sim_map = (self.map_dim**-.5) * sim_map
sim_map = F.softmax(sim_map, dim=-1)
context = torch.matmul(sim_map, value)#B,H*W,C
context = context.permute(0, 2, 1).contiguous()#B,C,H*W
context = context.reshape(batch_size, -1, *query_feats.shape[2:])#B,C,H,W
if self.out_project is not None:
context = self.out_project(context)
return context
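# Minimal usage sketch (this is how BiSeNetV2A51.forward calls it below):
# passing the same feature map as query and key source yields self-attention,
#   ma = MA(in_dim=128, map_dim=16, matmul_norm=True)
#   context = ma(feat, feat)  # feat: (B,128,H,W) -> context: (B,128,H,W)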
# class BGA(nn.Module):
# def __init__(self, in_dim):
# super(BGA, self).__init__()
# self.in_dim = in_dim
# self.db_dwconv = conv2d(in_dim, in_dim, 3, 1, 1, in_dim, use_rl=False)
# self.db_conv1x1 = conv2d(in_dim, in_dim, 1, 0, 1, use_rl=False, use_bn=False)
# self.db_conv = conv2d(in_dim, in_dim, 3, 1, 2, use_rl=False)
# self.db_apooling = nn.AvgPool2d(3, 2, 1)
#
# self.sb_dwconv = conv2d(in_dim, in_dim, 3, 1, 1, in_dim, use_rl=False)
# self.sb_conv1x1 = conv2d(in_dim, in_dim, 1, 0, 1, use_rl=False, use_bn=False)
# self.sb_conv = conv2d(in_dim, in_dim, 3, 1, 1, use_rl=False)
# self.sb_sigmoid = nn.Sigmoid()
#
# self.conv = conv2d(in_dim, in_dim, 3, 1, 1, use_rl=False)
#
# def forward(self, db, sb):
# db_dwc = self.db_dwconv(db)
# db_out = self.db_conv1x1(db_dwc) #
# db_conv = self.db_conv(db)
# db_pool = self.db_apooling(db_conv)
#
# sb_dwc = self.sb_dwconv(sb)
# sb_out = self.sb_sigmoid(self.sb_conv1x1(sb_dwc)) #
# sb_conv = self.sb_conv(sb)
# sb_up = self.sb_sigmoid(F.interpolate(sb_conv, size=db_out.size()[2:], mode="bilinear", align_corners=True))
# db_l = db_out * sb_up
# sb_r = F.interpolate(sb_out * db_pool, size=db_out.size()[2:], mode="bilinear", align_corners=True)
# res = self.conv(db_l + sb_r)
# return res
class SegHead(nn.Module):
def __init__(self, in_dim, out_dim, classes):
super(SegHead, self).__init__()
# self.size = size
self.conv = conv2d(in_dim, out_dim, 3, 1, 1)
self.classes = conv2d(out_dim, classes, 1, 0, 1, use_bn=False, use_rl=False)
def forward(self, feat,size):
x = self.conv(feat)
x = self.classes(x)
pred = F.interpolate(x, size=size, mode="bilinear", align_corners=True)
return pred
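# e.g. SegHead(128, 128, classes) applied to a (B,128,11,15) feature map with
# size=(352, 480) returns (B, classes, 352, 480) logits via bilinear
# upsampling, as the layer summary below confirms.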
class DetailedBranch(nn.Module):
def __init__(self):
super(DetailedBranch, self).__init__()
self.s1_conv1 = conv2d(3, 64, 3, 1, 2)
self.s1_conv2 = conv2d(64, 64, 3, 1, 1)
self.s2_conv1 = conv2d(64, 64, 3, 1, 2)
self.s2_conv2 = conv2d(64, 64, 3, 1, 1)
self.s2_conv3 = conv2d(64, 64, 3, 1, 1)
self.s3_conv1 = conv2d(64, 128, 3, 1, 2)
self.s3_conv2 = conv2d(128, 128, 3, 1, 1)
self.s3_conv3 = conv2d(128, 128, 3, 1, 1)
def forward(self, bottom):
s1_1 = self.s1_conv1(bottom)
s1_2 = self.s1_conv2(s1_1)
s2_1 = self.s2_conv1(s1_2)
s2_2 = self.s2_conv2(s2_1)
s2_3 = self.s2_conv3(s2_2)
s3_1 = self.s3_conv1(s2_3)
s3_2 = self.s3_conv2(s3_1)
s3_3 = self.s3_conv3(s3_2)
return s3_3
class SemanticBranch(nn.Module):
def __init__(self, classes):
super(SemanticBranch, self).__init__()
# self.training = True
self.stem = StemBlock()
self.s3_ge1 = GatherExpansion(16, 32, 2)
self.s3_ge2 = GatherExpansion(32, 32)
self.s4_ge1 = GatherExpansion(32, 64, 2)
self.s4_ge2 = GatherExpansion(64, 64)
self.s5_ge1 = GatherExpansion(64, 128, 2)
self.s5_ge2 = GatherExpansion(128, 128)
self.s5_ge3 = GatherExpansion(128, 128)
self.s5_ge4 = GatherExpansion(128, 128)
self.s5_ge5 = GatherExpansion(128, 128, exp=1)
# if self.training:
self.seghead1 = SegHead(16, 16, classes)
self.seghead2 = SegHead(32, 32, classes)
self.seghead3 = SegHead(64, 64, classes)
self.seghead4 = SegHead(128, 128, classes)
self.ceb = ContextEmbeddingBlock(128)
def forward(self, bottom, size):
stg12 = self.stem(bottom)
# print(stg12.size())
stg3 = self.s3_ge1(stg12)
stg3 = self.s3_ge2(stg3)
# print(stg3.size())
stg4 = self.s4_ge1(stg3)
stg4 = self.s4_ge2(stg4)
# print(stg4.size())
stg5 = self.s5_ge1(stg4)
stg5 = self.s5_ge2(stg5)
stg5 = self.s5_ge3(stg5)
stg5 = self.s5_ge4(stg5)
stg5 = self.s5_ge5(stg5)
# print(stg5.size())
out = self.ceb(stg5)
# if self.training:
seghead1 = self.seghead1(stg12,size)
seghead2 = self.seghead2(stg3,size)
seghead3 = self.seghead3(stg4,size)
seghead4 = self.seghead4(stg5,size)
return out, seghead1, seghead2, seghead3, seghead4
# else:
# return out
class BiSeNetV2A51(nn.Module):
def __init__(self, classes):
super(BiSeNetV2A51, self).__init__()
# self.training = True
self.db = DetailedBranch()
self.sb = SemanticBranch(classes)
# self.bga = BGA(128)
self.sma = MA(128,16,True)
self.seghead = SegHead(128, 128, classes)
self._init_params()
# self.criterion = nn.CrossEntropyLoss(ignore_index=255)
def _init_params(self):
for m in self.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
if m.bias is not None:
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.BatchNorm2d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.BatchNorm1d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.Linear):
nn.init.normal_(m.weight, 0, 0.01)
if m.bias is not None:
nn.init.constant_(m.bias, 0)
def forward(self, data, y=None):
size = data.size()[2:]
# db = self.db(data)
# if self.training:
sb, head1, head2, head3, head4 = self.sb(data,size)
# else:
# sb = self.sb(data,size)
# bga = self.bga(db, sb)
# pred = self.seghead(bga,size)
sma = self.sma(sb,sb)
sb = sb+sma
sb_up = F.interpolate(sb, size=(44,60), mode="bilinear", align_corners=True)
pred = self.seghead(sb_up,size)
# if self.training:
# main_loss = self.criterion(pred, y)
# aux1_loss = self.criterion(head1, y)
# aux2_loss = self.criterion(head2, y)
# aux3_loss = self.criterion(head3, y)
# aux4_loss = self.criterion(head4, y)
# return pred.max(1)[1], main_loss, (aux1_loss, aux2_loss, aux3_loss, aux4_loss)
return [pred,head1, head2, head3, head4]
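    # Output note: the list holds the main prediction followed by the four
    # auxiliary head predictions, each (B, classes, H, W) at input resolution.
    # The aux heads exist for deep-supervision losses (see the commented-out
    # criterion code above); in this A51 variant the detail branch output is
    # unused since its call is commented out.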
if __name__ == "__main__":
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model = BiSeNetV2A51(classes=11).to(device)
summary(model,(3, 352, 480))
flops_count, params_count = get_model_complexity_info(model, (3, 352, 480),
as_strings=False,
print_per_layer_stat=True)
print(flops_count/1000000000,'GMac', params_count/1000000, params_count/1024/1024*4,'MB')
x = torch.randn(2, 3, 352, 480).to(device)
input = x
macs, params = profile(model, inputs=(input,))
print(macs/2000000000,'GMac', params/1000000, params/1024/1024*4,'MB')
# from fvcore.nn.jit_handles import batchnorm_flop_jit
# from fvcore.nn.jit_handles import generic_activation_jit
#
# supported_ops = {
# "aten::batch_norm": batchnorm_flop_jit,
# }
# flop_dict, _ = flop_count(model, (x,), supported_ops)
# print(flop_dict)
'''
/home/ethan/anaconda3/envs/py36_cuda101/bin/python /home/ethan/codes/Efficient-Segmentation-Networks/model/BiSeNetV2A51.py
----------------------------------------------------------------
Layer (type) Output Shape Param #
================================================================
Conv2d-1 [-1, 16, 176, 240] 432
BatchNorm2d-2 [-1, 16, 176, 240] 32
ReLU-3 [-1, 16, 176, 240] 0
conv2d-4 [-1, 16, 176, 240] 0
Conv2d-5 [-1, 8, 176, 240] 128
BatchNorm2d-6 [-1, 8, 176, 240] 16
ReLU-7 [-1, 8, 176, 240] 0
conv2d-8 [-1, 8, 176, 240] 0
Conv2d-9 [-1, 16, 88, 120] 1,152
BatchNorm2d-10 [-1, 16, 88, 120] 32
ReLU-11 [-1, 16, 88, 120] 0
conv2d-12 [-1, 16, 88, 120] 0
MaxPool2d-13 [-1, 16, 88, 120] 0
Conv2d-14 [-1, 16, 88, 120] 4,608
BatchNorm2d-15 [-1, 16, 88, 120] 32
ReLU-16 [-1, 16, 88, 120] 0
conv2d-17 [-1, 16, 88, 120] 0
StemBlock-18 [-1, 16, 88, 120] 0
Conv2d-19 [-1, 96, 88, 120] 13,824
BatchNorm2d-20 [-1, 96, 88, 120] 192
ReLU-21 [-1, 96, 88, 120] 0
conv2d-22 [-1, 96, 88, 120] 0
Conv2d-23 [-1, 96, 44, 60] 864
BatchNorm2d-24 [-1, 96, 44, 60] 192
conv2d-25 [-1, 96, 44, 60] 0
Conv2d-26 [-1, 16, 44, 60] 144
BatchNorm2d-27 [-1, 16, 44, 60] 32
conv2d-28 [-1, 16, 44, 60] 0
Conv2d-29 [-1, 32, 44, 60] 512
BatchNorm2d-30 [-1, 32, 44, 60] 64
conv2d-31 [-1, 32, 44, 60] 0
Conv2d-32 [-1, 96, 44, 60] 864
BatchNorm2d-33 [-1, 96, 44, 60] 192
conv2d-34 [-1, 96, 44, 60] 0
Conv2d-35 [-1, 32, 44, 60] 3,072
BatchNorm2d-36 [-1, 32, 44, 60] 64
conv2d-37 [-1, 32, 44, 60] 0
ReLU-38 [-1, 32, 44, 60] 0
GatherExpansion-39 [-1, 32, 44, 60] 0
Conv2d-40 [-1, 192, 44, 60] 55,296
BatchNorm2d-41 [-1, 192, 44, 60] 384
ReLU-42 [-1, 192, 44, 60] 0
conv2d-43 [-1, 192, 44, 60] 0
Conv2d-44 [-1, 192, 44, 60] 1,728
BatchNorm2d-45 [-1, 192, 44, 60] 384
conv2d-46 [-1, 192, 44, 60] 0
Conv2d-47 [-1, 32, 44, 60] 6,144
BatchNorm2d-48 [-1, 32, 44, 60] 64
conv2d-49 [-1, 32, 44, 60] 0
ReLU-50 [-1, 32, 44, 60] 0
GatherExpansion-51 [-1, 32, 44, 60] 0
Conv2d-52 [-1, 192, 44, 60] 55,296
BatchNorm2d-53 [-1, 192, 44, 60] 384
ReLU-54 [-1, 192, 44, 60] 0
conv2d-55 [-1, 192, 44, 60] 0
Conv2d-56 [-1, 192, 22, 30] 1,728
BatchNorm2d-57 [-1, 192, 22, 30] 384
conv2d-58 [-1, 192, 22, 30] 0
Conv2d-59 [-1, 32, 22, 30] 288
BatchNorm2d-60 [-1, 32, 22, 30] 64
conv2d-61 [-1, 32, 22, 30] 0
Conv2d-62 [-1, 64, 22, 30] 2,048
BatchNorm2d-63 [-1, 64, 22, 30] 128
conv2d-64 [-1, 64, 22, 30] 0
Conv2d-65 [-1, 192, 22, 30] 1,728
BatchNorm2d-66 [-1, 192, 22, 30] 384
conv2d-67 [-1, 192, 22, 30] 0
Conv2d-68 [-1, 64, 22, 30] 12,288
BatchNorm2d-69 [-1, 64, 22, 30] 128
conv2d-70 [-1, 64, 22, 30] 0
ReLU-71 [-1, 64, 22, 30] 0
GatherExpansion-72 [-1, 64, 22, 30] 0
Conv2d-73 [-1, 384, 22, 30] 221,184
BatchNorm2d-74 [-1, 384, 22, 30] 768
ReLU-75 [-1, 384, 22, 30] 0
conv2d-76 [-1, 384, 22, 30] 0
Conv2d-77 [-1, 384, 22, 30] 3,456
BatchNorm2d-78 [-1, 384, 22, 30] 768
conv2d-79 [-1, 384, 22, 30] 0
Conv2d-80 [-1, 64, 22, 30] 24,576
BatchNorm2d-81 [-1, 64, 22, 30] 128
conv2d-82 [-1, 64, 22, 30] 0
ReLU-83 [-1, 64, 22, 30] 0
GatherExpansion-84 [-1, 64, 22, 30] 0
Conv2d-85 [-1, 384, 22, 30] 221,184
BatchNorm2d-86 [-1, 384, 22, 30] 768
ReLU-87 [-1, 384, 22, 30] 0
conv2d-88 [-1, 384, 22, 30] 0
Conv2d-89 [-1, 384, 11, 15] 3,456
BatchNorm2d-90 [-1, 384, 11, 15] 768
conv2d-91 [-1, 384, 11, 15] 0
Conv2d-92 [-1, 64, 11, 15] 576
BatchNorm2d-93 [-1, 64, 11, 15] 128
conv2d-94 [-1, 64, 11, 15] 0
Conv2d-95 [-1, 128, 11, 15] 8,192
BatchNorm2d-96 [-1, 128, 11, 15] 256
conv2d-97 [-1, 128, 11, 15] 0
Conv2d-98 [-1, 384, 11, 15] 3,456
BatchNorm2d-99 [-1, 384, 11, 15] 768
conv2d-100 [-1, 384, 11, 15] 0
Conv2d-101 [-1, 128, 11, 15] 49,152
BatchNorm2d-102 [-1, 128, 11, 15] 256
conv2d-103 [-1, 128, 11, 15] 0
ReLU-104 [-1, 128, 11, 15] 0
GatherExpansion-105 [-1, 128, 11, 15] 0
Conv2d-106 [-1, 768, 11, 15] 884,736
BatchNorm2d-107 [-1, 768, 11, 15] 1,536
ReLU-108 [-1, 768, 11, 15] 0
conv2d-109 [-1, 768, 11, 15] 0
Conv2d-110 [-1, 768, 11, 15] 6,912
BatchNorm2d-111 [-1, 768, 11, 15] 1,536
conv2d-112 [-1, 768, 11, 15] 0
Conv2d-113 [-1, 128, 11, 15] 98,304
BatchNorm2d-114 [-1, 128, 11, 15] 256
conv2d-115 [-1, 128, 11, 15] 0
ReLU-116 [-1, 128, 11, 15] 0
GatherExpansion-117 [-1, 128, 11, 15] 0
Conv2d-118 [-1, 768, 11, 15] 884,736
BatchNorm2d-119 [-1, 768, 11, 15] 1,536
ReLU-120 [-1, 768, 11, 15] 0
conv2d-121 [-1, 768, 11, 15] 0
Conv2d-122 [-1, 768, 11, 15] 6,912
BatchNorm2d-123 [-1, 768, 11, 15] 1,536
conv2d-124 [-1, 768, 11, 15] 0
Conv2d-125 [-1, 128, 11, 15] 98,304
BatchNorm2d-126 [-1, 128, 11, 15] 256
conv2d-127 [-1, 128, 11, 15] 0
ReLU-128 [-1, 128, 11, 15] 0
GatherExpansion-129 [-1, 128, 11, 15] 0
Conv2d-130 [-1, 768, 11, 15] 884,736
BatchNorm2d-131 [-1, 768, 11, 15] 1,536
ReLU-132 [-1, 768, 11, 15] 0
conv2d-133 [-1, 768, 11, 15] 0
Conv2d-134 [-1, 768, 11, 15] 6,912
BatchNorm2d-135 [-1, 768, 11, 15] 1,536
conv2d-136 [-1, 768, 11, 15] 0
Conv2d-137 [-1, 128, 11, 15] 98,304
BatchNorm2d-138 [-1, 128, 11, 15] 256
conv2d-139 [-1, 128, 11, 15] 0
ReLU-140 [-1, 128, 11, 15] 0
GatherExpansion-141 [-1, 128, 11, 15] 0
Conv2d-142 [-1, 128, 11, 15] 147,456
BatchNorm2d-143 [-1, 128, 11, 15] 256
ReLU-144 [-1, 128, 11, 15] 0
conv2d-145 [-1, 128, 11, 15] 0
Conv2d-146 [-1, 128, 11, 15] 1,152
BatchNorm2d-147 [-1, 128, 11, 15] 256
conv2d-148 [-1, 128, 11, 15] 0
Conv2d-149 [-1, 128, 11, 15] 16,384
BatchNorm2d-150 [-1, 128, 11, 15] 256
conv2d-151 [-1, 128, 11, 15] 0
ReLU-152 [-1, 128, 11, 15] 0
GatherExpansion-153 [-1, 128, 11, 15] 0
AdaptiveAvgPool2d-154 [-1, 128, 1, 1] 0
BatchNorm2d-155 [-1, 128, 1, 1] 256
Conv2d-156 [-1, 128, 1, 1] 16,384
BatchNorm2d-157 [-1, 128, 1, 1] 256
ReLU-158 [-1, 128, 1, 1] 0
conv2d-159 [-1, 128, 1, 1] 0
Conv2d-160 [-1, 128, 11, 15] 147,456
conv2d-161 [-1, 128, 11, 15] 0
ContextEmbeddingBlock-162 [-1, 128, 11, 15] 0
Conv2d-163 [-1, 16, 88, 120] 2,304
BatchNorm2d-164 [-1, 16, 88, 120] 32
ReLU-165 [-1, 16, 88, 120] 0
conv2d-166 [-1, 16, 88, 120] 0
Conv2d-167 [-1, 11, 88, 120] 176
conv2d-168 [-1, 11, 88, 120] 0
SegHead-169 [-1, 11, 352, 480] 0
Conv2d-170 [-1, 32, 44, 60] 9,216
BatchNorm2d-171 [-1, 32, 44, 60] 64
ReLU-172 [-1, 32, 44, 60] 0
conv2d-173 [-1, 32, 44, 60] 0
Conv2d-174 [-1, 11, 44, 60] 352
conv2d-175 [-1, 11, 44, 60] 0
SegHead-176 [-1, 11, 352, 480] 0
Conv2d-177 [-1, 64, 22, 30] 36,864
BatchNorm2d-178 [-1, 64, 22, 30] 128
ReLU-179 [-1, 64, 22, 30] 0
conv2d-180 [-1, 64, 22, 30] 0
Conv2d-181 [-1, 11, 22, 30] 704
conv2d-182 [-1, 11, 22, 30] 0
SegHead-183 [-1, 11, 352, 480] 0
Conv2d-184 [-1, 128, 11, 15] 147,456
BatchNorm2d-185 [-1, 128, 11, 15] 256
ReLU-186 [-1, 128, 11, 15] 0
conv2d-187 [-1, 128, 11, 15] 0
Conv2d-188 [-1, 11, 11, 15] 1,408
conv2d-189 [-1, 11, 11, 15] 0
SegHead-190 [-1, 11, 352, 480] 0
SemanticBranch-191 [[-1, 128, 11, 15], [-1, 11, 352, 480], [-1, 11, 352, 480], [-1, 11, 352, 480], [-1, 11, 352, 480]] 0
Conv2d-192 [-1, 16, 11, 15] 2,048
BatchNorm2d-193 [-1, 16, 11, 15] 32
ReLU-194 [-1, 16, 11, 15] 0
conv2d-195 [-1, 16, 11, 15] 0
Conv2d-196 [-1, 16, 11, 15] 2,048
BatchNorm2d-197 [-1, 16, 11, 15] 32
ReLU-198 [-1, 16, 11, 15] 0
conv2d-199 [-1, 16, 11, 15] 0
Conv2d-200 [-1, 16, 11, 15] 2,048
BatchNorm2d-201 [-1, 16, 11, 15] 32
ReLU-202 [-1, 16, 11, 15] 0
conv2d-203 [-1, 16, 11, 15] 0
Conv2d-204 [-1, 128, 11, 15] 2,048
BatchNorm2d-205 [-1, 128, 11, 15] 256
ReLU-206 [-1, 128, 11, 15] 0
conv2d-207 [-1, 128, 11, 15] 0
MA-208 [-1, 128, 11, 15] 0
Conv2d-209 [-1, 128, 44, 60] 147,456
BatchNorm2d-210 [-1, 128, 44, 60] 256
ReLU-211 [-1, 128, 44, 60] 0
conv2d-212 [-1, 128, 44, 60] 0
Conv2d-213 [-1, 11, 44, 60] 1,408
conv2d-214 [-1, 11, 44, 60] 0
SegHead-215 [-1, 11, 352, 480] 0
================================================================
Total params: 4,371,712
Trainable params: 4,371,712
Non-trainable params: 0
----------------------------------------------------------------
Input size (MB): 1.93
Forward/backward pass size (MB): 62196495155494.29
Params size (MB): 16.68
Estimated Total Size (MB): 62196495155512.91
----------------------------------------------------------------
BiSeNetV2A51(
1.949 GMac, 100.000% MACs,
(db): DetailedBranch(
0.0 GMac, 0.000% MACs,
(s1_conv1): conv2d(
0.0 GMac, 0.000% MACs,
(conv): Conv2d(0.0 GMac, 0.000% MACs, 3, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(s1_conv2): conv2d(
0.0 GMac, 0.000% MACs,
(conv): Conv2d(0.0 GMac, 0.000% MACs, 64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(s2_conv1): conv2d(
0.0 GMac, 0.000% MACs,
(conv): Conv2d(0.0 GMac, 0.000% MACs, 64, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(s2_conv2): conv2d(
0.0 GMac, 0.000% MACs,
(conv): Conv2d(0.0 GMac, 0.000% MACs, 64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(s2_conv3): conv2d(
0.0 GMac, 0.000% MACs,
(conv): Conv2d(0.0 GMac, 0.000% MACs, 64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(s3_conv1): conv2d(
0.0 GMac, 0.000% MACs,
(conv): Conv2d(0.0 GMac, 0.000% MACs, 64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(s3_conv2): conv2d(
0.0 GMac, 0.000% MACs,
(conv): Conv2d(0.0 GMac, 0.000% MACs, 128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(s3_conv3): conv2d(
0.0 GMac, 0.000% MACs,
(conv): Conv2d(0.0 GMac, 0.000% MACs, 128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
)
(sb): SemanticBranch(
1.553 GMac, 79.707% MACs,
(stem): StemBlock(
0.089 GMac, 4.578% MACs,
(conv1): conv2d(
0.02 GMac, 1.040% MACs,
(conv): Conv2d(0.018 GMac, 0.936% MACs, 3, 16, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
(bn): BatchNorm2d(0.001 GMac, 0.069% MACs, 16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.001 GMac, 0.035% MACs, inplace=True)
)
(conv_1x1): conv2d(
0.006 GMac, 0.329% MACs,
(conv): Conv2d(0.005 GMac, 0.277% MACs, 16, 8, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.001 GMac, 0.035% MACs, 8, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.017% MACs, inplace=True)
)
(conv_3x3): conv2d(
0.013 GMac, 0.650% MACs,
(conv): Conv2d(0.012 GMac, 0.624% MACs, 8, 16, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.017% MACs, 16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.009% MACs, inplace=True)
)
(mpooling): MaxPool2d(0.001 GMac, 0.035% MACs, kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)
(conv2): conv2d(
0.049 GMac, 2.523% MACs,
(conv): Conv2d(0.049 GMac, 2.497% MACs, 32, 16, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.017% MACs, 16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.009% MACs, inplace=True)
)
)
(s3_ge1): GatherExpansion(
0.165 GMac, 8.465% MACs,
(conv1): conv2d(
0.149 GMac, 7.647% MACs,
(conv): Conv2d(0.146 GMac, 7.491% MACs, 16, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn): BatchNorm2d(0.002 GMac, 0.104% MACs, 96, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.001 GMac, 0.052% MACs, inplace=True)
)
(dwconv2): conv2d(
0.003 GMac, 0.143% MACs,
(conv): Conv2d(0.002 GMac, 0.117% MACs, 96, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=96, bias=False)
(bn): BatchNorm2d(0.001 GMac, 0.026% MACs, 96, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(conv_11): conv2d(
0.008 GMac, 0.425% MACs,
(conv): Conv2d(0.008 GMac, 0.416% MACs, 96, 32, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.009% MACs, 32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(dwconv1): conv2d(
0.003 GMac, 0.143% MACs,
(conv): Conv2d(0.002 GMac, 0.117% MACs, 96, 96, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=96, bias=False)
(bn): BatchNorm2d(0.001 GMac, 0.026% MACs, 96, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(dwconv3): conv2d(
0.0 GMac, 0.024% MACs,
(conv): Conv2d(0.0 GMac, 0.020% MACs, 16, 16, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=16, bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.004% MACs, 16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(conv_12): conv2d(
0.002 GMac, 0.078% MACs,
(conv): Conv2d(0.001 GMac, 0.069% MACs, 16, 32, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.009% MACs, 32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(relu): ReLU(0.0 GMac, 0.004% MACs, inplace=True)
)
(s3_ge2): GatherExpansion(
0.17 GMac, 8.701% MACs,
(conv1): conv2d(
0.148 GMac, 7.569% MACs,
(conv): Conv2d(0.146 GMac, 7.491% MACs, 32, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn): BatchNorm2d(0.001 GMac, 0.052% MACs, 192, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.001 GMac, 0.026% MACs, inplace=True)
)
(dwconv2): conv2d(
0.006 GMac, 0.286% MACs,
(conv): Conv2d(0.005 GMac, 0.234% MACs, 192, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=192, bias=False)
(bn): BatchNorm2d(0.001 GMac, 0.052% MACs, 192, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(conv_11): conv2d(
0.016 GMac, 0.841% MACs,
(conv): Conv2d(0.016 GMac, 0.832% MACs, 192, 32, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.009% MACs, 32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(dwconv1): conv2d(
0.0 GMac, 0.000% MACs,
(conv): Conv2d(0.0 GMac, 0.000% MACs, 192, 192, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=192, bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 192, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(dwconv3): conv2d(
0.0 GMac, 0.000% MACs,
(conv): Conv2d(0.0 GMac, 0.000% MACs, 32, 32, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=32, bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(conv_12): conv2d(
0.0 GMac, 0.000% MACs,
(conv): Conv2d(0.0 GMac, 0.000% MACs, 32, 32, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(relu): ReLU(0.0 GMac, 0.004% MACs, inplace=True)
)
(s4_ge1): GatherExpansion(
0.16 GMac, 8.221% MACs,
(conv1): conv2d(
0.148 GMac, 7.569% MACs,
(conv): Conv2d(0.146 GMac, 7.491% MACs, 32, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn): BatchNorm2d(0.001 GMac, 0.052% MACs, 192, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.001 GMac, 0.026% MACs, inplace=True)
)
(dwconv2): conv2d(
0.001 GMac, 0.072% MACs,
(conv): Conv2d(0.001 GMac, 0.059% MACs, 192, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=192, bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.013% MACs, 192, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(conv_11): conv2d(
0.008 GMac, 0.421% MACs,
(conv): Conv2d(0.008 GMac, 0.416% MACs, 192, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.004% MACs, 64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(dwconv1): conv2d(
0.001 GMac, 0.072% MACs,
(conv): Conv2d(0.001 GMac, 0.059% MACs, 192, 192, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=192, bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.013% MACs, 192, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(dwconv3): conv2d(
0.0 GMac, 0.012% MACs,
(conv): Conv2d(0.0 GMac, 0.010% MACs, 32, 32, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=32, bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.002% MACs, 32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(conv_12): conv2d(
0.001 GMac, 0.074% MACs,
(conv): Conv2d(0.001 GMac, 0.069% MACs, 32, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.004% MACs, 64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(relu): ReLU(0.0 GMac, 0.002% MACs, inplace=True)
)
(s4_ge2): GatherExpansion(
0.166 GMac, 8.512% MACs,
(conv1): conv2d(
0.147 GMac, 7.530% MACs,
(conv): Conv2d(0.146 GMac, 7.491% MACs, 64, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn): BatchNorm2d(0.001 GMac, 0.026% MACs, 384, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.013% MACs, inplace=True)
)
(dwconv2): conv2d(
0.003 GMac, 0.143% MACs,
(conv): Conv2d(0.002 GMac, 0.117% MACs, 384, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=384, bias=False)
(bn): BatchNorm2d(0.001 GMac, 0.026% MACs, 384, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(conv_11): conv2d(
0.016 GMac, 0.837% MACs,
(conv): Conv2d(0.016 GMac, 0.832% MACs, 384, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.004% MACs, 64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(dwconv1): conv2d(
0.0 GMac, 0.000% MACs,
(conv): Conv2d(0.0 GMac, 0.000% MACs, 384, 384, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=384, bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 384, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(dwconv3): conv2d(
0.0 GMac, 0.000% MACs,
(conv): Conv2d(0.0 GMac, 0.000% MACs, 64, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=64, bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(conv_12): conv2d(
0.0 GMac, 0.000% MACs,
(conv): Conv2d(0.0 GMac, 0.000% MACs, 64, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(relu): ReLU(0.0 GMac, 0.002% MACs, inplace=True)
)
(s5_ge1): GatherExpansion(
0.158 GMac, 8.099% MACs,
(conv1): conv2d(
0.147 GMac, 7.530% MACs,
(conv): Conv2d(0.146 GMac, 7.491% MACs, 64, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn): BatchNorm2d(0.001 GMac, 0.026% MACs, 384, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.013% MACs, inplace=True)
)
(dwconv2): conv2d(
0.001 GMac, 0.036% MACs,
(conv): Conv2d(0.001 GMac, 0.029% MACs, 384, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=384, bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.007% MACs, 384, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(conv_11): conv2d(
0.008 GMac, 0.418% MACs,
(conv): Conv2d(0.008 GMac, 0.416% MACs, 384, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.002% MACs, 128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(dwconv1): conv2d(
0.001 GMac, 0.036% MACs,
(conv): Conv2d(0.001 GMac, 0.029% MACs, 384, 384, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=384, bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.007% MACs, 384, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(dwconv3): conv2d(
0.0 GMac, 0.006% MACs,
(conv): Conv2d(0.0 GMac, 0.005% MACs, 64, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=64, bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.001% MACs, 64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(conv_12): conv2d(
0.001 GMac, 0.072% MACs,
(conv): Conv2d(0.001 GMac, 0.069% MACs, 64, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.002% MACs, 128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(relu): ReLU(0.0 GMac, 0.001% MACs, inplace=True)
)
(s5_ge2): GatherExpansion(
0.164 GMac, 8.418% MACs,
(conv1): conv2d(
0.146 GMac, 7.511% MACs,
(conv): Conv2d(0.146 GMac, 7.491% MACs, 128, 768, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.013% MACs, 768, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.007% MACs, inplace=True)
)
(dwconv2): conv2d(
0.001 GMac, 0.072% MACs,
(conv): Conv2d(0.001 GMac, 0.059% MACs, 768, 768, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=768, bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.013% MACs, 768, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(conv_11): conv2d(
0.016 GMac, 0.835% MACs,
(conv): Conv2d(0.016 GMac, 0.832% MACs, 768, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.002% MACs, 128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(dwconv1): conv2d(
0.0 GMac, 0.000% MACs,
(conv): Conv2d(0.0 GMac, 0.000% MACs, 768, 768, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=768, bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 768, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(dwconv3): conv2d(
0.0 GMac, 0.000% MACs,
(conv): Conv2d(0.0 GMac, 0.000% MACs, 128, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=128, bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(conv_12): conv2d(
0.0 GMac, 0.000% MACs,
(conv): Conv2d(0.0 GMac, 0.000% MACs, 128, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(relu): ReLU(0.0 GMac, 0.001% MACs, inplace=True)
)
(s5_ge3): GatherExpansion(
0.164 GMac, 8.418% MACs,
(conv1): conv2d(
0.146 GMac, 7.511% MACs,
(conv): Conv2d(0.146 GMac, 7.491% MACs, 128, 768, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.013% MACs, 768, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.007% MACs, inplace=True)
)
(dwconv2): conv2d(
0.001 GMac, 0.072% MACs,
(conv): Conv2d(0.001 GMac, 0.059% MACs, 768, 768, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=768, bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.013% MACs, 768, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(conv_11): conv2d(
0.016 GMac, 0.835% MACs,
(conv): Conv2d(0.016 GMac, 0.832% MACs, 768, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.002% MACs, 128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(dwconv1): conv2d(
0.0 GMac, 0.000% MACs,
(conv): Conv2d(0.0 GMac, 0.000% MACs, 768, 768, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=768, bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 768, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(dwconv3): conv2d(
0.0 GMac, 0.000% MACs,
(conv): Conv2d(0.0 GMac, 0.000% MACs, 128, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=128, bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(conv_12): conv2d(
0.0 GMac, 0.000% MACs,
(conv): Conv2d(0.0 GMac, 0.000% MACs, 128, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(relu): ReLU(0.0 GMac, 0.001% MACs, inplace=True)
)
(s5_ge4): GatherExpansion(
0.164 GMac, 8.418% MACs,
(conv1): conv2d(
0.146 GMac, 7.511% MACs,
(conv): Conv2d(0.146 GMac, 7.491% MACs, 128, 768, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.013% MACs, 768, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.007% MACs, inplace=True)
)
(dwconv2): conv2d(
0.001 GMac, 0.072% MACs,
(conv): Conv2d(0.001 GMac, 0.059% MACs, 768, 768, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=768, bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.013% MACs, 768, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(conv_11): conv2d(
0.016 GMac, 0.835% MACs,
(conv): Conv2d(0.016 GMac, 0.832% MACs, 768, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.002% MACs, 128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(dwconv1): conv2d(
0.0 GMac, 0.000% MACs,
(conv): Conv2d(0.0 GMac, 0.000% MACs, 768, 768, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=768, bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 768, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(dwconv3): conv2d(
0.0 GMac, 0.000% MACs,
(conv): Conv2d(0.0 GMac, 0.000% MACs, 128, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=128, bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(conv_12): conv2d(
0.0 GMac, 0.000% MACs,
(conv): Conv2d(0.0 GMac, 0.000% MACs, 128, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(relu): ReLU(0.0 GMac, 0.001% MACs, inplace=True)
)
(s5_ge5): GatherExpansion(
0.027 GMac, 1.406% MACs,
(conv1): conv2d(
0.024 GMac, 1.252% MACs,
(conv): Conv2d(0.024 GMac, 1.249% MACs, 128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.002% MACs, 128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.001% MACs, inplace=True)
)
(dwconv2): conv2d(
0.0 GMac, 0.012% MACs,
(conv): Conv2d(0.0 GMac, 0.010% MACs, 128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=128, bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.002% MACs, 128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(conv_11): conv2d(
0.003 GMac, 0.141% MACs,
(conv): Conv2d(0.003 GMac, 0.139% MACs, 128, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.002% MACs, 128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(dwconv1): conv2d(
0.0 GMac, 0.000% MACs,
(conv): Conv2d(0.0 GMac, 0.000% MACs, 128, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=128, bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(dwconv3): conv2d(
0.0 GMac, 0.000% MACs,
(conv): Conv2d(0.0 GMac, 0.000% MACs, 128, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=128, bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(conv_12): conv2d(
0.0 GMac, 0.000% MACs,
(conv): Conv2d(0.0 GMac, 0.000% MACs, 128, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(relu): ReLU(0.0 GMac, 0.001% MACs, inplace=True)
)
(seghead1): SegHead(
0.027 GMac, 1.370% MACs,
(conv): conv2d(
0.025 GMac, 1.275% MACs,
(conv): Conv2d(0.024 GMac, 1.249% MACs, 16, 16, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.017% MACs, 16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.009% MACs, inplace=True)
)
(classes): conv2d(
0.002 GMac, 0.095% MACs,
(conv): Conv2d(0.002 GMac, 0.095% MACs, 16, 11, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 11, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
)
(seghead2): SegHead(
0.026 GMac, 1.309% MACs,
(conv): conv2d(
0.025 GMac, 1.262% MACs,
(conv): Conv2d(0.024 GMac, 1.249% MACs, 32, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.009% MACs, 32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.004% MACs, inplace=True)
)
(classes): conv2d(
0.001 GMac, 0.048% MACs,
(conv): Conv2d(0.001 GMac, 0.048% MACs, 32, 11, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 11, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
)
(seghead3): SegHead(
0.025 GMac, 1.279% MACs,
(conv): conv2d(
0.024 GMac, 1.255% MACs,
(conv): Conv2d(0.024 GMac, 1.249% MACs, 64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.004% MACs, 64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.002% MACs, inplace=True)
)
(classes): conv2d(
0.0 GMac, 0.024% MACs,
(conv): Conv2d(0.0 GMac, 0.024% MACs, 64, 11, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 11, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
)
(seghead4): SegHead(
0.025 GMac, 1.264% MACs,
(conv): conv2d(
0.024 GMac, 1.252% MACs,
(conv): Conv2d(0.024 GMac, 1.249% MACs, 128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.002% MACs, 128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.001% MACs, inplace=True)
)
(classes): conv2d(
0.0 GMac, 0.012% MACs,
(conv): Conv2d(0.0 GMac, 0.012% MACs, 128, 11, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 11, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
)
(ceb): ContextEmbeddingBlock(
0.024 GMac, 1.251% MACs,
(gap): AdaptiveAvgPool2d(0.0 GMac, 0.001% MACs, output_size=1)
(bn1): BatchNorm2d(0.0 GMac, 0.000% MACs, 128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(conv1): conv2d(
0.0 GMac, 0.001% MACs,
(conv): Conv2d(0.0 GMac, 0.001% MACs, 128, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(conv2): conv2d(
0.024 GMac, 1.249% MACs,
(conv): Conv2d(0.024 GMac, 1.249% MACs, 128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
)
)
(sma): MA(
0.001 GMac, 0.074% MACs,
(query_project): conv2d(
0.0 GMac, 0.018% MACs,
(conv): Conv2d(0.0 GMac, 0.017% MACs, 128, 16, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(key_project): conv2d(
0.0 GMac, 0.018% MACs,
(conv): Conv2d(0.0 GMac, 0.017% MACs, 128, 16, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(value_project): conv2d(
0.0 GMac, 0.018% MACs,
(conv): Conv2d(0.0 GMac, 0.017% MACs, 128, 16, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
(out_project): conv2d(
0.0 GMac, 0.021% MACs,
(conv): Conv2d(0.0 GMac, 0.017% MACs, 16, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.002% MACs, 128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.001% MACs, inplace=True)
)
)
(seghead): SegHead(
0.394 GMac, 20.219% MACs,
(conv): conv2d(
0.39 GMac, 20.029% MACs,
(conv): Conv2d(0.389 GMac, 19.977% MACs, 128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(bn): BatchNorm2d(0.001 GMac, 0.035% MACs, 128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.017% MACs, inplace=True)
)
(classes): conv2d(
0.004 GMac, 0.191% MACs,
(conv): Conv2d(0.004 GMac, 0.191% MACs, 128, 11, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn): BatchNorm2d(0.0 GMac, 0.000% MACs, 11, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(relu): ReLU(0.0 GMac, 0.000% MACs, inplace=True)
)
)
)
1.948688144 GMac 5.002958 19.08477020263672 MB
[INFO] Register count_convNd() for <class 'torch.nn.modules.conv.Conv2d'>.
[INFO] Register count_bn() for <class 'torch.nn.modules.batchnorm.BatchNorm2d'>.
[INFO] Register zero_ops() for <class 'torch.nn.modules.activation.ReLU'>.
[WARN] Cannot find rule for <class '__main__.conv2d'>. Treat it as zero Macs and zero Params.
[WARN] Cannot find rule for <class '__main__.DetailedBranch'>. Treat it as zero Macs and zero Params.
[INFO] Register zero_ops() for <class 'torch.nn.modules.pooling.MaxPool2d'>.
[WARN] Cannot find rule for <class '__main__.StemBlock'>. Treat it as zero Macs and zero Params.
[WARN] Cannot find rule for <class '__main__.GatherExpansion'>. Treat it as zero Macs and zero Params.
[WARN] Cannot find rule for <class '__main__.SegHead'>. Treat it as zero Macs and zero Params.
[INFO] Register count_adap_avgpool() for <class 'torch.nn.modules.pooling.AdaptiveAvgPool2d'>.
[WARN] Cannot find rule for <class '__main__.ContextEmbeddingBlock'>. Treat it as zero Macs and zero Params.
[WARN] Cannot find rule for <class '__main__.SemanticBranch'>. Treat it as zero Macs and zero Params.
[WARN] Cannot find rule for <class '__main__.MA'>. Treat it as zero Macs and zero Params.
[WARN] Cannot find rule for <class '__main__.BiSeNetV2A51'>. Treat it as zero Macs and zero Params.
1.942682144 GMac 4.371712 16.6767578125 MB
Process finished with exit code 0
'''
|
[
"[email protected]"
] | |
7d400123827930ea4426b72ad394fba68219b856
|
6de622e922361beac91e3cfc4cd67829451bc095
|
/wyzepal/wyzepal/examples/message-history
|
fb650052f4bdae7a8b5fe58bf6694d9755660181
|
[] |
no_license
|
WyzePal/api
|
fd1f1771aa9e1bfeb5d5de102b3f525d905fae29
|
8646c90148885b1c4286557bd62cfcf844b9d107
|
refs/heads/master
| 2020-03-23T15:25:53.559240 | 2019-03-08T23:54:00 | 2019-03-08T23:54:00 | 141,747,661 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 415 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import wyzepal
usage = """message-history <message_id> [options]
Example: message-history 42
"""
parser = wyzepal.add_default_arguments(argparse.ArgumentParser(usage=usage))
parser.add_argument('message_id', type=int)
options = parser.parse_args()
client = wyzepal.init_from_options(options)
print(client.get_message_history(options.message_id))
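# The call returns the server's JSON payload as a dict; on success it should
# look roughly like the following (illustrative, not captured output):
#   {'result': 'success', 'msg': '', 'message_history': [...]}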
|
[
"[email protected]"
] | ||
42505fe70d5e1e62150a3a5fe90404a7a83fd63d
|
42516b0348936e257d04113c2e632dc72ba58e91
|
/test_env/test_suit_ui_native_apk/test_suit_ui_native_apk_case05.py
|
dabb1ffacba1db255a1a1217d9e02bc7a0be878f
|
[] |
no_license
|
wwlwwlqaz/Qualcomm
|
2c3a225875fba955d771101f3c38ca0420d8f468
|
a04b717ae437511abae1e7e9e399373c161a7b65
|
refs/heads/master
| 2021-01-11T19:01:06.123677 | 2017-04-05T07:57:21 | 2017-04-05T07:57:21 | 79,292,426 | 1 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,095 |
py
|
# coding: utf-8
'''
check native apk: alarm
@author: U{huitingn<[email protected]>}
@version: version 1.0.0
@requires: python 2.7+
@license:
@see: L{TestCaseBase <TestCaseBase>}
@note:
@attention:
@bug:
@warning:
'''
import fs_wrapper
import settings.common as SC
from case_utility import *
from logging_wrapper import log_test_case, take_screenshot
from test_case_base import TestCaseBase
from qrd_shared.case import *
############################################
# author:
# [email protected]
# function:
# check native clock
# precondition:
#
# steps:
# launch native clock
# set alarm
# wait for alarm
############################################
import sys, string, os, shutil
from threading import Thread
import commands
import re, subprocess, shlex
import datetime
from test_suit_ui_native_apk import *
class test_suit_ui_native_apk_case05(TestCaseBase):
'''
    test_suit_ui_native_apk_case05 checks the native clock app: can it raise an alarm at the expected time.
@see: L{TestCaseBase <TestCaseBase>}
'''
tag = 'ui_native_apk_case05'
def test_case_main(self, case_results):
'''
main entry.
@type case_results: tuple
@param case_results: record some case result information
'''
case_flag = False
pre_check()
#
# read what's the time now
#
        try:
            (hour, minute, a_p, cur_time) = cur_time_in_mobilephone()
        except:
            # cur_time may be unbound when the call itself fails, so do not
            # append it to the log message
            set_cannot_continue()
            log_test_case(self.tag, "before SET ALARM: time format may be wrong")
#
# STEP 1: launch alarm
#
if can_continue():
launcher.launch_from_launcher('clock')
#
# STEP 2: set alarm
#
if can_continue():
# new alarm
click_view_by_container_id('action_bar_container', 'android.widget.ImageView', 0)
click_button_by_id('fab') # alarm_add_alarm
# set the alarm. e.g.:now(12:56AM)set( 1:00AM)
# now( PM)set( PM)
# now(11:56AM)set(12:00PM)
# now( PM)set( AM)
#
            # calculate what time should be set
#
            # the minute decides whether the hour rolls over
            # (>= so that :54, which rounds up to :00, also moves to the next hour)
            if (int(minute) + 1 + 5) >= 60:
                boundary = True
            else:
                boundary = False
            setMinute = (int(minute) + 1 + 5) / 5
            setHour = int(hour) + boundary
            if setHour % 12 == 0 and boundary:
                # crossing 12 o'clock flips AM <-> PM
                apDict = {'True': 'pm', 'False': 'am'}
                setAP = apDict[str(a_p == 'AM')]
            else:
                setAP = a_p.lower()
            if setHour > 12:
                # wrap 13 back to 1 so the lookup stays on the 12-hour clock plate
                setHour -= 12
            setMinute = '%02.0f' % (setMinute * 5 % 60)
            setHour = str(setHour)
log_test_case(self.tag, "SET hour: " + setHour + " minute: " + setMinute + " ap: " + setAP)
# set alarm
click(CLOCK_PLATE['HOUR'][setHour][0], CLOCK_PLATE['HOUR'][setHour][1])
click(CLOCK_PLATE['MINUTE'][setMinute][0], CLOCK_PLATE['MINUTE'][setMinute][1])
# click(CLOCK_PLATE['A_P'][setAP][0],CLOCK_PLATE['A_P'][setAP][1])
click_textview_by_text(setAP.upper())
#
# check if alarm is set correctly
#
if get_view_text_by_id(VIEW_TEXT_VIEW, 'hours') == setHour \
and get_view_text_by_id(VIEW_TEXT_VIEW, 'minutes') == setMinute:
# and get_view_text_by_id(VIEW_TEXT_VIEW,'ampm_label')==setAP
click_button_by_text('OK')
else:
set_cannot_continue()
log_test_case(self.tag, "SET ALARM: h,m,ap At least one of them is clicked wrong")
#
# STEP 3: wait for alarm
#
if can_continue():
send_key(KEY_HOME)
sleep(2)
send_key(KEYCODE_POWER)
sleep(2)
func = lambda:is_view_enabled_by_id(VIEW_IMAGE_VIEW, 'alarm', isScrollable=0)
if wait_for_fun(func, True, timeout=300, sleeptime=10):
a = get_view_text_by_id(VIEW_TEXT_VIEW, 'digital_clock', isScrollable=0)
                if a:
                    case_flag = True
startX = int(240.0 / 480 * 100)
startY = int(590.0 / 855 * 100)
endX = int(400.0 / 480 * 100)
endY = int(590.0 / 855 * 100)
drag_by_param(startX, startY, endX, endY, 10)
#
        # STEP 4: exit
#
exit_cur_case(self.tag)
log_test_case(self.tag, "case_flag = " + str(case_flag))
if case_flag:
qsst_log_case_status(STATUS_SUCCESS, "" , SEVERITY_HIGH)
else:
qsst_log_case_status(STATUS_FAILED, "native alarm is failed", SEVERITY_HIGH)
case_results.append((self.case_config_map[fs_wrapper.CASE_NAME_ATTR], can_continue()))
|
[
"[email protected]"
] | |
7f07de87a674fce74e537b03815c9f0175773dfd
|
3a9f2b3d79cf214704829427ee280f4b49dca70a
|
/saigon/rat/RuckusAutoTest/tests/zd/CB_AP_CLI_Check_Wlans.py
|
93b2c57d9cb88aa0a0d9f29f9af7f3c9188362ed
|
[] |
no_license
|
jichunwei/MyGitHub-1
|
ae0c1461fe0a337ef459da7c0d24d4cf8d4a4791
|
f826fc89a030c6c4e08052d2d43af0b1b4b410e3
|
refs/heads/master
| 2021-01-21T10:19:22.900905 | 2016-08-20T03:34:52 | 2016-08-20T03:34:52 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,144 |
py
|
'''
Description:
Prerequisite (Assumptions about the state of the testbed/DUT):
1. Build under test is loaded on the AP and Zone Director
Required components: 'RuckusAP'
Test parameters:
Result type: PASS/FAIL
Results: PASS:
FAIL:
Messages: If FAIL the test script returns a message related to the criteria that is not satisfied
Test procedure:
1. Config:
    - initialize parameters
2. Test:
    - check that the expected number of SSIDs is up on the AP
3. Cleanup:
    - None
How it was tested:
Created on 2013-1-10
@author: [email protected]
'''
import logging
from RuckusAutoTest.models import Test
from RuckusAutoTest.components.lib.apcli import radiogroup
class CB_AP_CLI_Check_Wlans(Test):
required_components = ['RuckusAP']
parameters_description = {}
def _init_params(self, conf):
self.conf = dict(num_of_ssids=64,
ap_tag = 'AP_01'
)
self.conf.update(conf)
def _retrieve_carribag(self):
self.active_ap = self.carrierbag[self.conf.get('ap_tag')]['ap_ins']
def _update_carribag(self):
pass
def config(self, conf):
self._init_params(conf)
self._retrieve_carribag()
def test(self):
import time
st = time.time()
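# poll for up to 230 seconds for the expected number of WLANs to come up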
while time.time() - st < 230:
wlan_list = radiogroup.get_wlanlist(self.active_ap)
cnt = 0
for wlan in wlan_list:
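# count only AP-type WLANs that are up with a real BSSID, excluding mesh links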
if ('AP' == wlan.get('type') and 'up' == wlan.get('status')
        and '00:00:00:00:00:00' != wlan.get('bssid')
        and 'mesh' not in wlan.get('name')):
cnt += 1
if self.conf.get('num_of_ssids') == cnt:
return self.returnResult('PASS', 'The WLANs status is correct')
else:
time.sleep(10)
if 'wlan_list' in locals():
logging.info(wlan_list)
return self.returnResult('FAIL', 'The WLANs status is incorrect, please check')
def cleanup(self):
self._update_carribag()
|
[
"[email protected]"
] | |
3d7c87091f35d690835e37012b967b52d9e57aa6
|
dc280634cd9c6601c1d35cc31debc63fe4d4d88d
|
/twisted/plugins/anchore_simplequeue.py
|
bcd31006b26ab16231db135609db5203a309cb19
|
[
"Apache-2.0"
] |
permissive
|
roachmd/anchore-engine
|
9fe5166bbce00471516730c270b9dab7658f38d2
|
521d6796778139a95f51542670714205c2735a81
|
refs/heads/master
| 2020-03-26T01:22:25.812770 | 2018-08-15T06:10:18 | 2018-08-15T06:10:18 | 144,364,236 | 0 | 0 |
Apache-2.0
| 2018-08-11T07:17:22 | 2018-08-11T07:17:22 | null |
UTF-8
|
Python
| false | false | 2,642 |
py
|
import sys
import os
from twisted.application.service import IServiceMaker
from twisted.plugin import IPlugin
from twisted.python import log
from twisted.python import usage
from zope.interface import implements
# anchore modules
from anchore_engine.configuration import localconfig
import anchore_engine.services.common
from anchore_engine.subsys import logger
class Options(usage.Options):
#class Options(usage.Options, strcred.AuthOptionMixin):
# supportedInterfaces = (credentials.IUsernamePassword,)
optParameters = [
["config", "c", None, "Configuration directory location."]
]
class AnchoreServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "anchore-simplequeue"
servicenames = ["simplequeue"]
description = "Anchore Container Image Scanner Service: " + ','.join(servicenames)
options = Options
def makeService(self, options):
slist = []
try:
configfile = os.path.join(options['config'], 'config.yaml')
config = localconfig.read_config(configfile=configfile)
except Exception as err:
log.err("cannot load local configuration: " + str(err))
raise err
log_level = 'INFO'
log_to_db = False
if 'log_level' in config:
log_level = config['log_level']
if 'log_to_db' in config:
log_to_db = config['log_to_db']
slist = self.servicenames
try:
config_services = config['services']
isEnabled = False
for sname in slist:
if 'log_level' in config_services[sname]:
log_level = config_services[sname]['log_level']
if config_services[sname]['enabled']:
isEnabled = True
break
if not isEnabled:
log.err("no services in list ("+str(self.servicenames)+") are enabled in configuration file: shutting down")
sys.exit(0)
except Exception as err:
log.err("error checking for enabled services, check config file - exception: " + str(err))
raise Exception("error checking for enabled services, check config file - exception: " + str(err))
try:
logger.set_log_level(log_level, log_to_db=log_to_db)
except Exception as err:
log.err("exception while initializing logger - exception: " + str(err))
logger.set_log_level('INFO')
r = anchore_engine.services.common.makeService(slist, options)
return(r)
serviceMaker = AnchoreServiceMaker()
|
[
"[email protected]"
] | |
aca0f8da76438bb92344a411b386bae33b859c61
|
804d40b874e2eb1f2e9f3f3f124d507bf2b517f1
|
/env/Lib/site-packages/sqlalchemy/dialects/mysql/json.py
|
4961ef33b155b194515e5e298d98eb38f3cc976d
|
[] |
no_license
|
Nestor-Leyva/api-flask
|
86d5d3053e62767813aeacea5f30cc6a355320d0
|
55675a02fd79263518b0dfc731a2b4a2be50bd0d
|
refs/heads/main
| 2023-08-21T03:00:18.740097 | 2021-10-04T19:25:38 | 2021-10-04T19:25:38 | 413,517,941 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,121 |
py
|
# mysql/json.py
# Copyright (C) 2005-2021 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from __future__ import absolute_import
from ... import types as sqltypes
class JSON(sqltypes.JSON):
"""MySQL JSON type.
MySQL supports JSON as of version 5.7.
MariaDB supports JSON (as an alias for LONGTEXT) as of version 10.2.
The :class:`.mysql.JSON` type supports persistence of JSON values
as well as the core index operations provided by :class:`_types.JSON`
datatype, by adapting the operations to render the ``JSON_EXTRACT``
function at the database level.
.. versionadded:: 1.1
"""
pass
class _FormatTypeMixin(object):
def _format_value(self, value):
raise NotImplementedError()
def bind_processor(self, dialect):
super_proc = self.string_bind_processor(dialect)
def process(value):
value = self._format_value(value)
if super_proc:
value = super_proc(value)
return value
return process
def literal_processor(self, dialect):
super_proc = self.string_literal_processor(dialect)
def process(value):
value = self._format_value(value)
if super_proc:
value = super_proc(value)
return value
return process
class JSONIndexType(_FormatTypeMixin, sqltypes.JSON.JSONIndexType):
def _format_value(self, value):
if isinstance(value, int):
value = "$[%s]" % value
else:
value = '$."%s"' % value
return value
class JSONPathType(_FormatTypeMixin, sqltypes.JSON.JSONPathType):
def _format_value(self, value):
return "$%s" % (
"".join(
[
"[%s]" % elem if isinstance(elem, int) else '."%s"' % elem
for elem in value
]
)
)
|
[
"[email protected]"
] | |
463affe2ad1841699dfb96a3668f21e2c37af98e
|
0ce68cc0e9b93ae339e69f9f328e27262ebe0ab9
|
/art/attacks/inference/membership_inference/label_only_boundary_distance.py
|
be29c3e16d90254053b6f3121a0fd6f4623acf20
|
[
"MIT"
] |
permissive
|
igor-barinov/adversarial-robustness-toolbox
|
ede762bafa471d0d0664e82649f35bf0455c0d9a
|
10518daca0d5f2eb3bcd64022c2151cadc843443
|
refs/heads/main
| 2023-07-16T08:36:51.500788 | 2021-07-14T15:19:45 | 2021-07-14T15:19:45 | 376,598,416 | 1 | 0 |
MIT
| 2021-06-13T17:09:16 | 2021-06-13T17:09:15 | null |
UTF-8
|
Python
| false | false | 8,046 |
py
|
# MIT License
#
# Copyright (C) The Adversarial Robustness Toolbox (ART) Authors 2020
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
This module implements the Label-Only Inference Attack based on Decision Boundary.
| Paper link: https://arxiv.org/abs/2007.14321
"""
import logging
from typing import Optional, TYPE_CHECKING
import numpy as np
from art.attacks.attack import InferenceAttack
from art.estimators.estimator import BaseEstimator
from art.estimators.classification.classifier import ClassifierMixin
from art.utils import check_and_transform_label_format
if TYPE_CHECKING:
from art.utils import CLASSIFIER_TYPE
logger = logging.getLogger(__name__)
class LabelOnlyDecisionBoundary(InferenceAttack):
"""
Implementation of Label-Only Inference Attack based on Decision Boundary.
| Paper link: https://arxiv.org/abs/2007.14321
"""
attack_params = InferenceAttack.attack_params + [
"distance_threshold_tau",
]
_estimator_requirements = (BaseEstimator, ClassifierMixin)
def __init__(self, estimator: "CLASSIFIER_TYPE", distance_threshold_tau: Optional[float] = None):
"""
Create a `LabelOnlyDecisionBoundary` instance for Label-Only Inference Attack based on Decision Boundary.
:param estimator: A trained classification estimator.
:param distance_threshold_tau: Threshold distance for decision boundary. Samples with boundary distances larger
than threshold are considered members of the training dataset.
"""
super().__init__(estimator=estimator)
self.distance_threshold_tau = distance_threshold_tau
self._check_params()
def infer(self, x: np.ndarray, y: Optional[np.ndarray] = None, **kwargs) -> np.ndarray:
"""
Infer membership of input `x` in estimator's training data.
:param x: Input data.
:param y: True labels for `x`.
:Keyword Arguments for HopSkipJump:
* *norm*: Order of the norm. Possible values: "inf", np.inf or 2.
* *max_iter*: Maximum number of iterations.
* *max_eval*: Maximum number of evaluations for estimating gradient.
* *init_eval*: Initial number of evaluations for estimating gradient.
* *init_size*: Maximum number of trials for initial generation of adversarial examples.
* *verbose*: Show progress bars.
:return: An array holding the inferred membership status, 1 indicates a member and 0 indicates non-member.
"""
from art.attacks.evasion.hop_skip_jump import HopSkipJump
if y is None:
raise ValueError("Argument `y` is None, but this attack requires true labels `y` to be provided.")
if self.distance_threshold_tau is None:
raise ValueError(
"No value for distance threshold `distance_threshold_tau` provided. Please set"
"`distance_threshold_tau` or run method `calibrate_distance_threshold` on known training and test"
"dataset."
)
if "classifier" in kwargs:
raise ValueError("Keyword `classifier` in kwargs is not supported.")
if "targeted" in kwargs:
raise ValueError("Keyword `targeted` in kwargs is not supported.")
y = check_and_transform_label_format(y, self.estimator.nb_classes)
hsj = HopSkipJump(classifier=self.estimator, targeted=False, **kwargs)
x_adv = hsj.generate(x=x, y=y)
distance = np.linalg.norm((x_adv - x).reshape((x.shape[0], -1)), ord=2, axis=1)
y_pred = self.estimator.predict(x=x)
distance[np.argmax(y_pred, axis=1) != np.argmax(y, axis=1)] = 0
is_member = np.where(distance > self.distance_threshold_tau, 1, 0)
return is_member
def calibrate_distance_threshold(
self, x_train: np.ndarray, y_train: np.ndarray, x_test: np.ndarray, y_test: np.ndarray, **kwargs
):
"""
Calibrate distance threshold maximising the membership inference accuracy on `x_train` and `x_test`.
:param x_train: Training data.
:param y_train: Labels of training data `x_train`.
:param x_test: Test data.
:param y_test: Labels of test data `x_test`.
:Keyword Arguments for HopSkipJump:
* *norm*: Order of the norm. Possible values: "inf", np.inf or 2.
* *max_iter*: Maximum number of iterations.
* *max_eval*: Maximum number of evaluations for estimating gradient.
* *init_eval*: Initial number of evaluations for estimating gradient.
* *init_size*: Maximum number of trials for initial generation of adversarial examples.
* *verbose*: Show progress bars.
"""
from art.attacks.evasion.hop_skip_jump import HopSkipJump
if "classifier" in kwargs:
raise ValueError("Keyword `classifier` in kwargs is not supported.")
if "targeted" in kwargs:
raise ValueError("Keyword `targeted` in kwargs is not supported.")
y_train = check_and_transform_label_format(y_train, self.estimator.nb_classes)
y_test = check_and_transform_label_format(y_test, self.estimator.nb_classes)
hsj = HopSkipJump(classifier=self.estimator, targeted=False, **kwargs)
x_train_adv = hsj.generate(x=x_train, y=y_train)
x_test_adv = hsj.generate(x=x_test, y=y_test)
distance_train = np.linalg.norm((x_train_adv - x_train).reshape((x_train.shape[0], -1)), ord=2, axis=1)
distance_test = np.linalg.norm((x_test_adv - x_test).reshape((x_test.shape[0], -1)), ord=2, axis=1)
y_train_pred = self.estimator.predict(x=x_train)
y_test_pred = self.estimator.predict(x=x_test)
distance_train[np.argmax(y_train_pred, axis=1) != np.argmax(y_train, axis=1)] = 0
distance_test[np.argmax(y_test_pred, axis=1) != np.argmax(y_test, axis=1)] = 0
num_increments = 100
tau_increment = np.amax([np.amax(distance_train), np.amax(distance_test)]) / num_increments
acc_max = 0.0
distance_threshold_tau = 0.0
for i_tau in range(1, num_increments):
is_member_train = np.where(distance_train > i_tau * tau_increment, 1, 0)
is_member_test = np.where(distance_test > i_tau * tau_increment, 1, 0)
acc = (np.sum(is_member_train) + (is_member_test.shape[0] - np.sum(is_member_test))) / (
is_member_train.shape[0] + is_member_test.shape[0]
)
if acc > acc_max:
distance_threshold_tau = i_tau * tau_increment
acc_max = acc
self.distance_threshold_tau = distance_threshold_tau
def _check_params(self) -> None:
if self.distance_threshold_tau is not None and (
not isinstance(self.distance_threshold_tau, (int, float)) or self.distance_threshold_tau <= 0.0
):
raise ValueError("The distance threshold `distance_threshold_tau` needs to be a positive float.")
|
[
"[email protected]"
] | |
3413e4da2bfd468cf595d767feb1d2525b88ca04
|
cdfb77f5fb782ed8c731c6789ba154fefb34b830
|
/Seção 7/deque.py
|
1677e2b381400de0a3e586e8e583e9016cf89942
|
[] |
no_license
|
Yuri-Santiago/curso-udemy-python
|
7dc83e0ade45e8d959ce12b81098a13617e0a7ca
|
2af0ddad01b08f6afd0bfe35648212d4ee49f52b
|
refs/heads/master
| 2023-04-21T07:11:35.594753 | 2021-05-18T05:14:56 | 2021-05-18T05:14:56 | 350,412,085 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 474 |
py
|
"""
Módulo Collections - Deque
Podemos dizer que o Deque é uma lista de alta performance.
"""
from collections import deque
# Criando deques
deq = deque('yuri')
print(deq)
# Adicionando elementos no deque
deq.append('m') # Adciona no final
print(deq)
deq.appendleft('O') # Adiciona no começo
print(deq)
# Remover elementos
print(deq.pop()) # Remove e retorna o último elemento
print(deq)
print(deq.popleft()) # Remove e retorna o primeiro elemento
print(deq)
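# Extra illustrative example (not part of the original lesson): deques also
# support O(1) rotation in either direction
deq.rotate(1)  # moves the last element to the front
print(deq)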
|
[
"[email protected]"
] | |
05e7e379498d8da233aebf0da6207fd6cce541c8
|
fc9f4e6af9df3d05c507c9e114b956dfc26cd0f0
|
/chapters/2023/Qualité logicielle dans les notebooks Jupyter/assets/python-scripts/0002_get_basket_composition_for_date_range.py
|
f241d486a24f743a1215f5f1f900614880dfe8a5
|
[] |
no_license
|
RIMEL-UCA/RIMEL-UCA.github.io
|
0f1334bf9ba77a5ef59c63065f2dbe7c00d70f25
|
3009e69eab06c9dc4f6f2b7f866fa0b00f909516
|
refs/heads/master
| 2023-07-03T16:00:05.606141 | 2023-02-12T14:40:35 | 2023-02-12T14:40:35 | 230,765,683 | 7 | 29 | null | 2023-03-05T22:09:35 | 2019-12-29T15:04:00 |
Jupyter Notebook
|
UTF-8
|
Python
| false | false | 629 |
py
|
#!/usr/bin/env python
# coding: utf-8
# In[ ]:
import datetime as dt
import pandas as pd
from gs_quant.markets.baskets import Basket
from gs_quant.session import Environment, GsSession
# In[ ]:
client = 'CLIENT ID'
secret = 'CLIENT SECRET'
GsSession.use(Environment.PROD, client_id=client, client_secret=secret, scopes=('read_product_data',))
# In[ ]:
basket = Basket.get('GSMBXXXX') # substitute input with any identifier for a basket
# In[ ]:
position_sets = basket.get_position_sets(dt.date(2021, 1, 7), dt.date(2021, 1, 7))
position_sets = pd.concat([position_set.to_frame() for position_set in position_sets])
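# position_sets now holds one row per constituent; with identical start and end
# dates this is the basket composition for that single day (2021-01-07)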
|
[
"[email protected]"
] | |
c33a7c77d3a95e5a197628b12e94dbd929b9403d
|
4359911a3546134982c10fa2965a85e3eaf244c1
|
/test_3d_car_instance.py
|
34f7356def88cc6d5a44f77c0ef23b095232db3a
|
[] |
no_license
|
itsme-ranger/ApolloScape_InstanceSeg
|
154614eefbf4965204cfc243f77ea52a8830322f
|
816abea8992abdcd54f0fc155620c1b8da41ba2d
|
refs/heads/master
| 2022-04-10T02:48:38.147939 | 2020-02-08T09:36:11 | 2020-02-08T09:36:11 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,122 |
py
|
"""Perform inference on one or more datasets."""
import argparse
import cv2
import os
import pprint
import sys
import matplotlib
#matplotlib.use('Agg')
import torch
os.environ['CUDA_VISIBLE_DEVICES'] = '1'
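# pins inference to the GPU with index 1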
import _init_paths # pylint: disable=unused-import
from core.config import cfg, merge_cfg_from_file, merge_cfg_from_list, assert_and_infer_cfg
from core.test_engine import run_inference
import utils.logging
# OpenCL may be enabled by default in OpenCV3; disable it because it's not
# thread safe and causes unwanted GPU memory allocations.
cv2.ocl.setUseOpenCL(False)
def parse_args():
"""Parse in command line arguments"""
parser = argparse.ArgumentParser(description='Test a Fast R-CNN network')
######################## cfg #####################
#parser.add_argument('--cfg', dest='cfg_file', default='./configs/e2e_3d_car_101_FPN_triple_head_non_local_weighted.yaml', help='Config file for training (and optionally testing)')
#parser.add_argument('--cfg', dest='cfg_file', default='./configs/e2e_3d_car_101_FPN_triple_head_non_local.yaml', help='Config file for training (and optionally testing)')
parser.add_argument('--cfg', dest='cfg_file', default='./configs/e2e_3d_car_101_FPN_triple_head.yaml', help='Config file for training (and optionally testing)')
#parser.add_argument('--load_ckpt', default='/media/samsumg_1tb/ApolloScape/ApolloScape_InstanceSeg/e2e_3d_car_101_FPN_triple_head_non_local/Oct03-12-44-22_N606-TITAN32_step/ckpt/model_step55277.pth', help='checkpoint path to load')
#parser.add_argument('--load_ckpt', default='/media/samsumg_1tb/ApolloScape/ApolloScape_InstanceSeg/e2e_3d_car_101_FPN_triple_head_non_local/Oct03-12-44-22_N606-TITAN32_step/ckpt/model_step55277.pth', help='checkpoint path to load')
parser.add_argument('--load_ckpt', default='/media/samsumg_1tb/ApolloScape/ApolloScape_InstanceSeg/e2e_3d_car_101_FPN_triple_head/Sep09-23-42-21_N606-TITAN32_step/ckpt/model_step56534.pth', help='checkpoint path to load')
######################## ckpt #####################
parser.add_argument('--dataset', dest='dataset', default='ApolloScape', help='Dataset to use')
parser.add_argument('--dataset_dir', default='/media/samsumg_1tb/ApolloScape/ECCV2018_apollo/train/')
parser.add_argument('--load_detectron', help='path to the detectron weight pickle file')
parser.add_argument('--output_dir', help='output directory to save the testing results. If not provided defaults to [args.load_ckpt|args.load_detectron]/../test.')
parser.add_argument('--set', dest='set_cfgs', help='set config keys, will overwrite config in the cfg_file. See lib/core/config.py for all options', default=[], nargs='*')
parser.add_argument('--multi-gpu-testing', help='using multiple gpus for inference', default=False, action='store_true')
parser.add_argument('--vis', default=False, dest='vis', help='visualize detections', action='store_true')
parser.add_argument('--list_flag', default='val', help='Choosing between [val, test]')
parser.add_argument('--iou_ignore_threshold', default=0.5, help='Filter out by this iou')
return parser.parse_args()
if __name__ == '__main__':
if not torch.cuda.is_available():
sys.exit("Need a CUDA device to run the code.")
logger = utils.logging.setup_logging(__name__)
args = parse_args()
logger.info('Called with args:')
logger.info(args)
assert (torch.cuda.device_count() == 1) ^ bool(args.multi_gpu_testing)
assert bool(args.load_ckpt) ^ bool(args.load_detectron), 'Exactly one of --load_ckpt and --load_detectron should be specified.'
if args.output_dir is None:
ckpt_path = args.load_ckpt if args.load_ckpt else args.load_detectron
args.output_dir = os.path.join(os.path.dirname(os.path.dirname(ckpt_path)), 'test')
logger.info('Automatically set output directory to %s', args.output_dir)
if not os.path.exists(args.output_dir):
os.makedirs(args.output_dir)
cfg.VIS = args.vis
if args.cfg_file is not None:
merge_cfg_from_file(args.cfg_file)
if args.set_cfgs is not None:
merge_cfg_from_list(args.set_cfgs)
# Manually change the following:
cfg.TEST.DATASETS = ['Car3D',]
cfg.MODEL.NUM_CLASSES = 8
cfg.MODEL.NUMBER_CARS = 34
assert_and_infer_cfg()
logger.info('Testing with config:')
logger.info(pprint.pformat(cfg))
# For test_engine.multi_gpu_test_net_on_dataset
args.test_net_file, _ = os.path.splitext(__file__)
# manually set args.cuda
args.cuda = True
# Wudi hard coded the following range
if args.list_flag == 'test':
#args.range = [0, 1041]
i = 1
args.range = [i*125, (i+1)*125]
#args.range = [1000, 1041]
elif args.list_flag == 'val':
# args.range = [0, 206]
i = 3
args.range = [i*50, (i+1)*50]
args.range = [0, 206]
elif args.list_flag == 'train':
args.range = [4, 3888]
run_inference(
args,
ind_range=args.range,
multi_gpu_testing=args.multi_gpu_testing,
check_expected_results=True)
|
[
"[email protected]"
] | |
91f7ff0874a31b537fe71f816acf68fdd71e2cf6
|
321cc8a471b6cf7134edb292bc905bb6ffde1a13
|
/SaveOnTech/SnapDeal/apps.py
|
56b875222513c2b19800d3452d118de53f3b9598
|
[] |
no_license
|
Murgowt/SaveOnTech
|
da386748ab8c016f494dadb779a380c3de8cbd10
|
9d838adefdcc41a2130cd3e1d9ee729d967ce731
|
refs/heads/master
| 2020-07-21T20:42:09.586624 | 2019-10-09T14:28:36 | 2019-10-09T14:28:36 | 206,971,575 | 4 | 1 | null | 2019-10-09T14:28:03 | 2019-09-07T13:23:58 |
Python
|
UTF-8
|
Python
| false | false | 96 |
py
|
from django.apps import AppConfig
class SnapdealConfig(AppConfig):
name = 'SnapDeal'
|
[
"[email protected]"
] | |
d185dff0848d40badb6664ead738792964b15ce0
|
6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4
|
/pzQXHMqizBmaLDCHc_17.py
|
92b7a0873f57358e73a7f1a469c02827953bbc1a
|
[] |
no_license
|
daniel-reich/ubiquitous-fiesta
|
26e80f0082f8589e51d359ce7953117a3da7d38c
|
9af2700dbe59284f5697e612491499841a6c126f
|
refs/heads/master
| 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 450 |
py
|
def calculate_damage(your_type, opponent_type, attack, defense):
    d = {('fire', 'grass'): 2, ('fire', 'water'): 0.5, ('fire', 'electric'): 1,
         ('water', 'grass'): 0.5, ('water', 'electric'): 0.5, ('grass', 'electric'): 1,
         ('grass', 'fire'): 0.5, ('grass', 'water'): 2}
    # look the matchup up directly instead of scanning every key
    effectiveness = d.get((your_type, opponent_type), 1)
damage = 50 * (attack / defense) * effectiveness
return damage
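# Illustrative usage with made-up stats: fire vs. grass is super effective,
# so the result is 50 * (100 / 50) * 2 == 200.0
print(calculate_damage('fire', 'grass', 100, 50))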
|
[
"[email protected]"
] | |
9c2abe985a3afab65e9881bb794b7c361855e052
|
91b2fb1fb6df216f2e365c3366bab66a567fc70d
|
/Week09/每日一题/61. 旋转链表.py
|
5edbeab4480fb64151aaafd831aeb3035a12ede2
|
[] |
no_license
|
hrz123/algorithm010
|
d17aee642f03f607a7984beb099eec18f2de1c8e
|
817911d4282d2e226518b3533dff28282a91b3d4
|
refs/heads/master
| 2022-12-20T14:09:26.365781 | 2020-10-11T04:15:57 | 2020-10-11T04:15:57 | 270,178,423 | 1 | 0 | null | 2020-06-07T03:21:09 | 2020-06-07T03:21:09 | null |
UTF-8
|
Python
| false | false | 4,281 |
py
|
# 61. 旋转链表.py (Rotate List)
# Definition for singly-linked list.
class ListNode:
def __init__(self, x):
self.val = x
self.next = None
class Solution:
def rotateRight(self, head: ListNode, k: int) -> ListNode:
if not head:
return head
size = self.get_size(head)
k %= size
dummy = ListNode(0)
dummy.next = head
new_tail = dummy
for _ in range(size - k):
new_tail = new_tail.next
tail = new_tail
while tail.next:
tail = tail.next
tail.next = dummy.next
dummy.next = new_tail.next
new_tail.next = None
return dummy.next
def get_size(self, head):
s = 0
while head:
s += 1
head = head.next
return s
class Solution:
def rotateRight(self, head: ListNode, k: int) -> ListNode:
if not head:
return head
size = self.get_size(head)
dummy = ListNode(0)
dummy.next = head
k %= size
pre = dummy
for _ in range(size - k):
pre = pre.next
tail = pre
while tail.next:
tail = tail.next
tail.next = dummy.next
dummy.next = pre.next
pre.next = None
return dummy.next
def get_size(self, head):
c = 0
while head:
c += 1
head = head.next
return c
class Solution:
def rotateRight(self, head: ListNode, k: int) -> ListNode:
if not head or not head.next:
return head
h, size = head, 1
while h.next:
h = h.next
size += 1
h.next = head
k %= size
pre, cur = h, h.next
for _ in range(size - k):
pre, cur = cur, cur.next
pre.next = None
return cur
class Solution:
def rotateRight(self, head: ListNode, k: int) -> ListNode:
if not head or not head.next:
return head
h, size = head, 1
while h.next:
h, size = h.next, size + 1
h.next = head
k %= size
pre, cur = h, head
for _ in range(size - k):
pre, cur = cur, cur.next
pre.next = None
return cur
def main():
sol = Solution()
a = ListNode(1)
a.next = ListNode(2)
a.next.next = ListNode(3)
a.next.next.next = ListNode(4)
a.next.next.next.next = ListNode(5)
b = sol.rotateRight(a, 2)
def print_all(a):
while a:
print(a.val, end="->")
a = a.next
print_all(b)
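# expected output: 4->5->1->2->3->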
if __name__ == '__main__':
main()
|
[
"[email protected]"
] | |
fa19bb1aae904e95f252db7303fe8a7bf7953dce
|
fd882ae9ceab15868b102328ec33e9d3dbe73cb4
|
/devil/devil/android/sdk/adb_compatibility_devicetest.py
|
d4e63ade953b10753d42d875a72bf7d2d58d2169
|
[
"BSD-3-Clause"
] |
permissive
|
rohitrayachoti/catapult
|
9a904e4120dabdc61643897610ad894b06faa52b
|
cd2eebd327e35c839149f7a4d888b046d628df12
|
refs/heads/master
| 2022-03-16T11:47:18.234529 | 2020-10-09T20:10:31 | 2020-10-09T21:41:04 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 7,676 |
py
|
#!/usr/bin/env python
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import contextlib
import os
import posixpath
import random
import signal
import sys
import unittest
if sys.version_info.major >= 3:
basestring = str # pylint: disable=redefined-builtin
_CATAPULT_BASE_DIR = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', '..', '..', '..'))
sys.path.append(os.path.join(_CATAPULT_BASE_DIR, 'devil'))
from devil import devil_env
from devil.android import device_errors
from devil.android import device_test_case
from devil.android.sdk import adb_wrapper
from devil.utils import cmd_helper
from devil.utils import timeout_retry
_TEST_DATA_DIR = os.path.abspath(
os.path.join(os.path.dirname(__file__), 'test', 'data'))
def _hostAdbPids():
ps_status, ps_output = cmd_helper.GetCmdStatusAndOutput(
['pgrep', '-l', 'adb'])
if ps_status != 0:
return []
pids_and_names = (line.split() for line in ps_output.splitlines())
return [int(pid) for pid, name in pids_and_names if name == 'adb']
class AdbCompatibilityTest(device_test_case.DeviceTestCase):
@classmethod
def setUpClass(cls):
custom_adb_path = os.environ.get('ADB_PATH')
custom_deps = {
'config_type': 'BaseConfig',
'dependencies': {},
}
if custom_adb_path:
custom_deps['dependencies']['adb'] = {
'file_info': {
devil_env.GetPlatform(): {
'local_paths': [custom_adb_path],
},
},
}
devil_env.config.Initialize(configs=[custom_deps])
def testStartServer(self):
# Manually kill off any instances of adb.
adb_pids = _hostAdbPids()
for p in adb_pids:
os.kill(p, signal.SIGKILL)
self.assertIsNotNone(
timeout_retry.WaitFor(
lambda: not _hostAdbPids(), wait_period=0.1, max_tries=10))
# start the adb server
start_server_status, _ = cmd_helper.GetCmdStatusAndOutput(
[adb_wrapper.AdbWrapper.GetAdbPath(), 'start-server'])
# verify that the server is now online
self.assertEquals(0, start_server_status)
self.assertIsNotNone(
timeout_retry.WaitFor(
lambda: bool(_hostAdbPids()), wait_period=0.1, max_tries=10))
def testKillServer(self):
adb_pids = _hostAdbPids()
if not adb_pids:
adb_wrapper.AdbWrapper.StartServer()
adb_pids = _hostAdbPids()
self.assertGreaterEqual(len(adb_pids), 1)
kill_server_status, _ = cmd_helper.GetCmdStatusAndOutput(
[adb_wrapper.AdbWrapper.GetAdbPath(), 'kill-server'])
self.assertEqual(0, kill_server_status)
adb_pids = _hostAdbPids()
self.assertEqual(0, len(adb_pids))
def testDevices(self):
devices = adb_wrapper.AdbWrapper.Devices()
self.assertNotEqual(0, len(devices), 'No devices found.')
def getTestInstance(self):
"""Creates a real AdbWrapper instance for testing."""
return adb_wrapper.AdbWrapper(self.serial)
def testShell(self):
under_test = self.getTestInstance()
shell_ls_result = under_test.Shell('ls')
self.assertIsInstance(shell_ls_result, basestring)
self.assertTrue(bool(shell_ls_result))
def testShell_failed(self):
under_test = self.getTestInstance()
with self.assertRaises(device_errors.AdbShellCommandFailedError):
under_test.Shell('ls /foo/bar/baz')
def testShell_externalStorageDefined(self):
under_test = self.getTestInstance()
external_storage = under_test.Shell('echo $EXTERNAL_STORAGE')
self.assertIsInstance(external_storage, basestring)
self.assertTrue(posixpath.isabs(external_storage))
@contextlib.contextmanager
def getTestPushDestination(self, under_test):
"""Creates a temporary directory suitable for pushing to."""
external_storage = under_test.Shell('echo $EXTERNAL_STORAGE').strip()
if not external_storage:
self.skipTest('External storage not available.')
while True:
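# keep generating random names until one does not already exist on the device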
random_hex = hex(random.randint(0, 2**52))[2:]
name = 'tmp_push_test%s' % random_hex
path = posixpath.join(external_storage, name)
try:
under_test.Shell('ls %s' % path)
except device_errors.AdbShellCommandFailedError:
break
under_test.Shell('mkdir %s' % path)
try:
yield path
finally:
under_test.Shell('rm -rf %s' % path)
def testPush_fileToFile(self):
under_test = self.getTestInstance()
with self.getTestPushDestination(under_test) as push_target_directory:
src = os.path.join(_TEST_DATA_DIR, 'push_file.txt')
dest = posixpath.join(push_target_directory, 'push_file.txt')
with self.assertRaises(device_errors.AdbShellCommandFailedError):
under_test.Shell('ls %s' % dest)
under_test.Push(src, dest)
self.assertEquals(dest, under_test.Shell('ls %s' % dest).strip())
def testPush_fileToDirectory(self):
under_test = self.getTestInstance()
with self.getTestPushDestination(under_test) as push_target_directory:
src = os.path.join(_TEST_DATA_DIR, 'push_file.txt')
dest = push_target_directory
resulting_file = posixpath.join(dest, 'push_file.txt')
with self.assertRaises(device_errors.AdbShellCommandFailedError):
under_test.Shell('ls %s' % resulting_file)
under_test.Push(src, dest)
self.assertEquals(resulting_file,
under_test.Shell('ls %s' % resulting_file).strip())
def testPush_directoryToDirectory(self):
under_test = self.getTestInstance()
with self.getTestPushDestination(under_test) as push_target_directory:
src = os.path.join(_TEST_DATA_DIR, 'push_directory')
dest = posixpath.join(push_target_directory, 'push_directory')
with self.assertRaises(device_errors.AdbShellCommandFailedError):
under_test.Shell('ls %s' % dest)
under_test.Push(src, dest)
self.assertEquals(
sorted(os.listdir(src)),
sorted(under_test.Shell('ls %s' % dest).strip().split()))
def testPush_directoryToExistingDirectory(self):
under_test = self.getTestInstance()
with self.getTestPushDestination(under_test) as push_target_directory:
src = os.path.join(_TEST_DATA_DIR, 'push_directory')
dest = push_target_directory
resulting_directory = posixpath.join(dest, 'push_directory')
with self.assertRaises(device_errors.AdbShellCommandFailedError):
under_test.Shell('ls %s' % resulting_directory)
under_test.Shell('mkdir %s' % resulting_directory)
under_test.Push(src, dest)
self.assertEquals(
sorted(os.listdir(src)),
sorted(under_test.Shell('ls %s' % resulting_directory).split()))
# TODO(jbudorick): Implement tests for the following:
# taskset -c
# devices [-l]
# pull
# shell
# ls
# logcat [-c] [-d] [-v] [-b]
# forward [--remove] [--list]
# jdwp
# install [-l] [-r] [-s] [-d]
# install-multiple [-l] [-r] [-s] [-d] [-p]
# uninstall [-k]
# backup -f [-apk] [-shared] [-nosystem] [-all]
# restore
# wait-for-device
# get-state (BROKEN IN THE M SDK)
# get-devpath
# remount
# reboot
# reboot-bootloader
# root
# emu
@classmethod
def tearDownClass(cls):
    print()
    print()
    print('tested %s' % adb_wrapper.AdbWrapper.GetAdbPath())
    print('  %s' % adb_wrapper.AdbWrapper.Version())
    print('connected devices:')
    try:
      for d in adb_wrapper.AdbWrapper.Devices():
        print('  %s' % d)
    except device_errors.AdbCommandFailedError:
      print('  <failed to list devices>')
      raise
    finally:
      print()
if __name__ == '__main__':
sys.exit(unittest.main())
|
[
"[email protected]"
] | |
0e5570449a1fc4e0ffc94af74051560a30a4798b
|
2d0d39b7c066d6f98199e5968dfe2ad3f078eb4a
|
/Python3/Dictionaries/pop_popitems_update.py
|
37b39a2443fcda48fefc11e58ea3e32b96916f87
|
[
"MIT"
] |
permissive
|
norbertosanchezdichi/TIL
|
a232b8648eb41cfb6d74ed6f09affba94c7d6bbb
|
45304c1896725fb8ffbe957f4da5f9a377f7ad62
|
refs/heads/master
| 2023-05-26T20:04:50.146277 | 2023-05-20T17:10:44 | 2023-05-20T17:10:44 | 222,038,339 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 477 |
py
|
student = {
'name': 'Norberto',
'owns_parrot' : True,
'favorite_language': 'Python',
25 : 'my favorite number!'}
print(f'{student =}')
print(f'{student.pop(25) =}')
print(f'{student =}')
print(f'{student.popitem() =}')
print(f'{student.popitem() =}')
print(f'{student =}')
person = {'city': 'Los Angeles'}
print(f'{person =}')
person.update(student)
print(f'{person =}')
person['name'] = 'Otrebron'
print(f'{person =}')
person.update({})
print(f'{person =}')
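# Illustrative extra: update() also accepts keyword arguments
person.update(country='USA')
print(f'{person =}')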
|
[
"[email protected]"
] | |
e5ef6e99151307ff308b5f59eb0e82f785a86ec7
|
1d928c3f90d4a0a9a3919a804597aa0a4aab19a3
|
/python/sympy/2015/12/authors_update.py
|
be1a714c8269f40c37c1f3444979dfcf925bc2b3
|
[] |
no_license
|
rosoareslv/SED99
|
d8b2ff5811e7f0ffc59be066a5a0349a92cbb845
|
a062c118f12b93172e31e8ca115ce3f871b64461
|
refs/heads/main
| 2023-02-22T21:59:02.703005 | 2021-01-28T19:40:51 | 2021-01-28T19:40:51 | 306,497,459 | 1 | 1 | null | 2020-11-24T20:56:18 | 2020-10-23T01:18:07 | null |
UTF-8
|
Python
| false | false | 2,770 |
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
A tool generate AUTHORS. We started tracking authors before moving to git, so
we have to do some manual rearrangement of the git history authors in order to
get the order in AUTHORS.
"""
from __future__ import unicode_literals
from __future__ import print_function
import os
import sys
from fabric.api import local, env
from fabric.colors import yellow, blue, green, red
from fabric.utils import error
mailmap_update_path = os.path.abspath(__file__)
mailmap_update_dir = os.path.dirname(mailmap_update_path)
sympy_top = os.path.split(mailmap_update_dir)[0]
sympy_dir = os.path.join(sympy_top, 'sympy')
if os.path.isdir(sympy_dir):
sys.path.insert(0, sympy_top)
from sympy.utilities.misc import filldedent
try:
# Only works in newer versions of fabric
env.colorize_errors = True
except AttributeError:
pass
git_command = """git log --topo-order --reverse --format="%aN <%aE>" | awk ' !x[$0]++'"""
git_people = unicode(local(git_command, capture=True), 'utf-8').strip().split("\n")
from distutils.version import LooseVersion
git_ver = local('git --version', capture=True)[12:]
if LooseVersion(git_ver) < LooseVersion('1.8.4.2'):
print(yellow("Please use a newer git version >= 1.8.4.2"))
def move(l, i1, i2):
x = l.pop(i1)
l.insert(i2, x)
# Do the few changes necessary in order to reproduce AUTHORS:
move(git_people, 2, 0) # Ondřej Čertík
move(git_people, 42, 1) # Fabian Pedregosa
move(git_people, 22, 2) # Jurjen N.E. Bos
git_people.insert(4, "*Marc-Etienne M.Leveille <[email protected]>")
move(git_people, 10, 5) # Brian Jorgensen
git_people.insert(11, "*Ulrich Hecht <[email protected]>")
git_people.pop(12) # Kirill Smelkov
move(git_people, 12, 32) # Sebastian Krämer
git_people.insert(35, "*Case Van Horsen <[email protected]>")
git_people.insert(43, "*Dan <[email protected]>")
move(git_people, 57, 59) # Aaron Meurer
move(git_people, 58, 57) # Andrew Docherty
move(git_people, 67, 66) # Chris Smith
move(git_people, 79, 76) # Kevin Goodsell
git_people.insert(84, "*Chu-Ching Huang <[email protected]>")
move(git_people, 93, 92) # James Pearson
git_people.pop(226) # Sergey B Kirpichev
header = """\
All people who contributed to SymPy by sending at least a patch or more (in the
order of the date of their first contribution), except those who explicitly
didn't want to be mentioned. People with a * next to their names are not found
in the metadata of the git history. This file is generated automatically by
running `./bin/authors_update.py`.
"""
fd = open(os.path.realpath(os.path.join(__file__, os.path.pardir,
os.path.pardir, "AUTHORS")), "w")
fd.write(header)
fd.write("\n")
fd.write("\n".join(git_people).encode("utf8"))
fd.write("\n")
|
[
"[email protected]"
] | |
3f9e25b9467dc1ba529923a9c89c66a19ee6aacd
|
19cf0afe2ee84711337a661630974c74dd29d946
|
/CLOVER/nature2017/mcs_energy.py
|
314bfeee39f64e5f79f591c488f20ac33ab1abe2
|
[] |
no_license
|
zhpfu/proj_CEH
|
b253bfe9334a372af7d9de7ba21cb57e52b4f370
|
b4be27bdf1e4452baff276014da014b7ff89fddc
|
refs/heads/master
| 2022-12-26T11:27:48.126308 | 2020-10-02T19:06:36 | 2020-10-02T19:06:36 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 592 |
py
|
from decimal import Decimal
lv = 2.26*1e6 # latent heat of vaporization of water in J / kg
seconds_day = (60*60*24)
storm_acc_rain = 19 # mm / storm on average or kg / m2
wetted_area = 100000*1e6 # storm area in square metres (roughly a storm moving at 12 m/s over 24 hours)
storm_per_m2 = storm_acc_rain * lv # J / m2 = W s / m2
print('%.2E' % Decimal(storm_per_m2), 'J/m2')
lifetime = storm_per_m2 / seconds_day # W / m2
print('for every m2', lifetime) # W / m2
watt = lifetime*wetted_area
print('24h storm with 100000km2 wet', '%.2E' % Decimal(watt), 'Watt')
print('Watt hours', '%.2E' % Decimal(watt*24))
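# Sanity check (added): 19 kg/m2 * 2.26e6 J/kg ~= 4.29e7 J/m2; spread over
# 86400 s that is ~497 W/m2, matching the 'for every m2' figure printed above.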
|
[
"[email protected]"
] | |
9b6dba52e389e6fefc316b128ba47280ee641249
|
6b2a8dd202fdce77c971c412717e305e1caaac51
|
/solutions_5648941810974720_1/Python/iakl/solution_large.py
|
d656ecb440e5e40876b6bfd9698fe80d36b9c7e0
|
[] |
no_license
|
alexandraback/datacollection
|
0bc67a9ace00abbc843f4912562f3a064992e0e9
|
076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf
|
refs/heads/master
| 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,402 |
py
|
input_file = "A-large.in"
output_file = "A-large.out"
in_f = open(input_file)
T = -1
N = -1
tcs = []
for l in in_f:
sl = l.strip()
if len(sl) == 0:
continue
if T == -1:
T = int(sl)
continue
tcs.append(sl)
in_f.close()
out_f = open(output_file, "w")
def OutputTestCase(tcn, x):
out_f.write("Case #" + str(tcn) + ": " + str(x) + "\n")
def GetWord(d, w, c):
for ch in w:
d[ch] -= c
def CheckWord(d, ch, w, dgs, dg):
if ch in d and d[ch] > 0:
for i in range(0, d[ch]):
dgs.append(dg)
GetWord(d, w, d[ch])
def SolveTestCase(w):
d = dict()
for ch in w:
if not ch in d:
d[ch] = 0
d[ch] += 1
dgs = []
CheckWord(d, 'Z', 'ZERO', dgs, 0)
CheckWord(d, 'W', 'TWO', dgs, 2)
CheckWord(d, 'U', 'FOUR', dgs, 4)
CheckWord(d, 'X', 'SIX', dgs, 6)
CheckWord(d, 'G', 'EIGHT', dgs, 8)
CheckWord(d, 'H', 'THREE', dgs, 3)
CheckWord(d, 'S', 'SEVEN', dgs, 7)
CheckWord(d, 'O', 'ONE', dgs, 1)
CheckWord(d, 'V', 'FIVE', dgs, 5)
CheckWord(d, 'I', 'NINE', dgs, 9)
dgs = sorted(dgs)
r = ""
for d in dgs:
r = r + str(d)
return r
#print tcs
for i in range(0, T):
print "Case #" + str(i + 1)
r = SolveTestCase(tcs[i])
OutputTestCase(i + 1, r)
out_f.close()
|
[
"[email protected]"
] | |
d773bda77e43e291d97be37bee13c098710d31cf
|
7c551e749064b25af706b9167211050f8c6ad0a9
|
/signatures/windows/infostealer_keylogger.py
|
53b784f83f339cd94322d7f17b1539b9363316a7
|
[] |
no_license
|
dashjuvi/Cuckoo-Sandbox-vbox-win7
|
fa382828b4895c5e1ee60b37a840edd395bf1588
|
a3a26b539b06db15176deadeae46fc0476e78998
|
refs/heads/master
| 2020-03-12T08:33:06.231245 | 2019-01-14T23:09:02 | 2019-01-14T23:09:02 | 130,529,882 | 6 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,333 |
py
|
# Copyright (C) 2012 Thomas "stacks" Birn (@stacksth)
# Copyright (C) 2014 Claudio "nex" Guarnieri (@botherder)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from lib.cuckoo.common.abstracts import Signature
class Keylogger(Signature):
name = "infostealer_keylogger"
description = "Creates a windows hook that monitors keyboard input (keylogger)"
severity = 3
categories = ["generic"]
authors = ["Thomas Birn", "nex"]
minimum = "2.0"
filter_apinames = "SetWindowsHookExA", "SetWindowsHookExW"
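    # Note (added): hook identifiers 2 and 13 are the Windows WH_KEYBOARD and
    # WH_KEYBOARD_LL hook types; an empty thread identifier means the hook is
    # global (system-wide) rather than scoped to a single thread.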
def on_call(self, call, process):
if call["arguments"]["hook_identifier"] in [2, 13]:
if not call["arguments"]["thread_identifier"]:
self.mark_call()
return True
|
[
"[email protected]"
] | |
0120a0666de2492a42fec06064f920cf942ac669
|
0c6100dc16291986fab157ed0437f9203f306f1b
|
/2000- 3000/2356.py
|
6b7f8786e155e42660feea4e0eb1af9ab1f4caa9
|
[] |
no_license
|
Matuiss2/URI-ONLINE
|
4c93c139960a55f7cc719d0a3dcd6c6c716d3924
|
6cb20f0cb2a6d750d58b826e97c39c11bf8161d9
|
refs/heads/master
| 2021-09-17T09:47:16.209402 | 2018-06-30T08:00:14 | 2018-06-30T08:00:14 | 110,856,303 | 13 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 273 |
py
|
while True:
try:
seq1 = input()
seq2 = input()
except EOFError:
break
    if seq2 in seq1:  # If sequence 2 is contained in sequence 1, it is resistant
print("Resistente")
else:
print("Nao resistente")
|
[
"[email protected]"
] | |
9f40b8886fbf044b37fa72687a25fec88bcc83bd
|
f4b60f5e49baf60976987946c20a8ebca4880602
|
/lib/python2.7/site-packages/acimodel-1.3_2j-py2.7.egg/cobra/modelimpl/firmware/ctrlrfwstatusconttask.py
|
d1446fcbd5bc1d51ffddb1e46f07fdf5aef3fd10
|
[] |
no_license
|
cqbomb/qytang_aci
|
12e508d54d9f774b537c33563762e694783d6ba8
|
a7fab9d6cda7fadcc995672e55c0ef7e7187696e
|
refs/heads/master
| 2022-12-21T13:30:05.240231 | 2018-12-04T01:46:53 | 2018-12-04T01:46:53 | 159,911,666 | 0 | 0 | null | 2022-12-07T23:53:02 | 2018-12-01T05:17:50 |
Python
|
UTF-8
|
Python
| false | false | 17,201 |
py
|
# coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2016 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class CtrlrFwStatusContTask(Mo):
"""
Firmware status container task object for a controller, which contains tasks for managing the firmware of a controller.
"""
meta = ClassMeta("cobra.model.firmware.CtrlrFwStatusContTask")
meta.moClassName = "firmwareCtrlrFwStatusContTask"
meta.rnFormat = "firmwareCtrlrFwStatusContTask-%(id)s"
meta.category = MoCategory.TASK
meta.label = "None"
meta.writeAccessMask = 0x1
meta.readAccessMask = 0x20000000001
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = False
meta.parentClasses.add("cobra.model.action.TopomgrSubj")
meta.parentClasses.add("cobra.model.action.ObserverSubj")
meta.parentClasses.add("cobra.model.action.VmmmgrSubj")
meta.parentClasses.add("cobra.model.action.SnmpdSubj")
meta.parentClasses.add("cobra.model.action.ScripthandlerSubj")
meta.parentClasses.add("cobra.model.action.ConfelemSubj")
meta.parentClasses.add("cobra.model.action.EventmgrSubj")
meta.parentClasses.add("cobra.model.action.OspaelemSubj")
meta.parentClasses.add("cobra.model.action.VtapSubj")
meta.parentClasses.add("cobra.model.action.OshSubj")
meta.parentClasses.add("cobra.model.action.DhcpdSubj")
meta.parentClasses.add("cobra.model.action.ObserverelemSubj")
meta.parentClasses.add("cobra.model.action.DbgrelemSubj")
meta.parentClasses.add("cobra.model.action.VleafelemSubj")
meta.parentClasses.add("cobra.model.action.NxosmockSubj")
meta.parentClasses.add("cobra.model.action.DbgrSubj")
meta.parentClasses.add("cobra.model.action.AppliancedirectorSubj")
meta.parentClasses.add("cobra.model.action.OpflexpSubj")
meta.parentClasses.add("cobra.model.action.BootmgrSubj")
meta.parentClasses.add("cobra.model.action.AeSubj")
meta.parentClasses.add("cobra.model.action.PolicymgrSubj")
meta.parentClasses.add("cobra.model.action.ExtXMLApiSubj")
meta.parentClasses.add("cobra.model.action.OpflexelemSubj")
meta.parentClasses.add("cobra.model.action.PolicyelemSubj")
meta.parentClasses.add("cobra.model.action.IdmgrSubj")
meta.superClasses.add("cobra.model.action.RInst")
meta.superClasses.add("cobra.model.pol.ComplElem")
meta.superClasses.add("cobra.model.task.Inst")
meta.superClasses.add("cobra.model.action.Inst")
meta.rnPrefixes = [
('firmwareCtrlrFwStatusContTask-', True),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "data", "data", 52, PropCategory.REGULAR)
prop.label = "Data"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(0, 512)]
meta.props.add("data", prop)
prop = PropMeta("str", "descr", "descr", 33, PropCategory.REGULAR)
prop.label = "Description"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(0, 128)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("descr", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "endTs", "endTs", 15575, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("endTs", prop)
prop = PropMeta("str", "fail", "fail", 46, PropCategory.REGULAR)
prop.label = "Fail"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("fail", prop)
prop = PropMeta("str", "id", "id", 5645, PropCategory.REGULAR)
prop.label = "ID"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.isNaming = True
prop.defaultValue = 0
prop.defaultValueStr = "none"
prop._addConstant("none", "none", 0)
prop._addConstant("sendCtrlrRunning", "sendctrlrrunning", 69)
meta.props.add("id", prop)
prop = PropMeta("str", "invErrCode", "invErrCode", 49, PropCategory.REGULAR)
prop.label = "Remote Error Code"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("ERR-FILTER-illegal-format", None, 1140)
prop._addConstant("ERR-FSM-no-such-state", None, 1160)
prop._addConstant("ERR-HTTP-set-error", None, 1551)
prop._addConstant("ERR-HTTPS-set-error", None, 1552)
prop._addConstant("ERR-MO-CONFIG-child-object-cant-be-configured", None, 1130)
prop._addConstant("ERR-MO-META-no-such-object-class", None, 1122)
prop._addConstant("ERR-MO-PROPERTY-no-such-property", None, 1121)
prop._addConstant("ERR-MO-PROPERTY-value-out-of-range", None, 1120)
prop._addConstant("ERR-MO-access-denied", None, 1170)
prop._addConstant("ERR-MO-deletion-rule-violation", None, 1107)
prop._addConstant("ERR-MO-duplicate-object", None, 1103)
prop._addConstant("ERR-MO-illegal-containment", None, 1106)
prop._addConstant("ERR-MO-illegal-creation", None, 1105)
prop._addConstant("ERR-MO-illegal-iterator-state", None, 1100)
prop._addConstant("ERR-MO-illegal-object-lifecycle-transition", None, 1101)
prop._addConstant("ERR-MO-naming-rule-violation", None, 1104)
prop._addConstant("ERR-MO-object-not-found", None, 1102)
prop._addConstant("ERR-MO-resource-allocation", None, 1150)
prop._addConstant("ERR-aaa-config-modify-error", None, 1520)
prop._addConstant("ERR-acct-realm-set-error", None, 1513)
prop._addConstant("ERR-add-ctrlr", None, 1574)
prop._addConstant("ERR-admin-passwd-set", None, 1522)
prop._addConstant("ERR-api", None, 1571)
prop._addConstant("ERR-auth-issue", None, 1548)
prop._addConstant("ERR-auth-realm-set-error", None, 1514)
prop._addConstant("ERR-authentication", None, 1534)
prop._addConstant("ERR-authorization-required", None, 1535)
prop._addConstant("ERR-connect", None, 1572)
prop._addConstant("ERR-create-domain", None, 1562)
prop._addConstant("ERR-create-keyring", None, 1560)
prop._addConstant("ERR-create-role", None, 1526)
prop._addConstant("ERR-create-user", None, 1524)
prop._addConstant("ERR-delete-domain", None, 1564)
prop._addConstant("ERR-delete-role", None, 1528)
prop._addConstant("ERR-delete-user", None, 1523)
prop._addConstant("ERR-domain-set-error", None, 1561)
prop._addConstant("ERR-http-initializing", None, 1549)
prop._addConstant("ERR-incompat-ctrlr-version", None, 1568)
prop._addConstant("ERR-internal-error", None, 1540)
prop._addConstant("ERR-invalid-args", None, 1569)
prop._addConstant("ERR-invalid-domain-name", None, 1582)
prop._addConstant("ERR-ldap-delete-error", None, 1510)
prop._addConstant("ERR-ldap-get-error", None, 1509)
prop._addConstant("ERR-ldap-group-modify-error", None, 1518)
prop._addConstant("ERR-ldap-group-set-error", None, 1502)
prop._addConstant("ERR-ldap-set-error", None, 1511)
prop._addConstant("ERR-missing-method", None, 1546)
prop._addConstant("ERR-modify-ctrlr-access", None, 1567)
prop._addConstant("ERR-modify-ctrlr-dvs-version", None, 1576)
prop._addConstant("ERR-modify-ctrlr-rootcont", None, 1575)
prop._addConstant("ERR-modify-ctrlr-scope", None, 1573)
prop._addConstant("ERR-modify-ctrlr-trig-inventory", None, 1577)
prop._addConstant("ERR-modify-domain", None, 1563)
prop._addConstant("ERR-modify-domain-encapmode", None, 1581)
prop._addConstant("ERR-modify-domain-enfpref", None, 1578)
prop._addConstant("ERR-modify-domain-mcastpool", None, 1579)
prop._addConstant("ERR-modify-domain-mode", None, 1580)
prop._addConstant("ERR-modify-role", None, 1527)
prop._addConstant("ERR-modify-user", None, 1525)
prop._addConstant("ERR-modify-user-domain", None, 1565)
prop._addConstant("ERR-modify-user-role", None, 1532)
prop._addConstant("ERR-no-buf", None, 1570)
prop._addConstant("ERR-passwd-set-failure", None, 1566)
prop._addConstant("ERR-provider-group-modify-error", None, 1519)
prop._addConstant("ERR-provider-group-set-error", None, 1512)
prop._addConstant("ERR-radius-global-set-error", None, 1505)
prop._addConstant("ERR-radius-group-set-error", None, 1501)
prop._addConstant("ERR-radius-set-error", None, 1504)
prop._addConstant("ERR-request-timeout", None, 1545)
prop._addConstant("ERR-role-set-error", None, 1515)
prop._addConstant("ERR-secondary-node", None, 1550)
prop._addConstant("ERR-service-not-ready", None, 1539)
prop._addConstant("ERR-set-password-strength-check", None, 1543)
prop._addConstant("ERR-store-pre-login-banner-msg", None, 1521)
prop._addConstant("ERR-tacacs-enable-error", None, 1508)
prop._addConstant("ERR-tacacs-global-set-error", None, 1507)
prop._addConstant("ERR-tacacs-group-set-error", None, 1503)
prop._addConstant("ERR-tacacs-set-error", None, 1506)
prop._addConstant("ERR-user-account-expired", None, 1536)
prop._addConstant("ERR-user-set-error", None, 1517)
prop._addConstant("ERR-xml-parse-error", None, 1547)
prop._addConstant("communication-error", "communication-error", 1)
prop._addConstant("none", "none", 0)
meta.props.add("invErrCode", prop)
prop = PropMeta("str", "invErrDescr", "invErrDescr", 50, PropCategory.REGULAR)
prop.label = "Remote Error Description"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(0, 128)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("invErrDescr", prop)
prop = PropMeta("str", "invRslt", "invRslt", 48, PropCategory.REGULAR)
prop.label = "Remote Result"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "not-applicable"
prop._addConstant("capability-not-implemented-failure", "capability-not-implemented-failure", 16384)
prop._addConstant("capability-not-implemented-ignore", "capability-not-implemented-ignore", 8192)
prop._addConstant("capability-not-supported", "capability-not-supported", 32768)
prop._addConstant("capability-unavailable", "capability-unavailable", 65536)
prop._addConstant("end-point-failed", "end-point-failed", 32)
prop._addConstant("end-point-protocol-error", "end-point-protocol-error", 64)
prop._addConstant("end-point-unavailable", "end-point-unavailable", 16)
prop._addConstant("extend-timeout", "extend-timeout", 134217728)
prop._addConstant("failure", "failure", 1)
prop._addConstant("fru-identity-indeterminate", "fru-identity-indeterminate", 4194304)
prop._addConstant("fru-info-malformed", "fru-info-malformed", 8388608)
prop._addConstant("fru-not-ready", "fru-not-ready", 67108864)
prop._addConstant("fru-not-supported", "fru-not-supported", 536870912)
prop._addConstant("fru-state-indeterminate", "fru-state-indeterminate", 33554432)
prop._addConstant("fw-defect", "fw-defect", 256)
prop._addConstant("hw-defect", "hw-defect", 512)
prop._addConstant("illegal-fru", "illegal-fru", 16777216)
prop._addConstant("intermittent-error", "intermittent-error", 1073741824)
prop._addConstant("internal-error", "internal-error", 4)
prop._addConstant("not-applicable", "not-applicable", 0)
prop._addConstant("resource-capacity-exceeded", "resource-capacity-exceeded", 2048)
prop._addConstant("resource-dependency", "resource-dependency", 4096)
prop._addConstant("resource-unavailable", "resource-unavailable", 1024)
prop._addConstant("service-not-implemented-fail", "service-not-implemented-fail", 262144)
prop._addConstant("service-not-implemented-ignore", "service-not-implemented-ignore", 131072)
prop._addConstant("service-not-supported", "service-not-supported", 524288)
prop._addConstant("service-protocol-error", "service-protocol-error", 2097152)
prop._addConstant("service-unavailable", "service-unavailable", 1048576)
prop._addConstant("sw-defect", "sw-defect", 128)
prop._addConstant("task-reset", "task-reset", 268435456)
prop._addConstant("timeout", "timeout", 8)
prop._addConstant("unidentified-fail", "unidentified-fail", 2)
meta.props.add("invRslt", prop)
prop = PropMeta("str", "lcOwn", "lcOwn", 9, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "local"
prop._addConstant("implicit", "implicit", 4)
prop._addConstant("local", "local", 0)
prop._addConstant("policy", "policy", 1)
prop._addConstant("replica", "replica", 2)
prop._addConstant("resolveOnBehalf", "resolvedonbehalf", 3)
meta.props.add("lcOwn", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "oDn", "oDn", 51, PropCategory.REGULAR)
prop.label = "Subject DN"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("oDn", prop)
prop = PropMeta("str", "operSt", "operSt", 15674, PropCategory.REGULAR)
prop.label = "Completion"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "scheduled"
prop._addConstant("cancelled", "cancelled", 3)
prop._addConstant("completed", "completed", 2)
prop._addConstant("crashsuspect", "crash-suspect", 7)
prop._addConstant("failed", "failed", 4)
prop._addConstant("indeterminate", "indeterminate", 5)
prop._addConstant("processing", "processing", 1)
prop._addConstant("ready", "ready", 8)
prop._addConstant("scheduled", "scheduled", 0)
prop._addConstant("suspended", "suspended", 6)
meta.props.add("operSt", prop)
prop = PropMeta("str", "originMinority", "originMinority", 54, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = False
prop.defaultValueStr = "no"
prop._addConstant("no", None, False)
prop._addConstant("yes", None, True)
meta.props.add("originMinority", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "runId", "runId", 45, PropCategory.REGULAR)
prop.label = "ID"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("runId", prop)
prop = PropMeta("str", "startTs", "startTs", 36, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("startTs", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
prop = PropMeta("str", "try", "try", 15574, PropCategory.REGULAR)
prop.label = "Try"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("try", prop)
prop = PropMeta("str", "ts", "ts", 47, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("ts", prop)
meta.namingProps.append(getattr(meta.props, "id"))
def __init__(self, parentMoOrDn, id, markDirty=True, **creationProps):
namingVals = [id]
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
|
[
"[email protected]"
] | |
accfc0d6128d53607169652f5d408409eb9af9b3
|
40404006721939f6f65f74d289d6f4614d3bf8bc
|
/identYwaf.py
|
148d9f24b52c8a9684d80c43a2ef6c69af051d61
|
[
"MIT"
] |
permissive
|
zan00789/identYwaf
|
71236377e82378ee59aa24185398b5d39ebb4dbf
|
5c88e7925aa94621eb23d494d916e5c614ae47a1
|
refs/heads/master
| 2020-04-25T12:07:52.510372 | 2019-02-26T10:33:12 | 2019-02-26T10:33:12 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 25,208 |
py
|
#!/usr/bin/env python
"""
Copyright (c) 2019 Miroslav Stampar (@stamparm), MIT
See the file 'LICENSE' for copying permission
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
"""
from __future__ import print_function
import base64
import codecs
import difflib
import json
import locale
import optparse
import os
import random
import re
import ssl
import socket
import string
import struct
import subprocess
import sys
import time
import urllib
import zlib
if sys.version_info >= (3, 0):
import http.cookiejar
import http.client as httplib
import urllib.request
import urllib.parse
IS_WIN = subprocess._mswindows
build_opener = urllib.request.build_opener
install_opener = urllib.request.install_opener
quote = urllib.parse.quote
urlopen = urllib.request.urlopen
CookieJar = http.cookiejar.CookieJar
ProxyHandler = urllib.request.ProxyHandler
Request = urllib.request.Request
HTTPCookieProcessor = urllib.request.HTTPCookieProcessor
xrange = range
else:
import cookielib
import httplib
import urllib2
IS_WIN = subprocess.mswindows
build_opener = urllib2.build_opener
install_opener = urllib2.install_opener
quote = urllib.quote
urlopen = urllib2.urlopen
CookieJar = cookielib.CookieJar
ProxyHandler = urllib2.ProxyHandler
Request = urllib2.Request
HTTPCookieProcessor = urllib2.HTTPCookieProcessor
# Reference: http://blog.mathieu-leplatre.info/python-utf-8-print-fails-when-redirecting-stdout.html
sys.stdout = codecs.getwriter(locale.getpreferredencoding())(sys.stdout)
NAME = "identYwaf"
VERSION = "1.0.93"
BANNER = """
` __ __ `
____ ___ ___ ____ ______ `| T T` __ __ ____ _____
l j| \ / _]| \ | T`| | |`| T__T T / T| __|
| T | \ / [_ | _ Yl_j l_j`| ~ |`| | | |Y o || l_
| | | D YY _]| | | | | `|___ |`| | | || || _|
j l | || [_ | | | | | `| !` \ / | | || ]
|____jl_____jl_____jl__j__j l__j `l____/ ` \_/\_/ l__j__jl__j (%s)%s""".strip("\n") % (VERSION, "\n")
RAW, TEXT, HTTPCODE, SERVER, TITLE, HTML, URL = xrange(7)
COOKIE, UA, REFERER = "Cookie", "User-Agent", "Referer"
GET, POST = "GET", "POST"
GENERIC_PROTECTION_KEYWORDS = ("rejected", "forbidden", "suspicious", "malicious", "captcha", "invalid", "your ip", "please contact", "terminated", "protected", "unauthorized", "blocked", "protection", "incident", "denied", "detected", "dangerous", "firewall", "fw_block", "unusual activity", "bad request", "request id", "injection", "permission", "not acceptable", "security policy", "security reasons")
GENERIC_PROTECTION_REGEX = r"(?i)\b(%s)\b"
GENERIC_ERROR_MESSAGE_REGEX = r"\b[A-Z][\w, '-]*(protected by|security|unauthorized|detected|attack|error|rejected|allowed|suspicious|automated|blocked|invalid|denied|permission)[\w, '!-]*"
WAF_RECOGNITION_REGEX = None
HEURISTIC_PAYLOAD = "1 AND 1=1 UNION ALL SELECT 1,NULL,'<script>alert(\"XSS\")</script>',table_name FROM information_schema.tables WHERE 2>1--/**/; EXEC xp_cmdshell('cat ../../../etc/passwd')#" # Reference: https://github.com/sqlmapproject/sqlmap/blob/master/lib/core/settings.py
PAYLOADS = []
SIGNATURES = {}
DATA_JSON = {}
DATA_JSON_FILE = "data.json"
MAX_HELP_OPTION_LENGTH = 18
IS_TTY = sys.stdout.isatty()
COLORIZE = not IS_WIN and IS_TTY
LEVEL_COLORS = {"o": "\033[00;94m", "x": "\033[00;91m", "!": "\033[00;93m", "i": "\033[00;95m", "=": "\033[00;93m", "+": "\033[00;92m", "-": "\033[00;91m"}
VERIFY_OK_INTERVAL = 5
VERIFY_RETRY_TIMES = 3
MIN_MATCH_PARTIAL = 5
DEFAULTS = {"timeout": 10}
MAX_MATCHES = 5
QUICK_RATIO_THRESHOLD = 0.2
MAX_JS_CHALLENGE_SNAPLEN = 120
ENCODING_TRANSLATIONS = {"windows-874": "iso-8859-11", "utf-8859-1": "utf8", "en_us": "utf8", "macintosh": "iso-8859-1", "euc_tw": "big5_tw", "th": "tis-620", "unicode": "utf8", "utc8": "utf8", "ebcdic": "ebcdic-cp-be", "iso-8859": "iso8859-1", "iso-8859-0": "iso8859-1", "ansi": "ascii", "gbk2312": "gbk", "windows-31j": "cp932", "en": "us"} # Reference: https://github.com/sqlmapproject/sqlmap/blob/master/lib/request/basic.py
PROXY_TESTING_PAGE = "https://myexternalip.com/raw"
if COLORIZE:
for _ in re.findall(r"`.+?`", BANNER):
BANNER = BANNER.replace(_, "\033[01;92m%s\033[00;49m" % _.strip('`'))
for _ in re.findall(r" [Do] ", BANNER):
BANNER = BANNER.replace(_, "\033[01;93m%s\033[00;49m" % _.strip('`'))
BANNER = re.sub(VERSION, r"\033[01;91m%s\033[00;49m" % VERSION, BANNER)
else:
BANNER = BANNER.replace('`', "")
_ = random.randint(20, 64)
DEFAULT_USER_AGENT = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; %s; rv:%d.0) Gecko/20100101 Firefox/%d.0" % (NAME, _, _)
HEADERS = {"User-Agent": DEFAULT_USER_AGENT, "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", "Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "identity", "Cache-Control": "max-age=0"}
original = None
options = None
intrusive = None
heuristic = None
chained = False
locked_code = None
locked_regex = None
non_blind = set()
seen = set()
servers = set()
codes = set()
proxies = list()
proxies_index = 0
_exit = exit
def exit(message=None):
if message:
print("%s%s" % (message, ' ' * 20))
_exit(1)
def retrieve(url, data=None):
global proxies_index
retval = {}
if proxies:
while True:
try:
opener = build_opener(ProxyHandler({"http": proxies[proxies_index], "https": proxies[proxies_index]}))
install_opener(opener)
proxies_index = (proxies_index + 1) % len(proxies)
urlopen(PROXY_TESTING_PAGE).read()
except KeyboardInterrupt:
raise
except:
pass
else:
break
try:
req = Request("".join(url[_].replace(' ', "%20") if _ > url.find('?') else url[_] for _ in xrange(len(url))), data, HEADERS)
resp = urlopen(req, timeout=options.timeout)
retval[URL] = resp.url
retval[HTML] = resp.read()
retval[HTTPCODE] = resp.code
retval[RAW] = "%s %d %s\n%s\n%s" % (httplib.HTTPConnection._http_vsn_str, retval[HTTPCODE], resp.msg, str(resp.headers), retval[HTML])
except Exception as ex:
retval[URL] = getattr(ex, "url", url)
retval[HTTPCODE] = getattr(ex, "code", None)
try:
retval[HTML] = ex.read() if hasattr(ex, "read") else getattr(ex, "msg", str(ex))
except:
retval[HTML] = ""
retval[RAW] = "%s %s %s\n%s\n%s" % (httplib.HTTPConnection._http_vsn_str, retval[HTTPCODE] or "", getattr(ex, "msg", ""), str(ex.headers) if hasattr(ex, "headers") else "", retval[HTML])
for encoding in re.findall(r"charset=[\s\"']?([\w-]+)", retval[RAW])[::-1] + ["utf8"]:
encoding = ENCODING_TRANSLATIONS.get(encoding, encoding)
try:
retval[HTML] = retval[HTML].decode(encoding, errors="replace")
break
except:
pass
match = re.search(r"<title>\s*(?P<result>[^<]+?)\s*</title>", retval[HTML], re.I)
retval[TITLE] = match.group("result") if match and "result" in match.groupdict() else None
retval[TEXT] = re.sub(r"(?si)<script.+?</script>|<!--.+?-->|<style.+?</style>|<[^>]+>|\s+", " ", retval[HTML])
match = re.search(r"(?im)^Server: (.+)", retval[RAW])
retval[SERVER] = match.group(1).strip() if match else ""
return retval
def calc_hash(value, binary=True):
value = value.encode("utf8") if not isinstance(value, bytes) else value
result = zlib.crc32(value) & 0xffff
if binary:
result = struct.pack(">H", result)
return result
def single_print(message):
if message not in seen:
print(message)
seen.add(message)
def check_payload(payload, protection_regex=GENERIC_PROTECTION_REGEX % '|'.join(GENERIC_PROTECTION_KEYWORDS)):
global chained
global heuristic
global intrusive
global locked_code
global locked_regex
time.sleep(options.delay or 0)
if options.post:
_ = "%s=%s" % ("".join(random.sample(string.ascii_letters, 3)), quote(payload))
intrusive = retrieve(options.url, _)
else:
_ = "%s%s%s=%s" % (options.url, '?' if '?' not in options.url else '&', "".join(random.sample(string.ascii_letters, 3)), quote(payload))
intrusive = retrieve(_)
if options.lock and not payload.isdigit():
if payload == HEURISTIC_PAYLOAD:
match = re.search(re.sub(r"Server:|Protected by", "".join(random.sample(string.ascii_letters, 6)), WAF_RECOGNITION_REGEX, flags=re.I), intrusive[RAW] or "")
if match:
result = True
for _ in match.groupdict():
if match.group(_):
waf = re.sub(r"\Awaf_", "", _)
locked_regex = DATA_JSON["wafs"][waf]["regex"]
locked_code = intrusive[HTTPCODE]
break
else:
result = False
if not result:
exit(colorize("[x] can't lock results to a non-blind match"))
else:
result = re.search(locked_regex, intrusive[RAW]) is not None and locked_code == intrusive[HTTPCODE]
elif options.string:
result = options.string in (intrusive[RAW] or "")
elif options.code:
result = options.code == intrusive[HTTPCODE]
else:
result = intrusive[HTTPCODE] != original[HTTPCODE] or (intrusive[HTTPCODE] != 200 and intrusive[TITLE] != original[TITLE]) or (re.search(protection_regex, intrusive[HTML]) is not None and re.search(protection_regex, original[HTML]) is None) or (difflib.SequenceMatcher(a=original[HTML] or "", b=intrusive[HTML] or "").ratio() < QUICK_RATIO_THRESHOLD)
if not payload.isdigit():
if result:
if options.debug:
print("\r---%s" % (40 * ' '))
print(payload)
print(intrusive[HTTPCODE], intrusive[RAW])
print("---")
if intrusive[SERVER]:
servers.add(re.sub(r"\s*\(.+\)\Z", "", intrusive[SERVER]))
if len(servers) > 1:
chained = True
single_print(colorize("[!] multiple (reactive) rejection HTTP Server headers detected (%s)" % ', '.join("'%s'" % _ for _ in sorted(servers))))
if intrusive[HTTPCODE]:
codes.add(intrusive[HTTPCODE])
if len(codes) > 1:
chained = True
single_print(colorize("[!] multiple (reactive) rejection HTTP codes detected (%s)" % ', '.join("%s" % _ for _ in sorted(codes))))
if heuristic and heuristic[HTML] and intrusive[HTML] and difflib.SequenceMatcher(a=heuristic[HTML] or "", b=intrusive[HTML] or "").quick_ratio() < QUICK_RATIO_THRESHOLD:
chained = True
single_print(colorize("[!] multiple (reactive) rejection HTML responses detected"))
if payload == HEURISTIC_PAYLOAD:
heuristic = intrusive
return result
def colorize(message):
if COLORIZE:
message = re.sub(r"\[(.)\]", lambda match: "[%s%s\033[00;49m]" % (LEVEL_COLORS[match.group(1)], match.group(1)), message)
if any(_ in message for _ in ("rejected summary", "challenge detected")):
for match in re.finditer(r"[^\w]'([^)]+)'" if "rejected summary" in message else r"\('(.+)'\)", message):
message = message.replace("'%s'" % match.group(1), "'\033[37m%s\033[00;49m'" % match.group(1), 1)
else:
for match in re.finditer(r"[^\w]'([^']+)'", message):
message = message.replace("'%s'" % match.group(1), "'\033[37m%s\033[00;49m'" % match.group(1), 1)
if "blind match" in message:
for match in re.finditer(r"\(((\d+)%)\)", message):
message = message.replace(match.group(1), "\033[%dm%s\033[00;49m" % (92 if int(match.group(2)) >= 95 else (93 if int(match.group(2)) > 80 else 90), match.group(1)))
if "hardness" in message:
for match in re.finditer(r"\(((\d+)%)\)", message):
message = message.replace(match.group(1), "\033[%dm%s\033[00;49m" % (95 if " insane " in message else (91 if " hard " in message else (93 if " moderate " in message else 92)), match.group(1)))
return message
def parse_args():
global options
parser = optparse.OptionParser(version=VERSION)
parser.add_option("--delay", dest="delay", type=int, help="Delay (sec) between tests (default: 0)")
parser.add_option("--timeout", dest="timeout", type=int, help="Response timeout (sec) (default: 10)")
parser.add_option("--proxy", dest="proxy", help="HTTP proxy address (e.g. \"http://127.0.0.1:8080\")")
parser.add_option("--proxy-file", dest="proxy_file", help="Load (rotating) HTTP(s) proxy list from a file")
parser.add_option("--random-agent", dest="random_agent", help="Use random HTTP User-Agent header value")
parser.add_option("--code", dest="code", type=int, help="Expected HTTP code in rejected responses")
parser.add_option("--string", dest="string", help="Expected string in rejected responses")
parser.add_option("--post", dest="post", help="Use POST body for sending payloads")
parser.add_option("--debug", dest="debug", help=optparse.SUPPRESS_HELP)
parser.add_option("--fast", dest="fast", help=optparse.SUPPRESS_HELP)
parser.add_option("--lock", dest="lock", help=optparse.SUPPRESS_HELP)
# Dirty hack(s) for help message
def _(self, *args):
retval = parser.formatter._format_option_strings(*args)
if len(retval) > MAX_HELP_OPTION_LENGTH:
retval = ("%%.%ds.." % (MAX_HELP_OPTION_LENGTH - parser.formatter.indent_increment)) % retval
return retval
parser.usage = "python %s <host|url>" % parser.usage
parser.formatter._format_option_strings = parser.formatter.format_option_strings
parser.formatter.format_option_strings = type(parser.formatter.format_option_strings)(_, parser)
for _ in ("-h", "--version"):
option = parser.get_option(_)
option.help = option.help.capitalize()
try:
options, _ = parser.parse_args()
except SystemExit:
raise
if len(sys.argv) > 1:
url = sys.argv[-1]
if not url.startswith("http"):
url = "http://%s" % url
options.url = url
else:
parser.print_help()
raise SystemExit
for key in DEFAULTS:
if getattr(options, key, None) is None:
setattr(options, key, DEFAULTS[key])
def init():
global WAF_RECOGNITION_REGEX
os.chdir(os.path.abspath(os.path.dirname(__file__)))
if os.path.isfile(DATA_JSON_FILE):
print(colorize("[o] loading data..."))
with codecs.open(DATA_JSON_FILE, "rb", encoding="utf8") as f:
DATA_JSON.update(json.load(f))
WAF_RECOGNITION_REGEX = ""
for waf in DATA_JSON["wafs"]:
if DATA_JSON["wafs"][waf]["regex"]:
WAF_RECOGNITION_REGEX += "%s|" % ("(?P<waf_%s>%s)" % (waf, DATA_JSON["wafs"][waf]["regex"]))
for signature in DATA_JSON["wafs"][waf]["signatures"]:
SIGNATURES[signature] = waf
WAF_RECOGNITION_REGEX = WAF_RECOGNITION_REGEX.strip('|')
else:
exit(colorize("[x] file '%s' is missing" % DATA_JSON_FILE))
print(colorize("[o] initializing handlers..."))
# Reference: https://stackoverflow.com/a/28052583
if hasattr(ssl, "_create_unverified_context"):
ssl._create_default_https_context = ssl._create_unverified_context
if options.proxy_file:
if os.path.isfile(options.proxy_file):
print(colorize("[o] loading proxy list..."))
with codecs.open(options.proxy_file, "rb", encoding="utf8") as f:
proxies.extend(re.sub(r"\s.*", "", _.strip()) for _ in f.read().strip().split('\n') if _.startswith("http"))
random.shuffle(proxies)
else:
exit(colorize("[x] file '%s' does not exist" % options.proxy_file))
cookie_jar = CookieJar()
opener = build_opener(HTTPCookieProcessor(cookie_jar))
install_opener(opener)
if options.proxy:
opener = build_opener(ProxyHandler({"http": options.proxy, "https": options.proxy}))
install_opener(opener)
if options.random_agent:
revision = random.randint(20, 64)
platform = random.sample(("X11; %s %s" % (random.sample(("Linux", "Ubuntu; Linux", "U; Linux", "U; OpenBSD", "U; FreeBSD"), 1)[0], random.sample(("amd64", "i586", "i686", "amd64"), 1)[0]), "Windows NT %s%s" % (random.sample(("5.0", "5.1", "5.2", "6.0", "6.1", "6.2", "6.3", "10.0"), 1)[0], random.sample(("", "; Win64", "; WOW64"), 1)[0]), "Macintosh; Intel Mac OS X 10.%s" % random.randint(1, 11)), 1)[0]
user_agent = "Mozilla/5.0 (%s; rv:%d.0) Gecko/20100101 Firefox/%d.0" % (platform, revision, revision)
HEADERS["User-Agent"] = user_agent
def format_name(waf):
return "%s%s" % (DATA_JSON["wafs"][waf]["name"], (" (%s)" % DATA_JSON["wafs"][waf]["company"]) if DATA_JSON["wafs"][waf]["name"] != DATA_JSON["wafs"][waf]["company"] else "")
def non_blind_check(raw):
retval = False
match = re.search(WAF_RECOGNITION_REGEX, raw or "")
if match:
retval = True
for _ in match.groupdict():
if match.group(_):
waf = re.sub(r"\Awaf_", "", _)
non_blind.add(waf)
single_print(colorize("[+] non-blind match: '%s'%s" % (format_name(waf), 20 * ' ')))
return retval
def run():
global original
hostname = options.url.split("//")[-1].split('/')[0].split(':')[0]
if not hostname.replace('.', "").isdigit():
print(colorize("[i] checking hostname '%s'..." % hostname))
try:
socket.getaddrinfo(hostname, None)
except socket.gaierror:
exit(colorize("[x] host '%s' does not exist" % hostname))
results = ""
signature = b""
counter = 0
original = retrieve(options.url)
if 300 <= (original[HTTPCODE] or 0) < 400 and original[URL]:
original = retrieve(original[URL])
options.url = original[URL]
if original[HTTPCODE] is None:
exit(colorize("[x] missing valid response"))
if not any((options.string, options.code)) and original[HTTPCODE] >= 400:
non_blind_check(original[RAW])
if options.debug:
print("\r---%s" % (40 * ' '))
print(original[HTTPCODE], original[RAW])
print("---")
exit(colorize("[x] access to host '%s' seems to be restricted%s" % (hostname, (" (%d: '<title>%s</title>')" % (original[HTTPCODE], original[TITLE].strip())) if original[TITLE] else "")))
challenge = None
if all(_ in original[HTML].lower() for _ in ("eval", "<script")):
match = re.search(r"(?is)<body[^>]*>(.*)</body>", re.sub(r"(?is)<script.+?</script>", "", original[HTML]))
if re.search(r"(?i)<(body|div)", original[HTML]) is None or (match and len(match.group(1)) == 0):
challenge = re.search(r"(?is)<script.+</script>", original[HTML]).group(0).replace("\n", "\\n")
print(colorize("[x] anti-robot JS challenge detected ('%s%s')" % (challenge[:MAX_JS_CHALLENGE_SNAPLEN], "..." if len(challenge) > MAX_JS_CHALLENGE_SNAPLEN else "")))
protection_keywords = GENERIC_PROTECTION_KEYWORDS
protection_regex = GENERIC_PROTECTION_REGEX % '|'.join(keyword for keyword in protection_keywords if keyword not in original[HTML].lower())
print(colorize("[i] running basic heuristic test..."))
if not check_payload(HEURISTIC_PAYLOAD):
check = False
if options.url.startswith("https://"):
options.url = options.url.replace("https://", "http://")
check = check_payload(HEURISTIC_PAYLOAD)
if not check:
if non_blind_check(intrusive[RAW]):
exit(colorize("[x] unable to continue due to static responses%s" % (" (captcha)" if re.search(r"(?i)captcha", intrusive[RAW]) is not None else "")))
elif challenge is None:
exit(colorize("[x] host '%s' does not seem to be protected" % hostname))
else:
exit(colorize("[x] response not changing without JS challenge solved"))
if options.fast and not non_blind:
exit(colorize("[x] fast exit because of missing non-blind match"))
if not intrusive[HTTPCODE]:
print(colorize("[i] rejected summary: RST|DROP"))
else:
_ = "...".join(match.group(0) for match in re.finditer(GENERIC_ERROR_MESSAGE_REGEX, intrusive[HTML])).strip().replace(" ", " ")
print(colorize(("[i] rejected summary: %d ('%s%s')" % (intrusive[HTTPCODE], ("<title>%s</title>" % intrusive[TITLE]) if intrusive[TITLE] else "", "" if not _ or intrusive[HTTPCODE] < 400 else ("...%s" % _))).replace(" ('')", "")))
found = non_blind_check(intrusive[RAW] if intrusive[HTTPCODE] is not None else original[RAW])
if not found:
print(colorize("[-] non-blind match: -"))
for payload in DATA_JSON["payloads"]:
counter += 1
if IS_TTY:
sys.stdout.write(colorize("\r[i] running payload tests... (%d/%d)\r" % (counter, len(DATA_JSON["payloads"]))))
sys.stdout.flush()
if counter % VERIFY_OK_INTERVAL == 0:
for i in xrange(VERIFY_RETRY_TIMES):
if not check_payload(str(random.randint(1, 9)), protection_regex):
break
elif i == VERIFY_RETRY_TIMES - 1:
exit(colorize("[x] host '%s' seems to be misconfigured or rejecting benign requests%s" % (hostname, (" (%d: '<title>%s</title>')" % (intrusive[HTTPCODE], intrusive[TITLE].strip())) if intrusive[TITLE] else "")))
else:
time.sleep(5)
last = check_payload(payload, protection_regex)
non_blind_check(intrusive[RAW])
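# Each payload contributes one big-endian 16-bit word to the raw
# signature: the payload's 16-bit hash (calc_hash above) shifted left
# one bit, with the low bit set when the payload was rejected.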
signature += struct.pack(">H", ((calc_hash(payload, binary=False) << 1) | last) & 0xffff)
results += 'x' if last else '.'
_ = calc_hash(signature)
signature = "%s:%s" % (_.encode("hex") if not hasattr(_, "hex") else _.hex(), base64.b64encode(signature).decode("ascii"))
print(colorize("%s[=] results: '%s'" % ("\n" if IS_TTY else "", results)))
hardness = 100 * results.count('x') / len(results)
print(colorize("[=] hardness: %s (%d%%)" % ("insane" if hardness >= 80 else ("hard" if hardness >= 50 else ("moderate" if hardness >= 30 else "easy")), hardness)))
if not results.strip('.') or not results.strip('x'):
print(colorize("[-] blind match: -"))
if re.search(r"(?i)captcha", original[HTML]) is not None:
exit(colorize("[x] there seems to be an activated captcha"))
else:
print(colorize("[=] signature: '%s'" % signature))
if signature in SIGNATURES:
waf = SIGNATURES[signature]
print(colorize("[+] blind match: '%s' (100%%)" % format_name(waf)))
elif results.count('x') < MIN_MATCH_PARTIAL:
print(colorize("[-] blind match: -"))
else:
matches = {}
markers = set()
decoded = base64.b64decode(signature.split(':')[-1])
for i in xrange(0, len(decoded), 2):
part = struct.unpack(">H", decoded[i: i + 2])[0]
markers.add(part)
for candidate in SIGNATURES:
counter_y, counter_n = 0, 0
decoded = base64.b64decode(candidate.split(':')[-1])
for i in xrange(0, len(decoded), 2):
part = struct.unpack(">H", decoded[i: i + 2])[0]
if part in markers:
counter_y += 1
elif any(_ in markers for _ in (part & ~1, part | 1)):
counter_n += 1
result = int(round(100 * counter_y / (counter_y + counter_n)))
if SIGNATURES[candidate] in matches:
if result > matches[SIGNATURES[candidate]]:
matches[SIGNATURES[candidate]] = result
else:
matches[SIGNATURES[candidate]] = result
if chained:
for _ in list(matches.keys()):
if matches[_] < 90:
del matches[_]
if not matches:
print(colorize("[-] blind match: - "))
print(colorize("[!] probably chained web protection systems"))
else:
matches = [(_[1], _[0]) for _ in matches.items()]
matches.sort(reverse=True)
print(colorize("[+] blind match: %s" % ", ".join("'%s' (%d%%)" % (format_name(matches[i][1]), matches[i][0]) for i in xrange(min(len(matches), MAX_MATCHES) if matches[0][0] != 100 else 1))))
print()
def main():
if "--version" not in sys.argv:
print(BANNER)
parse_args()
init()
run()
if __name__ == "__main__":
try:
main()
except KeyboardInterrupt:
exit(colorize("\r[x] Ctrl-C pressed"))
|
[
"[email protected]"
] | |
773f7fef36812d17cd02f2a0c2a452e95541bfd7
|
6872caaa6c3bb59995627064ed1ab63df403bdf6
|
/eyantra_provider/venv/Lib/site-packages/authlib/jose/rfc7518/_backends/_key_cryptography.py
|
066c5da1e86247fa58b95e4ddde79714d1aed8d9
|
[
"MIT"
] |
permissive
|
Andreaf2395/OpenID-Provider
|
3189780631d9057140e233930ace72e9bfc76e58
|
cdedd42cc49e6f03e3b2570c03fb1f4a2c83be34
|
refs/heads/Sameeksha_Final_Provider
| 2023-08-21T16:05:42.864159 | 2020-06-18T18:47:16 | 2020-06-18T18:47:16 | 273,314,708 | 0 | 0 |
MIT
| 2020-06-18T18:48:34 | 2020-06-18T18:44:29 |
Python
|
UTF-8
|
Python
| false | false | 1,580 |
py
|
from cryptography.hazmat.primitives.serialization import (
load_pem_private_key, load_pem_public_key, load_ssh_public_key
)
from cryptography.hazmat.primitives.asymmetric.rsa import (
RSAPrivateKey, RSAPublicKey
)
from cryptography.hazmat.primitives.asymmetric.ec import (
EllipticCurvePrivateKey, EllipticCurvePublicKey
)
from cryptography.hazmat.backends import default_backend
from authlib.common.encoding import to_bytes
class RSAKey(object):
def prepare_private_key(self, key):
if isinstance(key, RSAPrivateKey):
return key
key = to_bytes(key)
return load_pem_private_key(key, password=None, backend=default_backend())
def prepare_public_key(self, key):
if isinstance(key, RSAPublicKey):
return key
key = to_bytes(key)
if key.startswith(b'ssh-rsa'):
return load_ssh_public_key(key, backend=default_backend())
else:
return load_pem_public_key(key, backend=default_backend())
class ECKey(object):
def prepare_private_key(self, key):
if isinstance(key, EllipticCurvePrivateKey):
return key
key = to_bytes(key)
return load_pem_private_key(key, password=None, backend=default_backend())
def prepare_public_key(self, key):
if isinstance(key, EllipticCurvePublicKey):
return key
key = to_bytes(key)
if key.startswith(b'ecdsa-sha2-'):
return load_ssh_public_key(key, backend=default_backend())
return load_pem_public_key(key, backend=default_backend())
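# A minimal usage sketch (added for illustration; the file name is an
# assumption): both key classes accept PEM text, an OpenSSH public-key
# line ("ssh-rsa ..." / "ecdsa-sha2-..."), or an already-constructed
# cryptography key object, and always return a cryptography key.
#
#     with open("rsa_public.pem") as f:      # hypothetical key file
#         key = RSAKey().prepare_public_key(f.read())
#     assert isinstance(key, RSAPublicKey)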
|
[
"[email protected]"
] | |
01916b1dc80855030aa378e97495ed5099a7b2a1
|
1c562b288a92dbef9ee76744f73acd334ba56306
|
/jaguar/tests/test_room_metadata.py
|
38f6ba87fbbf6b36be13158c768b806276852ce4
|
[] |
no_license
|
mkhfring/pychat
|
862ffaaee01ea5927e94640e19d88d698ed170af
|
8d7c4ea8eb35d8216c2f4194b00483995052b8ea
|
refs/heads/master
| 2021-03-13T07:13:55.433143 | 2020-03-11T18:44:26 | 2020-03-11T18:44:26 | 246,652,274 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 803 |
py
|
from bddrest.authoring import status, response
from jaguar.tests.helpers import AutoDocumentationBDDTest
class TestRoomMetadata(AutoDocumentationBDDTest):
def test_metadata(self):
with self.given('Test metadata verb', '/apiv1/rooms', 'METADATA'):
assert status == 200
fields = response.json['fields']
assert fields['type']['maxLength'] is not None
assert fields['type']['minLength'] is not None
assert fields['type']['name'] is not None
assert fields['type']['notNone'] is not None
assert fields['type']['required'] is not None
assert fields['type']['watermark'] is not None
assert fields['type']['example'] is not None
assert fields['type']['message'] is not None
|
[
"[email protected]"
] | |
9df6392902b582ba186f117b11700d66985b2de9
|
ef243d91a1826b490e935fa3f3e6c29c3cc547d0
|
/lxml/etree/CommentBase.py
|
29cd0fa1b6fe12df733b0e6bce2364a1a113ec15
|
[] |
no_license
|
VentiFang/Python_local_module
|
6b3d0b22399e817057dfd15d647a14bb1e41980e
|
c44f55379eca2818b29732c2815480ee755ae3fb
|
refs/heads/master
| 2020-11-29T11:24:54.932967 | 2019-12-25T12:57:14 | 2019-12-25T12:57:14 | 230,101,875 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,288 |
py
|
# encoding: utf-8
# module lxml.etree
# from F:\Python\Python36\lib\site-packages\lxml\etree.cp36-win_amd64.pyd
# by generator 1.147
""" The ``lxml.etree`` module implements the extended ElementTree API for XML. """
# imports
import builtins as __builtins__ # <module 'builtins' (built-in)>
from ._Comment import _Comment
class CommentBase(_Comment):
"""
All custom Comment classes must inherit from this one.
To create an XML Comment instance, use the ``Comment()`` factory.
Subclasses *must not* override __init__ or __new__ as it is
absolutely undefined when these objects will be created or
destroyed. All persistent state of Comments must be stored in the
underlying XML. If you really need to initialize the object after
creation, you can implement an ``_init(self)`` method that will be
called after object creation.
"""
def __init__(self, *args, **kwargs): # real signature unknown
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
__pyx_vtable__ = None # (!) real value is '<capsule object NULL at 0x000001F681EE3C00>'
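# A hypothetical subclass, sketched to illustrate the docstring above:
# one-time setup belongs in _init(), never in __init__/__new__, and any
# persistent state must live in the underlying XML (here, the comment
# text itself).
#
#     class DefaultTextComment(CommentBase):
#         def _init(self):
#             if not self.text:
#                 self.text = " TODO "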
|
[
"[email protected]"
] | |
520fb4963fd27b6f09e539e0d5fdf44b99f18a32
|
ba1d0f05e2faf2f21f076c90960e436db2930d36
|
/src/items/view.py
|
be96e57742877f7c5343250254dacff23ff56a4a
|
[
"MIT"
] |
permissive
|
elipavlov/items
|
94d553d05bab4e444c1172f96045058da387db64
|
c935e3321284af251c3339e72531f26e9dd64802
|
refs/heads/master
| 2021-01-22T20:18:27.036787 | 2017-03-20T18:18:20 | 2017-03-20T18:18:20 | 85,308,199 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,752 |
py
|
# coding=utf-8
from sqlite3 import IntegrityError
from flask import json, request, url_for, flash, jsonify, session
from items.exceptions import DataExtractionError
from sqlalchemy import exc
from .config import app
from .models import db, Item
from logging import getLogger
logger = getLogger(__name__)
def _process_error_response(error):
flash(str(error))
return app.response_class(
response=json.dumps(dict(
status='fail',
error_type=type(error).__name__,
error=str(error))),
status=400,
mimetype='application/json'
)
@app.route('%sadd' % app.config.get('API_PATH'), methods=['POST'])
def add_item():
data = None
try:
data = json.loads(request.data)
except ValueError as e:
flash('Data decoding error')
response = _process_error_response(e)
if data:
try:
item = Item(**data)
db.session.add(item)
db.session.commit()
response = jsonify(status='ok')
except (IntegrityError, exc.IntegrityError) as e:
print(str(e))
logger.warning(str(e))
db.session.rollback()
response = _process_error_response(
ValueError('Improper input data')
)
except (TypeError, DataExtractionError) as e:
response = _process_error_response(e)
return response
@app.route('%sitems' % app.config.get('API_PATH'))
def get_items_list():
items = [row.to_response()
for row in db.session.query(Item)
.filter((Item.start_time + Item.days*86400 + 12*86400) < db.func.current_date())
if not row.expired()]
response = jsonify(status='ok', items=items)
return response
@app.route('%sitems/' % app.config.get('API_PATH'), defaults={'path': ''})
@app.route('%sitems/<path:path>' % app.config.get('API_PATH'))
def get_item(path):
if not path:
response = _process_error_response(ValueError('Wrong requested id'))
else:
item = db.session.query(Item).filter(Item.id == path).first()
if not item:
response = app.response_class(
response=json.dumps(dict(status='not found')),
status=404,
mimetype='application/json'
)
else:
if item.expired():
db.session.delete(item)
db.session.commit()
response = app.response_class(
response=json.dumps(dict(status='not found')),
status=404,
mimetype='application/json'
)
else:
response = jsonify(item.to_response())
return response
|
[
"[email protected]"
] | |
bbd765025d00ad5f5576e91fbfc14956a25fa47a
|
c600f82e32bb1cbe22c6aff42371b1526ecae440
|
/src/livestreamer/packages/flashmedia/flv.py
|
7671fe0f2ae8ded170b8b509a32fa79bb9459f33
|
[
"BSD-2-Clause"
] |
permissive
|
john-peterson/livestreamer
|
6445a91cfd6f2002c2184c8c125934a1b5616cea
|
5b268a9cf1a157bbd2fecd3e61c110f046680ab1
|
refs/heads/master
| 2020-12-01T03:02:56.109651 | 2012-12-30T16:14:15 | 2012-12-30T16:14:15 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 465 |
py
|
#!/usr/bin/env python
from .tag import Header, Tag
from .compat import is_py2
class FLV(object):
def __init__(self, fd=None):
self.fd = fd
self.header = Header.deserialize(self.fd)
def __iter__(self):
return self
def __next__(self):
try:
tag = Tag.deserialize(self.fd)
except IOError:
raise StopIteration
return tag
if is_py2:
next = __next__
__all__ = ["FLV"]
|
[
"[email protected]"
] | |
a217ad299a4c52b3e630ca5237cbe36640af382d
|
77e22775135dff0de080573c7a6e83ef373fe4cb
|
/dl/data/base/datasets.py
|
20962cd49a0f1a1f01673ec07dc9f00f7f197657
|
[
"MIT"
] |
permissive
|
flyingGH/pytorch.dl
|
bfcd23ddbc7d405cbba7ce15695e0dda75b755fe
|
d82aa1191c14f328c62de85e391ac6fa1b4c7ee3
|
refs/heads/master
| 2023-06-01T11:21:15.636450 | 2021-03-31T05:47:31 | 2021-03-31T05:47:31 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,809 |
py
|
from torch.utils.data import Dataset
import abc, os, cv2
import numpy as np
from pycocotools.coco import COCO
from xml.etree import ElementTree as ET
from .exceptions import _TargetTransformBaseException, MaximumReapplyError
from .._utils import DATA_ROOT, _get_xml_et_value, _check_ins
reapply_in_exception = True
maximum_reapply = 10
class ImageDatasetBase(Dataset):
def __init__(self, transform=None, target_transform=None, augmentation=None):
"""
:param transform: instance of transforms
:param target_transform: instance of target_transforms
:param augmentation: instance of augmentations
"""
self.transform = transform
self.target_transform = target_transform # _contain_ignore(target_transform)
self.augmentation = augmentation
@abc.abstractmethod
def _get_image(self, index):
"""
:param index: int
:return:
rgb image(Tensor)
"""
raise NotImplementedError('\'_get_image\' must be overridden')
@abc.abstractmethod
def _get_target(self, index):
"""
:param index: int
:return:
list of bboxes, list of bboxes' label index, list of flags([difficult, truncated])
"""
raise NotImplementedError('\'_get_target\' must be overridden')
def get_imgtarget(self, index, count=0):
"""
:param index: int
:return:
img : rgb image(Tensor or ndarray)
targets : Tensor or array-like labels
"""
try:
img = self._get_image(index)
targets = self._get_target(index)
img, targets = self.apply_transform(img, *targets)
return img, targets
except _TargetTransformBaseException as e:
if count == maximum_reapply:
raise MaximumReapplyError('Maximum Reapplying reached: {}. last error was {}'.format(count, str(e)))
elif reapply_in_exception:
return self.get_imgtarget(np.random.randint(len(self)), count + 1)
else:
raise e
def __getitem__(self, index):
"""
:param index: int
:return:
img : rgb image(Tensor or ndarray)
targets : Tensor or array-like labels
"""
return self.get_imgtarget(index)
def apply_transform(self, img, *targets):
"""
IMPORTANT: transforms are applied in a fixed order: augmentation first, then transform, then target_transform
:param img: input image
:param targets: target annotations
:return:
Transformed img and targets
"""
if self.augmentation:
img, targets = self.augmentation(img, *targets)
if self.transform:
img = self.transform(img)
if self.target_transform:
targets = self.target_transform(*targets)
return img, targets
@abc.abstractmethod
def __len__(self):
pass
class COCODatasetMixin:
_coco_dir: str
_focus: str
_coco: COCO
_imageids: list
def _jpgpath(self, filename):
"""
:param filename: path containing .jpg
:return: path of jpg
"""
return os.path.join(self._coco_dir, 'images', self._focus, filename)
def _get_image(self, index):
"""
:param index: int
:return:
rgb image(ndarray)
"""
"""
self._coco.loadImgs(self._imageids[index]): list of dict, contains;
license: int
file_name: str
coco_url: str
height: int
width: int
date_captured: str
flickr_url: str
id: int
"""
filename = self._coco.loadImgs(self._imageids[index])[0]['file_name']
img = cv2.imread(self._jpgpath(filename))
# pytorch's image order is rgb
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
return img.astype(np.float32)
class VOCDatasetMixin:
_voc_dir: str
_annopaths: list
def _jpgpath(self, filename):
"""
:param filename: path containing .jpg
:return: path of jpg
"""
return os.path.join(self._voc_dir, 'JPEGImages', filename)
"""
Detail of contents in voc > https://towardsdatascience.com/coco-data-format-for-object-detection-a4c5eaf518c5
VOC bounding box (xmin, ymin, xmax, ymax)
"""
def _get_image(self, index):
"""
:param index: int
:return:
rgb image(ndarray)
"""
root = ET.parse(self._annopaths[index]).getroot()
img = cv2.imread(self._jpgpath(_get_xml_et_value(root, 'filename')))
# pytorch's image order is rgb
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
return img.astype(np.float32)
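# A minimal concrete dataset, sketched for illustration only (paths and
# the single-label target format are assumptions): the two abstract
# hooks plus __len__ are all that ImageDatasetBase requires.
#
#     class FileListDataset(ImageDatasetBase):
#         def __init__(self, paths, labels, **kwargs):
#             super().__init__(**kwargs)
#             self._paths, self._labels = paths, labels
#
#         def _get_image(self, index):
#             img = cv2.imread(self._paths[index])
#             img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
#             return img.astype(np.float32)
#
#         def _get_target(self, index):
#             return (self._labels[index],)
#
#         def __len__(self):
#             return len(self._paths)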
|
[
"[email protected]"
] | |
c2f89f6c6b456ec887bd59fcc97d7a64f2a6337c
|
a58d930f68712bc9635911af646711201bd1634d
|
/Lib/site-packages/flask_debugtoolbar/panels/sqlalchemy.py
|
ded1c101a228d80486fb6d8e410af07cb20adbe6
|
[
"MIT"
] |
permissive
|
kyle8998/AppGen
|
9999d5d895531ab2dd704e54f016d0e12e74e81a
|
4cc9e57c85363d41dc39c8a4687c9f6ada103beb
|
refs/heads/master
| 2022-12-01T17:06:02.824557 | 2017-11-20T21:53:23 | 2017-11-20T21:53:23 | 84,370,560 | 2 | 1 |
MIT
| 2022-11-27T00:26:13 | 2017-03-08T22:05:01 |
Python
|
UTF-8
|
Python
| false | false | 4,128 |
py
|
try:
from flask_sqlalchemy import get_debug_queries, SQLAlchemy
except ImportError:
sqlalchemy_available = False
get_debug_queries = SQLAlchemy = None
else:
sqlalchemy_available = True
from flask import request, current_app, abort, json_available, g
from flask_debugtoolbar import module
from flask_debugtoolbar.panels import DebugPanel
from flask_debugtoolbar.utils import format_fname, format_sql
import itsdangerous
_ = lambda x: x
def query_signer():
return itsdangerous.URLSafeSerializer(current_app.config['SECRET_KEY'],
salt='fdt-sql-query')
def is_select(statement):
prefix = b'select' if isinstance(statement, bytes) else 'select'
return statement.lower().strip().startswith(prefix)
def dump_query(statement, params):
if not params or not is_select(statement):
return None
try:
return query_signer().dumps([statement, params])
except TypeError:
return None
def load_query(data):
try:
statement, params = query_signer().loads(request.args['query'])
except (itsdangerous.BadSignature, TypeError):
abort(406)
# Make sure it is a select statement
if not is_select(statement):
abort(406)
return statement, params
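# Round-trip sketch (illustrative values, run inside an app context):
# dump_query() signs a [statement, params] pair with the app's
# SECRET_KEY so the panel can replay it later; load_query() rejects
# anything that fails signature verification or is not a SELECT.
#
#     token = dump_query("select * from user where id = ?", (1,))
#     stmt, params = query_signer().loads(token)   # verified round trip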
def extension_used():
return 'sqlalchemy' in current_app.extensions
def recording_enabled():
return (current_app.debug
or current_app.config.get('SQLALCHEMY_RECORD_QUERIES'))
def is_available():
return (json_available and sqlalchemy_available
and extension_used() and recording_enabled())
def get_queries():
if get_debug_queries:
return get_debug_queries()
else:
return []
class SQLAlchemyDebugPanel(DebugPanel):
"""
Panel that displays the SQL queries run by Flask-SQLAlchemy during the request.
"""
name = 'SQLAlchemy'
@property
def has_content(self):
return bool(get_queries()) or not is_available()
def process_request(self, request):
pass
def process_response(self, request, response):
pass
def nav_title(self):
return _('SQLAlchemy')
def nav_subtitle(self):
count = len(get_queries())
if not count and not is_available():
return 'Unavailable'
return '%d %s' % (count, 'query' if count == 1 else 'queries')
def title(self):
return _('SQLAlchemy queries')
def url(self):
return ''
def content(self):
queries = get_queries()
if not queries and not is_available():
return self.render('panels/sqlalchemy_error.html', {
'json_available': json_available,
'sqlalchemy_available': sqlalchemy_available,
'extension_used': extension_used(),
'recording_enabled': recording_enabled(),
})
data = []
for query in queries:
data.append({
'duration': query.duration,
'sql': format_sql(query.statement, query.parameters),
'signed_query': dump_query(query.statement, query.parameters),
'context_long': query.context,
'context': format_fname(query.context)
})
return self.render('panels/sqlalchemy.html', {'queries': data})
# Panel views
@module.route('/sqlalchemy/sql_select', methods=['GET', 'POST'])
@module.route('/sqlalchemy/sql_explain', methods=['GET', 'POST'],
defaults=dict(explain=True))
def sql_select(explain=False):
statement, params = load_query(request.args['query'])
engine = SQLAlchemy().get_engine(current_app)
if explain:
if engine.driver == 'pysqlite':
statement = 'EXPLAIN QUERY PLAN\n%s' % statement
else:
statement = 'EXPLAIN\n%s' % statement
result = engine.execute(statement, params)
return g.debug_toolbar.render('panels/sqlalchemy_select.html', {
'result': result.fetchall(),
'headers': result.keys(),
'sql': format_sql(statement, params),
'duration': float(request.args['duration']),
})
|
[
"[email protected]"
] | |
f06396f839a325246ee0801bec2dd517652b1377
|
de24f83a5e3768a2638ebcf13cbe717e75740168
|
/moodledata/vpl_data/76/usersdata/179/39313/submittedfiles/jogoDaVelha.py
|
dc18ac8faf0b1370501cfc88faf49f897ae00dfb
|
[] |
no_license
|
rafaelperazzo/programacao-web
|
95643423a35c44613b0f64bed05bd34780fe2436
|
170dd5440afb9ee68a973f3de13a99aa4c735d79
|
refs/heads/master
| 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 925 |
py
|
# -*- coding: utf-8 -*-
import math
x1 = int(input('Enter x1: '))
x2 = int(input('Enter x2: '))
x3 = int(input('Enter x3: '))
x4 = int(input('Enter x4: '))
x5 = int(input('Enter x5: '))
x6 = int(input('Enter x6: '))
x7 = int(input('Enter x7: '))
x8 = int(input('Enter x8: '))
x9 = int(input('Enter x9: '))
if x1==x5==x6:
print('E')
elif x2==x4==x7:
print('E')
elif x3==x8==x9:
print('E')
if x1==x2==x3:
print('0')
elif x4==x5==x6:
print('0')
elif x7==x8==x9:
print('0')
elif x1==x5==x9:
print('0')
elif x3==x5==x7:
print('0')
elif x1==x4==x7:
print('0')
elif x2==x5==x8:
print('0')
elif x3==x6==x9:
print('0')
if x1==x2==x3:
print('1')
elif x4==x5==x6:
print('1')
elif x7==x8==x9:
print('1')
elif x1==x5==x9:
print('1')
elif x3==x5==x7:
print('1')
elif x1==x4==x7:
print('1')
elif x2==x5==x8:
print('1')
elif x3==x6==x9:
print('1')
|
[
"[email protected]"
] | |
807298af3e1ebd4145bdd555747f385744339491
|
309dbf12ce8bb5ddb935978628f9a675141cffa5
|
/rev-linked-list/reversell.py
|
44d54c38073c38ee870b2913c2de6fd2e49990dc
|
[] |
no_license
|
eljlee/hb-code-challenges
|
38cc55df8cbf13f1c516cc315734ea029c6ce08d
|
3a190794483003a52ca7fd43349dad6aed252eee
|
refs/heads/master
| 2020-03-08T06:21:14.977981 | 2018-05-01T06:07:06 | 2018-05-01T06:07:06 | 127,969,767 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,178 |
py
|
"""Given linked list head node, return head node of new, reversed linked list.
For example:
>>> ll = Node(1, Node(2, Node(3)))
>>> reverse_linked_list(ll).as_string()
'321'
"""
class Node(object):
"""Class in a linked list."""
def __init__(self, data, next=None):
self.data = data
self.next = next
def as_string(self):
"""Represent data for this node and it's successors as a string.
>>> Node(3).as_string()
'3'
>>> Node(3, Node(2, Node(1))).as_string()
'321'
"""
out = []
n = self
while n:
out.append(str(n.data))
n = n.next
return "".join(out)
def reverse_linked_list(head):
"""Given LL head node, return head node of new, reversed linked list.
>>> ll = Node(1, Node(2, Node(3)))
>>> reverse_linked_list(ll).as_string()
'321'
"""
new_head = None
while head:
new_head = Node(head.data, new_head)
head = head.next
return new_head
if __name__ == '__main__':
import doctest
if doctest.testmod().failed == 0:
print "\n*** ALL TESTS PASSED. RIGHT ON!\n"
|
[
"[email protected]"
] | |
01fbc51fb60705df1d17f7752984ecf95387c70c
|
76e6d4f93078327fef8672133fc75a6f12abc240
|
/ABC166/Test_C.py
|
21670db70c1607836575dd879ec8512eea46c6fc
|
[] |
no_license
|
adusa1019/atcoder
|
1e8f33253f6f80a91d069b2f3b568ce7a2964940
|
f7dbdfc021425160a072f4ce4e324953a376133a
|
refs/heads/master
| 2021-08-08T04:41:36.098678 | 2021-02-01T07:34:34 | 2021-02-01T07:34:34 | 89,038,783 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 178 |
py
|
import pytest
from C import solve
def test_solve():
assert solve('4 3\n1 2 3 4\n1 3\n2 3\n2 4') == '2'
assert solve('6 5\n8 6 9 1 2 1\n1 3\n4 2\n4 3\n4 6\n4 6') == '3'
|
[
"[email protected]"
] | |
b9eb79ac90d988d56bfe2b1946ca1d9a20bc13c4
|
20a18ea0d2738477c5a117f80154c195c6ff2679
|
/nova/tests/unit/scheduler/filters/test_vcpu_model_filter.py
|
29e35d8e88efc3989c239a0f1635d3a3eb84a908
|
[
"Apache-2.0"
] |
permissive
|
hustlzp1981/stx-nova
|
1300fa9757a29b2d00ef587c71ebd98171077d10
|
c52432b3e7a240817a2de06321a2459f4862ab6a
|
refs/heads/master
| 2020-04-26T03:21:12.797447 | 2019-03-01T17:40:14 | 2019-03-01T17:40:14 | 173,264,343 | 0 | 0 |
Apache-2.0
| 2019-03-01T08:28:15 | 2019-03-01T08:28:15 | null |
UTF-8
|
Python
| false | false | 7,430 |
py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Copyright (c) 2016-2017 Wind River Systems, Inc.
#
from nova import objects
from nova.scheduler.filters import vcpu_model_filter
from nova import test
from nova.tests.unit.scheduler import fakes
class TestVCPUModelFilter(test.NoDBTestCase):
def setUp(self):
super(TestVCPUModelFilter, self).setUp()
self.filt_cls = vcpu_model_filter.VCpuModelFilter()
def test_vcpu_model_not_specified(self):
spec_obj = objects.RequestSpec(
flavor=objects.Flavor(memory_mb=1024, extra_specs={}),
image=objects.ImageMeta(properties=objects.ImageMetaProps()),
scheduler_hints={'task_state': ['scheduling'], 'host': ['host1'],
'node': ['node1']})
host = fakes.FakeHostState('host1', 'node1', {})
self.assertTrue(self.filt_cls.host_passes(host, spec_obj))
def test_vcpu_model_flavor_passes(self):
spec_obj = objects.RequestSpec(
flavor=objects.Flavor(extra_specs={'hw:cpu_model': 'Nehalem'}),
image=objects.ImageMeta(properties=objects.ImageMetaProps()),
scheduler_hints={'task_state': ['scheduling'], 'host': ['host1'],
'node': ['node1']})
host = fakes.FakeHostState('host1', 'node1',
{'cpu_info': '{"model": "Broadwell"}'})
self.assertTrue(self.filt_cls.host_passes(host, spec_obj))
def test_vcpu_model_flavor_fails(self):
spec_obj = objects.RequestSpec(
flavor=objects.Flavor(extra_specs={'hw:cpu_model': 'Nehalem'}),
image=objects.ImageMeta(properties=objects.ImageMetaProps()),
scheduler_hints={'task_state': ['scheduling'], 'host': ['host1'],
'node': ['node1']})
host = fakes.FakeHostState('host1', 'node1',
{'cpu_info': '{"model": "Conroe"}'})
self.assertFalse(self.filt_cls.host_passes(host, spec_obj))
def test_vcpu_model_image_passes(self):
props = objects.ImageMetaProps(hw_cpu_model='Nehalem')
spec_obj = objects.RequestSpec(
flavor=objects.Flavor(memory_mb=1024, extra_specs={}),
image=objects.ImageMeta(properties=props),
scheduler_hints={'task_state': ['scheduling'], 'host': ['host1'],
'node': ['node1']})
host = fakes.FakeHostState('host1', 'node1',
{'cpu_info': '{"model": "Broadwell"}'})
self.assertTrue(self.filt_cls.host_passes(host, spec_obj))
def test_vcpu_model_image_fails(self):
props = objects.ImageMetaProps(hw_cpu_model='Nehalem')
spec_obj = objects.RequestSpec(
flavor=objects.Flavor(memory_mb=1024, extra_specs={}),
image=objects.ImageMeta(properties=props),
scheduler_hints={'task_state': ['scheduling'], 'host': ['host1'],
'node': ['node1']})
host = fakes.FakeHostState('host1', 'node1',
{'cpu_info': '{"model": "Conroe"}'})
self.assertFalse(self.filt_cls.host_passes(host, spec_obj))
def test_passthrough_vcpu_model_flavor_passes(self):
spec_obj = objects.RequestSpec(
flavor=objects.Flavor(extra_specs={'hw:cpu_model': 'Passthrough'}),
image=objects.ImageMeta(properties=objects.ImageMetaProps()),
scheduler_hints={'task_state': ['scheduling'], 'host': ['host1'],
'node': ['node1']})
host = fakes.FakeHostState('host1', 'node1',
{'cpu_info': '{"model": "Broadwell", "features": ["vmx"]}'})
self.assertTrue(self.filt_cls.host_passes(host, spec_obj))
def test_passthrough_migrate_vcpu_model_flavor_passes(self):
spec_obj = objects.RequestSpec(
flavor=objects.Flavor(extra_specs={'hw:cpu_model': 'Passthrough'}),
image=objects.ImageMeta(properties=objects.ImageMetaProps()),
scheduler_hints={'task_state': ['migrating'], 'host': ['host1'],
'node': ['node1']})
host = fakes.FakeHostState('host1', 'node1',
{'cpu_info': '{"model": "Broadwell", '
'"features": ["pge", "avx", "vmx"]}'})
self.stub_out('nova.objects.ComputeNode.get_by_host_and_nodename',
self._fake_compute_node_get_by_host_and_nodename)
self.assertTrue(self.filt_cls.host_passes(host, spec_obj))
def test_passthrough_migrate_vcpu_model_flavor_fails(self):
spec_obj = objects.RequestSpec(
flavor=objects.Flavor(extra_specs={'hw:cpu_model': 'Passthrough'}),
image=objects.ImageMeta(properties=objects.ImageMetaProps()),
scheduler_hints={'task_state': ['migrating'], 'host': ['host1'],
'node': ['node1']})
host = fakes.FakeHostState('host1', 'node1',
{'cpu_info': '{"model": "IvyBridge", '
'"features": ["pge", "avx", "vmx"]}'})
self.stub_out('nova.objects.ComputeNode.get_by_host_and_nodename',
self._fake_compute_node_get_by_host_and_nodename)
self.assertFalse(self.filt_cls.host_passes(host, spec_obj))
def test_passthrough_migrate_vcpu_model_flavor_features_fails(self):
spec_obj = objects.RequestSpec(
flavor=objects.Flavor(extra_specs={'hw:cpu_model': 'Passthrough'}),
image=objects.ImageMeta(properties=objects.ImageMetaProps()),
scheduler_hints={'task_state': ['migrating'], 'host': ['host1'],
'node': ['node1']})
host = fakes.FakeHostState('host1', 'node1',
{'cpu_info': '{"model": "Broadwell", '
'"features": ["pge", "avx", "vmx", "clflush"]}'})
self.stub_out('nova.objects.ComputeNode.get_by_host_and_nodename',
self._fake_compute_node_get_by_host_and_nodename)
self.assertFalse(self.filt_cls.host_passes(host, spec_obj))
def test_passthrough_migrate_vcpu_model_flavor_kvm_fails(self):
spec_obj = objects.RequestSpec(
flavor=objects.Flavor(extra_specs={'hw:cpu_model': 'Passthrough'}),
image=objects.ImageMeta(properties=objects.ImageMetaProps()),
scheduler_hints={'task_state': ['scheduling'], 'host': ['host1'],
'node': ['node1']})
host = fakes.FakeHostState('host1', 'node1',
{'cpu_info': '{"model": "Broadwell", '
'"features": ["pge", "avx"]}'})
self.assertFalse(self.filt_cls.host_passes(host, spec_obj))
def _fake_compute_node_get_by_host_and_nodename(self, cn, ctx, host, node):
cpu_info = '{"model": "Broadwell", "features": ["pge", "avx", "vmx"]}'
compute_node = objects.ComputeNode(cpu_info=cpu_info)
return compute_node
|
[
"[email protected]"
] | |
dfc6c6bcfb803410e8e29e9372cbfc20346520ac
|
9e988c0dfbea15cd23a3de860cb0c88c3dcdbd97
|
/sdBs/AllRun/sdssj_101342.12+260620.0/sdB_SDSSJ_101342.12+260620.0_lc.py
|
9a011b1d702d1b617d49f9c4a2d9a77ee7bb3f10
|
[] |
no_license
|
tboudreaux/SummerSTScICode
|
73b2e5839b10c0bf733808f4316d34be91c5a3bd
|
4dd1ffbb09e0a599257d21872f9d62b5420028b0
|
refs/heads/master
| 2021-01-20T18:07:44.723496 | 2016-08-08T16:49:53 | 2016-08-08T16:49:53 | 65,221,159 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 370 |
py
|
from gPhoton.gAperture import gAperture
def main():
gAperture(band="NUV", skypos=[153.4255,26.105556], stepsz=30., csvfile="/data2/fleming/GPHOTON_OUTPU/LIGHTCURVES/sdBs/sdB_SDSSJ_101342.12+260620.0 /sdB_SDSSJ_101342.12+260620.0_lc.csv", maxgap=1000., overwrite=True, radius=0.00555556, annulus=[0.005972227,0.0103888972], verbose=3)
if __name__ == "__main__":
main()
|
[
"[email protected]"
] | |
6d08ddadf72bb842f38dcab1040eb5c887a7b5ba
|
e6e81d0cd02223ca27f2c3f544b3c116e7617270
|
/LeetCodePremium/77.combinations.py
|
4e7d7a1ed0cc1d5b18cdec815c2f06e8eceb101c
|
[] |
no_license
|
ashjambhulkar/objectoriented
|
86166640b0546713095dd5d8804fc78d31782662
|
6f07b50590ceef231be38d6d7b8c73a40c1152e9
|
refs/heads/master
| 2022-05-03T23:28:38.674275 | 2022-04-26T21:37:31 | 2022-04-26T21:37:31 | 249,091,045 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 994 |
py
|
#
# @lc app=leetcode id=77 lang=python3
#
# [77] Combinations
#
# @lc code=start
class Solution:
def combine(self, n: int, k: int) -> List[List[int]]:
# sample = [i for i in range(1, n+1)]
# result = []
# def helper(sample, temp):
# if len(temp) == k:
# result.append(temp[:])
# elif len(temp) <= k:
# for i in range(len(sample)):
# temp.append(sample[i])
# helper(sample[i+1:],temp)
# temp.pop()
# helper(sample, [])
# return result
sample = [i for i in range(1, n+1)]
result = []
def helper(sample, temp):
if len(temp) == k:
result.append(temp[:])
for i in range(len(sample)):
temp.append(sample[i])
helper(sample[i+1:], temp)
temp.remove(sample[i])
helper(sample, [])
return result
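# Illustrative note (not part of the submission): this is standard
# backtracking -- for n=4, k=2 the helper yields [1,2], [1,3], [1,4],
# [2,3], [2,4], [3,4]; temp.remove(sample[i]) undoes each choice
# before the next branch is tried.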
# @lc code=end
|
[
"[email protected]"
] | |
fa732dd4d96554ba0973ad299250f13f74a6166d
|
a838d4bed14d5df5314000b41f8318c4ebe0974e
|
/sdk/monitor/azure-monitor-opentelemetry-exporter/azure/monitor/opentelemetry/exporter/_generated/models/_models.py
|
2eb7716c2c9269fe361603bddfbac2c620bc5c8d
|
[
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] |
permissive
|
scbedd/azure-sdk-for-python
|
ee7cbd6a8725ddd4a6edfde5f40a2a589808daea
|
cc8bdfceb23e5ae9f78323edc2a4e66e348bb17a
|
refs/heads/master
| 2023-09-01T08:38:56.188954 | 2021-06-17T22:52:28 | 2021-06-17T22:52:28 | 159,568,218 | 2 | 0 |
MIT
| 2019-08-11T21:16:01 | 2018-11-28T21:34:49 |
Python
|
UTF-8
|
Python
| false | false | 36,791 |
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.core.exceptions import HttpResponseError
import msrest.serialization
class MonitorDomain(msrest.serialization.Model):
"""The abstract common base of all domains.
All required parameters must be populated in order to send to Azure.
:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
:type additional_properties: dict[str, object]
:param version: Required. Schema version.
:type version: int
"""
_validation = {
'version': {'required': True},
}
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'version': {'key': 'ver', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(MonitorDomain, self).__init__(**kwargs)
self.additional_properties = kwargs.get('additional_properties', None)
self.version = kwargs.get('version', 2)
class AvailabilityData(MonitorDomain):
"""Instances of AvailabilityData represent the result of executing an availability test.
All required parameters must be populated in order to send to Azure.
:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
:type additional_properties: dict[str, object]
:param version: Required. Schema version.
:type version: int
:param id: Required. Identifier of a test run. Use it to correlate steps of test run and
telemetry generated by the service.
:type id: str
:param name: Required. Name of the test that these availability results represent.
:type name: str
:param duration: Required. Duration in format: DD.HH:MM:SS.MMMMMM. Must be less than 1000 days.
:type duration: str
:param success: Required. Success flag.
:type success: bool
:param run_location: Name of the location where the test was run from.
:type run_location: str
:param message: Diagnostic message for the result.
:type message: str
:param properties: Collection of custom properties.
:type properties: dict[str, str]
:param measurements: Collection of custom measurements.
:type measurements: dict[str, float]
"""
_validation = {
'version': {'required': True},
'id': {'required': True, 'max_length': 512, 'min_length': 0},
'name': {'required': True, 'max_length': 1024, 'min_length': 0},
'duration': {'required': True},
'success': {'required': True},
'run_location': {'max_length': 1024, 'min_length': 0},
'message': {'max_length': 8192, 'min_length': 0},
}
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'version': {'key': 'ver', 'type': 'int'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'duration': {'key': 'duration', 'type': 'str'},
'success': {'key': 'success', 'type': 'bool'},
'run_location': {'key': 'runLocation', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'measurements': {'key': 'measurements', 'type': '{float}'},
}
def __init__(
self,
**kwargs
):
super(AvailabilityData, self).__init__(**kwargs)
self.id = kwargs['id']
self.name = kwargs['name']
self.duration = kwargs['duration']
self.success = kwargs['success']
self.run_location = kwargs.get('run_location', None)
self.message = kwargs.get('message', None)
self.properties = kwargs.get('properties', None)
self.measurements = kwargs.get('measurements', None)
class MessageData(MonitorDomain):
"""Instances of Message represent printf-like trace statements that are text-searched. Log4Net, NLog and other text-based log file entries are translated into instances of this type. The message does not have measurements.
All required parameters must be populated in order to send to Azure.
:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
:type additional_properties: dict[str, object]
:param version: Required. Schema version.
:type version: int
:param message: Required. Trace message.
:type message: str
:param severity_level: Trace severity level. Possible values include: "Verbose", "Information",
"Warning", "Error", "Critical".
:type severity_level: str or ~azure_monitor_client.models.SeverityLevel
:param properties: Collection of custom properties.
:type properties: dict[str, str]
:param measurements: Collection of custom measurements.
:type measurements: dict[str, float]
"""
_validation = {
'version': {'required': True},
'message': {'required': True, 'max_length': 32768, 'min_length': 0},
}
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'version': {'key': 'ver', 'type': 'int'},
'message': {'key': 'message', 'type': 'str'},
'severity_level': {'key': 'severityLevel', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'measurements': {'key': 'measurements', 'type': '{float}'},
}
def __init__(
self,
**kwargs
):
super(MessageData, self).__init__(**kwargs)
self.message = kwargs['message']
self.severity_level = kwargs.get('severity_level', None)
self.properties = kwargs.get('properties', None)
self.measurements = kwargs.get('measurements', None)
class MetricDataPoint(msrest.serialization.Model):
"""Metric data single measurement.
All required parameters must be populated in order to send to Azure.
:param namespace: Namespace of the metric.
:type namespace: str
:param name: Required. Name of the metric.
:type name: str
:param data_point_type: Metric type. Single measurement or the aggregated value. Possible
values include: "Measurement", "Aggregation".
:type data_point_type: str or ~azure_monitor_client.models.DataPointType
:param value: Required. Single value for measurement. Sum of individual measurements for the
aggregation.
:type value: float
:param count: Metric weight of the aggregated metric. Should not be set for a measurement.
:type count: int
:param min: Minimum value of the aggregated metric. Should not be set for a measurement.
:type min: float
:param max: Maximum value of the aggregated metric. Should not be set for a measurement.
:type max: float
:param std_dev: Standard deviation of the aggregated metric. Should not be set for a
measurement.
:type std_dev: float
"""
_validation = {
'namespace': {'max_length': 256, 'min_length': 0},
'name': {'required': True, 'max_length': 1024, 'min_length': 0},
'value': {'required': True},
}
_attribute_map = {
'namespace': {'key': 'ns', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'data_point_type': {'key': 'kind', 'type': 'str'},
'value': {'key': 'value', 'type': 'float'},
'count': {'key': 'count', 'type': 'int'},
'min': {'key': 'min', 'type': 'float'},
'max': {'key': 'max', 'type': 'float'},
'std_dev': {'key': 'stdDev', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
super(MetricDataPoint, self).__init__(**kwargs)
self.namespace = kwargs.get('namespace', None)
self.name = kwargs['name']
self.data_point_type = kwargs.get('data_point_type', None)
self.value = kwargs['value']
self.count = kwargs.get('count', None)
self.min = kwargs.get('min', None)
self.max = kwargs.get('max', None)
self.std_dev = kwargs.get('std_dev', None)
class MetricsData(MonitorDomain):
"""An instance of the Metric item is a list of measurements (single data points) and/or aggregations.
All required parameters must be populated in order to send to Azure.
:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
:type additional_properties: dict[str, object]
:param version: Required. Schema version.
:type version: int
:param metrics: Required. List of metrics. Only one metric in the list is currently supported
by Application Insights storage. If multiple data points were sent only the first one will be
used.
:type metrics: list[~azure_monitor_client.models.MetricDataPoint]
:param properties: Collection of custom properties.
:type properties: dict[str, str]
"""
_validation = {
'version': {'required': True},
'metrics': {'required': True},
}
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'version': {'key': 'ver', 'type': 'int'},
'metrics': {'key': 'metrics', 'type': '[MetricDataPoint]'},
'properties': {'key': 'properties', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
super(MetricsData, self).__init__(**kwargs)
self.metrics = kwargs['metrics']
self.properties = kwargs.get('properties', None)
class MonitorBase(msrest.serialization.Model):
"""Data struct to contain only C section with custom fields.
:param base_type: Name of item (B section) if any. If telemetry data is derived straight from
this, this should be null.
:type base_type: str
:param base_data: The data payload for the telemetry request.
:type base_data: ~azure_monitor_client.models.MonitorDomain
"""
_attribute_map = {
'base_type': {'key': 'baseType', 'type': 'str'},
'base_data': {'key': 'baseData', 'type': 'MonitorDomain'},
}
def __init__(
self,
**kwargs
):
super(MonitorBase, self).__init__(**kwargs)
self.base_type = kwargs.get('base_type', None)
self.base_data = kwargs.get('base_data', None)
class PageViewData(MonitorDomain):
"""An instance of PageView represents a generic action on a page like a button click. It is also the base type for PageView.
All required parameters must be populated in order to send to Azure.
:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
:type additional_properties: dict[str, object]
:param version: Required. Schema version.
:type version: int
:param id: Required. Identifier of a page view instance. Used for correlation between page view
and other telemetry items.
:type id: str
:param name: Required. Event name. Keep it low cardinality to allow proper grouping and useful
metrics.
:type name: str
:param url: Request URL with all query string parameters.
:type url: str
:param duration: Request duration in format: DD.HH:MM:SS.MMMMMM. For a page view
(PageViewData), this is the duration. For a page view with performance information
(PageViewPerfData), this is the page load time. Must be less than 1000 days.
:type duration: str
:param referred_uri: Fully qualified page URI or URL of the referring page; if unknown, leave
blank.
:type referred_uri: str
:param properties: Collection of custom properties.
:type properties: dict[str, str]
:param measurements: Collection of custom measurements.
:type measurements: dict[str, float]
"""
_validation = {
'version': {'required': True},
'id': {'required': True, 'max_length': 512, 'min_length': 0},
'name': {'required': True, 'max_length': 1024, 'min_length': 0},
'url': {'max_length': 2048, 'min_length': 0},
'referred_uri': {'max_length': 2048, 'min_length': 0},
}
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'version': {'key': 'ver', 'type': 'int'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'duration': {'key': 'duration', 'type': 'str'},
'referred_uri': {'key': 'referredUri', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'measurements': {'key': 'measurements', 'type': '{float}'},
}
def __init__(
self,
**kwargs
):
super(PageViewData, self).__init__(**kwargs)
self.id = kwargs['id']
self.name = kwargs['name']
self.url = kwargs.get('url', None)
self.duration = kwargs.get('duration', None)
self.referred_uri = kwargs.get('referred_uri', None)
self.properties = kwargs.get('properties', None)
self.measurements = kwargs.get('measurements', None)
class PageViewPerfData(MonitorDomain):
"""An instance of PageViewPerf represents: a page view with no performance data, a page view with performance data, or just the performance data of an earlier page request.
All required parameters must be populated in order to send to Azure.
:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
:type additional_properties: dict[str, object]
:param version: Required. Schema version.
:type version: int
:param id: Required. Identifier of a page view instance. Used for correlation between page view
and other telemetry items.
:type id: str
:param name: Required. Event name. Keep it low cardinality to allow proper grouping and useful
metrics.
:type name: str
:param url: Request URL with all query string parameters.
:type url: str
:param duration: Request duration in format: DD.HH:MM:SS.MMMMMM. For a page view
(PageViewData), this is the duration. For a page view with performance information
(PageViewPerfData), this is the page load time. Must be less than 1000 days.
:type duration: str
:param perf_total: Performance total in TimeSpan 'G' (general long) format: d:hh:mm:ss.fffffff.
:type perf_total: str
:param network_connect: Network connection time in TimeSpan 'G' (general long) format:
d:hh:mm:ss.fffffff.
:type network_connect: str
:param sent_request: Sent request time in TimeSpan 'G' (general long) format:
d:hh:mm:ss.fffffff.
:type sent_request: str
:param received_response: Received response time in TimeSpan 'G' (general long) format:
d:hh:mm:ss.fffffff.
:type received_response: str
:param dom_processing: DOM processing time in TimeSpan 'G' (general long) format:
d:hh:mm:ss.fffffff.
:type dom_processing: str
:param properties: Collection of custom properties.
:type properties: dict[str, str]
:param measurements: Collection of custom measurements.
:type measurements: dict[str, float]
"""
_validation = {
'version': {'required': True},
'id': {'required': True, 'max_length': 512, 'min_length': 0},
'name': {'required': True, 'max_length': 1024, 'min_length': 0},
'url': {'max_length': 2048, 'min_length': 0},
}
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'version': {'key': 'ver', 'type': 'int'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'duration': {'key': 'duration', 'type': 'str'},
'perf_total': {'key': 'perfTotal', 'type': 'str'},
'network_connect': {'key': 'networkConnect', 'type': 'str'},
'sent_request': {'key': 'sentRequest', 'type': 'str'},
'received_response': {'key': 'receivedResponse', 'type': 'str'},
'dom_processing': {'key': 'domProcessing', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'measurements': {'key': 'measurements', 'type': '{float}'},
}
def __init__(
self,
**kwargs
):
super(PageViewPerfData, self).__init__(**kwargs)
self.id = kwargs['id']
self.name = kwargs['name']
self.url = kwargs.get('url', None)
self.duration = kwargs.get('duration', None)
self.perf_total = kwargs.get('perf_total', None)
self.network_connect = kwargs.get('network_connect', None)
self.sent_request = kwargs.get('sent_request', None)
self.received_response = kwargs.get('received_response', None)
self.dom_processing = kwargs.get('dom_processing', None)
self.properties = kwargs.get('properties', None)
self.measurements = kwargs.get('measurements', None)
class RemoteDependencyData(MonitorDomain):
"""An instance of Remote Dependency represents an interaction of the monitored component with a remote component/service like SQL or an HTTP endpoint.
All required parameters must be populated in order to send to Azure.
:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
:type additional_properties: dict[str, object]
:param version: Required. Schema version.
:type version: int
:param id: Identifier of a dependency call instance. Used for correlation with the request
telemetry item corresponding to this dependency call.
:type id: str
:param name: Required. Name of the command initiated with this dependency call. Low cardinality
value. Examples are stored procedure name and URL path template.
:type name: str
:param result_code: Result code of a dependency call. Examples are SQL error code and HTTP
status code.
:type result_code: str
:param data: Command initiated by this dependency call. Examples are SQL statement and HTTP URL
with all query parameters.
:type data: str
:param type: Dependency type name. Very low cardinality value for logical grouping of
dependencies and interpretation of other fields like commandName and resultCode. Examples are
SQL, Azure table, and HTTP.
:type type: str
:param target: Target site of a dependency call. Examples are server name, host address.
:type target: str
:param duration: Required. Request duration in format: DD.HH:MM:SS.MMMMMM. Must be less than
1000 days.
:type duration: str
:param success: Indication of successful or unsuccessful call.
:type success: bool
:param properties: Collection of custom properties.
:type properties: dict[str, str]
:param measurements: Collection of custom measurements.
:type measurements: dict[str, float]
"""
_validation = {
'version': {'required': True},
'id': {'max_length': 512, 'min_length': 0},
'name': {'required': True, 'max_length': 1024, 'min_length': 0},
'result_code': {'max_length': 1024, 'min_length': 0},
'data': {'max_length': 8192, 'min_length': 0},
'type': {'max_length': 1024, 'min_length': 0},
'target': {'max_length': 1024, 'min_length': 0},
'duration': {'required': True},
}
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'version': {'key': 'ver', 'type': 'int'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'result_code': {'key': 'resultCode', 'type': 'str'},
'data': {'key': 'data', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'duration': {'key': 'duration', 'type': 'str'},
'success': {'key': 'success', 'type': 'bool'},
'properties': {'key': 'properties', 'type': '{str}'},
'measurements': {'key': 'measurements', 'type': '{float}'},
}
def __init__(
self,
**kwargs
):
super(RemoteDependencyData, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.name = kwargs['name']
self.result_code = kwargs.get('result_code', None)
self.data = kwargs.get('data', None)
self.type = kwargs.get('type', None)
self.target = kwargs.get('target', None)
self.duration = kwargs['duration']
self.success = kwargs.get('success', True)
self.properties = kwargs.get('properties', None)
self.measurements = kwargs.get('measurements', None)
class RequestData(MonitorDomain):
"""An instance of Request represents completion of an external request to the application to do work and contains a summary of that request execution and the results.
All required parameters must be populated in order to send to Azure.
:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
:type additional_properties: dict[str, object]
:param version: Required. Schema version.
:type version: int
:param id: Required. Identifier of a request call instance. Used for correlation between
request and other telemetry items.
:type id: str
:param name: Name of the request. Represents code path taken to process request. Low
cardinality value to allow better grouping of requests. For HTTP requests it represents the
HTTP method and URL path template like 'GET /values/{id}'.
:type name: str
:param duration: Required. Request duration in format: DD.HH:MM:SS.MMMMMM. Must be less than
1000 days.
:type duration: str
:param success: Required. Indication of successful or unsuccessful call.
:type success: bool
:param response_code: Required. Result of a request execution. HTTP status code for HTTP
requests.
:type response_code: str
:param source: Source of the request. Examples are the instrumentation key of the caller or the
ip address of the caller.
:type source: str
:param url: Request URL with all query string parameters.
:type url: str
:param properties: Collection of custom properties.
:type properties: dict[str, str]
:param measurements: Collection of custom measurements.
:type measurements: dict[str, float]
"""
_validation = {
'version': {'required': True},
'id': {'required': True, 'max_length': 512, 'min_length': 0},
'name': {'max_length': 1024, 'min_length': 0},
'duration': {'required': True},
'success': {'required': True},
'response_code': {'required': True, 'max_length': 1024, 'min_length': 0},
'source': {'max_length': 1024, 'min_length': 0},
'url': {'max_length': 2048, 'min_length': 0},
}
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'version': {'key': 'ver', 'type': 'int'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'duration': {'key': 'duration', 'type': 'str'},
'success': {'key': 'success', 'type': 'bool'},
'response_code': {'key': 'responseCode', 'type': 'str'},
'source': {'key': 'source', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'measurements': {'key': 'measurements', 'type': '{float}'},
}
def __init__(
self,
**kwargs
):
super(RequestData, self).__init__(**kwargs)
self.id = kwargs['id']
self.name = kwargs.get('name', None)
self.duration = kwargs['duration']
self.success = kwargs.get('success', True)
self.response_code = kwargs['response_code']
self.source = kwargs.get('source', None)
self.url = kwargs.get('url', None)
self.properties = kwargs.get('properties', None)
self.measurements = kwargs.get('measurements', None)
class StackFrame(msrest.serialization.Model):
"""Stack frame information.
All required parameters must be populated in order to send to Azure.
:param level: Required.
:type level: int
:param method: Required. Method name.
:type method: str
:param assembly: Name of the assembly (dll, jar, etc.) containing this function.
:type assembly: str
:param file_name: File name or URL of the method implementation.
:type file_name: str
:param line: Line number of the code implementation.
:type line: int
"""
_validation = {
'level': {'required': True},
'method': {'required': True, 'max_length': 1024, 'min_length': 0},
'assembly': {'max_length': 1024, 'min_length': 0},
'file_name': {'max_length': 1024, 'min_length': 0},
}
_attribute_map = {
'level': {'key': 'level', 'type': 'int'},
'method': {'key': 'method', 'type': 'str'},
'assembly': {'key': 'assembly', 'type': 'str'},
'file_name': {'key': 'fileName', 'type': 'str'},
'line': {'key': 'line', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(StackFrame, self).__init__(**kwargs)
self.level = kwargs['level']
self.method = kwargs['method']
self.assembly = kwargs.get('assembly', None)
self.file_name = kwargs.get('file_name', None)
self.line = kwargs.get('line', None)
class TelemetryErrorDetails(msrest.serialization.Model):
"""The error details.
:param index: The index in the original payload of the item.
:type index: int
    :param status_code: The item-specific HTTP response status code.
:type status_code: int
:param message: The error message.
:type message: str
"""
_attribute_map = {
'index': {'key': 'index', 'type': 'int'},
'status_code': {'key': 'statusCode', 'type': 'int'},
'message': {'key': 'message', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(TelemetryErrorDetails, self).__init__(**kwargs)
self.index = kwargs.get('index', None)
self.status_code = kwargs.get('status_code', None)
self.message = kwargs.get('message', None)
class TelemetryEventData(MonitorDomain):
"""Instances of Event represent structured event records that can be grouped and searched by their properties. Event data item also creates a metric of event count by name.
All required parameters must be populated in order to send to Azure.
:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
:type additional_properties: dict[str, object]
:param version: Required. Schema version.
:type version: int
:param name: Required. Event name. Keep it low cardinality to allow proper grouping and useful
metrics.
:type name: str
:param properties: Collection of custom properties.
:type properties: dict[str, str]
:param measurements: Collection of custom measurements.
:type measurements: dict[str, float]
"""
_validation = {
'version': {'required': True},
'name': {'required': True, 'max_length': 512, 'min_length': 0},
}
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'version': {'key': 'ver', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'measurements': {'key': 'measurements', 'type': '{float}'},
}
def __init__(
self,
**kwargs
):
super(TelemetryEventData, self).__init__(**kwargs)
self.name = kwargs['name']
self.properties = kwargs.get('properties', None)
self.measurements = kwargs.get('measurements', None)
class TelemetryExceptionData(MonitorDomain):
"""An instance of Exception represents a handled or unhandled exception that occurred during execution of the monitored application.
All required parameters must be populated in order to send to Azure.
:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
:type additional_properties: dict[str, object]
:param version: Required. Schema version.
:type version: int
:param exceptions: Required. Exception chain - list of inner exceptions.
:type exceptions: list[~azure_monitor_client.models.TelemetryExceptionDetails]
:param severity_level: Severity level. Mostly used to indicate exception severity level when it
is reported by logging library. Possible values include: "Verbose", "Information", "Warning",
"Error", "Critical".
:type severity_level: str or ~azure_monitor_client.models.SeverityLevel
:param problem_id: Identifier of where the exception was thrown in code. Used for exceptions
grouping. Typically a combination of exception type and a function from the call stack.
:type problem_id: str
:param properties: Collection of custom properties.
:type properties: dict[str, str]
:param measurements: Collection of custom measurements.
:type measurements: dict[str, float]
"""
_validation = {
'version': {'required': True},
'exceptions': {'required': True},
'problem_id': {'max_length': 1024, 'min_length': 0},
}
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'version': {'key': 'ver', 'type': 'int'},
'exceptions': {'key': 'exceptions', 'type': '[TelemetryExceptionDetails]'},
'severity_level': {'key': 'severityLevel', 'type': 'str'},
'problem_id': {'key': 'problemId', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'measurements': {'key': 'measurements', 'type': '{float}'},
}
def __init__(
self,
**kwargs
):
super(TelemetryExceptionData, self).__init__(**kwargs)
self.exceptions = kwargs['exceptions']
self.severity_level = kwargs.get('severity_level', None)
self.problem_id = kwargs.get('problem_id', None)
self.properties = kwargs.get('properties', None)
self.measurements = kwargs.get('measurements', None)
class TelemetryExceptionDetails(msrest.serialization.Model):
"""Exception details of the exception in a chain.
All required parameters must be populated in order to send to Azure.
:param id: In case exception is nested (outer exception contains inner one), the id and outerId
properties are used to represent the nesting.
:type id: int
:param outer_id: The value of outerId is a reference to an element in ExceptionDetails that
represents the outer exception.
:type outer_id: int
:param type_name: Exception type name.
:type type_name: str
:param message: Required. Exception message.
:type message: str
:param has_full_stack: Indicates if full exception stack is provided in the exception. The
stack may be trimmed, such as in the case of a StackOverflow exception.
:type has_full_stack: bool
:param stack: Text describing the stack. Either stack or parsedStack should have a value.
:type stack: str
:param parsed_stack: List of stack frames. Either stack or parsedStack should have a value.
:type parsed_stack: list[~azure_monitor_client.models.StackFrame]
"""
_validation = {
'type_name': {'max_length': 1024, 'min_length': 0},
'message': {'required': True, 'max_length': 32768, 'min_length': 0},
'stack': {'max_length': 32768, 'min_length': 0},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'outer_id': {'key': 'outerId', 'type': 'int'},
'type_name': {'key': 'typeName', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'has_full_stack': {'key': 'hasFullStack', 'type': 'bool'},
'stack': {'key': 'stack', 'type': 'str'},
'parsed_stack': {'key': 'parsedStack', 'type': '[StackFrame]'},
}
def __init__(
self,
**kwargs
):
super(TelemetryExceptionDetails, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.outer_id = kwargs.get('outer_id', None)
self.type_name = kwargs.get('type_name', None)
self.message = kwargs['message']
self.has_full_stack = kwargs.get('has_full_stack', True)
self.stack = kwargs.get('stack', None)
self.parsed_stack = kwargs.get('parsed_stack', None)
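# Usage sketch (illustrative only): the id/outer_id pair expresses exception
# nesting -- the inner exception's outer_id points at the id of the exception
# that wraps it. All values below are hypothetical example data.
#
#     outer = TelemetryExceptionDetails(id=1, type_name="ValueError",
#                                       message="outer failure")
#     inner = TelemetryExceptionDetails(id=2, outer_id=1, type_name="KeyError",
#                                       message="inner failure")
#     exc_data = TelemetryExceptionData(version=2, exceptions=[outer, inner])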
class TelemetryItem(msrest.serialization.Model):
"""System variables for a telemetry item.
All required parameters must be populated in order to send to Azure.
:param version: Envelope version. For internal use only. By assigning this the default, it will
not be serialized within the payload unless changed to a value other than the default of 1.
:type version: int
:param name: Required. Type name of telemetry data item.
:type name: str
:param time: Required. Event date time when telemetry item was created. This is the wall clock
time on the client when the event was generated. There is no guarantee that the client's time
is accurate. This field must be formatted in UTC ISO 8601 format, with a trailing 'Z'
character, as described publicly on https://en.wikipedia.org/wiki/ISO_8601#UTC. Note: the
number of decimal seconds digits provided is variable (and unspecified). Consumers should
handle this, i.e. managed code consumers should not use format 'O' for parsing as it specifies
a fixed length. Example: 2009-06-15T13:45:30.0000000Z.
:type time: ~datetime.datetime
:param sample_rate: Sampling rate used in application. This telemetry item represents 1 /
sampleRate actual telemetry items.
:type sample_rate: float
:param sequence: Sequence field used to track absolute order of uploaded events.
:type sequence: str
:param instrumentation_key: The instrumentation key of the Application Insights resource.
:type instrumentation_key: str
:param tags: A set of tags. Key/value collection of context properties. See ContextTagKeys for
information on available properties.
:type tags: dict[str, str]
:param data: Telemetry data item.
:type data: ~azure_monitor_client.models.MonitorBase
"""
_validation = {
'name': {'required': True},
'time': {'required': True},
'sequence': {'max_length': 64, 'min_length': 0},
}
_attribute_map = {
'version': {'key': 'ver', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'time': {'key': 'time', 'type': 'iso-8601'},
'sample_rate': {'key': 'sampleRate', 'type': 'float'},
'sequence': {'key': 'seq', 'type': 'str'},
'instrumentation_key': {'key': 'iKey', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'data': {'key': 'data', 'type': 'MonitorBase'},
}
def __init__(
self,
**kwargs
):
super(TelemetryItem, self).__init__(**kwargs)
self.version = kwargs.get('version', 1)
self.name = kwargs['name']
self.time = kwargs['time']
self.sample_rate = kwargs.get('sample_rate', 100)
self.sequence = kwargs.get('sequence', None)
self.instrumentation_key = kwargs.get('instrumentation_key', None)
self.tags = kwargs.get('tags', None)
self.data = kwargs.get('data', None)
class TrackResponse(msrest.serialization.Model):
"""Response containing the status of each telemetry item.
:param items_received: The number of items received.
:type items_received: int
:param items_accepted: The number of items accepted.
:type items_accepted: int
:param errors: An array of error detail objects.
:type errors: list[~azure_monitor_client.models.TelemetryErrorDetails]
"""
_attribute_map = {
'items_received': {'key': 'itemsReceived', 'type': 'int'},
'items_accepted': {'key': 'itemsAccepted', 'type': 'int'},
'errors': {'key': 'errors', 'type': '[TelemetryErrorDetails]'},
}
def __init__(
self,
**kwargs
):
super(TrackResponse, self).__init__(**kwargs)
self.items_received = kwargs.get('items_received', None)
self.items_accepted = kwargs.get('items_accepted', None)
self.errors = kwargs.get('errors', None)
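# --------------------------------------------------------------------------
# Usage sketch (illustrative only, not generated code): how the models above
# compose into a telemetry envelope. The metric name, instrumentation key and
# envelope/base-type strings below are hypothetical example values.
# --------------------------------------------------------------------------
if __name__ == "__main__":
    import datetime

    # A single pre-aggregated data point; count/min/max/std_dev only apply to
    # aggregations, not to single measurements.
    point = MetricDataPoint(
        name="request_duration_ms",
        value=1240.0,
        data_point_type="Aggregation",
        count=10,
        min=80.0,
        max=300.0,
        std_dev=42.0,
    )
    # The domain item is wrapped in a MonitorBase (the "C section")...
    base = MonitorBase(
        base_type="MetricData",
        base_data=MetricsData(version=2, metrics=[point]),
    )
    # ...which in turn rides inside the TelemetryItem envelope.
    item = TelemetryItem(
        name="Microsoft.ApplicationInsights.Metric",
        time=datetime.datetime.utcnow(),
        instrumentation_key="00000000-0000-0000-0000-000000000000",
        data=base,
    )
    # msrest models expose serialize() to produce the wire-format dict.
    print(item.serialize())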
|
[
"[email protected]"
] | |
55218c19699e0f4849573cdf5d530c474390d190
|
65a3f548503cd1bdd9a429704aec630c0a507b4b
|
/src/genie/libs/parser/nxos/tests/test_show_igmp.py
|
c6905b6a9441834d1d68c3345e34ad3712d4ebff
|
[
"Apache-2.0"
] |
permissive
|
LouiseSianEvans/genieparser
|
75c3c73612db4a7cb31f657dc6ad9f25b5bfebb5
|
7dd4d81834479e35a6c08254e10e7692b00b897b
|
refs/heads/master
| 2020-04-30T15:25:04.158694 | 2019-03-27T22:58:15 | 2019-03-27T22:58:15 | 176,919,539 | 0 | 0 |
Apache-2.0
| 2019-03-21T10:10:46 | 2019-03-21T10:10:43 |
Python
|
UTF-8
|
Python
| false | false | 51,984 |
py
|
# Python
import unittest
from unittest.mock import Mock
# ATS
from ats.topology import Device
# Metaparser
from genie.metaparser.util.exceptions import SchemaEmptyParserError, \
SchemaMissingKeyError
# Parser
from genie.libs.parser.nxos.show_igmp import ShowIpIgmpInterface, \
ShowIpIgmpGroups, \
ShowIpIgmpLocalGroups, \
ShowIpIgmpSnooping
#=========================================================
# Unit test for show ip igmp snooping
#
#=========================================================
class test_show_ip_igmp_snooping(unittest.TestCase):
device = Device(name='aDevice')
empty_output = {'execute.return_value': ''}
golden_parsed_output_snooping_1 = \
{
'global_configuration': {
'enabled': 'enabled',
'v1v2_report_suppression': 'enabled',
'v3_report_suppression': 'disabled',
'link_local_groups_suppression': 'enabled',
'vpc_multicast_optimization': 'disabled',
},
'vlans': {
'1': { # configuration_vlan_id
'ip_igmp_snooping': 'enabled',
'v1v2_report_suppression': 'enabled',
'v3_report_suppression': 'disabled',
'link_local_groups_suppression': 'enabled',
'lookup_mode': 'ip',
'switch_querier': 'disabled',
'igmp_explicit_tracking': 'enabled',
'v2_fast_leave': 'disabled',
'router_ports_count': 1,
'groups_count': 0,
'vlan_vpc_function': 'enabled',
                'active_ports': ['Po20', 'Po30'],
'report_flooding': 'disabled',
'report_flooding_interfaces': 'n/a',
'group_address_for_proxy_leaves': 'no',
},
'100': { # configuration_vlan_id
'ip_igmp_snooping': 'enabled',
'v1v2_report_suppression': 'enabled',
'v3_report_suppression': 'disabled',
'link_local_groups_suppression': 'enabled',
'lookup_mode': 'ip',
'igmp_querier': {
'address': '50.1.1.1',
'version': 2,
'interval': 125,
'last_member_query_interval': 1,
'robustness': 2,
},
'switch_querier': 'disabled',
'igmp_explicit_tracking': 'enabled',
'v2_fast_leave': 'disabled',
'router_ports_count': 2,
'groups_count': 0,
'vlan_vpc_function': 'enabled',
'active_ports': ['Po20', 'Po30'],
'report_flooding': 'disabled',
'report_flooding_interfaces': 'n/a',
'group_address_for_proxy_leaves': 'no',
},
'101': { # configuration_vlan_id
'ip_igmp_snooping': 'enabled',
'v1v2_report_suppression': 'enabled',
'v3_report_suppression': 'disabled',
'link_local_groups_suppression': 'enabled',
'lookup_mode': 'ip',
'switch_querier': 'disabled',
'igmp_explicit_tracking': 'enabled',
'v2_fast_leave': 'disabled',
'router_ports_count': 1,
'groups_count': 0,
'vlan_vpc_function': 'enabled',
'active_ports': ['Po20', 'Po30'],
'report_flooding': 'disabled',
'report_flooding_interfaces': 'n/a',
'group_address_for_proxy_leaves': 'no',
},
},
}
golden_output_snooping_1 = {'execute.return_value': '''
N95_1# show ip igmp snooping
Global IGMP Snooping Information:
IGMP Snooping enabled
IGMPv1/v2 Report Suppression enabled
IGMPv3 Report Suppression disabled
Link Local Groups Suppression enabled
VPC Multicast optimization disabled
IGMP Snooping information for vlan 1
IGMP snooping enabled
Lookup mode: IP
IGMP querier none
Switch-querier disabled
IGMP Explicit tracking enabled
IGMPv2 Fast leave disabled
IGMPv1/v2 Report suppression enabled
IGMPv3 Report suppression disabled
Link Local Groups suppression enabled
Router port detection using PIM Hellos, IGMP Queries
Number of router-ports: 1
Number of groups: 0
VLAN vPC function enabled
Active ports:
Po20 Po30
Report Flooding: Disabled
Interfaces for Report Flooding: n/a
Use Group Address for Proxy Leaves: no
IGMP Snooping information for vlan 100
IGMP snooping enabled
Lookup mode: IP
IGMP querier present, address: 50.1.1.1, version: 2, i/f Vlan100
Querier interval: 125 secs
Querier last member query interval: 1 secs
Querier robustness: 2
Switch-querier disabled
IGMP Explicit tracking enabled
IGMPv2 Fast leave disabled
IGMPv1/v2 Report suppression enabled
IGMPv3 Report suppression disabled
Link Local Groups suppression enabled
Router port detection using PIM Hellos, IGMP Queries
Number of router-ports: 2
Number of groups: 0
VLAN vPC function enabled
Active ports:
Po20 Po30
Report Flooding: Disabled
Interfaces for Report Flooding: n/a
Use Group Address for Proxy Leaves: no
IGMP Snooping information for vlan 101
IGMP snooping enabled
Lookup mode: IP
IGMP querier none
Switch-querier disabled
IGMP Explicit tracking enabled
IGMPv2 Fast leave disabled
IGMPv1/v2 Report suppression enabled
IGMPv3 Report suppression disabled
Link Local Groups suppression enabled
Router port detection using PIM Hellos, IGMP Queries
Number of router-ports: 1
Number of groups: 0
VLAN vPC function enabled
Active ports:
Po20 Po30
Report Flooding: Disabled
Interfaces for Report Flooding: n/a
Use Group Address for Proxy Leaves: no
'''}
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowIpIgmpSnooping(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_golden_snooping_1(self):
self.maxDiff = None
self.device = Mock(**self.golden_output_snooping_1)
obj = ShowIpIgmpSnooping(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_snooping_1)
# ==============================================
# Unit test for 'show ip igmp interface vrf all'
# Unit test for 'show ip igmp interface'
# Unit test for 'show ip igmp interface vrf <WORD>'
# ==============================================
class test_show_ip_igmp_interface(unittest.TestCase):
device = Device(name='aDevice')
empty_output = {'execute.return_value': ''}
golden_parsed_output = {
"vrfs": {
"default": {
"groups_count": 2,
"interface": {
"Ethernet2/2": {
"query_max_response_time": 10,
"vrf_name": "default",
"statistics": {
"general": {
"sent": {
"v2_reports": 0,
"v2_queries": 16,
"v2_leaves": 0
},
"received": {
"v2_reports": 0,
"v2_queries": 16,
"v2_leaves": 0
}
}
},
"configured_query_max_response_time": 10,
"pim_dr": True,
"vrf_id": 1,
"querier": "10.1.3.1",
"membership_count": 0,
"last_member": {
"query_count": 2,
"mrt": 1,
},
"startup_query": {
"interval": 31,
"configured_interval": 31,
"count": 2,
},
"link_status": "up",
"subnet": "10.1.3.0/24",
"address": "10.1.3.1",
"link_local_groups_reporting": False,
"unsolicited_report_interval": 10,
"enable_refcount": 1,
"enable": True,
"next_query_sent_in": "00:00:55",
"configured_query_interval": 125,
"old_membership_count": 0,
"group_timeout": 260,
"configured_robustness_variable": 2,
"vpc_svi": False,
"querier_version": 2,
"version": 2,
"query_interval": 125,
"querier_timeout": 255,
"immediate_leave": False,
"configured_group_timeout": 260,
"host_version": 2,
"configured_querier_timeout": 255,
"robustness_variable": 2,
"oper_status": "up"
},
"Ethernet2/1": {
"query_max_response_time": 15,
"vrf_name": "default",
"statistics": {
"errors": {
"router_alert_check": 19,
},
"general": {
"sent": {
"v2_reports": 0,
"v3_queries": 11,
"v2_leaves": 0,
"v3_reports": 56,
"v2_queries": 5
},
"received": {
"v2_reports": 0,
"v3_queries": 11,
"v2_leaves": 0,
"v3_reports": 56,
"v2_queries": 5
}
}
},
"configured_query_max_response_time": 15,
"max_groups": 10,
"vrf_id": 1,
"querier": "10.1.2.1",
"membership_count": 4,
"last_member": {
"query_count": 5,
"mrt": 1,
},
"startup_query": {
"interval": 33,
"configured_interval": 31,
"count": 5,
},
"pim_dr": True,
"link_status": "up",
"subnet": "10.1.2.0/24",
"address": "10.1.2.1",
"link_local_groups_reporting": False,
"unsolicited_report_interval": 10,
"enable_refcount": 9,
"enable": True,
"group_policy": "access-group-filter",
"next_query_sent_in": "00:00:47",
"configured_query_interval": 133,
"old_membership_count": 0,
"group_timeout": 680,
"configured_robustness_variable": 5,
"vpc_svi": False,
"querier_version": 3,
"available_groups": 10,
"version": 3,
"query_interval": 133,
"querier_timeout": 672,
"immediate_leave": True,
"configured_group_timeout": 260,
"host_version": 3,
"configured_querier_timeout": 255,
"robustness_variable": 5,
"oper_status": "up"
}
}
},
"VRF1": {
"groups_count": 2,
"interface": {
"Ethernet2/4": {
"query_max_response_time": 15,
"vrf_name": "VRF1",
"statistics": {
"general": {
"sent": {
"v2_reports": 0,
"v3_queries": 8,
"v2_leaves": 0,
"v3_reports": 44,
"v2_queries": 8
},
"received": {
"v2_reports": 0,
"v3_queries": 8,
"v2_leaves": 0,
"v3_reports": 44,
"v2_queries": 8
}
}
},
"configured_query_max_response_time": 15,
"max_groups": 10,
"vrf_id": 3,
"querier": "20.1.2.1",
"membership_count": 4,
"last_member": {
"query_count": 5,
"mrt": 1,
},
"startup_query": {
"interval": 33,
"configured_interval": 31,
"count": 5,
},
"pim_dr": True,
"link_status": "up",
"subnet": "20.1.2.0/24",
"address": "20.1.2.1",
"link_local_groups_reporting": False,
"unsolicited_report_interval": 10,
"enable_refcount": 9,
"enable": True,
"group_policy": "access-group-filter",
"next_query_sent_in": "00:00:06",
"configured_query_interval": 133,
"old_membership_count": 0,
"group_timeout": 680,
"configured_robustness_variable": 5,
"vpc_svi": False,
"querier_version": 3,
"available_groups": 10,
"version": 3,
"query_interval": 133,
"querier_timeout": 672,
"immediate_leave": True,
"configured_group_timeout": 260,
"host_version": 3,
"configured_querier_timeout": 255,
"robustness_variable": 5,
"oper_status": "up"
},
"Ethernet2/3": {
"query_max_response_time": 10,
"vrf_name": "VRF1",
"statistics": {
"general": {
"sent": {
"v2_reports": 0,
"v2_queries": 16,
"v2_leaves": 0
},
"received": {
"v2_reports": 0,
"v2_queries": 16,
"v2_leaves": 0
}
}
},
"configured_query_max_response_time": 10,
"pim_dr": True,
"vrf_id": 3,
"querier": "20.1.3.1",
"membership_count": 0,
"last_member": {
"query_count": 2,
"mrt": 1,
},
"startup_query": {
"interval": 31,
"configured_interval": 31,
"count": 2,
},
"link_status": "up",
"subnet": "20.1.3.0/24",
"address": "20.1.3.1",
"link_local_groups_reporting": False,
"unsolicited_report_interval": 10,
"enable_refcount": 1,
"enable": True,
"next_query_sent_in": "00:00:47",
"configured_query_interval": 125,
"old_membership_count": 0,
"group_timeout": 260,
"configured_robustness_variable": 2,
"vpc_svi": False,
"querier_version": 2,
"version": 2,
"query_interval": 125,
"querier_timeout": 255,
"immediate_leave": False,
"configured_group_timeout": 260,
"host_version": 2,
"configured_querier_timeout": 255,
"robustness_variable": 2,
"oper_status": "up"
}
}
},
"tenant1": {
"groups_count": 0,
},
"manegement": {
"groups_count": 0,
}
}
}
golden_output = {'execute.return_value': '''\
IGMP Interfaces for VRF "default", count: 2
Ethernet2/1, Interface status: protocol-up/link-up/admin-up
IP address: 10.1.2.1, IP subnet: 10.1.2.0/24
Active querier: 10.1.2.1, version: 3, next query sent in: 00:00:47
Membership count: 4
Old Membership count 0
IGMP version: 3, host version: 3
IGMP query interval: 133 secs, configured value: 133 secs
IGMP max response time: 15 secs, configured value: 15 secs
IGMP startup query interval: 33 secs, configured value: 31 secs
IGMP startup query count: 5
IGMP last member mrt: 1 secs
IGMP last member query count: 5
IGMP group timeout: 680 secs, configured value: 260 secs
IGMP querier timeout: 672 secs, configured value: 255 secs
IGMP unsolicited report interval: 10 secs
IGMP robustness variable: 5, configured value: 5
IGMP reporting for link-local groups: disabled
IGMP interface enable refcount: 9
IGMP interface immediate leave: enabled
IGMP VRF name default (id 1)
IGMP Report Policy: access-group-filter
IGMP State Limit: 10, Available States: 10
IGMP interface statistics: (only non-zero values displayed)
General (sent/received):
v2-queries: 5/5, v2-reports: 0/0, v2-leaves: 0/0
v3-queries: 11/11, v3-reports: 56/56
Errors:
Packets dropped due to router-alert check: 19
Interface PIM DR: Yes
Interface vPC SVI: No
Interface vPC CFS statistics:
Ethernet2/2, Interface status: protocol-up/link-up/admin-up
IP address: 10.1.3.1, IP subnet: 10.1.3.0/24
Active querier: 10.1.3.1, version: 2, next query sent in: 00:00:55
Membership count: 0
Old Membership count 0
IGMP version: 2, host version: 2
IGMP query interval: 125 secs, configured value: 125 secs
IGMP max response time: 10 secs, configured value: 10 secs
IGMP startup query interval: 31 secs, configured value: 31 secs
IGMP startup query count: 2
IGMP last member mrt: 1 secs
IGMP last member query count: 2
IGMP group timeout: 260 secs, configured value: 260 secs
IGMP querier timeout: 255 secs, configured value: 255 secs
IGMP unsolicited report interval: 10 secs
IGMP robustness variable: 2, configured value: 2
IGMP reporting for link-local groups: disabled
IGMP interface enable refcount: 1
IGMP interface immediate leave: disabled
IGMP VRF name default (id 1)
IGMP Report Policy: None
IGMP State Limit: None
IGMP interface statistics: (only non-zero values displayed)
General (sent/received):
v2-queries: 16/16, v2-reports: 0/0, v2-leaves: 0/0
Errors:
Interface PIM DR: Yes
Interface vPC SVI: No
Interface vPC CFS statistics:
IGMP Interfaces for VRF "VRF1", count: 2
Ethernet2/3, Interface status: protocol-up/link-up/admin-up
IP address: 20.1.3.1, IP subnet: 20.1.3.0/24
Active querier: 20.1.3.1, version: 2, next query sent in: 00:00:47
Membership count: 0
Old Membership count 0
IGMP version: 2, host version: 2
IGMP query interval: 125 secs, configured value: 125 secs
IGMP max response time: 10 secs, configured value: 10 secs
IGMP startup query interval: 31 secs, configured value: 31 secs
IGMP startup query count: 2
IGMP last member mrt: 1 secs
IGMP last member query count: 2
IGMP group timeout: 260 secs, configured value: 260 secs
IGMP querier timeout: 255 secs, configured value: 255 secs
IGMP unsolicited report interval: 10 secs
IGMP robustness variable: 2, configured value: 2
IGMP reporting for link-local groups: disabled
IGMP interface enable refcount: 1
IGMP interface immediate leave: disabled
IGMP VRF name VRF1 (id 3)
IGMP Report Policy: None
IGMP State Limit: None
IGMP interface statistics: (only non-zero values displayed)
General (sent/received):
v2-queries: 16/16, v2-reports: 0/0, v2-leaves: 0/0
Errors:
Interface PIM DR: Yes
Interface vPC SVI: No
Interface vPC CFS statistics:
Ethernet2/4, Interface status: protocol-up/link-up/admin-up
IP address: 20.1.2.1, IP subnet: 20.1.2.0/24
Active querier: 20.1.2.1, version: 3, next query sent in: 00:00:06
Membership count: 4
Old Membership count 0
IGMP version: 3, host version: 3
IGMP query interval: 133 secs, configured value: 133 secs
IGMP max response time: 15 secs, configured value: 15 secs
IGMP startup query interval: 33 secs, configured value: 31 secs
IGMP startup query count: 5
IGMP last member mrt: 1 secs
IGMP last member query count: 5
IGMP group timeout: 680 secs, configured value: 260 secs
IGMP querier timeout: 672 secs, configured value: 255 secs
IGMP unsolicited report interval: 10 secs
IGMP robustness variable: 5, configured value: 5
IGMP reporting for link-local groups: disabled
IGMP interface enable refcount: 9
IGMP interface immediate leave: enabled
IGMP VRF name VRF1 (id 3)
IGMP Report Policy: access-group-filter
IGMP State Limit: 10, Available States: 10
IGMP interface statistics: (only non-zero values displayed)
General (sent/received):
v2-queries: 8/8, v2-reports: 0/0, v2-leaves: 0/0
v3-queries: 8/8, v3-reports: 44/44
Errors:
Interface PIM DR: Yes
Interface vPC SVI: No
Interface vPC CFS statistics:
IGMP Interfaces for VRF "tenant1", count: 0
IGMP Interfaces for VRF "manegement", count: 0
'''}
golden_parsed_output_1 = {
"vrfs": {
"tenant1": {
"groups_count": 0,
},
}
}
golden_output_1 = {'execute.return_value': '''\
IGMP Interfaces for VRF "tenant1", count: 0
'''}
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowIpIgmpInterface(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_golden(self):
self.device = Mock(**self.golden_output)
obj = ShowIpIgmpInterface(device=self.device)
parsed_output = obj.parse(vrf='all')
self.assertEqual(parsed_output,self.golden_parsed_output)
def test_golden_1(self):
self.device = Mock(**self.golden_output_1)
obj = ShowIpIgmpInterface(device=self.device)
parsed_output = obj.parse(vrf='tenant1')
self.assertEqual(parsed_output,self.golden_parsed_output_1)
# ==============================================
# Unit test for 'show ip igmp groups'
# Unit test for 'show ip igmp groups vrf all'
# Unit test for 'show ip igmp groups vrf <WORD>'
# ==============================================
class test_show_ip_igmp_groups(unittest.TestCase):
device = Device(name='aDevice')
empty_output = {'execute.return_value': ''}
golden_parsed_output = {
"vrfs": {
"VRF1": {
"interface": {
"Ethernet2/4": {
"group": {
"239.6.6.6": {
"expire": "never",
"type": "S",
"last_reporter": "20.1.2.1",
"up_time": "00:15:27"
},
"239.8.8.8": {
"source": {
"2.2.2.2": {
"expire": "never",
"type": "S",
"last_reporter": "20.1.2.1",
"up_time": "00:15:27"
}
},
},
"239.5.5.5": {
"expire": "never",
"type": "S",
"last_reporter": "20.1.2.1",
"up_time": "00:15:27"
},
"239.7.7.7": {
"source": {
"2.2.2.1": {
"expire": "never",
"type": "S",
"last_reporter": "20.1.2.1",
"up_time": "00:15:27"
}
},
}
}
}
},
"total_entries": 4
},
"default": {
"interface": {
"Ethernet2/1": {
"group": {
"239.6.6.6": {
"expire": "never",
"type": "S",
"last_reporter": "10.1.2.1",
"up_time": "00:20:53"
},
"239.8.8.8": {
"source": {
"2.2.2.2": {
"expire": "never",
"type": "S",
"last_reporter": "10.1.2.1",
"up_time": "00:20:34"
}
},
},
"239.5.5.5": {
"expire": "never",
"type": "S",
"last_reporter": "10.1.2.1",
"up_time": "00:21:00"
},
"239.7.7.7": {
"source": {
"2.2.2.1": {
"expire": "never",
"type": "S",
"last_reporter": "10.1.2.1",
"up_time": "00:20:42"
}
},
}
}
}
},
"total_entries": 4
}
}
}
golden_output = {'execute.return_value': '''\
IGMP Connected Group Membership for VRF "default" - 4 total entries
Type: S - Static, D - Dynamic, L - Local, T - SSM Translated
Group Address Type Interface Uptime Expires Last Reporter
239.5.5.5 S Ethernet2/1 00:21:00 never 10.1.2.1
239.6.6.6 S Ethernet2/1 00:20:53 never 10.1.2.1
239.7.7.7
2.2.2.1 S Ethernet2/1 00:20:42 never 10.1.2.1
239.8.8.8
2.2.2.2 S Ethernet2/1 00:20:34 never 10.1.2.1
IGMP Connected Group Membership for VRF "VRF1" - 4 total entries
Type: S - Static, D - Dynamic, L - Local, T - SSM Translated
Group Address Type Interface Uptime Expires Last Reporter
239.5.5.5 S Ethernet2/4 00:15:27 never 20.1.2.1
239.6.6.6 S Ethernet2/4 00:15:27 never 20.1.2.1
239.7.7.7
2.2.2.1 S Ethernet2/4 00:15:27 never 20.1.2.1
239.8.8.8
2.2.2.2 S Ethernet2/4 00:15:27 never 20.1.2.1
'''}
golden_parsed_output_1 = {
"vrfs": {
"default": {
"interface": {
"Ethernet2/1": {
"group": {
"239.6.6.6": {
"expire": "never",
"type": "S",
"last_reporter": "10.1.2.1",
"up_time": "00:20:53"
},
"239.8.8.8": {
"source": {
"2.2.2.2": {
"expire": "never",
"type": "S",
"last_reporter": "10.1.2.1",
"up_time": "00:20:34"
}
},
},
"239.5.5.5": {
"expire": "never",
"type": "S",
"last_reporter": "10.1.2.1",
"up_time": "00:21:00"
},
"239.7.7.7": {
"source": {
"2.2.2.1": {
"expire": "never",
"type": "S",
"last_reporter": "10.1.2.1",
"up_time": "00:20:42"
}
},
}
}
}
},
"total_entries": 4
}
}
}
golden_output_1 = {'execute.return_value': '''\
IGMP Connected Group Membership for VRF "default" - 4 total entries
Type: S - Static, D - Dynamic, L - Local, T - SSM Translated
Group Address Type Interface Uptime Expires Last Reporter
239.5.5.5 S Ethernet2/1 00:21:00 never 10.1.2.1
239.6.6.6 S Ethernet2/1 00:20:53 never 10.1.2.1
239.7.7.7
2.2.2.1 S Ethernet2/1 00:20:42 never 10.1.2.1
239.8.8.8
2.2.2.2 S Ethernet2/1 00:20:34 never 10.1.2.1
'''}
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowIpIgmpGroups(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_golden(self):
self.device = Mock(**self.golden_output)
obj = ShowIpIgmpGroups(device=self.device)
parsed_output = obj.parse(vrf='all')
self.assertEqual(parsed_output, self.golden_parsed_output)
def test_golden_1(self):
self.device = Mock(**self.golden_output_1)
obj = ShowIpIgmpGroups(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_1)
# ==============================================
# Unit test for 'show ip igmp local-groups'
# Unit test for 'show ip igmp local-groups vrf all'
# Unit test for 'show ip igmp local-groups vrf <WORD>'
# ==============================================
class test_show_ip_igmp_local_groups(unittest.TestCase):
device = Device(name='aDevice')
empty_output = {'execute.return_value': ''}
golden_parsed_output = {
"vrfs": {
"default": {
"interface": {
"Ethernet2/1": {
"join_group": {
"239.1.1.1 *": {
"source": "*",
"group": "239.1.1.1"
},
"239.3.3.3 1.1.1.1": {
"source": "1.1.1.1",
"group": "239.3.3.3"
},
"239.2.2.2 *": {
"source": "*",
"group": "239.2.2.2"
},
"239.4.4.4 1.1.1.2": {
"source": "1.1.1.2",
"group": "239.4.4.4"
}
},
"static_group": {
"239.5.5.5 *": {
"source": "*",
"group": "239.5.5.5"
},
"239.8.8.8 2.2.2.2": {
"source": "2.2.2.2",
"group": "239.8.8.8"
},
"239.6.6.6 *": {
"source": "*",
"group": "239.6.6.6"
},
"239.7.7.7 2.2.2.1": {
"source": "2.2.2.1",
"group": "239.7.7.7"
}
},
"group": {
"239.1.1.1": {
"last_reporter": "00:00:13",
"type": "local"
},
"239.8.8.8": {
"source": {
"2.2.2.2": {
"last_reporter": "01:06:47",
"type": "static"
}
},
},
"239.2.2.2": {
"last_reporter": "00:00:18",
"type": "local"
},
"239.4.4.4": {
"source": {
"1.1.1.2": {
"last_reporter": "00:00:06",
"type": "local"
}
},
},
"239.6.6.6": {
"last_reporter": "01:06:47",
"type": "static"
},
"239.5.5.5": {
"last_reporter": "01:06:47",
"type": "static"
},
"239.3.3.3": {
"source": {
"1.1.1.1": {
"last_reporter": "00:00:11",
"type": "local"
}
},
},
"239.7.7.7": {
"source": {
"2.2.2.1": {
"last_reporter": "01:06:47",
"type": "static"
}
},
}
}
}
}
},
"VRF1": {
"interface": {
"Ethernet2/4": {
"join_group": {
"239.1.1.1 *": {
"source": "*",
"group": "239.1.1.1"
},
"239.3.3.3 1.1.1.1": {
"source": "1.1.1.1",
"group": "239.3.3.3"
},
"239.2.2.2 *": {
"source": "*",
"group": "239.2.2.2"
},
"239.4.4.4 1.1.1.2": {
"source": "1.1.1.2",
"group": "239.4.4.4"
}
},
"static_group": {
"239.5.5.5 *": {
"source": "*",
"group": "239.5.5.5"
},
"239.8.8.8 2.2.2.2": {
"source": "2.2.2.2",
"group": "239.8.8.8"
},
"239.6.6.6 *": {
"source": "*",
"group": "239.6.6.6"
},
"239.7.7.7 2.2.2.1": {
"source": "2.2.2.1",
"group": "239.7.7.7"
}
},
"group": {
"239.1.1.1": {
"last_reporter": "00:00:50",
"type": "local"
},
"239.8.8.8": {
"source": {
"2.2.2.2": {
"last_reporter": "01:06:47",
"type": "static"
}
},
},
"239.2.2.2": {
"last_reporter": "00:00:54",
"type": "local"
},
"239.4.4.4": {
"source": {
"1.1.1.2": {
"last_reporter": "00:00:55",
"type": "local"
}
},
},
"239.6.6.6": {
"last_reporter": "01:06:47",
"type": "static"
},
"239.5.5.5": {
"last_reporter": "01:06:47",
"type": "static"
},
"239.3.3.3": {
"source": {
"1.1.1.1": {
"last_reporter": "00:01:01",
"type": "local"
}
},
},
"239.7.7.7": {
"source": {
"2.2.2.1": {
"last_reporter": "01:06:47",
"type": "static"
}
},
                            }
                        }
                    }
                }
            }
        }
}
golden_output = {'execute.return_value': '''\
IGMP Locally Joined Group Membership for VRF "default"
Group Address Source Address Type Interface Last Reported
239.1.1.1 * Local Eth2/1 00:00:13
239.2.2.2 * Local Eth2/1 00:00:18
239.3.3.3 1.1.1.1 Local Eth2/1 00:00:11
239.4.4.4 1.1.1.2 Local Eth2/1 00:00:06
239.5.5.5 * Static Eth2/1 01:06:47
239.6.6.6 * Static Eth2/1 01:06:47
239.7.7.7 2.2.2.1 Static Eth2/1 01:06:47
239.8.8.8 2.2.2.2 Static Eth2/1 01:06:47
IGMP Locally Joined Group Membership for VRF "VRF1"
Group Address Source Address Type Interface Last Reported
239.1.1.1 * Local Eth2/4 00:00:50
239.2.2.2 * Local Eth2/4 00:00:54
239.3.3.3 1.1.1.1 Local Eth2/4 00:01:01
239.4.4.4 1.1.1.2 Local Eth2/4 00:00:55
239.5.5.5 * Static Eth2/4 01:06:47
239.6.6.6 * Static Eth2/4 01:06:47
239.7.7.7 2.2.2.1 Static Eth2/4 01:06:47
239.8.8.8 2.2.2.2 Static Eth2/4 01:06:47
'''}
golden_parsed_output_1 = {
"vrfs": {
"VRF1": {
"interface": {
"Ethernet2/4": {
"join_group": {
"239.1.1.1 *": {
"source": "*",
"group": "239.1.1.1"
},
"239.3.3.3 1.1.1.1": {
"source": "1.1.1.1",
"group": "239.3.3.3"
},
"239.2.2.2 *": {
"source": "*",
"group": "239.2.2.2"
},
"239.4.4.4 1.1.1.2": {
"source": "1.1.1.2",
"group": "239.4.4.4"
}
},
"static_group": {
"239.5.5.5 *": {
"source": "*",
"group": "239.5.5.5"
},
"239.8.8.8 2.2.2.2": {
"source": "2.2.2.2",
"group": "239.8.8.8"
},
"239.6.6.6 *": {
"source": "*",
"group": "239.6.6.6"
},
"239.7.7.7 2.2.2.1": {
"source": "2.2.2.1",
"group": "239.7.7.7"
}
},
"group": {
"239.1.1.1": {
"last_reporter": "00:00:50",
"type": "local"
},
"239.8.8.8": {
"source": {
"2.2.2.2": {
"last_reporter": "01:06:47",
"type": "static"
}
},
},
"239.2.2.2": {
"last_reporter": "00:00:54",
"type": "local"
},
"239.4.4.4": {
"source": {
"1.1.1.2": {
"last_reporter": "00:00:55",
"type": "local"
}
},
},
"239.6.6.6": {
"last_reporter": "01:06:47",
"type": "static"
},
"239.5.5.5": {
"last_reporter": "01:06:47",
"type": "static"
},
"239.3.3.3": {
"source": {
"1.1.1.1": {
"last_reporter": "00:01:01",
"type": "local"
}
},
},
"239.7.7.7": {
"source": {
"2.2.2.1": {
"last_reporter": "01:06:47",
"type": "static"
}
},
                            }
                        }
                    }
                }
            }
        }
}
golden_output_1 = {'execute.return_value': '''\
IGMP Locally Joined Group Membership for VRF "VRF1"
Group Address Source Address Type Interface Last Reported
239.1.1.1 * Local Eth2/4 00:00:50
239.2.2.2 * Local Eth2/4 00:00:54
239.3.3.3 1.1.1.1 Local Eth2/4 00:01:01
239.4.4.4 1.1.1.2 Local Eth2/4 00:00:55
239.5.5.5 * Static Eth2/4 01:06:47
239.6.6.6 * Static Eth2/4 01:06:47
239.7.7.7 2.2.2.1 Static Eth2/4 01:06:47
239.8.8.8 2.2.2.2 Static Eth2/4 01:06:47
'''}
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowIpIgmpLocalGroups(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_golden(self):
self.device = Mock(**self.golden_output)
obj = ShowIpIgmpLocalGroups(device=self.device)
parsed_output = obj.parse(vrf='all')
self.assertEqual(parsed_output, self.golden_parsed_output)
def test_golden_1(self):
self.device = Mock(**self.golden_output_1)
obj = ShowIpIgmpLocalGroups(device=self.device)
parsed_output = obj.parse(vrf='VRF1')
self.assertEqual(parsed_output, self.golden_parsed_output_1)
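# Usage sketch (illustrative only): outside of these mocked unit tests, the
# same parsers run against a live device loaded from a pyATS testbed. The
# testbed file name and device name below are hypothetical.
#
#     from genie.testbed import load
#     testbed = load('testbed.yaml')
#     device = testbed.devices['nxos-1']
#     device.connect()
#     snooping = ShowIpIgmpSnooping(device=device).parse()
#     groups = ShowIpIgmpGroups(device=device).parse(vrf='all')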
if __name__ == '__main__':
unittest.main()
|
[
"[email protected]"
] | |
972ea4c7a9f98ddd846f80c492b024ad39d1440a
|
60a7e7dc2ba82c5c74352dc8466a9becba068e2e
|
/backend2/userapi1/migrations/0001_initial.py
|
8c0c9e319f589c09a402c93581e1c8af2e666c7e
|
[] |
no_license
|
TestTask12/SKD-Django-
|
d7b445d8afd32fe5aa877c31451b7f2d932d2fe7
|
47bf79071d2781d129794e9b47d396cfd9162d00
|
refs/heads/master
| 2023-07-06T10:08:04.156998 | 2021-08-02T12:26:04 | 2021-08-02T12:26:04 | 387,738,104 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,970 |
py
|
# Generated by Django 3.2.5 on 2021-07-23 10:53
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0012_alter_user_first_name_max_length'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('username', models.CharField(db_index=True, max_length=255, unique=True)),
('email', models.EmailField(db_index=True, max_length=255, unique=True)),
('is_verified', models.BooleanField(default=False)),
('is_active', models.BooleanField(default=True)),
('is_staff', models.BooleanField(default=False)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'abstract': False,
},
),
]
|
[
"[email protected]"
] | |
cd347060e1da3d2391d0143e12892f9eac6c4346
|
9df2fb0bc59ab44f026b0a2f5ef50c72b2fb2ceb
|
/sdk/storage/azure-mgmt-storage/generated_samples/blob_containers_put_immutability_policy_allow_protected_append_writes_all.py
|
959e1d06da38a2e43845d975a656d18959590c16
|
[
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] |
permissive
|
openapi-env-test/azure-sdk-for-python
|
b334a2b65eeabcf9b7673879a621abb9be43b0f6
|
f61090e96094cfd4f43650be1a53425736bd8985
|
refs/heads/main
| 2023-08-30T14:22:14.300080 | 2023-06-08T02:53:04 | 2023-06-08T02:53:04 | 222,384,897 | 1 | 0 |
MIT
| 2023-09-08T08:38:48 | 2019-11-18T07:09:24 |
Python
|
UTF-8
|
Python
| false | false | 1,700 |
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
from azure.mgmt.storage import StorageManagementClient
"""
# PREREQUISITES
pip install azure-identity
pip install azure-mgmt-storage
# USAGE
python blob_containers_put_immutability_policy_allow_protected_append_writes_all.py
    Before running the sample, please set the values of the client ID, tenant ID and client secret
    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
    AZURE_CLIENT_SECRET. For more info about how to get these values, please see:
https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""
def main():
client = StorageManagementClient(
credential=DefaultAzureCredential(),
subscription_id="{subscription-id}",
)
response = client.blob_containers.create_or_update_immutability_policy(
resource_group_name="res1782",
account_name="sto7069",
container_name="container6397",
)
print(response)
# x-ms-original-file: specification/storage/resource-manager/Microsoft.Storage/stable/2022-09-01/examples/BlobContainersPutImmutabilityPolicyAllowProtectedAppendWritesAll.json
if __name__ == "__main__":
main()
|
[
"[email protected]"
] | |
980c0b0837b2db2ae0a95f0e3aec09938d06100f
|
a5698f82064aade6af0f1da21f504a9ef8c9ac6e
|
/huaweicloud-sdk-cce/huaweicloudsdkcce/v3/model/cce_job_spec.py
|
dc9786d83d3d306eea915bbbb9939ea87a5e228b
|
[
"Apache-2.0"
] |
permissive
|
qizhidong/huaweicloud-sdk-python-v3
|
82a2046fbb7d62810984399abb2ca72b3b47fac6
|
6cdcf1da8b098427e58fc3335a387c14df7776d0
|
refs/heads/master
| 2023-04-06T02:58:15.175373 | 2021-03-30T10:47:29 | 2021-03-30T10:47:29 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,808 |
py
|
# coding: utf-8
import pprint
import re
import six
class CCEJobSpec:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'cluster_uid': 'str',
'extend_param': 'dict(str, str)',
'resource_id': 'str',
'resource_name': 'str',
'sub_jobs': 'list[CCEJob]',
'type': 'str'
}
attribute_map = {
'cluster_uid': 'clusterUID',
'extend_param': 'extendParam',
'resource_id': 'resourceID',
'resource_name': 'resourceName',
'sub_jobs': 'subJobs',
'type': 'type'
}
def __init__(self, cluster_uid=None, extend_param=None, resource_id=None, resource_name=None, sub_jobs=None, type=None):
"""CCEJobSpec - a model defined in huaweicloud sdk"""
self._cluster_uid = None
self._extend_param = None
self._resource_id = None
self._resource_name = None
self._sub_jobs = None
self._type = None
self.discriminator = None
if cluster_uid is not None:
self.cluster_uid = cluster_uid
if extend_param is not None:
self.extend_param = extend_param
if resource_id is not None:
self.resource_id = resource_id
if resource_name is not None:
self.resource_name = resource_name
if sub_jobs is not None:
self.sub_jobs = sub_jobs
if type is not None:
self.type = type
@property
def cluster_uid(self):
"""Gets the cluster_uid of this CCEJobSpec.
        The ID of the cluster that the job belongs to.
:return: The cluster_uid of this CCEJobSpec.
:rtype: str
"""
return self._cluster_uid
@cluster_uid.setter
def cluster_uid(self, cluster_uid):
"""Sets the cluster_uid of this CCEJobSpec.
        The ID of the cluster that the job belongs to.
:param cluster_uid: The cluster_uid of this CCEJobSpec.
:type: str
"""
self._cluster_uid = cluster_uid
@property
def extend_param(self):
"""Gets the extend_param of this CCEJobSpec.
        Extended parameters.
:return: The extend_param of this CCEJobSpec.
:rtype: dict(str, str)
"""
return self._extend_param
@extend_param.setter
def extend_param(self, extend_param):
"""Sets the extend_param of this CCEJobSpec.
        Extended parameters.
:param extend_param: The extend_param of this CCEJobSpec.
:type: dict(str, str)
"""
self._extend_param = extend_param
@property
def resource_id(self):
"""Gets the resource_id of this CCEJobSpec.
        The ID of the resource that the job operates on.
:return: The resource_id of this CCEJobSpec.
:rtype: str
"""
return self._resource_id
@resource_id.setter
def resource_id(self, resource_id):
"""Sets the resource_id of this CCEJobSpec.
        The ID of the resource that the job operates on.
:param resource_id: The resource_id of this CCEJobSpec.
:type: str
"""
self._resource_id = resource_id
@property
def resource_name(self):
"""Gets the resource_name of this CCEJobSpec.
        The name of the resource that the job operates on.
:return: The resource_name of this CCEJobSpec.
:rtype: str
"""
return self._resource_name
@resource_name.setter
def resource_name(self, resource_name):
"""Sets the resource_name of this CCEJobSpec.
        The name of the resource that the job operates on.
:param resource_name: The resource_name of this CCEJobSpec.
:type: str
"""
self._resource_name = resource_name
@property
def sub_jobs(self):
"""Gets the sub_jobs of this CCEJobSpec.
        The list of sub-jobs. - Contains the details of every sub-job. - When creating a cluster, node, etc., the creation job is usually composed of several sub-jobs; the job completes only after all of its sub-jobs have completed.
:return: The sub_jobs of this CCEJobSpec.
:rtype: list[CCEJob]
"""
return self._sub_jobs
@sub_jobs.setter
def sub_jobs(self, sub_jobs):
"""Sets the sub_jobs of this CCEJobSpec.
        The list of sub-jobs. - Contains the details of every sub-job. - When creating a cluster, node, etc., the creation job is usually composed of several sub-jobs; the job completes only after all of its sub-jobs have completed.
:param sub_jobs: The sub_jobs of this CCEJobSpec.
:type: list[CCEJob]
"""
self._sub_jobs = sub_jobs
@property
def type(self):
"""Gets the type of this CCEJobSpec.
        The type of the job, e.g. "CreateCluster" - create a cluster.
:return: The type of this CCEJobSpec.
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this CCEJobSpec.
        The type of the job, e.g. "CreateCluster" - create a cluster.
:param type: The type of this CCEJobSpec.
:type: str
"""
self._type = type
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, CCEJobSpec):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
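if __name__ == "__main__":
    # Minimal usage sketch (illustrative only, not part of the SDK source);
    # the field values below are made up.
    spec = CCEJobSpec(cluster_uid="cluster-123", type="CreateCluster")
    print(spec.to_dict())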
|
[
"[email protected]"
] | |
bec2ac58d9e5663e3e2e4ecdcd53faa846e92d59
|
e65ae5bd9ae1c93e7117e630f7340bc73aa71212
|
/lib/gevent/greentest/test__greenletset.py
|
ffe4e308e45094f2aa246d25c391c328edcf36aa
|
[
"MIT"
] |
permissive
|
nadirhamid/oneline
|
e98ff1ed81da0536f9602ecdde2fb2a4fe80d256
|
833ebef0e26ae8e0cc452756381227746d830b23
|
refs/heads/master
| 2021-01-21T04:27:41.715047 | 2016-05-30T03:50:34 | 2016-05-30T03:50:34 | 23,320,578 | 1 | 2 |
NOASSERTION
| 2020-03-12T17:22:24 | 2014-08-25T16:29:36 |
Python
|
UTF-8
|
Python
| false | false | 3,222 |
py
|
import time
import greentest
import gevent
from gevent import pool
DELAY = 0.1
class SpecialError(Exception):
pass
class Undead(object):
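    # A greenlet target that survives ordinary kills: every exception other
    # than SpecialError is swallowed and counted in shot_count.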
def __init__(self):
self.shot_count = 0
def __call__(self):
while True:
try:
gevent.sleep(1)
except SpecialError:
break
except:
self.shot_count += 1
class Test(greentest.TestCase):
def test_basic(self):
DELAY = 0.05
s = pool.Group()
s.spawn(gevent.sleep, DELAY)
assert len(s) == 1, s
s.spawn(gevent.sleep, DELAY * 2.)
assert len(s) == 2, s
gevent.sleep(DELAY * 3. / 2.)
assert len(s) == 1, s
gevent.sleep(DELAY)
assert not s, s
def test_waitall(self):
s = pool.Group()
s.spawn(gevent.sleep, DELAY)
s.spawn(gevent.sleep, DELAY * 2)
assert len(s) == 2, s
start = time.time()
s.join(raise_error=True)
delta = time.time() - start
assert not s, s
assert len(s) == 0, s
assert DELAY * 1.9 <= delta <= DELAY * 2.5, (delta, DELAY)
def test_kill_block(self):
s = pool.Group()
s.spawn(gevent.sleep, DELAY)
s.spawn(gevent.sleep, DELAY * 2)
assert len(s) == 2, s
start = time.time()
s.kill()
assert not s, s
assert len(s) == 0, s
delta = time.time() - start
assert delta < DELAY * 0.8, delta
def test_kill_noblock(self):
s = pool.Group()
s.spawn(gevent.sleep, DELAY)
s.spawn(gevent.sleep, DELAY * 2)
assert len(s) == 2, s
s.kill(block=False)
assert len(s) == 2, s
gevent.sleep(0.0001)
assert len(s) == 0, s
assert not s, s
def test_kill_fires_once(self):
u1 = Undead()
u2 = Undead()
p1 = gevent.spawn(u1)
p2 = gevent.spawn(u2)
def check(count1, count2):
assert p1, p1
assert p2, p2
assert not p1.dead, p1
assert not p2.dead, p2
self.assertEqual(u1.shot_count, count1)
self.assertEqual(u2.shot_count, count2)
gevent.sleep(0.01)
s = pool.Group([p1, p2])
assert len(s) == 2, s
check(0, 0)
s.killone(p1, block=False)
check(0, 0)
gevent.sleep(0)
check(1, 0)
s.killone(p1)
check(1, 0)
s.killone(p1)
check(1, 0)
s.kill(block=False)
s.kill(block=False)
s.kill(block=False)
check(1, 0)
gevent.sleep(DELAY)
check(1, 1)
X = object()
kill_result = gevent.with_timeout(DELAY, s.kill, block=True, timeout_value=X)
assert kill_result is X, repr(kill_result)
assert len(s) == 2, s
check(1, 1)
p1.kill(SpecialError)
p2.kill(SpecialError)
def test_killall_subclass(self):
p1 = GreenletSubclass.spawn(lambda: 1 / 0)
p2 = GreenletSubclass.spawn(lambda: gevent.sleep(10))
s = pool.Group([p1, p2])
s.kill()
class GreenletSubclass(gevent.Greenlet):
pass
if __name__ == '__main__':
greentest.main()
|
[
"[email protected]"
] | |
26a281cbfa087655cdab4c27c6fd0c9a2db3e8f2
|
77da9a5aac6598df2dea6088e068e973e3da52ad
|
/benchmarks/1d16pu/config.py
|
b5869320a3d278e60c3593dff0db31f503088bc8
|
[] |
no_license
|
GiggleLiu/projectx
|
e9e5960d5892c4efcad0a0a7cc5e7ff72ca50458
|
170f990939549949de203004c03ed68762ba23b4
|
refs/heads/master
| 2021-09-08T17:28:22.473021 | 2018-03-11T10:32:32 | 2018-03-11T10:32:32 | 109,708,185 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,362 |
py
|
import numpy as np
from models.wanglei6 import WangLei6
powerlist_list = [
[[1,0,1],[1,1]],
[[1,0,1],[1,1]],
[[1,0,1],[1,1]],
[[1,0,1],[1,1]],
[[1,0,1],[1,1]],
[[1,0,1],[1,1]],
]
num_features_list = [
[20], [20], [20], [20], [20],
[20], [20], [20], [20], [20],
]
nonlinear_list_list = [
['sinh','none','none'],
['sinh','none','none'],
]
def modifyconfig_and_getnn(config, bench_id):
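    # Pick the hyper-parameters for this benchmark id and build the WangLei6
    # network with them.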
is_unitary = False
momentum = 0
eta0 = 0.2
eta1 = 0.2
NP = 0
NC = 1
itype = 'complex128'
poly_order = 10
usesum = False
powerlist = powerlist_list[bench_id]
num_features = num_features_list[bench_id]
nonlinear_list = nonlinear_list_list[bench_id]
soften_gradient = False
if bench_id == 1:
momentum=np.pi
if momentum==0: # not ground state
config['hamiltonian']['EG'] = -6.6889395
rbm = WangLei6(input_shape=tuple(config['hamiltonian']['size']), num_features=num_features,
itype=itype,dtype0=itype, dtype1=itype, powerlist=powerlist,
usesum=usesum, nonlinear_list=nonlinear_list, poly_order=poly_order, do_BN=False,
momentum=momentum, eta0=eta0, eta1=eta1, NP=NP, NC=NC,is_unitary=is_unitary,
soften_gradient = soften_gradient)
return rbm
|
[
"[email protected]"
] | |
33cfc768934e8033b6bdcf3758e69a8c15adb0b0
|
90c6262664d013d47e9a3a9194aa7a366d1cabc4
|
/tests/opcodes/cases/test_map_mem_nat_100.py
|
68ed860f3164c948df0a2f3f9efad4284fc8d31b
|
[
"MIT"
] |
permissive
|
tqtezos/pytezos
|
3942fdab7aa7851e9ea81350fa360180229ec082
|
a4ac0b022d35d4c9f3062609d8ce09d584b5faa8
|
refs/heads/master
| 2021-07-10T12:24:24.069256 | 2020-04-04T12:46:24 | 2020-04-04T12:46:24 | 227,664,211 | 1 | 0 |
MIT
| 2020-12-30T16:44:56 | 2019-12-12T17:47:53 |
Python
|
UTF-8
|
Python
| false | false | 886 |
py
|
from unittest import TestCase
from tests import abspath
from pytezos.repl.interpreter import Interpreter
from pytezos.michelson.converter import michelson_to_micheline
from pytezos.repl.parser import parse_expression
class OpcodeTestmap_mem_nat_100(TestCase):
def setUp(self):
self.maxDiff = None
self.i = Interpreter(debug=True)
def test_opcode_map_mem_nat_100(self):
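        # Load the map_mem_nat contract, then run it with parameter 1 against
        # storage (map {1: 0}, None); the expected storage holds Some True,
        # i.e. the key was found in the map.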
res = self.i.execute(f'INCLUDE "{abspath("opcodes/contracts/map_mem_nat.tz")}"')
self.assertTrue(res['success'])
res = self.i.execute('RUN 1 (Pair { Elt 1 0 } None)')
self.assertTrue(res['success'])
exp_val_expr = michelson_to_micheline('(Pair { Elt 1 0 } (Some True))')
exp_val = parse_expression(exp_val_expr, res['result']['storage'].type_expr)
self.assertEqual(exp_val, res['result']['storage']._val)
|
[
"[email protected]"
] | |
eebf77058bb72e39adc7e144c70efde6975f3eb4
|
c380e7c61c97cb03531e4f33fe31c99c7593c70b
|
/contrib/splunk-sdk-python/examples/explorer/explorer.py
|
ae4f6d06e2597a0764a43d3e6d0bccc93b209569
|
[
"Apache-2.0"
] |
permissive
|
skada/splunk-appframework
|
09bd777bc7c28b6b460eb8b7397288b559d9bf3d
|
8d07f81a9c37b8a0a2b9432bdd6fd78e98e458cc
|
refs/heads/master
| 2020-12-01T01:17:03.481885 | 2013-03-09T05:51:27 | 2013-03-09T05:51:27 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,029 |
py
|
#!/usr/bin/env python
#
# Copyright 2011-2012 Splunk, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"): you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import server
import webbrowser
import sys
import os
sys.path.insert(0, '../../') # Use splunklib and utils without installing
import utils
import urllib
PORT = 8080
def main(argv):
usage = "usage: %prog [options]"
redirect_port_args = {
"redirectport": {
"flags": ["--redirectport"],
"default": PORT,
"help": "Port to use for redirect server (default: %s)" % PORT,
},
}
opts = utils.parse(argv, redirect_port_args, ".splunkrc", usage=usage)
args = [("scheme", opts.kwargs["scheme"]),
("host", opts.kwargs["host"]),
("port", opts.kwargs["port"]),
("redirecthost", "localhost"),
("redirectport", opts.kwargs["redirectport"]),
("username", opts.kwargs["username"]),
("password", opts.kwargs["password"])]
if 'app' in opts.kwargs.keys():
args.append(('app', opts.kwargs['app']))
if 'owner' in opts.kwargs.keys():
args.append(('owner', opts.kwargs['owner']))
# Encode these arguments
args = urllib.urlencode(args)
# Launch the browser
webbrowser.open("file://%s" % os.path.join(os.getcwd(), "explorer.html?%s" % args))
# And server the files
server.serve(opts.kwargs["redirectport"])
if __name__ == "__main__":
try:
main(sys.argv[1:])
except KeyboardInterrupt:
pass
except:
raise
|
[
"[email protected]"
] | |
973003a6f81d1d8e405f3dfccf3199500bca22db
|
9e988c0dfbea15cd23a3de860cb0c88c3dcdbd97
|
/sdBs/AllRun/pg_0833+211/sdB_PG_0833+211_lc.py
|
04021942ce6aaaa0c17094607e8f65d9c8e1e284
|
[] |
no_license
|
tboudreaux/SummerSTScICode
|
73b2e5839b10c0bf733808f4316d34be91c5a3bd
|
4dd1ffbb09e0a599257d21872f9d62b5420028b0
|
refs/heads/master
| 2021-01-20T18:07:44.723496 | 2016-08-08T16:49:53 | 2016-08-08T16:49:53 | 65,221,159 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 346 |
py
|
from gPhoton.gAperture import gAperture
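# Extract a 30-second-binned NUV light curve for sdB PG 0833+211 with gPhoton,
# using a 0.00556-degree aperture and a background annulus.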
def main():
    gAperture(band="NUV", skypos=[129.082042,20.963314], stepsz=30., csvfile="/data2/fleming/GPHOTON_OUTPU/LIGHTCURVES/sdBs/sdB_PG_0833+211/sdB_PG_0833+211_lc.csv", maxgap=1000., overwrite=True, radius=0.00555556, annulus=[0.005972227,0.0103888972], verbose=3)
if __name__ == "__main__":
main()
|
[
"[email protected]"
] | |
ae9fd9989ac06924eaff500bcc8d0d390707e66e
|
cbd865bdba079069ba52e4bf78dd1395acb99d5b
|
/30.py
|
08bb5c96aa2900c43d824aea65e74eae9541e17a
|
[] |
no_license
|
anhnguyendepocen/100-pythonExercises
|
52e72c214885e993207241b28124382365f28126
|
1f69184ba819b1a9d3880530aa349ae677dc1254
|
refs/heads/master
| 2022-01-17T14:00:16.390389 | 2019-03-23T19:22:13 | 2019-03-23T19:22:13 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 345 |
py
|
# Exercise No.30
# Why is there an error here?
# def foo(a=2, b):
# return a + b
# Solution
# Because parameters with default values must come at the end of the parameter
# list. If a default parameter preceded a non-default one, Python could not
# tell whether a positional argument was meant to override the default or to
# fill the later parameter, so the definition raises
# "SyntaxError: non-default argument follows default argument".
# A corrected version is sketched below.
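# Illustrative fix (not part of the original exercise): list the non-default
# parameter first, so the default can always be resolved unambiguously.
def foo(b, a=2):
    return a + b

print(foo(5))  # prints 7: b=5 is passed positionally, a keeps its default of 2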
|
[
"[email protected]"
] | |
aaeaef1286a253db035fa39dbb376b08736d9761
|
aa9afb14c2e0871afc200d2a7e981334ae97974a
|
/item_engine/builders/package/mb1.py
|
aa7e39d194dd52ee9a7a12ae037540dfd42744ad
|
[
"MIT"
] |
permissive
|
GabrielAmare/TextEngine
|
bd53357526cb0e31f5aae80dacfa02b004683017
|
39ceb323a63af35e32c4be34ae35a77e811bc973
|
refs/heads/main
| 2023-05-01T13:03:54.929892 | 2021-05-21T07:44:56 | 2021-05-21T07:44:56 | 344,866,828 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 667 |
py
|
from item_engine.textbase.elements import Char, Token
from typing import Iterator
def function(src: Iterator[Char]) -> Iterator[Token]:
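    # Develops the current token one element at a time; the transition logic
    # is delegated to _function, which is not defined in this module and is
    # presumably generated alongside this template (assumption).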
cur: Token = Token(at=0, to=0, value=0)
for old in src:
while cur.to == old.at:
new: Token = cur.develop(_function(cur, old), old)
if not new.is_terminal:
cur = new
continue
if new.is_valid:
cur = Token(at=new.to, to=new.to, value=0)
yield new
continue
if old.value == 'EOF':
yield Token.EOF(old.to)
break
raise SyntaxError((cur, old, new))
|
[
"[email protected]"
] | |
8b262c3bfa84526014f073bef31780e8a33dcc89
|
04dbbfea381996e207f4ff65ce1fc5a297cd2b79
|
/MIT_ocw/6.006F11/JUNK/timing_exc.py
|
8c9685bec8e99aa00d62a059bf8762dd3773c80d
|
[] |
no_license
|
Ru0ch3n/MOOCs
|
30081d1905ed5916bcbad640a30cdace3a1e3ce0
|
93acda7879cb5fd96cddb5be9148bb22e025fbb7
|
refs/heads/master
| 2021-09-21T16:34:31.536725 | 2018-08-28T18:05:48 | 2018-08-28T18:05:48 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,475 |
py
|
import math
import string
import timeit
import scipy.optimize
# Parameter generation routines
def lg(x):
return math.log(x)/math.log(2.0)
def sqrt(x):
return math.sqrt(x)
def make_param_list(spec_string,growth_factor):
"""
Generate a list of dictionaries
given maximum and minimum values for each range.
Each min and max value is a *string* that can be evaluted;
each string may depend on earlier variable values
Values increment by factor of growth_factor from min to max
Example:
    make_param_list("1<=n<=1000", 2)
    make_param_list("1<=n<=1000;1<=m<=1000;min(n,m)<=k<=max(n,m)", 2)
"""
var_list = []
spec_list = string.split(spec_string,";")
D = {}
D['lg']=lg
D['sqrt'] = sqrt
D_list = [D]
for spec in spec_list:
spec_parts = string.split(spec,"<=")
assert len(spec_parts)==3
lower_spec = spec_parts[0]
var_name = spec_parts[1]
assert len(var_name)==1
var_list.append(var_name)
upper_spec = spec_parts[2]
new_D_list = []
for D in D_list:
new_D = D.copy()
val = eval(lower_spec,D)
while val<=eval(upper_spec,D):
new_D[var_name] = val
new_D_list.append(new_D.copy())
val *= growth_factor
D_list = new_D_list
return (var_list,D_list)
def fit(var_list,param_list,run_times,f_list):
"""
Return matrix A needed for least-squares fit.
Given:
list of variable names
list of sample dicts for various parameter sets
list of corresponding run times
list of functions to be considered for fit
these are *strings*, e.g. "n","n**2","min(n,m)",etc.
prints:
coefficients for each function in f_list
"""
print "var_list",var_list
print "Function list:",f_list
print "run times:",
for i in range(len(param_list)):
print
for v in var_list:
print v,"= %6s"%param_list[i][v],
print ": %8f"%run_times[i],"microseconds",
print
rows = len(run_times)
cols = len(f_list)
A = [ [0 for j in range(cols)] for i in range(rows) ]
for i in range(rows):
D = param_list[i]
for j in range(cols):
A[i][j] = float(eval(f_list[j],D))
b = run_times
(x,resids,rank,s) = fit2(A,b)
print "Coefficients as interpolated from data:"
for j in range(cols):
sign = ''
if x[j]>0 and j>0:
sign="+"
elif x[j]>0:
sign = " "
print "%s%g*%s"%(sign,x[j],f_list[j])
print "(measuring time in microseconds)"
print "Sum of squares of residuals:",resids
print "RMS error = %0.2g percent"%(math.sqrt(resids/len(A))*100.0)
def fit2(A,b):
""" Relative error minimizer """
def f(x):
assert len(x) == len(A[0])
resids = []
for i in range(len(A)):
sum = 0.0
for j in range(len(A[0])):
sum += A[i][j]*x[j]
relative_error = (sum-b[i])/b[i]
resids.append(relative_error)
return resids
ans = scipy.optimize.leastsq(f,[0.0]*len(A[0]))
# print "ans:",ans
if len(A[0])==1:
x = [ans[0]]
else:
x = ans[0]
resids = sum([r*r for r in f(x)])
return (x,resids,0,0)
# def int2str(num):
# result = ''
# while num > 0:
# result += str(num %10);
# num /= 10
# return result[::-1]
int2str = """\
def int2str(num):
result = ''
while num > 0:
result += str(num %10);
num /= 10
return result[::-1]
"""
def test_number():
print
print "Test Number-1 -- time to compute int('1'*n)"
spec_string = "1000<=n<=10000"
growth_factor = 2
print "Spec_string: ",spec_string,"by factors of",growth_factor
var_list, param_list = make_param_list(spec_string,growth_factor)
f_list = ("n**2","n","1")
run_times = []
trials = 1000
for D in param_list:
t = timeit.Timer("string.atoi(x)","import string;x='1'*%(n)s"%D)
run_times.append(t.timeit(trials)*1e6/float(trials))
fit(var_list,param_list,run_times,f_list)
f_list = ("n","1")
fit(var_list,param_list,run_times,f_list)
print
print "Test Number-2 -- time to compute repr(2**n)"
spec_string = "1000<=n<=10000"
growth_factor = 2
print "Spec_string: ",spec_string,"by factors of",growth_factor
var_list, param_list = make_param_list(spec_string,growth_factor)
f_list = ("n**2","n","1")
run_times = []
trials = 1000
for D in param_list:
t = timeit.Timer("repr(x)","x=2**%(n)s"%D)
run_times.append(t.timeit(trials)*1e6/float(trials))
fit(var_list,param_list,run_times,f_list)
f_list = ("n","1")
fit(var_list,param_list,run_times,f_list)
print
print "Test Number-3 -- time to compute int2str(2**n)"
spec_string = "1000<=n<=10000"
growth_factor = 2
print "Spec_string: ",spec_string,"by factors of",growth_factor
var_list, param_list = make_param_list(spec_string,growth_factor)
f_list = ("n**2","n","1")
run_times = []
trials = 1000
for D in param_list:
t = timeit.Timer("int2str(x)", int2str+"x=2**%(n)s"%D)
run_times.append(t.timeit(trials)*1e6/float(trials))
fit(var_list,param_list,run_times,f_list)
f_list = ("n","1")
fit(var_list,param_list,run_times,f_list)
if __name__ == '__main__':
test_number()
|
[
"[email protected]"
] | |
dc2171d0ee0d6fe1239997e6fb0b4e5ca1c6b440
|
38558ac2e78837e7f975364f03a1f55fb02103af
|
/PRA practice/PRA S.py
|
b414adcbe405d0e17e933b2fa4b89f09bb0e79b5
|
[] |
no_license
|
SOURADEEP-DONNY/WORKING-WITH-PYTHON
|
a0bc2ff5ddab1b25563927c8f361c6512683d6ff
|
5198d14f0711a3ba7f2fe8bac61d6404c20ea40c
|
refs/heads/master
| 2023-07-14T04:49:08.399519 | 2021-08-29T15:22:33 | 2021-08-29T15:22:33 | 270,723,307 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,218 |
py
|
class Employee:
def __init__(self,emp_id,emp_name,role):
self.employeeId=emp_id
self.employeeName=emp_name
self.geInRole=role
self.status="In Service"
class Organization:
def __init__(self,emp_list):
self.employeeList=emp_list
def updateEmployeeStatus(self,noOfYears):
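        # Flag employees whose time in role exceeds the threshold as "Retirement Due".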
for i in self.employeeList:
if i.geInRole > noOfYears:
i.status="Retirement Due"
return self.employeeList
def countEmployees(self):
count=0
for i in self.employeeList:
if i.status=="Retirement Due":
count+=1
return count
if __name__=="__main__":
    num = int(input())
emp_list=[]
for _ in range(num):
id=int(input())
name=input()
role=int(input())
emp_list.append(Employee(id,name,role))
obj=Organization(emp_list)
noOfYears=int(input())
result1=obj.updateEmployeeStatus(noOfYears)
result2=obj.countEmployees()
if(result2>0):
print("Count of employee updated=",result2)
else:
print("No employee updated")
for i in result1:
print(i.employeeId,i.employeeName,i.status)
|
[
"[email protected]"
] | |
f72b33efb196f6401894f4994d0802946bc39515
|
6fa701cdaa0d83caa0d3cbffe39b40e54bf3d386
|
/google/cloud/gkehub/v1alpha/gkehub-v1alpha-py/google/cloud/gkehub/servicemesh_v1alpha/types/servicemesh.py
|
c524b4cb240d7c3bf95c02aa269442cef01aa673
|
[
"Apache-2.0"
] |
permissive
|
oltoco/googleapis-gen
|
bf40cfad61b4217aca07068bd4922a86e3bbd2d5
|
00ca50bdde80906d6f62314ef4f7630b8cdb6e15
|
refs/heads/master
| 2023-07-17T22:11:47.848185 | 2021-08-29T20:39:47 | 2021-08-29T20:39:47 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,838 |
py
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.protobuf import struct_pb2 # type: ignore
__protobuf__ = proto.module(
package='google.cloud.gkehub.servicemesh.v1alpha',
manifest={
'FeatureState',
'MembershipState',
'AnalysisMessageBase',
'AnalysisMessage',
},
)
class FeatureState(proto.Message):
r"""**Service Mesh**: State for the whole Hub, as analyzed by the
Service Mesh Hub Controller.
Attributes:
analysis_messages (Sequence[google.cloud.gkehub.servicemesh_v1alpha.types.AnalysisMessage]):
Output only. Results of running Service Mesh
analyzers.
"""
analysis_messages = proto.RepeatedField(
proto.MESSAGE,
number=1,
message='AnalysisMessage',
)
class MembershipState(proto.Message):
r"""**Service Mesh**: State for a single Membership, as analyzed by the
Service Mesh Hub Controller.
Attributes:
analysis_messages (Sequence[google.cloud.gkehub.servicemesh_v1alpha.types.AnalysisMessage]):
Output only. Results of running Service Mesh
analyzers.
"""
analysis_messages = proto.RepeatedField(
proto.MESSAGE,
number=1,
message='AnalysisMessage',
)
class AnalysisMessageBase(proto.Message):
r"""AnalysisMessageBase describes some common information that is
needed for all messages.
Attributes:
type_ (google.cloud.gkehub.servicemesh_v1alpha.types.AnalysisMessageBase.Type):
Represents the specific type of a message.
level (google.cloud.gkehub.servicemesh_v1alpha.types.AnalysisMessageBase.Level):
Represents how severe a message is.
documentation_url (str):
A url pointing to the Service Mesh or Istio
documentation for this specific error type.
"""
class Level(proto.Enum):
r"""The values here are chosen so that more severe messages get
sorted higher, as well as leaving space in between to add more
later See istio.analysis.v1alpha1.AnalysisMessageBase.Level
"""
LEVEL_UNSPECIFIED = 0
ERROR = 3
WARNING = 8
INFO = 12
class Type(proto.Message):
r"""A unique identifier for the type of message. Display_name is
intended to be human-readable, code is intended to be machine
readable. There should be a one-to-one mapping between display_name
and code. (i.e. do not re-use display_names or codes between message
types.) See istio.analysis.v1alpha1.AnalysisMessageBase.Type
Attributes:
display_name (str):
A human-readable name for the message type. e.g.
"InternalError", "PodMissingProxy". This should be the same
for all messages of the same type. (This corresponds to the
``name`` field in open-source Istio.)
code (str):
A 7 character code matching ``^IST[0-9]{4}$`` or
``^ASM[0-9]{4}$``, intended to uniquely identify the message
type. (e.g. "IST0001" is mapped to the "InternalError"
message type.)
"""
display_name = proto.Field(
proto.STRING,
number=1,
)
code = proto.Field(
proto.STRING,
number=2,
)
type_ = proto.Field(
proto.MESSAGE,
number=1,
message=Type,
)
level = proto.Field(
proto.ENUM,
number=2,
enum=Level,
)
documentation_url = proto.Field(
proto.STRING,
number=3,
)
class AnalysisMessage(proto.Message):
r"""AnalysisMessage is a single message produced by an analyzer,
and it used to communicate to the end user about the state of
their Service Mesh configuration.
Attributes:
message_base (google.cloud.gkehub.servicemesh_v1alpha.types.AnalysisMessageBase):
Details common to all types of Istio and
ServiceMesh analysis messages.
description (str):
A human readable description of what the
error means. It is suitable for non-
internationalize display purposes.
resource_paths (Sequence[str]):
A list of strings specifying the resource identifiers that
were the cause of message generation. A "path" here may be:
- MEMBERSHIP_ID if the cause is a specific member cluster
- MEMBERSHIP_ID/(NAMESPACE/)?RESOURCETYPE/NAME if the cause
is a resource in a cluster
args (google.protobuf.struct_pb2.Struct):
A UI can combine these args with a template (based on
message_base.type) to produce an internationalized message.
"""
message_base = proto.Field(
proto.MESSAGE,
number=1,
message='AnalysisMessageBase',
)
description = proto.Field(
proto.STRING,
number=2,
)
resource_paths = proto.RepeatedField(
proto.STRING,
number=3,
)
args = proto.Field(
proto.MESSAGE,
number=4,
message=struct_pb2.Struct,
)
__all__ = tuple(sorted(__protobuf__.manifest))
|
[
"bazel-bot-development[bot]@users.noreply.github.com"
] |
bazel-bot-development[bot]@users.noreply.github.com
|