Dataset schema (one row per source file; ⌀ marks a nullable column):

| column | type | range / values |
|---|---|---|
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 3 to 616 |
| content_id | string | length 40 |
| detected_licenses | sequence | length 0 to 112 |
| license_type | string | 2 classes |
| repo_name | string | length 5 to 115 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 to 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 to 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 to 2023-09-06 01:08:06 |
| github_id | int64 ⌀ | 4.92k to 681M |
| star_events_count | int64 | 0 to 209k |
| fork_events_count | int64 | 0 to 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] ⌀ | 2012-06-04 01:52:49 to 2023-09-14 21:59:50 |
| gha_created_at | timestamp[us] ⌀ | 2008-05-22 07:58:19 to 2023-08-21 12:35:19 |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 to 10.2M |
| extension | string | 188 classes |
| content | string | length 3 to 10.2M |
| authors | sequence | length 1 |
| author_id | string | length 1 to 132 |

Each data row below lists these metadata columns pipe-separated, then the file content, then the authors and author_id columns.
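A minimal loading sketch for a dataset with this schema, using the Hugging Face `datasets` library; the dataset path below is a placeholder, not a name taken from this page:

from datasets import load_dataset

# Hypothetical dataset id; substitute the real one for this dump.
ds = load_dataset("<namespace>/<dataset-name>", split="train", streaming=True)
first_row = next(iter(ds))
print(first_row["path"], first_row["license_type"], first_row["length_bytes"])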
0ecfd4ade7a4eb7a0332e62085aa20d4d147faea | 0550c08cee19be891fde34fa109b5a4ad9f07e3a | /countingdnanucleotides/countingdnanucleotides.py | f3f714d93042c72c14090a87793d10895a8e4cca | [] | no_license | bendavidsteel/rosalind-solutions | 92653c49d8ef938306ac1289ccb4e4cfe4b8d3ae | 0749f2662efcac62383a8476ce13fcdd039928b1 | refs/heads/master | 2020-03-28T04:17:00.959446 | 2018-09-06T21:32:06 | 2018-09-06T21:32:06 | 147,705,059 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 426 | py |
a = 0
c = 0
g = 0
t = 0
with open('rosalind_dna.txt') as stringfile:
s = [x.strip('\n') for x in stringfile.readlines()][0]
for i in range(len(s)):
if s[i] == 'A':
a += 1
elif s[i] == 'C':
c += 1
elif s[i] == 'G':
g += 1
elif s[i] == 'T':
t += 1
output = open("output.txt", 'w')
output.write(str(a) + ' ' + str(c) + ' ' + str(g) + ' ' + str(t))
output.close()
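# An equivalent, more idiomatic tally using collections.Counter
# (editor's sketch; assumes the same rosalind_dna.txt input file):
from collections import Counter

with open('rosalind_dna.txt') as stringfile:
    counts = Counter(stringfile.readline().strip())
print(counts['A'], counts['C'], counts['G'], counts['T'])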
| [ "[email protected]" ] | |
7d2d02d8ebc5d63a3b86766ce7a466835da3c7fb | 16caebb320bb10499d3712bf0bdc07539a4d0007 | /objc/_SleepHealth.py | b5472aa777bd2319957d026862d02d97921f506b | [] | no_license | swosnick/Apple-Frameworks-Python | 876d30f308a7ac1471b98a9da2fabd22f30c0fa5 | 751510137e9fa35cc806543db4e4415861d4f252 | refs/heads/master | 2022-12-08T07:08:40.154553 | 2020-09-04T17:36:24 | 2020-09-04T17:36:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 725 | py |
'''
Classes from the 'SleepHealth' framework.
'''
try:
from rubicon.objc import ObjCClass
except ValueError:
def ObjCClass(name):
return None
def _Class(name):
try:
return ObjCClass(name)
except NameError:
return None
HKSHSleepDaySummary = _Class('HKSHSleepDaySummary')
HKSleepHealthStore = _Class('HKSleepHealthStore')
HKSHSleepPeriod = _Class('HKSHSleepPeriod')
HKSHSleepPeriodSegment = _Class('HKSHSleepPeriodSegment')
HKSHGoalProgressEngine = _Class('HKSHGoalProgressEngine')
HKSHGoalProgress = _Class('HKSHGoalProgress')
HKSHSleepDaySummaryQuery = _Class('HKSHSleepDaySummaryQuery')
HKSHSleepDaySummaryQueryConfiguration = _Class('HKSHSleepDaySummaryQueryConfiguration')
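# Usage sketch (editor's example): each name is an ObjCClass when the Rubicon
# bridge and the SleepHealth framework are available (macOS/iOS), None otherwise.
if __name__ == '__main__':
    print(HKSHSleepDaySummary)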
| [ "[email protected]" ] | |
da4dcc87474cb0400f18d2293569fa8d6e209747 | 1a9852fe468f18e1ac3042c09286ccda000a4135 | /Specialist Certificate in Data Analytics Essentials/DataCamp/06-Writing_Functions_in_Python/e11_a_read-only_open_context_manager.py | 956cc65e4b032f0ee852b2821b8fb559599e271b | [] | no_license | sarmabhamidipati/UCD | 452b2f1e166c1079ec06d78e473730e141f706b2 | 101ca3152207e2fe67cca118923896551d5fee1c | refs/heads/master | 2023-08-14T15:41:24.312859 | 2021-09-22T17:33:01 | 2021-09-22T17:33:01 | 386,592,878 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 764 | py |
"""
A read-only open() context manager
Yield control from open_read_only() to the context block, ensuring that the read_only_file object
gets assigned to my_file.
Use read_only_file's .close() method to ensure that you don't leave open files lying around.
"""
from contextlib import contextmanager
@contextmanager
def open_read_only(filename):
"""Open a file in read-only mode.
Args:
filename (str): The location of the file to read
Yields:
file object
"""
read_only_file = open(filename, mode='r')
# Yield read_only_file so it can be assigned to my_file
yield read_only_file
# Close read_only_file
read_only_file.close()
with open_read_only('my_file.txt') as my_file:
print(my_file.read())
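# Sketch of a more robust variant (editor's addition, not the graded answer):
# a try/finally block closes the file even if the with-block raises.
@contextmanager
def open_read_only_safe(filename):
    read_only_file = open(filename, mode='r')
    try:
        yield read_only_file
    finally:
        read_only_file.close()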
| [ "[email protected]" ] | |
d3770de2ae95f5979154c27ae9ccd77472d0e4d2 | 0a6f284b1a7c8b16911ebf33076abc38778c752f | /app/run.py | 37911045108a9bf98a86cd405ae50114df2a13ca | [
"Apache-2.0"
] | permissive | branky/blockd3 | 2298b3eafd1b9c50b0374dd1456c0fcdf2068fab | 27e78fd89f44af95ad65b1203c02156db64333d0 | refs/heads/master | 2020-12-25T09:00:35.047437 | 2012-11-19T06:08:49 | 2012-11-19T06:08:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,026 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import logging
from ghost import GhostTestCase, Ghost
from app import make_app
app = make_app("test")
PORT = 5000
base_url = "http://localhost:%s/dist/" % PORT
class Blockd3GhostTest(GhostTestCase):
port = PORT
display = False
log_level = logging.INFO
def __new__(cls, *args, **kwargs):
"""Creates Ghost instance."""
if not hasattr(cls, 'ghost'):
cls.ghost = Ghost(display=cls.display,
wait_timeout=10,
viewport_size=cls.viewport_size,
log_level=cls.log_level)
return super(Blockd3GhostTest, cls).__new__(cls, *args, **kwargs)
@classmethod
def create_app(cls):
return app
def test_open(self):
"""
Test that the page loads
"""
page, resources = self.ghost.open(base_url)
self.assertEqual(page.url, base_url)
self.ghost.click("#run")
if __name__ == "__main__":
    unittest.main()
| [ "[email protected]" ] | |
4037da61e9d0b78c4af2e78d472c172f4b190b68 | 436177bf038f9941f67e351796668700ffd1cef2 | /venv/Lib/site-packages/mpl_toolkits/axes_grid1/inset_locator.py | 49576ff9f37100ac6d33812731d5634d57e34693 | [] | no_license | python019/matplotlib_simple | 4359d35f174cd2946d96da4d086026661c3d1f9c | 32e9a8e773f9423153d73811f69822f9567e6de4 | refs/heads/main | 2023-08-22T18:17:38.883274 | 2021-10-07T15:55:50 | 2021-10-07T15:55:50 | 380,471,961 | 29 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,733 | py |
"""
A collection of functions and objects for creating or placing inset axes.
"""
from matplotlib import _api, docstring
from matplotlib.offsetbox import AnchoredOffsetbox
from matplotlib.patches import Patch, Rectangle
from matplotlib.path import Path
from matplotlib.transforms import Bbox, BboxTransformTo
from matplotlib.transforms import IdentityTransform, TransformedBbox
from . import axes_size as Size
from .parasite_axes import HostAxes
class InsetPosition:
@docstring.dedent_interpd
def __init__(self, parent, lbwh):
"""
An object for positioning an inset axes.
This is created by specifying the normalized coordinates in the axes,
instead of the figure.
Parameters
----------
parent : `matplotlib.axes.Axes`
Axes to use for normalizing coordinates.
lbwh : iterable of four floats
The left edge, bottom edge, width, and height of the inset axes, in
units of the normalized coordinate of the *parent* axes.
See Also
--------
:meth:`matplotlib.axes.Axes.set_axes_locator`
Examples
--------
The following bounds the inset axes to a box with 20%% of the parent
axes's height and 40%% of the width. The size of the axes specified
([0, 0, 1, 1]) ensures that the axes completely fills the bounding box:
>>> parent_axes = plt.gca()
>>> ax_ins = plt.axes([0, 0, 1, 1])
>>> ip = InsetPosition(ax, [0.5, 0.1, 0.4, 0.2])
>>> ax_ins.set_axes_locator(ip)
"""
self.parent = parent
self.lbwh = lbwh
def __call__(self, ax, renderer):
bbox_parent = self.parent.get_position(original=False)
trans = BboxTransformTo(bbox_parent)
bbox_inset = Bbox.from_bounds(*self.lbwh)
bb = TransformedBbox(bbox_inset, trans)
return bb
class AnchoredLocatorBase(AnchoredOffsetbox):
def __init__(self, bbox_to_anchor, offsetbox, loc,
borderpad=0.5, bbox_transform=None):
super().__init__(
loc, pad=0., child=None, borderpad=borderpad,
bbox_to_anchor=bbox_to_anchor, bbox_transform=bbox_transform
)
def draw(self, renderer):
raise RuntimeError("No draw method should be called")
def __call__(self, ax, renderer):
self.axes = ax
fontsize = renderer.points_to_pixels(self.prop.get_size_in_points())
self._update_offset_func(renderer, fontsize)
width, height, xdescent, ydescent = self.get_extent(renderer)
px, py = self.get_offset(width, height, 0, 0, renderer)
bbox_canvas = Bbox.from_bounds(px, py, width, height)
tr = ax.figure.transFigure.inverted()
bb = TransformedBbox(bbox_canvas, tr)
return bb
class AnchoredSizeLocator(AnchoredLocatorBase):
def __init__(self, bbox_to_anchor, x_size, y_size, loc,
borderpad=0.5, bbox_transform=None):
super().__init__(
bbox_to_anchor, None, loc,
borderpad=borderpad, bbox_transform=bbox_transform
)
self.x_size = Size.from_any(x_size)
self.y_size = Size.from_any(y_size)
def get_extent(self, renderer):
bbox = self.get_bbox_to_anchor()
dpi = renderer.points_to_pixels(72.)
r, a = self.x_size.get_size(renderer)
width = bbox.width * r + a * dpi
r, a = self.y_size.get_size(renderer)
height = bbox.height * r + a * dpi
xd, yd = 0, 0
fontsize = renderer.points_to_pixels(self.prop.get_size_in_points())
pad = self.pad * fontsize
return width + 2 * pad, height + 2 * pad, xd + pad, yd + pad
class AnchoredZoomLocator(AnchoredLocatorBase):
def __init__(self, parent_axes, zoom, loc,
borderpad=0.5,
bbox_to_anchor=None,
bbox_transform=None):
self.parent_axes = parent_axes
self.zoom = zoom
if bbox_to_anchor is None:
bbox_to_anchor = parent_axes.bbox
super().__init__(
bbox_to_anchor, None, loc, borderpad=borderpad,
bbox_transform=bbox_transform)
def get_extent(self, renderer):
bb = TransformedBbox(self.axes.viewLim, self.parent_axes.transData)
fontsize = renderer.points_to_pixels(self.prop.get_size_in_points())
pad = self.pad * fontsize
return (abs(bb.width * self.zoom) + 2 * pad,
abs(bb.height * self.zoom) + 2 * pad,
pad, pad)
class BboxPatch(Patch):
@docstring.dedent_interpd
def __init__(self, bbox, **kwargs):
"""
Patch showing the shape bounded by a Bbox.
Parameters
----------
bbox : `matplotlib.transforms.Bbox`
Bbox to use for the extents of this patch.
**kwargs
Patch properties. Valid arguments include:
%(Patch_kwdoc)s
"""
if "transform" in kwargs:
raise ValueError("transform should not be set")
kwargs["transform"] = IdentityTransform()
super().__init__(**kwargs)
self.bbox = bbox
def get_path(self):
# docstring inherited
x0, y0, x1, y1 = self.bbox.extents
return Path([(x0, y0), (x1, y0), (x1, y1), (x0, y1), (x0, y0)],
closed=True)
class BboxConnector(Patch):
@staticmethod
def get_bbox_edge_pos(bbox, loc):
"""
Helper function to obtain the location of a corner of a bbox
Parameters
----------
bbox : `matplotlib.transforms.Bbox`
loc : {1, 2, 3, 4}
Corner of *bbox*. Valid values are::
'upper right' : 1,
'upper left' : 2,
'lower left' : 3,
'lower right' : 4
Returns
-------
x, y : float
Coordinates of the corner specified by *loc*.
"""
x0, y0, x1, y1 = bbox.extents
if loc == 1:
return x1, y1
elif loc == 2:
return x0, y1
elif loc == 3:
return x0, y0
elif loc == 4:
return x1, y0
@staticmethod
def connect_bbox(bbox1, bbox2, loc1, loc2=None):
"""
Helper function to obtain a Path from one bbox to another.
Parameters
----------
bbox1, bbox2 : `matplotlib.transforms.Bbox`
Bounding boxes to connect.
loc1 : {1, 2, 3, 4}
Corner of *bbox1* to use. Valid values are::
'upper right' : 1,
'upper left' : 2,
'lower left' : 3,
'lower right' : 4
loc2 : {1, 2, 3, 4}, optional
Corner of *bbox2* to use. If None, defaults to *loc1*.
Valid values are::
'upper right' : 1,
'upper left' : 2,
'lower left' : 3,
'lower right' : 4
Returns
-------
path : `matplotlib.path.Path`
A line segment from the *loc1* corner of *bbox1* to the *loc2*
corner of *bbox2*.
"""
if isinstance(bbox1, Rectangle):
bbox1 = TransformedBbox(Bbox.unit(), bbox1.get_transform())
if isinstance(bbox2, Rectangle):
bbox2 = TransformedBbox(Bbox.unit(), bbox2.get_transform())
if loc2 is None:
loc2 = loc1
x1, y1 = BboxConnector.get_bbox_edge_pos(bbox1, loc1)
x2, y2 = BboxConnector.get_bbox_edge_pos(bbox2, loc2)
return Path([[x1, y1], [x2, y2]])
@docstring.dedent_interpd
def __init__(self, bbox1, bbox2, loc1, loc2=None, **kwargs):
"""
Connect two bboxes with a straight line.
Parameters
----------
bbox1, bbox2 : `matplotlib.transforms.Bbox`
Bounding boxes to connect.
loc1 : {1, 2, 3, 4}
Corner of *bbox1* to draw the line. Valid values are::
'upper right' : 1,
'upper left' : 2,
'lower left' : 3,
'lower right' : 4
loc2 : {1, 2, 3, 4}, optional
Corner of *bbox2* to draw the line. If None, defaults to *loc1*.
Valid values are::
'upper right' : 1,
'upper left' : 2,
'lower left' : 3,
'lower right' : 4
**kwargs
Patch properties for the line drawn. Valid arguments include:
%(Patch_kwdoc)s
"""
if "transform" in kwargs:
raise ValueError("transform should not be set")
kwargs["transform"] = IdentityTransform()
if 'fill' in kwargs:
super().__init__(**kwargs)
else:
fill = bool({'fc', 'facecolor', 'color'}.intersection(kwargs))
super().__init__(fill=fill, **kwargs)
self.bbox1 = bbox1
self.bbox2 = bbox2
self.loc1 = loc1
self.loc2 = loc2
def get_path(self):
# docstring inherited
return self.connect_bbox(self.bbox1, self.bbox2,
self.loc1, self.loc2)
class BboxConnectorPatch(BboxConnector):
@docstring.dedent_interpd
def __init__(self, bbox1, bbox2, loc1a, loc2a, loc1b, loc2b, **kwargs):
"""
Connect two bboxes with a quadrilateral.
The quadrilateral is specified by two lines that start and end at
corners of the bboxes. The four sides of the quadrilateral are defined
by the two lines given, the line between the two corners specified in
*bbox1* and the line between the two corners specified in *bbox2*.
Parameters
----------
bbox1, bbox2 : `matplotlib.transforms.Bbox`
Bounding boxes to connect.
loc1a, loc2a : {1, 2, 3, 4}
Corners of *bbox1* and *bbox2* to draw the first line.
Valid values are::
'upper right' : 1,
'upper left' : 2,
'lower left' : 3,
'lower right' : 4
loc1b, loc2b : {1, 2, 3, 4}
Corners of *bbox1* and *bbox2* to draw the second line.
Valid values are::
'upper right' : 1,
'upper left' : 2,
'lower left' : 3,
'lower right' : 4
**kwargs
Patch properties for the line drawn:
%(Patch_kwdoc)s
"""
if "transform" in kwargs:
raise ValueError("transform should not be set")
super().__init__(bbox1, bbox2, loc1a, loc2a, **kwargs)
self.loc1b = loc1b
self.loc2b = loc2b
def get_path(self):
# docstring inherited
path1 = self.connect_bbox(self.bbox1, self.bbox2, self.loc1, self.loc2)
path2 = self.connect_bbox(self.bbox2, self.bbox1,
self.loc2b, self.loc1b)
path_merged = [*path1.vertices, *path2.vertices, path1.vertices[0]]
return Path(path_merged)
def _add_inset_axes(parent_axes, inset_axes):
"""Helper function to add an inset axes and disable navigation in it"""
parent_axes.figure.add_axes(inset_axes)
inset_axes.set_navigate(False)
@docstring.dedent_interpd
def inset_axes(parent_axes, width, height, loc='upper right',
bbox_to_anchor=None, bbox_transform=None,
axes_class=None,
axes_kwargs=None,
borderpad=0.5):
"""
Create an inset axes with a given width and height.
Both sizes used can be specified either in inches or percentage.
For example,::
inset_axes(parent_axes, width='40%%', height='30%%', loc=3)
    creates an inset axes in the lower left corner of *parent_axes* which spans
over 30%% in height and 40%% in width of the *parent_axes*. Since the usage
of `.inset_axes` may become slightly tricky when exceeding such standard
cases, it is recommended to read :doc:`the examples
</gallery/axes_grid1/inset_locator_demo>`.
Notes
-----
The meaning of *bbox_to_anchor* and *bbox_to_transform* is interpreted
differently from that of legend. The value of bbox_to_anchor
(or the return value of its get_points method; the default is
*parent_axes.bbox*) is transformed by the bbox_transform (the default
is Identity transform) and then interpreted as points in the pixel
coordinate (which is dpi dependent).
Thus, following three calls are identical and creates an inset axes
with respect to the *parent_axes*::
axins = inset_axes(parent_axes, "30%%", "40%%")
axins = inset_axes(parent_axes, "30%%", "40%%",
bbox_to_anchor=parent_axes.bbox)
axins = inset_axes(parent_axes, "30%%", "40%%",
bbox_to_anchor=(0, 0, 1, 1),
bbox_transform=parent_axes.transAxes)
Parameters
----------
parent_axes : `matplotlib.axes.Axes`
Axes to place the inset axes.
width, height : float or str
Size of the inset axes to create. If a float is provided, it is
the size in inches, e.g. *width=1.3*. If a string is provided, it is
the size in relative units, e.g. *width='40%%'*. By default, i.e. if
neither *bbox_to_anchor* nor *bbox_transform* are specified, those
are relative to the parent_axes. Otherwise they are to be understood
relative to the bounding box provided via *bbox_to_anchor*.
loc : int or str, default: 1
Location to place the inset axes. The valid locations are::
'upper right' : 1,
'upper left' : 2,
'lower left' : 3,
'lower right' : 4,
'right' : 5,
'center left' : 6,
'center right' : 7,
'lower center' : 8,
'upper center' : 9,
'center' : 10
bbox_to_anchor : tuple or `matplotlib.transforms.BboxBase`, optional
Bbox that the inset axes will be anchored to. If None,
a tuple of (0, 0, 1, 1) is used if *bbox_transform* is set
to *parent_axes.transAxes* or *parent_axes.figure.transFigure*.
Otherwise, *parent_axes.bbox* is used. If a tuple, can be either
[left, bottom, width, height], or [left, bottom].
If the kwargs *width* and/or *height* are specified in relative units,
the 2-tuple [left, bottom] cannot be used. Note that,
unless *bbox_transform* is set, the units of the bounding box
are interpreted in the pixel coordinate. When using *bbox_to_anchor*
with tuple, it almost always makes sense to also specify
a *bbox_transform*. This might often be the axes transform
*parent_axes.transAxes*.
bbox_transform : `matplotlib.transforms.Transform`, optional
Transformation for the bbox that contains the inset axes.
If None, a `.transforms.IdentityTransform` is used. The value
of *bbox_to_anchor* (or the return value of its get_points method)
is transformed by the *bbox_transform* and then interpreted
as points in the pixel coordinate (which is dpi dependent).
You may provide *bbox_to_anchor* in some normalized coordinate,
and give an appropriate transform (e.g., *parent_axes.transAxes*).
axes_class : `matplotlib.axes.Axes` type, optional
If specified, the inset axes created will be created with this class's
constructor.
axes_kwargs : dict, optional
Keyworded arguments to pass to the constructor of the inset axes.
Valid arguments include:
%(Axes_kwdoc)s
borderpad : float, default: 0.5
Padding between inset axes and the bbox_to_anchor.
The units are axes font size, i.e. for a default font size of 10 points
*borderpad = 0.5* is equivalent to a padding of 5 points.
Returns
-------
inset_axes : *axes_class*
Inset axes object created.
"""
if axes_class is None:
axes_class = HostAxes
if axes_kwargs is None:
inset_axes = axes_class(parent_axes.figure, parent_axes.get_position())
else:
inset_axes = axes_class(parent_axes.figure, parent_axes.get_position(),
**axes_kwargs)
if bbox_transform in [parent_axes.transAxes,
parent_axes.figure.transFigure]:
if bbox_to_anchor is None:
_api.warn_external("Using the axes or figure transform requires a "
"bounding box in the respective coordinates. "
"Using bbox_to_anchor=(0, 0, 1, 1) now.")
bbox_to_anchor = (0, 0, 1, 1)
if bbox_to_anchor is None:
bbox_to_anchor = parent_axes.bbox
if (isinstance(bbox_to_anchor, tuple) and
(isinstance(width, str) or isinstance(height, str))):
if len(bbox_to_anchor) != 4:
raise ValueError("Using relative units for width or height "
"requires to provide a 4-tuple or a "
"`Bbox` instance to `bbox_to_anchor.")
axes_locator = AnchoredSizeLocator(bbox_to_anchor,
width, height,
loc=loc,
bbox_transform=bbox_transform,
borderpad=borderpad)
inset_axes.set_axes_locator(axes_locator)
_add_inset_axes(parent_axes, inset_axes)
return inset_axes
@docstring.dedent_interpd
def zoomed_inset_axes(parent_axes, zoom, loc='upper right',
bbox_to_anchor=None, bbox_transform=None,
axes_class=None,
axes_kwargs=None,
borderpad=0.5):
"""
Create an anchored inset axes by scaling a parent axes. For usage, also see
:doc:`the examples </gallery/axes_grid1/inset_locator_demo2>`.
Parameters
----------
parent_axes : `matplotlib.axes.Axes`
Axes to place the inset axes.
zoom : float
        Scaling factor of the data axes. *zoom* > 1 will enlarge the
coordinates (i.e., "zoomed in"), while *zoom* < 1 will shrink the
coordinates (i.e., "zoomed out").
loc : int or str, default: 'upper right'
Location to place the inset axes. The valid locations are::
'upper right' : 1,
'upper left' : 2,
'lower left' : 3,
'lower right' : 4,
'right' : 5,
'center left' : 6,
'center right' : 7,
'lower center' : 8,
'upper center' : 9,
'center' : 10
bbox_to_anchor : tuple or `matplotlib.transforms.BboxBase`, optional
Bbox that the inset axes will be anchored to. If None,
*parent_axes.bbox* is used. If a tuple, can be either
[left, bottom, width, height], or [left, bottom].
If the kwargs *width* and/or *height* are specified in relative units,
the 2-tuple [left, bottom] cannot be used. Note that
the units of the bounding box are determined through the transform
in use. When using *bbox_to_anchor* it almost always makes sense to
also specify a *bbox_transform*. This might often be the axes transform
*parent_axes.transAxes*.
bbox_transform : `matplotlib.transforms.Transform`, optional
Transformation for the bbox that contains the inset axes.
If None, a `.transforms.IdentityTransform` is used (i.e. pixel
coordinates). This is useful when not providing any argument to
*bbox_to_anchor*. When using *bbox_to_anchor* it almost always makes
sense to also specify a *bbox_transform*. This might often be the
axes transform *parent_axes.transAxes*. Inversely, when specifying
the axes- or figure-transform here, be aware that not specifying
*bbox_to_anchor* will use *parent_axes.bbox*, the units of which are
in display (pixel) coordinates.
axes_class : `matplotlib.axes.Axes` type, optional
If specified, the inset axes created will be created with this class's
constructor.
axes_kwargs : dict, optional
Keyworded arguments to pass to the constructor of the inset axes.
Valid arguments include:
%(Axes_kwdoc)s
borderpad : float, default: 0.5
Padding between inset axes and the bbox_to_anchor.
The units are axes font size, i.e. for a default font size of 10 points
*borderpad = 0.5* is equivalent to a padding of 5 points.
Returns
-------
inset_axes : *axes_class*
Inset axes object created.
"""
if axes_class is None:
axes_class = HostAxes
if axes_kwargs is None:
inset_axes = axes_class(parent_axes.figure, parent_axes.get_position())
else:
inset_axes = axes_class(parent_axes.figure, parent_axes.get_position(),
**axes_kwargs)
axes_locator = AnchoredZoomLocator(parent_axes, zoom=zoom, loc=loc,
bbox_to_anchor=bbox_to_anchor,
bbox_transform=bbox_transform,
borderpad=borderpad)
inset_axes.set_axes_locator(axes_locator)
_add_inset_axes(parent_axes, inset_axes)
return inset_axes
@docstring.dedent_interpd
def mark_inset(parent_axes, inset_axes, loc1, loc2, **kwargs):
"""
Draw a box to mark the location of an area represented by an inset axes.
This function draws a box in *parent_axes* at the bounding box of
*inset_axes*, and shows a connection with the inset axes by drawing lines
at the corners, giving a "zoomed in" effect.
Parameters
----------
parent_axes : `matplotlib.axes.Axes`
Axes which contains the area of the inset axes.
inset_axes : `matplotlib.axes.Axes`
The inset axes.
loc1, loc2 : {1, 2, 3, 4}
Corners to use for connecting the inset axes and the area in the
parent axes.
**kwargs
Patch properties for the lines and box drawn:
%(Patch_kwdoc)s
Returns
-------
pp : `matplotlib.patches.Patch`
The patch drawn to represent the area of the inset axes.
p1, p2 : `matplotlib.patches.Patch`
The patches connecting two corners of the inset axes and its area.
"""
rect = TransformedBbox(inset_axes.viewLim, parent_axes.transData)
if 'fill' in kwargs:
pp = BboxPatch(rect, **kwargs)
else:
fill = bool({'fc', 'facecolor', 'color'}.intersection(kwargs))
pp = BboxPatch(rect, fill=fill, **kwargs)
parent_axes.add_patch(pp)
p1 = BboxConnector(inset_axes.bbox, rect, loc1=loc1, **kwargs)
inset_axes.add_patch(p1)
p1.set_clip_on(False)
p2 = BboxConnector(inset_axes.bbox, rect, loc1=loc2, **kwargs)
inset_axes.add_patch(p2)
p2.set_clip_on(False)
return pp, p1, p2
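# Usage sketch (editor's example, not part of the original module): zoom into a
# region of a parent axes and mark the zoomed area.
if __name__ == '__main__':
    import matplotlib.pyplot as plt
    import numpy as np

    fig, ax = plt.subplots()
    x = np.linspace(0, 10, 500)
    ax.plot(x, np.sin(x))
    axins = zoomed_inset_axes(ax, zoom=3, loc='upper right')
    axins.plot(x, np.sin(x))
    axins.set_xlim(4.5, 5.5)
    axins.set_ylim(-1.05, -0.7)
    mark_inset(ax, axins, loc1=2, loc2=4, fc="none", ec="0.5")
    plt.show()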
| [ "[email protected]" ] | |
28c05a44ba70abe18d6362f2f5149765c73adee1 | 4a4a24bf9521ef659d16fb08403242a77a9b9d77 | /aos_l10n_id/models/localization.py | 697c48b804e8e16763168c1459d8a44355bd4266 | [] | no_license | hassanfadl/Odoo12-1 | 601c4969c9d483590e8481e92ecaf4dddaac3847 | bb057424138f99d0a645d185fbd26648385fbdf7 | refs/heads/main | 2023-07-31T22:59:19.597624 | 2021-10-01T06:35:58 | 2021-10-01T06:35:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,822 | py |
##############################################################################
#
# Copyright (C) 2011 ADSOFT OpenERP Partner Indonesia
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from odoo import api, fields, models, _
#from openerp import api, fields, models, _
import logging
_logger = logging.getLogger(__name__)
# try:
# import phonenumbers
# except Exception as e:
# _logger.warning(
# 'Import Error for phonenumbers, you will not be able to validate phone number.\n'
# 'Consider Installing phonenumbers or dependencies: https://pypi.python.org/pypi/phonenumbers/7.2.6.')
# raise e
class res_country_state(models.Model):
_inherit = "res.country.state"
#name = fields.Char(string='Province')
kabupaten_line = fields.One2many('res.kabupaten', 'state_id', string='Kabupaten')
class ResKabupaten(models.Model):
_name = "res.kabupaten"
_description = "List Kabupaten"
name = fields.Char(string='Kabupaten')
state_id = fields.Many2one('res.country.state', string="Province")
kecamatan_line = fields.One2many('res.kecamatan', 'kabupaten_id', string='Kecamatan')
@api.model
def _search(self, args, offset=0, limit=None, order=None, count=False, access_rights_uid=None):
# TDE FIXME: strange
if self._context.get('search_default_province'):
args += [('state_id', '=', self._context['search_default_province'])]
return super(ResKabupaten, self)._search(args, offset=offset, limit=limit, order=order, count=count, access_rights_uid=access_rights_uid)
class ResKecamatan(models.Model):
_name = "res.kecamatan"
_description = "List Kecamatan"
name = fields.Char(string='Kecamatan')
state_id = fields.Many2one('res.country.state', string="Province")
kabupaten_id = fields.Many2one('res.kabupaten', string="Kabupaten")
kelurahan_line = fields.One2many('res.kelurahan', 'kecamatan_id', string='Kelurahan')
@api.model
def _search(self, args, offset=0, limit=None, order=None, count=False, access_rights_uid=None):
# TDE FIXME: strange
if self._context.get('search_default_kabupaten'):
args += [('kabupaten_id', '=', self._context['search_default_kabupaten'])]
if self._context.get('search_default_province'):
args += [('state_id', '=', self._context['search_default_province'])]
return super(ResKecamatan, self)._search(args, offset=offset, limit=limit, order=order, count=count, access_rights_uid=access_rights_uid)
class ResKelurahan(models.Model):
_name = "res.kelurahan"
_description = "List Kelurahan"
name = fields.Char(string='Kelurahan')
state_id = fields.Many2one('res.country.state', string="Province")
kabupaten_id = fields.Many2one('res.kabupaten', string="Kabupaten")
kecamatan_id = fields.Many2one('res.kecamatan', string="Kecamatan")
zip = fields.Char("Kode Post")
@api.model
def _search(self, args, offset=0, limit=None, order=None, count=False, access_rights_uid=None):
# TDE FIXME: strange
if self._context.get('search_default_zip'):
args += [('zip', '=', self._context['search_default_zip'])]
if self._context.get('search_default_kecamatan'):
args += [('kecamatan_id', '=', self._context['search_default_kecamatan'])]
if self._context.get('search_default_kabupaten'):
args += [('kabupaten_id', '=', self._context['search_default_kabupaten'])]
if self._context.get('search_default_province'):
args += [('state_id', '=', self._context['search_default_province'])]
return super(ResKelurahan, self)._search(args, offset=offset, limit=limit, order=order, count=count, access_rights_uid=access_rights_uid)
class res_race(models.Model):
_name = "res.race"
_description = "List RAS/Suku"
name = fields.Char(string='RAS', required=True , translate=True)
class res_religion(models.Model):
_name = "res.religion"
_description = "List Agama"
name = fields.Char(string='Religion', required=True , translate=True)
| [ "[email protected]" ] | |
8728e365e7d7eb7024f6524d63406cd1993322f7 | 4b1d977acfde9354685157e02459c016c041421d | /tests/test_molecules.py | 88a58a7272d50cddca1b5caf2d88a8175dd2b880 | [] | no_license | fujiisoup/pyspectra | f6c50d929e992ab6064ef978a4de0c0647ff3d4b | 152bf37dee7e9eeabf42d24496566022d00d31ec | refs/heads/master | 2023-07-25T08:23:13.637233 | 2023-07-05T16:32:30 | 2023-07-05T16:32:30 | 246,492,492 | 3 | 0 | null | 2023-07-05T16:32:32 | 2020-03-11T06:28:08 | Python | UTF-8 | Python | false | false | 2,363 | py |
import numpy as np
from pyspectra import molecules, units, refractive_index, data
def test_level():
# fulcher
constants = data.diatomic_molecules("H2").sel(state="X 1Σg 1sσ2")
for key in constants:
print(key, constants[key].item())
upper = molecules.level("H2", "d 3Πu 3pπ", 0, 1)
lower = molecules.level("H2", "a 3Σg+ 2sσ", 0, 1)
wavelength = refractive_index.vacuum_to_air(units.eV_to_nm(upper - lower))
print(wavelength, units.eV_to_nm(upper - lower))
assert np.allclose(601.8299, wavelength, atol=2e-3, rtol=0)
def test_OH_X2():
"""
randomly choose levels from Table 27
"""
qnums = []
levels = []
# v, J, parity, 3/2 or 1/2
# qnums.append([0, 0.5, +1, 1]) # F1e
# levels.append(0.0000)
qnums.append([0, 0.5, +1, 2]) # F2e
levels.append(88.1066)
# qnums.append([0, 0.5, -1, 1]) # F1f
# levels.append(0.0000)
qnums.append([0, 0.5, -1, 2]) # F2f
levels.append(88.2642)
qnums.append([0, 1.5, +1, 1]) # F1e
levels.append(-38.2480)
qnums.append([0, 1.5, +1, 2]) # F2e
levels.append(149.3063)
qnums.append([0, 1.5, -1, 1]) # F1f
levels.append(-38.1926)
qnums.append([0, 1.5, -1, 2]) # F2f
levels.append(149.5662)
qnums.append([0, 10.5, +1, 1]) # F1e
levels.append(1976.8000)
qnums.append([0, 10.5, +1, 2]) # F2e
levels.append(2414.9290)
qnums.append([0, 10.5, -1, 1]) # F1f
levels.append(1981.4015)
qnums.append([0, 10.5, -1, 2]) # F2f
levels.append(2412.0731)
v, J, parity, spin = np.array(qnums).T
energies = molecules.level_OH_X2(v, J, parity, spin)
# for lev, en in zip(levels, energies):
# print('{} : {}'.format(lev, en))
assert np.allclose(energies, levels, atol=0.1)
qnums = []
levels = []
qnums.append([4, 13.5, +1, 1]) # F1e
levels.append(16062.2776)
qnums.append([4, 13.5, +1, 2]) # F2e
levels.append(16522.0293)
qnums.append([4, 13.5, -1, 1]) # F1f
levels.append(16068.1260)
qnums.append([4, 13.5, -1, 2]) # F2f
levels.append(16517.9751)
v, J, parity, spin = np.array(qnums).T
energies = molecules.level_OH_X2(v, J, parity, spin)
# for lev, en in zip(levels, energies):
# print('{} : {}'.format(lev, en))
assert np.allclose(energies, levels, atol=0.1)
| [ "[email protected]" ] | |
2bdc663042e1e1aefc99f900694814b55def8c35 | 83de24182a7af33c43ee340b57755e73275149ae | /aliyun-python-sdk-arms/aliyunsdkarms/request/v20190808/SearchEventsRequest.py | 3fae22f10c2789e7944a1a6c990d8133f134697a | [
"Apache-2.0"
] | permissive | aliyun/aliyun-openapi-python-sdk | 4436ca6c57190ceadbc80f0b1c35b1ab13c00c7f | 83fd547946fd6772cf26f338d9653f4316c81d3c | refs/heads/master | 2023-08-04T12:32:57.028821 | 2023-08-04T06:00:29 | 2023-08-04T06:00:29 | 39,558,861 | 1,080 | 721 | NOASSERTION | 2023-09-14T08:51:06 | 2015-07-23T09:39:45 | Python | UTF-8 | Python | false | false | 2,864 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkarms.endpoint import endpoint_data
class SearchEventsRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'ARMS', '2019-08-08', 'SearchEvents','arms')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_IsTrigger(self): # Integer
return self.get_query_params().get('IsTrigger')
def set_IsTrigger(self, IsTrigger): # Integer
self.add_query_param('IsTrigger', IsTrigger)
def get_AppType(self): # String
return self.get_query_params().get('AppType')
def set_AppType(self, AppType): # String
self.add_query_param('AppType', AppType)
def get_EndTime(self): # Long
return self.get_query_params().get('EndTime')
def set_EndTime(self, EndTime): # Long
self.add_query_param('EndTime', EndTime)
def get_Pid(self): # String
return self.get_query_params().get('Pid')
def set_Pid(self, Pid): # String
self.add_query_param('Pid', Pid)
def get_CurrentPage(self): # Integer
return self.get_query_params().get('CurrentPage')
def set_CurrentPage(self, CurrentPage): # Integer
self.add_query_param('CurrentPage', CurrentPage)
def get_StartTime(self): # Long
return self.get_query_params().get('StartTime')
def set_StartTime(self, StartTime): # Long
self.add_query_param('StartTime', StartTime)
def get_AlertType(self): # Integer
return self.get_query_params().get('AlertType')
def set_AlertType(self, AlertType): # Integer
self.add_query_param('AlertType', AlertType)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def get_AlertId(self): # Long
return self.get_query_params().get('AlertId')
def set_AlertId(self, AlertId): # Long
self.add_query_param('AlertId', AlertId)
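# Usage sketch (editor's example; access key, secret and region are placeholders):
if __name__ == '__main__':
    from aliyunsdkcore.client import AcsClient
    client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
    request = SearchEventsRequest()
    request.set_PageSize(20)
    print(client.do_action_with_exception(request))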
| [ "[email protected]" ] | |
9b1a9ff5d3d3ad9d0086cc8d179cdb717f6b6bde | 6fcfb638fa725b6d21083ec54e3609fc1b287d9e | /python/yadayada_acd_cli/acd_cli-master/acdcli/utils/conf.py | 9ebf249680620acffb9a172fb3c9591bb51f646c | [] | no_license | LiuFang816/SALSTM_py_data | 6db258e51858aeff14af38898fef715b46980ac1 | d494b3041069d377d6a7a9c296a14334f2fa5acc | refs/heads/master | 2022-12-25T06:39:52.222097 | 2019-12-12T08:49:07 | 2019-12-12T08:49:07 | 227,546,525 | 10 | 7 | null | 2022-12-19T02:53:01 | 2019-12-12T07:29:39 | Python | UTF-8 | Python | false | false | 584 | py |
import configparser
import logging
import os
logger = logging.getLogger(__name__)
def get_conf(path, filename, default_conf: configparser.ConfigParser) \
-> configparser.ConfigParser:
conf = configparser.ConfigParser()
conf.read_dict(default_conf)
conffn = os.path.join(path, filename)
try:
with open(conffn) as cf:
conf.read_file(cf)
except OSError:
pass
logger.debug('configuration resulting from merging default and %s: %s' % (filename,
{section: dict(conf[section]) for section in conf}))
return conf
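# Usage sketch (editor's example; the path, filename and keys are hypothetical):
if __name__ == '__main__':
    defaults = configparser.ConfigParser()
    defaults.read_dict({'download': {'max_retries': '3'}})
    merged = get_conf(os.path.expanduser('~/.config/acd_cli'), 'acd_cli.ini', defaults)
    print(merged['download']['max_retries'])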
| [ "[email protected]" ] | |
24c1f145fb8771680cd3bc3dafa1f4db36c625b3 | 3c000380cbb7e8deb6abf9c6f3e29e8e89784830 | /venv/Lib/site-packages/cobra/modelimpl/dbgac/rsfromepg.py | 857baf4940ff69594982fe829e028d01e3e1d557 | [] | no_license | bkhoward/aciDOM | 91b0406f00da7aac413a81c8db2129b4bfc5497b | f2674456ecb19cf7299ef0c5a0887560b8b315d0 | refs/heads/master | 2023-03-27T23:37:02.836904 | 2021-03-26T22:07:54 | 2021-03-26T22:07:54 | 351,855,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,594 | py |
# coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2020 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class RsFromEpg(Mo):
"""
A source relation to the set of requirements for an application-level endpoint group instance.
"""
meta = SourceRelationMeta("cobra.model.dbgac.RsFromEpg", "cobra.model.fv.AEPg")
meta.cardinality = SourceRelationMeta.N_TO_M
meta.moClassName = "dbgacRsFromEpg"
meta.rnFormat = "rsfromEpg-[%(tDn)s]"
meta.category = MoCategory.RELATIONSHIP_TO_GLOBAL
meta.label = "Source EPG Relation for Atomic Counter Policy"
meta.writeAccessMask = 0xc001
meta.readAccessMask = 0xc001
meta.isDomainable = False
meta.isReadOnly = False
meta.isConfigurable = True
meta.isDeletable = True
meta.isContextRoot = False
meta.childClasses.add("cobra.model.tag.Tag")
meta.childClasses.add("cobra.model.fault.Counts")
meta.childClasses.add("cobra.model.health.Inst")
meta.childClasses.add("cobra.model.fault.Inst")
meta.childClasses.add("cobra.model.aaa.RbacAnnotation")
meta.childClasses.add("cobra.model.tag.Annotation")
meta.childNamesAndRnPrefix.append(("cobra.model.tag.Annotation", "annotationKey-"))
meta.childNamesAndRnPrefix.append(("cobra.model.aaa.RbacAnnotation", "rbacDom-"))
meta.childNamesAndRnPrefix.append(("cobra.model.tag.Tag", "tagKey-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Counts", "fltCnts"))
meta.childNamesAndRnPrefix.append(("cobra.model.health.Inst", "health"))
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Inst", "fault-"))
meta.parentClasses.add("cobra.model.dbgac.EpgToEp")
meta.parentClasses.add("cobra.model.dbgac.EpgToIp")
meta.parentClasses.add("cobra.model.dbgac.EpgToEpg")
meta.superClasses.add("cobra.model.reln.Inst")
meta.superClasses.add("cobra.model.reln.To")
meta.rnPrefixes = [
('rsfromEpg-', True),
]
prop = PropMeta("str", "annotation", "annotation", 37761, PropCategory.REGULAR)
prop.label = "Annotation. Suggested format orchestrator:value"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 128)]
prop.regex = ['[a-zA-Z0-9_.:-]+']
meta.props.add("annotation", prop)
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "extMngdBy", "extMngdBy", 39900, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "undefined"
prop._addConstant("msc", "msc", 1)
prop._addConstant("undefined", "undefined", 0)
meta.props.add("extMngdBy", prop)
prop = PropMeta("str", "forceResolve", "forceResolve", 107, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = True
prop.defaultValueStr = "yes"
prop._addConstant("no", None, False)
prop._addConstant("yes", None, True)
meta.props.add("forceResolve", prop)
prop = PropMeta("str", "lcOwn", "lcOwn", 9, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "local"
prop._addConstant("implicit", "implicit", 4)
prop._addConstant("local", "local", 0)
prop._addConstant("policy", "policy", 1)
prop._addConstant("replica", "replica", 2)
prop._addConstant("resolveOnBehalf", "resolvedonbehalf", 3)
meta.props.add("lcOwn", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "monPolDn", "monPolDn", 14593, PropCategory.REGULAR)
prop.label = "Monitoring policy attached to this observable object"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("monPolDn", prop)
prop = PropMeta("str", "rType", "rType", 106, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 1
prop.defaultValueStr = "mo"
prop._addConstant("local", "local", 3)
prop._addConstant("mo", "mo", 1)
prop._addConstant("service", "service", 2)
meta.props.add("rType", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "state", "state", 103, PropCategory.REGULAR)
prop.label = "State"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "unformed"
prop._addConstant("cardinality-violation", "cardinality-violation", 5)
prop._addConstant("formed", "formed", 1)
prop._addConstant("invalid-target", "invalid-target", 4)
prop._addConstant("missing-target", "missing-target", 2)
prop._addConstant("unformed", "unformed", 0)
meta.props.add("state", prop)
prop = PropMeta("str", "stateQual", "stateQual", 104, PropCategory.REGULAR)
prop.label = "State Qualifier"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "none"
prop._addConstant("default-target", "default-target", 2)
prop._addConstant("mismatch-target", "mismatch-target", 1)
prop._addConstant("none", "none", 0)
meta.props.add("stateQual", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
prop = PropMeta("str", "tCl", "tCl", 12875, PropCategory.REGULAR)
prop.label = "Target-class"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 1981
prop.defaultValueStr = "fvAEPg"
prop._addConstant("fvAEPg", None, 1981)
prop._addConstant("unspecified", "unspecified", 0)
meta.props.add("tCl", prop)
prop = PropMeta("str", "tDn", "tDn", 12874, PropCategory.REGULAR)
prop.label = "Target-dn"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.isNaming = True
meta.props.add("tDn", prop)
prop = PropMeta("str", "tType", "tType", 105, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 1
prop.defaultValueStr = "mo"
prop._addConstant("all", "all", 2)
prop._addConstant("mo", "mo", 1)
prop._addConstant("name", "name", 0)
meta.props.add("tType", prop)
prop = PropMeta("str", "uid", "uid", 8, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("uid", prop)
meta.namingProps.append(getattr(meta.props, "tDn"))
getattr(meta.props, "tDn").needDelimiter = True
# Deployment Meta
meta.deploymentQuery = True
meta.deploymentType = "Ancestor"
meta.deploymentQueryPaths.append(DeploymentPathMeta("AcEpgIpPolToLocale3", "Fabric Nodes(EP)", "cobra.model.fabric.Node"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("AcEpgIpPolToLocale2", "Fabric Nodes(Service EPg)", "cobra.model.nw.If"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("AcEpgIpPolToLocale1", "Fabric Nodes(EPg)", "cobra.model.nw.If"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("AcEpgEpPolToLocale3", "Fabric Nodes(EP)", "cobra.model.fabric.Node"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("AcEpgEpPolToLocale2", "Fabric Nodes(Service EPg)", "cobra.model.nw.If"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("AcEpgEpPolToLocale1", "Fabric Nodes(EPg)", "cobra.model.nw.If"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("AcEpgEpgPolToLocale", "Fabric Nodes", "cobra.model.nw.If"))
def __init__(self, parentMoOrDn, tDn, markDirty=True, **creationProps):
namingVals = [tDn]
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
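# Usage sketch (editor's example; the DNs below are hypothetical and this is
# offline object construction only; committing requires a live APIC session):
if __name__ == '__main__':
    rel = RsFromEpg('uni/tn-t1/acEpgToEpg-pol1', tDn='uni/tn-t1/ap-a1/epg-e1')
    print(rel.dn, rel.rn)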
# End of package file
# ##################################################
| [ "[email protected]" ] | |
4b52b6b9730607564cb4bb97e081a34ed237d59b | 871dddb5c8059d96b767a323b0f87d3fbb62e786 | /test/unit/vint/ast/plugin/scope_plugin/test_scope_detector.py | 9b57c816b88abb523c7f5ccfecb0d56e9ce5d76b | [
"MIT"
] | permissive | msabramo/vint | 6ef12ed61d54d0d2b2a9d1da1ce90c0e2c734ab2 | f13569f2a62ff13ff8ad913e7d6fb2c57953af20 | refs/heads/master | 2023-08-24T01:20:14.699485 | 2014-12-31T18:28:59 | 2014-12-31T18:28:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,134 | py | import pytest
from vint.ast.node_type import NodeType
from vint.ast.plugin.scope_plugin.scope_detector import ScopeDetector, ScopeVisibility as Vis
from vint.ast.plugin.scope_plugin.identifier_classifier import (
IDENTIFIER_ATTRIBUTE,
IDENTIFIER_ATTRIBUTE_DYNAMIC_FLAG,
IDENTIFIER_ATTRIBUTE_DEFINITION_FLAG,
IDENTIFIER_ATTRIBUTE_MEMBER_FLAG,
IDENTIFIER_ATTRIBUTE_FUNCTION_FLAG,
IDENTIFIER_ATTRIBUTE_AUTOLOAD_FLAG,
)
def create_scope(visibility):
return {
'scope_visibility': visibility,
}
def create_scope_visibility_hint(visibility, is_implicit=False):
return {
'scope_visibility': visibility,
'is_implicit': is_implicit,
}
def create_id(id_value, is_declarative=True, is_function=False, is_autoload=False):
return {
'type': NodeType.IDENTIFIER.value,
'value': id_value,
IDENTIFIER_ATTRIBUTE: {
IDENTIFIER_ATTRIBUTE_DEFINITION_FLAG: is_declarative,
IDENTIFIER_ATTRIBUTE_DYNAMIC_FLAG: False,
IDENTIFIER_ATTRIBUTE_MEMBER_FLAG: False,
IDENTIFIER_ATTRIBUTE_FUNCTION_FLAG: is_function,
IDENTIFIER_ATTRIBUTE_AUTOLOAD_FLAG: is_autoload,
},
}
def create_env(env_value):
return {
'type': NodeType.ENV.value,
'value': env_value,
IDENTIFIER_ATTRIBUTE: {
IDENTIFIER_ATTRIBUTE_DEFINITION_FLAG: True,
IDENTIFIER_ATTRIBUTE_DYNAMIC_FLAG: False,
IDENTIFIER_ATTRIBUTE_MEMBER_FLAG: False,
IDENTIFIER_ATTRIBUTE_FUNCTION_FLAG: False,
IDENTIFIER_ATTRIBUTE_AUTOLOAD_FLAG: False,
},
}
def create_option(opt_value):
return {
'type': NodeType.OPTION.value,
'value': opt_value,
IDENTIFIER_ATTRIBUTE: {
IDENTIFIER_ATTRIBUTE_DEFINITION_FLAG: True,
IDENTIFIER_ATTRIBUTE_DYNAMIC_FLAG: False,
IDENTIFIER_ATTRIBUTE_MEMBER_FLAG: False,
IDENTIFIER_ATTRIBUTE_FUNCTION_FLAG: False,
IDENTIFIER_ATTRIBUTE_AUTOLOAD_FLAG: False,
},
}
def create_reg(reg_value):
return {
'type': NodeType.REG.value,
'value': reg_value,
IDENTIFIER_ATTRIBUTE: {
IDENTIFIER_ATTRIBUTE_DEFINITION_FLAG: True,
IDENTIFIER_ATTRIBUTE_DYNAMIC_FLAG: False,
IDENTIFIER_ATTRIBUTE_MEMBER_FLAG: False,
IDENTIFIER_ATTRIBUTE_FUNCTION_FLAG: False,
IDENTIFIER_ATTRIBUTE_AUTOLOAD_FLAG: False,
},
}
def create_curlyname(is_declarative=True):
""" Create a node as a `my_{'var'}`
"""
return {
'type': NodeType.CURLYNAME.value,
'value': [
{
'type': NodeType.CURLYNAMEPART.value,
'value': 'my_',
},
{
'type': NodeType.CURLYNAMEEXPR.value,
'value': {
'type': NodeType.CURLYNAMEEXPR.value,
'value': 'var',
},
}
],
IDENTIFIER_ATTRIBUTE: {
IDENTIFIER_ATTRIBUTE_DEFINITION_FLAG: is_declarative,
IDENTIFIER_ATTRIBUTE_DYNAMIC_FLAG: True,
IDENTIFIER_ATTRIBUTE_MEMBER_FLAG: False,
IDENTIFIER_ATTRIBUTE_FUNCTION_FLAG: False,
IDENTIFIER_ATTRIBUTE_AUTOLOAD_FLAG: False,
},
}
def create_subscript_member(is_declarative=True):
return {
'type': NodeType.IDENTIFIER.value,
'value': 'member',
IDENTIFIER_ATTRIBUTE: {
IDENTIFIER_ATTRIBUTE_DEFINITION_FLAG: is_declarative,
IDENTIFIER_ATTRIBUTE_DYNAMIC_FLAG: False,
IDENTIFIER_ATTRIBUTE_MEMBER_FLAG: True,
IDENTIFIER_ATTRIBUTE_FUNCTION_FLAG: False,
IDENTIFIER_ATTRIBUTE_AUTOLOAD_FLAG: False,
},
}
@pytest.mark.parametrize(
'context_scope_visibility, id_node, expected_scope_visibility, expected_implicity', [
# Declarative variable test
(Vis.SCRIPT_LOCAL, create_id('g:explicit_global'), Vis.GLOBAL_LIKE, False),
(Vis.SCRIPT_LOCAL, create_id('implicit_global'), Vis.GLOBAL_LIKE, True),
(Vis.FUNCTION_LOCAL, create_id('g:explicit_global'), Vis.GLOBAL_LIKE, False),
(Vis.SCRIPT_LOCAL, create_id('b:buffer_local'), Vis.GLOBAL_LIKE, False),
(Vis.FUNCTION_LOCAL, create_id('b:buffer_local'), Vis.GLOBAL_LIKE, False),
(Vis.SCRIPT_LOCAL, create_id('w:window_local'), Vis.GLOBAL_LIKE, False),
(Vis.FUNCTION_LOCAL, create_id('w:window_local'), Vis.GLOBAL_LIKE, False),
(Vis.SCRIPT_LOCAL, create_id('s:script_local'), Vis.SCRIPT_LOCAL, False),
(Vis.FUNCTION_LOCAL, create_id('s:script_local'), Vis.SCRIPT_LOCAL, False),
(Vis.FUNCTION_LOCAL, create_id('l:explicit_function_local'), Vis.FUNCTION_LOCAL, False),
(Vis.FUNCTION_LOCAL, create_id('implicit_function_local'), Vis.FUNCTION_LOCAL, True),
(Vis.FUNCTION_LOCAL, create_id('a:param'), Vis.FUNCTION_LOCAL, False),
(Vis.FUNCTION_LOCAL, create_id('a:000'), Vis.FUNCTION_LOCAL, False),
(Vis.FUNCTION_LOCAL, create_id('a:1'), Vis.FUNCTION_LOCAL, False),
(Vis.SCRIPT_LOCAL, create_id('v:count'), Vis.BUILTIN, False),
(Vis.FUNCTION_LOCAL, create_id('v:count'), Vis.BUILTIN, False),
(Vis.FUNCTION_LOCAL, create_id('count'), Vis.BUILTIN, True),
(Vis.SCRIPT_LOCAL, create_curlyname(), Vis.UNANALYZABLE, False),
(Vis.FUNCTION_LOCAL, create_curlyname(), Vis.UNANALYZABLE, False),
(Vis.SCRIPT_LOCAL, create_subscript_member(), Vis.UNANALYZABLE, False),
(Vis.FUNCTION_LOCAL, create_subscript_member(), Vis.UNANALYZABLE, False),
# Referencing variable test
(Vis.SCRIPT_LOCAL, create_id('g:explicit_global', is_declarative=False), Vis.GLOBAL_LIKE, False),
(Vis.SCRIPT_LOCAL, create_id('implicit_global', is_declarative=False), Vis.GLOBAL_LIKE, True),
(Vis.FUNCTION_LOCAL, create_id('g:explicit_global', is_declarative=False), Vis.GLOBAL_LIKE, False),
(Vis.SCRIPT_LOCAL, create_id('b:buffer_local', is_declarative=False), Vis.GLOBAL_LIKE, False),
(Vis.FUNCTION_LOCAL, create_id('b:buffer_local', is_declarative=False), Vis.GLOBAL_LIKE, False),
(Vis.SCRIPT_LOCAL, create_id('w:window_local', is_declarative=False), Vis.GLOBAL_LIKE, False),
(Vis.FUNCTION_LOCAL, create_id('w:window_local', is_declarative=False), Vis.GLOBAL_LIKE, False),
(Vis.SCRIPT_LOCAL, create_id('s:script_local', is_declarative=False), Vis.SCRIPT_LOCAL, False),
(Vis.FUNCTION_LOCAL, create_id('s:script_local', is_declarative=False), Vis.SCRIPT_LOCAL, False),
(Vis.FUNCTION_LOCAL, create_id('l:explicit_function_local', is_declarative=False), Vis.FUNCTION_LOCAL, False),
(Vis.FUNCTION_LOCAL, create_id('implicit_function_local', is_declarative=False), Vis.FUNCTION_LOCAL, True),
(Vis.FUNCTION_LOCAL, create_id('a:param', is_declarative=False), Vis.FUNCTION_LOCAL, False),
(Vis.FUNCTION_LOCAL, create_id('a:000', is_declarative=False), Vis.FUNCTION_LOCAL, False),
(Vis.FUNCTION_LOCAL, create_id('a:1', is_declarative=False), Vis.FUNCTION_LOCAL, False),
(Vis.SCRIPT_LOCAL, create_id('v:count', is_declarative=False), Vis.BUILTIN, False),
(Vis.FUNCTION_LOCAL, create_id('v:count', is_declarative=False), Vis.BUILTIN, False),
(Vis.FUNCTION_LOCAL, create_id('count', is_declarative=False), Vis.BUILTIN, True),
(Vis.SCRIPT_LOCAL, create_curlyname(is_declarative=False), Vis.UNANALYZABLE, False),
(Vis.FUNCTION_LOCAL, create_curlyname(is_declarative=False), Vis.UNANALYZABLE, False),
(Vis.SCRIPT_LOCAL, create_subscript_member(is_declarative=False), Vis.UNANALYZABLE, False),
(Vis.FUNCTION_LOCAL, create_subscript_member(is_declarative=False), Vis.UNANALYZABLE, False),
(Vis.FUNCTION_LOCAL, create_id('file#func', is_autoload=True, is_function=True, is_declarative=False), Vis.GLOBAL_LIKE, True),
]
)
def test_detect_scope_visibility(context_scope_visibility, id_node, expected_scope_visibility, expected_implicity):
scope = create_scope(context_scope_visibility)
scope_visibility_hint = ScopeDetector.detect_scope_visibility(id_node, scope)
expected_scope_visibility_hint = create_scope_visibility_hint(expected_scope_visibility,
is_implicit=expected_implicity)
assert expected_scope_visibility_hint == scope_visibility_hint
@pytest.mark.parametrize(
'context_scope_visibility, node, expected_variable_name', [
(Vis.SCRIPT_LOCAL, create_id('g:explicit_global'), 'g:explicit_global'),
(Vis.SCRIPT_LOCAL, create_id('implicit_global'), 'g:implicit_global'),
(Vis.SCRIPT_LOCAL, create_id('implicit_global', is_declarative=False), 'g:implicit_global'),
(Vis.FUNCTION_LOCAL, create_id('l:explicit_function_local'), 'l:explicit_function_local'),
(Vis.FUNCTION_LOCAL, create_id('implicit_function_local'), 'l:implicit_function_local'),
(Vis.FUNCTION_LOCAL, create_id('implicit_function_local', is_declarative=False), 'l:implicit_function_local'),
(Vis.SCRIPT_LOCAL, create_id('v:count'), 'v:count'),
(Vis.FUNCTION_LOCAL, create_id('v:count'), 'v:count'),
(Vis.FUNCTION_LOCAL, create_id('count'), 'v:count'),
(Vis.SCRIPT_LOCAL, create_env('$ENV'), '$ENV'),
(Vis.SCRIPT_LOCAL, create_option('&OPT'), '&OPT'),
(Vis.SCRIPT_LOCAL, create_reg('@"'), '@"'),
]
)
def test_normalize_variable_name(context_scope_visibility, node, expected_variable_name):
scope = create_scope(context_scope_visibility)
normalize_variable_name = ScopeDetector.normalize_variable_name(node, scope)
assert expected_variable_name == normalize_variable_name
@pytest.mark.parametrize(
'id_value, is_function, expected_result', [
('my_var', False, False),
('count', False, True),
('v:count', False, True),
('MyFunc', True, False),
('localtime', True, True),
]
)
def test_is_builtin_variable(id_value, is_function, expected_result):
id_node = create_id(id_value, is_function=is_function)
result = ScopeDetector.is_builtin_variable(id_node)
assert expected_result == result
@pytest.mark.parametrize(
'id_value, context_scope_visibility, expected_result', [
('g:my_var', Vis.SCRIPT_LOCAL, True),
('g:my_var', Vis.FUNCTION_LOCAL, True),
('my_var', Vis.SCRIPT_LOCAL, True),
('my_var', Vis.FUNCTION_LOCAL, False),
('s:my_var', Vis.SCRIPT_LOCAL, False),
('s:my_var', Vis.FUNCTION_LOCAL, False),
('count', Vis.SCRIPT_LOCAL, True),
('v:count', Vis.SCRIPT_LOCAL, True),
('count', Vis.FUNCTION_LOCAL, True),
('v:count', Vis.FUNCTION_LOCAL, True),
]
)
def test_is_global_variable(id_value, context_scope_visibility, expected_result):
id_node = create_id(id_value)
context_scope = create_scope(context_scope_visibility)
result = ScopeDetector.is_global_variable(id_node, context_scope)
assert expected_result == result
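# Sketch (editor's note): pytest normally drives these parametrized cases, but
# each test function can also be called directly with one parameter tuple.
if __name__ == '__main__':
    test_is_builtin_variable('count', False, True)
    print('ok')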
| [ "[email protected]" ] | |
cf8cd812164f4022a58ebe2f98f32461359c3c54 | 830acb926cc5cf5a12f2045c8497d6f4aa1c2ef2 | /Hangman/Problems/Markdown heading/task.py | 0dade62780dd97612ca478a683d4cbd91f967a34 | [] | no_license | BuyankinM/JetBrainsAcademyProjects | ca2223875ea4aab3ee7fceedc8e293bdb6e1fdcf | d5f9fcde4298af714960b2755f762141de796694 | refs/heads/main | 2023-02-26T05:47:26.070972 | 2021-02-03T22:10:53 | 2021-02-03T22:10:53 | 335,762,702 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 100 | py |
def heading(sym, num=1):
num = max(num, 1)
num = min(num, 6)
return f"{'#' * num} {sym}" | [
"[email protected]"
] | |
f09e09b066b83eb93839703b12f7fe62adf4b05a | 8be3fbe41873b5682eed4da3aab93be657a893bc | /nested_admin/tests/three_deep/tests.py | a25c53f636b475f837708e24585fcaee22e597d2 | [
"BSD-3-Clause",
"BSD-2-Clause"
] | permissive | theKomix/django-nested-admin | 0b5f10b88928dc3167a720cf9a36f2ffe428cba7 | 2bfed729ba17bc69e4fe98d4a672b6b34186ae0f | refs/heads/master | 2020-03-30T12:03:51.430420 | 2018-12-26T05:24:04 | 2018-12-26T05:24:04 | 151,206,354 | 0 | 1 | NOASSERTION | 2018-12-07T15:11:31 | 2018-10-02T05:49:00 | Python | UTF-8 | Python | false | false | 4,893 | py | from nested_admin.tests.base import BaseNestedAdminTestCase
from .models import TopLevel, LevelOne, LevelTwo, LevelThree
class TestDeepNesting(BaseNestedAdminTestCase):
root_model = TopLevel
nested_models = (LevelOne, LevelTwo, LevelThree)
@classmethod
def setUpClass(cls):
super(TestDeepNesting, cls).setUpClass()
cls.l1_model, cls.l2_model, cls.l3_model = cls.nested_models
def test_validationerror_on_empty_extra_parent_form(self):
toplevel = TopLevel.objects.create(name='a')
self.load_admin(toplevel)
self.set_field('name', 'c', indexes=[0, 0])
self.set_field('name', 'd', indexes=[0, 0, 0])
self.save_form()
field_id_with_error = self.selenium.execute_script(
"return $('ul.errorlist li').closest('.form-row').find('input').attr('id')")
self.assertEqual(field_id_with_error, "id_children-0-name")
def test_create_new(self):
self.load_admin()
self.set_field('name', 'a')
self.set_field('name', 'b', [0])
self.set_field('name', 'c', [0, 0])
self.set_field('name', 'd', [0, 0, 0])
self.save_form()
root_instances = self.root_model.objects.all()
self.assertNotEqual(len(root_instances), 0, "%s did not save" % self.root_model.__name__)
self.assertEqual(len(root_instances), 1, "Too many %s found" % self.root_model.__name__)
root = root_instances[0]
self.assertEqual(root.name, 'a', "%s.name has wrong value" % self.root_model.__name__)
l1_instances = root.children.all()
self.assertNotEqual(len(l1_instances), 0, "%s did not save" % self.l1_model.__name__)
self.assertEqual(len(l1_instances), 1, "Too many %s found" % self.l1_model.__name__)
l1_instance = l1_instances[0]
self.assertEqual(l1_instance.name, 'b', "%s.name has wrong value" % self.l1_model.__name__)
l2_instances = l1_instance.children.all()
self.assertNotEqual(len(l2_instances), 0, "%s did not save" % self.l2_model.__name__)
self.assertEqual(len(l2_instances), 1, "Too many %s found" % self.l2_model.__name__)
l2_instance = l2_instances[0]
self.assertEqual(l2_instance.name, 'c', "%s.name has wrong value" % self.l2_model.__name__)
l3_instances = l2_instance.children.all()
self.assertNotEqual(len(l3_instances), 0, "%s did not save" % self.l3_model.__name__)
self.assertEqual(len(l3_instances), 1, "Too many %s found" % self.l3_model.__name__)
l3_instance = l3_instances[0]
self.assertEqual(l3_instance.name, 'd', "%s.name has wrong value" % self.l3_model.__name__)
def test_create_new_no_extras(self):
self.load_admin()
self.set_field('name', 'a')
self.remove_inline([0])
self.add_inline(name='b')
self.remove_inline([0, 0])
self.add_inline([0], name='c')
self.remove_inline([0, 0, 0])
self.add_inline([0, 0], name='d')
self.save_form()
root_instances = self.root_model.objects.all()
self.assertNotEqual(len(root_instances), 0, "%s did not save" % self.root_model.__name__)
self.assertEqual(len(root_instances), 1, "Too many %s found" % self.root_model.__name__)
root = root_instances[0]
self.assertEqual(root.name, 'a', "%s.name has wrong value" % self.root_model.__name__)
l1_instances = root.children.all()
self.assertNotEqual(len(l1_instances), 0, "%s did not save" % self.l1_model.__name__)
self.assertEqual(len(l1_instances), 1, "Too many %s found" % self.l1_model.__name__)
l1_instance = l1_instances[0]
self.assertEqual(l1_instance.name, 'b', "%s.name has wrong value" % self.l1_model.__name__)
l2_instances = l1_instance.children.all()
self.assertNotEqual(len(l2_instances), 0, "%s did not save" % self.l2_model.__name__)
self.assertEqual(len(l2_instances), 1, "Too many %s found" % self.l2_model.__name__)
l2_instance = l2_instances[0]
self.assertEqual(l2_instance.name, 'c', "%s.name has wrong value" % self.l2_model.__name__)
l3_instances = l2_instance.children.all()
self.assertNotEqual(len(l3_instances), 0, "%s did not save" % self.l3_model.__name__)
self.assertEqual(len(l3_instances), 1, "Too many %s found" % self.l3_model.__name__)
l3_instance = l3_instances[0]
self.assertEqual(l3_instance.name, 'd', "%s.name has wrong value" % self.l3_model.__name__)
def test_save_missing_intermediate_inline(self):
self.load_admin()
self.set_field('name', 'a')
self.set_field('name', 'b', [0])
self.set_field('name', 'd', [0, 0, 0])
self.save_form()
root_instances = self.root_model.objects.all()
self.assertNotEqual(len(root_instances), 0, "%s did not save" % self.root_model.__name__)
| [
"[email protected]"
] | |
cb3b6dee35a6278db9f968b94e96589d790b669c | 699a43917ce75b2026a450f67d85731a0f719e01 | /comonprefix/venv/Scripts/pip3.7-script.py | e913bfc5f695c5d68e2ba38d50ad1f005852ef42 | [] | no_license | wusanshou2017/Leetcode | 96ab81ae38d6e04739c071acfc0a5f46a1c9620b | c4b85ca0e23700b84e4a8a3a426ab634dba0fa88 | refs/heads/master | 2021-11-16T01:18:27.886085 | 2021-10-14T09:54:47 | 2021-10-14T09:54:47 | 107,402,187 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 396 | py | #!E:\lc\comonprefix\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip3.7'
__requires__ = 'pip==10.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==10.0.1', 'console_scripts', 'pip3.7')()
)
| [
"[email protected]"
] | |
20acc266a70d5447f23a333ff82231fd7cc9eac7 | 9edaf93c833ba90ae9a903aa3c44c407a7e55198 | /netex/models/version_type_enumeration.py | 0e80ef2e2f270f90f4bcec2a491a896e7d6de716 | [] | no_license | tefra/xsdata-samples | c50aab4828b8c7c4448dbdab9c67d1ebc519e292 | ef027fe02e6a075d8ed676c86a80e9647d944571 | refs/heads/main | 2023-08-14T10:31:12.152696 | 2023-07-25T18:01:22 | 2023-07-25T18:01:22 | 222,543,692 | 6 | 1 | null | 2023-06-25T07:21:04 | 2019-11-18T21:00:37 | Python | UTF-8 | Python | false | false | 155 | py | from enum import Enum
__NAMESPACE__ = "http://www.netex.org.uk/netex"
class VersionTypeEnumeration(Enum):
POINT = "point"
BASELINE = "baseline"
| [
"[email protected]"
] | |
336c4ceef935ca67574f23848288f7334f4204ed | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp-with-texts/ZYXEL-RIP-MIB.py | a6c50ee23acbafeec25fe60539a149260b145846 | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 4,835 | py | #
# PySNMP MIB module ZYXEL-RIP-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ZYXEL-RIP-MIB
# Produced by pysmi-0.3.4 at Wed May 1 15:51:35 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "SingleValueConstraint", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsUnion")
EnabledStatus, = mibBuilder.importSymbols("P-BRIDGE-MIB", "EnabledStatus")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
NotificationType, ModuleIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, Gauge32, ObjectIdentity, iso, Counter32, Unsigned32, IpAddress, Counter64, MibIdentifier, Integer32, Bits = mibBuilder.importSymbols("SNMPv2-SMI", "NotificationType", "ModuleIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "Gauge32", "ObjectIdentity", "iso", "Counter32", "Unsigned32", "IpAddress", "Counter64", "MibIdentifier", "Integer32", "Bits")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
esMgmt, = mibBuilder.importSymbols("ZYXEL-ES-SMI", "esMgmt")
zyRouteDomainIpAddress, zyRouteDomainIpMaskBits = mibBuilder.importSymbols("ZYXEL-IP-FORWARD-MIB", "zyRouteDomainIpAddress", "zyRouteDomainIpMaskBits")
zyxelRip = ModuleIdentity((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 74))
if mibBuilder.loadTexts: zyxelRip.setLastUpdated('201207010000Z')
if mibBuilder.loadTexts: zyxelRip.setOrganization('Enterprise Solution ZyXEL')
if mibBuilder.loadTexts: zyxelRip.setContactInfo('')
if mibBuilder.loadTexts: zyxelRip.setDescription('The subtree for Routing Information Protocol (RIP)')
zyxelRipSetup = MibIdentifier((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 74, 1))
zyRipState = MibScalar((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 74, 1, 1), EnabledStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zyRipState.setStatus('current')
if mibBuilder.loadTexts: zyRipState.setDescription('Enabled/Disabled RIP on the Switch.')
zyRipDistance = MibScalar((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 74, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zyRipDistance.setStatus('current')
if mibBuilder.loadTexts: zyRipDistance.setDescription('The administrative distance of RIP routes.')
zyxelRipRouteDomainTable = MibTable((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 74, 1, 3), )
if mibBuilder.loadTexts: zyxelRipRouteDomainTable.setStatus('current')
if mibBuilder.loadTexts: zyxelRipRouteDomainTable.setDescription('The table contains RIP route domain configuration.')
zyxelRipRouteDomainEntry = MibTableRow((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 74, 1, 3, 1), ).setIndexNames((0, "ZYXEL-IP-FORWARD-MIB", "zyRouteDomainIpAddress"), (0, "ZYXEL-IP-FORWARD-MIB", "zyRouteDomainIpMaskBits"))
if mibBuilder.loadTexts: zyxelRipRouteDomainEntry.setStatus('current')
if mibBuilder.loadTexts: zyxelRipRouteDomainEntry.setDescription('An entry contains RIP route domain configuration.')
zyRipRouteDomainDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 74, 1, 3, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("none", 0), ("outgoing", 1), ("incoming", 2), ("both", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zyRipRouteDomainDirection.setStatus('current')
if mibBuilder.loadTexts: zyRipRouteDomainDirection.setDescription('RIP direction which controls the sending and receiving of RIP packet.')
zyRipRouteDomainVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 74, 1, 3, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("v1", 0), ("v2b", 1), ("v2m", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zyRipRouteDomainVersion.setStatus('current')
if mibBuilder.loadTexts: zyRipRouteDomainVersion.setDescription('RIP version which controls the format and the broadcasting method of the RIP packets that the switch sends.')
mibBuilder.exportSymbols("ZYXEL-RIP-MIB", zyxelRipRouteDomainTable=zyxelRipRouteDomainTable, zyRipRouteDomainVersion=zyRipRouteDomainVersion, zyxelRipRouteDomainEntry=zyxelRipRouteDomainEntry, zyxelRip=zyxelRip, zyRipDistance=zyRipDistance, zyRipRouteDomainDirection=zyRipRouteDomainDirection, zyxelRipSetup=zyxelRipSetup, PYSNMP_MODULE_ID=zyxelRip, zyRipState=zyRipState)
| [
"[email protected]"
] | |
7b4bb588bf1cf9c6d114bd85d3027e99acdfd100 | 83de24182a7af33c43ee340b57755e73275149ae | /aliyun-python-sdk-emr/aliyunsdkemr/request/v20160408/CreateClusterTemplateRequest.py | bc26ad850899542168d338eb9f7452070bb222f1 | [
"Apache-2.0"
] | permissive | aliyun/aliyun-openapi-python-sdk | 4436ca6c57190ceadbc80f0b1c35b1ab13c00c7f | 83fd547946fd6772cf26f338d9653f4316c81d3c | refs/heads/master | 2023-08-04T12:32:57.028821 | 2023-08-04T06:00:29 | 2023-08-04T06:00:29 | 39,558,861 | 1,080 | 721 | NOASSERTION | 2023-09-14T08:51:06 | 2015-07-23T09:39:45 | Python | UTF-8 | Python | false | false | 14,454 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkemr.endpoint import endpoint_data
class CreateClusterTemplateRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Emr', '2016-04-08', 'CreateClusterTemplate')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self):
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self,ResourceOwnerId):
self.add_query_param('ResourceOwnerId',ResourceOwnerId)
def get_LogPath(self):
return self.get_query_params().get('LogPath')
def set_LogPath(self,LogPath):
self.add_query_param('LogPath',LogPath)
def get_MasterPwd(self):
return self.get_query_params().get('MasterPwd')
def set_MasterPwd(self,MasterPwd):
self.add_query_param('MasterPwd',MasterPwd)
def get_Configurations(self):
return self.get_query_params().get('Configurations')
def set_Configurations(self,Configurations):
self.add_query_param('Configurations',Configurations)
def get_SshEnable(self):
return self.get_query_params().get('SshEnable')
def set_SshEnable(self,SshEnable):
self.add_query_param('SshEnable',SshEnable)
def get_KeyPairName(self):
return self.get_query_params().get('KeyPairName')
def set_KeyPairName(self,KeyPairName):
self.add_query_param('KeyPairName',KeyPairName)
def get_MetaStoreType(self):
return self.get_query_params().get('MetaStoreType')
def set_MetaStoreType(self,MetaStoreType):
self.add_query_param('MetaStoreType',MetaStoreType)
def get_SecurityGroupName(self):
return self.get_query_params().get('SecurityGroupName')
def set_SecurityGroupName(self,SecurityGroupName):
self.add_query_param('SecurityGroupName',SecurityGroupName)
def get_MachineType(self):
return self.get_query_params().get('MachineType')
def set_MachineType(self,MachineType):
self.add_query_param('MachineType',MachineType)
def get_ResourceGroupId(self):
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self,ResourceGroupId):
self.add_query_param('ResourceGroupId',ResourceGroupId)
def get_BootstrapActions(self):
return self.get_query_params().get('BootstrapAction')
def set_BootstrapActions(self, BootstrapActions):
for depth1 in range(len(BootstrapActions)):
if BootstrapActions[depth1].get('Path') is not None:
self.add_query_param('BootstrapAction.' + str(depth1 + 1) + '.Path', BootstrapActions[depth1].get('Path'))
if BootstrapActions[depth1].get('ExecutionTarget') is not None:
self.add_query_param('BootstrapAction.' + str(depth1 + 1) + '.ExecutionTarget', BootstrapActions[depth1].get('ExecutionTarget'))
if BootstrapActions[depth1].get('ExecutionMoment') is not None:
self.add_query_param('BootstrapAction.' + str(depth1 + 1) + '.ExecutionMoment', BootstrapActions[depth1].get('ExecutionMoment'))
if BootstrapActions[depth1].get('Arg') is not None:
self.add_query_param('BootstrapAction.' + str(depth1 + 1) + '.Arg', BootstrapActions[depth1].get('Arg'))
if BootstrapActions[depth1].get('Name') is not None:
self.add_query_param('BootstrapAction.' + str(depth1 + 1) + '.Name', BootstrapActions[depth1].get('Name'))
if BootstrapActions[depth1].get('ExecutionFailStrategy') is not None:
self.add_query_param('BootstrapAction.' + str(depth1 + 1) + '.ExecutionFailStrategy', BootstrapActions[depth1].get('ExecutionFailStrategy'))
def get_MetaStoreConf(self):
return self.get_query_params().get('MetaStoreConf')
def set_MetaStoreConf(self,MetaStoreConf):
self.add_query_param('MetaStoreConf',MetaStoreConf)
def get_EmrVer(self):
return self.get_query_params().get('EmrVer')
def set_EmrVer(self,EmrVer):
self.add_query_param('EmrVer',EmrVer)
def get_Tags(self):
return self.get_query_params().get('Tag')
def set_Tags(self, Tags):
for depth1 in range(len(Tags)):
if Tags[depth1].get('Value') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tags[depth1].get('Value'))
if Tags[depth1].get('Key') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tags[depth1].get('Key'))
def get_IsOpenPublicIp(self):
return self.get_query_params().get('IsOpenPublicIp')
def set_IsOpenPublicIp(self,IsOpenPublicIp):
self.add_query_param('IsOpenPublicIp',IsOpenPublicIp)
def get_Period(self):
return self.get_query_params().get('Period')
def set_Period(self,Period):
self.add_query_param('Period',Period)
def get_InstanceGeneration(self):
return self.get_query_params().get('InstanceGeneration')
def set_InstanceGeneration(self,InstanceGeneration):
self.add_query_param('InstanceGeneration',InstanceGeneration)
def get_VSwitchId(self):
return self.get_query_params().get('VSwitchId')
def set_VSwitchId(self,VSwitchId):
self.add_query_param('VSwitchId',VSwitchId)
def get_ClusterType(self):
return self.get_query_params().get('ClusterType')
def set_ClusterType(self,ClusterType):
self.add_query_param('ClusterType',ClusterType)
def get_AutoRenew(self):
return self.get_query_params().get('AutoRenew')
def set_AutoRenew(self,AutoRenew):
self.add_query_param('AutoRenew',AutoRenew)
def get_OptionSoftWareLists(self):
return self.get_query_params().get('OptionSoftWareList')
def set_OptionSoftWareLists(self, OptionSoftWareLists):
for depth1 in range(len(OptionSoftWareLists)):
if OptionSoftWareLists[depth1] is not None:
self.add_query_param('OptionSoftWareList.' + str(depth1 + 1) , OptionSoftWareLists[depth1])
def get_NetType(self):
return self.get_query_params().get('NetType')
def set_NetType(self,NetType):
self.add_query_param('NetType',NetType)
def get_ZoneId(self):
return self.get_query_params().get('ZoneId')
def set_ZoneId(self,ZoneId):
self.add_query_param('ZoneId',ZoneId)
def get_UseCustomHiveMetaDb(self):
return self.get_query_params().get('UseCustomHiveMetaDb')
def set_UseCustomHiveMetaDb(self,UseCustomHiveMetaDb):
self.add_query_param('UseCustomHiveMetaDb',UseCustomHiveMetaDb)
def get_InitCustomHiveMetaDb(self):
return self.get_query_params().get('InitCustomHiveMetaDb')
def set_InitCustomHiveMetaDb(self,InitCustomHiveMetaDb):
self.add_query_param('InitCustomHiveMetaDb',InitCustomHiveMetaDb)
def get_ClientToken(self):
return self.get_query_params().get('ClientToken')
def set_ClientToken(self,ClientToken):
self.add_query_param('ClientToken',ClientToken)
def get_IoOptimized(self):
return self.get_query_params().get('IoOptimized')
def set_IoOptimized(self,IoOptimized):
self.add_query_param('IoOptimized',IoOptimized)
def get_SecurityGroupId(self):
return self.get_query_params().get('SecurityGroupId')
def set_SecurityGroupId(self,SecurityGroupId):
self.add_query_param('SecurityGroupId',SecurityGroupId)
def get_EasEnable(self):
return self.get_query_params().get('EasEnable')
def set_EasEnable(self,EasEnable):
self.add_query_param('EasEnable',EasEnable)
def get_DepositType(self):
return self.get_query_params().get('DepositType')
def set_DepositType(self,DepositType):
self.add_query_param('DepositType',DepositType)
def get_DataDiskKMSKeyId(self):
return self.get_query_params().get('DataDiskKMSKeyId')
def set_DataDiskKMSKeyId(self,DataDiskKMSKeyId):
self.add_query_param('DataDiskKMSKeyId',DataDiskKMSKeyId)
def get_UseLocalMetaDb(self):
return self.get_query_params().get('UseLocalMetaDb')
def set_UseLocalMetaDb(self,UseLocalMetaDb):
self.add_query_param('UseLocalMetaDb',UseLocalMetaDb)
def get_TemplateName(self):
return self.get_query_params().get('TemplateName')
def set_TemplateName(self,TemplateName):
self.add_query_param('TemplateName',TemplateName)
def get_UserDefinedEmrEcsRole(self):
return self.get_query_params().get('UserDefinedEmrEcsRole')
def set_UserDefinedEmrEcsRole(self,UserDefinedEmrEcsRole):
self.add_query_param('UserDefinedEmrEcsRole',UserDefinedEmrEcsRole)
def get_DataDiskEncrypted(self):
return self.get_query_params().get('DataDiskEncrypted')
def set_DataDiskEncrypted(self,DataDiskEncrypted):
self.add_query_param('DataDiskEncrypted',DataDiskEncrypted)
def get_VpcId(self):
return self.get_query_params().get('VpcId')
def set_VpcId(self,VpcId):
self.add_query_param('VpcId',VpcId)
def get_HostGroups(self):
return self.get_query_params().get('HostGroup')
def set_HostGroups(self, HostGroups):
for depth1 in range(len(HostGroups)):
if HostGroups[depth1].get('Period') is not None:
self.add_query_param('HostGroup.' + str(depth1 + 1) + '.Period', HostGroups[depth1].get('Period'))
if HostGroups[depth1].get('SysDiskCapacity') is not None:
self.add_query_param('HostGroup.' + str(depth1 + 1) + '.SysDiskCapacity', HostGroups[depth1].get('SysDiskCapacity'))
if HostGroups[depth1].get('PrivatePoolOptionsId') is not None:
self.add_query_param('HostGroup.' + str(depth1 + 1) + '.PrivatePoolOptionsId', HostGroups[depth1].get('PrivatePoolOptionsId'))
if HostGroups[depth1].get('DiskCapacity') is not None:
self.add_query_param('HostGroup.' + str(depth1 + 1) + '.DiskCapacity', HostGroups[depth1].get('DiskCapacity'))
if HostGroups[depth1].get('SysDiskType') is not None:
self.add_query_param('HostGroup.' + str(depth1 + 1) + '.SysDiskType', HostGroups[depth1].get('SysDiskType'))
if HostGroups[depth1].get('ClusterId') is not None:
self.add_query_param('HostGroup.' + str(depth1 + 1) + '.ClusterId', HostGroups[depth1].get('ClusterId'))
if HostGroups[depth1].get('DiskType') is not None:
self.add_query_param('HostGroup.' + str(depth1 + 1) + '.DiskType', HostGroups[depth1].get('DiskType'))
if HostGroups[depth1].get('HostGroupName') is not None:
self.add_query_param('HostGroup.' + str(depth1 + 1) + '.HostGroupName', HostGroups[depth1].get('HostGroupName'))
if HostGroups[depth1].get('VSwitchId') is not None:
self.add_query_param('HostGroup.' + str(depth1 + 1) + '.VSwitchId', HostGroups[depth1].get('VSwitchId'))
if HostGroups[depth1].get('DiskCount') is not None:
self.add_query_param('HostGroup.' + str(depth1 + 1) + '.DiskCount', HostGroups[depth1].get('DiskCount'))
if HostGroups[depth1].get('AutoRenew') is not None:
self.add_query_param('HostGroup.' + str(depth1 + 1) + '.AutoRenew', HostGroups[depth1].get('AutoRenew'))
if HostGroups[depth1].get('HostGroupId') is not None:
self.add_query_param('HostGroup.' + str(depth1 + 1) + '.HostGroupId', HostGroups[depth1].get('HostGroupId'))
if HostGroups[depth1].get('NodeCount') is not None:
self.add_query_param('HostGroup.' + str(depth1 + 1) + '.NodeCount', HostGroups[depth1].get('NodeCount'))
if HostGroups[depth1].get('InstanceType') is not None:
self.add_query_param('HostGroup.' + str(depth1 + 1) + '.InstanceType', HostGroups[depth1].get('InstanceType'))
if HostGroups[depth1].get('Comment') is not None:
self.add_query_param('HostGroup.' + str(depth1 + 1) + '.Comment', HostGroups[depth1].get('Comment'))
if HostGroups[depth1].get('ChargeType') is not None:
self.add_query_param('HostGroup.' + str(depth1 + 1) + '.ChargeType', HostGroups[depth1].get('ChargeType'))
if HostGroups[depth1].get('MultiInstanceTypes') is not None:
self.add_query_param('HostGroup.' + str(depth1 + 1) + '.MultiInstanceTypes', HostGroups[depth1].get('MultiInstanceTypes'))
if HostGroups[depth1].get('CreateType') is not None:
self.add_query_param('HostGroup.' + str(depth1 + 1) + '.CreateType', HostGroups[depth1].get('CreateType'))
if HostGroups[depth1].get('HostGroupType') is not None:
self.add_query_param('HostGroup.' + str(depth1 + 1) + '.HostGroupType', HostGroups[depth1].get('HostGroupType'))
if HostGroups[depth1].get('PrivatePoolOptionsMatchCriteria') is not None:
self.add_query_param('HostGroup.' + str(depth1 + 1) + '.PrivatePoolOptionsMatchCriteria', HostGroups[depth1].get('PrivatePoolOptionsMatchCriteria'))
def get_Configs(self):
return self.get_query_params().get('Config')
def set_Configs(self, Configs):
for depth1 in range(len(Configs)):
if Configs[depth1].get('ConfigKey') is not None:
self.add_query_param('Config.' + str(depth1 + 1) + '.ConfigKey', Configs[depth1].get('ConfigKey'))
if Configs[depth1].get('FileName') is not None:
self.add_query_param('Config.' + str(depth1 + 1) + '.FileName', Configs[depth1].get('FileName'))
if Configs[depth1].get('Encrypt') is not None:
self.add_query_param('Config.' + str(depth1 + 1) + '.Encrypt', Configs[depth1].get('Encrypt'))
if Configs[depth1].get('Replace') is not None:
self.add_query_param('Config.' + str(depth1 + 1) + '.Replace', Configs[depth1].get('Replace'))
if Configs[depth1].get('ConfigValue') is not None:
self.add_query_param('Config.' + str(depth1 + 1) + '.ConfigValue', Configs[depth1].get('ConfigValue'))
if Configs[depth1].get('ServiceName') is not None:
self.add_query_param('Config.' + str(depth1 + 1) + '.ServiceName', Configs[depth1].get('ServiceName'))
def get_HighAvailabilityEnable(self):
return self.get_query_params().get('HighAvailabilityEnable')
def set_HighAvailabilityEnable(self,HighAvailabilityEnable):
self.add_query_param('HighAvailabilityEnable',HighAvailabilityEnable) | [
"[email protected]"
] | |
ec9afabfdd6a3fb5b54dcd3df3f3f3a0b67ae01e | a76790fa5f4eb96a8b731f891ca1aa4c16d21256 | /azext_iot/dps/providers/discovery.py | 433c7fe8409c6128ccc6fbaf4f22840408eae3da | [
"MIT"
] | permissive | digimaun/azure-iot-cli-extension | 414fb1c7c22b0f0d0891cd30c28d13366b9f7207 | 9999c536bbf67251d863d365c190866e1d5cc1ad | refs/heads/dev | 2023-06-24T09:42:51.069627 | 2022-12-14T23:29:58 | 2022-12-14T23:29:58 | 579,177,610 | 1 | 0 | NOASSERTION | 2022-12-16T21:25:31 | 2022-12-16T21:25:31 | null | UTF-8 | Python | false | false | 3,899 | py | # coding=utf-8
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from knack.log import get_logger
from azure.cli.core.commands.client_factory import get_subscription_id
from azext_iot.common._azure import IOT_SERVICE_CS_TEMPLATE
from azext_iot.common.base_discovery import BaseDiscovery
from azext_iot.common.shared import DiscoveryResourceType
from azext_iot.common.utility import ensure_iotdps_sdk_min_version
from azext_iot.constants import IOTDPS_TRACK_2_SDK_MIN_VERSION
from azext_iot.dps.models.dps_target import DPSTarget
from azext_iot._factory import iot_service_provisioning_factory
from typing import Any, Dict
logger = get_logger(__name__)
PRIVILEDGED_ACCESS_RIGHTS_SET = set(
["ServiceConfig", "EnrollmentWrite"]
)
class DPSDiscovery(BaseDiscovery):
def __init__(self, cmd):
super().__init__(
cmd=cmd,
necessary_rights_set=PRIVILEDGED_ACCESS_RIGHTS_SET,
resource_type=DiscoveryResourceType.DPS.value
)
def _initialize_client(self):
if not self.client:
# Track 2 could be supported
self.track2 = ensure_iotdps_sdk_min_version(IOTDPS_TRACK_2_SDK_MIN_VERSION)
if getattr(self.cmd, "cli_ctx", None):
# The client we want to use is an attribute of the client returned
# from the factory. This will have to be revisted if the DPS sdk changes.
self.client = iot_service_provisioning_factory(self.cmd.cli_ctx).iot_dps_resource
self.sub_id = get_subscription_id(self.cmd.cli_ctx)
else:
self.client = self.cmd
# Method get_keys_for_key_name needed for policy discovery (see
# BaseDiscovery.find_policy for usage) and is defined as
# list)keys_for_key_name in the DPS Sdk.
self.client.get_keys_for_key_name = self.client.list_keys_for_key_name
def _make_kwargs(self, **kwargs) -> Dict[str, Any]:
# The DPS client needs the provisioning_service_name argument
kwargs["provisioning_service_name"] = kwargs.pop("resource_name")
return kwargs
@classmethod
def get_target_by_cstring(cls, connection_string: str) -> DPSTarget:
return DPSTarget.from_connection_string(cstring=connection_string).as_dict()
def _build_target(
self, resource, policy, key_type: str = None, **kwargs
) -> Dict[str, str]:
# This is more or less a compatibility function which produces the
# same result as _azure.get_iot_dps_connection_string()
# In future iteration we will return a 'Target' object rather than dict
# but that will be better served aligning with vNext pattern for DPS
result = {}
result["cs"] = IOT_SERVICE_CS_TEMPLATE.format(
resource.properties.service_operations_host_name,
policy.key_name,
policy.primary_key if key_type == "primary" else policy.secondary_key,
)
result["entity"] = resource.properties.service_operations_host_name
result["policy"] = policy.key_name
result["primarykey"] = policy.primary_key
result["secondarykey"] = policy.secondary_key
result["subscription"] = self.sub_id
result["cmd"] = self.cmd
result["idscope"] = resource.properties.id_scope
return result
def get_id_scope(self, resource_name: str, rg: str = None) -> str:
"""Get the ID scope. Only needed for certain DPS operations."""
return self.find_resource(
resource_name=resource_name, rg=rg
).properties.id_scope
| [
"[email protected]"
] | |
b2e416b830f8a762c57a51d0493a629a1344ef3f | 255e19ddc1bcde0d3d4fe70e01cec9bb724979c9 | /dockerized-gists/7c04cc141bd3fc5f0ce1/snippet.py | 4a77c7cc3e4f198f1906ec927652c704233af5b1 | [
"MIT"
] | permissive | gistable/gistable | 26c1e909928ec463026811f69b61619b62f14721 | 665d39a2bd82543d5196555f0801ef8fd4a3ee48 | refs/heads/master | 2023-02-17T21:33:55.558398 | 2023-02-11T18:20:10 | 2023-02-11T18:20:10 | 119,861,038 | 76 | 19 | null | 2020-07-26T03:14:55 | 2018-02-01T16:19:24 | Python | UTF-8 | Python | false | false | 2,740 | py | def _download(host, creds, fp):
chunk_size = 512 * 1024
headers = {
'Content-Type': 'application/octet-stream'
}
filename = os.path.basename(fp)
uri = 'https://%s/mgmt/cm/autodeploy/software-image-downloads/%s' % (host, filename)
requests.packages.urllib3.disable_warnings()
with open(fp, 'wb') as f:
start = 0
end = chunk_size - 1
size = 0
current_bytes = 0
while True:
content_range = "%s-%s/%s" % (start, end, size)
headers['Content-Range'] = content_range
#print headers
resp = requests.get(uri,
auth=creds,
headers=headers,
verify=False,
stream=True)
if resp.status_code == 200:
# If the size is zero, then this is the first time through the
# loop and we don't want to write data because we haven't yet
# figured out the total size of the file.
if size > 0:
current_bytes += chunk_size
for chunk in resp.iter_content(chunk_size):
f.write(chunk)
# Once we've downloaded the entire file, we can break out of
# the loop
if end == size:
break
crange = resp.headers['Content-Range']
# Determine the total number of bytes to read
if size == 0:
size = int(crange.split('/')[-1]) - 1
# If the file is smaller than the chunk size, BIG-IP will
# return an HTTP 400. So adjust the chunk_size down to the
# total file size...
if chunk_size > size:
end = size
# ...and pass on the rest of the code
continue
start += chunk_size
if (current_bytes + chunk_size) > size:
end = size
else:
end = start + chunk_size - 1
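# Illustrative Content-Range progression for a hypothetical 1 MiB image
# (1048576 bytes, so size becomes 1048575 after the probe request):
#   request 1: Content-Range: 0-524287/0              probe; body discarded
#   request 2: Content-Range: 0-524287/1048575        first chunk written
#   request 3: Content-Range: 524288-1048575/1048575  end == size -> break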
if __name__ == "__main__":
import os, requests, argparse, getpass
parser = argparse.ArgumentParser(description='Download File from BIG-IP')
parser.add_argument("host", help='BIG-IP IP or Hostname', )
parser.add_argument("username", help='BIG-IP Username')
parser.add_argument("filepath", help='Destination Filename & Path')
args = vars(parser.parse_args())
hostname = args['host']
username = args['username']
filepath = args['filepath']
print "%s, enter your password: " % args['username'],
password = getpass.getpass()
_download(hostname, (username, password), filepath) | [
"[email protected]"
] | |
7fab1106e8d7ce276f3cfbbdf00034e458456586 | 37568c3e0e8ad4f342adb53c02d08283d553bc95 | /pyservos/protocol2.py | 547ee49f382cc5b533b9db6a5a670b55d190910b | [
"MIT"
] | permissive | MultipedRobotics/pyservos | c39bb3da3e57890fa68432b6f500b0a742cb896b | 26691ab8dd541dbbe4660c73b025ebe6b085e2fc | refs/heads/master | 2023-03-25T13:29:47.343650 | 2021-03-25T01:04:36 | 2021-03-25T01:04:36 | 121,900,021 | 6 | 4 | MIT | 2021-03-25T01:04:37 | 2018-02-17T23:02:25 | Python | UTF-8 | Python | false | false | 11,888 | py | from enum import IntFlag
from pyservos.utils import angle2int, le
from pyservos.common import ResetLevels
# ResetLevels = IntFlag('ResetLevels', 'all allButID allButIDDR')
crc_table = [
0x0000, 0x8005, 0x800F, 0x000A, 0x801B, 0x001E, 0x0014, 0x8011,
0x8033, 0x0036, 0x003C, 0x8039, 0x0028, 0x802D, 0x8027, 0x0022,
0x8063, 0x0066, 0x006C, 0x8069, 0x0078, 0x807D, 0x8077, 0x0072,
0x0050, 0x8055, 0x805F, 0x005A, 0x804B, 0x004E, 0x0044, 0x8041,
0x80C3, 0x00C6, 0x00CC, 0x80C9, 0x00D8, 0x80DD, 0x80D7, 0x00D2,
0x00F0, 0x80F5, 0x80FF, 0x00FA, 0x80EB, 0x00EE, 0x00E4, 0x80E1,
0x00A0, 0x80A5, 0x80AF, 0x00AA, 0x80BB, 0x00BE, 0x00B4, 0x80B1,
0x8093, 0x0096, 0x009C, 0x8099, 0x0088, 0x808D, 0x8087, 0x0082,
0x8183, 0x0186, 0x018C, 0x8189, 0x0198, 0x819D, 0x8197, 0x0192,
0x01B0, 0x81B5, 0x81BF, 0x01BA, 0x81AB, 0x01AE, 0x01A4, 0x81A1,
0x01E0, 0x81E5, 0x81EF, 0x01EA, 0x81FB, 0x01FE, 0x01F4, 0x81F1,
0x81D3, 0x01D6, 0x01DC, 0x81D9, 0x01C8, 0x81CD, 0x81C7, 0x01C2,
0x0140, 0x8145, 0x814F, 0x014A, 0x815B, 0x015E, 0x0154, 0x8151,
0x8173, 0x0176, 0x017C, 0x8179, 0x0168, 0x816D, 0x8167, 0x0162,
0x8123, 0x0126, 0x012C, 0x8129, 0x0138, 0x813D, 0x8137, 0x0132,
0x0110, 0x8115, 0x811F, 0x011A, 0x810B, 0x010E, 0x0104, 0x8101,
0x8303, 0x0306, 0x030C, 0x8309, 0x0318, 0x831D, 0x8317, 0x0312,
0x0330, 0x8335, 0x833F, 0x033A, 0x832B, 0x032E, 0x0324, 0x8321,
0x0360, 0x8365, 0x836F, 0x036A, 0x837B, 0x037E, 0x0374, 0x8371,
0x8353, 0x0356, 0x035C, 0x8359, 0x0348, 0x834D, 0x8347, 0x0342,
0x03C0, 0x83C5, 0x83CF, 0x03CA, 0x83DB, 0x03DE, 0x03D4, 0x83D1,
0x83F3, 0x03F6, 0x03FC, 0x83F9, 0x03E8, 0x83ED, 0x83E7, 0x03E2,
0x83A3, 0x03A6, 0x03AC, 0x83A9, 0x03B8, 0x83BD, 0x83B7, 0x03B2,
0x0390, 0x8395, 0x839F, 0x039A, 0x838B, 0x038E, 0x0384, 0x8381,
0x0280, 0x8285, 0x828F, 0x028A, 0x829B, 0x029E, 0x0294, 0x8291,
0x82B3, 0x02B6, 0x02BC, 0x82B9, 0x02A8, 0x82AD, 0x82A7, 0x02A2,
0x82E3, 0x02E6, 0x02EC, 0x82E9, 0x02F8, 0x82FD, 0x82F7, 0x02F2,
0x02D0, 0x82D5, 0x82DF, 0x02DA, 0x82CB, 0x02CE, 0x02C4, 0x82C1,
0x8243, 0x0246, 0x024C, 0x8249, 0x0258, 0x825D, 0x8257, 0x0252,
0x0270, 0x8275, 0x827F, 0x027A, 0x826B, 0x026E, 0x0264, 0x8261,
0x0220, 0x8225, 0x822F, 0x022A, 0x823B, 0x023E, 0x0234, 0x8231,
0x8213, 0x0216, 0x021C, 0x8219, 0x0208, 0x820D, 0x8207, 0x0202
]
class Protocol2:
"""
This is a wrapper class for the xl-320 and ax-12 servos. It can only talk
to one servo type at a time.
"""
# --------- INSTRUCTIONS -----
PING = 0x01
READ = 0x02
WRITE = 0x03
REG_WRITE = 0x04
ACTION = 0x05
RESET = 0x06
REBOOT = 0x08
CLEAR = 0X10
STATUS = 0x55
SYNC_READ = 0x82
SYNC_WRITE = 0x83
BULK_READ = 0x92
BULK_WRITE = 0x93
# def __init__(self, kind):
# self.base = kind()
def makePingPacket(self, ID=None):
"""
Pings a servo
"""
if not ID:
ID = self.BROADCAST_ADDR
pkt = self.makePacket(ID, self.PING)
return pkt
def makeWritePacket(self, ID, reg, values=None):
"""
Creates a packet that writes a value(s) to servo ID at location reg. Make
        sure the values are in little endian (use Packet.le() if necessary) for
        16-bit (word size) values.
"""
if values:
params = le(reg) + values
else:
params = le(reg)
pkt = self.makePacket(ID, self.WRITE, params)
return pkt
def makeReadPacket(self, ID, reg, values=None):
"""
        Creates a packet that reads `values` bytes starting at register `reg`
        of servo ID.
        """
        # Protocol 2.0 read parameters: start address (2 bytes, little endian)
        # followed by the number of bytes to read (2 bytes, little endian)
        pkt = self.makePacket(ID, self.READ, le(reg) + le(values))
return pkt
# def makeResetPacket(self, ID, level=0):
# """
# Resets a servo.
# """
# params = [XL320.RESET_ALL_BUT_ID]
# pkt = self.makePacket(ID, self.RESET, params)
# return pkt
def makeResetPacket(self, ID, level):
"""
Resets a servo.
"""
if ResetLevels.all == level:
params = [self.RESET_ALL]
elif ResetLevels.allButID == level:
params = [self.RESET_ALL_BUT_ID]
elif ResetLevels.allButIDDR == level:
params = [self.RESET_ALL_BUT_ID_BAUD_RATE]
else:
raise Exception("Invalid reset level")
pkt = self.makePacket(ID, self.RESET, params)
return pkt
def makeRebootPacket(self, ID):
"""
Reboots a servo
"""
pkt = self.makePacket(ID, self.REBOOT)
return pkt
def makeServoMovePacket(self, ID, angle, degrees=True):
"""
Commands the servo to an angle (in degrees)
"""
# if degrees and not (0.0 <= angle <= 300.0):
# raise Exception('makeServoMovePacket(), angle [deg] out of bounds: {}'.format(angle))
# elif (not degrees) and (not (0.0 <= angle <= 5.23598776)):
# raise Exception('makeServoMovePacket(), angle [rads] out of bounds: {}'.format(angle))
# val = int(angle/300*1023)
val = angle2int(angle, degrees)
pkt = self.makeWritePacket(ID, self.GOAL_POSITION, val)
return pkt
def makeSyncMovePacket(self, info, degrees=True):
"""
Write sync angle information to servos.
info = [[ID, angle], [ID, angle], ...]
ID: servo ID
angle: 0-300 degrees or in radians
"""
data = []
# since all servo angles have the same register addr (GOAL_POSITION)
# and data size (2), a sinc packet is smart choice
# compare bulk vs sync for the same commands:
# bulk = 94 bytes
# sync = 50 bytes
for cmd in info:
data.append(cmd[0]) # ID
angle = angle2int(cmd[1], degrees)
data.append(angle[0]) # LSB
data.append(angle[1]) # MSB
pkt = self.makeSyncWritePacket(self.GOAL_POSITION, data)
return pkt
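    # Hedged usage sketch (servo IDs and angles invented for illustration):
    #   pkt = servo.makeSyncMovePacket([[1, 0.0], [2, 150.0], [3, 300.0]])
    # builds one broadcast SYNC_WRITE packet that commands all three servos
    # in a single bus transaction (the ~50-byte sync form noted in the
    # docstring, vs ~94 bytes for the bulk equivalent).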
def makeSyncWritePacket(self, reg, info):
"""
Write sync angle information to servos.
Status Packet will not be returned because Broadcast ID(0xFE) is used
info = [[ID, data1, ...], [ID, data1, ...], ...]
"""
data = []
data.append(reg) # addr
data.append(len(info[0])-1) # data length not counting ID
for cmd in info:
data += cmd
ID = self.BROADCAST_ADDR
instr = self.SYNC_WRITE
pkt = self.makePacket(ID, instr, data) # create packet
return pkt
def makeBulkReadPacket(self, data):
"""
data = [[data len, ID, addr], [data len, ID, addr], ...]
"""
ID = self.BROADCAST_ADDR
instr = self.BULK_READ
pkt = self.makePacket(ID, instr, data) # create packet
return pkt
def makeLEDPacket(self, ID, value):
"""
Turn on/off the servo LED and also sets the color.
"""
# value = [value]
# elif self.SERVO_ID == XL320.SERVO_ID:
# # print('Not implemented yet')
# value = [0, value]
pkt = self.makeWritePacket(ID, self.LED, [value])
return pkt
def makeSpeedPacket(self, speed):
"""
Set max speed for all servos
speed - [0-1023] in units of 0.111 rpm. If speed = 0, then max motor
speed is used. You cannot exceed max servo speed.
"""
speed = speed if (speed <= self.MAX_RPM) else self.MAX_RPM
pkt = self.makeWritePacket(
self.BROADCAST_ADDR,
self.GOAL_VELOCITY,
le(speed)
)
return pkt
def decodePacket(self, pkts):
return self.find_packets(pkts)
def processStatusPacket(self, pkt):
return self.status_packet(pkt)
def check_sum(self, data_blk):
"""
Calculate crc
in: data_blk - entire packet except last 2 crc bytes
        out: crc_accum - 16-bit CRC value
"""
data_blk_size = len(data_blk)
crc_accum = 0
for j in range(data_blk_size):
i = ((crc_accum >> 8) ^ data_blk[j]) & 0xFF
crc_accum = ((crc_accum << 8) ^ crc_table[i])
crc_accum &= 0xffff # keep to 16 bits
return crc_accum
def makePacket(self, ID, instr, params=None):
"""
This makes a generic packet.
TODO: look a struct ... does that add value using it?
        [0xFF, 0xFF, 0xFD, 0x00, ID, LEN_L, LEN_H, INST, PARAM 1, PARAM 2, ..., PARAM N, CRC_L, CRC_H]
        TODO: look at struct ... does using it add value?
ID - servo id
instr - instruction
params - [register, instruction parameter values]
out: packet
"""
pkt = []
# [header, reserved, 0x00, ID, len low, len high, instruction]
pkt += [0xFF, 0xFF, 0xFD, 0x00, ID, 0x00, 0x00, instr] # header
# pkt += [0x00] # reserved byte
# pkt += [ID]
# pkt += [0x00, 0x00] # length placeholder
# pkt += [instr] # instruction
# if reg:
# pkt += le(reg) # not everything has a register
if params:
pkt += params # not everything has parameters
length = le(len(pkt) - 5) # length = len(packet) - (header(3), reserve(1), id(1))
pkt[5] = length[0] # L
pkt[6] = length[1] # H
crc = self.check_sum(pkt)
pkt += le(crc)
return pkt
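    # Worked example, matching the ping instruction packet documented in the
    # ROBOTIS Protocol 2.0 e-manual (quoted as a reference value, not output
    # captured from this code):
    #   makePacket(1, PING) ->
    #   [0xFF, 0xFF, 0xFD, 0x00, 0x01, 0x03, 0x00, 0x01, 0x19, 0x4E]
    #    header + reserved   ID    len=3 LE    PING   CRC 0x4E19 LE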
def find_packets(self, pkt):
"""
Search through a string of binary for a valid xl320 package.
in: buffer to search through
out: a list of valid data packet
"""
# print('findpkt', pkt)
# print('-----------------------')
ret = []
while len(pkt)-10 >= 0:
if pkt[0:4] != [0xFF, 0xFF, 0xFD, 0x00]:
pkt.pop(0) # get rid of the first index
# print(' - pop:', pkt)
continue
# print(' > good packet')
length = (pkt[6] << 8) + pkt[5]
# print(' > length', length)
crc_pos = 5 + length
pkt_crc = pkt[crc_pos:crc_pos + 2]
crc = le(self.check_sum(pkt[:crc_pos]))
# if len(pkt) < (crc_pos + 1):
# print('<<< need more data for findPkt >>>')
# print(' > calc crc', crc)
# print(' > pkt crc', pkt_crc)
if pkt_crc == crc:
pkt_end = crc_pos+2
ret.append(pkt[:pkt_end])
# print(' > found:', pkt[:pkt_end])
# print(' > pkt size', pkt_end)
del pkt[:pkt_end]
# print(' > remaining:', pkt)
else:
pkt_end = crc_pos+2
# print(' - crap:', pkt[:pkt_end])
del pkt[:pkt_end]
# print('findpkt ret:', ret)
return ret
def status_packet(self, pkt):
def getError(err):
errors = [
'Input Voltage', # 0
'Angle Limit',
'Overheating',
'Range',
'Checksum',
'Overload',
                'Instruction',
'None' # 7
]
ret = None
if err != 128:
err_str = []
for i in range(0, 8):
if (err >> i) & 1:
err_str.append(errors[i])
ret = ','.join(err_str)
else:
ret = errors[7]
return ret
ret = {
'id': pkt[2],
'error str': getError(pkt[4]),
'error num': pkt[4],
'params': pkt[5:-1],
'raw': list(pkt)
}
return ret
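    # Hedged example of the returned dict shape (field values invented):
    #   {'id': 1, 'error str': 'None', 'error num': 128,
    #    'params': [...], 'raw': [...]}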
| [
"[email protected]"
] | |
d0650ff6942b772d3ceb8e2f766ec26f000c88c9 | 0d15e76677f3bf97d21978e73292f0d5c535ebac | /spacy/en/language_data.py | 3c9db8fe2386b2f759130f0b5b16bdb093197839 | [
"MIT"
] | permissive | ExplodingCabbage/spaCy | 62c1adf7a588827f32c16bc990d70f1cfe2b56c2 | 001abe2b9d6690fcd7e7b96242bb1b9cee0f5784 | refs/heads/master | 2020-07-02T22:14:02.890909 | 2016-11-20T02:45:51 | 2016-11-20T02:45:51 | 74,277,675 | 1 | 0 | null | 2016-11-20T13:58:26 | 2016-11-20T13:58:24 | null | UTF-8 | Python | false | false | 63,126 | py | # encoding: utf8
from __future__ import unicode_literals
# improved list from Stone, Denis, Kwantes (2010)
STOP_WORDS = set("""
a about above across after afterwards again against all almost alone
along already also although always am among amongst amoungst amount
an and another any anyhow anyone anything anyway anywhere are around
as at back be became because become becomes becoming been before
beforehand behind being below beside besides between beyond bill
both bottom but by call can cannot cant co computer con could couldnt
cry de describe detail did didn do does doesn doing don done down due
during each eg eight either eleven else elsewhere empty enough etc
even ever every everyone everything everywhere except few fifteen
fifty fill find fire first five for former formerly forty found four
from front full further get give go had has hasnt have he hence her
here hereafter hereby herein hereupon hers herself him himself his
how however hundred i ie if in inc indeed interest into is it its
itself keep last latter latterly least less ltd just kg km made make
many may me meanwhile might mill mine more moreover most mostly move
much must my myself name namely neither never nevertheless next nine
no nobody none noone nor not nothing now nowhere of off often on once
one only onto or other others otherwise our ours ourselves out over
own part per perhaps please put rather re quite rather really regarding
same say see seem seemed seeming seems serious several she should
show side since sincere six sixty so some somehow someone something
sometime sometimes somewhere still such system take ten than that the
their them themselves then thence there thereafter thereby therefore
therein thereupon these they thick thin third this those though three
through throughout thru thus to together too top toward towards twelve
twenty two un under until up unless upon us used using various very
via was we well were what whatever when whence whenever where whereafter
whereas whereby wherein whereupon wherever whether which while whither
who whoever whole whom whose why will with within without would yet you
your yours yourself yourselves
""".split())
TAG_MAP = {
".": {"pos": "punct", "puncttype": "peri"},
",": {"pos": "punct", "puncttype": "comm"},
"-LRB-": {"pos": "punct", "puncttype": "brck", "punctside": "ini"},
"-RRB-": {"pos": "punct", "puncttype": "brck", "punctside": "fin"},
"``": {"pos": "punct", "puncttype": "quot", "punctside": "ini"},
"\"\"": {"pos": "punct", "puncttype": "quot", "punctside": "fin"},
"''": {"pos": "punct", "puncttype": "quot", "punctside": "fin"},
":": {"pos": "punct"},
"$": {"pos": "sym", "other": {"symtype": "currency"}},
"#": {"pos": "sym", "other": {"symtype": "numbersign"}},
"AFX": {"pos": "adj", "hyph": "hyph"},
"CC": {"pos": "conj", "conjtype": "coor"},
"CD": {"pos": "num", "numtype": "card"},
"DT": {"pos": "det"},
"EX": {"pos": "adv", "advtype": "ex"},
"FW": {"pos": "x", "foreign": "foreign"},
"HYPH": {"pos": "punct", "puncttype": "dash"},
"IN": {"pos": "adp"},
"JJ": {"pos": "adj", "degree": "pos"},
"JJR": {"pos": "adj", "degree": "comp"},
"JJS": {"pos": "adj", "degree": "sup"},
"LS": {"pos": "punct", "numtype": "ord"},
"MD": {"pos": "verb", "verbtype": "mod"},
"NIL": {"pos": ""},
"NN": {"pos": "noun", "number": "sing"},
"NNP": {"pos": "propn", "nountype": "prop", "number": "sing"},
"NNPS": {"pos": "propn", "nountype": "prop", "number": "plur"},
"NNS": {"pos": "noun", "number": "plur"},
"PDT": {"pos": "adj", "adjtype": "pdt", "prontype": "prn"},
"POS": {"pos": "part", "poss": "poss"},
"PRP": {"pos": "pron", "prontype": "prs"},
"PRP$": {"pos": "adj", "prontype": "prs", "poss": "poss"},
"RB": {"pos": "adv", "degree": "pos"},
"RBR": {"pos": "adv", "degree": "comp"},
"RBS": {"pos": "adv", "degree": "sup"},
"RP": {"pos": "part"},
"SYM": {"pos": "sym"},
"TO": {"pos": "part", "parttype": "inf", "verbform": "inf"},
"UH": {"pos": "intJ"},
"VB": {"pos": "verb", "verbform": "inf"},
"VBD": {"pos": "verb", "verbform": "fin", "tense": "past"},
"VBG": {"pos": "verb", "verbform": "part", "tense": "pres", "aspect": "prog"},
"VBN": {"pos": "verb", "verbform": "part", "tense": "past", "aspect": "perf"},
"VBP": {"pos": "verb", "verbform": "fin", "tense": "pres"},
"VBZ": {"pos": "verb", "verbform": "fin", "tense": "pres", "number": "sing", "person": 3},
"WDT": {"pos": "adj", "prontype": "int|rel"},
"WP": {"pos": "noun", "prontype": "int|rel"},
"WP$": {"pos": "adj", "poss": "poss", "prontype": "int|rel"},
"WRB": {"pos": "adv", "prontype": "int|rel"},
"SP": {"pos": "space"},
"ADD": {"pos": "x"},
"NFP": {"pos": "punct"},
"GW": {"pos": "x"},
"AFX": {"pos": "x"},
"HYPH": {"pos": "punct"},
"XX": {"pos": "x"},
"BES": {"pos": "verb"},
"HVS": {"pos": "verb"}
}
TOKENIZER_PREFIXES = r''', " ( [ { * < $ £ “ ' `` ` # US$ C$ A$ a- ‘ .... ...'''.split()
TOKENIZER_SUFFIXES = (r''', \" \) \] \} \* \! \? % \$ > : ; ' ” '' 's 'S ’s ’S ’'''
r'''\.\. \.\.\. \.\.\.\. (?<=[a-z0-9)\]”"'%\)])\. '''
r'''(?<=[0-9])km''').strip().split()
TOKENIZER_INFIXES = (r'''\.\.\.+ (?<=[a-z])\.(?=[A-Z]) (?<=[a-zA-Z])-(?=[a-zA-z]) '''
r'''(?<=[a-zA-Z])--(?=[a-zA-z]) (?<=[0-9])-(?=[0-9]) '''
r'''(?<=[A-Za-z]),(?=[A-Za-z])''').split()
TOKENIZER_EXCEPTIONS = {
"and/or": [
{
"F": "and/or",
"L": "and/or",
"pos": "CC"
}],
"Ph.D.": [
{
"F": "Ph.D."
}],
"d.": [
{
"F": "d."
}
],
"Theydve": [
{
"L": "-PRON-",
"F": "They"
},
{
"F": "d",
"L": "would",
"pos": "MD"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
":/": [
{
"F": ":/"
}
],
"shouldn't've": [
{
"F": "should"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"There'll": [
{
"F": "There"
},
{
"F": "'ll",
"L": "will",
"pos": "MD"
}
],
"E.G.": [
{
"F": "E.G."
}
],
"howll": [
{
"F": "how"
},
{
"F": "ll",
"L": "will",
"pos": "MD"
}
],
"6a.m.": [
{
"F": "6"
},
{
"F": "a.m."
}
],
"Ore.": [
{
"F": "Ore."
}
],
"Hadn't've": [
{
"F": "Had",
"L": "have",
"pos": "VBD"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
":>": [
{
"F": ":>"
}
],
"3p.m.": [
{
"F": "3"
},
{
"F": "p.m."
}
],
"who'll": [
{
"F": "who"
},
{
"F": "'ll",
"L": "will",
"pos": "MD"
}
],
"5a.m.": [
{
"F": "5"
},
{
"F": "a.m."
}
],
":(": [
{
"F": ":("
}
],
":0": [
{
"F": ":0"
}
],
"10a.m.": [
{
"F": "10"
},
{
"F": "a.m."
}
],
"aint": [
{
"F": "ai",
"pos": "VBP",
"number": 2,
"L": "be"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
" ": [
{
"pos": "SP",
"F": " "
}
],
"Dec.": [
{
"F": "Dec."
}
],
"Shouldnt": [
{
"F": "Should"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"Ky.": [
{
"F": "Ky."
}
],
"when's": [
{
"F": "when"
},
{
"F": "'s"
}
],
"Didnt": [
{
"F": "Did",
"L": "do",
"pos": "VBD"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"itll": [
{
"L": "-PRON-",
"F": "it"
},
{
"F": "ll",
"L": "will",
"pos": "MD"
}
],
"Who're": [
{
"F": "Who"
},
{
"F": "'re"
}
],
"=D": [
{
"F": "=D"
}
],
"Ain't": [
{
"F": "Ai",
"pos": "VBP",
"number": 2,
"L": "be"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"Can't": [
{
"F": "Ca",
"L": "can",
"pos": "MD"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"Whyre": [
{
"F": "Why"
},
{
"F": "re"
}
],
"Aren't": [
{
"F": "Are",
"pos": "VBP",
"number": 2,
"L": "be"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"Neednt": [
{
"F": "Need"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"should've": [
{
"F": "should"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"shouldn't": [
{
"F": "should"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"Idve": [
{
"L": "-PRON-",
"F": "I"
},
{
"F": "d",
"L": "would",
"pos": "MD"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"weve": [
{
"F": "we"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"Va.": [
{
"F": "Va."
}
],
"D.C.": [
{
"F": "D.C."
}
],
"3am": [
{
"F": "3"
},
{
"L": "a.m.",
"F": "am"
}
],
"Ive": [
{
"L": "-PRON-",
"F": "I"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"Md.": [
{
"F": "Md."
}
],
";D": [
{
"F": ";D"
}
],
"Mrs.": [
{
"F": "Mrs."
}
],
"Minn.": [
{
"F": "Minn."
}
],
"they'd": [
{
"L": "-PRON-",
"F": "they"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
}
],
"Youdve": [
{
"L": "-PRON-",
"F": "You"
},
{
"F": "d",
"L": "would",
"pos": "MD"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"theyve": [
{
"L": "-PRON-",
"F": "they"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"Weren't": [
{
"F": "Were"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"werent": [
{
"F": "were"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"whyre": [
{
"F": "why"
},
{
"F": "re"
}
],
"g.": [
{
"F": "g."
}
],
"I'm": [
{
"L": "-PRON-",
"F": "I"
},
{
"pos": "VBP",
"F": "'m",
"tenspect": 1,
"number": 1,
"L": "be"
}
],
":p": [
{
"F": ":p"
}
],
"She'd've": [
{
"L": "-PRON-",
"F": "She"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"not've": [
{
"F": "not",
"L": "not",
"pos": "RB"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"we'll": [
{
"F": "we"
},
{
"F": "'ll",
"L": "will",
"pos": "MD"
}
],
":O": [
{
"F": ":O"
}
],
"<33": [
{
"F": "<33"
}
],
"Don't": [
{
"L": "do",
"F": "Do"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"Whyll": [
{
"F": "Why"
},
{
"F": "ll",
"L": "will",
"pos": "MD"
}
],
"''": [
{
"F": "''"
}
],
"they've": [
{
"L": "-PRON-",
"F": "they"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"t.": [
{
"F": "t."
}
],
"wasn't": [
{
"F": "was"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"could've": [
{
"pos": "MD",
"F": "could"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"what've": [
{
"F": "what"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"havent": [
{
"pos": "VB",
"F": "have"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"Who've": [
{
"F": "Who"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"11am": [
{
"F": "11"
},
{
"L": "a.m.",
"F": "am"
}
],
"Shan't": [
{
"F": "Sha"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"i'll": [
{
"L": "-PRON-",
"F": "i"
},
{
"F": "'ll",
"L": "will",
"pos": "MD"
}
],
"i.e.": [
{
"F": "i.e."
}
],
"you'd": [
{
"L": "-PRON-",
"F": "you"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
}
],
"w.": [
{
"F": "w."
}
],
"whens": [
{
"F": "when"
},
{
"F": "s"
}
],
"whys": [
{
"F": "why"
},
{
"F": "s"
}
],
"6pm": [
{
"F": "6"
},
{
"L": "p.m.",
"F": "pm"
}
],
"4p.m.": [
{
"F": "4"
},
{
"F": "p.m."
}
],
"Whereve": [
{
"F": "Where"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"o_o": [
{
"F": "o_o"
}
],
"Mo.": [
{
"F": "Mo."
}
],
"Kan.": [
{
"F": "Kan."
}
],
"\u00a0": [
{
"pos": "SP",
"L": " ",
"F": "\u00a0"
}
],
"there'd": [
{
"F": "there"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
}
],
"N.H.": [
{
"F": "N.H."
}
],
"(^_^)": [
{
"F": "(^_^)"
}
],
"Mont.": [
{
"F": "Mont."
}
],
"hadn't've": [
{
"F": "had",
"L": "have",
"pos": "VBD"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"whatll": [
{
"F": "what"
},
{
"F": "ll",
"L": "will",
"pos": "MD"
}
],
"wouldn't've": [
{
"F": "would"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"there's": [
{
"F": "there"
},
{
"F": "'s"
}
],
"2pm": [
{
"F": "2"
},
{
"L": "p.m.",
"F": "pm"
}
],
"Who'll": [
{
"F": "Who"
},
{
"F": "'ll",
"L": "will",
"pos": "MD"
}
],
"o_O": [
{
"F": "o_O"
}
],
"Nev.": [
{
"F": "Nev."
}
],
"youll": [
{
"L": "-PRON-",
"F": "you"
},
{
"F": "ll",
"L": "will",
"pos": "MD"
}
],
"wouldve": [
{
"F": "would"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"Nov.": [
{
"F": "Nov."
}
],
"z.": [
{
"F": "z."
}
],
"xDD": [
{
"F": "xDD"
}
],
"Sen.": [
{
"F": "Sen."
}
],
"Wouldnt": [
{
"F": "Would"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"Thered": [
{
"F": "There"
},
{
"F": "d",
"L": "would",
"pos": "MD"
}
],
"Youre": [
{
"L": "-PRON-",
"F": "You"
},
{
"F": "re"
}
],
"Couldn't've": [
{
"pos": "MD",
"F": "Could"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"who're": [
{
"F": "who"
},
{
"F": "'re"
}
],
"Whys": [
{
"F": "Why"
},
{
"F": "s"
}
],
"mightn't've": [
{
"F": "might"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"Wholl": [
{
"F": "Who"
},
{
"F": "ll",
"L": "will",
"pos": "MD"
}
],
"hadn't": [
{
"F": "had",
"L": "have",
"pos": "VBD"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"Havent": [
{
"pos": "VB",
"F": "Have"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"Whatve": [
{
"F": "What"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
":)": [
{
"F": ":)"
}
],
"o.O": [
{
"F": "o.O"
}
],
"Thats": [
{
"F": "That"
},
{
"F": "s"
}
],
":((": [
{
"F": ":(("
}
],
"Gov.": [
{
"F": "Gov."
}
],
"Howll": [
{
"F": "How"
},
{
"F": "ll",
"L": "will",
"pos": "MD"
}
],
"p.": [
{
"F": "p."
}
],
"wouldn't": [
{
"F": "would"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"9pm": [
{
"F": "9"
},
{
"L": "p.m.",
"F": "pm"
}
],
"You'll": [
{
"L": "-PRON-",
"F": "You"
},
{
"F": "'ll",
"L": "will",
"pos": "MD"
}
],
"Ala.": [
{
"F": "Ala."
}
],
"12am": [
{
"F": "12"
},
{
"L": "a.m.",
"F": "am"
}
],
"=]": [
{
"F": "=]"
}
],
"Cant": [
{
"F": "Ca",
"L": "can",
"pos": "MD"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"i'd": [
{
"L": "-PRON-",
"F": "i"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
}
],
"a.m.": [
{
"F": "a.m."
}
],
"weren't": [
{
"F": "were"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"would've": [
{
"F": "would"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"i'm": [
{
"L": "-PRON-",
"F": "i"
},
{
"pos": "VBP",
"F": "'m",
"tenspect": 1,
"number": 1,
"L": "be"
}
],
"why'll": [
{
"F": "why"
},
{
"F": "'ll",
"L": "will",
"pos": "MD"
}
],
"we'd've": [
{
"F": "we"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"Shouldve": [
{
"F": "Should"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"can't": [
{
"F": "ca",
"L": "can",
"pos": "MD"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"thats": [
{
"F": "that"
},
{
"F": "s"
}
],
"1p.m.": [
{
"F": "1"
},
{
"F": "p.m."
}
],
"12a.m.": [
{
"F": "12"
},
{
"F": "a.m."
}
],
"Hes": [
{
"L": "-PRON-",
"F": "He"
},
{
"F": "s"
}
],
"Needn't": [
{
"F": "Need"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"It's": [
{
"L": "-PRON-",
"F": "It"
},
{
"F": "'s"
}
],
"St.": [
{
"F": "St."
}
],
"Why're": [
{
"F": "Why"
},
{
"F": "'re"
}
],
":(((": [
{
"F": ":((("
}
],
"Hed": [
{
"L": "-PRON-",
"F": "He"
},
{
"F": "d",
"L": "would",
"pos": "MD"
}
],
"Mt.": [
{
"L": "Mount",
"F": "Mt."
}
],
"couldn't": [
{
"pos": "MD",
"F": "could"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"What've": [
{
"F": "What"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"4a.m.": [
{
"F": "4"
},
{
"F": "a.m."
}
],
"Ind.": [
{
"F": "Ind."
}
],
"It'd": [
{
"L": "-PRON-",
"F": "It"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
}
],
"<3": [
{
"F": "<3"
}
],
"theydve": [
{
"L": "-PRON-",
"F": "they"
},
{
"F": "d",
"L": "would",
"pos": "MD"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"aren't": [
{
"F": "are",
"pos": "VBP",
"number": 2,
"L": "be"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"Mightn't": [
{
"F": "Might"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"'S": [
{
"L": "'s",
"F": "'S"
}
],
"I've": [
{
"L": "-PRON-",
"F": "I"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"Whered": [
{
"F": "Where"
},
{
"F": "d",
"L": "would",
"pos": "MD"
}
],
"Itdve": [
{
"L": "-PRON-",
"F": "It"
},
{
"F": "d",
"L": "would",
"pos": "MD"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"I'ma": [
{
"L": "-PRON-",
"F": "I"
},
{
"F": "'ma"
}
],
"whos": [
{
"F": "who"
},
{
"F": "s"
}
],
"They'd": [
{
"L": "-PRON-",
"F": "They"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
}
],
"What'll": [
{
"F": "What"
},
{
"F": "'ll",
"L": "will",
"pos": "MD"
}
],
":Y": [
{
"F": ":Y"
}
],
"You've": [
{
"L": "-PRON-",
"F": "You"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"Mustve": [
{
"F": "Must"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"whod": [
{
"F": "who"
},
{
"F": "d",
"L": "would",
"pos": "MD"
}
],
"mightntve": [
{
"F": "might"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"I'd've": [
{
"L": "-PRON-",
"F": "I"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"Must've": [
{
"F": "Must"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"it'd": [
{
"L": "-PRON-",
"F": "it"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
}
],
"Ark.": [
{
"F": "Ark."
}
],
"Wis.": [
{
"F": "Wis."
}
],
"6p.m.": [
{
"F": "6"
},
{
"F": "p.m."
}
],
"what're": [
{
"F": "what"
},
{
"F": "'re"
}
],
"N.C.": [
{
"F": "N.C."
}
],
"Wasn't": [
{
"F": "Was"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"what's": [
{
"F": "what"
},
{
"F": "'s"
}
],
"he'd've": [
{
"L": "-PRON-",
"F": "he"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"Jan.": [
{
"F": "Jan."
}
],
"She'd": [
{
"L": "-PRON-",
"F": "She"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
}
],
"shedve": [
{
"L": "-PRON-",
"F": "she"
},
{
"F": "d",
"L": "would",
"pos": "MD"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"Tenn.": [
{
"F": "Tenn."
}
],
"ain't": [
{
"F": "ai",
"pos": "VBP",
"number": 2,
"L": "be"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"Wash.": [
{
"F": "Wash."
}
],
"She's": [
{
"L": "-PRON-",
"F": "She"
},
{
"F": "'s"
}
],
"i'd've": [
{
"L": "-PRON-",
"F": "i"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"2a.m.": [
{
"F": "2"
},
{
"F": "a.m."
}
],
"We'd've": [
{
"F": "We"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"must've": [
{
"F": "must"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"That's": [
{
"F": "That"
},
{
"F": "'s"
}
],
"Sept.": [
{
"F": "Sept."
}
],
"whatre": [
{
"F": "what"
},
{
"F": "re"
}
],
"you'd've": [
{
"L": "-PRON-",
"F": "you"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"Dont": [
{
"L": "do",
"F": "Do"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"i.": [
{
"F": "i."
}
],
"Jun.": [
{
"F": "Jun."
}
],
"thered": [
{
"F": "there"
},
{
"F": "d",
"L": "would",
"pos": "MD"
}
],
"Youd": [
{
"L": "-PRON-",
"F": "You"
},
{
"F": "d",
"L": "would",
"pos": "MD"
}
],
"couldn't've": [
{
"pos": "MD",
"F": "could"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"Whens": [
{
"F": "When"
},
{
"F": "s"
}
],
"8a.m.": [
{
"F": "8"
},
{
"F": "a.m."
}
],
"Isnt": [
{
"F": "Is",
"L": "be",
"pos": "VBZ"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"mightve": [
{
"F": "might"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"'ol": [
{
"F": "'ol"
}
],
"2p.m.": [
{
"F": "2"
},
{
"F": "p.m."
}
],
"9a.m.": [
{
"F": "9"
},
{
"F": "a.m."
}
],
"q.": [
{
"F": "q."
}
],
"didnt": [
{
"F": "did",
"L": "do",
"pos": "VBD"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"ive": [
{
"L": "-PRON-",
"F": "i"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"It'd've": [
{
"L": "-PRON-",
"F": "It"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"e.g.": [
{
"F": "e.g."
}
],
"\t": [
{
"pos": "SP",
"F": "\t"
}
],
"Mich.": [
{
"F": "Mich."
}
],
"Itll": [
{
"L": "-PRON-",
"F": "It"
},
{
"F": "ll",
"L": "will",
"pos": "MD"
}
],
"didn't": [
{
"F": "did",
"L": "do",
"pos": "VBD"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"3pm": [
{
"F": "3"
},
{
"L": "p.m.",
"F": "pm"
}
],
"Jul.": [
{
"F": "Jul."
}
],
"7pm": [
{
"F": "7"
},
{
"L": "p.m.",
"F": "pm"
}
],
"cant": [
{
"F": "ca",
"L": "can",
"pos": "MD"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"Miss.": [
{
"F": "Miss."
}
],
"im": [
{
"L": "-PRON-",
"F": "i"
},
{
"pos": "VBP",
"F": "m",
"tenspect": 1,
"number": 1,
"L": "be"
}
],
"Ariz.": [
{
"F": "Ariz."
}
],
"they'd've": [
{
"L": "-PRON-",
"F": "they"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"f.": [
{
"F": "f."
}
],
"Co.": [
{
"F": "Co."
}
],
"Hadntve": [
{
"F": "Had",
"L": "have",
"pos": "VBD"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"Weve": [
{
"F": "We"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"1a.m.": [
{
"F": "1"
},
{
"F": "a.m."
}
],
"=3": [
{
"F": "=3"
}
],
"Mightnt": [
{
"F": "Might"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"1pm": [
{
"F": "1"
},
{
"L": "p.m.",
"F": "pm"
}
],
"youdve": [
{
"L": "-PRON-",
"F": "you"
},
{
"F": "d",
"L": "would",
"pos": "MD"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"Shedve": [
{
"L": "-PRON-",
"F": "She"
},
{
"F": "d",
"L": "would",
"pos": "MD"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"theyd": [
{
"L": "-PRON-",
"F": "they"
},
{
"F": "d",
"L": "would",
"pos": "MD"
}
],
"Ill.": [
{
"F": "Ill."
}
],
"N.D.": [
{
"F": "N.D."
}
],
"Cannot": [
{
"F": "Can",
"L": "can",
"pos": "MD"
},
{
"F": "not",
"L": "not",
"pos": "RB"
}
],
"s.": [
{
"F": "s."
}
],
"Hadn't": [
{
"F": "Had",
"L": "have",
"pos": "VBD"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"What're": [
{
"F": "What"
},
{
"F": "'re"
}
],
"He'll": [
{
"L": "-PRON-",
"F": "He"
},
{
"F": "'ll",
"L": "will",
"pos": "MD"
}
],
"wholl": [
{
"F": "who"
},
{
"F": "ll",
"L": "will",
"pos": "MD"
}
],
"They're": [
{
"L": "-PRON-",
"F": "They"
},
{
"F": "'re"
}
],
"Neb.": [
{
"F": "Neb."
}
],
"shouldnt": [
{
"F": "should"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"\n": [
{
"pos": "SP",
"F": "\n"
}
],
"whered": [
{
"F": "where"
},
{
"F": "d",
"L": "would",
"pos": "MD"
}
],
"7a.m.": [
{
"F": "7"
},
{
"F": "a.m."
}
],
"youve": [
{
"L": "-PRON-",
"F": "you"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"4am": [
{
"F": "4"
},
{
"L": "a.m.",
"F": "am"
}
],
"v.": [
{
"F": "v."
}
],
"notve": [
{
"F": "not",
"L": "not",
"pos": "RB"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"couldve": [
{
"pos": "MD",
"F": "could"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"mustve": [
{
"F": "must"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"Youve": [
{
"L": "-PRON-",
"F": "You"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"therell": [
{
"F": "there"
},
{
"F": "ll",
"L": "will",
"pos": "MD"
}
],
"might've": [
{
"F": "might"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"Mustn't": [
{
"F": "Must"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"wheres": [
{
"F": "where"
},
{
"F": "s"
}
],
"they're": [
{
"L": "-PRON-",
"F": "they"
},
{
"F": "'re"
}
],
"idve": [
{
"L": "-PRON-",
"F": "i"
},
{
"F": "d",
"L": "would",
"pos": "MD"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"hows": [
{
"F": "how"
},
{
"F": "s"
}
],
"Fla.": [
{
"F": "Fla."
}
],
"N.M.": [
{
"F": "N.M."
}
],
"youre": [
{
"L": "-PRON-",
"F": "you"
},
{
"F": "re"
}
],
"Didn't": [
{
"F": "Did",
"L": "do",
"pos": "VBD"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"Couldve": [
{
"pos": "MD",
"F": "Could"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"10p.m.": [
{
"F": "10"
},
{
"F": "p.m."
}
],
"Del.": [
{
"F": "Del."
}
],
"Oct.": [
{
"F": "Oct."
}
],
"Rep.": [
{
"F": "Rep."
}
],
"cannot": [
{
"F": "can",
"L": "can",
"pos": "MD"
},
{
"F": "not",
"L": "not",
"pos": "RB"
}
],
"Im": [
{
"L": "-PRON-",
"F": "I"
},
{
"pos": "VBP",
"F": "m",
"tenspect": 1,
"number": 1,
"L": "be"
}
],
"howd": [
{
"F": "how"
},
{
"F": "d",
"L": "would",
"pos": "MD"
}
],
"Okla.": [
{
"F": "Okla."
}
],
"Feb.": [
{
"F": "Feb."
}
],
"you've": [
{
"L": "-PRON-",
"F": "you"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"You're": [
{
"L": "-PRON-",
"F": "You"
},
{
"F": "'re"
}
],
"she'll": [
{
"L": "-PRON-",
"F": "she"
},
{
"F": "'ll",
"L": "will",
"pos": "MD"
}
],
"Theyll": [
{
"L": "-PRON-",
"F": "They"
},
{
"F": "ll",
"L": "will",
"pos": "MD"
}
],
"don't": [
{
"L": "do",
"F": "do"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"itd": [
{
"L": "-PRON-",
"F": "it"
},
{
"F": "d",
"L": "would",
"pos": "MD"
}
],
":-)": [
{
"F": ":-)"
}
],
"Hedve": [
{
"L": "-PRON-",
"F": "He"
},
{
"F": "d",
"L": "would",
"pos": "MD"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"isnt": [
{
"F": "is",
"L": "be",
"pos": "VBZ"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"won't": [
{
"F": "wo"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"We're": [
{
"F": "We"
},
{
"F": "'re"
}
],
"3a.m.": [
{
"F": "3"
},
{
"F": "a.m."
}
],
"^_^": [
{
"F": "^_^"
}
],
"\u2018S": [
{
"L": "'s",
"F": "\u2018S"
}
],
"9p.m.": [
{
"F": "9"
},
{
"F": "p.m."
}
],
"dont": [
{
"L": "do",
"F": "do"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"ima": [
{
"L": "-PRON-",
"F": "i"
},
{
"F": "ma"
}
],
"Let's": [
{
"F": "Let"
},
{
"L": "us",
"F": "'s"
}
],
"he's": [
{
"L": "-PRON-",
"F": "he"
},
{
"F": "'s"
}
],
"we've": [
{
"F": "we"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"What's": [
{
"F": "What"
},
{
"F": "'s"
}
],
"Who's": [
{
"F": "Who"
},
{
"F": "'s"
}
],
"-__-": [
{
"F": "-__-"
}
],
"hedve": [
{
"L": "-PRON-",
"F": "he"
},
{
"F": "d",
"L": "would",
"pos": "MD"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"he'd": [
{
"L": "-PRON-",
"F": "he"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
}
],
"When's": [
{
"F": "When"
},
{
"F": "'s"
}
],
"Mightn't've": [
{
"F": "Might"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"We've": [
{
"F": "We"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"\u2018s": [
{
"L": "'s",
"F": "\u2018s"
}
],
"Couldntve": [
{
"pos": "MD",
"F": "Could"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"Who'd": [
{
"F": "Who"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
}
],
":-/": [
{
"F": ":-/"
}
],
"haven't": [
{
"pos": "VB",
"F": "have"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"Gen.": [
{
"F": "Gen."
}
],
"(:": [
{
"F": "(:"
}
],
"arent": [
{
"F": "are",
"pos": "VBP",
"number": 2,
"L": "be"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"You'd've": [
{
"L": "-PRON-",
"F": "You"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"c.": [
{
"F": "c."
}
],
"(=": [
{
"F": "(="
}
],
"Wouldn't": [
{
"F": "Would"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"who's": [
{
"F": "who"
},
{
"F": "'s"
}
],
"12p.m.": [
{
"F": "12"
},
{
"F": "p.m."
}
],
"5am": [
{
"F": "5"
},
{
"L": "a.m.",
"F": "am"
}
],
"Mightve": [
{
"F": "Might"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"Theredve": [
{
"F": "There"
},
{
"F": "d",
"L": "would",
"pos": "MD"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"theredve": [
{
"F": "there"
},
{
"F": "d",
"L": "would",
"pos": "MD"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"Messrs.": [
{
"F": "Messrs."
}
],
"who'd": [
{
"F": "who"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
}
],
"Where's": [
{
"F": "Where"
},
{
"F": "'s"
}
],
"wont": [
{
"F": "wo"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"she'd've": [
{
"L": "-PRON-",
"F": "she"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"10pm": [
{
"F": "10"
},
{
"L": "p.m.",
"F": "pm"
}
],
"Corp.": [
{
"F": "Corp."
}
],
"Aug.": [
{
"F": "Aug."
}
],
"-_-": [
{
"F": "-_-"
}
],
"y.": [
{
"F": "y."
}
],
"Should've": [
{
"F": "Should"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"11pm": [
{
"F": "11"
},
{
"L": "p.m.",
"F": "pm"
}
],
"8am": [
{
"F": "8"
},
{
"L": "a.m.",
"F": "am"
}
],
"theyre": [
{
"L": "-PRON-",
"F": "they"
},
{
"F": "re"
}
],
"l.": [
{
"F": "l."
}
],
"Wouldntve": [
{
"F": "Would"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"Ga.": [
{
"F": "Ga."
}
],
"1am": [
{
"F": "1"
},
{
"L": "a.m.",
"F": "am"
}
],
"Where've": [
{
"F": "Where"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"11a.m.": [
{
"F": "11"
},
{
"F": "a.m."
}
],
"mustn't": [
{
"F": "must"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"isn't": [
{
"F": "is",
"L": "be",
"pos": "VBZ"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"Bros.": [
{
"F": "Bros."
}
],
"Aint": [
{
"F": "Ai",
"pos": "VBP",
"number": 2,
"L": "be"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"why's": [
{
"F": "why"
},
{
"F": "'s"
}
],
"V_V": [
{
"F": "V_V"
}
],
";p": [
{
"F": ";p"
}
],
"There'd": [
{
"F": "There"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
}
],
"They'll": [
{
"L": "-PRON-",
"F": "They"
},
{
"F": "'ll",
"L": "will",
"pos": "MD"
}
],
"b.": [
{
"F": "b."
}
],
"how'll": [
{
"F": "how"
},
{
"F": "'ll",
"L": "will",
"pos": "MD"
}
],
"Wedve": [
{
"F": "We"
},
{
"F": "d",
"L": "would",
"pos": "MD"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"couldntve": [
{
"pos": "MD",
"F": "could"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"12pm": [
{
"F": "12"
},
{
"L": "p.m.",
"F": "pm"
}
],
"There's": [
{
"F": "There"
},
{
"F": "'s"
}
],
"we'd": [
{
"F": "we"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
}
],
"Dr.": [
{
"F": "Dr."
}
],
"Whod": [
{
"F": "Who"
},
{
"F": "d",
"L": "would",
"pos": "MD"
}
],
":-P": [
{
"F": ":-P"
}
],
"whatve": [
{
"F": "what"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"Wouldve": [
{
"F": "Would"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"o.": [
{
"F": "o."
}
],
"there'll": [
{
"F": "there"
},
{
"F": "'ll",
"L": "will",
"pos": "MD"
}
],
":]": [
{
"F": ":]"
}
],
"needn't": [
{
"F": "need"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"shouldntve": [
{
"F": "should"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"why're": [
{
"F": "why"
},
{
"F": "'re"
}
],
"p.m.": [
{
"F": "p.m."
}
],
"Doesnt": [
{
"F": "Does",
"L": "do",
"pos": "VBZ"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"whereve": [
{
"F": "where"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"they'll": [
{
"L": "-PRON-",
"F": "they"
},
{
"F": "'ll",
"L": "will",
"pos": "MD"
}
],
"I'd": [
{
"L": "-PRON-",
"F": "I"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
}
],
"Might've": [
{
"F": "Might"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"mightnt": [
{
"F": "might"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"Kans.": [
{
"F": "Kans."
}
],
"Not've": [
{
"F": "Not",
"L": "not",
"pos": "RB"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"e.": [
{
"F": "e."
}
],
"mightn't": [
{
"F": "might"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"you're": [
{
"L": "-PRON-",
"F": "you"
},
{
"F": "'re"
}
],
"Mar.": [
{
"F": "Mar."
}
],
"They've": [
{
"L": "-PRON-",
"F": "They"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"\")": [
{
"F": "\")"
}
],
"what'll": [
{
"F": "what"
},
{
"F": "'ll",
"L": "will",
"pos": "MD"
}
],
"Calif.": [
{
"F": "Calif."
}
],
"Could've": [
{
"pos": "MD",
"F": "Could"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"Would've": [
{
"F": "Would"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
";)": [
{
"F": ";)"
}
],
";(": [
{
"F": ";("
}
],
"Isn't": [
{
"F": "Is",
"L": "be",
"pos": "VBZ"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"let's": [
{
"F": "let"
},
{
"L": "us",
"F": "'s"
}
],
"'em": [
{
"F": "'em"
}
],
"She'll": [
{
"L": "-PRON-",
"F": "She"
},
{
"F": "'ll",
"L": "will",
"pos": "MD"
}
],
"I.E.": [
{
"F": "I.E."
}
],
"You'd": [
{
"L": "-PRON-",
"F": "You"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
}
],
"wouldnt": [
{
"F": "would"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"6am": [
{
"F": "6"
},
{
"L": "a.m.",
"F": "am"
}
],
":P": [
{
"F": ":P"
}
],
"Why'll": [
{
"F": "Why"
},
{
"F": "'ll",
"L": "will",
"pos": "MD"
}
],
"Where'd": [
{
"F": "Where"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
}
],
"Theyre": [
{
"L": "-PRON-",
"F": "They"
},
{
"F": "re"
}
],
"11p.m.": [
{
"F": "11"
},
{
"F": "p.m."
}
],
"Won't": [
{
"F": "Wo"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"Couldn't": [
{
"pos": "MD",
"F": "Could"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"it's": [
{
"L": "-PRON-",
"F": "it"
},
{
"F": "'s"
}
],
"r.": [
{
"F": "r."
}
],
"it'll": [
{
"L": "-PRON-",
"F": "it"
},
{
"F": "'ll",
"L": "will",
"pos": "MD"
}
],
"They'd've": [
{
"L": "-PRON-",
"F": "They"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"Ima": [
{
"L": "-PRON-",
"F": "I"
},
{
"F": "ma"
}
],
"5pm": [
{
"F": "5"
},
{
"L": "p.m.",
"F": "pm"
}
],
"10am": [
{
"F": "10"
},
{
"L": "a.m.",
"F": "am"
}
],
"m.": [
{
"F": "m."
}
],
"whats": [
{
"F": "what"
},
{
"F": "s"
}
],
"How's": [
{
"F": "How"
},
{
"F": "'s"
}
],
"Sep.": [
{
"F": "Sep."
}
],
"Shouldntve": [
{
"F": "Should"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"youd": [
{
"L": "-PRON-",
"F": "you"
},
{
"F": "d",
"L": "would",
"pos": "MD"
}
],
"Whatll": [
{
"F": "What"
},
{
"F": "ll",
"L": "will",
"pos": "MD"
}
],
"Wouldn't've": [
{
"F": "Would"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"How'd": [
{
"F": "How"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
}
],
"doesnt": [
{
"F": "does",
"L": "do",
"pos": "VBZ"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"h.": [
{
"F": "h."
}
],
"Shouldn't": [
{
"F": "Should"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"He'd've": [
{
"L": "-PRON-",
"F": "He"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"Mightntve": [
{
"F": "Might"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"couldnt": [
{
"pos": "MD",
"F": "could"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"Haven't": [
{
"pos": "VB",
"F": "Have"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"<333": [
{
"F": "<333"
}
],
"doesn't": [
{
"F": "does",
"L": "do",
"pos": "VBZ"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"Hasn't": [
{
"F": "Has"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"how's": [
{
"F": "how"
},
{
"F": "'s"
}
],
"hes": [
{
"L": "-PRON-",
"F": "he"
},
{
"F": "s"
}
],
"=[[": [
{
"F": "=[["
}
],
"xD": [
{
"F": "xD"
}
],
"he'll": [
{
"L": "-PRON-",
"F": "he"
},
{
"F": "'ll",
"L": "will",
"pos": "MD"
}
],
"hed": [
{
"L": "-PRON-",
"F": "he"
},
{
"F": "d",
"L": "would",
"pos": "MD"
}
],
"7p.m.": [
{
"F": "7"
},
{
"F": "p.m."
}
],
"how'd": [
{
"F": "how"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
}
],
"u.": [
{
"F": "u."
}
],
"we're": [
{
"F": "we"
},
{
"F": "'re"
}
],
"vs.": [
{
"F": "vs."
}
],
"Hadnt": [
{
"F": "Had",
"L": "have",
"pos": "VBD"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"Shant": [
{
"F": "Sha"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"Theyve": [
{
"L": "-PRON-",
"F": "They"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"Hows": [
{
"F": "How"
},
{
"F": "s"
}
],
"We'll": [
{
"F": "We"
},
{
"F": "'ll",
"L": "will",
"pos": "MD"
}
],
"N.Y.": [
{
"F": "N.Y."
}
],
"x.": [
{
"F": "x."
}
],
"8p.m.": [
{
"F": "8"
},
{
"F": "p.m."
}
],
"i've": [
{
"L": "-PRON-",
"F": "i"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"Whove": [
{
"F": "Who"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"2am": [
{
"F": "2"
},
{
"L": "a.m.",
"F": "am"
}
],
"La.": [
{
"F": "La."
}
],
"i'ma": [
{
"L": "-PRON-",
"F": "i"
},
{
"F": "'ma"
}
],
"N.J.": [
{
"F": "N.J."
}
],
"Nebr.": [
{
"F": "Nebr."
}
],
"Howd": [
{
"F": "How"
},
{
"F": "d",
"L": "would",
"pos": "MD"
}
],
"hadnt": [
{
"F": "had",
"L": "have",
"pos": "VBD"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"shant": [
{
"F": "sha"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"There'd've": [
{
"F": "There"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"Inc.": [
{
"F": "Inc."
}
],
"I'll": [
{
"L": "-PRON-",
"F": "I"
},
{
"F": "'ll",
"L": "will",
"pos": "MD"
}
],
"Why's": [
{
"F": "Why"
},
{
"F": "'s"
}
],
"Adm.": [
{
"F": "Adm."
}
],
"Shouldn't've": [
{
"F": "Should"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"n.": [
{
"F": "n."
}
],
"Wasnt": [
{
"F": "Was"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"whove": [
{
"F": "who"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
";-p": [
{
"F": ";-p"
}
],
"hasn't": [
{
"F": "has"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"wouldntve": [
{
"F": "would"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"Wheres": [
{
"F": "Where"
},
{
"F": "s"
}
],
"How'll": [
{
"F": "How"
},
{
"F": "'ll",
"L": "will",
"pos": "MD"
}
],
"there'd've": [
{
"F": "there"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"Whos": [
{
"F": "Who"
},
{
"F": "s"
}
],
"shes": [
{
"L": "-PRON-",
"F": "she"
},
{
"F": "s"
}
],
"Doesn't": [
{
"F": "Does",
"L": "do",
"pos": "VBZ"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
"Arent": [
{
"F": "Are",
"pos": "VBP",
"number": 2,
"L": "be"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"Hasnt": [
{
"F": "Has"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"j.": [
{
"F": "j."
}
],
"He's": [
{
"L": "-PRON-",
"F": "He"
},
{
"F": "'s"
}
],
"wasnt": [
{
"F": "was"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"whyll": [
{
"F": "why"
},
{
"F": "ll",
"L": "will",
"pos": "MD"
}
],
"co.": [
{
"F": "co."
}
],
"mustnt": [
{
"F": "must"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"He'd": [
{
"L": "-PRON-",
"F": "He"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
}
],
"I.e.": [
{
"F": "I.e."
}
],
"Shes": [
{
"L": "-PRON-",
"F": "She"
},
{
"F": "s"
}
],
"where've": [
{
"F": "where"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"Youll": [
{
"L": "-PRON-",
"F": "You"
},
{
"F": "ll",
"L": "will",
"pos": "MD"
}
],
"Apr.": [
{
"F": "Apr."
}
],
":')": [
{
"F": ":')"
}
],
"Conn.": [
{
"F": "Conn."
}
],
"8pm": [
{
"F": "8"
},
{
"L": "p.m.",
"F": "pm"
}
],
"9am": [
{
"F": "9"
},
{
"L": "a.m.",
"F": "am"
}
],
"hasnt": [
{
"F": "has"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"theyll": [
{
"L": "-PRON-",
"F": "they"
},
{
"F": "ll",
"L": "will",
"pos": "MD"
}
],
"it'd've": [
{
"L": "-PRON-",
"F": "it"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"itdve": [
{
"L": "-PRON-",
"F": "it"
},
{
"F": "d",
"L": "would",
"pos": "MD"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"Jr.": [
{
"F": "Jr."
}
],
"Rev.": [
{
"F": "Rev."
}
],
"k.": [
{
"F": "k."
}
],
"wedve": [
{
"F": "we"
},
{
"F": "d",
"L": "would",
"pos": "MD"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"=)": [
{
"F": "=)"
}
],
"Colo.": [
{
"F": "Colo."
}
],
"Mr.": [
{
"F": "Mr."
}
],
"Werent": [
{
"F": "Were"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"Therell": [
{
"F": "There"
},
{
"F": "ll",
"L": "will",
"pos": "MD"
}
],
"shan't": [
{
"F": "sha"
},
{
"F": "n't",
"L": "not",
"pos": "RB"
}
],
";-)": [
{
"F": ";-)"
}
],
"Wont": [
{
"F": "Wo"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"hadntve": [
{
"F": "had",
"L": "have",
"pos": "VBD"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"who've": [
{
"F": "who"
},
{
"F": "'ve",
"L": "have",
"pos": "VB"
}
],
"Whatre": [
{
"F": "What"
},
{
"F": "re"
}
],
"'s": [
{
"L": "'s",
"F": "'s"
}
],
"where'd": [
{
"F": "where"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
}
],
"shouldve": [
{
"F": "should"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"a.": [
{
"F": "a."
}
],
"where's": [
{
"F": "where"
},
{
"F": "'s"
}
],
"Ltd.": [
{
"F": "Ltd."
}
],
"Mass.": [
{
"F": "Mass."
}
],
"neednt": [
{
"F": "need"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"Pa.": [
{
"F": "Pa."
}
],
"It'll": [
{
"L": "-PRON-",
"F": "It"
},
{
"F": "'ll",
"L": "will",
"pos": "MD"
}
],
"7am": [
{
"F": "7"
},
{
"L": "a.m.",
"F": "am"
}
],
"We'd": [
{
"F": "We"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
}
],
"Whats": [
{
"F": "What"
},
{
"F": "s"
}
],
"\u2014": [
{
"pos": ":",
"L": "--",
"F": "\u2014"
}
],
"E.g.": [
{
"F": "E.g."
}
],
"Ms.": [
{
"F": "Ms."
}
],
":3": [
{
"F": ":3"
}
],
"5p.m.": [
{
"F": "5"
},
{
"F": "p.m."
}
],
"Itd": [
{
"L": "-PRON-",
"F": "It"
},
{
"F": "d",
"L": "would",
"pos": "MD"
}
],
"May.": [
{
"F": "May."
}
],
"she'd": [
{
"L": "-PRON-",
"F": "she"
},
{
"F": "'d",
"L": "would",
"pos": "MD"
}
],
"Mustnt": [
{
"F": "Must"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"Notve": [
{
"F": "Not",
"L": "not",
"pos": "RB"
},
{
"F": "ve",
"L": "have",
"pos": "VB"
}
],
"you'll": [
{
"L": "-PRON-",
"F": "you"
},
{
"F": "'ll",
"L": "will",
"pos": "MD"
}
],
"Theyd": [
{
"L": "-PRON-",
"F": "They"
},
{
"F": "d",
"L": "would",
"pos": "MD"
}
],
"she's": [
{
"L": "-PRON-",
"F": "she"
},
{
"F": "'s"
}
],
"Couldnt": [
{
"pos": "MD",
"F": "Could"
},
{
"F": "nt",
"L": "not",
"pos": "RB"
}
],
"that's": [
{
"F": "that"
},
{
"F": "'s"
}
],
"4pm": [
{
"F": "4"
},
{
"L": "p.m.",
"F": "pm"
}
],
":))": [
{
"F": ":))"
}
]
}
| [
"[email protected]"
] | |
fdfe941f2d276a821a9342bce3e3e89214a7ecfe | 4b7e282fe480415f5d52c0fc0429f144156190fe | /google/ads/googleads/v7/resources/types/video.py | da5d5c3d0b355e480d86c2f921f4d36b37f58b30 | [
"Apache-2.0"
] | permissive | Z2Xsoft/google-ads-python | c4750357bb19da91bb3b6bf2fa84bef9d2df36d3 | 1779d52a0446c8afb2437b0a9e103dcb849f5590 | refs/heads/main | 2023-08-18T15:22:17.840364 | 2021-09-26T04:08:53 | 2021-09-26T04:08:53 | 410,444,398 | 0 | 0 | Apache-2.0 | 2021-09-26T04:08:53 | 2021-09-26T03:55:38 | null | UTF-8 | Python | false | false | 1,756 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v7.resources",
marshal="google.ads.googleads.v7",
manifest={"Video",},
)
class Video(proto.Message):
r"""A video.
Attributes:
resource_name (str):
Output only. The resource name of the video. Video resource
names have the form:
``customers/{customer_id}/videos/{video_id}``
id (str):
Output only. The ID of the video.
channel_id (str):
Output only. The owner channel id of the
video.
duration_millis (int):
Output only. The duration of the video in
milliseconds.
title (str):
Output only. The title of the video.
"""
resource_name = proto.Field(proto.STRING, number=1,)
id = proto.Field(proto.STRING, number=6, optional=True,)
channel_id = proto.Field(proto.STRING, number=7, optional=True,)
duration_millis = proto.Field(proto.INT64, number=8, optional=True,)
title = proto.Field(proto.STRING, number=9, optional=True,)
__all__ = tuple(sorted(__protobuf__.manifest))
| [
"[email protected]"
] | |
4728768877333c9060e01d5d66cd0b2dc8cd58e2 | 74f04d78486d4986e4f0ef8c3bc480ba00caae4a | /articles/models.py | c9ee25d3e4b0b92deaea58fc10f1b49de02c4ee6 | [] | no_license | kimjy392/reboot-django | e24dd90182ee5d317bf13872ae169ac738a71c6c | 4280c7bffacd759a1b785ae576e9e89a0c2269d8 | refs/heads/master | 2022-12-10T12:18:38.422146 | 2019-10-28T01:12:41 | 2019-10-28T01:12:41 | 207,240,898 | 0 | 10 | null | 2022-12-08T06:13:37 | 2019-09-09T06:36:38 | Python | UTF-8 | Python | false | false | 594 | py | from django.db import models
# Create your models here.
class Reporter(models.Model):
name = models.CharField(max_length=20)
class Article(models.Model):
title = models.CharField(max_length=50)
content = models.TextField()
image = models.ImageField()
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
# reporter = models.ForeignKey(Reporter, on_delete=models.CASCADE)
class Comment(models.Model):
content = models.CharField(max_length=50)
article = models.ForeignKey(Article, on_delete=models.CASCADE) | [
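# A minimal ORM usage sketch (illustrative only; assumes migrations have
# been applied and Pillow is installed for the ImageField):
#   a = Article.objects.create(title="t", content="c", image="cover.png")
#   Comment.objects.create(content="first!", article=a)
#   a.comment_set.count()  # -> 1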
"[email protected]"
] | |
2f14e4dbbb349aed3998968c565c70758358ae4e | 23f73a7a0c0ced134f6c18bb9c200617ce31f1d5 | /src/fauxmo/handlers/hass.py | 6b72bc98a0323f0764aee236d59e2be04d96d90a | [
"MIT"
] | permissive | clach04/fauxmo | f586d0024648f3da6d2ff38b8fe06fdb345bcfbd | 06a0b8ff20f4811de9ac08663e0d76f8fdd83764 | refs/heads/master | 2021-11-27T19:13:36.583893 | 2016-07-22T13:02:39 | 2016-07-22T13:02:39 | 66,683,609 | 2 | 0 | null | 2016-08-26T22:42:52 | 2016-08-26T22:42:52 | null | UTF-8 | Python | false | false | 2,671 | py | # -*- coding: utf-8 -*-
import homeassistant.remote
from homeassistant.const import (SERVICE_TURN_ON, SERVICE_TURN_OFF,
SERVICE_MOVE_UP, SERVICE_MOVE_DOWN)
class HassAPIHandler:
"""Handler for Home Assistant (hass) Python API.
Allows users to specify Home Assistant services in their config.json and
toggle these with the Echo. While this can be done with Home Assistant's
REST API as well (example included), I find it easier to use the Python
API.
"""
def __init__(self, host, password, entity, port=8123):
"""Initialize a HassAPIHandler instance
Args:
host (str): IP address of device running Home Assistant
password (str): Home Assistant password
entity (str): `entity_id` used by hass, one easy way to find is to
curl and grep the REST API, eg:
`curl http://IP/api/bootstrap | grep entity_id`
Kwargs:
port (int): Port running hass on the host computer (default 8123)
"""
self.host = host
self.password = password
self.entity = entity
self.port = port
self.domain = self.entity.split(".")[0]
self.api = homeassistant.remote.API(self.host, self.password,
port=self.port)
self.service_map = {
'switch': {
'on': SERVICE_TURN_ON,
'off': SERVICE_TURN_OFF
},
'rollershutter': {
'on': SERVICE_MOVE_UP,
'off': SERVICE_MOVE_DOWN
}
}
def send(self, signal):
"""Send a signal to the hass `call_service` function, returns True.
The hass Python API doesn't appear to return anything with this
function, but will raise an exception if things didn't seem to work, so
I have it set to just return True, hoping for an exception if there was
a problem.
Args:
signal (const): signal imported from homeassistant.const. I have
imported SERVICE_TURN_ON and SERVICE_TURN_OFF, make sure you import
any others that you need.
"""
homeassistant.remote.call_service(self.api, self.domain, signal,
{'entity_id': self.entity})
return True
def on(self):
on_cmd = self.service_map[self.domain.lower()]['on']
return self.send(on_cmd)
def off(self):
off_cmd = self.service_map[self.domain.lower()]['off']
return self.send(off_cmd)
| [
"[email protected]"
] | |
0554e077b0db3b39fc887e6b4986a336cc20fc9a | 6a7d8b67aad59c51dafdfb8bcffd53864a3d65b0 | /LintCode/inorderSuccessorBST.py | 4967d1c45481e78e4f3fb69538a9e2576d98cf12 | [] | no_license | dicao425/algorithmExercise | 8bba36c1a08a232678e5085d24bac1dbee7e5364 | 36cb33af758b1d01da35982481a8bbfbee5c2810 | refs/heads/master | 2021-10-07T08:56:18.030583 | 2018-12-04T05:59:17 | 2018-12-04T05:59:17 | 103,611,760 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 980 | py | #!/usr/bin/python
import sys
"""
Definition for a binary tree node.
class TreeNode(object):
def __init__(self, x):
self.val = x
self.left = None
self.right = None
"""
class Solution:
"""
@param: root: The root of the BST.
    @param: p: You need to find the successor node of p.
@return: Successor of p.
"""
def inorderSuccessor(self, root, p):
        # Walk down from the root towards p, remembering the last node
        # where we turned left; that node is the successor candidate.
if not root:
return
suc = None
while root and root.val != p.val:
if root.val > p.val:
suc = root
root = root.left
else:
root = root.right
if not root:
return
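        # p was found; if it has a right subtree, the successor is that
        # subtree's leftmost node, otherwise it is the saved ancestor.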
if not root.right:
return suc
else:
root = root.right
while root.left:
root = root.left
return root
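# A minimal usage sketch (illustrative; LintCode normally supplies the
# TreeNode class shown commented out above):
#     root = TreeNode(2)
#     root.left, root.right = TreeNode(1), TreeNode(3)
#     Solution().inorderSuccessor(root, root.left).val  # -> 2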
def main():
aa = Solution()
return 0
if __name__ == "__main__":
sys.exit(main()) | [
"[email protected]"
] | |
2261cf66860e5e03da76218a1e66eb199a78667d | fc66f771e95ee36cd502d3cf7220794e6f263226 | /src/utils/at.py | ca3ac83c786efd55252a4fe7853b8b4d9a002805 | [
"MIT"
] | permissive | yuanniufei/IncetOps | 2bcb7851514f3db6bc409746d245da08032ecc06 | e21185a4931a10996a187e63f4487b4402544c69 | refs/heads/master | 2020-03-25T20:50:02.339329 | 2018-08-09T07:35:02 | 2018-08-09T07:35:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,639 | py | # -*- coding: utf-8 -*-
"""
IncetOps.utils.at
~~~~~~~~~~~~~~
    AES encryption implemented in CBC mode.
    CBC uses a password and a salt (for scrambling) to derive the key and iv
    with a fixed algorithm (md5); the key and iv (the initialization vector,
    used to encrypt the first plaintext block) are then used to encrypt the
    plaintext and decrypt the ciphertext.
:copyright: (c) 2018 by staugur.
:license: MIT, see LICENSE for more details.
"""
from Crypto.Cipher import AES
from binascii import b2a_hex, a2b_hex
class KeyGenerationClass():
"""密钥生成器"""
def __init__(self, key):
self.key = key
self.mode = AES.MODE_CBC
def encrypt(self, text):
        # Encryption: the plaintext handed to AES must be a multiple of
        # 16 bytes long, so pad text up to the next multiple of 16.
        cryptor = AES.new(self.key, self.mode, self.key)
        # The key length must be 16 (AES-128), 24 (AES-192) or 32 (AES-256)
        # bytes; AES-128 is sufficient here.
length = 16
count = len(text)
add = length - (count % length)
text = text + ('\0' * add)
self.ciphertext = cryptor.encrypt(text)
        # The raw AES ciphertext is not necessarily ASCII, which can cause
        # problems when it is printed or stored, so it is always converted
        # to a hex string here.
return b2a_hex(self.ciphertext)
def decrypt(self, text):
        # After decrypting, strip the '\0' padding added during encryption.
cryptor = AES.new(self.key, self.mode, self.key)
plain_text = cryptor.decrypt(a2b_hex(text))
return plain_text.rstrip('\0') | [
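# A minimal round-trip sketch (assumes the PyCrypto-era str API used above
# and a 16-byte key; the key value is illustrative, and note the class
# reuses the key as the IV):
if __name__ == "__main__":
    kg = KeyGenerationClass("0123456789abcdef")
    ciphertext = kg.encrypt("hello, world")
    print(kg.decrypt(ciphertext))  # -> hello, world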
"[email protected]"
] | |
4e107f975e9b205c04868eafff741a552f4302c0 | d57b51ec207002e333b8655a8f5832ed143aa28c | /.history/gos_20200614060821.py | 3db5babdabe6fb83a5ab594602a18e4fa77fbc59 | [] | no_license | yevheniir/python_course_2020 | b42766c4278a08b8b79fec77e036a1b987accf51 | a152d400ab4f45d9d98d8ad8b2560d6f0b408c0b | refs/heads/master | 2022-11-15T07:13:24.193173 | 2020-07-11T15:43:26 | 2020-07-11T15:43:26 | 278,890,802 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,896 | py | # # Імпорт фажливих бібліотек
# from BeautifulSoup import BeautifulSoup
# import urllib2
# import re
# # Створення функції пошуку силок
# def getLinks(url):
# # отримання та присвоєння контенту сторінки в змінну
# html_page = urllib2.urlopen(url)
# # Перетворення контенту в обєкт бібліотеки BeautifulSoup
# soup = BeautifulSoup(html_page)
# # створення пустого масиву для лінків
# links = []
# # ЗА ДОПОМОГОЮ ЧИКЛУ ПРОХЛДИМСЯ ПО ВСІХ ЕЛЕМЕНТАХ ДЕ Є СИЛКА
# for link in soup.findAll('a', attrs={'href': re.compile("^http://")}):
# # Додаємо всі силки в список
# links.append(link.get('href'))
# # повертаємо список
# return links
# -----------------------------------------------------------------------------------------------------------
# # # Імпорт фажливих бібліотек
# import subprocess
# # Створення циклу та використання функції range для генерації послідовних чисел
# for ping in range(1,10):
# # генерування IP адреси базуючись на номері ітерації
# address = "127.0.0." + str(ping)
# # виклик функції call яка робить запит на IP адрес та запис відповіді в змінну
# res = subprocess.call(['ping', '-c', '3', address])
# # За допомогою умовних операторів перевіряємо відповідь та виводимо результат
# if res == 0:
# print "ping to", address, "OK"
# elif res == 2:
# print "no response from", address
# else:
# print "ping to", address, "failed!"
# -----------------------------------------------------------------------------------------------------------
# # Імпорт фажливих бібліотек
# import requests
# # Ітеруємося по масиву з адресами зображень
# for i, pic_url in enumerate(["http://x.com/nanachi.jpg", "http://x.com/nezuko.jpg"]):
# # Відкриваємо файл базуючись на номері ітерації
# with open('pic{0}.jpg'.format(i), 'wb') as handle:
# # Отримуємо картинку
# response = requests.get(pic_url, stream=True)
# # Використовуючи умовний оператор перевіряємо чи успішно виконався запит
# if not response.ok:
# print(response)
# # Ітеруємося по байтах картинки та записуємо батчаси в 1024 до файлу
# for block in response.iter_content(1024):
# # Якщо байти закінчилися, завершуємо алгоритм
# if not block:
# break
# # Записуємо байти в файл
# handle.write(block)
# -----------------------------------------------------------------------------------------------------------
# # Створюємо клас для рахунку
# class Bank_Account:
# # В конструкторі ініціалізуємо рахунок як 0
# def __init__(self):
# self.balance=0
# print("Hello!!! Welcome to the Deposit & Withdrawal Machine")
# # В методі депозит, використовуючи функцію input() просимо ввести суму поповенння та додаємо цю суму до рахунку
# def deposit(self):
# amount=float(input("Enter amount to be Deposited: "))
# self.balance += amount
# print("\n Amount Deposited:",amount)
# # В методі депозит, використовуючи функцію input() просимо ввести суму отримання та віднімаємо цю суму від рахунку
# def withdraw(self):
# amount = float(input("Enter amount to be Withdrawn: "))
# # За допомогою умовного оператора перевіряємо чи достатнього грошей на рахунку
# if self.balance>=amount:
# self.balance-=amount
# print("\n You Withdrew:", amount)
# else:
# print("\n Insufficient balance ")
# # Виводимо бааланс на екран
# def display(self):
# print("\n Net Available Balance=",self.balance)
# # Створюємо рахунок
# s = Bank_Account()
# # Проводимо операції з рахунком
# s.deposit()
# s.withdraw()
# s.display()
# -----------------------------------------------------------------------------------------------------------
# Створюємо рекурсивну функцію яка приймає десяткове число
def decimalToBinary(n):
# перевіряємо чи число юільше 1
if(n > 1):
# Якщо так, ділемо на 2 юез остачі та рекурсивно викликаємо функцію
decimalToBinary(n//2)
# Якщо ні, виводимо на остачу ділення числа на 2
print(n%2, end=' ')
# Створюємо функцію яка приймає бі число
def binaryToDecimal(binary):
binary1 = binary
decimal, i, n = 0, 0, 0
while(binary != 0):
dec = binary % 10
decimal = decimal + dec * pow(2, i)
binary = binary//10
i += 1
print(decimal)
| [
"[email protected]"
] | |
225cc84d1b8df33afa6b99407f6dad6ab6b09f7f | 1d007e58c7739f36bdb85cb9aa13b3f4584cdfb9 | /day1/day1/urls.py | 28f2a39c7c890b2c071f918d1dcef7609bddfad4 | [] | no_license | rahuladream/Django-Challenge | 65410f053c06f2556628b449b817244dac32e1ac | 0b81a6b69b743799f3d8562d6ec784950980716c | refs/heads/master | 2020-06-14T14:19:54.370195 | 2019-07-03T13:02:07 | 2019-07-03T13:02:07 | 195,024,788 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 912 | py | """day1 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,include
# from . import admin_site
urlpatterns = [
path('polls/', include('polls.urls', namespace="polls")),
# path('myadmin/', include('admin_site.urls')), # grappelli URLS
path('admin/', admin.site.urls),
]
| [
"[email protected]"
] | |
1f638f6038f33df2aa4f2e79d8b32c4280c955fd | 3db8bc4c7297895c687be374a206b63d5d329e5e | /Python3/019_Remove_Nth_Node_From_End_of_List.py | 0d1d00b05fb6965d0c5f5762555d56236207eb67 | [
"MIT"
] | permissive | Jian-jobs/Leetcode-Python3 | dd06d3238b69ae1419754810dec68705d3344a41 | f2d3bb6ecb7d5d0bca4deaed26162fbe0813a73e | refs/heads/master | 2020-05-15T00:59:28.160898 | 2018-11-16T04:44:32 | 2018-11-16T04:44:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,425 | py | #!usr/bin/env python3
# -*- coding:utf-8 -*-
'''
Given a linked list, remove the nth node from the end of list and return its head.
For example,
Given linked list: 1->2->3->4->5, and n = 2.
After removing the second node from the end, the linked list becomes 1->2->3->5.
Note:
Given n will always be valid.
Try to do this in one pass.
'''
# Definition for singly-linked list.
class ListNode(object):
def __init__(self, x):
self.val = x
self.next = None
# Define this to check if it works well
def myPrint(self):
print(self.val)
if self.next:
self.next.myPrint()
class Solution(object):
def removeNthFromEnd(self, head, n):
"""
:type head: ListNode
:type n: int
:rtype: ListNode
"""
if not head:
return head
point = ListNode(-1)
point.next = head
prev = point
cur = point
while prev and n >=0:
prev = prev.next
n -= 1
while prev:
prev = prev.next
cur = cur.next
cur.next = cur.next.next
return point.next
if __name__ == "__main__":
n5 = ListNode(5)
n4 = ListNode(4)
n3 = ListNode(3)
n2 = ListNode(2)
n1 = ListNode(1)
n1.next = n2
n2.next = n3
n3.next = n4
n4.next = n5
result = Solution().removeNthFromEnd(n1, 5)
result.myPrint()
| [
"[email protected]"
] | |
eed66ce765aa9eae0228a51ffc68c16ad9405ae4 | 1816378da612c7db376934b033e4fd64951338b6 | /gui/jails/migrations/0007_add_model_JailTemplate.py | dc21d06fbe871543b3648239738a169c72011b35 | [] | no_license | quater/freenas-9.2-xen | 46517a7a23546764347d3c91108c70a8bd648ec6 | 96e580055fa97575f0a0cb23a72495860467bcfb | refs/heads/master | 2021-01-16T22:21:38.781962 | 2014-02-07T05:59:13 | 2014-02-07T05:59:13 | 16,609,785 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,422 | py | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from freenasUI.jails.utils import get_jails_index
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'JailTemplate'
db.create_table(u'jails_jailtemplate', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('jt_name', self.gf('django.db.models.fields.CharField')(max_length=120)),
('jt_url', self.gf('django.db.models.fields.CharField')(max_length=255)),
))
db.send_create_signal(u'jails', ['JailTemplate'])
#
# The standard jail types
#
db.execute("insert into jails_jailtemplate (jt_name, jt_url) "
"values ('pluginjail', '%s/freenas-pluginjail.tgz')" % get_jails_index())
db.execute("insert into jails_jailtemplate (jt_name, jt_url) "
"values ('portjail', '%s/freenas-portjail.tgz')" % get_jails_index())
db.execute("insert into jails_jailtemplate (jt_name, jt_url) "
"values ('standard', '%s/freenas-standard.tgz')" % get_jails_index())
#
# And... some Linux jail templates
#
db.execute("insert into jails_jailtemplate (jt_name, jt_url) "
"values ('debian-7.1.0', '%s/linux-debian-7.1.0.tgz')" % get_jails_index())
db.execute("insert into jails_jailtemplate (jt_name, jt_url) "
"values ('gentoo-20130820', '%s/linux-gentoo-20130820.tgz')" % get_jails_index())
db.execute("insert into jails_jailtemplate (jt_name, jt_url) "
"values ('ubuntu-13.04', '%s/linux-ubuntu-13.04.tgz')" % get_jails_index())
db.execute("insert into jails_jailtemplate (jt_name, jt_url) "
"values ('centos-6.4', '%s/linux-centos-6.4.tgz')" % get_jails_index())
db.execute("insert into jails_jailtemplate (jt_name, jt_url) "
"values ('suse-12.3', '%s/linux-suse-12.3.tgz')" % get_jails_index())
def backwards(self, orm):
# Deleting model 'JailTemplate'
db.delete_table(u'jails_jailtemplate')
models = {
u'jails.jails': {
'Meta': {'object_name': 'Jails'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jail_alias_bridge_ipv4': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
'jail_alias_bridge_ipv6': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
'jail_alias_ipv4': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
'jail_alias_ipv6': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
'jail_autostart': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
'jail_bridge_ipv4': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
'jail_bridge_ipv6': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
'jail_defaultrouter_ipv4': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
'jail_defaultrouter_ipv6': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
'jail_host': ('django.db.models.fields.CharField', [], {'max_length': '120'}),
'jail_ipv4': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
'jail_ipv6': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
'jail_mac': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
'jail_nat': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
'jail_status': ('django.db.models.fields.CharField', [], {'max_length': '120'}),
'jail_type': ('django.db.models.fields.CharField', [], {'max_length': '120'}),
'jail_vnet': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'})
},
u'jails.jailsconfiguration': {
'Meta': {'object_name': 'JailsConfiguration'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jc_collectionurl': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'jc_ipv4_network': ('freenasUI.freeadmin.models.Network4Field', [], {'max_length': '18', 'blank': 'True'}),
'jc_ipv4_network_end': ('freenasUI.freeadmin.models.Network4Field', [], {'max_length': '18', 'blank': 'True'}),
'jc_ipv4_network_start': ('freenasUI.freeadmin.models.Network4Field', [], {'max_length': '18', 'blank': 'True'}),
'jc_ipv6_network': ('freenasUI.freeadmin.models.Network6Field', [], {'max_length': '43', 'blank': 'True'}),
'jc_ipv6_network_end': ('freenasUI.freeadmin.models.Network6Field', [], {'max_length': '43', 'blank': 'True'}),
'jc_ipv6_network_start': ('freenasUI.freeadmin.models.Network6Field', [], {'max_length': '43', 'blank': 'True'}),
'jc_path': ('django.db.models.fields.CharField', [], {'max_length': '1024'})
},
u'jails.jailtemplate': {
'Meta': {'object_name': 'JailTemplate'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jt_name': ('django.db.models.fields.CharField', [], {'max_length': '120'}),
'jt_url': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'jails.nullmountpoint': {
'Meta': {'object_name': 'NullMountPoint'},
'destination': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jail': ('django.db.models.fields.CharField', [], {'max_length': '120'}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '300'})
}
}
complete_apps = ['jails']
| [
"[email protected]"
] | |
e2cb2c8e89a8b49e48345e5c5ac0b7f4d4038e0c | d913bac9fa42473aa8cee68c8ad8b4eba5484b89 | /Scripts/features/VoidTender_POS.py | 6f88141ad64358009955cec6efcfc5ed742ca805 | [] | no_license | priyatam0509/Automation-Testing | 07e7c18b4522976f0ade2b72bd46cffd55c5634e | d24805456e5a0126c036c1688a5d112bdcf4467a | refs/heads/main | 2023-02-26T19:07:41.761905 | 2021-01-30T10:13:34 | 2021-01-30T10:13:34 | 334,376,899 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,424 | py | """
File name: VoidTender_POS.py
Tags:
Description:
Author: Gene Todd
Date created: 2020-04-16 09:40:28
Date last modified:
Python Version: 3.7
"""
import logging, time
from app import Navi, mws, pos, system
from app.framework.tc_helpers import setup, test, teardown, tc_fail
class VoidTender_POS():
"""
Description: Test class that provides an interface for testing.
"""
def __init__(self):
self.log = logging.getLogger()
@setup
def setup(self):
"""
Performs any initialization that is not default.
"""
#if not system.restore_snapshot():
# self.log.debug("No snapshot to restore, if this is not expected please contact automation team")
pos.connect()
pos.sign_on()
@test
def test_voidCash(self):
"""
Basic void tender case using cash. Reason codes enabled.
"""
self.prep_trans()
self.log.info("Adding tender")
pos.enter_keypad(100, after="Enter")
# Assume the tender has already been selected when it was added
self.log.info("Voiding cash tender")
pos.click_tender_key("Void")
# Confirms the reason codes appeared
pos.select_list_item("Cashier Error")
pos.click("Enter")
# Confirm the tender is gone
jrnl = pos.read_transaction_journal()
for line in jrnl:
if "Cash" in line:
tc_fail("Cash tender found in transaction after being voided")
self.log.info("Cash confirmed no longer in transaction journal")
# Pay out the transaction for the next test
self.log.info("Paying out transaction")
pos.click_tender_key("Exact Change")
pos.is_element_present(pos.controls['function keys']['tools'], timeout=5)
@test
def test_noReasonCodes(self):
"""
Tests our ability to void tenders without reason codes enabled
"""
# Disable reason codes
pos.close()
self.log.info("Removing void tender reason code")
Navi.navigate_to('Register Group Maintenance')
mws.click_toolbar('Change')
mws.select_tab('Reason Codes')
mws.set_value('Available Functions', 'Void Tender')
mws.set_value('Require Reason Code', False)
mws.click_toolbar('Save')
pos.connect()
tries = 0
while mws.get_top_bar_text() and tries < 10:
self.log.info("Waiting for reload options...")
tries = tries + 1
time.sleep(.5)
self.prep_trans()
self.log.info("Adding tender")
pos.enter_keypad(100, after="Enter")
# Assume the tender has already been selected when it was added
self.log.info("Voiding cash tender")
pos.click_tender_key("Void")
# Wait for void to process
pos.is_element_present(pos.controls['pay']['exact_amount'], timeout=5)
# Confirm the tender is gone
jrnl = pos.read_transaction_journal()
for line in jrnl:
if "Cash" in line:
tc_fail("Cash tender found in transaction after being voided")
self.log.info("Cash confirmed no longer in transaction journal")
# Pay out the transaction for the next test
self.log.info("Paying out transaction")
pos.click_tender_key("Exact Change")
pos.is_element_present(pos.controls['function keys']['tools'], timeout=5)
@teardown
def teardown(self):
"""
Performs cleanup after this script ends.
"""
pos.close()
# Re-enable reason codes
self.log.info("Removing void tender reason code")
Navi.navigate_to('Register Group Maintenance')
mws.click_toolbar('Change')
mws.select_tab('Reason Codes')
mws.set_value('Available Functions', 'Void Tender')
mws.set_value('Require Reason Code', True)
mws.click_toolbar('Save')
def prep_trans(self):
"""
Helper function for adding an item and getting to the pay screen for tests
"""
self.log.info("Setting up transaction for VoidTender test...")
pos.click("Item 1")
pos.enter_keypad(1000, after="Enter")
pos.click("Pay")
self.log.info("... Setup complete")
| [
"[email protected]"
] | |
714f10f1cdf810ee577d228a32f31af48c09c4ca | 93e55f080779f16f47a7382a3fb0b29a4189e074 | /convertor/huawei/te/lang/cce/te_compute/conv3d_compute.py | 41cc76d90ec5f7cf33e700dfbfa39e91468a5d7b | [] | no_license | jizhuoran/caffe-huawei-atlas-convertor | b00cfdec3888da3bb18794f52a41deea316ada67 | 148511a31bfd195df889291946c43bb585acb546 | refs/heads/master | 2022-11-25T13:59:45.181910 | 2020-07-31T07:37:02 | 2020-07-31T07:37:02 | 283,966,371 | 4 | 2 | null | null | null | null | UTF-8 | Python | false | false | 29,385 | py | """
Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.
This program is free software; you can redistribute it and/or modify
it under the terms of the Apache License Version 2.0.You may not use this file
except in compliance with the License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
Apache License for more details at
http://www.apache.org/licenses/LICENSE-2.0
conv3d compute
"""
from __future__ import division
from te import tvm
from te.platform import CUBE_MKN
from te.platform import get_soc_spec
from te.domain.tiling.tiling_query import tiling_query
from te.utils import op_utils
from . import util as te_util
from .cube_util import im2col_fractal_3d, im2col_row_major
OP_TAG = "conv3d_"
TENSOR_MAP = {}
DIM_MAP = {}
NAME_INDEX = [0]
SQRT = {}
# filterD must be in [1,255]
FILTER_DHW_MIN = 1
FILTER_DHW_MAX = 255
# pad must be in [0,255]
PAD_MIN = 0
PAD_MAX = 255
# stride must be in [1,63]
STRIDE_MIN = 1
STRIDE_MAX = 63
# fmap H and W must be in [1, 4096]
FMAP_HW_MIN = 1
FMAP_HW_MAX = 4096
def check_d_dimension(fmap_d, filter_d, pad_d, stride_d):
if filter_d < FILTER_DHW_MIN or filter_d > FILTER_DHW_MAX:
raise RuntimeError("kernel D must be in [1,255].")
if (fmap_d + pad_d[0] + pad_d[1]) < filter_d:
        raise RuntimeError(
            "the depth of the feature map after padding "
            "cannot be less than the filter's depth")
    if pad_d[0] < PAD_MIN or pad_d[1] < PAD_MIN or \
        pad_d[0] > PAD_MAX or pad_d[1] > PAD_MAX:
        raise RuntimeError("pad_d must be in [0,255].")
    if pad_d[0] >= filter_d or pad_d[1] >= filter_d:
        raise RuntimeError("pad_d must be less than filter_d")
    if stride_d < STRIDE_MIN or stride_d > STRIDE_MAX:
        raise RuntimeError("stride_d must be in [1,63].")
def check_h_dimension(fmap_h, filter_h, pad_h, stride_h):
if fmap_h < FMAP_HW_MIN or fmap_h > FMAP_HW_MAX:
raise RuntimeError("feature H must be in [1,4096].")
if filter_h < FILTER_DHW_MIN or filter_h > FILTER_DHW_MAX:
raise RuntimeError("kernel H must be in [1,255].")
if pad_h[0] < PAD_MIN or pad_h[1] < PAD_MIN or \
pad_h[0] > PAD_MAX or pad_h[1] > PAD_MAX:
raise RuntimeError("padh must be in [0,255].")
if filter_h > (fmap_h + pad_h[0] + pad_h[1]):
# Chip Design demand, Load3D
raise RuntimeError("feature H(after pad) must >= kernel H")
if stride_h < STRIDE_MIN or stride_h > STRIDE_MAX:
raise RuntimeError("strideh must be in [1,63].")
if pad_h[0] >= filter_h or pad_h[1] >= filter_h:
raise RuntimeError("kernel H must > Pad H")
def check_w_dimension(fmap_w, filter_w, pad_w, stride_w):
if fmap_w < FMAP_HW_MIN or fmap_w > FMAP_HW_MAX:
raise RuntimeError("feature W must be in [1,4096].")
if filter_w < FILTER_DHW_MIN or filter_w > FILTER_DHW_MAX:
raise RuntimeError("kernel W must be in [1,255].")
if pad_w[0] < PAD_MIN or pad_w[1] < PAD_MIN or \
pad_w[0] > PAD_MAX or pad_w[1] > PAD_MAX:
raise RuntimeError("padw must be in [0,255].")
if filter_w > (fmap_w + pad_w[0] + pad_w[1]):
# Chip Design demand, Load3D
raise RuntimeError("feature W(after pad) must >= kernel W")
if stride_w < STRIDE_MIN or stride_w > STRIDE_MAX:
raise RuntimeError("stridew must be in [1,63].")
def check_conv3d_shape(shape_fm, shape_filter, pads, stride_dhw, fmp_dtype,
w_dtype):
"""
algorithm: check the input params of conv3d
Parameters
----------
shape_fm: the shape of feature, format is 'NCDHW'.
a list/tuple of 'int' that has length `== 5`
shape_filter: the shape of filter, format is 'NCDHW'.
a list of 'int' that has length `== 5`
pads: tuple/list of 6 integers
[pad_head, pad_tail, pad_top, pad_bottom, pad_left, pad_right]
stride_dhw: A list of `ints` that has length `== 3`.
fmp_dtype: the dtype of feature
w_dtype: the dtype of filter
Returns
-------
None
"""
if shape_fm[1] != shape_filter[1]:
raise RuntimeError("input feature map channel should"
"equal to filter channel")
fmap_n, fmap_c, fmap_d, fmap_h, fmap_w = shape_fm
filter_n, filter_c, filter_d, filter_h, filter_w = shape_filter
pad_d = [pads[0], pads[1]]
check_d_dimension(fmap_d, filter_d, pad_d, stride_dhw[0])
pad_h = [pads[2], pads[3]]
check_h_dimension(fmap_h, filter_h, pad_h, stride_dhw[1])
pad_w = [pads[4], pads[5]]
check_w_dimension(fmap_w, filter_w, pad_w, stride_dhw[2])
    # the C dimension should be aligned to the cube block size (16)
    block_size_k = CUBE_MKN[fmp_dtype]['mac'][1]
    block_size_m = CUBE_MKN[fmp_dtype]['mac'][0]
    fmap_c = ((fmap_c + block_size_k - 1) //
              block_size_k) * block_size_k
    filter_c = fmap_c
block_size_n = CUBE_MKN[w_dtype]['mac'][2]
filter_n = ((filter_n + block_size_n - 1) //
block_size_n) * block_size_n
# calculated by h_i and w_i
h_out = (fmap_h + (pad_h[0] + pad_h[1]) - filter_h) // stride_dhw[1] + 1
w_out = (fmap_w + (pad_w[0] + pad_w[1]) - filter_w) // stride_dhw[2] + 1
d_out = (fmap_d + (pad_d[0] + pad_d[1]) - filter_d) // stride_dhw[0] + 1
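    # Worked example of the rule above (illustrative numbers, not from any
    # real network): with fmap D/H/W = 8/56/56, a 3x3x3 kernel, pads of 1 on
    # every side and strides of 1, h_out = w_out = (56 + 2 - 3) // 1 + 1 = 56
    # and d_out = (8 + 2 - 3) // 1 + 1 = 8.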
load2d_pass_flag = (filter_d == 1) and (filter_h == 1) and \
(filter_w == 1) and \
(list(pads) == [0, 0, 0, 0, 0, 0]) and \
(list(stride_dhw) == [1, 1, 1])
# Chip Design demand only h_dimesion constraint
only_fhkh_pass_flag = (1 <= filter_h <= 11) and \
(stride_dhw[1] == 1) and \
(h_out == 1)
# Chip Design demand both h_dimesion and w_dimension constraint
fhkh_fwkw_pass_flag = (1 <= filter_w <= 11) and (1 <= filter_h <= 11) and \
(stride_dhw[1] == 1) and (stride_dhw[2] == 1) and \
(h_out == 1) and (w_out == 1)
if load2d_pass_flag or only_fhkh_pass_flag or fhkh_fwkw_pass_flag:
pass
else:
if w_out < 2:
# Chip Design demand w_out must >=2
raise RuntimeError("FM_W + pad_left + pad_right - KW>=strideW")
if h_out < 2:
# Chip Design demand h_out must >=2
raise RuntimeError("FM_H + pad_top + pad_bottom - KH>=strideH")
# check for not bigger than L1
l1_buffer_size = get_soc_spec("L1_SIZE")
m_bit_ratio = {"float16": 2, "int8": 1}
point_per_w = (fmap_w - filter_w +
pad_w[0] + pad_w[1]) // stride_dhw[2] + 1
w_in = block_size_m // point_per_w + 2
tmp = ((w_in - 1) * stride_dhw[1] + filter_h) * fmap_w
max_feature_map_l1 = block_size_k * tmp * m_bit_ratio[w_dtype]
if max_feature_map_l1 > l1_buffer_size:
raise RuntimeError(
"Input feature is too large, "
"the minimum tiling may exceeds L1_Buffer")
class Conv3DParam:
"""
class of ConvParam
"""
def __init__(self):
pass
def get_tensor_map(self):
"""
get the tensor_map in convparam
"""
return self.TENSOR_MAP
TENSOR_MAP = {}
dim_map = {}
tiling = None
tiling_query_param = {}
def cube_3d_compute(fmap,
weight,
mad_dtype,
res_dtype,
pads,
stride_dhw,
shape_filter_ncdhw,
cyclebuffer_flag,
bias=False,
tiling=None):
"""
conv
Parameters
----------
fmap : tvm.tensor, Feature Map
weight: tvm.tensor, Filter
mad_dtype : the compute data type
res_dtype : the result data type
pads: the padding shape
[head, tail, top, bottom, left, right]
stride_dhw: the stride value
[stride_d, stride_h, stride_w]
shape_filter_ncdhw: the filter shape
bias: the tag for bias or not
tiling: default none, tiling
    Returns
    -------
    wrapped_tensor
"""
in_dtype = fmap.dtype
w_dtype = weight.dtype
TENSOR_MAP["fmap"] = fmap
TENSOR_MAP["filter"] = weight
if isinstance(bias, tvm.tensor.Tensor):
TENSOR_MAP["bias"] = bias
fmap_shape = te_util.shape_to_list(fmap.shape)
batch_size = fmap_shape[0]
fmap_d = fmap_shape[1]
fmap_c1 = fmap_shape[2]
fmap_h = fmap_shape[3]
fmap_w = fmap_shape[4]
fmap_c0 = fmap_shape[5]
filter_cout, _, filter_d, filter_h, filter_w = shape_filter_ncdhw
pad_head, pad_tail, pad_top, pad_bottom, pad_left, pad_right = pads
stride_d, stride_h, stride_w = stride_dhw
TENSOR_MAP["filter_d"] = filter_d
height_out = (fmap_h + pad_top + pad_bottom - filter_h) // stride_h + 1
width_out = (fmap_w + pad_left + pad_right - filter_w) // stride_w + 1
d_out = (fmap_d + pad_head + pad_tail - filter_d) // stride_d + 1
config = CUBE_MKN[in_dtype]
block_size_k = config['mac'][1]
block_size_m = config['mac'][0]
opti_h_flag = filter_h == 1 and stride_h > 1
TENSOR_MAP["opti_h_flag"] = opti_h_flag
TENSOR_MAP["d_out"] = d_out
TENSOR_MAP["d_dim"] = tiling["block_dim"][-1]
fmap_fuse_shape = (batch_size * d_out, filter_d * fmap_c1, fmap_h, fmap_w,
fmap_c0)
fuse_fmap_tensor = get_fuse_fmap_tensor(fmap_fuse_shape,
fmap,
d_out,
filter_d,
stride_d,
stride_h,
pad_head,
tiling,
opti_h_flag,
cyclebuffer_flag,
tag=OP_TAG)
TENSOR_MAP["fmap_do_tensor"] = fuse_fmap_tensor
# set_fmatrix
# new data layout (N,C1,H,W,C0) -> (N,HoWo,C1,Hk,Wk,C0)
fmap_im2col_row_major_shape = (fmap_fuse_shape[0], height_out * width_out,
fmap_fuse_shape[1], filter_h, filter_w,
fmap_c0)
pad_hw = pads[2:]
stride_hw = [stride_h, stride_w]
fmap_im2col_row_major_res = im2col_row_major(fmap_im2col_row_major_shape,
fuse_fmap_tensor,
filter_w,
pad_hw,
stride_hw,
fmap.dtype,
opti_h_flag,
tag=OP_TAG)
TENSOR_MAP["fmap_im2col_row_major_res"] = fmap_im2col_row_major_res
# im2col
# small-z-big-Z
howo_mad = (height_out * width_out + block_size_m -
1) // block_size_m * block_size_m
# new data layout (N,HoWo,C1,Hk,Wk,C0) -> (N,loop_m,loop_k,cube_m,cube_k)
fmap_im2col_fractal_shape = (fmap_fuse_shape[0], howo_mad // block_size_m,
fmap_fuse_shape[1] * filter_h * filter_w,
block_size_m, block_size_k)
fmap_im2col_fractal_res = im2col_fractal_3d(fmap_im2col_fractal_shape,
fmap_im2col_row_major_res,
fmap_c1,
d_out,
filter_d,
stride_d,
cyclebuffer_flag,
tag=OP_TAG)
TENSOR_MAP["fmap_im2col_fractal_res"] = fmap_im2col_fractal_res
config = CUBE_MKN[res_dtype]
l0a_load2d_flag = get_load2d_flag(stride_dhw, pads, shape_filter_ncdhw)
TENSOR_MAP["l0a_load2d_flag"] = l0a_load2d_flag
mad_shape = (fmap_fuse_shape[0],
(filter_cout + config['mac'][2] - 1) // (config['mac'][2]),
howo_mad, config['mac'][2])
config = CUBE_MKN[w_dtype]
if l0a_load2d_flag:
c_col = mad_by_load2d(mad_shape, fmap, weight, config, mad_dtype, pads,
stride_d, d_out, filter_d)
else:
c_col = mad(mad_shape, fmap_im2col_fractal_res, weight, config,
mad_dtype, pads, stride_d, d_out, fmap_d, filter_d)
TENSOR_MAP["c_col"] = c_col
conv_shape = (fmap_fuse_shape[0],
(filter_cout + config['mac'][2] - 1) // (config['mac'][2]),
height_out * width_out, config['mac'][2])
DIM_MAP["out_img_shape"] = conv_shape
c_ub = tvm.compute(mad_shape,
lambda n, i, j, k: c_col(n, i, j, k).astype(res_dtype),
name='C_UB',
tag=OP_TAG + "C_UB",
attrs={
'true_shape': conv_shape,
'sqrt': False,
'res_dtype': res_dtype,
'kernel_h': filter_h,
'kernel_w': filter_w,
'padding': pads[2:],
'stride': stride_dhw[1:]
})
TENSOR_MAP["c_ub"] = c_ub
dim_map1 = im2col_dim(te_util.shape_to_list(fuse_fmap_tensor.shape),
shape_filter_ncdhw, list(pads), list(stride_dhw),
config)
dim_map_copy = DIM_MAP.copy()
dim_map_copy.update(dim_map1)
Conv3DParam.TENSOR_MAP = TENSOR_MAP
Conv3DParam.dim_map = dim_map_copy
Conv3DParam.tiling = None
return c_ub
def get_fuse_fmap_tensor(fmap_fuse_shape, fmap, d_out, kernel_d, stride_d,
stride_h, pad_head, tiling, opti_h_flag,
cyclebuffer_flag, tag):
"""
calculate expand tensor
Parameters
----------
fmap_fuse_shape : the shape of new tensor
fmap : the input feature
d_out : the D dimension of out shape
stride_d : the D dimension of strides
pad_head : the pad head of pads
tag : the tensor tag
Returns
-------
new tensor
"""
_, fmap_d, fmap_c1, _, _, _ = fmap.shape
# multi core
d_dim = tiling["block_dim"][-1]
if cyclebuffer_flag:
if opti_h_flag:
fmap_fuse_shape = list(fmap_fuse_shape)
fmap_fuse_shape[2] = (fmap_fuse_shape[2] - 1) // stride_h + 1
fuse_fmap_tensor = tvm.compute(
fmap_fuse_shape,
lambda n, dc, h, w, c0: tvm.select(
tvm.all(
n % d_out * stride_d + (dc // fmap_c1 + n % d_out *
(kernel_d - stride_d)) % kernel_d -
pad_head >= 0, n % d_out * stride_d + \
(dc // fmap_c1 + n % d_out * \
(kernel_d - stride_d)) % kernel_d - pad_head < fmap_d,
tvm.any(
n % d_out * stride_d + \
(dc // fmap_c1 + n % d_out * \
(kernel_d - stride_d)) % kernel_d > \
(n % d_out - 1) * stride_d + kernel_d - 1, n % \
(d_out // d_dim) == 0)),
fmap(
n // d_out, n % d_out * stride_d +\
(dc // fmap_c1 + n % d_out * (kernel_d - stride_d)\
) % kernel_d - pad_head, dc % fmap_c1, h*stride_h, w, c0)),
name='fuse_fmap_tensor',
tag=tag + 'fuse_fmap_tensor')
else:
fuse_fmap_tensor = tvm.compute(
fmap_fuse_shape,
lambda n, dc, h, w, c0: tvm.select(
tvm.all(
n % d_out * stride_d + (dc // fmap_c1 + n % d_out *
(kernel_d - stride_d)) % kernel_d -
pad_head >= 0, n % d_out * stride_d + \
(dc // fmap_c1 + n % d_out * \
(kernel_d - stride_d)) % kernel_d - pad_head < fmap_d,
tvm.any(
n % d_out * stride_d + \
(dc // fmap_c1 + n % d_out * \
(kernel_d - stride_d)) % kernel_d > \
(n % d_out - 1) * stride_d + kernel_d - 1, n % \
(d_out // d_dim) == 0)),
fmap(
n // d_out, n % d_out * stride_d +\
(dc // fmap_c1 + n % d_out * (kernel_d - stride_d)\
) % kernel_d - pad_head, dc % fmap_c1, h, w, c0)),
name='fuse_fmap_tensor',
tag=tag + 'fuse_fmap_tensor')
else:
if opti_h_flag:
fmap_fuse_shape = list(fmap_fuse_shape)
fmap_fuse_shape[2] = (fmap_fuse_shape[2] - 1) // stride_h + 1
fuse_fmap_tensor = tvm.compute(
fmap_fuse_shape,
lambda n, dc, h, w, c0: tvm.select(
tvm.all((n % d_out) * stride_d - pad_head + dc // fmap_c1 >= 0,
(n % d_out) * \
stride_d - pad_head + dc // fmap_c1 < fmap_d),
fmap(n // d_out, (n % d_out) * stride_d - pad_head + dc // \
fmap_c1, dc % fmap_c1, h*stride_h, w, c0)),
name='fuse_fmap_tensor',
tag=tag + 'fuse_fmap_tensor')
else:
fuse_fmap_tensor = tvm.compute(
fmap_fuse_shape,
lambda n, dc, h, w, c0: tvm.select(
tvm.all((n % d_out) * stride_d - pad_head + dc // fmap_c1 >= 0,
(n % d_out) * \
stride_d - pad_head + dc // fmap_c1 < fmap_d),
fmap(n // d_out, (n % d_out) * stride_d - pad_head + dc // \
fmap_c1, dc % fmap_c1, h, w, c0)),
name='fuse_fmap_tensor',
tag=tag + 'fuse_fmap_tensor')
return fuse_fmap_tensor
def mad_by_load2d(mad_shape, fmap, weight, config, mad_dtype, pads, stride_d,
d_out, filter_d):
"""
calculate mad
Parameters
----------
mad_shape : the shape of new tensor
fmap : the input feature
weight : the input filter
config : the MKN config
mad_dtype : the compute dtype of mad
Returns
-------
new tensor
"""
fmap_shape = te_util.shape_to_list(fmap.shape)
batch_size = fmap_shape[0]
fmap_d = fmap_shape[1]
fmap_c1 = fmap_shape[2]
fmap_h = fmap_shape[3]
fmap_w = fmap_shape[4]
fmap_c0 = fmap_shape[5]
shape_al1_load2d = (batch_size * fmap_d, fmap_c1, fmap_h * fmap_w, fmap_c0)
al1_load2d = tvm.compute(
shape_al1_load2d,
lambda n, c1, m, c0: fmap(n // fmap_d, n % fmap_d, c1, m // fmap_w, m %
fmap_w, c0),
name=OP_TAG + "al1_load2d")
TENSOR_MAP["al1_load2d"] = al1_load2d
hw_dim = te_util.int_ceil_div(fmap_h * fmap_w,
CUBE_MKN[fmap.dtype]["mac"][0])
shape_al0_load2d = (batch_size * fmap_d, hw_dim, fmap_c1,
CUBE_MKN[fmap.dtype]["mac"][0], fmap_c0)
al0_load2d = tvm.compute(
shape_al0_load2d,
lambda n, m1, c1, m0, c0: al1_load2d(
n, c1, m0 + CUBE_MKN[fmap.dtype]["mac"][0] * m1, c0),
name=OP_TAG + "al0_load2d")
TENSOR_MAP["al0_load2d"] = al0_load2d
c_col = mad(mad_shape, al0_load2d, weight, config, mad_dtype, pads,
stride_d, d_out, fmap_d, filter_d)
return c_col
def get_load2d_flag(stride, pads, shape_filter_ncdhw):
"""
calculate use load2d or not
Parameters
----------
stride : the input strides
pads : the input pads
shape_filter_ncdhw : the shape of filter
Returns
-------
True or False
"""
l0a_load2d_flag = False
_, _, filter_d, filter_h, filter_w = shape_filter_ncdhw
if list(pads) == [0, 0, 0, 0, 0, 0] and list(stride) == [1, 1, 1] and \
[filter_d, filter_h, filter_w] == [1, 1, 1]:
l0a_load2d_flag = True
return l0a_load2d_flag
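# Illustrative case (hypothetical shapes): a pointwise 1x1x1 kernel with zero
# padding and unit strides needs no im2col expansion, so load2d is selected.
#   get_load2d_flag([1, 1, 1], [0, 0, 0, 0, 0, 0], (16, 16, 1, 1, 1))  # -> True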
def get_cyclebuffer_flag(tiling, shape_w, w_dtype, channel_c1, stride_d,
l0a_load2d_flag):
"""
calculate whether to do cyclebuffer
Parameters
----------
tiling : tiling_new
shape_w : filter shape
channel_c1 : fmap c1
stride_d : d channel stride
l0a_load2d_flag : whether fmap to load2d
return
----------
cyclebuffer_flag
"""
cyclebuffer_flag = False
filter_d = shape_w[1]
cyc_size = 0
if tiling["AL1_shape"]:
cyc_size = int(tiling["AL1_shape"][0] * tiling["AL1_shape"][-1] // \
(shape_w[-3] * shape_w[-2] * CUBE_MKN[w_dtype]['mac'][1]))
if cyc_size == filter_d * channel_c1:
cyclebuffer_flag = True
if l0a_load2d_flag or filter_d <= stride_d:
cyclebuffer_flag = False
return cyclebuffer_flag
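# Reading of the condition above (tiling values are hypothetical): when
# AL1_shape holds exactly filter_d * c1 k-slices, the L1 data loaded for one
# output depth can be rotated and reused for the next, so the flag is set
# unless load2d applies or filter_d <= stride_d (adjacent outputs share no
# input depth).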
def im2col_dim(shape_fmap, shape_filter_ncdhw, pads, stride_dhw, config):
"""
calculate shape
Parameters
----------
shape_fmap : shape of feature
shape_filter_ncdhw : shape of filter
pads : the padding shape
stride_dhw : the stride value
config : the MKN infor
Returns
-------
img_shape, fmap_matrix_dim
"""
mac_dim = config['mac']
batch, fmap_c1, fmap_h, fmap_w, fmap_c0 = shape_fmap
filter_cout, _, _, filter_h, filter_w = shape_filter_ncdhw
_, _, pad_top, pad_bottom, pad_left, pad_right = pads
out_h = ((fmap_h + pad_top + pad_bottom) - filter_h) // stride_dhw[1] + 1
out_w = ((fmap_w + pad_left + pad_right) - filter_w) // stride_dhw[2] + 1
fmap_valid_dim = (batch, out_h * out_w,
fmap_c1 * filter_h * filter_w * fmap_c0)
fmap_matrix_dim = (fmap_valid_dim[0],
((fmap_valid_dim[-2] + mac_dim[0] - 1) // mac_dim[0]),
((fmap_valid_dim[-1] + mac_dim[1] - 1) // mac_dim[1]),
mac_dim[0], mac_dim[1])
filter_valid_dim = (fmap_valid_dim[-1], filter_cout)
filter_matrix_dim = ((filter_valid_dim[-2] + mac_dim[1] - 1) // mac_dim[1],
(filter_valid_dim[-1] + mac_dim[2] - 1) // mac_dim[2],
mac_dim[2], mac_dim[1])
return {
"img_shape": shape_fmap,
"fmap_matrix_dim": fmap_matrix_dim,
"filter_matrix_dim": filter_matrix_dim,
"shape_filter_ncdhw": shape_filter_ncdhw
}
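# Worked shape example (hypothetical float16 case with mac = [16, 16, 16]):
# shape_fmap = (1, 1, 28, 28, 16), filter NCDHW = (16, 16, 1, 3, 3), zero
# pads, unit strides -> out_h = out_w = 26, fmap_valid_dim = (1, 676, 144),
# fmap_matrix_dim = (1, 43, 9, 16, 16), filter_matrix_dim = (9, 1, 16, 16).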
def mad(mad_shape, fmap, weight, config, mad_dtype, pads, stride_d, d_out,
fmap_d, filter_d):
"""
calculate mad result tensor
Parameters
----------
mad_shape : shape of mad result
fmap : feature map
weight : filter
config: the config of cube
mad_dtype: dtype of mad output
pads: input pad
stride_d: stride for d channel
d_out: output d channel
fmap_d: input fmap d channel
filter_d: input filter d channel
Returns
-------
mad result tensor
"""
block_size = config['mac'][1]
block_size_m = config['mac'][0]
pad_head = pads[0]
c1khkw = weight.shape[0] // filter_d
axis_k1 = tvm.reduce_axis((0, weight.shape[0]), name='k1')
axis_k0 = tvm.reduce_axis((0, block_size), name='k0')
if mad_dtype in ["float16", "int32"]:
mode = 'f162f16'
else:
mode = 'f162f32'
c_col = tvm.compute(
mad_shape,
lambda n, index_j1, i, index_j0: tvm.sum(
(fmap[n, i // block_size_m, axis_k1, i % block_size_m, axis_k0] *
weight[axis_k1, index_j1, index_j0, axis_k0]).astype(mad_dtype),
axis=[axis_k1, axis_k0]),
name='mad1',
tag=OP_TAG + "c_col",
attrs={
'mode': mode,
'pad_head': pad_head,
'fmap_d': fmap_d,
'stride_d': stride_d,
'd_out': d_out
})
return c_col
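# In matrix terms: for each fused batch n this reduction is a
# [howo_mad, K] x [K, cout] GEMM with K = (filter_d * c1) * kh * kw * c0 on
# the regular path, tiled into 16x16 cube blocks; the d-axis bookkeeping
# rides along in attrs for the scheduler.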
def bias_add(in_tensor0, in_tensor1):
"""
calculate conv res + bias in UB
Parameters
----------
in_tensor0: cnv res tensor
in_tensor1: bias vector
Returns
-------
in_tensor0+in_tensor1 tensor
"""
dim_map = {}
dim_map["out_img_shape"] = te_util.shape_to_list(in_tensor0.shape)
NAME_INDEX[0] += 1
with tvm.tag_scope('conv_vector_bias_add'):
c_add_vector = tvm.compute(
dim_map["out_img_shape"],
lambda *indice: in_tensor0(*indice) + in_tensor1(indice[
1] * CUBE_MKN[in_tensor0.dtype]['mac'][2] + indice[3]),
name='bias_add_vector' + "_cc_" + str(NAME_INDEX[0]))
return c_add_vector
def remove_pad(res, res_remove_pad_shape):
"""
remove pad
Parameters
----------
res: input tensor
res_remove_pad_shape: true shape
Returns
-------
res_remove_pad tensor
"""
NAME_INDEX[0] += 1
with tvm.tag_scope('conv_vector_remove_pad'):
res_tensor = tvm.compute(res_remove_pad_shape,
lambda *indice: res(*indice),
name='remove_pad' + "_cc_" +
str(NAME_INDEX[0]))
return res_tensor
@tvm.target.generic_func
def conv3d(data, weight, para_dict):
"""
conv
Parameters
----------
data: feature map
weight: filter
para_dict: dict of params
Returns
-------
tensor : res
"""
in_dtype = data.dtype
w_dtype = weight.dtype
bias_tensor = para_dict["bias_tensor"]
bias_flag = (bias_tensor is not None)
pads = para_dict["pads"]
pad_head, pad_tail, pad_top, pad_bottom, pad_left, pad_right = pads
pad_d = [pad_head, pad_tail]
pad_w = [pad_left, pad_right]
pad_h = [pad_top, pad_bottom]
stride_dhw = para_dict["stride_dhw"]
stride_d, stride_h, stride_w = stride_dhw
shape_filter_ncdhw = para_dict["shape_filter_ncdhw"]
filter_n, filter_c, filter_d, filter_h, filter_w = shape_filter_ncdhw
mad_dtype = para_dict["mad_dtype"]
res_dtype = para_dict["res_dtype"]
block_size_k = CUBE_MKN[w_dtype]['mac'][1]
filter_c1 = (filter_c + block_size_k - 1) // block_size_k
shape_w_ndc1hwc0 = (filter_n, filter_d, filter_c1, filter_h, filter_w,
block_size_k)
fmap_shape_ndc1hwc0 = te_util.shape_to_list(data.shape)
Conv3DParam.tiling_query_param = {
"fmap_shape_ndc1hwc0": fmap_shape_ndc1hwc0,
"shape_w_ndc1hwc0": shape_w_ndc1hwc0,
"in_dtype": in_dtype,
"w_dtype": w_dtype,
"res_dtype": res_dtype,
"mad_dtype": mad_dtype,
"padw": pad_w,
"padh": pad_h,
"padd": pad_d,
"strideh": stride_h,
"stridew": stride_w,
"strided": stride_d,
"bias_flag": bias_flag,
"default_tiling": False
}
tiling_new = tiling_query(a_shape=fmap_shape_ndc1hwc0,
b_shape=shape_w_ndc1hwc0,
a_dtype=in_dtype,
b_dtype=w_dtype,
c_dtype=res_dtype,
mad_dtype=mad_dtype,
padl=pad_w[0],
padr=pad_w[1],
padu=pad_h[0],
padd=pad_h[1],
padf=pad_d[0],
padb=pad_d[1],
strideh=stride_h,
stridew=stride_w,
strided=stride_d,
bias_flag=bias_flag,
op_tag="convolution_3d")
TENSOR_MAP["tiling_new"] = tiling_new
l0a_load2d_flag = get_load2d_flag(stride_dhw, pads, shape_filter_ncdhw)
cyclebuffer_flag = get_cyclebuffer_flag(tiling_new, shape_w_ndc1hwc0,
w_dtype, fmap_shape_ndc1hwc0[2],
stride_d, l0a_load2d_flag)
TENSOR_MAP["cyclebuffer_flag"] = cyclebuffer_flag
conv_res = cube_3d_compute(data,
weight,
mad_dtype,
res_dtype,
pads,
stride_dhw,
shape_filter_ncdhw,
cyclebuffer_flag,
bias=False,
tiling=tiling_new)
res = conv_res
if bias_flag:
res = bias_add(conv_res, bias_tensor)
# Remove H-aligned data in the output shape
res_remove_pad_shape = list(res.shape)
res_remove_pad_shape[2] = conv_res.op.attrs['true_shape'][2].value
res_remove_pad = remove_pad(res, res_remove_pad_shape)
return res_remove_pad
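# Hypothetical driver sketch (all names and shapes are illustrative, and a
# real run needs the Ascend tiling/build toolchain):
#   data = tvm.placeholder((1, 8, 1, 32, 32, 16), name="Fmap", dtype="float16")
#   weight = tvm.placeholder((27, 1, 16, 16), name="Filter", dtype="float16")
#   res = conv3d(data, weight, {"bias_tensor": None, "pads": [0] * 6,
#                               "stride_dhw": [1, 1, 1],
#                               "shape_filter_ncdhw": [16, 16, 3, 3, 3],
#                               "mad_dtype": "float32",
#                               "res_dtype": "float16"})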
| [
"[email protected]"
] | |
de3a0c28cc1023aa05a34f5fd437c0431ba35fee | 781e2692049e87a4256320c76e82a19be257a05d | /all_data/exercism_data/python/leap/42142e465a234cfaa158392bdda680b9.py | 2e0e27725c2144af3babc0a50be01d1f5932c483 | [] | no_license | itsolutionscorp/AutoStyle-Clustering | 54bde86fe6dbad35b568b38cfcb14c5ffaab51b0 | be0e2f635a7558f56c61bc0b36c6146b01d1e6e6 | refs/heads/master | 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null | UTF-8 | Python | false | false | 353 | py | def is_leap_year(year):
if year % 4 == 0:
if year % 100 == 0:
if year % 400 == 0:
return True
return False
return True
return False
"""
on every year that is evenly divisible by 4
except every year that is evenly divisible by 100
unless the year is also evenly divisible by 400
"""
| [
"[email protected]"
] | |
b5e435d58d0527b0a10b2c3d2ddb08609b44daa9 | da9c4a9a92d49d2fb2983a54e0f64c2a1ce8aa19 | /symphony/cli/pysymphony/graphql/input/add_image.py | 8a71be0cda0a9d98d166adf96a4a1fc7a8c266e2 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | rohan-prasad/magma | 347c370347724488215a0783504788eac41d8ec7 | 2c1f36d2fd04eae90366cc8b314eaab656d7f8ad | refs/heads/master | 2022-10-14T14:08:14.067593 | 2020-06-11T23:52:03 | 2020-06-11T23:54:27 | 271,671,835 | 0 | 0 | NOASSERTION | 2020-06-12T00:20:23 | 2020-06-12T00:17:39 | null | UTF-8 | Python | false | false | 748 | py | #!/usr/bin/env python3
# @generated AUTOGENERATED file. Do not Change!
from dataclasses import dataclass
from datetime import datetime
from functools import partial
from gql.gql.datetime_utils import DATETIME_FIELD
from numbers import Number
from typing import Any, Callable, List, Mapping, Optional
from dataclasses_json import DataClassJsonMixin
from gql.gql.enum_utils import enum_field
from ..enum.image_entity import ImageEntity
@dataclass
class AddImageInput(DataClassJsonMixin):
entityType: ImageEntity = enum_field(ImageEntity)
entityId: str
imgKey: str
fileName: str
fileSize: int
modified: datetime = DATETIME_FIELD
contentType: str
category: Optional[str] = None
annotation: Optional[str] = None
| [
"[email protected]"
] | |
ce7bfaf85f5e55097d06bc2990ecc1757aabd37a | 673f9b85708affe260b892a4eb3b1f6a0bd39d44 | /Botnets/Phases/Phase 2/Algorithms/Algorithms after PDG-2/PDG-2-Fase-2-ENV/lib/python3.6/site-packages/setuptools/sandbox.py | e46dfc8d25e8accf6fb08c13b878da1550e4738f | [
"MIT"
] | permissive | i2tResearch/Ciberseguridad_web | feee3fe299029bef96b158d173ce2d28ef1418e4 | e6cccba69335816442c515d65d9aedea9e7dc58b | refs/heads/master | 2023-07-06T00:43:51.126684 | 2023-06-26T00:53:53 | 2023-06-26T00:53:53 | 94,152,032 | 14 | 0 | MIT | 2023-09-04T02:53:29 | 2017-06-13T00:21:00 | Jupyter Notebook | UTF-8 | Python | false | false | 14,284 | py | import os
import sys
import tempfile
import operator
import functools
import itertools
import re
import contextlib
import pickle
import textwrap
from setuptools.extern import six
from setuptools.extern.six.moves import builtins, map
import pkg_resources.py31compat
from distutils.errors import DistutilsError
from pkg_resources import working_set
if sys.platform.startswith('java'):
import org.python.modules.posix.PosixModule as _os
else:
_os = sys.modules[os.name]
try:
_file = file
except NameError:
_file = None
_open = open
__all__ = [
"AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup",
]
def _execfile(filename, globals, locals=None):
"""
Python 3 implementation of execfile.
"""
mode = 'rb'
with open(filename, mode) as stream:
script = stream.read()
if locals is None:
locals = globals
code = compile(script, filename, 'exec')
exec(code, globals, locals)
@contextlib.contextmanager
def save_argv(repl=None):
saved = sys.argv[:]
if repl is not None:
sys.argv[:] = repl
try:
yield saved
finally:
sys.argv[:] = saved
@contextlib.contextmanager
def save_path():
saved = sys.path[:]
try:
yield saved
finally:
sys.path[:] = saved
@contextlib.contextmanager
def override_temp(replacement):
"""
Monkey-patch tempfile.tempdir with replacement, ensuring it exists
"""
pkg_resources.py31compat.makedirs(replacement, exist_ok=True)
saved = tempfile.tempdir
tempfile.tempdir = replacement
try:
yield
finally:
tempfile.tempdir = saved
@contextlib.contextmanager
def pushd(target):
saved = os.getcwd()
os.chdir(target)
try:
yield saved
finally:
os.chdir(saved)
class UnpickleableException(Exception):
"""
An exception representing another Exception that could not be pickled.
"""
@staticmethod
def dump(type, exc):
"""
Always return a dumped (pickled) type and exc. If exc can't be pickled,
wrap it in UnpickleableException first.
"""
try:
return pickle.dumps(type), pickle.dumps(exc)
except Exception:
# get UnpickleableException inside the sandbox
from setuptools.sandbox import UnpickleableException as cls
return cls.dump(cls, cls(repr(exc)))
class ExceptionSaver:
"""
A Context Manager that will save an exception, serialized, and restore it
later.
"""
def __enter__(self):
return self
def __exit__(self, type, exc, tb):
if not exc:
return
# dump the exception
self._saved = UnpickleableException.dump(type, exc)
self._tb = tb
# suppress the exception
return True
def resume(self):
"restore and re-raise any exception"
if '_saved' not in vars(self):
return
type, exc = map(pickle.loads, self._saved)
six.reraise(type, exc, self._tb)
@contextlib.contextmanager
def save_modules():
"""
Context in which imported modules are saved.
Translates exceptions internal to the context into the equivalent exception
outside the context.
"""
saved = sys.modules.copy()
with ExceptionSaver() as saved_exc:
yield saved
sys.modules.update(saved)
# remove any modules imported since
del_modules = (
mod_name for mod_name in sys.modules
if mod_name not in saved
# exclude any encodings modules. See #285
and not mod_name.startswith('encodings.')
)
_clear_modules(del_modules)
saved_exc.resume()
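# Hypothetical usage sketch: modules imported inside the block are dropped
# from sys.modules on exit (except encodings.*), and an exception raised
# inside is pickled, then re-raised after the module state is restored:
#   with save_modules():
#       import sqlite3  # forgotten again after the block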
def _clear_modules(module_names):
for mod_name in list(module_names):
del sys.modules[mod_name]
@contextlib.contextmanager
def save_pkg_resources_state():
saved = pkg_resources.__getstate__()
try:
yield saved
finally:
pkg_resources.__setstate__(saved)
@contextlib.contextmanager
def setup_context(setup_dir):
temp_dir = os.path.join(setup_dir, 'temp')
with save_pkg_resources_state():
with save_modules():
hide_setuptools()
with save_path():
with save_argv():
with override_temp(temp_dir):
with pushd(setup_dir):
# ensure setuptools commands are available
__import__('setuptools')
yield
def _needs_hiding(mod_name):
"""
>>> _needs_hiding('setuptools')
True
>>> _needs_hiding('pkg_resources')
True
>>> _needs_hiding('setuptools_plugin')
False
>>> _needs_hiding('setuptools.__init__')
True
>>> _needs_hiding('distutils')
True
>>> _needs_hiding('os')
False
>>> _needs_hiding('Cython')
True
"""
pattern = re.compile(r'(setuptools|pkg_resources|distutils|Cython)(\.|$)')
return bool(pattern.match(mod_name))
def hide_setuptools():
"""
Remove references to setuptools' modules from sys.modules to allow the
invocation to import the most appropriate setuptools. This technique is
necessary to avoid issues such as #315 where setuptools upgrading itself
would fail to find a function declared in the metadata.
"""
modules = filter(_needs_hiding, sys.modules)
_clear_modules(modules)
def run_setup(setup_script, args):
"""Run a distutils setup script, sandboxed in its directory"""
setup_dir = os.path.abspath(os.path.dirname(setup_script))
with setup_context(setup_dir):
try:
sys.argv[:] = [setup_script] + list(args)
sys.path.insert(0, setup_dir)
# reset to include setup dir, w/clean callback list
working_set.__init__()
working_set.callbacks.append(lambda dist: dist.activate())
# __file__ should be a byte string on Python 2 (#712)
dunder_file = (
setup_script
if isinstance(setup_script, str) else
setup_script.encode(sys.getfilesystemencoding())
)
with DirectorySandbox(setup_dir):
ns = dict(__file__=dunder_file, __name__='__main__')
_execfile(setup_script, ns)
except SystemExit as v:
if v.args and v.args[0]:
raise
# Normal exit, just return
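# Hypothetical usage sketch (the path is illustrative):
#   run_setup('/tmp/example-pkg/setup.py', ['--name'])
# runs the script as __main__ with cwd, sys.argv, sys.path and tempfile
# pointed at the setup dir, and filesystem writes confined there by
# DirectorySandbox.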
class AbstractSandbox:
"""Wrap 'os' module and 'open()' builtin for virtualizing setup scripts"""
_active = False
def __init__(self):
self._attrs = [
name for name in dir(_os)
if not name.startswith('_') and hasattr(self, name)
]
def _copy(self, source):
for name in self._attrs:
setattr(os, name, getattr(source, name))
def __enter__(self):
self._copy(self)
if _file:
builtins.file = self._file
builtins.open = self._open
self._active = True
def __exit__(self, exc_type, exc_value, traceback):
self._active = False
if _file:
builtins.file = _file
builtins.open = _open
self._copy(_os)
def run(self, func):
"""Run 'func' under os sandboxing"""
with self:
return func()
def _mk_dual_path_wrapper(name):
original = getattr(_os, name)
def wrap(self, src, dst, *args, **kw):
if self._active:
src, dst = self._remap_pair(name, src, dst, *args, **kw)
return original(src, dst, *args, **kw)
return wrap
for name in ["rename", "link", "symlink"]:
if hasattr(_os, name):
locals()[name] = _mk_dual_path_wrapper(name)
def _mk_single_path_wrapper(name, original=None):
original = original or getattr(_os, name)
def wrap(self, path, *args, **kw):
if self._active:
path = self._remap_input(name, path, *args, **kw)
return original(path, *args, **kw)
return wrap
if _file:
_file = _mk_single_path_wrapper('file', _file)
_open = _mk_single_path_wrapper('open', _open)
for name in [
"stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir",
"remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat",
"startfile", "mkfifo", "mknod", "pathconf", "access"
]:
if hasattr(_os, name):
locals()[name] = _mk_single_path_wrapper(name)
def _mk_single_with_return(name):
original = getattr(_os, name)
def wrap(self, path, *args, **kw):
if self._active:
path = self._remap_input(name, path, *args, **kw)
return self._remap_output(name, original(path, *args, **kw))
return original(path, *args, **kw)
return wrap
for name in ['readlink', 'tempnam']:
if hasattr(_os, name):
locals()[name] = _mk_single_with_return(name)
def _mk_query(name):
original = getattr(_os, name)
def wrap(self, *args, **kw):
retval = original(*args, **kw)
if self._active:
return self._remap_output(name, retval)
return retval
return wrap
for name in ['getcwd', 'tmpnam']:
if hasattr(_os, name):
locals()[name] = _mk_query(name)
def _validate_path(self, path):
"""Called to remap or validate any path, whether input or output"""
return path
def _remap_input(self, operation, path, *args, **kw):
"""Called for path inputs"""
return self._validate_path(path)
def _remap_output(self, operation, path):
"""Called for path outputs"""
return self._validate_path(path)
def _remap_pair(self, operation, src, dst, *args, **kw):
"""Called for path pairs like rename, link, and symlink operations"""
return (
self._remap_input(operation + '-from', src, *args, **kw),
self._remap_input(operation + '-to', dst, *args, **kw)
)
if hasattr(os, 'devnull'):
_EXCEPTIONS = [os.devnull]
else:
_EXCEPTIONS = []
class DirectorySandbox(AbstractSandbox):
"""Restrict operations to a single subdirectory - pseudo-chroot"""
write_ops = dict.fromkeys([
"open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir",
"utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam",
])
_exception_patterns = [
# Allow lib2to3 to attempt to save a pickled grammar object (#121)
r'.*lib2to3.*\.pickle$',
]
"exempt writing to paths that match the pattern"
def __init__(self, sandbox, exceptions=_EXCEPTIONS):
self._sandbox = os.path.normcase(os.path.realpath(sandbox))
self._prefix = os.path.join(self._sandbox, '')
self._exceptions = [
os.path.normcase(os.path.realpath(path))
for path in exceptions
]
AbstractSandbox.__init__(self)
def _violation(self, operation, *args, **kw):
from setuptools.sandbox import SandboxViolation
raise SandboxViolation(operation, args, kw)
if _file:
def _file(self, path, mode='r', *args, **kw):
if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
self._violation("file", path, mode, *args, **kw)
return _file(path, mode, *args, **kw)
def _open(self, path, mode='r', *args, **kw):
if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
self._violation("open", path, mode, *args, **kw)
return _open(path, mode, *args, **kw)
def tmpnam(self):
self._violation("tmpnam")
def _ok(self, path):
active = self._active
try:
self._active = False
realpath = os.path.normcase(os.path.realpath(path))
return (
self._exempted(realpath)
or realpath == self._sandbox
or realpath.startswith(self._prefix)
)
finally:
self._active = active
def _exempted(self, filepath):
start_matches = (
filepath.startswith(exception)
for exception in self._exceptions
)
pattern_matches = (
re.match(pattern, filepath)
for pattern in self._exception_patterns
)
candidates = itertools.chain(start_matches, pattern_matches)
return any(candidates)
def _remap_input(self, operation, path, *args, **kw):
"""Called for path inputs"""
if operation in self.write_ops and not self._ok(path):
self._violation(operation, os.path.realpath(path), *args, **kw)
return path
def _remap_pair(self, operation, src, dst, *args, **kw):
"""Called for path pairs like rename, link, and symlink operations"""
if not self._ok(src) or not self._ok(dst):
self._violation(operation, src, dst, *args, **kw)
return (src, dst)
def open(self, file, flags, mode=0o777, *args, **kw):
"""Called for low-level os.open()"""
if flags & WRITE_FLAGS and not self._ok(file):
self._violation("os.open", file, flags, mode, *args, **kw)
return _os.open(file, flags, mode, *args, **kw)
WRITE_FLAGS = functools.reduce(
operator.or_, [
getattr(_os, a, 0) for a in
"O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()]
)
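# Example: os.O_WRONLY | os.O_CREAT intersects WRITE_FLAGS, so such an
# os.open() is validated against the sandbox, while a plain read
# (os.O_RDONLY == 0) passes straight through.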
class SandboxViolation(DistutilsError):
"""A setup script attempted to modify the filesystem outside the sandbox"""
tmpl = textwrap.dedent("""
SandboxViolation: {cmd}{args!r} {kwargs}
The package setup script has attempted to modify files on your system
that are not within the EasyInstall build area, and has been aborted.
This package cannot be safely installed by EasyInstall, and may not
support alternate installation locations even if you run its setup
script by hand. Please inform the package's author and the EasyInstall
maintainers to find out if a fix or workaround is available.
""").lstrip()
def __str__(self):
cmd, args, kwargs = self.args
return self.tmpl.format(**locals())
| [
"[email protected]"
] | |
7c68e9555011e76ecb807ab9b5340bbc994a8aca | 64bf39b96a014b5d3f69b3311430185c64a7ff0e | /intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/fortinet/fortimanager/plugins/modules/fmgr_pkg_firewall_policy_vpndstnode.py | cc4c839796dde50386c6787ae3951a868ea8cab2 | [
"MIT"
] | permissive | SimonFangCisco/dne-dna-code | 7072eba7da0389e37507b7a2aa5f7d0c0735a220 | 2ea7d4f00212f502bc684ac257371ada73da1ca9 | refs/heads/master | 2023-03-10T23:10:31.392558 | 2021-02-25T15:04:36 | 2021-02-25T15:04:36 | 342,274,373 | 0 | 0 | MIT | 2021-02-25T14:39:22 | 2021-02-25T14:39:22 | null | UTF-8 | Python | false | false | 7,919 | py | #!/usr/bin/python
from __future__ import absolute_import, division, print_function
# Copyright 2019-2020 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fmgr_pkg_firewall_policy_vpndstnode
short_description: no description
description:
- This module is able to configure a FortiManager device.
- Examples include all parameters and values which need to be adjusted to data sources before usage.
version_added: "2.10"
author:
- Link Zheng (@chillancezen)
- Jie Xue (@JieX19)
- Frank Shen (@fshen01)
- Hongbin Lu (@fgtdev-hblu)
notes:
- Running in workspace locking mode is supported in this FortiManager module, the top
level parameters workspace_locking_adom and workspace_locking_timeout help do the work.
- To create or update an object, use state present directive.
- To delete an object, use state absent directive.
- Normally, running one module can fail when a non-zero rc is returned. you can also override
the conditions to fail or succeed with parameters rc_failed and rc_succeeded
options:
bypass_validation:
description: only set to True when module schema diffs with FortiManager API structure, module continues to execute without validating parameters
required: false
type: bool
default: false
workspace_locking_adom:
description: the adom to lock for FortiManager running in workspace mode, the value can be global and others including root
required: false
type: str
workspace_locking_timeout:
description: the maximum time in seconds to wait for other user to release the workspace lock
required: false
type: int
default: 300
state:
description: the directive to create, update or delete an object
type: str
required: true
choices:
- present
- absent
rc_succeeded:
description: the rc codes list with which the conditions to succeed will be overriden
type: list
required: false
rc_failed:
description: the rc codes list with which the conditions to fail will be overriden
type: list
required: false
adom:
description: the parameter (adom) in requested url
type: str
required: true
pkg:
description: the parameter (pkg) in requested url
type: str
required: true
policy:
description: the parameter (policy) in requested url
type: str
required: true
pkg_firewall_policy_vpndstnode:
description: the top level parameters set
required: false
type: dict
suboptions:
host:
type: str
description: no description
seq:
type: int
description: no description
subnet:
type: str
description: no description
'''
EXAMPLES = '''
- hosts: fortimanager-inventory
collections:
- fortinet.fortimanager
connection: httpapi
vars:
ansible_httpapi_use_ssl: True
ansible_httpapi_validate_certs: False
ansible_httpapi_port: 443
tasks:
- name: no description
fmgr_pkg_firewall_policy_vpndstnode:
bypass_validation: False
workspace_locking_adom: <value in [global, custom adom including root]>
workspace_locking_timeout: 300
rc_succeeded: [0, -2, -3, ...]
rc_failed: [-2, -3, ...]
adom: <your own value>
pkg: <your own value>
policy: <your own value>
state: <value in [present, absent]>
pkg_firewall_policy_vpndstnode:
host: <value of string>
seq: <value of integer>
subnet: <value of string>
'''
RETURN = '''
request_url:
description: The full url requested
returned: always
type: str
sample: /sys/login/user
response_code:
description: The status of api request
returned: always
type: int
sample: 0
response_message:
description: The descriptive message of the api response
type: str
returned: always
sample: OK.
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible_collections.fortinet.fortimanager.plugins.module_utils.napi import NAPIManager
from ansible_collections.fortinet.fortimanager.plugins.module_utils.napi import check_galaxy_version
from ansible_collections.fortinet.fortimanager.plugins.module_utils.napi import check_parameter_bypass
def main():
jrpc_urls = [
'/pm/config/adom/{adom}/pkg/{pkg}/firewall/policy/{policy}/vpn_dst_node'
]
perobject_jrpc_urls = [
'/pm/config/adom/{adom}/pkg/{pkg}/firewall/policy/{policy}/vpn_dst_node/{vpn_dst_node}'
]
url_params = ['adom', 'pkg', 'policy']
module_primary_key = 'seq'
module_arg_spec = {
'bypass_validation': {
'type': 'bool',
'required': False,
'default': False
},
'workspace_locking_adom': {
'type': 'str',
'required': False
},
'workspace_locking_timeout': {
'type': 'int',
'required': False,
'default': 300
},
'rc_succeeded': {
'required': False,
'type': 'list'
},
'rc_failed': {
'required': False,
'type': 'list'
},
'state': {
'type': 'str',
'required': True,
'choices': [
'present',
'absent'
]
},
'adom': {
'required': True,
'type': 'str'
},
'pkg': {
'required': True,
'type': 'str'
},
'policy': {
'required': True,
'type': 'str'
},
'pkg_firewall_policy_vpndstnode': {
'required': False,
'type': 'dict',
'options': {
'host': {
'required': False,
'type': 'str'
},
'seq': {
'required': True,
'type': 'int'
},
'subnet': {
'required': False,
'type': 'str'
}
}
}
}
params_validation_blob = []
check_galaxy_version(module_arg_spec)
module = AnsibleModule(argument_spec=check_parameter_bypass(module_arg_spec, 'pkg_firewall_policy_vpndstnode'),
supports_check_mode=False)
fmgr = None
if module._socket_path:
connection = Connection(module._socket_path)
fmgr = NAPIManager(jrpc_urls, perobject_jrpc_urls, module_primary_key, url_params, module, connection, top_level_schema_name='data')
fmgr.validate_parameters(params_validation_blob)
fmgr.process_curd()
else:
module.fail_json(msg='MUST RUN IN HTTPAPI MODE')
module.exit_json(meta=module.params)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
02ab6ce4b0a5e3cc8f4857f83855687843f7324c | 29f65ef4059ba04c20558f3be36c06fe3879a8e6 | /c1/func.py | a173c0eafdf8e495d94cfb2dc8c14bfc80c1e2be | [] | no_license | kobe24shou/pythonwebdev | d9c912bd9304802069bc41345b054b065a173272 | c7c6c5af69e7d8783e5c8b15f75e9ca61ed6a03f | refs/heads/master | 2020-03-17T23:01:41.787573 | 2018-06-06T14:11:21 | 2018-06-06T14:11:21 | 134,028,551 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 417 | py | #!/usr/bin/env python
# -*-coding:utf-8-*-
def sum(x, y):
return x + y
def total(x, y, z):
sum_of_two = sum(x, y)
sum_of_three = sum(sum_of_two, z)
return sum_of_two, sum_of_three
# Define a main() function that takes no parameters and returns nothing
def main():
print("return of sum:", sum(4, 6))
x, y = total(1, 7, 10)
print("return of total:", x, ",", y)
if __name__ == '__main__':
main() | [
"[email protected]"
] | |
cc2f066e03ede1f54ac46b07dad2bb6621a03d10 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_287/ch149_2020_04_13_19_29_39_088791.py | 5364f78261dc18794532675b8b2199879ae98f9f | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 950 | py | salario_bruto = int(input('Seu salário bruto: '))
n_dependentes=int(input('Quantos dependentes: ' ))
def faixa(sal):
if sal <= 1045:
return 0.075
elif sal <= 2089.6:
return 0.09
elif sal <= 3134.4:
return 0.12
else:
return 0.14
if salario_bruto <= 6101.06:
b=salario_bruto-(faixa(salario_bruto)*sal)-(n_dependentes*189.59)
else:
b=salario_bruto-(671.12)-(n_dependentes*189.59)
def deducao(c):
if c<=1903.98:
return 0
elif c<=2826.65:
return 142.8
elif c<=3751.05:
return 354.8
elif c<=4664.68:
return 636.13
else:
return 869.36
def aliquota(d):
if d<=1903.98:
return 0
elif d<=2826.65:
return 0.075
elif d<=3751.05:
return 0.15
elif d<=4664.68:
return 0.225
else:
return 0.275
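# Worked example (hypothetical input): gross salary 3000 with 0 dependents.
# INSS band: faixa(3000) = 0.12, so b = 3000 - 360 - 0 = 2640.
# IRRF band: aliquota(2640) = 0.075 and deducao(2640) = 142.80,
# hence IRRF = 2640 * 0.075 - 142.80 = 55.20.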
IRRF = (b * aliquota(b)) - deducao(b)
print("Your IRRF withholding is:", IRRF) | [
"[email protected]"
] | |
643d38c0512e082e8c9a7018af157e92220e51da | f4b60f5e49baf60976987946c20a8ebca4880602 | /lib/python2.7/site-packages/acimodel-1.3_2j-py2.7.egg/cobra/modelimpl/fabric/rsnodetechsupp.py | 700fdc371962997c14a296087490fee59112e394 | [] | no_license | cqbomb/qytang_aci | 12e508d54d9f774b537c33563762e694783d6ba8 | a7fab9d6cda7fadcc995672e55c0ef7e7187696e | refs/heads/master | 2022-12-21T13:30:05.240231 | 2018-12-04T01:46:53 | 2018-12-04T01:46:53 | 159,911,666 | 0 | 0 | null | 2022-12-07T23:53:02 | 2018-12-01T05:17:50 | Python | UTF-8 | Python | false | false | 8,389 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2016 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class RsNodeTechSupP(Mo):
"""
A source relation to a utility that creates a summary report containing configuration information, logs, and diagnostic data that will help TAC in troubleshooting and resolving a technical issue.
"""
meta = NamedSourceRelationMeta("cobra.model.fabric.RsNodeTechSupP", "cobra.model.dbgexp.TechSupP")
meta.targetNameProps["name"] = "tnDbgexpTechSupPName"
meta.cardinality = SourceRelationMeta.N_TO_ONE
meta.moClassName = "fabricRsNodeTechSupP"
meta.rnFormat = "rsnodeTechSupP"
meta.category = MoCategory.RELATIONSHIP_TO_LOCAL
meta.label = "Tech Support Policy"
meta.writeAccessMask = 0x80000000001
meta.readAccessMask = 0x80000000001
meta.isDomainable = False
meta.isReadOnly = False
meta.isConfigurable = True
meta.isDeletable = False
meta.isContextRoot = False
meta.childClasses.add("cobra.model.fault.Inst")
meta.childClasses.add("cobra.model.fault.Counts")
meta.childClasses.add("cobra.model.health.Inst")
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Counts", "fltCnts"))
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Inst", "fault-"))
meta.childNamesAndRnPrefix.append(("cobra.model.health.Inst", "health"))
meta.parentClasses.add("cobra.model.fabric.SpNodePGrp")
meta.parentClasses.add("cobra.model.fabric.LeNodePGrp")
meta.superClasses.add("cobra.model.reln.Inst")
meta.superClasses.add("cobra.model.reln.To")
meta.superClasses.add("cobra.model.pol.NToRef")
meta.rnPrefixes = [
('rsnodeTechSupP', False),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "forceResolve", "forceResolve", 107, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = True
prop.defaultValueStr = "yes"
prop._addConstant("no", None, False)
prop._addConstant("yes", None, True)
meta.props.add("forceResolve", prop)
prop = PropMeta("str", "lcOwn", "lcOwn", 9, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "local"
prop._addConstant("implicit", "implicit", 4)
prop._addConstant("local", "local", 0)
prop._addConstant("policy", "policy", 1)
prop._addConstant("replica", "replica", 2)
prop._addConstant("resolveOnBehalf", "resolvedonbehalf", 3)
meta.props.add("lcOwn", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "monPolDn", "monPolDn", 14000, PropCategory.REGULAR)
prop.label = "Monitoring policy attached to this observable object"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("monPolDn", prop)
prop = PropMeta("str", "rType", "rType", 106, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 1
prop.defaultValueStr = "mo"
prop._addConstant("local", "local", 3)
prop._addConstant("mo", "mo", 1)
prop._addConstant("service", "service", 2)
meta.props.add("rType", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "state", "state", 103, PropCategory.REGULAR)
prop.label = "State"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "unformed"
prop._addConstant("cardinality-violation", "cardinality-violation", 5)
prop._addConstant("formed", "formed", 1)
prop._addConstant("invalid-target", "invalid-target", 4)
prop._addConstant("missing-target", "missing-target", 2)
prop._addConstant("unformed", "unformed", 0)
meta.props.add("state", prop)
prop = PropMeta("str", "stateQual", "stateQual", 104, PropCategory.REGULAR)
prop.label = "State Qualifier"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "none"
prop._addConstant("default-target", "default-target", 2)
prop._addConstant("mismatch-target", "mismatch-target", 1)
prop._addConstant("none", "none", 0)
meta.props.add("stateQual", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
prop = PropMeta("str", "tCl", "tCl", 11574, PropCategory.REGULAR)
prop.label = "Target-class"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 4112
prop.defaultValueStr = "dbgexpTechSupP"
prop._addConstant("dbgexpTechSupP", None, 4112)
prop._addConstant("unspecified", "unspecified", 0)
meta.props.add("tCl", prop)
prop = PropMeta("str", "tContextDn", "tContextDn", 4990, PropCategory.REGULAR)
prop.label = "Target-context"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("tContextDn", prop)
prop = PropMeta("str", "tDn", "tDn", 100, PropCategory.REGULAR)
prop.label = "Target-dn"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("tDn", prop)
prop = PropMeta("str", "tRn", "tRn", 4989, PropCategory.REGULAR)
prop.label = "Target-rn"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(0, 512)]
meta.props.add("tRn", prop)
prop = PropMeta("str", "tType", "tType", 4988, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "name"
prop._addConstant("all", "all", 2)
prop._addConstant("mo", "mo", 1)
prop._addConstant("name", "name", 0)
meta.props.add("tType", prop)
prop = PropMeta("str", "tnDbgexpTechSupPName", "tnDbgexpTechSupPName", 11573, PropCategory.REGULAR)
prop.label = "Name"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 64)]
prop.regex = ['[a-zA-Z0-9_.:-]+']
meta.props.add("tnDbgexpTechSupPName", prop)
prop = PropMeta("str", "uid", "uid", 8, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("uid", prop)
# Deployment Meta
meta.deploymentQuery = True
meta.deploymentType = "Ancestor"
def __init__(self, parentMoOrDn, markDirty=True, **creationProps):
namingVals = []
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"[email protected]"
] | |
b5be03bae05e2c31bc7c6b3158b111ca8d5fc886 | 791ce6452fb555f953ed3adb1966b59abc7c2dbb | /arxiv_net/dashboard/assets/style.py | c359a7598a7dde951f38e4ceb3c9d495568f0370 | [] | no_license | mirandrom/arxiv-net | d63b76006d7cde62a4ba4e623ffa3971436455f5 | 86bdc7a878c8d1d4a0135ddd2785cb59ca638937 | refs/heads/master | 2023-03-21T13:37:30.567726 | 2019-12-05T23:25:24 | 2019-12-05T23:25:24 | 222,019,331 | 2 | 0 | null | 2021-03-20T02:10:29 | 2019-11-15T23:26:34 | Python | UTF-8 | Python | false | false | 1,468 | py | card_style = {
"box-shadow": "0 4px 5px 0 rgba(0,0,0,0.14), 0 1px 10px 0 rgba(0,0,0,0.12), 0 2px 4px -1px rgba(0,0,0,0.3)"
}
BLUES = ["rgb(210, 218, 255)", "rgb(86, 117, 255)", "rgb(8, 31, 139)",
"rgb(105, 125, 215)", "rgb(84, 107, 208)",
"rgb(210, 210, 210)", "rgb(102, 103, 107)", "rgb(19, 23, 37)", ]
gradients = ['rgb(115, 132, 212)', 'rgb(169, 120, 219)', 'rgb(211, 107, 218)',
'rgb(237, 84, 199)',
'rgb(244, 70, 157)', 'rgb(240, 90, 127)', 'rgb(238, 117, 124)',
'rgb(230, 193, 119)']
tab_style = {
'borderLeft' : 'thin lightgrey solid',
'borderRight': 'thin lightgrey solid',
'borderTop' : '2px white solid',
'boxShadow' : 'inset 0px -1px 0px 0px lightgrey',
'fontSize' : '0.7vw',
'color' : 'black',
}
selected_style = {
'borderLeft' : 'thin lightgrey solid',
'borderRight' : 'thin lightgrey solid',
'background-image': f"linear-gradient(to top left, {','.join(gradients[:4])})",
'color' : 'white',
'fontSize' : '0.7vw',
}
container_style = {
# 'width' : '100%',
'verticalAlign': 'middle',
# 'display' : 'inlineBlock',
# 'boxShadow': 'inset 0px -1px 0px 0px lightgrey',
'alignItems' : 'center',
'padding' : '20px ',
}
# EXTERNAL CSS / JS
# app.css.config.serve_locally = True
# app.scripts.config.serve_locally = True
# app.config['suppress_callback_exceptions'] = True
| [
"[email protected]"
] | |
2aa7d7541d47bf6cbc5349b3cb975f5eb6b55412 | 29145db13229d311269f317bf2819af6cba7d356 | /may easy/maxVal.py | 91313d4b8983c93bfc3cfa232fbdb5c36ee8edff | [] | no_license | rocket3989/hackerEarth2019 | 802d1ca6fd03e80657cbe07a3f123e087679af4d | 42c0a7005e52c3762496220136cc5c1ee93571bb | refs/heads/master | 2021-07-05T01:32:42.203964 | 2020-12-22T03:40:20 | 2020-12-22T03:40:20 | 211,607,143 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 209 | py |
fib = [1, 1]
while True:
fib.append(fib[-1] + fib[-2])
if fib[-1] > 10 ** 18:
break
N = int(input())
for val in fib:
if val <= N:
continue
print(val)
break | [
"[email protected]"
] | |
969d2be266219f2b062ad7111a43f44275354f4d | 13b2f7ca4bbad32b0ce7d547399e6097580ae097 | /bfs+dfs/1260_DFS와 BFS.py | f69616d0dba433892b0d30f2d1628280ae3b9b5c | [] | no_license | hksoftcorn/review | dadbd3a4ee7961282bfefd697a97f6ccf78dbe83 | 474aef3747c135c54322ff28261d2a6812a3d9a0 | refs/heads/master | 2023-06-17T05:41:50.178831 | 2021-07-11T23:30:27 | 2021-07-11T23:30:27 | 385,072,542 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 708 | py | def dfs(v):
visited[v] = 1
for w in sorted(G[v]):
if not visited[w]:
dfs_path.append(w)
dfs(w)
def bfs(v):
visit = [0] * (N+1)
visit[v] = 1
Q = [v]
while Q:
current = Q.pop(0)
for w in sorted(G[current]):
if not visit[w]:
visit[w] = 1
bfs_path.append(w)
Q.append(w)
N, E, V = map(int, input().split())
G = [[] for _ in range(N + 1)]
visited = [0] * (N + 1)
for _ in range(E):
u, v = map(int, input().split())
G[u].append(v)
G[v].append(u)
dfs_path = [V]
dfs(V)
print(' '.join(map(str, dfs_path)))
bfs_path = [V]
bfs(V)
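# Sample check (a common test case for this problem): N=4, E=5, V=1 with
# edges (1,2) (1,3) (1,4) (2,4) (3,4) prints "1 2 4 3" for DFS and
# "1 2 3 4" for BFS.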
print(' '.join(map(str, bfs_path))) | [
"[email protected]"
] | |
1b03a8531d7533b57236f251b0c713bced9b5f50 | f2befaae3840bafd181cc712108e3b64caf2696f | /app/portal/horizon/openstack_dashboard/dashboards/admin/routers/tests.py | 557966985c38691b0549627c5fe8ece11b815e77 | [
"Apache-2.0",
"LicenseRef-scancode-generic-cla"
] | permissive | F5Networks/f5-adcaas-openstack | 17d5c408d421dcfe542002e1f850b2d9f29f1663 | 02bd8a606215c0fa08b926bac1b092b5e8b278df | refs/heads/master | 2023-08-28T12:09:54.972191 | 2022-08-12T02:03:43 | 2022-08-12T02:03:43 | 164,592,273 | 4 | 23 | Apache-2.0 | 2022-08-12T02:03:44 | 2019-01-08T07:40:35 | Python | UTF-8 | Python | false | false | 10,869 | py | # Copyright 2012, Nachi Ueno, NTT MCL, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django import http
from mox3.mox import IgnoreArg
from mox3.mox import IsA
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.routers import tests as r_test
from openstack_dashboard.test import helpers as test
INDEX_TEMPLATE = 'horizon/common/_data_table_view.html'
class RouterTests(test.BaseAdminViewTests, r_test.RouterTests):
DASHBOARD = 'admin'
INDEX_URL = reverse('horizon:%s:routers:index' % DASHBOARD)
DETAIL_PATH = 'horizon:%s:routers:detail' % DASHBOARD
def _get_detail(self, router, extraroute=True):
res = super(RouterTests, self)._get_detail(router, extraroute,
lookup_l3=True)
return res
@test.create_stubs({api.neutron: ('router_list', 'network_list',
'is_extension_supported'),
api.keystone: ('tenant_list',)})
def test_index(self):
tenants = self.tenants.list()
api.neutron.router_list(
IsA(http.HttpRequest)).AndReturn(self.routers.list())
api.keystone.tenant_list(IsA(http.HttpRequest))\
.AndReturn([tenants, False])
api.neutron.is_extension_supported(IsA(http.HttpRequest),
"router_availability_zone")\
.AndReturn(True)
self._mock_external_network_list()
self.mox.ReplayAll()
res = self.client.get(self.INDEX_URL)
self.assertTemplateUsed(res, INDEX_TEMPLATE)
routers = res.context['table'].data
self.assertItemsEqual(routers, self.routers.list())
@test.create_stubs({api.neutron: ('router_list',
'is_extension_supported'),
api.keystone: ('tenant_list',)})
def test_index_router_list_exception(self):
api.neutron.router_list(
IsA(http.HttpRequest)).AndRaise(self.exceptions.neutron)
api.neutron.is_extension_supported(IsA(http.HttpRequest),
"router_availability_zone")\
.AndReturn(True)
self.mox.ReplayAll()
res = self.client.get(self.INDEX_URL)
self.assertTemplateUsed(res, INDEX_TEMPLATE)
self.assertEqual(len(res.context['table'].data), 0)
self.assertMessageCount(res, error=1)
@test.create_stubs({api.neutron: ('agent_list',
'router_list_on_l3_agent',
'network_list',
'is_extension_supported'),
api.keystone: ('tenant_list',)})
def test_list_by_l3_agent(self):
tenants = self.tenants.list()
agent = self.agents.list()[1]
api.neutron.agent_list(
IsA(http.HttpRequest),
id=agent.id).AndReturn([agent])
api.neutron.router_list_on_l3_agent(
IsA(http.HttpRequest),
agent.id,
search_opts=None).AndReturn(self.routers.list())
api.keystone.tenant_list(IsA(http.HttpRequest))\
.AndReturn([tenants, False])
api.neutron.is_extension_supported(IsA(http.HttpRequest),
"router_availability_zone")\
.AndReturn(True)
self._mock_external_network_list()
self.mox.ReplayAll()
l3_list_url = reverse('horizon:admin:routers:l3_agent_list',
args=[agent.id])
res = self.client.get(l3_list_url)
self.assertTemplateUsed(res, INDEX_TEMPLATE)
routers = res.context['table'].data
self.assertItemsEqual(routers, self.routers.list())
@test.create_stubs({api.neutron: ('router_list', 'network_list',
'is_extension_supported'),
api.keystone: ('tenant_list',)})
def test_set_external_network_empty(self):
router = self.routers.first()
api.neutron.router_list(
IsA(http.HttpRequest)).AndReturn([router])
api.neutron.is_extension_supported(IsA(http.HttpRequest),
"router_availability_zone")\
.AndReturn(True)
api.keystone.tenant_list(IsA(http.HttpRequest))\
.AndReturn([self.tenants.list(), False])
self._mock_external_network_list(alter_ids=True)
self.mox.ReplayAll()
res = self.client.get(self.INDEX_URL)
table_data = res.context['table'].data
self.assertEqual(len(table_data), 1)
self.assertIn('(Not Found)',
table_data[0]['external_gateway_info']['network'])
self.assertTemplateUsed(res, INDEX_TEMPLATE)
self.assertMessageCount(res, error=1)
@test.create_stubs({api.neutron: ('router_list', 'network_list',
'port_list', 'router_delete',
'is_extension_supported'),
api.keystone: ('tenant_list',)})
def test_router_delete(self):
router = self.routers.first()
tenants = self.tenants.list()
api.neutron.router_list(
IsA(http.HttpRequest)).AndReturn(self.routers.list())
api.keystone.tenant_list(IsA(http.HttpRequest))\
.AndReturn([tenants, False])
self._mock_external_network_list()
api.neutron.router_list(
IsA(http.HttpRequest)).AndReturn(self.routers.list())
api.neutron.is_extension_supported(IsA(http.HttpRequest),
"router_availability_zone")\
.MultipleTimes().AndReturn(True)
api.keystone.tenant_list(IsA(http.HttpRequest))\
.AndReturn([tenants, False])
self._mock_external_network_list()
api.neutron.port_list(IsA(http.HttpRequest),
device_id=router.id, device_owner=IgnoreArg())\
.AndReturn([])
api.neutron.router_delete(IsA(http.HttpRequest), router.id)
api.neutron.router_list(
IsA(http.HttpRequest)).AndReturn(self.routers.list())
api.keystone.tenant_list(IsA(http.HttpRequest))\
.AndReturn([tenants, False])
self._mock_external_network_list()
self.mox.ReplayAll()
res = self.client.get(self.INDEX_URL)
formData = {'action': 'routers__delete__' + router.id}
res = self.client.post(self.INDEX_URL, formData, follow=True)
self.assertNoFormErrors(res)
self.assertMessageCount(response=res, success=1)
self.assertIn('Deleted Router: ' + router.name,
res.content.decode('utf-8'))
@test.create_stubs({api.neutron: ('router_list', 'network_list',
'port_list', 'router_remove_interface',
'router_delete',
'is_extension_supported'),
api.keystone: ('tenant_list',)})
def test_router_with_interface_delete(self):
router = self.routers.first()
ports = self.ports.list()
tenants = self.tenants.list()
api.neutron.router_list(
IsA(http.HttpRequest)).AndReturn(self.routers.list())
api.keystone.tenant_list(IsA(http.HttpRequest))\
.AndReturn([tenants, False])
self._mock_external_network_list()
api.neutron.router_list(
IsA(http.HttpRequest)).AndReturn(self.routers.list())
api.neutron.is_extension_supported(IsA(http.HttpRequest),
"router_availability_zone")\
.MultipleTimes().AndReturn(True)
api.keystone.tenant_list(IsA(http.HttpRequest))\
.AndReturn([tenants, False])
self._mock_external_network_list()
api.neutron.port_list(IsA(http.HttpRequest),
device_id=router.id, device_owner=IgnoreArg())\
.AndReturn(ports)
for port in ports:
api.neutron.router_remove_interface(IsA(http.HttpRequest),
router.id, port_id=port.id)
api.neutron.router_delete(IsA(http.HttpRequest), router.id)
api.neutron.router_list(
IsA(http.HttpRequest)).AndReturn(self.routers.list())
api.keystone.tenant_list(IsA(http.HttpRequest))\
.AndReturn([tenants, False])
self._mock_external_network_list()
self.mox.ReplayAll()
res = self.client.get(self.INDEX_URL)
formData = {'action': 'routers__delete__' + router.id}
res = self.client.post(self.INDEX_URL, formData, follow=True)
self.assertNoFormErrors(res)
self.assertMessageCount(response=res, success=1)
self.assertIn('Deleted Router: ' + router.name,
res.content.decode('utf-8'))
@test.create_stubs({api.neutron: ('is_extension_supported',)})
@test.update_settings(FILTER_DATA_FIRST={'admin.routers': True})
def test_routers_list_with_admin_filter_first(self):
api.neutron.is_extension_supported(IsA(http.HttpRequest),
"router_availability_zone")\
.MultipleTimes().AndReturn(True)
self.mox.ReplayAll()
res = self.client.get(self.INDEX_URL)
self.assertTemplateUsed(res, INDEX_TEMPLATE)
routers = res.context['table'].data
self.assertItemsEqual(routers, [])
class RouterTestsNoL3Agent(RouterTests):
def _get_detail(self, router, extraroute=True):
return super(RouterTests, self)._get_detail(router, extraroute,
lookup_l3=True,
support_l3_agent=False)
class RouterRouteTest(test.BaseAdminViewTests, r_test.RouterRouteTests):
DASHBOARD = 'admin'
INDEX_URL = reverse('horizon:%s:routers:index' % DASHBOARD)
DETAIL_PATH = 'horizon:%s:routers:detail' % DASHBOARD
def _get_detail(self, router, extraroute=True):
return super(RouterRouteTest, self)._get_detail(router, extraroute,
lookup_l3=True)
| [
"[email protected]"
] | |
8e1117685899d2bf068c219a6f66312448e008ff | 9131dd03ff2880fca2a5883572784f8e51046e41 | /env/lib/python3.6/site-packages/clicksend_client/models/delivery_issue.py | 84f1503041f46cfe49989d1ade2142787157ff54 | [] | no_license | aviadm24/coronaap | fe10619ae42a8c839cd0a2c2c522187c5f21fbc7 | 5608c2d77cb3441b48ba51da04c06a187fb09488 | refs/heads/master | 2022-12-09T21:35:17.179422 | 2021-01-28T08:21:49 | 2021-01-28T08:21:49 | 249,938,200 | 0 | 0 | null | 2021-09-22T18:47:51 | 2020-03-25T09:36:10 | JavaScript | UTF-8 | Python | false | false | 7,502 | py | # coding: utf-8
"""
ClickSend v3 API
This is an official SDK for [ClickSend](https://clicksend.com) Below you will find a current list of the available methods for clicksend. *NOTE: You will need to create a free account to use the API. You can register [here](https://dashboard.clicksend.com/#/signup/step1/)..* # noqa: E501
OpenAPI spec version: 3.1
Contact: [email protected]
Generated by: https://github.com/clicksend-api/clicksend-codegen.git
"""
import pprint
import re # noqa: F401
import six
class DeliveryIssue(object):
"""NOTE: This class is auto generated by the clicksend code generator program.
Do not edit the class manually.
"""
"""
Attributes:
clicksend_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
clicksend_types = {
'message_id': 'str',
'type': 'str',
'description': 'str',
'client_comments': 'str',
'email_address': 'str'
}
attribute_map = {
'message_id': 'message_id',
'type': 'type',
'description': 'description',
'client_comments': 'client_comments',
'email_address': 'email_address'
}
discriminator_value_class_map = {
}
def __init__(self, message_id=None, type=None, description=None, client_comments=None, email_address=None): # noqa: E501
"""DeliveryIssue - a model defined in Swagger""" # noqa: E501
self._message_id = None
self._type = None
self._description = None
self._client_comments = None
self._email_address = None
self.discriminator = 'classType'
if message_id is not None:
self.message_id = message_id
self.type = type
self.description = description
if client_comments is not None:
self.client_comments = client_comments
self.email_address = email_address
@property
def message_id(self):
"""Gets the message_id of this DeliveryIssue. # noqa: E501
The message id of the message. # noqa: E501
:return: The message_id of this DeliveryIssue. # noqa: E501
:rtype: str
"""
return self._message_id
@message_id.setter
def message_id(self, message_id):
"""Sets the message_id of this DeliveryIssue.
The message id of the message. # noqa: E501
:param message_id: The message_id of this DeliveryIssue. # noqa: E501
:type: str
"""
self._message_id = message_id
@property
def type(self):
"""Gets the type of this DeliveryIssue. # noqa: E501
The type of message, must be one of the following values SMS, MMS, VOICE, EMAIL_MARKETING, EMAIL_TRANSACTIONAL, FAX, POST. # noqa: E501
:return: The type of this DeliveryIssue. # noqa: E501
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this DeliveryIssue.
The type of message, must be one of the following values SMS, MMS, VOICE, EMAIL_MARKETING, EMAIL_TRANSACTIONAL, FAX, POST. # noqa: E501
:param type: The type of this DeliveryIssue. # noqa: E501
:type: str
"""
if type is None:
raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501
self._type = type
@property
def description(self):
"""Gets the description of this DeliveryIssue. # noqa: E501
The description of the message. # noqa: E501
:return: The description of this DeliveryIssue. # noqa: E501
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""Sets the description of this DeliveryIssue.
The description of the message. # noqa: E501
:param description: The description of this DeliveryIssue. # noqa: E501
:type: str
"""
if description is None:
raise ValueError("Invalid value for `description`, must not be `None`") # noqa: E501
self._description = description
@property
def client_comments(self):
"""Gets the client_comments of this DeliveryIssue. # noqa: E501
The user's comments. # noqa: E501
:return: The client_comments of this DeliveryIssue. # noqa: E501
:rtype: str
"""
return self._client_comments
@client_comments.setter
def client_comments(self, client_comments):
"""Sets the client_comments of this DeliveryIssue.
The user's comments. # noqa: E501
:param client_comments: The client_comments of this DeliveryIssue. # noqa: E501
:type: str
"""
self._client_comments = client_comments
@property
def email_address(self):
"""Gets the email_address of this DeliveryIssue. # noqa: E501
The user's email address. # noqa: E501
:return: The email_address of this DeliveryIssue. # noqa: E501
:rtype: str
"""
return self._email_address
@email_address.setter
def email_address(self, email_address):
"""Sets the email_address of this DeliveryIssue.
The user's email address. # noqa: E501
:param email_address: The email_address of this DeliveryIssue. # noqa: E501
:type: str
"""
if email_address is None:
raise ValueError("Invalid value for `email_address`, must not be `None`") # noqa: E501
self._email_address = email_address
def get_real_child_model(self, data):
"""Returns the real base class specified by the discriminator"""
discriminator_value = data[self.discriminator].lower()
return self.discriminator_value_class_map.get(discriminator_value)
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.clicksend_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(DeliveryIssue, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, DeliveryIssue):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
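# Usage sketch (my own illustration, not generated code): required fields are
# validated on assignment, and to_dict() produces a plain serializable dict.
#   issue = DeliveryIssue(type="SMS", description="Message was not delivered",
#                         email_address="[email protected]")
#   payload = issue.to_dict()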
| [
"[email protected]"
] | |
712ebb3e8e9c6daab9c2cd3b469cecab96797c6e | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_godfather.py | 813d71c96e7c700883fb63b1932814bc31f99141 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 364 | py |
# class header
class _GODFATHER():
def __init__(self,):
self.name = "GODFATHER"
self.definitions = [u'a male godparent', u'the leader of a criminal group, especially a mafia family']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
| [
"[email protected]"
] | |
c064647cd1304d7aff89c6683cd29e2b315cfa1e 163bbb4e0920dedd5941e3edfb2d8706ba75627d /Code/CodeRecords/2863/60673/273869.py 625083ae06703fe4379f18234384daf60c110ffb | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 318 | py | # Wall height h; person i has height a[i]; walking upright takes width 1, bending over takes width 2
n, h = input().split(" ")
a = input().split(" ")
n = int(n)
h = int(h)
for i in range(n):
a[i] = int(a[i])
walkNum = 0
bendNum = 0
for i in range(n):
if (a[i] <= h):
walkNum += 1
else:
bendNum += 1
print(walkNum + bendNum * 2)
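# Equivalent one-pass check (my own addition, not the original author's code):
# each person needs width 1 when a[i] <= h and width 2 otherwise.
assert walkNum + bendNum * 2 == sum(1 if height <= h else 2 for height in a)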
| [
"[email protected]"
] | |
00256e1c2a75d6e2643d1a889bf9b296376e09eb | 6be845bf70a8efaf390da28c811c52b35bf9e475 | /windows/Resources/Dsz/PyScripts/Lib/dsz/mca/file/cmd/put/type_Params.py | 7be73ef22b4325cbd9bcac9c3611c066cc82f983 | [] | no_license | kyeremalprime/ms | 228194910bf2ed314d0492bc423cc687144bb459 | 47eea098ec735b2173ff0d4e5c493cb8f04e705d | refs/heads/master | 2020-12-30T15:54:17.843982 | 2017-05-14T07:32:01 | 2017-05-14T07:32:01 | 91,180,709 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 4,221 | py | # uncompyle6 version 2.9.10
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.6.0b2 (default, Oct 11 2016, 05:27:10)
# [GCC 6.2.0 20161005]
# Embedded file name: type_Params.py
from types import *
import array
PARAMS_CREATE_FLAG_PERMANENT = 1
class CreateParams:
def __init__(self):
self.__dict__['flags'] = 0
self.__dict__['writeOffset'] = 0
self.__dict__['filePath'] = ''
self.__dict__['provider'] = 0
def __getattr__(self, name):
if name == 'flags':
return self.__dict__['flags']
if name == 'writeOffset':
return self.__dict__['writeOffset']
if name == 'filePath':
return self.__dict__['filePath']
if name == 'provider':
return self.__dict__['provider']
raise AttributeError("Attribute '%s' not found" % name)
def __setattr__(self, name, value):
if name == 'flags':
self.__dict__['flags'] = value
elif name == 'writeOffset':
self.__dict__['writeOffset'] = value
elif name == 'filePath':
self.__dict__['filePath'] = value
elif name == 'provider':
self.__dict__['provider'] = value
else:
raise AttributeError("Attribute '%s' not found" % name)
def Marshal(self, mmsg):
from mcl.object.Message import MarshalMessage
submsg = MarshalMessage()
submsg.AddU16(MSG_KEY_PARAMS_CREATE_FLAGS, self.__dict__['flags'])
submsg.AddU64(MSG_KEY_PARAMS_CREATE_WRITE_OFFSET, self.__dict__['writeOffset'])
submsg.AddStringUtf8(MSG_KEY_PARAMS_CREATE_FILE_PATH, self.__dict__['filePath'])
submsg.AddU32(MSG_KEY_PARAMS_CREATE_PROVIDER, self.__dict__['provider'])
mmsg.AddMessage(MSG_KEY_PARAMS_CREATE, submsg)
def Demarshal(self, dmsg, instance=-1):
import mcl.object.Message
msgData = dmsg.FindData(MSG_KEY_PARAMS_CREATE, mcl.object.Message.MSG_TYPE_MSG, instance)
submsg = mcl.object.Message.DemarshalMessage(msgData)
self.__dict__['flags'] = submsg.FindU16(MSG_KEY_PARAMS_CREATE_FLAGS)
try:
self.__dict__['writeOffset'] = submsg.FindU64(MSG_KEY_PARAMS_CREATE_WRITE_OFFSET)
except:
pass
self.__dict__['filePath'] = submsg.FindString(MSG_KEY_PARAMS_CREATE_FILE_PATH)
try:
self.__dict__['provider'] = submsg.FindU32(MSG_KEY_PARAMS_CREATE_PROVIDER)
except:
pass
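# Usage sketch (my own illustration, not part of the decompiled source): the
# MSG_KEY_* constants and the mcl message classes are assumed to be supplied
# by the surrounding framework, exactly as referenced in Marshal/Demarshal.
#
#   from mcl.object.Message import MarshalMessage
#   params = CreateParams()
#   params.filePath = 'C:\\temp\\out.bin'
#   params.flags = PARAMS_CREATE_FLAG_PERMANENT
#   mmsg = MarshalMessage()
#   params.Marshal(mmsg)  # packs the fields into a nested submessage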
class WriteParams:
def __init__(self):
self.__dict__['lastData'] = False
self.__dict__['chunkIndex'] = 0
self.__dict__['data'] = array.array('B')
def __getattr__(self, name):
if name == 'lastData':
return self.__dict__['lastData']
if name == 'chunkIndex':
return self.__dict__['chunkIndex']
if name == 'data':
return self.__dict__['data']
raise AttributeError("Attribute '%s' not found" % name)
def __setattr__(self, name, value):
if name == 'lastData':
self.__dict__['lastData'] = value
elif name == 'chunkIndex':
self.__dict__['chunkIndex'] = value
elif name == 'data':
self.__dict__['data'] = value
else:
raise AttributeError("Attribute '%s' not found" % name)
def Marshal(self, mmsg):
from mcl.object.Message import MarshalMessage
submsg = MarshalMessage()
submsg.AddBool(MSG_KEY_PARAMS_WRITE_LAST_DATA, self.__dict__['lastData'])
submsg.AddU32(MSG_KEY_PARAMS_WRITE_CHUNK_INDEX, self.__dict__['chunkIndex'])
submsg.AddData(MSG_KEY_PARAMS_WRITE_DATA, self.__dict__['data'])
mmsg.AddMessage(MSG_KEY_PARAMS_WRITE, submsg)
def Demarshal(self, dmsg, instance=-1):
import mcl.object.Message
msgData = dmsg.FindData(MSG_KEY_PARAMS_WRITE, mcl.object.Message.MSG_TYPE_MSG, instance)
submsg = mcl.object.Message.DemarshalMessage(msgData)
self.__dict__['lastData'] = submsg.FindBool(MSG_KEY_PARAMS_WRITE_LAST_DATA)
self.__dict__['chunkIndex'] = submsg.FindU32(MSG_KEY_PARAMS_WRITE_CHUNK_INDEX)
        self.__dict__['data'] = submsg.FindData(MSG_KEY_PARAMS_WRITE_DATA)
| [
"[email protected]"
] | |
13915155f7c20e488e358ce9a8fc7c78b8049d80 | 299fe2ca879e509798e95c00b7ba33914031f4a7 | /eruditio/shared_apps/django_userhistory/userhistory.py | 10aad86e7ff44123a9ea653ae8ca81813915a013 | [
"MIT"
] | permissive | genghisu/eruditio | dcf2390c98d5d1a7c1044a9221bf319cb7d1f0f6 | 5f8f3b682ac28fd3f464e7a993c3988c1a49eb02 | refs/heads/master | 2021-01-10T11:15:28.230527 | 2010-04-23T21:13:01 | 2010-04-23T21:13:01 | 50,865,100 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,733 | py | from django_userhistory.models import UserTrackedContent
class UserHistoryRegistry(object):
"""
Registry for UserHistory handlers. Necessary so that only one
receiver is registered for each UserTrackedContent object.
"""
def __init__(self):
self._registry = {}
self._handlers = {}
user_tracked_contents = UserTrackedContent.objects.all()
for content in user_tracked_contents:
self.register(content.content_type, content.action)
def get_handler(self, content_name):
"""
Attempt to get a handler for target content type, based
on the following naming convention.
content_type.model_class()._meta.db_table as StudlyCaps + Handler
"""
import django_userhistory.handlers as handlers
def to_studly(x):
return "".join([token.capitalize() for token in x.split("_")])
handler_class = getattr(handlers,
"%sHandler" % (to_studly(content_name)),
handlers.BaseUserHistoryHandler)
return handler_class
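        # Illustrative example of the convention above (not from the original
        # source): a content type whose db_table is "user_tracked_content"
        # resolves to handlers.UserTrackedContentHandler, falling back to
        # handlers.BaseUserHistoryHandler when no such class is defined.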
def register(self, content_type, action):
"""
Registers a handler from django_userhistory.handlers with the target
content type.
"""
content_name = content_type.model_class()._meta.db_table
if not content_name in self._registry.keys():
HandlerClass = self.get_handler(content_name)
handler = HandlerClass(content_type, action)
self._registry[content_name] = content_type
self._handlers[content_name] = handler
user_history_registry = UserHistoryRegistry()
| [
"genghisu@6a795458-236b-11df-a5e4-cb4ff25536bb"
] | genghisu@6a795458-236b-11df-a5e4-cb4ff25536bb |
cf7594d16439bef485cf4bb9c072a01bf8bafede | 3ccd609f68016aad24829b8dd3cdbb535fb0ff6d | /python/bpy/types/LineStyleColorModifier_Curvature_3D.py | 92d99c6cbf2195f90c35af6beed12322fa7454ae | [] | no_license | katharostech/blender_externs | 79b2eed064fd927e3555aced3e2eb8a45840508e | fdf7f019a460de0fe7e62375c1c94f7ab0e9f68d | refs/heads/master | 2020-04-11T14:00:29.393478 | 2018-10-01T00:40:51 | 2018-10-01T00:40:51 | 161,838,212 | 1 | 1 | null | 2018-12-14T20:41:32 | 2018-12-14T20:41:32 | null | UTF-8 | Python | false | false | 50 | py | LineStyleColorModifier_Curvature_3D.type = None
| [
"[email protected]"
] | |
e589e9b8d3a9feebdb918b5bc6c69646e2a2bba0 | 911d3ffa7f6687b7b2d5609f4d7bb1f907f1703a | /Conditional Statements - More Exercises/06. Pets.py | d518cc7b89d6286ab3fc57f9402ad4d4aa37db01 | [] | no_license | ivan-yosifov88/python_basics | 923e5ba5dcdc5f2288f012eeb544176d1eb964e9 | ee02f1b7566e49566f15c4285d92b04f8fa6a986 | refs/heads/master | 2023-03-05T21:49:24.191904 | 2021-02-24T12:36:03 | 2021-02-24T12:36:03 | 341,581,532 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 459 | py | from math import floor, ceil
number_of_days = int(input())
left_food = int(input())
dog_food = float(input())
cat_food = float(input())
turtle_food = float(input())
kilogram_food_eaten = number_of_days * (dog_food + cat_food + turtle_food / 1000)
difference = abs(left_food - kilogram_food_eaten)
if left_food >= kilogram_food_eaten:
print(f"{floor(difference)} kilos of food left.")
else:
print(f"{ceil(difference)} more kilos of food are needed.")
| [
"ivan.yosifov88gmail.com"
] | ivan.yosifov88gmail.com |
3f35f2a8b17f35df510599c29d815a6b083efd36 | ff5892487c262ce845a9996a282d3a2fdb1a3b15 | /URI_1254.py | 17a978a92191caec16353d8fd8ca9417daec8b41 | [] | no_license | dankoga/URIOnlineJudge--Python-3.9 | d424a47671f106d665a4e255382fc0ec3059096a | f1c99521caeff59be0843af5f63a74013b63f7f0 | refs/heads/master | 2023-07-15T08:32:11.040426 | 2021-09-03T13:27:17 | 2021-09-03T13:27:17 | 393,991,461 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 698 | py | import re
while True:
try:
tag = input().lower()
except EOFError:
break
tag_replacement = input()
text = input()
text_replaced = []
index_begin = 0
index_end = 0
    regex = re.compile(re.escape(tag), re.IGNORECASE)  # escape the tag so any regex metacharacters are matched literally
while index_end < len(text):
while index_end < len(text) and text[index_end] != '<':
index_end += 1
text_replaced += text[index_begin:index_end]
index_begin = index_end
while index_end < len(text) and text[index_end] != '>':
index_end += 1
text_replaced += regex.sub(tag_replacement, text[index_begin:index_end])
index_begin = index_end
print(''.join(text_replaced))
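    # Illustrative trace (my own example): with tag "b", replacement "i", and
    # text "<b>bold</b>", only the characters inside <...> segments go through
    # regex.sub, yielding "<i>bold</i>"; the "b" in "bold" is left untouched.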
| [
"[email protected]"
] | |
7870f65dc0b7e24d9079a084ded746c988bdb9bb | 1bd3076902117867ec048210905195ba2aaaaa6b | /exercise/leetcode/python_src/by2017_Sep/Leet279.py | d70c3fe111c535970d12a2902656ed8da5306c9a | [] | no_license | SS4G/AlgorithmTraining | d75987929f1f86cd5735bc146e86b76c7747a1ab | 7a1c3aba65f338f6e11afd2864dabd2b26142b6c | refs/heads/master | 2021-01-17T20:54:31.120884 | 2020-06-03T15:04:10 | 2020-06-03T15:04:10 | 84,150,587 | 2 | 0 | null | 2017-10-19T11:50:38 | 2017-03-07T03:33:04 | Python | UTF-8 | Python | false | false | 809 | py | class Solution(object):
"""
my first dp code
"""
def __init__(self):
self.dpstate = [0, 1, 2, 3, ] + ([-1, ] * 10000)
def numSquares(self, n):
"""
:type n: int
:rtype: int
"""
res = self.dpRecursive(n, self.dpstate)
return res
def dpRecursive(self, n, stateRecord):
if stateRecord[n] != -1:
return stateRecord[n]
else:
maxSqrt = int(n**0.5)
            best = 0xffffffff  # large sentinel standing in for infinity; avoids shadowing the builtin min()
            while maxSqrt >= 1:
                tmp = self.dpRecursive(n - maxSqrt**2, stateRecord)
                best = tmp if tmp < best else best
                maxSqrt -= 1
            stateRecord[n] = best + 1
            return best + 1
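# An equivalent bottom-up version (my own sketch, not the original author's
# code): dp[i] = 1 + min(dp[i - j*j]) over all j*j <= i, which gives the same
# answers without deep recursion.
def num_squares_iterative(n):
    dp = [0] + [float('inf')] * n  # dp[0] = 0: zero squares sum to 0
    for i in range(1, n + 1):
        j = 1
        while j * j <= i:
            dp[i] = min(dp[i], dp[i - j * j] + 1)
            j += 1
    return dp[n]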
if __name__ == "__main__":
s = Solution()
    print(s.numSquares(6405))
| [
"[email protected]"
] | |
2eae42fa8e4b1dc07aa735f7b8fc312778f409cd | 4b4df51041551c9a855468ddf1d5004a988f59a2 | /leetcode_python/Array/rotate-function.py | 3d952365fd1c669f093f899be1b8236df3d9be1b | [] | no_license | yennanliu/CS_basics | 99b7ad3ef6817f04881d6a1993ec634f81525596 | 035ef08434fa1ca781a6fb2f9eed3538b7d20c02 | refs/heads/master | 2023-09-03T13:42:26.611712 | 2023-09-03T12:46:08 | 2023-09-03T12:46:08 | 66,194,791 | 64 | 40 | null | 2022-08-20T09:44:48 | 2016-08-21T11:11:35 | Python | UTF-8 | Python | false | false | 4,546 | py | """
396. Rotate Function
Medium
You are given an integer array nums of length n.
Assume arr_k to be the array obtained by rotating nums by k positions clock-wise. We define the rotation function F on nums as follows:
F(k) = 0 * arr_k[0] + 1 * arr_k[1] + ... + (n - 1) * arr_k[n - 1].
Return the maximum value of F(0), F(1), ..., F(n-1).
The test cases are generated so that the answer fits in a 32-bit integer.
Example 1:
Input: nums = [4,3,2,6]
Output: 26
Explanation:
F(0) = (0 * 4) + (1 * 3) + (2 * 2) + (3 * 6) = 0 + 3 + 4 + 18 = 25
F(1) = (0 * 6) + (1 * 4) + (2 * 3) + (3 * 2) = 0 + 4 + 6 + 6 = 16
F(2) = (0 * 2) + (1 * 6) + (2 * 4) + (3 * 3) = 0 + 6 + 8 + 9 = 23
F(3) = (0 * 3) + (1 * 2) + (2 * 6) + (3 * 4) = 0 + 2 + 12 + 12 = 26
So the maximum value of F(0), F(1), F(2), F(3) is F(3) = 26.
Example 2:
Input: nums = [100]
Output: 0
Constraints:
n == nums.length
1 <= n <= 10^5
-100 <= nums[i] <= 100
"""
# V0
# IDEA : MATH
# first, we write out F(k) for a 4-element array [A, B, C, D]:
#
# F(0) = 0A + 1B + 2C +3D
#
# F(1) = 0D + 1A + 2B +3C
#
# F(2) = 0C + 1D + 2A +3B
#
# F(3) = 0B + 1C + 2D +3A
#
# then, by some math manipulation, we have below relation:
#
# set sum = 1A + 1B + 1C + 1D
#
# -> F(1) = F(0) + sum - 4D
#
# -> F(2) = F(1) + sum - 4C
#
# -> F(3) = F(2) + sum - 4B
#
# so we find the rules!
#
# => F(i) = F(i-1) + sum - n*A[n-i]
#
# https://www.cnblogs.com/grandyang/p/5869791.html
# http://bookshadow.com/weblog/2016/09/11/leetcode-rotate-function/
class Solution(object):
def maxRotateFunction(self, A):
size = len(A)
sums = sum(A)
sumn = sum(x * n for x, n in enumerate(A))
ans = sumn
for x in range(size - 1, 0, -1):
sumn += sums - size * A[x]
ans = max(ans, sumn)
return ans
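# Worked check of the recurrence (my own addition, reusing Example 1 from the
# problem statement): for A = [4, 3, 2, 6], sum = 15 and F(0) = 25, so
#   F(1) = F(0) + 15 - 4*6 = 16
#   F(2) = F(1) + 15 - 4*2 = 23
#   F(3) = F(2) + 15 - 4*3 = 26
# which matches the expected maximum of 26.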
# V0'
# IDEA : BRUTE FORCE (TLE)
class Solution(object):
def maxRotateFunction(self, nums):
# help func
def help(arr):
ans = 0
for i in range(len(arr)):
tmp = i * arr[i]
ans += tmp
return ans
# edge case
if not nums:
return 0
# rotate
ans = -float('inf')
for i in range(len(nums)):
tmp = nums.pop(-1)
nums.insert(0, tmp)
cur = help(nums)
ans = max(ans, cur)
#print("nums = " + str(nums) + " cur = " + str(cur))
return ans
# V1
# https://blog.csdn.net/fuxuemingzhu/article/details/83002609
# IDEA : MATH PATTERN
# -> SINCE
# F(0) = 0A + 1B + 2C +3D
# F(1) = 0D + 1A + 2B +3C
# F(2) = 0C + 1D + 2A +3B
# F(3) = 0B + 1C + 2D +3A
# -> SO
# F(1) = F(0) + sum - 4D
# F(2) = F(1) + sum - 4C
# F(3) = F(2) + sum - 4B
# -> THEN WE KNOW THE PATTERN OF ROTATE OPERATION IS ACTUAL :
# ---> F(i) = F(i-1) + sum - n * A[n-i]
class Solution:
def maxRotateFunction(self, A):
"""
:type A: List[int]
:rtype: int
"""
_sum = 0
N = len(A)
f = 0
for i, a in enumerate(A):
_sum += a
f += i * a
res = f
for i in range(N - 1, 0, -1):
f = f + _sum - N * A[i]
res = max(res, f) # since we want to calculate the MAX value of F(0), F(1), ..., F(n-1).
return res
### Test case
s=Solution()
assert s.maxRotateFunction([]) == 0
assert s.maxRotateFunction([7]) == 0
assert s.maxRotateFunction([7,2,1]) == 15
assert s.maxRotateFunction([4, 3, 2, 6]) == 26
assert s.maxRotateFunction([0,0,0,0]) == 0
assert s.maxRotateFunction([3,7,0,1]) == 28
assert s.maxRotateFunction([1,1,1,1]) == 6
assert s.maxRotateFunction([-1,-1,-1,-1]) == -6
assert s.maxRotateFunction([-1,10,-5,1]) == 29
# V1'
# http://bookshadow.com/weblog/2016/09/11/leetcode-rotate-function/
class Solution(object):
def maxRotateFunction(self, A):
"""
:type A: List[int]
:rtype: int
"""
size = len(A)
sums = sum(A)
sumn = sum(x * n for x, n in enumerate(A))
ans = sumn
for x in range(size - 1, 0, -1):
sumn += sums - size * A[x]
ans = max(ans, sumn)
return ans
# V2
# Time: O(n)
# Space: O(1)
class Solution(object):
def maxRotateFunction(self, A):
"""
:type A: List[int]
:rtype: int
"""
s = sum(A)
fi = 0
for i in range(len(A)):
fi += i * A[i]
result = fi
for i in range(1, len(A)+1):
fi += s - len(A) * A[-i]
result = max(result, fi)
        return result
| [
"[email protected]"
] | |
e6ac0a4377f1efeaa6ced9a1f60ff1064ee4f9d5 | 48894ae68f0234e263d325470178d67ab313c73e | /sa/apps/mrt/views.py | 2436dec1eafdb325d310c8be9f817091229bae4b | [
"BSD-3-Clause"
] | permissive | DreamerDDL/noc | 7f949f55bb2c02c15ac2cc46bc62d957aee43a86 | 2ab0ab7718bb7116da2c3953efd466757e11d9ce | refs/heads/master | 2021-05-10T18:22:53.678588 | 2015-06-29T12:28:20 | 2015-06-29T12:28:20 | 118,628,133 | 0 | 0 | null | 2018-01-23T15:19:51 | 2018-01-23T15:19:51 | null | UTF-8 | Python | false | false | 4,323 | py | # -*- coding: utf-8 -*-
##----------------------------------------------------------------------
## sa.mrt application
##----------------------------------------------------------------------
## Copyright (C) 2007-2011 The NOC Project
## See LICENSE for details
##----------------------------------------------------------------------
## Python modules
import datetime
## NOC modules
from noc.lib.app import ExtApplication, view
from noc.sa.models import (ManagedObjectSelector, ManagedObject,
ReduceTask, MRTConfig)
from noc.main.models import Permission
from noc.lib.serialize import json_decode
class MRTAppplication(ExtApplication):
"""
sa.mrt application
"""
def extra_permissions(self):
"""
Get list of additional permissions
:return:
"""
x = set([p.permission_name for p in
MRTConfig.objects.filter(is_active=True)])
return list(x)
@view(url="^(?P<task>[0-9a-zA-Z_\-]+)/$", method=["POST"],
access="launch", api=True)
def api_run(self, request, task):
"""
Run new MRT
:param request:
:param task:
:return:
"""
# Get task
config = MRTConfig.objects.filter(
name=task, is_active=True).first()
if not config:
return self.response_not_found("Task not found")
# Check permissions
pn = "sa:mrt:%s" % config.permission_name
if not Permission.has_perm(request.user, pn):
return self.response_forbidden(
"Permission denied: '%s' permission required" % pn)
# Parse request
try:
r = json_decode(request.raw_post_data)
except Exception, why:
return self.response_bad_request(str(why))
if type(r) != dict:
return self.response_bad_request("dict required")
if "selector" not in r:
return self.response_bad_request("'selector' is missed")
# Resolve objects from selector
try:
objects = ManagedObjectSelector.resolve_expression(r["selector"])
except ManagedObjectSelector.DoesNotExist, why:
return self.response_not_found(str(why))
except ManagedObject.DoesNotExist, why:
return self.response_not_found(str(why))
# Check all objects fall within MRTConfig selector
unauthorized = set(objects).difference(set(
config.selector.managed_objects))
if unauthorized:
return self.response_forbidden("Unauthorized objects: %s" % (
", ".join([o.name for o in unauthorized])
))
# Run MRT
timeout = r.get("timeout", None) or config.timeout
t = ReduceTask.create_task(
objects,
"pyrule:%s" % config.reduce_pyrule.name, {},
config.map_script, r.get("map_args", {}),
timeout)
return self.response_accepted(
location="/sa/mrt/%s/%d/" % (task, t.id))
@view(url="^(?P<task>[0-9a-zA-Z_\-]+)/(?P<task_id>\d+)/$", method=["GET"],
access="launch", api=True)
def api_result(self, request, task, task_id):
# Get task
config = MRTConfig.objects.filter(name=task, is_active=True).first()
if not config:
return self.response_not_found("Task not found")
# Check permissions
pn = "sa:mrt:%s" % config.permission_name
if not Permission.has_perm(request.user, pn):
return self.response_forbidden(
"Permission denied: '%s' permission required" % pn)
#
t = self.get_object_or_404(ReduceTask, id=int(task_id))
try:
r = t.get_result(block=False)
except ReduceTask.NotReady:
# Not ready
completed = t.maptask_set.filter(status__in=("C", "F")).count()
total = t.maptask_set.count()
return {
"ready": False,
"progress": int(completed * 100 / total),
"max_timeout": (t.stop_time - datetime.datetime.now()).seconds,
"result": None
}
# Return result
return {
"ready": True,
"progress": 100,
"max_timeout": 0,
"result": r
}
| [
"[email protected]"
] | |
398e431632ab1e171a30c473667a6229cbf94728 | 76b983258793d294b81791ebe72591bfebf78625 | /lib/ia32/optable.py | 314689b3d1c369c472d481e50573e58dabea9a73 | [
"BSD-2-Clause"
] | permissive | lotusexpeditor/syringe | 18ac9cb800a7fefb7d67e31936db6a84e47df9eb | 34a8386b90f534f9a856d0a436bba04dbf5100bd | refs/heads/master | 2023-02-08T10:08:20.295797 | 2020-12-27T00:06:09 | 2020-12-27T00:06:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,372 | py | from ._optable import OperandLookupTable
from . import typesize
def Lookup(opcode):
'''Lookup specified opcode in the lookup table'''
res = ord(opcode[0])
if res == 0x0f:
res = ord(opcode[1])
return OperandLookupTable[res+0x100]
return OperandLookupTable[res]
def HasModrm(lookup):
'''Returns True if specified opcode requires a modrm byte'''
return bool(ord(lookup) & 0x80)
def HasImmediate(lookup):
'''Returns True if specified opcode contains an immediate value'''
return bool(ord(lookup) & 0x40)
def GetImmediateLength(lookup, prefixes):
res = ord(lookup) & 0x3f
opsizeindex = not int(b'\x66' in prefixes)
if res == 0x3f: # it sucks because i know python has such a horrible optimizer, and i need to redo this as a dict for that reason
size = [ 2*typesize.halfword, 2*typesize.word ][opsizeindex]
elif res == 0x3e:
size = [ typesize.byte, typesize.halfword ][opsizeindex]
elif res == 0x3d:
size = [ typesize.halfword, typesize.word ][opsizeindex]
elif res == 0x3c:
size = [ typesize.word, typesize.word*2][opsizeindex]
elif res == 0x3b:
size = [ typesize.word*2, typesize.halfword ][opsizeindex]
elif res == 0x3a:
size = [ typesize.halfword + typesize.word, typesize.word ][opsizeindex]
else:
size = res
return size
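# A dict-based sketch of the rewrite hinted at in the comment above (my own
# illustration, not part of the original module): each special encoding maps
# to a (16-bit mode, 32-bit mode) size pair chosen by the operand-size prefix.
def GetImmediateLengthDict(lookup, prefixes):
    special = {
        0x3f: (2*typesize.halfword, 2*typesize.word),
        0x3e: (typesize.byte, typesize.halfword),
        0x3d: (typesize.halfword, typesize.word),
        0x3c: (typesize.word, typesize.word*2),
        0x3b: (typesize.word*2, typesize.halfword),
        0x3a: (typesize.halfword + typesize.word, typesize.word),
    }
    res = ord(lookup) & 0x3f
    opsizeindex = not int(b'\x66' in prefixes)
    if res in special:
        return special[res][opsizeindex]
    return res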
| [
"[email protected]"
] | |
43cd83767fb5b114eb726ddf99e8ae561d91adf5 | aa0270b351402e421631ebc8b51e528448302fab | /sdk/containerservice/azure-mgmt-containerservice/azure/mgmt/containerservice/v2022_06_02_preview/aio/operations/_maintenance_configurations_operations.py | e03f2a6cb9fe01b22e856323445e0d162f7c457d | [
"MIT",
"LGPL-2.1-or-later",
"LicenseRef-scancode-generic-cla"
] | permissive | fangchen0601/azure-sdk-for-python | d04a22109d0ff8ff209c82e4154b7169b6cb2e53 | c2e11d6682e368b2f062e714490d2de42e1fed36 | refs/heads/master | 2023-05-11T16:53:26.317418 | 2023-05-04T20:02:16 | 2023-05-04T20:02:16 | 300,440,803 | 0 | 0 | MIT | 2020-10-16T18:45:29 | 2020-10-01T22:27:56 | null | UTF-8 | Python | false | false | 19,860 | py | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._maintenance_configurations_operations import (
build_create_or_update_request,
build_delete_request,
build_get_request,
build_list_by_managed_cluster_request,
)
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class MaintenanceConfigurationsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.containerservice.v2022_06_02_preview.aio.ContainerServiceClient`'s
:attr:`maintenance_configurations` attribute.
"""
models = _models
def __init__(self, *args, **kwargs) -> None:
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def list_by_managed_cluster(
self, resource_group_name: str, resource_name: str, **kwargs: Any
) -> AsyncIterable["_models.MaintenanceConfiguration"]:
"""Gets a list of maintenance configurations in the specified managed cluster.
Gets a list of maintenance configurations in the specified managed cluster.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either MaintenanceConfiguration or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.containerservice.v2022_06_02_preview.models.MaintenanceConfiguration]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-06-02-preview"))
cls: ClsType[_models.MaintenanceConfigurationListResult] = kwargs.pop("cls", None)
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_managed_cluster_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list_by_managed_cluster.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("MaintenanceConfigurationListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
list_by_managed_cluster.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/maintenanceConfigurations"
}
@distributed_trace_async
async def get(
self, resource_group_name: str, resource_name: str, config_name: str, **kwargs: Any
) -> _models.MaintenanceConfiguration:
"""Gets the specified maintenance configuration of a managed cluster.
Gets the specified maintenance configuration of a managed cluster.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:param config_name: The name of the maintenance configuration. Required.
:type config_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: MaintenanceConfiguration or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2022_06_02_preview.models.MaintenanceConfiguration
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-06-02-preview"))
cls: ClsType[_models.MaintenanceConfiguration] = kwargs.pop("cls", None)
request = build_get_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
config_name=config_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("MaintenanceConfiguration", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/maintenanceConfigurations/{configName}"
}
@overload
async def create_or_update(
self,
resource_group_name: str,
resource_name: str,
config_name: str,
parameters: _models.MaintenanceConfiguration,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.MaintenanceConfiguration:
"""Creates or updates a maintenance configuration in the specified managed cluster.
Creates or updates a maintenance configuration in the specified managed cluster.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:param config_name: The name of the maintenance configuration. Required.
:type config_name: str
:param parameters: The maintenance configuration to create or update. Required.
:type parameters:
~azure.mgmt.containerservice.v2022_06_02_preview.models.MaintenanceConfiguration
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: MaintenanceConfiguration or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2022_06_02_preview.models.MaintenanceConfiguration
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
async def create_or_update(
self,
resource_group_name: str,
resource_name: str,
config_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.MaintenanceConfiguration:
"""Creates or updates a maintenance configuration in the specified managed cluster.
Creates or updates a maintenance configuration in the specified managed cluster.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:param config_name: The name of the maintenance configuration. Required.
:type config_name: str
:param parameters: The maintenance configuration to create or update. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: MaintenanceConfiguration or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2022_06_02_preview.models.MaintenanceConfiguration
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace_async
async def create_or_update(
self,
resource_group_name: str,
resource_name: str,
config_name: str,
parameters: Union[_models.MaintenanceConfiguration, IO],
**kwargs: Any
) -> _models.MaintenanceConfiguration:
"""Creates or updates a maintenance configuration in the specified managed cluster.
Creates or updates a maintenance configuration in the specified managed cluster.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:param config_name: The name of the maintenance configuration. Required.
:type config_name: str
:param parameters: The maintenance configuration to create or update. Is either a
MaintenanceConfiguration type or a IO type. Required.
:type parameters:
~azure.mgmt.containerservice.v2022_06_02_preview.models.MaintenanceConfiguration or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: MaintenanceConfiguration or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2022_06_02_preview.models.MaintenanceConfiguration
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-06-02-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.MaintenanceConfiguration] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "MaintenanceConfiguration")
request = build_create_or_update_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
config_name=config_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.create_or_update.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("MaintenanceConfiguration", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/maintenanceConfigurations/{configName}"
}
@distributed_trace_async
async def delete( # pylint: disable=inconsistent-return-statements
self, resource_group_name: str, resource_name: str, config_name: str, **kwargs: Any
) -> None:
"""Deletes a maintenance configuration.
Deletes a maintenance configuration.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:param config_name: The name of the maintenance configuration. Required.
:type config_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-06-02-preview"))
cls: ClsType[None] = kwargs.pop("cls", None)
request = build_delete_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
config_name=config_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.delete.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/maintenanceConfigurations/{configName}"
}
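# Usage sketch (my own illustration; the access path follows the class
# docstring above, while client construction details may vary by environment):
#
#   from azure.identity.aio import DefaultAzureCredential
#   from azure.mgmt.containerservice.v2022_06_02_preview.aio import ContainerServiceClient
#
#   client = ContainerServiceClient(DefaultAzureCredential(), "<subscription-id>")
#   async for cfg in client.maintenance_configurations.list_by_managed_cluster(
#           "my-resource-group", "my-cluster"):
#       print(cfg.name)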
| [
"[email protected]"
] | |
418128e933eadf203bb45c157fb1159c2f0fd3fc | 04c21e01c7dd002d0d66f26f17294bbe25ab30c1 | /src/core/serializers/authentication/reset_password.py | 0e0b5a6f90c5a3f1927d314da4b45df747402d19 | [] | no_license | unbrokenguy/Q-n-A-rest-api | 29d1a7614d761bf68f38bbbbbd731c3692afccf7 | dd483993e304d6660c8c8f7518bf7414efd8ec28 | refs/heads/master | 2023-06-03T20:19:52.606677 | 2021-06-18T09:35:27 | 2021-06-18T09:35:27 | 376,749,787 | 0 | 0 | null | 2021-06-18T09:35:27 | 2021-06-14T08:08:44 | Python | UTF-8 | Python | false | false | 394 | py | from rest_framework import serializers
from core.models import User
class ResetPasswordSerializer(serializers.ModelSerializer):
"""
    Reset-password serializer: checks whether the new password is strong enough and raises a ValidationError if it is not.
"""
class Meta:
model = User
fields = ["password"]
extra_kwargs = {
"password": {"required": True},
}
| [
"[email protected]"
] | |
7718f80d703242913200b6318bd12354622ff8e1 | c1bd12405d244c5924a4b069286cd9baf2c63895 | /azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/__init__.py | b9ace78ff9b74043c8e8e5253b7611a5e4bd9da3 | [
"MIT"
] | permissive | lmazuel/azure-sdk-for-python | 972708ad5902778004680b142874582a284a8a7c | b40e0e36cc00a82b7f8ca2fa599b1928240c98b5 | refs/heads/master | 2022-08-16T02:32:14.070707 | 2018-03-29T17:16:15 | 2018-03-29T17:16:15 | 21,287,134 | 1 | 3 | MIT | 2019-10-25T15:56:00 | 2014-06-27T19:40:56 | Python | UTF-8 | Python | false | false | 952 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .linked_services_operations import LinkedServicesOperations
from .data_sources_operations import DataSourcesOperations
from .workspaces_operations import WorkspacesOperations
from .storage_insights_operations import StorageInsightsOperations
from .saved_searches_operations import SavedSearchesOperations
__all__ = [
'LinkedServicesOperations',
'DataSourcesOperations',
'WorkspacesOperations',
'StorageInsightsOperations',
'SavedSearchesOperations',
]
| [
"[email protected]"
] | |
c3a88a91df0ca6dd325fb81f7f3f25b014e2a78d | 44b455e1d02445954382ef1d40be41b7798700a1 | /async_request/tornado_request.py | de8b957897f97dcc138d423b81f977d6163bbeb8 | [] | no_license | anstones/Mylib | 58a9f49a784f9dce7ab2053020f5ac754f3203ee | c21a28d9a34cf8c71ad290d61034365fb86bdc86 | refs/heads/master | 2020-08-19T12:51:40.647523 | 2019-11-18T14:19:29 | 2019-11-18T14:19:29 | 215,921,463 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,589 | py | # coding: utf-8
import json as _json
import urllib.parse
import mimetypes
from functools import partial
from tornado.httpclient import AsyncHTTPClient
from tornado.httpclient import HTTPRequest, HTTPResponse
from tornado import gen
from lib.exceptions import CallServiceException
from lib.utilities import get_unique_id
class AsyncResponse(object):
def __init__(self, method, url, response=None):
self._method = method
self._url = url
self._result = None
self._response = response
@property
def request_method(self):
return self._response.request.method if self._response else self._method
@property
def request_url(self):
return self._response.request.url if self._response else self._url
@property
def request_data(self):
        return self._response.request.body if self._response else None
@property
def response(self):
return self._response
@response.setter
def response(self, response):
self._response = response
@property
def headers(self):
return self._response.headers
@property
def result(self):
return self._result
@result.setter
def result(self, result):
self._result = result
@property
def json(self):
return _json.loads(self.text)
@property
def text(self):
return self._decode_content()
@property
def content(self):
return self._response.body
def _decode_content(self):
content = None
if isinstance(self._response, HTTPResponse) and isinstance(self._response.body, bytes):
content = self._response.body.decode("utf-8")
return content
def has_exception(self):
return isinstance(self._result, Exception)
def __str__(self):
return '{} {} request:{} response:{}'.format(self.request_method, self.request_url, self.request_data,
self.text)
__repr__ = __str__
class AsyncResult(object):
def __init__(self, response=None):
self._response = response
@property
def request_method(self):
return self._response.request.method
@property
def request_url(self):
return self._response.request.url
@property
def request_data(self):
return self._response.request.body
@property
def request_headers(self):
return self._response.request.headers
@property
def response_headers(self):
return self._response.headers
@property
def status_code(self):
return self._response.code
@property
def response(self):
return self._response
@response.setter
def response(self, response):
self._response = response
@property
def json(self):
return _json.loads(self.text)
@property
def text(self):
return self._decode_content()
@property
def content(self):
return self._response.body
def _decode_content(self):
content = None
if isinstance(self._response, HTTPResponse) and isinstance(self._response.body, bytes):
content = self._response.body.decode("utf-8")
return content
def abstract(self):
return "{} {} request:{} response:{}".format(self.request_method, self.request_url, self.request_data[:512],
self.text)
def __str__(self):
return '{} {} request:{} response:{}'.format(self.request_method, self.request_url, self.request_data,
self.text)
__repr__ = __str__
class TornadoHttpRequest(object):
def __init__(self, logger):
self._client = AsyncHTTPClient()
self._logger = logger
@gen.coroutine
def get(self, url, is_json_result=True, params=None, headers=None, **kwargs):
"""
:param url:
:param is_json_result:
:param params:
:param headers:
:param kwargs:
:return:
"""
if params is not None:
kwargs.update(params)
if kwargs:
real_url = "{}?{}".format(url, urllib.parse.urlencode(kwargs))
else:
real_url = url
result = AsyncResponse(method="GET", url=real_url)
try:
response = yield self._client.fetch(real_url, headers=headers)
result.response = response
self._logger.debug(result)
if response.error:
raise CallServiceException(method=result.request_method, url=result.request_url, errmsg=response.error)
except Exception as e:
raise CallServiceException(method=result.request_method, url=result.request_url, errmsg=e)
else:
try:
result.result = result.text if not is_json_result else result.json
except _json.JSONDecodeError:
raise CallServiceException(method=result.request_method,
url=result.request_url,
errmsg="Invalid json format")
return result
@gen.coroutine
def post(self, url, is_json_result=True, data=None, json=None, headers=None, use_url_encode=False, **kwargs):
"""
:param url:
:param data:
:param is_json_result:
:param json:
:param headers:
:param use_url_encode:
:param kwargs:
:return:
"""
if use_url_encode:
if headers is None:
headers = {"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8"}
data = urllib.parse.urlencode(json)
else:
if json is not None:
if headers is None:
headers = {"Content-Type": "application/json; charset=UTF-8"}
data = _json.dumps(json)
result = AsyncResponse(method="POST", url=url)
request = HTTPRequest(url=url, method=result.request_method, body=data, headers=headers, **kwargs)
try:
response = yield self._client.fetch(request)
result.response = response
self._logger.debug(result)
if response.error:
raise CallServiceException(method=result.request_method, url=result.request_url, errmsg=response.error)
except CallServiceException:
raise
except Exception as e:
raise CallServiceException(method=result.request_method, url=result.request_url, errmsg=e)
else:
try:
result.result = result.text if not is_json_result else result.json
except _json.JSONDecodeError:
raise CallServiceException(method=result.request_method,
url=result.request_url,
errmsg="Invalid json format")
return result
@gen.coroutine
def send_file(self, url, file_names):
"""
:param url:
:param file_names:
:return:
"""
method = "POST"
boundary = get_unique_id()
headers = {'Content-Type': 'multipart/form-data; boundary=%s' % boundary}
producer = partial(self._multipart_producer, boundary, file_names)
result = AsyncResult()
request = HTTPRequest(url=url, method=method, headers=headers, body_producer=producer)
try:
response = yield self._client.fetch(request)
if response.error:
raise CallServiceException(method=method, url=url, errmsg=response.error)
result.response = response
except CallServiceException:
raise
except Exception as e:
raise CallServiceException(method=method, url=url, errmsg=e)
return result
@gen.coroutine
def send_data_as_file(self, url, raw_data, filename=None, ext="jpg"):
"""
:param url:
:param filename:
:param raw_data:
:param ext:
:return:
"""
method = "POST"
boundary = get_unique_id()
headers = {'Content-Type': 'multipart/form-data; boundary=%s' % boundary}
producer = partial(self._stream_producer, boundary, filename, ext, raw_data)
result = AsyncResult()
request = HTTPRequest(url=url, method=method, headers=headers, body_producer=producer)
try:
response = yield self._client.fetch(request)
if response.error:
raise CallServiceException(method=method, url=url, errmsg=response.error)
result.response = response
except CallServiceException:
raise
except Exception as e:
raise CallServiceException(method=method, url=url, errmsg=e)
return result
@gen.coroutine
def upload_file(self, url, raw_data, filename=None, ext="jpg"):
"""
:param url:
:param raw_data:
:param filename:
:param ext:
:return:
"""
method = "POST"
boundary = get_unique_id()
body = TornadoHttpRequest._encode_formdata(boundary=boundary, filename=filename, ext=ext, raw_data=raw_data)
headers = {'Content-Type': 'multipart/form-data; boundary=%s' % boundary, 'Content-Length': str(len(body))}
result = AsyncResult()
request = HTTPRequest(url=url, method=method, headers=headers, body=body)
try:
response = yield self._client.fetch(request)
if response.error:
raise CallServiceException(method=method, url=url, errmsg=response.error)
result.response = response
except CallServiceException:
raise
except Exception as e:
raise CallServiceException(method=method, url=url, errmsg=e)
return result
@classmethod
@gen.coroutine
def _multipart_producer(cls, boundary, file_names, write):
"""
:param boundary:
:param file_names:
:param write:
:return:
"""
boundary_bytes = boundary.encode()
for filename in file_names:
filename_bytes = filename.encode()
mime_type = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
buf = (
(b'--%s\r\n' % boundary_bytes) +
(b'Content-Disposition: form-data; name="%s"; filename="%s"\r\n' %
(filename_bytes, filename_bytes)) +
(b'Content-Type: %s\r\n' % mime_type.encode()) +
b'\r\n'
)
yield write(buf)
with open(filename, 'rb') as f:
while True:
chunk = f.read(16 * 1024)
if not chunk:
break
yield write(chunk)
yield write(b'\r\n')
yield write(b'--%s--\r\n' % (boundary_bytes,))
@classmethod
@gen.coroutine
def _stream_producer(cls, boundary, filename, ext, raw_data, write):
"""
:param boundary:
:param filename:
:param ext:
:param raw_data:
:param write:
:return:
"""
boundary_bytes = boundary.encode()
if not filename:
filename = "{}.{}".format(boundary, ext)
filename_bytes = filename.encode()
mime_type = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
buf = (
(b'--%s\r\n' % boundary_bytes) +
(b'Content-Disposition: form-data; name="%s"; filename="%s"\r\n' %
(filename_bytes, filename_bytes)) +
(b'Content-Type: %s\r\n' % mime_type.encode()) +
b'\r\n'
)
yield write(buf)
yield write(raw_data)
yield write(b'\r\n')
yield write(b'--%s--\r\n' % (boundary_bytes,))
@classmethod
def _encode_formdata(cls, boundary, filename, ext, raw_data):
"""
:param boundary:
:param filename:
:param ext:
:param raw_data:
:return:
"""
crlf = b'\r\n'
buffer = list()
boundary_bytes = boundary.encode()
if not filename:
filename = "{}.{}".format(boundary, ext)
filename_bytes = filename.encode()
buffer.append(b'--%s' % boundary_bytes)
buffer.append(b'Content-Disposition: form-data; name="%s"; filename="%s"' % (filename_bytes, filename_bytes))
mime_type = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
buffer.append(b'Content-Type: %s' % mime_type.encode())
buffer.append(b'')
buffer.append(raw_data)
buffer.append(b'--%s--' % boundary_bytes)
buffer.append(b'')
body = crlf.join(buffer)
return body
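# --- usage sketch (illustrative, not part of the original module) ---
# Shows how TornadoHttpRequest might be driven from an IOLoop. The URL,
# logger wiring and printed field are assumptions made for this sketch;
# `gen` and the client class come from the module above.
if __name__ == "__main__":
    import logging
    from tornado.ioloop import IOLoop

    logging.basicConfig(level=logging.DEBUG)

    @gen.coroutine
    def _demo():
        client = TornadoHttpRequest(logging.getLogger("demo"))
        # extra keyword arguments are appended to the query string
        result = yield client.get("http://httpbin.org/get",
                                  is_json_result=False, q="hello")
        print(result.text)

    IOLoop.current().run_sync(_demo)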
| [
"[email protected]"
] | |
234f603a62fbcfc25412c15d4df79e54e6129073 | 60f95eff7c43f788af2420813c371152c1e2e5eb | /hulk/broker/oanda/common/constants.py | b37ffd670df513efa144a0da60298fba8d27b29e | [
"BSD-3-Clause"
] | permissive | webclinic017/hulk | 1667c508acb061a8120dc415978a72e83dc38f54 | de326ca1554dc743e225cef4e4b1e2fd4f5090c6 | refs/heads/master | 2022-03-22T20:07:23.276317 | 2019-12-02T01:10:43 | 2019-12-02T01:11:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 329 | py | from ....base.models import AccountType
OANDA_ENVIRONMENTS = {
"streaming": {
AccountType.REAL: "stream-fxtrade.oanda.com",
AccountType.DEMO: "stream-fxpractice.oanda.com",
},
"api": {
AccountType.REAL: "api-fxtrade.oanda.com",
AccountType.DEMO: "api-fxpractice.oanda.com",
}
}
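# Example (illustrative, not part of the original module): resolving the
# REST API host for a practice account via the mapping above.
if __name__ == "__main__":
    print("https://" + OANDA_ENVIRONMENTS["api"][AccountType.DEMO])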
| [
"[email protected]"
] | |
cc53f060d460eb0ef9a0249b2bb6c1c52008ea64 | bf8d344b17e2ff9b7e38ad9597d5ce0e3d4da062 | /ppdet/optimizer/optimizer.py | 2d0714078eec14dadd57f5689ae6a41039562202 | [
"Apache-2.0"
] | permissive | PaddlePaddle/PaddleDetection | e7e0f40bef75a4e0b6dcbacfafa7eb1969e44961 | bd83b98342b0a6bc8d8dcd5936233aeda1e32167 | refs/heads/release/2.6 | 2023-08-31T07:04:15.357051 | 2023-08-18T02:24:45 | 2023-08-18T02:24:45 | 217,475,193 | 12,523 | 3,096 | Apache-2.0 | 2023-09-10T10:05:56 | 2019-10-25T07:21:14 | Python | UTF-8 | Python | false | false | 12,296 | py | # Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import math
import paddle
import paddle.nn as nn
import paddle.optimizer as optimizer
import paddle.regularizer as regularizer
from ppdet.core.workspace import register, serializable
import copy
from .adamw import AdamWDL, build_adamwdl
__all__ = ['LearningRate', 'OptimizerBuilder']
from ppdet.utils.logger import setup_logger
logger = setup_logger(__name__)
@serializable
class CosineDecay(object):
"""
Cosine learning rate decay
Args:
max_epochs (int): max epochs for the training process.
if you combine cosine decay with warmup, it is recommended that
max_iters be much larger than the warmup iters
use_warmup (bool): whether to use warmup. Default: True.
min_lr_ratio (float): minimum learning rate ratio. Default: 0.
last_plateau_epochs (int): use minimum learning rate in
the last few epochs. Default: 0.
"""
def __init__(self,
max_epochs=1000,
use_warmup=True,
min_lr_ratio=0.,
last_plateau_epochs=0):
self.max_epochs = max_epochs
self.use_warmup = use_warmup
self.min_lr_ratio = min_lr_ratio
self.last_plateau_epochs = last_plateau_epochs
def __call__(self,
base_lr=None,
boundary=None,
value=None,
step_per_epoch=None):
assert base_lr is not None, "either base LR or values should be provided"
max_iters = self.max_epochs * int(step_per_epoch)
last_plateau_iters = self.last_plateau_epochs * int(step_per_epoch)
min_lr = base_lr * self.min_lr_ratio
if boundary is not None and value is not None and self.use_warmup:
# use warmup
warmup_iters = len(boundary)
for i in range(int(boundary[-1]), max_iters):
boundary.append(i)
if i < max_iters - last_plateau_iters:
decayed_lr = min_lr + (base_lr - min_lr) * 0.5 * (math.cos(
(i - warmup_iters) * math.pi /
(max_iters - warmup_iters - last_plateau_iters)) + 1)
value.append(decayed_lr)
else:
value.append(min_lr)
return optimizer.lr.PiecewiseDecay(boundary, value)
elif last_plateau_iters > 0:
# not use warmup, but set `last_plateau_epochs` > 0
boundary = []
value = []
for i in range(max_iters):
if i < max_iters - last_plateau_iters:
decayed_lr = min_lr + (base_lr - min_lr) * 0.5 * (math.cos(
i * math.pi / (max_iters - last_plateau_iters)) + 1)
value.append(decayed_lr)
else:
value.append(min_lr)
if i > 0:
boundary.append(i)
return optimizer.lr.PiecewiseDecay(boundary, value)
return optimizer.lr.CosineAnnealingDecay(
base_lr, T_max=max_iters, eta_min=min_lr)
@serializable
class PiecewiseDecay(object):
"""
Multi step learning rate decay
Args:
gamma (float | list): decay factor
milestones (list): steps at which to decay learning rate
"""
def __init__(self,
gamma=[0.1, 0.01],
milestones=[8, 11],
values=None,
use_warmup=True):
super(PiecewiseDecay, self).__init__()
if type(gamma) is not list:
self.gamma = []
for i in range(len(milestones)):
self.gamma.append(gamma / 10**i)
else:
self.gamma = gamma
self.milestones = milestones
self.values = values
self.use_warmup = use_warmup
def __call__(self,
base_lr=None,
boundary=None,
value=None,
step_per_epoch=None):
if boundary is not None and self.use_warmup:
boundary.extend([int(step_per_epoch) * i for i in self.milestones])
else:
# do not use LinearWarmup
boundary = [int(step_per_epoch) * i for i in self.milestones]
value = [base_lr] # base_lr is used during steps [0, boundary[0])
# self.values is set directly in the config
if self.values is not None:
assert len(self.milestones) + 1 == len(self.values)
return optimizer.lr.PiecewiseDecay(boundary, self.values)
# value is computed by self.gamma
value = value if value is not None else [base_lr]
for i in self.gamma:
value.append(base_lr * i)
return optimizer.lr.PiecewiseDecay(boundary, value)
@serializable
class LinearWarmup(object):
"""
Warm up learning rate linearly
Args:
steps (int): warm up steps
start_factor (float): initial learning rate factor
epochs (int|None): use epochs as warm up steps, the priority
of `epochs` is higher than `steps`. Default: None.
"""
def __init__(self, steps=500, start_factor=1. / 3, epochs=None):
super(LinearWarmup, self).__init__()
self.steps = steps
self.start_factor = start_factor
self.epochs = epochs
def __call__(self, base_lr, step_per_epoch):
boundary = []
value = []
warmup_steps = self.epochs * step_per_epoch \
if self.epochs is not None else self.steps
warmup_steps = max(warmup_steps, 1)
for i in range(warmup_steps + 1):
if warmup_steps > 0:
alpha = i / warmup_steps
factor = self.start_factor * (1 - alpha) + alpha
lr = base_lr * factor
value.append(lr)
if i > 0:
boundary.append(i)
return boundary, value
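# Worked example (illustrative): LinearWarmup(steps=4, start_factor=0.25)
# with base_lr=0.1 yields factors 0.25, 0.4375, 0.625, 0.8125, 1.0, i.e.
# value = [0.025, 0.04375, 0.0625, 0.08125, 0.1] and boundary = [1, 2, 3, 4].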
@serializable
class ExpWarmup(object):
"""
Warm up learning rate in exponential mode
Args:
steps (int): warm up steps.
epochs (int|None): use epochs as warm up steps, the priority
of `epochs` is higher than `steps`. Default: None.
power (int): Exponential coefficient. Default: 2.
"""
def __init__(self, steps=1000, epochs=None, power=2):
super(ExpWarmup, self).__init__()
self.steps = steps
self.epochs = epochs
self.power = power
def __call__(self, base_lr, step_per_epoch):
boundary = []
value = []
warmup_steps = self.epochs * step_per_epoch if self.epochs is not None else self.steps
warmup_steps = max(warmup_steps, 1)
for i in range(warmup_steps + 1):
factor = (i / float(warmup_steps))**self.power
value.append(base_lr * factor)
if i > 0:
boundary.append(i)
return boundary, value
@register
class LearningRate(object):
"""
Learning Rate configuration
Args:
base_lr (float): base learning rate
schedulers (list): learning rate schedulers
"""
__category__ = 'optim'
def __init__(self,
base_lr=0.01,
schedulers=[PiecewiseDecay(), LinearWarmup()]):
super(LearningRate, self).__init__()
self.base_lr = base_lr
self.schedulers = []
schedulers = copy.deepcopy(schedulers)
for sched in schedulers:
if isinstance(sched, dict):
# support dict sched instantiate
module = sys.modules[__name__]
type = sched.pop("name")
scheduler = getattr(module, type)(**sched)
self.schedulers.append(scheduler)
else:
self.schedulers.append(sched)
def __call__(self, step_per_epoch):
assert len(self.schedulers) >= 1
if not self.schedulers[0].use_warmup:
return self.schedulers[0](base_lr=self.base_lr,
step_per_epoch=step_per_epoch)
# TODO: split warmup & decay
# warmup
boundary, value = self.schedulers[1](self.base_lr, step_per_epoch)
# decay
decay_lr = self.schedulers[0](self.base_lr, boundary, value,
step_per_epoch)
return decay_lr
@register
class OptimizerBuilder():
"""
Build optimizer handles
Args:
regularizer (object): an `Regularizer` instance
optimizer (object): an `Optimizer` instance
"""
__category__ = 'optim'
def __init__(self,
clip_grad_by_norm=None,
clip_grad_by_value=None,
regularizer={'type': 'L2',
'factor': .0001},
optimizer={'type': 'Momentum',
'momentum': .9}):
self.clip_grad_by_norm = clip_grad_by_norm
self.clip_grad_by_value = clip_grad_by_value
self.regularizer = regularizer
self.optimizer = optimizer
def __call__(self, learning_rate, model=None):
if self.clip_grad_by_norm is not None:
grad_clip = nn.ClipGradByGlobalNorm(
clip_norm=self.clip_grad_by_norm)
elif self.clip_grad_by_value is not None:
var = abs(self.clip_grad_by_value)
grad_clip = nn.ClipGradByValue(min=-var, max=var)
else:
grad_clip = None
if self.regularizer and self.regularizer != 'None':
reg_type = self.regularizer['type'] + 'Decay'
reg_factor = self.regularizer['factor']
regularization = getattr(regularizer, reg_type)(reg_factor)
else:
regularization = None
optim_args = self.optimizer.copy()
optim_type = optim_args['type']
del optim_args['type']
if optim_type == 'AdamWDL':
return build_adamwdl(model, lr=learning_rate, **optim_args)
if optim_type != 'AdamW':
optim_args['weight_decay'] = regularization
op = getattr(optimizer, optim_type)
if 'param_groups' in optim_args:
assert isinstance(optim_args['param_groups'], list), 'param_groups must be a list'
param_groups = optim_args.pop('param_groups')
params, visited = [], []
for group in param_groups:
assert isinstance(group,
dict) and 'params' in group and isinstance(
group['params'], list), 'each group must be a dict with a list under "params"'
_params = {
n: p
for n, p in model.named_parameters()
if any([k in n
for k in group['params']]) and p.trainable is True
}
_group = group.copy()
_group.update({'params': list(_params.values())})
params.append(_group)
visited.extend(list(_params.keys()))
ext_params = [
p for n, p in model.named_parameters()
if n not in visited and p.trainable is True
]
if len(ext_params) < len(model.parameters()):
params.append({'params': ext_params})
elif len(ext_params) > len(model.parameters()):
raise RuntimeError('param_groups cover more parameters than the model owns')
else:
_params = model.parameters()
params = [param for param in _params if param.trainable is True]
return op(learning_rate=learning_rate,
parameters=params,
grad_clip=grad_clip,
**optim_args)
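# --- usage sketch (illustrative, not part of PaddleDetection) ---
# Composes warmup with piecewise decay the way the YAML configs do; the
# base_lr, milestones and step counts below are made up for the example.
if __name__ == "__main__":
    lr = LearningRate(
        base_lr=0.01,
        schedulers=[PiecewiseDecay(gamma=0.1, milestones=[8, 11]),
                    LinearWarmup(steps=100)])
    scheduler = lr(step_per_epoch=500)
    # scheduler is a paddle.optimizer.lr.PiecewiseDecay with warmup baked in
    print(type(scheduler).__name__)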
| [
"[email protected]"
] | |
4e86e0e6ff825aaff5a9add1e218622ecce984ed | ca75f7099b93d8083d5b2e9c6db2e8821e63f83b | /z2/part2/batch/jm/parser_errors_2/185179947.py | 546d93745ac3129f50e6b4ee8ebd53a7475e4971 | [
"MIT"
] | permissive | kozakusek/ipp-2020-testy | 210ed201eaea3c86933266bd57ee284c9fbc1b96 | 09aa008fa53d159672cc7cbf969a6b237e15a7b8 | refs/heads/master | 2022-10-04T18:55:37.875713 | 2020-06-09T21:15:37 | 2020-06-09T21:15:37 | 262,290,632 | 0 | 0 | MIT | 2020-06-09T21:15:38 | 2020-05-08T10:10:47 | C | UTF-8 | Python | false | false | 916 | py | from part1 import (
gamma_board,
gamma_busy_fields,
gamma_delete,
gamma_free_fields,
gamma_golden_move,
gamma_golden_possible,
gamma_move,
gamma_new,
)
"""
scenario: test_random_actions
uuid: 185179947
"""
"""
random actions, total chaos
"""
board = gamma_new(2, 2, 2, 2)
assert board is not None
assert gamma_move(board, 1, 1, 0) == 1
assert gamma_move(board, 2, 0, 1) == 1
board737265096 = gamma_board(board)
assert board737265096 is not None
assert board737265096 == ("2.\n"
".1\n")
del board737265096
board737265096 = None
assert gamma_move(board, 1, 1, 0) == 0
assert gamma_move(board, 2, 1, 1) == 1
assert gamma_move(board, 1, 0, 1) == 0
assert gamma_move(board, 2, 0, 0) == 1
assert gamma_free_fields(board, 2) == 0
assert gamma_move(board, 1, 1, 0) == 0
assert gamma_golden_move(board, 1, 1, 0) == 0
assert gamma_move(board, 2, 1, 1) == 0
gamma_delete(board)
| [
"[email protected]"
] | |
ba97b518db15458fb817d0b872d2356510abc92f | df8438656cc2b15001a03d02949abec9a374cb6f | /test/normalizer_issue_files/E72.py | c39cacc09c68bb48fdc7e3972843eaa5190fa3fb | [
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Python-2.0"
] | permissive | gandhis1/parso | 65fcc7540eb2664691b1ed12203faa617995c4ce | 7b166db0b5b0b46a3b8b2f1ea5c9dcf57bc36197 | refs/heads/master | 2021-01-25T04:36:15.558393 | 2017-06-05T23:20:12 | 2017-06-05T23:20:12 | 93,455,487 | 0 | 0 | null | 2017-06-05T23:18:20 | 2017-06-05T23:18:20 | null | UTF-8 | Python | false | false | 1,089 | py | #: E721
if type(res) == type(42):
pass
#: E721
if type(res) != type(""):
pass
import types
if res == types.IntType:
pass
import types
#: E721:3
if type(res) is not types.ListType:
pass
#: E721:7 E721:35
assert type(res) == type(False) or type(res) == type(None)
#: E721:7
assert type(res) == type([])
#: E721:7
assert type(res) == type(())
#: E721:7
assert type(res) == type((0,))
#: E721:7
assert type(res) == type((0))
#: E721:7
assert type(res) != type((1, ))
#: E721:7
assert type(res) is type((1, ))
#: E721:7
assert type(res) is not type((1, ))
# Okay
#: E402
import types
if isinstance(res, int):
pass
if isinstance(res, str):
pass
if isinstance(res, types.MethodType):
pass
#: E721:3 E721:25
if type(a) != type(b) or type(a) == type(ccc):
pass
#: E721
type(a) != type(b)
#: E721
1 != type(b)
#: E721
type(b) != 1
1 != 1
try:
pass
#: E722
except:
pass
try:
pass
except Exception:
pass
#: E722
except:
pass
# Okay
fake_code = """"
try:
do_something()
except:
pass
"""
try:
pass
except Exception:
pass
| [
"[email protected]"
] | |
f960787efb67b91348af709a474548bd3c83a751 | 99f145ac3a1b9192e54c114379f16bf992781251 | /venv/lib/python2.7/site-packages/pandas/tests/plotting/test_series.py | 6878ca0e1bc0618a0b53b0b7d150206acc962c7e | [
"MIT"
] | permissive | dushyantRathore/Cricket-API | 0a7df84f9760090e8a24dc61689e63e123c33d1f | d28bc5e6c613052793117e3dbd9035e4540901bb | refs/heads/master | 2021-03-24T10:18:58.362716 | 2020-07-08T17:52:38 | 2020-07-08T17:52:38 | 79,565,447 | 3 | 1 | MIT | 2018-10-01T19:16:47 | 2017-01-20T14:24:08 | Python | UTF-8 | Python | false | false | 30,920 | py | #!/usr/bin/env python
# coding: utf-8
import nose
import itertools
from datetime import datetime
import pandas as pd
from pandas import Series, DataFrame, date_range
from pandas.compat import range, lrange
import pandas.util.testing as tm
from pandas.util.testing import slow
import numpy as np
from numpy.random import randn
import pandas.tools.plotting as plotting
from pandas.tests.plotting.common import (TestPlotBase, _check_plot_works,
_skip_if_no_scipy_gaussian_kde,
_ok_for_gaussian_kde)
""" Test cases for Series.plot """
@tm.mplskip
class TestSeriesPlots(TestPlotBase):
def setUp(self):
TestPlotBase.setUp(self)
import matplotlib as mpl
mpl.rcdefaults()
self.ts = tm.makeTimeSeries()
self.ts.name = 'ts'
self.series = tm.makeStringSeries()
self.series.name = 'series'
self.iseries = tm.makePeriodSeries()
self.iseries.name = 'iseries'
@slow
def test_plot(self):
_check_plot_works(self.ts.plot, label='foo')
_check_plot_works(self.ts.plot, use_index=False)
axes = _check_plot_works(self.ts.plot, rot=0)
self._check_ticks_props(axes, xrot=0)
ax = _check_plot_works(self.ts.plot, style='.', logy=True)
self._check_ax_scales(ax, yaxis='log')
ax = _check_plot_works(self.ts.plot, style='.', logx=True)
self._check_ax_scales(ax, xaxis='log')
ax = _check_plot_works(self.ts.plot, style='.', loglog=True)
self._check_ax_scales(ax, xaxis='log', yaxis='log')
_check_plot_works(self.ts[:10].plot.bar)
_check_plot_works(self.ts.plot.area, stacked=False)
_check_plot_works(self.iseries.plot)
for kind in ['line', 'bar', 'barh', 'kde', 'hist', 'box']:
if not _ok_for_gaussian_kde(kind):
continue
_check_plot_works(self.series[:5].plot, kind=kind)
_check_plot_works(self.series[:10].plot.barh)
ax = _check_plot_works(Series(randn(10)).plot.bar, color='black')
self._check_colors([ax.patches[0]], facecolors=['black'])
# GH 6951
ax = _check_plot_works(self.ts.plot, subplots=True)
self._check_axes_shape(ax, axes_num=1, layout=(1, 1))
ax = _check_plot_works(self.ts.plot, subplots=True, layout=(-1, 1))
self._check_axes_shape(ax, axes_num=1, layout=(1, 1))
ax = _check_plot_works(self.ts.plot, subplots=True, layout=(1, -1))
self._check_axes_shape(ax, axes_num=1, layout=(1, 1))
@slow
def test_plot_figsize_and_title(self):
# figsize and title
ax = self.series.plot(title='Test', figsize=(16, 8))
self._check_text_labels(ax.title, 'Test')
self._check_axes_shape(ax, axes_num=1, layout=(1, 1), figsize=(16, 8))
def test_dont_modify_rcParams(self):
# GH 8242
if self.mpl_ge_1_5_0:
key = 'axes.prop_cycle'
else:
key = 'axes.color_cycle'
colors = self.plt.rcParams[key]
Series([1, 2, 3]).plot()
self.assertEqual(colors, self.plt.rcParams[key])
def test_ts_line_lim(self):
ax = self.ts.plot()
xmin, xmax = ax.get_xlim()
lines = ax.get_lines()
self.assertEqual(xmin, lines[0].get_data(orig=False)[0][0])
self.assertEqual(xmax, lines[0].get_data(orig=False)[0][-1])
tm.close()
ax = self.ts.plot(secondary_y=True)
xmin, xmax = ax.get_xlim()
lines = ax.get_lines()
self.assertEqual(xmin, lines[0].get_data(orig=False)[0][0])
self.assertEqual(xmax, lines[0].get_data(orig=False)[0][-1])
def test_ts_area_lim(self):
ax = self.ts.plot.area(stacked=False)
xmin, xmax = ax.get_xlim()
line = ax.get_lines()[0].get_data(orig=False)[0]
self.assertEqual(xmin, line[0])
self.assertEqual(xmax, line[-1])
tm.close()
# GH 7471
ax = self.ts.plot.area(stacked=False, x_compat=True)
xmin, xmax = ax.get_xlim()
line = ax.get_lines()[0].get_data(orig=False)[0]
self.assertEqual(xmin, line[0])
self.assertEqual(xmax, line[-1])
tm.close()
tz_ts = self.ts.copy()
tz_ts.index = tz_ts.tz_localize('GMT').tz_convert('CET')
ax = tz_ts.plot.area(stacked=False, x_compat=True)
xmin, xmax = ax.get_xlim()
line = ax.get_lines()[0].get_data(orig=False)[0]
self.assertEqual(xmin, line[0])
self.assertEqual(xmax, line[-1])
tm.close()
ax = tz_ts.plot.area(stacked=False, secondary_y=True)
xmin, xmax = ax.get_xlim()
line = ax.get_lines()[0].get_data(orig=False)[0]
self.assertEqual(xmin, line[0])
self.assertEqual(xmax, line[-1])
def test_label(self):
s = Series([1, 2])
ax = s.plot(label='LABEL', legend=True)
self._check_legend_labels(ax, labels=['LABEL'])
self.plt.close()
ax = s.plot(legend=True)
self._check_legend_labels(ax, labels=['None'])
self.plt.close()
# get name from index
s.name = 'NAME'
ax = s.plot(legend=True)
self._check_legend_labels(ax, labels=['NAME'])
self.plt.close()
# override the default
ax = s.plot(legend=True, label='LABEL')
self._check_legend_labels(ax, labels=['LABEL'])
self.plt.close()
# Add label info, but don't draw
ax = s.plot(legend=False, label='LABEL')
self.assertEqual(ax.get_legend(), None) # Hasn't been drawn
ax.legend() # draw it
self._check_legend_labels(ax, labels=['LABEL'])
def test_line_area_nan_series(self):
values = [1, 2, np.nan, 3]
s = Series(values)
ts = Series(values, index=tm.makeDateIndex(k=4))
for d in [s, ts]:
ax = _check_plot_works(d.plot)
masked = ax.lines[0].get_ydata()
# remove nan for comparison purpose
exp = np.array([1, 2, 3], dtype=np.float64)
self.assert_numpy_array_equal(np.delete(masked.data, 2), exp)
self.assert_numpy_array_equal(
masked.mask, np.array([False, False, True, False]))
expected = np.array([1, 2, 0, 3], dtype=np.float64)
ax = _check_plot_works(d.plot, stacked=True)
self.assert_numpy_array_equal(ax.lines[0].get_ydata(), expected)
ax = _check_plot_works(d.plot.area)
self.assert_numpy_array_equal(ax.lines[0].get_ydata(), expected)
ax = _check_plot_works(d.plot.area, stacked=False)
self.assert_numpy_array_equal(ax.lines[0].get_ydata(), expected)
def test_line_use_index_false(self):
s = Series([1, 2, 3], index=['a', 'b', 'c'])
s.index.name = 'The Index'
ax = s.plot(use_index=False)
label = ax.get_xlabel()
self.assertEqual(label, '')
ax2 = s.plot.bar(use_index=False)
label2 = ax2.get_xlabel()
self.assertEqual(label2, '')
@slow
def test_bar_log(self):
expected = np.array([1., 10., 100., 1000.])
if not self.mpl_le_1_2_1:
expected = np.hstack((.1, expected, 1e4))
ax = Series([200, 500]).plot.bar(log=True)
tm.assert_numpy_array_equal(ax.yaxis.get_ticklocs(), expected)
tm.close()
ax = Series([200, 500]).plot.barh(log=True)
tm.assert_numpy_array_equal(ax.xaxis.get_ticklocs(), expected)
tm.close()
# GH 9905
expected = np.array([1.0e-03, 1.0e-02, 1.0e-01, 1.0e+00])
if not self.mpl_le_1_2_1:
expected = np.hstack((1.0e-04, expected, 1.0e+01))
if self.mpl_ge_2_0_0:
expected = np.hstack((1.0e-05, expected))
ax = Series([0.1, 0.01, 0.001]).plot(log=True, kind='bar')
ymin = 0.0007943282347242822 if self.mpl_ge_2_0_0 else 0.001
ymax = 0.12589254117941673 if self.mpl_ge_2_0_0 else .10000000000000001
res = ax.get_ylim()
self.assertAlmostEqual(res[0], ymin)
self.assertAlmostEqual(res[1], ymax)
tm.assert_numpy_array_equal(ax.yaxis.get_ticklocs(), expected)
tm.close()
ax = Series([0.1, 0.01, 0.001]).plot(log=True, kind='barh')
res = ax.get_xlim()
self.assertAlmostEqual(res[0], ymin)
self.assertAlmostEqual(res[1], ymax)
tm.assert_numpy_array_equal(ax.xaxis.get_ticklocs(), expected)
@slow
def test_bar_ignore_index(self):
df = Series([1, 2, 3, 4], index=['a', 'b', 'c', 'd'])
ax = df.plot.bar(use_index=False)
self._check_text_labels(ax.get_xticklabels(), ['0', '1', '2', '3'])
def test_rotation(self):
df = DataFrame(randn(5, 5))
# Default rot 0
axes = df.plot()
self._check_ticks_props(axes, xrot=0)
axes = df.plot(rot=30)
self._check_ticks_props(axes, xrot=30)
def test_irregular_datetime(self):
rng = date_range('1/1/2000', '3/1/2000')
rng = rng[[0, 1, 2, 3, 5, 9, 10, 11, 12]]
ser = Series(randn(len(rng)), rng)
ax = ser.plot()
xp = datetime(1999, 1, 1).toordinal()
ax.set_xlim('1/1/1999', '1/1/2001')
self.assertEqual(xp, ax.get_xlim()[0])
@slow
def test_pie_series(self):
# if the sum of values is less than 1.0, pie handles them as rates and
# draws a semicircle.
series = Series(np.random.randint(1, 5),
index=['a', 'b', 'c', 'd', 'e'], name='YLABEL')
ax = _check_plot_works(series.plot.pie)
self._check_text_labels(ax.texts, series.index)
self.assertEqual(ax.get_ylabel(), 'YLABEL')
# without wedge labels
ax = _check_plot_works(series.plot.pie, labels=None)
self._check_text_labels(ax.texts, [''] * 5)
# with less colors than elements
color_args = ['r', 'g', 'b']
ax = _check_plot_works(series.plot.pie, colors=color_args)
color_expected = ['r', 'g', 'b', 'r', 'g']
self._check_colors(ax.patches, facecolors=color_expected)
# with labels and colors
labels = ['A', 'B', 'C', 'D', 'E']
color_args = ['r', 'g', 'b', 'c', 'm']
ax = _check_plot_works(series.plot.pie, labels=labels,
colors=color_args)
self._check_text_labels(ax.texts, labels)
self._check_colors(ax.patches, facecolors=color_args)
# with autopct and fontsize
ax = _check_plot_works(series.plot.pie, colors=color_args,
autopct='%.2f', fontsize=7)
pcts = ['{0:.2f}'.format(s * 100)
for s in series.values / float(series.sum())]
iters = [iter(series.index), iter(pcts)]
expected_texts = list(next(it) for it in itertools.cycle(iters))
self._check_text_labels(ax.texts, expected_texts)
for t in ax.texts:
self.assertEqual(t.get_fontsize(), 7)
# includes negative value
with tm.assertRaises(ValueError):
series = Series([1, 2, 0, 4, -1], index=['a', 'b', 'c', 'd', 'e'])
series.plot.pie()
# includes nan
series = Series([1, 2, np.nan, 4], index=['a', 'b', 'c', 'd'],
name='YLABEL')
ax = _check_plot_works(series.plot.pie)
self._check_text_labels(ax.texts, ['a', 'b', '', 'd'])
def test_pie_nan(self):
s = Series([1, np.nan, 1, 1])
ax = s.plot.pie(legend=True)
expected = ['0', '', '2', '3']
result = [x.get_text() for x in ax.texts]
self.assertEqual(result, expected)
@slow
def test_hist_df_kwargs(self):
df = DataFrame(np.random.randn(10, 2))
ax = df.plot.hist(bins=5)
self.assertEqual(len(ax.patches), 10)
@slow
def test_hist_df_with_nonnumerics(self):
# GH 9853
with tm.RNGContext(1):
df = DataFrame(
np.random.randn(10, 4), columns=['A', 'B', 'C', 'D'])
df['E'] = ['x', 'y'] * 5
ax = df.plot.hist(bins=5)
self.assertEqual(len(ax.patches), 20)
ax = df.plot.hist() # bins=10
self.assertEqual(len(ax.patches), 40)
@slow
def test_hist_legacy(self):
_check_plot_works(self.ts.hist)
_check_plot_works(self.ts.hist, grid=False)
_check_plot_works(self.ts.hist, figsize=(8, 10))
# _check_plot_works adds an ax so catch warning. see GH #13188
with tm.assert_produces_warning(UserWarning):
_check_plot_works(self.ts.hist,
by=self.ts.index.month)
with tm.assert_produces_warning(UserWarning):
_check_plot_works(self.ts.hist,
by=self.ts.index.month, bins=5)
fig, ax = self.plt.subplots(1, 1)
_check_plot_works(self.ts.hist, ax=ax)
_check_plot_works(self.ts.hist, ax=ax, figure=fig)
_check_plot_works(self.ts.hist, figure=fig)
tm.close()
fig, (ax1, ax2) = self.plt.subplots(1, 2)
_check_plot_works(self.ts.hist, figure=fig, ax=ax1)
_check_plot_works(self.ts.hist, figure=fig, ax=ax2)
with tm.assertRaises(ValueError):
self.ts.hist(by=self.ts.index, figure=fig)
@slow
def test_hist_bins_legacy(self):
df = DataFrame(np.random.randn(10, 2))
ax = df.hist(bins=2)[0][0]
self.assertEqual(len(ax.patches), 2)
@slow
def test_hist_layout(self):
df = self.hist_df
with tm.assertRaises(ValueError):
df.height.hist(layout=(1, 1))
with tm.assertRaises(ValueError):
df.height.hist(layout=[1, 1])
@slow
def test_hist_layout_with_by(self):
df = self.hist_df
# _check_plot_works adds an ax so catch warning. see GH #13188
with tm.assert_produces_warning(UserWarning):
axes = _check_plot_works(df.height.hist,
by=df.gender, layout=(2, 1))
self._check_axes_shape(axes, axes_num=2, layout=(2, 1))
with tm.assert_produces_warning(UserWarning):
axes = _check_plot_works(df.height.hist,
by=df.gender, layout=(3, -1))
self._check_axes_shape(axes, axes_num=2, layout=(3, 1))
with tm.assert_produces_warning(UserWarning):
axes = _check_plot_works(df.height.hist,
by=df.category, layout=(4, 1))
self._check_axes_shape(axes, axes_num=4, layout=(4, 1))
with tm.assert_produces_warning(UserWarning):
axes = _check_plot_works(df.height.hist,
by=df.category, layout=(2, -1))
self._check_axes_shape(axes, axes_num=4, layout=(2, 2))
with tm.assert_produces_warning(UserWarning):
axes = _check_plot_works(df.height.hist,
by=df.category, layout=(3, -1))
self._check_axes_shape(axes, axes_num=4, layout=(3, 2))
with tm.assert_produces_warning(UserWarning):
axes = _check_plot_works(df.height.hist,
by=df.category, layout=(-1, 4))
self._check_axes_shape(axes, axes_num=4, layout=(1, 4))
with tm.assert_produces_warning(UserWarning):
axes = _check_plot_works(df.height.hist,
by=df.classroom, layout=(2, 2))
self._check_axes_shape(axes, axes_num=3, layout=(2, 2))
axes = df.height.hist(by=df.category, layout=(4, 2), figsize=(12, 7))
self._check_axes_shape(axes, axes_num=4, layout=(4, 2),
figsize=(12, 7))
@slow
def test_hist_no_overlap(self):
from matplotlib.pyplot import subplot, gcf
x = Series(randn(2))
y = Series(randn(2))
subplot(121)
x.hist()
subplot(122)
y.hist()
fig = gcf()
axes = fig.axes if self.mpl_ge_1_5_0 else fig.get_axes()
self.assertEqual(len(axes), 2)
@slow
def test_hist_secondary_legend(self):
# GH 9610
df = DataFrame(np.random.randn(30, 4), columns=list('abcd'))
# primary -> secondary
ax = df['a'].plot.hist(legend=True)
df['b'].plot.hist(ax=ax, legend=True, secondary_y=True)
# both legends are drawn on the left ax
# left and right axis must be visible
self._check_legend_labels(ax, labels=['a', 'b (right)'])
self.assertTrue(ax.get_yaxis().get_visible())
self.assertTrue(ax.right_ax.get_yaxis().get_visible())
tm.close()
# secondary -> secondary
ax = df['a'].plot.hist(legend=True, secondary_y=True)
df['b'].plot.hist(ax=ax, legend=True, secondary_y=True)
# both legends are drawn on the left ax
# left axis must be invisible, right axis must be visible
self._check_legend_labels(ax.left_ax,
labels=['a (right)', 'b (right)'])
self.assertFalse(ax.left_ax.get_yaxis().get_visible())
self.assertTrue(ax.get_yaxis().get_visible())
tm.close()
# secondary -> primary
ax = df['a'].plot.hist(legend=True, secondary_y=True)
# right axes is returned
df['b'].plot.hist(ax=ax, legend=True)
# both legends are drawn on the left ax
# left and right axis must be visible
self._check_legend_labels(ax.left_ax, labels=['a (right)', 'b'])
self.assertTrue(ax.left_ax.get_yaxis().get_visible())
self.assertTrue(ax.get_yaxis().get_visible())
tm.close()
@slow
def test_df_series_secondary_legend(self):
# GH 9779
df = DataFrame(np.random.randn(30, 3), columns=list('abc'))
s = Series(np.random.randn(30), name='x')
# primary -> secondary (without passing ax)
ax = df.plot()
s.plot(legend=True, secondary_y=True)
# both legends are drawn on the left ax
# left and right axis must be visible
self._check_legend_labels(ax, labels=['a', 'b', 'c', 'x (right)'])
self.assertTrue(ax.get_yaxis().get_visible())
self.assertTrue(ax.right_ax.get_yaxis().get_visible())
tm.close()
# primary -> secondary (with passing ax)
ax = df.plot()
s.plot(ax=ax, legend=True, secondary_y=True)
# both legends are drawn on the left ax
# left and right axis must be visible
self._check_legend_labels(ax, labels=['a', 'b', 'c', 'x (right)'])
self.assertTrue(ax.get_yaxis().get_visible())
self.assertTrue(ax.right_ax.get_yaxis().get_visible())
tm.close()
# secondary -> secondary (without passing ax)
ax = df.plot(secondary_y=True)
s.plot(legend=True, secondary_y=True)
# both legends are drawn on the left ax
# left axis must be invisible and right axis must be visible
expected = ['a (right)', 'b (right)', 'c (right)', 'x (right)']
self._check_legend_labels(ax.left_ax, labels=expected)
self.assertFalse(ax.left_ax.get_yaxis().get_visible())
self.assertTrue(ax.get_yaxis().get_visible())
tm.close()
# secondary -> secondary (with passing ax)
ax = df.plot(secondary_y=True)
s.plot(ax=ax, legend=True, secondary_y=True)
# both legends are drawn on the left ax
# left axis must be invisible and right axis must be visible
expected = ['a (right)', 'b (right)', 'c (right)', 'x (right)']
self._check_legend_labels(ax.left_ax, expected)
self.assertFalse(ax.left_ax.get_yaxis().get_visible())
self.assertTrue(ax.get_yaxis().get_visible())
tm.close()
# secondary -> secondary (with passing ax)
ax = df.plot(secondary_y=True, mark_right=False)
s.plot(ax=ax, legend=True, secondary_y=True)
# both legends are drawn on the left ax
# left axis must be invisible and right axis must be visible
expected = ['a', 'b', 'c', 'x (right)']
self._check_legend_labels(ax.left_ax, expected)
self.assertFalse(ax.left_ax.get_yaxis().get_visible())
self.assertTrue(ax.get_yaxis().get_visible())
tm.close()
@slow
def test_plot_fails_with_dupe_color_and_style(self):
x = Series(randn(2))
with tm.assertRaises(ValueError):
x.plot(style='k--', color='k')
@slow
def test_hist_kde(self):
ax = self.ts.plot.hist(logy=True)
self._check_ax_scales(ax, yaxis='log')
xlabels = ax.get_xticklabels()
# ticks are values, thus ticklabels are blank
self._check_text_labels(xlabels, [''] * len(xlabels))
ylabels = ax.get_yticklabels()
self._check_text_labels(ylabels, [''] * len(ylabels))
tm._skip_if_no_scipy()
_skip_if_no_scipy_gaussian_kde()
_check_plot_works(self.ts.plot.kde)
_check_plot_works(self.ts.plot.density)
ax = self.ts.plot.kde(logy=True)
self._check_ax_scales(ax, yaxis='log')
xlabels = ax.get_xticklabels()
self._check_text_labels(xlabels, [''] * len(xlabels))
ylabels = ax.get_yticklabels()
self._check_text_labels(ylabels, [''] * len(ylabels))
@slow
def test_kde_kwargs(self):
tm._skip_if_no_scipy()
_skip_if_no_scipy_gaussian_kde()
from numpy import linspace
_check_plot_works(self.ts.plot.kde, bw_method=.5,
ind=linspace(-100, 100, 20))
_check_plot_works(self.ts.plot.density, bw_method=.5,
ind=linspace(-100, 100, 20))
ax = self.ts.plot.kde(logy=True, bw_method=.5,
ind=linspace(-100, 100, 20))
self._check_ax_scales(ax, yaxis='log')
self._check_text_labels(ax.yaxis.get_label(), 'Density')
@slow
def test_kde_missing_vals(self):
tm._skip_if_no_scipy()
_skip_if_no_scipy_gaussian_kde()
s = Series(np.random.uniform(size=50))
s[0] = np.nan
axes = _check_plot_works(s.plot.kde)
# check if the values have any missing values
# GH14821
self.assertTrue(any(~np.isnan(axes.lines[0].get_xdata())),
msg='Missing Values not dropped')
@slow
def test_hist_kwargs(self):
ax = self.ts.plot.hist(bins=5)
self.assertEqual(len(ax.patches), 5)
self._check_text_labels(ax.yaxis.get_label(), 'Frequency')
tm.close()
if self.mpl_ge_1_3_1:
ax = self.ts.plot.hist(orientation='horizontal')
self._check_text_labels(ax.xaxis.get_label(), 'Frequency')
tm.close()
ax = self.ts.plot.hist(align='left', stacked=True)
tm.close()
@slow
def test_hist_kde_color(self):
ax = self.ts.plot.hist(logy=True, bins=10, color='b')
self._check_ax_scales(ax, yaxis='log')
self.assertEqual(len(ax.patches), 10)
self._check_colors(ax.patches, facecolors=['b'] * 10)
tm._skip_if_no_scipy()
_skip_if_no_scipy_gaussian_kde()
ax = self.ts.plot.kde(logy=True, color='r')
self._check_ax_scales(ax, yaxis='log')
lines = ax.get_lines()
self.assertEqual(len(lines), 1)
self._check_colors(lines, ['r'])
@slow
def test_boxplot_series(self):
ax = self.ts.plot.box(logy=True)
self._check_ax_scales(ax, yaxis='log')
xlabels = ax.get_xticklabels()
self._check_text_labels(xlabels, [self.ts.name])
ylabels = ax.get_yticklabels()
self._check_text_labels(ylabels, [''] * len(ylabels))
@slow
def test_kind_both_ways(self):
s = Series(range(3))
for kind in plotting._common_kinds + plotting._series_kinds:
if not _ok_for_gaussian_kde(kind):
continue
s.plot(kind=kind)
getattr(s.plot, kind)()
@slow
def test_invalid_plot_data(self):
s = Series(list('abcd'))
for kind in plotting._common_kinds:
if not _ok_for_gaussian_kde(kind):
continue
with tm.assertRaises(TypeError):
s.plot(kind=kind)
@slow
def test_valid_object_plot(self):
s = Series(lrange(10), dtype=object)
for kind in plotting._common_kinds:
if not _ok_for_gaussian_kde(kind):
continue
_check_plot_works(s.plot, kind=kind)
def test_partially_invalid_plot_data(self):
s = Series(['a', 'b', 1.0, 2])
for kind in plotting._common_kinds:
if not _ok_for_gaussian_kde(kind):
continue
with tm.assertRaises(TypeError):
s.plot(kind=kind)
def test_invalid_kind(self):
s = Series([1, 2])
with tm.assertRaises(ValueError):
s.plot(kind='aasdf')
@slow
def test_dup_datetime_index_plot(self):
dr1 = date_range('1/1/2009', periods=4)
dr2 = date_range('1/2/2009', periods=4)
index = dr1.append(dr2)
values = randn(index.size)
s = Series(values, index=index)
_check_plot_works(s.plot)
@slow
def test_errorbar_plot(self):
s = Series(np.arange(10), name='x')
s_err = np.random.randn(10)
d_err = DataFrame(randn(10, 2), index=s.index, columns=['x', 'y'])
# test line and bar plots
kinds = ['line', 'bar']
for kind in kinds:
ax = _check_plot_works(s.plot, yerr=Series(s_err), kind=kind)
self._check_has_errorbars(ax, xerr=0, yerr=1)
ax = _check_plot_works(s.plot, yerr=s_err, kind=kind)
self._check_has_errorbars(ax, xerr=0, yerr=1)
ax = _check_plot_works(s.plot, yerr=s_err.tolist(), kind=kind)
self._check_has_errorbars(ax, xerr=0, yerr=1)
ax = _check_plot_works(s.plot, yerr=d_err, kind=kind)
self._check_has_errorbars(ax, xerr=0, yerr=1)
ax = _check_plot_works(s.plot, xerr=0.2, yerr=0.2, kind=kind)
self._check_has_errorbars(ax, xerr=1, yerr=1)
ax = _check_plot_works(s.plot, xerr=s_err)
self._check_has_errorbars(ax, xerr=1, yerr=0)
# test time series plotting
ix = date_range('1/1/2000', '1/1/2001', freq='M')
ts = Series(np.arange(12), index=ix, name='x')
ts_err = Series(np.random.randn(12), index=ix)
td_err = DataFrame(randn(12, 2), index=ix, columns=['x', 'y'])
ax = _check_plot_works(ts.plot, yerr=ts_err)
self._check_has_errorbars(ax, xerr=0, yerr=1)
ax = _check_plot_works(ts.plot, yerr=td_err)
self._check_has_errorbars(ax, xerr=0, yerr=1)
# check incorrect lengths and types
with tm.assertRaises(ValueError):
s.plot(yerr=np.arange(11))
s_err = ['zzz'] * 10
# in mpl 1.5+ this is a TypeError
with tm.assertRaises((ValueError, TypeError)):
s.plot(yerr=s_err)
def test_table(self):
_check_plot_works(self.series.plot, table=True)
_check_plot_works(self.series.plot, table=self.series)
@slow
def test_series_grid_settings(self):
# Make sure plot defaults to rcParams['axes.grid'] setting, GH 9792
self._check_grid_settings(Series([1, 2, 3]),
plotting._series_kinds +
plotting._common_kinds)
@slow
def test_standard_colors(self):
for c in ['r', 'red', 'green', '#FF0000']:
result = plotting._get_standard_colors(1, color=c)
self.assertEqual(result, [c])
result = plotting._get_standard_colors(1, color=[c])
self.assertEqual(result, [c])
result = plotting._get_standard_colors(3, color=c)
self.assertEqual(result, [c] * 3)
result = plotting._get_standard_colors(3, color=[c])
self.assertEqual(result, [c] * 3)
@slow
def test_standard_colors_all(self):
import matplotlib.colors as colors
# multiple colors like mediumaquamarine
for c in colors.cnames:
result = plotting._get_standard_colors(num_colors=1, color=c)
self.assertEqual(result, [c])
result = plotting._get_standard_colors(num_colors=1, color=[c])
self.assertEqual(result, [c])
result = plotting._get_standard_colors(num_colors=3, color=c)
self.assertEqual(result, [c] * 3)
result = plotting._get_standard_colors(num_colors=3, color=[c])
self.assertEqual(result, [c] * 3)
# single letter colors like k
for c in colors.ColorConverter.colors:
result = plotting._get_standard_colors(num_colors=1, color=c)
self.assertEqual(result, [c])
result = plotting._get_standard_colors(num_colors=1, color=[c])
self.assertEqual(result, [c])
result = plotting._get_standard_colors(num_colors=3, color=c)
self.assertEqual(result, [c] * 3)
result = plotting._get_standard_colors(num_colors=3, color=[c])
self.assertEqual(result, [c] * 3)
def test_series_plot_color_kwargs(self):
# GH1890
ax = Series(np.arange(12) + 1).plot(color='green')
self._check_colors(ax.get_lines(), linecolors=['green'])
def test_time_series_plot_color_kwargs(self):
# #1890
ax = Series(np.arange(12) + 1, index=date_range(
'1/1/2000', periods=12)).plot(color='green')
self._check_colors(ax.get_lines(), linecolors=['green'])
def test_time_series_plot_color_with_empty_kwargs(self):
import matplotlib as mpl
if self.mpl_ge_1_5_0:
def_colors = self._maybe_unpack_cycler(mpl.rcParams)
else:
def_colors = mpl.rcParams['axes.color_cycle']
index = date_range('1/1/2000', periods=12)
s = Series(np.arange(1, 13), index=index)
ncolors = 3
for i in range(ncolors):
ax = s.plot()
self._check_colors(ax.get_lines(), linecolors=def_colors[:ncolors])
def test_xticklabels(self):
# GH11529
s = Series(np.arange(10), index=['P%02d' % i for i in range(10)])
ax = s.plot(xticks=[0, 3, 5, 9])
exp = ['P%02d' % i for i in [0, 3, 5, 9]]
self._check_text_labels(ax.get_xticklabels(), exp)
def test_custom_business_day_freq(self):
# GH7222
from pandas.tseries.offsets import CustomBusinessDay
s = Series(range(100, 121), index=pd.bdate_range(
start='2014-05-01', end='2014-06-01',
freq=CustomBusinessDay(holidays=['2014-05-26'])))
_check_plot_works(s.plot)
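# --- illustrative snippet (not part of the pandas test suite) ---
# A by-hand version of what the tests above automate; the Agg backend is an
# assumption so the sketch stays headless.
def _manual_series_plot_demo():
    import matplotlib
    matplotlib.use('Agg')
    s = Series(randn(30), index=date_range('1/1/2000', periods=30))
    ax = s.plot(title='demo', legend=True)
    assert ax.get_title() == 'demo'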
if __name__ == '__main__':
nose.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure'],
exit=False)
| [
"[email protected]"
] | |
6a18f0ae5be54533a66e3eca6087ba0b206673dc | 781e2692049e87a4256320c76e82a19be257a05d | /all_data/exercism_data/python/saddle-points/3a5b8c06b75443c1ba4f3e45cd0ac791.py | bc8cf755d9ace6afd58ad1d6751c8c089df1218d | [] | no_license | itsolutionscorp/AutoStyle-Clustering | 54bde86fe6dbad35b568b38cfcb14c5ffaab51b0 | be0e2f635a7558f56c61bc0b36c6146b01d1e6e6 | refs/heads/master | 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null | UTF-8 | Python | false | false | 496 | py | def saddle_points(mat):
spoints = set()
if mat:
rowLen = len(mat[0])
for row in mat:
if len(row) != rowLen:
raise ValueError("Irregular matrix. All rows must be the same length.")
for i, row in enumerate(mat):
for j in range(rowLen):
if row[j] == max(row):
if row[j] == min([mat[n][j] for n in range(len(mat))]):
spoints.add((i, j))
return spoints
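# Example (illustrative): 5 is the largest value in its row and the smallest
# in its column, so (1, 0) is the only saddle point of this matrix.
if __name__ == "__main__":
    matrix = [[9, 8, 7],
              [5, 3, 2],
              [6, 6, 7]]
    print(saddle_points(matrix))  # {(1, 0)}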
| [
"[email protected]"
] | |
8a2c478a7c55bd6e17bdb6130aaa087cc8b4487b | 46035631e6d76ddea73603fcf139290f5cb4a991 | /aws-python/__main__.py | dfb3e5a12cc77cf4fba6391e7fa0f6e30b3084db | [
"Apache-2.0"
] | permissive | pulumi/templates | c6150fd66d5ba85a312d9ee3102ed456abebda8b | 7c18d24ed7a4e0f5e00801bc133bb19dae630ee3 | refs/heads/master | 2023-08-21T12:46:56.389767 | 2023-08-04T20:36:26 | 2023-08-04T20:36:26 | 124,577,647 | 66 | 52 | Apache-2.0 | 2023-09-13T00:07:57 | 2018-03-09T18:21:12 | Go | UTF-8 | Python | false | false | 219 | py | """An AWS Python Pulumi program"""
import pulumi
from pulumi_aws import s3
# Create an AWS resource (S3 Bucket)
bucket = s3.Bucket('my-bucket')
# Export the name of the bucket
pulumi.export('bucket_name', bucket.id)
| [
"[email protected]"
] | |
c4e918875ec7a2958629ca6a8d541407018065d7 | 01b1a86160eca8c948c80ef506da00ecebe1b933 | /gerapy_auto_extractor/helpers.py | be5a131196cf20284049814772a471f03e95f487 | [
"Apache-2.0"
] | permissive | downdawn/GerapyAutoExtractor | 0b23d10761576a2ebe6b81332dc1ba914fe3e78d | e7085264244aede0207de2641302f79bba42edf5 | refs/heads/master | 2023-06-24T08:54:05.772440 | 2021-07-24T18:29:07 | 2021-07-24T18:29:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 64 | py | from gerapy_auto_extractor.utils.helper import jsonify, content
| [
"[email protected]"
] | |
e4910e73c04302fba1ca073f461f43827a177146 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03067/s724811425.py | b3ebbb168ee4d506128efbef3cb4cafbd8f7d694 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 66 | py | a,b,c=map(int,input().split())
print(["No","Yes"][a>c>b or a<c<b]) | [
"[email protected]"
] | |
2aca85423c446bc02c5316174601aea98f11d8bb | 1aa61c09db8e37bb6e9e3f8144c6f82a61af8f31 | /mergify_engine/github_types.py | c1f33fc79a421fdb4dde3945c53e93c6c2a12b80 | [
"Apache-2.0"
] | permissive | HarshCasper/mergify-engine | 15460e813eadaaebeeb5942dd07c9dbc8bd18c5b | 02d0a682c14db1c3fefeef4895645161cbb40f4f | refs/heads/master | 2023-02-25T10:51:59.853549 | 2021-01-25T07:17:25 | 2021-01-25T07:17:25 | 332,657,373 | 0 | 0 | Apache-2.0 | 2021-01-25T07:17:26 | 2021-01-25T06:55:32 | Python | UTF-8 | Python | false | false | 10,038 | py | # -*- encoding: utf-8 -*-
#
# Copyright © 2020 Mergify SAS
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import typing
GitHubLogin = typing.NewType("GitHubLogin", str)
class GitHubInstallationAccessToken(typing.TypedDict):
# https://developer.github.com/v3/apps/#response-7
token: str
expires_at: str
GitHubAccountType = typing.Literal["User", "Organization", "Bot"]
GitHubAccountIdType = typing.NewType("GitHubAccountIdType", int)
class GitHubAccount(typing.TypedDict):
login: GitHubLogin
id: GitHubAccountIdType
type: GitHubAccountType
GitHubInstallationIdType = typing.NewType("GitHubInstallationIdType", int)
class GitHubInstallation(typing.TypedDict):
# https://developer.github.com/v3/apps/#get-an-organization-installation-for-the-authenticated-app
id: GitHubInstallationIdType
account: GitHubAccount
GitHubRefType = typing.NewType("GitHubRefType", str)
SHAType = typing.NewType("SHAType", str)
GitHubRepositoryIdType = typing.NewType("GitHubRepositoryIdType", int)
GitHubRepositoryName = typing.NewType("GitHubRepositoryName", str)
class GitHubRepository(typing.TypedDict):
id: GitHubRepositoryIdType
owner: GitHubAccount
private: bool
name: GitHubRepositoryName
full_name: str
archived: bool
url: str
default_branch: GitHubRefType
class GitHubBranchCommitParent(typing.TypedDict):
sha: SHAType
class GitHubBranchCommitCommit(typing.TypedDict):
message: str
class GitHubBranchCommit(typing.TypedDict):
sha: SHAType
parents: typing.List[GitHubBranchCommitParent]
commit: GitHubBranchCommitCommit
class GitHubBranch(typing.TypedDict):
name: GitHubRefType
commit: GitHubBranchCommit
class GitHubBranchRef(typing.TypedDict):
label: str
ref: GitHubRefType
sha: SHAType
repo: GitHubRepository
user: GitHubAccount
class GitHubLabel(typing.TypedDict):
id: int
name: str
color: str
default: bool
class GitHubComment(typing.TypedDict):
id: int
body: str
user: GitHubAccount
class GitHubIssueOrPullRequest(typing.TypedDict):
pass
GitHubIssueId = typing.NewType("GitHubIssueId", int)
GitHubIssueNumber = typing.NewType("GitHubIssueNumber", int)
class GitHubIssue(GitHubIssueOrPullRequest):
id: GitHubIssueId
number: GitHubIssueNumber
GitHubPullRequestState = typing.Literal["open", "closed"]
# NOTE(sileht): GitHub mergeable_state is undocumented; here are my findings
# from testing and some info from other projects:
#
# unknown: not yet computed by Github
# dirty: pull request conflict with the base branch
# behind: head branch is behind the base branch (only if strict: True)
# unstable: branch up2date (if strict: True) and not required status
# checks are failure or pending
# clean: branch up2date (if strict: True) and all status check OK
# has_hooks: Mergeable with passing commit status and pre-receive hooks.
#
# https://platform.github.community/t/documentation-about-mergeable-state/4259
# https://github.com/octokit/octokit.net/issues/1763
# https://developer.github.com/v4/enum/mergestatestatus/
GitHubPullRequestMergeableState = typing.Literal[
"unknown",
"dirty",
"behind",
"unstable",
"clean",
"has_hooks",
]
GitHubPullRequestId = typing.NewType("GitHubPullRequestId", int)
GitHubPullRequestNumber = typing.NewType("GitHubPullRequestNumber", int)
ISODateTimeType = typing.NewType("ISODateTimeType", str)
class GitHubPullRequest(GitHubIssueOrPullRequest):
# https://developer.github.com/v3/pulls/#get-a-pull-request
id: GitHubPullRequestId
number: GitHubPullRequestNumber
maintainer_can_modify: bool
base: GitHubBranchRef
head: GitHubBranchRef
state: GitHubPullRequestState
user: GitHubAccount
labels: typing.List[GitHubLabel]
merged: bool
merged_by: typing.Optional[GitHubAccount]
merged_at: typing.Optional[ISODateTimeType]
rebaseable: bool
draft: bool
merge_commit_sha: typing.Optional[SHAType]
mergeable_state: GitHubPullRequestMergeableState
html_url: str
title: str
# https://docs.github.com/en/free-pro-team@latest/developers/webhooks-and-events/webhook-events-and-payloads
GitHubEventType = typing.Literal[
"check_run",
"check_suite",
"pull_request",
"status",
"push",
"issue_comment",
"pull_request_review",
"pull_request_review_comment",
# This does not exist in GitHub, it's a Mergify made one
"refresh",
]
class GitHubEvent(typing.TypedDict):
organization: GitHubAccount
installation: GitHubInstallation
sender: GitHubAccount
GitHubEventRefreshActionType = typing.Literal[
"user",
"forced",
]
# This does not exist in GitHub, it's a Mergify made one
class GitHubEventRefresh(GitHubEvent):
repository: GitHubRepository
action: GitHubEventRefreshActionType
ref: typing.Optional[GitHubRefType]
pull_request: typing.Optional[GitHubPullRequest]
GitHubEventPullRequestActionType = typing.Literal[
"opened",
"edited",
"closed",
"assigned",
"unassigned",
"review_requested",
"review_request_removed",
"ready_for_review",
"labeled",
"unlabeled",
"synchronize",
"locked",
"unlocked",
"reopened",
]
class GitHubEventPullRequest(GitHubEvent):
repository: GitHubRepository
action: GitHubEventPullRequestActionType
pull_request: GitHubPullRequest
GitHubEventPullRequestReviewCommentActionType = typing.Literal[
"created",
"edited",
"deleted",
]
class GitHubEventPullRequestReviewComment(GitHubEvent):
repository: GitHubRepository
action: GitHubEventPullRequestReviewCommentActionType
pull_request: GitHubPullRequest
GitHubEventPullRequestReviewActionType = typing.Literal[
"submitted",
"edited",
"dismissed",
]
GitHubReviewIdType = typing.NewType("GitHubReviewIdType", int)
GitHubReviewStateType = typing.Literal[
"APPROVED", "COMMENTED", "DISMISSED", "CHANGES_REQUESTED"
]
class GitHubReview(typing.TypedDict):
id: GitHubReviewIdType
user: GitHubAccount
body: typing.Optional[str]
pull_request: GitHubPullRequest
repository: GitHubRepository
state: GitHubReviewStateType
class GitHubEventPullRequestReview(GitHubEvent):
repository: GitHubRepository
action: GitHubEventPullRequestReviewActionType
pull_request: GitHubPullRequest
GitHubEventIssueCommentActionType = typing.Literal[
"created",
"edited",
"deleted",
]
class GitHubEventIssueComment(GitHubEvent):
repository: GitHubRepository
action: GitHubEventIssueCommentActionType
issue: GitHubIssue
comment: GitHubComment
class GitHubEventPush(GitHubEvent):
repository: GitHubRepository
ref: GitHubRefType
before: SHAType
after: SHAType
class GitHubEventStatus(GitHubEvent):
repository: GitHubRepository
sha: SHAType
class GitHubApp(typing.TypedDict):
id: int
GitHubCheckRunConclusion = typing.Literal[
"success",
"failure",
"neutral",
"cancelled",
"timed_out",
"action_required",
"stale",
]
class GitHubCheckRunOutput(typing.TypedDict):
title: typing.Optional[str]
summary: typing.Optional[str]
text: typing.Optional[str]
GitHubStatusState = typing.Literal[
"pending",
"success",
"failure",
"error",
]
class GitHubStatus(typing.TypedDict):
context: str
state: GitHubStatusState
class GitHubCheckRun(typing.TypedDict):
id: int
app: GitHubApp
external_id: str
pull_requests: typing.List[GitHubPullRequest]
head_sha: SHAType
before: SHAType
after: SHAType
name: str
output: GitHubCheckRunOutput
conclusion: typing.Optional[GitHubCheckRunConclusion]
completed_at: ISODateTimeType
class GitHubCheckSuite(typing.TypedDict):
id: int
app: GitHubApp
external_id: str
pull_requests: typing.List[GitHubPullRequest]
head_sha: SHAType
before: SHAType
after: SHAType
GitHubCheckRunActionType = typing.Literal[
"created",
"completed",
"rerequested",
"requested_action",
]
class GitHubEventCheckRun(GitHubEvent):
repository: GitHubRepository
action: GitHubCheckRunActionType
check_run: GitHubCheckRun
GitHubCheckSuiteActionType = typing.Literal[
"created",
"completed",
"rerequested",
"requested_action",
]
class GitHubEventCheckSuite(GitHubEvent):
repository: GitHubRepository
action: GitHubCheckSuiteActionType
check_suite: GitHubCheckSuite
GitHubEventOrganizationActionType = typing.Literal[
"deleted",
"renamed",
"member_added",
"member_removed",
"member_invited",
]
class GitHubEventOrganization(GitHubEvent):
action: GitHubEventOrganizationActionType
GitHubEventMemberActionType = typing.Literal["added", "removed", "edited"]
class GitHubEventMember(GitHubEvent):
action: GitHubEventMemberActionType
repository: GitHubRepository
GitHubEventMembershipActionType = typing.Literal["added", "removed"]
class GitHubEventMembership(GitHubEvent):
action: GitHubEventMembershipActionType
GitHubEventTeamActionType = typing.Literal[
"created",
"deleted",
"edited",
"added_to_repository",
"removed_from_repository",
]
class GitHubEventTeam(GitHubEvent):
action: GitHubEventTeamActionType
repository: typing.Optional[GitHubRepository]
class GitHubEventTeamAdd(GitHubEvent, total=False):
# Repository key can be missing on Enterprise installations
repository: GitHubRepository
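# --- illustrative snippet (not part of the module) ---
# TypedDicts are erased at runtime (instances are plain dicts), so a webhook
# payload can be annotated like this; the values below are made up.
if __name__ == "__main__":
    account: GitHubAccount = {
        "login": GitHubLogin("octocat"),
        "id": GitHubAccountIdType(1),
        "type": "User",
    }
    print(account["login"])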
| [
"37929162+mergify[bot]@users.noreply.github.com"
] | 37929162+mergify[bot]@users.noreply.github.com |
684fc7ef464c7a993ed509a48263880dc368f563 | 8d79fc03f6e5a6df41e824c8573d3ea4646146bf | /IB_Tree_PathSum_if_Exists.py | e0c2d820da30e788bdd1a62a83ce8e103a92d034 | [] | no_license | Cbkhare/Codes | 3bea294dd0f2ec99e7e0ef0b7ff976cbe1765b7f | 5b535795cdd742b7810ea163e0868b022736647d | refs/heads/master | 2021-10-24T03:26:54.983073 | 2019-03-21T14:33:41 | 2019-03-21T14:33:41 | 111,226,735 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 970 | py | class Solution:
# @param A : root node of tree
# @param B : integer
# @return an integer
def hasPathSum(self, A, B):
if not A: return 0
def validate(node, some):
#print (node.val, some)
            if node.left is None and node.right is None:  # leaf node
if node.val==some:
return True
else:
return False
else:
if node.left:
r = validate(node.left,some=some-node.val)
if r:
# This is to avoid going to node.right if found true
return True
if node.right:
r = validate(node.right, some=some-node.val)
if r:
return True
return False
if validate(A,B):
return 1
else:
return 0
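    # Usage sketch (comments only; a TreeNode(val, left, right) class is
    # assumed here -- the judge normally supplies it):
    #     root = TreeNode(5, TreeNode(4, TreeNode(11, TreeNode(7), TreeNode(2))), TreeNode(8))
    #     Solution().hasPathSum(root, 22)  ->  1   (path 5-4-11-2 sums to 22)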
'''
https://www.interviewbit.com/problems/path-sum/
''' | [
"[email protected]"
] | |
6fae06163498067858f995086c69e2c86473bfc5 | 9876a02fb4f6c38271e41995296c6da4d2ec84af | /wagtail_review/templatetags/wagtailreview_tags.py | 0f6c7e5ea88157336aa4fe725a39f1f39153a035 | [
"BSD-3-Clause"
] | permissive | jacobtoppm/wagtail-review | 423c19cecfa17ddeb22de6bb2a34baad0cd10fdb | 23b81d7e5699ecb843a99da1aa207775a8b85bd6 | refs/heads/master | 2020-12-27T18:26:42.182847 | 2019-05-20T11:29:16 | 2019-05-20T11:29:16 | 238,005,148 | 0 | 0 | BSD-3-Clause | 2020-03-13T10:44:00 | 2020-02-03T16:05:08 | null | UTF-8 | Python | false | false | 1,172 | py | from django import template
from wagtail_review.forms import ResponseForm
register = template.Library()
@register.inclusion_tag('wagtail_review/annotate.html', takes_context=True)
def wagtailreview(context):
request = context['request']
review_mode = getattr(request, 'wagtailreview_mode', None)
reviewer = getattr(request, 'wagtailreview_reviewer', None)
if review_mode == 'respond' or review_mode == 'comment':
return {
'mode': review_mode,
'allow_annotations': (reviewer.review.status != 'closed'),
'show_closed': (reviewer.review.status == 'closed'),
'allow_responses': (review_mode == 'respond' and reviewer.review.status != 'closed'),
'reviewer': reviewer,
'token': reviewer.response_token,
'response_form': ResponseForm()
}
elif review_mode == 'view':
return {
'mode': review_mode,
'show_closed': False,
'allow_annotations': False,
'allow_responses': False,
'reviewer': reviewer,
'token': reviewer.view_token
}
else:
return {'mode': None}
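

# Typical template usage (sketch; assumes the page was served through
# wagtail-review so that request.wagtailreview_mode and
# request.wagtailreview_reviewer are set on the request):
#     {% load wagtailreview_tags %}
#     {% wagtailreview %}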
| [
"[email protected]"
] | |
18a16704f66dd1d340db3c65e8ea06fa3b6b70cd | 59f64b5cf799e31c97b11828dba4787afb8f3f17 | /hail/python/hail/ggplot/aes.py | 5497f28d4d22e7863d89af491b89520fe20e5f4b | [
"MIT"
] | permissive | hail-is/hail | 2089e6f3b38548f13fa5c2a8ab67f5cfdd67b4f1 | 07a483ae0f46c66f3ed6fd265b48f48c06298f98 | refs/heads/main | 2023-09-01T15:03:01.450365 | 2023-09-01T02:46:35 | 2023-09-01T02:46:35 | 45,069,467 | 913 | 262 | MIT | 2023-09-14T21:53:32 | 2015-10-27T20:55:42 | Python | UTF-8 | Python | false | false | 1,112 | py | from collections.abc import Mapping
from hail.expr import Expression
from hail import literal
class Aesthetic(Mapping):
def __init__(self, properties):
self.properties = properties
def __getitem__(self, item):
return self.properties[item]
def __len__(self):
return len(self.properties)
def __contains__(self, item):
return item in self.properties
def __iter__(self):
return iter(self.properties)
def __repr__(self):
return self.properties.__repr__()
def merge(self, other):
return Aesthetic({**self.properties, **other.properties})
def aes(**kwargs):
"""Create an aesthetic mapping
Parameters
----------
kwargs:
        Map aesthetic names to Hail expressions for the table being plotted.
Returns
-------
:class:`.Aesthetic`
The aesthetic mapping to be applied.
"""
hail_field_properties = {}
for k, v in kwargs.items():
if not isinstance(v, Expression):
v = literal(v)
hail_field_properties[k] = v
return Aesthetic(hail_field_properties)
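

# Example (sketch -- `ht` is an assumed Hail table from the plotting call site):
#     mapping = aes(x=ht.height, y=ht.weight, color="blue")
# Non-Expression values such as "blue" are wrapped via literal() above.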
| [
"[email protected]"
] | |
cffd05aad6e7ec0a8b97f7e2970e5b764364375f | 2ac0e1ca51c473bba04bb08ea3be2015063a6a13 | /galmeko/hospital/models.py | 99b33f90648b820ca4d8c879fc1956e7d0906004 | [] | no_license | guarav00009/Gaurav-Pandey-Latest | 2012aafe643e1fcc915626422e352d1e4411905a | aa38a47a46bc434f5ec608fde5eec0f0f58259b9 | refs/heads/master | 2020-12-22T10:03:17.325527 | 2020-01-28T13:53:52 | 2020-01-28T13:53:52 | 236,746,358 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,460 | py | from django.db import models
from django.utils.translation import gettext_lazy as _
from django.conf import settings
from django.utils.html import format_html
from django.template.response import TemplateResponse
User = settings.AUTH_USER_MODEL
# Create your models here.
class Hospital(models.Model):
user = models.OneToOneField(User,on_delete=models.CASCADE)
hospital_name = models.CharField(max_length=100,blank=False,null=False)
phone = models.CharField(max_length=15, blank=True, null=True)
registration_no = models.CharField(max_length=30, unique=True)
address = models.CharField(max_length=150,blank=False,null=False)
file = models.ImageField(null=True, blank=True, upload_to="hospital/")
STATUS_CHOICES = (
(0, 'Pending'),
(1, 'Active'),
(2, 'Rejected'),
(3, 'Deleted'),
)
status = models.IntegerField(
_('status'), choices=STATUS_CHOICES, default=0)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class Meta:
verbose_name = 'Hospital'
verbose_name_plural = 'Hospital'
def __str__(self):
return self.hospital_name
def file_link(self):
if self.file:
            # pass the URL as an argument so format_html() escapes it properly
            return format_html("<a href='{}' download>Download</a>", self.file.url)
else:
return "No attachment"
file_link.allow_tags = True
file_link.short_description = 'Attachment' | [
"[email protected]"
] | |
6644f5d39c16c8085f33054bbbdde0e525062265 | 3c2323929499a4d81adada6f60ee64bde1e86cb2 | /Simple_Backpropagation_Program/pytorch/views.py | ad9420f44e05bb17b3ef53f819f0390a0c1d09d5 | [] | no_license | GeonwooVincentKim/Backpropagation_Pytorch_Django | 8ba22bb065aca35fed114420b749bb9f0a383688 | 41df659956e5e4e8126b272bd4f5053cdeb30663 | refs/heads/master | 2022-11-22T06:44:27.901139 | 2020-07-16T14:01:06 | 2020-07-16T14:01:06 | 273,230,382 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 814 | py | from django.shortcuts import render
# Create your views here.
def index(request):
return render(request, 'index.html', {})
def sub(request):
# if request.method == 'POST':
# """
# Write down some code which is related to
# the number that you input.
# """
# """
# From now you are going to here
# to handle the data, you should make database
# that helps save data users input numbers into this
# Simple BackPropagation Algorithm.
# """
# context = {'form': }
return render(request, "sub/sub.html", {})
# return render(request, 'sub/sub.html', {})
def input(request):
return render(request, "sub/index.html", {})
# context = {'form': InputForm()}
# return render(request, "input/input.html", {})
| [
"[email protected]"
] | |
07db860fc6de84b931e4b270036c770e99f84c94 | 89b6997b24e404c176358073626a8bfad7bcdb8e | /.history/chat/consumers_20210427011737.py | 8bc854916d684ee4e430ad24a1f2c472b16dc6f0 | [] | no_license | mohamedhawas123/Education-platform-django | 513e64ac112880385402ce609077796578b4e9ee | 7b83e66bba66b8b2b1a007f5818a534653e6abfb | refs/heads/main | 2023-07-18T16:19:52.177886 | 2021-09-24T12:04:09 | 2021-09-24T12:04:09 | 352,306,462 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 69 | py | import json
from channels.generic.websocket import WebsocketConsumer
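

# Sketch only: this snapshot contains just the imports, so the consumer below
# is an assumed minimal shape for a channels WebsocketConsumer (the class name
# and the 'message' key are placeholders, not the original code).
class ChatConsumer(WebsocketConsumer):
    def connect(self):
        # Accept the WebSocket handshake.
        self.accept()

    def disconnect(self, close_code):
        pass

    def receive(self, text_data=None, bytes_data=None):
        # Echo the received JSON payload back to the client.
        data = json.loads(text_data)
        self.send(text_data=json.dumps({'message': data.get('message', '')}))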
| [
"[email protected]"
] | |
68e429904fe72245794c1b21b63e11df67f9ce97 | cb13037cdbd3e0ab6108670108e9497cc1e2a5a7 | /0.leetcode/0.基本的算法/4.排序/1.冒泡排序Bubblesort.py | 87ddbad13767a3782c1a06daaf71a3b8bf67122c | [] | no_license | GMwang550146647/network | 390fe0d1c72dcaca8b6d6dd1307adca0d56b55ce | 576de9b993f7763789d25a995702b40c9bc6fa57 | refs/heads/master | 2023-06-15T04:42:54.306077 | 2021-07-12T06:06:02 | 2021-07-12T06:06:02 | 315,488,828 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 750 | py | '''
1.冒泡排序:
把最大的数一个一个地丢到最前面(期间对比的时候,见到比自己小的就交换相邻两个)
优点:
在非顺序链表都可以用
'''
def bubbleSort(arr):
for i in range(len(arr)-1,0,-1):
for j in range(1,i+1):
if arr[j-1]>arr[j]:
arr[j],arr[j-1]=arr[j-1],arr[j]
return arr
def bubbleSortModified(arr):
for i in range(len(arr)-1,0,-1):
modified=False
for j in range(1,i+1):
if arr[j-1]>arr[j]:
arr[j],arr[j-1]=arr[j-1],arr[j]
modified=True
if not modified:
break
return arr
arr=[9,8,7,6,5,4,3,2,1]
print(bubbleSort(arr.copy()))
print(bubbleSortModified(arr.copy())) | [
"[email protected]"
] | |
349ea6ce098e264d8c03d7b91b59e71dad2c0350 | d15eb2285895469a452867f76b033d0d64a4af5c | /Old_scripts_delete_20220804/Scripts/measurements/vna_autler_townes.py | 7d51bf373377dba1857bae3f809c5d6dc426d33d | [] | no_license | MRitter95/Kollar-Lab | 45ac62ed7805ad9faeeb33b54be50f39950f3b2c | c905725c43af6a49fe5bb2a994d5180f2ba469c2 | refs/heads/master | 2023-08-19T03:38:43.761313 | 2023-08-10T17:49:00 | 2023-08-10T17:49:00 | 236,054,959 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,356 | py | # -*- coding: utf-8 -*-
"""
Created on Sun Nov 8 18:31:45 2020
@author: Kollarlab
"""
import time
import os
import numpy as np
import matplotlib.pyplot as plt
import userfuncs
import plotting_tools as plots
def get_default_settings():
settings = {}
#Save location
settings['scanname'] = 'scanname'
settings['meas_type'] = 'Autler_Townes'
settings['project_dir'] = r'Z:\Data\defaultdir'
#Sweep parameters
settings['CAV_Attenuation'] = 30
settings['Qbit_Attenuation'] = 10
settings['Autler_Attenuation'] = 10
settings['ext_flux'] = 0
settings['autler_power'] = -20
settings['start_autler_freq'] = 3.5e9
settings['stop_autler_freq'] = 4.5e9
    settings['autler_points'] = 31
    # Sweep-order flags read below via settings['reverse'] / settings['random'];
    # they were missing from the defaults, which raised a KeyError.
    settings['reverse'] = False
    settings['random'] = False
#VNA settings
settings['channel'] = 1
settings['avg_time'] = 30
settings['measurement'] = 'S21'
settings['start_freq'] = 3.5e9
settings['stop_freq'] = 4.5e9
settings['freq_points'] = 501
settings['RFpower'] = -25
settings['RFport'] = 3
settings['Mport'] = 2
settings['CAVport'] = 1
settings['CAVpower'] = -55
settings['CAVfreq'] = 8.12555e9
settings['ifBW'] = 1e3
return settings
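
# Usage sketch (instrument driver objects are assumed to be created elsewhere;
# the dict keys below match the lookups at the top of vna_autler_townes):
#     settings = get_default_settings()
#     settings['project_dir'] = r'Z:\Data\my_dir'
#     instruments = {'VNA': vna, 'RFsource': autler_gen, 'SRS': srs}
#     vna_autler_townes(instruments, settings)
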
def vna_autler_townes(instruments, settings):
#Instruments used
vna = instruments['VNA']
autlergen = instruments['RFsource']
SRS = instruments['SRS']
vna.reset()
#Data saving and naming
saveDir = userfuncs.saveDir(settings['project_dir'], settings['meas_type'])
stamp = userfuncs.timestamp()
filename = settings['scanname'] + '_' + stamp
scanname = settings['scanname']
CAV_Attenuation = settings['CAV_Attenuation']
Qbit_Attenuation = settings['Qbit_Attenuation']
Autler_Attenuation = settings['Autler_Attenuation']
settings['CAVpower'] = settings['CAVpower'] + CAV_Attenuation
settings['RFpower'] = settings['RFpower'] + Qbit_Attenuation
settings['autler_power'] = settings['autler_power'] + Autler_Attenuation
autlergen.power = settings['autler_power']
autlergen.output = 'On'
SRS.output = 'On'
SRS.voltage_ramp(settings['ext_flux'])
start_autler_freq = settings['start_autler_freq']
stop_autler_freq = settings['stop_autler_freq']
autler_points = settings['autler_points']
autler_freqs = np.round(np.linspace(start_autler_freq, stop_autler_freq, autler_points),-3)
findices = np.array(list(range(len(autler_freqs))))
if settings['reverse']:
findices = np.flipud(findices)
if settings['random']:
np.random.shuffle(findices)
mags = np.zeros((len(autler_freqs), settings['freq_points']))
phases = np.zeros((len(autler_freqs), settings['freq_points']))
tstart = time.time()
for freqind in findices:
autler_freq = autler_freqs[freqind]
print('Freq: {}, final freq: {}'.format(autler_freq, autler_freqs[-1]))
autlergen.freq = autler_freq
data = vna.spec_meas(settings)
vna.autoscale()
mags[freqind] = data['mag']
phases[freqind] = data['phase']
if freqind==0:
tstop = time.time()
singlePointTime = tstop-tstart
estimatedTime = singlePointTime*len(autler_freqs)
print(' ')
print('estimated time for this scan : ' + str(np.round(estimatedTime/60, 1)) + ' minutes')
print('estimated time for this scan : ' + str(np.round(estimatedTime/60/60, 2)) + ' hours')
print(' ')
freqs = data['xaxis']
labels = ['Freq (GHz)', 'Autler freq (GHz)']
full_data = {}
single_data = {}
if not settings['random']:
if settings['reverse']:
full_data = {}
full_data['xaxis'] = freqs
full_data['mags'] = mags[freqind:]
full_data['phases'] = phases[freqind:]
single_data = data
yaxis = autler_freqs[freqind:]
else:
full_data = {}
full_data['xaxis'] = freqs
full_data['mags'] = mags[0:freqind+1]
full_data['phases'] = phases[0:freqind+1]
single_data = data
yaxis = autler_freqs[0:freqind+1]
plots.simplescan_plot(full_data, single_data, yaxis, filename, labels, identifier='', fig_num=1)
userfuncs.SaveFull(saveDir, filename, ['full_data', 'single_data', 'autler_freqs', 'labels', 'filename'], locals(), expsettings=settings)
plt.savefig(os.path.join(saveDir, filename+'.png'), dpi = 150)
t2 = time.time()
print('Elapsed time: {}'.format(t2-tstart))
if settings['random']:
full_data = {}
full_data['xaxis'] = freqs
full_data['mags'] = mags
full_data['phases'] = phases
single_data = data
yaxis = autler_freqs
plots.simplescan_plot(full_data, single_data, yaxis, filename, labels, identifier='', fig_num=1)
# SRS.voltage_ramp(0.)
# SRS.output = 'Off'
autlergen.output = 'Off'
userfuncs.SaveFull(saveDir, filename, ['full_data', 'single_data', 'autler_freqs', 'labels', 'filename'], locals(), expsettings=settings)
plt.savefig(os.path.join(saveDir, filename+'.png'), dpi = 150) | [
"[email protected]"
] | |
1b70f2c79a348180971c5ae664a3ee3a8482424a | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03378/s251701721.py | 49de078c7467f51e9581f9eab691c6a075c1561c | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 187 | py | n,m,s = [int(x) for x in input().split()]
a = [int(x) for x in input().split()]
low = 0
high = 0
for i in range(m):
if a[i] < s:
low += 1
else:
high += 1
print(min(low,high)) | [
"[email protected]"
] | |
333c75b551e4d62e7e80906e1b5ab7e2af0653cc | bd28f8a8dbcf7f2b4be3bcc0c0e656009191d379 | /predict_nn/ranlp/rsr_dev/mi/ian.py | 58b47a880118a587446b42c4ca6f575d9f0355ea | [
"MIT"
] | permissive | nicolay-r/attitudes-extraction-ds | e2e5f9218408514ca1f3eff5edf88771e2f368ee | 49a82843e6adbca35321aaaa08d05532e953a0fc | refs/heads/master | 2022-08-30T04:51:14.133899 | 2020-05-28T11:06:01 | 2020-05-28T11:06:01 | 197,908,649 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,240 | py | #!/usr/bin/python
import sys
sys.path.append('../../../../')
from predict_nn.ranlp.rsr_dev.config import TEST_ON_EPOCHS, MI_CONTEXTS_PER_OPINION
from networks.ranlp.io_rsr_dev import RaNLPConfTaskRuSentRelWithDevIO
from networks.mimlre.base import MIMLRE
from networks.context.architectures.ian import IAN
from networks.context.configurations.ian import IANConfig
from predict_nn.ranlp.mi_names import ModelNames
from networks.ranlp.model_mimlre import RaNLPConfTaskMIMLREModel
from networks.mimlre.configuration.base import MIMLRESettings
import predict_nn.ranlp.utils as utils
def modify_settings(settings):
assert(isinstance(settings, MIMLRESettings))
settings.modify_contexts_per_opinion(MI_CONTEXTS_PER_OPINION)
if __name__ == "__main__":
utils.run_cv_testing(model_name=ModelNames.MI_IAN,
create_network=lambda: MIMLRE(context_network=IAN()),
create_config=lambda: MIMLRESettings(context_settings=IANConfig()),
create_io=RaNLPConfTaskRuSentRelWithDevIO,
create_model=RaNLPConfTaskMIMLREModel,
modify_settings_callback=modify_settings,
test_on_epochs=TEST_ON_EPOCHS)
| [
"[email protected]"
] | |
37fcce29634843a7c5c79899d2c6871a27f98257 | 3fb718b33d486d638402e5f5bb4eb028332bd54e | /Objects and Classes/Zoo.py | c657af3653914ff55c24c427eacb63f1fabf3133 | [] | no_license | lion963/SoftUni-Python-Fundamentals- | 1c0aced0d770d0f5d0a4977543e945576425aff1 | 25fca7f88513d9e9b9ceb2741d9cb3b3c067b97b | refs/heads/master | 2023-01-24T16:21:46.517847 | 2020-12-14T13:50:06 | 2020-12-14T13:50:06 | 297,916,630 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,279 | py | class Zoo:
__animals = 0
def __init__(self, name):
self.name = name
self.mammals = []
self.fishes = []
self.birds = []
def add_animal(self, species, name):
if species == 'mammal':
self.mammals.append(name)
elif species == 'fish':
self.fishes.append(name)
elif species == 'bird':
self.birds.append(name)
self.__animals += 1
def get_info(self, species):
if species == 'mammal':
species_names = self.mammals
elif species == 'fish':
species_names = self.fishes
elif species == 'bird':
species_names = self.birds
names = ', '.join(species_names)
        # use self.name, not the module-level variable `zoo` (bug in original)
        if species == 'mammal':
            return f'Mammals in {self.name}: {names}'
        elif species == 'fish':
            return f'Fishes in {self.name}: {names}'
        elif species == 'bird':
            return f'Birds in {self.name}: {names}'
def get_total(self):
return f'Total animals: {self.__animals}'
zoo_name = input()
zoo = Zoo(zoo_name)
n = int(input())
for _ in range(n):
species, name = input().split(' ')
zoo.add_animal(species, name)
species = input()
print(zoo.get_info(species))
print(zoo.get_total())
| [
"[email protected]"
] | |
a1e6752c97c13384efca970a958b0761d12d34cd | d2189145e7be2c836017bea0d09a473bf1bc5a63 | /Reposiciones/reposicionesIsraelFP/reposicion31Ago18IsraelFP/fibonacciISraelFP.py | 692bd0eafb663ca194cd985e7f9b1080a1142875 | [] | no_license | emilianoNM/Tecnicas3 | 12d10ce8d78803c8d2cd6a721786a68f7ee2809d | 6ad7f0427ab9e23643a28ac16889bca8791421d0 | refs/heads/master | 2020-03-25T18:06:34.126165 | 2018-11-24T04:42:14 | 2018-11-24T04:42:14 | 144,013,045 | 3 | 5 | null | 2018-09-14T10:47:26 | 2018-08-08T12:49:57 | Python | UTF-8 | Python | false | false | 306 | py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Sat Sep 15 16:04:05 2018
@author: israel
"""
def fib(f):
if f == 1: return 1
if f == 2: return 1
return fib(f-1)+fib(f-2)
print "\t..:Fibonacci:.."
f=input("Cantidad de no. a hacer en Fibonacci: ")
print "> No. Fibonacci: ",fib(f)
| [
"[email protected]"
] | |
40547c88ef4733a7b77c0d92fa0344e3439c408f | 98efe1aee73bd9fbec640132e6fb2e54ff444904 | /loldib/getratings/models/NA/na_aatrox/__init__.py | edb18cb382f17b02c1036fa9cc09ee67a24a63ab | [
"Apache-2.0"
] | permissive | koliupy/loldib | be4a1702c26546d6ae1b4a14943a416f73171718 | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | refs/heads/master | 2021-07-04T03:34:43.615423 | 2017-09-21T15:44:10 | 2017-09-21T15:44:10 | 104,359,388 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 150 | py | from .na_aatrox_top import *
from .na_aatrox_jng import *
from .na_aatrox_mid import *
from .na_aatrox_bot import *
from .na_aatrox_sup import *
| [
"[email protected]"
] | |
390b65607f271bdd88f9fab4359365ad28e4f992 | d92235bce35d7bf1b028ae417c6ceb8891b6c8b4 | /dk_mnist_mlp_weightnorm.py | 10c935941f332df7936c404f15dd57a9d282b466 | [] | no_license | capybaralet/BayesianHypernet | 63faadc83aa95ec80e5d7805ec300c151734f93a | 4d7bdc749b2fb9cf74e45c5b21ccc590b6f781e7 | refs/heads/master | 2020-12-30T15:30:54.687925 | 2017-05-15T21:38:15 | 2017-05-15T21:38:15 | 91,155,018 | 3 | 0 | null | 2017-05-13T06:41:49 | 2017-05-13T06:41:49 | null | UTF-8 | Python | false | false | 5,345 | py | # -*- coding: utf-8 -*-
"""
Created on Fri May 12 17:46:38 2017
@author: Chin-Wei
"""
from modules import LinearFlowLayer, IndexLayer, PermuteLayer
from modules import CoupledDenseLayer, stochasticDenseLayer2
from utils import log_normal, log_stdnormal
from ops import load_mnist
import theano
import theano.tensor as T
from theano.tensor.shared_randomstreams import RandomStreams
srng = RandomStreams(seed=427)
floatX = theano.config.floatX
import lasagne
from lasagne import init
from lasagne import nonlinearities
from lasagne.layers import get_output
from lasagne.objectives import categorical_crossentropy as cc
import numpy as np
if 1:#def main():
"""
MNIST example
weight norm reparameterized MLP with prior on rescaling parameters
"""
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--perdatapoint',action='store_true')
parser.add_argument('--coupling',action='store_true')
parser.add_argument('--lrdecay',action='store_true')
parser.add_argument('--lr0',default=0.1,type=float)
parser.add_argument('--lbda',default=0.5,type=float)
    parser.add_argument('--bs',default=32,type=int)
    parser.add_argument('--size',default=50000,type=int)  # was missing; args.size is used below
args = parser.parse_args()
print args
perdatapoint = args.perdatapoint
coupling = 1#args.coupling
lr0 = args.lr0
lrdecay = args.lrdecay
lbda = np.cast[floatX](args.lbda)
bs = args.bs
size = max(10,min(50000,args.size))
clip_grad = 100
max_norm = 100
# load dataset
filename = '/data/lisa/data/mnist.pkl.gz'
train_x, train_y, valid_x, valid_y, test_x, test_y = load_mnist(filename)
input_var = T.matrix('input_var')
target_var = T.matrix('target_var')
dataset_size = T.scalar('dataset_size')
lr = T.scalar('lr')
# 784 -> 20 -> 10
weight_shapes = [(784, 200),
(200, 10)]
num_params = sum(ws[1] for ws in weight_shapes)
if perdatapoint:
wd1 = input_var.shape[0]
else:
wd1 = 1
# stochastic hypernet
ep = srng.normal(std=0.01,size=(wd1,num_params),dtype=floatX)
logdets_layers = []
h_layer = lasagne.layers.InputLayer([None,num_params])
layer_temp = LinearFlowLayer(h_layer)
h_layer = IndexLayer(layer_temp,0)
logdets_layers.append(IndexLayer(layer_temp,1))
if coupling:
layer_temp = CoupledDenseLayer(h_layer,200)
h_layer = IndexLayer(layer_temp,0)
logdets_layers.append(IndexLayer(layer_temp,1))
h_layer = PermuteLayer(h_layer,num_params)
layer_temp = CoupledDenseLayer(h_layer,200)
h_layer = IndexLayer(layer_temp,0)
logdets_layers.append(IndexLayer(layer_temp,1))
weights = lasagne.layers.get_output(h_layer,ep)
# primary net
t = np.cast['int32'](0)
layer = lasagne.layers.InputLayer([None,784])
inputs = {layer:input_var}
for ws in weight_shapes:
num_param = ws[1]
w_layer = lasagne.layers.InputLayer((None,ws[1]))
weight = weights[:,t:t+num_param].reshape((wd1,ws[1]))
inputs[w_layer] = weight
layer = stochasticDenseLayer2([layer,w_layer],ws[1])
print layer.output_shape
t += num_param
layer.nonlinearity = nonlinearities.softmax
y = T.clip(get_output(layer,inputs), 0.001, 0.999) # stability
# loss terms
logdets = sum([get_output(logdet,ep) for logdet in logdets_layers])
logqw = - (0.5*(ep**2).sum(1) + 0.5*T.log(2*np.pi)*num_params + logdets)
#logpw = log_normal(weights,0.,-T.log(lbda)).sum(1)
logpw = log_stdnormal(weights).sum(1)
kl = (logqw - logpw).mean()
logpyx = - cc(y,target_var).mean()
loss = - (logpyx - kl/T.cast(dataset_size,floatX))
params = lasagne.layers.get_all_params([h_layer,layer])
grads = T.grad(loss, params)
mgrads = lasagne.updates.total_norm_constraint(grads,
max_norm=max_norm)
cgrads = [T.clip(g, -clip_grad, clip_grad) for g in mgrads]
updates = lasagne.updates.adam(cgrads, params,
learning_rate=lr)
train = theano.function([input_var,target_var,dataset_size,lr],
loss,updates=updates)
predict = theano.function([input_var],y.argmax(1))
##################
# TRAIN
X, Y = train_x[:size],train_y[:size]
Xt, Yt = valid_x,valid_y
print 'trainset X.shape:{}, Y.shape:{}'.format(X.shape,Y.shape)
N = X.shape[0]
epochs = 50
records=list()
t = 0
for e in range(epochs):
if lrdecay:
lr = lr0 * 10**(-e/float(epochs-1))
else:
lr = lr0
for i in range(N/bs):
x = X[i*bs:(i+1)*bs]
y = Y[i*bs:(i+1)*bs]
loss = train(x,y,N,lr)
if t%100==0:
print 'epoch: {} {}, loss:{}'.format(e,t,loss)
tr_acc = (predict(X)==Y.argmax(1)).mean()
te_acc = (predict(Xt)==Yt.argmax(1)).mean()
print '\ttrain acc: {}'.format(tr_acc)
print '\ttest acc: {}'.format(te_acc)
t+=1
records.append(loss)
| [
"[email protected]"
] | |
775a119a67245fdb0d9299d512d4b793d1281268 | 0f931d9e5b74f52a57499364d858819873bdf469 | /15.py | ea1afc8f020b5301aa75fbcffe5bfc0a28df61c1 | [] | no_license | estuprofe/AdventOfCode2019 | 43f4d6f96d580a1732d7932ea863613af270fe56 | 54450df616feef810fbd410ccc9d1b0670195e49 | refs/heads/master | 2022-04-03T11:35:30.553698 | 2019-12-22T03:21:33 | 2019-12-22T03:21:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,313 | py | import fileinput
import heapq
import intcode
left, right, opposite = [2, 3, 1, 0], [3, 2, 0, 1], [1, 0, 3, 2]
dxs, dys = [0, 0, -1, 1], [-1, 1, 0, 0]
def traverse(program):
buf = []
gen = intcode.run(program, buf)
send = lambda d: buf.append(d + 1) or next(gen)
test = lambda d: send(d) and send(opposite[d])
d, p, cells, oxygen = 0, (0, 0), set(), None
while True:
if test(left[d]):
d = left[d] # turn left if possible
elif not test(d):
d = right[d] # else turn right if can't go straight
s = send(d)
if s == 0:
continue
p = (p[0] + dxs[d], p[1] + dys[d])
cells.add(p)
if s == 2:
oxygen = p
if p == (0, 0):
return cells, oxygen
def shortest_path(cells, source, target):
seen, queue = set(), [(0, source)]
while queue:
d, p = heapq.heappop(queue)
if p == target:
return d
seen.add(p)
for dx, dy in zip(dxs, dys):
q = (p[0] + dx, p[1] + dy)
if q in cells and q not in seen:
heapq.heappush(queue, (d + 1, q))
cells, oxygen = traverse(list(fileinput.input())[0])
print(shortest_path(cells, (0, 0), oxygen))
print(max(shortest_path(cells, cell, oxygen) for cell in cells))
| [
"[email protected]"
] | |
f1883475c18fada917ce742224d4c5223a023126 | 659a7a65c877f2eb0adbb6001a1f85f063d01acd | /mscreen/autodocktools_prepare_py3k/AutoDockTools/autoanalyze4Commands.py | 1c6e7002d9350becb7fe81829ce939e0463cab13 | [
"MIT"
] | permissive | e-mayo/mscreen | da59771be250ebe341feb102e0cbf41aab70de43 | a50f0b2f7104007c730baa51b4ec65c891008c47 | refs/heads/main | 2023-06-21T17:47:06.519307 | 2021-08-09T16:06:29 | 2021-08-09T16:06:29 | 345,008,321 | 10 | 1 | null | null | null | null | UTF-8 | Python | false | false | 13,348 | py | #############################################################################
#
# Author: Ruth HUEY, Michel F. SANNER
#
# Copyright: M. Sanner TSRI 2000
#
#############################################################################
# $Header: /opt/cvs/python/packages/share1.5/AutoDockTools/autoanalyze4Commands.py,v 1.7 2009/02/26 22:14:47 rhuey Exp $
#
# $Id: autoanalyze4Commands.py,v 1.7 2009/02/26 22:14:47 rhuey Exp $
#
#
#
#
#
#
#
"""
This Module facilitates analyzing results of autodock jobs.
* The first step is 'Read Docking Log' The selected file is parsed
which sets self.docked to a new Docking instance. The Docking class
has attributes:
o dlgParser
x 'dlg': full pathname of dlg
o dpo
o ch:a conformation handler.
x 'clusterNum':
x 'clusterList':
x 'modelList': a list of docked conformations
o macroFile: the Macromolecule file used
o 'macro': filename of macromolecule (eg '1hvrCorr.pdbqt')
o 'macroStem': name of macromolecule up to last '.' (eg '1hvrCorr')
o ligand: the original ligand
o output: lines containing summary of docking
The new Docking is also entered in the dictionary 'dockings' as a separate item
whose key is the file and whose value is the Docking.
After the selected docking log file is parsed, the user can:
    * select a displayed docked conformation using the 'Choose A Docked Conformation' menubutton. This opens a DockingChooser widget which is a ListChooser allowing selection either in the widget or in the viewer of any of the displayed dockings. Information about each docked conformation is displayed in the information window of the DockingChooser as different entries are highlighted.
    * display the macromolecule via the "Show Macromolecule" menubutton. This menubutton is linked to a file browser in case the macromolecule whose name is parsed from the docking log file is not in the current directory. (FIX THIS: what if the macromolecule is in a different directory but there is a molecule with the same name here???). The user can change the visibility, sampling, isovalue, renderMode and bounding-box visibility for each of the displayed grids
* display the autogrids used in the docking via the "Show Grids Used For Calc" menubutton. This menubutton is linked to a ListChooser which lets the user select whether to load all or some of the grids. The user can interactively change the visibility of each grid's isosurface, its sampling value, its isovalue, its rendermode (LINE or FILL) and the visibility of its bounding box.
* The user is able to visualize extra grid maps using the "Show Grid" button.
* If the current docking has clusters, the user is able to visualize a results histogram for it with 'Show Histogram'. The histogram can be printed.
* Result Summaries for docking(s) can be viewed, edited and saved with 'Get Output'
* Dockings can be deleted via 'Delete Docking Log'
"""
from ViewerFramework.VFCommand import CommandGUI
from AutoDockTools.autoanalyzeCommands import menuText,\
checkHasInitializedDockings, hideShowHide, toggleShowHide,\
checkNameStr, ADChooseMacro, ADReadMacro, ADEPDBMol,\
ADSeeSpots, ADShowBindingSite, ADMakeAllGrids, ADGetOutput,\
ADGetAGrid, ADSelectDLG, ADDeleteDLG, ADGetDirDLGs, ADGetDLG,\
ClusterDockingChooser, ModelDockingChooser, ADDrawHistogram,\
ADMacroLigandChart, ADDockingChooser, ReadAutoDockStates,\
StatesPlayerWidget, ShowAutoDockStatesBaseCmd, ShowAutoDockStates,\
ShowAutoDockStatesByEnergy, ShowAutoDockPopulation,\
ShowAutoDockStatesHISTOGRAM, ShowAutoDockClusteringStates,\
ReadAutoDockClusteringStates, WriteAutoDockStates,\
WriteAutoDockClustering, MakeAutoDockCLUSTERING,\
MakeAutoDockSubsetCLUSTERING
ADChooseMacroGUI=CommandGUI()
ADChooseMacroGUI.addMenuCommand('AutoTools4Bar', menuText['AnalyzeMB'],
menuText['chooseMacro'], cascadeName = menuText['MoleculesMB'])
ADReadMacroGUI=CommandGUI()
ADReadMacroGUI.addMenuCommand('AutoTools4Bar', menuText['AnalyzeMB'],
menuText['readMacro'], cascadeName = menuText['MoleculesMB'])
ADEPDBMolGUI=CommandGUI()
ADEPDBMolGUI.addMenuCommand('AutoTools4Bar', menuText['AnalyzeMB'],
menuText['epdbMol'], cascadeName = menuText['GridsMB'])
ADSeeSpotsGUI=CommandGUI()
ADSeeSpotsGUI.addMenuCommand('AutoTools4Bar', menuText['AnalyzeMB'],
menuText['seeSpots'], cascadeName = menuText['DockingLogMB'])
ADShowBindingSiteGUI=CommandGUI()
ADShowBindingSiteGUI.addMenuCommand('AutoTools4Bar', menuText['AnalyzeMB'],
menuText['showBindingSite'], cascadeName = menuText['DockingLogMB'])
ADMakeAllGridsGUI=CommandGUI()
ADMakeAllGridsGUI.addMenuCommand('AutoTools4Bar', menuText['AnalyzeMB'],
menuText['showGridsMB'], cascadeName=menuText['GridsMB'])
ADGetOutputGUI=CommandGUI()
ADGetOutputGUI.addMenuCommand('AutoTools4Bar', menuText['AnalyzeMB'],
menuText['getOutputMB'] , cascadeName=menuText['StatesMB'])
ADGetAGridGUI=CommandGUI()
ADGetAGridGUI.addMenuCommand('AutoTools4Bar', menuText['AnalyzeMB'],
menuText['addGridMB'], cascadeName=menuText['GridsMB'])
ADSelectDLGGUI=CommandGUI()
ADSelectDLGGUI.addMenuCommand('AutoTools4Bar', menuText['AnalyzeMB'],
menuText['selectDLG'], cascadeName = menuText['DockingLogMB'])
ADDeleteDLGGUI=CommandGUI()
ADDeleteDLGGUI.addMenuCommand('AutoTools4Bar', menuText['AnalyzeMB'],
menuText['deleteDLG'], cascadeName = menuText['DockingLogMB'])
ADGetDirDLGsGUI=CommandGUI()
ADGetDirDLGsGUI.addMenuCommand('AutoTools4Bar', menuText['AnalyzeMB'],
menuText['readDirDLG'], cascadeName = menuText['DockingLogMB'])
ADGetDLGGUI=CommandGUI()
ADGetDLGGUI.addMenuCommand('AutoTools4Bar', menuText['AnalyzeMB'],
menuText['readDLG'], cascadeName = menuText['DockingLogMB'])
###ADGetDLGGUI.menuBarCfg.update({'background':'tan','relief':'sunken'})
ADDrawHistogramGUI=CommandGUI()
ADDrawHistogramGUI.addMenuCommand('AutoTools4Bar', menuText['AnalyzeMB'],
menuText['showHistogramMB'], cascadeName=menuText['StatesMB'])
ADMacroLigandChartGUI=CommandGUI()
ADMacroLigandChartGUI.addMenuCommand('AutoTools4Bar', menuText['AnalyzeMB'],
menuText['showChartMB'], cascadeName=menuText['StatesMB'])
ADDockingChooserGUI=CommandGUI()
ADDockingChooserGUI.addMenuCommand('AutoTools4Bar', menuText['AnalyzeMB'],
menuText['chooseConfMB'], cascadeName = menuText['StatesMB'])
ReadAutoDockStatesGUI = CommandGUI()
ReadAutoDockStatesGUI.addMenuCommand('AutoTools4Bar', menuText['AnalyzeMB'],
menuText['readStatesMB'],cascadeName=menuText['StatesMB'])
ShowAutoDockStatesGUI = CommandGUI()
ShowAutoDockStatesGUI.addMenuCommand('AutoTools4Bar',
menuText['AnalyzeMB'], menuText['showStatesMB'],
cascadeName=menuText['StatesMB'])
ShowAutoDockStatesByEnergyGUI = CommandGUI()
ShowAutoDockStatesByEnergyGUI.addMenuCommand('AutoTools4Bar',
menuText['AnalyzeMB'], menuText['showStatesByEnergyMB'],
cascadeName=menuText['StatesMB'])
ShowAutoDockPopulationGUI = CommandGUI()
ShowAutoDockPopulationGUI.addMenuCommand('AutoTools4Bar',
menuText['AnalyzeMB'], menuText['showPopulationMB'],
cascadeName=menuText['StatesMB'])
ShowAutoDockStatesHISTOGRAMGUI = CommandGUI()
ShowAutoDockStatesHISTOGRAMGUI.addMenuCommand('AutoTools4Bar',
menuText['AnalyzeMB'], menuText['showStatesHISTOGRAMMB'],
cascadeName=menuText['StatesMB'])
ShowAutoDockStatesCLUSTERINGGUI = CommandGUI()
ShowAutoDockStatesCLUSTERINGGUI.addMenuCommand('AutoTools4Bar',
menuText['AnalyzeMB'], menuText['showStatesCLUSTERINGMB'],
cascadeName=menuText['ClusteringMB'])
ReadAutoDockStatesCLUSTERINGGUI = CommandGUI()
ReadAutoDockStatesCLUSTERINGGUI.addMenuCommand('AutoTools4Bar',
menuText['AnalyzeMB'], menuText['readStatesCLUSTERINGMB'],
cascadeName=menuText['ClusteringMB'])
WriteAutoDockStatesGUI = CommandGUI()
WriteAutoDockStatesGUI.addMenuCommand('AutoTools4Bar',
menuText['AnalyzeMB'], menuText['writeResultMB'],
cascadeName=menuText['StatesMB'])
WriteAutoDockClusteringGUI = CommandGUI()
WriteAutoDockClusteringGUI.addMenuCommand('AutoTools4Bar',
menuText['AnalyzeMB'], menuText['writeClusteringMB'],
cascadeName=menuText['ClusteringMB'])
MakeAutoDockCLUSTERINGGUI = CommandGUI()
MakeAutoDockCLUSTERINGGUI.addMenuCommand('AutoTools4Bar',
menuText['AnalyzeMB'], menuText['makeCLUSTERINGMB'],
cascadeName=menuText['ClusteringMB'])
MakeAutoDockSubsetCLUSTERINGGUI = CommandGUI()
MakeAutoDockSubsetCLUSTERINGGUI.addMenuCommand('AutoTools4Bar',
menuText['AnalyzeMB'], menuText['makeSubsetCLUSTERINGMB'],
cascadeName=menuText['ClusteringMB'])
commandList = [
{'name':'AD4analyze_readDLG','cmd':ADGetDLG(),'gui':ADGetDLGGUI},
{'name':'AD4analyze_readAllDLGInDirectory','cmd':ADGetDirDLGs(),'gui':ADGetDirDLGsGUI},
{'name':'AD4analyze_selectDLG','cmd':ADSelectDLG(),'gui':ADSelectDLGGUI},
{'name':'AD4analyze_deleteDLG','cmd':ADDeleteDLG(),'gui':ADDeleteDLGGUI},
{'name':'AD4analyze_readMacromolecule','cmd':ADReadMacro(),'gui':ADReadMacroGUI},
{'name':'AD4analyze_chooseMacromolecule','cmd':ADChooseMacro(),'gui':ADChooseMacroGUI},
{'name':'AD4analyze_showDockingsAsSpheres','cmd':ADSeeSpots(),'gui':ADSeeSpotsGUI},
{'name':'AD4analyze_showBindingSite','cmd':ADShowBindingSite(),'gui':ADShowBindingSiteGUI},
#{'name':'AD4analyze_readStates','cmd':ReadAutoDockStates(),'gui':ReadAutoDockStatesGUI},
{'name':'AD4analyze_showStates','cmd':ShowAutoDockStates(),'gui':ShowAutoDockStatesGUI},
{'name':'AD4analyze_showStatesByEnergy','cmd':ShowAutoDockStatesByEnergy(),'gui':ShowAutoDockStatesByEnergyGUI},
{'name':'AD4analyze_showPopulation','cmd':ShowAutoDockPopulation(),'gui':ShowAutoDockPopulationGUI},
{'name':'AD4analyze_chooseDockedConformations','cmd':ADDockingChooser(),'gui':ADDockingChooserGUI},
#{'name':'AD4analyze_showStatesHISTOGRAM','cmd':ShowAutoDockStatesHISTOGRAM(),'gui':ShowAutoDockStatesHISTOGRAMGUI},
#{'name':'AD4analyze_showResultsOutput','cmd':ADGetOutput(),'gui':ADGetOutputGUI},
#{'name':'AD4analyze_showHistogram','cmd':ADDrawHistogram(),'gui':ADDrawHistogramGUI},
#{'name':'AD4analyze_getChart','cmd':ADMacroLigandChart(),'gui':ADMacroLigandChartGUI},
#{'name':'AD4analyze_writeStates','cmd':WriteAutoDockStates(),'gui':WriteAutoDockStatesGUI},
{'name':'AD4analyze_showClusteringStates','cmd':ShowAutoDockClusteringStates(),'gui':ShowAutoDockStatesCLUSTERINGGUI},
#{'name':'AD4analyze_readClusteringStates','cmd':ReadAutoDockClusteringStates(),'gui':ReadAutoDockStatesCLUSTERINGGUI},
{'name':'AD4analyze_makeClustering','cmd':MakeAutoDockCLUSTERING(),'gui':MakeAutoDockCLUSTERINGGUI},
{'name':'AD4analyze_makeSubsetClustering','cmd':MakeAutoDockSubsetCLUSTERING(),'gui':MakeAutoDockSubsetCLUSTERINGGUI},
#{'name':'AD4analyze_writeClustering','cmd':WriteAutoDockClustering(),'gui':WriteAutoDockClusteringGUI},
]
try:
from Pmv.Grid import AutoGrid, AutoGridSurfaceGui
for i in [ #{'name':'AD4analyze_epdbMolecule', 'cmd':ADEPDBMol(), 'gui':ADEPDBMolGUI},
{'name':'AD4analyze_addExtraGridIsocontour','cmd':ADGetAGrid(),'gui':ADGetAGridGUI}, {'name':'AD4analyze_showGridIsocontours','cmd':ADMakeAllGrids(),'gui':ADMakeAllGridsGUI}]:
commandList.insert(7,i)
except:
print('skipping the isocontour-dependent commands')
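
# Note: initModule(vf) below is the conventional entry point for these command
# modules -- the ViewerFramework is expected to call it when the module loads,
# registering each command/GUI pair from commandList.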
def initModule(vf):
for dict in commandList:
vf.addCommand(dict['cmd'],dict['name'],dict['gui'])
#if not hasattr(vf, 'ADanalyze_showHistogram') and hasattr(vf, 'AD4analyze_showHistogram'):
# vf.ADanalyze_showHistogram = vf.AD4analyze_showHistogram
if not hasattr(vf, 'ADanalyze_showDockingsAsSpheres') and hasattr(vf, 'AD4analyze_showDockingsAsSpheres'):
vf.ADanalyze_showDockingsAsSpheres = vf.AD4analyze_showDockingsAsSpheres
if not hasattr(vf, 'ADanalyze_showGridIsocontours') and hasattr(vf, 'AD4analyze_showGridIsocontours'):
vf.ADanalyze_showGridIsocontours = vf.AD4analyze_showGridIsocontours
if not hasattr(vf, 'ADanalyze_showBindingSite') and hasattr(vf, 'AD4analyze_showBindingSite'):
vf.ADanalyze_showBindingSite = vf.AD4analyze_showBindingSite
if not hasattr(vf, 'ADanalyze_chooseDockedConformations') and hasattr(vf, 'AD4analyze_chooseDockedConformations'):
vf.ADanalyze_chooseDockedConformations = vf.AD4analyze_chooseDockedConformations
if not hasattr(vf, 'ADanalyze_readDLG') and hasattr(vf, 'AD4analyze_readDLG'):
vf.ADanalyze_readDLG = vf.AD4analyze_readDLG
if not hasattr(vf, 'ADanalyze_selectDLG') and hasattr(vf, 'AD4analyze_selectDLG'):
vf.ADanalyze_selectDLG = vf.AD4analyze_selectDLG
if not hasattr(vf, 'ADanalyze_makeSubsetClustering') and hasattr(vf, 'AD4analyze_makeSubsetClustering'):
vf.ADanalyze_makeSubsetClustering = vf.AD4analyze_makeSubsetClustering
if hasattr(vf, 'GUI'):
for item in list(vf.GUI.menuBars['AutoTools4Bar'].menubuttons.values()):
item.configure(background = 'tan')
if not hasattr(vf.GUI, 'adtBar'):
vf.GUI.adtBar = vf.GUI.menuBars['AutoTools4Bar']
vf.GUI.adtFrame = list(vf.GUI.adtBar.menubuttons.values())[0].master
| [
"[email protected]"
] | |
ef907923a1970b33a70abe7364cdcf42e701a3d2 | 3cea6c6664d9489b4cfb33ea8580f8189b5839ff | /torchex/nn/modules/padding.py | ca8bc82e42fac577d1304747aa66ed99bb511ce6 | [
"MIT"
] | permissive | tarokiritani/torchex | 81c24457337bdbf6ad103dd9ded5488b69b468bd | 5e9d8f7f08a3931c2271e108d73226b1ee6b3efa | refs/heads/master | 2020-04-12T17:55:02.960878 | 2018-12-14T09:37:46 | 2018-12-14T09:37:46 | 162,661,997 | 0 | 0 | null | 2018-12-21T03:40:19 | 2018-12-21T03:40:19 | null | UTF-8 | Python | false | false | 2,265 | py | import torch
import torch.nn as nn
class PeriodicPad2d(nn.Module):
"""
    :params torch.Tensor input: Input(B, C, H, W)
# https://github.com/ZichaoLong/aTEAM/blob/master/nn/functional/utils.py
"""
def __init__(self,
pad_left: int=0, pad_right: int=0,
pad_top: int=0, pad_bottom: int=0):
super(PeriodicPad2d, self).__init__()
self.pad_left = pad_left
self.pad_right = pad_right
self.pad_top = pad_top
self.pad_bottom = pad_bottom
def forward(self, input):
assert input.dim() == 4, 'only support Input(B, C, W, H) or Input(B, C, H, W)'
B, C, H, W = input.size()
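        # Wrap-around (periodic) padding: take a strip from the far edge of
        # each axis and concatenate it onto the opposite side.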
left_pad = input[:, :, :, W-(self.pad_left):]
right_pad = input[:, :, :, :self.pad_right]
input = torch.cat([left_pad, input, right_pad], dim=3)
top_pad = input[:, :, H-(self.pad_top):, :]
bottom_pad = input[:, :, :self.pad_bottom, :]
input = torch.cat([top_pad, input, bottom_pad], dim=2)
return input
class PeriodicPad3d(nn.Module):
'''
Only support isotropic padding
'''
def __init__(self, pad: int=0):
super(PeriodicPad3d, self).__init__()
self.pad = pad
def forward(self, input):
'''
:params torch.Tensor input: Input(B, C, D, W, H)
'''
assert input.dim() == 5, 'only support Input(B, C, D, W, H)'
B, C, D, H, W = input.size()
pad_0 = input[:, :, D-(self.pad):, :, :]
pad_1 = input[:, :, :self.pad, :, :]
input = torch.cat([pad_0, input, pad_1], dim=2)
pad_0 = input[:, :, :, H-(self.pad):, :]
pad_1 = input[:, :, :, :self.pad, :]
input = torch.cat([pad_0, input, pad_1], dim=3)
pad_0 = input[:, :, :, :, W-(self.pad):]
pad_1 = input[:, :, :, :, :self.pad]
input = torch.cat([pad_0, input, pad_1], dim=4)
return input
if __name__ == '__main__':
    x = torch.arange(1., 26.).view(1, 1, 5, 5)  # torch.range is deprecated; arange's end is exclusive
print(x)
pad = PeriodicPad2d(2, 2, 2, 1)
print(pad(x))
print(pad(x).shape)
    x = torch.arange(1., 28.).view(1, 1, 3, 3, 3)
pad = PeriodicPad3d(1)
print(pad(x))
| [
"[email protected]"
] | |
f303c4c5c52b859986065ba36976c2cd24f5fa30 | 4e8e9ed2a8fb69ed8b46066a8d967e4c107013a4 | /main/control/comment.py | 74b22b2e72d524f3e59cb31990a4cf5d1b395682 | [
"MIT"
] | permissive | welovecoding/vote4code | a57b3d155096d362dca47587ad2985b4201ef036 | be265d553af35dc6c5322ecb3f7d5b3cf7691b75 | refs/heads/master | 2021-08-11T22:46:40.884030 | 2019-11-15T16:15:05 | 2019-11-15T16:15:05 | 90,191,931 | 14 | 0 | MIT | 2021-08-10T22:50:49 | 2017-05-03T20:46:02 | Python | UTF-8 | Python | false | false | 5,801 | py | # coding: utf-8
from google.appengine.ext import ndb
import flask
import flask_wtf
import wtforms
import auth
import config
import model
import util
from main import app
###############################################################################
# Update
###############################################################################
class CommentUpdateForm(flask_wtf.FlaskForm):
content = wtforms.TextAreaField(
model.Comment.content._verbose_name,
[wtforms.validators.required()],
filters=[util.strip_filter],
)
post_key = wtforms.SelectField(
model.Comment.post_key._verbose_name,
[wtforms.validators.required()],
choices=[],
)
@app.route('/comment/create/', methods=['GET', 'POST'])
@app.route('/comment/<int:comment_id>/update/', methods=['GET', 'POST'])
@auth.login_required
def comment_update(comment_id=0):
if comment_id:
comment_db = model.Comment.get_by_id(comment_id)
else:
comment_db = model.Comment(user_key=auth.current_user_key())
if not comment_db or comment_db.user_key != auth.current_user_key():
flask.abort(404)
form = CommentUpdateForm(obj=comment_db)
user_dbs, user_cursor = model.User.get_dbs(limit=-1)
post_dbs, post_cursor = model.Post.get_dbs(limit=-1)
form.post_key.choices = [(c.key.urlsafe(), c.title) for c in post_dbs]
if flask.request.method == 'GET' and not form.errors:
form.post_key.data = comment_db.post_key.urlsafe() if comment_db.post_key else None
if form.validate_on_submit():
form.post_key.data = ndb.Key(urlsafe=form.post_key.data) if form.post_key.data else None
form.populate_obj(comment_db)
comment_db.put()
return flask.redirect(flask.url_for('comment_view', comment_id=comment_db.key.id()))
return flask.render_template(
'comment/comment_update.html',
title=comment_db.content if comment_id else 'New Comment',
html_class='comment-update',
form=form,
comment_db=comment_db,
)
###############################################################################
# List
###############################################################################
@app.route('/comment/')
def comment_list():
comment_dbs, comment_cursor = model.Comment.get_dbs()
return flask.render_template(
'comment/comment_list.html',
html_class='comment-list',
title='Comment List',
comment_dbs=comment_dbs,
next_url=util.generate_next_url(comment_cursor),
api_url=flask.url_for('api.comment.list'),
)
###############################################################################
# View
###############################################################################
@app.route('/comment/<int:comment_id>/')
def comment_view(comment_id):
comment_db = model.Comment.get_by_id(comment_id)
if not comment_db:
flask.abort(404)
return flask.render_template(
'comment/comment_view.html',
html_class='comment-view',
title=comment_db.content,
comment_db=comment_db,
api_url=flask.url_for('api.comment', comment_key=comment_db.key.urlsafe() if comment_db.key else ''),
)
###############################################################################
# Admin List
###############################################################################
@app.route('/admin/comment/')
@auth.admin_required
def admin_comment_list():
comment_dbs, comment_cursor = model.Comment.get_dbs(
order=util.param('order') or '-modified',
)
return flask.render_template(
'comment/admin_comment_list.html',
html_class='admin-comment-list',
title='Comment List',
comment_dbs=comment_dbs,
next_url=util.generate_next_url(comment_cursor),
api_url=flask.url_for('api.admin.comment.list'),
)
###############################################################################
# Admin Update
###############################################################################
class CommentUpdateAdminForm(CommentUpdateForm):
pass
@app.route('/admin/comment/create/', methods=['GET', 'POST'])
@app.route('/admin/comment/<int:comment_id>/update/', methods=['GET', 'POST'])
@auth.admin_required
def admin_comment_update(comment_id=0):
if comment_id:
comment_db = model.Comment.get_by_id(comment_id)
else:
comment_db = model.Comment(user_key=auth.current_user_key())
if not comment_db:
flask.abort(404)
form = CommentUpdateAdminForm(obj=comment_db)
user_dbs, user_cursor = model.User.get_dbs(limit=-1)
post_dbs, post_cursor = model.Post.get_dbs(limit=-1)
form.post_key.choices = [(c.key.urlsafe(), c.title) for c in post_dbs]
if flask.request.method == 'GET' and not form.errors:
form.post_key.data = comment_db.post_key.urlsafe() if comment_db.post_key else None
if form.validate_on_submit():
form.post_key.data = ndb.Key(urlsafe=form.post_key.data) if form.post_key.data else None
form.populate_obj(comment_db)
comment_db.put()
return flask.redirect(flask.url_for('admin_comment_list', order='-modified'))
return flask.render_template(
'comment/admin_comment_update.html',
title=comment_db.content,
html_class='admin-comment-update',
form=form,
comment_db=comment_db,
back_url_for='admin_comment_list',
api_url=flask.url_for('api.admin.comment', comment_key=comment_db.key.urlsafe() if comment_db.key else ''),
)
###############################################################################
# Admin Delete
###############################################################################
@app.route('/admin/comment/<int:comment_id>/delete/', methods=['POST'])
@auth.admin_required
def admin_comment_delete(comment_id):
comment_db = model.Comment.get_by_id(comment_id)
comment_db.key.delete()
flask.flash('Comment deleted.', category='success')
return flask.redirect(flask.url_for('admin_comment_list'))
| [
"[email protected]"
] | |
224115799dcddd421f082f520cd9f670ef3cd9cc | e81fabdd6988c787524755fac73aa9d3631fc64c | /tests/test_ops/test_early_stopping.py | 286560c5fd38fb4cc2edbac48b85b01eeecdd9e7 | [
"MIT"
] | permissive | granularai/polyaxon-schemas | 0aa06f15b7353ceb6d31f1e5cf63c269ab0e2ce4 | 017ae74701f21f12f0b25e75379681ea5d8baa9e | refs/heads/master | 2022-08-30T00:05:40.888476 | 2020-05-19T17:22:46 | 2020-05-19T17:22:46 | 265,312,701 | 0 | 0 | MIT | 2020-05-19T17:16:38 | 2020-05-19T17:16:37 | null | UTF-8 | Python | false | false | 1,874 | py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from unittest import TestCase
from tests.utils import assert_equal_dict
from polyaxon_schemas.ops.group.early_stopping_policies import EarlyStoppingConfig
from polyaxon_schemas.ops.group.metrics import Optimization
class TestEarlyStoppingConfigs(TestCase):
def test_early_stopping(self):
config_dict = {
'metric': 'loss',
'value': 0.1,
}
config = EarlyStoppingConfig.from_dict(config_dict)
config_to_dict = config.to_dict()
assert config_to_dict.pop('optimization') == Optimization.MAXIMIZE
assert_equal_dict(config_to_dict, config_dict)
def test_early_stopping_with_median_policy(self):
config_dict = {
'metric': 'loss',
'value': 0.1,
'optimization': Optimization.MINIMIZE,
'policy': {'kind': 'median', 'evaluation_interval': 1}
}
config = EarlyStoppingConfig.from_dict(config_dict)
assert_equal_dict(config.to_dict(), config_dict)
def test_early_stopping_with_average_policy(self):
config_dict = {
'metric': 'loss',
'value': 0.1,
'optimization': Optimization.MINIMIZE,
'policy': {'kind': 'average', 'evaluation_interval': 1}
}
config = EarlyStoppingConfig.from_dict(config_dict)
assert_equal_dict(config.to_dict(), config_dict)
def test_early_stopping_with_truncation_policy(self):
config_dict = {
'metric': 'loss',
'value': 0.1,
'optimization': Optimization.MAXIMIZE,
'policy': {'kind': 'truncation', 'percent': 50, 'evaluation_interval': 1}
}
config = EarlyStoppingConfig.from_dict(config_dict)
assert_equal_dict(config.to_dict(), config_dict)
| [
"[email protected]"
] | |
9ff9b1b4f5e88031f1b4c71bf900b366103e5a6f | b67efb7ac1832f2a70aa570f8025c69498a8cd71 | /pgoapi/protos/POGOProtos/Data/Logs/FortSearchLogEntry_pb2.py | 7469fad7bf20a643ec48fffd8c8889493a9bf5e5 | [
"LicenseRef-scancode-warranty-disclaimer",
"MIT"
] | permissive | PogoHop/pgoapi-hsvr | f1513d7548075a7defd21f1018bd59afcb79d78f | b5761159e0240bbb81ef6c257fe2eb1bc1ce2d47 | refs/heads/master | 2021-01-12T11:17:55.334203 | 2016-11-05T12:48:38 | 2016-11-05T12:48:38 | 72,892,081 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | true | 4,709 | py | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: POGOProtos/Data/Logs/FortSearchLogEntry.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from POGOProtos.Inventory.Item import ItemData_pb2 as POGOProtos_dot_Inventory_dot_Item_dot_ItemData__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='POGOProtos/Data/Logs/FortSearchLogEntry.proto',
package='POGOProtos.Data.Logs',
syntax='proto3',
serialized_pb=_b('\n-POGOProtos/Data/Logs/FortSearchLogEntry.proto\x12\x14POGOProtos.Data.Logs\x1a(POGOProtos/Inventory/Item/ItemData.proto\"\xca\x01\n\x12\x46ortSearchLogEntry\x12?\n\x06result\x18\x01 \x01(\x0e\x32/.POGOProtos.Data.Logs.FortSearchLogEntry.Result\x12\x0f\n\x07\x66ort_id\x18\x02 \x01(\t\x12\x32\n\x05items\x18\x03 \x03(\x0b\x32#.POGOProtos.Inventory.Item.ItemData\x12\x0c\n\x04\x65ggs\x18\x04 \x01(\x05\" \n\x06Result\x12\t\n\x05UNSET\x10\x00\x12\x0b\n\x07SUCCESS\x10\x01\x62\x06proto3')
,
dependencies=[POGOProtos_dot_Inventory_dot_Item_dot_ItemData__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_FORTSEARCHLOGENTRY_RESULT = _descriptor.EnumDescriptor(
name='Result',
full_name='POGOProtos.Data.Logs.FortSearchLogEntry.Result',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNSET', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SUCCESS', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=284,
serialized_end=316,
)
_sym_db.RegisterEnumDescriptor(_FORTSEARCHLOGENTRY_RESULT)
_FORTSEARCHLOGENTRY = _descriptor.Descriptor(
name='FortSearchLogEntry',
full_name='POGOProtos.Data.Logs.FortSearchLogEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='result', full_name='POGOProtos.Data.Logs.FortSearchLogEntry.result', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='fort_id', full_name='POGOProtos.Data.Logs.FortSearchLogEntry.fort_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='items', full_name='POGOProtos.Data.Logs.FortSearchLogEntry.items', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='eggs', full_name='POGOProtos.Data.Logs.FortSearchLogEntry.eggs', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_FORTSEARCHLOGENTRY_RESULT,
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=114,
serialized_end=316,
)
_FORTSEARCHLOGENTRY.fields_by_name['result'].enum_type = _FORTSEARCHLOGENTRY_RESULT
_FORTSEARCHLOGENTRY.fields_by_name['items'].message_type = POGOProtos_dot_Inventory_dot_Item_dot_ItemData__pb2._ITEMDATA
_FORTSEARCHLOGENTRY_RESULT.containing_type = _FORTSEARCHLOGENTRY
DESCRIPTOR.message_types_by_name['FortSearchLogEntry'] = _FORTSEARCHLOGENTRY
FortSearchLogEntry = _reflection.GeneratedProtocolMessageType('FortSearchLogEntry', (_message.Message,), dict(
DESCRIPTOR = _FORTSEARCHLOGENTRY,
__module__ = 'POGOProtos.Data.Logs.FortSearchLogEntry_pb2'
# @@protoc_insertion_point(class_scope:POGOProtos.Data.Logs.FortSearchLogEntry)
))
_sym_db.RegisterMessage(FortSearchLogEntry)
# @@protoc_insertion_point(module_scope)
| [
"[email protected]"
] | |
67dcd3ec7cdb0cc71d9f3b762d542f02506fbeb3 | 49ba5356bdc5df7dd9803b56fe507c5164a90716 | /surface-area-of-3d-shapes/solution.py | a1de598aa85c92a605d01dfaf2403263d9ecf1e5 | [] | no_license | uxlsl/leetcode_practice | d80ad481c9d8ee71cce0f3c66e98446ced149635 | d8ed762d1005975f0de4f07760c9671195621c88 | refs/heads/master | 2021-04-25T18:12:28.136504 | 2020-03-11T07:54:15 | 2020-03-11T07:54:15 | 121,472,384 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,509 | py | # leetcode
# https://leetcode-cn.com/problems/surface-area-of-3d-shapes/
# 解法:
# 求六个方向的表面积
class Solution(object):
def surfaceArea(self, grid):
"""
:type grid: List[List[int]]
:rtype: int
"""
N = len(grid)
area = 0
# xy 正反面
for i in range(N):
h = 0
v = 0
for j in range(N):
if grid[i][j] > 0:
area += 2
if grid[i][j] > h:
h = grid[i][j]
if grid[j][i] > v:
v = grid[j][i]
if j > 0 and j+1 < N and grid[i][j-1] > grid[i][j] < grid[i][j+1]:
area +=2*(grid[i][j-1] - grid[i][j])
if i > 0 and i+1 < N and grid[i-1][j] > grid[i][j] < grid[i+1][j]:
area +=2*(grid[i-1][j] - grid[i][j])
area += 2*v
area += 2*h
return area
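
# Note: the second Solution below shadows the first. It is the standard
# formulation: every occupied cell contributes 2 (top + bottom faces) plus,
# for each of its four side neighbours, max(own height - neighbour height, 0)
# exposed side faces.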
class Solution(object):
def surfaceArea(self, grid):
N = len(grid)
ans = 0
for r in xrange(N):
for c in xrange(N):
if grid[r][c]:
ans += 2
for nr, nc in ((r-1, c), (r+1, c), (r, c-1), (r,c+1)):
if 0 <= nr < N and 0 <= nc < N:
nval = grid[nr][nc]
else:
nval = 0
ans += max(grid[r][c] - nval, 0)
return ans
| [
"[email protected]"
] | |
ce978302f88b0065282a8fb57be6ec347d9e2012 | 2fabea234735beefc980b77b213fcb0dfb394980 | /tensorflow_probability/python/distributions/deprecated_linalg_test.py | e30bf6de1138043acd8d2544bd85b4b5b72eabca | [
"Apache-2.0"
] | permissive | tarrou/probability | 0eee452b525a6e6b3c7c98d467468e47f07e861b | d4d80a1c04ad0b3e98758ebc3f7f82887274384d | refs/heads/master | 2020-08-08T11:16:42.441268 | 2019-12-06T17:35:17 | 2019-12-06T17:35:17 | 213,819,828 | 0 | 0 | Apache-2.0 | 2019-10-09T04:20:19 | 2019-10-09T04:20:19 | null | UTF-8 | Python | false | false | 2,518 | py | # Copyright 2019 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for deprecated_linalg functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.distributions.deprecated_linalg import tridiag
from tensorflow_probability.python.internal import test_case
from tensorflow.python.framework import test_util # pylint: disable=g-direct-tensorflow-import
@test_util.run_all_in_graph_and_eager_modes
class TridiagTest(test_case.TestCase):
def testWorksCorrectlyNoBatches(self):
self.assertAllEqual(
[[4., 8., 0., 0.],
[1., 5., 9., 0.],
[0., 2., 6., 10.],
[0., 0., 3, 7.]],
self.evaluate(tridiag(
[1., 2., 3.],
[4., 5., 6., 7.],
[8., 9., 10.])))
def testWorksCorrectlyBatches(self):
self.assertAllClose(
[[[4., 8., 0., 0.],
[1., 5., 9., 0.],
[0., 2., 6., 10.],
          [0., 0., 3., 7.]],
[[0.7, 0.1, 0.0, 0.0],
[0.8, 0.6, 0.2, 0.0],
[0.0, 0.9, 0.5, 0.3],
[0.0, 0.0, 1.0, 0.4]]],
self.evaluate(tridiag(
[[1., 2., 3.],
[0.8, 0.9, 1.]],
[[4., 5., 6., 7.],
[0.7, 0.6, 0.5, 0.4]],
[[8., 9., 10.],
[0.1, 0.2, 0.3]])),
rtol=1e-5, atol=0.)
def testHandlesNone(self):
self.assertAllClose(
[[[4., 0., 0., 0.],
[0., 5., 0., 0.],
[0., 0., 6., 0.],
          [0., 0., 0., 7.]],
[[0.7, 0.0, 0.0, 0.0],
[0.0, 0.6, 0.0, 0.0],
[0.0, 0.0, 0.5, 0.0],
[0.0, 0.0, 0.0, 0.4]]],
self.evaluate(tridiag(
diag=[[4., 5., 6., 7.],
[0.7, 0.6, 0.5, 0.4]])),
rtol=1e-5, atol=0.)
if __name__ == '__main__':
tf.test.main()
| [
"[email protected]"
] | |
8c0d012d8d04a4973b14979e0731ec72a32bbdde | 0728a2e165808cfe5651693a6e7f47804bfb085f | /ry/trunk-ry/rynok/controllers/category.py | 2c8663f5cbf391dbaad2d949ff7d5a5f07a4cd0e | [] | no_license | testTemtProj/OLD_PROJECT | 5b026e072017f5135159b0940370fda860241d39 | 9e5b165f4e8acf9003536e05dcefd33a5ae46890 | refs/heads/master | 2020-05-18T15:30:24.543319 | 2013-07-23T15:17:32 | 2013-07-23T15:17:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,907 | py | #coding: utf-8
""" Category Controller
"""
import logging
import rynok.lib.helpers as h
import json
from pylons import request, response, session, tmpl_context as c, url
from pylons.controllers.util import abort, redirect
from webhelpers.html.builder import HTML
from rynok.lib.base import BaseController, render
from rynok.model.categoriesModel import CategoriesModel
from rynok.lib import helpers as h
from rynok.model.referenceModel import ReferenceModel
from rynok.model.settingsModel import SettingsModel
LOG = logging.getLogger(__name__)
class CategoryController(BaseController):
def __init__(self):
BaseController.__init__(self)
self.categories_model = CategoriesModel
def index(self, url):
category = self.categories_model.getByURL(url=url)
if not category:
return render('/error/error.mako.html')
if 'isLeaf' in category and category['isLeaf']:
return self.view(category=category)
cats = self.categories_model.getChildrens(category["ID"], non_empty=True)
c.cats = []
for cat in cats:
c.cats.append(cat)
c.category = category
return render('/category.mako.html')
def all(self):
cats = self.categories_model.getChildrens(categoryId=0, non_empty=True)
c.cats = []
for cat in cats:
c.cats.append(cat)
return render('/all.categories.mako.html')
def popular(self):
reference_model = ReferenceModel
settings = SettingsModel.get_popular_block_settings()
c.title = 'Популярные товары'
c.products = reference_model.get_popular_products(settings['categories'], settings['per_category'])
return render('/products.html')
def new(self):
reference_model = ReferenceModel
c.title = 'Новые товары'
c.products = reference_model.get_new_products(28)
return render('/products.html')
def view(self, category, page=1):
reference_model = ReferenceModel
if not isinstance(category, dict):
category = self.categories_model.getByURL(category)
c.category = category
c.error_message = None
sort_by = request.params.get('sort_by', 'price')
if sort_by == 'rating':
by = 'Rate'
elif sort_by == 'price':
by = 'price'
        elif sort_by == 'popular':
            by = 'popular'
        else:
            by = 'price'  # fall back for unknown sort_by values
try:
c.markets = json.loads(request.params.get('m_id', '[]'))
except ValueError:
c.markets = []
try:
c.vendors = json.loads(request.params.get('v_id', '[]'))
except ValueError:
c.vendors = []
sort_order = request.params.get('sort_order', 'desc')
try:
c.price_min = int(request.params.get('price_min', 0))
        except (TypeError, ValueError):
c.price_min = 0
try:
c.perPage = int(request.params.get('per_page', 10))
        except (TypeError, ValueError):
c.perPage = 10
c.currency = request.params.get('currency', 'UAH')
query = {'categoryId':int(category['ID']), c.currency: {'$gt': c.price_min-1}}
c.affordable_price = reference_model.get_max_price(query, c.currency) + 1
c.price_max = int(request.params.get('price_max', c.affordable_price))
query[c.currency]['$lt'] = c.price_max + 1
if len(c.markets) > 0 and len(c.vendors) > 0:
query['shopId'] = {'$in':c.markets}
query['vendor'] = {'$in':c.vendors}
if len(c.markets) > 0 and len(c.vendors) == 0:
query['shopId'] = {'$in':c.markets}
if len(c.markets) == 0 and len(c.vendors) > 0:
query['vendor'] = {'$in':c.vendors}
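
        # Illustrative shape of the assembled MongoDB-style filter
        # (hypothetical values):
        #   {'categoryId': 17, 'UAH': {'$gt': 99, '$lt': 5001},
        #    'shopId': {'$in': [3, 8]}, 'vendor': {'$in': ['Acme']}}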
count_products = reference_model.get_count(query=query)
"""
if count_products == 0:
referer = request.headers.get('Referer', '')
http_host = request.environ.get('HTTP_HOST')
c.back_url = referer
if referer.find(http_host) == -1:
c.back_url = '/'
cats = self.categories_model.getChildrens(categoryId=0, non_empty=True)
c.cats = []
for cat in cats:
c.cats.append(cat)
c.noresult = u"По даной цене товары не найдены"
return render('/empty.category.mako.html')
"""
if count_products > 0:
c.products = reference_model.get_reference(where=query, perPage = c.perPage, page = int(page)-1, by=by, direction=sort_order)
else:
#get_less_products_query = query.copy()
#get_less_products_query[c.currency] = {'$lt' : c.price_min}
get_more_products_query = query.copy()
            del get_more_products_query[c.currency]  # drop the price filter entirely
#less_products = reference_model.get_reference(where=get_less_products_query, limit=2, by=c.currency, direction=-1)
#more_products = reference_model.get_reference(where=get_more_products_query, limit=2, by=c.currency, direction=1)
#c.products = more_products
            LOG.debug("fallback query without price filter: %s", get_more_products_query)
c.products = reference_model.get_reference(where=get_more_products_query, perPage = c.perPage, page = int(page)-1, by=by, direction=sort_order)
c.error_message = u"По даной цене товары не найдены, показаны без учета цены"
count_products = reference_model.get_count(query=get_more_products_query)
c.page = page
c.total_pages = count_products/c.perPage
if count_products%c.perPage:
c.total_pages += 1
c.sort_settings = {sort_by: sort_order}
c.current_url = category['URL']+'/'+str(page)
return render('/view.category.mako.html')
| [
"[email protected]"
] | |
2bd112ac93dcd356a16b4eefafb8a2aa5b1fe728 | 4d30d39cbcb0d2428d710a47c0ca7ef8bcec447d | /core/dbs/__init__.py | c95c6e94c224de6b64cb1e2e67ddf572f055abd2 | [
"BSD-3-Clause"
] | permissive | baifengbai/CornerNet-Lite-Pytorch | 7a8c5bbe49343e67ae100001c18df5542b375b4e | 7c52f93720d6276a6e073c06fb6cec6b8580da56 | refs/heads/master | 2020-09-22T15:25:13.050615 | 2019-12-01T16:44:28 | 2019-12-01T16:44:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 117 | py | from .coco import COCO
from .dagm import DAGM
# dataset name -> class registry
datasets = {
"COCO": COCO,
"DAGM": DAGM,
}
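
# Typical lookup elsewhere in the repo (assumed usage):
#   db_class = datasets["COCO"]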
| [
"[email protected]"
] | |
4905389b265f26eae8f3ad56e407e10420eb28aa | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/eclect.py | 7be45d81379ad886d250abb271881bb833c02954 | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 80 | py | ii = [('NewmJLP.py', 2), ('BachARE.py', 1), ('SoutRD.py', 1), ('WestJIT.py', 3)] | [
"[email protected]"
] | |
36686ecf3ef8dddacb386186f976e7db325b7da8 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/16/usersdata/122/6123/submittedfiles/triangulo.py | 3ae57cd9af2aa0c78741ee0de80b08dafd3b0c19 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 369 | py | # -*- coding: utf-8 -*-
from __future__ import division
import math
#ENTRADA
a=input('digite o valor do lado a:')
b=input('digite o valor do lado b:')
c=input('digite o valor do lado c:')
if a<(b+c):
print('S')
if (a**2)==(b**2)+(c**2):
print('Re')
elif (a**2)>(b**2)+(c**2):
print('Ob')
elif (a**2)<(b**2)+(c**2):
print('Ac')
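
# Example session (sides 5, 4, 3): prints 'S' (valid triangle) then 'Re'
# (right triangle, since 5**2 == 4**2 + 3**2).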
| [
"[email protected]"
] | |
c2bb2d65b3870a887e0ddb17c2f03d3f97dbddcc | 8f50c262f89d3dc4f15f2f67eb76e686b8f808f5 | /Trigger/TriggerCommon/TriggerMenu/scripts/moveDisabledChains.py | 3cf550466499b6bb7c7896c7e66d13a32585c169 | [
"Apache-2.0"
] | permissive | strigazi/athena | 2d099e6aab4a94ab8b636ae681736da4e13ac5c9 | 354f92551294f7be678aebcd7b9d67d2c4448176 | refs/heads/master | 2022-12-09T02:05:30.632208 | 2020-09-03T14:03:18 | 2020-09-03T14:03:18 | 292,587,480 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 8,373 | py | #!/bin/env python
# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
"""
This script reads the rulebook and swaps items between the Physics and MC menus
depending on whether or not they are disabled.
TrigMenuRulebook needs to be checked out and installed.
Currently it needs to be run from the script folder.
One argument can be supplied specifying which "tag" to use; the default is pp_v7
"""
import importlib
import sys, re, os
tag = "pp_v7"
if len(sys.argv) > 1:
tag = sys.argv[1]
import checkTigherThanPrimary
tighter_than_primaries = set([x for x, y in checkTigherThanPrimary.main()])
def swapItems():
physics_rulemod = importlib.import_module("TrigMenuRulebook.Physics_%s_rules" % tag)
monitoring_rulemod = importlib.import_module("TrigMenuRulebook.Monitoring_%s_rules" % tag)
standby_rulemod = importlib.import_module("TrigMenuRulebook.Standby_%s_rules" % tag)
cosmic_rulemod = importlib.import_module("TrigMenuRulebook.Cosmic_%s_rules" % tag)
toroidoff_rulemod = importlib.import_module("TrigMenuRulebook.Physics_%s_ToroidOff_rules" % tag)
commissioning_rulemod = importlib.import_module("TrigMenuRulebook.Commissioning2016_rules")
startup_rulemod = importlib.import_module("TrigMenuRulebook.Physics_%s_startup_rules" % tag)
monitoring_rulemod.rules = monitoring_rulemod.physics_rules
modules = (physics_rulemod,monitoring_rulemod,standby_rulemod,cosmic_rulemod,toroidoff_rulemod,commissioning_rulemod,startup_rulemod)
    modules = (physics_rulemod,monitoring_rulemod,standby_rulemod,cosmic_rulemod,toroidoff_rulemod,commissioning_rulemod)  # NB: overrides the previous line, dropping startup_rulemod
    l1topo_pattern = r"\w-\w"
def getPS(item):
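        # Prescale status of an item across all rule modules: -1 if no module
        # has a rule for it (i.e. effectively disabled), 2 as soon as a
        # rate-driven rule is seen, otherwise the largest explicit PS found
        # (1 for rules carrying neither "PS" nor "rate").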
hlt = "HLT_"+item
ps = -1
for mod in modules:
if hlt in mod.rules.keys(): key = hlt
elif item in mod.rules.keys(): key = item
else: continue
for rule in mod.rules[key].itervalues():
if "PS" not in rule.keys():
if "rate" in rule.keys(): return 2
else: ps = 1
if rule["PS"] > ps:
ps = rule["PS"]
if ps > 1: return ps
return ps
lines_Physics = {}
lines_MC = {}
lines_Physics_fromMC = {}
lines_MC_fromPhysics = {}
count_toMC = 0
count_toPhysics = 0
items_MC_fromPhysics = set()
items_Physics_fromMC_withPS = set()
items_Physics_withPS = set()
current_Physics = "../python/menu/Physics_%s.py" % tag
current_MC = "../python/menu/MC_%s.py" % tag
slice_start_pattern = r"\s*TriggerFlags.(.*).signatures *=.*\[[^\]]"
slice_end_pattern = "^\s*\]\s*$"
item_pattern = """(?x)\['(.*)'\s*, #chainName
\s*'.*'\s*, #L1itemforchain
\s*\[.*\]\s*, #[L1 items for chainParts]
\s*\[.*\]\s*, #[stream]
\s*\[.*\]\s*, #[groups]
\s*.*\]* #EBstep and ]"""
ftk_pattern = "if TriggerFlags.doFTK()"
last_slice_pattern = "TriggerFlags.(HeavyIon|MinBias|Beamspot)Slice.signatures"
output_Physics = ""
output_MC = ""
with open (current_Physics) as file_Physics:
currentSlice = None
linebuffer = ""
lastslice = False
for line in file_Physics:
commented = line.strip().startswith("#")
# Last slice pattern
m = re.search(last_slice_pattern,line)
if m and not commented:
lastslice = True
if lastslice:
output_Physics += line
continue
# Item pattern
m = re.search(item_pattern,line)
if m and not commented:
assert(currentSlice != None)
name = m.group(1)
ps = getPS(name)
if name in tighter_than_primaries:
print "Found item that is tighter than primary, instead of moving to MC please enable the rule in RB:", name
if ps==-1 and not name in tighter_than_primaries and not re.search(l1topo_pattern,line):
lines_MC_fromPhysics[currentSlice] += linebuffer+line
items_MC_fromPhysics.add(name)
count_toMC += 1
else:
lines_Physics[currentSlice] += linebuffer+line
linebuffer = ""
if ps>1:
items_Physics_withPS.add(name)
continue
# Slice start pattern
m = re.search(slice_start_pattern,line)
if m and not commented:
assert(currentSlice == None)
currentSlice = m.group(1)
output_Physics += "REPLACE_%s\n"%currentSlice
if not currentSlice in lines_Physics.keys(): lines_Physics[currentSlice] = line
if not currentSlice in lines_MC_fromPhysics.keys(): lines_MC_fromPhysics[currentSlice] = ""
continue
# Slice end pattern
m = re.search(slice_end_pattern,line)
if m and not commented:
if currentSlice!=None:
lines_Physics[currentSlice] += linebuffer
linebuffer = ""
currentSlice = None
continue
# Store line in the proper place
if currentSlice == None:
output_Physics += line
else:
linebuffer += line
with open (current_MC) as file_MC:
currentSlice = None
if_ftk = False
linebuffer = ""
lastslice = False
for line in file_MC:
commented = line.strip().startswith("#")
# Last slice pattern
m = re.search(last_slice_pattern,line)
if m and not commented:
lastslice = True
if lastslice:
output_MC += line
continue
# Item pattern
m = re.search(item_pattern,line)
if m and not commented and not if_ftk:
assert(currentSlice != None)
ps = getPS(m.group(1))
if ps==-1 or re.search(l1topo_pattern,line):
lines_MC[currentSlice] += linebuffer+line
else:
lines_Physics_fromMC[currentSlice] += linebuffer+line
if ps>1:
items_Physics_fromMC_withPS.add(m.group(1))
count_toPhysics +=1
linebuffer = ""
continue
# Slice start pattern
m = re.search(slice_start_pattern,line)
if m and not commented and not if_ftk:
assert(currentSlice == None)
currentSlice = m.group(1)
output_MC += "REPLACE_%s\n"%currentSlice
if not currentSlice in lines_Physics_fromMC.keys(): lines_Physics_fromMC[currentSlice] = ""
if not currentSlice in lines_MC.keys(): lines_MC[currentSlice] = line
continue
# Slice end pattern
m = re.search(slice_end_pattern,line)
if m and not commented:
if_ftk = False
if currentSlice!=None:
lines_MC[currentSlice] += linebuffer
linebuffer = ""
currentSlice = None
continue
# FTK pattern
m = re.search(ftk_pattern,line)
if m and not commented:
if_ftk = True
# Store line in the proper place
if currentSlice == None:
output_MC += line
else:
linebuffer += line
for key,val in lines_Physics.iteritems():
toreplace = val
if key in lines_Physics_fromMC.keys():
toreplace += lines_Physics_fromMC[key]
toreplace += " ]\n"
output_Physics = output_Physics.replace("REPLACE_%s\n"%key,toreplace)
for key,val in lines_MC.iteritems():
toreplace = val
if key in lines_MC_fromPhysics.keys():
toreplace += lines_MC_fromPhysics[key]
toreplace += " ]\n"
output_MC = output_MC.replace("REPLACE_%s\n"%key,toreplace)
with open (current_Physics+".edit","w") as outfile_Physics:
outfile_Physics.write(output_Physics)
with open (current_MC+".edit","w") as outfile_MC:
outfile_MC.write(output_MC)
print "Items moved Physics -> MC:",count_toMC
print "Items moved MC -> Physics:",count_toPhysics
if items_Physics_fromMC_withPS:
print "Some new items in Physics are prescaled, you probably want to add them to CPS.py:"
print sorted(list(items_Physics_fromMC_withPS))
return items_MC_fromPhysics
def cleanCPS(movedToMC):
from TriggerMenu.menu.CPS import defineCPSGroups
HLT_CPS_Groups = defineCPSGroups()
cps_to_remove = set()
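    # Drop a CPS group entirely when at most one of its chains remains in Physics.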
for cps, items in HLT_CPS_Groups.iteritems():
if sum(item in movedToMC for item in items) >= len(items)-1:
cps_to_remove.add(cps)
cpsfile = "../python/menu/CPS.py"
cps_start_pattern = "'RATE:CPS:(.*)'\s*:\s*\["
cps_end_pattern = "\]\s*,"
cps_item_pattern = "^\s*'(.*)'\s*,\s*$"
with open (cpsfile+".edit","w") as outcps, open (cpsfile) as incps:
removing = False
for line in incps:
commented = line.strip().startswith("#")
# CPS start pattern
m = re.search(cps_start_pattern,line)
if m and not commented:
name = 'RATE:CPS:%s'%m.group(1)
if name in cps_to_remove:
removing = True
#CPS item pattern
if not m:
m = re.search(cps_item_pattern,line)
if m and not commented:
if m.group(1) in movedToMC: continue
if not removing:
outcps.write(line)
# CPS end pattern
m = re.search(cps_end_pattern,line)
if m and not commented:
removing = False
if __name__ == "__main__":
movedToMC = swapItems()
cleanCPS(movedToMC)
| [
"[email protected]"
] | |
9cc28d9f4c07c4648dc57207f4e8201627ae8aed | 1b9075ffea7d4b846d42981b41be44238c371202 | /2008/devel/programming/libs/libdbf/actions.py | 242da707fa8723753f2298926612cdf827675c4e | [] | no_license | pars-linux/contrib | bf630d4be77f4e484b8c6c8b0698a5b34b3371f4 | 908210110796ef9461a1f9b080b6171fa022e56a | refs/heads/master | 2020-05-26T20:35:58.697670 | 2011-07-11T11:16:38 | 2011-07-11T11:16:38 | 82,484,996 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 829 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Licensed under the GNU General Public License, version 2.
# See the file http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
from pisi.actionsapi import autotools
from pisi.actionsapi import pisitools
from pisi.actionsapi import shelltools
from pisi.actionsapi import get
WorkDir = "libdbf"
def setup():
shelltools.chmod("configure")
shelltools.chmod("install-sh")
pisitools.dosed("configure","docbook-to-man","docbook2man")
autotools.rawConfigure("--prefix=/usr \
--disable-static")
def build():
autotools.make()
def install():
autotools.install()
pisitools.domo("po/tr.po", "tr", "libdbf.mo")
pisitools.insinto("/usr/share/doc/%s" % get.srcTAG(),"man/html")
pisitools.dodoc("ChangeLog", "COPYING", "README")
| [
"MeW@a748b760-f2fe-475f-8849-a8a11d7a3cd2"
] | MeW@a748b760-f2fe-475f-8849-a8a11d7a3cd2 |
cdbe41c2ec761eb560f3450e4eafcb73d802900a | cecd61903943d9f25f37605a344b1683ee958b11 | /what_is_food/config/desktop.py | 06f7e215806a55310c988083ea653bb469f998b8 | [
"MIT"
] | permissive | ashish-greycube/what_is_food | 1f31ce461f97f8d4dccbbd078eb2190a4f785caa | 6c4a327a721accf86667cc87c0b2976dcd09abe6 | refs/heads/master | 2022-12-15T06:07:07.137326 | 2020-08-27T04:05:21 | 2020-08-27T04:05:21 | 273,717,466 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 274 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from frappe import _
def get_data():
return [
{
"module_name": "What Is Food",
"color": "yellow",
"icon": "octicon octicon-device-mobile",
"type": "module",
"label": _("What Is Food")
}
]
| [
"[email protected]"
] | |
e0c0f5874c310c08d6aae5b8963709dc2a7f55f7 | 061c36c4b33dd0c47d9d62c2057559d4c5973681 | /validate_csv.py | 2e7d2f78cb711aa32b69265e49916552cf28ae42 | [
"MIT"
] | permissive | ashkankamyab/DevOps-Python-tools | 0847f9e1b74d7864d17b0a9833beeef1f149e5a5 | dc4b1ce2b2fbee3797b66501ba3918a900a79769 | refs/heads/master | 2022-10-09T15:23:31.108086 | 2022-09-01T14:32:56 | 2022-09-01T14:32:56 | 189,855,037 | 1 | 0 | NOASSERTION | 2019-06-02T14:15:18 | 2019-06-02T14:15:18 | null | UTF-8 | Python | false | false | 10,977 | py | #!/usr/bin/env python
# vim:ts=4:sts=4:sw=4:et
#
# Author: Hari Sekhon
# Date: 2015-12-22 23:25:25 +0000 (Tue, 22 Dec 2015)
#
# https://github.com/HariSekhon/DevOps-Python-tools
#
# License: see accompanying Hari Sekhon LICENSE file
#
# If you're using my code you're welcome to connect with me on LinkedIn and optionally send me feedback
# to help improve or steer this or other code I publish
#
# https://www.linkedin.com/in/HariSekhon
#
"""
CSV Validator Tool
Validates each file passed as an argument
Directories are recursed, checking all files ending in a .csv suffix.
Works like a standard unix filter program - if no files are passed as arguments or '-' is given then reads
from standard input
This is not as good as the other validate_*.py programs in this repo as the others have clearer syntactic structure
to check. CSV/TSV has higher variation with delimiters, quote characters etc. If delimiters and quotechars are not
specified it'll try to infer the structure but I've had to add a few heuristics to invalidate files which otherwise
pass python csv module's inference including json and yaml files which we don't accept.
Explicitly using the --delimiter option will disable the inference which is handy if it's
allowing through non-csv files, you don't want to accept other delimited files such as TSV files etc.
This may be fine for simple purposes but for a better validation tool with more options see:
https://pythonhosted.org/chkcsv/
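
Example invocations (illustrative):

    ./validate_csv.py file1.csv file2.csv
    ./validate_csv.py --delimiter ',' --quotechar '"' /path/to/dir
    cat data.csv | ./validate_csv.py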
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# this causes csvreader TypeError: the "delimiter" must be an 1-character string
# from __future__ import unicode_literals
import csv
import os
import re
import sys
libdir = os.path.abspath(os.path.join(os.path.dirname(__file__), 'pylib'))
sys.path.append(libdir)
try:
# pylint: disable=wrong-import-position
from harisekhon.utils import die, ERRORS, log_option, uniq_list_ordered, log, isChars, validate_regex
from harisekhon import CLI
except ImportError as _:
print('module import failed: %s' % _, file=sys.stderr)
print("Did you remember to build the project by running 'make'?", file=sys.stderr)
print("Alternatively perhaps you tried to copy this program out without it's adjacent libraries?", file=sys.stderr)
sys.exit(4)
__author__ = 'Hari Sekhon'
__version__ = '0.10.0'
class CsvValidatorTool(CLI):
def __init__(self):
# Python 2.x
super(CsvValidatorTool, self).__init__()
# Python 3.x
# super().__init__()
self.filename = None
# self.delimiter = ','
# self.quotechar = '"'
# allow CSV module inference - this way user can choose to explicitly specify --delimiter=, --quotechar='"'
# or allow to try to infer itself
self.delimiter = None
self.quotechar = None
self.re_csv_suffix = re.compile(r'.*\.csv$', re.I)
self.valid_csv_msg = '<unknown> => CSV OK'
self.invalid_csv_msg = '<unknown> => CSV INVALID'
self.failed = False
self.exclude = None
def add_options(self):
# do not leave as None to infer per line, it'll split a single word line like 'blah' => ['b', 'ah']
# and there is no way to detect it only had one field
self.add_opt('-d', '--delimiter', default=',',
help='Delimiter to test (default: comma)')
self.add_opt('-q', '--quotechar', default=self.quotechar,
help='Quotechar to test (default: None)')
# self.add_opt('-p', '--print', action='store_true',
# help='Print the CSV lines(s) which are valid, else print nothing (useful for shell ' +
# 'pipelines). Exit codes are still 0 for success, or %s for failure'
# % ERRORS['CRITICAL'])
self.add_opt('-e', '--exclude', metavar='regex', default=os.getenv('EXCLUDE'),
help='Regex of file / directory paths to exclude from checking ($EXCLUDE)')
def process_options(self):
self.exclude = self.get_opt('exclude')
if self.exclude:
validate_regex(self.exclude, 'exclude')
self.exclude = re.compile(self.exclude, re.I)
def is_excluded(self, path):
if self.exclude and self.exclude.search(path):
log.debug("excluding path: %s", path)
return True
return False
def process_csv(self, filehandle):
csvreader = None
try:
if self.delimiter is not None:
try:
csvreader = csv.reader(filehandle, delimiter=self.delimiter, quotechar=self.quotechar)
except TypeError as _:
self.usage(_)
else:
# dialect = csv.excel
dialect = csv.Sniffer().sniff(filehandle.read(1024))
# this will raise an Error if invalid
dialect.strict = True
filehandle.seek(0)
csvreader = csv.reader(filehandle, dialect)
except csv.Error as _:
log.warning('file %s: %s', self.filename, _)
return False
count = 0
try:
# csvreader doesn't seem to generate any errors ever :-(
# csv module allows entire lines of json/xml/yaml to go in as a single field
# Adding some invalidations manually
for field_list in csvreader:
# list of fields with no separator information
# log.debug("line: %s", _)
# make it fail if there is only a single field on any line
if len(field_list) < 2:
return False
# it's letting JSON through :-/
if field_list[0] == '{':
return False
# extra protection along the same lines as anti-json:
# the first char of field should be alphanumeric, not syntax
# however instead of isAlnum allow quotes for quoted CSVs to pass validation
if not isChars(field_list[0][0], 'A-Za-z0-9\'"'):
return False
count += 1
except csv.Error as _:
log.warning('file %s, line %s: %s', self.filename, csvreader.line_num, _)
return False
if count == 0:
log.debug('zero lines detected, blank input is not valid CSV')
return False
log.debug('%s CSV lines passed', count)
return True
def check_csv(self, filehandle):
if self.process_csv(filehandle):
# if self.get_opt('print'):
# print(content, end='')
# else:
# print(self.valid_csv_msg)
print(self.valid_csv_msg)
else:
self.failed = True
# if not self.get_opt('print'):
# if self.verbose > 2:
# try:
# except csv.Error as _:
# if not self.get_opt('print'):
# print(_)
# die(self.invalid_csv_msg)
die(self.invalid_csv_msg)
def run(self):
self.delimiter = self.get_opt('delimiter')
self.quotechar = self.get_opt('quotechar')
log_option('delimiter', self.delimiter)
log_option('quotechar', self.quotechar)
if not self.args:
self.args.append('-')
args = uniq_list_ordered(self.args)
for arg in args:
if arg == '-':
continue
if not os.path.exists(arg):
print("'{0}' not found".format(arg))
sys.exit(ERRORS['CRITICAL'])
if os.path.isfile(arg):
log_option('file', arg)
elif os.path.isdir(arg):
log_option('directory', os.path.abspath(arg))
else:
die("path '{0}' could not be determined as either a file or directory".format(arg))
for arg in args:
self.check_path(arg)
if self.failed:
sys.exit(ERRORS['CRITICAL'])
def check_path(self, path):
if path == '-' or os.path.isfile(path):
self.check_file(path)
elif os.path.isdir(path):
self.walk(path)
else:
die("failed to determine if path '%s' is file or directory" % path)
# don't need to recurse when using walk generator
def walk(self, path):
if self.is_excluded(path):
return
for root, dirs, files in os.walk(path, topdown=True):
# modify dirs in place to prune descent for increased efficiency
# requires topdown=True
# calling is_excluded() on joined root/dir so that things like
# '/tests/spark-\d+\.\d+.\d+-bin-hadoop\d+.\d+' will match
dirs[:] = [d for d in dirs if not self.is_excluded(os.path.join(root, d))]
for filename in files:
file_path = os.path.join(root, filename)
if self.re_csv_suffix.match(file_path):
self.check_file(file_path)
def check_file(self, filename):
self.filename = filename
if self.filename == '-':
self.filename = '<STDIN>'
self.valid_csv_msg = '%s => CSV OK' % self.filename
self.invalid_csv_msg = '%s => CSV INVALID' % self.filename
if self.filename == '<STDIN>':
log.debug('checking stdin')
self.check_csv(sys.stdin)
else:
if self.is_excluded(filename):
return
log.debug('checking %s', self.filename)
try:
with open(self.filename) as iostream:
self.check_csv(iostream)
except IOError as _:
die("ERROR: %s" % _)
if __name__ == '__main__':
CsvValidatorTool().main()
# =========================================================================== #
# borrowed and tweaked from Python standard library:
# https://docs.python.org/2/library/csv.html
# import codecs
# import cStringIO
# class UTF8Recoder(object):
# """
# Iterator that reads an encoded stream and reencodes the input to UTF-8
# """
# def __init__(self, _, encoding):
# self.reader = codecs.getreader(encoding)(_)
#
# def __iter__(self):
# return self
#
# def next(self):
# return self.reader.next().encode("utf-8")
#
#
# class UnicodeReader(object):
# """
# A CSV reader which will iterate over lines in the CSV filehandle,
# which is encoded in the given encoding.
# """
#
# def __init__(self, _, dialect=csv.excel, encoding="utf-8", **kwargs):
# _ = UTF8Recoder(_, encoding)
# self.reader = csv.reader(_, dialect=dialect, **kwargs)
#
# def next(self):
# row = self.reader.next()
# return [unicode(s, "utf-8") for s in row]
#
# def __iter__(self):
# return self
| [
"[email protected]"
] | |
40bf69fc32a19fddc23cf0e29fdc8fc40c238709 | 8ef8e6818c977c26d937d09b46be0d748022ea09 | /nlp/dialogue_generation/cpm/pytorch/iluvatar/cpm/config/layers/self_multihead_attn.py | 55be679404c3ac3d70ad62e15e3d9ac7aa90f005 | [
"Apache-2.0"
] | permissive | Deep-Spark/DeepSparkHub | eb5996607e63ccd2c706789f64b3cc0070e7f8ef | 9d643e88946fc4a24f2d4d073c08b05ea693f4c5 | refs/heads/master | 2023-09-01T11:26:49.648759 | 2023-08-25T01:50:18 | 2023-08-25T01:50:18 | 534,133,249 | 7 | 6 | Apache-2.0 | 2023-03-28T02:54:59 | 2022-09-08T09:07:01 | Python | UTF-8 | Python | false | false | 3,926 | py | import math
import torch
from torch import nn
from torch.nn import Parameter
import torch.nn.functional as F
from layers.self_multihead_attn_func import self_attn_func
from layers.fast_self_multihead_attn_func import fast_self_attn_func
from apex.normalization.fused_layer_norm import FusedLayerNorm
class SelfMultiheadAttn(nn.Module):
"""Multi-headed attention.
See "Attention Is All You Need" for more details.
"""
def __init__(self, embed_dim, num_heads, dropout=0., bias=False, impl='fast'):
super().__init__()
self.embed_dim = embed_dim
self.num_heads = num_heads
self.dropout = dropout
self.head_dim = embed_dim // num_heads
assert self.head_dim * \
num_heads == self.embed_dim, "embed_dim must be divisible by num_heads"
self.bias = bias
self.impl = impl
self.scaling = self.head_dim**-0.5
self.q_weight = Parameter(torch.Tensor(embed_dim, embed_dim))
self.k_weight = Parameter(torch.Tensor(embed_dim, embed_dim))
self.v_weight = Parameter(torch.Tensor(embed_dim, embed_dim))
self.out_proj_weight = Parameter(torch.Tensor(embed_dim, embed_dim))
self.q_bias = Parameter(torch.Tensor(embed_dim))
self.k_bias = Parameter(torch.Tensor(embed_dim))
self.v_bias = Parameter(torch.Tensor(embed_dim))
self.out_proj_bias = Parameter(torch.Tensor(embed_dim))
self.reset_parameters()
if impl == 'fast':
self.attn_func = fast_self_attn_func
elif impl == 'default':
self.attn_func = self_attn_func
else:
assert False, "Unsupported impl: {} !".format(impl)
def reset_parameters(self):
nn.init.xavier_uniform_(self.q_weight)
nn.init.xavier_uniform_(self.k_weight)
nn.init.xavier_uniform_(self.v_weight)
nn.init.xavier_uniform_(self.out_proj_weight)
nn.init.constant_(self.q_bias, 0.)
nn.init.constant_(self.k_bias, 0.)
nn.init.constant_(self.v_bias, 0.)
nn.init.constant_(self.out_proj_bias, 0.)
def forward(self, query, attn_mask=None, is_training=True):
"""Input shape: Time x Batch x Channel
Self-attention can be implemented by passing in the same arguments for
query, key and value. Future timesteps can be masked with the
`mask_future_timesteps` argument. Padding elements can be excluded from
the key by passing a binary ByteTensor (`key_padding_mask`) with shape:
batch x src_len, where padding elements are indicated by 1s.
"""
mask = attn_mask
input_weights = torch.cat([self.q_weight.view(self.num_heads, 1, self.head_dim, self.embed_dim), self.k_weight.view(self.num_heads, 1, self.head_dim,
self.embed_dim), self.v_weight.view(self.num_heads, 1, self.head_dim, self.embed_dim)], dim=1).reshape(3*self.embed_dim, self.embed_dim).contiguous()
input_bias = torch.cat([self.q_bias.view(self.num_heads, 1, self.head_dim), self.k_bias.view(
self.num_heads, 1, self.head_dim), self.v_bias.view(self.num_heads, 1, self.head_dim)], dim=1).reshape(3*self.embed_dim).contiguous()
if self.impl == 'fast':
outputs = self.attn_func(attn_mask is not None, is_training, self.num_heads, query,
input_weights, self.out_proj_weight, input_bias, self.out_proj_bias, mask, False, self.dropout)
else:
outputs = self.attn_func(attn_mask is not None, is_training, self.num_heads, self.scaling, query,
input_weights, self.out_proj_weight,
input_bias, self.out_proj_bias,
mask, False, self.dropout)
return outputs
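

# Minimal usage sketch (assumed input shape [seq_len, batch, embed_dim]; the
# 'fast' path relies on apex's fused CUDA kernels, so 'default' is shown):
#   attn = SelfMultiheadAttn(embed_dim=768, num_heads=12, dropout=0.1,
#                            impl='default')
#   out = attn(x, attn_mask=mask, is_training=True)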
| [
"[email protected]"
] |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.