body_hash: stringlengths (min 64, max 64)
body: stringlengths (min 23, max 109k)
docstring: stringlengths (min 1, max 57k)
path: stringlengths (min 4, max 198)
name: stringlengths (min 1, max 115)
repository_name: stringlengths (min 7, max 111)
repository_stars: float64 (min 0, max 191k)
lang: stringclasses (1 value)
body_without_docstring: stringlengths (min 14, max 108k)
unified: stringlengths (min 45, max 133k)
5e9125c7dc3c970ac04c93b821b2b79d7e170cbabc62f8a41e1cf7dda9f3e29c
@property def areas(self): 'Compute areas of masks.\n\n This func is modified from `detectron2\n <https://github.com/facebookresearch/detectron2/blob/ffff8acc35ea88ad1cb1806ab0f00b4c1c5dbfd9/detectron2/structures/masks.py#L387>`_.\n The function only works with Polygons using the shoelace formula.\n\n Return:\n ndarray: areas of each instance\n ' area = [] for polygons_per_obj in self.masks: area_per_obj = 0 for p in polygons_per_obj: area_per_obj += self._polygon_area(p[0::2], p[1::2]) area.append(area_per_obj) return np.asarray(area)
Compute areas of masks. This func is modified from `detectron2 <https://github.com/facebookresearch/detectron2/blob/ffff8acc35ea88ad1cb1806ab0f00b4c1c5dbfd9/detectron2/structures/masks.py#L387>`_. The function only works with Polygons using the shoelace formula. Return: ndarray: areas of each instance
mmdet/core/mask/structures.py
areas
ange33333333/2021_VRDL-HW3
20,190
python
@property def areas(self): 'Compute areas of masks.\n\n This func is modified from `detectron2\n <https://github.com/facebookresearch/detectron2/blob/ffff8acc35ea88ad1cb1806ab0f00b4c1c5dbfd9/detectron2/structures/masks.py#L387>`_.\n The function only works with Polygons using the shoelace formula.\n\n Return:\n ndarray: areas of each instance\n ' area = [] for polygons_per_obj in self.masks: area_per_obj = 0 for p in polygons_per_obj: area_per_obj += self._polygon_area(p[0::2], p[1::2]) area.append(area_per_obj) return np.asarray(area)
@property def areas(self): 'Compute areas of masks.\n\n This func is modified from `detectron2\n <https://github.com/facebookresearch/detectron2/blob/ffff8acc35ea88ad1cb1806ab0f00b4c1c5dbfd9/detectron2/structures/masks.py#L387>`_.\n The function only works with Polygons using the shoelace formula.\n\n Return:\n ndarray: areas of each instance\n ' area = [] for polygons_per_obj in self.masks: area_per_obj = 0 for p in polygons_per_obj: area_per_obj += self._polygon_area(p[0::2], p[1::2]) area.append(area_per_obj) return np.asarray(area)<|docstring|>Compute areas of masks. This func is modified from `detectron2 <https://github.com/facebookresearch/detectron2/blob/ffff8acc35ea88ad1cb1806ab0f00b4c1c5dbfd9/detectron2/structures/masks.py#L387>`_. The function only works with Polygons using the shoelace formula. Return: ndarray: areas of each instance<|endoftext|>
94baae7e4e41557e7f3f9c66bee3c2316c3572d72471e881a164b5c8e7a4a729
def _polygon_area(self, x, y): 'Compute the area of a component of a polygon.\n\n Using the shoelace formula:\n https://stackoverflow.com/questions/24467972/calculate-area-of-polygon-given-x-y-coordinates\n\n Args:\n x (ndarray): x coordinates of the component\n y (ndarray): y coordinates of the component\n\n Return:\n float: the area of the component\n ' return (0.5 * np.abs((np.dot(x, np.roll(y, 1)) - np.dot(y, np.roll(x, 1)))))
Compute the area of a component of a polygon. Using the shoelace formula: https://stackoverflow.com/questions/24467972/calculate-area-of-polygon-given-x-y-coordinates Args: x (ndarray): x coordinates of the component y (ndarray): y coordinates of the component Return: float: the area of the component
mmdet/core/mask/structures.py
_polygon_area
ange33333333/2021_VRDL-HW3
20,190
python
def _polygon_area(self, x, y): 'Compute the area of a component of a polygon.\n\n Using the shoelace formula:\n https://stackoverflow.com/questions/24467972/calculate-area-of-polygon-given-x-y-coordinates\n\n Args:\n x (ndarray): x coordinates of the component\n y (ndarray): y coordinates of the component\n\n Return:\n float: the area of the component\n ' return (0.5 * np.abs((np.dot(x, np.roll(y, 1)) - np.dot(y, np.roll(x, 1)))))
def _polygon_area(self, x, y): 'Compute the area of a component of a polygon.\n\n Using the shoelace formula:\n https://stackoverflow.com/questions/24467972/calculate-area-of-polygon-given-x-y-coordinates\n\n Args:\n x (ndarray): x coordinates of the component\n y (ndarray): y coordinates of the component\n\n Return:\n float: the area of the component\n ' return (0.5 * np.abs((np.dot(x, np.roll(y, 1)) - np.dot(y, np.roll(x, 1)))))<|docstring|>Compute the area of a component of a polygon. Using the shoelace formula: https://stackoverflow.com/questions/24467972/calculate-area-of-polygon-given-x-y-coordinates Args: x (ndarray): x coordinates of the component y (ndarray): y coordinates of the component Return: float: the area of the component<|endoftext|>
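For reference, a minimal standalone sketch of the shoelace computation that _polygon_area performs; it assumes only NumPy, and the unit-square vertices are a made-up example rather than data from the source.

import numpy as np

def polygon_area(x, y):
    # Shoelace formula: half the absolute difference of the cross terms,
    # for vertices given in order around the polygon.
    return 0.5 * np.abs(np.dot(x, np.roll(y, 1)) - np.dot(y, np.roll(x, 1)))

# Unit square (0,0) -> (1,0) -> (1,1) -> (0,1); expected area 1.0
x = np.array([0.0, 1.0, 1.0, 0.0])
y = np.array([0.0, 0.0, 1.0, 1.0])
print(polygon_area(x, y))  # 1.0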
3003f1c4e896b23ccaf54f2991fa5f8eb7eee1d26e5e38cd8440bee4179b4f61
def to_ndarray(self): 'Convert masks to the format of ndarray.' if (len(self.masks) == 0): return np.empty((0, self.height, self.width), dtype=np.uint8) bitmap_masks = [] for poly_per_obj in self.masks: bitmap_masks.append(polygon_to_bitmap(poly_per_obj, self.height, self.width)) return np.stack(bitmap_masks)
Convert masks to the format of ndarray.
mmdet/core/mask/structures.py
to_ndarray
ange33333333/2021_VRDL-HW3
20,190
python
def to_ndarray(self): if (len(self.masks) == 0): return np.empty((0, self.height, self.width), dtype=np.uint8) bitmap_masks = [] for poly_per_obj in self.masks: bitmap_masks.append(polygon_to_bitmap(poly_per_obj, self.height, self.width)) return np.stack(bitmap_masks)
def to_ndarray(self): if (len(self.masks) == 0): return np.empty((0, self.height, self.width), dtype=np.uint8) bitmap_masks = [] for poly_per_obj in self.masks: bitmap_masks.append(polygon_to_bitmap(poly_per_obj, self.height, self.width)) return np.stack(bitmap_masks)<|docstring|>Convert masks to the format of ndarray.<|endoftext|>
219e45312208e47e9e88828520e93cac62bc950e5df129295870d11586ed83de
def to_tensor(self, dtype, device): 'See :func:`BaseInstanceMasks.to_tensor`.' if (len(self.masks) == 0): return torch.empty((0, self.height, self.width), dtype=dtype, device=device) ndarray_masks = self.to_ndarray() return torch.tensor(ndarray_masks, dtype=dtype, device=device)
See :func:`BaseInstanceMasks.to_tensor`.
mmdet/core/mask/structures.py
to_tensor
ange33333333/2021_VRDL-HW3
20,190
python
def to_tensor(self, dtype, device): if (len(self.masks) == 0): return torch.empty((0, self.height, self.width), dtype=dtype, device=device) ndarray_masks = self.to_ndarray() return torch.tensor(ndarray_masks, dtype=dtype, device=device)
def to_tensor(self, dtype, device): if (len(self.masks) == 0): return torch.empty((0, self.height, self.width), dtype=dtype, device=device) ndarray_masks = self.to_ndarray() return torch.tensor(ndarray_masks, dtype=dtype, device=device)<|docstring|>See :func:`BaseInstanceMasks.to_tensor`.<|endoftext|>
c62ab217bf0133782b9a8026c780e7cf10dcf2ae8735abc393d29a8f72fbca38
@classmethod def random(cls, num_masks=3, height=32, width=32, n_verts=5, dtype=np.float32, rng=None): "Generate random polygon masks for demo / testing purposes.\n\n Adapted from [1]_\n\n References:\n .. [1] https://gitlab.kitware.com/computer-vision/kwimage/-/blob/928cae35ca8/kwimage/structs/polygon.py#L379 # noqa: E501\n\n Example:\n >>> from mmdet.core.mask.structures import PolygonMasks\n >>> self = PolygonMasks.random()\n >>> print('self = {}'.format(self))\n " from mmdet.utils.util_random import ensure_rng rng = ensure_rng(rng) def _gen_polygon(n, irregularity, spikeyness): 'Creates the polygon by sampling points on a circle around the\n centre. Random noise is added by varying the angular spacing\n between sequential points, and by varying the radial distance of\n each point from the centre.\n\n Based on original code by Mike Ounsworth\n\n Args:\n n (int): number of vertices\n irregularity (float): [0,1] indicating how much variance there\n is in the angular spacing of vertices. [0,1] will map to\n [0, 2pi/numberOfVerts]\n spikeyness (float): [0,1] indicating how much variance there is\n in each vertex from the circle of radius aveRadius. [0,1]\n will map to [0, aveRadius]\n\n Returns:\n a list of vertices, in CCW order.\n ' from scipy.stats import truncnorm (cx, cy) = (0.0, 0.0) radius = 1 tau = (np.pi * 2) irregularity = (((np.clip(irregularity, 0, 1) * 2) * np.pi) / n) spikeyness = np.clip(spikeyness, 1e-09, 1) lower = ((tau / n) - irregularity) upper = ((tau / n) + irregularity) angle_steps = rng.uniform(lower, upper, n) k = (angle_steps.sum() / (2 * np.pi)) angles = ((angle_steps / k).cumsum() + rng.uniform(0, tau)) low = 0 high = (2 * radius) mean = radius std = spikeyness a = ((low - mean) / std) b = ((high - mean) / std) tnorm = truncnorm(a=a, b=b, loc=mean, scale=std) radii = tnorm.rvs(n, random_state=rng) x_pts = (cx + (radii * np.cos(angles))) y_pts = (cy + (radii * np.sin(angles))) points = np.hstack([x_pts[:, None], y_pts[:, None]]) points = (points - points.min(axis=0)) points = (points / points.max(axis=0)) points = (points * ((rng.rand() * 0.8) + 0.2)) min_pt = points.min(axis=0) max_pt = points.max(axis=0) high = (1 - max_pt) low = (0 - min_pt) offset = ((rng.rand(2) * (high - low)) + low) points = (points + offset) return points def _order_vertices(verts): '\n References:\n https://stackoverflow.com/questions/1709283/how-can-i-sort-a-coordinate-list-for-a-rectangle-counterclockwise\n ' mlat = (verts.T[0].sum() / len(verts)) mlng = (verts.T[1].sum() / len(verts)) tau = (np.pi * 2) angle = ((np.arctan2((mlat - verts.T[0]), (verts.T[1] - mlng)) + tau) % tau) sortx = angle.argsort() verts = verts.take(sortx, axis=0) return verts masks = [] for _ in range(num_masks): exterior = _order_vertices(_gen_polygon(n_verts, 0.9, 0.9)) exterior = (exterior * [(width, height)]).astype(dtype) masks.append([exterior.ravel()]) self = cls(masks, height, width) return self
Generate random polygon masks for demo / testing purposes. Adapted from [1]_ References: .. [1] https://gitlab.kitware.com/computer-vision/kwimage/-/blob/928cae35ca8/kwimage/structs/polygon.py#L379 # noqa: E501 Example: >>> from mmdet.core.mask.structures import PolygonMasks >>> self = PolygonMasks.random() >>> print('self = {}'.format(self))
mmdet/core/mask/structures.py
random
ange33333333/2021_VRDL-HW3
20,190
python
@classmethod def random(cls, num_masks=3, height=32, width=32, n_verts=5, dtype=np.float32, rng=None): "Generate random polygon masks for demo / testing purposes.\n\n Adapted from [1]_\n\n References:\n .. [1] https://gitlab.kitware.com/computer-vision/kwimage/-/blob/928cae35ca8/kwimage/structs/polygon.py#L379 # noqa: E501\n\n Example:\n >>> from mmdet.core.mask.structures import PolygonMasks\n >>> self = PolygonMasks.random()\n >>> print('self = {}'.format(self))\n " from mmdet.utils.util_random import ensure_rng rng = ensure_rng(rng) def _gen_polygon(n, irregularity, spikeyness): 'Creates the polygon by sampling points on a circle around the\n centre. Random noise is added by varying the angular spacing\n between sequential points, and by varying the radial distance of\n each point from the centre.\n\n Based on original code by Mike Ounsworth\n\n Args:\n n (int): number of vertices\n irregularity (float): [0,1] indicating how much variance there\n is in the angular spacing of vertices. [0,1] will map to\n [0, 2pi/numberOfVerts]\n spikeyness (float): [0,1] indicating how much variance there is\n in each vertex from the circle of radius aveRadius. [0,1]\n will map to [0, aveRadius]\n\n Returns:\n a list of vertices, in CCW order.\n ' from scipy.stats import truncnorm (cx, cy) = (0.0, 0.0) radius = 1 tau = (np.pi * 2) irregularity = (((np.clip(irregularity, 0, 1) * 2) * np.pi) / n) spikeyness = np.clip(spikeyness, 1e-09, 1) lower = ((tau / n) - irregularity) upper = ((tau / n) + irregularity) angle_steps = rng.uniform(lower, upper, n) k = (angle_steps.sum() / (2 * np.pi)) angles = ((angle_steps / k).cumsum() + rng.uniform(0, tau)) low = 0 high = (2 * radius) mean = radius std = spikeyness a = ((low - mean) / std) b = ((high - mean) / std) tnorm = truncnorm(a=a, b=b, loc=mean, scale=std) radii = tnorm.rvs(n, random_state=rng) x_pts = (cx + (radii * np.cos(angles))) y_pts = (cy + (radii * np.sin(angles))) points = np.hstack([x_pts[:, None], y_pts[:, None]]) points = (points - points.min(axis=0)) points = (points / points.max(axis=0)) points = (points * ((rng.rand() * 0.8) + 0.2)) min_pt = points.min(axis=0) max_pt = points.max(axis=0) high = (1 - max_pt) low = (0 - min_pt) offset = ((rng.rand(2) * (high - low)) + low) points = (points + offset) return points def _order_vertices(verts): '\n References:\n https://stackoverflow.com/questions/1709283/how-can-i-sort-a-coordinate-list-for-a-rectangle-counterclockwise\n ' mlat = (verts.T[0].sum() / len(verts)) mlng = (verts.T[1].sum() / len(verts)) tau = (np.pi * 2) angle = ((np.arctan2((mlat - verts.T[0]), (verts.T[1] - mlng)) + tau) % tau) sortx = angle.argsort() verts = verts.take(sortx, axis=0) return verts masks = [] for _ in range(num_masks): exterior = _order_vertices(_gen_polygon(n_verts, 0.9, 0.9)) exterior = (exterior * [(width, height)]).astype(dtype) masks.append([exterior.ravel()]) self = cls(masks, height, width) return self
@classmethod def random(cls, num_masks=3, height=32, width=32, n_verts=5, dtype=np.float32, rng=None): "Generate random polygon masks for demo / testing purposes.\n\n Adapted from [1]_\n\n References:\n .. [1] https://gitlab.kitware.com/computer-vision/kwimage/-/blob/928cae35ca8/kwimage/structs/polygon.py#L379 # noqa: E501\n\n Example:\n >>> from mmdet.core.mask.structures import PolygonMasks\n >>> self = PolygonMasks.random()\n >>> print('self = {}'.format(self))\n " from mmdet.utils.util_random import ensure_rng rng = ensure_rng(rng) def _gen_polygon(n, irregularity, spikeyness): 'Creates the polygon by sampling points on a circle around the\n centre. Random noise is added by varying the angular spacing\n between sequential points, and by varying the radial distance of\n each point from the centre.\n\n Based on original code by Mike Ounsworth\n\n Args:\n n (int): number of vertices\n irregularity (float): [0,1] indicating how much variance there\n is in the angular spacing of vertices. [0,1] will map to\n [0, 2pi/numberOfVerts]\n spikeyness (float): [0,1] indicating how much variance there is\n in each vertex from the circle of radius aveRadius. [0,1]\n will map to [0, aveRadius]\n\n Returns:\n a list of vertices, in CCW order.\n ' from scipy.stats import truncnorm (cx, cy) = (0.0, 0.0) radius = 1 tau = (np.pi * 2) irregularity = (((np.clip(irregularity, 0, 1) * 2) * np.pi) / n) spikeyness = np.clip(spikeyness, 1e-09, 1) lower = ((tau / n) - irregularity) upper = ((tau / n) + irregularity) angle_steps = rng.uniform(lower, upper, n) k = (angle_steps.sum() / (2 * np.pi)) angles = ((angle_steps / k).cumsum() + rng.uniform(0, tau)) low = 0 high = (2 * radius) mean = radius std = spikeyness a = ((low - mean) / std) b = ((high - mean) / std) tnorm = truncnorm(a=a, b=b, loc=mean, scale=std) radii = tnorm.rvs(n, random_state=rng) x_pts = (cx + (radii * np.cos(angles))) y_pts = (cy + (radii * np.sin(angles))) points = np.hstack([x_pts[:, None], y_pts[:, None]]) points = (points - points.min(axis=0)) points = (points / points.max(axis=0)) points = (points * ((rng.rand() * 0.8) + 0.2)) min_pt = points.min(axis=0) max_pt = points.max(axis=0) high = (1 - max_pt) low = (0 - min_pt) offset = ((rng.rand(2) * (high - low)) + low) points = (points + offset) return points def _order_vertices(verts): '\n References:\n https://stackoverflow.com/questions/1709283/how-can-i-sort-a-coordinate-list-for-a-rectangle-counterclockwise\n ' mlat = (verts.T[0].sum() / len(verts)) mlng = (verts.T[1].sum() / len(verts)) tau = (np.pi * 2) angle = ((np.arctan2((mlat - verts.T[0]), (verts.T[1] - mlng)) + tau) % tau) sortx = angle.argsort() verts = verts.take(sortx, axis=0) return verts masks = [] for _ in range(num_masks): exterior = _order_vertices(_gen_polygon(n_verts, 0.9, 0.9)) exterior = (exterior * [(width, height)]).astype(dtype) masks.append([exterior.ravel()]) self = cls(masks, height, width) return self<|docstring|>Generate random polygon masks for demo / testing purposes. Adapted from [1]_ References: .. [1] https://gitlab.kitware.com/computer-vision/kwimage/-/blob/928cae35ca8/kwimage/structs/polygon.py#L379 # noqa: E501 Example: >>> from mmdet.core.mask.structures import PolygonMasks >>> self = PolygonMasks.random() >>> print('self = {}'.format(self))<|endoftext|>
4a72f01055979713c8b257a95cfc471ab31823eb74b1ac385e22deb44412379c
def _gen_polygon(n, irregularity, spikeyness): 'Creates the polygon by sampling points on a circle around the\n centre. Random noise is added by varying the angular spacing\n between sequential points, and by varying the radial distance of\n each point from the centre.\n\n Based on original code by Mike Ounsworth\n\n Args:\n n (int): number of vertices\n irregularity (float): [0,1] indicating how much variance there\n is in the angular spacing of vertices. [0,1] will map to\n [0, 2pi/numberOfVerts]\n spikeyness (float): [0,1] indicating how much variance there is\n in each vertex from the circle of radius aveRadius. [0,1]\n will map to [0, aveRadius]\n\n Returns:\n a list of vertices, in CCW order.\n ' from scipy.stats import truncnorm (cx, cy) = (0.0, 0.0) radius = 1 tau = (np.pi * 2) irregularity = (((np.clip(irregularity, 0, 1) * 2) * np.pi) / n) spikeyness = np.clip(spikeyness, 1e-09, 1) lower = ((tau / n) - irregularity) upper = ((tau / n) + irregularity) angle_steps = rng.uniform(lower, upper, n) k = (angle_steps.sum() / (2 * np.pi)) angles = ((angle_steps / k).cumsum() + rng.uniform(0, tau)) low = 0 high = (2 * radius) mean = radius std = spikeyness a = ((low - mean) / std) b = ((high - mean) / std) tnorm = truncnorm(a=a, b=b, loc=mean, scale=std) radii = tnorm.rvs(n, random_state=rng) x_pts = (cx + (radii * np.cos(angles))) y_pts = (cy + (radii * np.sin(angles))) points = np.hstack([x_pts[:, None], y_pts[:, None]]) points = (points - points.min(axis=0)) points = (points / points.max(axis=0)) points = (points * ((rng.rand() * 0.8) + 0.2)) min_pt = points.min(axis=0) max_pt = points.max(axis=0) high = (1 - max_pt) low = (0 - min_pt) offset = ((rng.rand(2) * (high - low)) + low) points = (points + offset) return points
Creates the polygon by sampling points on a circle around the centre. Random noise is added by varying the angular spacing between sequential points, and by varying the radial distance of each point from the centre. Based on original code by Mike Ounsworth Args: n (int): number of vertices irregularity (float): [0,1] indicating how much variance there is in the angular spacing of vertices. [0,1] will map to [0, 2pi/numberOfVerts] spikeyness (float): [0,1] indicating how much variance there is in each vertex from the circle of radius aveRadius. [0,1] will map to [0, aveRadius] Returns: a list of vertices, in CCW order.
mmdet/core/mask/structures.py
_gen_polygon
ange33333333/2021_VRDL-HW3
20,190
python
def _gen_polygon(n, irregularity, spikeyness): 'Creates the polygon by sampling points on a circle around the\n centre. Random noise is added by varying the angular spacing\n between sequential points, and by varying the radial distance of\n each point from the centre.\n\n Based on original code by Mike Ounsworth\n\n Args:\n n (int): number of vertices\n irregularity (float): [0,1] indicating how much variance there\n is in the angular spacing of vertices. [0,1] will map to\n [0, 2pi/numberOfVerts]\n spikeyness (float): [0,1] indicating how much variance there is\n in each vertex from the circle of radius aveRadius. [0,1]\n will map to [0, aveRadius]\n\n Returns:\n a list of vertices, in CCW order.\n ' from scipy.stats import truncnorm (cx, cy) = (0.0, 0.0) radius = 1 tau = (np.pi * 2) irregularity = (((np.clip(irregularity, 0, 1) * 2) * np.pi) / n) spikeyness = np.clip(spikeyness, 1e-09, 1) lower = ((tau / n) - irregularity) upper = ((tau / n) + irregularity) angle_steps = rng.uniform(lower, upper, n) k = (angle_steps.sum() / (2 * np.pi)) angles = ((angle_steps / k).cumsum() + rng.uniform(0, tau)) low = 0 high = (2 * radius) mean = radius std = spikeyness a = ((low - mean) / std) b = ((high - mean) / std) tnorm = truncnorm(a=a, b=b, loc=mean, scale=std) radii = tnorm.rvs(n, random_state=rng) x_pts = (cx + (radii * np.cos(angles))) y_pts = (cy + (radii * np.sin(angles))) points = np.hstack([x_pts[:, None], y_pts[:, None]]) points = (points - points.min(axis=0)) points = (points / points.max(axis=0)) points = (points * ((rng.rand() * 0.8) + 0.2)) min_pt = points.min(axis=0) max_pt = points.max(axis=0) high = (1 - max_pt) low = (0 - min_pt) offset = ((rng.rand(2) * (high - low)) + low) points = (points + offset) return points
def _gen_polygon(n, irregularity, spikeyness): 'Creates the polygon by sampling points on a circle around the\n centre. Random noise is added by varying the angular spacing\n between sequential points, and by varying the radial distance of\n each point from the centre.\n\n Based on original code by Mike Ounsworth\n\n Args:\n n (int): number of vertices\n irregularity (float): [0,1] indicating how much variance there\n is in the angular spacing of vertices. [0,1] will map to\n [0, 2pi/numberOfVerts]\n spikeyness (float): [0,1] indicating how much variance there is\n in each vertex from the circle of radius aveRadius. [0,1]\n will map to [0, aveRadius]\n\n Returns:\n a list of vertices, in CCW order.\n ' from scipy.stats import truncnorm (cx, cy) = (0.0, 0.0) radius = 1 tau = (np.pi * 2) irregularity = (((np.clip(irregularity, 0, 1) * 2) * np.pi) / n) spikeyness = np.clip(spikeyness, 1e-09, 1) lower = ((tau / n) - irregularity) upper = ((tau / n) + irregularity) angle_steps = rng.uniform(lower, upper, n) k = (angle_steps.sum() / (2 * np.pi)) angles = ((angle_steps / k).cumsum() + rng.uniform(0, tau)) low = 0 high = (2 * radius) mean = radius std = spikeyness a = ((low - mean) / std) b = ((high - mean) / std) tnorm = truncnorm(a=a, b=b, loc=mean, scale=std) radii = tnorm.rvs(n, random_state=rng) x_pts = (cx + (radii * np.cos(angles))) y_pts = (cy + (radii * np.sin(angles))) points = np.hstack([x_pts[:, None], y_pts[:, None]]) points = (points - points.min(axis=0)) points = (points / points.max(axis=0)) points = (points * ((rng.rand() * 0.8) + 0.2)) min_pt = points.min(axis=0) max_pt = points.max(axis=0) high = (1 - max_pt) low = (0 - min_pt) offset = ((rng.rand(2) * (high - low)) + low) points = (points + offset) return points<|docstring|>Creates the polygon by sampling points on a circle around the centre. Random noise is added by varying the angular spacing between sequential points, and by varying the radial distance of each point from the centre. Based on original code by Mike Ounsworth Args: n (int): number of vertices irregularity (float): [0,1] indicating how much variance there is in the angular spacing of vertices. [0,1] will map to [0, 2pi/numberOfVerts] spikeyness (float): [0,1] indicating how much variance there is in each vertex from the circle of radius aveRadius. [0,1] will map to [0, aveRadius] Returns: a list of vertices, in CCW order.<|endoftext|>
ec60e2c71293aec0c2283b8b2ae5ecab9f7dfdd231095739752dad38f23318ba
def _order_vertices(verts): '\n References:\n https://stackoverflow.com/questions/1709283/how-can-i-sort-a-coordinate-list-for-a-rectangle-counterclockwise\n ' mlat = (verts.T[0].sum() / len(verts)) mlng = (verts.T[1].sum() / len(verts)) tau = (np.pi * 2) angle = ((np.arctan2((mlat - verts.T[0]), (verts.T[1] - mlng)) + tau) % tau) sortx = angle.argsort() verts = verts.take(sortx, axis=0) return verts
References: https://stackoverflow.com/questions/1709283/how-can-i-sort-a-coordinate-list-for-a-rectangle-counterclockwise
mmdet/core/mask/structures.py
_order_vertices
ange33333333/2021_VRDL-HW3
20,190
python
def _order_vertices(verts): '\n References:\n https://stackoverflow.com/questions/1709283/how-can-i-sort-a-coordinate-list-for-a-rectangle-counterclockwise\n ' mlat = (verts.T[0].sum() / len(verts)) mlng = (verts.T[1].sum() / len(verts)) tau = (np.pi * 2) angle = ((np.arctan2((mlat - verts.T[0]), (verts.T[1] - mlng)) + tau) % tau) sortx = angle.argsort() verts = verts.take(sortx, axis=0) return verts
def _order_vertices(verts): '\n References:\n https://stackoverflow.com/questions/1709283/how-can-i-sort-a-coordinate-list-for-a-rectangle-counterclockwise\n ' mlat = (verts.T[0].sum() / len(verts)) mlng = (verts.T[1].sum() / len(verts)) tau = (np.pi * 2) angle = ((np.arctan2((mlat - verts.T[0]), (verts.T[1] - mlng)) + tau) % tau) sortx = angle.argsort() verts = verts.take(sortx, axis=0) return verts<|docstring|>References: https://stackoverflow.com/questions/1709283/how-can-i-sort-a-coordinate-list-for-a-rectangle-counterclockwise<|endoftext|>
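A small self-contained sketch of the centroid-angle sort performed by _order_vertices; only NumPy is assumed, and the scrambled unit-square vertices are a made-up example.

import numpy as np

# Vertices of a unit square given in scrambled order.
verts = np.array([[1.0, 1.0], [0.0, 0.0], [1.0, 0.0], [0.0, 1.0]])
cx = verts.T[0].sum() / len(verts)   # centroid x
cy = verts.T[1].sum() / len(verts)   # centroid y
tau = np.pi * 2
# Angle of each vertex around the centroid, wrapped into [0, 2*pi).
angle = (np.arctan2(cx - verts.T[0], verts.T[1] - cy) + tau) % tau
ordered = verts.take(angle.argsort(), axis=0)
print(ordered)  # the same vertices, now in a consistent winding order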
851446c3c4fde55d2e578c6ff87362ded6a1ac3a1a88d430190d3e6eb15d1a44
def three_sum(self, nums: List[int]) -> List[List[int]]: '\n Three sum\n Args:\n nums: array\n Returns: list of triplets\n ' s = set() nums.sort() for i in range(len(nums)): j = (i + 1) k = (len(nums) - 1) while (j < k): total = ((nums[i] + nums[j]) + nums[k]) if (total == 0): s.add((nums[i], nums[j], nums[k])) j += 1 elif (total < 0): j += 1 else: k -= 1 return list(s)
Three sum Args: nums: array Returns: list of triplets
src/leetcodepython/math/three_sum_15.py
three_sum
zhangyu345293721/leetcode
90
python
def three_sum(self, nums: List[int]) -> List[List[int]]: '\n Three sum\n Args:\n nums: array\n Returns: list of triplets\n ' s = set() nums.sort() for i in range(len(nums)): j = (i + 1) k = (len(nums) - 1) while (j < k): total = ((nums[i] + nums[j]) + nums[k]) if (total == 0): s.add((nums[i], nums[j], nums[k])) j += 1 elif (total < 0): j += 1 else: k -= 1 return list(s)
def three_sum(self, nums: List[int]) -> List[List[int]]: '\n Three sum\n Args:\n nums: array\n Returns: list of triplets\n ' s = set() nums.sort() for i in range(len(nums)): j = (i + 1) k = (len(nums) - 1) while (j < k): total = ((nums[i] + nums[j]) + nums[k]) if (total == 0): s.add((nums[i], nums[j], nums[k])) j += 1 elif (total < 0): j += 1 else: k -= 1 return list(s)<|docstring|>Three sum Args: nums: array Returns: list of triplets<|endoftext|>
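A hedged, runnable sketch of the sort-plus-two-pointer 3-sum routine above; the Solution wrapper class and the sample input are assumptions (the dataset row only shows the method), and the result tuples are converted to lists to match the annotated return type.

from typing import List

class Solution:  # hypothetical wrapper class, not shown in the source row
    def three_sum(self, nums: List[int]) -> List[List[int]]:
        s = set()
        nums.sort()
        for i in range(len(nums)):
            j, k = i + 1, len(nums) - 1
            while j < k:
                total = nums[i] + nums[j] + nums[k]
                if total == 0:
                    s.add((nums[i], nums[j], nums[k]))
                    j += 1
                elif total < 0:
                    j += 1   # sum too small: advance the left pointer
                else:
                    k -= 1   # sum too large: pull in the right pointer
        return [list(t) for t in s]

print(Solution().three_sum([-1, 0, 1, 2, -1, -4]))
# e.g. [[-1, -1, 2], [-1, 0, 1]] (order of triplets is not guaranteed)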
0dc74f260489faf07cbf86e19a5c641d086fb501d876c93d2dcf6632b608fae5
def choose(self) -> None: 'Choose a value in the age radio buttons group.' self.page.choose(RADIO_AGE, self.event.age)
Choose a value in the age radio buttons group.
tests/pbraiders/pages/events/actions/age_write.py
choose
pbraiders/vanilla-test-bdd
1
python
def choose(self) -> None: self.page.choose(RADIO_AGE, self.event.age)
def choose(self) -> None: self.page.choose(RADIO_AGE, self.event.age)<|docstring|>Choose a value in the age radio buttons group.<|endoftext|>
2dcb5e5c52f0f76d294e65e1256fb9b7434e712f30c384010186230887cb46db
def __init__(self, *args, **kwargs): '\n Args:\n args (List[Tuple[String, String]]): key-value pairs to store\n kwargs (Dict[string, string]): key-value pairs to store\n ' self.store = dict() self.update(dict(*args, **kwargs))
Args: args (List[Tuple[String, String]]): key-value pairs to store kwargs (Dict[string, string]): key-value pairs to store
src/flask_pyoidc/provider_configuration.py
__init__
lionslon/Flask-pyoidc
64
python
def __init__(self, *args, **kwargs): '\n Args:\n args (List[Tuple[String, String]]): key-value pairs to store\n kwargs (Dict[string, string]): key-value pairs to store\n ' self.store = dict() self.update(dict(*args, **kwargs))
def __init__(self, *args, **kwargs): '\n Args:\n args (List[Tuple[String, String]]): key-value pairs to store\n kwargs (Dict[string, string]): key-value pairs to store\n ' self.store = dict() self.update(dict(*args, **kwargs))<|docstring|>Args: args (List[Tuple[String, String]]): key-value pairs to store kwargs (Dict[string, string]): key-value pairs to store<|endoftext|>
82718142354a7657fc1b833cfd62c98909c11a4b18542b14df7bc82d64bfdcbd
def __init__(self, issuer=None, authorization_endpoint=None, jwks_uri=None, **kwargs): "\n Args:\n issuer (str): OP Issuer Identifier.\n authorization_endpoint (str): URL of the OP's Authorization Endpoint\n jwks_uri (str): URL of the OP's JSON Web Key Set\n kwargs (dict): key-value pairs corresponding to\n `OpenID Provider Metadata`_\n\n .. _OpenID Provider Metadata:\n https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata\n " super(ProviderMetadata, self).__init__(issuer=issuer, authorization_endpoint=authorization_endpoint, jwks_uri=jwks_uri, **kwargs)
Args: issuer (str): OP Issuer Identifier. authorization_endpoint (str): URL of the OP's Authorization Endpoint jwks_uri (str): URL of the OP's JSON Web Key Set kwargs (dict): key-value pairs corresponding to `OpenID Provider Metadata`_ .. _OpenID Provider Metadata: https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata
src/flask_pyoidc/provider_configuration.py
__init__
lionslon/Flask-pyoidc
64
python
def __init__(self, issuer=None, authorization_endpoint=None, jwks_uri=None, **kwargs): "\n Args:\n issuer (str): OP Issuer Identifier.\n authorization_endpoint (str): URL of the OP's Authorization Endpoint\n jwks_uri (str): URL of the OP's JSON Web Key Set\n kwargs (dict): key-value pairs corresponding to\n `OpenID Provider Metadata`_\n\n .. _OpenID Provider Metadata:\n https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata\n " super(ProviderMetadata, self).__init__(issuer=issuer, authorization_endpoint=authorization_endpoint, jwks_uri=jwks_uri, **kwargs)
def __init__(self, issuer=None, authorization_endpoint=None, jwks_uri=None, **kwargs): "\n Args:\n issuer (str): OP Issuer Identifier.\n authorization_endpoint (str): URL of the OP's Authorization Endpoint\n jwks_uri (str): URL of the OP's JSON Web Key Set\n kwargs (dict): key-value pairs corresponding to\n `OpenID Provider Metadata`_\n\n .. _OpenID Provider Metadata:\n https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata\n " super(ProviderMetadata, self).__init__(issuer=issuer, authorization_endpoint=authorization_endpoint, jwks_uri=jwks_uri, **kwargs)<|docstring|>Args: issuer (str): OP Issuer Identifier. authorization_endpoint (str): URL of the OP's Authorization Endpoint jwks_uri (str): URL of the OP's JSON Web Key Set kwargs (dict): key-value pairs corresponding to `OpenID Provider Metadata`_ .. _OpenID Provider Metadata: https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata<|endoftext|>
4bc5b632bd70b73c85aa9c0e709cc3c35edb1a2a3f3d8fd959631d842446589b
def __init__(self, client_id=None, client_secret=None, **kwargs): '\n Args:\n client_id (str): client identifier representing the client\n client_secret (str): client secret to authenticate the client with\n the OP\n kwargs (dict): key-value pairs\n ' super(ClientMetadata, self).__init__(client_id=client_id, client_secret=client_secret, **kwargs)
Args: client_id (str): client identifier representing the client client_secret (str): client secret to authenticate the client with the OP kwargs (dict): key-value pairs
src/flask_pyoidc/provider_configuration.py
__init__
lionslon/Flask-pyoidc
64
python
def __init__(self, client_id=None, client_secret=None, **kwargs): '\n Args:\n client_id (str): client identifier representing the client\n client_secret (str): client secret to authenticate the client with\n the OP\n kwargs (dict): key-value pairs\n ' super(ClientMetadata, self).__init__(client_id=client_id, client_secret=client_secret, **kwargs)
def __init__(self, client_id=None, client_secret=None, **kwargs): '\n Args:\n client_id (str): client identifier representing the client\n client_secret (str): client secret to authenticate the client with\n the OP\n kwargs (dict): key-value pairs\n ' super(ClientMetadata, self).__init__(client_id=client_id, client_secret=client_secret, **kwargs)<|docstring|>Args: client_id (str): client identifier representing the client client_secret (str): client secret to authenticate the client with the OP kwargs (dict): key-value pairs<|endoftext|>
b8bb6552f4c3ad3953b6018f60c0fe09a7ffd7431159abdc8edcc082c5360027
def __init__(self, issuer=None, provider_metadata=None, userinfo_http_method='GET', client_registration_info=None, client_metadata=None, auth_request_params=None, session_refresh_interval_seconds=None, requests_session=None): '\n Args:\n issuer (str): OP Issuer Identifier. If this is specified discovery will be used to fetch the provider\n metadata, otherwise `provider_metadata` must be specified.\n provider_metadata (ProviderMetadata): OP metadata,\n userinfo_http_method (Optional[str]): HTTP method (GET or POST) to use when sending the UserInfo Request.\n If `none` is specified, no userinfo request will be sent.\n client_registration_info (ClientRegistrationInfo): Client metadata to register your app\n dynamically with the provider. Either this or `registered_client_metadata` must be specified.\n client_metadata (ClientMetadata): Client metadata if your app is statically\n registered with the provider. Either this or `client_registration_info` must be specified.\n auth_request_params (dict): Extra parameters that should be included in the authentication request.\n session_refresh_interval_seconds (int): Length of interval (in seconds) between attempted user data\n refreshes.\n requests_session (requests.Session): custom requests object to allow for example retry handling, etc.\n ' if ((not issuer) and (not provider_metadata)): raise ValueError("Specify either 'issuer' or 'provider_metadata'.") if ((not client_registration_info) and (not client_metadata)): raise ValueError("Specify either 'client_registration_info' or 'client_metadata'.") self._issuer = issuer self._provider_metadata = provider_metadata self._client_registration_info = client_registration_info self._client_metadata = client_metadata self.userinfo_endpoint_method = userinfo_http_method self.auth_request_params = (auth_request_params or {}) self.session_refresh_interval_seconds = session_refresh_interval_seconds self.requests_session = (requests_session or requests.Session())
Args: issuer (str): OP Issuer Identifier. If this is specified discovery will be used to fetch the provider metadata, otherwise `provider_metadata` must be specified. provider_metadata (ProviderMetadata): OP metadata, userinfo_http_method (Optional[str]): HTTP method (GET or POST) to use when sending the UserInfo Request. If `none` is specified, no userinfo request will be sent. client_registration_info (ClientRegistrationInfo): Client metadata to register your app dynamically with the provider. Either this or `registered_client_metadata` must be specified. client_metadata (ClientMetadata): Client metadata if your app is statically registered with the provider. Either this or `client_registration_info` must be specified. auth_request_params (dict): Extra parameters that should be included in the authentication request. session_refresh_interval_seconds (int): Length of interval (in seconds) between attempted user data refreshes. requests_session (requests.Session): custom requests object to allow for example retry handling, etc.
src/flask_pyoidc/provider_configuration.py
__init__
lionslon/Flask-pyoidc
64
python
def __init__(self, issuer=None, provider_metadata=None, userinfo_http_method='GET', client_registration_info=None, client_metadata=None, auth_request_params=None, session_refresh_interval_seconds=None, requests_session=None): '\n Args:\n issuer (str): OP Issuer Identifier. If this is specified discovery will be used to fetch the provider\n metadata, otherwise `provider_metadata` must be specified.\n provider_metadata (ProviderMetadata): OP metadata,\n userinfo_http_method (Optional[str]): HTTP method (GET or POST) to use when sending the UserInfo Request.\n If `none` is specified, no userinfo request will be sent.\n client_registration_info (ClientRegistrationInfo): Client metadata to register your app\n dynamically with the provider. Either this or `registered_client_metadata` must be specified.\n client_metadata (ClientMetadata): Client metadata if your app is statically\n registered with the provider. Either this or `client_registration_info` must be specified.\n auth_request_params (dict): Extra parameters that should be included in the authentication request.\n session_refresh_interval_seconds (int): Length of interval (in seconds) between attempted user data\n refreshes.\n requests_session (requests.Session): custom requests object to allow for example retry handling, etc.\n ' if ((not issuer) and (not provider_metadata)): raise ValueError("Specify either 'issuer' or 'provider_metadata'.") if ((not client_registration_info) and (not client_metadata)): raise ValueError("Specify either 'client_registration_info' or 'client_metadata'.") self._issuer = issuer self._provider_metadata = provider_metadata self._client_registration_info = client_registration_info self._client_metadata = client_metadata self.userinfo_endpoint_method = userinfo_http_method self.auth_request_params = (auth_request_params or {}) self.session_refresh_interval_seconds = session_refresh_interval_seconds self.requests_session = (requests_session or requests.Session())
def __init__(self, issuer=None, provider_metadata=None, userinfo_http_method='GET', client_registration_info=None, client_metadata=None, auth_request_params=None, session_refresh_interval_seconds=None, requests_session=None): '\n Args:\n issuer (str): OP Issuer Identifier. If this is specified discovery will be used to fetch the provider\n metadata, otherwise `provider_metadata` must be specified.\n provider_metadata (ProviderMetadata): OP metadata,\n userinfo_http_method (Optional[str]): HTTP method (GET or POST) to use when sending the UserInfo Request.\n If `none` is specified, no userinfo request will be sent.\n client_registration_info (ClientRegistrationInfo): Client metadata to register your app\n dynamically with the provider. Either this or `registered_client_metadata` must be specified.\n client_metadata (ClientMetadata): Client metadata if your app is statically\n registered with the provider. Either this or `client_registration_info` must be specified.\n auth_request_params (dict): Extra parameters that should be included in the authentication request.\n session_refresh_interval_seconds (int): Length of interval (in seconds) between attempted user data\n refreshes.\n requests_session (requests.Session): custom requests object to allow for example retry handling, etc.\n ' if ((not issuer) and (not provider_metadata)): raise ValueError("Specify either 'issuer' or 'provider_metadata'.") if ((not client_registration_info) and (not client_metadata)): raise ValueError("Specify either 'client_registration_info' or 'client_metadata'.") self._issuer = issuer self._provider_metadata = provider_metadata self._client_registration_info = client_registration_info self._client_metadata = client_metadata self.userinfo_endpoint_method = userinfo_http_method self.auth_request_params = (auth_request_params or {}) self.session_refresh_interval_seconds = session_refresh_interval_seconds self.requests_session = (requests_session or requests.Session())<|docstring|>Args: issuer (str): OP Issuer Identifier. If this is specified discovery will be used to fetch the provider metadata, otherwise `provider_metadata` must be specified. provider_metadata (ProviderMetadata): OP metadata, userinfo_http_method (Optional[str]): HTTP method (GET or POST) to use when sending the UserInfo Request. If `none` is specified, no userinfo request will be sent. client_registration_info (ClientRegistrationInfo): Client metadata to register your app dynamically with the provider. Either this or `registered_client_metadata` must be specified. client_metadata (ClientMetadata): Client metadata if your app is statically registered with the provider. Either this or `client_registration_info` must be specified. auth_request_params (dict): Extra parameters that should be included in the authentication request. session_refresh_interval_seconds (int): Length of interval (in seconds) between attempted user data refreshes. requests_session (requests.Session): custom requests object to allow for example retry handling, etc.<|endoftext|>
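A hedged configuration sketch based only on the constructors shown in this file; the issuer URL, client credentials, scope, and refresh interval are placeholders rather than values from the source.

from flask_pyoidc.provider_configuration import (ClientMetadata,
                                                 ProviderConfiguration)

config = ProviderConfiguration(
    issuer='https://op.example.com',   # discovery fetches the provider metadata
    client_metadata=ClientMetadata(client_id='my-client',
                                   client_secret='my-secret'),
    auth_request_params={'scope': ['openid', 'profile']},
    session_refresh_interval_seconds=1800,
)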
47dc3061f5bbc178b22ca390b221df6cf08fc343484174a5aa4266ff02d088c1
def install(application, io_loop=None, **kwargs): 'Call this to install Avro publishing for the Tornado application.\n\n :rtype: bool\n\n ' amqp.install(application, io_loop=io_loop, **kwargs) if ('avro_schema_uri_format' not in application.settings): LOGGER.warning('avro_schema_uri_format is not set, using default') return True
Call this to install Avro publishing for the Tornado application. :rtype: bool
sprockets/mixins/avro_publisher/__init__.py
install
prashantkb/sprockets.mixins.avro-publisher
0
python
def install(application, io_loop=None, **kwargs): 'Call this to install Avro publishing for the Tornado application.\n\n :rtype: bool\n\n ' amqp.install(application, io_loop=io_loop, **kwargs) if ('avro_schema_uri_format' not in application.settings): LOGGER.warning('avro_schema_uri_format is not set, using default') return True
def install(application, io_loop=None, **kwargs): 'Call this to install Avro publishing for the Tornado application.\n\n :rtype: bool\n\n ' amqp.install(application, io_loop=io_loop, **kwargs) if ('avro_schema_uri_format' not in application.settings): LOGGER.warning('avro_schema_uri_format is not set, using default') return True<|docstring|>Call this to install Avro publishing for the Tornado application. :rtype: bool<|endoftext|>
7b80eceda8eb4fc1935b57fc21ac76e86d471d2642df351b73c00953ea05f868
def avro_amqp_publish(self, exchange, routing_key, message_type, data, properties=None): 'Publish a message to RabbitMQ, serializing the payload data as an\n Avro datum and creating the AMQP message properties.\n\n :param str exchange: The exchange to publish the message to.\n :param str routing_key: The routing key to publish the message with.\n :param str message_type: The message type for the Avro schema.\n :param dict data: The message data to serialize.\n :param dict properties: An optional dict of additional properties\n to append.\n :raises: sprockets.mixins.avro_publisher.SchemaFetchError\n\n ' properties = (properties or {}) properties['content_type'] = DATUM_MIME_TYPE properties['type'] = message_type return self.amqp_publish(exchange, routing_key, data, properties)
Publish a message to RabbitMQ, serializing the payload data as an Avro datum and creating the AMQP message properties. :param str exchange: The exchange to publish the message to. :param str routing_key: The routing key to publish the message with. :param str message_type: The message type for the Avro schema. :param dict data: The message data to serialize. :param dict properties: An optional dict of additional properties to append. :raises: sprockets.mixins.avro_publisher.SchemaFetchError
sprockets/mixins/avro_publisher/__init__.py
avro_amqp_publish
prashantkb/sprockets.mixins.avro-publisher
0
python
def avro_amqp_publish(self, exchange, routing_key, message_type, data, properties=None): 'Publish a message to RabbitMQ, serializing the payload data as an\n Avro datum and creating the AMQP message properties.\n\n :param str exchange: The exchange to publish the message to.\n :param str routing_key: The routing key to publish the message with.\n :param str message_type: The message type for the Avro schema.\n :param dict data: The message data to serialize.\n :param dict properties: An optional dict of additional properties\n to append.\n :raises: sprockets.mixins.avro_publisher.SchemaFetchError\n\n ' properties = (properties or {}) properties['content_type'] = DATUM_MIME_TYPE properties['type'] = message_type return self.amqp_publish(exchange, routing_key, data, properties)
def avro_amqp_publish(self, exchange, routing_key, message_type, data, properties=None): 'Publish a message to RabbitMQ, serializing the payload data as an\n Avro datum and creating the AMQP message properties.\n\n :param str exchange: The exchange to publish the message to.\n :param str routing_key: The routing key to publish the message with.\n :param str message_type: The message type for the Avro schema.\n :param dict data: The message data to serialize.\n :param dict properties: An optional dict of additional properties\n to append.\n :raises: sprockets.mixins.avro_publisher.SchemaFetchError\n\n ' properties = (properties or {}) properties['content_type'] = DATUM_MIME_TYPE properties['type'] = message_type return self.amqp_publish(exchange, routing_key, data, properties)<|docstring|>Publish a message to RabbitMQ, serializing the payload data as an Avro datum and creating the AMQP message properties. :param str exchange: The exchange to publish the message to. :param str routing_key: The routing key to publish the message with. :param str message_type: The message type for the Avro schema. :param dict data: The message data to serialize. :param dict properties: An optional dict of additional properties to append. :raises: sprockets.mixins.avro_publisher.SchemaFetchError<|endoftext|>
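A hypothetical Tornado handler showing how avro_amqp_publish might be invoked; the handler name, exchange, routing key, message type, and payload are all placeholders, and the mixin is assumed to be combined with a RequestHandler as in typical sprockets usage.

from sprockets.mixins import avro_publisher
from tornado import gen, web

class OrderHandler(avro_publisher.PublishingMixin, web.RequestHandler):

    @gen.coroutine
    def post(self):
        # The payload dict is serialized as an Avro datum before publishing.
        yield self.avro_amqp_publish(
            exchange='orders',
            routing_key='order.created',
            message_type='example.Order',
            data={'id': 1, 'total': 9.99})
        self.set_status(204)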
aa8474d1b0b3521cc048a1ebc8eb745f12aa023b4870cea6bdf4f805c43e3854
@gen.coroutine def amqp_publish(self, exchange, routing_key, body, properties=None): "Publish a message to RabbitMQ, serializing the payload data as an\n Avro datum if the message is to be sent as such.\n\n :param str exchange: The exchange to publish the message to.\n :param str routing_key: The routing key to publish the message with.\n :param dict body: The message data to serialize.\n :param dict properties: An optional dict of additional properties\n to append. If publishing an Avro message, it\n must contain the Avro message type at 'type'\n and have a content type of\n 'application/vnd.apache.avro.datum'\n :raises: sprockets.mixins.avro_publisher.SchemaFetchError\n\n " LOGGER.debug('Publishing Here: %r %r %r %r', exchange, routing_key, body, properties) properties = (properties or {}) if ((properties.get('content_type') == DATUM_MIME_TYPE) and isinstance(body, dict)): avro_schema = (yield self._schema(properties['type'])) LOGGER.debug('Schema: %r', avro_schema) body = self._serialize(avro_schema, body) (yield super(PublishingMixin, self).amqp_publish(exchange, routing_key, body, properties))
Publish a message to RabbitMQ, serializing the payload data as an Avro datum if the message is to be sent as such. :param str exchange: The exchange to publish the message to. :param str routing_key: The routing key to publish the message with. :param dict body: The message data to serialize. :param dict properties: An optional dict of additional properties to append. If publishing an Avro message, it must contain the Avro message type at 'type' and have a content type of 'application/vnd.apache.avro.datum' :raises: sprockets.mixins.avro_publisher.SchemaFetchError
sprockets/mixins/avro_publisher/__init__.py
amqp_publish
prashantkb/sprockets.mixins.avro-publisher
0
python
@gen.coroutine def amqp_publish(self, exchange, routing_key, body, properties=None): "Publish a message to RabbitMQ, serializing the payload data as an\n Avro datum if the message is to be sent as such.\n\n :param str exchange: The exchange to publish the message to.\n :param str routing_key: The routing key to publish the message with.\n :param dict body: The message data to serialize.\n :param dict properties: An optional dict of additional properties\n to append. If publishing an Avro message, it\n must contain the Avro message type at 'type'\n and have a content type of\n 'application/vnd.apache.avro.datum'\n :raises: sprockets.mixins.avro_publisher.SchemaFetchError\n\n " LOGGER.debug('Publishing Here: %r %r %r %r', exchange, routing_key, body, properties) properties = (properties or {}) if ((properties.get('content_type') == DATUM_MIME_TYPE) and isinstance(body, dict)): avro_schema = (yield self._schema(properties['type'])) LOGGER.debug('Schema: %r', avro_schema) body = self._serialize(avro_schema, body) (yield super(PublishingMixin, self).amqp_publish(exchange, routing_key, body, properties))
@gen.coroutine def amqp_publish(self, exchange, routing_key, body, properties=None): "Publish a message to RabbitMQ, serializing the payload data as an\n Avro datum if the message is to be sent as such.\n\n :param str exchange: The exchange to publish the message to.\n :param str routing_key: The routing key to publish the message with.\n :param dict body: The message data to serialize.\n :param dict properties: An optional dict of additional properties\n to append. If publishing an Avro message, it\n must contain the Avro message type at 'type'\n and have a content type of\n 'application/vnd.apache.avro.datum'\n :raises: sprockets.mixins.avro_publisher.SchemaFetchError\n\n " LOGGER.debug('Publishing Here: %r %r %r %r', exchange, routing_key, body, properties) properties = (properties or {}) if ((properties.get('content_type') == DATUM_MIME_TYPE) and isinstance(body, dict)): avro_schema = (yield self._schema(properties['type'])) LOGGER.debug('Schema: %r', avro_schema) body = self._serialize(avro_schema, body) (yield super(PublishingMixin, self).amqp_publish(exchange, routing_key, body, properties))<|docstring|>Publish a message to RabbitMQ, serializing the payload data as an Avro datum if the message is to be sent as such. :param str exchange: The exchange to publish the message to. :param str routing_key: The routing key to publish the message with. :param dict body: The message data to serialize. :param dict properties: An optional dict of additional properties to append. If publishing an Avro message, it must contain the Avro message type at 'type' and have a content type of 'application/vnd.apache.avro.datum' :raises: sprockets.mixins.avro_publisher.SchemaFetchError<|endoftext|>
39709727a93886d336169271ef328b12875fe9a5a7b592dbe47ca9573f2c4f95
@gen.coroutine def _schema(self, message_type): 'Fetch the Avro schema file from application cache or the remote\n URI. If the request for the schema from the remote URI fails, a\n :exc:`sprockets.mixins.avro_publisher.SchemaFetchError` will be\n raised.\n\n :param str message_type: The message type for the Avro schema.\n :rtype: str\n :raises: sprockets.mixins.avro_publisher.SchemaFetchError\n\n ' global _SCHEMAS if (message_type not in _SCHEMAS): schema = (yield self._fetch_schema(message_type)) _SCHEMAS[message_type] = schema raise gen.Return(_SCHEMAS[message_type])
Fetch the Avro schema file from application cache or the remote URI. If the request for the schema from the remote URI fails, a :exc:`sprockets.mixins.avro_publisher.SchemaFetchError` will be raised. :param str message_type: The message type for the Avro schema. :rtype: str :raises: sprockets.mixins.avro_publisher.SchemaFetchError
sprockets/mixins/avro_publisher/__init__.py
_schema
prashantkb/sprockets.mixins.avro-publisher
0
python
@gen.coroutine def _schema(self, message_type): 'Fetch the Avro schema file from application cache or the remote\n URI. If the request for the schema from the remote URI fails, a\n :exc:`sprockets.mixins.avro_publisher.SchemaFetchError` will be\n raised.\n\n :param str message_type: The message type for the Avro schema.\n :rtype: str\n :raises: sprockets.mixins.avro_publisher.SchemaFetchError\n\n ' global _SCHEMAS if (message_type not in _SCHEMAS): schema = (yield self._fetch_schema(message_type)) _SCHEMAS[message_type] = schema raise gen.Return(_SCHEMAS[message_type])
@gen.coroutine def _schema(self, message_type): 'Fetch the Avro schema file from application cache or the remote\n URI. If the request for the schema from the remote URI fails, a\n :exc:`sprockets.mixins.avro_publisher.SchemaFetchError` will be\n raised.\n\n :param str message_type: The message type for the Avro schema.\n :rtype: str\n :raises: sprockets.mixins.avro_publisher.SchemaFetchError\n\n ' global _SCHEMAS if (message_type not in _SCHEMAS): schema = (yield self._fetch_schema(message_type)) _SCHEMAS[message_type] = schema raise gen.Return(_SCHEMAS[message_type])<|docstring|>Fetch the Avro schema file from application cache or the remote URI. If the request for the schema from the remote URI fails, a :exc:`sprockets.mixins.avro_publisher.SchemaFetchError` will be raised. :param str message_type: The message type for the Avro schema. :rtype: str :raises: sprockets.mixins.avro_publisher.SchemaFetchError<|endoftext|>
2edcc96fe9766b03ffa07338ed43132a132832c228f23008c444a25d633d3fb6
@gen.coroutine def _fetch_schema(self, message_type): 'Fetch the Avro schema for the given message type from a remote\n location, returning the schema JSON string.\n\n If the schema can not be retrieved, a\n :exc:`~sprockets.mixins.avro_publisher.SchemaFetchError` will be\n raised.\n\n :param str message_type: The message type for the Avro schema.\n :rtype: str\n :raises: sprockets.mixins.avro_publisher.SchemaFetchError\n\n ' response = (yield self.http_fetch(self._schema_url(message_type))) if response.ok: raise gen.Return(json.loads(response.raw.body.decode('utf-8'))) raise SchemaFetchError()
Fetch the Avro schema for the given message type from a remote location, returning the schema JSON string. If the schema can not be retrieved, a :exc:`~sprockets.mixins.avro_publisher.SchemaFetchError` will be raised. :param str message_type: The message type for the Avro schema. :rtype: str :raises: sprockets.mixins.avro_publisher.SchemaFetchError
sprockets/mixins/avro_publisher/__init__.py
_fetch_schema
prashantkb/sprockets.mixins.avro-publisher
0
python
@gen.coroutine def _fetch_schema(self, message_type): 'Fetch the Avro schema for the given message type from a remote\n location, returning the schema JSON string.\n\n If the schema can not be retrieved, a\n :exc:`~sprockets.mixins.avro_publisher.SchemaFetchError` will be\n raised.\n\n :param str message_type: The message type for the Avro schema.\n :rtype: str\n :raises: sprockets.mixins.avro_publisher.SchemaFetchError\n\n ' response = (yield self.http_fetch(self._schema_url(message_type))) if response.ok: raise gen.Return(json.loads(response.raw.body.decode('utf-8'))) raise SchemaFetchError()
@gen.coroutine def _fetch_schema(self, message_type): 'Fetch the Avro schema for the given message type from a remote\n location, returning the schema JSON string.\n\n If the schema can not be retrieved, a\n :exc:`~sprockets.mixins.avro_publisher.SchemaFetchError` will be\n raised.\n\n :param str message_type: The message type for the Avro schema.\n :rtype: str\n :raises: sprockets.mixins.avro_publisher.SchemaFetchError\n\n ' response = (yield self.http_fetch(self._schema_url(message_type))) if response.ok: raise gen.Return(json.loads(response.raw.body.decode('utf-8'))) raise SchemaFetchError()<|docstring|>Fetch the Avro schema for the given message type from a remote location, returning the schema JSON string. If the schema can not be retrieved, a :exc:`~sprockets.mixins.avro_publisher.SchemaFetchError` will be raised. :param str message_type: The message type for the Avro schema. :rtype: str :raises: sprockets.mixins.avro_publisher.SchemaFetchError<|endoftext|>
512d0168bb31ff4c68b892bef6a1674f215533e06967114af4996980bc58aea7
def _schema_url(self, message_type): 'Return the URL for the given message type for retrieving the Avro\n schema from a remote location.\n\n :param str message_type: The message type for the Avro schema.\n\n :rtype: str\n\n ' return (self.application.settings.get('avro_schema_uri_format', SCHEMA_URI_FORMAT) % {'name': message_type})
Return the URL for the given message type for retrieving the Avro schema from a remote location. :param str message_type: The message type for the Avro schema. :rtype: str
sprockets/mixins/avro_publisher/__init__.py
_schema_url
prashantkb/sprockets.mixins.avro-publisher
0
python
def _schema_url(self, message_type): 'Return the URL for the given message type for retrieving the Avro\n schema from a remote location.\n\n :param str message_type: The message type for the Avro schema.\n\n :rtype: str\n\n ' return (self.application.settings.get('avro_schema_uri_format', SCHEMA_URI_FORMAT) % {'name': message_type})
def _schema_url(self, message_type): 'Return the URL for the given message type for retrieving the Avro\n schema from a remote location.\n\n :param str message_type: The message type for the Avro schema.\n\n :rtype: str\n\n ' return (self.application.settings.get('avro_schema_uri_format', SCHEMA_URI_FORMAT) % {'name': message_type})<|docstring|>Return the URL for the given message type for retrieving the Avro schema from a remote location. :param str message_type: The message type for the Avro schema. :rtype: str<|endoftext|>
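A tiny sketch of the %-style substitution performed by _schema_url; the registry URL is a placeholder, since the real SCHEMA_URI_FORMAT default is not shown in this excerpt.

SCHEMA_URI_FORMAT = 'http://schema-registry.example.com/avro/%(name)s.avsc'  # placeholder
message_type = 'example.Order'
print(SCHEMA_URI_FORMAT % {'name': message_type})
# -> http://schema-registry.example.com/avro/example.Order.avsc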
8d1d2d97d3ccecba21e38b261c4894dc39643b00f3449b67da65102e7a1abe01
@staticmethod def _serialize(schema, data): 'Serialize a data structure into an Avro datum.\n\n :param dict schema: The parsed Avro schema.\n :param dict data: The value to turn into an Avro datum.\n\n :rtype: bytes\n\n ' stream = io.BytesIO() fastavro.schemaless_writer(stream, schema, data) return stream.getvalue()
Serialize a data structure into an Avro datum. :param dict schema: The parsed Avro schema. :param dict data: The value to turn into an Avro datum. :rtype: bytes
sprockets/mixins/avro_publisher/__init__.py
_serialize
prashantkb/sprockets.mixins.avro-publisher
0
python
@staticmethod def _serialize(schema, data): 'Serialize a data structure into an Avro datum.\n\n :param dict schema: The parsed Avro schema.\n :param dict data: The value to turn into an Avro datum.\n\n :rtype: bytes\n\n ' stream = io.BytesIO() fastavro.schemaless_writer(stream, schema, data) return stream.getvalue()
@staticmethod def _serialize(schema, data): 'Serialize a data structure into an Avro datum.\n\n :param dict schema: The parsed Avro schema.\n :param dict data: The value to turn into an Avro datum.\n\n :rtype: bytes\n\n ' stream = io.BytesIO() fastavro.schemaless_writer(stream, schema, data) return stream.getvalue()<|docstring|>Serialize a data structure into an Avro datum. :param dict schema: The parsed Avro schema. :param dict data: The value to turn into an Avro datum. :rtype: bytes<|endoftext|>
0c7789111f9bd114f5aab848557da2550bf92aa890efd6d1435d7c8abf1e5d1b
def getRestingHeartRate(day): '\n 安静時心拍数を取得する\n ' import requests from requests.auth import HTTPBasicAuth as hba api_ver = '1' url = ((((FITBIT_BASE_URL + api_ver) + '/user/-/activities/heart/date/') + day.strftime('%Y-%m-%d')) + '/1d.json') headers = {'Authorization': ('Bearer ' + access_token)} res = requests.get(url, headers=headers) jsonStr = json.loads(res.text) return jsonStr['activities-heart'][0]['value']['restingHeartRate']
安静時心拍数を取得する
get_resting_heart.py
getRestingHeartRate
Coniglio/fitbit_batch
0
python
def getRestingHeartRate(day): '\n \n ' import requests from requests.auth import HTTPBasicAuth as hba api_ver = '1' url = ((((FITBIT_BASE_URL + api_ver) + '/user/-/activities/heart/date/') + day.strftime('%Y-%m-%d')) + '/1d.json') headers = {'Authorization': ('Bearer ' + access_token)} res = requests.get(url, headers=headers) jsonStr = json.loads(res.text) return jsonStr['activities-heart'][0]['value']['restingHeartRate']
def getRestingHeartRate(day): '\n \n ' import requests from requests.auth import HTTPBasicAuth as hba api_ver = '1' url = ((((FITBIT_BASE_URL + api_ver) + '/user/-/activities/heart/date/') + day.strftime('%Y-%m-%d')) + '/1d.json') headers = {'Authorization': ('Bearer ' + access_token)} res = requests.get(url, headers=headers) jsonStr = json.loads(res.text) return jsonStr['activities-heart'][0]['value']['restingHeartRate']<|docstring|>安静時心拍数を取得する<|endoftext|>
72bf8a8d7c0c27e933758dd121ecca07112fcbcc0a9e5329cb56b43e16f2cdb4
def insertRestingHeartRate(date, heart_rate): '\n 安静時心拍数を格納する\n ' import pymysql.cursors conn = pymysql.connect(host=host, user=user, password=password, db=db, cursorclass=pymysql.cursors.DictCursor) try: with conn.cursor() as cursor: sql = 'insert into resting_heart_rate (resting_date, resting_heart_rate) values(%s, %s)' cursor.execute(sql, (date.strftime('%Y-%m-%d'), heart_rate)) conn.commit() finally: conn.close()
安静時心拍数を格納する
get_resting_heart.py
insertRestingHeartRate
Coniglio/fitbit_batch
0
python
def insertRestingHeartRate(date, heart_rate): '\n \n ' import pymysql.cursors conn = pymysql.connect(host=host, user=user, password=password, db=db, cursorclass=pymysql.cursors.DictCursor) try: with conn.cursor() as cursor: sql = 'insert into resting_heart_rate (resting_date, resting_heart_rate) values(%s, %s)' cursor.execute(sql, (date.strftime('%Y-%m-%d'), heart_rate)) conn.commit() finally: conn.close()
def insertRestingHeartRate(date, heart_rate): '\n \n ' import pymysql.cursors conn = pymysql.connect(host=host, user=user, password=password, db=db, cursorclass=pymysql.cursors.DictCursor) try: with conn.cursor() as cursor: sql = 'insert into resting_heart_rate (resting_date, resting_heart_rate) values(%s, %s)' cursor.execute(sql, (date.strftime('%Y-%m-%d'), heart_rate)) conn.commit() finally: conn.close()<|docstring|>安静時心拍数を格納する<|endoftext|>
c5e2478e104ccca32b0e0e70cc4baf667853797a8cf713c57a8d7bb2f109ed67
def __init__(self, machine): 'Initialise Trinamics Step Rocker platform.' super().__init__(machine) self.log = logging.getLogger('Trinamics StepRocker') self.log.debug('Configuring template hardware interface.') self.config = self.machine.config['trinamics_steprocker'] self.platform = None self.features['tickless'] = True self.tmcl = None
Initialise Trinamics Step Rocker platform.
mpf/platforms/trinamics_steprocker.py
__init__
seanirby/mpf
163
python
def __init__(self, machine): super().__init__(machine) self.log = logging.getLogger('Trinamics StepRocker') self.log.debug('Configuring template hardware interface.') self.config = self.machine.config['trinamics_steprocker'] self.platform = None self.features['tickless'] = True self.tmcl = None
def __init__(self, machine): super().__init__(machine) self.log = logging.getLogger('Trinamics StepRocker') self.log.debug('Configuring template hardware interface.') self.config = self.machine.config['trinamics_steprocker'] self.platform = None self.features['tickless'] = True self.tmcl = None<|docstring|>Initialise Trinamics Step Rocker platform.<|endoftext|>
315f63ddf1cf6c3310bd759c94aa133525cdd3a9e51064cf669ab29a59f5d5b4
def __repr__(self): 'Return string representation.' return '<Platform.TrinamicsStepRocker>'
Return string representation.
mpf/platforms/trinamics_steprocker.py
__repr__
seanirby/mpf
163
python
def __repr__(self): return '<Platform.TrinamicsStepRocker>'
def __repr__(self): return '<Platform.TrinamicsStepRocker>'<|docstring|>Return string representation.<|endoftext|>
a256e75a534141acf54d4d19e67450e03e88abb5bc50a1e76b47817c0716148b
async def initialize(self): 'Initialise trinamics steprocker platform.' (await super().initialize()) self.config = self.machine.config_validator.validate_config('trinamics_steprocker', self.config) self.tmcl = TMCLDevice(self.config['port'], False)
Initialise trinamics steprocker platform.
mpf/platforms/trinamics_steprocker.py
initialize
seanirby/mpf
163
python
async def initialize(self): (await super().initialize()) self.config = self.machine.config_validator.validate_config('trinamics_steprocker', self.config) self.tmcl = TMCLDevice(self.config['port'], False)
async def initialize(self): (await super().initialize()) self.config = self.machine.config_validator.validate_config('trinamics_steprocker', self.config) self.tmcl = TMCLDevice(self.config['port'], False)<|docstring|>Initialise trinamics steprocker platform.<|endoftext|>
b2de1157ec19e86da974e5d817d66abf8db66ef77b514ac98f85d48d46d9d4b6
def stop(self): 'Close serial.' if self.tmcl: self.tmcl.stop() self.tmcl = None
Close serial.
mpf/platforms/trinamics_steprocker.py
stop
seanirby/mpf
163
python
def stop(self): if self.tmcl: self.tmcl.stop() self.tmcl = None
def stop(self): if self.tmcl: self.tmcl.stop() self.tmcl = None<|docstring|>Close serial.<|endoftext|>
78f0177cc5bea9e33af3e530593cf54be128d6789d2f1d9aa2c8da32c55a7e51
async def configure_stepper(self, number: str, config: dict) -> 'TrinamicsTMCLStepper': 'Configure a smart stepper device in platform.\n\n Args:\n ----\n number: Number of the stepper.\n config (dict): Configuration of device\n ' return TrinamicsTMCLStepper(number, config, self.tmcl, self.machine)
Configure a smart stepper device in platform. Args: ---- number: Number of the stepper. config (dict): Configuration of device
mpf/platforms/trinamics_steprocker.py
configure_stepper
seanirby/mpf
163
python
async def configure_stepper(self, number: str, config: dict) -> 'TrinamicsTMCLStepper': 'Configure a smart stepper device in platform.\n\n Args:\n ----\n number: Number of the stepper.\n config (dict): Configuration of device\n ' return TrinamicsTMCLStepper(number, config, self.tmcl, self.machine)
async def configure_stepper(self, number: str, config: dict) -> 'TrinamicsTMCLStepper': 'Configure a smart stepper device in platform.\n\n Args:\n ----\n number: Number of the stepper.\n config (dict): Configuration of device\n ' return TrinamicsTMCLStepper(number, config, self.tmcl, self.machine)<|docstring|>Configure a smart stepper device in platform. Args: ---- number: Number of the stepper. config (dict): Configuration of device<|endoftext|>
75e4c1d7211b0f51e5dfcdd8c65b381cb354338390d7deb0369a4c5876330b78
@classmethod def get_stepper_config_section(cls): 'Return config validator name.' return 'steprocker_stepper_settings'
Return config validator name.
mpf/platforms/trinamics_steprocker.py
get_stepper_config_section
seanirby/mpf
163
python
@classmethod def get_stepper_config_section(cls): return 'steprocker_stepper_settings'
@classmethod def get_stepper_config_section(cls): return 'steprocker_stepper_settings'<|docstring|>Return config validator name.<|endoftext|>
f7135e482ad12f4366ed53f35e93bfae8919fa3cbf449cf96b6aa4bdb7385989
def __init__(self, number, config, tmcl_device, machine): 'Initialise stepper.' self._pulse_div = 5 self._ramp_div = 9 self._clock_freq = 16000000.0 self.config = config self.log = logging.getLogger('TMCL Stepper') self._mn = int(number) self.tmcl = tmcl_device self._move_current = int((2.55 * self.config['move_current'])) self._hold_current = int((2.55 * self.config['hold_current'])) self._microstep_per_fullstep = self.config['microstep_per_fullstep'] self._fullstep_per_userunit = self.config['fullstep_per_userunit'] self._velocity_limit = self._uu_to_velocity_cmd(self.config['velocity_limit']) self._acceleration_limit = self._uu_to_accel_cmd(self.config['acceleration_limit']) self.machine = machine self._homing_speed = self._uu_to_velocity_cmd(self.config['homing_speed']) self._set_important_parameters(self._velocity_limit, self._acceleration_limit, self._move_current, self._hold_current, self._get_micro_step_mode(self._microstep_per_fullstep), False) self.tmcl.sap(self._mn, 154, self._pulse_div) self.tmcl.sap(self._mn, 153, self._ramp_div) self._homing_active = False
Initialise stepper.
mpf/platforms/trinamics_steprocker.py
__init__
seanirby/mpf
163
python
def __init__(self, number, config, tmcl_device, machine): self._pulse_div = 5 self._ramp_div = 9 self._clock_freq = 16000000.0 self.config = config self.log = logging.getLogger('TMCL Stepper') self._mn = int(number) self.tmcl = tmcl_device self._move_current = int((2.55 * self.config['move_current'])) self._hold_current = int((2.55 * self.config['hold_current'])) self._microstep_per_fullstep = self.config['microstep_per_fullstep'] self._fullstep_per_userunit = self.config['fullstep_per_userunit'] self._velocity_limit = self._uu_to_velocity_cmd(self.config['velocity_limit']) self._acceleration_limit = self._uu_to_accel_cmd(self.config['acceleration_limit']) self.machine = machine self._homing_speed = self._uu_to_velocity_cmd(self.config['homing_speed']) self._set_important_parameters(self._velocity_limit, self._acceleration_limit, self._move_current, self._hold_current, self._get_micro_step_mode(self._microstep_per_fullstep), False) self.tmcl.sap(self._mn, 154, self._pulse_div) self.tmcl.sap(self._mn, 153, self._ramp_div) self._homing_active = False
def __init__(self, number, config, tmcl_device, machine): self._pulse_div = 5 self._ramp_div = 9 self._clock_freq = 16000000.0 self.config = config self.log = logging.getLogger('TMCL Stepper') self._mn = int(number) self.tmcl = tmcl_device self._move_current = int((2.55 * self.config['move_current'])) self._hold_current = int((2.55 * self.config['hold_current'])) self._microstep_per_fullstep = self.config['microstep_per_fullstep'] self._fullstep_per_userunit = self.config['fullstep_per_userunit'] self._velocity_limit = self._uu_to_velocity_cmd(self.config['velocity_limit']) self._acceleration_limit = self._uu_to_accel_cmd(self.config['acceleration_limit']) self.machine = machine self._homing_speed = self._uu_to_velocity_cmd(self.config['homing_speed']) self._set_important_parameters(self._velocity_limit, self._acceleration_limit, self._move_current, self._hold_current, self._get_micro_step_mode(self._microstep_per_fullstep), False) self.tmcl.sap(self._mn, 154, self._pulse_div) self.tmcl.sap(self._mn, 153, self._ramp_div) self._homing_active = False<|docstring|>Initialise stepper.<|endoftext|>
eb064f844cfc37cc2be53505bf59e96e13d2487e166237bdb2e275dc3966b4cf
def home(self, direction): 'Home an axis, resetting 0 position.' self.tmcl.rfs(self._mn, 'STOP') self._set_home_parameters(direction) self.tmcl.rfs(self._mn, 'START') self._homing_active = True
Home an axis, resetting 0 position.
mpf/platforms/trinamics_steprocker.py
home
seanirby/mpf
163
python
def home(self, direction): self.tmcl.rfs(self._mn, 'STOP') self._set_home_parameters(direction) self.tmcl.rfs(self._mn, 'START') self._homing_active = True
def home(self, direction): self.tmcl.rfs(self._mn, 'STOP') self._set_home_parameters(direction) self.tmcl.rfs(self._mn, 'START') self._homing_active = True<|docstring|>Home an axis, resetting 0 position.<|endoftext|>
dbd2e65b9f2688f0a2ac8c9682d7499ca413e59eea932bad48b23176bf2a9f96
def set_home_position(self): 'Tell the stepper that we are at the home position.' self.tmcl.sap(self._mn, 1, 0)
Tell the stepper that we are at the home position.
mpf/platforms/trinamics_steprocker.py
set_home_position
seanirby/mpf
163
python
def set_home_position(self): self.tmcl.sap(self._mn, 1, 0)
def set_home_position(self): self.tmcl.sap(self._mn, 1, 0)<|docstring|>Tell the stepper that we are at the home position.<|endoftext|>
62197db706d7b2557fb96a96fcdad80dae3717ef075aa32b2ff97ce1f6aed85a
def move_abs_pos(self, position): 'Move axis to a certain absolute position.' microstep_pos = self._uu_to_microsteps(position) self.tmcl.mvp(self._mn, 'ABS', microstep_pos)
Move axis to a certain absolute position.
mpf/platforms/trinamics_steprocker.py
move_abs_pos
seanirby/mpf
163
python
def move_abs_pos(self, position): microstep_pos = self._uu_to_microsteps(position) self.tmcl.mvp(self._mn, 'ABS', microstep_pos)
def move_abs_pos(self, position): microstep_pos = self._uu_to_microsteps(position) self.tmcl.mvp(self._mn, 'ABS', microstep_pos)<|docstring|>Move axis to a certain absolute position.<|endoftext|>
b33ed184d7961be8ab7f3e5a28e071eacaad6e572b6ebf8722b02f2ca12cf471
def move_rel_pos(self, position): 'Move axis to a relative position.' microstep_rel = self._uu_to_microsteps(position) self.tmcl.mvp(self._mn, 'REL', microstep_rel)
Move axis to a relative position.
mpf/platforms/trinamics_steprocker.py
move_rel_pos
seanirby/mpf
163
python
def move_rel_pos(self, position): microstep_rel = self._uu_to_microsteps(position) self.tmcl.mvp(self._mn, 'REL', microstep_rel)
def move_rel_pos(self, position): microstep_rel = self._uu_to_microsteps(position) self.tmcl.mvp(self._mn, 'REL', microstep_rel)<|docstring|>Move axis to a relative position.<|endoftext|>
33a2f1f6c82a3a1257b3e4ed2907355e390f1bdfa92be8762d26d14168975b2c
def move_vel_mode(self, velocity): 'Move at a specific velocity and direction (pos = clockwise, neg = counterclockwise).' self._rotate(velocity)
Move at a specific velocity and direction (pos = clockwise, neg = counterclockwise).
mpf/platforms/trinamics_steprocker.py
move_vel_mode
seanirby/mpf
163
python
def move_vel_mode(self, velocity): self._rotate(velocity)
def move_vel_mode(self, velocity): self._rotate(velocity)<|docstring|>Move at a specific velocity and direction (pos = clockwise, neg = counterclockwise).<|endoftext|>
f6cd76a32944f927567ac70dae00b34358f6bce1c829e1b4604f2436bee10d5b
def current_position(self): 'Return current position.' microsteps = self.tmcl.gap(self._mn, 1) return self._microsteps_to_uu(microsteps)
Return current position.
mpf/platforms/trinamics_steprocker.py
current_position
seanirby/mpf
163
python
def current_position(self): microsteps = self.tmcl.gap(self._mn, 1) return self._microsteps_to_uu(microsteps)
def current_position(self): microsteps = self.tmcl.gap(self._mn, 1) return self._microsteps_to_uu(microsteps)<|docstring|>Return current position.<|endoftext|>
73c6fa34ff141f4d45690aaa54707c0d02f127cdd71da12bbb76bb94eb09aa71
def stop(self) -> None: 'Stop stepper.' self.tmcl.mst(self._mn)
Stop stepper.
mpf/platforms/trinamics_steprocker.py
stop
seanirby/mpf
163
python
def stop(self) -> None: self.tmcl.mst(self._mn)
def stop(self) -> None: self.tmcl.mst(self._mn)<|docstring|>Stop stepper.<|endoftext|>
3c3dfa4dbb339b3aa58e5d9b694f526cf4c352b86a84b0cf56c49c31c27ed1f3
async def wait_for_move_completed(self): 'Wait until move completed.' while (not self.is_move_complete()): (await asyncio.sleep((1 / self.config['poll_ms'])))
Wait until move completed.
mpf/platforms/trinamics_steprocker.py
wait_for_move_completed
seanirby/mpf
163
python
async def wait_for_move_completed(self): while (not self.is_move_complete()): (await asyncio.sleep((1 / self.config['poll_ms'])))
async def wait_for_move_completed(self): while (not self.is_move_complete()): (await asyncio.sleep((1 / self.config['poll_ms'])))<|docstring|>Wait until move completed.<|endoftext|>
821e0f04d6421ced6525bc2da5297f228fb192241ed3206d445005d80620d27e
def is_move_complete(self) -> bool: 'Return true if move is complete.' if self._homing_active: ret = self.tmcl.rfs(self._mn, 'STATUS') if (ret != 0): return False self._homing_active = False return True ret = self.tmcl.gap(self._mn, 8) if (ret == 1): return True return False
Return true if move is complete.
mpf/platforms/trinamics_steprocker.py
is_move_complete
seanirby/mpf
163
python
def is_move_complete(self) -> bool: if self._homing_active: ret = self.tmcl.rfs(self._mn, 'STATUS') if (ret != 0): return False self._homing_active = False return True ret = self.tmcl.gap(self._mn, 8) if (ret == 1): return True return False
def is_move_complete(self) -> bool: if self._homing_active: ret = self.tmcl.rfs(self._mn, 'STATUS') if (ret != 0): return False self._homing_active = False return True ret = self.tmcl.gap(self._mn, 8) if (ret == 1): return True return False<|docstring|>Return true if move is complete.<|endoftext|>
e2e31871a6784d3c68c3056fb3ed7bd50c6cdfe2c1b9b7367dd569e3b59a4046
@login_manager.user_loader def load_user(user_id): 'Load user by ID.' return User.get_by_id(int(user_id))
Load user by ID.
myflaskapp/user/utils.py
load_user
IT-corridor/Parking
0
python
@login_manager.user_loader def load_user(user_id): return User.get_by_id(int(user_id))
@login_manager.user_loader def load_user(user_id): return User.get_by_id(int(user_id))<|docstring|>Load user by ID.<|endoftext|>
cc572506db5e60ae1678638975f019c6d83517b7e6a2d3c6268749f2812dc9a9
def run(self, filepath=None): '\n logically wrong\n ' pass
logically wrong
myflaskapp/user/utils.py
run
IT-corridor/Parking
0
python
def run(self, filepath=None): '\n \n ' pass
def run(self, filepath=None): '\n \n ' pass<|docstring|>logically wrong<|endoftext|>
35b6435c957360b7aaa1933d899842def40044d624c8fb396e9e18725d777053
def lookup(self, user_id: Union[(str, int)]=None, screen_name: str=None, users: List[str]=None, include_entities: bool=True, user_id_list: List[Union[(str, int)]]=[], screen_name_list: List[str]=[]): 'lookup\n * detail:\n ja: 任意の数だけ指定したユーザの情報を取得するエンドポイント.一度に 100 件まで取得可能.\n * rate-limit: 900 times / 15 min\n ' path = 'users/lookup.json' endpoint = (self.base_url + path) params_list = [user_id, screen_name, user_id_list, screen_name_list] param_num = sum([bool(param) for param in params_list]) if (param_num == 1): params = {'include_entities': include_entities} if user_id: params['user_id'] = user_id elif screen_name: params['screen_name'] = screen_name elif user_id_list: params['user_id'] = ','.join(user_id_list) elif screen_name_list: params['screen_name'] = ','.join(screen_name_list) return self.api.get(url=endpoint, params=params) else: raise Exception('1 of 4(user_id, screen_name, user_id_list, screen_name_list), must be assigned.')
lookup * detail: ja: 任意の数だけ指定したユーザの情報を取得するエンドポイント.一度に 100 件まで取得可能. * rate-limit: 900 times / 15 min
twitter/endpoints/users.py
lookup
nukopy/twitter-api
0
python
def lookup(self, user_id: Union[(str, int)]=None, screen_name: str=None, users: List[str]=None, include_entities: bool=True, user_id_list: List[Union[(str, int)]]=[], screen_name_list: List[str]=[]): 'lookup\n * detail:\n ja: 任意の数だけ指定したユーザの情報を取得するエンドポイント.一度に 100 件まで取得可能.\n * rate-limit: 900 times / 15 min\n ' path = 'users/lookup.json' endpoint = (self.base_url + path) params_list = [user_id, screen_name, user_id_list, screen_name_list] param_num = sum([bool(param) for param in params_list]) if (param_num == 1): params = {'include_entities': include_entities} if user_id: params['user_id'] = user_id elif screen_name: params['screen_name'] = screen_name elif user_id_list: params['user_id'] = ','.join(user_id_list) elif screen_name_list: params['screen_name'] = ','.join(screen_name_list) return self.api.get(url=endpoint, params=params) else: raise Exception('1 of 4(user_id, screen_name, user_id_list, screen_name_list), must be assigned.')
def lookup(self, user_id: Union[(str, int)]=None, screen_name: str=None, users: List[str]=None, include_entities: bool=True, user_id_list: List[Union[(str, int)]]=[], screen_name_list: List[str]=[]): 'lookup\n * detail:\n ja: 任意の数だけ指定したユーザの情報を取得するエンドポイント.一度に 100 件まで取得可能.\n * rate-limit: 900 times / 15 min\n ' path = 'users/lookup.json' endpoint = (self.base_url + path) params_list = [user_id, screen_name, user_id_list, screen_name_list] param_num = sum([bool(param) for param in params_list]) if (param_num == 1): params = {'include_entities': include_entities} if user_id: params['user_id'] = user_id elif screen_name: params['screen_name'] = screen_name elif user_id_list: params['user_id'] = ','.join(user_id_list) elif screen_name_list: params['screen_name'] = ','.join(screen_name_list) return self.api.get(url=endpoint, params=params) else: raise Exception('1 of 4(user_id, screen_name, user_id_list, screen_name_list), must be assigned.')<|docstring|>lookup * detail: ja: 任意の数だけ指定したユーザの情報を取得するエンドポイント.一度に 100 件まで取得可能. * rate-limit: 900 times / 15 min<|endoftext|>
ea2b16eb5354a14035b78bad3e2522667d545b4ebcc58c7ccdcc17382d6d2d62
def _update_project_configuration(new_project_id=None): '\n Update current project configuration\n ' global PROJECT_ID_PACKAGE, MODELING_PACKAGE, MODULE_NAME, SETTING_OBJECT if new_project_id: GS.PROBLEM_ID = new_project_id PROJECT_ID_PACKAGE = ('src.projects.' + GS.PROBLEM_ID) MODELING_PACKAGE = (PROJECT_ID_PACKAGE + '.modeling') SETTING_OBJECT = Projects.get_settings()
Update current project configuration
src/Executor.py
_update_project_configuration
Gabvaztor/TensorFlowCode
4
python
def _update_project_configuration(new_project_id=None): '\n \n ' global PROJECT_ID_PACKAGE, MODELING_PACKAGE, MODULE_NAME, SETTING_OBJECT if new_project_id: GS.PROBLEM_ID = new_project_id PROJECT_ID_PACKAGE = ('src.projects.' + GS.PROBLEM_ID) MODELING_PACKAGE = (PROJECT_ID_PACKAGE + '.modeling') SETTING_OBJECT = Projects.get_settings()
def _update_project_configuration(new_project_id=None): '\n \n ' global PROJECT_ID_PACKAGE, MODELING_PACKAGE, MODULE_NAME, SETTING_OBJECT if new_project_id: GS.PROBLEM_ID = new_project_id PROJECT_ID_PACKAGE = ('src.projects.' + GS.PROBLEM_ID) MODELING_PACKAGE = (PROJECT_ID_PACKAGE + '.modeling') SETTING_OBJECT = Projects.get_settings()<|docstring|>Update current project configuration<|endoftext|>
0fd94b8051b9444db03abcc824d1b5e9bf04370b46798c1faaf4164ad09f3667
def _api_process(user_id: str, model_selected: str, petition_process_in_background=True): '\n Execute api process in background (optional). Before it does it, it update the current global "PROBLEM_ID".\n Args:\n user_id: user id sent from PHP server/client\n petition_process_in_background: If the process will be executed in the background\n ' _update_project_configuration(new_project_id=model_selected) if petition_process_in_background: import subprocess import src.services.api.API as api try: filepath = str(api.__file__) pt('filepath', filepath) python_path2 = ((((('python "' + filepath) + '" -i ') + user_id) + ' -m ') + model_selected) if ('\\..\\src' in python_path2): python_path2 = python_path2.replace('\\..\\src', '') pt('python_path2', python_path2) GS.LOGGER.write_to_logger(('Opening from path: ' + python_path2)) GS.LOGGER.write_to_logger((((('New petition: \n' + 'USER_ID: ') + user_id) + ' MODEL: ') + model_selected)) GS.LOGGER.write_to_logger(python_path2) process = subprocess.Popen(python_path2, creationflags=subprocess.CREATE_NEW_CONSOLE) except Exception as error: GS.LOGGER.write_log_error(error) else: pass
Execute api process in background (optional). Before it does it, it update the current global "PROBLEM_ID". Args: user_id: user id sent from PHP server/client petition_process_in_background: If the process will be executed in the background
src/Executor.py
_api_process
Gabvaztor/TensorFlowCode
4
python
def _api_process(user_id: str, model_selected: str, petition_process_in_background=True): '\n Execute api process in background (optional). Before it does it, it update the current global "PROBLEM_ID".\n Args:\n user_id: user id sent from PHP server/client\n petition_process_in_background: If the process will be executed in the background\n ' _update_project_configuration(new_project_id=model_selected) if petition_process_in_background: import subprocess import src.services.api.API as api try: filepath = str(api.__file__) pt('filepath', filepath) python_path2 = ((((('python "' + filepath) + '" -i ') + user_id) + ' -m ') + model_selected) if ('\\..\\src' in python_path2): python_path2 = python_path2.replace('\\..\\src', '') pt('python_path2', python_path2) GS.LOGGER.write_to_logger(('Opening from path: ' + python_path2)) GS.LOGGER.write_to_logger((((('New petition: \n' + 'USER_ID: ') + user_id) + ' MODEL: ') + model_selected)) GS.LOGGER.write_to_logger(python_path2) process = subprocess.Popen(python_path2, creationflags=subprocess.CREATE_NEW_CONSOLE) except Exception as error: GS.LOGGER.write_log_error(error) else: pass
def _api_process(user_id: str, model_selected: str, petition_process_in_background=True): '\n Execute api process in background (optional). Before it does it, it update the current global "PROBLEM_ID".\n Args:\n user_id: user id sent from PHP server/client\n petition_process_in_background: If the process will be executed in the background\n ' _update_project_configuration(new_project_id=model_selected) if petition_process_in_background: import subprocess import src.services.api.API as api try: filepath = str(api.__file__) pt('filepath', filepath) python_path2 = ((((('python "' + filepath) + '" -i ') + user_id) + ' -m ') + model_selected) if ('\\..\\src' in python_path2): python_path2 = python_path2.replace('\\..\\src', '') pt('python_path2', python_path2) GS.LOGGER.write_to_logger(('Opening from path: ' + python_path2)) GS.LOGGER.write_to_logger((((('New petition: \n' + 'USER_ID: ') + user_id) + ' MODEL: ') + model_selected)) GS.LOGGER.write_to_logger(python_path2) process = subprocess.Popen(python_path2, creationflags=subprocess.CREATE_NEW_CONSOLE) except Exception as error: GS.LOGGER.write_log_error(error) else: pass<|docstring|>Execute api process in background (optional). Before it does it, it update the current global "PROBLEM_ID". Args: user_id: user id sent from PHP server/client petition_process_in_background: If the process will be executed in the background<|endoftext|>
3a0c825325a0f88621ddb7ed3f4b990a13c4d430eedb6bf0d018a81578598617
@staticmethod def fromstring(s): 'Return a new point object from a string representation.\n ' return Point((x for x in s.split()))
Return a new point object from a string representation.
greedypermutation/point.py
fromstring
siddharthsheth/greedypermutation
6
python
@staticmethod def fromstring(s): '\n ' return Point((x for x in s.split()))
@staticmethod def fromstring(s): '\n ' return Point((x for x in s.split()))<|docstring|>Return a new point object from a string representation.<|endoftext|>
729e993b8cff316528c371ba3e60d7ecb41c81eee80ffb6797a02e62079b9a68
def dist(self, other): "Compute the Euclidean distance between the `self` and `other`.\n If the dimensions don't match the distance is computed in projection\n down to the common subspace.\n " return (sum((((a - b) ** 2) for (a, b) in zip(self, other))) ** 0.5)
Compute the Euclidean distance between the `self` and `other`. If the dimensions don't match the distance is computed in projection down to the common subspace.
greedypermutation/point.py
dist
siddharthsheth/greedypermutation
6
python
def dist(self, other): "Compute the Euclidean distance between the `self` and `other`.\n If the dimensions don't match the distance is computed in projection\n down to the common subspace.\n " return (sum((((a - b) ** 2) for (a, b) in zip(self, other))) ** 0.5)
def dist(self, other): "Compute the Euclidean distance between the `self` and `other`.\n If the dimensions don't match the distance is computed in projection\n down to the common subspace.\n " return (sum((((a - b) ** 2) for (a, b) in zip(self, other))) ** 0.5)<|docstring|>Compute the Euclidean distance between the `self` and `other`. If the dimensions don't match the distance is computed in projection down to the common subspace.<|endoftext|>
6a518635e875ed6a8d18cf56125f23d447169d8d3223775782af2da15c8a15a1
def parse_args(): 'Parses arguments.' parser = argparse.ArgumentParser() parser.add_argument('model_name', type=str, help='Name of the GAN model.') parser.add_argument('image_dir', type=str, help='Image directory, which includes original images, inverted codes, and image list.') parser.add_argument('boundary_path', type=str, help='Path to the boundary for semantic manipulation.') parser.add_argument('-o', '--output_dir', type=str, default='', help='Directory to save the results. If not specified, `./results/manipulation` will be used by default.') parser.add_argument('--step', type=int, default=7, help='Number of manipulation steps. (default: 7)') parser.add_argument('--start_distance', type=float, default=(- 3.0), help='Start distance for manipulation. (default: -3.0)') parser.add_argument('--end_distance', type=float, default=3.0, help='End distance for manipulation. (default: 3.0)') parser.add_argument('--manipulate_layers', type=str, default='', help='Indices of the layers to perform manipulation. If not specified, all layers will be manipulated. More than one layers should be separated by `,`. (default: None)') parser.add_argument('--viz_size', type=int, default=256, help='Image size for visualization. (default: 256)') parser.add_argument('--gpu_id', type=str, default='0', help='Which GPU(s) to use. (default: `0`)') return parser.parse_args()
Parses arguments.
manipulate.py
parse_args
ayulockin/idinvert_pytorch
1
python
def parse_args(): parser = argparse.ArgumentParser() parser.add_argument('model_name', type=str, help='Name of the GAN model.') parser.add_argument('image_dir', type=str, help='Image directory, which includes original images, inverted codes, and image list.') parser.add_argument('boundary_path', type=str, help='Path to the boundary for semantic manipulation.') parser.add_argument('-o', '--output_dir', type=str, default='', help='Directory to save the results. If not specified, `./results/manipulation` will be used by default.') parser.add_argument('--step', type=int, default=7, help='Number of manipulation steps. (default: 7)') parser.add_argument('--start_distance', type=float, default=(- 3.0), help='Start distance for manipulation. (default: -3.0)') parser.add_argument('--end_distance', type=float, default=3.0, help='End distance for manipulation. (default: 3.0)') parser.add_argument('--manipulate_layers', type=str, default='', help='Indices of the layers to perform manipulation. If not specified, all layers will be manipulated. More than one layers should be separated by `,`. (default: None)') parser.add_argument('--viz_size', type=int, default=256, help='Image size for visualization. (default: 256)') parser.add_argument('--gpu_id', type=str, default='0', help='Which GPU(s) to use. (default: `0`)') return parser.parse_args()
def parse_args(): parser = argparse.ArgumentParser() parser.add_argument('model_name', type=str, help='Name of the GAN model.') parser.add_argument('image_dir', type=str, help='Image directory, which includes original images, inverted codes, and image list.') parser.add_argument('boundary_path', type=str, help='Path to the boundary for semantic manipulation.') parser.add_argument('-o', '--output_dir', type=str, default='', help='Directory to save the results. If not specified, `./results/manipulation` will be used by default.') parser.add_argument('--step', type=int, default=7, help='Number of manipulation steps. (default: 7)') parser.add_argument('--start_distance', type=float, default=(- 3.0), help='Start distance for manipulation. (default: -3.0)') parser.add_argument('--end_distance', type=float, default=3.0, help='End distance for manipulation. (default: 3.0)') parser.add_argument('--manipulate_layers', type=str, default='', help='Indices of the layers to perform manipulation. If not specified, all layers will be manipulated. More than one layers should be separated by `,`. (default: None)') parser.add_argument('--viz_size', type=int, default=256, help='Image size for visualization. (default: 256)') parser.add_argument('--gpu_id', type=str, default='0', help='Which GPU(s) to use. (default: `0`)') return parser.parse_args()<|docstring|>Parses arguments.<|endoftext|>
508bb58e9093c71358548af257977cb78a8b50a0309198b342436c375eead586
def main(): 'Main function.' args = parse_args() os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu_id image_dir = args.image_dir image_dir_name = os.path.basename(image_dir.rstrip('/')) assert os.path.exists(image_dir) assert os.path.exists(f'{image_dir}/image_list.txt') assert os.path.exists(f'{image_dir}/inverted_codes.npy') boundary_path = args.boundary_path assert os.path.exists(boundary_path) boundary_name = os.path.splitext(os.path.basename(boundary_path))[0] output_dir = (args.output_dir or 'results/manipulation') job_name = f'{boundary_name.upper()}_{image_dir_name}' logger = setup_logger(output_dir, f'{job_name}.log', f'{job_name}_logger') logger.info(f'Loading generator.') generator = build_generator(args.model_name) logger.info(f'Loading images and corresponding inverted latent codes.') image_list = [] with open(f'{image_dir}/image_list.txt', 'r') as f: for line in f: name = os.path.splitext(os.path.basename(line.strip()))[0] assert os.path.exists(f'{image_dir}/{name}_ori.png') assert os.path.exists(f'{image_dir}/{name}_inv.png') image_list.append(name) latent_codes = np.load(f'{image_dir}/inverted_codes.npy') assert (latent_codes.shape[0] == len(image_list)) num_images = latent_codes.shape[0] logger.info(f'Loading boundary.') boundary_file = np.load(boundary_path, allow_pickle=True)[()] if isinstance(boundary_file, dict): boundary = boundary_file['boundary'] manipulate_layers = boundary_file['meta_data']['manipulate_layers'] else: boundary = boundary_file manipulate_layers = args.manipulate_layers if manipulate_layers: logger.info(f' Manipulating on layers `{manipulate_layers}`.') else: logger.info(f' Manipulating on ALL layers.') logger.info(f'Start manipulation.') step = args.step viz_size = (None if (args.viz_size == 0) else args.viz_size) visualizer = HtmlPageVisualizer(num_rows=num_images, num_cols=(step + 3), viz_size=viz_size) visualizer.set_headers((['Name', 'Origin', 'Inverted'] + [f'Step {i:02d}' for i in range(1, (step + 1))])) for (img_idx, img_name) in enumerate(image_list): ori_image = load_image(f'{image_dir}/{img_name}_ori.png') inv_image = load_image(f'{image_dir}/{img_name}_inv.png') visualizer.set_cell(img_idx, 0, text=img_name) visualizer.set_cell(img_idx, 1, image=ori_image) visualizer.set_cell(img_idx, 2, image=inv_image) codes = manipulate(latent_codes=latent_codes, boundary=boundary, start_distance=args.start_distance, end_distance=args.end_distance, step=step, layerwise_manipulation=True, num_layers=generator.num_layers, manipulate_layers=manipulate_layers, is_code_layerwise=True, is_boundary_layerwise=True) for img_idx in tqdm(range(num_images), leave=False): wandb_images = [] img_name = image_list[img_idx] ori_image = load_image(f'{image_dir}/{img_name}_ori.png') wandb_images.append(ori_image) run = wandb.init(project='in-domain-gan', job_type='manipulate', name='manipulate-{}-{}'.format(boundary_name, img_name)) output_images = generator.easy_synthesize(codes[img_idx], latent_space_type='wp')['image'] wandb_images.extend(output_images) for (s, output_image) in enumerate(output_images): visualizer.set_cell(img_idx, (s + 3), image=output_image) wandb.log({'manipulate {} image'.format(boundary_name): [wandb.Image(image) for image in wandb_images]}) run.join() visualizer.save(f'{output_dir}/{job_name}.html')
Main function.
manipulate.py
main
ayulockin/idinvert_pytorch
1
python
def main(): args = parse_args() os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu_id image_dir = args.image_dir image_dir_name = os.path.basename(image_dir.rstrip('/')) assert os.path.exists(image_dir) assert os.path.exists(f'{image_dir}/image_list.txt') assert os.path.exists(f'{image_dir}/inverted_codes.npy') boundary_path = args.boundary_path assert os.path.exists(boundary_path) boundary_name = os.path.splitext(os.path.basename(boundary_path))[0] output_dir = (args.output_dir or 'results/manipulation') job_name = f'{boundary_name.upper()}_{image_dir_name}' logger = setup_logger(output_dir, f'{job_name}.log', f'{job_name}_logger') logger.info(f'Loading generator.') generator = build_generator(args.model_name) logger.info(f'Loading images and corresponding inverted latent codes.') image_list = [] with open(f'{image_dir}/image_list.txt', 'r') as f: for line in f: name = os.path.splitext(os.path.basename(line.strip()))[0] assert os.path.exists(f'{image_dir}/{name}_ori.png') assert os.path.exists(f'{image_dir}/{name}_inv.png') image_list.append(name) latent_codes = np.load(f'{image_dir}/inverted_codes.npy') assert (latent_codes.shape[0] == len(image_list)) num_images = latent_codes.shape[0] logger.info(f'Loading boundary.') boundary_file = np.load(boundary_path, allow_pickle=True)[()] if isinstance(boundary_file, dict): boundary = boundary_file['boundary'] manipulate_layers = boundary_file['meta_data']['manipulate_layers'] else: boundary = boundary_file manipulate_layers = args.manipulate_layers if manipulate_layers: logger.info(f' Manipulating on layers `{manipulate_layers}`.') else: logger.info(f' Manipulating on ALL layers.') logger.info(f'Start manipulation.') step = args.step viz_size = (None if (args.viz_size == 0) else args.viz_size) visualizer = HtmlPageVisualizer(num_rows=num_images, num_cols=(step + 3), viz_size=viz_size) visualizer.set_headers((['Name', 'Origin', 'Inverted'] + [f'Step {i:02d}' for i in range(1, (step + 1))])) for (img_idx, img_name) in enumerate(image_list): ori_image = load_image(f'{image_dir}/{img_name}_ori.png') inv_image = load_image(f'{image_dir}/{img_name}_inv.png') visualizer.set_cell(img_idx, 0, text=img_name) visualizer.set_cell(img_idx, 1, image=ori_image) visualizer.set_cell(img_idx, 2, image=inv_image) codes = manipulate(latent_codes=latent_codes, boundary=boundary, start_distance=args.start_distance, end_distance=args.end_distance, step=step, layerwise_manipulation=True, num_layers=generator.num_layers, manipulate_layers=manipulate_layers, is_code_layerwise=True, is_boundary_layerwise=True) for img_idx in tqdm(range(num_images), leave=False): wandb_images = [] img_name = image_list[img_idx] ori_image = load_image(f'{image_dir}/{img_name}_ori.png') wandb_images.append(ori_image) run = wandb.init(project='in-domain-gan', job_type='manipulate', name='manipulate-{}-{}'.format(boundary_name, img_name)) output_images = generator.easy_synthesize(codes[img_idx], latent_space_type='wp')['image'] wandb_images.extend(output_images) for (s, output_image) in enumerate(output_images): visualizer.set_cell(img_idx, (s + 3), image=output_image) wandb.log({'manipulate {} image'.format(boundary_name): [wandb.Image(image) for image in wandb_images]}) run.join() visualizer.save(f'{output_dir}/{job_name}.html')
def main(): args = parse_args() os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu_id image_dir = args.image_dir image_dir_name = os.path.basename(image_dir.rstrip('/')) assert os.path.exists(image_dir) assert os.path.exists(f'{image_dir}/image_list.txt') assert os.path.exists(f'{image_dir}/inverted_codes.npy') boundary_path = args.boundary_path assert os.path.exists(boundary_path) boundary_name = os.path.splitext(os.path.basename(boundary_path))[0] output_dir = (args.output_dir or 'results/manipulation') job_name = f'{boundary_name.upper()}_{image_dir_name}' logger = setup_logger(output_dir, f'{job_name}.log', f'{job_name}_logger') logger.info(f'Loading generator.') generator = build_generator(args.model_name) logger.info(f'Loading images and corresponding inverted latent codes.') image_list = [] with open(f'{image_dir}/image_list.txt', 'r') as f: for line in f: name = os.path.splitext(os.path.basename(line.strip()))[0] assert os.path.exists(f'{image_dir}/{name}_ori.png') assert os.path.exists(f'{image_dir}/{name}_inv.png') image_list.append(name) latent_codes = np.load(f'{image_dir}/inverted_codes.npy') assert (latent_codes.shape[0] == len(image_list)) num_images = latent_codes.shape[0] logger.info(f'Loading boundary.') boundary_file = np.load(boundary_path, allow_pickle=True)[()] if isinstance(boundary_file, dict): boundary = boundary_file['boundary'] manipulate_layers = boundary_file['meta_data']['manipulate_layers'] else: boundary = boundary_file manipulate_layers = args.manipulate_layers if manipulate_layers: logger.info(f' Manipulating on layers `{manipulate_layers}`.') else: logger.info(f' Manipulating on ALL layers.') logger.info(f'Start manipulation.') step = args.step viz_size = (None if (args.viz_size == 0) else args.viz_size) visualizer = HtmlPageVisualizer(num_rows=num_images, num_cols=(step + 3), viz_size=viz_size) visualizer.set_headers((['Name', 'Origin', 'Inverted'] + [f'Step {i:02d}' for i in range(1, (step + 1))])) for (img_idx, img_name) in enumerate(image_list): ori_image = load_image(f'{image_dir}/{img_name}_ori.png') inv_image = load_image(f'{image_dir}/{img_name}_inv.png') visualizer.set_cell(img_idx, 0, text=img_name) visualizer.set_cell(img_idx, 1, image=ori_image) visualizer.set_cell(img_idx, 2, image=inv_image) codes = manipulate(latent_codes=latent_codes, boundary=boundary, start_distance=args.start_distance, end_distance=args.end_distance, step=step, layerwise_manipulation=True, num_layers=generator.num_layers, manipulate_layers=manipulate_layers, is_code_layerwise=True, is_boundary_layerwise=True) for img_idx in tqdm(range(num_images), leave=False): wandb_images = [] img_name = image_list[img_idx] ori_image = load_image(f'{image_dir}/{img_name}_ori.png') wandb_images.append(ori_image) run = wandb.init(project='in-domain-gan', job_type='manipulate', name='manipulate-{}-{}'.format(boundary_name, img_name)) output_images = generator.easy_synthesize(codes[img_idx], latent_space_type='wp')['image'] wandb_images.extend(output_images) for (s, output_image) in enumerate(output_images): visualizer.set_cell(img_idx, (s + 3), image=output_image) wandb.log({'manipulate {} image'.format(boundary_name): [wandb.Image(image) for image in wandb_images]}) run.join() visualizer.save(f'{output_dir}/{job_name}.html')<|docstring|>Main function.<|endoftext|>
8d4b2d8c7a2c87743415c3c8bb1230fe764d21820417bdde300d4426b8eb977a
def get_survey_responses(sheets) -> dict: 'Get optout survey responses (i.e. columns I and J)\n\n Returns:\n dict with keys range (str), majorDimension (str), and values\n List[List[str]]\n ' spreadsheet_id = getenv('SPREADSHEET_ID') range_ = SPREADSHEET_RANGE major_dimension = 'COLUMNS' request = sheets.spreadsheets().values().get(spreadsheetId=spreadsheet_id, range=range_, majorDimension=major_dimension) response = request.execute() return response
Get optout survey responses (i.e. columns I and J) Returns: dict with keys range (str), majorDimension (str), and values List[List[str]]
sync_usernames.py
get_survey_responses
HDI-Project/ballet-study-telemetry-server
0
python
def get_survey_responses(sheets) -> dict: 'Get optout survey responses (i.e. columns I and J)\n\n Returns:\n dict with keys range (str), majorDimension (str), and values\n List[List[str]]\n ' spreadsheet_id = getenv('SPREADSHEET_ID') range_ = SPREADSHEET_RANGE major_dimension = 'COLUMNS' request = sheets.spreadsheets().values().get(spreadsheetId=spreadsheet_id, range=range_, majorDimension=major_dimension) response = request.execute() return response
def get_survey_responses(sheets) -> dict: 'Get optout survey responses (i.e. columns I and J)\n\n Returns:\n dict with keys range (str), majorDimension (str), and values\n List[List[str]]\n ' spreadsheet_id = getenv('SPREADSHEET_ID') range_ = SPREADSHEET_RANGE major_dimension = 'COLUMNS' request = sheets.spreadsheets().values().get(spreadsheetId=spreadsheet_id, range=range_, majorDimension=major_dimension) response = request.execute() return response<|docstring|>Get optout survey responses (i.e. columns I and J) Returns: dict with keys range (str), majorDimension (str), and values List[List[str]]<|endoftext|>
94884dc9890e0c713c02a5fa636b51e834e74d29ad025456f4ad117f85df4cc2
def login(password=None, email=None): 'Tries to get a fresh session id from the server by sending password and\n user_email to it. If a new session is granted (password and user_email match),\n gui switches to logged-in home view (login/register button hide, logged-in-as shows)\n ' password = (app['login']['pw_entry'].get() if (password is None) else password) user_email = (app['login']['email_entry'].get() if (email is None) else email) with db_conn as cursor: session_id = server.login(cursor, user_email, password) if (session_id is not None): app.data['session_id'] = session_id app['login'].hide_components('login_failed_label') switch_to_home() with db_conn as cursor: (session_id, user_data) = server.query_user_data(cursor, user_email, user_email=user_email, session_id=session_id) app.data['session_id'] = session_id store_user_data(user_data) app['main_menu'].unhide_components('logged_in_as_frame') app['main_menu'].hide_components('message_frame', 'login_button', 'register_button') set_logged_in_as_user_text() app['login'].clear_entries() else: app.views_dict['login'].unhide_components('login_failed_label')
Tries to get a fresh session id from the server by sending password and user_email to it. If a new session is granted (password and user_email match), gui switches to logged-in home view (login/register button hide, logged-in-as shows)
desktop_shop/gui/callbacks.py
login
cnonnet/desktop_shop
0
python
def login(password=None, email=None): 'Tries to get a fresh session id from the server by sending password and\n user_email to it. If a new session is granted (password and user_email match),\n gui switches to logged-in home view (login/register button hide, logged-in-as shows)\n ' password = (app['login']['pw_entry'].get() if (password is None) else password) user_email = (app['login']['email_entry'].get() if (email is None) else email) with db_conn as cursor: session_id = server.login(cursor, user_email, password) if (session_id is not None): app.data['session_id'] = session_id app['login'].hide_components('login_failed_label') switch_to_home() with db_conn as cursor: (session_id, user_data) = server.query_user_data(cursor, user_email, user_email=user_email, session_id=session_id) app.data['session_id'] = session_id store_user_data(user_data) app['main_menu'].unhide_components('logged_in_as_frame') app['main_menu'].hide_components('message_frame', 'login_button', 'register_button') set_logged_in_as_user_text() app['login'].clear_entries() else: app.views_dict['login'].unhide_components('login_failed_label')
def login(password=None, email=None): 'Tries to get a fresh session id from the server by sending password and\n user_email to it. If a new session is granted (password and user_email match),\n gui switches to logged-in home view (login/register button hide, logged-in-as shows)\n ' password = (app['login']['pw_entry'].get() if (password is None) else password) user_email = (app['login']['email_entry'].get() if (email is None) else email) with db_conn as cursor: session_id = server.login(cursor, user_email, password) if (session_id is not None): app.data['session_id'] = session_id app['login'].hide_components('login_failed_label') switch_to_home() with db_conn as cursor: (session_id, user_data) = server.query_user_data(cursor, user_email, user_email=user_email, session_id=session_id) app.data['session_id'] = session_id store_user_data(user_data) app['main_menu'].unhide_components('logged_in_as_frame') app['main_menu'].hide_components('message_frame', 'login_button', 'register_button') set_logged_in_as_user_text() app['login'].clear_entries() else: app.views_dict['login'].unhide_components('login_failed_label')<|docstring|>Tries to get a fresh session id from the server by sending password and user_email to it. If a new session is granted (password and user_email match), gui switches to logged-in home view (login/register button hide, logged-in-as shows)<|endoftext|>
330c3fe7e87b4d89265dd752feedf929b6514cf53e796a4ab92f4ee709dac94d
def sign_out(): 'Signs out the currently logged in user. clears the user data, login data,\n register data, checkout basket and returns to not-logged-in home view\n (unhides register/login buttons, hides logged-in-as message)\n ' app.data = {'session_id': None, 'user_data': user.UserSignUpData(), 'pw_hash': '', 'cart': []} app['login'].clear_entries() app['register'].clear_entries() app['checkout'].clear() app['main_menu'].hide_components('logged_in_as_frame', 'message_frame', 'checkout_button') app['main_menu'].unhide_components('login_button', 'register_button') switch_to_home()
Signs out the currently logged in user. clears the user data, login data, register data, checkout basket and returns to not-logged-in home view (unhides register/login buttons, hides logged-in-as message)
desktop_shop/gui/callbacks.py
sign_out
cnonnet/desktop_shop
0
python
def sign_out(): 'Signs out the currently logged in user. clears the user data, login data,\n register data, checkout basket and returns to not-logged-in home view\n (unhides register/login buttons, hides logged-in-as message)\n ' app.data = {'session_id': None, 'user_data': user.UserSignUpData(), 'pw_hash': '', 'cart': []} app['login'].clear_entries() app['register'].clear_entries() app['checkout'].clear() app['main_menu'].hide_components('logged_in_as_frame', 'message_frame', 'checkout_button') app['main_menu'].unhide_components('login_button', 'register_button') switch_to_home()
def sign_out(): 'Signs out the currently logged in user. clears the user data, login data,\n register data, checkout basket and returns to not-logged-in home view\n (unhides register/login buttons, hides logged-in-as message)\n ' app.data = {'session_id': None, 'user_data': user.UserSignUpData(), 'pw_hash': '', 'cart': []} app['login'].clear_entries() app['register'].clear_entries() app['checkout'].clear() app['main_menu'].hide_components('logged_in_as_frame', 'message_frame', 'checkout_button') app['main_menu'].unhide_components('login_button', 'register_button') switch_to_home()<|docstring|>Signs out the currently logged in user. clears the user data, login data, register data, checkout basket and returns to not-logged-in home view (unhides register/login buttons, hides logged-in-as message)<|endoftext|>
78f4fed034d36afb93c39eb73f474c4a2b6f10ca50446d3e7c7c7d3bcffe045e
def set_logged_in_as_user_text(): 'Sets text of options_dropdown in main menu view to --> Logged in as {full_name}' full_name = app.data['user_data'].full_name app['main_menu']['options_dropdown'].set_var(f'Logged in as {full_name}')
Sets text of options_dropdown in main menu view to --> Logged in as {full_name}
desktop_shop/gui/callbacks.py
set_logged_in_as_user_text
cnonnet/desktop_shop
0
python
def set_logged_in_as_user_text(): full_name = app.data['user_data'].full_name app['main_menu']['options_dropdown'].set_var(f'Logged in as {full_name}')
def set_logged_in_as_user_text(): full_name = app.data['user_data'].full_name app['main_menu']['options_dropdown'].set_var(f'Logged in as {full_name}')<|docstring|>Sets text of options_dropdown in main menu view to --> Logged in as {full_name}<|endoftext|>
feb0bf0f7f4f35c87e2eb2644211615f3d19b874a9653fc21f0476fa0a1cb088
def register(): 'Gets all data entered in the register view and validates it. If valid,\n send the data and the current date to the server and add it to a new user\n in the database, else show an error message\n ' user_data = app['register'].get_user_data() valid_data = validate_user_data(user_data) password = app['register']['pw_entry'].get_var() confirm_password = app['register']['confirm_pw_entry'].get_var() valid_password = validate_password(password, confirm_password) if (valid_data and valid_password): with db_conn as cursor: session_id = server.add_user(cursor, user_data, password) app.data['session_id'] = session_id if (session_id is not None): login(password, user_data.email) app['register'].clear_entries() else: show_error_message('Failed to register.')
Gets all data entered in the register view and validates it. If valid, send the data and the current date to the server and add it to a new user in the database, else show an error message
desktop_shop/gui/callbacks.py
register
cnonnet/desktop_shop
0
python
def register(): 'Gets all data entered in the register view and validates it. If valid,\n send the data and the current date to the server and add it to a new user\n in the database, else show an error message\n ' user_data = app['register'].get_user_data() valid_data = validate_user_data(user_data) password = app['register']['pw_entry'].get_var() confirm_password = app['register']['confirm_pw_entry'].get_var() valid_password = validate_password(password, confirm_password) if (valid_data and valid_password): with db_conn as cursor: session_id = server.add_user(cursor, user_data, password) app.data['session_id'] = session_id if (session_id is not None): login(password, user_data.email) app['register'].clear_entries() else: show_error_message('Failed to register.')
def register(): 'Gets all data entered in the register view and validates it. If valid,\n send the data and the current date to the server and add it to a new user\n in the database, else show an error message\n ' user_data = app['register'].get_user_data() valid_data = validate_user_data(user_data) password = app['register']['pw_entry'].get_var() confirm_password = app['register']['confirm_pw_entry'].get_var() valid_password = validate_password(password, confirm_password) if (valid_data and valid_password): with db_conn as cursor: session_id = server.add_user(cursor, user_data, password) app.data['session_id'] = session_id if (session_id is not None): login(password, user_data.email) app['register'].clear_entries() else: show_error_message('Failed to register.')<|docstring|>Gets all data entered in the register view and validates it. If valid, send the data and the current date to the server and add it to a new user in the database, else show an error message<|endoftext|>
66c11cb375d34f79443a7e1c24370804a00da04ccefe269cc7acd94e62b333f3
def edit_user_data(): 'Edits the user data (callback for edit in profile view). Gets all user\n data from the text entries and validates it using validate_user_data. If\n valid, the data is sent to the server. If the server does not respond with\n a valid new session id, the changes failed and a corresponding error message\n is shown.\n ' user_data = app['profile'].get_user_data() valid_data = validate_user_data(user_data) if valid_data: session_id = app.data['session_id'] with db_conn as cursor: user_data_ = user_data[:(- 1)] (new_session_id, *_) = server.update_user(cursor, user_data_, user_data.email, user_email=user_data.email, session_id=session_id) app.data['session_id'] = new_session_id app.data['user_data'] = user_data populate_profile_with_user_data() if (not new_session_id): show_error_message('Failed to edit data.')
Edits the user data (callback for edit in profile view). Gets all user data from the text entries and validates it using validate_user_data. If valid, the data is sent to the server. If the server does not respond with a valid new session id, the changes failed and a corresponding error message is shown.
desktop_shop/gui/callbacks.py
edit_user_data
cnonnet/desktop_shop
0
python
def edit_user_data(): 'Edits the user data (callback for edit in profile view). Gets all user\n data from the text entries and validates it using validate_user_data. If\n valid, the data is sent to the server. If the server does not respond with\n a valid new session id, the changes failed and a corresponding error message\n is shown.\n ' user_data = app['profile'].get_user_data() valid_data = validate_user_data(user_data) if valid_data: session_id = app.data['session_id'] with db_conn as cursor: user_data_ = user_data[:(- 1)] (new_session_id, *_) = server.update_user(cursor, user_data_, user_data.email, user_email=user_data.email, session_id=session_id) app.data['session_id'] = new_session_id app.data['user_data'] = user_data populate_profile_with_user_data() if (not new_session_id): show_error_message('Failed to edit data.')
def edit_user_data(): 'Edits the user data (callback for edit in profile view). Gets all user\n data from the text entries and validates it using validate_user_data. If\n valid, the data is sent to the server. If the server does not respond with\n a valid new session id, the changes failed and a corresponding error message\n is shown.\n ' user_data = app['profile'].get_user_data() valid_data = validate_user_data(user_data) if valid_data: session_id = app.data['session_id'] with db_conn as cursor: user_data_ = user_data[:(- 1)] (new_session_id, *_) = server.update_user(cursor, user_data_, user_data.email, user_email=user_data.email, session_id=session_id) app.data['session_id'] = new_session_id app.data['user_data'] = user_data populate_profile_with_user_data() if (not new_session_id): show_error_message('Failed to edit data.')<|docstring|>Edits the user data (callback for edit in profile view). Gets all user data from the text entries and validates it using validate_user_data. If valid, the data is sent to the server. If the server does not respond with a valid new session id, the changes failed and a corresponding error message is shown.<|endoftext|>
824c02df802d881614e76bb15ab156e2fb5617e35d43323d719bf251eab242b4
def edit_user_password(): 'Edits the user password (callback for edit in profile view). Gets the\n password and confirm passwords and validates them. If they are valid, the\n new data is sent to the serer. If the server does not respond with a new\n valid session id, the password change failed and the user is shown an error\n message, else a confirmation message is shown.\n ' password = app['profile']['pw_entry'].get_var() confirm_password = app['profile']['confirm_pw_entry'].get_var() valid_password = validate_password(password, confirm_password) if valid_password: session_id = app.data['session_id'] user_email = app.data['user_data'].email with db_conn as cursor: (new_session_id, *_) = server.update_user_password(cursor, password, user_email, user_email=user_email, session_id=session_id) app.data['session_id'] = new_session_id if (not new_session_id): show_error_message('Failed to edit password.') else: show_message('Set new password successfully.') app['profile']['confirm_pw_entry'].set_var('') app['profile']['pw_entry'].set_var('') app['profile'].hide_components('password_change_frame') app['profile'].unhide_components('password_change_frame_button')
Edits the user password (callback for edit in profile view). Gets the password and confirm passwords and validates them. If they are valid, the new data is sent to the server. If the server does not respond with a new valid session id, the password change failed and the user is shown an error message, else a confirmation message is shown.
desktop_shop/gui/callbacks.py
edit_user_password
cnonnet/desktop_shop
0
python
def edit_user_password(): 'Edits the user password (callback for edit in profile view). Gets the\n password and confirm passwords and validates them. If they are valid, the\n new data is sent to the serer. If the server does not respond with a new\n valid session id, the password change failed and the user is shown an error\n message, else a confirmation message is shown.\n ' password = app['profile']['pw_entry'].get_var() confirm_password = app['profile']['confirm_pw_entry'].get_var() valid_password = validate_password(password, confirm_password) if valid_password: session_id = app.data['session_id'] user_email = app.data['user_data'].email with db_conn as cursor: (new_session_id, *_) = server.update_user_password(cursor, password, user_email, user_email=user_email, session_id=session_id) app.data['session_id'] = new_session_id if (not new_session_id): show_error_message('Failed to edit password.') else: show_message('Set new password successfully.') app['profile']['confirm_pw_entry'].set_var() app['profile']['pw_entry'].set_var() app['profile'].hide_components('password_change_frame') app['profile'].unhide_components('password_change_frame_button')
def edit_user_password(): 'Edits the user password (callback for edit in profile view). Gets the\n password and confirm passwords and validates them. If they are valid, the\n new data is sent to the serer. If the server does not respond with a new\n valid session id, the password change failed and the user is shown an error\n message, else a confirmation message is shown.\n ' password = app['profile']['pw_entry'].get_var() confirm_password = app['profile']['confirm_pw_entry'].get_var() valid_password = validate_password(password, confirm_password) if valid_password: session_id = app.data['session_id'] user_email = app.data['user_data'].email with db_conn as cursor: (new_session_id, *_) = server.update_user_password(cursor, password, user_email, user_email=user_email, session_id=session_id) app.data['session_id'] = new_session_id if (not new_session_id): show_error_message('Failed to edit password.') else: show_message('Set new password successfully.') app['profile']['confirm_pw_entry'].set_var() app['profile']['pw_entry'].set_var() app['profile'].hide_components('password_change_frame') app['profile'].unhide_components('password_change_frame_button')<|docstring|>Edits the user password (callback for edit in profile view). Gets the password and confirm passwords and validates them. If they are valid, the new data is sent to the serer. If the server does not respond with a new valid session id, the password change failed and the user is shown an error message, else a confirmation message is shown.<|endoftext|>
4053e4ba438b2523b970363ace47bf5a3e57bae0a8a0a3c2981ee36d179bff79
def validate_user_data(user_data): 'Validates all the data entered in the register view. Validates that the\n email is in the correct format, validates that the first and last name are\n alphabetic and not empty, validates that the gender is either m or f and\n validates that the date of birth is a valid date and is not empty\n ' found_email = re.findall('.+?@.+\\..+', user_data.email) if (not found_email): show_error_message('Email needs to be of the format address@domain.') return False found_first_name = re.findall('\\w+?', user_data.first_name) if (not found_first_name): show_error_message('First name needs to be alphabetic.') return False found_last_name = re.findall('\\w+?', user_data.last_name) if (not found_last_name): show_error_message('Last name needs to be alphabetic.') return False found_gender = re.findall('[mf]', user_data.gender) if (user_data.gender and (not found_gender)): show_error_message('Gender needs to be m or f.') return False if (user_data.dob and (not util.validate_date_string(user_data.dob))): show_error_message('Date of birth needs to be in the format YYYY-MM-DD.') return False return True
Validates all the data entered in the register view. Validates that the email is in the correct format, validates that the first and last name are alphabetic and not empty, validates that the gender, if given, is either m or f, and validates that the date of birth, if given, is a valid date in the format YYYY-MM-DD
desktop_shop/gui/callbacks.py
validate_user_data
cnonnet/desktop_shop
0
python
def validate_user_data(user_data): 'Validates all the data entered in the register view. Validates that the\n email is in the correct format, validates that the first and last name are\n alphabetic and not empty, validates that the gender is either m or f and\n validates that the date of birth is a valid date and is not empty\n ' found_email = re.findall('.+?@.+\\..+', user_data.email) if (not found_email): show_error_message('Email needs to be of the format address@domain.') return False found_first_name = re.findall('\\w+?', user_data.first_name) if (not found_first_name): show_error_message('First name needs to be alphabetic.') return False found_last_name = re.findall('\\w+?', user_data.last_name) if (not found_last_name): show_error_message('Last name needs to be alphabetic.') return False found_gender = re.findall('[mf]', user_data.gender) if (user_data.gender and (not found_gender)): show_error_message('Gender needs to be m or f.') return False if (user_data.dob and (not util.validate_date_string(user_data.dob))): show_error_message('Date of birth needs to be in the format YYYY-MM-DD.') return False return True
def validate_user_data(user_data): 'Validates all the data entered in the register view. Validates that the\n email is in the correct format, validates that the first and last name are\n alphabetic and not empty, validates that the gender is either m or f and\n validates that the date of birth is a valid date and is not empty\n ' found_email = re.findall('.+?@.+\\..+', user_data.email) if (not found_email): show_error_message('Email needs to be of the format address@domain.') return False found_first_name = re.findall('\\w+?', user_data.first_name) if (not found_first_name): show_error_message('First name needs to be alphabetic.') return False found_last_name = re.findall('\\w+?', user_data.last_name) if (not found_last_name): show_error_message('Last name needs to be alphabetic.') return False found_gender = re.findall('[mf]', user_data.gender) if (user_data.gender and (not found_gender)): show_error_message('Gender needs to be m or f.') return False if (user_data.dob and (not util.validate_date_string(user_data.dob))): show_error_message('Date of birth needs to be in the format YYYY-MM-DD.') return False return True<|docstring|>Validates all the data entered in the register view. Validates that the email is in the correct format, validates that the first and last name are alphabetic and not empty, validates that the gender is either m or f and validates that the date of birth is a valid date and is not empty<|endoftext|>
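A minimal, standalone sketch of the two format rules used in the entry above (the address@domain e-mail pattern and the YYYY-MM-DD date of birth). It assumes util.validate_date_string behaves roughly like a strptime check; the sample values and the looks_like_date helper are invented for illustration and are not part of the original module.

import re
from datetime import datetime

# E-mail rule: anything of the form address@domain with a dot in the domain.
assert re.findall(r'.+?@.+\..+', 'ada@example.com')
assert not re.findall(r'.+?@.+\..+', 'not-an-email')

# Date-of-birth rule; util.validate_date_string is assumed to wrap a check like this.
def looks_like_date(value):
    try:
        datetime.strptime(value, '%Y-%m-%d')
        return True
    except ValueError:
        return False

assert looks_like_date('1990-01-31')
assert not looks_like_date('31-01-1990')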
efb9806ad2da5499745c4bdd738dba9ffee1918690ddd29282e6148f0de5c5fc
def validate_password(password, confirm_password): 'Validates that the passwords match and are longer than 8 characters (returns bool)' if (len(password) < 8): show_error_message('Password needs to be at least 8 characters long.') return False if (not (password == confirm_password)): show_error_message("Passwords don't match.") return False return True
Validates that the passwords match and are at least 8 characters long (returns bool)
desktop_shop/gui/callbacks.py
validate_password
cnonnet/desktop_shop
0
python
def validate_password(password, confirm_password): if (len(password) < 8): show_error_message('Password needs to be at least 8 characters long.') return False if (not (password == confirm_password)): show_error_message("Passwords don't match.") return False return True
def validate_password(password, confirm_password): if (len(password) < 8): show_error_message('Password needs to be at least 8 characters long.') return False if (not (password == confirm_password)): show_error_message("Passwords don't match.") return False return True<|docstring|>Validates that the passwords match and are longer than 8 characters (returns bool)<|endoftext|>
7f9513ef0b630bc7a5ec85147ff205e0d3591e4d45b08f9e3d693f946b1d33a2
def show_message(message): 'Shows the specified message in the main menu view' app['main_menu']['message_label'].set_var(message) app['main_menu'].hide_components('error_message_label') app['main_menu'].unhide_components('message_label', 'message_frame')
Shows the specified message in the main menu view
desktop_shop/gui/callbacks.py
show_message
cnonnet/desktop_shop
0
python
def show_message(message): app['main_menu']['message_label'].set_var(message) app['main_menu'].hide_components('error_message_label') app['main_menu'].unhide_components('message_label', 'message_frame')
def show_message(message): app['main_menu']['message_label'].set_var(message) app['main_menu'].hide_components('error_message_label') app['main_menu'].unhide_components('message_label', 'message_frame')<|docstring|>Shows the specified message in the main menu view<|endoftext|>
0bb2b299b8a8e6e70195aa1658ef1a889c7dde334e7b181d7dbeac96aa46cf9a
def show_error_message(message): 'Shows the specified error message in the main menu view' app['main_menu']['error_message_label'].set_var(message) app['main_menu'].hide_components('message_frame') app['main_menu'].unhide_components('error_message_label', 'message_frame')
Shows the specified error message in the main menu view
desktop_shop/gui/callbacks.py
show_error_message
cnonnet/desktop_shop
0
python
def show_error_message(message): app['main_menu']['error_message_label'].set_var(message) app['main_menu'].hide_components('message_frame') app['main_menu'].unhide_components('error_message_label', 'message_frame')
def show_error_message(message): app['main_menu']['error_message_label'].set_var(message) app['main_menu'].hide_components('message_frame') app['main_menu'].unhide_components('error_message_label', 'message_frame')<|docstring|>Shows the specified error message in the main menu view<|endoftext|>
97b7d628bf5356332c586e4445d564eb2870abe02cfb6ed7684242f8f718df2f
def show_password_change_frame(): 'Callback for change password button in profile view, unhides the password change frame' app['profile'].unhide_components('password_change_frame') app['profile'].hide_components('password_change_frame_button')
Callback for change password button in profile view, unhides the password change frame
desktop_shop/gui/callbacks.py
show_password_change_frame
cnonnet/desktop_shop
0
python
def show_password_change_frame(): app['profile'].unhide_components('password_change_frame') app['profile'].hide_components('password_change_frame_button')
def show_password_change_frame(): app['profile'].unhide_components('password_change_frame') app['profile'].hide_components('password_change_frame_button')<|docstring|>Callback for change password button in profile view, unhides the password change frame<|endoftext|>
5aad1c363b5bdcb1c939b2fe44231998cfffac60ed8bdf37c6be91a26bf85d62
def store_user_data(user_data): 'Stores the passed user data in the appropriate gui data fields' if (len(user_data) == 6): app.data['user_data'] = user.UserSignUpData(*user_data) else: sign_out() show_error_message('Something went wrong while logging in. 2')
Stores the passed user data in the appropriate gui data fields
desktop_shop/gui/callbacks.py
store_user_data
cnonnet/desktop_shop
0
python
def store_user_data(user_data): if (len(user_data) == 6): app.data['user_data'] = user.UserSignUpData(*user_data) else: sign_out() show_error_message('Something went wrong while logging in. 2')
def store_user_data(user_data): if (len(user_data) == 6): app.data['user_data'] = user.UserSignUpData(*user_data) else: sign_out() show_error_message('Something went wrong while logging in. 2')<|docstring|>Stores the passed user data in the appropriate gui data fields<|endoftext|>
0ac71f3317036773f964ae9282a59e26d466a5a40b7b9e2a2bb2eb781de8d4b3
def switch_to_home(): 'Switches to home view' app.switch_to('home', 'main_menu')
Switches to home view
desktop_shop/gui/callbacks.py
switch_to_home
cnonnet/desktop_shop
0
python
def switch_to_home(): app.switch_to('home', 'main_menu')
def switch_to_home(): app.switch_to('home', 'main_menu')<|docstring|>Switches to home view<|endoftext|>
9d521bce2ee79307e400834126dbb2a3f2ab30fbbde5ca448530dea08a801ef5
def switch_to_login(): 'Switches to login view' app.switch_to('login', 'main_menu')
Switches to login view
desktop_shop/gui/callbacks.py
switch_to_login
cnonnet/desktop_shop
0
python
def switch_to_login(): app.switch_to('login', 'main_menu')
def switch_to_login(): app.switch_to('login', 'main_menu')<|docstring|>Switches to login view<|endoftext|>
7d1abc18145906d0f69943d9b0c1874676926f1aa6b27a776528cc03a3120f29
def switch_to_register(): 'Switches to register view' app.switch_to('register', 'main_menu')
Switches to register view
desktop_shop/gui/callbacks.py
switch_to_register
cnonnet/desktop_shop
0
python
def switch_to_register(): app.switch_to('register', 'main_menu')
def switch_to_register(): app.switch_to('register', 'main_menu')<|docstring|>Switches to register view<|endoftext|>
908cb66c15ad023de079ddc11887fe11147a59fa13c0cae8460f03c0740d5e48
def switch_to_checkout(): 'Switches to checkout view. If no valid session id is stored, a warning is given to user' app['checkout'].init_checkout() app.switch_to('checkout', 'main_menu') if (app.data['session_id'] is None): show_error_message('Please login to place your order')
Switches to checkout view. If no valid session id is stored, a warning is given to the user
desktop_shop/gui/callbacks.py
switch_to_checkout
cnonnet/desktop_shop
0
python
def switch_to_checkout(): app['checkout'].init_checkout() app.switch_to('checkout', 'main_menu') if (app.data['session_id'] is None): show_error_message('Please login to place your order')
def switch_to_checkout(): app['checkout'].init_checkout() app.switch_to('checkout', 'main_menu') if (app.data['session_id'] is None): show_error_message('Please login to place your order')<|docstring|>Switches to checkout view. If no valid session id is stored, a warning is given to user<|endoftext|>
430652582a0615835c9b64821199fb3c868b998205cea5f08098dda8e6191c91
def switch_to_profile(): 'Switches to the profile view after populating it with the currently logged in user data' populate_profile_with_user_data() app.switch_to('profile', 'main_menu')
Switches to the profile view after populating it with the currently logged in user data
desktop_shop/gui/callbacks.py
switch_to_profile
cnonnet/desktop_shop
0
python
def switch_to_profile(): populate_profile_with_user_data() app.switch_to('profile', 'main_menu')
def switch_to_profile(): populate_profile_with_user_data() app.switch_to('profile', 'main_menu')<|docstring|>Switches to the profile view after populating it with the currently logged in user data<|endoftext|>
5ecb64cc4dabb1a8d153e982c7cce1953b4323007b7323547f05fd02f50769a5
def activate_profile_entry(entry_name): 'Callback for edit button for the respective field in the user data profile view' app['profile'][entry_name].config(state='normal') app['profile']['edit_user_data_button'].config(state='normal')
Callback for edit button for the respective field in the user data profile view
desktop_shop/gui/callbacks.py
activate_profile_entry
cnonnet/desktop_shop
0
python
def activate_profile_entry(entry_name): app['profile'][entry_name].config(state='normal') app['profile']['edit_user_data_button'].config(state='normal')
def activate_profile_entry(entry_name): app['profile'][entry_name].config(state='normal') app['profile']['edit_user_data_button'].config(state='normal')<|docstring|>Callback for edit button for the respective field in the user data profile view<|endoftext|>
7d36c4c5e8cb7b0eb06cabbea0e733e95529ad0a9fdc6458aab2ef5fcba7b8f3
def on_dropdown_value_write_event(*_): 'Callback for options dropdown selection, either switches to profile view or signs out' selected_option = app['main_menu']['options_dropdown'].get_var() if (selected_option == 'Profile'): switch_to_profile() set_logged_in_as_user_text() elif (selected_option == 'Sign out'): sign_out() app['main_menu']['options_dropdown'].config(state='active')
Callback for options dropdown selection, either switches to profile view or signs out
desktop_shop/gui/callbacks.py
on_dropdown_value_write_event
cnonnet/desktop_shop
0
python
def on_dropdown_value_write_event(*_): selected_option = app['main_menu']['options_dropdown'].get_var() if (selected_option == 'Profile'): switch_to_profile() set_logged_in_as_user_text() elif (selected_option == 'Sign out'): sign_out() app['main_menu']['options_dropdown'].config(state='active')
def on_dropdown_value_write_event(*_): selected_option = app['main_menu']['options_dropdown'].get_var() if (selected_option == 'Profile'): switch_to_profile() set_logged_in_as_user_text() elif (selected_option == 'Sign out'): sign_out() app['main_menu']['options_dropdown'].config(state='active')<|docstring|>Callback for options dropdown selection, either switches to profile view or signs out<|endoftext|>
2711a28b225b2fa5c58a3ed573512c12f5f44808ad47359f880a860a9e920b55
def populate_profile_with_user_data(): 'Stores all user data of the currently logged in user in the profile view' app['profile'].store_user_data(app.data['user_data']) app['profile']['first_name_entry'].config(state='disabled') app['profile']['last_name_entry'].config(state='disabled') app['profile']['gender_entry'].config(state='disabled') app['profile']['date_joined_data_label'].config(state='disabled') app['profile']['email_entry'].config(state='disabled') app['profile']['dob_entry'].config(state='disabled') app['profile']['edit_user_data_button'].config(state='disabled')
Stores all user data of the currently logged in user in the profile view
desktop_shop/gui/callbacks.py
populate_profile_with_user_data
cnonnet/desktop_shop
0
python
def populate_profile_with_user_data(): app['profile'].store_user_data(app.data['user_data']) app['profile']['first_name_entry'].config(state='disabled') app['profile']['last_name_entry'].config(state='disabled') app['profile']['gender_entry'].config(state='disabled') app['profile']['date_joined_data_label'].config(state='disabled') app['profile']['email_entry'].config(state='disabled') app['profile']['dob_entry'].config(state='disabled') app['profile']['edit_user_data_button'].config(state='disabled')
def populate_profile_with_user_data(): app['profile'].store_user_data(app.data['user_data']) app['profile']['first_name_entry'].config(state='disabled') app['profile']['last_name_entry'].config(state='disabled') app['profile']['gender_entry'].config(state='disabled') app['profile']['date_joined_data_label'].config(state='disabled') app['profile']['email_entry'].config(state='disabled') app['profile']['dob_entry'].config(state='disabled') app['profile']['edit_user_data_button'].config(state='disabled')<|docstring|>Stores all user data of the currently logged in user in the profile view<|endoftext|>
4b69344fa3436fe403590e307e348fda7aabefe040a56fdbc6a16ee30308b39b
def __init__(self, metadatastore_db, asset_registry_db, *, handler_registry=None, query=None, **kwargs): '\n This Catalog is backed by a pair of MongoDBs with "layout 1".\n\n This layout uses a separate Mongo collection per document type and a\n separate Mongo document for each logical document.\n\n Parameters\n ----------\n metadatastore_db : pymongo.database.Database or string\n Must be a Database or a URI string that includes a database name.\n asset_registry_db : pymongo.database.Database or string\n Must be a Database or a URI string that includes a database name.\n handler_registry : dict, optional\n Maps each asset spec to a handler class or a string specifying the\n module name and class name, as in (for example)\n ``{\'SOME_SPEC\': \'module.submodule.class_name\'}``.\n query : dict, optional\n MongoDB query. Used internally by the ``search()`` method.\n **kwargs :\n Additional keyword arguments are passed through to the base class,\n Catalog.\n ' name = 'mongo_metadatastore' if isinstance(metadatastore_db, str): mds_db = _get_database(metadatastore_db) else: mds_db = metadatastore_db if isinstance(asset_registry_db, str): assets_db = _get_database(asset_registry_db) else: assets_db = asset_registry_db self._run_start_collection = mds_db.get_collection('run_start') self._run_stop_collection = mds_db.get_collection('run_stop') self._event_descriptor_collection = mds_db.get_collection('event_descriptor') self._event_collection = mds_db.get_collection('event') self._resource_collection = assets_db.get_collection('resource') self._datum_collection = assets_db.get_collection('datum') self._metadatastore_db = mds_db self._asset_registry_db = assets_db self._query = (query or {}) if (handler_registry is None): handler_registry = {} parsed_handler_registry = parse_handler_registry(handler_registry) self.filler = event_model.Filler(parsed_handler_registry) super().__init__(**kwargs)
This Catalog is backed by a pair of MongoDBs with "layout 1". This layout uses a separate Mongo collection per document type and a separate Mongo document for each logical document. Parameters ---------- metadatastore_db : pymongo.database.Database or string Must be a Database or a URI string that includes a database name. asset_registry_db : pymongo.database.Database or string Must be a Database or a URI string that includes a database name. handler_registry : dict, optional Maps each asset spec to a handler class or a string specifying the module name and class name, as in (for example) ``{'SOME_SPEC': 'module.submodule.class_name'}``. query : dict, optional MongoDB query. Used internally by the ``search()`` method. **kwargs : Additional keyword arguments are passed through to the base class, Catalog.
intake_bluesky/mongo_layout1.py
__init__
danielballan/intake-bluesky
0
python
def __init__(self, metadatastore_db, asset_registry_db, *, handler_registry=None, query=None, **kwargs): '\n This Catalog is backed by a pair of MongoDBs with "layout 1".\n\n This layout uses a separate Mongo collection per document type and a\n separate Mongo document for each logical document.\n\n Parameters\n ----------\n metadatastore_db : pymongo.database.Database or string\n Must be a Database or a URI string that includes a database name.\n asset_registry_db : pymongo.database.Database or string\n Must be a Database or a URI string that includes a database name.\n handler_registry : dict, optional\n Maps each asset spec to a handler class or a string specifying the\n module name and class name, as in (for example)\n ``{\'SOME_SPEC\': \'module.submodule.class_name\'}``.\n query : dict, optional\n MongoDB query. Used internally by the ``search()`` method.\n **kwargs :\n Additional keyword arguments are passed through to the base class,\n Catalog.\n ' name = 'mongo_metadatastore' if isinstance(metadatastore_db, str): mds_db = _get_database(metadatastore_db) else: mds_db = metadatastore_db if isinstance(asset_registry_db, str): assets_db = _get_database(asset_registry_db) else: assets_db = asset_registry_db self._run_start_collection = mds_db.get_collection('run_start') self._run_stop_collection = mds_db.get_collection('run_stop') self._event_descriptor_collection = mds_db.get_collection('event_descriptor') self._event_collection = mds_db.get_collection('event') self._resource_collection = assets_db.get_collection('resource') self._datum_collection = assets_db.get_collection('datum') self._metadatastore_db = mds_db self._asset_registry_db = assets_db self._query = (query or {}) if (handler_registry is None): handler_registry = {} parsed_handler_registry = parse_handler_registry(handler_registry) self.filler = event_model.Filler(parsed_handler_registry) super().__init__(**kwargs)
def __init__(self, metadatastore_db, asset_registry_db, *, handler_registry=None, query=None, **kwargs): '\n This Catalog is backed by a pair of MongoDBs with "layout 1".\n\n This layout uses a separate Mongo collection per document type and a\n separate Mongo document for each logical document.\n\n Parameters\n ----------\n metadatastore_db : pymongo.database.Database or string\n Must be a Database or a URI string that includes a database name.\n asset_registry_db : pymongo.database.Database or string\n Must be a Database or a URI string that includes a database name.\n handler_registry : dict, optional\n Maps each asset spec to a handler class or a string specifying the\n module name and class name, as in (for example)\n ``{\'SOME_SPEC\': \'module.submodule.class_name\'}``.\n query : dict, optional\n MongoDB query. Used internally by the ``search()`` method.\n **kwargs :\n Additional keyword arguments are passed through to the base class,\n Catalog.\n ' name = 'mongo_metadatastore' if isinstance(metadatastore_db, str): mds_db = _get_database(metadatastore_db) else: mds_db = metadatastore_db if isinstance(asset_registry_db, str): assets_db = _get_database(asset_registry_db) else: assets_db = asset_registry_db self._run_start_collection = mds_db.get_collection('run_start') self._run_stop_collection = mds_db.get_collection('run_stop') self._event_descriptor_collection = mds_db.get_collection('event_descriptor') self._event_collection = mds_db.get_collection('event') self._resource_collection = assets_db.get_collection('resource') self._datum_collection = assets_db.get_collection('datum') self._metadatastore_db = mds_db self._asset_registry_db = assets_db self._query = (query or {}) if (handler_registry is None): handler_registry = {} parsed_handler_registry = parse_handler_registry(handler_registry) self.filler = event_model.Filler(parsed_handler_registry) super().__init__(**kwargs)<|docstring|>This Catalog is backed by a pair of MongoDBs with "layout 1". This layout uses a separate Mongo collection per document type and a separate Mongo document for each logical document. Parameters ---------- metadatastore_db : pymongo.database.Database or string Must be a Database or a URI string that includes a database name. asset_registry_db : pymongo.database.Database or string Must be a Database or a URI string that includes a database name. handler_registry : dict, optional Maps each asset spec to a handler class or a string specifying the module name and class name, as in (for example) ``{'SOME_SPEC': 'module.submodule.class_name'}``. query : dict, optional MongoDB query. Used internally by the ``search()`` method. **kwargs : Additional keyword arguments are passed through to the base class, Catalog.<|endoftext|>
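A hedged construction example for the catalog whose __init__ is shown above. The import path follows the record's path field (intake_bluesky/mongo_layout1.py), but the class name BlueskyMongoCatalog, the Mongo URIs, and the database names are assumptions; the handler spec format is the one given in the docstring.

from intake_bluesky.mongo_layout1 import BlueskyMongoCatalog  # class name is an assumption

catalog = BlueskyMongoCatalog(
    metadatastore_db='mongodb://localhost:27017/metadatastore',    # URI must include a database name
    asset_registry_db='mongodb://localhost:27017/asset_registry',  # likewise
    handler_registry={'SOME_SPEC': 'module.submodule.class_name'}, # spec format from the docstring
)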
90e13d991246b1239b3cd44abf6b25c7af875a866647b930e520b96d82c25c99
def search(self, query): '\n Return a new Catalog with a subset of the entries in this Catalog.\n\n Parameters\n ----------\n query : dict\n MongoDB query.\n ' if self._query: query = {'$and': [self._query, query]} cat = type(self)(metadatastore_db=self._metadatastore_db, asset_registry_db=self._asset_registry_db, query=query, name='search results', getenv=self.getenv, getshell=self.getshell, auth=self.auth, metadata=(self.metadata or {}).copy(), storage_options=self.storage_options) return cat
Return a new Catalog with a subset of the entries in this Catalog. Parameters ---------- query : dict MongoDB query.
intake_bluesky/mongo_layout1.py
search
danielballan/intake-bluesky
0
python
def search(self, query): '\n Return a new Catalog with a subset of the entries in this Catalog.\n\n Parameters\n ----------\n query : dict\n MongoDB query.\n ' if self._query: query = {'$and': [self._query, query]} cat = type(self)(metadatastore_db=self._metadatastore_db, asset_registry_db=self._asset_registry_db, query=query, name='search results', getenv=self.getenv, getshell=self.getshell, auth=self.auth, metadata=(self.metadata or {}).copy(), storage_options=self.storage_options) return cat
def search(self, query): '\n Return a new Catalog with a subset of the entries in this Catalog.\n\n Parameters\n ----------\n query : dict\n MongoDB query.\n ' if self._query: query = {'$and': [self._query, query]} cat = type(self)(metadatastore_db=self._metadatastore_db, asset_registry_db=self._asset_registry_db, query=query, name='search results', getenv=self.getenv, getshell=self.getshell, auth=self.auth, metadata=(self.metadata or {}).copy(), storage_options=self.storage_options) return cat<|docstring|>Return a new Catalog with a subset of the entries in this Catalog. Parameters ---------- query : dict MongoDB query.<|endoftext|>
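A hedged example of narrowing a catalog with a raw MongoDB query, continuing from the construction sketch above. The field names 'plan_name' and 'time' are typical run-start keys, not guaranteed by this module.

counts = catalog.search({'plan_name': 'count'})
recent_counts = counts.search({'time': {'$gte': 1546300800}})  # chained searches combine via $and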
751bd1b7046ee0d66cfb551587b47f8559c1eed04c930d3d9d84ef0738411c1c
def open_for_read(uri): '\n Opens a test file for reading.\n ' return gdal.VSIFOpenExL(uri, 'rb', 1)
Opens a test file for reading.
autotest/gcore/vsis3.py
open_for_read
rroliver/gdal
1
python
def open_for_read(uri): '\n \n ' return gdal.VSIFOpenExL(uri, 'rb', 1)
def open_for_read(uri): '\n \n ' return gdal.VSIFOpenExL(uri, 'rb', 1)<|docstring|>Opens a test file for reading.<|endoftext|>
a2ebef825b7e4171df820af5952fecfaa2ebde72750fa74fb2d645cc6850b238
def _initialize(self, use_gpu=False, enable_mkldnn=False): '\n initialize with the necessary elements\n ' cfg = self.merge_configs() cfg.use_gpu = use_gpu if use_gpu: try: _places = os.environ['CUDA_VISIBLE_DEVICES'] int(_places[0]) print('use gpu: ', use_gpu) print('CUDA_VISIBLE_DEVICES: ', _places) cfg.gpu_mem = 8000 except: raise RuntimeError('Environment Variable CUDA_VISIBLE_DEVICES is not set correctly. If you wanna use gpu, please set CUDA_VISIBLE_DEVICES via export CUDA_VISIBLE_DEVICES=cuda_device_id.') cfg.ir_optim = True cfg.enable_mkldnn = enable_mkldnn self.text_detector = TextDetector(cfg)
initialize with the necessary elements
deploy/hubserving/ocr_det/module.py
_initialize
chccc1994/PaddleOCR
0
python
def _initialize(self, use_gpu=False, enable_mkldnn=False): '\n \n ' cfg = self.merge_configs() cfg.use_gpu = use_gpu if use_gpu: try: _places = os.environ['CUDA_VISIBLE_DEVICES'] int(_places[0]) print('use gpu: ', use_gpu) print('CUDA_VISIBLE_DEVICES: ', _places) cfg.gpu_mem = 8000 except: raise RuntimeError('Environment Variable CUDA_VISIBLE_DEVICES is not set correctly. If you wanna use gpu, please set CUDA_VISIBLE_DEVICES via export CUDA_VISIBLE_DEVICES=cuda_device_id.') cfg.ir_optim = True cfg.enable_mkldnn = enable_mkldnn self.text_detector = TextDetector(cfg)
def _initialize(self, use_gpu=False, enable_mkldnn=False): '\n \n ' cfg = self.merge_configs() cfg.use_gpu = use_gpu if use_gpu: try: _places = os.environ['CUDA_VISIBLE_DEVICES'] int(_places[0]) print('use gpu: ', use_gpu) print('CUDA_VISIBLE_DEVICES: ', _places) cfg.gpu_mem = 8000 except: raise RuntimeError('Environment Variable CUDA_VISIBLE_DEVICES is not set correctly. If you wanna use gpu, please set CUDA_VISIBLE_DEVICES via export CUDA_VISIBLE_DEVICES=cuda_device_id.') cfg.ir_optim = True cfg.enable_mkldnn = enable_mkldnn self.text_detector = TextDetector(cfg)<|docstring|>initialize with the necessary elements<|endoftext|>
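The GPU branch above reads CUDA_VISIBLE_DEVICES and raises if it is missing, so the variable must be set before initialising with use_gpu=True. A minimal sketch; the device id '0' is only an example.

import os

os.environ.setdefault('CUDA_VISIBLE_DEVICES', '0')  # pick the GPU id before _initialize(use_gpu=True)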
62cd35f57e63736cda7eeaa809d79421a63388c0fc77f7df767fd4a7a3570eab
def predict(self, images=[], paths=[]): '\n Get the text box in the predicted images.\n Args:\n images (list(numpy.ndarray)): images data, shape of each is [H, W, C]. If images not paths\n paths (list[str]): The paths of images. If paths not images\n Returns:\n res (list): The result of text detection box and save path of images.\n ' if ((images != []) and isinstance(images, list) and (paths == [])): predicted_data = images elif ((images == []) and isinstance(paths, list) and (paths != [])): predicted_data = self.read_images(paths) else: raise TypeError('The input data is inconsistent with expectations.') assert (predicted_data != []), 'There is not any image to be predicted. Please check the input data.' all_results = [] for img in predicted_data: if (img is None): logger.info('error in loading image') all_results.append([]) continue (dt_boxes, elapse) = self.text_detector(img) logger.info('Predict time : {}'.format(elapse)) rec_res_final = [] for dno in range(len(dt_boxes)): rec_res_final.append({'text_region': dt_boxes[dno].astype(np.int).tolist()}) all_results.append(rec_res_final) return all_results
Get the text box in the predicted images. Args: images (list(numpy.ndarray)): images data, shape of each is [H, W, C]; used when paths is not provided paths (list[str]): The paths of images; used when images is not provided Returns: res (list): The result of text detection boxes and save path of images.
deploy/hubserving/ocr_det/module.py
predict
chccc1994/PaddleOCR
0
python
def predict(self, images=[], paths=[]): '\n Get the text box in the predicted images.\n Args:\n images (list(numpy.ndarray)): images data, shape of each is [H, W, C]. If images not paths\n paths (list[str]): The paths of images. If paths not images\n Returns:\n res (list): The result of text detection box and save path of images.\n ' if ((images != []) and isinstance(images, list) and (paths == [])): predicted_data = images elif ((images == []) and isinstance(paths, list) and (paths != [])): predicted_data = self.read_images(paths) else: raise TypeError('The input data is inconsistent with expectations.') assert (predicted_data != []), 'There is not any image to be predicted. Please check the input data.' all_results = [] for img in predicted_data: if (img is None): logger.info('error in loading image') all_results.append([]) continue (dt_boxes, elapse) = self.text_detector(img) logger.info('Predict time : {}'.format(elapse)) rec_res_final = [] for dno in range(len(dt_boxes)): rec_res_final.append({'text_region': dt_boxes[dno].astype(np.int).tolist()}) all_results.append(rec_res_final) return all_results
def predict(self, images=[], paths=[]): '\n Get the text box in the predicted images.\n Args:\n images (list(numpy.ndarray)): images data, shape of each is [H, W, C]. If images not paths\n paths (list[str]): The paths of images. If paths not images\n Returns:\n res (list): The result of text detection box and save path of images.\n ' if ((images != []) and isinstance(images, list) and (paths == [])): predicted_data = images elif ((images == []) and isinstance(paths, list) and (paths != [])): predicted_data = self.read_images(paths) else: raise TypeError('The input data is inconsistent with expectations.') assert (predicted_data != []), 'There is not any image to be predicted. Please check the input data.' all_results = [] for img in predicted_data: if (img is None): logger.info('error in loading image') all_results.append([]) continue (dt_boxes, elapse) = self.text_detector(img) logger.info('Predict time : {}'.format(elapse)) rec_res_final = [] for dno in range(len(dt_boxes)): rec_res_final.append({'text_region': dt_boxes[dno].astype(np.int).tolist()}) all_results.append(rec_res_final) return all_results<|docstring|>Get the text box in the predicted images. Args: images (list(numpy.ndarray)): images data, shape of each is [H, W, C]. If images not paths paths (list[str]): The paths of images. If paths not images Returns: res (list): The result of text detection box and save path of images.<|endoftext|>
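A hedged local usage sketch for the predict method above, assuming the module has been installed into PaddleHub under the name 'ocr_det' (taken from the record's path); the image path is a placeholder.

import paddlehub as hub

ocr_det = hub.Module(name='ocr_det')              # module name assumed from deploy/hubserving/ocr_det
results = ocr_det.predict(paths=['./test.jpg'])   # placeholder image path
for boxes in results:
    for box in boxes:
        print(box['text_region'])                 # four corner points of one detected text box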
df3fa19d0c4fabf8d9703fde32bdaaadb92322564ecf0344c589cbfd3e907015
@serving def serving_method(self, images, **kwargs): '\n Run as a service.\n ' images_decode = [base64_to_cv2(image) for image in images] results = self.predict(images_decode, **kwargs) return results
Run as a service.
deploy/hubserving/ocr_det/module.py
serving_method
chccc1994/PaddleOCR
0
python
@serving def serving_method(self, images, **kwargs): '\n \n ' images_decode = [base64_to_cv2(image) for image in images] results = self.predict(images_decode, **kwargs) return results
@serving def serving_method(self, images, **kwargs): '\n \n ' images_decode = [base64_to_cv2(image) for image in images] results = self.predict(images_decode, **kwargs) return results<|docstring|>Run as a service.<|endoftext|>
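A hedged client-side sketch for the serving entry point above: it base64-encodes an image and posts it as JSON. The port and route follow common PaddleHub serving defaults and may differ in a real deployment; the image path is a placeholder.

import base64
import json
import requests

with open('test.jpg', 'rb') as f:                 # placeholder image path
    b64_image = base64.b64encode(f.read()).decode('utf-8')

resp = requests.post(
    'http://127.0.0.1:8866/predict/ocr_det',      # assumed default serving port and route
    headers={'Content-type': 'application/json'},
    data=json.dumps({'images': [b64_image]}),
)
print(resp.json())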
44aa6a681ec6c92cffc4331767f70466e2aea46b074c1ff2191d81c0f2f05e57
@receiver(post_save, sender=Authorization) def delete_my_library_cache(sender, instance, **kwargs): 'Authorizations directly impact resources in in my_library, so delete my_library page cache after saving.' instance.user.userprofile.delete_my_library_cache()
Authorizations directly impact resources in my_library, so delete the my_library page cache after saving.
TWLight/users/signals.py
delete_my_library_cache
thatandromeda/TWLight
0
python
@receiver(post_save, sender=Authorization) def delete_my_library_cache(sender, instance, **kwargs): instance.user.userprofile.delete_my_library_cache()
@receiver(post_save, sender=Authorization) def delete_my_library_cache(sender, instance, **kwargs): instance.user.userprofile.delete_my_library_cache()<|docstring|>Authorizations directly impact resources in in my_library, so delete my_library page cache after saving.<|endoftext|>
2f6eb112a49dcaa9f2b204c1fa27761475e722720555793d17d310541b9f3956
@receiver(pre_save, sender=Authorization) def validate_authorization(sender, instance, **kwargs): 'Authorizations are generated by app code instead of ModelForm, so full_clean() before saving.' instance.full_clean()
Authorizations are generated by app code instead of ModelForm, so full_clean() before saving.
TWLight/users/signals.py
validate_authorization
thatandromeda/TWLight
0
python
@receiver(pre_save, sender=Authorization) def validate_authorization(sender, instance, **kwargs): instance.full_clean()
@receiver(pre_save, sender=Authorization) def validate_authorization(sender, instance, **kwargs): instance.full_clean()<|docstring|>Authorizations are generated by app code instead of ModelForm, so full_clean() before saving.<|endoftext|>
bbdc7abed3fe70c91f4a5f8c2e508c00de7aae62c5501ea7985d04bc09509a01
@receiver(post_save, sender=settings.AUTH_USER_MODEL) def create_user_profile(sender, instance, created, **kwargs): 'Create user profiles automatically when users are created.' if created: UserProfile.objects.create(user=instance)
Create user profiles automatically when users are created.
TWLight/users/signals.py
create_user_profile
thatandromeda/TWLight
0
python
@receiver(post_save, sender=settings.AUTH_USER_MODEL) def create_user_profile(sender, instance, created, **kwargs): if created: UserProfile.objects.create(user=instance)
@receiver(post_save, sender=settings.AUTH_USER_MODEL) def create_user_profile(sender, instance, created, **kwargs): if created: UserProfile.objects.create(user=instance)<|docstring|>Create user profiles automatically when users are created.<|endoftext|>
7d2015ef91ccc46469e1a498eda425d7b6cd3861ad800c201341190e04318145
@receiver(post_save, sender=Partner) def update_partner_authorization_expiry(sender, instance, **kwargs): '\n Updates missing expiration dates upon resource updates.\n Mostly cribbed from\n TWLight.applications.models.post_receive_commit\n and\n TWLight.users.management.commands.authorization_backfill\n Could factor out the common code.\n ' if (sender == Partner): partner = instance if (partner.account_length or (partner.authorization_method == Partner.PROXY)): authorizations = Authorization.objects.filter(partners=partner, date_expires=None) for authorization in authorizations: if authorization.is_valid: if ((partner.authorization_method == Partner.PROXY) and (partner.requested_access_duration is True)): one_year_from_auth = (authorization.date_authorized + timedelta(days=365)) authorization.date_expires = one_year_from_auth authorization.save() elif partner.account_length: authorization.date_expires = (authorization.date_authorized + partner.account_length) authorization.save()
Updates missing expiration dates upon resource updates. Mostly cribbed from TWLight.applications.models.post_receive_commit and TWLight.users.management.commands.authorization_backfill Could factor out the common code.
TWLight/users/signals.py
update_partner_authorization_expiry
thatandromeda/TWLight
0
python
@receiver(post_save, sender=Partner) def update_partner_authorization_expiry(sender, instance, **kwargs): '\n Updates missing expiration dates upon resource updates.\n Mostly cribbed from\n TWLight.applications.models.post_receive_commit\n and\n TWLight.users.management.commands.authorization_backfill\n Could factor out the common code.\n ' if (sender == Partner): partner = instance if (partner.account_length or (partner.authorization_method == Partner.PROXY)): authorizations = Authorization.objects.filter(partners=partner, date_expires=None) for authorization in authorizations: if authorization.is_valid: if ((partner.authorization_method == Partner.PROXY) and (partner.requested_access_duration is True)): one_year_from_auth = (authorization.date_authorized + timedelta(days=365)) authorization.date_expires = one_year_from_auth authorization.save() elif partner.account_length: authorization.date_expires = (authorization.date_authorized + partner.account_length) authorization.save()
@receiver(post_save, sender=Partner) def update_partner_authorization_expiry(sender, instance, **kwargs): '\n Updates missing expiration dates upon resource updates.\n Mostly cribbed from\n TWLight.applications.models.post_receive_commit\n and\n TWLight.users.management.commands.authorization_backfill\n Could factor out the common code.\n ' if (sender == Partner): partner = instance if (partner.account_length or (partner.authorization_method == Partner.PROXY)): authorizations = Authorization.objects.filter(partners=partner, date_expires=None) for authorization in authorizations: if authorization.is_valid: if ((partner.authorization_method == Partner.PROXY) and (partner.requested_access_duration is True)): one_year_from_auth = (authorization.date_authorized + timedelta(days=365)) authorization.date_expires = one_year_from_auth authorization.save() elif partner.account_length: authorization.date_expires = (authorization.date_authorized + partner.account_length) authorization.save()<|docstring|>Updates missing expiration dates upon resource updates. Mostly cribbed from TWLight.applications.models.post_receive_commit and TWLight.users.management.commands.authorization_backfill Could factor out the common code.<|endoftext|>
080cf7bbf627070c65f205b5fd89e44fcae34ebc7478cf3b8f710e68d4e04805
@receiver(pre_save, sender=Partner) def update_existing_bundle_authorizations(sender, instance, **kwargs): '\n If this partner was just switched to Bundle from a non-Bundle\n authorization method, update any existing Bundle authorizations\n to include it, and vice-versa, including if it was marked not-available.\n Also delete any authorizations that previously existed to this partner.\n ' add_to_auths = False remove_from_auths = False delete_defunct_authorizations = False try: previous_data = Partner.even_not_available.get(pk=instance.pk) except Partner.DoesNotExist: return now_bundle = (instance.authorization_method == Partner.BUNDLE) now_available = (instance.status == Partner.AVAILABLE) previously_available = (previous_data.status == Partner.AVAILABLE) previously_bundle = (previous_data.authorization_method == Partner.BUNDLE) if now_available: if now_bundle: if ((not previously_available) or (not previously_bundle)): add_to_auths = True elif previously_bundle: remove_from_auths = True elif (not now_available): if (previously_available and previously_bundle): remove_from_auths = True elif (now_bundle and (not previously_bundle)): delete_defunct_authorizations = True if (add_to_auths or remove_from_auths or delete_defunct_authorizations): authorizations_to_update = get_all_bundle_authorizations() if add_to_auths: all_partner_authorizations = Authorization.objects.filter(partners__pk=instance.pk) for defunct_authorization in all_partner_authorizations: defunct_authorization.delete() for authorization in authorizations_to_update: authorization.partners.add(instance) elif remove_from_auths: for authorization in authorizations_to_update: authorization.partners.remove(instance) elif delete_defunct_authorizations: all_partner_authorizations = Authorization.objects.filter(partners__pk=instance.pk) for defunct_authorization in all_partner_authorizations: defunct_authorization.delete()
If this partner was just switched to Bundle from a non-Bundle authorization method, update any existing Bundle authorizations to include it, and vice-versa; this also applies when the partner was marked not-available. Also delete any authorizations that previously existed for this partner.
TWLight/users/signals.py
update_existing_bundle_authorizations
thatandromeda/TWLight
0
python
@receiver(pre_save, sender=Partner) def update_existing_bundle_authorizations(sender, instance, **kwargs): '\n If this partner was just switched to Bundle from a non-Bundle\n authorization method, update any existing Bundle authorizations\n to include it, and vice-versa, including if it was marked not-available.\n Also delete any authorizations that previously existed to this partner.\n ' add_to_auths = False remove_from_auths = False delete_defunct_authorizations = False try: previous_data = Partner.even_not_available.get(pk=instance.pk) except Partner.DoesNotExist: return now_bundle = (instance.authorization_method == Partner.BUNDLE) now_available = (instance.status == Partner.AVAILABLE) previously_available = (previous_data.status == Partner.AVAILABLE) previously_bundle = (previous_data.authorization_method == Partner.BUNDLE) if now_available: if now_bundle: if ((not previously_available) or (not previously_bundle)): add_to_auths = True elif previously_bundle: remove_from_auths = True elif (not now_available): if (previously_available and previously_bundle): remove_from_auths = True elif (now_bundle and (not previously_bundle)): delete_defunct_authorizations = True if (add_to_auths or remove_from_auths or delete_defunct_authorizations): authorizations_to_update = get_all_bundle_authorizations() if add_to_auths: all_partner_authorizations = Authorization.objects.filter(partners__pk=instance.pk) for defunct_authorization in all_partner_authorizations: defunct_authorization.delete() for authorization in authorizations_to_update: authorization.partners.add(instance) elif remove_from_auths: for authorization in authorizations_to_update: authorization.partners.remove(instance) elif delete_defunct_authorizations: all_partner_authorizations = Authorization.objects.filter(partners__pk=instance.pk) for defunct_authorization in all_partner_authorizations: defunct_authorization.delete()
@receiver(pre_save, sender=Partner) def update_existing_bundle_authorizations(sender, instance, **kwargs): '\n If this partner was just switched to Bundle from a non-Bundle\n authorization method, update any existing Bundle authorizations\n to include it, and vice-versa, including if it was marked not-available.\n Also delete any authorizations that previously existed to this partner.\n ' add_to_auths = False remove_from_auths = False delete_defunct_authorizations = False try: previous_data = Partner.even_not_available.get(pk=instance.pk) except Partner.DoesNotExist: return now_bundle = (instance.authorization_method == Partner.BUNDLE) now_available = (instance.status == Partner.AVAILABLE) previously_available = (previous_data.status == Partner.AVAILABLE) previously_bundle = (previous_data.authorization_method == Partner.BUNDLE) if now_available: if now_bundle: if ((not previously_available) or (not previously_bundle)): add_to_auths = True elif previously_bundle: remove_from_auths = True elif (not now_available): if (previously_available and previously_bundle): remove_from_auths = True elif (now_bundle and (not previously_bundle)): delete_defunct_authorizations = True if (add_to_auths or remove_from_auths or delete_defunct_authorizations): authorizations_to_update = get_all_bundle_authorizations() if add_to_auths: all_partner_authorizations = Authorization.objects.filter(partners__pk=instance.pk) for defunct_authorization in all_partner_authorizations: defunct_authorization.delete() for authorization in authorizations_to_update: authorization.partners.add(instance) elif remove_from_auths: for authorization in authorizations_to_update: authorization.partners.remove(instance) elif delete_defunct_authorizations: all_partner_authorizations = Authorization.objects.filter(partners__pk=instance.pk) for defunct_authorization in all_partner_authorizations: defunct_authorization.delete()<|docstring|>If this partner was just switched to Bundle from a non-Bundle authorization method, update any existing Bundle authorizations to include it, and vice-versa, including if it was marked not-available. Also delete any authorizations that previously existed to this partner.<|endoftext|>
dfc4c9fac15eba0f73982492779640c2204c3630021077821085986b5d43085b
@receiver(post_save, sender=Partner) def update_bundle_authorizations_on_bundle_partner_creation(sender, instance, created, **kwargs): "\n This does the same thing that the pre-save signal update_existing_bundle_authorizations()\n does, except it handles new Bundle-partner creations. We can't do this in\n pre-save because the partner object doesn't exist yet.\n " if (created and (instance.status == Partner.AVAILABLE) and (instance.authorization_method == Partner.BUNDLE)): authorizations_to_update = get_all_bundle_authorizations() if authorizations_to_update: for authorization in authorizations_to_update: authorization.partners.add(instance)
This does the same thing that the pre-save signal update_existing_bundle_authorizations() does, except it handles new Bundle-partner creations. We can't do this in pre-save because the partner object doesn't exist yet.
TWLight/users/signals.py
update_bundle_authorizations_on_bundle_partner_creation
thatandromeda/TWLight
0
python
@receiver(post_save, sender=Partner) def update_bundle_authorizations_on_bundle_partner_creation(sender, instance, created, **kwargs): "\n This does the same thing that the pre-save signal update_existing_bundle_authorizations()\n does, except it handles new Bundle-partner creations. We can't do this in\n pre-save because the partner object doesn't exist yet.\n " if (created and (instance.status == Partner.AVAILABLE) and (instance.authorization_method == Partner.BUNDLE)): authorizations_to_update = get_all_bundle_authorizations() if authorizations_to_update: for authorization in authorizations_to_update: authorization.partners.add(instance)
@receiver(post_save, sender=Partner) def update_bundle_authorizations_on_bundle_partner_creation(sender, instance, created, **kwargs): "\n This does the same thing that the pre-save signal update_existing_bundle_authorizations()\n does, except it handles new Bundle-partner creations. We can't do this in\n pre-save because the partner object doesn't exist yet.\n " if (created and (instance.status == Partner.AVAILABLE) and (instance.authorization_method == Partner.BUNDLE)): authorizations_to_update = get_all_bundle_authorizations() if authorizations_to_update: for authorization in authorizations_to_update: authorization.partners.add(instance)<|docstring|>This does the same thing that the pre-save signal update_existing_bundle_authorizations() does, except it handles new Bundle-partner creations. We can't do this in pre-save because the partner object doesn't exist yet.<|endoftext|>
220aed9c0c78ff35655ad05a3a135601f63aa98a0e3489f0f08449cd144f8ef2
def process_request(self, request, spider): "\n When crawling, start urls are not called using selenium, here we generate a selenium request for those urls\n that are included in the list of start urls and don't have a selenium request\n " if ((not isinstance(request, SeleniumRequest)) and (request.url in spider.start_urls)): return SeleniumRequest(url=request.url, callback=request.callback) return None
When crawling, start urls are not requested using selenium; here we generate a selenium request for those urls that are included in the list of start urls and don't have a selenium request
src/scrapcatalog/middlewares.py
process_request
mrdlp/Ontology-Population-with-Web-Scraping
0
python
def process_request(self, request, spider): "\n When crawling, start urls are not called using selenium, here we generate a selenium request for those urls\n that are included in the list of start urls and don't have a selenium request\n " if ((not isinstance(request, SeleniumRequest)) and (request.url in spider.start_urls)): return SeleniumRequest(url=request.url, callback=request.callback) return None
def process_request(self, request, spider): "\n When crawling, start urls are not called using selenium, here we generate a selenium request for those urls\n that are included in the list of start urls and don't have a selenium request\n " if ((not isinstance(request, SeleniumRequest)) and (request.url in spider.start_urls)): return SeleniumRequest(url=request.url, callback=request.callback) return None<|docstring|>When crawling, start urls are not called using selenium, here we generate a selenium request for those urls that are included in the list of start urls and don't have a selenium request<|endoftext|>
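A hedged spider sketch showing why the middleware above exists: Scrapy schedules start_urls as plain Requests, and the middleware rewraps them as SeleniumRequest. The settings key DOWNLOADER_MIDDLEWARES and scrapy_selenium.SeleniumRequest are real Scrapy/scrapy-selenium names; the middleware class name, priority, and URLs below are assumptions for illustration.

from scrapy import Spider
from scrapy_selenium import SeleniumRequest

class CatalogSpider(Spider):
    name = 'catalog'
    start_urls = ['https://example.com/catalog']   # picked up and rewrapped by the middleware above
    custom_settings = {
        'DOWNLOADER_MIDDLEWARES': {
            # Module path follows src/scrapcatalog/middlewares.py; the class name is a guess.
            'scrapcatalog.middlewares.SeleniumMiddleware': 800,
        },
    }

    def parse(self, response):
        # Pages beyond the start urls ask for Selenium explicitly.
        yield SeleniumRequest(url='https://example.com/catalog/item/1', callback=self.parse_item)

    def parse_item(self, response):
        yield {'url': response.url}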
873b802c13c5743b0a9456782fd2ae24dd7c149cb22ccc59f19988ca7243b065
def process_response(self, request, response, spider): "\n 1 - imports spider's parse functions and parameters\n 2 - in a loop, parses new response from driver and checks against old response values\n 3 - if parsed values don't change during x consecutive seconds, return response\n 4 - save parsed valued in meta dicctionary to avoid double efforts in parse module inside spider\n " if (not isinstance(request, SeleniumRequest)): return response timeout = (time.time() + spider.wait_time) max_timeout = (time.time() + spider.max_wait_time) structured_data = extruct.extract(response.text, uniform=True, syntaxes=spider.wait_sintaxes) name = spider.get_name(response) category = spider.get_category(response, spider.category_locator) properties = spider.get_specifications(response, spider.properties_locator, spider.positions) while True: body = str.encode(response.request.meta['driver'].page_source) new_response = HtmlResponse(response.url, body=body, encoding='utf-8', request=request) structured_data2 = extruct.extract(body, uniform=True, syntaxes=spider.wait_sintaxes) name2 = spider.get_name(new_response) category2 = spider.get_category(new_response, spider.category_locator) properties2 = spider.get_specifications(new_response, spider.properties_locator, spider.positions) if ((structured_data != structured_data2) or (name != name2) or (category != category2) or (properties != properties2)): timeout = (time.time() + spider.wait_time) structured_data = structured_data2 name = name2 category = category2 properties = properties2 elif (time.time() > timeout): break if (time.time() > max_timeout): print('max timeout reached!') break time.sleep(1) request.meta['Name'] = name request.meta['Category'] = category request.meta['Properties'] = properties request.meta['StructuredData'] = extruct.extract(body, uniform=True) return new_response
1 - imports the spider's parse functions and parameters 2 - in a loop, parses the new response from the driver and checks it against the previous response values 3 - if the parsed values don't change for x consecutive seconds, returns the response 4 - saves the parsed values in the meta dictionary to avoid duplicating the parsing work inside the spider
src/scrapcatalog/middlewares.py
process_response
mrdlp/Ontology-Population-with-Web-Scraping
0
python
def process_response(self, request, response, spider): "\n 1 - imports spider's parse functions and parameters\n 2 - in a loop, parses new response from driver and checks against old response values\n 3 - if parsed values don't change during x consecutive seconds, return response\n 4 - save parsed valued in meta dicctionary to avoid double efforts in parse module inside spider\n " if (not isinstance(request, SeleniumRequest)): return response timeout = (time.time() + spider.wait_time) max_timeout = (time.time() + spider.max_wait_time) structured_data = extruct.extract(response.text, uniform=True, syntaxes=spider.wait_sintaxes) name = spider.get_name(response) category = spider.get_category(response, spider.category_locator) properties = spider.get_specifications(response, spider.properties_locator, spider.positions) while True: body = str.encode(response.request.meta['driver'].page_source) new_response = HtmlResponse(response.url, body=body, encoding='utf-8', request=request) structured_data2 = extruct.extract(body, uniform=True, syntaxes=spider.wait_sintaxes) name2 = spider.get_name(new_response) category2 = spider.get_category(new_response, spider.category_locator) properties2 = spider.get_specifications(new_response, spider.properties_locator, spider.positions) if ((structured_data != structured_data2) or (name != name2) or (category != category2) or (properties != properties2)): timeout = (time.time() + spider.wait_time) structured_data = structured_data2 name = name2 category = category2 properties = properties2 elif (time.time() > timeout): break if (time.time() > max_timeout): print('max timeout reached!') break time.sleep(1) request.meta['Name'] = name request.meta['Category'] = category request.meta['Properties'] = properties request.meta['StructuredData'] = extruct.extract(body, uniform=True) return new_response
def process_response(self, request, response, spider): "\n 1 - imports spider's parse functions and parameters\n 2 - in a loop, parses new response from driver and checks against old response values\n 3 - if parsed values don't change during x consecutive seconds, return response\n 4 - save parsed valued in meta dicctionary to avoid double efforts in parse module inside spider\n " if (not isinstance(request, SeleniumRequest)): return response timeout = (time.time() + spider.wait_time) max_timeout = (time.time() + spider.max_wait_time) structured_data = extruct.extract(response.text, uniform=True, syntaxes=spider.wait_sintaxes) name = spider.get_name(response) category = spider.get_category(response, spider.category_locator) properties = spider.get_specifications(response, spider.properties_locator, spider.positions) while True: body = str.encode(response.request.meta['driver'].page_source) new_response = HtmlResponse(response.url, body=body, encoding='utf-8', request=request) structured_data2 = extruct.extract(body, uniform=True, syntaxes=spider.wait_sintaxes) name2 = spider.get_name(new_response) category2 = spider.get_category(new_response, spider.category_locator) properties2 = spider.get_specifications(new_response, spider.properties_locator, spider.positions) if ((structured_data != structured_data2) or (name != name2) or (category != category2) or (properties != properties2)): timeout = (time.time() + spider.wait_time) structured_data = structured_data2 name = name2 category = category2 properties = properties2 elif (time.time() > timeout): break if (time.time() > max_timeout): print('max timeout reached!') break time.sleep(1) request.meta['Name'] = name request.meta['Category'] = category request.meta['Properties'] = properties request.meta['StructuredData'] = extruct.extract(body, uniform=True) return new_response<|docstring|>1 - imports spider's parse functions and parameters 2 - in a loop, parses new response from driver and checks against old response values 3 - if parsed values don't change during x consecutive seconds, return response 4 - save parsed valued in meta dicctionary to avoid double efforts in parse module inside spider<|endoftext|>
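A minimal sketch of how these two middleware hooks are meant to be used, assuming a scrapy-selenium setup: the spider must expose the attributes and helper methods the middleware reads (wait_time, max_wait_time, wait_sintaxes, the locators, positions, and the get_* parsers), and the middleware must be registered in the project settings. The spider name, URLs, locator values, middleware class name, and priority below are illustrative assumptions, not taken from the original project.

import scrapy

class CatalogSpider(scrapy.Spider):
    name = "catalog"                                   # assumed spider name
    start_urls = ["https://example.com/product/1"]     # placeholder URL

    # Attributes read by process_response while waiting for the page to settle.
    wait_time = 5                                      # seconds without changes before returning
    max_wait_time = 30                                 # hard upper bound on the wait
    wait_sintaxes = ["json-ld", "microdata"]           # syntaxes passed to extruct.extract
    category_locator = "//nav[@class='breadcrumb']//a/text()"   # assumed XPath
    properties_locator = "//table[@id='specs']//tr"              # assumed XPath
    positions = (0, 1)                                 # assumed key/value column positions

    # Parsers the middleware calls on every page-source snapshot; stand-ins only.
    def get_name(self, response):
        return response.xpath("//h1/text()").get()

    def get_category(self, response, locator):
        return response.xpath(locator).get()

    def get_specifications(self, response, locator, positions):
        return response.xpath(locator).getall()

    def parse(self, response):
        # Values already parsed by the middleware arrive through response.meta.
        yield {
            "name": response.meta.get("Name"),
            "category": response.meta.get("Category"),
            "properties": response.meta.get("Properties"),
        }

# settings.py (class name and priority are assumptions):
# DOWNLOADER_MIDDLEWARES = {"scrapcatalog.middlewares.SeleniumWaitMiddleware": 800}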
346bf98eb42df209db1b9fa8fc284542410bedbb843918cf0447c85ec7507512
def logo(config: BotConfigBundle) -> Path: '\n Returns the path to the given bot or None if it does not exists.\n ' return config.get_logo_file()
Returns the path to the given bot's logo, or None if it does not exist.
autoleague/bots.py
logo
lucas-emery/AutoLeague2
0
python
def logo(config: BotConfigBundle) -> Path: '\n \n ' return config.get_logo_file()
def logo(config: BotConfigBundle) -> Path: '\n \n ' return config.get_logo_file()<|docstring|>Returns the path to the given bot or None if it does not exists.<|endoftext|>
a0813d90115ba453984ddb1fbc6e6d9c7ce7a20bf3b4ca679df57c24ec88d352
def print_details(config: BotConfigBundle): '\n Print all details about a bot\n ' name = config.name developer = config.base_agent_config.get('Details', 'developer') description = config.base_agent_config.get('Details', 'description') fun_fact = config.base_agent_config.get('Details', 'fun_fact') github = config.base_agent_config.get('Details', 'github') language = config.base_agent_config.get('Details', 'language') config_path = str(config.config_path) logo_path = (str(logo(config)) if logo(config) else None) print(f'Bot name: {name}') print(f'Developer: {developer}') print(f'Description: {description}') print(f'Fun fact: {fun_fact}') print(f'Github: {github}') print(f'Language: {language}') print(f'Config path: {config_path}') print(f'Logo path: {logo_path}')
Print all details about a bot
autoleague/bots.py
print_details
lucas-emery/AutoLeague2
0
python
def print_details(config: BotConfigBundle): '\n \n ' name = config.name developer = config.base_agent_config.get('Details', 'developer') description = config.base_agent_config.get('Details', 'description') fun_fact = config.base_agent_config.get('Details', 'fun_fact') github = config.base_agent_config.get('Details', 'github') language = config.base_agent_config.get('Details', 'language') config_path = str(config.config_path) logo_path = (str(logo(config)) if logo(config) else None) print(f'Bot name: {name}') print(f'Developer: {developer}') print(f'Description: {description}') print(f'Fun fact: {fun_fact}') print(f'Github: {github}') print(f'Language: {language}') print(f'Config path: {config_path}') print(f'Logo path: {logo_path}')
def print_details(config: BotConfigBundle): '\n \n ' name = config.name developer = config.base_agent_config.get('Details', 'developer') description = config.base_agent_config.get('Details', 'description') fun_fact = config.base_agent_config.get('Details', 'fun_fact') github = config.base_agent_config.get('Details', 'github') language = config.base_agent_config.get('Details', 'language') config_path = str(config.config_path) logo_path = (str(logo(config)) if logo(config) else None) print(f'Bot name: {name}') print(f'Developer: {developer}') print(f'Description: {description}') print(f'Fun fact: {fun_fact}') print(f'Github: {github}') print(f'Language: {language}') print(f'Config path: {config_path}') print(f'Logo path: {logo_path}')<|docstring|>Print all details about a bot<|endoftext|>
08d27ddba1b17ccfb42a7c18caa331d16205cc86e9a2e16c920c08c0c5e04ff5
def unzip_all_bots(ld: LeagueDir): '\n Unzip all zip files in the bot directory\n ' for (root, dirs, files) in os.walk(ld.bots, topdown=True): dirs[:] = [d for d in dirs] for file in files: if ('.zip' in file): path = os.path.join(root, file) with ZipFile(path, 'r') as zipObj: print(f'Extracting {path}') folder_name = os.path.splitext(os.path.basename(path))[0] target_dir = os.path.join(root, folder_name) zipObj.extractall(path=target_dir)
Unzip all zip files in the bot directory
autoleague/bots.py
unzip_all_bots
lucas-emery/AutoLeague2
0
python
def unzip_all_bots(ld: LeagueDir): '\n \n ' for (root, dirs, files) in os.walk(ld.bots, topdown=True): dirs[:] = [d for d in dirs] for file in files: if ('.zip' in file): path = os.path.join(root, file) with ZipFile(path, 'r') as zipObj: print(f'Extracting {path}') folder_name = os.path.splitext(os.path.basename(path))[0] target_dir = os.path.join(root, folder_name) zipObj.extractall(path=target_dir)
def unzip_all_bots(ld: LeagueDir): '\n \n ' for (root, dirs, files) in os.walk(ld.bots, topdown=True): dirs[:] = [d for d in dirs] for file in files: if ('.zip' in file): path = os.path.join(root, file) with ZipFile(path, 'r') as zipObj: print(f'Extracting {path}') folder_name = os.path.splitext(os.path.basename(path))[0] target_dir = os.path.join(root, folder_name) zipObj.extractall(path=target_dir)<|docstring|>Unzip all zip files in the bot directory<|endoftext|>
c828941a8786b51024d896c116a6876bfca3c9fae705eb98bcf0db22d759ec6e
def load_retired_bots(ld: LeagueDir) -> Set[BotID]: '\n Loads the list of retired bots\n ' if (not ld.retirement.exists()): return set() with open(ld.retirement, 'r') as retirement_file: return set(json.load(retirement_file))
Loads the list of retired bots
autoleague/bots.py
load_retired_bots
lucas-emery/AutoLeague2
0
python
def load_retired_bots(ld: LeagueDir) -> Set[BotID]: '\n \n ' if (not ld.retirement.exists()): return set() with open(ld.retirement, 'r') as retirement_file: return set(json.load(retirement_file))
def load_retired_bots(ld: LeagueDir) -> Set[BotID]: '\n \n ' if (not ld.retirement.exists()): return set() with open(ld.retirement, 'r') as retirement_file: return set(json.load(retirement_file))<|docstring|>Loads the list of retired bots<|endoftext|>
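A hedged sketch of how the helpers above fit together. How BotConfigBundle objects and the LeagueDir are obtained is not shown in this file, so `configs` and `ld` are assumed inputs, and the comparison against the retirement set assumes a BotID is simply the bot's name.

def show_league(ld, configs):
    # `ld` is a LeagueDir and `configs` an iterable of BotConfigBundle objects,
    # both assumed to be produced elsewhere (e.g. by the RLBot config parsing).
    unzip_all_bots(ld)                 # make zipped submissions discoverable
    retired = load_retired_bots(ld)
    for config in configs:
        if config.name in retired:     # BotID format is an assumption here
            continue
        print_details(config)
        print(f"Has logo: {logo(config) is not None}")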
d08af081c7652879af1d7fc445808f0cf089d448948a7b5f1ce1c956b949e45b
def __init__(self, qubit_number: int, cache_matrix: bool=True) -> None: 'Initialise the :py:class:`~.QuantumCircuit` instance.\n\n For documentation about the :py:class:`~.QuantumCircuit` internals see\n the :py:mod:`.quantum_circuit.quantum_circuit` documentation.\n\n :param qubit_number: The number of qubits the instance will acts on.\n :param cache_matrix: A boolean flag indicating if the instance should\n keep in memory the current value of its representing matrix or if it\n should recompute this matrix at each call to\n :py:attr:`.QuantumCircuit.matrix`.\n ' assert (qubit_number > 0), 'A circuit with less than 1 qubit cannot be created.' self._qubit_number = qubit_number self._graph = nx.MultiDiGraph() self._node_counter = 0 for qubit_id in range(qubit_number): self._graph.add_node(self._node_counter, type='input', key=qubit_id) self._node_counter += 1 self._last_inserted_operations = numpy.arange(qubit_number) self._cache_matrix = cache_matrix self._matrix = None if self._cache_matrix: self._matrix = numpy.identity((2 ** self._qubit_number))
Initialise the :py:class:`~.QuantumCircuit` instance. For documentation about the :py:class:`~.QuantumCircuit` internals, see the :py:mod:`.quantum_circuit.quantum_circuit` documentation. :param qubit_number: The number of qubits the instance will act on. :param cache_matrix: A boolean flag indicating whether the instance should keep the current value of its representing matrix in memory or recompute this matrix at each call to :py:attr:`.QuantumCircuit.matrix`.
qtoolkit/data_structures/quantum_circuit/quantum_circuit.py
__init__
nelimee/qtoolkit
3
python
def __init__(self, qubit_number: int, cache_matrix: bool=True) -> None: 'Initialise the :py:class:`~.QuantumCircuit` instance.\n\n For documentation about the :py:class:`~.QuantumCircuit` internals see\n the :py:mod:`.quantum_circuit.quantum_circuit` documentation.\n\n :param qubit_number: The number of qubits the instance will acts on.\n :param cache_matrix: A boolean flag indicating if the instance should\n keep in memory the current value of its representing matrix or if it\n should recompute this matrix at each call to\n :py:attr:`.QuantumCircuit.matrix`.\n ' assert (qubit_number > 0), 'A circuit with less than 1 qubit cannot be created.' self._qubit_number = qubit_number self._graph = nx.MultiDiGraph() self._node_counter = 0 for qubit_id in range(qubit_number): self._graph.add_node(self._node_counter, type='input', key=qubit_id) self._node_counter += 1 self._last_inserted_operations = numpy.arange(qubit_number) self._cache_matrix = cache_matrix self._matrix = None if self._cache_matrix: self._matrix = numpy.identity((2 ** self._qubit_number))
def __init__(self, qubit_number: int, cache_matrix: bool=True) -> None: 'Initialise the :py:class:`~.QuantumCircuit` instance.\n\n For documentation about the :py:class:`~.QuantumCircuit` internals see\n the :py:mod:`.quantum_circuit.quantum_circuit` documentation.\n\n :param qubit_number: The number of qubits the instance will acts on.\n :param cache_matrix: A boolean flag indicating if the instance should\n keep in memory the current value of its representing matrix or if it\n should recompute this matrix at each call to\n :py:attr:`.QuantumCircuit.matrix`.\n ' assert (qubit_number > 0), 'A circuit with less than 1 qubit cannot be created.' self._qubit_number = qubit_number self._graph = nx.MultiDiGraph() self._node_counter = 0 for qubit_id in range(qubit_number): self._graph.add_node(self._node_counter, type='input', key=qubit_id) self._node_counter += 1 self._last_inserted_operations = numpy.arange(qubit_number) self._cache_matrix = cache_matrix self._matrix = None if self._cache_matrix: self._matrix = numpy.identity((2 ** self._qubit_number))<|docstring|>Initialise the :py:class:`~.QuantumCircuit` instance. For documentation about the :py:class:`~.QuantumCircuit` internals see the :py:mod:`.quantum_circuit.quantum_circuit` documentation. :param qubit_number: The number of qubits the instance will acts on. :param cache_matrix: A boolean flag indicating if the instance should keep in memory the current value of its representing matrix or if it should recompute this matrix at each call to :py:attr:`.QuantumCircuit.matrix`.<|endoftext|>
7587fdd9d827f982582068a18f9825a1d4ab4094be892f4a71cef608bd7841bf
def add_operation(self, operation: qop.QuantumOperation) -> None: 'Add an operation to the circuit.\n\n :param operation: The operation to add to the\n :py:class:`~.QuantumCircuit` instance.\n ' self._check_operation(operation) current_node_id = self._node_counter self._graph.add_node(self._node_counter, type='op', op=operation) self._node_counter += 1 self._create_edge(self._last_inserted_operations[operation.target], current_node_id, operation.target) self._last_inserted_operations[operation.target] = current_node_id for ctrl in operation.controls: self._create_edge(self._last_inserted_operations[ctrl], current_node_id, ctrl) self._last_inserted_operations[ctrl] = current_node_id if self._cache_matrix: self._matrix = (self._matrix @ operation.matrix(self._qubit_number))
Add an operation to the circuit. :param operation: The operation to add to the :py:class:`~.QuantumCircuit` instance.
qtoolkit/data_structures/quantum_circuit/quantum_circuit.py
add_operation
nelimee/qtoolkit
3
python
def add_operation(self, operation: qop.QuantumOperation) -> None: 'Add an operation to the circuit.\n\n :param operation: The operation to add to the\n :py:class:`~.QuantumCircuit` instance.\n ' self._check_operation(operation) current_node_id = self._node_counter self._graph.add_node(self._node_counter, type='op', op=operation) self._node_counter += 1 self._create_edge(self._last_inserted_operations[operation.target], current_node_id, operation.target) self._last_inserted_operations[operation.target] = current_node_id for ctrl in operation.controls: self._create_edge(self._last_inserted_operations[ctrl], current_node_id, ctrl) self._last_inserted_operations[ctrl] = current_node_id if self._cache_matrix: self._matrix = (self._matrix @ operation.matrix(self._qubit_number))
def add_operation(self, operation: qop.QuantumOperation) -> None: 'Add an operation to the circuit.\n\n :param operation: The operation to add to the\n :py:class:`~.QuantumCircuit` instance.\n ' self._check_operation(operation) current_node_id = self._node_counter self._graph.add_node(self._node_counter, type='op', op=operation) self._node_counter += 1 self._create_edge(self._last_inserted_operations[operation.target], current_node_id, operation.target) self._last_inserted_operations[operation.target] = current_node_id for ctrl in operation.controls: self._create_edge(self._last_inserted_operations[ctrl], current_node_id, ctrl) self._last_inserted_operations[ctrl] = current_node_id if self._cache_matrix: self._matrix = (self._matrix @ operation.matrix(self._qubit_number))<|docstring|>Add an operation to the circuit. :param operation: The operation to add to the :py:class:`~.QuantumCircuit` instance.<|endoftext|>
df63cb3413422fb3e6662cd0483b45b0d1e9c11940e9fd091497c63abd312d3e
def apply(self, gate: qgate.QuantumGate, target: int, controls: typing.Sequence[int]=()) -> None: 'Apply a quantum operation to the circuit.\n\n :param gate: The quantum gate to apply.\n :param target: The target qubit. The quantum gate will be applied on\n this qubit.\n :param controls: The control qubit(s).\n ' self.add_operation(qop.QuantumOperation(gate, target, controls))
Apply a quantum operation to the circuit. :param gate: The quantum gate to apply. :param target: The target qubit. The quantum gate will be applied on this qubit. :param controls: The control qubit(s).
qtoolkit/data_structures/quantum_circuit/quantum_circuit.py
apply
nelimee/qtoolkit
3
python
def apply(self, gate: qgate.QuantumGate, target: int, controls: typing.Sequence[int]=()) -> None: 'Apply a quantum operation to the circuit.\n\n :param gate: The quantum gate to apply.\n :param target: The target qubit. The quantum gate will be applied on\n this qubit.\n :param controls: The control qubit(s).\n ' self.add_operation(qop.QuantumOperation(gate, target, controls))
def apply(self, gate: qgate.QuantumGate, target: int, controls: typing.Sequence[int]=()) -> None: 'Apply a quantum operation to the circuit.\n\n :param gate: The quantum gate to apply.\n :param target: The target qubit. The quantum gate will be applied on\n this qubit.\n :param controls: The control qubit(s).\n ' self.add_operation(qop.QuantumOperation(gate, target, controls))<|docstring|>Apply a quantum operation to the circuit. :param gate: The quantum gate to apply. :param target: The target qubit. The quantum gate will be applied on this qubit. :param controls: The control qubit(s).<|endoftext|>
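A small usage sketch, under the assumption that `h` and `x` are QuantumGate instances defined elsewhere in qtoolkit; only the QuantumCircuit calls defined in this file (plus the matrix attribute referenced in the constructor's docstring) are used.

circuit = QuantumCircuit(qubit_number=2, cache_matrix=True)

circuit.apply(h, target=0)                  # single-qubit gate on qubit 0
circuit.apply(x, target=1, controls=[0])    # controlled operation, control on qubit 0

print(circuit.last)        # the controlled operation, i.e. the last one inserted
unitary = circuit.matrix   # cached 4x4 matrix, updated on every apply() call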
5c06b02d86994e5f440aae17ff47b1a1e296bc054f2bb81026d47c15ca9692ce
def _check_operation(self, operation: qop.QuantumOperation) -> None: 'Check if the operation is valid. If not, raise an exception.\n\n :param operation: The operation to check for validity.\n :raise IndexError: if the qubits of the operation (target or control(s))\n are not within the range of the current instance.\n :raise RuntimeError: if one of the qubits on the operation (target or\n control(s)) is None or if the target qubit is also listed in the\n control qubit(s).\n ' if ((operation.target is None) or any(((ctrl is None) for ctrl in operation.controls))): raise RuntimeError('At least one of the target or control qubit is None. Generic QuantumOperations are not supported in a QuantumCircuit instance.') if (operation.target in operation.controls): raise RuntimeError('The target qubit cannot be in the list of control qubits.') if ((operation.target >= self._qubit_number) or (operation.target < 0)): raise IndexError(f"The operation's target ({operation.target}) is not valid for the current quantum circuit with {self._qubit_number} qubits.") for ctrl in operation.controls: if ((ctrl >= self._qubit_number) or (ctrl < 0)): raise IndexError('One of the control qubit is not valid for the current quantum circuit.')
Check if the operation is valid. If not, raise an exception. :param operation: The operation to check for validity. :raise IndexError: if the qubits of the operation (target or control(s)) are not within the range of the current instance. :raise RuntimeError: if one of the qubits of the operation (target or control(s)) is None or if the target qubit is also listed in the control qubit(s).
qtoolkit/data_structures/quantum_circuit/quantum_circuit.py
_check_operation
nelimee/qtoolkit
3
python
def _check_operation(self, operation: qop.QuantumOperation) -> None: 'Check if the operation is valid. If not, raise an exception.\n\n :param operation: The operation to check for validity.\n :raise IndexError: if the qubits of the operation (target or control(s))\n are not within the range of the current instance.\n :raise RuntimeError: if one of the qubits on the operation (target or\n control(s)) is None or if the target qubit is also listed in the\n control qubit(s).\n ' if ((operation.target is None) or any(((ctrl is None) for ctrl in operation.controls))): raise RuntimeError('At least one of the target or control qubit is None. Generic QuantumOperations are not supported in a QuantumCircuit instance.') if (operation.target in operation.controls): raise RuntimeError('The target qubit cannot be in the list of control qubits.') if ((operation.target >= self._qubit_number) or (operation.target < 0)): raise IndexError(f"The operation's target ({operation.target}) is not valid for the current quantum circuit with {self._qubit_number} qubits.") for ctrl in operation.controls: if ((ctrl >= self._qubit_number) or (ctrl < 0)): raise IndexError('One of the control qubit is not valid for the current quantum circuit.')
def _check_operation(self, operation: qop.QuantumOperation) -> None: 'Check if the operation is valid. If not, raise an exception.\n\n :param operation: The operation to check for validity.\n :raise IndexError: if the qubits of the operation (target or control(s))\n are not within the range of the current instance.\n :raise RuntimeError: if one of the qubits on the operation (target or\n control(s)) is None or if the target qubit is also listed in the\n control qubit(s).\n ' if ((operation.target is None) or any(((ctrl is None) for ctrl in operation.controls))): raise RuntimeError('At least one of the target or control qubit is None. Generic QuantumOperations are not supported in a QuantumCircuit instance.') if (operation.target in operation.controls): raise RuntimeError('The target qubit cannot be in the list of control qubits.') if ((operation.target >= self._qubit_number) or (operation.target < 0)): raise IndexError(f"The operation's target ({operation.target}) is not valid for the current quantum circuit with {self._qubit_number} qubits.") for ctrl in operation.controls: if ((ctrl >= self._qubit_number) or (ctrl < 0)): raise IndexError('One of the control qubit is not valid for the current quantum circuit.')<|docstring|>Check if the operation is valid. If not, raise an exception. :param operation: The operation to check for validity. :raise IndexError: if the qubits of the operation (target or control(s)) are not within the range of the current instance. :raise RuntimeError: if one of the qubits on the operation (target or control(s)) is None or if the target qubit is also listed in the control qubit(s).<|endoftext|>
0ac4418dc38a5986423a9890d765bf2149ca57c3d5d5e45a5449fe645d4963d4
def pop(self) -> qop.QuantumOperation: 'Deletes the last inserted operation from the instance and returns it.\n\n :return: The last inserted operation.\n ' if (self._node_counter <= self._qubit_number): raise RuntimeError('Attempting to pop a QuantumOperation from an empty QuantumCircuit.') op = self.last for (pred, _, key) in self._graph.in_edges(nbunch=(self._node_counter - 1), keys=True): self._last_inserted_operations[key] = pred self._graph.remove_node((self._node_counter - 1)) self._node_counter -= 1 if self._cache_matrix: self._matrix = (self._matrix @ op.matrix(self._qubit_number).T.conj()) return op
Deletes the last inserted operation from the instance and returns it. :return: The last inserted operation.
qtoolkit/data_structures/quantum_circuit/quantum_circuit.py
pop
nelimee/qtoolkit
3
python
def pop(self) -> qop.QuantumOperation: 'Deletes the last inserted operation from the instance and returns it.\n\n :return: The last inserted operation.\n ' if (self._node_counter <= self._qubit_number): raise RuntimeError('Attempting to pop a QuantumOperation from an empty QuantumCircuit.') op = self.last for (pred, _, key) in self._graph.in_edges(nbunch=(self._node_counter - 1), keys=True): self._last_inserted_operations[key] = pred self._graph.remove_node((self._node_counter - 1)) self._node_counter -= 1 if self._cache_matrix: self._matrix = (self._matrix @ op.matrix(self._qubit_number).T.conj()) return op
def pop(self) -> qop.QuantumOperation: 'Deletes the last inserted operation from the instance and returns it.\n\n :return: The last inserted operation.\n ' if (self._node_counter <= self._qubit_number): raise RuntimeError('Attempting to pop a QuantumOperation from an empty QuantumCircuit.') op = self.last for (pred, _, key) in self._graph.in_edges(nbunch=(self._node_counter - 1), keys=True): self._last_inserted_operations[key] = pred self._graph.remove_node((self._node_counter - 1)) self._node_counter -= 1 if self._cache_matrix: self._matrix = (self._matrix @ op.matrix(self._qubit_number).T.conj()) return op<|docstring|>Deletes the last inserted operation from the instance and returns it. :return: The last inserted operation.<|endoftext|>
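A short sketch of pop() together with the matrix cache, assuming `g` is a QuantumGate instance and that the matrix attribute mentioned in the constructor's docstring exposes the cached matrix.

import numpy

circuit = QuantumCircuit(1)            # cache_matrix defaults to True
before = circuit.matrix.copy()         # identity matrix for an empty circuit
circuit.apply(g, target=0)             # `g` is an assumed QuantumGate instance
removed = circuit.pop()                # removes and returns the operation just added
assert numpy.allclose(circuit.matrix, before)   # cache rolled back via the conjugate transpose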
0c0b0c15973191ab8411576f88519789485805b279f205159aab5f62fd7e7202
def _create_edge(self, from_id: int, to_id: int, qubit_id: int) -> None: 'Create an edge between `from_id` and `to_id`.\n\n :param from_id: Source of the edge.\n :param to_id: Target of the edge.\n :param qubit_id: Identifier of the qubit concerned by the target\n operation.\n ' self._graph.add_edge(from_id, to_id, key=qubit_id)
Create an edge between `from_id` and `to_id`. :param from_id: Source of the edge. :param to_id: Target of the edge. :param qubit_id: Identifier of the qubit concerned by the target operation.
qtoolkit/data_structures/quantum_circuit/quantum_circuit.py
_create_edge
nelimee/qtoolkit
3
python
def _create_edge(self, from_id: int, to_id: int, qubit_id: int) -> None: 'Create an edge between `from_id` and `to_id`.\n\n :param from_id: Source of the edge.\n :param to_id: Target of the edge.\n :param qubit_id: Identifier of the qubit concerned by the target\n operation.\n ' self._graph.add_edge(from_id, to_id, key=qubit_id)
def _create_edge(self, from_id: int, to_id: int, qubit_id: int) -> None: 'Create an edge between `from_id` and `to_id`.\n\n :param from_id: Source of the edge.\n :param to_id: Target of the edge.\n :param qubit_id: Identifier of the qubit concerned by the target\n operation.\n ' self._graph.add_edge(from_id, to_id, key=qubit_id)<|docstring|>Create an edge between `from_id` and `to_id`. :param from_id: Source of the edge. :param to_id: Target of the edge. :param qubit_id: Identifier of the qubit concerned by the target operation.<|endoftext|>
a803dd36da85d948dc31819d84fa8ee90ad1e95600dbdeb8f9a601856c3ee6ec
def get_n_last_operations_on_qubit_reversed(self, n: int, qubit_id: int) -> typing.Iterable[qop.QuantumOperation]: 'Get the `n` last inserted operations involving `qubit_id`.\n\n The returned operations can have the qubit `qubit_id` either as target\n or control qubit.\n\n :param n: Number of quantum operation to retrieve.\n :param qubit_id: Identifier of the qubit we are interested in.\n :return: an iterable over the `n` last quantum operations involving\n `qubit_id` in the reverse order of insertion.\n :raise IndexError: if `qubit_id` is involved in less than `n`\n operations.\n ' try: all_ops_gen = self.get_operations_on_qubit_reversed(qubit_id) for op_id in range(n): (yield next(all_ops_gen)) except StopIteration: raise IndexError(f'Cannot retrieve {n} operations on qubit n°{qubit_id}: only {op_id} operation are available.')
Get the `n` last inserted operations involving `qubit_id`. The returned operations can have the qubit `qubit_id` either as target or control qubit. :param n: Number of quantum operations to retrieve. :param qubit_id: Identifier of the qubit we are interested in. :return: an iterable over the `n` last quantum operations involving `qubit_id` in the reverse order of insertion. :raise IndexError: if `qubit_id` is involved in fewer than `n` operations.
qtoolkit/data_structures/quantum_circuit/quantum_circuit.py
get_n_last_operations_on_qubit_reversed
nelimee/qtoolkit
3
python
def get_n_last_operations_on_qubit_reversed(self, n: int, qubit_id: int) -> typing.Iterable[qop.QuantumOperation]: 'Get the `n` last inserted operations involving `qubit_id`.\n\n The returned operations can have the qubit `qubit_id` either as target\n or control qubit.\n\n :param n: Number of quantum operation to retrieve.\n :param qubit_id: Identifier of the qubit we are interested in.\n :return: an iterable over the `n` last quantum operations involving\n `qubit_id` in the reverse order of insertion.\n :raise IndexError: if `qubit_id` is involved in less than `n`\n operations.\n ' try: all_ops_gen = self.get_operations_on_qubit_reversed(qubit_id) for op_id in range(n): (yield next(all_ops_gen)) except StopIteration: raise IndexError(f'Cannot retrieve {n} operations on qubit n°{qubit_id}: only {op_id} operation are available.')
def get_n_last_operations_on_qubit_reversed(self, n: int, qubit_id: int) -> typing.Iterable[qop.QuantumOperation]: 'Get the `n` last inserted operations involving `qubit_id`.\n\n The returned operations can have the qubit `qubit_id` either as target\n or control qubit.\n\n :param n: Number of quantum operation to retrieve.\n :param qubit_id: Identifier of the qubit we are interested in.\n :return: an iterable over the `n` last quantum operations involving\n `qubit_id` in the reverse order of insertion.\n :raise IndexError: if `qubit_id` is involved in less than `n`\n operations.\n ' try: all_ops_gen = self.get_operations_on_qubit_reversed(qubit_id) for op_id in range(n): (yield next(all_ops_gen)) except StopIteration: raise IndexError(f'Cannot retrieve {n} operations on qubit n°{qubit_id}: only {op_id} operation are available.')<|docstring|>Get the `n` last inserted operations involving `qubit_id`. The returned operations can have the qubit `qubit_id` either as target or control qubit. :param n: Number of quantum operation to retrieve. :param qubit_id: Identifier of the qubit we are interested in. :return: an iterable over the `n` last quantum operations involving `qubit_id` in the reverse order of insertion. :raise IndexError: if `qubit_id` is involved in less than `n` operations.<|endoftext|>
38d190fa9f43b58a6d54c862759b9ec798bb3408e1d9af6e5b537e012590533a
def get_n_last_operations_on_qubit(self, n: int, qubit_id: int) -> typing.Iterable[qop.QuantumOperation]: 'Get the `n` last inserted operations involving `qubit_id`.\n\n The returned operations can have the qubit `qubit_id` either as target\n or control qubit.\n\n :param n: Number of quantum operation to retrieve.\n :param qubit_id: Identifier of the qubit we are interested in.\n :return: an iterable over the `n` last quantum operations involving\n `qubit_id` in the order of insertion.\n :raise IndexError: if `qubit_id` is involved in less than `n`\n operations.\n ' return list(self.get_n_last_operations_on_qubit_reversed(n, qubit_id))[::(- 1)]
Get the `n` last inserted operations involving `qubit_id`. The returned operations can have the qubit `qubit_id` either as target or control qubit. :param n: Number of quantum operations to retrieve. :param qubit_id: Identifier of the qubit we are interested in. :return: an iterable over the `n` last quantum operations involving `qubit_id` in the order of insertion. :raise IndexError: if `qubit_id` is involved in fewer than `n` operations.
qtoolkit/data_structures/quantum_circuit/quantum_circuit.py
get_n_last_operations_on_qubit
nelimee/qtoolkit
3
python
def get_n_last_operations_on_qubit(self, n: int, qubit_id: int) -> typing.Iterable[qop.QuantumOperation]: 'Get the `n` last inserted operations involving `qubit_id`.\n\n The returned operations can have the qubit `qubit_id` either as target\n or control qubit.\n\n :param n: Number of quantum operation to retrieve.\n :param qubit_id: Identifier of the qubit we are interested in.\n :return: an iterable over the `n` last quantum operations involving\n `qubit_id` in the order of insertion.\n :raise IndexError: if `qubit_id` is involved in less than `n`\n operations.\n ' return list(self.get_n_last_operations_on_qubit_reversed(n, qubit_id))[::(- 1)]
def get_n_last_operations_on_qubit(self, n: int, qubit_id: int) -> typing.Iterable[qop.QuantumOperation]: 'Get the `n` last inserted operations involving `qubit_id`.\n\n The returned operations can have the qubit `qubit_id` either as target\n or control qubit.\n\n :param n: Number of quantum operation to retrieve.\n :param qubit_id: Identifier of the qubit we are interested in.\n :return: an iterable over the `n` last quantum operations involving\n `qubit_id` in the order of insertion.\n :raise IndexError: if `qubit_id` is involved in less than `n`\n operations.\n ' return list(self.get_n_last_operations_on_qubit_reversed(n, qubit_id))[::(- 1)]<|docstring|>Get the `n` last inserted operations involving `qubit_id`. The returned operations can have the qubit `qubit_id` either as target or control qubit. :param n: Number of quantum operation to retrieve. :param qubit_id: Identifier of the qubit we are interested in. :return: an iterable over the `n` last quantum operations involving `qubit_id` in the order of insertion. :raise IndexError: if `qubit_id` is involved in less than `n` operations.<|endoftext|>
50e5fb9332496db8b159696334af5f13280b4d86849b9c8845d1c3345bc58c57
def get_operations_on_qubit_reversed(self, qubit_id: int): 'Get all the operations involving `qubit_id`.\n\n The returned operations can have the qubit `qubit_id` either as target\n or control qubit.\n\n :param qubit_id: Identifier of the qubit we are interested in.\n :return: an iterable over all the quantum operations involving\n `qubit_id` in the reverse order of insertion.\n ' current = self._last_inserted_operations[qubit_id] while (current >= self.qubit_number): (yield self._graph.nodes[current]['op']) current = next(filter((lambda node_id: (qubit_id in self._graph.get_edge_data(node_id, current))), self._graph.predecessors(current)))
Get all the operations involving `qubit_id`. The returned operations can have the qubit `qubit_id` either as target or control qubit. :param qubit_id: Identifier of the qubit we are interested in. :return: an iterable over all the quantum operations involving `qubit_id` in the reverse order of insertion.
qtoolkit/data_structures/quantum_circuit/quantum_circuit.py
get_operations_on_qubit_reversed
nelimee/qtoolkit
3
python
def get_operations_on_qubit_reversed(self, qubit_id: int): 'Get all the operations involving `qubit_id`.\n\n The returned operations can have the qubit `qubit_id` either as target\n or control qubit.\n\n :param qubit_id: Identifier of the qubit we are interested in.\n :return: an iterable over all the quantum operations involving\n `qubit_id` in the reverse order of insertion.\n ' current = self._last_inserted_operations[qubit_id] while (current >= self.qubit_number): (yield self._graph.nodes[current]['op']) current = next(filter((lambda node_id: (qubit_id in self._graph.get_edge_data(node_id, current))), self._graph.predecessors(current)))
def get_operations_on_qubit_reversed(self, qubit_id: int): 'Get all the operations involving `qubit_id`.\n\n The returned operations can have the qubit `qubit_id` either as target\n or control qubit.\n\n :param qubit_id: Identifier of the qubit we are interested in.\n :return: an iterable over all the quantum operations involving\n `qubit_id` in the reverse order of insertion.\n ' current = self._last_inserted_operations[qubit_id] while (current >= self.qubit_number): (yield self._graph.nodes[current]['op']) current = next(filter((lambda node_id: (qubit_id in self._graph.get_edge_data(node_id, current))), self._graph.predecessors(current)))<|docstring|>Get all the operations involving `qubit_id`. The returned operations can have the qubit `qubit_id` either as target or control qubit. :param qubit_id: Identifier of the qubit we are interested in. :return: an iterable over all the quantum operations involving `qubit_id` in the reverse order of insertion.<|endoftext|>
c069a802bc8afcd7faf6cc56d241512da9a60eec3c33e9cf446e92c9f156bfcb
def get_operations_on_qubit(self, qubit_id: int): 'Get all the operations involving `qubit_id`.\n\n The returned operations can have the qubit `qubit_id` either as target\n or control qubit.\n\n :param qubit_id: Identifier of the qubit we are interested in.\n :return: an iterable over all the quantum operations involving\n `qubit_id` in the order of insertion.\n ' return list(self.get_operations_on_qubit_reversed(qubit_id))[::(- 1)]
Get all the operations involving `qubit_id`. The returned operations can have the qubit `qubit_id` either as target or control qubit. :param qubit_id: Identifier of the qubit we are interested in. :return: an iterable over all the quantum operations involving `qubit_id` in the order of insertion.
qtoolkit/data_structures/quantum_circuit/quantum_circuit.py
get_operations_on_qubit
nelimee/qtoolkit
3
python
def get_operations_on_qubit(self, qubit_id: int): 'Get all the operations involving `qubit_id`.\n\n The returned operations can have the qubit `qubit_id` either as target\n or control qubit.\n\n :param qubit_id: Identifier of the qubit we are interested in.\n :return: an iterable over all the quantum operations involving\n `qubit_id` in the order of insertion.\n ' return list(self.get_operations_on_qubit_reversed(qubit_id))[::(- 1)]
def get_operations_on_qubit(self, qubit_id: int): 'Get all the operations involving `qubit_id`.\n\n The returned operations can have the qubit `qubit_id` either as target\n or control qubit.\n\n :param qubit_id: Identifier of the qubit we are interested in.\n :return: an iterable over all the quantum operations involving\n `qubit_id` in the order of insertion.\n ' return list(self.get_operations_on_qubit_reversed(qubit_id))[::(- 1)]<|docstring|>Get all the operations involving `qubit_id`. The returned operations can have the qubit `qubit_id` either as target or control qubit. :param qubit_id: Identifier of the qubit we are interested in. :return: an iterable over all the quantum operations involving `qubit_id` in the order of insertion.<|endoftext|>
3312cb47e261d33ba4e7992b3e18d1280d1c0ca7f12d96909c9e771d5c0672b5
def __getitem__(self, idx: int) -> qop.QuantumOperation: 'Method used when []-indexing is used.\n\n :param idx: The position of the operation we want to retrieve.\n :return: The idx-th inserted operation.\n ' return self._graph.nodes[(idx + self._qubit_number)]['op']
Method used when []-indexing is used. :param idx: The position of the operation we want to retrieve. :return: The idx-th inserted operation.
qtoolkit/data_structures/quantum_circuit/quantum_circuit.py
__getitem__
nelimee/qtoolkit
3
python
def __getitem__(self, idx: int) -> qop.QuantumOperation: 'Method used when []-indexing is used.\n\n :param idx: The position of the operation we want to retrieve.\n :return: The idx-th inserted operation.\n ' return self._graph.nodes[(idx + self._qubit_number)]['op']
def __getitem__(self, idx: int) -> qop.QuantumOperation: 'Method used when []-indexing is used.\n\n :param idx: The position of the operation we want to retrieve.\n :return: The idx-th inserted operation.\n ' return self._graph.nodes[(idx + self._qubit_number)]['op']<|docstring|>Method used when []-indexing is used. :param idx: The position of the operation we want to retrieve. :return: The idx-th inserted operation.<|endoftext|>
b54c130edf5ea3c49df67cbc0f873106b9eac700ddd732a248404a7fe0ebe522
@property def last(self) -> qop.QuantumOperation: 'Getter for the last inserted operation.\n\n :return: the last inserted operation.\n :raise IndexError: if the circuit is empty.\n ' if (self._node_counter == self._qubit_number): raise IndexError('Trying to recover the last operation of an empty QuantumCircuit.') return self._graph.nodes[(self._node_counter - 1)]['op']
Getter for the last inserted operation. :return: the last inserted operation. :raise IndexError: if the circuit is empty.
qtoolkit/data_structures/quantum_circuit/quantum_circuit.py
last
nelimee/qtoolkit
3
python
@property def last(self) -> qop.QuantumOperation: 'Getter for the last inserted operation.\n\n :return: the last inserted operation.\n :raise IndexError: if the circuit is empty.\n ' if (self._node_counter == self._qubit_number): raise IndexError('Trying to recover the last operation of an empty QuantumCircuit.') return self._graph.nodes[(self._node_counter - 1)]['op']
@property def last(self) -> qop.QuantumOperation: 'Getter for the last inserted operation.\n\n :return: the last inserted operation.\n :raise IndexError: if the circuit is empty.\n ' if (self._node_counter == self._qubit_number): raise IndexError('Trying to recover the last operation of an empty QuantumCircuit.') return self._graph.nodes[(self._node_counter - 1)]['op']<|docstring|>Getter for the last inserted operation. :return: the last inserted operation. :raise IndexError: if the circuit is empty.<|endoftext|>
fb45c528e22a572b226d7f0b7583015910fa98ca77b9b15d932be4b3f1bcb3b1
@property def operations(self) -> typing.Iterable[qop.QuantumOperation]: 'Getter on the operations performed in this quantum circuit.\n\n :return: a generator that generates all the operations of the circuit.\n ' return (self._graph.nodes[i]['op'] for i in range(self._qubit_number, self._node_counter))
Getter for the operations performed in this quantum circuit. :return: a generator that yields all the operations of the circuit.
qtoolkit/data_structures/quantum_circuit/quantum_circuit.py
operations
nelimee/qtoolkit
3
python
@property def operations(self) -> typing.Iterable[qop.QuantumOperation]: 'Getter on the operations performed in this quantum circuit.\n\n :return: a generator that generates all the operations of the circuit.\n ' return (self._graph.nodes[i]['op'] for i in range(self._qubit_number, self._node_counter))
@property def operations(self) -> typing.Iterable[qop.QuantumOperation]: 'Getter on the operations performed in this quantum circuit.\n\n :return: a generator that generates all the operations of the circuit.\n ' return (self._graph.nodes[i]['op'] for i in range(self._qubit_number, self._node_counter))<|docstring|>Getter on the operations performed in this quantum circuit. :return: a generator that generates all the operations of the circuit.<|endoftext|>
e9f4815821c05df828e62d41c5749efcec9e4b254d3ed6a50548111f8cb67fe1
def gates_on_qubit(self, qubit_index: int) -> typing.Iterable[qop.QuantumOperation]: 'Getter for the gates applied on the qubit at the given index.\n\n :param qubit_index: the qubit we are interested in.\n :return: a generator yielding all the quantum gates in the circuit\n that involve the specified qubit.\n ' return (op.gate for op in self.get_operations_on_qubit(qubit_index))
Getter for the gates applied on the qubit at the given index. :param qubit_index: the qubit we are interested in. :return: a generator yielding all the quantum gates in the circuit that involve the specified qubit.
qtoolkit/data_structures/quantum_circuit/quantum_circuit.py
gates_on_qubit
nelimee/qtoolkit
3
python
def gates_on_qubit(self, qubit_index: int) -> typing.Iterable[qop.QuantumOperation]: 'Getter for the gates applied on the qubit at the given index.\n\n :param qubit_index: the qubit we are interested in.\n :return: a generator yielding all the quantum gates in the circuit\n that involve the specified qubit.\n ' return (op.gate for op in self.get_operations_on_qubit(qubit_index))
def gates_on_qubit(self, qubit_index: int) -> typing.Iterable[qop.QuantumOperation]: 'Getter for the gates applied on the qubit at the given index.\n\n :param qubit_index: the qubit we are interested in.\n :return: a generator yielding all the quantum gates in the circuit\n that involve the specified qubit.\n ' return (op.gate for op in self.get_operations_on_qubit(qubit_index))<|docstring|>Getter for the gates applied on the qubit at the given index. :param qubit_index: the qubit we are interested in. :return: a generator yielding all the quantum gates in the circuit that involve the specified qubit.<|endoftext|>
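Finally, a sketch of the query side of the API, again assuming `h` and `x` are QuantumGate instances defined elsewhere in qtoolkit; every call below is defined in this file.

circuit = QuantumCircuit(2)
circuit.apply(h, target=0)
circuit.apply(x, target=1, controls=[0])

list(circuit.operations)                      # both operations, in insertion order
list(circuit.get_operations_on_qubit(0))      # both: qubit 0 is target, then control
list(circuit.get_operations_on_qubit(1))      # only the controlled operation
circuit.get_n_last_operations_on_qubit(1, 0)  # the most recent operation on qubit 0
list(circuit.gates_on_qubit(0))               # just the gates, without targets/controls
circuit[0]                                    # []-indexing: the first inserted operation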