ext: stringclasses (9 values)
sha: stringlengths (40 to 40)
content: stringlengths (3 to 1.04M)
py
1a4a6eddf05555bc16718929ae110f85df55cca2
""" 模板方法模式 结构中只存在父类与子类之间的继承关系:先定义一个类模板,在模板类中定义各种操作的顺序(骨架),但并不实现这些操作,这些操作由子类来操作; 将一些复杂流程的实现步骤封装在一系列基本方法中,在抽象父类中提供模板方法来定义这些基本方法的执行次序框架; 通过其子类来覆盖某些具体的执行步骤,从而使得相同的算法框架可以有不同的执行结果; 在本例中模板为钓鱼方法,决定了抽象方法的调用顺序,这在父类中实现、由子类实例调用; 抽象方法为钓鱼方法的每个步骤(准备鱼饵、选择出行方式、选择钓鱼地点),这在子类中实现,由钓鱼方法内部调用。 """ import abc """ 钓鱼模板基类 """ class Fishing(object, metaclass=abc.ABCMeta): def finishing(self): """ 具体方法:确定了要依次执行哪些操作才能钓鱼 :return: """ self.prepare_bait() self.go_to_riverbank() self.find_location() print("start fishing") @abc.abstractmethod def prepare_bait(self): """ 准备鱼饵 :return: """ pass @abc.abstractmethod def go_to_riverbank(self): """ 选择出行方式 :return: """ pass @abc.abstractmethod def find_location(self): """ 选择钓鱼地点 :return: """ pass """ John 钓鱼步骤 """ class JohnFishing(Fishing): def prepare_bait(self): print("John: buy bait from Taobao") def go_to_riverbank(self): """ 从淘宝购买鱼饵 """ print("John: to river by driving") def find_location(self): """ 在岛上选择钓点 """ print("John: select location on the island") """ Simon 钓鱼步骤 """ class SimonFishing(Fishing): def prepare_bait(self): """ 从京东购买鱼饵 """ print("Simon: buy bait from JD") def go_to_riverbank(self): """ 骑自行车去钓鱼 """ print("Simon: to river by biking") def find_location(self): """ 在河边选择钓点 """ print("Simon: select location on the riverbank") if __name__ == '__main__': f = JohnFishing() # John 去钓鱼 f.finishing() f = SimonFishing() # Simon 去钓鱼 f.finishing()
py
1a4a6f616c982e59154767fe96e8ef78396613be
import abc
from time import time
from urllib import parse

from ... import gvars
from ...utils import open_connection


class HTTPResponse:
    def __init__(self, client):
        self.client = client
        self.done = False
        self.header_size = 0
        self.body_size = 0
        self.speed = 0
        self.start = time()

    @property
    def size(self):
        return self.header_size + self.body_size

    def on_header(self, name: bytes, value: bytes):
        self.header_size += len(name) + len(value)

    def on_message_complete(self):
        self.done = True
        seconds = time() - self.start
        self.speed = int(self.size / 1024 / seconds)  # KB/s

    def on_body(self, body: bytes):
        self.body_size += len(body)


class ClientBase(abc.ABC):
    sock = None
    target_addr = ("unknown", -1)

    def __init__(self, namespace):
        self.ns = namespace

    def __repr__(self):
        return f"{self.__class__.__name__}({self})"

    def __str__(self):
        return f"{self.bind_address} -- {self.target_address}"

    async def __aenter__(self):
        return self

    async def __aexit__(self, et, e, tb):
        await self.close()

    async def close(self):
        if self.sock:
            await self.sock.close()
            self.sock = None

    @property
    @abc.abstractmethod
    def proto(self):
        ""

    @property
    def bind_address(self) -> str:
        return f"{self.ns.bind_addr[0]}:{self.ns.bind_addr[1]}"

    @property
    def target_address(self) -> str:
        return f"{self.target_addr[0]}:{self.target_addr[1]}"

    async def connect(self, target_addr, source_addr=None):
        self.target_addr = target_addr
        if self.sock:
            return
        self.sock = await open_connection(*self.ns.bind_addr, source_addr=source_addr)

    @abc.abstractmethod
    async def init(self):
        ""

    async def recv(self, size):
        return await self.sock.recv(size)

    async def sendall(self, data):
        return await self.sock.sendall(data)

    async def http_request(
        self, uri: str, method: str = "GET", headers: list = None, response_cls=None
    ):
        import httptools

        response_cls = response_cls or HTTPResponse
        url = parse.urlparse(uri)
        host, _, port = url.netloc.partition(":")
        try:
            port = int(port)
        except ValueError:
            if url.scheme == "http":
                port = 80
            elif url.scheme == "https":
                port = 443
            else:
                raise Exception(f"unknown scheme: {url.scheme}")
        target_addr = (host, port)
        await self.connect(target_addr)
        await self.init()
        header_list = [f"Host: {self.target_address}".encode()]
        if headers:
            for header in headers:
                if isinstance(header, str):
                    header = header.encode()
                header_list.append(header)
        ver = b"HTTP/1.1"
        method = method.upper().encode()
        url = url.geturl().encode()
        data = b"%b %b %b\r\n%b\r\n\r\n" % (method, url, ver, b"\r\n".join(header_list))
        await self.sendall(data)
        response = response_cls(self)
        parser = httptools.HttpResponseParser(response)
        while not response.done:
            data = await self.recv(gvars.PACKET_SIZE)
            if not data:
                raise Exception("Incomplete response")
            parser.feed_data(data)
        return response
py
1a4a70620ae1908624f5391e48f21ca43e9d38d9
""" Django settings for mysite project. Generated by 'django-admin startproject' using Django 1.11.6. For more information on this file, see https://docs.djangoproject.com/en/1.11/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.11/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '-xzp&e14hnrs4&p+=x#46nyv3w)(5&0@pj&b+l%_0+dx-e(z!b' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = '*' # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'polls', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'mysite.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(BASE_DIR, 'templates')] , 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'mysite.wsgi.application' # Database # https://docs.djangoproject.com/en/1.11/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.mysql', 'NAME': 'dj1902', 'USER': 'root', 'PASSWORD': 'tedu.cn', 'HOST': '127.0.0.1', 'PORT': '3306', } } # Password validation # https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.11/topics/i18n/ LANGUAGE_CODE = 'zh-hans' TIME_ZONE = 'Asia/Shanghai' USE_I18N = True USE_L10N = True USE_TZ = False # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.11/howto/static-files/ STATIC_URL = '/static/'
py
1a4a73850560f8cb75e5d6f2eb9957774fba703e
# accounts/admin.py
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin

from .forms import CustomUserCreationForm, CustomUserChangeForm
from .models import CustomUser


class CustomUserAdmin(UserAdmin):
    add_form = CustomUserCreationForm
    form = CustomUserChangeForm
    model = CustomUser
    list_display = ['email', 'username', ]


admin.site.register(CustomUser, CustomUserAdmin)
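The two imported forms are defined elsewhere in the app and are not part of this record; below is a minimal sketch of what accounts/forms.py could look like for a custom user model (the field list is an assumption):

# accounts/forms.py -- hypothetical companion module, for illustration only
from django.contrib.auth.forms import UserCreationForm, UserChangeForm

from .models import CustomUser


class CustomUserCreationForm(UserCreationForm):
    class Meta(UserCreationForm.Meta):
        model = CustomUser
        fields = ('username', 'email')  # assumed field list


class CustomUserChangeForm(UserChangeForm):
    class Meta:
        model = CustomUser
        fields = ('username', 'email')  # assumed field list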
py
1a4a73cf9bc5d4062537e9791b62e71b8ee586c5
from django.conf import settings
from django.conf.urls import handler404, handler500
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import include, path

handler404 = 'foodgram_project.views.page_not_found'  # noqa
handler500 = 'foodgram_project.views.server_error'  # noqa

urlpatterns = [
    path('auth/', include('users.urls')),
    path('auth/', include('django.contrib.auth.urls')),
    path('admin/', admin.site.urls),
    path('about/', include('about.urls')),
    path('', include('foodgram.urls')),
]

if settings.DEBUG:
    urlpatterns += static(settings.MEDIA_URL,
                          document_root=settings.MEDIA_ROOT)
    urlpatterns += static(settings.STATIC_URL,
                          document_root=settings.STATIC_ROOT)
py
1a4a744a708634e5396f821acda9831fd73ef509
import sqlite3 as sql

import queries as qrs
import pandas as pd


# assignment 1
def connect_db(db='../rpg_db.sqlite3'):
    return sql.connect(db)


def exec(conn, query):
    curs = conn.cursor()
    curs.execute(query)
    res = curs.fetchall()
    return res


# assignment 2
df = pd.DataFrame(pd.read_csv('../buddymove_holidayiq.csv'))
print(df.shape)
print(df.isnull().sum())  # number of missing values per column

conn = sql.connect('../buddymove_holidayiq.sqlite3')
# df.to_sql('review', conn)

# how many rows
row_count = 'SELECT COUNT(*) FROM review'
# how many users reviewed at least 100 'Nature' and at least 100 'Shopping'
nature_and_shopping = ('SELECT COUNT(*) FROM review '
                       'WHERE Nature >= 100 AND Shopping >= 100')

print(exec(conn, row_count))
print(exec(conn, nature_and_shopping))
py
1a4a75f515ffb383e9921cf1e04d309e4c57a77e
#!/usr/bin/python
# -*- coding: utf-8 -*-

"""
PyCOMPSs Testbench
========================
"""

# Imports
import unittest

from modules.testMpiDecorator import testMpiDecorator


def main():
    suite = unittest.TestLoader().loadTestsFromTestCase(testMpiDecorator)
    unittest.TextTestRunner(verbosity=2).run(suite)


if __name__ == "__main__":
    main()
py
1a4a75ffb0e305aec3b048fbea411d7419b1328f
#!/usr/bin/env python
# $Id$

"""14,211,672 solutions"""

import puzzler
from puzzler.puzzles.polyhexes1234 import Polyhexes1234IrregularHexagon2

puzzler.run(Polyhexes1234IrregularHexagon2)
py
1a4a760f08cd8ec329e33234af0b0bb822860548
#!/usr/bin/python
#
# Converts KML files to BigQuery WKT geography objects (CSV)

from __future__ import print_function

import sys
import re
import xml.etree.ElementTree

e = xml.etree.ElementTree.parse(sys.argv[1]).getroot()

# The root tag carries the XML namespace in braces, e.g. '{http://...}kml'.
p = re.compile(r'^\{(.+?)\}')
matches = p.match(e.tag)
xmlns = matches.group(1)

document = e[0]

for pmark in document.findall('.//xmlns:Placemark', {'xmlns': xmlns}):
    name = pmark.find('./xmlns:name', {'xmlns': xmlns})
    print('"%s",' % name.text, end='')
    for coord in pmark.findall('.//xmlns:coordinates', {'xmlns': xmlns}):
        coordmatches = re.findall(r'(\d+\.\d+,\d+\.\d+),0[ \n]', coord.text)
        print('"POLYGON((', end='')
        i = 0
        for m in coordmatches:
            if i > 0:
                print(',', end='')
            print(m.replace(',', ' '), end='')
            i += 1
        print('))"')
py
1a4a78245bf0bb553ca948827d8deb57a40a6f41
#!/usr/bin/python3.7
# author: kang-newbie
# github: https://github.com/kang-newbie
# contact: https://t.me/kang_nuubi

import os
import sys
import time

try:
    os.mkdir('audio')
except OSError:
    pass


def os_detek():
    if os.name in ['nt', 'win32']:
        os.system('cls')
    else:
        os.system('clear')


banner = """
  ;;;;;;;;;;;;;;;;;
  ;     KARIN     ;  Author: KANG-NEWBIE
  ; SPEECH - TEXT ;  Contact: t.me/kang_nuubi
  ;;;;;;;;;;;;;;;;;
"""


def sarg():
    exit("""
Usage: python %s --lang (language)

Example: python %s --lang id
     or: python %s --lang en""" % (sys.argv[0], sys.argv[0], sys.argv[0]))


if __name__ == '__main__':
    os_detek()
    print(banner)
    a = sys.version.split('.')
    if a[0] != '3':
        exit('use python version 3.x.x')
    if len(sys.argv) != 3:
        sarg()
    if 'id' in sys.argv[2]:
        os.system('python3 src/karin_id.py')
    elif 'en' in sys.argv[2]:
        os.system('python3 src/karin_en.py')
    else:
        sarg()
py
1a4a79766214bce2c54862f0ba91b60a1d34cf35
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import ee

from landdegradation.util import TEImage
from landdegradation.schemas.schemas import BandInfo


def download(asset, name, temporal_resolution, start_year, end_year,
             EXECUTION_ID, logger):
    """
    Download dataset from GEE assets.
    """
    logger.debug("Entering download function.")

    in_img = ee.Image(asset)

    # if temporal_resolution != "one time":
    #     assert (start_year and end_year), "start year or end year not defined"
    #     out = in_img.select('y{}'.format(start_year))
    #     band_info = [BandInfo(name, add_to_map=True, metadata={'year': start_year})]
    #     for y in range(start_year + 1, end_year + 1):
    #         out.addBands(in_img.select('y{}'.format(start_year)))
    #         band_info.append(BandInfo(name, metadata={'year': start_year}))
    # else:
    #     out = in_img
    #     band_info = [BandInfo(name, add_to_map=True)]

    out = in_img
    band_info = [BandInfo(name, add_to_map=True,
                          metadata=in_img.getInfo()['properties'])]
    n_bands = len(in_img.getInfo()['bands'])
    if n_bands > 1:
        band_info.extend([BandInfo(name, add_to_map=False,
                                   metadata=in_img.getInfo()['properties'])]
                         * (n_bands - 1))

    return TEImage(out, band_info)
py
1a4a7a00570a030611dd92019bfae780561d007b
# model settings
input_size = 300
model = dict(
    type='SingleStageDetector',
    #pretrained='open-mmlab://vgg16_caffe',
    pretrained='vgg16_caffe-292e1171.pth',
    backbone=dict(
        type='SSDVGG',
        input_size=input_size,
        depth=16,
        with_last_pool=False,
        ceil_mode=True,
        out_indices=(3, 4),
        out_feature_indices=(22, 34),
        l2_norm_scale=20),
    neck=None,
    bbox_head=dict(
        type='SSDHead',
        input_size=input_size,
        in_channels=(512, 1024, 512, 256, 256, 256),
        num_classes=2,
        anchor_strides=(8, 16, 32, 64, 100, 300),
        basesize_ratio_range=(0.15, 0.9),
        anchor_ratios=([2], [2, 3], [2, 3], [2, 3], [2], [2]),
        target_means=(.0, .0, .0, .0),
        target_stds=(0.1, 0.1, 0.2, 0.2)))
cudnn_benchmark = True
train_cfg = dict(
    assigner=dict(
        type='MaxIoUAssigner',
        pos_iou_thr=0.5,
        neg_iou_thr=0.5,
        min_pos_iou=0.,
        ignore_iof_thr=-1,
        gt_max_assign_all=False),
    smoothl1_beta=1.,
    allowed_border=-1,
    pos_weight=-1,
    neg_pos_ratio=3,
    debug=False)
test_cfg = dict(
    nms=dict(type='nms', iou_thr=0.45),
    min_bbox_size=0,
    score_thr=0.02,
    max_per_img=200)
# model training and testing settings
# dataset settings
dataset_type = 'CocoDataset'
#data_root = 'data/WIDERFACE2019'
data_root = 'WIDER/'
img_norm_cfg = dict(
    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
data = dict(
    imgs_per_gpu=4,
    workers_per_gpu=1,
    train=dict(
        type=dataset_type,
        ann_file=data_root + '/WIDER_train/instances_train2019.json',
        img_prefix=data_root + '/WIDER_train/images/',
        img_scale=(300, 300),
        img_norm_cfg=img_norm_cfg,
        size_divisor=32,
        flip_ratio=0.5,
        with_mask=False,
        with_crowd=True,
        with_label=True,
        extra_aug=dict(
            photo_metric_distortion=dict(
                brightness_delta=32,
                contrast_range=(0.5, 1.5),
                saturation_range=(0.5, 1.5),
                hue_delta=18),
            expand=dict(
                mean=img_norm_cfg['mean'],
                to_rgb=img_norm_cfg['to_rgb'],
                ratio_range=(1, 4)),
            random_crop=dict(
                min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), min_crop_size=0.3)),
        resize_keep_ratio=False),
    val=dict(
        type=dataset_type,
        ann_file=data_root + 'WIDER_val/instances_val2019.json',
        img_prefix=data_root + 'WIDER_val/images/',
        img_scale=(300, 300),
        img_norm_cfg=img_norm_cfg,
        size_divisor=32,
        flip_ratio=0,
        with_mask=False,
        with_crowd=True,
        with_label=True),
    test=dict(
        type=dataset_type,
        ann_file=data_root + '/WIDER_val/instances_val2019.json',
        img_prefix=data_root + '/WIDER_val/images/',
        # img_scale=(640, 640),  # no result
        img_scale=(300, 300),
        img_norm_cfg=img_norm_cfg,
        size_divisor=32,
        flip_ratio=0.0,
        with_mask=False,
        with_label=False,
        test_mode=True,
        resize_keep_ratio=False))
# optimizer
optimizer = dict(type='SGD', lr=1e-3, momentum=0.9, weight_decay=5e-4)
optimizer_config = dict()
# learning policy
lr_config = dict(
    policy='step',
    warmup='linear',
    warmup_iters=1000,
    warmup_ratio=1.0 / 3,
    step=[16, 20])
checkpoint_config = dict(interval=1)
# yapf:disable
log_config = dict(
    interval=50,
    hooks=[
        dict(type='TextLoggerHook')
    ])
# yapf:enable
# runtime settings
total_epochs = 24
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = './work_dirs/ssd300_coco'
load_from = None
resume_from = None
workflow = [('train', 1)]
py
1a4a7aa33e8e9fd99be9bd1333192cb7d713bdad
from rest_framework import renderers


class PlainTextRenderer(renderers.BaseRenderer):
    media_type = 'text/plain'
    format = 'text'

    def render(self, data, media_type=None, renderer_context=None):
        # JSONRenderer returns bytes; decode before re-encoding so the output
        # is the JSON text itself rather than the repr of a bytes object.
        json_data = renderers.JSONRenderer().render(
            data, media_type, renderer_context)
        return json_data.decode('utf-8').encode(self.charset)
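A minimal usage sketch: the custom renderer is attached to a DRF view through the standard renderer_classes attribute. The view name and payload here are illustrative assumptions, not part of the original record.

# hypothetical usage example
from rest_framework.response import Response
from rest_framework.views import APIView


class PingView(APIView):
    renderer_classes = [PlainTextRenderer]

    def get(self, request):
        # Rendered as JSON text with a text/plain content type.
        return Response({'ping': 'pong'})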
py
1a4a7ace276641d8ba0fe761be9f2c5181f911f5
import json
import numpy
import os
import re
import sys

# This script depends on a SJSON parsing package:
# https://pypi.python.org/pypi/SJSON/1.1.0
# https://shelter13.net/projects/SJSON/
# https://bitbucket.org/Anteru/sjson/src
import sjson


def get_clip_names(benchmarks):
    clip_names = []
    for bench in benchmarks:
        run_name = bench['name']
        matches = re.search(r'^([\w\_]+).acl/', run_name)
        if matches == None:
            print('Failed to find the clip name from benchmark run: {}', run_name)
        else:
            clip_name = matches.group(1)
            clip_names.append(clip_name)
            bench['clip_name'] = clip_name
    return sorted(list(set(clip_names)))


def get_median_runs(clip_name, benchmarks):
    pose = None
    bone = None
    for bench in benchmarks:
        if bench['clip_name'] != clip_name:
            continue  # Not our clip
        if 'Dir:0' not in bench['name']:
            continue  # Wrong direction
        if bench['run_type'] != 'aggregate':
            continue  # Not an aggregate value
        if bench['aggregate_name'] != 'median':
            continue  # Not our median
        if 'Func:0' in bench['name']:
            # Decompress pose
            pose = bench
        elif 'Func:1' in bench['name']:
            # Decompress bone
            bone = bench
    return (pose, bone)


def ns_to_us(time_ns):
    return time_ns / 1000.0


def bytessec_to_mbsec(bytes_per_sec):
    return bytes_per_sec / (1024.0 * 1024.0)


def ms_to_s(time_ms):
    return time_ms / 1000.0


def bytes_to_mb(num_bytes):
    return num_bytes / (1024 * 1024)


if __name__ == "__main__":
    if sys.version_info < (3, 4):
        print('Python 3.4 or higher needed to run this script')
        sys.exit(1)

    if len(sys.argv) != 2 and len(sys.argv) != 3:
        print('Usage: python gen_decomp_delta_stats.py <path/to/input_file.sjson> [-warm]')
        sys.exit(1)

    input_sjson_file = sys.argv[1]
    if not input_sjson_file.endswith('.sjson'):
        print('Expected SJSON input file, found: {}'.format(input_sjson_file))
        sys.exit(1)

    if not os.path.exists(input_sjson_file):
        print('Input file not found: {}'.format(input_sjson_file))
        sys.exit(1)

    with open(input_sjson_file, 'r') as file:
        input_sjson_data = sjson.loads(file.read())

    clip_names = []

    decomp_delta_us_csv_file = open('decomp_delta_forward_stats_us.csv', 'w')
    decomp_delta_mbsec_csv_file = open('decomp_delta_forward_stats_mbsec.csv', 'w')

    per_entry_data = []

    for entry in input_sjson_data['inputs']:
        print('Processing {} ...'.format(entry['stats_dir']))

        benchmark_json_file = os.path.join(entry['stats_dir'], 'benchmark_results.json')
        with open(benchmark_json_file, 'r') as file:
            json_data = json.loads(file.read())

        benchmarks = json_data['benchmarks']

        if len(clip_names) == 0:
            clip_names = get_clip_names(benchmarks)
            print('Variants,Config,Version,{}'.format(','.join(clip_names)), file = decomp_delta_us_csv_file)
            print('Variants,Config,Version,{}'.format(','.join(clip_names)), file = decomp_delta_mbsec_csv_file)
        else:
            get_clip_names(benchmarks)

        pose_medians_us = {}
        bone_medians_us = {}
        pose_medians_mbsec = {}
        bone_medians_mbsec = {}

        for clip_name in clip_names:
            print('  Processing {} ...'.format(clip_name))

            (pose_median_run, bone_median_run) = get_median_runs(clip_name, benchmarks)

            # Convert from nanoseconds into microseconds
            pose_median = ns_to_us(pose_median_run['real_time'])
            bone_median = ns_to_us(bone_median_run['real_time'])

            pose_medians_us[clip_name] = pose_median
            bone_medians_us[clip_name] = bone_median

            # Convert from bytes/sec to megabytes/sec
            pose_speed = bytessec_to_mbsec(pose_median_run['Speed'])
            bone_speed = bytessec_to_mbsec(bone_median_run['Speed'])

            pose_medians_mbsec[clip_name] = pose_speed
            bone_medians_mbsec[clip_name] = bone_speed

        data = {}
        data['name'] = entry['name']
        data['version'] = entry['version']
        data['pose_medians_us'] = pose_medians_us
        data['bone_medians_us'] = bone_medians_us
        data['pose_medians_mbsec'] = pose_medians_mbsec
        data['bone_medians_mbsec'] = bone_medians_mbsec
        data['clip_names'] = clip_names
        per_entry_data.append(data)

    for data in per_entry_data:
        pose_medians_us = data['pose_medians_us']
        bone_medians_us = data['bone_medians_us']
        pose_medians_mbsec = data['pose_medians_mbsec']
        bone_medians_mbsec = data['bone_medians_mbsec']
        clip_names = data['clip_names']

        pose_medians_us_csv = []
        bone_medians_us_csv = []
        pose_medians_mbsec_csv = []
        bone_medians_mbsec_csv = []

        for clip_name in clip_names:
            pose_cold_median_us = pose_medians_us[clip_name]
            bone_cold_median_us = bone_medians_us[clip_name]

            pose_medians_us_csv.append(str(pose_cold_median_us))
            bone_medians_us_csv.append(str(bone_cold_median_us))

            pose_cold_speed_mbsec = pose_medians_mbsec[clip_name]
            bone_cold_speed_mbsec = bone_medians_mbsec[clip_name]

            # Speeds are already converted to MB/sec
            pose_medians_mbsec_csv.append(str(pose_cold_speed_mbsec))
            bone_medians_mbsec_csv.append(str(bone_cold_speed_mbsec))

        print('decompress_pose,{},{},{}'.format(data['name'], data['version'], ','.join(pose_medians_us_csv)), file = decomp_delta_us_csv_file)
        print('decompress_bone,{},{},{}'.format(data['name'], data['version'], ','.join(bone_medians_us_csv)), file = decomp_delta_us_csv_file)
        print('decompress_pose,{},{},{}'.format(data['name'], data['version'], ','.join(pose_medians_mbsec_csv)), file = decomp_delta_mbsec_csv_file)
        print('decompress_bone,{},{},{}'.format(data['name'], data['version'], ','.join(bone_medians_mbsec_csv)), file = decomp_delta_mbsec_csv_file)

    decomp_delta_us_csv_file.close()
    decomp_delta_mbsec_csv_file.close()
py
1a4a7afa5302601fc4014bd0d73164497ed2ef45
# -*- coding: utf-8 -*-
'''
A salt interface to psutil, a system and process library.
See http://code.google.com/p/psutil.

:depends:   - psutil Python module, version 0.3.0 or later
            - python-utmp package (optional)
'''

# Import python libs
from __future__ import absolute_import
import time
import datetime
import re

# Import salt libs
from salt.exceptions import SaltInvocationError, CommandExecutionError

# Import third party libs
import salt.ext.six as six
# pylint: disable=import-error
try:
    import salt.utils.psutil_compat as psutil

    HAS_PSUTIL = True
    PSUTIL2 = getattr(psutil, 'version_info', ()) >= (2, 0)
except ImportError:
    HAS_PSUTIL = False
# pylint: enable=import-error


def __virtual__():
    if not HAS_PSUTIL:
        return False, 'The ps module cannot be loaded: python module psutil not installed.'

    # Functions and attributes used in this execution module seem to have been
    # added as of psutil 0.3.0, from an inspection of the source code. Only
    # make this module available if the version of psutil is >= 0.3.0. Note
    # that this may need to be tweaked if we find post-0.3.0 versions which
    # also have problems running the functions in this execution module, but
    # most distributions have already moved to later versions (for example,
    # as of Dec. 2013 EPEL is on 0.6.1, Debian 7 is on 0.5.1, etc.).
    if psutil.version_info >= (0, 3, 0):
        return True
    return (False, 'The ps execution module cannot be loaded: the psutil python module version {0} is less than 0.3.0'.format(psutil.version_info))


def _get_proc_cmdline(proc):
    '''
    Returns the cmdline of a Process instance.

    It's backward compatible with < 2.0 versions of psutil.
    '''
    try:
        return proc.cmdline() if PSUTIL2 else proc.cmdline
    except (psutil.NoSuchProcess, psutil.AccessDenied):
        return ''


def _get_proc_create_time(proc):
    '''
    Returns the create_time of a Process instance.

    It's backward compatible with < 2.0 versions of psutil.
    '''
    try:
        return proc.create_time() if PSUTIL2 else proc.create_time
    except (psutil.NoSuchProcess, psutil.AccessDenied):
        return None


def _get_proc_name(proc):
    '''
    Returns the name of a Process instance.

    It's backward compatible with < 2.0 versions of psutil.
    '''
    try:
        return proc.name() if PSUTIL2 else proc.name
    except (psutil.NoSuchProcess, psutil.AccessDenied):
        return []


def _get_proc_status(proc):
    '''
    Returns the status of a Process instance.

    It's backward compatible with < 2.0 versions of psutil.
    '''
    try:
        return proc.status() if PSUTIL2 else proc.status
    except (psutil.NoSuchProcess, psutil.AccessDenied):
        return None


def _get_proc_username(proc):
    '''
    Returns the username of a Process instance.

    It's backward compatible with < 2.0 versions of psutil.
    '''
    try:
        return proc.username() if PSUTIL2 else proc.username
    except (psutil.NoSuchProcess, psutil.AccessDenied, KeyError):
        return None


def _get_proc_pid(proc):
    '''
    Returns the pid of a Process instance.
    '''
    return proc.pid


def top(num_processes=5, interval=3):
    '''
    Return a list of top CPU consuming processes during the interval.
    num_processes = return the top N CPU consuming processes
    interval = the number of seconds to sample CPU usage over

    CLI Examples:

    .. code-block:: bash

        salt '*' ps.top

        salt '*' ps.top 5 10
    '''
    result = []
    start_usage = {}
    for pid in psutil.pids():
        try:
            process = psutil.Process(pid)
            user, system = process.cpu_times()
        except ValueError:
            user, system, _, _ = process.cpu_times()
        except psutil.NoSuchProcess:
            continue
        start_usage[process] = user + system
    time.sleep(interval)
    usage = set()
    for process, start in six.iteritems(start_usage):
        try:
            user, system = process.cpu_times()
        except ValueError:
            user, system, _, _ = process.cpu_times()
        except psutil.NoSuchProcess:
            continue
        now = user + system
        diff = now - start
        usage.add((diff, process))

    for idx, (diff, process) in enumerate(reversed(sorted(usage))):
        if num_processes and idx >= num_processes:
            break
        if len(_get_proc_cmdline(process)) == 0:
            cmdline = _get_proc_name(process)
        else:
            cmdline = _get_proc_cmdline(process)
        info = {'cmd': cmdline,
                'user': _get_proc_username(process),
                'status': _get_proc_status(process),
                'pid': _get_proc_pid(process),
                'create_time': _get_proc_create_time(process),
                'cpu': {},
                'mem': {},
                }
        for key, value in six.iteritems(process.cpu_times()._asdict()):
            info['cpu'][key] = value
        for key, value in six.iteritems(process.memory_info()._asdict()):
            info['mem'][key] = value
        result.append(info)

    return result


def get_pid_list():
    '''
    Return a list of process ids (PIDs) for all running processes.

    CLI Example:

    .. code-block:: bash

        salt '*' ps.get_pid_list
    '''
    return psutil.pids()


def proc_info(pid, attrs=None):
    '''
    Return a dictionary of information for a process id (PID).

    CLI Example:

    .. code-block:: bash

        salt '*' ps.proc_info 2322
        salt '*' ps.proc_info 2322 attrs='["pid", "name"]'

    pid
        PID of process to query.

    attrs
        Optional list of desired process attributes.  The list of possible
        attributes can be found here:
        http://pythonhosted.org/psutil/#psutil.Process
    '''
    try:
        proc = psutil.Process(pid)
        return proc.as_dict(attrs)
    except (psutil.NoSuchProcess, psutil.AccessDenied, AttributeError) as exc:
        raise CommandExecutionError(exc)


def kill_pid(pid, signal=15):
    '''
    Kill a process by PID.

    .. code-block:: bash

        salt 'minion' ps.kill_pid pid [signal=signal_number]

    pid
        PID of process to kill.

    signal
        Signal to send to the process. See manpage entry for kill
        for possible values. Default: 15 (SIGTERM).

    **Example:**

    Send SIGKILL to process with PID 2000:

    .. code-block:: bash

        salt 'minion' ps.kill_pid 2000 signal=9
    '''
    try:
        psutil.Process(pid).send_signal(signal)
        return True
    except psutil.NoSuchProcess:
        return False


def pkill(pattern, user=None, signal=15, full=False):
    '''
    Kill processes matching a pattern.

    .. code-block:: bash

        salt '*' ps.pkill pattern [user=username] [signal=signal_number] \\
                [full=(true|false)]

    pattern
        Pattern to search for in the process list.

    user
        Limit matches to the given username. Default: All users.

    signal
        Signal to send to the process(es). See manpage entry for kill
        for possible values. Default: 15 (SIGTERM).

    full
        A boolean value indicating whether only the name of the command or
        the full command line should be matched against the pattern.

    **Examples:**

    Send SIGHUP to all httpd processes on all 'www' minions:

    .. code-block:: bash

        salt 'www.*' ps.pkill httpd signal=1

    Send SIGKILL to all bash processes owned by user 'tom':

    .. code-block:: bash

        salt '*' ps.pkill bash signal=9 user=tom
    '''

    killed = []
    for proc in psutil.process_iter():
        name_match = pattern in ' '.join(_get_proc_cmdline(proc)) if full \
            else pattern in _get_proc_name(proc)
        user_match = True if user is None else user == _get_proc_username(proc)
        if name_match and user_match:
            try:
                proc.send_signal(signal)
                killed.append(_get_proc_pid(proc))
            except psutil.NoSuchProcess:
                pass
    if not killed:
        return None
    else:
        return {'killed': killed}


def pgrep(pattern, user=None, full=False):
    '''
    Return the pids for processes matching a pattern.

    If full is true, the full command line is searched for a match,
    otherwise only the name of the command is searched.

    .. code-block:: bash

        salt '*' ps.pgrep pattern [user=username] [full=(true|false)]

    pattern
        Pattern to search for in the process list.

    user
        Limit matches to the given username. Default: All users.

    full
        A boolean value indicating whether only the name of the command or
        the full command line should be matched against the pattern.

    **Examples:**

    Find all httpd processes on all 'www' minions:

    .. code-block:: bash

        salt 'www.*' ps.pgrep httpd

    Find all bash processes owned by user 'tom':

    .. code-block:: bash

        salt '*' ps.pgrep bash user=tom
    '''
    procs = []
    for proc in psutil.process_iter():
        name_match = pattern in ' '.join(_get_proc_cmdline(proc)) if full \
            else pattern in _get_proc_name(proc)
        user_match = True if user is None else user == _get_proc_username(proc)
        if name_match and user_match:
            procs.append(_get_proc_pid(proc))
    return procs or None


def cpu_percent(interval=0.1, per_cpu=False):
    '''
    Return the percent of time the CPU is busy.

    interval
        the number of seconds to sample CPU usage over

    per_cpu
        if True return an array of CPU percent busy for each CPU, otherwise
        aggregate all percents into one number

    CLI Example:

    .. code-block:: bash

        salt '*' ps.cpu_percent
    '''
    if per_cpu:
        result = list(psutil.cpu_percent(interval, True))
    else:
        result = psutil.cpu_percent(interval)
    return result


def cpu_times(per_cpu=False):
    '''
    Return the percent of time the CPU spends in each state,
    e.g. user, system, idle, nice, iowait, irq, softirq.

    per_cpu
        if True return an array of percents for each CPU, otherwise aggregate
        all percents into one number

    CLI Example:

    .. code-block:: bash

        salt '*' ps.cpu_times
    '''
    if per_cpu:
        result = [dict(times._asdict()) for times in psutil.cpu_times(True)]
    else:
        result = dict(psutil.cpu_times(per_cpu)._asdict())
    return result


def virtual_memory():
    '''
    .. versionadded:: 2014.7.0

    Return a dict that describes statistics about system memory usage.

    .. note::
        This function is only available in psutil version 0.6.0 and above.

    CLI Example:

    .. code-block:: bash

        salt '*' ps.virtual_memory
    '''
    if psutil.version_info < (0, 6, 0):
        msg = 'virtual_memory is only available in psutil 0.6.0 or greater'
        raise CommandExecutionError(msg)
    return dict(psutil.virtual_memory()._asdict())


def swap_memory():
    '''
    .. versionadded:: 2014.7.0

    Return a dict that describes swap memory statistics.

    .. note::
        This function is only available in psutil version 0.6.0 and above.

    CLI Example:

    .. code-block:: bash

        salt '*' ps.swap_memory
    '''
    if psutil.version_info < (0, 6, 0):
        msg = 'swap_memory is only available in psutil 0.6.0 or greater'
        raise CommandExecutionError(msg)
    return dict(psutil.swap_memory()._asdict())


def disk_partitions(all=False):
    '''
    Return a list of disk partitions and their device, mount point, and
    filesystem type.

    all
        if set to False, only return local, physical partitions (hard disk,
        USB, CD/DVD partitions). If True, return all filesystems.

    CLI Example:

    .. code-block:: bash

        salt '*' ps.disk_partitions
    '''
    result = [dict(partition._asdict()) for partition in
              psutil.disk_partitions(all)]
    return result


def disk_usage(path):
    '''
    Given a path, return a dict listing the total available space as well as
    the free space, and used space.

    CLI Example:

    .. code-block:: bash

        salt '*' ps.disk_usage /home
    '''
    return dict(psutil.disk_usage(path)._asdict())


def disk_partition_usage(all=False):
    '''
    Return a list of disk partitions plus the mount point, filesystem and
    usage statistics.

    CLI Example:

    .. code-block:: bash

        salt '*' ps.disk_partition_usage
    '''
    result = disk_partitions(all)
    for partition in result:
        partition.update(disk_usage(partition['mountpoint']))
    return result


def total_physical_memory():
    '''
    Return the total number of bytes of physical memory.

    CLI Example:

    .. code-block:: bash

        salt '*' ps.total_physical_memory
    '''
    if psutil.version_info < (0, 6, 0):
        msg = 'virtual_memory is only available in psutil 0.6.0 or greater'
        raise CommandExecutionError(msg)
    try:
        return psutil.virtual_memory().total
    except AttributeError:
        # TOTAL_PHYMEM is deprecated but with older psutil versions this is
        # needed as a fallback.
        return psutil.TOTAL_PHYMEM


def num_cpus():
    '''
    Return the number of CPUs.

    CLI Example:

    .. code-block:: bash

        salt '*' ps.num_cpus
    '''
    try:
        return psutil.cpu_count()
    except AttributeError:
        # NUM_CPUS is deprecated but with older psutil versions this is
        # needed as a fallback.
        return psutil.NUM_CPUS


def boot_time(time_format=None):
    '''
    Return the boot time in number of seconds since the epoch began.

    CLI Example:

    time_format
        Optionally specify a `strftime`_ format string. Use
        ``time_format='%c'`` to get a nicely-formatted locale specific date
        and time (i.e. ``Fri May  2 19:08:32 2014``).

        .. _strftime: https://docs.python.org/2/library/datetime.html#strftime-strptime-behavior

        .. versionadded:: 2014.1.4

    .. code-block:: bash

        salt '*' ps.boot_time
    '''
    try:
        b_time = int(psutil.boot_time())
    except AttributeError:
        # get_boot_time() has been removed in newer psutil versions, and has
        # been replaced by boot_time() which provides the same information.
        b_time = int(psutil.boot_time())
    if time_format:
        # Load epoch timestamp as a datetime.datetime object
        b_time = datetime.datetime.fromtimestamp(b_time)
        try:
            return b_time.strftime(time_format)
        except TypeError as exc:
            raise SaltInvocationError('Invalid format string: {0}'.format(exc))
    return b_time


def network_io_counters(interface=None):
    '''
    Return network I/O statistics.

    CLI Example:

    .. code-block:: bash

        salt '*' ps.network_io_counters

        salt '*' ps.network_io_counters interface=eth0
    '''
    if not interface:
        return dict(psutil.net_io_counters()._asdict())
    else:
        stats = psutil.net_io_counters(pernic=True)
        if interface in stats:
            return dict(stats[interface]._asdict())
        else:
            return False


def disk_io_counters(device=None):
    '''
    Return disk I/O statistics.

    CLI Example:

    .. code-block:: bash

        salt '*' ps.disk_io_counters

        salt '*' ps.disk_io_counters device=sda1
    '''
    if not device:
        return dict(psutil.disk_io_counters()._asdict())
    else:
        stats = psutil.disk_io_counters(perdisk=True)
        if device in stats:
            return dict(stats[device]._asdict())
        else:
            return False


def get_users():
    '''
    Return logged-in users.

    CLI Example:

    .. code-block:: bash

        salt '*' ps.get_users
    '''
    try:
        recs = psutil.users()
        return [dict(x._asdict()) for x in recs]
    except AttributeError:
        # get_users is only present in psutil > v0.5.0
        # try utmp
        try:
            import utmp  # pylint: disable=import-error

            result = []
            while True:
                rec = utmp.utmpaccess.getutent()
                if rec is None:
                    return result
                elif rec[0] == 7:
                    started = rec[8]
                    if isinstance(started, tuple):
                        started = started[0]
                    result.append({'name': rec[4], 'terminal': rec[2],
                                   'started': started, 'host': rec[5]})
        except ImportError:
            return False


def lsof(name):
    '''
    Retrieve the lsof informations of the given process name.

    CLI Example:

    .. code-block:: bash

        salt '*' ps.lsof apache2
    '''
    sanitize_name = str(name)
    lsof_infos = __salt__['cmd.run']("lsof -c " + sanitize_name)
    ret = []
    ret.extend([sanitize_name, lsof_infos])
    return ret


def netstat(name):
    '''
    Retrieve the netstat informations of the given process name.

    CLI Example:

    .. code-block:: bash

        salt '*' ps.netstat apache2
    '''
    sanitize_name = str(name)
    netstat_infos = __salt__['cmd.run']("netstat -nap")
    found_infos = []
    ret = []
    for info in netstat_infos.splitlines():
        if info.find(sanitize_name) != -1:
            found_infos.append(info)
    ret.extend([sanitize_name, found_infos])
    return ret


def psaux(name):
    '''
    Retrieve information corresponding to a "ps aux" filtered
    with the given pattern.

    It could be just a name or a regular expression (using python
    search from "re" module).

    CLI Example:

    .. code-block:: bash

        salt '*' ps.psaux www-data.+apache2
    '''
    sanitize_name = str(name)
    pattern = re.compile(sanitize_name)
    salt_exception_pattern = re.compile("salt.+ps.psaux.+")
    ps_aux = __salt__['cmd.run']("ps aux")
    found_infos = []
    ret = []
    nb_lines = 0
    for info in ps_aux.splitlines():
        found = pattern.search(info)
        if found is not None:
            # remove 'salt' command from results
            if not salt_exception_pattern.search(info):
                nb_lines += 1
                found_infos.append(info)
    pid_count = str(nb_lines) + " occurence(s)."
    ret = []
    ret.extend([sanitize_name, found_infos, pid_count])
    return ret
py
1a4a7b07def79f101a829a32c0c1eedd560520c5
# implementation of SLIC Superpixel algorithm
# reference: SLIC Superpixels Compared to State-of-the-art Superpixel Methods
#            DOI: 10.1109/TPAMI.2012.120
#            website: https://infoscience.epfl.ch/record/177415
# reference: SLIC superpixel segmentation, principle and Python implementation:
#            https://www.kawabangga.com/posts/1923

import cv2 as cv
import numpy as np
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--image', default="Lena.jpg", type=str, help='input image name')
parser.add_argument('--k', default=1000, type=int, help='number of clusters')
parser.add_argument('--m', default=30, type=int, help='balancing parameter')
args = parser.parse_args()


class Block(object):
    def __init__(self, num, h, w, l=0, a=0, b=0):
        self.number = num
        self.h = h
        self.w = w
        self.l = l
        self.a = a
        self.b = b
        self.pixels = []  # positions of the pixels which belongs to this block

    def change_pos(self, h, w):
        self.h = h
        self.w = w

    def change_color(self, l, a, b):
        self.l = l
        self.a = a
        self.b = b


class Cluster(object):
    def __init__(self, image, number, m):
        self.image = image
        self.k = number
        self.m = m
        self.height = image.shape[0]
        self.width = image.shape[1]
        self.pixels = self.height * self.width
        self.block_length = int(np.sqrt(self.pixels / self.k))
        self.label = np.full((self.height, self.width), -1, np.int32)
        self.dis = np.full_like(self.label, np.inf, np.float32)
        self.blocks = []
        self.grad = cv.Laplacian(self.image, cv.CV_64F)

        w = 0
        h = self.block_length
        j = 0
        # in case that only half of the last line is covered
        for i in range(self.k + 2 * int(self.width / self.block_length)):
            w += self.block_length
            if (i % 2) == 0:
                h -= int((self.block_length - 1) / 2)
                if h < 0:
                    break
            else:
                h += int((self.block_length - 1) / 2)
                if h >= self.height:
                    break
            if w >= self.width:
                if (j % 2) == 0:
                    w = self.block_length
                else:
                    w = int(self.block_length / 2)
                h += self.block_length
                j += 1
                if h >= self.height:
                    break
            self.blocks.append(Block(i, h, w))
        self.adjust_blocks()

    # adjust the positions of block centers
    # move them to the points with the minimum gradients within the 3x3 regions
    def adjust_blocks(self):
        for block in self.blocks:
            min_grad = np.sum(self.grad[block.h][block.w])
            min_h = block.h
            min_w = block.w
            for i in range(-1, 2):
                if block.h + i < 0 or block.h + i >= self.height:
                    continue  # in case that the index goes out of boundary
                for j in range(-1, 2):
                    if block.w + j < 0 or block.w + j >= self.width:
                        continue
                    new_grad = np.sum(self.grad[block.h + i][block.w + j])
                    if new_grad < min_grad:
                        min_grad = new_grad
                        min_h = block.h + i
                        min_w = block.w + j
            block.change_pos(min_h, min_w)
            block.pixels.append((min_h, min_w))

    def distance(self, h1, w1, h2, w2):
        l1 = int(self.image[h1][w1][0])
        l2 = int(self.image[h2][w2][0])
        a1 = int(self.image[h1][w1][1])
        a2 = int(self.image[h2][w2][1])
        b1 = int(self.image[h1][w1][2])
        b2 = int(self.image[h2][w2][2])
        d_lab = np.sqrt((np.square(l1 - l2) + np.square(a1 - a2) + np.square(b1 - b2)))
        d_xy = np.sqrt(np.square(h1 - h2) + np.square(w1 - w2))
        distance = d_lab + d_xy * (self.m / self.block_length)
        return distance

    def assign(self):
        for block in self.blocks:
            for h2 in range(block.h - 2 * self.block_length, block.h + 2 * self.block_length):
                # out of boundary
                if h2 < 0:
                    continue
                if h2 >= self.height:
                    break
                # in boundary
                for w2 in range(block.w - 2 * self.block_length, block.w + 2 * self.block_length):
                    # out of boundary
                    if w2 < 0:
                        continue
                    if w2 >= self.width:
                        break
                    # in boundary
                    d = self.distance(block.h, block.w, h2, w2)
                    if self.label[h2][w2] < 0 or d < self.dis[h2][w2]:
                        if self.label[h2][w2] >= 0:
                            self.blocks[int(self.label[h2][w2])].pixels.remove((h2, w2))
                        self.label[h2][w2] = block.number
                        self.dis[h2][w2] = d
                        block.pixels.append((h2, w2))

            # re-compute the center of the block
            number_pixels = len(block.pixels)
            new_h = 0
            new_w = 0
            for pixel in block.pixels:
                new_h += pixel[0]
                new_w += pixel[1]
            new_h = int(new_h / number_pixels)
            new_w = int(new_w / number_pixels)
            block.change_pos(new_h, new_w)
            block.pixels.append((new_h, new_w))

    def color(self):
        for block in self.blocks:
            avg_l = 0
            avg_a = 0
            avg_b = 0
            length = len(block.pixels)
            for pixel in block.pixels:
                _l = int(self.image[pixel[0]][pixel[1]][0])
                _a = int(self.image[pixel[0]][pixel[1]][1])
                _b = int(self.image[pixel[0]][pixel[1]][2])
                avg_l += _l
                avg_a += _a
                avg_b += _b
            avg_l = int(avg_l / length)
            avg_a = int(avg_a / length)
            avg_b = int(avg_b / length)
            block.change_color(avg_l, avg_a, avg_b)  # use the average color

    def output(self):
        new_image = np.zeros_like(self.image)
        self.color()
        for block in self.blocks:
            for pixel in block.pixels:
                new_image[pixel[0]][pixel[1]][0] = block.l
                new_image[pixel[0]][pixel[1]][1] = block.a
                new_image[pixel[0]][pixel[1]][2] = block.b
            '''
            new_image[block.h][block.w][0] = 0
            new_image[block.h][block.w][1] = 0
            new_image[block.h][block.w][2] = 0
            '''
        new_image = cv.cvtColor(new_image, cv.COLOR_LAB2BGR)
        return new_image


if __name__ == '__main__':
    file_name = args.image
    cluster_number = args.k
    m_param = args.m
    img = cv.imread(file_name)
    img = cv.cvtColor(img, cv.COLOR_BGR2LAB)
    app = Cluster(image=img, number=int(cluster_number), m=int(m_param))
    for it in range(10):
        app.assign()
        out_image = app.output()
        name = "_new_" + str(it) + ".jpg"
        cv.imwrite(name, out_image)
        print(it)
py
1a4a7b9d915749aeb1fcb751380dcdc75e3a2746
# Generated by Django 3.1.4 on 2020-12-02 21:55

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Band',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=64)),
                ('area', models.CharField(max_length=64)),
                ('genre', models.CharField(max_length=64)),
                ('date_formed', models.DateTimeField()),
            ],
        ),
        migrations.CreateModel(
            name='Album',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=64)),
                ('label', models.CharField(max_length=64)),
                ('release', models.DateTimeField()),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='app.band')),
            ],
        ),
    ]
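For reference, a minimal sketch of the models module that would produce this initial migration, reconstructed from the operations above (the app label 'app' comes from the ForeignKey reference; the file path is an assumption):

# hypothetical app/models.py reconstructed from the migration
from django.db import models


class Band(models.Model):
    name = models.CharField(max_length=64)
    area = models.CharField(max_length=64)
    genre = models.CharField(max_length=64)
    date_formed = models.DateTimeField()


class Album(models.Model):
    name = models.CharField(max_length=64)
    label = models.CharField(max_length=64)
    release = models.DateTimeField()
    author = models.ForeignKey(Band, on_delete=models.CASCADE)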
py
1a4a7c273eaba0aa12d26abb6d52c6465fd74378
# coding: utf-8

import re
import six

from huaweicloudsdkcore.sdk_response import SdkResponse
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization


class ShowProcessResponse(SdkResponse):
    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """

    sensitive_list = []

    openapi_types = {
        'code': 'str',
        'message': 'str',
        'json': 'UploadProcessJson',
        'extend': 'str'
    }

    attribute_map = {
        'code': 'code',
        'message': 'message',
        'json': 'json',
        'extend': 'extend'
    }

    def __init__(self, code=None, message=None, json=None, extend=None):
        """ShowProcessResponse - a model defined in huaweicloud sdk"""
        super(ShowProcessResponse, self).__init__()

        self._code = None
        self._message = None
        self._json = None
        self._extend = None
        self.discriminator = None

        if code is not None:
            self.code = code
        if message is not None:
            self.message = message
        if json is not None:
            self.json = json
        if extend is not None:
            self.extend = extend

    @property
    def code(self):
        """Gets the code of this ShowProcessResponse.

        code

        :return: The code of this ShowProcessResponse.
        :rtype: str
        """
        return self._code

    @code.setter
    def code(self, code):
        """Sets the code of this ShowProcessResponse.

        code

        :param code: The code of this ShowProcessResponse.
        :type: str
        """
        self._code = code

    @property
    def message(self):
        """Gets the message of this ShowProcessResponse.

        message

        :return: The message of this ShowProcessResponse.
        :rtype: str
        """
        return self._message

    @message.setter
    def message(self, message):
        """Sets the message of this ShowProcessResponse.

        message

        :param message: The message of this ShowProcessResponse.
        :type: str
        """
        self._message = message

    @property
    def json(self):
        """Gets the json of this ShowProcessResponse.

        :return: The json of this ShowProcessResponse.
        :rtype: UploadProcessJson
        """
        return self._json

    @json.setter
    def json(self, json):
        """Sets the json of this ShowProcessResponse.

        :param json: The json of this ShowProcessResponse.
        :type: UploadProcessJson
        """
        self._json = json

    @property
    def extend(self):
        """Gets the extend of this ShowProcessResponse.

        extend

        :return: The extend of this ShowProcessResponse.
        :rtype: str
        """
        return self._extend

    @extend.setter
    def extend(self, extend):
        """Sets the extend of this ShowProcessResponse.

        extend

        :param extend: The extend of this ShowProcessResponse.
        :type: str
        """
        self._extend = extend

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                if attr in self.sensitive_list:
                    result[attr] = "****"
                else:
                    result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        import simplejson as json
        if six.PY2:
            import sys
            reload(sys)
            sys.setdefaultencoding("utf-8")
        return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)

    def __repr__(self):
        """For `print`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, ShowProcessResponse):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
py
1a4a7c399fb5e1d48b304f8929a7a91f8085b845
# -*- coding:utf-8 -*-
__author__ = 'Randolph'

import os
import math
import gensim
import logging
import json
import torch
import numpy as np
import pandas as pd
from scipy import stats
from texttable import Texttable
from gensim.models import KeyedVectors
import torch.nn.utils.rnn as rnn_utils


def option():
    """
    Choose training or restore pattern.

    Returns:
        The OPTION
    """
    OPTION = input("[Input] Train or Restore? (T/R): ")
    while not (OPTION.upper() in ['T', 'R']):
        OPTION = input("[Warning] The format of your input is illegal, please re-input: ")
    return OPTION.upper()


def logger_fn(name, input_file, level=logging.INFO):
    """
    The Logger.

    Args:
        name: The name of the logger
        input_file: The logger file path
        level: The logger level
    Returns:
        The logger
    """
    logger = logging.getLogger(name)
    logger.setLevel(level)
    log_dir = os.path.dirname(input_file)
    if not os.path.exists(log_dir):
        os.makedirs(log_dir)
    formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')

    # File Handler
    fh = logging.FileHandler(input_file, mode='w')
    fh.setFormatter(formatter)
    logger.addHandler(fh)

    # stream Handler
    sh = logging.StreamHandler()
    sh.setFormatter(formatter)
    sh.setLevel(logging.WARNING)
    logger.addHandler(sh)
    return logger


def tab_printer(args, logger):
    """
    Function to print the logs in a nice tabular format.

    Args:
        args: Parameters used for the model.
        logger: The logger
    """
    args = vars(args)
    keys = sorted(args.keys())
    t = Texttable()
    t.add_rows([[k.replace("_", " ").capitalize(), args[k]] for k in keys])
    t.add_rows([["Parameter", "Value"]])
    logger.info('\n' + t.draw())


def get_model_name():
    """
    Get the model name used for test.

    Returns:
        The model name
    """
    MODEL = input("[Input] Please input the model file you want to test, it should be like (1490175368): ")
    while not (MODEL.isdigit() and len(MODEL) == 10):
        MODEL = input("[Warning] The format of your input is illegal, "
                      "it should be like (1490175368), please re-input: ")
    return MODEL


def create_prediction_file(save_dir, identifiers, predictions):
    """
    Create the prediction file.

    Args:
        save_dir: The all classes predicted results provided by network
        identifiers: The data record id
        predictions: The predict scores
    """
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)
    preds_file = os.path.abspath(os.path.join(save_dir, 'submission.json'))
    with open(preds_file, 'w') as fout:
        tmp_dict = {}
        for index, predicted_label in enumerate(predictions):
            if identifiers[index] not in tmp_dict:
                tmp_dict[identifiers[index]] = [predicted_label]
            else:
                tmp_dict[identifiers[index]].append(predicted_label)
        for key in tmp_dict.keys():
            data_record = {
                'item_id': key,
                'label_list': tmp_dict[key],
            }
            fout.write(json.dumps(data_record, ensure_ascii=False) + '\n')


def evaluation(true_label, pred_label):
    """
    Calculate the PCC & DOA.

    Args:
        true_label: The true labels
        pred_label: The predicted labels
    Returns:
        The value of PCC & DOA
    """
    # compute pcc
    pcc, _ = stats.pearsonr(pred_label, true_label)
    if math.isnan(pcc):
        print('[Error]: PCC=nan', true_label, pred_label)
    # compute doa
    n = 0
    correct_num = 0
    for i in range(len(true_label) - 1):
        for j in range(i + 1, len(true_label)):
            if (true_label[i] > true_label[j]) and (pred_label[i] > pred_label[j]):
                correct_num += 1
            elif (true_label[i] == true_label[j]) and (pred_label[i] == pred_label[j]):
                continue
            elif (true_label[i] < true_label[j]) and (pred_label[i] < pred_label[j]):
                correct_num += 1
            n += 1
    if n == 0:
        print(true_label)
        return -1, -1
    doa = correct_num / n
    return pcc, doa


def course2vec(course2idx_file):
    """
    Return the number of courses recorded in the course2idx file.

    Args:
        course2idx_file: The course2idx file
    Returns:
        The number of courses
    Raises:
        IOError: If the course2idx file doesn't exist
    """
    if not os.path.isfile(course2idx_file):
        raise IOError("[Error] The course2idx file doesn't exist. ")
    with open(course2idx_file, 'r') as handle:
        course2idx = json.load(handle)
    course_cnt = len(course2idx)
    return course_cnt


def load_data_and_labels(input_file):
    if not input_file.endswith('.json'):
        raise IOError("[Error] The research data is not a json file. "
                      "Please preprocess the research data into the json file.")
    with open(input_file) as fin:
        id_list = []
        activity_list = []
        timestep_list = []
        labels_list = []

        for index, eachline in enumerate(fin):
            data = json.loads(eachline)
            id = data['item_id']
            activity = data['activity']
            timestep = data['timestep']
            labels = data['labels']

            id_list.append(id)
            activity_list.append(activity)
            timestep_list.append(timestep)
            labels_list.append(labels)

    class _Data:
        def __init__(self):
            pass

        @property
        def id(self):
            return id_list

        @property
        def activity(self):
            return activity_list

        @property
        def timestep(self):
            return timestep_list

        @property
        def labels(self):
            return labels_list

    return _Data()


class MyData(torch.utils.data.Dataset):
    """Dataset iterator structure."""
    def __init__(self, data_seq, data_tsp, data_label):
        self.seqs = data_seq
        self.tsp = data_tsp
        self.labels = data_label

    def __len__(self):
        return len(self.seqs)

    def __getitem__(self, idx):
        return self.seqs[idx], self.tsp[idx], self.labels[idx]


def collate_fn(data):
    """
    Version for PyTorch.

    Args:
        data: The research data. 0-dim: word token index / 1-dim: data label
    Returns:
        pad_content: The padded data
        lens: The ground truth lengths
        labels: The data labels
    """
    data.sort(key=lambda x: len(x[0]), reverse=True)
    data_lens = [len(i[0]) for i in data]
    data_x = [torch.tensor(i[0]) for i in data]
    data_tsp = [i[1] for i in data]
    data_y = torch.tensor([i[2] for i in data])
    pad_content = rnn_utils.pad_sequence(data_x, batch_first=True, padding_value=0.)
    return pad_content.unsqueeze(-1), data_lens, data_tsp, data_y
py
1a4a7d1fe73c7cad9e1c68d8deab0991edb558a4
import tempfile
import pathlib
import os
import shutil

from renamer.search import FileSearcher


class TestFileSearcher:
    @classmethod
    def setup_class(cls):
        cls.file_searcher = FileSearcher()
        cls.tmpdir = tempfile.mkdtemp()
        cls.file1_path = os.path.join(cls.tmpdir, 'file1.txt')
        cls.file2_path = os.path.join(cls.tmpdir, 'tmp', 'file2.txt')
        cls.keyword = os.path.join(cls.tmpdir, '**')
        os.makedirs(os.path.join(cls.tmpdir, 'tmp'))
        pathlib.Path(cls.file1_path).touch()
        pathlib.Path(cls.file2_path).touch()

    @classmethod
    def teardown_class(cls):
        shutil.rmtree(cls.tmpdir)

    def test_search(self):
        result = self.file_searcher.search(self.keyword)
        assert result == [self.file1_path]

    def test_search_recursive(self):
        result = self.file_searcher.search(self.keyword, recursive=True)
        assert result == [self.file1_path, self.file2_path]

    def test_select_file(self):
        test_case = {
            'files': [
                os.path.join(self.tmpdir, 'file1.txt'),
                os.path.join(self.tmpdir, 'tmp')],
            'want': [
                os.path.join(self.tmpdir, 'file1.txt')]}
        result = self.file_searcher._select_file(test_case['files'])
        assert result == test_case['want']
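The FileSearcher under test is not part of this record; below is a minimal sketch that would satisfy these tests, assuming glob-based matching (the module path and internals are assumptions, not the real renamer implementation):

# hypothetical renamer/search.py, for illustration only
import glob
import os


class FileSearcher:
    def search(self, keyword, recursive=False):
        # '**' only descends into subdirectories when recursive=True
        files = glob.glob(keyword, recursive=recursive)
        return self._select_file(sorted(files))

    def _select_file(self, files):
        # keep regular files, drop directories
        return [f for f in files if os.path.isfile(f)]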
py
1a4a7dbfd21402213728a87c80734a203451e3f6
# Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/protobuf/unittest.proto import sys _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database from google.protobuf import service as _service from google.protobuf import service_reflection # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() from google.protobuf import unittest_import_pb2 as google_dot_protobuf_dot_unittest__import__pb2 try: google_dot_protobuf_dot_unittest__import__public__pb2 = google_dot_protobuf_dot_unittest__import__pb2.google_dot_protobuf_dot_unittest__import__public__pb2 except AttributeError: google_dot_protobuf_dot_unittest__import__public__pb2 = google_dot_protobuf_dot_unittest__import__pb2.google.protobuf.unittest_import_public_pb2 DESCRIPTOR = _descriptor.FileDescriptor( name='google/protobuf/unittest.proto', package='protobuf_unittest', syntax='proto2', serialized_options=_b('B\rUnittestProtoH\001\200\001\001\210\001\001\220\001\001\370\001\001'), serialized_pb=_b('\n\x1egoogle/protobuf/unittest.proto\x12\x11protobuf_unittest\x1a%google/protobuf/unittest_import.proto\"\xed\x18\n\x0cTestAllTypes\x12\x16\n\x0eoptional_int32\x18\x01 \x01(\x05\x12\x16\n\x0eoptional_int64\x18\x02 \x01(\x03\x12\x17\n\x0foptional_uint32\x18\x03 \x01(\r\x12\x17\n\x0foptional_uint64\x18\x04 \x01(\x04\x12\x17\n\x0foptional_sint32\x18\x05 \x01(\x11\x12\x17\n\x0foptional_sint64\x18\x06 \x01(\x12\x12\x18\n\x10optional_fixed32\x18\x07 \x01(\x07\x12\x18\n\x10optional_fixed64\x18\x08 \x01(\x06\x12\x19\n\x11optional_sfixed32\x18\t \x01(\x0f\x12\x19\n\x11optional_sfixed64\x18\n \x01(\x10\x12\x16\n\x0eoptional_float\x18\x0b \x01(\x02\x12\x17\n\x0foptional_double\x18\x0c \x01(\x01\x12\x15\n\roptional_bool\x18\r \x01(\x08\x12\x17\n\x0foptional_string\x18\x0e \x01(\t\x12\x16\n\x0eoptional_bytes\x18\x0f \x01(\x0c\x12\x44\n\roptionalgroup\x18\x10 \x01(\n2-.protobuf_unittest.TestAllTypes.OptionalGroup\x12N\n\x17optional_nested_message\x18\x12 \x01(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessage\x12\x43\n\x18optional_foreign_message\x18\x13 \x01(\x0b\x32!.protobuf_unittest.ForeignMessage\x12H\n\x17optional_import_message\x18\x14 \x01(\x0b\x32\'.protobuf_unittest_import.ImportMessage\x12H\n\x14optional_nested_enum\x18\x15 \x01(\x0e\x32*.protobuf_unittest.TestAllTypes.NestedEnum\x12=\n\x15optional_foreign_enum\x18\x16 \x01(\x0e\x32\x1e.protobuf_unittest.ForeignEnum\x12\x42\n\x14optional_import_enum\x18\x17 \x01(\x0e\x32$.protobuf_unittest_import.ImportEnum\x12!\n\x15optional_string_piece\x18\x18 \x01(\tB\x02\x08\x02\x12\x19\n\roptional_cord\x18\x19 \x01(\tB\x02\x08\x01\x12U\n\x1eoptional_public_import_message\x18\x1a \x01(\x0b\x32-.protobuf_unittest_import.PublicImportMessage\x12P\n\x15optional_lazy_message\x18\x1b \x01(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessageB\x02(\x01\x12\x16\n\x0erepeated_int32\x18\x1f \x03(\x05\x12\x16\n\x0erepeated_int64\x18 \x03(\x03\x12\x17\n\x0frepeated_uint32\x18! 
\x03(\r\x12\x17\n\x0frepeated_uint64\x18\" \x03(\x04\x12\x17\n\x0frepeated_sint32\x18# \x03(\x11\x12\x17\n\x0frepeated_sint64\x18$ \x03(\x12\x12\x18\n\x10repeated_fixed32\x18% \x03(\x07\x12\x18\n\x10repeated_fixed64\x18& \x03(\x06\x12\x19\n\x11repeated_sfixed32\x18\' \x03(\x0f\x12\x19\n\x11repeated_sfixed64\x18( \x03(\x10\x12\x16\n\x0erepeated_float\x18) \x03(\x02\x12\x17\n\x0frepeated_double\x18* \x03(\x01\x12\x15\n\rrepeated_bool\x18+ \x03(\x08\x12\x17\n\x0frepeated_string\x18, \x03(\t\x12\x16\n\x0erepeated_bytes\x18- \x03(\x0c\x12\x44\n\rrepeatedgroup\x18. \x03(\n2-.protobuf_unittest.TestAllTypes.RepeatedGroup\x12N\n\x17repeated_nested_message\x18\x30 \x03(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessage\x12\x43\n\x18repeated_foreign_message\x18\x31 \x03(\x0b\x32!.protobuf_unittest.ForeignMessage\x12H\n\x17repeated_import_message\x18\x32 \x03(\x0b\x32\'.protobuf_unittest_import.ImportMessage\x12H\n\x14repeated_nested_enum\x18\x33 \x03(\x0e\x32*.protobuf_unittest.TestAllTypes.NestedEnum\x12=\n\x15repeated_foreign_enum\x18\x34 \x03(\x0e\x32\x1e.protobuf_unittest.ForeignEnum\x12\x42\n\x14repeated_import_enum\x18\x35 \x03(\x0e\x32$.protobuf_unittest_import.ImportEnum\x12!\n\x15repeated_string_piece\x18\x36 \x03(\tB\x02\x08\x02\x12\x19\n\rrepeated_cord\x18\x37 \x03(\tB\x02\x08\x01\x12P\n\x15repeated_lazy_message\x18\x39 \x03(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessageB\x02(\x01\x12\x19\n\rdefault_int32\x18= \x01(\x05:\x02\x34\x31\x12\x19\n\rdefault_int64\x18> \x01(\x03:\x02\x34\x32\x12\x1a\n\x0e\x64\x65\x66\x61ult_uint32\x18? \x01(\r:\x02\x34\x33\x12\x1a\n\x0e\x64\x65\x66\x61ult_uint64\x18@ \x01(\x04:\x02\x34\x34\x12\x1b\n\x0e\x64\x65\x66\x61ult_sint32\x18\x41 \x01(\x11:\x03-45\x12\x1a\n\x0e\x64\x65\x66\x61ult_sint64\x18\x42 \x01(\x12:\x02\x34\x36\x12\x1b\n\x0f\x64\x65\x66\x61ult_fixed32\x18\x43 \x01(\x07:\x02\x34\x37\x12\x1b\n\x0f\x64\x65\x66\x61ult_fixed64\x18\x44 \x01(\x06:\x02\x34\x38\x12\x1c\n\x10\x64\x65\x66\x61ult_sfixed32\x18\x45 \x01(\x0f:\x02\x34\x39\x12\x1d\n\x10\x64\x65\x66\x61ult_sfixed64\x18\x46 \x01(\x10:\x03-50\x12\x1b\n\rdefault_float\x18G \x01(\x02:\x04\x35\x31.5\x12\x1d\n\x0e\x64\x65\x66\x61ult_double\x18H \x01(\x01:\x05\x35\x32\x30\x30\x30\x12\x1a\n\x0c\x64\x65\x66\x61ult_bool\x18I \x01(\x08:\x04true\x12\x1d\n\x0e\x64\x65\x66\x61ult_string\x18J \x01(\t:\x05hello\x12\x1c\n\rdefault_bytes\x18K \x01(\x0c:\x05world\x12L\n\x13\x64\x65\x66\x61ult_nested_enum\x18Q \x01(\x0e\x32*.protobuf_unittest.TestAllTypes.NestedEnum:\x03\x42\x41R\x12I\n\x14\x64\x65\x66\x61ult_foreign_enum\x18R \x01(\x0e\x32\x1e.protobuf_unittest.ForeignEnum:\x0b\x46OREIGN_BAR\x12M\n\x13\x64\x65\x66\x61ult_import_enum\x18S \x01(\x0e\x32$.protobuf_unittest_import.ImportEnum:\nIMPORT_BAR\x12%\n\x14\x64\x65\x66\x61ult_string_piece\x18T \x01(\t:\x03\x61\x62\x63\x42\x02\x08\x02\x12\x1d\n\x0c\x64\x65\x66\x61ult_cord\x18U \x01(\t:\x03\x31\x32\x33\x42\x02\x08\x01\x12\x16\n\x0coneof_uint32\x18o \x01(\rH\x00\x12M\n\x14oneof_nested_message\x18p \x01(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessageH\x00\x12\x16\n\x0coneof_string\x18q \x01(\tH\x00\x12\x15\n\x0boneof_bytes\x18r \x01(\x0cH\x00\x1a\x1b\n\rNestedMessage\x12\n\n\x02\x62\x62\x18\x01 \x01(\x05\x1a\x1a\n\rOptionalGroup\x12\t\n\x01\x61\x18\x11 \x01(\x05\x1a\x1a\n\rRepeatedGroup\x12\t\n\x01\x61\x18/ 
\x01(\x05\"9\n\nNestedEnum\x12\x07\n\x03\x46OO\x10\x01\x12\x07\n\x03\x42\x41R\x10\x02\x12\x07\n\x03\x42\x41Z\x10\x03\x12\x10\n\x03NEG\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\x42\r\n\x0boneof_field\"\xbb\x01\n\x12NestedTestAllTypes\x12\x34\n\x05\x63hild\x18\x01 \x01(\x0b\x32%.protobuf_unittest.NestedTestAllTypes\x12\x30\n\x07payload\x18\x02 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x12=\n\x0erepeated_child\x18\x03 \x03(\x0b\x32%.protobuf_unittest.NestedTestAllTypes\"m\n\x14TestDeprecatedFields\x12\x1c\n\x10\x64\x65precated_int32\x18\x01 \x01(\x05\x42\x02\x18\x01\x12\'\n\x19\x64\x65precated_int32_in_oneof\x18\x02 \x01(\x05\x42\x02\x18\x01H\x00\x42\x0e\n\x0coneof_fields\"\x1b\n\x15TestDeprecatedMessage:\x02\x18\x01\"&\n\x0e\x46oreignMessage\x12\t\n\x01\x63\x18\x01 \x01(\x05\x12\t\n\x01\x64\x18\x02 \x01(\x05\"0\n\x12TestReservedFieldsJ\x04\x08\x02\x10\x03J\x04\x08\x0f\x10\x10J\x04\x08\t\x10\x0cR\x03\x62\x61rR\x03\x62\x61z\"\x1d\n\x11TestAllExtensions*\x08\x08\x01\x10\x80\x80\x80\x80\x02\"$\n\x17OptionalGroup_extension\x12\t\n\x01\x61\x18\x11 \x01(\x05\"$\n\x17RepeatedGroup_extension\x12\t\n\x01\x61\x18/ \x01(\x05\"\xa9\x01\n\tTestGroup\x12\x41\n\roptionalgroup\x18\x10 \x01(\n2*.protobuf_unittest.TestGroup.OptionalGroup\x12=\n\x15optional_foreign_enum\x18\x16 \x01(\x0e\x32\x1e.protobuf_unittest.ForeignEnum\x1a\x1a\n\rOptionalGroup\x12\t\n\x01\x61\x18\x11 \x01(\x05\"\x1e\n\x12TestGroupExtension*\x08\x08\x01\x10\x80\x80\x80\x80\x02\"\xb7\x03\n\x13TestNestedExtension\x1a$\n\x17OptionalGroup_extension\x12\t\n\x01\x61\x18\x11 \x01(\x05\x32\x39\n\x04test\x12$.protobuf_unittest.TestAllExtensions\x18\xea\x07 \x01(\t:\x04test2F\n\x17nested_string_extension\x12$.protobuf_unittest.TestAllExtensions\x18\xeb\x07 \x01(\t2\x86\x01\n\x17optionalgroup_extension\x12%.protobuf_unittest.TestGroupExtension\x18\x10 \x01(\n2>.protobuf_unittest.TestNestedExtension.OptionalGroup_extension2n\n\x1foptional_foreign_enum_extension\x12%.protobuf_unittest.TestGroupExtension\x18\x16 \x01(\x0e\x32\x1e.protobuf_unittest.ForeignEnum\"\xd5\x05\n\x0cTestRequired\x12\t\n\x01\x61\x18\x01 \x02(\x05\x12\x0e\n\x06\x64ummy2\x18\x02 \x01(\x05\x12\t\n\x01\x62\x18\x03 \x02(\x05\x12\x0e\n\x06\x64ummy4\x18\x04 \x01(\x05\x12\x0e\n\x06\x64ummy5\x18\x05 \x01(\x05\x12\x0e\n\x06\x64ummy6\x18\x06 \x01(\x05\x12\x0e\n\x06\x64ummy7\x18\x07 \x01(\x05\x12\x0e\n\x06\x64ummy8\x18\x08 \x01(\x05\x12\x0e\n\x06\x64ummy9\x18\t \x01(\x05\x12\x0f\n\x07\x64ummy10\x18\n \x01(\x05\x12\x0f\n\x07\x64ummy11\x18\x0b \x01(\x05\x12\x0f\n\x07\x64ummy12\x18\x0c \x01(\x05\x12\x0f\n\x07\x64ummy13\x18\r \x01(\x05\x12\x0f\n\x07\x64ummy14\x18\x0e \x01(\x05\x12\x0f\n\x07\x64ummy15\x18\x0f \x01(\x05\x12\x0f\n\x07\x64ummy16\x18\x10 \x01(\x05\x12\x0f\n\x07\x64ummy17\x18\x11 \x01(\x05\x12\x0f\n\x07\x64ummy18\x18\x12 \x01(\x05\x12\x0f\n\x07\x64ummy19\x18\x13 \x01(\x05\x12\x0f\n\x07\x64ummy20\x18\x14 \x01(\x05\x12\x0f\n\x07\x64ummy21\x18\x15 \x01(\x05\x12\x0f\n\x07\x64ummy22\x18\x16 \x01(\x05\x12\x0f\n\x07\x64ummy23\x18\x17 \x01(\x05\x12\x0f\n\x07\x64ummy24\x18\x18 \x01(\x05\x12\x0f\n\x07\x64ummy25\x18\x19 \x01(\x05\x12\x0f\n\x07\x64ummy26\x18\x1a \x01(\x05\x12\x0f\n\x07\x64ummy27\x18\x1b \x01(\x05\x12\x0f\n\x07\x64ummy28\x18\x1c \x01(\x05\x12\x0f\n\x07\x64ummy29\x18\x1d \x01(\x05\x12\x0f\n\x07\x64ummy30\x18\x1e \x01(\x05\x12\x0f\n\x07\x64ummy31\x18\x1f \x01(\x05\x12\x0f\n\x07\x64ummy32\x18 \x01(\x05\x12\t\n\x01\x63\x18! 
\x02(\x05\x32V\n\x06single\x12$.protobuf_unittest.TestAllExtensions\x18\xe8\x07 \x01(\x0b\x32\x1f.protobuf_unittest.TestRequired2U\n\x05multi\x12$.protobuf_unittest.TestAllExtensions\x18\xe9\x07 \x03(\x0b\x32\x1f.protobuf_unittest.TestRequired\"\x9a\x01\n\x13TestRequiredForeign\x12\x39\n\x10optional_message\x18\x01 \x01(\x0b\x32\x1f.protobuf_unittest.TestRequired\x12\x39\n\x10repeated_message\x18\x02 \x03(\x0b\x32\x1f.protobuf_unittest.TestRequired\x12\r\n\x05\x64ummy\x18\x03 \x01(\x05\"\xc6\x01\n\x13TestRequiredMessage\x12\x39\n\x10optional_message\x18\x01 \x01(\x0b\x32\x1f.protobuf_unittest.TestRequired\x12\x39\n\x10repeated_message\x18\x02 \x03(\x0b\x32\x1f.protobuf_unittest.TestRequired\x12\x39\n\x10required_message\x18\x03 \x02(\x0b\x32\x1f.protobuf_unittest.TestRequired\"Z\n\x11TestForeignNested\x12\x45\n\x0e\x66oreign_nested\x18\x01 \x01(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessage\"\x12\n\x10TestEmptyMessage\"*\n\x1eTestEmptyMessageWithExtensions*\x08\x08\x01\x10\x80\x80\x80\x80\x02\"7\n\x1bTestMultipleExtensionRanges*\x04\x08*\x10+*\x06\x08\xaf \x10\x94!*\n\x08\x80\x80\x04\x10\x80\x80\x80\x80\x02\"4\n\x18TestReallyLargeTagNumber\x12\t\n\x01\x61\x18\x01 \x01(\x05\x12\r\n\x02\x62\x62\x18\xff\xff\xff\x7f \x01(\x05\"U\n\x14TestRecursiveMessage\x12\x32\n\x01\x61\x18\x01 \x01(\x0b\x32\'.protobuf_unittest.TestRecursiveMessage\x12\t\n\x01i\x18\x02 \x01(\x05\"\xe1\x02\n\x14TestMutualRecursionA\x12\x33\n\x02\x62\x62\x18\x01 \x01(\x0b\x32\'.protobuf_unittest.TestMutualRecursionB\x12\x42\n\x08subgroup\x18\x02 \x01(\n20.protobuf_unittest.TestMutualRecursionA.SubGroup\x1a@\n\nSubMessage\x12\x32\n\x01\x62\x18\x01 \x01(\x0b\x32\'.protobuf_unittest.TestMutualRecursionB\x1a\x8d\x01\n\x08SubGroup\x12G\n\x0bsub_message\x18\x03 \x01(\x0b\x32\x32.protobuf_unittest.TestMutualRecursionA.SubMessage\x12\x38\n\x0fnot_in_this_scc\x18\x04 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\"b\n\x14TestMutualRecursionB\x12\x32\n\x01\x61\x18\x01 \x01(\x0b\x32\'.protobuf_unittest.TestMutualRecursionA\x12\x16\n\x0eoptional_int32\x18\x02 \x01(\x05\"\xca\x01\n\x11TestIsInitialized\x12\x44\n\x0bsub_message\x18\x01 \x01(\x0b\x32/.protobuf_unittest.TestIsInitialized.SubMessage\x1ao\n\nSubMessage\x12J\n\x08subgroup\x18\x01 \x01(\n28.protobuf_unittest.TestIsInitialized.SubMessage.SubGroup\x1a\x15\n\x08SubGroup\x12\t\n\x01i\x18\x02 \x02(\x05\"\xb3\x01\n\x12TestDupFieldNumber\x12\t\n\x01\x61\x18\x01 \x01(\x05\x12\x36\n\x03\x66oo\x18\x02 \x01(\n2).protobuf_unittest.TestDupFieldNumber.Foo\x12\x36\n\x03\x62\x61r\x18\x03 \x01(\n2).protobuf_unittest.TestDupFieldNumber.Bar\x1a\x10\n\x03\x46oo\x12\t\n\x01\x61\x18\x01 \x01(\x05\x1a\x10\n\x03\x42\x61r\x12\t\n\x01\x61\x18\x01 \x01(\x05\"L\n\x10TestEagerMessage\x12\x38\n\x0bsub_message\x18\x01 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypesB\x02(\x00\"K\n\x0fTestLazyMessage\x12\x38\n\x0bsub_message\x18\x01 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypesB\x02(\x01\"\x80\x02\n\x18TestNestedMessageHasBits\x12Z\n\x17optional_nested_message\x18\x01 \x01(\x0b\x32\x39.protobuf_unittest.TestNestedMessageHasBits.NestedMessage\x1a\x87\x01\n\rNestedMessage\x12$\n\x1cnestedmessage_repeated_int32\x18\x01 \x03(\x05\x12P\n%nestedmessage_repeated_foreignmessage\x18\x02 \x03(\x0b\x32!.protobuf_unittest.ForeignMessage\"\xe5\x03\n\x17TestCamelCaseFieldNames\x12\x16\n\x0ePrimitiveField\x18\x01 \x01(\x05\x12\x13\n\x0bStringField\x18\x02 \x01(\t\x12\x31\n\tEnumField\x18\x03 \x01(\x0e\x32\x1e.protobuf_unittest.ForeignEnum\x12\x37\n\x0cMessageField\x18\x04 
\x01(\x0b\x32!.protobuf_unittest.ForeignMessage\x12\x1c\n\x10StringPieceField\x18\x05 \x01(\tB\x02\x08\x02\x12\x15\n\tCordField\x18\x06 \x01(\tB\x02\x08\x01\x12\x1e\n\x16RepeatedPrimitiveField\x18\x07 \x03(\x05\x12\x1b\n\x13RepeatedStringField\x18\x08 \x03(\t\x12\x39\n\x11RepeatedEnumField\x18\t \x03(\x0e\x32\x1e.protobuf_unittest.ForeignEnum\x12?\n\x14RepeatedMessageField\x18\n \x03(\x0b\x32!.protobuf_unittest.ForeignMessage\x12$\n\x18RepeatedStringPieceField\x18\x0b \x03(\tB\x02\x08\x02\x12\x1d\n\x11RepeatedCordField\x18\x0c \x03(\tB\x02\x08\x01\"\xd5\x01\n\x12TestFieldOrderings\x12\x11\n\tmy_string\x18\x0b \x01(\t\x12\x0e\n\x06my_int\x18\x01 \x01(\x03\x12\x10\n\x08my_float\x18\x65 \x01(\x02\x12U\n\x17optional_nested_message\x18\xc8\x01 \x01(\x0b\x32\x33.protobuf_unittest.TestFieldOrderings.NestedMessage\x1a\'\n\rNestedMessage\x12\n\n\x02oo\x18\x02 \x01(\x03\x12\n\n\x02\x62\x62\x18\x01 \x01(\x05*\x04\x08\x02\x10\x0b*\x04\x08\x0c\x10\x65\"\x9c\x01\n\x17TestExtensionOrderings1\x12\x11\n\tmy_string\x18\x01 \x01(\t2n\n\x13test_ext_orderings1\x12%.protobuf_unittest.TestFieldOrderings\x18\r \x01(\x0b\x32*.protobuf_unittest.TestExtensionOrderings1\"\xd4\x02\n\x17TestExtensionOrderings2\x12\x11\n\tmy_string\x18\x01 \x01(\t\x1a\xb5\x01\n\x17TestExtensionOrderings3\x12\x11\n\tmy_string\x18\x01 \x01(\t2\x86\x01\n\x13test_ext_orderings3\x12%.protobuf_unittest.TestFieldOrderings\x18\x0e \x01(\x0b\x32\x42.protobuf_unittest.TestExtensionOrderings2.TestExtensionOrderings32n\n\x13test_ext_orderings2\x12%.protobuf_unittest.TestFieldOrderings\x18\x0c \x01(\x0b\x32*.protobuf_unittest.TestExtensionOrderings2\"\xb6\x07\n\x18TestExtremeDefaultValues\x12?\n\rescaped_bytes\x18\x01 \x01(\x0c:(\\000\\001\\007\\010\\014\\n\\r\\t\\013\\\\\\\'\\\"\\376\x12 \n\x0clarge_uint32\x18\x02 \x01(\r:\n4294967295\x12*\n\x0clarge_uint64\x18\x03 \x01(\x04:\x14\x31\x38\x34\x34\x36\x37\x34\x34\x30\x37\x33\x37\x30\x39\x35\x35\x31\x36\x31\x35\x12 \n\x0bsmall_int32\x18\x04 \x01(\x05:\x0b-2147483647\x12)\n\x0bsmall_int64\x18\x05 \x01(\x03:\x14-9223372036854775807\x12\'\n\x12really_small_int32\x18\x15 \x01(\x05:\x0b-2147483648\x12\x30\n\x12really_small_int64\x18\x16 \x01(\x03:\x14-9223372036854775808\x12\x18\n\x0butf8_string\x18\x06 \x01(\t:\x03\xe1\x88\xb4\x12\x15\n\nzero_float\x18\x07 \x01(\x02:\x01\x30\x12\x14\n\tone_float\x18\x08 \x01(\x02:\x01\x31\x12\x18\n\x0bsmall_float\x18\t \x01(\x02:\x03\x31.5\x12\x1e\n\x12negative_one_float\x18\n \x01(\x02:\x02-1\x12\x1c\n\x0enegative_float\x18\x0b \x01(\x02:\x04-1.5\x12\x1a\n\x0blarge_float\x18\x0c \x01(\x02:\x05\x32\x65+08\x12$\n\x14small_negative_float\x18\r \x01(\x02:\x06-8e-28\x12\x17\n\ninf_double\x18\x0e \x01(\x01:\x03inf\x12\x1c\n\x0eneg_inf_double\x18\x0f \x01(\x01:\x04-inf\x12\x17\n\nnan_double\x18\x10 \x01(\x01:\x03nan\x12\x16\n\tinf_float\x18\x11 \x01(\x02:\x03inf\x12\x1b\n\rneg_inf_float\x18\x12 \x01(\x02:\x04-inf\x12\x16\n\tnan_float\x18\x13 \x01(\x02:\x03nan\x12+\n\x0c\x63pp_trigraph\x18\x14 \x01(\t:\x15? ? ?? ?? ??? 
??/ ??-\x12 \n\x10string_with_zero\x18\x17 \x01(\t:\x06hel\x00lo\x12\"\n\x0f\x62ytes_with_zero\x18\x18 \x01(\x0c:\twor\\000ld\x12(\n\x16string_piece_with_zero\x18\x19 \x01(\t:\x04\x61\x62\x00\x63\x42\x02\x08\x02\x12 \n\x0e\x63ord_with_zero\x18\x1a \x01(\t:\x04\x31\x32\x00\x33\x42\x02\x08\x01\x12&\n\x12replacement_string\x18\x1b \x01(\t:\n${unknown}\"K\n\x11SparseEnumMessage\x12\x36\n\x0bsparse_enum\x18\x01 \x01(\x0e\x32!.protobuf_unittest.TestSparseEnum\"\x19\n\tOneString\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\t\"\x1a\n\nMoreString\x12\x0c\n\x04\x64\x61ta\x18\x01 \x03(\t\"\x18\n\x08OneBytes\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\"\x19\n\tMoreBytes\x12\x0c\n\x04\x64\x61ta\x18\x01 \x03(\x0c\"\x1c\n\x0cInt32Message\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x05\"\x1d\n\rUint32Message\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\r\"\x1c\n\x0cInt64Message\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x03\"\x1d\n\rUint64Message\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x04\"\x1b\n\x0b\x42oolMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x08\"\xd0\x01\n\tTestOneof\x12\x11\n\x07\x66oo_int\x18\x01 \x01(\x05H\x00\x12\x14\n\nfoo_string\x18\x02 \x01(\tH\x00\x12\x36\n\x0b\x66oo_message\x18\x03 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypesH\x00\x12\x39\n\x08\x66oogroup\x18\x04 \x01(\n2%.protobuf_unittest.TestOneof.FooGroupH\x00\x1a \n\x08\x46ooGroup\x12\t\n\x01\x61\x18\x05 \x01(\x05\x12\t\n\x01\x62\x18\x06 \x01(\tB\x05\n\x03\x66oo\"\xe7\x01\n\x1cTestOneofBackwardsCompatible\x12\x0f\n\x07\x66oo_int\x18\x01 \x01(\x05\x12\x12\n\nfoo_string\x18\x02 \x01(\t\x12\x34\n\x0b\x66oo_message\x18\x03 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x12J\n\x08\x66oogroup\x18\x04 \x01(\n28.protobuf_unittest.TestOneofBackwardsCompatible.FooGroup\x1a \n\x08\x46ooGroup\x12\t\n\x01\x61\x18\x05 \x01(\x05\x12\t\n\x01\x62\x18\x06 \x01(\t\"\x9e\x06\n\nTestOneof2\x12\x11\n\x07\x66oo_int\x18\x01 \x01(\x05H\x00\x12\x14\n\nfoo_string\x18\x02 \x01(\tH\x00\x12\x16\n\x08\x66oo_cord\x18\x03 \x01(\tB\x02\x08\x01H\x00\x12\x1e\n\x10\x66oo_string_piece\x18\x04 \x01(\tB\x02\x08\x02H\x00\x12\x13\n\tfoo_bytes\x18\x05 \x01(\x0cH\x00\x12<\n\x08\x66oo_enum\x18\x06 \x01(\x0e\x32(.protobuf_unittest.TestOneof2.NestedEnumH\x00\x12\x42\n\x0b\x66oo_message\x18\x07 \x01(\x0b\x32+.protobuf_unittest.TestOneof2.NestedMessageH\x00\x12:\n\x08\x66oogroup\x18\x08 \x01(\n2&.protobuf_unittest.TestOneof2.FooGroupH\x00\x12K\n\x10\x66oo_lazy_message\x18\x0b \x01(\x0b\x32+.protobuf_unittest.TestOneof2.NestedMessageB\x02(\x01H\x00\x12\x14\n\x07\x62\x61r_int\x18\x0c \x01(\x05:\x01\x35H\x01\x12\x1c\n\nbar_string\x18\r \x01(\t:\x06STRINGH\x01\x12\x1c\n\x08\x62\x61r_cord\x18\x0e \x01(\t:\x04\x43ORDB\x02\x08\x01H\x01\x12&\n\x10\x62\x61r_string_piece\x18\x0f \x01(\t:\x06SPIECEB\x02\x08\x02H\x01\x12\x1a\n\tbar_bytes\x18\x10 \x01(\x0c:\x05\x42YTESH\x01\x12\x41\n\x08\x62\x61r_enum\x18\x11 \x01(\x0e\x32(.protobuf_unittest.TestOneof2.NestedEnum:\x03\x42\x41RH\x01\x12\x0f\n\x07\x62\x61z_int\x18\x12 \x01(\x05\x12\x17\n\nbaz_string\x18\x13 \x01(\t:\x03\x42\x41Z\x1a \n\x08\x46ooGroup\x12\t\n\x01\x61\x18\t \x01(\x05\x12\t\n\x01\x62\x18\n \x01(\t\x1a\x33\n\rNestedMessage\x12\x0f\n\x07qux_int\x18\x01 \x01(\x03\x12\x11\n\tcorge_int\x18\x02 \x03(\x05\"\'\n\nNestedEnum\x12\x07\n\x03\x46OO\x10\x01\x12\x07\n\x03\x42\x41R\x10\x02\x12\x07\n\x03\x42\x41Z\x10\x03\x42\x05\n\x03\x66ooB\x05\n\x03\x62\x61r\"\xb8\x01\n\x11TestRequiredOneof\x12\x11\n\x07\x66oo_int\x18\x01 \x01(\x05H\x00\x12\x14\n\nfoo_string\x18\x02 \x01(\tH\x00\x12I\n\x0b\x66oo_message\x18\x03 
\x01(\x0b\x32\x32.protobuf_unittest.TestRequiredOneof.NestedMessageH\x00\x1a(\n\rNestedMessage\x12\x17\n\x0frequired_double\x18\x01 \x02(\x01\x42\x05\n\x03\x66oo\"\xaa\x03\n\x0fTestPackedTypes\x12\x18\n\x0cpacked_int32\x18Z \x03(\x05\x42\x02\x10\x01\x12\x18\n\x0cpacked_int64\x18[ \x03(\x03\x42\x02\x10\x01\x12\x19\n\rpacked_uint32\x18\\ \x03(\rB\x02\x10\x01\x12\x19\n\rpacked_uint64\x18] \x03(\x04\x42\x02\x10\x01\x12\x19\n\rpacked_sint32\x18^ \x03(\x11\x42\x02\x10\x01\x12\x19\n\rpacked_sint64\x18_ \x03(\x12\x42\x02\x10\x01\x12\x1a\n\x0epacked_fixed32\x18` \x03(\x07\x42\x02\x10\x01\x12\x1a\n\x0epacked_fixed64\x18\x61 \x03(\x06\x42\x02\x10\x01\x12\x1b\n\x0fpacked_sfixed32\x18\x62 \x03(\x0f\x42\x02\x10\x01\x12\x1b\n\x0fpacked_sfixed64\x18\x63 \x03(\x10\x42\x02\x10\x01\x12\x18\n\x0cpacked_float\x18\x64 \x03(\x02\x42\x02\x10\x01\x12\x19\n\rpacked_double\x18\x65 \x03(\x01\x42\x02\x10\x01\x12\x17\n\x0bpacked_bool\x18\x66 \x03(\x08\x42\x02\x10\x01\x12\x37\n\x0bpacked_enum\x18g \x03(\x0e\x32\x1e.protobuf_unittest.ForeignEnumB\x02\x10\x01\"\xc8\x03\n\x11TestUnpackedTypes\x12\x1a\n\x0eunpacked_int32\x18Z \x03(\x05\x42\x02\x10\x00\x12\x1a\n\x0eunpacked_int64\x18[ \x03(\x03\x42\x02\x10\x00\x12\x1b\n\x0funpacked_uint32\x18\\ \x03(\rB\x02\x10\x00\x12\x1b\n\x0funpacked_uint64\x18] \x03(\x04\x42\x02\x10\x00\x12\x1b\n\x0funpacked_sint32\x18^ \x03(\x11\x42\x02\x10\x00\x12\x1b\n\x0funpacked_sint64\x18_ \x03(\x12\x42\x02\x10\x00\x12\x1c\n\x10unpacked_fixed32\x18` \x03(\x07\x42\x02\x10\x00\x12\x1c\n\x10unpacked_fixed64\x18\x61 \x03(\x06\x42\x02\x10\x00\x12\x1d\n\x11unpacked_sfixed32\x18\x62 \x03(\x0f\x42\x02\x10\x00\x12\x1d\n\x11unpacked_sfixed64\x18\x63 \x03(\x10\x42\x02\x10\x00\x12\x1a\n\x0eunpacked_float\x18\x64 \x03(\x02\x42\x02\x10\x00\x12\x1b\n\x0funpacked_double\x18\x65 \x03(\x01\x42\x02\x10\x00\x12\x19\n\runpacked_bool\x18\x66 \x03(\x08\x42\x02\x10\x00\x12\x39\n\runpacked_enum\x18g \x03(\x0e\x32\x1e.protobuf_unittest.ForeignEnumB\x02\x10\x00\" \n\x14TestPackedExtensions*\x08\x08\x01\x10\x80\x80\x80\x80\x02\"\"\n\x16TestUnpackedExtensions*\x08\x08\x01\x10\x80\x80\x80\x80\x02\"\x99\x04\n\x15TestDynamicExtensions\x12\x19\n\x10scalar_extension\x18\xd0\x0f \x01(\x07\x12\x37\n\x0e\x65num_extension\x18\xd1\x0f \x01(\x0e\x32\x1e.protobuf_unittest.ForeignEnum\x12Y\n\x16\x64ynamic_enum_extension\x18\xd2\x0f \x01(\x0e\x32\x38.protobuf_unittest.TestDynamicExtensions.DynamicEnumType\x12=\n\x11message_extension\x18\xd3\x0f \x01(\x0b\x32!.protobuf_unittest.ForeignMessage\x12_\n\x19\x64ynamic_message_extension\x18\xd4\x0f \x01(\x0b\x32;.protobuf_unittest.TestDynamicExtensions.DynamicMessageType\x12\x1b\n\x12repeated_extension\x18\xd5\x0f \x03(\t\x12\x1d\n\x10packed_extension\x18\xd6\x0f \x03(\x11\x42\x02\x10\x01\x1a,\n\x12\x44ynamicMessageType\x12\x16\n\rdynamic_field\x18\xb4\x10 \x01(\x05\"G\n\x0f\x44ynamicEnumType\x12\x10\n\x0b\x44YNAMIC_FOO\x10\x98\x11\x12\x10\n\x0b\x44YNAMIC_BAR\x10\x99\x11\x12\x10\n\x0b\x44YNAMIC_BAZ\x10\x9a\x11\"\xc0\x01\n#TestRepeatedScalarDifferentTagSizes\x12\x18\n\x10repeated_fixed32\x18\x0c \x03(\x07\x12\x16\n\x0erepeated_int32\x18\r \x03(\x05\x12\x19\n\x10repeated_fixed64\x18\xfe\x0f \x03(\x06\x12\x17\n\x0erepeated_int64\x18\xff\x0f \x03(\x03\x12\x18\n\x0erepeated_float\x18\xfe\xff\x0f \x03(\x02\x12\x19\n\x0frepeated_uint64\x18\xff\xff\x0f \x03(\x04\"\xf7\t\n\x10TestParsingMerge\x12;\n\x12required_all_types\x18\x01 \x02(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x12;\n\x12optional_all_types\x18\x02 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x12;\n\x12repeated_all_types\x18\x03 
\x03(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x12H\n\roptionalgroup\x18\n \x01(\n21.protobuf_unittest.TestParsingMerge.OptionalGroup\x12H\n\rrepeatedgroup\x18\x14 \x03(\n21.protobuf_unittest.TestParsingMerge.RepeatedGroup\x1a\xaa\x04\n\x17RepeatedFieldsGenerator\x12/\n\x06\x66ield1\x18\x01 \x03(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x12/\n\x06\x66ield2\x18\x02 \x03(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x12/\n\x06\x66ield3\x18\x03 \x03(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x12R\n\x06group1\x18\n \x03(\n2B.protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.Group1\x12R\n\x06group2\x18\x14 \x03(\n2B.protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.Group2\x12.\n\x04\x65xt1\x18\xe8\x07 \x03(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x12.\n\x04\x65xt2\x18\xe9\x07 \x03(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x1a\x39\n\x06Group1\x12/\n\x06\x66ield1\x18\x0b \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x1a\x39\n\x06Group2\x12/\n\x06\x66ield1\x18\x15 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x1aR\n\rOptionalGroup\x12\x41\n\x18optional_group_all_types\x18\x0b \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x1aR\n\rRepeatedGroup\x12\x41\n\x18repeated_group_all_types\x18\x15 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\x32[\n\x0coptional_ext\x12#.protobuf_unittest.TestParsingMerge\x18\xe8\x07 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes2[\n\x0crepeated_ext\x12#.protobuf_unittest.TestParsingMerge\x18\xe9\x07 \x03(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\"D\n\x1bTestCommentInjectionMessage\x12%\n\x01\x61\x18\x01 \x01(\t:\x1a*/ <- Neither should this.\"\x0c\n\nFooRequest\"\r\n\x0b\x46ooResponse\"\x12\n\x10\x46ooClientMessage\"\x12\n\x10\x46ooServerMessage\"\x0c\n\nBarRequest\"\r\n\x0b\x42\x61rResponse\"\x92\x01\n\x0cTestJsonName\x12\x13\n\x0b\x66ield_name1\x18\x01 \x01(\x05\x12\x12\n\nfieldName2\x18\x02 \x01(\x05\x12\x12\n\nFieldName3\x18\x03 \x01(\x05\x12\x14\n\x0c_field_name4\x18\x04 \x01(\x05\x12\x13\n\x0b\x46IELD_NAME5\x18\x05 \x01(\x05\x12\x1a\n\x0b\x66ield_name6\x18\x06 \x01(\x05R\x05@type\"\xfd\x05\n\x14TestHugeFieldNumbers\x12\x1a\n\x0eoptional_int32\x18\xf0\xf8\xff\xff\x01 \x01(\x05\x12\x14\n\x08\x66ixed_32\x18\xf1\xf8\xff\xff\x01 \x01(\x05\x12\x1e\n\x0erepeated_int32\x18\xf2\xf8\xff\xff\x01 \x03(\x05\x42\x02\x10\x00\x12\x1c\n\x0cpacked_int32\x18\xf3\xf8\xff\xff\x01 \x03(\x05\x42\x02\x10\x01\x12\x39\n\roptional_enum\x18\xf4\xf8\xff\xff\x01 \x01(\x0e\x32\x1e.protobuf_unittest.ForeignEnum\x12\x1b\n\x0foptional_string\x18\xf5\xf8\xff\xff\x01 \x01(\t\x12\x1a\n\x0eoptional_bytes\x18\xf6\xf8\xff\xff\x01 \x01(\x0c\x12?\n\x10optional_message\x18\xf7\xf8\xff\xff\x01 \x01(\x0b\x32!.protobuf_unittest.ForeignMessage\x12P\n\roptionalgroup\x18\xf8\xf8\xff\xff\x01 \x01(\n25.protobuf_unittest.TestHugeFieldNumbers.OptionalGroup\x12[\n\x11string_string_map\x18\xfa\xf8\xff\xff\x01 \x03(\x0b\x32<.protobuf_unittest.TestHugeFieldNumbers.StringStringMapEntry\x12\x1a\n\x0coneof_uint32\x18\xfb\xf8\xff\xff\x01 \x01(\rH\x00\x12\x43\n\x14oneof_test_all_types\x18\xfc\xf8\xff\xff\x01 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypesH\x00\x12\x1a\n\x0coneof_string\x18\xfd\xf8\xff\xff\x01 \x01(\tH\x00\x12\x19\n\x0boneof_bytes\x18\xfe\xf8\xff\xff\x01 \x01(\x0cH\x00\x1a$\n\rOptionalGroup\x12\x13\n\x07group_a\x18\xf9\xf8\xff\xff\x01 \x01(\x05\x1a\x36\n\x14StringStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01*\x0c\x08\xe0\xaa\xff\xff\x01\x10\xf0\xf8\xff\xff\x01\x42\r\n\x0boneof_field\"\xb1\x01\n\x18TestExtensionInsideTable\x12\x0e\n\x06\x66ield1\x18\x01 \x01(\x05\x12\x0e\n\x06\x66ield2\x18\x02 \x01(\x05\x12\x0e\n\x06\x66ield3\x18\x03 \x01(\x05\x12\x0e\n\x06\x66ield4\x18\x04 \x01(\x05\x12\x0e\n\x06\x66ield6\x18\x06 \x01(\x05\x12\x0e\n\x06\x66ield7\x18\x07 \x01(\x05\x12\x0e\n\x06\x66ield8\x18\x08 \x01(\x05\x12\x0e\n\x06\x66ield9\x18\t \x01(\x05\x12\x0f\n\x07\x66ield10\x18\n \x01(\x05*\x04\x08\x05\x10\x06*@\n\x0b\x46oreignEnum\x12\x0f\n\x0b\x46OREIGN_FOO\x10\x04\x12\x0f\n\x0b\x46OREIGN_BAR\x10\x05\x12\x0f\n\x0b\x46OREIGN_BAZ\x10\x06*K\n\x14TestEnumWithDupValue\x12\x08\n\x04\x46OO1\x10\x01\x12\x08\n\x04\x42\x41R1\x10\x02\x12\x07\n\x03\x42\x41Z\x10\x03\x12\x08\n\x04\x46OO2\x10\x01\x12\x08\n\x04\x42\x41R2\x10\x02\x1a\x02\x10\x01*\x89\x01\n\x0eTestSparseEnum\x12\x0c\n\x08SPARSE_A\x10{\x12\x0e\n\x08SPARSE_B\x10\xa6\xe7\x03\x12\x0f\n\x08SPARSE_C\x10\xb2\xb1\x80\x06\x12\x15\n\x08SPARSE_D\x10\xf1\xff\xff\xff\xff\xff\xff\xff\xff\x01\x12\x15\n\x08SPARSE_E\x10\xb4\xde\xfc\xff\xff\xff\xff\xff\xff\x01\x12\x0c\n\x08SPARSE_F\x10\x00\x12\x0c\n\x08SPARSE_G\x10\x02\x32\x99\x01\n\x0bTestService\x12\x44\n\x03\x46oo\x12\x1d.protobuf_unittest.FooRequest\x1a\x1e.protobuf_unittest.FooResponse\x12\x44\n\x03\x42\x61r\x12\x1d.protobuf_unittest.BarRequest\x1a\x1e.protobuf_unittest.BarResponse:F\n\x18optional_int32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x01 \x01(\x05:F\n\x18optional_int64_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x02 \x01(\x03:G\n\x19optional_uint32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x03 \x01(\r:G\n\x19optional_uint64_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x04 \x01(\x04:G\n\x19optional_sint32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x05 \x01(\x11:G\n\x19optional_sint64_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x06 \x01(\x12:H\n\x1aoptional_fixed32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x07 \x01(\x07:H\n\x1aoptional_fixed64_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x08 \x01(\x06:I\n\x1boptional_sfixed32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\t \x01(\x0f:I\n\x1boptional_sfixed64_extension\x12$.protobuf_unittest.TestAllExtensions\x18\n \x01(\x10:F\n\x18optional_float_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x0b \x01(\x02:G\n\x19optional_double_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x0c \x01(\x01:E\n\x17optional_bool_extension\x12$.protobuf_unittest.TestAllExtensions\x18\r \x01(\x08:G\n\x19optional_string_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x0e \x01(\t:F\n\x18optional_bytes_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x0f \x01(\x0c:q\n\x17optionalgroup_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x10 \x01(\n2*.protobuf_unittest.OptionalGroup_extension:~\n!optional_nested_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x12 \x01(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessage:s\n\"optional_foreign_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x13 \x01(\x0b\x32!.protobuf_unittest.ForeignMessage:x\n!optional_import_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x14 \x01(\x0b\x32\'.protobuf_unittest_import.ImportMessage:x\n\x1eoptional_nested_enum_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x15 
\x01(\x0e\x32*.protobuf_unittest.TestAllTypes.NestedEnum:m\n\x1foptional_foreign_enum_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x16 \x01(\x0e\x32\x1e.protobuf_unittest.ForeignEnum:r\n\x1eoptional_import_enum_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x17 \x01(\x0e\x32$.protobuf_unittest_import.ImportEnum:Q\n\x1foptional_string_piece_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x18 \x01(\tB\x02\x08\x02:I\n\x17optional_cord_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x19 \x01(\tB\x02\x08\x01:\x85\x01\n(optional_public_import_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x1a \x01(\x0b\x32-.protobuf_unittest_import.PublicImportMessage:\x80\x01\n\x1foptional_lazy_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x1b \x01(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessageB\x02(\x01:F\n\x18repeated_int32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x1f \x03(\x05:F\n\x18repeated_int64_extension\x12$.protobuf_unittest.TestAllExtensions\x18 \x03(\x03:G\n\x19repeated_uint32_extension\x12$.protobuf_unittest.TestAllExtensions\x18! \x03(\r:G\n\x19repeated_uint64_extension\x12$.protobuf_unittest.TestAllExtensions\x18\" \x03(\x04:G\n\x19repeated_sint32_extension\x12$.protobuf_unittest.TestAllExtensions\x18# \x03(\x11:G\n\x19repeated_sint64_extension\x12$.protobuf_unittest.TestAllExtensions\x18$ \x03(\x12:H\n\x1arepeated_fixed32_extension\x12$.protobuf_unittest.TestAllExtensions\x18% \x03(\x07:H\n\x1arepeated_fixed64_extension\x12$.protobuf_unittest.TestAllExtensions\x18& \x03(\x06:I\n\x1brepeated_sfixed32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\' \x03(\x0f:I\n\x1brepeated_sfixed64_extension\x12$.protobuf_unittest.TestAllExtensions\x18( \x03(\x10:F\n\x18repeated_float_extension\x12$.protobuf_unittest.TestAllExtensions\x18) \x03(\x02:G\n\x19repeated_double_extension\x12$.protobuf_unittest.TestAllExtensions\x18* \x03(\x01:E\n\x17repeated_bool_extension\x12$.protobuf_unittest.TestAllExtensions\x18+ \x03(\x08:G\n\x19repeated_string_extension\x12$.protobuf_unittest.TestAllExtensions\x18, \x03(\t:F\n\x18repeated_bytes_extension\x12$.protobuf_unittest.TestAllExtensions\x18- \x03(\x0c:q\n\x17repeatedgroup_extension\x12$.protobuf_unittest.TestAllExtensions\x18. 
\x03(\n2*.protobuf_unittest.RepeatedGroup_extension:~\n!repeated_nested_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x30 \x03(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessage:s\n\"repeated_foreign_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x31 \x03(\x0b\x32!.protobuf_unittest.ForeignMessage:x\n!repeated_import_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x32 \x03(\x0b\x32\'.protobuf_unittest_import.ImportMessage:x\n\x1erepeated_nested_enum_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x33 \x03(\x0e\x32*.protobuf_unittest.TestAllTypes.NestedEnum:m\n\x1frepeated_foreign_enum_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x34 \x03(\x0e\x32\x1e.protobuf_unittest.ForeignEnum:r\n\x1erepeated_import_enum_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x35 \x03(\x0e\x32$.protobuf_unittest_import.ImportEnum:Q\n\x1frepeated_string_piece_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x36 \x03(\tB\x02\x08\x02:I\n\x17repeated_cord_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x37 \x03(\tB\x02\x08\x01:\x80\x01\n\x1frepeated_lazy_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x39 \x03(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessageB\x02(\x01:I\n\x17\x64\x65\x66\x61ult_int32_extension\x12$.protobuf_unittest.TestAllExtensions\x18= \x01(\x05:\x02\x34\x31:I\n\x17\x64\x65\x66\x61ult_int64_extension\x12$.protobuf_unittest.TestAllExtensions\x18> \x01(\x03:\x02\x34\x32:J\n\x18\x64\x65\x66\x61ult_uint32_extension\x12$.protobuf_unittest.TestAllExtensions\x18? \x01(\r:\x02\x34\x33:J\n\x18\x64\x65\x66\x61ult_uint64_extension\x12$.protobuf_unittest.TestAllExtensions\x18@ \x01(\x04:\x02\x34\x34:K\n\x18\x64\x65\x66\x61ult_sint32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x41 \x01(\x11:\x03-45:J\n\x18\x64\x65\x66\x61ult_sint64_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x42 \x01(\x12:\x02\x34\x36:K\n\x19\x64\x65\x66\x61ult_fixed32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x43 \x01(\x07:\x02\x34\x37:K\n\x19\x64\x65\x66\x61ult_fixed64_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x44 \x01(\x06:\x02\x34\x38:L\n\x1a\x64\x65\x66\x61ult_sfixed32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x45 \x01(\x0f:\x02\x34\x39:M\n\x1a\x64\x65\x66\x61ult_sfixed64_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x46 \x01(\x10:\x03-50:K\n\x17\x64\x65\x66\x61ult_float_extension\x12$.protobuf_unittest.TestAllExtensions\x18G \x01(\x02:\x04\x35\x31.5:M\n\x18\x64\x65\x66\x61ult_double_extension\x12$.protobuf_unittest.TestAllExtensions\x18H \x01(\x01:\x05\x35\x32\x30\x30\x30:J\n\x16\x64\x65\x66\x61ult_bool_extension\x12$.protobuf_unittest.TestAllExtensions\x18I \x01(\x08:\x04true:M\n\x18\x64\x65\x66\x61ult_string_extension\x12$.protobuf_unittest.TestAllExtensions\x18J \x01(\t:\x05hello:L\n\x17\x64\x65\x66\x61ult_bytes_extension\x12$.protobuf_unittest.TestAllExtensions\x18K \x01(\x0c:\x05world:|\n\x1d\x64\x65\x66\x61ult_nested_enum_extension\x12$.protobuf_unittest.TestAllExtensions\x18Q \x01(\x0e\x32*.protobuf_unittest.TestAllTypes.NestedEnum:\x03\x42\x41R:y\n\x1e\x64\x65\x66\x61ult_foreign_enum_extension\x12$.protobuf_unittest.TestAllExtensions\x18R \x01(\x0e\x32\x1e.protobuf_unittest.ForeignEnum:\x0b\x46OREIGN_BAR:}\n\x1d\x64\x65\x66\x61ult_import_enum_extension\x12$.protobuf_unittest.TestAllExtensions\x18S 
\x01(\x0e\x32$.protobuf_unittest_import.ImportEnum:\nIMPORT_BAR:U\n\x1e\x64\x65\x66\x61ult_string_piece_extension\x12$.protobuf_unittest.TestAllExtensions\x18T \x01(\t:\x03\x61\x62\x63\x42\x02\x08\x02:M\n\x16\x64\x65\x66\x61ult_cord_extension\x12$.protobuf_unittest.TestAllExtensions\x18U \x01(\t:\x03\x31\x32\x33\x42\x02\x08\x01:D\n\x16oneof_uint32_extension\x12$.protobuf_unittest.TestAllExtensions\x18o \x01(\r:{\n\x1eoneof_nested_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18p \x01(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessage:D\n\x16oneof_string_extension\x12$.protobuf_unittest.TestAllExtensions\x18q \x01(\t:C\n\x15oneof_bytes_extension\x12$.protobuf_unittest.TestAllExtensions\x18r \x01(\x0c:B\n\x13my_extension_string\x12%.protobuf_unittest.TestFieldOrderings\x18\x32 \x01(\t:?\n\x10my_extension_int\x12%.protobuf_unittest.TestFieldOrderings\x18\x05 \x01(\x05:K\n\x16packed_int32_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18Z \x03(\x05\x42\x02\x10\x01:K\n\x16packed_int64_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18[ \x03(\x03\x42\x02\x10\x01:L\n\x17packed_uint32_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18\\ \x03(\rB\x02\x10\x01:L\n\x17packed_uint64_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18] \x03(\x04\x42\x02\x10\x01:L\n\x17packed_sint32_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18^ \x03(\x11\x42\x02\x10\x01:L\n\x17packed_sint64_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18_ \x03(\x12\x42\x02\x10\x01:M\n\x18packed_fixed32_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18` \x03(\x07\x42\x02\x10\x01:M\n\x18packed_fixed64_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18\x61 \x03(\x06\x42\x02\x10\x01:N\n\x19packed_sfixed32_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18\x62 \x03(\x0f\x42\x02\x10\x01:N\n\x19packed_sfixed64_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18\x63 \x03(\x10\x42\x02\x10\x01:K\n\x16packed_float_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18\x64 \x03(\x02\x42\x02\x10\x01:L\n\x17packed_double_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18\x65 \x03(\x01\x42\x02\x10\x01:J\n\x15packed_bool_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18\x66 \x03(\x08\x42\x02\x10\x01:j\n\x15packed_enum_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18g \x03(\x0e\x32\x1e.protobuf_unittest.ForeignEnumB\x02\x10\x01:O\n\x18unpacked_int32_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18Z \x03(\x05\x42\x02\x10\x00:O\n\x18unpacked_int64_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18[ \x03(\x03\x42\x02\x10\x00:P\n\x19unpacked_uint32_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18\\ \x03(\rB\x02\x10\x00:P\n\x19unpacked_uint64_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18] \x03(\x04\x42\x02\x10\x00:P\n\x19unpacked_sint32_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18^ \x03(\x11\x42\x02\x10\x00:P\n\x19unpacked_sint64_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18_ \x03(\x12\x42\x02\x10\x00:Q\n\x1aunpacked_fixed32_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18` \x03(\x07\x42\x02\x10\x00:Q\n\x1aunpacked_fixed64_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18\x61 \x03(\x06\x42\x02\x10\x00:R\n\x1bunpacked_sfixed32_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18\x62 \x03(\x0f\x42\x02\x10\x00:R\n\x1bunpacked_sfixed64_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18\x63 
\x03(\x10\x42\x02\x10\x00:O\n\x18unpacked_float_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18\x64 \x03(\x02\x42\x02\x10\x00:P\n\x19unpacked_double_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18\x65 \x03(\x01\x42\x02\x10\x00:N\n\x17unpacked_bool_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18\x66 \x03(\x08\x42\x02\x10\x00:n\n\x17unpacked_enum_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18g \x03(\x0e\x32\x1e.protobuf_unittest.ForeignEnumB\x02\x10\x00:d\n\x0etest_all_types\x12\'.protobuf_unittest.TestHugeFieldNumbers\x18\xe0\xaa\xff\xff\x01 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes:Z\n%test_extension_inside_table_extension\x12+.protobuf_unittest.TestExtensionInsideTable\x18\x05 \x01(\x05\x42\x1d\x42\rUnittestProtoH\x01\x80\x01\x01\x88\x01\x01\x90\x01\x01\xf8\x01\x01')
  ,
  dependencies=[google_dot_protobuf_dot_unittest__import__pb2.DESCRIPTOR,])

_FOREIGNENUM = _descriptor.EnumDescriptor(
  name='ForeignEnum',
  full_name='protobuf_unittest.ForeignEnum',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='FOREIGN_FOO', index=0, number=4,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='FOREIGN_BAR', index=1, number=5,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='FOREIGN_BAZ', index=2, number=6,
      serialized_options=None,
      type=None),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=15306,
  serialized_end=15370,
)
_sym_db.RegisterEnumDescriptor(_FOREIGNENUM)

ForeignEnum = enum_type_wrapper.EnumTypeWrapper(_FOREIGNENUM)
_TESTENUMWITHDUPVALUE = _descriptor.EnumDescriptor(
  name='TestEnumWithDupValue',
  full_name='protobuf_unittest.TestEnumWithDupValue',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='FOO1', index=0, number=1,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='BAR1', index=1, number=2,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='BAZ', index=2, number=3,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='FOO2', index=3, number=1,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='BAR2', index=4, number=2,
      serialized_options=None,
      type=None),
  ],
  containing_type=None,
  serialized_options=_b('\020\001'),
  serialized_start=15372,
  serialized_end=15447,
)
_sym_db.RegisterEnumDescriptor(_TESTENUMWITHDUPVALUE)

TestEnumWithDupValue = enum_type_wrapper.EnumTypeWrapper(_TESTENUMWITHDUPVALUE)
_TESTSPARSEENUM = _descriptor.EnumDescriptor(
  name='TestSparseEnum',
  full_name='protobuf_unittest.TestSparseEnum',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='SPARSE_A', index=0, number=123,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='SPARSE_B', index=1, number=62374,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='SPARSE_C', index=2, number=12589234,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='SPARSE_D', index=3, number=-15,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='SPARSE_E', index=4, number=-53452,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='SPARSE_F', index=5, number=0,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='SPARSE_G', index=6, number=2,
      serialized_options=None,
      type=None),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=15450,
  serialized_end=15587,
)
_sym_db.RegisterEnumDescriptor(_TESTSPARSEENUM)

TestSparseEnum = enum_type_wrapper.EnumTypeWrapper(_TESTSPARSEENUM)
FOREIGN_FOO = 4
FOREIGN_BAR = 5
FOREIGN_BAZ = 6
FOO1 = 1
BAR1 = 2
BAZ = 3
FOO2 = 1
BAR2 = 2
SPARSE_A = 123
SPARSE_B = 62374
SPARSE_C = 12589234
SPARSE_D = -15
SPARSE_E = -53452
SPARSE_F = 0
SPARSE_G = 2

OPTIONAL_INT32_EXTENSION_FIELD_NUMBER = 1
optional_int32_extension = _descriptor.FieldDescriptor(
  name='optional_int32_extension', full_name='protobuf_unittest.optional_int32_extension', index=0,
  number=1, type=5, cpp_type=1, label=1,
  has_default_value=False, default_value=0,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
OPTIONAL_INT64_EXTENSION_FIELD_NUMBER = 2
optional_int64_extension = _descriptor.FieldDescriptor(
  name='optional_int64_extension', full_name='protobuf_unittest.optional_int64_extension', index=1,
  number=2, type=3, cpp_type=2, label=1,
  has_default_value=False, default_value=0,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
OPTIONAL_UINT32_EXTENSION_FIELD_NUMBER = 3
optional_uint32_extension = _descriptor.FieldDescriptor(
  name='optional_uint32_extension', full_name='protobuf_unittest.optional_uint32_extension', index=2,
  number=3, type=13, cpp_type=3, label=1,
  has_default_value=False, default_value=0,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
OPTIONAL_UINT64_EXTENSION_FIELD_NUMBER = 4
optional_uint64_extension = _descriptor.FieldDescriptor(
  name='optional_uint64_extension', full_name='protobuf_unittest.optional_uint64_extension', index=3,
  number=4, type=4, cpp_type=4, label=1,
  has_default_value=False, default_value=0,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
OPTIONAL_SINT32_EXTENSION_FIELD_NUMBER = 5
optional_sint32_extension = _descriptor.FieldDescriptor(
  name='optional_sint32_extension', full_name='protobuf_unittest.optional_sint32_extension', index=4,
  number=5, type=17, cpp_type=1, label=1,
  has_default_value=False, default_value=0,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
OPTIONAL_SINT64_EXTENSION_FIELD_NUMBER = 6
optional_sint64_extension = _descriptor.FieldDescriptor(
  name='optional_sint64_extension', full_name='protobuf_unittest.optional_sint64_extension', index=5,
  number=6, type=18, cpp_type=2, label=1,
  has_default_value=False, default_value=0,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
OPTIONAL_FIXED32_EXTENSION_FIELD_NUMBER = 7
optional_fixed32_extension = _descriptor.FieldDescriptor(
  name='optional_fixed32_extension', full_name='protobuf_unittest.optional_fixed32_extension', index=6,
  number=7, type=7, cpp_type=3, label=1,
  has_default_value=False, default_value=0,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
OPTIONAL_FIXED64_EXTENSION_FIELD_NUMBER = 8
optional_fixed64_extension = _descriptor.FieldDescriptor(
  name='optional_fixed64_extension', full_name='protobuf_unittest.optional_fixed64_extension', index=7,
  number=8, type=6, cpp_type=4, label=1,
  has_default_value=False, default_value=0,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
OPTIONAL_SFIXED32_EXTENSION_FIELD_NUMBER = 9
optional_sfixed32_extension = _descriptor.FieldDescriptor(
  name='optional_sfixed32_extension', full_name='protobuf_unittest.optional_sfixed32_extension', index=8,
  number=9, type=15, cpp_type=1, label=1,
  has_default_value=False, default_value=0,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
OPTIONAL_SFIXED64_EXTENSION_FIELD_NUMBER = 10
optional_sfixed64_extension = _descriptor.FieldDescriptor(
  name='optional_sfixed64_extension', full_name='protobuf_unittest.optional_sfixed64_extension', index=9,
  number=10, type=16, cpp_type=2, label=1,
  has_default_value=False, default_value=0,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
OPTIONAL_FLOAT_EXTENSION_FIELD_NUMBER = 11
optional_float_extension = _descriptor.FieldDescriptor(
  name='optional_float_extension', full_name='protobuf_unittest.optional_float_extension', index=10,
  number=11, type=2, cpp_type=6, label=1,
  has_default_value=False, default_value=float(0),
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
OPTIONAL_DOUBLE_EXTENSION_FIELD_NUMBER = 12
optional_double_extension = _descriptor.FieldDescriptor(
  name='optional_double_extension', full_name='protobuf_unittest.optional_double_extension', index=11,
  number=12, type=1, cpp_type=5, label=1,
  has_default_value=False, default_value=float(0),
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
OPTIONAL_BOOL_EXTENSION_FIELD_NUMBER = 13
optional_bool_extension = _descriptor.FieldDescriptor(
  name='optional_bool_extension', full_name='protobuf_unittest.optional_bool_extension', index=12,
  number=13, type=8, cpp_type=7, label=1,
  has_default_value=False, default_value=False,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
OPTIONAL_STRING_EXTENSION_FIELD_NUMBER = 14
optional_string_extension = _descriptor.FieldDescriptor(
  name='optional_string_extension', full_name='protobuf_unittest.optional_string_extension', index=13,
  number=14, type=9, cpp_type=9, label=1,
  has_default_value=False, default_value=_b("").decode('utf-8'),
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
OPTIONAL_BYTES_EXTENSION_FIELD_NUMBER = 15
optional_bytes_extension = _descriptor.FieldDescriptor(
  name='optional_bytes_extension', full_name='protobuf_unittest.optional_bytes_extension', index=14,
  number=15, type=12, cpp_type=9, label=1,
  has_default_value=False, default_value=_b(""),
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
OPTIONALGROUP_EXTENSION_FIELD_NUMBER = 16
optionalgroup_extension = _descriptor.FieldDescriptor(
  name='optionalgroup_extension', full_name='protobuf_unittest.optionalgroup_extension', index=15,
  number=16, type=10, cpp_type=10, label=1,
  has_default_value=False, default_value=None,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
OPTIONAL_NESTED_MESSAGE_EXTENSION_FIELD_NUMBER = 18
optional_nested_message_extension = _descriptor.FieldDescriptor(
  name='optional_nested_message_extension', full_name='protobuf_unittest.optional_nested_message_extension', index=16,
  number=18, type=11, cpp_type=10, label=1,
  has_default_value=False, default_value=None,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
OPTIONAL_FOREIGN_MESSAGE_EXTENSION_FIELD_NUMBER = 19
optional_foreign_message_extension = _descriptor.FieldDescriptor(
  name='optional_foreign_message_extension', full_name='protobuf_unittest.optional_foreign_message_extension', index=17,
  number=19, type=11, cpp_type=10, label=1,
  has_default_value=False, default_value=None,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
OPTIONAL_IMPORT_MESSAGE_EXTENSION_FIELD_NUMBER = 20
optional_import_message_extension = _descriptor.FieldDescriptor(
  name='optional_import_message_extension', full_name='protobuf_unittest.optional_import_message_extension', index=18,
  number=20, type=11, cpp_type=10, label=1,
  has_default_value=False, default_value=None,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
OPTIONAL_NESTED_ENUM_EXTENSION_FIELD_NUMBER = 21
optional_nested_enum_extension = _descriptor.FieldDescriptor(
  name='optional_nested_enum_extension', full_name='protobuf_unittest.optional_nested_enum_extension', index=19,
  number=21, type=14, cpp_type=8, label=1,
  has_default_value=False, default_value=1,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
OPTIONAL_FOREIGN_ENUM_EXTENSION_FIELD_NUMBER = 22
optional_foreign_enum_extension = _descriptor.FieldDescriptor(
  name='optional_foreign_enum_extension', full_name='protobuf_unittest.optional_foreign_enum_extension', index=20,
  number=22, type=14, cpp_type=8, label=1,
  has_default_value=False, default_value=4,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
OPTIONAL_IMPORT_ENUM_EXTENSION_FIELD_NUMBER = 23
optional_import_enum_extension = _descriptor.FieldDescriptor(
  name='optional_import_enum_extension', full_name='protobuf_unittest.optional_import_enum_extension', index=21,
  number=23, type=14, cpp_type=8, label=1,
  has_default_value=False, default_value=7,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
OPTIONAL_STRING_PIECE_EXTENSION_FIELD_NUMBER = 24
optional_string_piece_extension = _descriptor.FieldDescriptor(
  name='optional_string_piece_extension', full_name='protobuf_unittest.optional_string_piece_extension', index=22,
  number=24, type=9, cpp_type=9, label=1,
  has_default_value=False, default_value=_b("").decode('utf-8'),
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=_b('\010\002'), file=DESCRIPTOR)
OPTIONAL_CORD_EXTENSION_FIELD_NUMBER = 25
optional_cord_extension = _descriptor.FieldDescriptor(
  name='optional_cord_extension', full_name='protobuf_unittest.optional_cord_extension', index=23,
  number=25, type=9, cpp_type=9, label=1,
  has_default_value=False, default_value=_b("").decode('utf-8'),
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=_b('\010\001'), file=DESCRIPTOR)
OPTIONAL_PUBLIC_IMPORT_MESSAGE_EXTENSION_FIELD_NUMBER = 26
optional_public_import_message_extension = _descriptor.FieldDescriptor(
  name='optional_public_import_message_extension', full_name='protobuf_unittest.optional_public_import_message_extension', index=24,
  number=26, type=11, cpp_type=10, label=1,
  has_default_value=False, default_value=None,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
OPTIONAL_LAZY_MESSAGE_EXTENSION_FIELD_NUMBER = 27
optional_lazy_message_extension = _descriptor.FieldDescriptor(
  name='optional_lazy_message_extension', full_name='protobuf_unittest.optional_lazy_message_extension', index=25,
  number=27, type=11, cpp_type=10, label=1,
  has_default_value=False, default_value=None,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=_b('(\001'), file=DESCRIPTOR)
REPEATED_INT32_EXTENSION_FIELD_NUMBER = 31
repeated_int32_extension = _descriptor.FieldDescriptor(
  name='repeated_int32_extension', full_name='protobuf_unittest.repeated_int32_extension', index=26,
  number=31, type=5, cpp_type=1, label=3,
  has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
REPEATED_INT64_EXTENSION_FIELD_NUMBER = 32
repeated_int64_extension = _descriptor.FieldDescriptor(
  name='repeated_int64_extension', full_name='protobuf_unittest.repeated_int64_extension', index=27,
  number=32, type=3, cpp_type=2, label=3,
  has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
REPEATED_UINT32_EXTENSION_FIELD_NUMBER = 33
repeated_uint32_extension = _descriptor.FieldDescriptor(
  name='repeated_uint32_extension', full_name='protobuf_unittest.repeated_uint32_extension', index=28,
  number=33, type=13, cpp_type=3, label=3,
  has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
REPEATED_UINT64_EXTENSION_FIELD_NUMBER = 34
repeated_uint64_extension = _descriptor.FieldDescriptor(
  name='repeated_uint64_extension', full_name='protobuf_unittest.repeated_uint64_extension', index=29,
  number=34, type=4, cpp_type=4, label=3,
  has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
REPEATED_SINT32_EXTENSION_FIELD_NUMBER = 35
repeated_sint32_extension = _descriptor.FieldDescriptor(
  name='repeated_sint32_extension', full_name='protobuf_unittest.repeated_sint32_extension', index=30,
  number=35, type=17, cpp_type=1, label=3,
  has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
REPEATED_SINT64_EXTENSION_FIELD_NUMBER = 36
repeated_sint64_extension = _descriptor.FieldDescriptor(
  name='repeated_sint64_extension', full_name='protobuf_unittest.repeated_sint64_extension', index=31,
  number=36, type=18, cpp_type=2, label=3,
  has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
REPEATED_FIXED32_EXTENSION_FIELD_NUMBER = 37
repeated_fixed32_extension = _descriptor.FieldDescriptor(
  name='repeated_fixed32_extension', full_name='protobuf_unittest.repeated_fixed32_extension', index=32,
  number=37, type=7, cpp_type=3, label=3,
  has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
REPEATED_FIXED64_EXTENSION_FIELD_NUMBER = 38
repeated_fixed64_extension = _descriptor.FieldDescriptor(
  name='repeated_fixed64_extension', full_name='protobuf_unittest.repeated_fixed64_extension', index=33,
  number=38, type=6, cpp_type=4, label=3,
  has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
REPEATED_SFIXED32_EXTENSION_FIELD_NUMBER = 39
repeated_sfixed32_extension = _descriptor.FieldDescriptor(
  name='repeated_sfixed32_extension', full_name='protobuf_unittest.repeated_sfixed32_extension', index=34,
  number=39, type=15, cpp_type=1, label=3,
  has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
REPEATED_SFIXED64_EXTENSION_FIELD_NUMBER = 40
repeated_sfixed64_extension = _descriptor.FieldDescriptor(
  name='repeated_sfixed64_extension', full_name='protobuf_unittest.repeated_sfixed64_extension', index=35,
  number=40, type=16, cpp_type=2, label=3,
  has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
REPEATED_FLOAT_EXTENSION_FIELD_NUMBER = 41
repeated_float_extension = _descriptor.FieldDescriptor(
  name='repeated_float_extension', full_name='protobuf_unittest.repeated_float_extension', index=36,
  number=41, type=2, cpp_type=6, label=3,
  has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
REPEATED_DOUBLE_EXTENSION_FIELD_NUMBER = 42
repeated_double_extension = _descriptor.FieldDescriptor(
  name='repeated_double_extension', full_name='protobuf_unittest.repeated_double_extension', index=37,
  number=42, type=1, cpp_type=5, label=3,
  has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
REPEATED_BOOL_EXTENSION_FIELD_NUMBER = 43
repeated_bool_extension = _descriptor.FieldDescriptor(
  name='repeated_bool_extension', full_name='protobuf_unittest.repeated_bool_extension', index=38,
  number=43, type=8, cpp_type=7, label=3,
  has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
REPEATED_STRING_EXTENSION_FIELD_NUMBER = 44
repeated_string_extension = _descriptor.FieldDescriptor(
  name='repeated_string_extension', full_name='protobuf_unittest.repeated_string_extension', index=39,
  number=44, type=9, cpp_type=9, label=3,
  has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
REPEATED_BYTES_EXTENSION_FIELD_NUMBER = 45
repeated_bytes_extension = _descriptor.FieldDescriptor(
  name='repeated_bytes_extension', full_name='protobuf_unittest.repeated_bytes_extension', index=40,
  number=45, type=12, cpp_type=9, label=3,
  has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
REPEATEDGROUP_EXTENSION_FIELD_NUMBER = 46
repeatedgroup_extension = _descriptor.FieldDescriptor(
  name='repeatedgroup_extension', full_name='protobuf_unittest.repeatedgroup_extension', index=41,
  number=46, type=10, cpp_type=10, label=3,
  has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
REPEATED_NESTED_MESSAGE_EXTENSION_FIELD_NUMBER = 48
repeated_nested_message_extension = _descriptor.FieldDescriptor(
  name='repeated_nested_message_extension', full_name='protobuf_unittest.repeated_nested_message_extension', index=42,
  number=48, type=11, cpp_type=10, label=3,
  has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
REPEATED_FOREIGN_MESSAGE_EXTENSION_FIELD_NUMBER = 49
repeated_foreign_message_extension = _descriptor.FieldDescriptor(
  name='repeated_foreign_message_extension', full_name='protobuf_unittest.repeated_foreign_message_extension', index=43,
  number=49, type=11, cpp_type=10, label=3,
  has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
REPEATED_IMPORT_MESSAGE_EXTENSION_FIELD_NUMBER = 50
repeated_import_message_extension = _descriptor.FieldDescriptor(
  name='repeated_import_message_extension', full_name='protobuf_unittest.repeated_import_message_extension', index=44,
  number=50, type=11, cpp_type=10, label=3,
  has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
REPEATED_NESTED_ENUM_EXTENSION_FIELD_NUMBER = 51
repeated_nested_enum_extension = _descriptor.FieldDescriptor(
  name='repeated_nested_enum_extension', full_name='protobuf_unittest.repeated_nested_enum_extension', index=45,
  number=51, type=14, cpp_type=8, label=3,
  has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
REPEATED_FOREIGN_ENUM_EXTENSION_FIELD_NUMBER = 52
repeated_foreign_enum_extension = _descriptor.FieldDescriptor(
  name='repeated_foreign_enum_extension', full_name='protobuf_unittest.repeated_foreign_enum_extension', index=46,
  number=52, type=14, cpp_type=8, label=3,
  has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
REPEATED_IMPORT_ENUM_EXTENSION_FIELD_NUMBER = 53
repeated_import_enum_extension = _descriptor.FieldDescriptor(
  name='repeated_import_enum_extension', full_name='protobuf_unittest.repeated_import_enum_extension', index=47,
  number=53, type=14, cpp_type=8, label=3,
  has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
REPEATED_STRING_PIECE_EXTENSION_FIELD_NUMBER = 54
repeated_string_piece_extension = _descriptor.FieldDescriptor(
  name='repeated_string_piece_extension', full_name='protobuf_unittest.repeated_string_piece_extension', index=48,
  number=54, type=9, cpp_type=9, label=3,
  has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=_b('\010\002'), file=DESCRIPTOR)
REPEATED_CORD_EXTENSION_FIELD_NUMBER = 55
repeated_cord_extension = _descriptor.FieldDescriptor(
  name='repeated_cord_extension', full_name='protobuf_unittest.repeated_cord_extension', index=49,
  number=55, type=9, cpp_type=9, label=3,
  has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=_b('\010\001'), file=DESCRIPTOR)
REPEATED_LAZY_MESSAGE_EXTENSION_FIELD_NUMBER = 57
repeated_lazy_message_extension = _descriptor.FieldDescriptor(
  name='repeated_lazy_message_extension', full_name='protobuf_unittest.repeated_lazy_message_extension', index=50,
  number=57, type=11, cpp_type=10, label=3,
  has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=_b('(\001'), file=DESCRIPTOR)
DEFAULT_INT32_EXTENSION_FIELD_NUMBER = 61
default_int32_extension = _descriptor.FieldDescriptor(
  name='default_int32_extension', full_name='protobuf_unittest.default_int32_extension', index=51,
  number=61, type=5, cpp_type=1, label=1,
  has_default_value=True, default_value=41,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
DEFAULT_INT64_EXTENSION_FIELD_NUMBER = 62
default_int64_extension = _descriptor.FieldDescriptor(
  name='default_int64_extension', full_name='protobuf_unittest.default_int64_extension', index=52,
  number=62, type=3, cpp_type=2, label=1,
  has_default_value=True, default_value=42,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
DEFAULT_UINT32_EXTENSION_FIELD_NUMBER = 63
default_uint32_extension = _descriptor.FieldDescriptor(
  name='default_uint32_extension', full_name='protobuf_unittest.default_uint32_extension', index=53,
  number=63, type=13, cpp_type=3, label=1,
  has_default_value=True, default_value=43,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
DEFAULT_UINT64_EXTENSION_FIELD_NUMBER = 64
default_uint64_extension = _descriptor.FieldDescriptor(
  name='default_uint64_extension', full_name='protobuf_unittest.default_uint64_extension', index=54,
  number=64, type=4, cpp_type=4, label=1,
  has_default_value=True, default_value=44,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
DEFAULT_SINT32_EXTENSION_FIELD_NUMBER = 65
default_sint32_extension = _descriptor.FieldDescriptor(
  name='default_sint32_extension', full_name='protobuf_unittest.default_sint32_extension', index=55,
  number=65, type=17, cpp_type=1, label=1,
  has_default_value=True, default_value=-45,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
DEFAULT_SINT64_EXTENSION_FIELD_NUMBER = 66
default_sint64_extension = _descriptor.FieldDescriptor(
  name='default_sint64_extension', full_name='protobuf_unittest.default_sint64_extension', index=56,
  number=66, type=18, cpp_type=2, label=1,
  has_default_value=True, default_value=46,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
DEFAULT_FIXED32_EXTENSION_FIELD_NUMBER = 67
default_fixed32_extension = _descriptor.FieldDescriptor(
  name='default_fixed32_extension', full_name='protobuf_unittest.default_fixed32_extension', index=57,
  number=67, type=7, cpp_type=3, label=1,
  has_default_value=True, default_value=47,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
DEFAULT_FIXED64_EXTENSION_FIELD_NUMBER = 68
default_fixed64_extension = _descriptor.FieldDescriptor(
  name='default_fixed64_extension', full_name='protobuf_unittest.default_fixed64_extension', index=58,
  number=68, type=6, cpp_type=4, label=1,
  has_default_value=True, default_value=48,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
DEFAULT_SFIXED32_EXTENSION_FIELD_NUMBER = 69
default_sfixed32_extension = _descriptor.FieldDescriptor(
  name='default_sfixed32_extension', full_name='protobuf_unittest.default_sfixed32_extension', index=59,
  number=69, type=15, cpp_type=1, label=1,
  has_default_value=True, default_value=49,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
DEFAULT_SFIXED64_EXTENSION_FIELD_NUMBER = 70
default_sfixed64_extension = _descriptor.FieldDescriptor(
  name='default_sfixed64_extension', full_name='protobuf_unittest.default_sfixed64_extension', index=60,
  number=70, type=16, cpp_type=2, label=1,
  has_default_value=True, default_value=-50,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
DEFAULT_FLOAT_EXTENSION_FIELD_NUMBER = 71
default_float_extension = _descriptor.FieldDescriptor(
  name='default_float_extension', full_name='protobuf_unittest.default_float_extension', index=61,
  number=71, type=2, cpp_type=6, label=1,
  has_default_value=True, default_value=float(51.5),
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
DEFAULT_DOUBLE_EXTENSION_FIELD_NUMBER = 72
default_double_extension = _descriptor.FieldDescriptor(
  name='default_double_extension', full_name='protobuf_unittest.default_double_extension', index=62,
  number=72, type=1, cpp_type=5, label=1,
  has_default_value=True, default_value=float(52000),
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
DEFAULT_BOOL_EXTENSION_FIELD_NUMBER = 73
default_bool_extension = _descriptor.FieldDescriptor(
  name='default_bool_extension', full_name='protobuf_unittest.default_bool_extension', index=63,
  number=73, type=8, cpp_type=7, label=1,
  has_default_value=True, default_value=True,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  serialized_options=None, file=DESCRIPTOR)
DEFAULT_STRING_EXTENSION_FIELD_NUMBER = 74
default_string_extension = _descriptor.FieldDescriptor(
  name='default_string_extension', full_name='protobuf_unittest.default_string_extension', index=64,
  number=74, type=9, cpp_type=9, label=1,
  has_default_value=True,
default_value=_b("hello").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, serialized_options=None, file=DESCRIPTOR) DEFAULT_BYTES_EXTENSION_FIELD_NUMBER = 75 default_bytes_extension = _descriptor.FieldDescriptor( name='default_bytes_extension', full_name='protobuf_unittest.default_bytes_extension', index=65, number=75, type=12, cpp_type=9, label=1, has_default_value=True, default_value=_b("world"), message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, serialized_options=None, file=DESCRIPTOR) DEFAULT_NESTED_ENUM_EXTENSION_FIELD_NUMBER = 81 default_nested_enum_extension = _descriptor.FieldDescriptor( name='default_nested_enum_extension', full_name='protobuf_unittest.default_nested_enum_extension', index=66, number=81, type=14, cpp_type=8, label=1, has_default_value=True, default_value=2, message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, serialized_options=None, file=DESCRIPTOR) DEFAULT_FOREIGN_ENUM_EXTENSION_FIELD_NUMBER = 82 default_foreign_enum_extension = _descriptor.FieldDescriptor( name='default_foreign_enum_extension', full_name='protobuf_unittest.default_foreign_enum_extension', index=67, number=82, type=14, cpp_type=8, label=1, has_default_value=True, default_value=5, message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, serialized_options=None, file=DESCRIPTOR) DEFAULT_IMPORT_ENUM_EXTENSION_FIELD_NUMBER = 83 default_import_enum_extension = _descriptor.FieldDescriptor( name='default_import_enum_extension', full_name='protobuf_unittest.default_import_enum_extension', index=68, number=83, type=14, cpp_type=8, label=1, has_default_value=True, default_value=8, message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, serialized_options=None, file=DESCRIPTOR) DEFAULT_STRING_PIECE_EXTENSION_FIELD_NUMBER = 84 default_string_piece_extension = _descriptor.FieldDescriptor( name='default_string_piece_extension', full_name='protobuf_unittest.default_string_piece_extension', index=69, number=84, type=9, cpp_type=9, label=1, has_default_value=True, default_value=_b("abc").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, serialized_options=_b('\010\002'), file=DESCRIPTOR) DEFAULT_CORD_EXTENSION_FIELD_NUMBER = 85 default_cord_extension = _descriptor.FieldDescriptor( name='default_cord_extension', full_name='protobuf_unittest.default_cord_extension', index=70, number=85, type=9, cpp_type=9, label=1, has_default_value=True, default_value=_b("123").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, serialized_options=_b('\010\001'), file=DESCRIPTOR) ONEOF_UINT32_EXTENSION_FIELD_NUMBER = 111 oneof_uint32_extension = _descriptor.FieldDescriptor( name='oneof_uint32_extension', full_name='protobuf_unittest.oneof_uint32_extension', index=71, number=111, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, serialized_options=None, file=DESCRIPTOR) ONEOF_NESTED_MESSAGE_EXTENSION_FIELD_NUMBER = 112 oneof_nested_message_extension = _descriptor.FieldDescriptor( name='oneof_nested_message_extension', full_name='protobuf_unittest.oneof_nested_message_extension', index=72, number=112, type=11, cpp_type=10, label=1, 
has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, serialized_options=None, file=DESCRIPTOR) ONEOF_STRING_EXTENSION_FIELD_NUMBER = 113 oneof_string_extension = _descriptor.FieldDescriptor( name='oneof_string_extension', full_name='protobuf_unittest.oneof_string_extension', index=73, number=113, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, serialized_options=None, file=DESCRIPTOR) ONEOF_BYTES_EXTENSION_FIELD_NUMBER = 114 oneof_bytes_extension = _descriptor.FieldDescriptor( name='oneof_bytes_extension', full_name='protobuf_unittest.oneof_bytes_extension', index=74, number=114, type=12, cpp_type=9, label=1, has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, serialized_options=None, file=DESCRIPTOR) MY_EXTENSION_STRING_FIELD_NUMBER = 50 my_extension_string = _descriptor.FieldDescriptor( name='my_extension_string', full_name='protobuf_unittest.my_extension_string', index=75, number=50, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, serialized_options=None, file=DESCRIPTOR) MY_EXTENSION_INT_FIELD_NUMBER = 5 my_extension_int = _descriptor.FieldDescriptor( name='my_extension_int', full_name='protobuf_unittest.my_extension_int', index=76, number=5, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, serialized_options=None, file=DESCRIPTOR) PACKED_INT32_EXTENSION_FIELD_NUMBER = 90 packed_int32_extension = _descriptor.FieldDescriptor( name='packed_int32_extension', full_name='protobuf_unittest.packed_int32_extension', index=77, number=90, type=5, cpp_type=1, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR) PACKED_INT64_EXTENSION_FIELD_NUMBER = 91 packed_int64_extension = _descriptor.FieldDescriptor( name='packed_int64_extension', full_name='protobuf_unittest.packed_int64_extension', index=78, number=91, type=3, cpp_type=2, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR) PACKED_UINT32_EXTENSION_FIELD_NUMBER = 92 packed_uint32_extension = _descriptor.FieldDescriptor( name='packed_uint32_extension', full_name='protobuf_unittest.packed_uint32_extension', index=79, number=92, type=13, cpp_type=3, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR) PACKED_UINT64_EXTENSION_FIELD_NUMBER = 93 packed_uint64_extension = _descriptor.FieldDescriptor( name='packed_uint64_extension', full_name='protobuf_unittest.packed_uint64_extension', index=80, number=93, type=4, cpp_type=4, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR) 
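# Note: the serialized_options blobs on the descriptors above are wire-encoded
# google.protobuf.FieldOptions messages: _b('\010\002') sets ctype=STRING_PIECE,
# _b('\010\001') sets ctype=CORD, _b('(\001') sets lazy=True, and _b('\020\001')
# sets packed=True.  A minimal decoding sketch (illustrative only, not part of
# the generated module; descriptor_pb2 ships with the protobuf runtime):
#
#   from google.protobuf import descriptor_pb2
#   opts = descriptor_pb2.FieldOptions.FromString(b'\020\001')
#   assert opts.packed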
PACKED_SINT32_EXTENSION_FIELD_NUMBER = 94
packed_sint32_extension = _descriptor.FieldDescriptor(
  name='packed_sint32_extension', full_name='protobuf_unittest.packed_sint32_extension', index=81,
  number=94, type=17, cpp_type=1, label=3, has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None, is_extension=True,
  extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR)
PACKED_SINT64_EXTENSION_FIELD_NUMBER = 95
packed_sint64_extension = _descriptor.FieldDescriptor(
  name='packed_sint64_extension', full_name='protobuf_unittest.packed_sint64_extension', index=82,
  number=95, type=18, cpp_type=2, label=3, has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None, is_extension=True,
  extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR)
PACKED_FIXED32_EXTENSION_FIELD_NUMBER = 96
packed_fixed32_extension = _descriptor.FieldDescriptor(
  name='packed_fixed32_extension', full_name='protobuf_unittest.packed_fixed32_extension', index=83,
  number=96, type=7, cpp_type=3, label=3, has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None, is_extension=True,
  extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR)
PACKED_FIXED64_EXTENSION_FIELD_NUMBER = 97
packed_fixed64_extension = _descriptor.FieldDescriptor(
  name='packed_fixed64_extension', full_name='protobuf_unittest.packed_fixed64_extension', index=84,
  number=97, type=6, cpp_type=4, label=3, has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None, is_extension=True,
  extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR)
PACKED_SFIXED32_EXTENSION_FIELD_NUMBER = 98
packed_sfixed32_extension = _descriptor.FieldDescriptor(
  name='packed_sfixed32_extension', full_name='protobuf_unittest.packed_sfixed32_extension', index=85,
  number=98, type=15, cpp_type=1, label=3, has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None, is_extension=True,
  extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR)
PACKED_SFIXED64_EXTENSION_FIELD_NUMBER = 99
packed_sfixed64_extension = _descriptor.FieldDescriptor(
  name='packed_sfixed64_extension', full_name='protobuf_unittest.packed_sfixed64_extension', index=86,
  number=99, type=16, cpp_type=2, label=3, has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None, is_extension=True,
  extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR)
PACKED_FLOAT_EXTENSION_FIELD_NUMBER = 100
packed_float_extension = _descriptor.FieldDescriptor(
  name='packed_float_extension', full_name='protobuf_unittest.packed_float_extension', index=87,
  number=100, type=2, cpp_type=6, label=3, has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None, is_extension=True,
  extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR)
PACKED_DOUBLE_EXTENSION_FIELD_NUMBER = 101
packed_double_extension = _descriptor.FieldDescriptor(
  name='packed_double_extension', full_name='protobuf_unittest.packed_double_extension', index=88,
  number=101, type=1, cpp_type=5, label=3, has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None, is_extension=True,
  extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR)
PACKED_BOOL_EXTENSION_FIELD_NUMBER = 102
packed_bool_extension = _descriptor.FieldDescriptor(
  name='packed_bool_extension', full_name='protobuf_unittest.packed_bool_extension', index=89,
  number=102, type=8, cpp_type=7, label=3, has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None, is_extension=True,
  extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR)
PACKED_ENUM_EXTENSION_FIELD_NUMBER = 103
packed_enum_extension = _descriptor.FieldDescriptor(
  name='packed_enum_extension', full_name='protobuf_unittest.packed_enum_extension', index=90,
  number=103, type=14, cpp_type=8, label=3, has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None, is_extension=True,
  extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR)
UNPACKED_INT32_EXTENSION_FIELD_NUMBER = 90
unpacked_int32_extension = _descriptor.FieldDescriptor(
  name='unpacked_int32_extension', full_name='protobuf_unittest.unpacked_int32_extension', index=91,
  number=90, type=5, cpp_type=1, label=3, has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None, is_extension=True,
  extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR)
UNPACKED_INT64_EXTENSION_FIELD_NUMBER = 91
unpacked_int64_extension = _descriptor.FieldDescriptor(
  name='unpacked_int64_extension', full_name='protobuf_unittest.unpacked_int64_extension', index=92,
  number=91, type=3, cpp_type=2, label=3, has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None, is_extension=True,
  extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR)
UNPACKED_UINT32_EXTENSION_FIELD_NUMBER = 92
unpacked_uint32_extension = _descriptor.FieldDescriptor(
  name='unpacked_uint32_extension', full_name='protobuf_unittest.unpacked_uint32_extension', index=93,
  number=92, type=13, cpp_type=3, label=3, has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None, is_extension=True,
  extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR)
UNPACKED_UINT64_EXTENSION_FIELD_NUMBER = 93
unpacked_uint64_extension = _descriptor.FieldDescriptor(
  name='unpacked_uint64_extension', full_name='protobuf_unittest.unpacked_uint64_extension', index=94,
  number=93, type=4, cpp_type=4, label=3, has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None, is_extension=True,
  extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR)
UNPACKED_SINT32_EXTENSION_FIELD_NUMBER = 94
unpacked_sint32_extension = _descriptor.FieldDescriptor(
  name='unpacked_sint32_extension', full_name='protobuf_unittest.unpacked_sint32_extension', index=95,
  number=94, type=17, cpp_type=1, label=3, has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None, is_extension=True,
  extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR)
UNPACKED_SINT64_EXTENSION_FIELD_NUMBER = 95
unpacked_sint64_extension = _descriptor.FieldDescriptor(
  name='unpacked_sint64_extension', full_name='protobuf_unittest.unpacked_sint64_extension', index=96,
  number=95, type=18, cpp_type=2, label=3, has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None, is_extension=True,
  extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR)
UNPACKED_FIXED32_EXTENSION_FIELD_NUMBER = 96
unpacked_fixed32_extension = _descriptor.FieldDescriptor(
  name='unpacked_fixed32_extension', full_name='protobuf_unittest.unpacked_fixed32_extension', index=97,
  number=96, type=7, cpp_type=3, label=3, has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None, is_extension=True,
  extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR)
UNPACKED_FIXED64_EXTENSION_FIELD_NUMBER = 97
unpacked_fixed64_extension = _descriptor.FieldDescriptor(
  name='unpacked_fixed64_extension', full_name='protobuf_unittest.unpacked_fixed64_extension', index=98,
  number=97, type=6, cpp_type=4, label=3, has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None, is_extension=True,
  extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR)
UNPACKED_SFIXED32_EXTENSION_FIELD_NUMBER = 98
unpacked_sfixed32_extension = _descriptor.FieldDescriptor(
  name='unpacked_sfixed32_extension', full_name='protobuf_unittest.unpacked_sfixed32_extension', index=99,
  number=98, type=15, cpp_type=1, label=3, has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None, is_extension=True,
  extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR)
UNPACKED_SFIXED64_EXTENSION_FIELD_NUMBER = 99
unpacked_sfixed64_extension = _descriptor.FieldDescriptor(
  name='unpacked_sfixed64_extension', full_name='protobuf_unittest.unpacked_sfixed64_extension', index=100,
  number=99, type=16, cpp_type=2, label=3, has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None, is_extension=True,
  extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR)
UNPACKED_FLOAT_EXTENSION_FIELD_NUMBER = 100
unpacked_float_extension = _descriptor.FieldDescriptor(
  name='unpacked_float_extension', full_name='protobuf_unittest.unpacked_float_extension', index=101,
  number=100, type=2, cpp_type=6, label=3, has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None, is_extension=True,
  extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR)
UNPACKED_DOUBLE_EXTENSION_FIELD_NUMBER = 101
unpacked_double_extension = _descriptor.FieldDescriptor(
  name='unpacked_double_extension', full_name='protobuf_unittest.unpacked_double_extension', index=102,
  number=101, type=1, cpp_type=5, label=3, has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None, is_extension=True,
  extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR)
UNPACKED_BOOL_EXTENSION_FIELD_NUMBER = 102
unpacked_bool_extension = _descriptor.FieldDescriptor(
  name='unpacked_bool_extension', full_name='protobuf_unittest.unpacked_bool_extension', index=103,
  number=102, type=8, cpp_type=7, label=3, has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None, is_extension=True,
  extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR)
UNPACKED_ENUM_EXTENSION_FIELD_NUMBER = 103
unpacked_enum_extension = _descriptor.FieldDescriptor(
  name='unpacked_enum_extension', full_name='protobuf_unittest.unpacked_enum_extension', index=104,
  number=103, type=14, cpp_type=8, label=3, has_default_value=False, default_value=[],
  message_type=None, enum_type=None, containing_type=None, is_extension=True,
  extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR)
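# packed_*_extension and unpacked_*_extension deliberately reuse field numbers
# 90-103; the two sets can coexist because each extends a different message (the
# extendee is attached further down in this module, where containing_type is
# filled in).  The only declared difference is the packed option, e.g.
# (illustrative sketch):
#
#   assert packed_int32_extension.GetOptions().packed
#   assert not unpacked_int32_extension.GetOptions().packed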
TEST_ALL_TYPES_FIELD_NUMBER = 536860000
test_all_types = _descriptor.FieldDescriptor(
  name='test_all_types', full_name='protobuf_unittest.test_all_types', index=105,
  number=536860000, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None,
  message_type=None, enum_type=None, containing_type=None, is_extension=True,
  extension_scope=None, serialized_options=None, file=DESCRIPTOR)
TEST_EXTENSION_INSIDE_TABLE_EXTENSION_FIELD_NUMBER = 5
test_extension_inside_table_extension = _descriptor.FieldDescriptor(
  name='test_extension_inside_table_extension', full_name='protobuf_unittest.test_extension_inside_table_extension', index=106,
  number=5, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0,
  message_type=None, enum_type=None, containing_type=None, is_extension=True,
  extension_scope=None, serialized_options=None, file=DESCRIPTOR)

_TESTALLTYPES_NESTEDENUM = _descriptor.EnumDescriptor(
  name='NestedEnum',
  full_name='protobuf_unittest.TestAllTypes.NestedEnum',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='FOO', index=0, number=1, serialized_options=None, type=None),
    _descriptor.EnumValueDescriptor(
      name='BAR', index=1, number=2, serialized_options=None, type=None),
    _descriptor.EnumValueDescriptor(
      name='BAZ', index=2, number=3, serialized_options=None, type=None),
    _descriptor.EnumValueDescriptor(
      name='NEG', index=3, number=-1, serialized_options=None, type=None),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=3202,
  serialized_end=3259,
)
_sym_db.RegisterEnumDescriptor(_TESTALLTYPES_NESTEDENUM)

_TESTONEOF2_NESTEDENUM = _descriptor.EnumDescriptor(
  name='NestedEnum',
  full_name='protobuf_unittest.TestOneof2.NestedEnum',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='FOO', index=0, number=1, serialized_options=None, type=None),
    _descriptor.EnumValueDescriptor(
      name='BAR', index=1, number=2, serialized_options=None, type=None),
    _descriptor.EnumValueDescriptor(
      name='BAZ', index=2, number=3, serialized_options=None, type=None),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=3202,
  serialized_end=3241,
)
_sym_db.RegisterEnumDescriptor(_TESTONEOF2_NESTEDENUM)

_TESTDYNAMICEXTENSIONS_DYNAMICENUMTYPE = _descriptor.EnumDescriptor(
  name='DynamicEnumType',
  full_name='protobuf_unittest.TestDynamicExtensions.DynamicEnumType',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='DYNAMIC_FOO', index=0, number=2200, serialized_options=None, type=None),
    _descriptor.EnumValueDescriptor(
      name='DYNAMIC_BAR', index=1, number=2201, serialized_options=None, type=None),
    _descriptor.EnumValueDescriptor(
      name='DYNAMIC_BAZ', index=2, number=2202, serialized_options=None, type=None),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=12499,
  serialized_end=12570,
)
_sym_db.RegisterEnumDescriptor(_TESTDYNAMICEXTENSIONS_DYNAMICENUMTYPE)
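# Once registered with the symbol database, these EnumDescriptors support
# symbolic lookup in both directions, e.g. (illustrative sketch):
#
#   foo = _TESTALLTYPES_NESTEDENUM.values_by_name['FOO']
#   assert foo.number == 1
#   assert _TESTALLTYPES_NESTEDENUM.values_by_number[-1].name == 'NEG'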
_TESTALLTYPES_NESTEDMESSAGE = _descriptor.Descriptor(
  name='NestedMessage',
  full_name='protobuf_unittest.TestAllTypes.NestedMessage',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='bb', full_name='protobuf_unittest.TestAllTypes.NestedMessage.bb', index=0,
      number=1, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[],
  nested_types=[],
  enum_types=[],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[],
  serialized_start=3117,
  serialized_end=3144,
)

_TESTALLTYPES_OPTIONALGROUP = _descriptor.Descriptor(
  name='OptionalGroup',
  full_name='protobuf_unittest.TestAllTypes.OptionalGroup',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='a', full_name='protobuf_unittest.TestAllTypes.OptionalGroup.a', index=0,
      number=17, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[],
  nested_types=[],
  enum_types=[],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[],
  serialized_start=3146,
  serialized_end=3172,
)

_TESTALLTYPES_REPEATEDGROUP = _descriptor.Descriptor(
  name='RepeatedGroup',
  full_name='protobuf_unittest.TestAllTypes.RepeatedGroup',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='a', full_name='protobuf_unittest.TestAllTypes.RepeatedGroup.a', index=0,
      number=47, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[],
  nested_types=[],
  enum_types=[],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[],
  serialized_start=3174,
  serialized_end=3200,
)

_TESTALLTYPES = _descriptor.Descriptor(
  name='TestAllTypes',
  full_name='protobuf_unittest.TestAllTypes',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='optional_int32', full_name='protobuf_unittest.TestAllTypes.optional_int32', index=0,
      number=1, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='optional_int64', full_name='protobuf_unittest.TestAllTypes.optional_int64', index=1,
      number=2, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='optional_uint32', full_name='protobuf_unittest.TestAllTypes.optional_uint32', index=2,
      number=3, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='optional_uint64', full_name='protobuf_unittest.TestAllTypes.optional_uint64', index=3,
      number=4, type=4, cpp_type=4, label=1, has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='optional_sint32', full_name='protobuf_unittest.TestAllTypes.optional_sint32', index=4,
      number=5, type=17, cpp_type=1, label=1, has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='optional_sint64', full_name='protobuf_unittest.TestAllTypes.optional_sint64', index=5,
      number=6, type=18, cpp_type=2, label=1, has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='optional_fixed32', full_name='protobuf_unittest.TestAllTypes.optional_fixed32', index=6,
      number=7, type=7, cpp_type=3, label=1, has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='optional_fixed64', full_name='protobuf_unittest.TestAllTypes.optional_fixed64', index=7,
      number=8, type=6, cpp_type=4, label=1, has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='optional_sfixed32', full_name='protobuf_unittest.TestAllTypes.optional_sfixed32', index=8,
      number=9, type=15, cpp_type=1, label=1, has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='optional_sfixed64', full_name='protobuf_unittest.TestAllTypes.optional_sfixed64', index=9,
      number=10, type=16, cpp_type=2, label=1, has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='optional_float', full_name='protobuf_unittest.TestAllTypes.optional_float', index=10,
      number=11, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='optional_double', full_name='protobuf_unittest.TestAllTypes.optional_double', index=11,
      number=12, type=1, cpp_type=5, label=1, has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='optional_bool', full_name='protobuf_unittest.TestAllTypes.optional_bool', index=12,
      number=13, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='optional_string', full_name='protobuf_unittest.TestAllTypes.optional_string', index=13,
      number=14, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='optional_bytes', full_name='protobuf_unittest.TestAllTypes.optional_bytes', index=14,
      number=15, type=12, cpp_type=9, label=1, has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='optionalgroup', full_name='protobuf_unittest.TestAllTypes.optionalgroup', index=15,
      number=16, type=10, cpp_type=10, label=1, has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='optional_nested_message', full_name='protobuf_unittest.TestAllTypes.optional_nested_message', index=16,
      number=18, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='optional_foreign_message', full_name='protobuf_unittest.TestAllTypes.optional_foreign_message', index=17,
      number=19, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='optional_import_message', full_name='protobuf_unittest.TestAllTypes.optional_import_message', index=18,
      number=20, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='optional_nested_enum', full_name='protobuf_unittest.TestAllTypes.optional_nested_enum', index=19,
      number=21, type=14, cpp_type=8, label=1, has_default_value=False, default_value=1,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='optional_foreign_enum', full_name='protobuf_unittest.TestAllTypes.optional_foreign_enum', index=20,
      number=22, type=14, cpp_type=8, label=1, has_default_value=False, default_value=4,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='optional_import_enum', full_name='protobuf_unittest.TestAllTypes.optional_import_enum', index=21,
      number=23, type=14, cpp_type=8, label=1, has_default_value=False, default_value=7,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='optional_string_piece', full_name='protobuf_unittest.TestAllTypes.optional_string_piece', index=22,
      number=24, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=_b('\010\002'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='optional_cord', full_name='protobuf_unittest.TestAllTypes.optional_cord', index=23,
      number=25, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=_b('\010\001'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='optional_public_import_message', full_name='protobuf_unittest.TestAllTypes.optional_public_import_message', index=24,
      number=26, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='optional_lazy_message', full_name='protobuf_unittest.TestAllTypes.optional_lazy_message', index=25,
      number=27, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=_b('(\001'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='repeated_int32', full_name='protobuf_unittest.TestAllTypes.repeated_int32', index=26,
      number=31, type=5, cpp_type=1, label=3, has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='repeated_int64', full_name='protobuf_unittest.TestAllTypes.repeated_int64', index=27,
      number=32, type=3, cpp_type=2, label=3, has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='repeated_uint32', full_name='protobuf_unittest.TestAllTypes.repeated_uint32', index=28,
      number=33, type=13, cpp_type=3, label=3, has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='repeated_uint64', full_name='protobuf_unittest.TestAllTypes.repeated_uint64', index=29,
      number=34, type=4, cpp_type=4, label=3, has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='repeated_sint32', full_name='protobuf_unittest.TestAllTypes.repeated_sint32', index=30,
      number=35, type=17, cpp_type=1, label=3, has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='repeated_sint64', full_name='protobuf_unittest.TestAllTypes.repeated_sint64', index=31,
      number=36, type=18, cpp_type=2, label=3, has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='repeated_fixed32', full_name='protobuf_unittest.TestAllTypes.repeated_fixed32', index=32,
      number=37, type=7, cpp_type=3, label=3, has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='repeated_fixed64', full_name='protobuf_unittest.TestAllTypes.repeated_fixed64', index=33,
      number=38, type=6, cpp_type=4, label=3, has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='repeated_sfixed32', full_name='protobuf_unittest.TestAllTypes.repeated_sfixed32', index=34,
      number=39, type=15, cpp_type=1, label=3, has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='repeated_sfixed64', full_name='protobuf_unittest.TestAllTypes.repeated_sfixed64', index=35,
      number=40, type=16, cpp_type=2, label=3, has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='repeated_float', full_name='protobuf_unittest.TestAllTypes.repeated_float', index=36,
      number=41, type=2, cpp_type=6, label=3, has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='repeated_double', full_name='protobuf_unittest.TestAllTypes.repeated_double', index=37,
      number=42, type=1, cpp_type=5, label=3, has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='repeated_bool', full_name='protobuf_unittest.TestAllTypes.repeated_bool', index=38,
      number=43, type=8, cpp_type=7, label=3, has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='repeated_string', full_name='protobuf_unittest.TestAllTypes.repeated_string', index=39,
      number=44, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='repeated_bytes', full_name='protobuf_unittest.TestAllTypes.repeated_bytes', index=40,
      number=45, type=12, cpp_type=9, label=3, has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='repeatedgroup', full_name='protobuf_unittest.TestAllTypes.repeatedgroup', index=41,
      number=46, type=10, cpp_type=10, label=3, has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='repeated_nested_message', full_name='protobuf_unittest.TestAllTypes.repeated_nested_message', index=42,
      number=48, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='repeated_foreign_message', full_name='protobuf_unittest.TestAllTypes.repeated_foreign_message', index=43,
      number=49, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='repeated_import_message', full_name='protobuf_unittest.TestAllTypes.repeated_import_message', index=44,
      number=50, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='repeated_nested_enum', full_name='protobuf_unittest.TestAllTypes.repeated_nested_enum', index=45,
      number=51, type=14, cpp_type=8, label=3, has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='repeated_foreign_enum', full_name='protobuf_unittest.TestAllTypes.repeated_foreign_enum', index=46,
      number=52, type=14, cpp_type=8, label=3, has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='repeated_import_enum', full_name='protobuf_unittest.TestAllTypes.repeated_import_enum', index=47,
      number=53, type=14, cpp_type=8, label=3, has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='repeated_string_piece', full_name='protobuf_unittest.TestAllTypes.repeated_string_piece', index=48,
      number=54, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=_b('\010\002'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='repeated_cord', full_name='protobuf_unittest.TestAllTypes.repeated_cord', index=49,
      number=55, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=_b('\010\001'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='repeated_lazy_message', full_name='protobuf_unittest.TestAllTypes.repeated_lazy_message', index=50,
      number=57, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=_b('(\001'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='default_int32', full_name='protobuf_unittest.TestAllTypes.default_int32', index=51,
      number=61, type=5, cpp_type=1, label=1, has_default_value=True, default_value=41,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='default_int64', full_name='protobuf_unittest.TestAllTypes.default_int64', index=52,
      number=62, type=3, cpp_type=2, label=1, has_default_value=True, default_value=42,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='default_uint32', full_name='protobuf_unittest.TestAllTypes.default_uint32', index=53,
      number=63, type=13, cpp_type=3, label=1, has_default_value=True, default_value=43,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='default_uint64', full_name='protobuf_unittest.TestAllTypes.default_uint64', index=54,
      number=64, type=4, cpp_type=4, label=1, has_default_value=True, default_value=44,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='default_sint32', full_name='protobuf_unittest.TestAllTypes.default_sint32', index=55,
      number=65, type=17, cpp_type=1, label=1, has_default_value=True, default_value=-45,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='default_sint64', full_name='protobuf_unittest.TestAllTypes.default_sint64', index=56,
      number=66, type=18, cpp_type=2, label=1, has_default_value=True, default_value=46,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='default_fixed32', full_name='protobuf_unittest.TestAllTypes.default_fixed32', index=57,
      number=67, type=7, cpp_type=3, label=1, has_default_value=True, default_value=47,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='default_fixed64', full_name='protobuf_unittest.TestAllTypes.default_fixed64', index=58,
      number=68, type=6, cpp_type=4, label=1, has_default_value=True, default_value=48,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='default_sfixed32', full_name='protobuf_unittest.TestAllTypes.default_sfixed32', index=59,
      number=69, type=15, cpp_type=1, label=1, has_default_value=True, default_value=49,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='default_sfixed64', full_name='protobuf_unittest.TestAllTypes.default_sfixed64', index=60,
      number=70, type=16, cpp_type=2, label=1, has_default_value=True, default_value=-50,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='default_float', full_name='protobuf_unittest.TestAllTypes.default_float', index=61,
      number=71, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(51.5),
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='default_double', full_name='protobuf_unittest.TestAllTypes.default_double', index=62,
      number=72, type=1, cpp_type=5, label=1, has_default_value=True, default_value=float(52000),
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='default_bool', full_name='protobuf_unittest.TestAllTypes.default_bool', index=63,
      number=73, type=8, cpp_type=7, label=1, has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='default_string', full_name='protobuf_unittest.TestAllTypes.default_string', index=64,
      number=74, type=9, cpp_type=9, label=1, has_default_value=True, default_value=_b("hello").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='default_bytes', full_name='protobuf_unittest.TestAllTypes.default_bytes', index=65,
      number=75, type=12, cpp_type=9, label=1, has_default_value=True, default_value=_b("world"),
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='default_nested_enum', full_name='protobuf_unittest.TestAllTypes.default_nested_enum', index=66,
      number=81, type=14, cpp_type=8, label=1, has_default_value=True, default_value=2,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='default_foreign_enum', full_name='protobuf_unittest.TestAllTypes.default_foreign_enum', index=67,
      number=82, type=14, cpp_type=8, label=1, has_default_value=True, default_value=5,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='default_import_enum', full_name='protobuf_unittest.TestAllTypes.default_import_enum', index=68,
      number=83, type=14, cpp_type=8, label=1, has_default_value=True, default_value=8,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='default_string_piece', full_name='protobuf_unittest.TestAllTypes.default_string_piece', index=69,
      number=84, type=9, cpp_type=9, label=1, has_default_value=True, default_value=_b("abc").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=_b('\010\002'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='default_cord', full_name='protobuf_unittest.TestAllTypes.default_cord', index=70,
      number=85, type=9, cpp_type=9, label=1, has_default_value=True, default_value=_b("123").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=_b('\010\001'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='oneof_uint32', full_name='protobuf_unittest.TestAllTypes.oneof_uint32', index=71,
      number=111, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='oneof_nested_message', full_name='protobuf_unittest.TestAllTypes.oneof_nested_message', index=72,
      number=112, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='oneof_string', full_name='protobuf_unittest.TestAllTypes.oneof_string', index=73,
      number=113, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='oneof_bytes', full_name='protobuf_unittest.TestAllTypes.oneof_bytes', index=74,
      number=114, type=12, cpp_type=9, label=1, has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[],
  nested_types=[_TESTALLTYPES_NESTEDMESSAGE, _TESTALLTYPES_OPTIONALGROUP, _TESTALLTYPES_REPEATEDGROUP, ],
  enum_types=[
    _TESTALLTYPES_NESTEDENUM,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='oneof_field', full_name='protobuf_unittest.TestAllTypes.oneof_field',
      index=0, containing_type=None, fields=[]),
  ],
  serialized_start=93,
  serialized_end=3274,
)
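# The last four fields (oneof_uint32 .. oneof_bytes) belong to the 'oneof_field'
# oneof declared in the oneofs list above; the OneofDescriptor starts with an
# empty fields list and is cross-linked later in this module.  Once linked it
# can be inspected, e.g. (illustrative sketch):
#
#   oneof = _TESTALLTYPES.oneofs_by_name['oneof_field']
#   assert [f.name for f in oneof.fields] == [
#       'oneof_uint32', 'oneof_nested_message', 'oneof_string', 'oneof_bytes']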
_NESTEDTESTALLTYPES = _descriptor.Descriptor(
  name='NestedTestAllTypes',
  full_name='protobuf_unittest.NestedTestAllTypes',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='child', full_name='protobuf_unittest.NestedTestAllTypes.child', index=0,
      number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='payload', full_name='protobuf_unittest.NestedTestAllTypes.payload', index=1,
      number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='repeated_child', full_name='protobuf_unittest.NestedTestAllTypes.repeated_child', index=2,
      number=3, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[],
  nested_types=[],
  enum_types=[],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[],
  serialized_start=3277,
  serialized_end=3464,
)

_TESTDEPRECATEDFIELDS = _descriptor.Descriptor(
  name='TestDeprecatedFields',
  full_name='protobuf_unittest.TestDeprecatedFields',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='deprecated_int32', full_name='protobuf_unittest.TestDeprecatedFields.deprecated_int32', index=0,
      number=1, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=_b('\030\001'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='deprecated_int32_in_oneof', full_name='protobuf_unittest.TestDeprecatedFields.deprecated_int32_in_oneof', index=1,
      number=2, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None, is_extension=False,
      extension_scope=None, serialized_options=_b('\030\001'), file=DESCRIPTOR),
  ],
  extensions=[],
  nested_types=[],
  enum_types=[],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='oneof_fields', full_name='protobuf_unittest.TestDeprecatedFields.oneof_fields',
      index=0, containing_type=None, fields=[]),
  ],
  serialized_start=3466,
  serialized_end=3575,
)

_TESTDEPRECATEDMESSAGE = _descriptor.Descriptor(
  name='TestDeprecatedMessage',
  full_name='protobuf_unittest.TestDeprecatedMessage',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[],
  extensions=[],
  nested_types=[],
  enum_types=[],
  serialized_options=_b('\030\001'),
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[],
  serialized_start=3577,
  serialized_end=3604,
)
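# _b('\030\001') on the TestDeprecatedFields fields and on TestDeprecatedMessage
# is a wire-encoded options message with field 3 (deprecated) set to True, e.g.
# (illustrative sketch):
#
#   assert _TESTDEPRECATEDMESSAGE.GetOptions().deprecated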
full_name='protobuf_unittest.ForeignMessage.c', index=0, number=1, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='d', full_name='protobuf_unittest.ForeignMessage.d', index=1, number=2, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3606, serialized_end=3644, ) _TESTRESERVEDFIELDS = _descriptor.Descriptor( name='TestReservedFields', full_name='protobuf_unittest.TestReservedFields', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3646, serialized_end=3694, ) _TESTALLEXTENSIONS = _descriptor.Descriptor( name='TestAllExtensions', full_name='protobuf_unittest.TestAllExtensions', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=True, syntax='proto2', extension_ranges=[(1, 536870912), ], oneofs=[ ], serialized_start=3696, serialized_end=3725, ) _OPTIONALGROUP_EXTENSION = _descriptor.Descriptor( name='OptionalGroup_extension', full_name='protobuf_unittest.OptionalGroup_extension', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='a', full_name='protobuf_unittest.OptionalGroup_extension.a', index=0, number=17, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3727, serialized_end=3763, ) _REPEATEDGROUP_EXTENSION = _descriptor.Descriptor( name='RepeatedGroup_extension', full_name='protobuf_unittest.RepeatedGroup_extension', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='a', full_name='protobuf_unittest.RepeatedGroup_extension.a', index=0, number=47, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3765, serialized_end=3801, ) _TESTGROUP_OPTIONALGROUP = _descriptor.Descriptor( name='OptionalGroup', full_name='protobuf_unittest.TestGroup.OptionalGroup', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='a', full_name='protobuf_unittest.TestGroup.OptionalGroup.a', index=0, number=17, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], 
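# Note: extension_ranges entries are half-open (start, end) pairs of field
# numbers, so the (1, 536870912) on TestAllExtensions above covers every legal
# field number: 536870912 is 2**29, one past the protobuf maximum field
# number of 536870911.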
nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3146, serialized_end=3172, ) _TESTGROUP = _descriptor.Descriptor( name='TestGroup', full_name='protobuf_unittest.TestGroup', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='optionalgroup', full_name='protobuf_unittest.TestGroup.optionalgroup', index=0, number=16, type=10, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='optional_foreign_enum', full_name='protobuf_unittest.TestGroup.optional_foreign_enum', index=1, number=22, type=14, cpp_type=8, label=1, has_default_value=False, default_value=4, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[_TESTGROUP_OPTIONALGROUP, ], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3804, serialized_end=3973, ) _TESTGROUPEXTENSION = _descriptor.Descriptor( name='TestGroupExtension', full_name='protobuf_unittest.TestGroupExtension', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=True, syntax='proto2', extension_ranges=[(1, 536870912), ], oneofs=[ ], serialized_start=3975, serialized_end=4005, ) _TESTNESTEDEXTENSION_OPTIONALGROUP_EXTENSION = _descriptor.Descriptor( name='OptionalGroup_extension', full_name='protobuf_unittest.TestNestedExtension.OptionalGroup_extension', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='a', full_name='protobuf_unittest.TestNestedExtension.OptionalGroup_extension.a', index=0, number=17, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3727, serialized_end=3763, ) _TESTNESTEDEXTENSION = _descriptor.Descriptor( name='TestNestedExtension', full_name='protobuf_unittest.TestNestedExtension', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ ], extensions=[ _descriptor.FieldDescriptor( name='test', full_name='protobuf_unittest.TestNestedExtension.test', index=0, number=1002, type=9, cpp_type=9, label=1, has_default_value=True, default_value=_b("test").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='nested_string_extension', full_name='protobuf_unittest.TestNestedExtension.nested_string_extension', index=1, number=1003, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='optionalgroup_extension', full_name='protobuf_unittest.TestNestedExtension.optionalgroup_extension', index=2, number=16, type=10, 
cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='optional_foreign_enum_extension', full_name='protobuf_unittest.TestNestedExtension.optional_foreign_enum_extension', index=3, number=22, type=14, cpp_type=8, label=1, has_default_value=False, default_value=4, message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], nested_types=[_TESTNESTEDEXTENSION_OPTIONALGROUP_EXTENSION, ], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=4008, serialized_end=4447, ) _TESTREQUIRED = _descriptor.Descriptor( name='TestRequired', full_name='protobuf_unittest.TestRequired', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='a', full_name='protobuf_unittest.TestRequired.a', index=0, number=1, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy2', full_name='protobuf_unittest.TestRequired.dummy2', index=1, number=2, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='b', full_name='protobuf_unittest.TestRequired.b', index=2, number=3, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy4', full_name='protobuf_unittest.TestRequired.dummy4', index=3, number=4, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy5', full_name='protobuf_unittest.TestRequired.dummy5', index=4, number=5, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy6', full_name='protobuf_unittest.TestRequired.dummy6', index=5, number=6, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy7', full_name='protobuf_unittest.TestRequired.dummy7', index=6, number=7, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy8', full_name='protobuf_unittest.TestRequired.dummy8', index=7, number=8, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, 
serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy9', full_name='protobuf_unittest.TestRequired.dummy9', index=8, number=9, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy10', full_name='protobuf_unittest.TestRequired.dummy10', index=9, number=10, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy11', full_name='protobuf_unittest.TestRequired.dummy11', index=10, number=11, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy12', full_name='protobuf_unittest.TestRequired.dummy12', index=11, number=12, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy13', full_name='protobuf_unittest.TestRequired.dummy13', index=12, number=13, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy14', full_name='protobuf_unittest.TestRequired.dummy14', index=13, number=14, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy15', full_name='protobuf_unittest.TestRequired.dummy15', index=14, number=15, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy16', full_name='protobuf_unittest.TestRequired.dummy16', index=15, number=16, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy17', full_name='protobuf_unittest.TestRequired.dummy17', index=16, number=17, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy18', full_name='protobuf_unittest.TestRequired.dummy18', index=17, number=18, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy19', full_name='protobuf_unittest.TestRequired.dummy19', index=18, number=19, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, 
message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy20', full_name='protobuf_unittest.TestRequired.dummy20', index=19, number=20, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy21', full_name='protobuf_unittest.TestRequired.dummy21', index=20, number=21, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy22', full_name='protobuf_unittest.TestRequired.dummy22', index=21, number=22, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy23', full_name='protobuf_unittest.TestRequired.dummy23', index=22, number=23, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy24', full_name='protobuf_unittest.TestRequired.dummy24', index=23, number=24, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy25', full_name='protobuf_unittest.TestRequired.dummy25', index=24, number=25, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy26', full_name='protobuf_unittest.TestRequired.dummy26', index=25, number=26, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy27', full_name='protobuf_unittest.TestRequired.dummy27', index=26, number=27, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy28', full_name='protobuf_unittest.TestRequired.dummy28', index=27, number=28, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy29', full_name='protobuf_unittest.TestRequired.dummy29', index=28, number=29, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy30', full_name='protobuf_unittest.TestRequired.dummy30', 
index=29, number=30, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy31', full_name='protobuf_unittest.TestRequired.dummy31', index=30, number=31, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy32', full_name='protobuf_unittest.TestRequired.dummy32', index=31, number=32, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='c', full_name='protobuf_unittest.TestRequired.c', index=32, number=33, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ _descriptor.FieldDescriptor( name='single', full_name='protobuf_unittest.TestRequired.single', index=0, number=1000, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='multi', full_name='protobuf_unittest.TestRequired.multi', index=1, number=1001, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=4450, serialized_end=5175, ) _TESTREQUIREDFOREIGN = _descriptor.Descriptor( name='TestRequiredForeign', full_name='protobuf_unittest.TestRequiredForeign', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='optional_message', full_name='protobuf_unittest.TestRequiredForeign.optional_message', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='repeated_message', full_name='protobuf_unittest.TestRequiredForeign.repeated_message', index=1, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dummy', full_name='protobuf_unittest.TestRequiredForeign.dummy', index=2, number=3, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=5178, serialized_end=5332, ) _TESTREQUIREDMESSAGE = _descriptor.Descriptor( 
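# Note: TestRequired above pads its field numbers out to 33 (required a, b,
# and c = 33, plus the optional dummy* fields) so that initialization checks
# must consult more than one 32-bit word of has-bits; its single/multi
# entries are extensions (is_extension=True) whose containing type is wired
# up by the linking code later in this module.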
name='TestRequiredMessage', full_name='protobuf_unittest.TestRequiredMessage', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='optional_message', full_name='protobuf_unittest.TestRequiredMessage.optional_message', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='repeated_message', full_name='protobuf_unittest.TestRequiredMessage.repeated_message', index=1, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='required_message', full_name='protobuf_unittest.TestRequiredMessage.required_message', index=2, number=3, type=11, cpp_type=10, label=2, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=5335, serialized_end=5533, ) _TESTFOREIGNNESTED = _descriptor.Descriptor( name='TestForeignNested', full_name='protobuf_unittest.TestForeignNested', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='foreign_nested', full_name='protobuf_unittest.TestForeignNested.foreign_nested', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=5535, serialized_end=5625, ) _TESTEMPTYMESSAGE = _descriptor.Descriptor( name='TestEmptyMessage', full_name='protobuf_unittest.TestEmptyMessage', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=5627, serialized_end=5645, ) _TESTEMPTYMESSAGEWITHEXTENSIONS = _descriptor.Descriptor( name='TestEmptyMessageWithExtensions', full_name='protobuf_unittest.TestEmptyMessageWithExtensions', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=True, syntax='proto2', extension_ranges=[(1, 536870912), ], oneofs=[ ], serialized_start=5647, serialized_end=5689, ) _TESTMULTIPLEEXTENSIONRANGES = _descriptor.Descriptor( name='TestMultipleExtensionRanges', full_name='protobuf_unittest.TestMultipleExtensionRanges', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=True, syntax='proto2', extension_ranges=[(42, 43), (4143, 4244), (65536, 536870912), ], oneofs=[ ], serialized_start=5691, serialized_end=5746, ) _TESTREALLYLARGETAGNUMBER = _descriptor.Descriptor( name='TestReallyLargeTagNumber', full_name='protobuf_unittest.TestReallyLargeTagNumber', filename=None, 
file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='a', full_name='protobuf_unittest.TestReallyLargeTagNumber.a', index=0, number=1, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='bb', full_name='protobuf_unittest.TestReallyLargeTagNumber.bb', index=1, number=268435455, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=5748, serialized_end=5800, ) _TESTRECURSIVEMESSAGE = _descriptor.Descriptor( name='TestRecursiveMessage', full_name='protobuf_unittest.TestRecursiveMessage', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='a', full_name='protobuf_unittest.TestRecursiveMessage.a', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='i', full_name='protobuf_unittest.TestRecursiveMessage.i', index=1, number=2, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=5802, serialized_end=5887, ) _TESTMUTUALRECURSIONA_SUBMESSAGE = _descriptor.Descriptor( name='SubMessage', full_name='protobuf_unittest.TestMutualRecursionA.SubMessage', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='b', full_name='protobuf_unittest.TestMutualRecursionA.SubMessage.b', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=6035, serialized_end=6099, ) _TESTMUTUALRECURSIONA_SUBGROUP = _descriptor.Descriptor( name='SubGroup', full_name='protobuf_unittest.TestMutualRecursionA.SubGroup', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='sub_message', full_name='protobuf_unittest.TestMutualRecursionA.SubGroup.sub_message', index=0, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='not_in_this_scc', full_name='protobuf_unittest.TestMutualRecursionA.SubGroup.not_in_this_scc', index=1, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, 
extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=6102, serialized_end=6243, ) _TESTMUTUALRECURSIONA = _descriptor.Descriptor( name='TestMutualRecursionA', full_name='protobuf_unittest.TestMutualRecursionA', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='bb', full_name='protobuf_unittest.TestMutualRecursionA.bb', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='subgroup', full_name='protobuf_unittest.TestMutualRecursionA.subgroup', index=1, number=2, type=10, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[_TESTMUTUALRECURSIONA_SUBMESSAGE, _TESTMUTUALRECURSIONA_SUBGROUP, ], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=5890, serialized_end=6243, ) _TESTMUTUALRECURSIONB = _descriptor.Descriptor( name='TestMutualRecursionB', full_name='protobuf_unittest.TestMutualRecursionB', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='a', full_name='protobuf_unittest.TestMutualRecursionB.a', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='optional_int32', full_name='protobuf_unittest.TestMutualRecursionB.optional_int32', index=1, number=2, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=6245, serialized_end=6343, ) _TESTISINITIALIZED_SUBMESSAGE_SUBGROUP = _descriptor.Descriptor( name='SubGroup', full_name='protobuf_unittest.TestIsInitialized.SubMessage.SubGroup', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='i', full_name='protobuf_unittest.TestIsInitialized.SubMessage.SubGroup.i', index=0, number=2, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=6527, serialized_end=6548, ) _TESTISINITIALIZED_SUBMESSAGE = _descriptor.Descriptor( name='SubMessage', full_name='protobuf_unittest.TestIsInitialized.SubMessage', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='subgroup', full_name='protobuf_unittest.TestIsInitialized.SubMessage.subgroup', index=0, number=1, type=10, 
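# Note: serialized_start/serialized_end are byte offsets into
# DESCRIPTOR.serialized_pb (the serialized FileDescriptorProto), which is why
# the nested SubMessage (6035-6099) and SubGroup (6102-6243) ranges above
# fall inside the enclosing TestMutualRecursionA range (5890-6243).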
cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[_TESTISINITIALIZED_SUBMESSAGE_SUBGROUP, ], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=6437, serialized_end=6548, ) _TESTISINITIALIZED = _descriptor.Descriptor( name='TestIsInitialized', full_name='protobuf_unittest.TestIsInitialized', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='sub_message', full_name='protobuf_unittest.TestIsInitialized.sub_message', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[_TESTISINITIALIZED_SUBMESSAGE, ], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=6346, serialized_end=6548, ) _TESTDUPFIELDNUMBER_FOO = _descriptor.Descriptor( name='Foo', full_name='protobuf_unittest.TestDupFieldNumber.Foo', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='a', full_name='protobuf_unittest.TestDupFieldNumber.Foo.a', index=0, number=1, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=6696, serialized_end=6712, ) _TESTDUPFIELDNUMBER_BAR = _descriptor.Descriptor( name='Bar', full_name='protobuf_unittest.TestDupFieldNumber.Bar', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='a', full_name='protobuf_unittest.TestDupFieldNumber.Bar.a', index=0, number=1, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=6714, serialized_end=6730, ) _TESTDUPFIELDNUMBER = _descriptor.Descriptor( name='TestDupFieldNumber', full_name='protobuf_unittest.TestDupFieldNumber', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='a', full_name='protobuf_unittest.TestDupFieldNumber.a', index=0, number=1, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='foo', full_name='protobuf_unittest.TestDupFieldNumber.foo', index=1, number=2, type=10, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='bar', full_name='protobuf_unittest.TestDupFieldNumber.bar', index=2, number=3, 
type=10, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[_TESTDUPFIELDNUMBER_FOO, _TESTDUPFIELDNUMBER_BAR, ], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=6551, serialized_end=6730, ) _TESTEAGERMESSAGE = _descriptor.Descriptor( name='TestEagerMessage', full_name='protobuf_unittest.TestEagerMessage', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='sub_message', full_name='protobuf_unittest.TestEagerMessage.sub_message', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('(\000'), file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=6732, serialized_end=6808, ) _TESTLAZYMESSAGE = _descriptor.Descriptor( name='TestLazyMessage', full_name='protobuf_unittest.TestLazyMessage', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='sub_message', full_name='protobuf_unittest.TestLazyMessage.sub_message', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('(\001'), file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=6810, serialized_end=6885, ) _TESTNESTEDMESSAGEHASBITS_NESTEDMESSAGE = _descriptor.Descriptor( name='NestedMessage', full_name='protobuf_unittest.TestNestedMessageHasBits.NestedMessage', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='nestedmessage_repeated_int32', full_name='protobuf_unittest.TestNestedMessageHasBits.NestedMessage.nestedmessage_repeated_int32', index=0, number=1, type=5, cpp_type=1, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='nestedmessage_repeated_foreignmessage', full_name='protobuf_unittest.TestNestedMessageHasBits.NestedMessage.nestedmessage_repeated_foreignmessage', index=1, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=7009, serialized_end=7144, ) _TESTNESTEDMESSAGEHASBITS = _descriptor.Descriptor( name='TestNestedMessageHasBits', full_name='protobuf_unittest.TestNestedMessageHasBits', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='optional_nested_message', full_name='protobuf_unittest.TestNestedMessageHasBits.optional_nested_message', index=0, number=1, type=11, cpp_type=10, label=1, 
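# Note: the sub_message options above are wire-encoded FieldOptions as well:
# _b('(\000') sets lazy = false and _b('(\001') sets lazy = true (field 5,
# varint 0 or 1), matching the TestEagerMessage / TestLazyMessage names.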
has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[_TESTNESTEDMESSAGEHASBITS_NESTEDMESSAGE, ], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=6888, serialized_end=7144, ) _TESTCAMELCASEFIELDNAMES = _descriptor.Descriptor( name='TestCamelCaseFieldNames', full_name='protobuf_unittest.TestCamelCaseFieldNames', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='PrimitiveField', full_name='protobuf_unittest.TestCamelCaseFieldNames.PrimitiveField', index=0, number=1, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='StringField', full_name='protobuf_unittest.TestCamelCaseFieldNames.StringField', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='EnumField', full_name='protobuf_unittest.TestCamelCaseFieldNames.EnumField', index=2, number=3, type=14, cpp_type=8, label=1, has_default_value=False, default_value=4, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='MessageField', full_name='protobuf_unittest.TestCamelCaseFieldNames.MessageField', index=3, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='StringPieceField', full_name='protobuf_unittest.TestCamelCaseFieldNames.StringPieceField', index=4, number=5, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\010\002'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='CordField', full_name='protobuf_unittest.TestCamelCaseFieldNames.CordField', index=5, number=6, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\010\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='RepeatedPrimitiveField', full_name='protobuf_unittest.TestCamelCaseFieldNames.RepeatedPrimitiveField', index=6, number=7, type=5, cpp_type=1, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='RepeatedStringField', full_name='protobuf_unittest.TestCamelCaseFieldNames.RepeatedStringField', index=7, number=8, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), 
_descriptor.FieldDescriptor( name='RepeatedEnumField', full_name='protobuf_unittest.TestCamelCaseFieldNames.RepeatedEnumField', index=8, number=9, type=14, cpp_type=8, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='RepeatedMessageField', full_name='protobuf_unittest.TestCamelCaseFieldNames.RepeatedMessageField', index=9, number=10, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='RepeatedStringPieceField', full_name='protobuf_unittest.TestCamelCaseFieldNames.RepeatedStringPieceField', index=10, number=11, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\010\002'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='RepeatedCordField', full_name='protobuf_unittest.TestCamelCaseFieldNames.RepeatedCordField', index=11, number=12, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\010\001'), file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=7147, serialized_end=7632, ) _TESTFIELDORDERINGS_NESTEDMESSAGE = _descriptor.Descriptor( name='NestedMessage', full_name='protobuf_unittest.TestFieldOrderings.NestedMessage', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='oo', full_name='protobuf_unittest.TestFieldOrderings.NestedMessage.oo', index=0, number=2, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='bb', full_name='protobuf_unittest.TestFieldOrderings.NestedMessage.bb', index=1, number=1, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=7797, serialized_end=7836, ) _TESTFIELDORDERINGS = _descriptor.Descriptor( name='TestFieldOrderings', full_name='protobuf_unittest.TestFieldOrderings', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='my_string', full_name='protobuf_unittest.TestFieldOrderings.my_string', index=0, number=11, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='my_int', full_name='protobuf_unittest.TestFieldOrderings.my_int', index=1, number=1, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, 
containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='my_float', full_name='protobuf_unittest.TestFieldOrderings.my_float', index=2, number=101, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='optional_nested_message', full_name='protobuf_unittest.TestFieldOrderings.optional_nested_message', index=3, number=200, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[_TESTFIELDORDERINGS_NESTEDMESSAGE, ], enum_types=[ ], serialized_options=None, is_extendable=True, syntax='proto2', extension_ranges=[(2, 11), (12, 101), ], oneofs=[ ], serialized_start=7635, serialized_end=7848, ) _TESTEXTENSIONORDERINGS1 = _descriptor.Descriptor( name='TestExtensionOrderings1', full_name='protobuf_unittest.TestExtensionOrderings1', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='my_string', full_name='protobuf_unittest.TestExtensionOrderings1.my_string', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ _descriptor.FieldDescriptor( name='test_ext_orderings1', full_name='protobuf_unittest.TestExtensionOrderings1.test_ext_orderings1', index=0, number=13, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=7851, serialized_end=8007, ) _TESTEXTENSIONORDERINGS2_TESTEXTENSIONORDERINGS3 = _descriptor.Descriptor( name='TestExtensionOrderings3', full_name='protobuf_unittest.TestExtensionOrderings2.TestExtensionOrderings3', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='my_string', full_name='protobuf_unittest.TestExtensionOrderings2.TestExtensionOrderings3.my_string', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ _descriptor.FieldDescriptor( name='test_ext_orderings3', full_name='protobuf_unittest.TestExtensionOrderings2.TestExtensionOrderings3.test_ext_orderings3', index=0, number=14, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=8057, serialized_end=8238, ) _TESTEXTENSIONORDERINGS2 = _descriptor.Descriptor( name='TestExtensionOrderings2', 
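# Note: TestFieldOrderings above deliberately interleaves its field numbers
# (11, 1, 101, 200) with the extension ranges (2, 11) and (12, 101),
# apparently to exercise serializers that must emit fields and extensions in
# tag order.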
full_name='protobuf_unittest.TestExtensionOrderings2', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='my_string', full_name='protobuf_unittest.TestExtensionOrderings2.my_string', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ _descriptor.FieldDescriptor( name='test_ext_orderings2', full_name='protobuf_unittest.TestExtensionOrderings2.test_ext_orderings2', index=0, number=12, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], nested_types=[_TESTEXTENSIONORDERINGS2_TESTEXTENSIONORDERINGS3, ], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=8010, serialized_end=8350, ) _TESTEXTREMEDEFAULTVALUES = _descriptor.Descriptor( name='TestExtremeDefaultValues', full_name='protobuf_unittest.TestExtremeDefaultValues', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='escaped_bytes', full_name='protobuf_unittest.TestExtremeDefaultValues.escaped_bytes', index=0, number=1, type=12, cpp_type=9, label=1, has_default_value=True, default_value=_b("\000\001\007\010\014\n\r\t\013\\\'\"\376"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='large_uint32', full_name='protobuf_unittest.TestExtremeDefaultValues.large_uint32', index=1, number=2, type=13, cpp_type=3, label=1, has_default_value=True, default_value=4294967295, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='large_uint64', full_name='protobuf_unittest.TestExtremeDefaultValues.large_uint64', index=2, number=3, type=4, cpp_type=4, label=1, has_default_value=True, default_value=18446744073709551615, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='small_int32', full_name='protobuf_unittest.TestExtremeDefaultValues.small_int32', index=3, number=4, type=5, cpp_type=1, label=1, has_default_value=True, default_value=-2147483647, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='small_int64', full_name='protobuf_unittest.TestExtremeDefaultValues.small_int64', index=4, number=5, type=3, cpp_type=2, label=1, has_default_value=True, default_value=-9223372036854775807, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='really_small_int32', full_name='protobuf_unittest.TestExtremeDefaultValues.really_small_int32', index=5, number=21, type=5, cpp_type=1, label=1, has_default_value=True, default_value=-2147483648, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( 
name='really_small_int64', full_name='protobuf_unittest.TestExtremeDefaultValues.really_small_int64', index=6, number=22, type=3, cpp_type=2, label=1, has_default_value=True, default_value=-9223372036854775808, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='utf8_string', full_name='protobuf_unittest.TestExtremeDefaultValues.utf8_string', index=7, number=6, type=9, cpp_type=9, label=1, has_default_value=True, default_value=_b("\341\210\264").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='zero_float', full_name='protobuf_unittest.TestExtremeDefaultValues.zero_float', index=8, number=7, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='one_float', full_name='protobuf_unittest.TestExtremeDefaultValues.one_float', index=9, number=8, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(1), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='small_float', full_name='protobuf_unittest.TestExtremeDefaultValues.small_float', index=10, number=9, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(1.5), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='negative_one_float', full_name='protobuf_unittest.TestExtremeDefaultValues.negative_one_float', index=11, number=10, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(-1), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='negative_float', full_name='protobuf_unittest.TestExtremeDefaultValues.negative_float', index=12, number=11, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(-1.5), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='large_float', full_name='protobuf_unittest.TestExtremeDefaultValues.large_float', index=13, number=12, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(2e+08), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='small_negative_float', full_name='protobuf_unittest.TestExtremeDefaultValues.small_negative_float', index=14, number=13, type=2, cpp_type=6, label=1, has_default_value=True, default_value=float(-8e-28), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='inf_double', full_name='protobuf_unittest.TestExtremeDefaultValues.inf_double', index=15, number=14, type=1, cpp_type=5, label=1, has_default_value=True, default_value=1e10000, message_type=None, enum_type=None, containing_type=None, 
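# Note: the literal 1e10000 overflows a Python float to float('inf'), and
# (1e10000 * 0) evaluates to float('nan'); this is how the generated code
# spells the proto defaults inf, -inf, and nan for the *_double and *_float
# fields here and just below, without importing math.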
is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='neg_inf_double', full_name='protobuf_unittest.TestExtremeDefaultValues.neg_inf_double', index=16, number=15, type=1, cpp_type=5, label=1, has_default_value=True, default_value=-1e10000, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='nan_double', full_name='protobuf_unittest.TestExtremeDefaultValues.nan_double', index=17, number=16, type=1, cpp_type=5, label=1, has_default_value=True, default_value=(1e10000 * 0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='inf_float', full_name='protobuf_unittest.TestExtremeDefaultValues.inf_float', index=18, number=17, type=2, cpp_type=6, label=1, has_default_value=True, default_value=1e10000, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='neg_inf_float', full_name='protobuf_unittest.TestExtremeDefaultValues.neg_inf_float', index=19, number=18, type=2, cpp_type=6, label=1, has_default_value=True, default_value=-1e10000, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='nan_float', full_name='protobuf_unittest.TestExtremeDefaultValues.nan_float', index=20, number=19, type=2, cpp_type=6, label=1, has_default_value=True, default_value=(1e10000 * 0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='cpp_trigraph', full_name='protobuf_unittest.TestExtremeDefaultValues.cpp_trigraph', index=21, number=20, type=9, cpp_type=9, label=1, has_default_value=True, default_value=_b("? ? ?? ?? ??? ??/ ??-").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='string_with_zero', full_name='protobuf_unittest.TestExtremeDefaultValues.string_with_zero', index=22, number=23, type=9, cpp_type=9, label=1, has_default_value=True, default_value=_b("hel\000lo").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='bytes_with_zero', full_name='protobuf_unittest.TestExtremeDefaultValues.bytes_with_zero', index=23, number=24, type=12, cpp_type=9, label=1, has_default_value=True, default_value=_b("wor\000ld"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='string_piece_with_zero', full_name='protobuf_unittest.TestExtremeDefaultValues.string_piece_with_zero', index=24, number=25, type=9, cpp_type=9, label=1, has_default_value=True, default_value=_b("ab\000c").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\010\002'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='cord_with_zero', full_name='protobuf_unittest.TestExtremeDefaultValues.cord_with_zero', index=25, number=26, type=9, cpp_type=9, label=1, has_default_value=True, default_value=_b("12\0003").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\010\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='replacement_string', full_name='protobuf_unittest.TestExtremeDefaultValues.replacement_string', index=26, number=27, type=9, cpp_type=9, label=1, has_default_value=True, default_value=_b("${unknown}").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=8353, serialized_end=9303, ) _SPARSEENUMMESSAGE = _descriptor.Descriptor( name='SparseEnumMessage', full_name='protobuf_unittest.SparseEnumMessage', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='sparse_enum', full_name='protobuf_unittest.SparseEnumMessage.sparse_enum', index=0, number=1, type=14, cpp_type=8, label=1, has_default_value=False, default_value=123, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=9305, serialized_end=9380, ) _ONESTRING = _descriptor.Descriptor( name='OneString', full_name='protobuf_unittest.OneString', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='data', full_name='protobuf_unittest.OneString.data', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ],
extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=9382, serialized_end=9407, ) _MORESTRING = _descriptor.Descriptor( name='MoreString', full_name='protobuf_unittest.MoreString', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='data', full_name='protobuf_unittest.MoreString.data', index=0, number=1, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=9409, serialized_end=9435, ) _ONEBYTES = _descriptor.Descriptor( name='OneBytes', full_name='protobuf_unittest.OneBytes', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='data', full_name='protobuf_unittest.OneBytes.data', index=0, number=1, type=12, cpp_type=9, label=1, has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=9437, serialized_end=9461, ) _MOREBYTES = _descriptor.Descriptor( name='MoreBytes', full_name='protobuf_unittest.MoreBytes', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='data', full_name='protobuf_unittest.MoreBytes.data', index=0, number=1, type=12, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=9463, serialized_end=9488, ) _INT32MESSAGE = _descriptor.Descriptor( name='Int32Message', full_name='protobuf_unittest.Int32Message', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='data', full_name='protobuf_unittest.Int32Message.data', index=0, number=1, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=9490, serialized_end=9518, ) _UINT32MESSAGE = _descriptor.Descriptor( name='Uint32Message', full_name='protobuf_unittest.Uint32Message', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='data', full_name='protobuf_unittest.Uint32Message.data', index=0, number=1, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], 
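# The numeric codes in these FieldDescriptor calls mirror descriptor.proto:
# type 1=double 2=float 3=int64 4=uint64 5=int32 6=fixed64 7=fixed32 8=bool
# 9=string 10=group 11=message 12=bytes 13=uint32 14=enum 15=sfixed32
# 16=sfixed64 17=sint32 18=sint64; cpp_type 1=int32 2=int64 3=uint32 4=uint64
# 5=double 6=float 7=bool 8=enum 9=string 10=message; label 1=optional
# 2=required 3=repeated.  serialized_start/serialized_end appear to be byte
# offsets delimiting each message type's descriptor within
# DESCRIPTOR.serialized_pb.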
serialized_start=9520, serialized_end=9549, ) _INT64MESSAGE = _descriptor.Descriptor( name='Int64Message', full_name='protobuf_unittest.Int64Message', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='data', full_name='protobuf_unittest.Int64Message.data', index=0, number=1, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=9551, serialized_end=9579, ) _UINT64MESSAGE = _descriptor.Descriptor( name='Uint64Message', full_name='protobuf_unittest.Uint64Message', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='data', full_name='protobuf_unittest.Uint64Message.data', index=0, number=1, type=4, cpp_type=4, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=9581, serialized_end=9610, ) _BOOLMESSAGE = _descriptor.Descriptor( name='BoolMessage', full_name='protobuf_unittest.BoolMessage', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='data', full_name='protobuf_unittest.BoolMessage.data', index=0, number=1, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=9612, serialized_end=9639, ) _TESTONEOF_FOOGROUP = _descriptor.Descriptor( name='FooGroup', full_name='protobuf_unittest.TestOneof.FooGroup', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='a', full_name='protobuf_unittest.TestOneof.FooGroup.a', index=0, number=5, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='b', full_name='protobuf_unittest.TestOneof.FooGroup.b', index=1, number=6, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=9811, serialized_end=9843, ) _TESTONEOF = _descriptor.Descriptor( name='TestOneof', full_name='protobuf_unittest.TestOneof', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='foo_int', full_name='protobuf_unittest.TestOneof.foo_int', index=0, number=1, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, 
is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='foo_string', full_name='protobuf_unittest.TestOneof.foo_string', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='foo_message', full_name='protobuf_unittest.TestOneof.foo_message', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='foogroup', full_name='protobuf_unittest.TestOneof.foogroup', index=3, number=4, type=10, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[_TESTONEOF_FOOGROUP, ], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ _descriptor.OneofDescriptor( name='foo', full_name='protobuf_unittest.TestOneof.foo', index=0, containing_type=None, fields=[]), ], serialized_start=9642, serialized_end=9850, ) _TESTONEOFBACKWARDSCOMPATIBLE_FOOGROUP = _descriptor.Descriptor( name='FooGroup', full_name='protobuf_unittest.TestOneofBackwardsCompatible.FooGroup', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='a', full_name='protobuf_unittest.TestOneofBackwardsCompatible.FooGroup.a', index=0, number=5, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='b', full_name='protobuf_unittest.TestOneofBackwardsCompatible.FooGroup.b', index=1, number=6, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=9811, serialized_end=9843, ) _TESTONEOFBACKWARDSCOMPATIBLE = _descriptor.Descriptor( name='TestOneofBackwardsCompatible', full_name='protobuf_unittest.TestOneofBackwardsCompatible', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='foo_int', full_name='protobuf_unittest.TestOneofBackwardsCompatible.foo_int', index=0, number=1, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='foo_string', full_name='protobuf_unittest.TestOneofBackwardsCompatible.foo_string', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='foo_message', 
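# OneofDescriptor instances (e.g. 'foo' in TestOneof above) are constructed
# with fields=[]; the generated module typically links each member field to
# its containing oneof later in the file, once all descriptors exist.  At
# runtime the active member is queried with, e.g.:
#   msg = TestOneof()
#   msg.foo_int = 1
#   msg.WhichOneof('foo')  # -> 'foo_int'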
full_name='protobuf_unittest.TestOneofBackwardsCompatible.foo_message', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='foogroup', full_name='protobuf_unittest.TestOneofBackwardsCompatible.foogroup', index=3, number=4, type=10, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[_TESTONEOFBACKWARDSCOMPATIBLE_FOOGROUP, ], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=9853, serialized_end=10084, ) _TESTONEOF2_FOOGROUP = _descriptor.Descriptor( name='FooGroup', full_name='protobuf_unittest.TestOneof2.FooGroup', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='a', full_name='protobuf_unittest.TestOneof2.FooGroup.a', index=0, number=9, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='b', full_name='protobuf_unittest.TestOneof2.FooGroup.b', index=1, number=10, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=10745, serialized_end=10777, ) _TESTONEOF2_NESTEDMESSAGE = _descriptor.Descriptor( name='NestedMessage', full_name='protobuf_unittest.TestOneof2.NestedMessage', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='qux_int', full_name='protobuf_unittest.TestOneof2.NestedMessage.qux_int', index=0, number=1, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='corge_int', full_name='protobuf_unittest.TestOneof2.NestedMessage.corge_int', index=1, number=2, type=5, cpp_type=1, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=10779, serialized_end=10830, ) _TESTONEOF2 = _descriptor.Descriptor( name='TestOneof2', full_name='protobuf_unittest.TestOneof2', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='foo_int', full_name='protobuf_unittest.TestOneof2.foo_int', index=0, number=1, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='foo_string', 
full_name='protobuf_unittest.TestOneof2.foo_string', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='foo_cord', full_name='protobuf_unittest.TestOneof2.foo_cord', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\010\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='foo_string_piece', full_name='protobuf_unittest.TestOneof2.foo_string_piece', index=3, number=4, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\010\002'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='foo_bytes', full_name='protobuf_unittest.TestOneof2.foo_bytes', index=4, number=5, type=12, cpp_type=9, label=1, has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='foo_enum', full_name='protobuf_unittest.TestOneof2.foo_enum', index=5, number=6, type=14, cpp_type=8, label=1, has_default_value=False, default_value=1, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='foo_message', full_name='protobuf_unittest.TestOneof2.foo_message', index=6, number=7, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='foogroup', full_name='protobuf_unittest.TestOneof2.foogroup', index=7, number=8, type=10, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='foo_lazy_message', full_name='protobuf_unittest.TestOneof2.foo_lazy_message', index=8, number=11, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('(\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='bar_int', full_name='protobuf_unittest.TestOneof2.bar_int', index=9, number=12, type=5, cpp_type=1, label=1, has_default_value=True, default_value=5, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='bar_string', full_name='protobuf_unittest.TestOneof2.bar_string', index=10, number=13, type=9, cpp_type=9, label=1, has_default_value=True, default_value=_b("STRING").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='bar_cord', full_name='protobuf_unittest.TestOneof2.bar_cord', index=11, number=14, type=9, 
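# The serialized_options bytes here are a serialized FieldOptions message:
# _b('\010\001') sets ctype = CORD (field 1, value 1), _b('\010\002') sets
# ctype = STRING_PIECE (value 2), and _b('(\001') (i.e. \050\001) sets
# lazy = True (field 5), matching the [ctype=...] and [lazy=true] options on
# the cord, string_piece and lazy_message fields in this message.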
cpp_type=9, label=1, has_default_value=True, default_value=_b("CORD").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\010\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='bar_string_piece', full_name='protobuf_unittest.TestOneof2.bar_string_piece', index=12, number=15, type=9, cpp_type=9, label=1, has_default_value=True, default_value=_b("SPIECE").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\010\002'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='bar_bytes', full_name='protobuf_unittest.TestOneof2.bar_bytes', index=13, number=16, type=12, cpp_type=9, label=1, has_default_value=True, default_value=_b("BYTES"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='bar_enum', full_name='protobuf_unittest.TestOneof2.bar_enum', index=14, number=17, type=14, cpp_type=8, label=1, has_default_value=True, default_value=2, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='baz_int', full_name='protobuf_unittest.TestOneof2.baz_int', index=15, number=18, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='baz_string', full_name='protobuf_unittest.TestOneof2.baz_string', index=16, number=19, type=9, cpp_type=9, label=1, has_default_value=True, default_value=_b("BAZ").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[_TESTONEOF2_FOOGROUP, _TESTONEOF2_NESTEDMESSAGE, ], enum_types=[ _TESTONEOF2_NESTEDENUM, ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ _descriptor.OneofDescriptor( name='foo', full_name='protobuf_unittest.TestOneof2.foo', index=0, containing_type=None, fields=[]), _descriptor.OneofDescriptor( name='bar', full_name='protobuf_unittest.TestOneof2.bar', index=1, containing_type=None, fields=[]), ], serialized_start=10087, serialized_end=10885, ) _TESTREQUIREDONEOF_NESTEDMESSAGE = _descriptor.Descriptor( name='NestedMessage', full_name='protobuf_unittest.TestRequiredOneof.NestedMessage', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='required_double', full_name='protobuf_unittest.TestRequiredOneof.NestedMessage.required_double', index=0, number=1, type=1, cpp_type=5, label=2, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=11025, serialized_end=11065, ) _TESTREQUIREDONEOF = _descriptor.Descriptor( name='TestRequiredOneof', full_name='protobuf_unittest.TestRequiredOneof', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='foo_int', 
full_name='protobuf_unittest.TestRequiredOneof.foo_int', index=0, number=1, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='foo_string', full_name='protobuf_unittest.TestRequiredOneof.foo_string', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='foo_message', full_name='protobuf_unittest.TestRequiredOneof.foo_message', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[_TESTREQUIREDONEOF_NESTEDMESSAGE, ], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ _descriptor.OneofDescriptor( name='foo', full_name='protobuf_unittest.TestRequiredOneof.foo', index=0, containing_type=None, fields=[]), ], serialized_start=10888, serialized_end=11072, ) _TESTPACKEDTYPES = _descriptor.Descriptor( name='TestPackedTypes', full_name='protobuf_unittest.TestPackedTypes', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='packed_int32', full_name='protobuf_unittest.TestPackedTypes.packed_int32', index=0, number=90, type=5, cpp_type=1, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='packed_int64', full_name='protobuf_unittest.TestPackedTypes.packed_int64', index=1, number=91, type=3, cpp_type=2, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='packed_uint32', full_name='protobuf_unittest.TestPackedTypes.packed_uint32', index=2, number=92, type=13, cpp_type=3, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='packed_uint64', full_name='protobuf_unittest.TestPackedTypes.packed_uint64', index=3, number=93, type=4, cpp_type=4, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='packed_sint32', full_name='protobuf_unittest.TestPackedTypes.packed_sint32', index=4, number=94, type=17, cpp_type=1, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='packed_sint64', full_name='protobuf_unittest.TestPackedTypes.packed_sint64', index=5, number=95, type=18, cpp_type=2, label=3, has_default_value=False, default_value=[], message_type=None, 
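# _b('\020\001') is a serialized FieldOptions message with packed = True
# (field 2, varint 1): 0o20 == 0x10 encodes the tag (2 << 3) | 0.
# TestUnpackedTypes below uses _b('\020\000') to force packed = False on the
# same repeated scalar field types.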
enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='packed_fixed32', full_name='protobuf_unittest.TestPackedTypes.packed_fixed32', index=6, number=96, type=7, cpp_type=3, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='packed_fixed64', full_name='protobuf_unittest.TestPackedTypes.packed_fixed64', index=7, number=97, type=6, cpp_type=4, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='packed_sfixed32', full_name='protobuf_unittest.TestPackedTypes.packed_sfixed32', index=8, number=98, type=15, cpp_type=1, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='packed_sfixed64', full_name='protobuf_unittest.TestPackedTypes.packed_sfixed64', index=9, number=99, type=16, cpp_type=2, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='packed_float', full_name='protobuf_unittest.TestPackedTypes.packed_float', index=10, number=100, type=2, cpp_type=6, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='packed_double', full_name='protobuf_unittest.TestPackedTypes.packed_double', index=11, number=101, type=1, cpp_type=5, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='packed_bool', full_name='protobuf_unittest.TestPackedTypes.packed_bool', index=12, number=102, type=8, cpp_type=7, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='packed_enum', full_name='protobuf_unittest.TestPackedTypes.packed_enum', index=13, number=103, type=14, cpp_type=8, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=11075, serialized_end=11501, ) _TESTUNPACKEDTYPES = _descriptor.Descriptor( name='TestUnpackedTypes', full_name='protobuf_unittest.TestUnpackedTypes', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='unpacked_int32', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_int32', index=0, number=90, type=5, cpp_type=1, label=3, 
has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='unpacked_int64', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_int64', index=1, number=91, type=3, cpp_type=2, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='unpacked_uint32', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_uint32', index=2, number=92, type=13, cpp_type=3, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='unpacked_uint64', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_uint64', index=3, number=93, type=4, cpp_type=4, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='unpacked_sint32', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_sint32', index=4, number=94, type=17, cpp_type=1, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='unpacked_sint64', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_sint64', index=5, number=95, type=18, cpp_type=2, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='unpacked_fixed32', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_fixed32', index=6, number=96, type=7, cpp_type=3, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='unpacked_fixed64', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_fixed64', index=7, number=97, type=6, cpp_type=4, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='unpacked_sfixed32', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_sfixed32', index=8, number=98, type=15, cpp_type=1, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='unpacked_sfixed64', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_sfixed64', index=9, number=99, type=16, cpp_type=2, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='unpacked_float', 
full_name='protobuf_unittest.TestUnpackedTypes.unpacked_float', index=10, number=100, type=2, cpp_type=6, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='unpacked_double', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_double', index=11, number=101, type=1, cpp_type=5, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='unpacked_bool', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_bool', index=12, number=102, type=8, cpp_type=7, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='unpacked_enum', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_enum', index=13, number=103, type=14, cpp_type=8, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=11504, serialized_end=11960, ) _TESTPACKEDEXTENSIONS = _descriptor.Descriptor( name='TestPackedExtensions', full_name='protobuf_unittest.TestPackedExtensions', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=True, syntax='proto2', extension_ranges=[(1, 536870912), ], oneofs=[ ], serialized_start=11962, serialized_end=11994, ) _TESTUNPACKEDEXTENSIONS = _descriptor.Descriptor( name='TestUnpackedExtensions', full_name='protobuf_unittest.TestUnpackedExtensions', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=True, syntax='proto2', extension_ranges=[(1, 536870912), ], oneofs=[ ], serialized_start=11996, serialized_end=12030, ) _TESTDYNAMICEXTENSIONS_DYNAMICMESSAGETYPE = _descriptor.Descriptor( name='DynamicMessageType', full_name='protobuf_unittest.TestDynamicExtensions.DynamicMessageType', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='dynamic_field', full_name='protobuf_unittest.TestDynamicExtensions.DynamicMessageType.dynamic_field', index=0, number=2100, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=12453, serialized_end=12497, ) _TESTDYNAMICEXTENSIONS = _descriptor.Descriptor( name='TestDynamicExtensions', full_name='protobuf_unittest.TestDynamicExtensions', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='scalar_extension', full_name='protobuf_unittest.TestDynamicExtensions.scalar_extension', index=0, number=2000, type=7, 
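# extension_ranges entries are half-open: (1, 536870912) on
# TestPackedExtensions and TestUnpackedExtensions above covers every legal
# field number from 1 through 536870911 (2**29 - 1), the protobuf maximum.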
cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='enum_extension', full_name='protobuf_unittest.TestDynamicExtensions.enum_extension', index=1, number=2001, type=14, cpp_type=8, label=1, has_default_value=False, default_value=4, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dynamic_enum_extension', full_name='protobuf_unittest.TestDynamicExtensions.dynamic_enum_extension', index=2, number=2002, type=14, cpp_type=8, label=1, has_default_value=False, default_value=2200, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='message_extension', full_name='protobuf_unittest.TestDynamicExtensions.message_extension', index=3, number=2003, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dynamic_message_extension', full_name='protobuf_unittest.TestDynamicExtensions.dynamic_message_extension', index=4, number=2004, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='repeated_extension', full_name='protobuf_unittest.TestDynamicExtensions.repeated_extension', index=5, number=2005, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='packed_extension', full_name='protobuf_unittest.TestDynamicExtensions.packed_extension', index=6, number=2006, type=17, cpp_type=1, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR), ], extensions=[ ], nested_types=[_TESTDYNAMICEXTENSIONS_DYNAMICMESSAGETYPE, ], enum_types=[ _TESTDYNAMICEXTENSIONS_DYNAMICENUMTYPE, ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=12033, serialized_end=12570, ) _TESTREPEATEDSCALARDIFFERENTTAGSIZES = _descriptor.Descriptor( name='TestRepeatedScalarDifferentTagSizes', full_name='protobuf_unittest.TestRepeatedScalarDifferentTagSizes', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='repeated_fixed32', full_name='protobuf_unittest.TestRepeatedScalarDifferentTagSizes.repeated_fixed32', index=0, number=12, type=7, cpp_type=3, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='repeated_int32', full_name='protobuf_unittest.TestRepeatedScalarDifferentTagSizes.repeated_int32', index=1, number=13, type=5, cpp_type=1, label=3, has_default_value=False, default_value=[], message_type=None, 
enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='repeated_fixed64', full_name='protobuf_unittest.TestRepeatedScalarDifferentTagSizes.repeated_fixed64', index=2, number=2046, type=6, cpp_type=4, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='repeated_int64', full_name='protobuf_unittest.TestRepeatedScalarDifferentTagSizes.repeated_int64', index=3, number=2047, type=3, cpp_type=2, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='repeated_float', full_name='protobuf_unittest.TestRepeatedScalarDifferentTagSizes.repeated_float', index=4, number=262142, type=2, cpp_type=6, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='repeated_uint64', full_name='protobuf_unittest.TestRepeatedScalarDifferentTagSizes.repeated_uint64', index=5, number=262143, type=4, cpp_type=4, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=12573, serialized_end=12765, ) _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP1 = _descriptor.Descriptor( name='Group1', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.Group1', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='field1', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.Group1.field1', index=0, number=11, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=13558, serialized_end=13615, ) _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP2 = _descriptor.Descriptor( name='Group2', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.Group2', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='field1', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.Group2.field1', index=0, number=21, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=13617, serialized_end=13674, ) _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR = _descriptor.Descriptor( name='RepeatedFieldsGenerator', 
full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='field1', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.field1', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='field2', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.field2', index=1, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='field3', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.field3', index=2, number=3, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='group1', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.group1', index=3, number=10, type=10, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='group2', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.group2', index=4, number=20, type=10, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='ext1', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.ext1', index=5, number=1000, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='ext2', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.ext2', index=6, number=1001, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP1, _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP2, ], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=13120, serialized_end=13674, ) _TESTPARSINGMERGE_OPTIONALGROUP = _descriptor.Descriptor( name='OptionalGroup', full_name='protobuf_unittest.TestParsingMerge.OptionalGroup', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='optional_group_all_types', full_name='protobuf_unittest.TestParsingMerge.OptionalGroup.optional_group_all_types', index=0, number=11, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], 
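# type=10 (TYPE_GROUP) marks proto2 groups such as Group1/Group2 and the
# OptionalGroup/RepeatedGroup types here: a nested message whose fields are
# encoded inline between start-group/end-group wire tags rather than being
# length-delimited.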
nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=13676, serialized_end=13758, ) _TESTPARSINGMERGE_REPEATEDGROUP = _descriptor.Descriptor( name='RepeatedGroup', full_name='protobuf_unittest.TestParsingMerge.RepeatedGroup', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='repeated_group_all_types', full_name='protobuf_unittest.TestParsingMerge.RepeatedGroup.repeated_group_all_types', index=0, number=21, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=13760, serialized_end=13842, ) _TESTPARSINGMERGE = _descriptor.Descriptor( name='TestParsingMerge', full_name='protobuf_unittest.TestParsingMerge', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='required_all_types', full_name='protobuf_unittest.TestParsingMerge.required_all_types', index=0, number=1, type=11, cpp_type=10, label=2, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='optional_all_types', full_name='protobuf_unittest.TestParsingMerge.optional_all_types', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='repeated_all_types', full_name='protobuf_unittest.TestParsingMerge.repeated_all_types', index=2, number=3, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='optionalgroup', full_name='protobuf_unittest.TestParsingMerge.optionalgroup', index=3, number=10, type=10, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='repeatedgroup', full_name='protobuf_unittest.TestParsingMerge.repeatedgroup', index=4, number=20, type=10, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ _descriptor.FieldDescriptor( name='optional_ext', full_name='protobuf_unittest.TestParsingMerge.optional_ext', index=0, number=1000, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='repeated_ext', full_name='protobuf_unittest.TestParsingMerge.repeated_ext', index=1, number=1001, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, 
is_extension=True, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], nested_types=[_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR, _TESTPARSINGMERGE_OPTIONALGROUP, _TESTPARSINGMERGE_REPEATEDGROUP, ], enum_types=[ ], serialized_options=None, is_extendable=True, syntax='proto2', extension_ranges=[(1000, 536870912), ], oneofs=[ ], serialized_start=12768, serialized_end=14039, ) _TESTCOMMENTINJECTIONMESSAGE = _descriptor.Descriptor( name='TestCommentInjectionMessage', full_name='protobuf_unittest.TestCommentInjectionMessage', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='a', full_name='protobuf_unittest.TestCommentInjectionMessage.a', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=True, default_value=_b("*/ <- Neither should this.").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=14041, serialized_end=14109, ) _FOOREQUEST = _descriptor.Descriptor( name='FooRequest', full_name='protobuf_unittest.FooRequest', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=14111, serialized_end=14123, ) _FOORESPONSE = _descriptor.Descriptor( name='FooResponse', full_name='protobuf_unittest.FooResponse', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=14125, serialized_end=14138, ) _FOOCLIENTMESSAGE = _descriptor.Descriptor( name='FooClientMessage', full_name='protobuf_unittest.FooClientMessage', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=14140, serialized_end=14158, ) _FOOSERVERMESSAGE = _descriptor.Descriptor( name='FooServerMessage', full_name='protobuf_unittest.FooServerMessage', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=14160, serialized_end=14178, ) _BARREQUEST = _descriptor.Descriptor( name='BarRequest', full_name='protobuf_unittest.BarRequest', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=14180, serialized_end=14192, ) _BARRESPONSE = _descriptor.Descriptor( name='BarResponse', full_name='protobuf_unittest.BarResponse', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=14194, serialized_end=14207, ) _TESTJSONNAME = _descriptor.Descriptor( name='TestJsonName', full_name='protobuf_unittest.TestJsonName', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ 
_descriptor.FieldDescriptor( name='field_name1', full_name='protobuf_unittest.TestJsonName.field_name1', index=0, number=1, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='fieldName2', full_name='protobuf_unittest.TestJsonName.fieldName2', index=1, number=2, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='FieldName3', full_name='protobuf_unittest.TestJsonName.FieldName3', index=2, number=3, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='_field_name4', full_name='protobuf_unittest.TestJsonName._field_name4', index=3, number=4, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='FIELD_NAME5', full_name='protobuf_unittest.TestJsonName.FIELD_NAME5', index=4, number=5, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='field_name6', full_name='protobuf_unittest.TestJsonName.field_name6', index=5, number=6, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, json_name='@type', file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=14210, serialized_end=14356, ) _TESTHUGEFIELDNUMBERS_OPTIONALGROUP = _descriptor.Descriptor( name='OptionalGroup', full_name='protobuf_unittest.TestHugeFieldNumbers.OptionalGroup', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='group_a', full_name='protobuf_unittest.TestHugeFieldNumbers.OptionalGroup.group_a', index=0, number=536870009, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=15003, serialized_end=15039, ) _TESTHUGEFIELDNUMBERS_STRINGSTRINGMAPENTRY = _descriptor.Descriptor( name='StringStringMapEntry', full_name='protobuf_unittest.TestHugeFieldNumbers.StringStringMapEntry', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='key', full_name='protobuf_unittest.TestHugeFieldNumbers.StringStringMapEntry.key', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, 
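# json_name appears to be emitted only when set explicitly in the .proto, as
# with field_name6 -> '@type' in TestJsonName above; the other fields fall
# back to the default camelCase mapping.  StringStringMapEntry is the
# synthesized entry type for a map<string, string> field: key is field 1,
# value is field 2, and serialized_options=_b('8\001') (i.e. \070\001) sets
# MessageOptions.map_entry = True (field 7).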
extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='value', full_name='protobuf_unittest.TestHugeFieldNumbers.StringStringMapEntry.value', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=_b('8\001'), is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=15041, serialized_end=15095, ) _TESTHUGEFIELDNUMBERS = _descriptor.Descriptor( name='TestHugeFieldNumbers', full_name='protobuf_unittest.TestHugeFieldNumbers', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='optional_int32', full_name='protobuf_unittest.TestHugeFieldNumbers.optional_int32', index=0, number=536870000, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='fixed_32', full_name='protobuf_unittest.TestHugeFieldNumbers.fixed_32', index=1, number=536870001, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='repeated_int32', full_name='protobuf_unittest.TestHugeFieldNumbers.repeated_int32', index=2, number=536870002, type=5, cpp_type=1, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\000'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='packed_int32', full_name='protobuf_unittest.TestHugeFieldNumbers.packed_int32', index=3, number=536870003, type=5, cpp_type=1, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b('\020\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='optional_enum', full_name='protobuf_unittest.TestHugeFieldNumbers.optional_enum', index=4, number=536870004, type=14, cpp_type=8, label=1, has_default_value=False, default_value=4, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='optional_string', full_name='protobuf_unittest.TestHugeFieldNumbers.optional_string', index=5, number=536870005, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='optional_bytes', full_name='protobuf_unittest.TestHugeFieldNumbers.optional_bytes', index=6, number=536870006, type=12, cpp_type=9, label=1, has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='optional_message', full_name='protobuf_unittest.TestHugeFieldNumbers.optional_message', index=7, number=536870007, type=11, cpp_type=10, 
label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='optionalgroup', full_name='protobuf_unittest.TestHugeFieldNumbers.optionalgroup', index=8, number=536870008, type=10, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='string_string_map', full_name='protobuf_unittest.TestHugeFieldNumbers.string_string_map', index=9, number=536870010, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='oneof_uint32', full_name='protobuf_unittest.TestHugeFieldNumbers.oneof_uint32', index=10, number=536870011, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='oneof_test_all_types', full_name='protobuf_unittest.TestHugeFieldNumbers.oneof_test_all_types', index=11, number=536870012, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='oneof_string', full_name='protobuf_unittest.TestHugeFieldNumbers.oneof_string', index=12, number=536870013, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='oneof_bytes', full_name='protobuf_unittest.TestHugeFieldNumbers.oneof_bytes', index=13, number=536870014, type=12, cpp_type=9, label=1, has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[_TESTHUGEFIELDNUMBERS_OPTIONALGROUP, _TESTHUGEFIELDNUMBERS_STRINGSTRINGMAPENTRY, ], enum_types=[ ], serialized_options=None, is_extendable=True, syntax='proto2', extension_ranges=[(536860000, 536870000), ], oneofs=[ _descriptor.OneofDescriptor( name='oneof_field', full_name='protobuf_unittest.TestHugeFieldNumbers.oneof_field', index=0, containing_type=None, fields=[]), ], serialized_start=14359, serialized_end=15124, ) _TESTEXTENSIONINSIDETABLE = _descriptor.Descriptor( name='TestExtensionInsideTable', full_name='protobuf_unittest.TestExtensionInsideTable', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='field1', full_name='protobuf_unittest.TestExtensionInsideTable.field1', index=0, number=1, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='field2', full_name='protobuf_unittest.TestExtensionInsideTable.field2', index=1, number=2, type=5, cpp_type=1, label=1, 
has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='field3', full_name='protobuf_unittest.TestExtensionInsideTable.field3', index=2, number=3, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='field4', full_name='protobuf_unittest.TestExtensionInsideTable.field4', index=3, number=4, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='field6', full_name='protobuf_unittest.TestExtensionInsideTable.field6', index=4, number=6, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='field7', full_name='protobuf_unittest.TestExtensionInsideTable.field7', index=5, number=7, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='field8', full_name='protobuf_unittest.TestExtensionInsideTable.field8', index=6, number=8, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='field9', full_name='protobuf_unittest.TestExtensionInsideTable.field9', index=7, number=9, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='field10', full_name='protobuf_unittest.TestExtensionInsideTable.field10', index=8, number=10, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=True, syntax='proto2', extension_ranges=[(5, 6), ], oneofs=[ ], serialized_start=15127, serialized_end=15304, ) _TESTALLTYPES_NESTEDMESSAGE.containing_type = _TESTALLTYPES _TESTALLTYPES_OPTIONALGROUP.containing_type = _TESTALLTYPES _TESTALLTYPES_REPEATEDGROUP.containing_type = _TESTALLTYPES _TESTALLTYPES.fields_by_name['optionalgroup'].message_type = _TESTALLTYPES_OPTIONALGROUP _TESTALLTYPES.fields_by_name['optional_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE _TESTALLTYPES.fields_by_name['optional_foreign_message'].message_type = _FOREIGNMESSAGE _TESTALLTYPES.fields_by_name['optional_import_message'].message_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTMESSAGE _TESTALLTYPES.fields_by_name['optional_nested_enum'].enum_type = _TESTALLTYPES_NESTEDENUM _TESTALLTYPES.fields_by_name['optional_foreign_enum'].enum_type = _FOREIGNENUM 
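# Cross-reference patching: the Descriptor definitions above are created with
# message_type, enum_type and containing_type left as None, because the
# referenced descriptors may not exist yet at that point. The assignments in
# this block fill those references in once every descriptor object has been
# constructed, which is what lets mutually recursive types such as
# TestMutualRecursionA/TestMutualRecursionB refer to each other.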
_TESTALLTYPES.fields_by_name['optional_import_enum'].enum_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTENUM _TESTALLTYPES.fields_by_name['optional_public_import_message'].message_type = google_dot_protobuf_dot_unittest__import__public__pb2._PUBLICIMPORTMESSAGE _TESTALLTYPES.fields_by_name['optional_lazy_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE _TESTALLTYPES.fields_by_name['repeatedgroup'].message_type = _TESTALLTYPES_REPEATEDGROUP _TESTALLTYPES.fields_by_name['repeated_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE _TESTALLTYPES.fields_by_name['repeated_foreign_message'].message_type = _FOREIGNMESSAGE _TESTALLTYPES.fields_by_name['repeated_import_message'].message_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTMESSAGE _TESTALLTYPES.fields_by_name['repeated_nested_enum'].enum_type = _TESTALLTYPES_NESTEDENUM _TESTALLTYPES.fields_by_name['repeated_foreign_enum'].enum_type = _FOREIGNENUM _TESTALLTYPES.fields_by_name['repeated_import_enum'].enum_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTENUM _TESTALLTYPES.fields_by_name['repeated_lazy_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE _TESTALLTYPES.fields_by_name['default_nested_enum'].enum_type = _TESTALLTYPES_NESTEDENUM _TESTALLTYPES.fields_by_name['default_foreign_enum'].enum_type = _FOREIGNENUM _TESTALLTYPES.fields_by_name['default_import_enum'].enum_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTENUM _TESTALLTYPES.fields_by_name['oneof_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE _TESTALLTYPES_NESTEDENUM.containing_type = _TESTALLTYPES _TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append( _TESTALLTYPES.fields_by_name['oneof_uint32']) _TESTALLTYPES.fields_by_name['oneof_uint32'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field'] _TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append( _TESTALLTYPES.fields_by_name['oneof_nested_message']) _TESTALLTYPES.fields_by_name['oneof_nested_message'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field'] _TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append( _TESTALLTYPES.fields_by_name['oneof_string']) _TESTALLTYPES.fields_by_name['oneof_string'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field'] _TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append( _TESTALLTYPES.fields_by_name['oneof_bytes']) _TESTALLTYPES.fields_by_name['oneof_bytes'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field'] _NESTEDTESTALLTYPES.fields_by_name['child'].message_type = _NESTEDTESTALLTYPES _NESTEDTESTALLTYPES.fields_by_name['payload'].message_type = _TESTALLTYPES _NESTEDTESTALLTYPES.fields_by_name['repeated_child'].message_type = _NESTEDTESTALLTYPES _TESTDEPRECATEDFIELDS.oneofs_by_name['oneof_fields'].fields.append( _TESTDEPRECATEDFIELDS.fields_by_name['deprecated_int32_in_oneof']) _TESTDEPRECATEDFIELDS.fields_by_name['deprecated_int32_in_oneof'].containing_oneof = _TESTDEPRECATEDFIELDS.oneofs_by_name['oneof_fields'] _TESTGROUP_OPTIONALGROUP.containing_type = _TESTGROUP _TESTGROUP.fields_by_name['optionalgroup'].message_type = _TESTGROUP_OPTIONALGROUP _TESTGROUP.fields_by_name['optional_foreign_enum'].enum_type = _FOREIGNENUM _TESTNESTEDEXTENSION_OPTIONALGROUP_EXTENSION.containing_type = _TESTNESTEDEXTENSION _TESTREQUIREDFOREIGN.fields_by_name['optional_message'].message_type = _TESTREQUIRED _TESTREQUIREDFOREIGN.fields_by_name['repeated_message'].message_type = _TESTREQUIRED _TESTREQUIREDMESSAGE.fields_by_name['optional_message'].message_type = _TESTREQUIRED 
_TESTREQUIREDMESSAGE.fields_by_name['repeated_message'].message_type = _TESTREQUIRED _TESTREQUIREDMESSAGE.fields_by_name['required_message'].message_type = _TESTREQUIRED _TESTFOREIGNNESTED.fields_by_name['foreign_nested'].message_type = _TESTALLTYPES_NESTEDMESSAGE _TESTRECURSIVEMESSAGE.fields_by_name['a'].message_type = _TESTRECURSIVEMESSAGE _TESTMUTUALRECURSIONA_SUBMESSAGE.fields_by_name['b'].message_type = _TESTMUTUALRECURSIONB _TESTMUTUALRECURSIONA_SUBMESSAGE.containing_type = _TESTMUTUALRECURSIONA _TESTMUTUALRECURSIONA_SUBGROUP.fields_by_name['sub_message'].message_type = _TESTMUTUALRECURSIONA_SUBMESSAGE _TESTMUTUALRECURSIONA_SUBGROUP.fields_by_name['not_in_this_scc'].message_type = _TESTALLTYPES _TESTMUTUALRECURSIONA_SUBGROUP.containing_type = _TESTMUTUALRECURSIONA _TESTMUTUALRECURSIONA.fields_by_name['bb'].message_type = _TESTMUTUALRECURSIONB _TESTMUTUALRECURSIONA.fields_by_name['subgroup'].message_type = _TESTMUTUALRECURSIONA_SUBGROUP _TESTMUTUALRECURSIONB.fields_by_name['a'].message_type = _TESTMUTUALRECURSIONA _TESTISINITIALIZED_SUBMESSAGE_SUBGROUP.containing_type = _TESTISINITIALIZED_SUBMESSAGE _TESTISINITIALIZED_SUBMESSAGE.fields_by_name['subgroup'].message_type = _TESTISINITIALIZED_SUBMESSAGE_SUBGROUP _TESTISINITIALIZED_SUBMESSAGE.containing_type = _TESTISINITIALIZED _TESTISINITIALIZED.fields_by_name['sub_message'].message_type = _TESTISINITIALIZED_SUBMESSAGE _TESTDUPFIELDNUMBER_FOO.containing_type = _TESTDUPFIELDNUMBER _TESTDUPFIELDNUMBER_BAR.containing_type = _TESTDUPFIELDNUMBER _TESTDUPFIELDNUMBER.fields_by_name['foo'].message_type = _TESTDUPFIELDNUMBER_FOO _TESTDUPFIELDNUMBER.fields_by_name['bar'].message_type = _TESTDUPFIELDNUMBER_BAR _TESTEAGERMESSAGE.fields_by_name['sub_message'].message_type = _TESTALLTYPES _TESTLAZYMESSAGE.fields_by_name['sub_message'].message_type = _TESTALLTYPES _TESTNESTEDMESSAGEHASBITS_NESTEDMESSAGE.fields_by_name['nestedmessage_repeated_foreignmessage'].message_type = _FOREIGNMESSAGE _TESTNESTEDMESSAGEHASBITS_NESTEDMESSAGE.containing_type = _TESTNESTEDMESSAGEHASBITS _TESTNESTEDMESSAGEHASBITS.fields_by_name['optional_nested_message'].message_type = _TESTNESTEDMESSAGEHASBITS_NESTEDMESSAGE _TESTCAMELCASEFIELDNAMES.fields_by_name['EnumField'].enum_type = _FOREIGNENUM _TESTCAMELCASEFIELDNAMES.fields_by_name['MessageField'].message_type = _FOREIGNMESSAGE _TESTCAMELCASEFIELDNAMES.fields_by_name['RepeatedEnumField'].enum_type = _FOREIGNENUM _TESTCAMELCASEFIELDNAMES.fields_by_name['RepeatedMessageField'].message_type = _FOREIGNMESSAGE _TESTFIELDORDERINGS_NESTEDMESSAGE.containing_type = _TESTFIELDORDERINGS _TESTFIELDORDERINGS.fields_by_name['optional_nested_message'].message_type = _TESTFIELDORDERINGS_NESTEDMESSAGE _TESTEXTENSIONORDERINGS2_TESTEXTENSIONORDERINGS3.containing_type = _TESTEXTENSIONORDERINGS2 _SPARSEENUMMESSAGE.fields_by_name['sparse_enum'].enum_type = _TESTSPARSEENUM _TESTONEOF_FOOGROUP.containing_type = _TESTONEOF _TESTONEOF.fields_by_name['foo_message'].message_type = _TESTALLTYPES _TESTONEOF.fields_by_name['foogroup'].message_type = _TESTONEOF_FOOGROUP _TESTONEOF.oneofs_by_name['foo'].fields.append( _TESTONEOF.fields_by_name['foo_int']) _TESTONEOF.fields_by_name['foo_int'].containing_oneof = _TESTONEOF.oneofs_by_name['foo'] _TESTONEOF.oneofs_by_name['foo'].fields.append( _TESTONEOF.fields_by_name['foo_string']) _TESTONEOF.fields_by_name['foo_string'].containing_oneof = _TESTONEOF.oneofs_by_name['foo'] _TESTONEOF.oneofs_by_name['foo'].fields.append( _TESTONEOF.fields_by_name['foo_message']) 
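# Oneof wiring follows a fixed two-statement pattern throughout this block:
# the field descriptor is appended to its OneofDescriptor's field list, and
# the field gets a containing_oneof back-reference. Both links are needed at
# runtime by message APIs such as WhichOneof() and ClearField().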
_TESTONEOF.fields_by_name['foo_message'].containing_oneof = _TESTONEOF.oneofs_by_name['foo'] _TESTONEOF.oneofs_by_name['foo'].fields.append( _TESTONEOF.fields_by_name['foogroup']) _TESTONEOF.fields_by_name['foogroup'].containing_oneof = _TESTONEOF.oneofs_by_name['foo'] _TESTONEOFBACKWARDSCOMPATIBLE_FOOGROUP.containing_type = _TESTONEOFBACKWARDSCOMPATIBLE _TESTONEOFBACKWARDSCOMPATIBLE.fields_by_name['foo_message'].message_type = _TESTALLTYPES _TESTONEOFBACKWARDSCOMPATIBLE.fields_by_name['foogroup'].message_type = _TESTONEOFBACKWARDSCOMPATIBLE_FOOGROUP _TESTONEOF2_FOOGROUP.containing_type = _TESTONEOF2 _TESTONEOF2_NESTEDMESSAGE.containing_type = _TESTONEOF2 _TESTONEOF2.fields_by_name['foo_enum'].enum_type = _TESTONEOF2_NESTEDENUM _TESTONEOF2.fields_by_name['foo_message'].message_type = _TESTONEOF2_NESTEDMESSAGE _TESTONEOF2.fields_by_name['foogroup'].message_type = _TESTONEOF2_FOOGROUP _TESTONEOF2.fields_by_name['foo_lazy_message'].message_type = _TESTONEOF2_NESTEDMESSAGE _TESTONEOF2.fields_by_name['bar_enum'].enum_type = _TESTONEOF2_NESTEDENUM _TESTONEOF2_NESTEDENUM.containing_type = _TESTONEOF2 _TESTONEOF2.oneofs_by_name['foo'].fields.append( _TESTONEOF2.fields_by_name['foo_int']) _TESTONEOF2.fields_by_name['foo_int'].containing_oneof = _TESTONEOF2.oneofs_by_name['foo'] _TESTONEOF2.oneofs_by_name['foo'].fields.append( _TESTONEOF2.fields_by_name['foo_string']) _TESTONEOF2.fields_by_name['foo_string'].containing_oneof = _TESTONEOF2.oneofs_by_name['foo'] _TESTONEOF2.oneofs_by_name['foo'].fields.append( _TESTONEOF2.fields_by_name['foo_cord']) _TESTONEOF2.fields_by_name['foo_cord'].containing_oneof = _TESTONEOF2.oneofs_by_name['foo'] _TESTONEOF2.oneofs_by_name['foo'].fields.append( _TESTONEOF2.fields_by_name['foo_string_piece']) _TESTONEOF2.fields_by_name['foo_string_piece'].containing_oneof = _TESTONEOF2.oneofs_by_name['foo'] _TESTONEOF2.oneofs_by_name['foo'].fields.append( _TESTONEOF2.fields_by_name['foo_bytes']) _TESTONEOF2.fields_by_name['foo_bytes'].containing_oneof = _TESTONEOF2.oneofs_by_name['foo'] _TESTONEOF2.oneofs_by_name['foo'].fields.append( _TESTONEOF2.fields_by_name['foo_enum']) _TESTONEOF2.fields_by_name['foo_enum'].containing_oneof = _TESTONEOF2.oneofs_by_name['foo'] _TESTONEOF2.oneofs_by_name['foo'].fields.append( _TESTONEOF2.fields_by_name['foo_message']) _TESTONEOF2.fields_by_name['foo_message'].containing_oneof = _TESTONEOF2.oneofs_by_name['foo'] _TESTONEOF2.oneofs_by_name['foo'].fields.append( _TESTONEOF2.fields_by_name['foogroup']) _TESTONEOF2.fields_by_name['foogroup'].containing_oneof = _TESTONEOF2.oneofs_by_name['foo'] _TESTONEOF2.oneofs_by_name['foo'].fields.append( _TESTONEOF2.fields_by_name['foo_lazy_message']) _TESTONEOF2.fields_by_name['foo_lazy_message'].containing_oneof = _TESTONEOF2.oneofs_by_name['foo'] _TESTONEOF2.oneofs_by_name['bar'].fields.append( _TESTONEOF2.fields_by_name['bar_int']) _TESTONEOF2.fields_by_name['bar_int'].containing_oneof = _TESTONEOF2.oneofs_by_name['bar'] _TESTONEOF2.oneofs_by_name['bar'].fields.append( _TESTONEOF2.fields_by_name['bar_string']) _TESTONEOF2.fields_by_name['bar_string'].containing_oneof = _TESTONEOF2.oneofs_by_name['bar'] _TESTONEOF2.oneofs_by_name['bar'].fields.append( _TESTONEOF2.fields_by_name['bar_cord']) _TESTONEOF2.fields_by_name['bar_cord'].containing_oneof = _TESTONEOF2.oneofs_by_name['bar'] _TESTONEOF2.oneofs_by_name['bar'].fields.append( _TESTONEOF2.fields_by_name['bar_string_piece']) _TESTONEOF2.fields_by_name['bar_string_piece'].containing_oneof = _TESTONEOF2.oneofs_by_name['bar'] 
_TESTONEOF2.oneofs_by_name['bar'].fields.append( _TESTONEOF2.fields_by_name['bar_bytes']) _TESTONEOF2.fields_by_name['bar_bytes'].containing_oneof = _TESTONEOF2.oneofs_by_name['bar'] _TESTONEOF2.oneofs_by_name['bar'].fields.append( _TESTONEOF2.fields_by_name['bar_enum']) _TESTONEOF2.fields_by_name['bar_enum'].containing_oneof = _TESTONEOF2.oneofs_by_name['bar'] _TESTREQUIREDONEOF_NESTEDMESSAGE.containing_type = _TESTREQUIREDONEOF _TESTREQUIREDONEOF.fields_by_name['foo_message'].message_type = _TESTREQUIREDONEOF_NESTEDMESSAGE _TESTREQUIREDONEOF.oneofs_by_name['foo'].fields.append( _TESTREQUIREDONEOF.fields_by_name['foo_int']) _TESTREQUIREDONEOF.fields_by_name['foo_int'].containing_oneof = _TESTREQUIREDONEOF.oneofs_by_name['foo'] _TESTREQUIREDONEOF.oneofs_by_name['foo'].fields.append( _TESTREQUIREDONEOF.fields_by_name['foo_string']) _TESTREQUIREDONEOF.fields_by_name['foo_string'].containing_oneof = _TESTREQUIREDONEOF.oneofs_by_name['foo'] _TESTREQUIREDONEOF.oneofs_by_name['foo'].fields.append( _TESTREQUIREDONEOF.fields_by_name['foo_message']) _TESTREQUIREDONEOF.fields_by_name['foo_message'].containing_oneof = _TESTREQUIREDONEOF.oneofs_by_name['foo'] _TESTPACKEDTYPES.fields_by_name['packed_enum'].enum_type = _FOREIGNENUM _TESTUNPACKEDTYPES.fields_by_name['unpacked_enum'].enum_type = _FOREIGNENUM _TESTDYNAMICEXTENSIONS_DYNAMICMESSAGETYPE.containing_type = _TESTDYNAMICEXTENSIONS _TESTDYNAMICEXTENSIONS.fields_by_name['enum_extension'].enum_type = _FOREIGNENUM _TESTDYNAMICEXTENSIONS.fields_by_name['dynamic_enum_extension'].enum_type = _TESTDYNAMICEXTENSIONS_DYNAMICENUMTYPE _TESTDYNAMICEXTENSIONS.fields_by_name['message_extension'].message_type = _FOREIGNMESSAGE _TESTDYNAMICEXTENSIONS.fields_by_name['dynamic_message_extension'].message_type = _TESTDYNAMICEXTENSIONS_DYNAMICMESSAGETYPE _TESTDYNAMICEXTENSIONS_DYNAMICENUMTYPE.containing_type = _TESTDYNAMICEXTENSIONS _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP1.fields_by_name['field1'].message_type = _TESTALLTYPES _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP1.containing_type = _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP2.fields_by_name['field1'].message_type = _TESTALLTYPES _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP2.containing_type = _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR.fields_by_name['field1'].message_type = _TESTALLTYPES _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR.fields_by_name['field2'].message_type = _TESTALLTYPES _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR.fields_by_name['field3'].message_type = _TESTALLTYPES _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR.fields_by_name['group1'].message_type = _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP1 _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR.fields_by_name['group2'].message_type = _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP2 _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR.fields_by_name['ext1'].message_type = _TESTALLTYPES _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR.fields_by_name['ext2'].message_type = _TESTALLTYPES _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR.containing_type = _TESTPARSINGMERGE _TESTPARSINGMERGE_OPTIONALGROUP.fields_by_name['optional_group_all_types'].message_type = _TESTALLTYPES _TESTPARSINGMERGE_OPTIONALGROUP.containing_type = _TESTPARSINGMERGE _TESTPARSINGMERGE_REPEATEDGROUP.fields_by_name['repeated_group_all_types'].message_type = _TESTALLTYPES _TESTPARSINGMERGE_REPEATEDGROUP.containing_type = _TESTPARSINGMERGE _TESTPARSINGMERGE.fields_by_name['required_all_types'].message_type = _TESTALLTYPES 
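# TestHugeFieldNumbers, linked below, exercises field numbers close to the
# protobuf maximum of 536870911 (2**29 - 1), plus an extension range placed
# just beneath its regular fields.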
_TESTPARSINGMERGE.fields_by_name['optional_all_types'].message_type = _TESTALLTYPES _TESTPARSINGMERGE.fields_by_name['repeated_all_types'].message_type = _TESTALLTYPES _TESTPARSINGMERGE.fields_by_name['optionalgroup'].message_type = _TESTPARSINGMERGE_OPTIONALGROUP _TESTPARSINGMERGE.fields_by_name['repeatedgroup'].message_type = _TESTPARSINGMERGE_REPEATEDGROUP _TESTHUGEFIELDNUMBERS_OPTIONALGROUP.containing_type = _TESTHUGEFIELDNUMBERS _TESTHUGEFIELDNUMBERS_STRINGSTRINGMAPENTRY.containing_type = _TESTHUGEFIELDNUMBERS _TESTHUGEFIELDNUMBERS.fields_by_name['optional_enum'].enum_type = _FOREIGNENUM _TESTHUGEFIELDNUMBERS.fields_by_name['optional_message'].message_type = _FOREIGNMESSAGE _TESTHUGEFIELDNUMBERS.fields_by_name['optionalgroup'].message_type = _TESTHUGEFIELDNUMBERS_OPTIONALGROUP _TESTHUGEFIELDNUMBERS.fields_by_name['string_string_map'].message_type = _TESTHUGEFIELDNUMBERS_STRINGSTRINGMAPENTRY _TESTHUGEFIELDNUMBERS.fields_by_name['oneof_test_all_types'].message_type = _TESTALLTYPES _TESTHUGEFIELDNUMBERS.oneofs_by_name['oneof_field'].fields.append( _TESTHUGEFIELDNUMBERS.fields_by_name['oneof_uint32']) _TESTHUGEFIELDNUMBERS.fields_by_name['oneof_uint32'].containing_oneof = _TESTHUGEFIELDNUMBERS.oneofs_by_name['oneof_field'] _TESTHUGEFIELDNUMBERS.oneofs_by_name['oneof_field'].fields.append( _TESTHUGEFIELDNUMBERS.fields_by_name['oneof_test_all_types']) _TESTHUGEFIELDNUMBERS.fields_by_name['oneof_test_all_types'].containing_oneof = _TESTHUGEFIELDNUMBERS.oneofs_by_name['oneof_field'] _TESTHUGEFIELDNUMBERS.oneofs_by_name['oneof_field'].fields.append( _TESTHUGEFIELDNUMBERS.fields_by_name['oneof_string']) _TESTHUGEFIELDNUMBERS.fields_by_name['oneof_string'].containing_oneof = _TESTHUGEFIELDNUMBERS.oneofs_by_name['oneof_field'] _TESTHUGEFIELDNUMBERS.oneofs_by_name['oneof_field'].fields.append( _TESTHUGEFIELDNUMBERS.fields_by_name['oneof_bytes']) _TESTHUGEFIELDNUMBERS.fields_by_name['oneof_bytes'].containing_oneof = _TESTHUGEFIELDNUMBERS.oneofs_by_name['oneof_field'] DESCRIPTOR.message_types_by_name['TestAllTypes'] = _TESTALLTYPES DESCRIPTOR.message_types_by_name['NestedTestAllTypes'] = _NESTEDTESTALLTYPES DESCRIPTOR.message_types_by_name['TestDeprecatedFields'] = _TESTDEPRECATEDFIELDS DESCRIPTOR.message_types_by_name['TestDeprecatedMessage'] = _TESTDEPRECATEDMESSAGE DESCRIPTOR.message_types_by_name['ForeignMessage'] = _FOREIGNMESSAGE DESCRIPTOR.message_types_by_name['TestReservedFields'] = _TESTRESERVEDFIELDS DESCRIPTOR.message_types_by_name['TestAllExtensions'] = _TESTALLEXTENSIONS DESCRIPTOR.message_types_by_name['OptionalGroup_extension'] = _OPTIONALGROUP_EXTENSION DESCRIPTOR.message_types_by_name['RepeatedGroup_extension'] = _REPEATEDGROUP_EXTENSION DESCRIPTOR.message_types_by_name['TestGroup'] = _TESTGROUP DESCRIPTOR.message_types_by_name['TestGroupExtension'] = _TESTGROUPEXTENSION DESCRIPTOR.message_types_by_name['TestNestedExtension'] = _TESTNESTEDEXTENSION DESCRIPTOR.message_types_by_name['TestRequired'] = _TESTREQUIRED DESCRIPTOR.message_types_by_name['TestRequiredForeign'] = _TESTREQUIREDFOREIGN DESCRIPTOR.message_types_by_name['TestRequiredMessage'] = _TESTREQUIREDMESSAGE DESCRIPTOR.message_types_by_name['TestForeignNested'] = _TESTFOREIGNNESTED DESCRIPTOR.message_types_by_name['TestEmptyMessage'] = _TESTEMPTYMESSAGE DESCRIPTOR.message_types_by_name['TestEmptyMessageWithExtensions'] = _TESTEMPTYMESSAGEWITHEXTENSIONS DESCRIPTOR.message_types_by_name['TestMultipleExtensionRanges'] = _TESTMULTIPLEEXTENSIONRANGES DESCRIPTOR.message_types_by_name['TestReallyLargeTagNumber'] = 
_TESTREALLYLARGETAGNUMBER DESCRIPTOR.message_types_by_name['TestRecursiveMessage'] = _TESTRECURSIVEMESSAGE DESCRIPTOR.message_types_by_name['TestMutualRecursionA'] = _TESTMUTUALRECURSIONA DESCRIPTOR.message_types_by_name['TestMutualRecursionB'] = _TESTMUTUALRECURSIONB DESCRIPTOR.message_types_by_name['TestIsInitialized'] = _TESTISINITIALIZED DESCRIPTOR.message_types_by_name['TestDupFieldNumber'] = _TESTDUPFIELDNUMBER DESCRIPTOR.message_types_by_name['TestEagerMessage'] = _TESTEAGERMESSAGE DESCRIPTOR.message_types_by_name['TestLazyMessage'] = _TESTLAZYMESSAGE DESCRIPTOR.message_types_by_name['TestNestedMessageHasBits'] = _TESTNESTEDMESSAGEHASBITS DESCRIPTOR.message_types_by_name['TestCamelCaseFieldNames'] = _TESTCAMELCASEFIELDNAMES DESCRIPTOR.message_types_by_name['TestFieldOrderings'] = _TESTFIELDORDERINGS DESCRIPTOR.message_types_by_name['TestExtensionOrderings1'] = _TESTEXTENSIONORDERINGS1 DESCRIPTOR.message_types_by_name['TestExtensionOrderings2'] = _TESTEXTENSIONORDERINGS2 DESCRIPTOR.message_types_by_name['TestExtremeDefaultValues'] = _TESTEXTREMEDEFAULTVALUES DESCRIPTOR.message_types_by_name['SparseEnumMessage'] = _SPARSEENUMMESSAGE DESCRIPTOR.message_types_by_name['OneString'] = _ONESTRING DESCRIPTOR.message_types_by_name['MoreString'] = _MORESTRING DESCRIPTOR.message_types_by_name['OneBytes'] = _ONEBYTES DESCRIPTOR.message_types_by_name['MoreBytes'] = _MOREBYTES DESCRIPTOR.message_types_by_name['Int32Message'] = _INT32MESSAGE DESCRIPTOR.message_types_by_name['Uint32Message'] = _UINT32MESSAGE DESCRIPTOR.message_types_by_name['Int64Message'] = _INT64MESSAGE DESCRIPTOR.message_types_by_name['Uint64Message'] = _UINT64MESSAGE DESCRIPTOR.message_types_by_name['BoolMessage'] = _BOOLMESSAGE DESCRIPTOR.message_types_by_name['TestOneof'] = _TESTONEOF DESCRIPTOR.message_types_by_name['TestOneofBackwardsCompatible'] = _TESTONEOFBACKWARDSCOMPATIBLE DESCRIPTOR.message_types_by_name['TestOneof2'] = _TESTONEOF2 DESCRIPTOR.message_types_by_name['TestRequiredOneof'] = _TESTREQUIREDONEOF DESCRIPTOR.message_types_by_name['TestPackedTypes'] = _TESTPACKEDTYPES DESCRIPTOR.message_types_by_name['TestUnpackedTypes'] = _TESTUNPACKEDTYPES DESCRIPTOR.message_types_by_name['TestPackedExtensions'] = _TESTPACKEDEXTENSIONS DESCRIPTOR.message_types_by_name['TestUnpackedExtensions'] = _TESTUNPACKEDEXTENSIONS DESCRIPTOR.message_types_by_name['TestDynamicExtensions'] = _TESTDYNAMICEXTENSIONS DESCRIPTOR.message_types_by_name['TestRepeatedScalarDifferentTagSizes'] = _TESTREPEATEDSCALARDIFFERENTTAGSIZES DESCRIPTOR.message_types_by_name['TestParsingMerge'] = _TESTPARSINGMERGE DESCRIPTOR.message_types_by_name['TestCommentInjectionMessage'] = _TESTCOMMENTINJECTIONMESSAGE DESCRIPTOR.message_types_by_name['FooRequest'] = _FOOREQUEST DESCRIPTOR.message_types_by_name['FooResponse'] = _FOORESPONSE DESCRIPTOR.message_types_by_name['FooClientMessage'] = _FOOCLIENTMESSAGE DESCRIPTOR.message_types_by_name['FooServerMessage'] = _FOOSERVERMESSAGE DESCRIPTOR.message_types_by_name['BarRequest'] = _BARREQUEST DESCRIPTOR.message_types_by_name['BarResponse'] = _BARRESPONSE DESCRIPTOR.message_types_by_name['TestJsonName'] = _TESTJSONNAME DESCRIPTOR.message_types_by_name['TestHugeFieldNumbers'] = _TESTHUGEFIELDNUMBERS DESCRIPTOR.message_types_by_name['TestExtensionInsideTable'] = _TESTEXTENSIONINSIDETABLE DESCRIPTOR.enum_types_by_name['ForeignEnum'] = _FOREIGNENUM DESCRIPTOR.enum_types_by_name['TestEnumWithDupValue'] = _TESTENUMWITHDUPVALUE DESCRIPTOR.enum_types_by_name['TestSparseEnum'] = _TESTSPARSEENUM 
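# Registration of the file's top-level extension fields (most of them from
# the long "extend TestAllExtensions" block in unittest.proto), keyed by
# name on the file descriptor so they can be found via extensions_by_name.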
DESCRIPTOR.extensions_by_name['optional_int32_extension'] = optional_int32_extension DESCRIPTOR.extensions_by_name['optional_int64_extension'] = optional_int64_extension DESCRIPTOR.extensions_by_name['optional_uint32_extension'] = optional_uint32_extension DESCRIPTOR.extensions_by_name['optional_uint64_extension'] = optional_uint64_extension DESCRIPTOR.extensions_by_name['optional_sint32_extension'] = optional_sint32_extension DESCRIPTOR.extensions_by_name['optional_sint64_extension'] = optional_sint64_extension DESCRIPTOR.extensions_by_name['optional_fixed32_extension'] = optional_fixed32_extension DESCRIPTOR.extensions_by_name['optional_fixed64_extension'] = optional_fixed64_extension DESCRIPTOR.extensions_by_name['optional_sfixed32_extension'] = optional_sfixed32_extension DESCRIPTOR.extensions_by_name['optional_sfixed64_extension'] = optional_sfixed64_extension DESCRIPTOR.extensions_by_name['optional_float_extension'] = optional_float_extension DESCRIPTOR.extensions_by_name['optional_double_extension'] = optional_double_extension DESCRIPTOR.extensions_by_name['optional_bool_extension'] = optional_bool_extension DESCRIPTOR.extensions_by_name['optional_string_extension'] = optional_string_extension DESCRIPTOR.extensions_by_name['optional_bytes_extension'] = optional_bytes_extension DESCRIPTOR.extensions_by_name['optionalgroup_extension'] = optionalgroup_extension DESCRIPTOR.extensions_by_name['optional_nested_message_extension'] = optional_nested_message_extension DESCRIPTOR.extensions_by_name['optional_foreign_message_extension'] = optional_foreign_message_extension DESCRIPTOR.extensions_by_name['optional_import_message_extension'] = optional_import_message_extension DESCRIPTOR.extensions_by_name['optional_nested_enum_extension'] = optional_nested_enum_extension DESCRIPTOR.extensions_by_name['optional_foreign_enum_extension'] = optional_foreign_enum_extension DESCRIPTOR.extensions_by_name['optional_import_enum_extension'] = optional_import_enum_extension DESCRIPTOR.extensions_by_name['optional_string_piece_extension'] = optional_string_piece_extension DESCRIPTOR.extensions_by_name['optional_cord_extension'] = optional_cord_extension DESCRIPTOR.extensions_by_name['optional_public_import_message_extension'] = optional_public_import_message_extension DESCRIPTOR.extensions_by_name['optional_lazy_message_extension'] = optional_lazy_message_extension DESCRIPTOR.extensions_by_name['repeated_int32_extension'] = repeated_int32_extension DESCRIPTOR.extensions_by_name['repeated_int64_extension'] = repeated_int64_extension DESCRIPTOR.extensions_by_name['repeated_uint32_extension'] = repeated_uint32_extension DESCRIPTOR.extensions_by_name['repeated_uint64_extension'] = repeated_uint64_extension DESCRIPTOR.extensions_by_name['repeated_sint32_extension'] = repeated_sint32_extension DESCRIPTOR.extensions_by_name['repeated_sint64_extension'] = repeated_sint64_extension DESCRIPTOR.extensions_by_name['repeated_fixed32_extension'] = repeated_fixed32_extension DESCRIPTOR.extensions_by_name['repeated_fixed64_extension'] = repeated_fixed64_extension DESCRIPTOR.extensions_by_name['repeated_sfixed32_extension'] = repeated_sfixed32_extension DESCRIPTOR.extensions_by_name['repeated_sfixed64_extension'] = repeated_sfixed64_extension DESCRIPTOR.extensions_by_name['repeated_float_extension'] = repeated_float_extension DESCRIPTOR.extensions_by_name['repeated_double_extension'] = repeated_double_extension DESCRIPTOR.extensions_by_name['repeated_bool_extension'] = repeated_bool_extension 
DESCRIPTOR.extensions_by_name['repeated_string_extension'] = repeated_string_extension DESCRIPTOR.extensions_by_name['repeated_bytes_extension'] = repeated_bytes_extension DESCRIPTOR.extensions_by_name['repeatedgroup_extension'] = repeatedgroup_extension DESCRIPTOR.extensions_by_name['repeated_nested_message_extension'] = repeated_nested_message_extension DESCRIPTOR.extensions_by_name['repeated_foreign_message_extension'] = repeated_foreign_message_extension DESCRIPTOR.extensions_by_name['repeated_import_message_extension'] = repeated_import_message_extension DESCRIPTOR.extensions_by_name['repeated_nested_enum_extension'] = repeated_nested_enum_extension DESCRIPTOR.extensions_by_name['repeated_foreign_enum_extension'] = repeated_foreign_enum_extension DESCRIPTOR.extensions_by_name['repeated_import_enum_extension'] = repeated_import_enum_extension DESCRIPTOR.extensions_by_name['repeated_string_piece_extension'] = repeated_string_piece_extension DESCRIPTOR.extensions_by_name['repeated_cord_extension'] = repeated_cord_extension DESCRIPTOR.extensions_by_name['repeated_lazy_message_extension'] = repeated_lazy_message_extension DESCRIPTOR.extensions_by_name['default_int32_extension'] = default_int32_extension DESCRIPTOR.extensions_by_name['default_int64_extension'] = default_int64_extension DESCRIPTOR.extensions_by_name['default_uint32_extension'] = default_uint32_extension DESCRIPTOR.extensions_by_name['default_uint64_extension'] = default_uint64_extension DESCRIPTOR.extensions_by_name['default_sint32_extension'] = default_sint32_extension DESCRIPTOR.extensions_by_name['default_sint64_extension'] = default_sint64_extension DESCRIPTOR.extensions_by_name['default_fixed32_extension'] = default_fixed32_extension DESCRIPTOR.extensions_by_name['default_fixed64_extension'] = default_fixed64_extension DESCRIPTOR.extensions_by_name['default_sfixed32_extension'] = default_sfixed32_extension DESCRIPTOR.extensions_by_name['default_sfixed64_extension'] = default_sfixed64_extension DESCRIPTOR.extensions_by_name['default_float_extension'] = default_float_extension DESCRIPTOR.extensions_by_name['default_double_extension'] = default_double_extension DESCRIPTOR.extensions_by_name['default_bool_extension'] = default_bool_extension DESCRIPTOR.extensions_by_name['default_string_extension'] = default_string_extension DESCRIPTOR.extensions_by_name['default_bytes_extension'] = default_bytes_extension DESCRIPTOR.extensions_by_name['default_nested_enum_extension'] = default_nested_enum_extension DESCRIPTOR.extensions_by_name['default_foreign_enum_extension'] = default_foreign_enum_extension DESCRIPTOR.extensions_by_name['default_import_enum_extension'] = default_import_enum_extension DESCRIPTOR.extensions_by_name['default_string_piece_extension'] = default_string_piece_extension DESCRIPTOR.extensions_by_name['default_cord_extension'] = default_cord_extension DESCRIPTOR.extensions_by_name['oneof_uint32_extension'] = oneof_uint32_extension DESCRIPTOR.extensions_by_name['oneof_nested_message_extension'] = oneof_nested_message_extension DESCRIPTOR.extensions_by_name['oneof_string_extension'] = oneof_string_extension DESCRIPTOR.extensions_by_name['oneof_bytes_extension'] = oneof_bytes_extension DESCRIPTOR.extensions_by_name['my_extension_string'] = my_extension_string DESCRIPTOR.extensions_by_name['my_extension_int'] = my_extension_int DESCRIPTOR.extensions_by_name['packed_int32_extension'] = packed_int32_extension DESCRIPTOR.extensions_by_name['packed_int64_extension'] = packed_int64_extension 
DESCRIPTOR.extensions_by_name['packed_uint32_extension'] = packed_uint32_extension DESCRIPTOR.extensions_by_name['packed_uint64_extension'] = packed_uint64_extension DESCRIPTOR.extensions_by_name['packed_sint32_extension'] = packed_sint32_extension DESCRIPTOR.extensions_by_name['packed_sint64_extension'] = packed_sint64_extension DESCRIPTOR.extensions_by_name['packed_fixed32_extension'] = packed_fixed32_extension DESCRIPTOR.extensions_by_name['packed_fixed64_extension'] = packed_fixed64_extension DESCRIPTOR.extensions_by_name['packed_sfixed32_extension'] = packed_sfixed32_extension DESCRIPTOR.extensions_by_name['packed_sfixed64_extension'] = packed_sfixed64_extension DESCRIPTOR.extensions_by_name['packed_float_extension'] = packed_float_extension DESCRIPTOR.extensions_by_name['packed_double_extension'] = packed_double_extension DESCRIPTOR.extensions_by_name['packed_bool_extension'] = packed_bool_extension DESCRIPTOR.extensions_by_name['packed_enum_extension'] = packed_enum_extension DESCRIPTOR.extensions_by_name['unpacked_int32_extension'] = unpacked_int32_extension DESCRIPTOR.extensions_by_name['unpacked_int64_extension'] = unpacked_int64_extension DESCRIPTOR.extensions_by_name['unpacked_uint32_extension'] = unpacked_uint32_extension DESCRIPTOR.extensions_by_name['unpacked_uint64_extension'] = unpacked_uint64_extension DESCRIPTOR.extensions_by_name['unpacked_sint32_extension'] = unpacked_sint32_extension DESCRIPTOR.extensions_by_name['unpacked_sint64_extension'] = unpacked_sint64_extension DESCRIPTOR.extensions_by_name['unpacked_fixed32_extension'] = unpacked_fixed32_extension DESCRIPTOR.extensions_by_name['unpacked_fixed64_extension'] = unpacked_fixed64_extension DESCRIPTOR.extensions_by_name['unpacked_sfixed32_extension'] = unpacked_sfixed32_extension DESCRIPTOR.extensions_by_name['unpacked_sfixed64_extension'] = unpacked_sfixed64_extension DESCRIPTOR.extensions_by_name['unpacked_float_extension'] = unpacked_float_extension DESCRIPTOR.extensions_by_name['unpacked_double_extension'] = unpacked_double_extension DESCRIPTOR.extensions_by_name['unpacked_bool_extension'] = unpacked_bool_extension DESCRIPTOR.extensions_by_name['unpacked_enum_extension'] = unpacked_enum_extension DESCRIPTOR.extensions_by_name['test_all_types'] = test_all_types DESCRIPTOR.extensions_by_name['test_extension_inside_table_extension'] = test_extension_inside_table_extension _sym_db.RegisterFileDescriptor(DESCRIPTOR) TestAllTypes = _reflection.GeneratedProtocolMessageType('TestAllTypes', (_message.Message,), dict( NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict( DESCRIPTOR = _TESTALLTYPES_NESTEDMESSAGE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestAllTypes.NestedMessage) )) , OptionalGroup = _reflection.GeneratedProtocolMessageType('OptionalGroup', (_message.Message,), dict( DESCRIPTOR = _TESTALLTYPES_OPTIONALGROUP, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestAllTypes.OptionalGroup) )) , RepeatedGroup = _reflection.GeneratedProtocolMessageType('RepeatedGroup', (_message.Message,), dict( DESCRIPTOR = _TESTALLTYPES_REPEATEDGROUP, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestAllTypes.RepeatedGroup) )) , DESCRIPTOR = _TESTALLTYPES, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestAllTypes) )) _sym_db.RegisterMessage(TestAllTypes) 
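# GeneratedProtocolMessageType is a metaclass: handed a DESCRIPTOR, it builds
# a concrete message class with accessors for every field, and each class is
# then registered in the default symbol database so it can be looked up by
# full name. Nested types (NestedMessage, OptionalGroup, ...) are built the
# same way and attached to the outer class as attributes.
#
# Illustrative round-trip through the generated classes (a minimal sketch;
# every name used is defined in this module or in the standard message API):
#
#   msg = TestAllTypes(optional_int32=101)
#   msg.oneof_uint32 = 42
#   assert msg.WhichOneof('oneof_field') == 'oneof_uint32'
#   data = msg.SerializeToString()
#   assert TestAllTypes.FromString(data) == msg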
_sym_db.RegisterMessage(TestAllTypes.NestedMessage) _sym_db.RegisterMessage(TestAllTypes.OptionalGroup) _sym_db.RegisterMessage(TestAllTypes.RepeatedGroup) NestedTestAllTypes = _reflection.GeneratedProtocolMessageType('NestedTestAllTypes', (_message.Message,), dict( DESCRIPTOR = _NESTEDTESTALLTYPES, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.NestedTestAllTypes) )) _sym_db.RegisterMessage(NestedTestAllTypes) TestDeprecatedFields = _reflection.GeneratedProtocolMessageType('TestDeprecatedFields', (_message.Message,), dict( DESCRIPTOR = _TESTDEPRECATEDFIELDS, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestDeprecatedFields) )) _sym_db.RegisterMessage(TestDeprecatedFields) TestDeprecatedMessage = _reflection.GeneratedProtocolMessageType('TestDeprecatedMessage', (_message.Message,), dict( DESCRIPTOR = _TESTDEPRECATEDMESSAGE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestDeprecatedMessage) )) _sym_db.RegisterMessage(TestDeprecatedMessage) ForeignMessage = _reflection.GeneratedProtocolMessageType('ForeignMessage', (_message.Message,), dict( DESCRIPTOR = _FOREIGNMESSAGE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.ForeignMessage) )) _sym_db.RegisterMessage(ForeignMessage) TestReservedFields = _reflection.GeneratedProtocolMessageType('TestReservedFields', (_message.Message,), dict( DESCRIPTOR = _TESTRESERVEDFIELDS, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestReservedFields) )) _sym_db.RegisterMessage(TestReservedFields) TestAllExtensions = _reflection.GeneratedProtocolMessageType('TestAllExtensions', (_message.Message,), dict( DESCRIPTOR = _TESTALLEXTENSIONS, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestAllExtensions) )) _sym_db.RegisterMessage(TestAllExtensions) OptionalGroup_extension = _reflection.GeneratedProtocolMessageType('OptionalGroup_extension', (_message.Message,), dict( DESCRIPTOR = _OPTIONALGROUP_EXTENSION, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.OptionalGroup_extension) )) _sym_db.RegisterMessage(OptionalGroup_extension) RepeatedGroup_extension = _reflection.GeneratedProtocolMessageType('RepeatedGroup_extension', (_message.Message,), dict( DESCRIPTOR = _REPEATEDGROUP_EXTENSION, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.RepeatedGroup_extension) )) _sym_db.RegisterMessage(RepeatedGroup_extension) TestGroup = _reflection.GeneratedProtocolMessageType('TestGroup', (_message.Message,), dict( OptionalGroup = _reflection.GeneratedProtocolMessageType('OptionalGroup', (_message.Message,), dict( DESCRIPTOR = _TESTGROUP_OPTIONALGROUP, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestGroup.OptionalGroup) )) , DESCRIPTOR = _TESTGROUP, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestGroup) )) _sym_db.RegisterMessage(TestGroup) _sym_db.RegisterMessage(TestGroup.OptionalGroup) TestGroupExtension = _reflection.GeneratedProtocolMessageType('TestGroupExtension', (_message.Message,), dict( DESCRIPTOR = _TESTGROUPEXTENSION, __module__ = 'google.protobuf.unittest_pb2' # 
@@protoc_insertion_point(class_scope:protobuf_unittest.TestGroupExtension) )) _sym_db.RegisterMessage(TestGroupExtension) TestNestedExtension = _reflection.GeneratedProtocolMessageType('TestNestedExtension', (_message.Message,), dict( OptionalGroup_extension = _reflection.GeneratedProtocolMessageType('OptionalGroup_extension', (_message.Message,), dict( DESCRIPTOR = _TESTNESTEDEXTENSION_OPTIONALGROUP_EXTENSION, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestNestedExtension.OptionalGroup_extension) )) , DESCRIPTOR = _TESTNESTEDEXTENSION, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestNestedExtension) )) _sym_db.RegisterMessage(TestNestedExtension) _sym_db.RegisterMessage(TestNestedExtension.OptionalGroup_extension) TestRequired = _reflection.GeneratedProtocolMessageType('TestRequired', (_message.Message,), dict( DESCRIPTOR = _TESTREQUIRED, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRequired) )) _sym_db.RegisterMessage(TestRequired) TestRequiredForeign = _reflection.GeneratedProtocolMessageType('TestRequiredForeign', (_message.Message,), dict( DESCRIPTOR = _TESTREQUIREDFOREIGN, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRequiredForeign) )) _sym_db.RegisterMessage(TestRequiredForeign) TestRequiredMessage = _reflection.GeneratedProtocolMessageType('TestRequiredMessage', (_message.Message,), dict( DESCRIPTOR = _TESTREQUIREDMESSAGE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRequiredMessage) )) _sym_db.RegisterMessage(TestRequiredMessage) TestForeignNested = _reflection.GeneratedProtocolMessageType('TestForeignNested', (_message.Message,), dict( DESCRIPTOR = _TESTFOREIGNNESTED, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestForeignNested) )) _sym_db.RegisterMessage(TestForeignNested) TestEmptyMessage = _reflection.GeneratedProtocolMessageType('TestEmptyMessage', (_message.Message,), dict( DESCRIPTOR = _TESTEMPTYMESSAGE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestEmptyMessage) )) _sym_db.RegisterMessage(TestEmptyMessage) TestEmptyMessageWithExtensions = _reflection.GeneratedProtocolMessageType('TestEmptyMessageWithExtensions', (_message.Message,), dict( DESCRIPTOR = _TESTEMPTYMESSAGEWITHEXTENSIONS, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestEmptyMessageWithExtensions) )) _sym_db.RegisterMessage(TestEmptyMessageWithExtensions) TestMultipleExtensionRanges = _reflection.GeneratedProtocolMessageType('TestMultipleExtensionRanges', (_message.Message,), dict( DESCRIPTOR = _TESTMULTIPLEEXTENSIONRANGES, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMultipleExtensionRanges) )) _sym_db.RegisterMessage(TestMultipleExtensionRanges) TestReallyLargeTagNumber = _reflection.GeneratedProtocolMessageType('TestReallyLargeTagNumber', (_message.Message,), dict( DESCRIPTOR = _TESTREALLYLARGETAGNUMBER, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestReallyLargeTagNumber) )) _sym_db.RegisterMessage(TestReallyLargeTagNumber) TestRecursiveMessage = _reflection.GeneratedProtocolMessageType('TestRecursiveMessage', (_message.Message,), 
dict( DESCRIPTOR = _TESTRECURSIVEMESSAGE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRecursiveMessage) )) _sym_db.RegisterMessage(TestRecursiveMessage) TestMutualRecursionA = _reflection.GeneratedProtocolMessageType('TestMutualRecursionA', (_message.Message,), dict( SubMessage = _reflection.GeneratedProtocolMessageType('SubMessage', (_message.Message,), dict( DESCRIPTOR = _TESTMUTUALRECURSIONA_SUBMESSAGE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMutualRecursionA.SubMessage) )) , SubGroup = _reflection.GeneratedProtocolMessageType('SubGroup', (_message.Message,), dict( DESCRIPTOR = _TESTMUTUALRECURSIONA_SUBGROUP, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMutualRecursionA.SubGroup) )) , DESCRIPTOR = _TESTMUTUALRECURSIONA, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMutualRecursionA) )) _sym_db.RegisterMessage(TestMutualRecursionA) _sym_db.RegisterMessage(TestMutualRecursionA.SubMessage) _sym_db.RegisterMessage(TestMutualRecursionA.SubGroup) TestMutualRecursionB = _reflection.GeneratedProtocolMessageType('TestMutualRecursionB', (_message.Message,), dict( DESCRIPTOR = _TESTMUTUALRECURSIONB, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMutualRecursionB) )) _sym_db.RegisterMessage(TestMutualRecursionB) TestIsInitialized = _reflection.GeneratedProtocolMessageType('TestIsInitialized', (_message.Message,), dict( SubMessage = _reflection.GeneratedProtocolMessageType('SubMessage', (_message.Message,), dict( SubGroup = _reflection.GeneratedProtocolMessageType('SubGroup', (_message.Message,), dict( DESCRIPTOR = _TESTISINITIALIZED_SUBMESSAGE_SUBGROUP, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestIsInitialized.SubMessage.SubGroup) )) , DESCRIPTOR = _TESTISINITIALIZED_SUBMESSAGE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestIsInitialized.SubMessage) )) , DESCRIPTOR = _TESTISINITIALIZED, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestIsInitialized) )) _sym_db.RegisterMessage(TestIsInitialized) _sym_db.RegisterMessage(TestIsInitialized.SubMessage) _sym_db.RegisterMessage(TestIsInitialized.SubMessage.SubGroup) TestDupFieldNumber = _reflection.GeneratedProtocolMessageType('TestDupFieldNumber', (_message.Message,), dict( Foo = _reflection.GeneratedProtocolMessageType('Foo', (_message.Message,), dict( DESCRIPTOR = _TESTDUPFIELDNUMBER_FOO, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestDupFieldNumber.Foo) )) , Bar = _reflection.GeneratedProtocolMessageType('Bar', (_message.Message,), dict( DESCRIPTOR = _TESTDUPFIELDNUMBER_BAR, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestDupFieldNumber.Bar) )) , DESCRIPTOR = _TESTDUPFIELDNUMBER, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestDupFieldNumber) )) _sym_db.RegisterMessage(TestDupFieldNumber) _sym_db.RegisterMessage(TestDupFieldNumber.Foo) _sym_db.RegisterMessage(TestDupFieldNumber.Bar) TestEagerMessage = _reflection.GeneratedProtocolMessageType('TestEagerMessage', (_message.Message,), dict( DESCRIPTOR = 
_TESTEAGERMESSAGE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestEagerMessage) )) _sym_db.RegisterMessage(TestEagerMessage) TestLazyMessage = _reflection.GeneratedProtocolMessageType('TestLazyMessage', (_message.Message,), dict( DESCRIPTOR = _TESTLAZYMESSAGE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestLazyMessage) )) _sym_db.RegisterMessage(TestLazyMessage) TestNestedMessageHasBits = _reflection.GeneratedProtocolMessageType('TestNestedMessageHasBits', (_message.Message,), dict( NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict( DESCRIPTOR = _TESTNESTEDMESSAGEHASBITS_NESTEDMESSAGE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestNestedMessageHasBits.NestedMessage) )) , DESCRIPTOR = _TESTNESTEDMESSAGEHASBITS, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestNestedMessageHasBits) )) _sym_db.RegisterMessage(TestNestedMessageHasBits) _sym_db.RegisterMessage(TestNestedMessageHasBits.NestedMessage) TestCamelCaseFieldNames = _reflection.GeneratedProtocolMessageType('TestCamelCaseFieldNames', (_message.Message,), dict( DESCRIPTOR = _TESTCAMELCASEFIELDNAMES, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestCamelCaseFieldNames) )) _sym_db.RegisterMessage(TestCamelCaseFieldNames) TestFieldOrderings = _reflection.GeneratedProtocolMessageType('TestFieldOrderings', (_message.Message,), dict( NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict( DESCRIPTOR = _TESTFIELDORDERINGS_NESTEDMESSAGE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestFieldOrderings.NestedMessage) )) , DESCRIPTOR = _TESTFIELDORDERINGS, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestFieldOrderings) )) _sym_db.RegisterMessage(TestFieldOrderings) _sym_db.RegisterMessage(TestFieldOrderings.NestedMessage) TestExtensionOrderings1 = _reflection.GeneratedProtocolMessageType('TestExtensionOrderings1', (_message.Message,), dict( DESCRIPTOR = _TESTEXTENSIONORDERINGS1, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestExtensionOrderings1) )) _sym_db.RegisterMessage(TestExtensionOrderings1) TestExtensionOrderings2 = _reflection.GeneratedProtocolMessageType('TestExtensionOrderings2', (_message.Message,), dict( TestExtensionOrderings3 = _reflection.GeneratedProtocolMessageType('TestExtensionOrderings3', (_message.Message,), dict( DESCRIPTOR = _TESTEXTENSIONORDERINGS2_TESTEXTENSIONORDERINGS3, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestExtensionOrderings2.TestExtensionOrderings3) )) , DESCRIPTOR = _TESTEXTENSIONORDERINGS2, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestExtensionOrderings2) )) _sym_db.RegisterMessage(TestExtensionOrderings2) _sym_db.RegisterMessage(TestExtensionOrderings2.TestExtensionOrderings3) TestExtremeDefaultValues = _reflection.GeneratedProtocolMessageType('TestExtremeDefaultValues', (_message.Message,), dict( DESCRIPTOR = _TESTEXTREMEDEFAULTVALUES, __module__ = 'google.protobuf.unittest_pb2' # 
@@protoc_insertion_point(class_scope:protobuf_unittest.TestExtremeDefaultValues) )) _sym_db.RegisterMessage(TestExtremeDefaultValues) SparseEnumMessage = _reflection.GeneratedProtocolMessageType('SparseEnumMessage', (_message.Message,), dict( DESCRIPTOR = _SPARSEENUMMESSAGE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.SparseEnumMessage) )) _sym_db.RegisterMessage(SparseEnumMessage) OneString = _reflection.GeneratedProtocolMessageType('OneString', (_message.Message,), dict( DESCRIPTOR = _ONESTRING, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.OneString) )) _sym_db.RegisterMessage(OneString) MoreString = _reflection.GeneratedProtocolMessageType('MoreString', (_message.Message,), dict( DESCRIPTOR = _MORESTRING, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.MoreString) )) _sym_db.RegisterMessage(MoreString) OneBytes = _reflection.GeneratedProtocolMessageType('OneBytes', (_message.Message,), dict( DESCRIPTOR = _ONEBYTES, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.OneBytes) )) _sym_db.RegisterMessage(OneBytes) MoreBytes = _reflection.GeneratedProtocolMessageType('MoreBytes', (_message.Message,), dict( DESCRIPTOR = _MOREBYTES, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.MoreBytes) )) _sym_db.RegisterMessage(MoreBytes) Int32Message = _reflection.GeneratedProtocolMessageType('Int32Message', (_message.Message,), dict( DESCRIPTOR = _INT32MESSAGE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.Int32Message) )) _sym_db.RegisterMessage(Int32Message) Uint32Message = _reflection.GeneratedProtocolMessageType('Uint32Message', (_message.Message,), dict( DESCRIPTOR = _UINT32MESSAGE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.Uint32Message) )) _sym_db.RegisterMessage(Uint32Message) Int64Message = _reflection.GeneratedProtocolMessageType('Int64Message', (_message.Message,), dict( DESCRIPTOR = _INT64MESSAGE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.Int64Message) )) _sym_db.RegisterMessage(Int64Message) Uint64Message = _reflection.GeneratedProtocolMessageType('Uint64Message', (_message.Message,), dict( DESCRIPTOR = _UINT64MESSAGE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.Uint64Message) )) _sym_db.RegisterMessage(Uint64Message) BoolMessage = _reflection.GeneratedProtocolMessageType('BoolMessage', (_message.Message,), dict( DESCRIPTOR = _BOOLMESSAGE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.BoolMessage) )) _sym_db.RegisterMessage(BoolMessage) TestOneof = _reflection.GeneratedProtocolMessageType('TestOneof', (_message.Message,), dict( FooGroup = _reflection.GeneratedProtocolMessageType('FooGroup', (_message.Message,), dict( DESCRIPTOR = _TESTONEOF_FOOGROUP, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestOneof.FooGroup) )) , DESCRIPTOR = _TESTONEOF, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestOneof) )) _sym_db.RegisterMessage(TestOneof) _sym_db.RegisterMessage(TestOneof.FooGroup) TestOneofBackwardsCompatible = 
_reflection.GeneratedProtocolMessageType('TestOneofBackwardsCompatible', (_message.Message,), dict( FooGroup = _reflection.GeneratedProtocolMessageType('FooGroup', (_message.Message,), dict( DESCRIPTOR = _TESTONEOFBACKWARDSCOMPATIBLE_FOOGROUP, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestOneofBackwardsCompatible.FooGroup) )) , DESCRIPTOR = _TESTONEOFBACKWARDSCOMPATIBLE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestOneofBackwardsCompatible) )) _sym_db.RegisterMessage(TestOneofBackwardsCompatible) _sym_db.RegisterMessage(TestOneofBackwardsCompatible.FooGroup) TestOneof2 = _reflection.GeneratedProtocolMessageType('TestOneof2', (_message.Message,), dict( FooGroup = _reflection.GeneratedProtocolMessageType('FooGroup', (_message.Message,), dict( DESCRIPTOR = _TESTONEOF2_FOOGROUP, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestOneof2.FooGroup) )) , NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict( DESCRIPTOR = _TESTONEOF2_NESTEDMESSAGE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestOneof2.NestedMessage) )) , DESCRIPTOR = _TESTONEOF2, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestOneof2) )) _sym_db.RegisterMessage(TestOneof2) _sym_db.RegisterMessage(TestOneof2.FooGroup) _sym_db.RegisterMessage(TestOneof2.NestedMessage) TestRequiredOneof = _reflection.GeneratedProtocolMessageType('TestRequiredOneof', (_message.Message,), dict( NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict( DESCRIPTOR = _TESTREQUIREDONEOF_NESTEDMESSAGE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRequiredOneof.NestedMessage) )) , DESCRIPTOR = _TESTREQUIREDONEOF, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRequiredOneof) )) _sym_db.RegisterMessage(TestRequiredOneof) _sym_db.RegisterMessage(TestRequiredOneof.NestedMessage) TestPackedTypes = _reflection.GeneratedProtocolMessageType('TestPackedTypes', (_message.Message,), dict( DESCRIPTOR = _TESTPACKEDTYPES, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestPackedTypes) )) _sym_db.RegisterMessage(TestPackedTypes) TestUnpackedTypes = _reflection.GeneratedProtocolMessageType('TestUnpackedTypes', (_message.Message,), dict( DESCRIPTOR = _TESTUNPACKEDTYPES, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestUnpackedTypes) )) _sym_db.RegisterMessage(TestUnpackedTypes) TestPackedExtensions = _reflection.GeneratedProtocolMessageType('TestPackedExtensions', (_message.Message,), dict( DESCRIPTOR = _TESTPACKEDEXTENSIONS, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestPackedExtensions) )) _sym_db.RegisterMessage(TestPackedExtensions) TestUnpackedExtensions = _reflection.GeneratedProtocolMessageType('TestUnpackedExtensions', (_message.Message,), dict( DESCRIPTOR = _TESTUNPACKEDEXTENSIONS, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestUnpackedExtensions) )) _sym_db.RegisterMessage(TestUnpackedExtensions) TestDynamicExtensions = 
_reflection.GeneratedProtocolMessageType('TestDynamicExtensions', (_message.Message,), dict( DynamicMessageType = _reflection.GeneratedProtocolMessageType('DynamicMessageType', (_message.Message,), dict( DESCRIPTOR = _TESTDYNAMICEXTENSIONS_DYNAMICMESSAGETYPE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestDynamicExtensions.DynamicMessageType) )) , DESCRIPTOR = _TESTDYNAMICEXTENSIONS, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestDynamicExtensions) )) _sym_db.RegisterMessage(TestDynamicExtensions) _sym_db.RegisterMessage(TestDynamicExtensions.DynamicMessageType) TestRepeatedScalarDifferentTagSizes = _reflection.GeneratedProtocolMessageType('TestRepeatedScalarDifferentTagSizes', (_message.Message,), dict( DESCRIPTOR = _TESTREPEATEDSCALARDIFFERENTTAGSIZES, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRepeatedScalarDifferentTagSizes) )) _sym_db.RegisterMessage(TestRepeatedScalarDifferentTagSizes) TestParsingMerge = _reflection.GeneratedProtocolMessageType('TestParsingMerge', (_message.Message,), dict( RepeatedFieldsGenerator = _reflection.GeneratedProtocolMessageType('RepeatedFieldsGenerator', (_message.Message,), dict( Group1 = _reflection.GeneratedProtocolMessageType('Group1', (_message.Message,), dict( DESCRIPTOR = _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP1, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.Group1) )) , Group2 = _reflection.GeneratedProtocolMessageType('Group2', (_message.Message,), dict( DESCRIPTOR = _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP2, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.Group2) )) , DESCRIPTOR = _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator) )) , OptionalGroup = _reflection.GeneratedProtocolMessageType('OptionalGroup', (_message.Message,), dict( DESCRIPTOR = _TESTPARSINGMERGE_OPTIONALGROUP, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestParsingMerge.OptionalGroup) )) , RepeatedGroup = _reflection.GeneratedProtocolMessageType('RepeatedGroup', (_message.Message,), dict( DESCRIPTOR = _TESTPARSINGMERGE_REPEATEDGROUP, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestParsingMerge.RepeatedGroup) )) , DESCRIPTOR = _TESTPARSINGMERGE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestParsingMerge) )) _sym_db.RegisterMessage(TestParsingMerge) _sym_db.RegisterMessage(TestParsingMerge.RepeatedFieldsGenerator) _sym_db.RegisterMessage(TestParsingMerge.RepeatedFieldsGenerator.Group1) _sym_db.RegisterMessage(TestParsingMerge.RepeatedFieldsGenerator.Group2) _sym_db.RegisterMessage(TestParsingMerge.OptionalGroup) _sym_db.RegisterMessage(TestParsingMerge.RepeatedGroup) TestCommentInjectionMessage = _reflection.GeneratedProtocolMessageType('TestCommentInjectionMessage', (_message.Message,), dict( DESCRIPTOR = _TESTCOMMENTINJECTIONMESSAGE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestCommentInjectionMessage) )) 
_sym_db.RegisterMessage(TestCommentInjectionMessage) FooRequest = _reflection.GeneratedProtocolMessageType('FooRequest', (_message.Message,), dict( DESCRIPTOR = _FOOREQUEST, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.FooRequest) )) _sym_db.RegisterMessage(FooRequest) FooResponse = _reflection.GeneratedProtocolMessageType('FooResponse', (_message.Message,), dict( DESCRIPTOR = _FOORESPONSE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.FooResponse) )) _sym_db.RegisterMessage(FooResponse) FooClientMessage = _reflection.GeneratedProtocolMessageType('FooClientMessage', (_message.Message,), dict( DESCRIPTOR = _FOOCLIENTMESSAGE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.FooClientMessage) )) _sym_db.RegisterMessage(FooClientMessage) FooServerMessage = _reflection.GeneratedProtocolMessageType('FooServerMessage', (_message.Message,), dict( DESCRIPTOR = _FOOSERVERMESSAGE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.FooServerMessage) )) _sym_db.RegisterMessage(FooServerMessage) BarRequest = _reflection.GeneratedProtocolMessageType('BarRequest', (_message.Message,), dict( DESCRIPTOR = _BARREQUEST, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.BarRequest) )) _sym_db.RegisterMessage(BarRequest) BarResponse = _reflection.GeneratedProtocolMessageType('BarResponse', (_message.Message,), dict( DESCRIPTOR = _BARRESPONSE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.BarResponse) )) _sym_db.RegisterMessage(BarResponse) TestJsonName = _reflection.GeneratedProtocolMessageType('TestJsonName', (_message.Message,), dict( DESCRIPTOR = _TESTJSONNAME, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestJsonName) )) _sym_db.RegisterMessage(TestJsonName) TestHugeFieldNumbers = _reflection.GeneratedProtocolMessageType('TestHugeFieldNumbers', (_message.Message,), dict( OptionalGroup = _reflection.GeneratedProtocolMessageType('OptionalGroup', (_message.Message,), dict( DESCRIPTOR = _TESTHUGEFIELDNUMBERS_OPTIONALGROUP, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestHugeFieldNumbers.OptionalGroup) )) , StringStringMapEntry = _reflection.GeneratedProtocolMessageType('StringStringMapEntry', (_message.Message,), dict( DESCRIPTOR = _TESTHUGEFIELDNUMBERS_STRINGSTRINGMAPENTRY, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestHugeFieldNumbers.StringStringMapEntry) )) , DESCRIPTOR = _TESTHUGEFIELDNUMBERS, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestHugeFieldNumbers) )) _sym_db.RegisterMessage(TestHugeFieldNumbers) _sym_db.RegisterMessage(TestHugeFieldNumbers.OptionalGroup) _sym_db.RegisterMessage(TestHugeFieldNumbers.StringStringMapEntry) TestExtensionInsideTable = _reflection.GeneratedProtocolMessageType('TestExtensionInsideTable', (_message.Message,), dict( DESCRIPTOR = _TESTEXTENSIONINSIDETABLE, __module__ = 'google.protobuf.unittest_pb2' # @@protoc_insertion_point(class_scope:protobuf_unittest.TestExtensionInsideTable) )) _sym_db.RegisterMessage(TestExtensionInsideTable) TestAllExtensions.RegisterExtension(optional_int32_extension) 
TestAllExtensions.RegisterExtension(optional_int64_extension) TestAllExtensions.RegisterExtension(optional_uint32_extension) TestAllExtensions.RegisterExtension(optional_uint64_extension) TestAllExtensions.RegisterExtension(optional_sint32_extension) TestAllExtensions.RegisterExtension(optional_sint64_extension) TestAllExtensions.RegisterExtension(optional_fixed32_extension) TestAllExtensions.RegisterExtension(optional_fixed64_extension) TestAllExtensions.RegisterExtension(optional_sfixed32_extension) TestAllExtensions.RegisterExtension(optional_sfixed64_extension) TestAllExtensions.RegisterExtension(optional_float_extension) TestAllExtensions.RegisterExtension(optional_double_extension) TestAllExtensions.RegisterExtension(optional_bool_extension) TestAllExtensions.RegisterExtension(optional_string_extension) TestAllExtensions.RegisterExtension(optional_bytes_extension) optionalgroup_extension.message_type = _OPTIONALGROUP_EXTENSION TestAllExtensions.RegisterExtension(optionalgroup_extension) optional_nested_message_extension.message_type = _TESTALLTYPES_NESTEDMESSAGE TestAllExtensions.RegisterExtension(optional_nested_message_extension) optional_foreign_message_extension.message_type = _FOREIGNMESSAGE TestAllExtensions.RegisterExtension(optional_foreign_message_extension) optional_import_message_extension.message_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTMESSAGE TestAllExtensions.RegisterExtension(optional_import_message_extension) optional_nested_enum_extension.enum_type = _TESTALLTYPES_NESTEDENUM TestAllExtensions.RegisterExtension(optional_nested_enum_extension) optional_foreign_enum_extension.enum_type = _FOREIGNENUM TestAllExtensions.RegisterExtension(optional_foreign_enum_extension) optional_import_enum_extension.enum_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTENUM TestAllExtensions.RegisterExtension(optional_import_enum_extension) TestAllExtensions.RegisterExtension(optional_string_piece_extension) TestAllExtensions.RegisterExtension(optional_cord_extension) optional_public_import_message_extension.message_type = google_dot_protobuf_dot_unittest__import__public__pb2._PUBLICIMPORTMESSAGE TestAllExtensions.RegisterExtension(optional_public_import_message_extension) optional_lazy_message_extension.message_type = _TESTALLTYPES_NESTEDMESSAGE TestAllExtensions.RegisterExtension(optional_lazy_message_extension) TestAllExtensions.RegisterExtension(repeated_int32_extension) TestAllExtensions.RegisterExtension(repeated_int64_extension) TestAllExtensions.RegisterExtension(repeated_uint32_extension) TestAllExtensions.RegisterExtension(repeated_uint64_extension) TestAllExtensions.RegisterExtension(repeated_sint32_extension) TestAllExtensions.RegisterExtension(repeated_sint64_extension) TestAllExtensions.RegisterExtension(repeated_fixed32_extension) TestAllExtensions.RegisterExtension(repeated_fixed64_extension) TestAllExtensions.RegisterExtension(repeated_sfixed32_extension) TestAllExtensions.RegisterExtension(repeated_sfixed64_extension) TestAllExtensions.RegisterExtension(repeated_float_extension) TestAllExtensions.RegisterExtension(repeated_double_extension) TestAllExtensions.RegisterExtension(repeated_bool_extension) TestAllExtensions.RegisterExtension(repeated_string_extension) TestAllExtensions.RegisterExtension(repeated_bytes_extension) repeatedgroup_extension.message_type = _REPEATEDGROUP_EXTENSION TestAllExtensions.RegisterExtension(repeatedgroup_extension) repeated_nested_message_extension.message_type = _TESTALLTYPES_NESTEDMESSAGE 
TestAllExtensions.RegisterExtension(repeated_nested_message_extension) repeated_foreign_message_extension.message_type = _FOREIGNMESSAGE TestAllExtensions.RegisterExtension(repeated_foreign_message_extension) repeated_import_message_extension.message_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTMESSAGE TestAllExtensions.RegisterExtension(repeated_import_message_extension) repeated_nested_enum_extension.enum_type = _TESTALLTYPES_NESTEDENUM TestAllExtensions.RegisterExtension(repeated_nested_enum_extension) repeated_foreign_enum_extension.enum_type = _FOREIGNENUM TestAllExtensions.RegisterExtension(repeated_foreign_enum_extension) repeated_import_enum_extension.enum_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTENUM TestAllExtensions.RegisterExtension(repeated_import_enum_extension) TestAllExtensions.RegisterExtension(repeated_string_piece_extension) TestAllExtensions.RegisterExtension(repeated_cord_extension) repeated_lazy_message_extension.message_type = _TESTALLTYPES_NESTEDMESSAGE TestAllExtensions.RegisterExtension(repeated_lazy_message_extension) TestAllExtensions.RegisterExtension(default_int32_extension) TestAllExtensions.RegisterExtension(default_int64_extension) TestAllExtensions.RegisterExtension(default_uint32_extension) TestAllExtensions.RegisterExtension(default_uint64_extension) TestAllExtensions.RegisterExtension(default_sint32_extension) TestAllExtensions.RegisterExtension(default_sint64_extension) TestAllExtensions.RegisterExtension(default_fixed32_extension) TestAllExtensions.RegisterExtension(default_fixed64_extension) TestAllExtensions.RegisterExtension(default_sfixed32_extension) TestAllExtensions.RegisterExtension(default_sfixed64_extension) TestAllExtensions.RegisterExtension(default_float_extension) TestAllExtensions.RegisterExtension(default_double_extension) TestAllExtensions.RegisterExtension(default_bool_extension) TestAllExtensions.RegisterExtension(default_string_extension) TestAllExtensions.RegisterExtension(default_bytes_extension) default_nested_enum_extension.enum_type = _TESTALLTYPES_NESTEDENUM TestAllExtensions.RegisterExtension(default_nested_enum_extension) default_foreign_enum_extension.enum_type = _FOREIGNENUM TestAllExtensions.RegisterExtension(default_foreign_enum_extension) default_import_enum_extension.enum_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTENUM TestAllExtensions.RegisterExtension(default_import_enum_extension) TestAllExtensions.RegisterExtension(default_string_piece_extension) TestAllExtensions.RegisterExtension(default_cord_extension) TestAllExtensions.RegisterExtension(oneof_uint32_extension) oneof_nested_message_extension.message_type = _TESTALLTYPES_NESTEDMESSAGE TestAllExtensions.RegisterExtension(oneof_nested_message_extension) TestAllExtensions.RegisterExtension(oneof_string_extension) TestAllExtensions.RegisterExtension(oneof_bytes_extension) TestFieldOrderings.RegisterExtension(my_extension_string) TestFieldOrderings.RegisterExtension(my_extension_int) TestPackedExtensions.RegisterExtension(packed_int32_extension) TestPackedExtensions.RegisterExtension(packed_int64_extension) TestPackedExtensions.RegisterExtension(packed_uint32_extension) TestPackedExtensions.RegisterExtension(packed_uint64_extension) TestPackedExtensions.RegisterExtension(packed_sint32_extension) TestPackedExtensions.RegisterExtension(packed_sint64_extension) TestPackedExtensions.RegisterExtension(packed_fixed32_extension) TestPackedExtensions.RegisterExtension(packed_fixed64_extension) 
TestPackedExtensions.RegisterExtension(packed_sfixed32_extension) TestPackedExtensions.RegisterExtension(packed_sfixed64_extension) TestPackedExtensions.RegisterExtension(packed_float_extension) TestPackedExtensions.RegisterExtension(packed_double_extension) TestPackedExtensions.RegisterExtension(packed_bool_extension) packed_enum_extension.enum_type = _FOREIGNENUM TestPackedExtensions.RegisterExtension(packed_enum_extension) TestUnpackedExtensions.RegisterExtension(unpacked_int32_extension) TestUnpackedExtensions.RegisterExtension(unpacked_int64_extension) TestUnpackedExtensions.RegisterExtension(unpacked_uint32_extension) TestUnpackedExtensions.RegisterExtension(unpacked_uint64_extension) TestUnpackedExtensions.RegisterExtension(unpacked_sint32_extension) TestUnpackedExtensions.RegisterExtension(unpacked_sint64_extension) TestUnpackedExtensions.RegisterExtension(unpacked_fixed32_extension) TestUnpackedExtensions.RegisterExtension(unpacked_fixed64_extension) TestUnpackedExtensions.RegisterExtension(unpacked_sfixed32_extension) TestUnpackedExtensions.RegisterExtension(unpacked_sfixed64_extension) TestUnpackedExtensions.RegisterExtension(unpacked_float_extension) TestUnpackedExtensions.RegisterExtension(unpacked_double_extension) TestUnpackedExtensions.RegisterExtension(unpacked_bool_extension) unpacked_enum_extension.enum_type = _FOREIGNENUM TestUnpackedExtensions.RegisterExtension(unpacked_enum_extension) test_all_types.message_type = _TESTALLTYPES TestHugeFieldNumbers.RegisterExtension(test_all_types) TestExtensionInsideTable.RegisterExtension(test_extension_inside_table_extension) TestAllExtensions.RegisterExtension(_TESTNESTEDEXTENSION.extensions_by_name['test']) TestAllExtensions.RegisterExtension(_TESTNESTEDEXTENSION.extensions_by_name['nested_string_extension']) _TESTNESTEDEXTENSION.extensions_by_name['optionalgroup_extension'].message_type = _TESTNESTEDEXTENSION_OPTIONALGROUP_EXTENSION TestGroupExtension.RegisterExtension(_TESTNESTEDEXTENSION.extensions_by_name['optionalgroup_extension']) _TESTNESTEDEXTENSION.extensions_by_name['optional_foreign_enum_extension'].enum_type = _FOREIGNENUM TestGroupExtension.RegisterExtension(_TESTNESTEDEXTENSION.extensions_by_name['optional_foreign_enum_extension']) _TESTREQUIRED.extensions_by_name['single'].message_type = _TESTREQUIRED TestAllExtensions.RegisterExtension(_TESTREQUIRED.extensions_by_name['single']) _TESTREQUIRED.extensions_by_name['multi'].message_type = _TESTREQUIRED TestAllExtensions.RegisterExtension(_TESTREQUIRED.extensions_by_name['multi']) _TESTEXTENSIONORDERINGS1.extensions_by_name['test_ext_orderings1'].message_type = _TESTEXTENSIONORDERINGS1 TestFieldOrderings.RegisterExtension(_TESTEXTENSIONORDERINGS1.extensions_by_name['test_ext_orderings1']) _TESTEXTENSIONORDERINGS2_TESTEXTENSIONORDERINGS3.extensions_by_name['test_ext_orderings3'].message_type = _TESTEXTENSIONORDERINGS2_TESTEXTENSIONORDERINGS3 TestFieldOrderings.RegisterExtension(_TESTEXTENSIONORDERINGS2_TESTEXTENSIONORDERINGS3.extensions_by_name['test_ext_orderings3']) _TESTEXTENSIONORDERINGS2.extensions_by_name['test_ext_orderings2'].message_type = _TESTEXTENSIONORDERINGS2 TestFieldOrderings.RegisterExtension(_TESTEXTENSIONORDERINGS2.extensions_by_name['test_ext_orderings2']) _TESTPARSINGMERGE.extensions_by_name['optional_ext'].message_type = _TESTALLTYPES TestParsingMerge.RegisterExtension(_TESTPARSINGMERGE.extensions_by_name['optional_ext']) _TESTPARSINGMERGE.extensions_by_name['repeated_ext'].message_type = _TESTALLTYPES 
TestParsingMerge.RegisterExtension(_TESTPARSINGMERGE.extensions_by_name['repeated_ext']) DESCRIPTOR._options = None _TESTENUMWITHDUPVALUE._options = None optional_string_piece_extension._options = None optional_cord_extension._options = None optional_lazy_message_extension._options = None repeated_string_piece_extension._options = None repeated_cord_extension._options = None repeated_lazy_message_extension._options = None default_string_piece_extension._options = None default_cord_extension._options = None packed_int32_extension._options = None packed_int64_extension._options = None packed_uint32_extension._options = None packed_uint64_extension._options = None packed_sint32_extension._options = None packed_sint64_extension._options = None packed_fixed32_extension._options = None packed_fixed64_extension._options = None packed_sfixed32_extension._options = None packed_sfixed64_extension._options = None packed_float_extension._options = None packed_double_extension._options = None packed_bool_extension._options = None packed_enum_extension._options = None unpacked_int32_extension._options = None unpacked_int64_extension._options = None unpacked_uint32_extension._options = None unpacked_uint64_extension._options = None unpacked_sint32_extension._options = None unpacked_sint64_extension._options = None unpacked_fixed32_extension._options = None unpacked_fixed64_extension._options = None unpacked_sfixed32_extension._options = None unpacked_sfixed64_extension._options = None unpacked_float_extension._options = None unpacked_double_extension._options = None unpacked_bool_extension._options = None unpacked_enum_extension._options = None _TESTALLTYPES.fields_by_name['optional_string_piece']._options = None _TESTALLTYPES.fields_by_name['optional_cord']._options = None _TESTALLTYPES.fields_by_name['optional_lazy_message']._options = None _TESTALLTYPES.fields_by_name['repeated_string_piece']._options = None _TESTALLTYPES.fields_by_name['repeated_cord']._options = None _TESTALLTYPES.fields_by_name['repeated_lazy_message']._options = None _TESTALLTYPES.fields_by_name['default_string_piece']._options = None _TESTALLTYPES.fields_by_name['default_cord']._options = None _TESTDEPRECATEDFIELDS.fields_by_name['deprecated_int32']._options = None _TESTDEPRECATEDFIELDS.fields_by_name['deprecated_int32_in_oneof']._options = None _TESTDEPRECATEDMESSAGE._options = None _TESTEAGERMESSAGE.fields_by_name['sub_message']._options = None _TESTLAZYMESSAGE.fields_by_name['sub_message']._options = None _TESTCAMELCASEFIELDNAMES.fields_by_name['StringPieceField']._options = None _TESTCAMELCASEFIELDNAMES.fields_by_name['CordField']._options = None _TESTCAMELCASEFIELDNAMES.fields_by_name['RepeatedStringPieceField']._options = None _TESTCAMELCASEFIELDNAMES.fields_by_name['RepeatedCordField']._options = None _TESTEXTREMEDEFAULTVALUES.fields_by_name['string_piece_with_zero']._options = None _TESTEXTREMEDEFAULTVALUES.fields_by_name['cord_with_zero']._options = None _TESTONEOF2.fields_by_name['foo_cord']._options = None _TESTONEOF2.fields_by_name['foo_string_piece']._options = None _TESTONEOF2.fields_by_name['foo_lazy_message']._options = None _TESTONEOF2.fields_by_name['bar_cord']._options = None _TESTONEOF2.fields_by_name['bar_string_piece']._options = None _TESTPACKEDTYPES.fields_by_name['packed_int32']._options = None _TESTPACKEDTYPES.fields_by_name['packed_int64']._options = None _TESTPACKEDTYPES.fields_by_name['packed_uint32']._options = None _TESTPACKEDTYPES.fields_by_name['packed_uint64']._options = None 
_TESTPACKEDTYPES.fields_by_name['packed_sint32']._options = None _TESTPACKEDTYPES.fields_by_name['packed_sint64']._options = None _TESTPACKEDTYPES.fields_by_name['packed_fixed32']._options = None _TESTPACKEDTYPES.fields_by_name['packed_fixed64']._options = None _TESTPACKEDTYPES.fields_by_name['packed_sfixed32']._options = None _TESTPACKEDTYPES.fields_by_name['packed_sfixed64']._options = None _TESTPACKEDTYPES.fields_by_name['packed_float']._options = None _TESTPACKEDTYPES.fields_by_name['packed_double']._options = None _TESTPACKEDTYPES.fields_by_name['packed_bool']._options = None _TESTPACKEDTYPES.fields_by_name['packed_enum']._options = None _TESTUNPACKEDTYPES.fields_by_name['unpacked_int32']._options = None _TESTUNPACKEDTYPES.fields_by_name['unpacked_int64']._options = None _TESTUNPACKEDTYPES.fields_by_name['unpacked_uint32']._options = None _TESTUNPACKEDTYPES.fields_by_name['unpacked_uint64']._options = None _TESTUNPACKEDTYPES.fields_by_name['unpacked_sint32']._options = None _TESTUNPACKEDTYPES.fields_by_name['unpacked_sint64']._options = None _TESTUNPACKEDTYPES.fields_by_name['unpacked_fixed32']._options = None _TESTUNPACKEDTYPES.fields_by_name['unpacked_fixed64']._options = None _TESTUNPACKEDTYPES.fields_by_name['unpacked_sfixed32']._options = None _TESTUNPACKEDTYPES.fields_by_name['unpacked_sfixed64']._options = None _TESTUNPACKEDTYPES.fields_by_name['unpacked_float']._options = None _TESTUNPACKEDTYPES.fields_by_name['unpacked_double']._options = None _TESTUNPACKEDTYPES.fields_by_name['unpacked_bool']._options = None _TESTUNPACKEDTYPES.fields_by_name['unpacked_enum']._options = None _TESTDYNAMICEXTENSIONS.fields_by_name['packed_extension']._options = None _TESTHUGEFIELDNUMBERS_STRINGSTRINGMAPENTRY._options = None _TESTHUGEFIELDNUMBERS.fields_by_name['repeated_int32']._options = None _TESTHUGEFIELDNUMBERS.fields_by_name['packed_int32']._options = None _TESTSERVICE = _descriptor.ServiceDescriptor( name='TestService', full_name='protobuf_unittest.TestService', file=DESCRIPTOR, index=0, serialized_options=None, serialized_start=15590, serialized_end=15743, methods=[ _descriptor.MethodDescriptor( name='Foo', full_name='protobuf_unittest.TestService.Foo', index=0, containing_service=None, input_type=_FOOREQUEST, output_type=_FOORESPONSE, serialized_options=None, ), _descriptor.MethodDescriptor( name='Bar', full_name='protobuf_unittest.TestService.Bar', index=1, containing_service=None, input_type=_BARREQUEST, output_type=_BARRESPONSE, serialized_options=None, ), ]) _sym_db.RegisterServiceDescriptor(_TESTSERVICE) DESCRIPTOR.services_by_name['TestService'] = _TESTSERVICE TestService = service_reflection.GeneratedServiceType('TestService', (_service.Service,), dict( DESCRIPTOR = _TESTSERVICE, __module__ = 'google.protobuf.unittest_pb2' )) TestService_Stub = service_reflection.GeneratedServiceStubType('TestService_Stub', (TestService,), dict( DESCRIPTOR = _TESTSERVICE, __module__ = 'google.protobuf.unittest_pb2' )) # @@protoc_insertion_point(module_scope)
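# --- Hedged usage sketch (not part of the generated file) ---
# A minimal illustration of how the messages and extensions registered above
# are typically exercised. It assumes this generated module is importable as
# google.protobuf.unittest_pb2; the extension name used here
# (optional_int32_extension) is registered earlier in this module.
#
#   from google.protobuf import unittest_pb2
#
#   msg = unittest_pb2.TestAllExtensions()
#   msg.Extensions[unittest_pb2.optional_int32_extension] = 42
#   payload = msg.SerializeToString()
#
#   parsed = unittest_pb2.TestAllExtensions.FromString(payload)
#   assert parsed.Extensions[unittest_pb2.optional_int32_extension] == 42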
py
1a4a7ef1e3a3c610e72bcf9fdbefcb30e5989400
"""djangorest URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/2.2/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: path('', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.urls import include, path 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) """ from django.conf.urls import url, include from django.contrib import admin from django.urls import path urlpatterns = [ path('admin/', admin.site.urls), url(r'^', include('api.urls')), ]
py
1a4a808cf9ec2252fc130ec69fc5ffcb03a54f3a
import os
from datetime import datetime, timezone

import numpy as np
import pandas as pd
import pyspark.sql.functions as F
import pyspark.sql.types as pt
import pytest
from pyspark.sql import SparkSession

import ibis
from ibis import util
from ibis.backends.tests.base import BackendTest, RoundAwayFromZero

_pyspark_testing_client = None


def get_common_spark_testing_client(data_directory, connect):
    spark = (
        SparkSession.builder.config('spark.default.parallelism', 4)
        .config('spark.driver.bindAddress', '127.0.0.1')
        .getOrCreate()
    )
    _spark_testing_client = connect(spark)
    s = _spark_testing_client._session
    num_partitions = 4

    df_functional_alltypes = (
        s.read.csv(
            path=str(data_directory / 'functional_alltypes.csv'),
            schema=pt.StructType(
                [
                    pt.StructField('index', pt.IntegerType(), True),
                    pt.StructField('Unnamed: 0', pt.IntegerType(), True),
                    pt.StructField('id', pt.IntegerType(), True),
                    # cast below, Spark can't read 0/1 as bool
                    pt.StructField('bool_col', pt.ByteType(), True),
                    pt.StructField('tinyint_col', pt.ByteType(), True),
                    pt.StructField('smallint_col', pt.ShortType(), True),
                    pt.StructField('int_col', pt.IntegerType(), True),
                    pt.StructField('bigint_col', pt.LongType(), True),
                    pt.StructField('float_col', pt.FloatType(), True),
                    pt.StructField('double_col', pt.DoubleType(), True),
                    pt.StructField('date_string_col', pt.StringType(), True),
                    pt.StructField('string_col', pt.StringType(), True),
                    pt.StructField('timestamp_col', pt.TimestampType(), True),
                    pt.StructField('year', pt.IntegerType(), True),
                    pt.StructField('month', pt.IntegerType(), True),
                ]
            ),
            mode='FAILFAST',
            header=True,
        )
        .repartition(num_partitions)
        .sort('index')
    )
    df_functional_alltypes = df_functional_alltypes.withColumn(
        "bool_col", df_functional_alltypes["bool_col"].cast("boolean")
    )
    df_functional_alltypes.createOrReplaceTempView('functional_alltypes')

    df_batting = (
        s.read.csv(
            path=str(data_directory / 'batting.csv'),
            schema=pt.StructType(
                [
                    pt.StructField('playerID', pt.StringType(), True),
                    pt.StructField('yearID', pt.IntegerType(), True),
                    pt.StructField('stint', pt.IntegerType(), True),
                    pt.StructField('teamID', pt.StringType(), True),
                    pt.StructField('lgID', pt.StringType(), True),
                    pt.StructField('G', pt.IntegerType(), True),
                    pt.StructField('AB', pt.DoubleType(), True),
                    pt.StructField('R', pt.DoubleType(), True),
                    pt.StructField('H', pt.DoubleType(), True),
                    pt.StructField('X2B', pt.DoubleType(), True),
                    pt.StructField('X3B', pt.DoubleType(), True),
                    pt.StructField('HR', pt.DoubleType(), True),
                    pt.StructField('RBI', pt.DoubleType(), True),
                    pt.StructField('SB', pt.DoubleType(), True),
                    pt.StructField('CS', pt.DoubleType(), True),
                    pt.StructField('BB', pt.DoubleType(), True),
                    pt.StructField('SO', pt.DoubleType(), True),
                    pt.StructField('IBB', pt.DoubleType(), True),
                    pt.StructField('HBP', pt.DoubleType(), True),
                    pt.StructField('SH', pt.DoubleType(), True),
                    pt.StructField('SF', pt.DoubleType(), True),
                    pt.StructField('GIDP', pt.DoubleType(), True),
                ]
            ),
            header=True,
        )
        .repartition(num_partitions)
        .sort('playerID')
    )
    df_batting.createOrReplaceTempView('batting')

    df_awards_players = (
        s.read.csv(
            path=str(data_directory / 'awards_players.csv'),
            schema=pt.StructType(
                [
                    pt.StructField('playerID', pt.StringType(), True),
                    pt.StructField('awardID', pt.StringType(), True),
                    pt.StructField('yearID', pt.IntegerType(), True),
                    pt.StructField('lgID', pt.StringType(), True),
                    pt.StructField('tie', pt.StringType(), True),
                    pt.StructField('notes', pt.StringType(), True),
                ]
            ),
            header=True,
        )
        .repartition(num_partitions)
        .sort('playerID')
    )
    df_awards_players.createOrReplaceTempView('awards_players')

    df_simple = s.createDataFrame([(1, 'a')], ['foo', 'bar'])
    df_simple.createOrReplaceTempView('simple')

    df_struct = s.createDataFrame([((1, 2, 'a'),)], ['struct_col'])
    df_struct.createOrReplaceTempView('struct')

    df_nested_types = s.createDataFrame(
        [([1, 2], [[3, 4], [5, 6]], {'a': [[2, 4], [3, 5]]})],
        [
            'list_of_ints',
            'list_of_list_of_ints',
            'map_string_list_of_list_of_ints',
        ],
    )
    df_nested_types.createOrReplaceTempView('nested_types')

    df_complicated = s.createDataFrame(
        [({(1, 3): [[2, 4], [3, 5]]},)], ['map_tuple_list_of_list_of_ints']
    )
    df_complicated.createOrReplaceTempView('complicated')

    df_udf = s.createDataFrame(
        [('a', 1, 4.0, 'a'), ('b', 2, 5.0, 'a'), ('c', 3, 6.0, 'b')],
        ['a', 'b', 'c', 'key'],
    )
    df_udf.createOrReplaceTempView('udf')

    df_udf_nan = s.createDataFrame(
        pd.DataFrame(
            {
                'a': np.arange(10, dtype=float),
                'b': [3.0, np.NaN] * 5,
                'key': list('ddeefffggh'),
            }
        )
    )
    df_udf_nan.createOrReplaceTempView('udf_nan')

    df_udf_null = s.createDataFrame(
        [
            (float(i), None if i % 2 else 3.0, 'ddeefffggh'[i])
            for i in range(10)
        ],
        ['a', 'b', 'key'],
    )
    df_udf_null.createOrReplaceTempView('udf_null')

    df_udf_random = s.createDataFrame(
        pd.DataFrame(
            {
                'a': np.arange(4, dtype=float).tolist()
                + np.random.rand(3).tolist(),
                'b': np.arange(4, dtype=float).tolist()
                + np.random.rand(3).tolist(),
                'key': list('ddeefff'),
            }
        )
    )
    df_udf_random.createOrReplaceTempView('udf_random')

    return _spark_testing_client


def get_pyspark_testing_client(data_directory):
    global _pyspark_testing_client
    if _pyspark_testing_client is None:
        _pyspark_testing_client = get_common_spark_testing_client(
            data_directory,
            lambda session: ibis.backends.pyspark.Backend().connect(session),
        )
    return _pyspark_testing_client


class TestConf(BackendTest, RoundAwayFromZero):
    supported_to_timestamp_units = {'s'}

    @staticmethod
    def connect(data_directory):
        return get_pyspark_testing_client(data_directory)


@pytest.fixture(scope='session')
def client(data_directory):
    client = get_pyspark_testing_client(data_directory)

    df = client._session.range(0, 10)
    df = df.withColumn("str_col", F.lit('value'))
    df.createTempView('basic_table')

    df_nans = client._session.createDataFrame(
        [
            [np.NaN, 'Alfred', None],
            [27.0, 'Batman', 'motocycle'],
            [3.0, None, 'joker'],
        ],
        ['age', 'user', 'toy'],
    )
    df_nans.createTempView('nan_table')

    df_dates = client._session.createDataFrame(
        [['2018-01-02'], ['2018-01-03'], ['2018-01-04']], ['date_str']
    )
    df_dates.createTempView('date_table')

    df_arrays = client._session.createDataFrame(
        [
            ['k1', [1, 2, 3], ['a']],
            ['k2', [4, 5], ['test1', 'test2', 'test3']],
            ['k3', [6], ['w', 'x', 'y', 'z']],
            ['k1', [], ['cat', 'dog']],
            ['k1', [7, 8], []],
        ],
        ['key', 'array_int', 'array_str'],
    )
    df_arrays.createTempView('array_table')

    df_time_indexed = client._session.createDataFrame(
        [
            [datetime(2017, 1, 2, 5, tzinfo=timezone.utc), 1, 1.0],
            [datetime(2017, 1, 2, 5, tzinfo=timezone.utc), 2, 2.0],
            [datetime(2017, 1, 2, 6, tzinfo=timezone.utc), 1, 3.0],
            [datetime(2017, 1, 2, 6, tzinfo=timezone.utc), 2, 4.0],
            [datetime(2017, 1, 2, 7, tzinfo=timezone.utc), 1, 5.0],
            [datetime(2017, 1, 2, 7, tzinfo=timezone.utc), 2, 6.0],
            [datetime(2017, 1, 4, 8, tzinfo=timezone.utc), 1, 7.0],
            [datetime(2017, 1, 4, 8, tzinfo=timezone.utc), 2, 8.0],
        ],
        ['time', 'key', 'value'],
    )
    df_time_indexed.createTempView('time_indexed_table')

    return client


class IbisWindow:
    # Test util class to generate different types of ibis windows
    def __init__(self, windows):
        self.windows = windows

    def get_windows(self):
        # Return a list of Ibis windows
        return [
            ibis.window(
                preceding=w[0],
                following=w[1],
                order_by='time',
                group_by='key',
            )
            for w in self.windows
        ]


@pytest.fixture
def ibis_windows(request):
    return IbisWindow(request.param).get_windows()


def _random_identifier(suffix):
    return '__ibis_test_{}_{}'.format(suffix, util.guid())


@pytest.fixture(scope='session', autouse=True)
def test_data_db(client):
    try:
        name = os.environ.get('IBIS_TEST_DATA_DB', 'ibis_testing')
        client.create_database(name)
        client.set_database(name)
        yield name
    finally:
        client.drop_database(name, force=True)


@pytest.fixture
def temp_database(client, test_data_db):
    name = _random_identifier('database')
    client.create_database(name)
    try:
        yield name
    finally:
        client.set_database(test_data_db)
        client.drop_database(name, force=True)


@pytest.fixture
def temp_table(client):
    name = _random_identifier('table')
    try:
        yield name
    finally:
        assert client.exists_table(name), name
        client.drop_table(name)


@pytest.fixture(scope='session')
def alltypes(client):
    return client.table('functional_alltypes').relabel(
        {'Unnamed: 0': 'Unnamed:0'}
    )


@pytest.fixture(scope='session')
def tmp_dir():
    return '/tmp/__ibis_test_{}'.format(util.guid())


@pytest.fixture
def temp_table_db(client, temp_database):
    name = _random_identifier('table')
    try:
        yield temp_database, name
    finally:
        assert client.exists_table(name, database=temp_database), name
        client.drop_table(name, database=temp_database)


@pytest.fixture
def temp_view(client):
    name = _random_identifier('view')
    try:
        yield name
    finally:
        assert client.exists_table(name), name
        client.drop_view(name)
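# --- Hedged usage sketch (illustrative only) ---
# A minimal example of how a test module typically consumes the fixtures
# defined above. The table 'basic_table' is created with 10 rows in the
# `client` fixture; the test name itself is an assumption for illustration.
#
#   def test_basic_table_count(client):
#       table = client.table('basic_table')
#       assert table.count().execute() == 10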
py
1a4a830fd78e4bcd417c67afee9a3a02977e34d6
from abc import ABC, abstractmethod
from .io.chem import load_molecule, build_fp
from .io.backends import PyTablesStorageBackend
from .FPSim2lib.utils import PyPopcount
import numpy as np


class BaseEngine(ABC):

    fp_filename = None
    storage = None

    def __init__(
        self,
        fp_filename: str,
        storage_backend: str,
        in_memory_fps: bool,
        fps_sort: bool,
    ) -> None:
        self.fp_filename = fp_filename
        self.in_memory_fps = in_memory_fps
        if storage_backend == "pytables":
            self.storage = PyTablesStorageBackend(
                fp_filename, in_memory_fps=in_memory_fps, fps_sort=fps_sort
            )

    @property
    def fps(self):
        if self.in_memory_fps:
            return self.storage.fps
        else:
            raise Exception("FPs not loaded into memory.")

    @property
    def popcnt_bins(self):
        return self.storage.popcnt_bins

    @property
    def fp_type(self):
        return self.storage.fp_type

    @property
    def fp_params(self):
        return self.storage.fp_params

    @property
    def rdkit_ver(self):
        return self.storage.rdkit_ver

    def load_query(self, query_string: str) -> np.ndarray:
        """Loads the query molecule from SMILES, molblock or InChI.

        Parameters
        ----------
        query_string : str
            SMILES, InChi or molblock.

        Returns
        -------
        query : numpy array
            Numpy array query molecule.
        """
        rdmol = load_molecule(query_string)
        fp = build_fp(rdmol, self.fp_type, self.fp_params, 0)
        return np.array(fp, dtype=np.uint64)

    @abstractmethod
    def similarity(
        self, query_string: str, threshold: float, n_workers=1
    ) -> np.ndarray:
        """Tanimoto similarity search"""
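# --- Hedged sketch (illustrative only) ---
# BaseEngine leaves `similarity` abstract; a concrete engine would build the
# query fingerprint with `load_query` and then search `self.fps`. The class
# name `InMemoryEngine` and the helper `_brute_force_search` below are
# hypothetical names used purely for illustration, not part of this package.
#
#   class InMemoryEngine(BaseEngine):
#       def similarity(self, query_string, threshold, n_workers=1):
#           query = self.load_query(query_string)
#           return _brute_force_search(query, self.fps, threshold)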
py
1a4a835fe67074cab78e2930e697b8f4b79187e3
""" This file contains base classes that other algorithm classes subclass. Each algorithm file also implements a algorithm factory function that takes in an algorithm config (`config.algo`) and returns the particular Algo subclass that should be instantiated, along with any extra kwargs. These factory functions are registered into a global dictionary with the @register_algo_factory_func function decorator. This makes it easy for @algo_factory to instantiate the correct `Algo` subclass. """ import textwrap from copy import deepcopy from collections import OrderedDict import torch.nn as nn import robomimic.utils.tensor_utils as TensorUtils import robomimic.utils.torch_utils as TorchUtils import robomimic.utils.obs_utils as ObsUtils # mapping from algo name to factory functions that map algo configs to algo class names REGISTERED_ALGO_FACTORY_FUNCS = OrderedDict() def register_algo_factory_func(algo_name): """ Function decorator to register algo factory functions that map algo configs to algo class names. Each algorithm implements such a function, and decorates it with this decorator. Args: algo_name (str): the algorithm name to register the algorithm under """ def decorator(factory_func): REGISTERED_ALGO_FACTORY_FUNCS[algo_name] = factory_func return decorator def algo_name_to_factory_func(algo_name): """ Uses registry to retrieve algo factory function from algo name. Args: algo_name (str): the algorithm name """ return REGISTERED_ALGO_FACTORY_FUNCS[algo_name] def algo_factory(algo_name, config, obs_key_shapes, ac_dim, device): """ Factory function for creating algorithms based on the algorithm name and config. Args: algo_name (str): the algorithm name config (BaseConfig instance): config object obs_key_shapes (OrderedDict): dictionary that maps observation keys to shapes ac_dim (int): dimension of action space device (torch.Device): where the algo should live (i.e. cpu, gpu) """ # @algo_name is included as an arg to be explicit, but make sure it matches the config assert algo_name == config.algo_name # use algo factory func to get algo class and kwargs from algo config factory_func = algo_name_to_factory_func(algo_name) algo_cls, algo_kwargs = factory_func(config.algo) # create algo instance return algo_cls( algo_config=config.algo, obs_config=config.observation, global_config=config, obs_key_shapes=obs_key_shapes, ac_dim=ac_dim, device=device, **algo_kwargs ) class Algo(object): """ Base algorithm class that all other algorithms subclass. Defines several functions that should be overriden by subclasses, in order to provide a standard API to be used by training functions such as @run_epoch in utils/train_utils.py. """ def __init__( self, algo_config, obs_config, global_config, obs_key_shapes, ac_dim, device ): """ Args: algo_config (Config object): instance of Config corresponding to the algo section of the config obs_config (Config object): instance of Config corresponding to the observation section of the config global_config (Config object): global training config obs_key_shapes (OrderedDict): dictionary that maps observation keys to shapes ac_dim (int): dimension of action space device (torch.Device): where the algo should live (i.e. 
cpu, gpu) """ self.optim_params = deepcopy(algo_config.optim_params) self.algo_config = algo_config self.obs_config = obs_config self.global_config = global_config self.ac_dim = ac_dim self.device = device self.obs_key_shapes = obs_key_shapes self.nets = nn.ModuleDict() self._create_shapes(obs_config.modalities, obs_key_shapes) self._create_networks() self._create_optimizers() assert isinstance(self.nets, nn.ModuleDict) def _create_shapes(self, obs_keys, obs_key_shapes): """ Create obs_shapes, goal_shapes, and subgoal_shapes dictionaries, to make it easy for this algorithm object to keep track of observation key shapes. Each dictionary maps observation key to shape. Args: obs_keys (dict): dict of required observation keys for this training run (usually specified by the obs config), e.g., {"obs": ["rgb", "proprio"], "goal": ["proprio"]} obs_key_shapes (dict): dict of observation key shapes, e.g., {"rgb": [3, 224, 224]} """ # determine shapes self.obs_shapes = OrderedDict() self.goal_shapes = OrderedDict() self.subgoal_shapes = OrderedDict() # We check across all modality groups (obs, goal, subgoal), and see if the inputted observation key exists # across all modalitie specified in the config. If so, we store its corresponding shape internally for k in obs_key_shapes: if "obs" in self.obs_config.modalities and k in [obs_key for modality in self.obs_config.modalities.obs.values() for obs_key in modality]: self.obs_shapes[k] = obs_key_shapes[k] if "goal" in self.obs_config.modalities and k in [obs_key for modality in self.obs_config.modalities.goal.values() for obs_key in modality]: self.goal_shapes[k] = obs_key_shapes[k] if "subgoal" in self.obs_config.modalities and k in [obs_key for modality in self.obs_config.modalities.subgoal.values() for obs_key in modality]: self.subgoal_shapes[k] = obs_key_shapes[k] def _create_networks(self): """ Creates networks and places them into @self.nets. @self.nets should be a ModuleDict. """ raise NotImplementedError def _create_optimizers(self): """ Creates optimizers using @self.optim_params and places them into @self.optimizers. """ self.optimizers = dict() self.lr_schedulers = dict() for k in self.optim_params: # only make optimizers for networks that have been created - @optim_params may have more # settings for unused networks if k in self.nets: if isinstance(self.nets[k], nn.ModuleList): self.optimizers[k] = [ TorchUtils.optimizer_from_optim_params(net_optim_params=self.optim_params[k], net=self.nets[k][i]) for i in range(len(self.nets[k])) ] self.lr_schedulers[k] = [ TorchUtils.lr_scheduler_from_optim_params(net_optim_params=self.optim_params[k], net=self.nets[k][i], optimizer=self.optimizers[k][i]) for i in range(len(self.nets[k])) ] else: self.optimizers[k] = TorchUtils.optimizer_from_optim_params( net_optim_params=self.optim_params[k], net=self.nets[k]) self.lr_schedulers[k] = TorchUtils.lr_scheduler_from_optim_params( net_optim_params=self.optim_params[k], net=self.nets[k], optimizer=self.optimizers[k]) def process_batch_for_training(self, batch): """ Processes input batch from a data loader to filter out relevant information and prepare the batch for training. Args: batch (dict): dictionary with torch.Tensors sampled from a data loader Returns: input_batch (dict): processed and filtered batch that will be used for training """ return batch def train_on_batch(self, batch, epoch, validate=False): """ Training on a single batch of data. 
Args: batch (dict): dictionary with torch.Tensors sampled from a data loader and filtered by @process_batch_for_training epoch (int): epoch number - required by some Algos that need to perform staged training and early stopping validate (bool): if True, don't perform any learning updates. Returns: info (dict): dictionary of relevant inputs, outputs, and losses that might be relevant for logging """ assert validate or self.nets.training return OrderedDict() def log_info(self, info): """ Process info dictionary from @train_on_batch to summarize information to pass to tensorboard for logging. Args: info (dict): dictionary of info Returns: loss log (dict): name -> summary statistic """ log = OrderedDict() # record current optimizer learning rates for k in self.optimizers: for i, param_group in enumerate(self.optimizers[k].param_groups): log["Optimizer/{}{}_lr".format(k, i)] = param_group["lr"] return log def on_epoch_end(self, epoch): """ Called at the end of each epoch. """ # LR scheduling updates for k in self.lr_schedulers: if self.lr_schedulers[k] is not None: self.lr_schedulers[k].step() def set_eval(self): """ Prepare networks for evaluation. """ self.nets.eval() def set_train(self): """ Prepare networks for training. """ self.nets.train() def serialize(self): """ Get dictionary of current model parameters. """ return self.nets.state_dict() def deserialize(self, model_dict): """ Load model from a checkpoint. Args: model_dict (dict): a dictionary saved by self.serialize() that contains the same keys as @self.network_classes """ self.nets.load_state_dict(model_dict) def __repr__(self): """ Pretty print algorithm and network description. """ return "{} (\n".format(self.__class__.__name__) + \ textwrap.indent(self.nets.__repr__(), ' ') + "\n)" def reset(self): """ Reset algo state to prepare for environment rollouts. """ pass class PolicyAlgo(Algo): """ Base class for all algorithms that can be used as policies. """ def get_action(self, obs_dict, goal_dict=None): """ Get policy action outputs. Args: obs_dict (dict): current observation goal_dict (dict): (optional) goal Returns: action (torch.Tensor): action tensor """ raise NotImplementedError class ValueAlgo(Algo): """ Base class for all algorithms that can learn a value function. """ def get_state_value(self, obs_dict, goal_dict=None): """ Get state value outputs. Args: obs_dict (dict): current observation goal_dict (dict): (optional) goal Returns: value (torch.Tensor): value tensor """ raise NotImplementedError def get_state_action_value(self, obs_dict, actions, goal_dict=None): """ Get state-action value outputs. Args: obs_dict (dict): current observation actions (torch.Tensor): action goal_dict (dict): (optional) goal Returns: value (torch.Tensor): value tensor """ raise NotImplementedError class PlannerAlgo(Algo): """ Base class for all algorithms that can be used for planning subgoals conditioned on current observations and potential goal observations. """ def get_subgoal_predictions(self, obs_dict, goal_dict=None): """ Get predicted subgoal outputs. Args: obs_dict (dict): current observation goal_dict (dict): (optional) goal Returns: subgoal prediction (dict): name -> Tensor [batch_size, ...] """ raise NotImplementedError def sample_subgoals(self, obs_dict, goal_dict, num_samples=1): """ For planners that rely on sampling subgoals. Args: obs_dict (dict): current observation goal_dict (dict): (optional) goal Returns: subgoals (dict): name -> Tensor [batch_size, num_samples, ...] 
""" raise NotImplementedError class HierarchicalAlgo(Algo): """ Base class for all hierarchical algorithms that consist of (1) subgoal planning and (2) subgoal-conditioned policy learning. """ def get_action(self, obs_dict, goal_dict=None): """ Get policy action outputs. Args: obs_dict (dict): current observation goal_dict (dict): (optional) goal Returns: action (torch.Tensor): action tensor """ raise NotImplementedError def get_subgoal_predictions(self, obs_dict, goal_dict=None): """ Get subgoal predictions from high-level subgoal planner. Args: obs_dict (dict): current observation goal_dict (dict): (optional) goal Returns: subgoal (dict): predicted subgoal """ raise NotImplementedError @property def current_subgoal(self): """ Get the current subgoal for conditioning the low-level policy Returns: current subgoal (dict): predicted subgoal """ raise NotImplementedError class RolloutPolicy(object): """ Wraps @Algo object to make it easy to run policies in a rollout loop. """ def __init__(self, policy, obs_normalization_stats=None): """ Args: policy (Algo instance): @Algo object to wrap to prepare for rollouts obs_normalization_stats (dict): optionally pass a dictionary for observation normalization. This should map observation keys to dicts with a "mean" and "std" of shape (1, ...) where ... is the default shape for the observation. """ self.policy = policy self.obs_normalization_stats = obs_normalization_stats def start_episode(self): """ Prepare the policy to start a new rollout. """ self.policy.set_eval() self.policy.reset() def _prepare_observation(self, ob): """ Prepare raw observation dict from environment for policy. Args: ob (dict): single observation dictionary from environment (no batch dimension, and np.array values for each key) """ if self.obs_normalization_stats is not None: ob = ObsUtils.normalize_obs(ob, obs_normalization_stats=self.obs_normalization_stats) ob = TensorUtils.to_tensor(ob) ob = TensorUtils.to_batch(ob) ob = TensorUtils.to_device(ob, self.policy.device) ob = TensorUtils.to_float(ob) return ob def __repr__(self): """Pretty print network description""" return self.policy.__repr__() def __call__(self, ob, goal=None): """ Produce action from raw observation dict (and maybe goal dict) from environment. Args: ob (dict): single observation dictionary from environment (no batch dimension, and np.array values for each key) goal (dict): goal observation """ ob = self._prepare_observation(ob) if goal is not None: goal = self._prepare_observation(goal) ac = self.policy.get_action(obs_dict=ob, goal_dict=goal) return TensorUtils.to_numpy(ac[0])
py
1a4a844480cdae007106a3a9fd36e4d163944a3e
from matrx.actions.action import Action, ActionResult from matrx.objects.agent_body import AgentBody def _act_move(grid_world, agent_id, dx, dy): """ Private MATRX method. The method that actually mutates the location of an AgentBody based on a delta-x and delta-y. Parameters ---------- grid_world : GridWorld The GridWorld instance in which the agent resides whose location should be updated. agent_id : string The unique identifier for the agent whose location should be changed. dx : {-1, 0, 1} The delta change on the x-coordinate. dy : {-1, 0, 1} The delta change on the y-coordinate. Returns ------- MoveActionResult The result of the actual change of the location of an AgentBody. Always returns a success. """ agent_avatar = grid_world.get_env_object(agent_id, obj_type=AgentBody) loc = agent_avatar.location new_loc = [loc[0] + dx, loc[1] + dy] grid_world.registered_agents[agent_id].location = new_loc return MoveActionResult(MoveActionResult.RESULT_SUCCESS, succeeded=True) def _is_possible_movement(grid_world, agent_id, dx, dy): """ Private MATRX method. Wrapper around the check if a certain movement is possible. Parameters ---------- grid_world : GridWorld The GridWorld instance in which the agent resides whose location should be updated. agent_id : string The unique identifier for the agent whose location should be changed. dx : {-1, 0, 1} The delta change on the x-coordinate. dy : {-1, 0, 1} The delta change on the y-coordinate. Returns ------- MoveActionResult The expected result of performing this movement. See Also -------- possible_movement : The main method this method wraps. """ return _possible_movement(grid_world, agent_id, dx, dy) def _possible_movement(grid_world, agent_id, dx, dy): """ Private MATRX method. Checks if the delta-x and delta-y change in the agent's location is possible. Parameters ---------- grid_world : GridWorld The GridWorld instance in which the agent resides whose location should be updated. agent_id : string The unique identifier for the agent whose location should be changed. dx : {-1, 0, 1} The delta change on the x-coordinate. dy : {-1, 0, 1} The delta change on the y-coordinate. Returns ------- MoveActionResult Whether the MoveAction is expected to be possible. Can return the following results (see also :class:`matrx.actions.move_actions.MoveActionResult`): * The ActionResult depicting the action's success or failure and reason for that result. * RESULT_SUCCESS: When the MoveAction is possible. * RESULT_NO_MOVE: If the agent is already at the location it wishes to move to. * RESULT_OCCUPIED: When the new location is occupied by an intraversable agent. * RESULT_NOT_PASSABLE_OBJECT: When the new location is occupied by an intraversable object. * RESULT_OUT_OF_BOUNDS: When the new location is outside the GridWorld's bounds. 
""" agent_avatar = grid_world.get_env_object(agent_id, obj_type=AgentBody) assert agent_avatar is not None loc = agent_avatar.location new_loc = [loc[0] + dx, loc[1] + dy] if 0 <= new_loc[0] < grid_world.shape[0] and 0 <= new_loc[1] < grid_world.shape[1]: loc_obj_ids = grid_world.grid[new_loc[1], new_loc[0]] if loc_obj_ids is None: # there is nothing at that location return MoveActionResult(MoveActionResult.RESULT_SUCCESS, succeeded=True) else: # Go through all objects at the desired locations for loc_obj_id in loc_obj_ids: # Check if loc_obj_id is the id of an agent if loc_obj_id in grid_world.registered_agents.keys(): # get the actual agent loc_obj = grid_world.registered_agents[loc_obj_id] # Check if the agent that takes the move action is not that agent at that location (meaning that # for some reason the move action has no effect. If this is the case, we send the appropriate # result if loc_obj_id == agent_id: # The desired location contains a different agent and we cannot step at locations with agents return MoveActionResult(MoveActionResult.RESULT_NO_MOVE, succeeded=False) # Check if the agent on the other location (if not itself) is traverable. Otherwise we return that # the location is occupied. elif not loc_obj.is_traversable: return MoveActionResult(MoveActionResult.RESULT_OCCUPIED, succeeded=False) # If there are no agents at the desired location or we can move on top of other agents, we check if # there are objects in the way that are not passable. if loc_obj_id in grid_world.environment_objects.keys(): # get the actual object loc_obj = grid_world.environment_objects[loc_obj_id] # Check if the object is not passable, if this is not the case is_traversable is False if not loc_obj.is_traversable: # The desired location contains an object that is not passable return MoveActionResult(MoveActionResult.RESULT_NOT_PASSABLE_OBJECT, succeeded=False) # Either the desired location contains the agent at previous tick, and/or all objects there are passable return MoveActionResult(MoveActionResult.RESULT_SUCCESS, succeeded=True) else: return MoveActionResult(MoveActionResult.RESULT_OUT_OF_BOUNDS, succeeded=False) class MoveActionResult(ActionResult): """ActionResult for a Move action The results uniquely for Move action are (as class constants): * RESULT_SUCCESS: When the MoveAction is possible. * RESULT_NO_MOVE: If the agent is already at the location it wishes to move to. * RESULT_OCCUPIED: When the new location is occupied by an intraversable agent. * RESULT_NOT_PASSABLE_OBJECT: When the new location is occupied by an intraversable object. * RESULT_OUT_OF_BOUNDS: When the new location is outside the GridWorld's bounds. Parameters ---------- result : str A string representing the reason for a (expected) success or fail of a :class:`matrx.actions.move_actions.Move`. succeeded : bool A boolean representing the (expected) success or fail of a :class:`matrx.actions.move_actions.Move`. See Also -------- Move """ """ When the move action is success. """ RESULT_SUCCESS = 'Move action success' """ When the agent is already at the location it tries to move to. """ RESULT_NO_MOVE = 'Move action resulted in a new location with the agent already present.' """ When the move action would lead the agent outside the world bounds. """ RESULT_OUT_OF_BOUNDS = 'Move action out of bounds' """ When the move action would lead the agent to a location occupied by another agent. 
""" RESULT_OCCUPIED = 'Move action towards occupied space' """ When the move action would lead the agent to a location occupied by an intraversable object. """ RESULT_NOT_PASSABLE_OBJECT = 'Move action toward space which is not traversable by agent due object' def __init__(self, result, succeeded): super().__init__(result, succeeded) class Move(Action): """ The class wrapping all Move actions. Parameters ---------- duration_in_ticks : int Optional. Default: ``1``. Should be zero or larger. The default duration of this action in ticks during which the :class:`matrx.grid_world.GridWorld` blocks the agent performing other actions. By default this is 1, meaning that all actions of this type will take both the tick in which it was decided upon and the subsequent tick. When the agent is blocked / busy with an action, only the :meth:`matrx.agents.agent_brain.AgentBrain.filter_observations` method is called for that agent, and the :meth:`matrx.agents.agent_brain.AgentBrain.decide_on_action` method is skipped. This means that agents that are busy with an action can only perceive the world but not decide on a new action untill the action has completed. An agent can overwrite the duration of an action by returning the ``action_duration`` in the ``action_kwargs`` in the :meth:`matrx.agents.agent_brain.AgentBrain.decide_on_action` method, as so: ``return >action_name<, {'action_duration': >ticks<}`` Attributes ---------- dx : {-1, 0, 1} The delta change on the x-coordinate. dy : {-1, 0, 1} The delta change on the y-coordinate. See Also -------- MoveNorth MoveNorthEast MoveEast MoveSouthEast MoveSouth MoveSouthWest MoveWest MoveNorthWest """ def __init__(self, duration_in_ticks=0): super().__init__(duration_in_ticks) self.dx = 0 self.dy = 0 def is_possible(self, grid_world, agent_id, world_state, **kwargs): """ Checks if the move is possible. Checks for the following: * If the agent is already at the location it wishes to move to. * When the new location is occupied by an intraversable agent. * When the new location is occupied by an intraversable object. * When the new location is outside the GridWorld's bounds. Parameters ---------- grid_world : GridWorld The :class:`matrx.grid_world.GridWorld` instance in which the agent resides whose location should be updated. agent_id : str The unique identifier for the agent whose location should be changed. world_state : State The State object representing the entire world. Can be used to simplify search of objects and properties when checking if an action can be performed. Note that this is the State of the entire world, not that of the agent performing the action. **kwargs : dict Not used. Returns ------- MoveActionResult Whether the MoveAction is expected to be possible. See :class:`matrx.actions.move_actions.MoveActionResult` for the results it can contain. """ result = _is_possible_movement(grid_world, agent_id=agent_id, dx=self.dx, dy=self.dy) return result def mutate(self, grid_world, agent_id, world_state, **kwargs): """ Mutates an agent's location Changes an agent's location property based on the attributes `dx` and `dy`. Parameters ---------- grid_world : GridWorld The :class:`matrx.grid_world.GridWorld` instance in which the agent resides whose location should be updated. world_state : State The State object representing the entire world. Can be used to simplify search of objects and properties when performing an action. Note that this is the State of the entire world, not that of the agent performing the action. 
agent_id : str The unique identifier for the agent whose location should be changed. Returns ------- MoveActionResult The result of the actual change of the location of an agent. Always returns a success. """ return _act_move(grid_world, agent_id=agent_id, dx=self.dx, dy=self.dy) class MoveNorth(Move): """ Moves the agent North. Inherits from :class:`matrx.actions.move_actions.Move` and sets the delta-x and delta-y as follows: * delta-x = 0 * delta-y = -1 See Also -------- Move """ def __init__(self): super().__init__() self.dx = 0 self.dy = -1 class MoveNorthEast(Move): """ Moves the agent North-East. Inherits from :class:`matrx.actions.move_actions.Move` and sets the delta-x and delta-y as follows: * delta-x = 1 * delta-y = -1 See Also -------- Move """ def __init__(self): super().__init__() self.dx = +1 self.dy = -1 class MoveEast(Move): """ Moves the agent East. Inherits from :class:`matrx.actions.move_actions.Move` and sets the delta-x and delta-y as follows: * delta-x = 1 * delta-y = 0 See Also -------- Move """ def __init__(self): super().__init__() self.dx = +1 self.dy = 0 class MoveSouthEast(Move): """ Moves the agent South-East. Inherits from :class:`matrx.actions.move_actions.Move` and sets the delta-x and delta-y as follows: * delta-x = 1 * delta-y = 1 See Also -------- Move """ def __init__(self): super().__init__() self.dx = +1 self.dy = +1 class MoveSouth(Move): """ Moves the agent South. Inherits from :class:`matrx.actions.move_actions.Move` and sets the delta-x and delta-y as follows: * delta-x = 0 * delta-y = 1 See Also -------- Move """ def __init__(self): super().__init__() self.dx = 0 self.dy = +1 class MoveSouthWest(Move): """ Moves the agent South-West. Inherits from :class:`matrx.actions.move_actions.Move` and sets the delta-x and delta-y as follows: * delta-x = -1 * delta-y = 1 See Also -------- Move """ def __init__(self): super().__init__() self.dx = -1 self.dy = +1 class MoveWest(Move): """ Moves the agent West. Inherits from :class:`matrx.actions.move_actions.Move` and sets the delta-x and delta-y as follows: * delta-x = -1 * delta-y = 0 See Also -------- Move """ def __init__(self): super().__init__() self.dx = -1 self.dy = 0 class MoveNorthWest(Move): """ Moves the agent North-West. Inherits from :class:`matrx.actions.move_actions.Move` and sets the delta-x and delta-y as follows: * delta-x = -1 * delta-y = -1 See Also -------- Move """ def __init__(self): super().__init__() self.dx = -1 self.dy = -1
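
# ----------------------------------------------------------------------------
# Example usage (illustrative sketch, not part of the original module): every
# compass action above reduces to a (dx, dy) pair that _possible_movement and
# _act_move consume. No GridWorld is needed to inspect the deltas themselves.
if __name__ == "__main__":
    compass_actions = [MoveNorth, MoveNorthEast, MoveEast, MoveSouthEast,
                       MoveSouth, MoveSouthWest, MoveWest, MoveNorthWest]
    for action_cls in compass_actions:
        action = action_cls()
        # Applying the delta to a location (x, y) yields the candidate
        # location that _possible_movement checks for bounds and collisions.
        print(f"{action_cls.__name__:<14} dx={action.dx:+d}, dy={action.dy:+d}")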
py
1a4a855efc228df343bc01d72ea3f9a19e4898e2
import torch import numpy as np import networkx as nx class CenterObjective(): def __init__(self, dist, dmax, temp, hardmax=False): ''' dist: (num customers) * (num locations) matrix dmax: maximum distance that can be suffered by any customer (e.g., if no facilities are chosen) temp: how hard to make the softmax over customers ''' self.dmax = dmax dist, order = torch.sort(dist, dim=1) self.order = order dmax_vec = dmax*torch.ones(dist.shape[0], 1) off_one = torch.cat((dist[:, 1:], dmax_vec), dim=1) self.m = dist - off_one self.temp = temp self.hardmax = hardmax def __call__(self, x): ''' Evaluates E_S[softmax_{customers} min_{i \in S} dist(customer, i)] where the expectation is over the set of facility locations S. Every location is included in S independently with probability x_i. ''' x_sort = x[self.order] probs = 1 - torch.cumprod(1 - x_sort, dim=1) vals = self.dmax + (self.m*probs).sum(dim=1) if self.hardmax: return vals.max() weights = torch.softmax(self.temp*vals, dim=0) return torch.dot(vals, weights) def gonzalez_kcenter(dist, K): ''' Algorithm of Gonzalez (1985) which iteratively selects the point furthest from the current solution Gonzalez, Teofilo F. (1985). "Clustering to minimize the maximum intercluster distance". Theoretical Computer Science. ''' S = [np.random.choice(list(range(dist.shape[1])))] while len(S) < K: dist_to_S = dist[:, S].min(dim = 1)[0] S.append(dist_to_S.argmax().item()) x = torch.zeros(dist.shape[1]) x[S] = 1 return x def greedy_kcenter(dist, dmax, K): ''' Greedily add locations to minimize the kcenter objective ''' obj = CenterObjective(dist, dmax, None, True) x = torch.zeros(dist.shape[1]) currval = obj(x) for _ in range(K): best_i = 0 for i in range(dist.shape[1]): if x[i] < 0.5: x[i] = 1 obj_val = obj(x) if obj_val < currval: currval = obj_val best_i = i x[i] = 0 x[best_i] = 1 return x def make_all_dists(bin_adj, dmax, use_weights=False): g = nx.from_numpy_array(bin_adj.cpu().detach().numpy()) if not use_weights: lengths = nx.shortest_path_length(g) else: lengths = nx.shortest_path_length(g, weight='weight') dist = torch.zeros_like(bin_adj) for u, lens_u in lengths: for v in range(bin_adj.shape[0]): if v in lens_u: dist[u,v] = lens_u[v] else: dist[u,v] = dmax return dist def make_dists_igraph(adj): import igraph adj = adj.detach().numpy() dense = np.random.rand(adj.shape[0], adj.shape[1]) e1 = dense.nonzero()[0] e1 = e1.reshape(e1.shape[0], 1) e2 = dense.nonzero()[1] e2 = e2.reshape(e2.shape[0], 1) stuff = np.concatenate((e1, e2), axis=1) allstuff = np.concatenate((stuff, adj.flatten().reshape(stuff.shape[0], 1)), axis=1) np.savetxt('tmp_twostage', allstuff, fmt = '%d %d %f') g = igraph.Graph.Read_Ncol('tmp_twostage', weights=True, directed=True) dists = g.shortest_paths(weights='weight') dists = torch.tensor(np.array(dists)) return dists.float() def rounding(x): ''' Fast pipage rounding implementation for uniform matroid ''' i = 0 j = 1 x = x.clone() for t in range(len(x)-1): if x[i] == 0 and x[j] == 0: i = max((i,j)) + 1 elif x[i] + x[j] < 1: if np.random.rand() < x[i]/(x[i] + x[j]): x[i] = x[i] + x[j] x[j] = 0 j = max((i,j)) + 1 else: x[j] = x[i] + x[j] x[i] = 0 i = max((i,j)) + 1 else: if np.random.rand() < (1 - x[j])/(2 - x[i] - x[j]): x[j] = x[i] + x[j] - 1 x[i] = 1 i = max((i,j)) + 1 else: x[i] = x[i] + x[j] - 1 x[j] = 1 j = max((i,j)) + 1 return x
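
# ----------------------------------------------------------------------------
# Example usage (illustrative sketch, not part of the original module): the
# distance matrix below is synthetic random data, and K and temp are
# arbitrary choices, purely to show how the helpers above fit together.
if __name__ == "__main__":
    torch.manual_seed(0)
    np.random.seed(0)
    num_customers, num_locations, K = 20, 8, 3
    dist = torch.rand(num_customers, num_locations)
    dmax = dist.max().item()

    x_gonzalez = gonzalez_kcenter(dist, K)
    x_greedy = greedy_kcenter(dist, dmax, K)

    obj = CenterObjective(dist, dmax, temp=10, hardmax=True)
    print("Gonzalez objective:", obj(x_gonzalez).item())
    print("Greedy objective:  ", obj(x_greedy).item())

    # A fractional solution (e.g., from gradient descent) can be made
    # integral with pipage rounding; the expected number of chosen locations
    # equals the fractional sum, and all but at most one coordinate ends up
    # in {0, 1}.
    x_frac = torch.full((num_locations,), K / num_locations)
    print("Rounded solution:  ", rounding(x_frac))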
py
1a4a865c4176859e0b5b4b5444b68e253bcf9d64
import scipy.stats
import numpy as np
from math import ceil
from .. import img_as_float
from ..restoration._denoise_cy import _denoise_bilateral, _denoise_tv_bregman
from .._shared.utils import warn
import pywt
import skimage.color as color
import numbers


def denoise_bilateral(image, win_size=None, sigma_color=None, sigma_spatial=1,
                      bins=10000, mode='constant', cval=0, multichannel=False):
    """Denoise image using bilateral filter.

    This is an edge-preserving, denoising filter. It averages pixels based on
    their spatial closeness and radiometric similarity [1]_.

    Spatial closeness is measured by the Gaussian function of the Euclidean
    distance between two pixels and a certain standard deviation
    (`sigma_spatial`).

    Radiometric similarity is measured by the Gaussian function of the
    Euclidean distance between two color values and a certain standard
    deviation (`sigma_color`).

    Parameters
    ----------
    image : ndarray, shape (M, N[, 3])
        Input image, 2D grayscale or RGB.
    win_size : int
        Window size for filtering.
        If win_size is not specified, it is calculated as
        ``max(5, 2 * ceil(3 * sigma_spatial) + 1)``.
    sigma_color : float
        Standard deviation for grayvalue/color distance (radiometric
        similarity). A larger value results in averaging of pixels with larger
        radiometric differences. Note that the image will be converted using
        the `img_as_float` function and thus the standard deviation is with
        respect to the range ``[0, 1]``. If the value is ``None`` the standard
        deviation of the ``image`` will be used.
    sigma_spatial : float
        Standard deviation for range distance. A larger value results in
        averaging of pixels with larger spatial differences.
    bins : int
        Number of discrete values for Gaussian weights of color filtering.
        A larger value results in improved accuracy.
    mode : {'constant', 'edge', 'symmetric', 'reflect', 'wrap'}
        How to handle values outside the image borders. See
        `numpy.pad` for detail.
    cval : scalar
        Used in conjunction with mode 'constant', the value outside
        the image boundaries.
    multichannel : bool
        Whether the last axis of the image is to be interpreted as multiple
        channels or another spatial dimension.

    Returns
    -------
    denoised : ndarray
        Denoised image.

    References
    ----------
    .. [1] http://users.soe.ucsc.edu/~manduchi/Papers/ICCV98.pdf

    Examples
    --------
    >>> from skimage import data, img_as_float
    >>> astro = img_as_float(data.astronaut())
    >>> astro = astro[220:300, 220:320]
    >>> noisy = astro + 0.6 * astro.std() * np.random.random(astro.shape)
    >>> noisy = np.clip(noisy, 0, 1)
    >>> denoised = denoise_bilateral(noisy, sigma_color=0.05, sigma_spatial=15,
    ...                              multichannel=True)
    """
    if multichannel:
        if image.ndim != 3:
            if image.ndim == 2:
                raise ValueError("Use ``multichannel=False`` for 2D grayscale "
                                 "images. The last axis of the input image "
                                 "must be multiple color channels not another "
                                 "spatial dimension.")
            else:
                raise ValueError("Bilateral filter is only implemented for "
                                 "2D grayscale images (image.ndim == 2) and "
                                 "2D multichannel (image.ndim == 3) images, "
                                 "but the input image has {0} dimensions. "
                                 "".format(image.ndim))
        elif image.shape[2] not in (3, 4):
            if image.shape[2] > 4:
                msg = ("The last axis of the input image is interpreted as "
                       "channels. Input image with shape {0} has {1} channels "
                       "in last axis. ``denoise_bilateral`` is implemented "
                       "for 2D grayscale and color images only")
                warn(msg.format(image.shape, image.shape[2]))
            else:
                msg = "Input image must be grayscale, RGB, or RGBA; " \
                      "but has shape {0}."
                warn(msg.format(image.shape))
    else:
        if image.ndim > 2:
            raise ValueError("Bilateral filter is not implemented for "
                             "grayscale images of 3 or more dimensions, "
                             "but the input image has shape {0}. Use "
                             "``multichannel=True`` for 2-D RGB "
                             "images.".format(image.shape))

    if win_size is None:
        win_size = max(5, 2 * int(ceil(3 * sigma_spatial)) + 1)

    return _denoise_bilateral(image, win_size, sigma_color, sigma_spatial,
                              bins, mode, cval)


def denoise_tv_bregman(image, weight, max_iter=100, eps=1e-3, isotropic=True):
    """Perform total-variation denoising using split-Bregman optimization.

    Total-variation denoising (also known as total-variation regularization)
    tries to find an image with less total-variation under the constraint
    of being similar to the input image, which is controlled by the
    regularization parameter ([1]_, [2]_, [3]_, [4]_).

    Parameters
    ----------
    image : ndarray
        Input data to be denoised (converted using `img_as_float`).
    weight : float
        Denoising weight. The smaller the `weight`, the more denoising (at
        the expense of less similarity to the `input`). The regularization
        parameter `lambda` is chosen as `2 * weight`.
    eps : float, optional
        Relative difference of the value of the cost function that determines
        the stop criterion. The algorithm stops when::

            SUM((u(n) - u(n-1))**2) < eps

    max_iter : int, optional
        Maximal number of iterations used for the optimization.
    isotropic : boolean, optional
        Switch between isotropic and anisotropic TV denoising.

    Returns
    -------
    u : ndarray
        Denoised image.

    References
    ----------
    .. [1] http://en.wikipedia.org/wiki/Total_variation_denoising
    .. [2] Tom Goldstein and Stanley Osher, "The Split Bregman Method For L1
           Regularized Problems",
           ftp://ftp.math.ucla.edu/pub/camreport/cam08-29.pdf
    .. [3] Pascal Getreuer, "Rudin–Osher–Fatemi Total Variation Denoising
           using Split Bregman" in Image Processing On Line on 2012–05–19,
           http://www.ipol.im/pub/art/2012/g-tvd/article_lr.pdf
    .. [4] http://www.math.ucsb.edu/~cgarcia/UGProjects/BregmanAlgorithms_JacquelineBush.pdf
    """
    return _denoise_tv_bregman(image, weight, max_iter, eps, isotropic)


def _denoise_tv_chambolle_nd(image, weight=0.1, eps=2.e-4, n_iter_max=200):
    """Perform total-variation denoising on n-dimensional images.

    Parameters
    ----------
    image : ndarray
        n-D input data to be denoised.
    weight : float, optional
        Denoising weight. The greater `weight`, the more denoising (at
        the expense of fidelity to `input`).
    eps : float, optional
        Relative difference of the value of the cost function that determines
        the stop criterion. The algorithm stops when:

            (E_(n-1) - E_n) < eps * E_0

    n_iter_max : int, optional
        Maximal number of iterations used for the optimization.

    Returns
    -------
    out : ndarray
        Denoised array of floats.

    Notes
    -----
    Rudin, Osher and Fatemi algorithm.
    """

    ndim = image.ndim
    p = np.zeros((image.ndim, ) + image.shape, dtype=image.dtype)
    g = np.zeros_like(p)
    d = np.zeros_like(image)
    i = 0
    while i < n_iter_max:
        if i > 0:
            # d will be the (negative) divergence of p
            d = -p.sum(0)
            slices_d = [slice(None), ] * ndim
            slices_p = [slice(None), ] * (ndim + 1)
            for ax in range(ndim):
                slices_d[ax] = slice(1, None)
                slices_p[ax+1] = slice(0, -1)
                slices_p[0] = ax
                d[tuple(slices_d)] += p[tuple(slices_p)]
                slices_d[ax] = slice(None)
                slices_p[ax+1] = slice(None)
            out = image + d
        else:
            out = image
        E = (d ** 2).sum()

        # g stores the gradients of out along each axis
        # e.g.
g[0] is the first order finite difference along axis 0 slices_g = [slice(None), ] * (ndim + 1) for ax in range(ndim): slices_g[ax+1] = slice(0, -1) slices_g[0] = ax g[tuple(slices_g)] = np.diff(out, axis=ax) slices_g[ax+1] = slice(None) norm = np.sqrt((g ** 2).sum(axis=0))[np.newaxis, ...] E += weight * norm.sum() tau = 1. / (2.*ndim) norm *= tau / weight norm += 1. p -= tau * g p /= norm E /= float(image.size) if i == 0: E_init = E E_previous = E else: if np.abs(E_previous - E) < eps * E_init: break else: E_previous = E i += 1 return out def denoise_tv_chambolle(image, weight=0.1, eps=2.e-4, n_iter_max=200, multichannel=False): """Perform total-variation denoising on n-dimensional images. Parameters ---------- image : ndarray of ints, uints or floats Input data to be denoised. `image` can be of any numeric type, but it is cast into an ndarray of floats for the computation of the denoised image. weight : float, optional Denoising weight. The greater `weight`, the more denoising (at the expense of fidelity to `input`). eps : float, optional Relative difference of the value of the cost function that determines the stop criterion. The algorithm stops when: (E_(n-1) - E_n) < eps * E_0 n_iter_max : int, optional Maximal number of iterations used for the optimization. multichannel : bool, optional Apply total-variation denoising separately for each channel. This option should be true for color images, otherwise the denoising is also applied in the channels dimension. Returns ------- out : ndarray Denoised image. Notes ----- Make sure to set the multichannel parameter appropriately for color images. The principle of total variation denoising is explained in http://en.wikipedia.org/wiki/Total_variation_denoising The principle of total variation denoising is to minimize the total variation of the image, which can be roughly described as the integral of the norm of the image gradient. Total variation denoising tends to produce "cartoon-like" images, that is, piecewise-constant images. This code is an implementation of the algorithm of Rudin, Fatemi and Osher that was proposed by Chambolle in [1]_. References ---------- .. [1] A. Chambolle, An algorithm for total variation minimization and applications, Journal of Mathematical Imaging and Vision, Springer, 2004, 20, 89-97. 
    Examples
    --------
    2D example on astronaut image:

    >>> from skimage import color, data
    >>> img = color.rgb2gray(data.astronaut())[:50, :50]
    >>> img += 0.5 * img.std() * np.random.randn(*img.shape)
    >>> denoised_img = denoise_tv_chambolle(img, weight=60)

    3D example on synthetic data:

    >>> x, y, z = np.ogrid[0:20, 0:20, 0:20]
    >>> mask = (x - 22)**2 + (y - 20)**2 + (z - 17)**2 < 8**2
    >>> mask = mask.astype(float)
    >>> mask += 0.2*np.random.randn(*mask.shape)
    >>> res = denoise_tv_chambolle(mask, weight=100)
    """

    im_type = image.dtype
    if not im_type.kind == 'f':
        image = img_as_float(image)

    if multichannel:
        out = np.zeros_like(image)
        for c in range(image.shape[-1]):
            out[..., c] = _denoise_tv_chambolle_nd(image[..., c], weight, eps,
                                                   n_iter_max)
    else:
        out = _denoise_tv_chambolle_nd(image, weight, eps, n_iter_max)
    return out


def _bayes_thresh(details, var):
    """BayesShrink threshold for a zero-mean details coeff array."""
    # Equivalent to:  dvar = np.var(details) for 0-mean details array
    dvar = np.mean(details*details)
    eps = np.finfo(details.dtype).eps
    thresh = var / np.sqrt(max(dvar - var, eps))
    return thresh


def _universal_thresh(img, sigma):
    """ Universal threshold used by the VisuShrink method """
    return sigma*np.sqrt(2*np.log(img.size))


def _sigma_est_dwt(detail_coeffs, distribution='Gaussian'):
    """Calculate the robust median estimator of the noise standard deviation.

    Parameters
    ----------
    detail_coeffs : ndarray
        The detail coefficients corresponding to the discrete wavelet
        transform of an image.
    distribution : str
        The underlying noise distribution.

    Returns
    -------
    sigma : float
        The estimated noise standard deviation (see section 4.2 of [1]_).

    References
    ----------
    .. [1] D. L. Donoho and I. M. Johnstone. "Ideal spatial adaptation
       by wavelet shrinkage." Biometrika 81.3 (1994): 425-455.
       DOI:10.1093/biomet/81.3.425
    """
    # Consider regions with detail coefficients exactly zero to be masked out
    detail_coeffs = detail_coeffs[np.nonzero(detail_coeffs)]

    if distribution.lower() == 'gaussian':
        # 75th quantile of the underlying, symmetric noise distribution
        denom = scipy.stats.norm.ppf(0.75)
        sigma = np.median(np.abs(detail_coeffs)) / denom
    else:
        raise ValueError("Only Gaussian noise estimation is currently "
                         "supported")
    return sigma


def _wavelet_threshold(image, wavelet, method=None, threshold=None,
                       sigma=None, mode='soft', wavelet_levels=None):
    """Perform wavelet thresholding.

    Parameters
    ----------
    image : ndarray (2d or 3d) of ints, uints or floats
        Input data to be denoised. `image` can be of any numeric type,
        but it is cast into an ndarray of floats for the computation
        of the denoised image.
    wavelet : string
        The type of wavelet to use. Can be any of the options
        pywt.wavelist outputs. For example, this may be any of ``{db1, db2,
        db3, db4, haar}``.
    method : {'BayesShrink', 'VisuShrink'}, optional
        Thresholding method to be used. The currently supported methods are
        "BayesShrink" [1]_ and "VisuShrink" [2]_. If it is set to None, a
        user-specified ``threshold`` must be supplied instead.
    threshold : float, optional
        The thresholding value to apply during wavelet coefficient
        thresholding. The default value (None) uses the selected ``method`` to
        estimate appropriate threshold(s) for noise removal.
    sigma : float, optional
        The standard deviation of the noise. The noise is estimated when sigma
        is None (the default) by the method in [2]_.
    mode : {'soft', 'hard'}, optional
        An optional argument to choose the type of denoising performed.
        It is noted that choosing soft thresholding given additive noise
        finds the best approximation of the original image.
    wavelet_levels : int or None, optional
        The number of wavelet decomposition levels to use. The default is
        three less than the maximum number of possible decomposition levels
        (see Notes below).

    Returns
    -------
    out : ndarray
        Denoised image.

    References
    ----------
    .. [1] Chang, S. Grace, Bin Yu, and Martin Vetterli. "Adaptive wavelet
           thresholding for image denoising and compression." Image
           Processing, IEEE Transactions on 9.9 (2000): 1532-1546.
           DOI: 10.1109/83.862633
    .. [2] D. L. Donoho and I. M. Johnstone. "Ideal spatial adaptation
           by wavelet shrinkage." Biometrika 81.3 (1994): 425-455.
           DOI: 10.1093/biomet/81.3.425
    """
    wavelet = pywt.Wavelet(wavelet)

    # original_extent is used to workaround PyWavelets issue #80
    # odd-sized input results in an image with 1 extra sample after waverecn
    original_extent = tuple(slice(s) for s in image.shape)

    # Determine the number of wavelet decomposition levels
    if wavelet_levels is None:
        # Determine the maximum number of possible levels for image
        dlen = wavelet.dec_len
        wavelet_levels = np.min(
            [pywt.dwt_max_level(s, dlen) for s in image.shape])

        # Skip coarsest wavelet scales (see Notes in docstring).
        wavelet_levels = max(wavelet_levels - 3, 1)

    coeffs = pywt.wavedecn(image, wavelet=wavelet, level=wavelet_levels)
    # Detail coefficients at each decomposition level
    dcoeffs = coeffs[1:]

    if sigma is None:
        # Estimate the noise via the method in [2]_
        detail_coeffs = dcoeffs[-1]['d' * image.ndim]
        sigma = _sigma_est_dwt(detail_coeffs, distribution='Gaussian')

    if method is not None and threshold is not None:
        warn(("Thresholding method {} selected. The user-specified threshold "
              "will take precedence and the method will be "
              "ignored.").format(method))

    if threshold is None:
        var = sigma**2
        if method is None:
            raise ValueError(
                "If method is None, a threshold must be provided.")
        elif method == "BayesShrink":
            # The BayesShrink thresholds from [1]_ in docstring
            threshold = [{key: _bayes_thresh(level[key], var) for key in level}
                         for level in dcoeffs]
        elif method == "VisuShrink":
            # The VisuShrink thresholds from [2]_ in docstring
            threshold = _universal_thresh(image, sigma)
        else:
            raise ValueError("Unrecognized method: {}".format(method))

    if np.isscalar(threshold):
        # A single threshold for all coefficient arrays
        denoised_detail = [{key: pywt.threshold(level[key],
                                                value=threshold,
                                                mode=mode) for key in level}
                           for level in dcoeffs]
    else:
        # Dict of unique threshold coefficients for each detail coeff. array
        denoised_detail = [{key: pywt.threshold(level[key],
                                                value=thresh[key],
                                                mode=mode) for key in level}
                           for thresh, level in zip(threshold, dcoeffs)]
    denoised_coeffs = [coeffs[0]] + denoised_detail
    return pywt.waverecn(denoised_coeffs, wavelet)[original_extent]


def denoise_wavelet(image, sigma=None, wavelet='db1', mode='soft',
                    wavelet_levels=None, multichannel=False,
                    convert2ycbcr=False, method='BayesShrink'):
    """Perform wavelet denoising on an image.

    Parameters
    ----------
    image : ndarray ([M[, N[, ...P]][, C]) of ints, uints or floats
        Input data to be denoised. `image` can be of any numeric type,
        but it is cast into an ndarray of floats for the computation
        of the denoised image.
    sigma : float or list, optional
        The noise standard deviation used when computing the wavelet detail
        coefficient threshold(s). When None (default), the noise standard
        deviation is estimated via the method in [2]_.
    wavelet : string, optional
        The type of wavelet to use and can be any of the options
        ``pywt.wavelist`` outputs.
        The default is `'db1'`. For example, ``wavelet`` can be any of
        ``{'db2', 'haar', 'sym9'}`` and many more.
    mode : {'soft', 'hard'}, optional
        An optional argument to choose the type of denoising performed. It is
        noted that choosing soft thresholding given additive noise finds the
        best approximation of the original image.
    wavelet_levels : int or None, optional
        The number of wavelet decomposition levels to use. The default is
        three less than the maximum number of possible decomposition levels.
    multichannel : bool, optional
        Apply wavelet denoising separately for each channel (where channels
        correspond to the final axis of the array).
    convert2ycbcr : bool, optional
        If True and multichannel True, do the wavelet denoising in the YCbCr
        colorspace instead of the RGB color space. This typically results in
        better performance for RGB images.
    method : {'BayesShrink', 'VisuShrink'}, optional
        Thresholding method to be used. The currently supported methods are
        "BayesShrink" [1]_ and "VisuShrink" [2]_. Defaults to "BayesShrink".

    Returns
    -------
    out : ndarray
        Denoised image.

    Notes
    -----
    The wavelet domain is a sparse representation of the image, and can be
    thought of similarly to the frequency domain of the Fourier transform.
    Sparse representations have most values zero or near-zero and truly random
    noise is (usually) represented by many small values in the wavelet domain.
    Setting all values below some threshold to 0 reduces the noise in the
    image, but larger thresholds also decrease the detail present in the
    image.

    If the input is 3D, this function performs wavelet denoising on each color
    plane separately. The output image is clipped to either [-1, 1] or
    [0, 1], depending on the input image range.

    When YCbCr conversion is done, every color channel is scaled between 0
    and 1, and `sigma` values are applied to these scaled color channels.

    Many wavelet coefficient thresholding approaches have been proposed. By
    default, ``denoise_wavelet`` applies BayesShrink, which is an adaptive
    thresholding method that computes separate thresholds for each wavelet
    sub-band as described in [1]_.

    If ``method == "VisuShrink"``, a single "universal threshold" is applied
    to all wavelet detail coefficients as described in [2]_. This threshold
    is designed to remove all Gaussian noise at a given ``sigma`` with high
    probability, but tends to produce images that appear overly smooth.

    References
    ----------
    .. [1] Chang, S. Grace, Bin Yu, and Martin Vetterli. "Adaptive wavelet
           thresholding for image denoising and compression." Image
           Processing, IEEE Transactions on 9.9 (2000): 1532-1546.
           DOI: 10.1109/83.862633
    .. [2] D. L. Donoho and I. M. Johnstone. "Ideal spatial adaptation
           by wavelet shrinkage." Biometrika 81.3 (1994): 425-455.
           DOI: 10.1093/biomet/81.3.425

    Examples
    --------
    >>> from skimage import color, data
    >>> img = img_as_float(data.astronaut())
    >>> img = color.rgb2gray(img)
    >>> img += 0.1 * np.random.randn(*img.shape)
    >>> img = np.clip(img, 0, 1)
    >>> denoised_img = denoise_wavelet(img, sigma=0.1)
    """
    if method not in ["BayesShrink", "VisuShrink"]:
        raise ValueError(
            ('Invalid method: {}. The currently supported methods are '
             '"BayesShrink" and "VisuShrink"').format(method))

    image = img_as_float(image)

    if multichannel:
        if isinstance(sigma, numbers.Number) or sigma is None:
            sigma = [sigma] * image.shape[-1]

    if multichannel:
        if convert2ycbcr:
            out = color.rgb2ycbcr(image)
            for i in range(3):
                # renormalizing this color channel to live in [0, 1]
                channel_min = out[..., i].min()
                channel_max = out[..., i].max()
                channel = out[..., i] - channel_min
                channel /= channel_max - channel_min
                out[..., i] = denoise_wavelet(channel, wavelet=wavelet,
                                              method=method, sigma=sigma[i],
                                              mode=mode,
                                              wavelet_levels=wavelet_levels)

                out[..., i] = out[..., i] * (channel_max - channel_min)
                out[..., i] += channel_min
            out = color.ycbcr2rgb(out)
        else:
            out = np.empty_like(image)
            for c in range(image.shape[-1]):
                out[..., c] = _wavelet_threshold(image[..., c],
                                                 wavelet=wavelet,
                                                 method=method,
                                                 sigma=sigma[c], mode=mode,
                                                 wavelet_levels=wavelet_levels)
    else:
        out = _wavelet_threshold(image, wavelet=wavelet, method=method,
                                 sigma=sigma, mode=mode,
                                 wavelet_levels=wavelet_levels)

    clip_range = (-1, 1) if image.min() < 0 else (0, 1)
    return np.clip(out, *clip_range)


def estimate_sigma(image, average_sigmas=False, multichannel=False):
    """
    Robust wavelet-based estimator of the (Gaussian) noise standard deviation.

    Parameters
    ----------
    image : ndarray
        Image for which to estimate the noise standard deviation.
    average_sigmas : bool, optional
        If true, average the channel estimates of `sigma`. Otherwise return
        a list of sigmas corresponding to each channel.
    multichannel : bool
        Estimate sigma separately for each channel.

    Returns
    -------
    sigma : float or list
        Estimated noise standard deviation(s). If `multichannel` is True and
        `average_sigmas` is False, a separate noise estimate for each channel
        is returned. Otherwise, the average of the individual channel
        estimates is returned.

    Notes
    -----
    This function assumes the noise follows a Gaussian distribution. The
    estimation algorithm is based on the median absolute deviation of the
    wavelet detail coefficients as described in section 4.2 of [1]_.

    References
    ----------
    .. [1] D. L. Donoho and I. M. Johnstone. "Ideal spatial adaptation
       by wavelet shrinkage." Biometrika 81.3 (1994): 425-455.
       DOI:10.1093/biomet/81.3.425

    Examples
    --------
    >>> import skimage.data
    >>> from skimage import img_as_float
    >>> img = img_as_float(skimage.data.camera())
    >>> sigma = 0.1
    >>> img = img + sigma * np.random.standard_normal(img.shape)
    >>> sigma_hat = estimate_sigma(img, multichannel=False)
    """
    if multichannel:
        nchannels = image.shape[-1]
        sigmas = [estimate_sigma(
            image[..., c], multichannel=False) for c in range(nchannels)]
        if average_sigmas:
            sigmas = np.mean(sigmas)
        return sigmas
    elif image.shape[-1] <= 4:
        msg = ("image is size {0} on the last axis, but multichannel is "
               "False. If this is a color image, please set multichannel "
               "to True for proper noise estimation.")
        warn(msg.format(image.shape[-1]))
    coeffs = pywt.dwtn(image, wavelet='db2')
    detail_coeffs = coeffs['d' * image.ndim]
    return _sigma_est_dwt(detail_coeffs, distribution='Gaussian')
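
# ----------------------------------------------------------------------------
# Example (illustrative sketch, not part of the original module): BayesShrink
# vs. VisuShrink on a standard test image. Because this module uses relative
# imports, the snippet is written against the public skimage API and is meant
# to be run as a separate script.
#
#     import numpy as np
#     from skimage import data, img_as_float
#     from skimage.restoration import denoise_wavelet, estimate_sigma
#
#     img = img_as_float(data.camera())
#     rng = np.random.RandomState(0)
#     noisy = np.clip(img + 0.1 * rng.standard_normal(img.shape), 0, 1)
#
#     sigma_hat = estimate_sigma(noisy, multichannel=False)  # should be close to 0.1
#     bayes = denoise_wavelet(noisy, method='BayesShrink')   # adaptive per-subband thresholds
#     visu = denoise_wavelet(noisy, sigma=sigma_hat, method='VisuShrink')  # one universal threshold
#     print(((bayes - img) ** 2).mean(), ((visu - img) ** 2).mean())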
py
1a4a86e091f4c889efa4f3e1720e30c7808628a4
from recsys.preprocess import *
from sklearn import model_selection
import numpy as np
import pandas as pd
from recsys.utility import *

RANDOM_STATE = 42
np.random.seed(RANDOM_STATE)

train = get_train()
target_playlist = get_target_playlists()
target_tracks = get_target_tracks()

# Uncomment if you want to test
# train, test, target_playlist, target_tracks = train_test_split(train, test_size=0.20)

most_popular = get_most_popular_tracks(train)

tracks_in_playlist = get_playlist_track_list2(train)
tracks_to_suggest = most_popular.index.values

predictions = pd.DataFrame(target_playlist)
predictions.index = target_playlist['playlist_id']
predictions['track_ids'] = [np.array([]) for i in range(len(predictions))]

for it, row in target_playlist.iterrows():
    count = 0
    i = 0
    pred = []
    # Suggest the 5 most popular tracks that are not already in the playlist.
    while count < 5:
        if tracks_to_suggest[i] not in tracks_in_playlist.loc[row['playlist_id']]['track_ids']:
            # IMPORTANT: should we check if the track to suggest is in target_tracks?
            pred.append(tracks_to_suggest[i])
            count += 1
        i += 1
    predictions.at[row['playlist_id'], 'track_ids'] = np.array(pred)

# To evaluate, just use:
# evaluate(recommendations=predictions, test=test)

# Make the dataframe friendly for output -> convert np.array to string
predictions['track_ids'] = predictions['track_ids'].apply(lambda x: ' '.join(map(str, x)))

predictions.to_csv('results.csv', index=False)
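
# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the original script): a simple precision@5
# check in case the project's `evaluate` helper is unavailable. It assumes
# `test` is a DataFrame with 'playlist_id' and 'track_id' columns and that
# track ids are integers; the real recsys.utility API may differ.
#
# def precision_at_5(predictions, test):
#     held_out = test.groupby('playlist_id')['track_id'].apply(set)
#     scores = []
#     for playlist_id, row in predictions.iterrows():
#         if playlist_id not in held_out.index:
#             continue
#         suggested = [int(t) for t in row['track_ids'].split()]
#         scores.append(len(set(suggested) & held_out[playlist_id]) / 5.0)
#     return sum(scores) / max(len(scores), 1)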
py
1a4a873c67d37d949adb61c38ccf802c3e202634
""" Engines API Allow clients to fetch Analytics through APIs. # noqa: E501 The version of the OpenAPI document: v3:[pa,spar,vault,pub,quant,fi,axp,afi,npo,bpm,fpo,others],v1:[fiab] Contact: [email protected] Generated by: https://openapi-generator.tech """ import re # noqa: F401 import sys # noqa: F401 from fds.analyticsapi.engines.model_utils import ( # noqa: F401 ApiTypeError, ModelComposed, ModelNormal, ModelSimple, cached_property, change_keys_js_to_python, convert_js_args_to_python_args, date, datetime, file_type, none_type, validate_get_composed_info, ) class QuantFormula(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. Attributes: allowed_values (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict with a capitalized key describing the allowed value and an allowed value. These dicts store the allowed enum values. attribute_map (dict): The key is attribute name and the value is json key in definition. discriminator_value_class_map (dict): A dict to go from the discriminator variable value to the discriminator class name. validations (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict that stores validations for max_length, min_length, max_items, min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, inclusive_minimum, and regex. additional_properties_type (tuple): A tuple of classes accepted as additional properties values. """ allowed_values = { ('source',): { 'SCREENINGEXPRESSION': "ScreeningExpression", 'FQLEXPRESSION': "FqlExpression", 'UNIVERSALSCREENPARAMETER': "UniversalScreenParameter", 'ALLUNIVERSALSCREENPARAMETERS': "AllUniversalScreenParameters", }, } validations = { } additional_properties_type = None _nullable = False @cached_property def openapi_types(): """ This must be a method because a model may have properties that are of type self, this must run after the class is loaded Returns openapi_types (dict): The key is attribute name and the value is attribute type. """ return { 'source': (str,), # noqa: E501 } @cached_property def discriminator(): return None attribute_map = { 'source': 'source', # noqa: E501 } _composed_schemas = {} required_properties = set([ '_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', ]) @convert_js_args_to_python_args def __init__(self, source, *args, **kwargs): # noqa: E501 """QuantFormula - a model defined in OpenAPI Args: source (str): Keyword Args: _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. Defaults to True _path_to_item (tuple/list): This is a list of keys or values to drill down to the model in received_data when deserializing a response _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _configuration (Configuration): the instance to use when deserializing a file_type parameter. If passed, type conversion is attempted If omitted no type conversion is done. _visited_composed_classes (tuple): This stores a tuple of classes that we have traveled through so that if we see that class again we will not use its discriminator again. 
                                When traveling through a discriminator, the
                                composed schema that is traveled through is
                                added to this set. For example if Animal has a
                                discriminator petType and we pass in "Dog", and
                                the class Dog allOf includes Animal, we move
                                through Animal once using the discriminator,
                                and pick Dog. Then in Dog, we will make an
                                instance of the Animal class but this time we
                                won't travel through its discriminator because
                                we passed in _visited_composed_classes
                                = (Animal,)
        """

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        self.source = source
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
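
# ----------------------------------------------------------------------------
# Example usage (illustrative sketch, not part of the generated file):
# constructing the model with one of its allowed `source` values. With the
# default `_check_type=True`, a value outside the `allowed_values` enum above
# is rejected at assignment time.
if __name__ == "__main__":
    formula = QuantFormula(source="FqlExpression")
    print(formula.source)  # -> "FqlExpression"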
py
1a4a8752d798311d6e71b6ef2672502a18119691
# 2021/5/12
# sou meirin
# Done

import os

import cv2


def video2frame(src, tgt):
    # Make sure the output directory exists; cv2.imwrite fails silently otherwise.
    os.makedirs(tgt, exist_ok=True)
    cap = cv2.VideoCapture(src)
    count = 0
    while True:
        ret, frame = cap.read()
        if ret:
            cv2.imwrite(os.path.join(tgt, '%06d.jpg' % count), frame)
            count += 1
        else:
            break
    cap.release()


if __name__ == '__main__':
    video2frame('/Users/songminglun/Documents/ILCS/murase/murase.mp4',
                '/Users/songminglun/Documents/ILCS/murase/murase')
    print('Done')
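
# ----------------------------------------------------------------------------
# Illustrative variant (not part of the original script): keep only every
# n-th frame, which is often enough for annotation work and saves disk space.
# The function name and default stride are the editor's choice.
def video2frame_every_n(src, tgt, n=10):
    os.makedirs(tgt, exist_ok=True)
    cap = cv2.VideoCapture(src)
    count = 0
    saved = 0
    while True:
        ret, frame = cap.read()
        if not ret:
            break
        if count % n == 0:
            cv2.imwrite(os.path.join(tgt, '%06d.jpg' % saved), frame)
            saved += 1
        count += 1
    cap.release()
    return saved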
py
1a4a88209c5b56a9032620c0c98c37e5e81d87a9
""" Custom `Screen` class for the `pyte` library. Changes compared to the original `Screen` class: - We store the layout in a dict instead of a list, in order to have a scalable window. When the window size is reduced and increased again, the hidden text will appear again. - 256 colour support (xterm) - Per character diffs instead of per line diffs. """ from collections import defaultdict from pyte import charsets as cs from pyte import modes as mo from pyte.graphics import FG, BG from pyte.screens import Margins, Cursor, Char import pyte from .log import logger # Patch pyte.graphics to accept High intensity colours as well. FG.update({ 90: "hi_fg_1", 91: "hi_fg_2", 92: "hi_fg_3", 93: "hi_fg_4", 94: "hi_fg_5", 95: "hi_fg_6", 96: "hi_fg_7", 97: "hi_fg_8", 98: "hi_fg_9", 99: "hi_fg_10", }) BG.update({ 100: "hi_bg_1", 101: "hi_bg_2", 102: "hi_bg_3", 103: "hi_bg_4", 104: "hi_bg_5", 105: "hi_bg_6", 106: "hi_bg_7", 107: "hi_bg_8", 108: "hi_bg_9", 109: "hi_bg_10", }) class BetterScreen(pyte.Screen): swap_variables = [ 'mode', 'margins', 'charset', 'g0_charset', 'g1_charset', 'tabstops', 'cursor', 'line_offset', ] def __init__(self, lines, columns): self.lines = lines self.columns = columns self.reset() def __before__(self, command): return logger.info(' %r' % command) def reset(self): self.buffer = defaultdict(lambda: defaultdict(lambda: Char(data=' '))) self.mode = set([mo.DECAWM, mo.DECTCEM]) self.margins = Margins(0, self.lines - 1) self.line_offset = 0 # Index of the line that's currently displayed on top. # According to VT220 manual and ``linux/drivers/tty/vt.c`` # the default G0 charset is latin-1, but for reasons unknown # latin-1 breaks ascii-graphics; so G0 defaults to cp437. self.charset = 0 self.g0_charset = cs.IBMPC_MAP self.g1_charset = cs.VT100_MAP # From ``man terminfo`` -- "... hardware tabs are initially # set every `n` spaces when the terminal is powered up. Since # we aim to support VT102 / VT220 and linux -- we use n = 8. self.tabstops = set(range(7, self.columns, 8)) self.cursor = Cursor(0, 0) self.cursor_position() def dump_character_diff(self, previous_dump): """ Create a copy of the visible buffer. """ space = Char(data=' ') result = defaultdict(lambda: defaultdict(lambda: Char(data=' '))) offset = self.line_offset def chars_eq(c1, c2): return c1 == c2 #or (c1.data == ' ' and c2.data == ' ') # TODO: unless they have a background or underline, etc... for y in range(0, self.lines): if (y + offset) in self.buffer: line = self.buffer[y + offset] else: # Empty line line = defaultdict(lambda: Char(data=' ')) for x in range(0, self.columns): char = line.get(x, space) #if not previous_dump or previous_dump[y][x] != char: if not (previous_dump and chars_eq(previous_dump[y][x], char)): result[y][x] = char return result def resize(self, lines=None, columns=None): # don't do anything except saving the dimensions self.lines = lines if lines is not None else self.lines self.columns = columns if columns is not None else self.columns self._reset_offset_and_margins() def _reset_offset_and_margins(self): """ Recalculate offset and move cursor (make sure that the bottom is visible.) """ self.margins = Margins(0, self.lines - 1) if self.buffer: new_line_offset = max(0, max(self.buffer.keys()) - self.lines + 4) self.cursor.y += (self.line_offset - new_line_offset) self.line_offset = new_line_offset # TODO: maybe put this in a scroll_offset function. def set_mode(self, *modes, **kwargs): # Private mode codes are shifted, to be distingiushed from non # private ones. 
if kwargs.get("private"): modes = [mode << 5 for mode in modes] self.mode.update(modes) # When DECOLM mode is set, the screen is erased and the cursor # moves to the home position. if mo.DECCOLM in modes: self.resize(columns=132) self.erase_in_display(2) self.cursor_position() # According to `vttest`, DECOM should also home the cursor, see # vttest/main.c:303. if mo.DECOM in modes: self.cursor_position() # Mark all displayed characters as reverse. # TODO !! if mo.DECSCNM in modes: for line in self.buffer.values(): for pos, char in line.items(): line[pos] = char._replace(reverse=True) self.select_graphic_rendition(g._SGR["+reverse"]) # Make the cursor visible. if mo.DECTCEM in modes: self.cursor.hidden = False # On "\e[?1049h", enter alternate screen mode. Backup the current state, if (1049 << 5) in modes: self._original_screen = self.buffer self._original_screen_vars = \ { v:getattr(self, v) for v in self.swap_variables } self.reset() self._reset_offset_and_margins() def reset_mode(self, *modes, **kwargs): # Private mode codes are shifted, to be distingiushed from non # private ones. if kwargs.get("private"): modes = [mode << 5 for mode in modes] self.mode.difference_update(modes) # Lines below follow the logic in :meth:`set_mode`. if mo.DECCOLM in modes: self.resize(columns=80) self.erase_in_display(2) self.cursor_position() if mo.DECOM in modes: self.cursor_position() if mo.DECSCNM in modes: # TODO verify!! for line in self.buffer.values(): for pos, char in line.items(): line[pos] = char._replace(reverse=False) self.select_graphic_rendition(g._SGR["-reverse"]) # Hide the cursor. if mo.DECTCEM in modes: self.cursor.hidden = True # On "\e[?1049l", restore from alternate screen mode. if (1049 << 5) in modes and self._original_screen: for k, v in self._original_screen_vars.items(): setattr(self, k, v) self.buffer = self._original_screen self._original_screen = None self._original_screen_vars = {} self._reset_offset_and_margins() def draw(self, char): # Translating a given character. char = char.translate([self.g0_charset, self.g1_charset][self.charset]) # If this was the last column in a line and auto wrap mode is # enabled, move the cursor to the beginning of the next line, # otherwise replace characters already displayed with newly # entered. if self.cursor.x == self.columns: if mo.DECAWM in self.mode: self.carriage_return() self.linefeed() else: self.cursor.x -= 1 # If Insert mode is set, new characters move old characters to # the right, otherwise terminal is in Replace mode and new # characters replace old characters at cursor position. if mo.IRM in self.mode: self.insert_characters(1) self._set_char(self.cursor.x, self.cursor.y, self.cursor.attrs._replace(data=char)) # .. note:: We can't use :meth:`cursor_forward()`, because that # way, we'll never know when to linefeed. self.cursor.x += 1 def _set_char(self, x, y, char): self.buffer[y + self.line_offset][x] = char def index(self): """Move the cursor down one line in the same column. If the cursor is at the last line, create a new line at the bottom. """ top, bottom = self.margins # When scrolling over the full screen -> keep history. if top == 0 and bottom == self.lines - 1: if self.cursor.y == self.lines - 1: self.line_offset += 1 else: self.cursor_down() else: if self.cursor.y == bottom: for line in range(top, bottom): self.buffer[line] = self.buffer[line+1] del self.buffer[line+1] else: self.cursor_down() def reverse_index(self): # XXX: Used when going multiline with bash. (only second part tested.) 
        top, bottom = self.margins

        # When scrolling over the full screen -> keep history.
        if self.cursor.y == top:
            for line in range(bottom, top, -1):
                self.buffer[line] = self.buffer[line-1]
                del self.buffer[line-1]
        else:
            self.cursor_up()

    def insert_lines(self, count=None):
        """Inserts the indicated # of lines at line with cursor. Lines
        displayed **at** and below the cursor move down. Lines moved
        past the bottom margin are lost.

        :param count: number of lines to insert.
        """
        count = count or 1
        top, bottom = self.margins

        # If cursor is outside scrolling margins -- do nothin'.
        if top <= self.cursor.y <= bottom:
            # if (bottom + self.line_offset) in self.buffer:
            #     del self.buffer[bottom + self.line_offset]

            for line in range(bottom, self.cursor.y + count - 1, -1):
                self.buffer[line + self.line_offset] = self.buffer[line + self.line_offset - count]
                del self.buffer[line + self.line_offset - count]

            self.carriage_return()

    def delete_lines(self, count=None):
        """Deletes the indicated # of lines, starting at line with
        cursor. As lines are deleted, lines displayed below cursor
        move up. Lines added to bottom of screen have spaces with same
        character attributes as last line moved up.

        :param int count: number of lines to delete.
        """
        count = count or 1
        top, bottom = self.margins

        # If cursor is outside scrolling margins -- do nothin'.
        if top <= self.cursor.y <= bottom:
            for line in range(self.cursor.y, bottom - count, -1):
                self.buffer[line + self.line_offset] = self.buffer[line + self.line_offset + count]
                del self.buffer[line + self.line_offset + count]

    def insert_characters(self, count=None):
        # XXX: used by pressing space in bash vi mode
        """Inserts the indicated # of blank characters at the cursor
        position. The cursor does not move and remains at the beginning
        of the inserted blank characters. Data on the line is shifted
        forward.

        :param int count: number of characters to insert.
        """
        count = count or 1

        line = self.buffer[self.cursor.y + self.line_offset]
        max_columns = max(line.keys())

        for i in range(max_columns, self.cursor.x, -1):
            line[i + count] = line[i]
            del line[i]

    def delete_characters(self, count=None):
        # XXX: used by pressing 'x' on bash vi mode
        count = count or 1

        line = self.buffer[self.cursor.y + self.line_offset]
        max_columns = max(line.keys())

        for i in range(self.cursor.x, max_columns):
            line[i] = line[i + count]
            del line[i + count]

    def erase_characters(self, count=None):
        raise NotImplementedError('erase_characters not implemented')  # TODO

    def erase_in_line(self, type_of=0, private=False):
        """Erases a line in a specific way.

        :param int type_of: defines the way the line should be erased in:

            * ``0`` -- Erases from cursor to end of line, including cursor
              position.
            * ``1`` -- Erases from beginning of line to cursor,
              including cursor position.
            * ``2`` -- Erases complete line.
        :param bool private: when ``True`` character attributes are left
                             unchanged **not implemented**.
        """
        def should_we_delete(column):
            # TODO: check for off-by-one errors!
            if type_of == 0:
                return column >= self.cursor.x
            if type_of == 1:
                return column <= self.cursor.x
            if type_of == 2:
                return True

        line = self.buffer[self.cursor.y + self.line_offset]
        for column in list(line.keys()):
            if should_we_delete(column):
                del line[column]

    def erase_in_display(self, type_of=0, private=False):
        """Erases display in a specific way.

        :param int type_of: defines the way the line should be erased in:

            * ``0`` -- Erases from cursor to end of screen, including
              cursor position.
            * ``1`` -- Erases from beginning of screen to cursor,
              including cursor position.
            * ``2`` -- Erases complete display. All lines are erased
              and changed to single-width. Cursor does not move.
        :param bool private: when ``True`` character attributes are left
                             unchanged **not implemented**.
        """
        interval = (
            # a) erase from cursor to the end of the display, including
            # the cursor,
            range(self.cursor.y + 1, self.lines),
            # b) erase from the beginning of the display to the cursor,
            # including it,
            range(0, self.cursor.y),
            # c) erase the whole display.
            range(0, self.lines)
        )[type_of]

        for line in interval:
            # TODO: from where the -1 in the index below??
            self.buffer[line + self.line_offset] = defaultdict(lambda: Char(data=' '))

        # In case of 0 or 1 we have to erase the line with the cursor.
        if type_of in [0, 1]:
            self.erase_in_line(type_of)

    def alignment_display(self):
        for y in range(0, self.lines):
            line = self.buffer[y + self.line_offset]
            for x in range(0, self.columns):
                line[x] = Char('E')

    def select_graphic_rendition(self, *attrs):
        """ Support 256 colours """
        g = pyte.graphics

        replace = {}

        if not attrs:
            attrs = [0]
        else:
            attrs = list(attrs[::-1])

        while attrs:
            attr = attrs.pop()

            if attr in g.FG:
                replace["fg"] = g.FG[attr]
            elif attr in g.BG:
                replace["bg"] = g.BG[attr]
            elif attr in g.TEXT:
                attr = g.TEXT[attr]
                replace[attr[1:]] = attr.startswith("+")
            elif not attr:
                replace = self.default_char._asdict()
            elif attr in (38, 48):
                n = attrs.pop()
                if n != 5:
                    continue

                if attr == 38:
                    m = attrs.pop()
                    replace["fg"] = 1024 + m
                elif attr == 48:
                    m = attrs.pop()
                    replace["bg"] = 1024 + m

        self.cursor.attrs = self.cursor.attrs._replace(**replace)

        # See tmux/input.c, line: 1388
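
# ----------------------------------------------------------------------------
# Example (illustrative sketch, not part of the original module): driving
# BetterScreen from a pyte stream and rendering the visible buffer. Because
# this module does a relative import (``from .log import logger``) it only
# loads from its package context, so the snippet is left as a comment; note
# also that Stream construction differs between pyte versions
# (``pyte.Stream(screen)`` vs. ``stream.attach(screen)``).
#
#     screen = BetterScreen(lines=5, columns=20)
#     stream = pyte.Stream(screen)
#     stream.feed("hello\r\n\x1b[31mworld\x1b[0m")
#
#     dump = screen.dump_character_diff(None)  # no previous state: full dump
#     for y in range(screen.lines):
#         print("".join(dump[y][x].data for x in range(screen.columns)))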
py
1a4a8825ee7eca577dae79aefb54e0752d7e2f98
import torch
from torch import nn
from torch.nn import functional as F

from models import infogan


class Encoder(nn.Module):
    def __init__(self, latent_dim: int):
        super().__init__()
        self.h1_nchan = 64
        self.conv1 = nn.Sequential(
            nn.Conv2d(1, self.h1_nchan, kernel_size=4, stride=2, padding=1),
            nn.LeakyReLU(.1, inplace=True)
        )
        self.h2_nchan = 128
        self.conv2 = nn.Sequential(
            nn.Conv2d(self.h1_nchan, self.h2_nchan, kernel_size=4, stride=2, padding=1),
            nn.BatchNorm2d(self.h2_nchan),
            nn.LeakyReLU(.1, inplace=True)
        )
        self.h3_dim = 1024
        self.fc1 = nn.Sequential(
            nn.Linear(7 * 7 * self.h2_nchan, self.h3_dim),
            nn.BatchNorm1d(self.h3_dim),
            nn.LeakyReLU(.1, inplace=True)
        )
        self.fc2_mean = nn.Linear(self.h3_dim, latent_dim)
        self.fc2_logvar = nn.Linear(self.h3_dim, latent_dim)

    def forward(self, x):
        x = self.conv1(x)
        x = self.conv2(x).view(-1, 7 * 7 * self.h2_nchan)
        x = self.fc1(x)
        mean = self.fc2_mean(x)
        logvar = self.fc2_logvar(x)
        return mean, logvar


Decoder = infogan.Generator


def sample_noise(num, dim, device=None) -> torch.Tensor:
    return torch.randn(num, dim, device=device)


class VAE(nn.Module):
    def __init__(self, latent_dim: int):
        super().__init__()
        self.latent_dim = latent_dim
        self.enc = Encoder(self.latent_dim)
        self.dec = Decoder(self.latent_dim)
        self.apply(_weights_init)

    def sample_latent(self, num: int):
        return sample_noise(num, self.latent_dim, self.device)

    def sample_posterior(self, data, num: int = 1):
        noise = torch.randn(data.shape[0], num, self.latent_dim, device=self.device)
        mean, logvar = self.enc(data)
        # Reparameterization trick: one latent sample per noise draw.
        latent = mean.unsqueeze(1) + (.5 * logvar).exp().unsqueeze(1) * noise
        return latent

    def forward(self, data):
        noise = self.sample_latent(data.shape[0])
        mean, logvar = self.enc(data)
        latent = mean + (.5 * logvar).exp() * noise
        recon = self.dec(latent)
        return mean, logvar, latent, recon

    @property
    def device(self):
        return next(self.parameters()).device


def _weights_init(m):
    classname = m.__class__.__name__
    if 'Conv' in classname:
        nn.init.xavier_uniform_(m.weight.data)
        if m.bias is not None:
            nn.init.constant_(m.bias.data, 0.)
    elif 'BatchNorm' in classname:
        nn.init.normal_(m.weight.data, 1.0, 0.02)
        nn.init.constant_(m.bias.data, 0.)


class Trainer(nn.Module):
    def __init__(self, model: VAE, beta: float = 1., lr: float = 1e-3):
        super().__init__()
        self.model = model
        self.beta = beta
        params = list(self.model.enc.parameters()) + list(self.model.dec.parameters())
        self.opt = torch.optim.Adam(params, lr=lr, betas=(.5, .99))

    def step(self, real_data, verbose: bool = False):
        mean, logvar, latent, fake_data = self.model(real_data)
        rec_loss = F.binary_cross_entropy(fake_data, (real_data > .5).float(),
                                          reduction='sum')
        # rec_loss = F.binary_cross_entropy(fake_data, real_data, reduction='sum')
        kl_div = -.5 * (1. + logvar - mean ** 2 - logvar.exp()).sum()
        self.opt.zero_grad()
        (rec_loss + self.beta * kl_div).backward()
        self.opt.step()
        if verbose:
            print(f"rec_loss = {rec_loss.item():6g}, KL_div = {kl_div.item():6g}")

    def forward(self, real_data, verbose: bool = False):
        self.step(real_data, verbose)
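
# ----------------------------------------------------------------------------
# Example usage (illustrative sketch, not part of the original module): a
# smoke-test training loop on random MNIST-shaped batches. It assumes
# models.infogan.Generator maps (N, latent_dim) noise to sigmoid outputs of
# shape (N, 1, 28, 28), which the BCE reconstruction loss requires; real use
# would substitute an actual DataLoader over binarized images.
if __name__ == "__main__":
    model = VAE(latent_dim=16)
    trainer = Trainer(model, beta=1., lr=1e-3)
    for _ in range(3):
        fake_batch = torch.rand(32, 1, 28, 28)  # stand-in for MNIST images in [0, 1]
        trainer.step(fake_batch, verbose=True)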
py
1a4a887e46334474ef61a9242b6b793ed3c5a4f0
import gym __all__ = ['SkipWrapper'] def SkipWrapper(repeat_count): class SkipWrapper(gym.Wrapper): """ Generic common frame skipping wrapper Will perform action for `x` additional steps """ def __init__(self, env): super(SkipWrapper, self).__init__(env) self.repeat_count = repeat_count self.stepcount = 0 def _step(self, action): done = False total_reward = 0 current_step = 0 while current_step < (self.repeat_count + 1) and not done: self.stepcount += 1 obs, reward, done, info = self.env.step(action) total_reward += reward current_step += 1 if 'skip.stepcount' in info: raise gym.error.Error('Key "skip.stepcount" already in info. Make sure you are not stacking ' \ 'the SkipWrapper wrappers.') info['skip.stepcount'] = self.stepcount return obs, total_reward, done, info def _reset(self, **kwargs): self.stepcount = 0 return self.env.reset(**kwargs) return SkipWrapper
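
# ----------------------------------------------------------------------------
# Example usage (illustrative sketch, not part of the original module): each
# sampled action is repeated for 3 additional frames. The `_step`/`_reset`
# overrides above follow the old gym API (roughly gym <= 0.9.x), so this
# sketch assumes such a version and the classic 4-tuple `step` return.
if __name__ == "__main__":
    env = SkipWrapper(repeat_count=3)(gym.make("CartPole-v0"))
    obs = env.reset()
    done = False
    info = {}
    while not done:
        obs, total_reward, done, info = env.step(env.action_space.sample())
    print("underlying steps taken:", info.get("skip.stepcount"))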
py
1a4a88a36164768da53a791735fa75c4108a63a1
# @staticmethod # def default_agent_config(): # agent_config = AgentConfig() # # Sensors # # Sensors define the observation space # # Params for all sensors: # # - tick_every_frame # # - tick_every_n_frames # # - tick_every_x_seconds # agent_config.add_sensor( # "Movement", # { # "location": "absolute", # "velocity": "absolute" # } # ) # # Capture the scene as an image itslef # # agent_config.add_sensor( # # "Camera", # # { # # "width": # # "height": # # "camera_index": # # "capture_source": # # } # # ) # # this is needed if you want your input to work # # Actuator mock the player's input # # they define the action space # # see void ``U4MLActuator_InputKey::Act(const float DeltaTime)``
py
1a4a88a3ebdf920ab256ec8230944b459ebf98c3
#!/usr/bin/env python # coding: utf-8 from __future__ import print_function from __future__ import division from __future__ import absolute_import from __future__ import unicode_literals # Command line : # python -m benchmark.VAR.GG.NN import os import logging from config import SEED from config import _ERROR from config import _TRUTH import numpy as np import pandas as pd from visual.misc import set_plot_config set_plot_config() from utils.log import set_logger from utils.log import flush from utils.log import print_line from utils.model import get_model from utils.model import get_optimizer from utils.model import train_or_load_classifier from utils.evaluation import evaluate_summary_computer from utils.images import gather_images from visual.misc import plot_params from problem.gamma_gauss import GGConfig as Config from problem.gamma_gauss import Generator from problem.gamma_gauss import param_generator from problem.gamma_gauss import GGNLL as NLLComputer from model.neural_network import NeuralNetClassifier from archi.classic import L4 as ARCHI from ...my_argparser import NET_parse_args DATA_NAME = 'GG' BENCHMARK_NAME = 'VAR-'+DATA_NAME N_ITER = 30 def build_model(args, i_cv): args.net = ARCHI(n_in=1, n_out=2, n_unit=args.n_unit) args.optimizer = get_optimizer(args) model = get_model(args, NeuralNetClassifier) model.set_info(DATA_NAME, BENCHMARK_NAME, i_cv) return model # ===================================================================== # MAIN # ===================================================================== def main(): # BASIC SETUP logger = set_logger() args = NET_parse_args(main_description="Training launcher for INFERNO on GG benchmark") logger.info(args) flush(logger) # INFO model = build_model(args, -1) os.makedirs(model.results_directory, exist_ok=True) # RUN logger.info(f'Running runs [{args.start_cv},{args.end_cv}[') results = [run(args, i_cv) for i_cv in range(args.start_cv, args.end_cv)] results = pd.concat(results, ignore_index=True) # EVALUATION results.to_csv(os.path.join(model.results_directory, 'threshold.csv')) print(results) print("DONE !") def run(args, i_cv): logger = logging.getLogger() print_line() logger.info('Running iter n°{}'.format(i_cv)) print_line() # LOAD/GENERATE DATA logger.info('Set up data generator') config = Config() seed = SEED + i_cv * 5 train_generator = Generator(seed) valid_generator = Generator(seed+1) test_generator = Generator(seed+2) # SET MODEL logger.info('Set up classifier') model = build_model(args, i_cv) os.makedirs(model.results_path, exist_ok=True) flush(logger) # TRAINING / LOADING train_or_load_classifier(model, train_generator, config.CALIBRATED, config.N_TRAINING_SAMPLES, retrain=args.retrain) # MEASUREMENT result_row = {'i_cv': i_cv} results = [] for test_config in config.iter_test_config(): logger.info(f"Running test set : {test_config.TRUE}, {test_config.N_TESTING_SAMPLES} samples") for threshold in np.linspace(0, 1, 500): result_row = {'i_cv': i_cv} result_row['threshold'] = threshold result_row.update(test_config.TRUE.to_dict(prefix='true_')) result_row['n_test_samples'] = test_config.N_TESTING_SAMPLES X, y, w = valid_generator.generate(*config.TRUE, n_samples=config.N_VALIDATION_SAMPLES) proba = model.predict_proba(X) decision = proba[:, 1] selected = decision > threshold beta = np.sum(y[selected] == 0) gamma = np.sum(y[selected] == 1) result_row['beta'] = beta result_row['gamma'] = gamma X, y, w = test_generator.generate(*config.TRUE, n_samples=config.N_VALIDATION_SAMPLES) proba = model.predict_proba(X) decision = 
proba[:, 1]
            selected = decision > threshold
            n_selected = np.sum(selected)
            n_selected_bkg = np.sum(y[selected] == 0)
            n_selected_sig = np.sum(y[selected] == 1)
            result_row['n'] = n_selected
            result_row['b'] = n_selected_bkg
            result_row['s'] = n_selected_sig
            result_row['s_sqrt_n'] = n_selected_sig / np.sqrt(n_selected)
            # s/sqrt(b) divides by the selected background count
            result_row['s_sqrt_b'] = n_selected_sig / np.sqrt(n_selected_bkg)
            results.append(result_row.copy())

    results = pd.DataFrame(results)
    print(results)
    return results


if __name__ == '__main__':
    main()
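# Minimal post-processing sketch, not part of the original benchmark script:
# picks an operating threshold from the saved scan. The column names
# ('threshold', 's_sqrt_b') come from run() above; the csv path is an
# assumption and should point at model.results_directory in practice.
def select_best_threshold(csv_path='threshold.csv'):
    """Return the scan row maximising the s/sqrt(b) proxy (hypothetical helper)."""
    df = pd.read_csv(csv_path)  # csv_path is assumed, adjust to the results dir
    return df.loc[df['s_sqrt_b'].idxmax()]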
py
1a4a88c5c06bba42e40ac297401ca08b86401416
# -*- coding: utf-8 -*- # Generated by Django 1.10.5 on 2018-03-09 16:04 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [("notifications", "0002_add_notifications")] operations = [ migrations.AlterField( model_name="emailnotification", name="state", field=models.CharField( choices=[ ("pending", "Pending"), ("sending", "Sending"), ("sent", "Sent"), ("canceled", "Canceled"), ], default="pending", max_length=10, ), ) ]
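# Usage sketch (assumption: Django assigned this file a 0003_* name, since it
# depends on 0002_add_notifications). It would be applied with:
#   python manage.py migrate notifications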
py
1a4a897d292ce57efe81f5e63fe8947da1ecaa09
import argparse

from tdw.controller import Controller
from tdw.remote_build_launcher import RemoteBuildLauncher


class MinimalRemote(Controller):
    """
    A minimal example of how to use the launch binaries daemon to
    start and connect to a build on a remote node.
    Note: the remote must be running binary_manager.py.
    """

    def __init__(self):
        args = self.parse_args()
        build_info = RemoteBuildLauncher.launch_build(args.listening_port,
                                                      args.build_address,
                                                      args.controller_address)
        super().__init__(port=build_info["build_port"])

    def parse_args(self):
        """
        Helper function that parses command line arguments.
        Returns parsed args.
        """
        parser = argparse.ArgumentParser()
        parser.add_argument(
            "--listening_port",
            default="5556",
            type=str,
            help="Port on which binary_manager is listening",
        )
        parser.add_argument(
            "--build_address",
            default="node14-ccncluster.stanford.edu",
            type=str,
            help="IP/hostname on which to launch build",
        )
        parser.add_argument(
            "--controller_address",
            default="node05-ccncluster.stanford.edu",
            type=str,
            help="Address of controller",
        )
        args = parser.parse_args()
        return args

    def run(self):
        # Create an empty environment.
        self.communicate({"$type": "create_empty_environment"})
        for i in range(100):
            # Do nothing. Receive a response from the build.
            resp = self.communicate([])
            print(resp)
        self.communicate({"$type": "terminate"})


if __name__ == "__main__":
    MinimalRemote().run()
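# Usage sketch, not part of the original file. The module filename is
# hypothetical; the flags and defaults come from parse_args() above, and
# binary_manager.py must already be running on the remote node:
#   python minimal_remote.py --listening_port 5556 \
#       --build_address node14-ccncluster.stanford.edu \
#       --controller_address node05-ccncluster.stanford.edu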
py
1a4a8d1fb4217c2e3e8b119c43eb66dcd770105d
# Copyright (C) 2019 Google Inc. # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> """Test bulk issuetracker synchronization.""" # pylint: disable=too-many-lines,invalid-name import unittest from collections import OrderedDict import ddt import mock from flask import g from ggrc import settings from ggrc import db from ggrc import views from ggrc.notifications import data_handlers from ggrc.integrations import integrations_errors from ggrc.integrations import issuetracker_bulk_sync from ggrc.integrations import constants from ggrc.integrations.synchronization_jobs import sync_utils from ggrc.models import all_models, inflector from ggrc.models.hooks.issue_tracker import issue_tracker_params_builder from integration.ggrc import TestCase, generator from integration.ggrc.api_helper import Api from integration.ggrc.models import factories class TestBulkIssuesSync(TestCase): """Base class for bulk issuetracker synchronization tests.""" def setUp(self): """Set up for test methods.""" super(TestBulkIssuesSync, self).setUp() self.api = Api() self.gen = generator.ObjectGenerator() self.role_people = { "Audit Captains": factories.PersonFactory(email="[email protected]"), "Creators": factories.PersonFactory(email="[email protected]"), "Assignees": factories.PersonFactory(email="[email protected]"), "Verifiers": factories.PersonFactory(email="[email protected]"), } self.issue_id = "42" def setup_assessments(self, asmnt_count, issue_id=None, enabled=True): """Create Audit with couple of Assessments and linked IssueTrackerIssues. Args: asmnt_count: Count of Assessments in Audit. Returns: Tuple with Audit id and list of Assessment ids. """ with factories.single_commit(): audit = factories.AuditFactory() audit.add_person_with_role_name( self.role_people["Audit Captains"], "Audit Captains", ) factories.IssueTrackerIssueFactory( enabled=enabled, issue_tracked_obj=audit, issue_id=issue_id, issue_type=constants.DEFAULT_ISSUETRACKER_VALUES['issue_type'], component_id=12345, hotlist_id=12345, issue_priority="P2", issue_severity="S2", ) assessment_ids = [] for _ in range(asmnt_count): asmnt = factories.AssessmentFactory(audit=audit) factories.RelationshipFactory(source=audit, destination=asmnt) for role_name in ["Creators", "Assignees", "Verifiers"]: asmnt.add_person_with_role_name( self.role_people[role_name], role_name, ) factories.IssueTrackerIssueFactory( enabled=enabled, issue_tracked_obj=asmnt, issue_id=issue_id, title=None, ) assessment_ids.append(asmnt.id) return audit.id, assessment_ids @staticmethod def setup_issues(issue_count, issue_id=None, enabled=True): """Create issues with enabled integration.""" with factories.single_commit(): issue_ids = [] for _ in range(issue_count): issue = factories.IssueFactory() factories.IssueTrackerIssueFactory( enabled=enabled, issue_tracked_obj=issue, issue_id=issue_id, title=None, ) issue_ids.append(issue.id) return issue_ids def issuetracker_sync_mock(self, sync_func_name): """IssueTracker sync method mock.""" return mock.patch.object( sync_utils, sync_func_name, return_value={"issueId": self.issue_id} ) def generate_children_issues_for(self, parent_type, parent_id, child_type): """Generate IssueTracker issue for objects with provided type and ids. Args: obj_type: Type of objects. Now only 'Assessment' supported. obj_ids: List with ids of objects. Returns: Response with result of issues generation. 
""" with self.issuetracker_sync_mock("create_issue"): return self.api.send_request( self.api.client.post, api_link="/generate_children_issues", data={ "parent": {"type": parent_type, "id": parent_id}, "child_type": child_type, } ) def generate_issues_for(self, object_info): """Generate IssueTracker issues for provided objects.""" with self.issuetracker_sync_mock("create_issue"): return self.api.send_request( self.api.client.post, api_link="/generate_issues", data={ "objects": [{ "type": type_, "id": id_, "hotlist_ids": hotlist_id, "component_id": component_id, } for type_, id_, hotlist_id, component_id in object_info], } ) def update_issues_for(self, object_info): """Update IssueTracker issues for provided objects.""" with self.issuetracker_sync_mock("update_issue"): return self.api.send_request( self.api.client.post, api_link="/update_issues", data={ "objects": [{ "type": type_, "id": id_, "hotlist_ids": hotlist_id, "component_id": component_id, } for type_, id_, hotlist_id, component_id in object_info], } ) def assert_obj_issues(self, issuetracker_info, assignee=None): """Check correctness of created IssueTracker issues.""" for type_, id_, hotlist_id, component_id in issuetracker_info: obj = inflector.get_model(type_).query.get(id_) issue = obj.issuetracker_issue self.assertEqual(issue.enabled, 1) self.assertEqual(issue.title, obj.title) self.assertEqual(issue.component_id, component_id) self.assertEqual(issue.hotlist_id, hotlist_id) self.assertEqual( issue.issue_type, constants.DEFAULT_ISSUETRACKER_VALUES['issue_type'] ) self.assertEqual(issue.issue_priority, "P2") self.assertEqual(issue.issue_severity, "S2") self.assertEqual(issue.assignee, assignee) self.assertEqual(issue.cc_list, "") self.assertEqual(issue.issue_id, self.issue_id) self.assertEqual( issue.issue_url, "http://issue/{}".format(self.issue_id) ) def assert_children_asmnt_issues(self, asmnt_ids): """Check if Assessments IssueTracker issues inherit data from Audit.""" assessments = all_models.Assessment.query.filter( all_models.Assessment.id.in_(asmnt_ids) ) for asmnt in assessments: issue = asmnt.issuetracker_issue parent_issue = asmnt.audit.issuetracker_issue self.assertEqual(issue.enabled, 1) self.assertEqual(issue.title, asmnt.title) self.assertEqual(issue.component_id, parent_issue.component_id) self.assertEqual(issue.hotlist_id, parent_issue.hotlist_id) self.assertEqual(issue.issue_type, parent_issue.issue_type) self.assertEqual(issue.issue_priority, parent_issue.issue_priority) self.assertEqual(issue.issue_severity, parent_issue.issue_severity) self.assertEqual(issue.assignee, "[email protected]") self.assertEqual(issue.cc_list, "") self.assertEqual(issue.issue_id, self.issue_id) self.assertEqual( issue.issue_url, "http://issue/{}".format(self.issue_id) ) def assert_not_updated(self, object_type, object_ids): """Check if IssueTracker issues have empty fields. Args: object_type: Type of objects which issues should be checked. object_ids: List with ids for objects which issues should be checked. Raise: AssertionError if relevant Issues have non-empty base fields. 
""" issues = all_models.IssuetrackerIssue.query.filter( all_models.IssuetrackerIssue.object_type == object_type, all_models.IssuetrackerIssue.object_id.in_(object_ids), ) for issue in issues: self.assertEqual(issue.issue_id, None) self.assertEqual(issue.assignee, None) self.assertEqual(issue.cc_list, "") self.assertEqual(issue.title, None) @ddt.ddt class TestBulkIssuesGenerate(TestBulkIssuesSync): """Test bulk issues generation.""" @ddt.data("Assessment", "Issue") def test_integration_disabled_on_bulk_create_error(self, model): """Test if {} integration was disabled if bulk creation failed""" user = all_models.Person.query.first() with factories.single_commit(): obj = factories.get_model_factory(model)( modified_by=user ) iti = factories.IssueTrackerIssueFactory( issue_tracked_obj=obj, enabled=True, issue_id=None, ) bulk_creator = issuetracker_bulk_sync.IssueTrackerBulkCreator() objects = [issuetracker_bulk_sync.IssuetrackedObjInfo(obj)] with mock.patch.object(bulk_creator, "sync_issue") as sync_mock: sync_mock.side_effect = integrations_errors.HttpError("error") bulk_creator.handle_issuetracker_sync(objects) sync_mock.assert_called_once() self.assertFalse(iti.enabled) def test_get_objects_method_assmt(self): """Test get_issuetracked_objects() for not linked assessments.""" _, assessment_ids_enabled = self.setup_assessments(3) _, assessment_ids_disabled = self.setup_assessments(2, enabled=False) assessment_ids = assessment_ids_enabled + assessment_ids_disabled creator = issuetracker_bulk_sync.IssueTrackerBulkCreator result = creator.get_issuetracked_objects("Assessment", assessment_ids) result_ids = [assmt.id for assmt in result] self.assertEqual(set(assessment_ids_enabled), set(result_ids)) def test_get_objects_method_issue(self): """Test get_issuetracked_objects() for not linked issues.""" issue_ids_enabled = self.setup_issues(3) issue_ids_disabled = self.setup_issues(2, enabled=False) issue_ids = issue_ids_enabled + issue_ids_disabled creator = issuetracker_bulk_sync.IssueTrackerBulkCreator result = creator.get_issuetracked_objects("Issue", issue_ids) result_ids = [issue.id for issue in result] self.assertEqual(set(issue_ids_enabled), set(result_ids)) def test_issue_generate_call(self): """Test generate_issue call creates task for bulk generate.""" user = all_models.Person.query.filter_by(email="[email protected]").one() setattr(g, '_current_user', user) data = { "revision_ids": [1, 2, 3], } result = views.background_update_issues(data) self.assert200(result) bg_task = all_models.BackgroundTask.query.one() self.assertEqual(bg_task.status, "Success") def test_asmnt_bulk_generate(self): """Test bulk generation of issues for Assessments.""" _, assessment_ids = self.setup_assessments(3) asmnt_issuetracker_info = [ ("Assessment", id_, "123", "321") for id_ in assessment_ids ] response = self.generate_issues_for(asmnt_issuetracker_info) self.assert200(response) self.assertEqual(response.json.get("errors"), []) self.assert_obj_issues(asmnt_issuetracker_info, "[email protected]") @unittest.skip("Not implemented.") def test_permission_check(self): """Test generation if user has rights on part of objects.""" _, assessment_ids = self.setup_assessments(3) with_rights_ids = assessment_ids[:2] without_rights_ids = assessment_ids[2:] _, assignee_user = self.gen.generate_person(user_role="Creator") with factories.single_commit(): for id_ in with_rights_ids: assessment = all_models.Assessment.query.get(id_) assessment.add_person_with_role_name(assignee_user, "Creators") self.api.set_user(assignee_user) 
asmnt_issuetracker_info = [ ("Assessment", id_, "123", "321") for id_ in assessment_ids ] response = self.generate_issues_for(asmnt_issuetracker_info) self.assert200(response) forbidden_err = "403 Forbidden: You don't have the permission to access " \ "the requested resource. It is either read-protected or " \ "not readable by the server." expected_errors = [ ["Assessment", id_, forbidden_err] for id_ in without_rights_ids ] self.assertEqual(response.json.get("errors"), expected_errors) with_rights_info = [ ("Assessment", id_, "123", "321") for id_ in with_rights_ids ] self.assert_obj_issues(with_rights_info, "[email protected]") self.assert_not_updated("Assessment", without_rights_ids) def test_issue_bulk_generate(self): """Test bulk generation of issuetracker issues for Issue.""" issue_ids = [] with factories.single_commit(): person = factories.PersonFactory() person_email = person.email for _ in range(3): issue = factories.IssueFactory(modified_by=person) for role_name in ["Admin", "Primary Contacts"]: issue.add_person_with_role_name(person, role_name) factories.IssueTrackerIssueFactory( enabled=True, issue_tracked_obj=issue, issue_id=None, title='', component_id=12345, hotlist_id=54321, issue_priority="P2", issue_severity="S2", ) issue_ids.append(issue.id) issue_issuetracker_info = [ ("Issue", id_, None, None) for id_ in issue_ids ] response = self.generate_issues_for(issue_issuetracker_info) self.assert200(response) self.assertEqual(response.json.get("errors"), []) issues = all_models.IssuetrackerIssue.query.filter( all_models.IssuetrackerIssue.object_type == "Issue", all_models.IssuetrackerIssue.object_id.in_(issue_ids) ).all() for issue in issues: parent_obj = issue.Issue_issue_tracked self.assertEqual(issue.enabled, 1) self.assertEqual(issue.title, parent_obj.title) self.assertEqual(issue.component_id, "12345") self.assertEqual(issue.hotlist_id, "54321") self.assertEqual(issue.issue_priority, "P2") self.assertEqual(issue.issue_severity, "S2") self.assertEqual(issue.assignee, person_email) self.assertEqual(issue.cc_list, "") self.assertEqual(issue.issue_id, self.issue_id) self.assertEqual( issue.issue_url, "http://issue/{}".format(self.issue_id) ) def test_rate_limited_generate(self): """Test tickets generation when issuetracker raise 429 error.""" _, assessment_ids = self.setup_assessments(3) error = integrations_errors.HttpError(data="Test Error", status=429) with mock.patch( "ggrc.integrations.issues.Client.create_issue", side_effect=error ) as create_issue_mock: with mock.patch("time.sleep"): response = self.api.send_request( self.api.client.post, api_link="/generate_issues", data={ "objects": [{ "type": "Assessment", "id": id_ } for id_ in assessment_ids], } ) self.assert200(response) expected_errors = [ ["Assessment", id_, "429 Test Error"] for id_ in assessment_ids ] self.assertEqual(response.json.get("errors"), expected_errors) # 3 times for each assessment self.assertEqual(create_issue_mock.call_count, 9) def test_exception_notification(self): """Test notification about failed bulk update.""" filename = "test.csv" updater = issuetracker_bulk_sync.IssueTrackerBulkUpdater() with mock.patch("ggrc.notifications.common.send_email") as send_mock: updater.send_notification(filename, "[email protected]", failed=True) self.assertEqual(send_mock.call_count, 1) (email, title, body), _ = send_mock.call_args_list[0] self.assertEqual(title, updater.ISSUETRACKER_SYNC_TITLE) self.assertEqual(email, "[email protected]") self.assertIn(updater.ERROR_TITLE.format(filename=filename), body) 
self.assertIn(updater.EXCEPTION_TEXT, body) def test_succeeded_notification(self): """Test notification about succeeded bulk generation.""" creator = issuetracker_bulk_sync.IssueTrackerBulkCreator() filename = "test_file.csv" recipient = "[email protected]" with mock.patch("ggrc.notifications.common.send_email") as send_mock: creator.send_notification(filename, recipient) self.assertEqual(send_mock.call_count, 1) (email, title, body), _ = send_mock.call_args_list[0] self.assertEqual(title, creator.ISSUETRACKER_SYNC_TITLE) self.assertEqual(email, recipient) self.assertIn(creator.SUCCESS_TITLE.format(filename=filename), body) self.assertIn(creator.SUCCESS_TEXT, body) def test_error_notification(self): """Test notification about bulk generation with errors""" creator = issuetracker_bulk_sync.IssueTrackerBulkCreator() filename = "test_file.csv" recipient = "[email protected]" assmt = factories.AssessmentFactory() with mock.patch("ggrc.notifications.common.send_email") as send_mock: creator.send_notification(filename, recipient, errors=[(assmt, "")]) self.assertEqual(send_mock.call_count, 1) (email, title, body), _ = send_mock.call_args_list[0] self.assertEqual(title, creator.ISSUETRACKER_SYNC_TITLE) self.assertEqual(email, recipient) self.assertIn(creator.ERROR_TITLE.format(filename=filename), body) self.assertIn(assmt.slug, body) self.assertIn(assmt.title, body) self.assertIn(data_handlers.get_object_url(assmt), body) @ddt.ddt class TestBulkIssuesChildGenerate(TestBulkIssuesSync): """Test bulk issues generation for child objects.""" def test_get_objects_method_assmt(self): """Test get_issuetracked_objects() for linked assessments.""" _, assessment_ids_enabled = self.setup_assessments(3, issue_id=123) _, assessment_ids_disabled = self.setup_assessments(2, issue_id=123, enabled=False) assessment_ids = assessment_ids_enabled + assessment_ids_disabled updater = issuetracker_bulk_sync.IssueTrackerBulkUpdater result = updater.get_issuetracked_objects("Assessment", assessment_ids) result_ids = [assmt.id for assmt in result] self.assertEqual(set(assessment_ids_enabled), set(result_ids)) def test_get_objects_method_issue(self): """Test get_issuetracked_objects() for linked issues.""" issue_ids_enabled = self.setup_issues(3, issue_id=123) issue_ids_disabled = self.setup_issues(2, issue_id=123, enabled=False) issue_ids = issue_ids_enabled + issue_ids_disabled updater = issuetracker_bulk_sync.IssueTrackerBulkUpdater result = updater.get_issuetracked_objects("Issue", issue_ids) result_ids = [issue.id for issue in result] self.assertEqual(set(issue_ids_enabled), set(result_ids)) def test_asmnt_bulk_child_generate(self): """Test generation of issues for all Assessments in Audit.""" audit_id, assessment_ids = self.setup_assessments(3) with mock.patch("ggrc.notifications.common.send_email"): response = self.generate_children_issues_for( "Audit", audit_id, "Assessment" ) self.assert200(response) self.assertEqual(response.json.get("errors"), []) self.assert_children_asmnt_issues(assessment_ids) def test_norights(self): """Test generation if user doesn't have rights on Audit and Assessment.""" audit_id, assessment_ids = self.setup_assessments(3) _, side_user = self.gen.generate_person(user_role="Creator") self.api.set_user(side_user) response = self.generate_children_issues_for( "Audit", audit_id, "Assessment" ) self.assert200(response) self.assert_not_updated("Assessment", assessment_ids) def test_partially_rights(self): """Test generation if user has rights on part of Assessments.""" audit_id, assessment_ids = 
self.setup_assessments(3) changed_asmnt_id = assessment_ids[0] norights_asmnt_ids = assessment_ids[1:] _, assignee_user = self.gen.generate_person(user_role="Creator") audit_role = factories.AccessControlRoleFactory( name="Edit Role", object_type="Audit", update=True ) with factories.single_commit(): assessment = all_models.Assessment.query.get(changed_asmnt_id) assessment.add_person_with_role_name(assignee_user, "Creators") acl = factories.AccessControlListFactory( object_id=audit_id, object_type="Audit", ac_role_id=audit_role.id, ) factories.AccessControlPersonFactory( person=assignee_user, ac_list=acl, ) self.api.set_user(assignee_user) response = self.generate_children_issues_for( "Audit", audit_id, "Assessment" ) self.assert200(response) self.assert_children_asmnt_issues([changed_asmnt_id]) self.assert_not_updated("Assessment", norights_asmnt_ids) @ddt.data( issuetracker_bulk_sync.WRONG_COMPONENT_ERR, issuetracker_bulk_sync.WRONG_HOTLIST_ERR, ) def test_invalid_component_id(self, error): """Test generation of issues if '{}' error raised.""" audit_id, assessment_ids = self.setup_assessments(3) error = error.format("12345") with mock.patch("ggrc.notifications.common.send_email"): with mock.patch( "ggrc.integrations.issues.Client.create_issue", side_effect=integrations_errors.HttpError(error) ) as create_issue_mock: response = self.api.send_request( self.api.client.post, api_link="/generate_children_issues", data={ "parent": {"type": "Audit", "id": audit_id}, "child_type": "Assessment" } ) self.assert200(response) self.assertEqual( response.json.get("errors"), [["Assessment", assessment_ids[0], "500 {}".format(error)]] ) self.assertEqual(create_issue_mock.call_count, 1) query = all_models.IssuetrackerIssue.query.filter( all_models.IssuetrackerIssue.issue_id.isnot(None) ) self.assertEqual(query.count(), 0) def test_related_assessments(self): """Assessment with empty issuetracker_issue should be synced""" with factories.single_commit(): audit = factories.AuditFactory() factories.IssueTrackerIssueFactory( issue_tracked_obj=audit, issue_id=None, component_id=12345, hotlist_id=54321, issue_priority="P2", issue_severity="S2", ) assess1 = factories.AssessmentFactory(audit=audit) assess1_id = assess1.id assess2 = factories.AssessmentFactory(audit=audit) assess2_id = assess2.id factories.IssueTrackerIssueFactory( issue_tracked_obj=assess2, issue_id=None, component_id=9999, hotlist_id=7777, issue_priority="P1", issue_severity="S1", ) self.assertIsNone(assess1.issuetracker_issue) with mock.patch("ggrc.notifications.common.send_email"): response = self.generate_children_issues_for( audit.type, audit.id, assess1.type ) self.assert200(response) self.assertEqual(response.json.get("errors"), []) assess1 = all_models.Assessment.query.get(assess1_id) self.assertIsNotNone( assess1.issuetracker_issue, "issuetracker_issue was not created for assessment {}".format( assess1.id ) ) self.assertEqual("12345", assess1.issuetracker_issue.component_id) self.assertEqual("54321", assess1.issuetracker_issue.hotlist_id) self.assertEqual("P2", assess1.issuetracker_issue.issue_priority) self.assertEqual("S2", assess1.issuetracker_issue.issue_severity) assess2 = all_models.Assessment.query.get(assess2_id) self.assertEqual("9999", assess2.issuetracker_issue.component_id) self.assertEqual("7777", assess2.issuetracker_issue.hotlist_id) self.assertEqual("P1", assess2.issuetracker_issue.issue_priority) self.assertEqual("S1", assess2.issuetracker_issue.issue_severity) def test_bg_operation_status(self): """Test background 
operation status endpoint.""" audit_id, _ = self.setup_assessments(3) response = self.generate_children_issues_for( "Audit", audit_id, "Assessment" ) self.assert200(response) url = "background_task_status/{}/{}".format("audit", audit_id) response = self.api.client.get(url) self.assert200(response) self.assertEqual(response.json.get("status"), "Success") self.assertEqual( response.json.get("operation"), "generate_children_issues" ) self.assertEqual(response.json.get("errors"), []) def test_task_already_run_status(self): """Test if new task started when another is in progress.""" audit_id, _ = self.setup_assessments(1) response = self.generate_children_issues_for( "Audit", audit_id, "Assessment" ) self.assert200(response) db.session.query(all_models.BackgroundTask).update({"status": "Running"}) db.session.commit() with factories.single_commit(): asmnt = factories.AssessmentFactory(audit_id=audit_id) audit = all_models.Audit.query.get(audit_id) factories.RelationshipFactory(source=audit, destination=asmnt) factories.IssueTrackerIssueFactory( issue_tracked_obj=asmnt, issue_id=None, title=None, ) response = self.generate_children_issues_for( "Audit", audit_id, "Assessment" ) self.assert400(response) self.assertEqual( response.json["message"], "Task 'generate_children_issues' already run for Audit {}.".format( audit_id ) ) url = "background_task_status/{}/{}".format("audit", audit_id) response = self.api.client.get(url) self.assert200(response) self.assertEqual(response.json.get("status"), "Running") self.assertEqual( response.json.get("operation"), "generate_children_issues" ) self.assertEqual(response.json.get("errors"), []) def test_task_failed_status(self): """Test background task status if it failed.""" audit_id, _ = self.setup_assessments(2) with mock.patch( "ggrc.integrations.issuetracker_bulk_sync." 
"IssueTrackerBulkChildCreator.sync_issuetracker", side_effect=Exception("Test Error") ): response = self.generate_children_issues_for( "Audit", audit_id, "Assessment" ) self.assert200(response) url = "background_task_status/{}/{}".format("audit", audit_id) response = self.api.client.get(url) self.assert200(response) self.assertEqual(response.json.get("status"), "Failure") self.assertEqual( response.json.get("operation"), "generate_children_issues" ) self.assertEqual(response.json.get("errors"), []) def test_errors_task_status(self): """Test background task status if it failed.""" audit_id, assessment_ids = self.setup_assessments(2) with mock.patch( "ggrc.integrations.issues.Client.create_issue", side_effect=integrations_errors.HttpError("Test Error") ): response = self.api.send_request( self.api.client.post, api_link="/generate_children_issues", data={ "parent": {"type": "Audit", "id": audit_id}, "child_type": "Assessment" } ) self.assert200(response) url = "background_task_status/{}/{}".format("audit", audit_id) response = self.api.client.get(url) self.assert200(response) self.assertEqual(response.json.get("status"), "Success") self.assertEqual( response.json.get("errors"), [["Assessment", id_, "500 Test Error"] for id_ in assessment_ids] ) def test_child_err_notification(self): """Test notification about failed bulk child generation.""" audit_id, _ = self.setup_assessments(3) _, side_user = self.gen.generate_person(user_role="Creator") self.api.set_user(side_user) with mock.patch("ggrc.notifications.common.send_email") as send_mock: response = self.generate_children_issues_for( "Audit", audit_id, "Assessment" ) self.assert200(response) self.assertEqual(send_mock.call_count, 1) (email, title, body), _ = send_mock.call_args_list[0] cur_user = all_models.Person.query.get(side_user.id) child_creator = issuetracker_bulk_sync.IssueTrackerBulkChildCreator self.assertEqual(email, cur_user.email) self.assertEqual(title, child_creator.ISSUETRACKER_SYNC_TITLE) self.assertIn("There were some errors in generating tickets", body) def test_child_notification(self): """Test notification about succeeded bulk child generation.""" audit_id, _ = self.setup_assessments(3) with mock.patch("ggrc.notifications.common.send_email") as send_mock: response = self.generate_children_issues_for( "Audit", audit_id, "Assessment" ) self.assert200(response) self.assertEqual(send_mock.call_count, 1) (email, title, body), _ = send_mock.call_args_list[0] child_creator = issuetracker_bulk_sync.IssueTrackerBulkChildCreator self.assertEqual(email, "[email protected]") self.assertEqual(title, child_creator.ISSUETRACKER_SYNC_TITLE) title = all_models.Audit.query.get(audit_id).title self.assertIn( "Tickets generation for audit \"{}\" was completed".format(title), body ) def test_proper_revisions_creation(self): """Test all revisions are created for new IssuetrackerIssues""" with factories.single_commit(): asmnt = factories.AssessmentFactory() factories.IssueTrackerIssueFactory(issue_tracked_obj=asmnt.audit) response = self.generate_children_issues_for( "Audit", asmnt.audit.id, "Assessment" ) self.assert200(response) revisions = db.session.query( all_models.Revision.action, all_models.IssuetrackerIssue.object_type, all_models.IssuetrackerIssue.object_id ).join( all_models.IssuetrackerIssue, all_models.Revision.resource_id == all_models.IssuetrackerIssue.id ).filter( all_models.Revision.resource_type == 'IssuetrackerIssue', all_models.IssuetrackerIssue.object_id.in_( (asmnt.id, asmnt.audit.id) ) ).all() expected_revisions = { 
(u'created', u'Assessment', asmnt.id), (u'modified', u'Assessment', asmnt.id), (u'created', u'Audit', asmnt.audit.id) } self.assertEquals(set(revisions), expected_revisions) @ddt.ddt class TestBulkIssuesUpdate(TestBulkIssuesSync): """Test bulk issues update.""" def test_asmnt_bulk_update(self): """Test bulk update of issues for Assessments.""" _, assessment_ids = self.setup_assessments(3) issues = all_models.IssuetrackerIssue.query.filter( all_models.IssuetrackerIssue.object_type == "Assessment", all_models.IssuetrackerIssue.object_id.in_(assessment_ids) ) for issue in issues: issue.enabled = 1 issue.title = "" issue.component_id = "1" issue.hotlist_id = "1" issue.issue_type = constants.DEFAULT_ISSUETRACKER_VALUES['issue_type'] issue.issue_priority = "P2" issue.issue_severity = "S2" issue.assignee = "[email protected]" issue.cc_list = "" issue.issue_id = 123 issue.issue_url = "http://issue/{}".format(self.issue_id) db.session.commit() asmnt_issuetracker_info = [ ("Assessment", id_, "123", "321") for id_ in assessment_ids ] response = self.update_issues_for(asmnt_issuetracker_info) self.assert200(response) self.assertEqual(response.json.get("errors"), []) self.assert_obj_issues(asmnt_issuetracker_info) def test_issue_bulk_generate(self): """Test bulk update of issues for Issues.""" issue_ids = [] with factories.single_commit(): for _ in range(3): issue = factories.IssueFactory() factories.IssueTrackerIssueFactory( enabled=True, issue_tracked_obj=issue, issue_id=self.issue_id, title="", component_id=12345, hotlist_id=54321, issue_priority="P2", issue_severity="S2", ) issue_ids.append(issue.id) with factories.single_commit(): person = factories.PersonFactory() for issue in all_models.Issue.query.all(): issue.modified_by = person for role_name in ["Admin", "Primary Contacts"]: issue.add_person_with_role_name(person, role_name) # Verify that IssueTracker issues hasn't updated data issues = all_models.IssuetrackerIssue.query.filter( all_models.IssuetrackerIssue.object_type == "Issue", all_models.IssuetrackerIssue.object_id.in_(issue_ids) ).all() for issue in issues: parent_obj = issue.Issue_issue_tracked self.assertNotEqual(issue.title, parent_obj.title) self.assertEqual(issue.assignee, None) issue_issuetracker_info = [ ("Issue", id_, None, None) for id_ in issue_ids ] response = self.update_issues_for(issue_issuetracker_info) self.assert200(response) self.assertEqual(response.json.get("errors"), []) # IssueTracker issues should be updated with proper values issues = all_models.IssuetrackerIssue.query.filter( all_models.IssuetrackerIssue.object_type == "Issue", all_models.IssuetrackerIssue.object_id.in_(issue_ids) ).all() for issue in issues: parent_obj = issue.Issue_issue_tracked self.assertEqual(issue.title, parent_obj.title) self.assertEqual(issue.cc_list, "") def test_rate_limited_update(self): """Test tickets update when issuetracker raise 429 error.""" _, assessment_ids = self.setup_assessments(3) for issue in all_models.IssuetrackerIssue.query.all(): issue.issue_id = self.issue_id db.session.commit() error = integrations_errors.HttpError(data="Test Error", status=429) with mock.patch( "ggrc.integrations.issues.Client.update_issue", side_effect=error ) as update_issue_mock: with mock.patch("time.sleep"): response = self.api.send_request( self.api.client.post, api_link="/update_issues", data={ "objects": [{ "type": "Assessment", "id": id_ } for id_ in assessment_ids], } ) self.assert200(response) expected_errors = [ ["Assessment", id_, "429 Test Error"] for id_ in assessment_ids ] 
self.assertEqual(response.json.get("errors"), expected_errors) # 3 times for each assessment self.assertEqual(update_issue_mock.call_count, 9) @ddt.data("Issue", "Assessment") def test_get_issue_json(self, model): """Test get_issue_json method issue's update""" with factories.single_commit(): factory = factories.get_model_factory(model) obj = factory() factories.IssueTrackerIssueFactory( enabled=True, issue_tracked_obj=obj, title='title', component_id=111, hotlist_id=222, issue_type="PROCESS", issue_priority="P2", issue_severity="S2", ) expected_result = { 'component_id': 111, 'severity': u'S2', 'title': u'title', 'hotlist_ids': [222], 'priority': u'P2', 'type': u'PROCESS' } updater = issuetracker_bulk_sync.IssueTrackerBulkUpdater() # pylint: disable=protected-access result = updater._get_issue_json(obj) self.assertEqual(expected_result, result) @ddt.ddt class TestBulkCommentUpdate(TestBulkIssuesSync): """Test adding comments to IssueTracker issues via bulk.""" @ddt.data( ("Issue", ["c1", "c2", "c3"]), ("Assessment", ["c1", "c2", "c3"]), ) @ddt.unpack @mock.patch("ggrc.integrations.issues.Client.update_issue") def test_comment_bulk_update(self, model, comments, update_mock): """Test bulk comment's update requests are sent correctly""" with factories.single_commit(): factory = factories.get_model_factory(model) obj = factory() factories.IssueTrackerIssueFactory( enabled=True, issue_tracked_obj=obj, issue_id=123, ) request_data = { "comments": [ {"type": obj.type, "id": obj.id, "comment_description": comment} for comment in comments ], "mail_data": {"user_email": "[email protected]"}, } updater = issuetracker_bulk_sync.IssueTrackerCommentUpdater() result = updater.sync_issuetracker(request_data) builder = issue_tracker_params_builder.IssueParamsBuilder template = builder.COMMENT_TMPL url_builder = builder.get_ggrc_object_url self.assert200(result) # pylint: disable=consider-using-enumerate for i in range(len(comments)): self.assertEqual(update_mock.call_args_list[i][0][0], 123) self.assertEqual( update_mock.call_args_list[i][0][1]["comment"], template.format(author="[email protected]", model=model, comment=comments[i], link=url_builder(obj)) ) @ddt.data("Issue", "Assessment") @mock.patch.object(settings, "ISSUE_TRACKER_ENABLED", True) def test_comment_update_call(self, model): """Test bulk update calls appropriate methods""" with factories.single_commit(): factory = factories.get_model_factory(model) obj = factory() factories.IssueTrackerIssueFactory( enabled=True, issue_tracked_obj=obj, issue_id=123, ) comments = "c1;;c2;;c3" with mock.patch.object(issuetracker_bulk_sync.IssueTrackerCommentUpdater, "sync_issuetracker", return_value=([], [])) as comment_mock: with mock.patch.object(issuetracker_bulk_sync.IssueTrackerBulkCreator, "sync_issuetracker", return_value=([], [])) as create_mock: with mock.patch.object(issuetracker_bulk_sync.IssueTrackerBulkCreator, "sync_issuetracker", return_value=([], [])) as upd_mock: response = self.import_data(OrderedDict([ ("object_type", model), ("Code*", obj.slug), ("Comments", comments), ])) expected_comments = [ {'comment_description': comment, 'type': model, 'id': obj.id} for comment in comments.split(";;") ] self._check_csv_response(response, {}) self.assertEqual(comment_mock.call_args[0][0]["comments"], expected_comments) upd_mock.assert_called_once() create_mock.assert_not_called()
py
1a4a8d9d0bbaa544f6e15c7ecb571b69fdaeb027
import time from collections import OrderedDict import json import os import re import praw from dotenv import load_dotenv, find_dotenv import requests load_dotenv(find_dotenv()) client_id=os.environ['REDDIT_CLIENT_ID'] client_secret=os.environ['REDDIT_CLIENT_SECRET'] password=os.environ['REDDIT_PASSWORD'] username=os.environ['REDDIT_USERNAME'] subreddit=os.environ['SUBREDDIT'] user_agent='user-agent for /u/PhDComicsBot' if __name__ == "__main__": reddit = praw.Reddit(client_id=client_id, client_secret=client_secret, password=password, username=username, user_agent=user_agent) while True: print("Collecting all comics") r = requests.get("http://phdcomics.com/comics/archive_list.php") # Save for debugging if something goes wrong with open('index.html', 'w') as outfile: outfile.write(r.text) comic_dict = OrderedDict() # We have no idea! regex = r'href\=.*?comicid=(\d+)>\s*<b>(.*?)</b>.*?<font.*?>(.*?)</font>' BASE_URL = 'http://www.phdcomics.com/comics/archive.php?comicid=' # Some have newlines so use re.S to enable dot to match multilines items = re.findall(regex, r.text, re.S) for comic_id, date, title in items: comic_url = BASE_URL + comic_id comic_dict[comic_id] = {'id': comic_id, 'link': comic_url, 'date': date, 'title': title} print("Saving it to data.json") with open('data.json', 'w') as outfile: json.dump(comic_dict, outfile) last_comic_id = 0 try: with open('last_comic.txt', 'r') as infile: last_comic_id = int(infile.read()) except ValueError: print("File is empty. Something wrong happened. Better exit the program") exit(1) except FileNotFoundError: print("File not found so this must be the first run") for comic_id in comic_dict: if int(comic_id) <= last_comic_id: continue date = comic_dict[comic_id]['date'] title = comic_dict[comic_id]['title'] title = "{0} ({1})".format(title, date) comic_url = BASE_URL + comic_id print("Submitting {0} with title '{1}'".format(comic_url, title)) reddit.subreddit(subreddit).submit(title, url=comic_url) print("Saving the latest comic id : {}".format(comic_id)) with open('last_comic.txt', 'w') as outfile: outfile.write(comic_id) break time.sleep(24 * 60 * 60) # Sleep for a day
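# Illustration, not part of the original bot, of what the archive regex
# captures. The sample HTML line is fabricated; the real markup on
# phdcomics.com may differ slightly.
def _demo_archive_regex():
    sample = ('href=archive.php?comicid=1995> <b>3/1/2018</b>'
              '<font color=black>Draft</font>')
    pattern = r'href\=.*?comicid=(\d+)>\s*<b>(.*?)</b>.*?<font.*?>(.*?)</font>'
    return re.findall(pattern, sample, re.S)
    # -> [('1995', '3/1/2018', 'Draft')], i.e. (comic_id, date, title)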
py
1a4a8de513d9d732c042ecc8d481d985dd6bd83e
from flask import Flask, request import requests import geopy import re # import geopy.distance from geopy.geocoders import Nominatim import json from datetime import datetime import constants from twilio.twiml.messaging_response import MessagingResponse # Create Flask app instance app = Flask(__name__) # Create geolocator object as an instance of geopy's Nominatim class geolocator = Nominatim(user_agent="covid-bot", timeout=5) # Base API URL base_url = 'https://cdn-api.co-vin.in/api' # The root endpoint @app.route("/") def hello(): return "Hello, World!" # The /bot webhook endpoint @app.route('/bot', methods=['POST']) def bot(): # Get the incoming message request data incoming_values = request.values print("Incoming Values:\n", incoming_values) # Get Geolocation sent by user latitude = incoming_values.get('Latitude', '') longitude = incoming_values.get('Longitude', '') # Geopy geolocator API expects coordinates as a single comma separated string of latitude and longitude geo_coordinates_string = ", ".join((latitude, longitude)) # Get the incoming message from incoming_values incoming_msg = incoming_values.get('Body', '').lower() if incoming_msg in constants.greeting_tokens: # Return greeting message return as_twilio_response(constants.welcome_message) if 'help' in incoming_msg: # Return help message return as_twilio_response(constants.help_message) if latitude: geo_location_dict = get_reverse_geocode(geo_coordinates_string) date_now = datetime.today().strftime('%d-%m-%Y') # Get the location wise response location_response = get_location_response(geo_location_dict, date_now) return as_twilio_response(location_response) m = re.match(r"^\d+$", incoming_msg) if m: date_now = datetime.today().strftime('%d-%m-%Y') return as_twilio_response(get_location_response_by_pincode(m.string, date_now)) return as_twilio_response('Could not understand your message. Please type "help".') # Helper functions def as_twilio_response(message: str) -> str: resp = MessagingResponse() msg = resp.message() msg.body(message) return str(resp) def get_response(url): response = requests.get(url, headers={'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:88.0) Gecko/20100101 Firefox/88.0'}) return response.json() # Get the address dict def get_reverse_geocode(coordinates): location = geolocator.reverse(coordinates, exactly_one=True) address_dict = location.raw['address'] print("Address Dict:", address_dict) return address_dict def get_location_response_by_pincode(pincode, date_now): appointment_api_by_pin = base_url + '/v2/appointment/sessions/public/findByPin?pincode={pincode}&date={date_now}'.format(pincode=pincode, date_now=date_now) appointment_data = get_response(appointment_api_by_pin) appointment_response = f''' ''' sessions = appointment_data.get("sessions", []) if sessions: for idx, each in enumerate(sessions): serial_number = idx + 1 name = each.get("name", "") address = each.get("address", "") district = each.get("district_name", "") from_time = each.get("from", "") to_time = each.get("to", "") fee_type = each.get("fee_type", "") fee = each.get("fee", 0) available_capacity = each.get("available_capacity", 0) min_age_limit = each.get("min_age_limit", 18) vaccine = each.get("vaccine", "") each_response = f''' {serial_number}. {name} {address}, {district} Vaccine: {vaccine}, {fee_type} Available: {available_capacity} ''' appointment_response += each_response else: appointment_response = "0" location_message = f''' Your location pincode is {pincode}. 
Available vaccine slots today: {appointment_response} Visit www.cowin.gov.in to book your vaccination ''' return location_message def get_location_response(geo_location_dict, date_now): pincode = geo_location_dict.get('postcode', '') return get_location_response_by_pincode(pincode, date_now) if __name__ == '__main__': app.run()
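# Quick local test, not part of the original app, simulating Twilio's
# form-encoded POST to the /bot webhook. The field name 'Body' matches what
# bot() reads above; the URL assumes Flask's default dev server on port 5000.
def _demo_webhook_call():
    payload = {'Body': '110001'}  # a pincode query, per the regex branch
    resp = requests.post('http://localhost:5000/bot', data=payload)
    return resp.text  # TwiML XML containing the availability message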
py
1a4a8e3ed5c366bbcf79dec7e09d5c9245ebfb14
from ReinLife.Helpers.trainer import trainer from ReinLife.Helpers.tester import tester
py
1a4a8f43352e4657ad470239c9d4a273c0e99de8
import unittest from fds.analyticsapi.engines.api.linked_pa_templates_api import LinkedPATemplatesApi from fds.analyticsapi.engines.model.linked_pa_template_parameters_root import LinkedPATemplateParametersRoot from fds.analyticsapi.engines.model.linked_pa_template_parameters import LinkedPATemplateParameters from fds.analyticsapi.engines.model.template_content_types import TemplateContentTypes from fds.analyticsapi.engines.model.linked_pa_template_summary import LinkedPATemplateSummary from fds.analyticsapi.engines.model.linked_pa_template_update_parameters import LinkedPATemplateUpdateParameters from fds.analyticsapi.engines.model.linked_pa_template_update_parameters_root import LinkedPATemplateUpdateParametersRoot from fds.analyticsapi.engines.model.linked_pa_template_root import LinkedPATemplateRoot from fds.analyticsapi.engines.model.linked_pa_template import LinkedPATemplate from common_functions import CommonFunctions class TestLinkedPaTemplatesApi(unittest.TestCase): def setUp(self): self.linked_pa_templates_api = LinkedPATemplatesApi(CommonFunctions.build_api_client()) def test_a_create_linked_pa_template(self): linked_pa_template_parameters = LinkedPATemplateParameters( directory="Personal:SDKTests/DoNotModify/LinkedPATemplates/", parent_component_id="801B800245E468A52AEBEC4BE31CFF5AF82F371DAEF5F158AC2E98C2FA324B46", description="This is a linked PA template that only returns security level data", content = TemplateContentTypes( mandatory = ["accounts", "benchmarks"], optional = ["groups", "columns"], locked = ["componentdetail"] ) ) linked_pa_template_parameters_root = LinkedPATemplateParametersRoot( data = linked_pa_template_parameters ) response = self.linked_pa_templates_api.create_linked_pa_templates( linked_pa_template_parameters_root = linked_pa_template_parameters_root) firsttemplate = list(response[0].data.keys())[0] self.assertEqual(response[1], 201, "Response should be 201 - Success") self.assertEqual(type(response[0].data), dict, "Response should be of Dictionary type.") self.assertEqual(type(response[0].data[firsttemplate]), LinkedPATemplateSummary, "Response should be of LinkedPATemplateSummary type.") self.assertGreater(len(response[0].data), 0, "Response result should not be an empty list.") def test_b_get_all_linked_pa_templates(self): response = self.linked_pa_templates_api.get_linked_pa_templates( directory = "Personal:SDKTests/DoNotModify/LinkedPATemplates/" ) firsttemplate = list(response[0].data.keys())[0] self.assertEqual(response[1], 200, "Response should be 200 - Success") self.assertEqual(type(response[0].data), dict, "Response should be of Dictionary type.") self.assertEqual(type(response[0].data[firsttemplate]), LinkedPATemplateSummary, "Response should be of LinkedPATemplateSummary type.") self.assertGreater(len(response[0].data), 0, "Response result should not be an empty list.") def test_c_update_linked_pa_template(self): templates = self.linked_pa_templates_api.get_linked_pa_templates( directory = "Personal:SDKTests/DoNotModify/LinkedPATemplates/" ) template_id = list(templates[0].data.keys())[0] linked_pa_template_update_parameters = LinkedPATemplateUpdateParameters( parent_component_id="801B800245E468A52AEBEC4BE31CFF5AF82F371DAEF5F158AC2E98C2FA324B46", description="This is an updated linked PA template that only returns security level data", content = TemplateContentTypes( mandatory = ["accounts", "benchmarks"], optional = ["groups", "columns"], locked = ["componentdetail"] ) ) linked_pa_template_update_parameters_root = 
LinkedPATemplateUpdateParametersRoot( data = linked_pa_template_update_parameters ) response = self.linked_pa_templates_api.update_linked_pa_templates( id = template_id, linked_pa_template_update_parameters_root=linked_pa_template_update_parameters_root ) self.assertEqual(response[1], 200, "Response should be 200 - Success") self.assertEqual(type(response[0].data), dict, "Response should be of Dictionary type.") self.assertEqual(type(response[0].data[template_id]), LinkedPATemplateSummary, "Response should be of LinkedPATemplateSummary type.") self.assertGreater(len(response[0].data), 0, "Response result should not be an empty list.") def test_d_get_linked_pa_template_by_id(self): templates = self.linked_pa_templates_api.get_linked_pa_templates( directory = "Personal:SDKTests/DoNotModify/LinkedPATemplates/" ) template_id = list(templates[0].data.keys())[0] response = self.linked_pa_templates_api.get_linked_pa_templates_by_id( id = template_id ) self.assertEqual(response[1], 200, "Response should be 200 - Success") self.assertEqual(type(response[0]), LinkedPATemplateRoot, "Response should be of LinkedPATemplateRoot type.") self.assertEqual(type(response[0].data), LinkedPATemplate, "Response should be of LinkedPATemplate type.") def test_e_delete_linked_pa_template(self): templates = self.linked_pa_templates_api.get_linked_pa_templates( directory = "Personal:SDKTests/DoNotModify/LinkedPATemplates/" ) template_id = list(templates[0].data.keys())[0] response = self.linked_pa_templates_api.delete_linked_pa_templates( id = template_id ) self.assertEqual(response[1], 204, "Response should be 204 - Success") if __name__ == '__main__': unittest.main()
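# Note (added, not in the original tests): the test_a_/test_b_/... prefixes
# rely on unittest's default alphabetical ordering of test methods, so the
# create/get/update/delete calls hit the same template directory in sequence.
# Renaming a method can silently change that execution order.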
py
1a4a8f678193f4c412c3dec67a0990f0efc06c73
from unittest import TestCase from tests import get_data from pytezos.michelson.converter import build_schema, decode_micheline, encode_micheline, micheline_to_michelson class StorageTestKT1D8STdsizybSqDGCDn19s8R8Fn6KcDW9xg(TestCase): @classmethod def setUpClass(cls): cls.maxDiff = None cls.contract = get_data('storage/zeronet/KT1D8STdsizybSqDGCDn19s8R8Fn6KcDW9xg.json') def test_storage_encoding_KT1D8STdsizybSqDGCDn19s8R8Fn6KcDW9xg(self): type_expr = self.contract['script']['code'][1] val_expr = self.contract['script']['storage'] schema = build_schema(type_expr) decoded = decode_micheline(val_expr, type_expr, schema) actual = encode_micheline(decoded, schema) self.assertEqual(val_expr, actual) def test_storage_schema_KT1D8STdsizybSqDGCDn19s8R8Fn6KcDW9xg(self): _ = build_schema(self.contract['script']['code'][0]) def test_storage_format_KT1D8STdsizybSqDGCDn19s8R8Fn6KcDW9xg(self): _ = micheline_to_michelson(self.contract['script']['code']) _ = micheline_to_michelson(self.contract['script']['storage'])
py
1a4a8fe5dd8344b81b4b040bdf0b978e541d1158
import os
import torch
import torch.optim as optim
import torch.backends.cudnn as cudnn
import argparse
import torch.utils.data as data
from data import AnnotationTransform, BaseTransform, VOCDetection, detection_collate, coco_detection_collate, seq_detection_collate, mb_cfg, dataset_training_cfg, COCOroot, COCODetection
from utils.augmentations import SSDAugmentation
from layers.modules import MultiBoxLoss, RefineMultiBoxLoss
from layers.functions import PriorBox
import numpy as np
import time
import logging


def str2bool(v):
    return v.lower() in ("yes", "true", "t", "1")


def print_log(args):
    logging.info('model_name: ' + args.model_name)
    logging.info('ssd_dim: ' + str(args.ssd_dim))
    logging.info('Backbone: ' + args.backbone)
    logging.info('BN: ' + str(args.bn))
    logging.info('Conv7 Channel: ' + str(args.c7_channel))
    if 'RefineDet' in args.backbone:
        logging.info('Refine: ' + str(args.refine))
        logging.info('Deform: ' + str(args.deform))
        logging.info('Multi-head: ' + str(args.multihead))
    if args.resume:
        logging.info('resume: ' + args.resume)
        logging.info('start_iter: ' + str(args.start_iter))
    elif args.resume_from_ssd:
        logging.info('resume_from_ssd: ' + args.resume_from_ssd)
    else:
        logging.info('load pre-trained backbone: ' + args.basenet)
    logging.info('lr: ' + str(args.lr))
    logging.info('warm_epoch: ' + str(args.warm_epoch))
    logging.info('gamma: ' + str(args.gamma))
    logging.info('step_list: ' + str(args.step_list))
    logging.info('save_interval: ' + str(args.save_interval))
    logging.info('dataset_name: ' + args.dataset_name)
    logging.info('set_file_name: ' + args.set_file_name)
    logging.info('gpu_ids: ' + args.gpu_ids)
    logging.info('augm_type: ' + args.augm_type)
    logging.info('batch_size: ' + str(args.batch_size))
    logging.info('loss weights: ' + str(args.loss_coe))


parser = argparse.ArgumentParser(description='Single Shot MultiBox Detector Training')
parser.add_argument('--basenet', default='vgg16bn_reducedfc.pth', help='pretrained base model')
parser.add_argument('--jaccard_threshold', default=0.5, type=float, help='Min Jaccard index for matching')
parser.add_argument('--batch_size', default=4, type=int, help='Batch size for training')
parser.add_argument('--resume', default=None, type=str, help='Resume from checkpoint')  # './weights/tssd300_VID2017_b8s8_RSkipTBLstm_baseAugmDrop2Clip5_FixVggExtraPreLocConf/ssd300_seqVID2017_20000.pth'
parser.add_argument('--resume_from_ssd', default=None, type=str, help='Resume vgg and extras from ssd checkpoint')
parser.add_argument('--num_workers', default=8, type=int, help='Number of workers used in dataloading')
parser.add_argument('--start_iter', default=0, type=int, help='Begin counting iterations starting from this value (should be used with resume)')
parser.add_argument('--cuda', default=True, type=str2bool, help='Use cuda to train model')
parser.add_argument('--lr', '--learning-rate', default=1e-4, type=float, help='initial learning rate')
parser.add_argument('--momentum', default=0.9, type=float, help='momentum')
parser.add_argument('--weight_decay', default=5e-4, type=float, help='Weight decay for SGD')
parser.add_argument('--gamma', default=0.1, type=float, help='Gamma update for SGD')
parser.add_argument('--log_iters', default=True, type=bool, help='Print the loss at each iteration')
parser.add_argument('--visdom', default=False, type=str2bool, help='Use visdom for loss visualization')
parser.add_argument('--save_folder', default='./weights040/test', help='Location to save checkpoint models')
parser.add_argument('--dataset_name', default='VOC0712',
help='VOC0712/VIDDET/seqVID2017/MOT17Det/seqMOT17Det')
parser.add_argument('--step_list', nargs='+', type=int, default=[30, 50], help='step_list for learning rate')
parser.add_argument('--backbone', default='RefineDet_ResNet101', type=str, help='Backbone')
parser.add_argument('--c7_channel', default=1024, type=int, help='out_channel of Conv7 in VGG')
parser.add_argument('--refine', default=True, type=str2bool, help='Only work when backbone==RefineDet')
parser.add_argument('--deform', default=1, type=int, help='number of deform groups. 0: Do not use deformable conv. Only work when backbone==RefineDet')
parser.add_argument('--multihead', default=True, type=str2bool, help='Multihead detection')
parser.add_argument('--drop', default=1.0, type=float, help='DropOut, Only work when backbone==RefineDet')
parser.add_argument('--model_name', default='ssd', type=str, help='which model selected')
parser.add_argument('--ssd_dim', default=320, type=int, help='ssd_dim 300, 320 or 512')
parser.add_argument('--gpu_ids', default='4,5', type=str, help='gpu number')
parser.add_argument('--augm_type', default='base', type=str, help='how to transform data')
parser.add_argument('--set_file_name', default='train', type=str, help='train_VID_DET/train_video_remove_no_object/train, MOT dataset does not use it')
parser.add_argument('--loss_coe', nargs='+', type=float, default=[1.0, 1.0, 0.5], help='coefficients for loc, conf, att, asso')
parser.add_argument('--bn', default=False, type=str2bool, help='use BatchNorm layers in the backbone')
parser.add_argument('--save_interval', default=10, type=int, help='frequency of checkpoint saving')
parser.add_argument('--warm_epoch', default=0, type=int, help='warm epoch')
args = parser.parse_args()

if not os.path.exists(args.save_folder):
    os.mkdir(args.save_folder)
current_time = time.strftime("%b_%d_%H:%M:%S_%Y", time.localtime())
logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
                    datefmt='%a, %d %b %Y %H:%M:%S',
                    filename=os.path.join(args.save_folder, current_time + '.log'),
                    filemode='w')
console = logging.StreamHandler()
console.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
console.setFormatter(formatter)
logging.getLogger('').addHandler(console)
print_log(args)

os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu_ids
device = torch.device('cuda' if args.cuda and torch.cuda.is_available() else 'cpu')
ssd_dim = args.ssd_dim

if args.dataset_name in ['MOT15', 'seqMOT15']:
    prior = 'MOT_300'
    cfg = mb_cfg[prior]
else:
    prior = 'VOC_' + str(args.ssd_dim)
    if args.ssd_dim == 300 and 'RFB' in args.backbone:
        prior += '_RFB'
    elif args.ssd_dim == 512 and 'RefineDet' in args.backbone:
        prior += '_RefineDet'
    cfg = mb_cfg[prior]

train_sets, num_classes, data_root = dataset_training_cfg[args.dataset_name]
logging.info('train sets: ' + str(train_sets))
set_filename = args.set_file_name

if args.dataset_name[:3] == 'seq':
    collate_fn = seq_detection_collate
elif args.dataset_name == 'COCO':
    collate_fn = coco_detection_collate
else:
    collate_fn = detection_collate

if args.dataset_name == 'UW':
    means = (128, 128, 128)
else:
    means = (104, 117, 123)
mean_np = np.array(means, dtype=np.int32)

batch_size = args.batch_size
weight_decay = args.weight_decay
max_epoch = args.step_list[-1]
gamma = 0.1
momentum = args.momentum

if args.visdom:
    import visdom
    viz = visdom.Visdom()

if 'RFB' in args.backbone:
    from model.rfbnet_vgg import build_net
    ssd_net = build_net('train', ssd_dim, num_classes, bn=args.bn)
elif
'RefineDet' in args.backbone: if 'MobNet' in args.backbone: if args.deform: from model.dualrefinedet_mobilenet import build_net ssd_net = build_net('train', size=ssd_dim, num_classes=num_classes, def_groups=args.deform, multihead=args.multihead) else: from model.refinedet_mobilenet import build_net ssd_net = build_net('train', size=ssd_dim, num_classes=num_classes, use_refine=args.refine) elif args.deform: from model.dualrefinedet_vggbn import build_net ssd_net = build_net('train', size=ssd_dim, num_classes=num_classes, c7_channel=args.c7_channel, def_groups=args.deform, bn=args.bn, multihead=args.multihead) else: from model.refinedet_vgg import build_net ssd_net = build_net('train', size=ssd_dim, num_classes=num_classes, use_refine=args.refine, c7_channel=args.c7_channel, bn=args.bn, multihead=args.multihead) elif 'MobNet' in args.backbone: from model.ssd4scale_mobile import build_net ssd_net = build_net('train', size=ssd_dim, num_classes=num_classes, c7_channel=args.c7_channel) elif '4s' in args.backbone: from model.ssd4scale_vgg import build_net ssd_net = build_net('train', size=ssd_dim, num_classes=num_classes, c7_channel=args.c7_channel, bn=args.bn) else: ssd_net = None net = ssd_net if device==torch.device('cuda'): net = torch.nn.DataParallel(ssd_net) cudnn.benchmark = True print(ssd_net) net = net.to(device) if args.resume: logging.info('Resuming training, loading {}...'.format(args.resume)) ssd_net.load_weights(args.resume) else: backbone_weights = torch.load('../weights/'+ args.basenet) logging.info('Loading base network...') ssd_net.backbone.load_state_dict(backbone_weights) if not args.resume: from model.networks import net_init net_init(ssd_net, args.backbone, logging, refine=args.refine, deform=args.deform, multihead=args.multihead) if args.augm_type == 'ssd': data_transform = SSDAugmentation else: data_transform = BaseTransform optimizer = optim.SGD(net.parameters(), lr=args.lr, momentum=args.momentum, weight_decay=args.weight_decay) # criterion if 'RefineDet' in args.backbone and args.refine: use_refine = True arm_criterion = RefineMultiBoxLoss(2, 0.5, True, 0, True, 3, 0.5, False, device=device, only_loc=True) criterion = RefineMultiBoxLoss(num_classes, 0.5, True, 0, True, 3, 0.5, False, device=device) else: use_refine = False criterion = MultiBoxLoss(num_classes, 0.5, True, 0, True, 3, 0.5, False, device=device) priorbox = PriorBox(cfg) with torch.no_grad(): priors = priorbox.forward().to(device) def train(): net.train() epoch = args.start_iter if args.dataset_name == 'COCO': dataset = COCODetection(COCOroot, year='trainval2014', image_sets=train_sets, transform=data_transform(ssd_dim, means), phase='train') else: dataset = VOCDetection(data_root, train_sets, data_transform(ssd_dim, means), AnnotationTransform(dataset_name=args.dataset_name), dataset_name=args.dataset_name, set_file_name=set_filename) epoch_size = len(dataset) // args.batch_size drop_step = [s * epoch_size for s in args.step_list] max_iter = max_epoch * epoch_size logging.info('Loading Dataset:' + args.dataset_name + ' dataset size: ' +str(len(dataset))) step_index = 0 if args.visdom: # initialize visdom loss plot y_dim = 3 legend = ['Loss', 'Loc Loss', 'Conf Loss',] if use_refine: y_dim += 1 legend += ['Arm Loc Loss',] lot = viz.line( X=torch.zeros((1,)), Y=torch.zeros((1, y_dim)), opts=dict( xlabel='Iteration', ylabel='Loss', title=args.save_folder.split('/')[-1], legend=legend, ) ) batch_iterator = None data_loader = data.DataLoader(dataset, batch_size, num_workers=args.num_workers, shuffle=True, 
collate_fn=collate_fn, pin_memory=True)
    for iteration in range(epoch * epoch_size, max_iter + 10):
        if (not batch_iterator) or (iteration % epoch_size == 0):
            # create batch iterator
            batch_iterator = iter(data_loader)
            if epoch % args.save_interval == 0:
                logging.info('Saving state, epoch: ' + str(epoch))
                torch.save(ssd_net.state_dict(),
                           os.path.join(args.save_folder, args.model_name + str(ssd_dim) + '_' + args.dataset_name + '_' + repr(epoch) + '.pth'))
            epoch += 1
        t0 = time.time()
        if iteration in drop_step:
            step_index = drop_step.index(iteration) + 1
            adjust_learning_rate(optimizer, args.gamma, epoch, step_index, iteration, epoch_size)
            # adjust_learning_rate(optimizer, args.gamma)
        collected_data = next(batch_iterator)
        with torch.no_grad():
            images, targets = collected_data[:2]
            images = images.to(device)
            targets = [anno.to(device) for anno in targets]
        # forward
        loss = torch.tensor(0., requires_grad=True).to(device)
        out = net(images)
        # backward
        optimizer.zero_grad()
        if use_refine:
            loss_arm_l = arm_criterion(out[0], priors, targets)
            loss_l, loss_c = criterion(out[2:], priors, targets, arm_data=out[:2])
            loss += args.loss_coe[0] * loss_arm_l
        else:
            loss_l, loss_c = criterion(out, priors, targets)
        loss += args.loss_coe[0] * loss_l + args.loss_coe[1] * loss_c
        loss.backward()
        optimizer.step()
        t1 = time.time()
        if iteration % 10 == 0:
            if use_refine:
                logging.info('Epoch:' + repr(epoch) + ', epochiter: ' + repr(iteration % epoch_size) + '/' + repr(epoch_size) + ', total_iter ' + repr(
                    iteration) + ' || loss: %.4f, Loss_l: %.4f, loss_c: %.4f, loss_arm_l: %.4f, lr: %.5f || Timer: %.4f sec.' % (
                    loss, loss_l, loss_c, loss_arm_l, optimizer.param_groups[0]['lr'], t1 - t0))
            else:
                logging.info('Epoch:' + repr(epoch) + ', epochiter: ' + repr(iteration % epoch_size) + '/' + repr(epoch_size) + ', total_iter ' + repr(
                    iteration) + ' || loss: %.4f, Loss_l: %.4f, loss_c: %.4f, lr: %.5f || Timer: %.4f sec.' % (
                    loss, loss_l, loss_c, optimizer.param_groups[0]['lr'], t1 - t0))
            if args.visdom:
                y_dis = [loss.cpu(), args.loss_coe[0] * loss_l.cpu(), args.loss_coe[1] * loss_c.cpu()]
                if iteration == 1000:
                    # initialize visdom loss plot
                    lot = viz.line(
                        X=torch.zeros((1,)),
                        Y=torch.zeros((1, y_dim)),
                        opts=dict(
                            xlabel='Iteration',
                            ylabel='Loss',
                            title=args.save_folder.split('/')[-1],
                            legend=legend,
                        )
                    )
                if use_refine:
                    y_dis += [args.loss_coe[0] * loss_arm_l.cpu(), ]
                # update = 'append' if iteration
                viz.line(
                    X=torch.ones((1, y_dim)) * iteration,
                    Y=torch.FloatTensor(y_dis).unsqueeze(0),
                    win=lot,
                    update='append',
                    opts=dict(
                        xlabel='Iteration',
                        ylabel='Loss',
                        title=args.save_folder.split('/')[-1],
                        legend=legend,)
                )

    torch.save(ssd_net.state_dict(),
               os.path.join(args.save_folder, args.model_name + str(ssd_dim) + '_' + args.dataset_name + '_' + repr(iteration) + '.pth'))
    print('Complete Training. Saving state, iter:', iteration)


# def adjust_learning_rate(optimizer, gamma):
#     for param_group in optimizer.param_groups:
#         param_group['lr'] *= gamma


def adjust_learning_rate(optimizer, gamma, epoch, step_index, iteration, epoch_size):
    if epoch <= args.warm_epoch:
        lr = 1e-6 + (args.lr - 1e-6) * iteration / (epoch_size * args.warm_epoch)
    else:
        lr = args.lr * (gamma ** (step_index))
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr
    # return lr


if __name__ == '__main__':
    train()
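# --- Illustrative note (added; not part of the original script) ---------------
# adjust_learning_rate above implements linear warmup followed by step decay.
# A minimal standalone sketch with assumed values (lr=4e-3, gamma=0.1,
# warm_epoch=1, epoch_size=100):
#
#   during warmup, epoch 1, iteration 50:
#       lr = 1e-6 + (4e-3 - 1e-6) * 50 / (100 * 1)  ~= 2e-3
#   after the first entry of --step_list is reached (step_index == 1):
#       lr = 4e-3 * 0.1 ** 1 = 4e-4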
py
1a4a914fd9f610b8c9ac5684cdf2b93d079ae521
from __future__ import absolute_import from django.db.models import Q from rest_framework import serializers from rest_framework.response import Response from sentry.api.bases.organization import ( OrganizationEndpoint, OrganizationPermission ) from sentry.api.exceptions import ResourceDoesNotExist from sentry.api.serializers import serialize from sentry.api.serializers.models.team import TeamWithProjectsSerializer from sentry.models import ( AuditLogEntryEvent, OrganizationAccessRequest, OrganizationMember, OrganizationMemberTeam, Team ) ERR_INSUFFICIENT_ROLE = 'You cannot modify a member other than yourself.' class OrganizationMemberTeamSerializer(serializers.Serializer): isActive = serializers.BooleanField() class RelaxedOrganizationPermission(OrganizationPermission): _allowed_scopes = [ 'org:read', 'org:write', 'org:admin', 'member:read', 'member:write', 'member:admin', ] scope_map = { 'GET': _allowed_scopes, 'POST': _allowed_scopes, 'PUT': _allowed_scopes, # DELETE checks for role comparison as you can either remove a member # with a lower access role, or yourself, without having the req. scope 'DELETE': _allowed_scopes, } class OrganizationMemberTeamDetailsEndpoint(OrganizationEndpoint): permission_classes = [RelaxedOrganizationPermission] def _can_access(self, request, member): # TODO(dcramer): ideally org owners/admins could perform these actions if request.is_superuser(): return True if not request.user.is_authenticated(): return False if request.user.id == member.user_id: return True return False def _get_member(self, request, organization, member_id): if member_id == 'me': queryset = OrganizationMember.objects.filter( organization=organization, user__id=request.user.id, user__is_active=True, ) else: queryset = OrganizationMember.objects.filter( Q(user__is_active=True) | Q(user__isnull=True), organization=organization, id=member_id, ) return queryset.select_related('user').get() def post(self, request, organization, member_id, team_slug): """ Join a team Join or request access to a team. If the user is already a member of the team, this will simply return a 204. If the user needs permission to join the team, an access request will be generated and the returned status code will be 202. """ try: om = self._get_member(request, organization, member_id) except OrganizationMember.DoesNotExist: raise ResourceDoesNotExist if not self._can_access(request, om): return Response({'detail': ERR_INSUFFICIENT_ROLE}, status=400) try: team = Team.objects.get( organization=organization, slug=team_slug, ) except Team.DoesNotExist: raise ResourceDoesNotExist try: omt = OrganizationMemberTeam.objects.get( team=team, organizationmember=om, ) except OrganizationMemberTeam.DoesNotExist: if not (request.access.has_scope('org:write') or organization.flags.allow_joinleave): omt, created = OrganizationAccessRequest.objects.get_or_create( team=team, member=om, ) if created: omt.send_request_email() return Response(status=202) omt = OrganizationMemberTeam.objects.create( team=team, organizationmember=om, ) else: return Response(status=204) self.create_audit_entry( request=request, organization=organization, target_object=omt.id, target_user=om.user, event=AuditLogEntryEvent.MEMBER_JOIN_TEAM, data=omt.get_audit_log_data(), ) return Response(serialize( team, request.user, TeamWithProjectsSerializer()), status=201) def delete(self, request, organization, member_id, team_slug): """ Leave a team Leave a team. 
""" try: om = self._get_member(request, organization, member_id) except OrganizationMember.DoesNotExist: raise ResourceDoesNotExist if not self._can_access(request, om): return Response({'detail': ERR_INSUFFICIENT_ROLE}, status=400) try: team = Team.objects.get( organization=organization, slug=team_slug, ) except Team.DoesNotExist: raise ResourceDoesNotExist try: omt = OrganizationMemberTeam.objects.get( team=team, organizationmember=om, ) except OrganizationMemberTeam.DoesNotExist: pass else: self.create_audit_entry( request=request, organization=organization, target_object=omt.id, target_user=om.user, event=AuditLogEntryEvent.MEMBER_LEAVE_TEAM, data=omt.get_audit_log_data(), ) omt.delete() return Response(serialize( team, request.user, TeamWithProjectsSerializer()), status=200)
py
1a4a91764f701910b720e66f1063799d486040d1
from schematic.models.metadata import MetadataModel from schematic import CONFIG config = CONFIG.load_config("schematic/config.yml") inputMModelLocation = CONFIG["model"]["input"]["location"] inputMModelLocationType = CONFIG["model"]["input"]["file_type"] manifest_title = CONFIG["manifest"]["title"] manifest_data_type = CONFIG["manifest"]["data_type"] metadata_model = MetadataModel(inputMModelLocation, inputMModelLocationType)
py
1a4a9278f7a7c848819ec33d5813a7216ae0c931
from __future__ import division, print_function

import argparse
import datetime
import json
import os
import os.path
import shlex
import subprocess

DATA_TABLE_NAME = "ncbi_taxonomy_sqlite"


def build_sqlite(taxonomy_dir, output_directory, name=None, description=None):
    if not os.path.exists(output_directory):
        os.mkdir(output_directory)

    output_filename = os.path.join(output_directory, "tax.ncbitaxonomy.sqlite")
    cmd_str = "taxonomy_util -d {} to_sqlite {}".format(output_filename, taxonomy_dir)
    cmd = shlex.split(cmd_str)
    subprocess.check_call(cmd)

    today_str = datetime.date.today().strftime("%Y-%m-%d")
    if name is None or name.strip() == "":
        name = "ncbitaxonomy_build_" + today_str
    if description is None or description.strip() == "":
        description = "NCBI Taxonomy database (built on {})".format(today_str)
    data = [dict(value=name, description=description, path=output_filename)]
    return data


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Build SQLite database from NCBI taxonomy"
    )
    parser.add_argument(
        "--output_directory", default="tmp", help="Directory to write output to"
    )
    parser.add_argument(
        "taxonomy_dir",
        help="Path to directory containing NCBI Taxonomy nodes.dmp and names.dmp files"
    )
    parser.add_argument(
        "name", help="Name to use for the entry in the data table"
    )
    parser.add_argument(
        "description", help="Description to use for the entry in the data table"
    )
    parser.add_argument(
        "galaxy_datamanager_filename",
        help="Galaxy JSON format file describing data manager inputs",
    )
    args = parser.parse_args()

    config = json.load(open(args.galaxy_datamanager_filename))
    output_directory = config.get("output_data", [{}])[0].get("extra_files_path", None)
    if output_directory is None:
        output_directory = args.output_directory
    if not os.path.isdir(output_directory):
        os.makedirs(output_directory)

    data_manager_dict = {}
    data_manager_dict["data_tables"] = json.load(
        open(args.galaxy_datamanager_filename)
    ).get("data_tables", {})
    data_manager_dict["data_tables"] = data_manager_dict.get("data_tables", {})
    data_manager_dict["data_tables"][DATA_TABLE_NAME] = data_manager_dict[
        "data_tables"
    ].get(DATA_TABLE_NAME, [])

    data = build_sqlite(args.taxonomy_dir, output_directory, args.name, args.description)

    data_manager_dict["data_tables"][DATA_TABLE_NAME].extend(data)
    print(json.dumps(data_manager_dict))
    json.dump(data_manager_dict, open(args.galaxy_datamanager_filename, "w"))
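# --- Illustrative output (added; not part of the original script) -------------
# Shape of the JSON this data manager writes back (all values below are
# assumptions; the keys come from the code above):
#
#   {"data_tables": {"ncbi_taxonomy_sqlite": [
#       {"value": "ncbitaxonomy_build_2024-01-01",
#        "description": "NCBI Taxonomy database (built on 2024-01-01)",
#        "path": "tmp/tax.ncbitaxonomy.sqlite"}]}}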
py
1a4a947e1f3919c45f7839c8da41fbf4b8d22a35
"""Customized Django paginators.""" from __future__ import unicode_literals from math import ceil from django.core.paginator import ( EmptyPage, Page, PageNotAnInteger, Paginator, ) class CustomPage(Page): """Handle different number of items on the first page.""" def start_index(self): """Return the 1-based index of the first item on this page.""" paginator = self.paginator # Special case, return zero if no items. if paginator.count == 0: return 0 elif self.number == 1: return 1 return ( (self.number - 2) * paginator.per_page + paginator.first_page + 1) def end_index(self): """Return the 1-based index of the last item on this page.""" paginator = self.paginator # Special case for the last page because there can be orphans. if self.number == paginator.num_pages: return paginator.count return (self.number - 1) * paginator.per_page + paginator.first_page class BasePaginator(Paginator): """A base paginator class subclassed by the other real paginators. Handle different number of items on the first page. """ def __init__(self, object_list, per_page, **kwargs): self._num_pages = None if 'first_page' in kwargs: self.first_page = kwargs.pop('first_page') else: self.first_page = per_page super(BasePaginator, self).__init__(object_list, per_page, **kwargs) def get_current_per_page(self, number): return self.first_page if number == 1 else self.per_page class DefaultPaginator(BasePaginator): """The default paginator used by this application.""" def page(self, number): number = self.validate_number(number) if number == 1: bottom = 0 else: bottom = ((number - 2) * self.per_page + self.first_page) top = bottom + self.get_current_per_page(number) if top + self.orphans >= self.count: top = self.count return CustomPage(self.object_list[bottom:top], number, self) def _get_num_pages(self): if self._num_pages is None: if self.count == 0 and not self.allow_empty_first_page: self._num_pages = 0 else: hits = max(0, self.count - self.orphans - self.first_page) try: self._num_pages = int(ceil(hits / float(self.per_page))) + 1 except ZeroDivisionError: self._num_pages = 0 # fallback to a safe value return self._num_pages num_pages = property(_get_num_pages) class LazyPaginator(BasePaginator): """Implement lazy pagination.""" def validate_number(self, number): try: number = int(number) except ValueError: raise PageNotAnInteger('That page number is not an integer') if number < 1: raise EmptyPage('That page number is less than 1') return number def page(self, number): number = self.validate_number(number) current_per_page = self.get_current_per_page(number) if number == 1: bottom = 0 else: bottom = ((number - 2) * self.per_page + self.first_page) top = bottom + current_per_page # Retrieve more objects to check if there is a next page. objects = list(self.object_list[bottom:top + self.orphans + 1]) objects_count = len(objects) if objects_count > (current_per_page + self.orphans): # If another page is found, increase the total number of pages. self._num_pages = number + 1 # In any case, return only objects for this page. objects = objects[:current_per_page] elif (number != 1) and (objects_count <= self.orphans): raise EmptyPage('That page contains no results') else: # This is the last page. self._num_pages = number return CustomPage(objects, number, self) def _get_count(self): raise NotImplementedError count = property(_get_count) def _get_num_pages(self): return self._num_pages num_pages = property(_get_num_pages) def _get_page_range(self): raise NotImplementedError page_range = property(_get_page_range)
py
1a4a9519d4775df34febff545d6e98beaea68a58
# -*- coding: utf-8 -*-

# Copyright 2018 IBM.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================

# Convert graph partitioning instances into Pauli list
# Deal with Gset format. See https://web.stanford.edu/~yyye/yyye/Gset/

import logging
from collections import OrderedDict

import numpy as np
import numpy.random as rand
from qiskit.quantum_info import Pauli

from qiskit_aqua import Operator

logger = logging.getLogger(__name__)


def random_graph(n, weight_range=10, edge_prob=0.3, savefile=None, seed=None):
    """Generate random Erdos-Renyi graph.

    Args:
        n (int): number of nodes.
        weight_range (int): weights will be smaller than this value,
            in absolute value.
        edge_prob (float): probability of edge appearing.
        savefile (str or None): name of file where to save graph.
        seed (int or None): random seed - if None, will not initialize.

    Returns:
        numpy.ndarray: adjacency matrix (with weights).
    """
    assert(weight_range >= 0)
    if seed:
        rand.seed(seed)
    w = np.zeros((n, n))
    m = 0
    for i in range(n):
        for j in range(i+1, n):
            if rand.rand() <= edge_prob:
                w[i, j] = rand.randint(1, weight_range)
                if rand.rand() >= 0.5:
                    w[i, j] *= -1
                m += 1
    w += w.T
    if savefile:
        with open(savefile, 'w') as outfile:
            outfile.write('{} {}\n'.format(n, m))
            for i in range(n):
                for j in range(i+1, n):
                    if w[i, j] != 0:
                        outfile.write('{} {} {}\n'.format(i + 1, j + 1, w[i, j]))
    return w


def get_graphpartition_qubitops(weight_matrix):
    """Generate Hamiltonian for the graph partitioning.

    Args:
        weight_matrix (numpy.ndarray): adjacency matrix.

    Returns:
        operator.Operator, float: operator for the Hamiltonian and a
        constant shift for the obj function.

    Goals:
        1. Separate the vertices into two sets of equal size.
        2. Make sure the number of edges between the two sets is minimized.

    Hamiltonian:
        H = H_A + H_B
        H_A = sum_{(i,j) in E} (1 - Z_i Z_j) / 2
        H_B = (sum_i Z_i)^2 = sum_i Z_i^2 + sum_{i != j} Z_i Z_j
        H_A is for achieving goal 2 and H_B is for achieving goal 1.
    """
    num_nodes = len(weight_matrix)
    pauli_list = []
    shift = 0

    for i in range(num_nodes):
        for j in range(i):
            if weight_matrix[i, j] != 0:
                xp = np.zeros(num_nodes, dtype=np.bool)
                zp = np.zeros(num_nodes, dtype=np.bool)
                zp[i] = True
                zp[j] = True
                pauli_list.append([-0.5, Pauli(zp, xp)])
                shift += 0.5

    for i in range(num_nodes):
        for j in range(num_nodes):
            if i != j:
                xp = np.zeros(num_nodes, dtype=np.bool)
                zp = np.zeros(num_nodes, dtype=np.bool)
                zp[i] = True
                zp[j] = True
                pauli_list.append([1, Pauli(zp, xp)])
            else:
                shift += 1

    return Operator(paulis=pauli_list), shift


def parse_gset_format(filename):
    """Read graph in Gset format from file.

    Args:
        filename (str): name of the file.

    Returns:
        numpy.ndarray: adjacency matrix as a 2D numpy array.
""" n = -1 with open(filename) as infile: header = True m = -1 count = 0 for line in infile: v = map(lambda e: int(e), line.split()) if header: n, m = v w = np.zeros((n, n)) header = False else: s, t, x = v s -= 1 # adjust 1-index t -= 1 # ditto w[s, t] = t count += 1 assert m == count w += w.T return w def objective_value(x, w): """Compute the value of a cut. Args: x (numpy.ndarray): binary string as numpy array. w (numpy.ndarray): adjacency matrix. Returns: float: value of the cut. """ X = np.outer(x, (1-x)) w_01 = np.where(w != 0, 1, 0) return np.sum(w_01 * X) def get_graph_solution(x): """Get graph solution from binary string. Args: x (numpy.ndarray) : binary string as numpy array. Returns: numpy.ndarray: graph solution as binary numpy array. """ return 1 - x def sample_most_likely(state_vector): """Compute the most likely binary string from state vector. Args: state_vector (numpy.ndarray or dict): state vector or counts. Returns: numpy.ndarray: binary string as numpy.ndarray of ints. """ if isinstance(state_vector, dict) or isinstance(state_vector, OrderedDict): # get the binary string with the largest count binary_string = sorted(state_vector.items(), key=lambda kv: kv[1])[-1][0] x = np.asarray([int(y) for y in reversed(list(binary_string))]) return x else: n = int(np.log2(state_vector.shape[0])) k = np.argmax(np.abs(state_vector)) x = np.zeros(n) for i in range(n): x[i] = k % 2 k >>= 1 return x def get_gset_result(x): """Get graph solution in Gset format from binary string. Args: x (numpy.ndarray) : binary string as numpy array. Returns: Dict[int, int]: graph solution in Gset format. """ return {i + 1: 1 - x[i] for i in range(len(x))}
py
1a4a964936105a8905169dcb884659767e14371b
from scrapy import cmdline cmdline.execute('scrapy crawl shufazidian'.split(' '))
py
1a4a967c6c8399fc0f7c7b06c35153322b274098
#!/usr/bin/env python # encoding: utf-8 """ @author: sherlock @contact: [email protected] """ import logging import os import sys sys.path.append('.') from fastreid.config import get_cfg from fastreid.engine import DefaultTrainer, default_argument_parser, default_setup, launch, Hazytrainer from fastreid.utils.checkpoint import Checkpointer from fastreid.engine import hooks from fastreid.evaluation import ReidEvaluator class H_Trainer(Hazytrainer): @classmethod def build_evaluator(cls, cfg, num_query, output_folder=None): if output_folder is None: output_folder = os.path.join(cfg.OUTPUT_DIR, "inference") return ReidEvaluator(cfg, num_query) class BaseTrainer(DefaultTrainer): @classmethod def build_evaluator(cls, cfg, num_query, output_folder=None): if output_folder is None: output_folder = os.path.join(cfg.OUTPUT_DIR, "inference") return ReidEvaluator(cfg, num_query) def setup(args): """ Create configs and perform basic setups. """ cfg = get_cfg() cfg.merge_from_file(args.config_file) cfg.merge_from_list(args.opts) cfg.freeze() default_setup(cfg, args) return cfg def main(args): cfg = setup(args) if args.eval_only: logger = logging.getLogger("fastreid.trainer") cfg.defrost() cfg.MODEL.BACKBONE.PRETRAIN = False model = H_Trainer.build_model(cfg) Checkpointer(model).load(cfg.MODEL.WEIGHTS) # load trained model if cfg.TEST.PRECISE_BN.ENABLED and hooks.get_bn_modules(model): prebn_cfg = cfg.clone() prebn_cfg.DATALOADER.NUM_WORKERS = 0 # save some memory and time for PreciseBN prebn_cfg.DATASETS.NAMES = tuple([cfg.TEST.PRECISE_BN.DATASET]) # set dataset name for PreciseBN logger.info("Prepare precise BN dataset") hooks.PreciseBN( # Run at the same freq as (but before) evaluation. model, # Build a new data loader to not affect training H_Trainer.build_train_loader(prebn_cfg), cfg.TEST.PRECISE_BN.NUM_ITER, ).update_stats() res = H_Trainer.test(cfg, model) return res trainer = H_Trainer(cfg) trainer.resume_or_load(resume=args.resume) return trainer.train() if __name__ == "__main__": args = default_argument_parser() args.add_argument("--info", type=str, default="test", help="information of parameters and losses") args = args.parse_args() print("Command Line Args:", args) launch( main, args.num_gpus, num_machines=args.num_machines, machine_rank=args.machine_rank, dist_url=args.dist_url, args=(args,), )
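# --- Illustrative invocation (added; not part of the original script) ---------
# The flags besides --info come from fastreid's default_argument_parser; the
# config path and values below are assumptions:
#
#   python train_net.py --config-file configs/Market1501/bagtricks_R50.yml \
#       --num-gpus 1 --info "baseline run"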
py
1a4a96a08fc7151c082b762969e6846ff6d7604c
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import os from airflow.models.dag import DAG from airflow.operators.python import PythonOperator from airflow.providers.amazon.aws.hooks.s3 import S3Hook from airflow.providers.amazon.aws.operators.s3_bucket import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.utils.dates import days_ago BUCKET_NAME = os.environ.get('BUCKET_NAME', 'test-airflow-12345') def upload_keys(): """This is a python callback to add keys into the s3 bucket""" # add keys to bucket s3_hook = S3Hook() for i in range(0, 3): s3_hook.load_string( string_data="input", key=f"path/data{i}", bucket_name=BUCKET_NAME, ) with DAG( dag_id='s3_bucket_dag', schedule_interval=None, start_date=days_ago(2), max_active_runs=1, tags=['example'], ) as dag: create_bucket = S3CreateBucketOperator( task_id='s3_bucket_dag_create', bucket_name=BUCKET_NAME, region_name='us-east-1', ) add_keys_to_bucket = PythonOperator( task_id="s3_bucket_dag_add_keys_to_bucket", python_callable=upload_keys ) delete_bucket = S3DeleteBucketOperator( task_id='s3_bucket_dag_delete', bucket_name=BUCKET_NAME, force_delete=True, ) create_bucket >> add_keys_to_bucket >> delete_bucket
py
1a4a97a11776c28fd79b9ba253622bddb0c3d0a0
import gocept.httpserverlayer.custom import mock import pkg_resources import suds.sax.parser import time import unittest import zeit.cms.checkout.helper import zeit.vgwort.connection import zeit.vgwort.interfaces import zeit.vgwort.testing import zope.component class WebServiceTest(zeit.vgwort.testing.EndToEndTestCase): def setUp(self): super(WebServiceTest, self).setUp() self.service = zope.component.getUtility( zeit.vgwort.interfaces.IMessageService) @property def repository(self): import zeit.cms.repository.interfaces return zope.component.getUtility( zeit.cms.repository.interfaces.IRepository) def add_token(self, content): ts = zope.component.getUtility(zeit.vgwort.interfaces.ITokens) ts.order(1) token = zeit.vgwort.interfaces.IToken(content) token.public_token, token.private_token = ts.claim() def test_smoketest_successful_call_roundtrip(self): result = self.service.call('qualityControl') self.assert_(len(result.qualityControlValues) > 0) def test_validation_error_should_raise_error_message(self): products = list(zeit.cms.content.sources.PRODUCT_SOURCE(None)) product = [x for x in products if x.id == 'KINZ'][0] content = self.repository['testcontent'] with zeit.cms.checkout.helper.checked_out(content) as co: co.product = product with self.assertRaises(zeit.vgwort.interfaces.WebServiceError) as e: self.service.new_document(self.repository['testcontent']) self.assertIn( "The value 'None' of attribute 'privateidentificationid'", str(e.exception)) def test_business_fault_should_raise_error_message(self): shakespeare = zeit.content.author.author.Author() shakespeare.title = 'Sir' shakespeare.firstname = 'William' shakespeare.lastname = 'Shakespeare' shakespeare.vgwortid = 12345 self.repository['shakespeare'] = shakespeare shakespeare = self.repository['shakespeare'] content = self.repository['testcontent'] with zeit.cms.checkout.helper.checked_out(content) as co: co.authorships = [co.authorships.create(shakespeare)] co.title = 'Title' co.teaserText = 'asdf' content = self.repository['testcontent'] self.add_token(content) try: self.service.new_document(content) except zeit.vgwort.interfaces.WebServiceError, e: self.assertIn('Shakespeare', unicode(e)) else: self.fail('WebServiceError should have been raised.') def test_report_new_document(self): author = zeit.content.author.author.Author() author.firstname = 'Tina' author.lastname = 'Groll' author.vgwortid = 2601970 self.repository['author'] = author author = self.repository['author'] content = self.repository['testcontent'] with zeit.cms.checkout.helper.checked_out(content) as co: co.authorships = [co.authorships.create(author)] co.title = 'Title' co.teaserText = 'x' * 2000 content = self.repository['testcontent'] self.add_token(content) self.service.new_document(content) def test_author_without_vgwotid_works(self): author = zeit.content.author.author.Author() author.firstname = 'Tina' author.lastname = 'Groll' self.repository['author'] = author author = self.repository['author'] content = self.repository['testcontent'] with zeit.cms.checkout.helper.checked_out(content) as co: co.authorships = [co.authorships.create(author)] co.title = 'Title' co.teaserText = 'x' * 2000 content = self.repository['testcontent'] self.add_token(content) self.service.new_document(content) def test_non_author_doc_as_author_should_be_ignored(self): import transaction import zeit.connector.interfaces author = zeit.content.author.author.Author() author.firstname = 'Tina' author.lastname = 'Groll' self.repository['tina'] = author author = self.repository['tina'] 
author2 = zeit.content.author.author.Author() author2.firstname = 'Invalid' author2.lastname = 'stuff' self.repository['author2'] = author2 author2 = self.repository['author2'] content = self.repository['testcontent'] with zeit.cms.checkout.helper.checked_out(content) as co: co.authorships = [co.authorships.create(author), co.authorships.create(author2)] co.title = 'Title' co.teaserText = 'x' * 2000 content = self.repository['testcontent'] connector = zope.component.getUtility( zeit.connector.interfaces.IConnector) transaction.commit() connector._properties[u'http://xml.zeit.de/author2'][ ('type', 'http://namespaces.zeit.de/CMS/meta')] = 'foo' self.add_token(content) self.service.new_document(content) class RequestHandler(gocept.httpserverlayer.custom.RequestHandler): def do_GET(self): self.send_response(200) self.end_headers() wsdl = pkg_resources.resource_string(__name__, 'pixelService.wsdl') wsdl = wsdl.replace('__PORT__', str(HTTP_LAYER['http_port'])) self.wfile.write(wsdl) def do_POST(self): self.send_response(500) self.send_header('Content-Type', 'text/html') self.send_header('Content-Length', 0) self.end_headers() # suds expects SOAP or nothing (and may the Lord have mercy if the # server should return 500 with an HTML error message instead...) self.wfile.write('') HTTP_LAYER = gocept.httpserverlayer.custom.Layer( RequestHandler, name='HTTPLayer', module=__name__) class HTTPErrorTest(unittest.TestCase): layer = HTTP_LAYER def test_http_error_should_raise_technical_error(self): service = zeit.vgwort.connection.PixelService( 'http://%s' % self.layer['http_address'], '', '') time.sleep(1) self.assertRaises( zeit.vgwort.interfaces.TechnicalError, lambda: list(service.order_pixels(1))) class MessageServiceTest(zeit.vgwort.testing.EndToEndTestCase): def setUp(self): super(MessageServiceTest, self).setUp() # Need a real webservice to load the WSDL. 
self.service = zope.component.getUtility( zeit.vgwort.interfaces.IMessageService) @property def repository(self): import zeit.cms.repository.interfaces return zope.component.getUtility( zeit.cms.repository.interfaces.IRepository) def get_content(self, authors, freetext=None): products = list(zeit.cms.content.sources.PRODUCT_SOURCE(None)) product = [x for x in products if x.id == 'KINZ'][0] content = self.repository['testcontent'] with zeit.cms.checkout.helper.checked_out(content) as co: co.authorships = [co.authorships.create(x) for x in authors] co.authors = freetext co.product = product co.title = 'Title' co.teaserText = 'x' * 2000 return self.repository['testcontent'] def test_content_must_have_commonmetadata(self): with self.assertRaises(zeit.vgwort.interfaces.WebServiceError) as e: self.service.new_document(mock.sentinel.notanarticle) self.assertEqual( e.exception.args, ('Does not seem to be an article -- stale cache?',)) def test_product_is_passed_as_additional_author_with_code(self): author = zeit.content.author.author.Author() author.firstname = 'Tina' author.lastname = 'Groll' author.vgwortid = 2601970 self.repository['author'] = author author = self.repository['author'] content = self.get_content([author]) with mock.patch('zeit.vgwort.connection.MessageService.call') as call: self.service.new_document(content) parties = call.call_args[0][1] authors = parties.authors.author self.assertEqual(2, len(authors)) self.assertEqual('1234abc', authors[-1].code) def test_author_code_should_be_passed_instead_of_name(self): author = zeit.content.author.author.Author() author.firstname = 'Tina' author.lastname = 'Groll' author.vgwortid = 2601970 author.vgwortcode = 'codecodecode' self.repository['author'] = author author = self.repository['author'] content = self.get_content([author]) with mock.patch('zeit.vgwort.connection.MessageService.call') as call: self.service.new_document(content) parties = call.call_args[0][1] authors = parties.authors.author self.assertEqual(2, len(authors)) self.assertEqual('codecodecode', authors[0].code) def test_author_name_should_be_passed(self): author = zeit.content.author.author.Author() author.firstname = 'Tina' author.lastname = 'Groll' self.repository['author'] = author author = self.repository['author'] content = self.get_content([author]) with mock.patch('zeit.vgwort.connection.MessageService.call') as call: self.service.new_document(content) parties = call.call_args[0][1] authors = parties.authors.author self.assertEqual(2, len(authors)) self.assertEqual('Tina', authors[0].firstName) self.assertEqual('Groll', authors[0].surName) def test_url_should_point_to_www_zeit_de(self): content = self.get_content([]) with mock.patch('zeit.vgwort.connection.MessageService.call') as call: self.service.new_document(content) self.assertEqual('http://www.zeit.de/testcontent/komplettansicht', call.call_args[0][3].webrange[0].url) def test_freetext_authors_should_be_passed(self): content = self.get_content( [], freetext=(('Paul Auster', 'Hans Christian Andersen'))) with mock.patch('zeit.vgwort.connection.MessageService.call') as call: self.service.new_document(content) parties = call.call_args[0][1] authors = parties.authors.author self.assertEqual(3, len(authors)) # two author, one product self.assertEqual('Paul', authors[0].firstName) self.assertEqual('Auster', authors[0].surName) self.assertEqual('Hans Christian', authors[1].firstName) self.assertEqual('Andersen', authors[1].surName) def test_freetext_authors_should_be_passed_unless_structured_given(self): author = 
zeit.content.author.author.Author() author.firstname = 'Tina' author.lastname = 'Groll' self.repository['author'] = author author = self.repository['author'] content = self.get_content( [author], freetext=(('Paul Auster', 'Hans Christian Andersen'))) with mock.patch('zeit.vgwort.connection.MessageService.call') as call: self.service.new_document(content) parties = call.call_args[0][1] authors = parties.authors.author self.assertEqual(2, len(authors)) self.assertEqual('Tina', authors[0].firstName) self.assertEqual('Groll', authors[0].surName) def test_freetext_authors_should_not_break_with_no_space(self): content = self.get_content( [], freetext=(('Merlin',))) with mock.patch('zeit.vgwort.connection.MessageService.call') as call: self.service.new_document(content) parties = call.call_args[0][1] authors = parties.authors.author self.assertEqual(1, len(authors)) # one product def test_freetext_authors_should_be_whitespace_normalized(self): content = self.get_content( [], freetext=( (' Paul Auster ', ' Hans Christian Andersen '))) with mock.patch('zeit.vgwort.connection.MessageService.call') as call: self.service.new_document(content) parties = call.call_args[0][1] authors = parties.authors.author self.assertEqual(3, len(authors)) # two author, one product self.assertEqual('Paul', authors[0].firstName) self.assertEqual('Auster', authors[0].surName) self.assertEqual('Hans Christian', authors[1].firstName) self.assertEqual('Andersen', authors[1].surName) class CodeFixer(unittest.TestCase): def test_dont_raise_when_not_applicable(self): context = mock.Mock() context.envelope = suds.sax.parser.Parser().parse( string='<Body><foo></foo></Body>') zeit.vgwort.connection.CodeFixer().marshalled(context)
py
1a4a97e4c823e79c34838d74ce9d7d7cedf2bfca
from flask import Flask, render_template app = Flask(__name__, static_url_path='/static', template_folder='template') @app.route("/") def index(): return render_template("index.html") @app.route("/about") def about(): return render_template("about.html") @app.route("/projects/<int:id>") def cv(id): id = int(id) if id == 1: return render_template("project1.html") if id == 2: return render_template("project2.html") if id == 3: return render_template("project3.html") if id == 4: return render_template("project4.html") #if id == 5: # return render_template("project5.html") #if id == 6: # return render_template("project6.html") if __name__ == "__main__": app.run(debug=True)
py
1a4a986c41531ef7495ed076d967aada91d56e68
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # import sys import logging import signal import uuid import json import datetime from threading import Thread, current_thread import mesos.interface from mesos.interface import mesos_pb2 import mesos.native logging.basicConfig(level=logging.DEBUG) def run_driver(*args, **kwargs): """ Starts mesos driver in separate thread. Stops driver in case when SIGINT is received at the main thread. """ driver = mesos.native.MesosSchedulerDriver(*args, **kwargs) def run_driver_async(): status = 0 if driver.run() == mesos_pb2.DRIVER_STOPPED else 1 driver.stop() sys.exit(status) framework_thread = Thread(target=run_driver_async) framework_thread.daemon = True framework_thread.start() if current_thread().name == "MainThread": signal.signal(signal.SIGINT, lambda signal, frame: driver.stop()) class FailureScheduler(mesos.interface.Scheduler): """ Starts executor for each failure config. Passes needed config for running failure using task.data. """ def __init__(self, executor, logger, task_retry): self.executor = executor self.services_statuses = {} self.logger = logger self.task_retry = task_retry def make_task_prototype(self, offer, cpus, mem, service_name): """ Creates task with needed resources """ task = mesos_pb2.TaskInfo() task_id = uuid.uuid4().hex task.task_id.value = task_id task.slave_id.value = offer.slave_id.value task.name = "pisaura-failure-runner-{}".format(service_name.replace(" ", "-")) cpus_r = task.resources.add() cpus_r.name = "cpus" cpus_r.type = mesos_pb2.Value.SCALAR cpus_r.scalar.value = cpus mem_r = task.resources.add() mem_r.name = "mem" mem_r.type = mesos_pb2.Value.SCALAR mem_r.scalar.value = mem return task def add_service(self, service): self.services_statuses[service['service']] = { 'service': service, 'status': mesos_pb2.TASK_STAGING, 'tasks': [], 'logs': [], 'updated': str(datetime.datetime.utcnow()), 'created': str(datetime.datetime.utcnow()) } def make_task(self, offer, service): task = self.make_task_prototype( offer, service['cpus'], service['mem'], service['service']) task.data = json.dumps(service) task.executor.MergeFrom(self.executor) if service['service'] in self.services_statuses: self.services_statuses[service['service']]['status'] = None self.services_statuses[service['service']]['tasks'].append(task.task_id.value) else: self.services_statuses[service['service']] = { 'service': service, 'status': None, 'tasks': [task.task_id.value] } return task def registered(self, driver, frameworkId, masterInfo): self.logger.info("Registered with framework ID %s" % frameworkId.value) def log_offer_stat(self, offer): offerCpus = 0 offerMem = 0 for resource in offer.resources: if resource.name == "cpus": offerCpus += resource.scalar.value elif resource.name == "mem": offerMem += resource.scalar.value 
self.logger.debug( "Received offer %s with cpus: %s and mem: %s", offer.id.value, offerCpus, offerMem) def get_next_service(self): retry_statuses = [mesos_pb2.TASK_ERROR, mesos_pb2.TASK_FAILED, mesos_pb2.TASK_STAGING] for service_name in self.services_statuses: self.logger.debug("Trying to commit %s as next service", service_name) tasks_count = len(self.services_statuses[service_name]['tasks']) status = self.services_statuses[service_name]['status'] if status not in retry_statuses: continue if status is None and tasks_count: continue if tasks_count < self.task_retry: return self.services_statuses[service_name]['service'] else: self.logger.debug( "retry count exceeded for service %s", service_name) def resourceOffers(self, driver, offers): for offer in offers: self.log_offer_stat(offer) service = self.get_next_service() self.logger.debug("Next service is %s", service) if not service: driver.declineOffer(offer.id) return task = self.make_task(offer, service) self.logger.info("Launching task {task} " "using offer {offer}.".format(task=task.task_id.value, offer=offer.id.value)) tasks = [task] driver.launchTasks(offer.id, tasks) def statusUpdate(self, driver, update): self.logger.debug( "Task %s is in state %s, message: %s" % ( update.task_id.value, mesos_pb2.TaskState.Name(update.state), update.message)) for service_name in self.services_statuses: if update.task_id.value in self.services_statuses[service_name]['tasks']: self.logger.info( "Move service %s to the state %s", service_name, mesos_pb2.TaskState.Name(update.state)) self.services_statuses[service_name]['status'] = update.state self.services_statuses[service_name]['logs'] = json.loads(update.data or "[]") self.services_statuses[service_name]['updated'] = str(datetime.datetime.utcnow()) def frameworkMessage(self, driver, executor_id, slave_id, message): self.logger.info("Received framework message %s", message) def init_executor(app_config): """ Creates mesos executor using given config dict. """ uris = app_config['resources'] executor = mesos_pb2.ExecutorInfo() executor.executor_id.value = "%s-executor" % app_config['framework_name'] executor.command.value = app_config['executor_command'] for uri in uris: uri_proto = executor.command.uris.add() uri_proto.value = uri uri_proto.extract = False if not uri.endswith(".tar.gz") else True executor.name = app_config['framework_name'].capitalize() return executor def run(application_config): """ Main function for setup and run FailureScheduler. """ executor = init_executor(application_config) framework = mesos_pb2.FrameworkInfo() framework.user = "" # Have Mesos fill in the current user. framework.name = application_config['framework_name'] logger = logging.getLogger("pisaura.scheduler") scheduler = FailureScheduler( executor, logger, application_config['task_retry']) run_driver(scheduler, framework, application_config['master']) return scheduler
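# --- Illustrative configuration (added; not part of the original module) ------
# run() expects a dict with the keys the code above reads; every value here is
# an assumption:
#
#   run({
#       'framework_name': 'pisaura',
#       'master': 'zk://127.0.0.1:2181/mesos',
#       'executor_command': 'python failure_executor.py',
#       'resources': ['http://example.com/executor.tar.gz'],
#       'task_retry': 3,
#   })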
py
1a4a98d860f3b2946a6b81fdd5392537b46ed4c9
#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright: (c) 2017, F5 Networks Inc. # GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['stableinterface'], 'supported_by': 'certified'} DOCUMENTATION = r''' --- module: bigip_device_sshd short_description: Manage the SSHD settings of a BIG-IP description: - Manage the SSHD settings of a BIG-IP. version_added: 2.2 options: allow: description: - Specifies, if you have enabled SSH access, the IP address or address range for other systems that can use SSH to communicate with this system. - To specify all addresses, use the value C(all). - IP address can be specified, such as 172.27.1.10. - IP rangees can be specified, such as 172.27.*.* or 172.27.0.0/255.255.0.0. - To remove SSH access specify an empty list or an empty string. type: list banner: description: - Whether to enable the banner or not. type: str choices: - enabled - disabled banner_text: description: - Specifies the text to include on the pre-login banner that displays when a user attempts to login to the system using SSH. type: str inactivity_timeout: description: - Specifies the number of seconds before inactivity causes an SSH session to log out. type: int log_level: description: - Specifies the minimum SSHD message level to include in the system log. type: str choices: - debug - debug1 - debug2 - debug3 - error - fatal - info - quiet - verbose login: description: - Specifies, when checked C(enabled), that the system accepts SSH communications. type: str choices: - enabled - disabled port: description: - Port that you want the SSH daemon to run on. type: int notes: - Requires BIG-IP version 12.0.0 or greater extends_documentation_fragment: f5 author: - Tim Rupp (@caphrim007) - Wojciech Wypior (@wojtek0806) ''' EXAMPLES = r''' - name: Set the banner for the SSHD service from a string bigip_device_sshd: banner: enabled banner_text: banner text goes here provider: password: secret server: lb.mydomain.com user: admin delegate_to: localhost - name: Set the banner for the SSHD service from a file bigip_device_sshd: banner: enabled banner_text: "{{ lookup('file', '/path/to/file') }}" provider: password: secret server: lb.mydomain.com user: admin delegate_to: localhost - name: Set the SSHD service to run on port 2222 bigip_device_sshd: port: 2222 provider: user: admin password: secret server: lb.mydomain.com delegate_to: localhost ''' RETURN = r''' allow: description: - Specifies, if you have enabled SSH access, the IP address or address range for other systems that can use SSH to communicate with this system. returned: changed type: list sample: 192.0.2.* banner: description: Whether the banner is enabled or not. returned: changed type: str sample: true banner_text: description: - Specifies the text included on the pre-login banner that displays when a user attempts to login to the system using SSH. returned: changed and success type: str sample: This is a corporate device. Connecting to it without... inactivity_timeout: description: - The number of seconds before inactivity causes an SSH session to log out. returned: changed type: int sample: 10 log_level: description: The minimum SSHD message level to include in the system log. returned: changed type: str sample: debug login: description: Specifies that the system accepts SSH communications or not. 
returned: changed type: bool sample: true port: description: Port that you want the SSH daemon to run on. returned: changed type: int sample: 22 ''' from ansible.module_utils.basic import AnsibleModule try: from library.module_utils.network.f5.bigip import F5RestClient from library.module_utils.network.f5.common import F5ModuleError from library.module_utils.network.f5.common import AnsibleF5Parameters from library.module_utils.network.f5.common import fq_name from library.module_utils.network.f5.common import f5_argument_spec from library.module_utils.network.f5.common import is_empty_list except ImportError: from ansible.module_utils.network.f5.bigip import F5RestClient from ansible.module_utils.network.f5.common import F5ModuleError from ansible.module_utils.network.f5.common import AnsibleF5Parameters from ansible.module_utils.network.f5.common import fq_name from ansible.module_utils.network.f5.common import f5_argument_spec from ansible.module_utils.network.f5.common import is_empty_list class Parameters(AnsibleF5Parameters): api_map = { 'bannerText': 'banner_text', 'inactivityTimeout': 'inactivity_timeout', 'logLevel': 'log_level', } api_attributes = [ 'allow', 'banner', 'bannerText', 'inactivityTimeout', 'logLevel', 'login', 'port', ] updatables = [ 'allow', 'banner', 'banner_text', 'inactivity_timeout', 'log_level', 'login', 'port', ] returnables = [ 'allow', 'banner', 'banner_text', 'inactivity_timeout', 'log_level', 'login', 'port', ] class ApiParameters(Parameters): pass class ModuleParameters(Parameters): @property def inactivity_timeout(self): if self._values['inactivity_timeout'] is None: return None return int(self._values['inactivity_timeout']) @property def port(self): if self._values['port'] is None: return None return int(self._values['port']) @property def allow(self): allow = self._values['allow'] if allow is None: return None if is_empty_list(allow): return [] return allow class Changes(Parameters): def to_return(self): result = {} try: for returnable in self.returnables: change = getattr(self, returnable) if isinstance(change, dict): result.update(change) else: result[returnable] = change result = self._filter_params(result) except Exception: pass return result class UsableChanges(Changes): pass class ReportableChanges(Changes): pass class Difference(object): def __init__(self, want, have=None): self.want = want self.have = have def compare(self, param): try: result = getattr(self, param) return result except AttributeError: return self.__default(param) def __default(self, param): attr1 = getattr(self.want, param) try: attr2 = getattr(self.have, param) if attr1 != attr2: return attr1 except AttributeError: return attr1 @property def allow(self): if self.want.allow is None: return None if not self.want.allow: if self.have.allow is None: return None if self.have.allow is not None: return self.want.allow if self.have.allow is None: return self.want.allow if set(self.want.allow) != set(self.have.allow): return self.want.allow @property def banner_text(self): if self.want.banner_text is None: return None if self.want.banner_text == '' and self.have.banner_text is None: return None if self.want.banner_text != self.have.banner_text: return self.want.banner_text class ModuleManager(object): def __init__(self, *args, **kwargs): self.module = kwargs.get('module', None) self.client = F5RestClient(**self.module.params) self.want = ModuleParameters(params=self.module.params) self.have = ApiParameters() self.changes = UsableChanges() def _update_changed_options(self): diff = 
Difference(self.want, self.have) updatables = Parameters.updatables changed = dict() for k in updatables: change = diff.compare(k) if change is None: continue else: if isinstance(change, dict): changed.update(change) else: changed[k] = change if changed: self.changes = UsableChanges(params=changed) return True return False def _announce_deprecations(self, result): warnings = result.pop('__warnings', []) for warning in warnings: self.client.module.deprecate( msg=warning['msg'], version=warning['version'] ) def exec_module(self): result = dict() changed = self.present() reportable = ReportableChanges(params=self.changes.to_return()) changes = reportable.to_return() result.update(**changes) result.update(dict(changed=changed)) self._announce_deprecations(result) return result def present(self): return self.update() def update(self): self.have = self.read_current_from_device() if not self.should_update(): return False if self.module.check_mode: return True self.update_on_device() return True def should_update(self): result = self._update_changed_options() if result: return True return False def update_on_device(self): params = self.changes.api_params() uri = "https://{0}:{1}/mgmt/tm/sys/sshd/".format( self.client.provider['server'], self.client.provider['server_port'], ) resp = self.client.api.patch(uri, json=params) try: response = resp.json() except ValueError as ex: raise F5ModuleError(str(ex)) if 'code' in response and response['code'] == 400: if 'message' in response: raise F5ModuleError(response['message']) else: raise F5ModuleError(resp.content) def read_current_from_device(self): uri = "https://{0}:{1}/mgmt/tm/sys/sshd/".format( self.client.provider['server'], self.client.provider['server_port'], ) resp = self.client.api.get(uri) try: response = resp.json() except ValueError as ex: raise F5ModuleError(str(ex)) if 'code' in response and response['code'] == 400: if 'message' in response: raise F5ModuleError(response['message']) else: raise F5ModuleError(resp.content) return ApiParameters(params=response) class ArgumentSpec(object): def __init__(self): self.choices = ['enabled', 'disabled'] self.levels = [ 'debug', 'debug1', 'debug2', 'debug3', 'error', 'fatal', 'info', 'quiet', 'verbose' ] self.supports_check_mode = True argument_spec = dict( allow=dict( type='list' ), banner=dict( choices=self.choices ), banner_text=dict(), inactivity_timeout=dict( type='int' ), log_level=dict( choices=self.levels ), login=dict( choices=self.choices ), port=dict( type='int' ) ) self.argument_spec = {} self.argument_spec.update(f5_argument_spec) self.argument_spec.update(argument_spec) def main(): spec = ArgumentSpec() module = AnsibleModule( argument_spec=spec.argument_spec, supports_check_mode=spec.supports_check_mode ) try: mm = ModuleManager(module=module) results = mm.exec_module() module.exit_json(**results) except F5ModuleError as ex: module.fail_json(msg=str(ex)) if __name__ == '__main__': main()
py
1a4a99ff3831aca881da4b4bcb65d192d2a3f9b5
# Copyright 2020 MONAI Consortium # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging import os import sys import tempfile from glob import glob import nibabel as nib import numpy as np import torch from torch.utils.data import DataLoader from torch.utils.tensorboard import SummaryWriter import monai from monai.data import ImageDataset, create_test_image_3d, decollate_batch from monai.inferers import sliding_window_inference from monai.metrics import DiceMetric from monai.transforms import Activations, AddChannel, AsDiscrete, Compose, RandRotate90, RandSpatialCrop, ScaleIntensity, EnsureType from monai.visualize import plot_2d_or_3d_image def main(tempdir): monai.config.print_config() logging.basicConfig(stream=sys.stdout, level=logging.INFO) # create a temporary directory and 40 random image, mask pairs print(f"generating synthetic data to {tempdir} (this may take a while)") for i in range(40): im, seg = create_test_image_3d(128, 128, 128, num_seg_classes=1) n = nib.Nifti1Image(im, np.eye(4)) nib.save(n, os.path.join(tempdir, f"im{i:d}.nii.gz")) n = nib.Nifti1Image(seg, np.eye(4)) nib.save(n, os.path.join(tempdir, f"seg{i:d}.nii.gz")) images = sorted(glob(os.path.join(tempdir, "im*.nii.gz"))) segs = sorted(glob(os.path.join(tempdir, "seg*.nii.gz"))) # define transforms for image and segmentation train_imtrans = Compose( [ ScaleIntensity(), AddChannel(), RandSpatialCrop((96, 96, 96), random_size=False), RandRotate90(prob=0.5, spatial_axes=(0, 2)), EnsureType(), ] ) train_segtrans = Compose( [ AddChannel(), RandSpatialCrop((96, 96, 96), random_size=False), RandRotate90(prob=0.5, spatial_axes=(0, 2)), EnsureType(), ] ) val_imtrans = Compose([ScaleIntensity(), AddChannel(), EnsureType()]) val_segtrans = Compose([AddChannel(), EnsureType()]) # define image dataset, data loader check_ds = ImageDataset(images, segs, transform=train_imtrans, seg_transform=train_segtrans) check_loader = DataLoader(check_ds, batch_size=10, num_workers=2, pin_memory=torch.cuda.is_available()) im, seg = monai.utils.misc.first(check_loader) print(im.shape, seg.shape) # create a training data loader train_ds = ImageDataset(images[:20], segs[:20], transform=train_imtrans, seg_transform=train_segtrans) train_loader = DataLoader(train_ds, batch_size=4, shuffle=True, num_workers=8, pin_memory=torch.cuda.is_available()) # create a validation data loader val_ds = ImageDataset(images[-20:], segs[-20:], transform=val_imtrans, seg_transform=val_segtrans) val_loader = DataLoader(val_ds, batch_size=1, num_workers=4, pin_memory=torch.cuda.is_available()) dice_metric = DiceMetric(include_background=True, reduction="mean", get_not_nans=False) post_trans = Compose([EnsureType(), Activations(sigmoid=True), AsDiscrete(threshold=0.5)]) # create UNet, DiceLoss and Adam optimizer device = torch.device("cuda" if torch.cuda.is_available() else "cpu") model = monai.networks.nets.UNet( spatial_dims=3, in_channels=1, out_channels=1, channels=(16, 32, 64, 128, 256), strides=(2, 2, 2, 2), num_res_units=2, ).to(device) loss_function = 
monai.losses.DiceLoss(sigmoid=True) optimizer = torch.optim.Adam(model.parameters(), 1e-3) # start a typical PyTorch training val_interval = 2 best_metric = -1 best_metric_epoch = -1 epoch_loss_values = list() metric_values = list() writer = SummaryWriter() for epoch in range(5): print("-" * 10) print(f"epoch {epoch + 1}/{5}") model.train() epoch_loss = 0 step = 0 for batch_data in train_loader: step += 1 inputs, labels = batch_data[0].to(device), batch_data[1].to(device) optimizer.zero_grad() outputs = model(inputs) loss = loss_function(outputs, labels) loss.backward() optimizer.step() epoch_loss += loss.item() epoch_len = len(train_ds) // train_loader.batch_size print(f"{step}/{epoch_len}, train_loss: {loss.item():.4f}") writer.add_scalar("train_loss", loss.item(), epoch_len * epoch + step) epoch_loss /= step epoch_loss_values.append(epoch_loss) print(f"epoch {epoch + 1} average loss: {epoch_loss:.4f}") if (epoch + 1) % val_interval == 0: model.eval() with torch.no_grad(): val_images = None val_labels = None val_outputs = None for val_data in val_loader: val_images, val_labels = val_data[0].to(device), val_data[1].to(device) roi_size = (96, 96, 96) sw_batch_size = 4 val_outputs = sliding_window_inference(val_images, roi_size, sw_batch_size, model) val_outputs = [post_trans(i) for i in decollate_batch(val_outputs)] # compute metric for current iteration dice_metric(y_pred=val_outputs, y=val_labels) # aggregate the final mean dice result metric = dice_metric.aggregate().item() # reset the status for next validation round dice_metric.reset() metric_values.append(metric) if metric > best_metric: best_metric = metric best_metric_epoch = epoch + 1 torch.save(model.state_dict(), "best_metric_model_segmentation3d_array.pth") print("saved new best metric model") print( "current epoch: {} current mean dice: {:.4f} best mean dice: {:.4f} at epoch {}".format( epoch + 1, metric, best_metric, best_metric_epoch ) ) writer.add_scalar("val_mean_dice", metric, epoch + 1) # plot the last model output as GIF image in TensorBoard with the corresponding image and label plot_2d_or_3d_image(val_images, epoch + 1, writer, index=0, tag="image") plot_2d_or_3d_image(val_labels, epoch + 1, writer, index=0, tag="label") plot_2d_or_3d_image(val_outputs, epoch + 1, writer, index=0, tag="output") print(f"train completed, best_metric: {best_metric:.4f} at epoch: {best_metric_epoch}") writer.close() if __name__ == "__main__": with tempfile.TemporaryDirectory() as tempdir: main(tempdir)
py
1a4a9afd5e10e293517905baa798ac1e958b8764
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from functools import partial
import re
import textwrap
from typing import (
    Any, Callable, NamedTuple, Optional, Dict, Sequence, Set, Type, TypeVar
)
import warnings

from jax._src.config import config
from jax._src import dtypes
from jax._src.lax import lax as lax_internal
from jax._src.numpy.ndarray import ndarray
from jax._src.util import safe_zip
from jax._src import api
from jax import core
from jax._src.lax import lax

import numpy as np

_T = TypeVar("_T")

_parameter_break = re.compile("\n(?=[A-Za-z_])")
_section_break = re.compile(r"\n(?=[^\n]{3,15}\n-{3,15})", re.MULTILINE)
_numpy_signature_re = re.compile(r'^([\w., ]+=)?\s*[\w\.]+\([\w\W]*?\)$', re.MULTILINE)
_versionadded = re.compile(r'^\s+\.\.\s+versionadded::', re.MULTILINE)
_docreference = re.compile(r':doc:`(.*?)\s*<.*?>`')


class ParsedDoc(NamedTuple):
  """
  docstr: full docstring
  signature: signature from docstring.
  summary: summary from docstring.
  front_matter: front matter before sections.
  sections: dictionary of section titles to section content.
  """
  docstr: Optional[str]
  signature: str = ""
  summary: str = ""
  front_matter: str = ""
  sections: Dict[str, str] = {}


def _parse_numpydoc(docstr: Optional[str]) -> ParsedDoc:
  """Parse a standard numpy-style docstring.
  Args:
    docstr: the raw docstring from a function

  Returns:
    ParsedDoc: parsed version of the docstring
  """
  if docstr is None or not docstr.strip():
    return ParsedDoc(docstr)

  # Remove any :doc: directives in the docstring to avoid sphinx errors
  docstr = _docreference.sub(
    lambda match: f"{match.groups()[0]}", docstr)

  signature, body = "", docstr
  match = _numpy_signature_re.match(body)
  if match:
    signature = match.group()
    body = docstr[match.end():]

  firstline, _, body = body.partition('\n')
  body = textwrap.dedent(body.lstrip('\n'))

  match = _numpy_signature_re.match(body)
  if match:
    signature = match.group()
    body = body[match.end():]

  summary = firstline
  if not summary:
    summary, _, body = body.lstrip('\n').partition('\n')
    body = textwrap.dedent(body.lstrip('\n'))

  front_matter = ""
  body = "\n" + body
  section_list = _section_break.split(body)
  if not _section_break.match(section_list[0]):
    front_matter, *section_list = section_list
  sections = {section.split('\n', 1)[0]: section for section in section_list}

  return ParsedDoc(docstr=docstr, signature=signature, summary=summary,
                   front_matter=front_matter, sections=sections)


def _parse_parameters(body: str) -> Dict[str, str]:
  """Parse the Parameters section of a docstring."""
  title, underline, content = body.split('\n', 2)
  assert title == 'Parameters'
  assert underline and not underline.strip('-')
  parameters = _parameter_break.split(content)
  return {p.partition(' : ')[0].partition(', ')[0]: p for p in parameters}


def _parse_extra_params(extra_params: str) -> Dict[str, str]:
  """Parse the extra parameters passed to _wraps()"""
  parameters = _parameter_break.split(extra_params.strip('\n'))
  return {p.partition(' : ')[0].partition(', ')[0]: p for p in parameters}


def _wraps(
    fun: Optional[Callable[..., Any]],
    update_doc: bool = True,
    lax_description: str = "",
    sections: Sequence[str] = ('Parameters', 'Returns', 'References'),
    skip_params: Sequence[str] = (),
    extra_params: Optional[str] = None,
) -> Callable[[_T], _T]:
  """Specialized version of functools.wraps for wrapping numpy functions.

  This produces a wrapped function with a modified docstring. In particular, if
  `update_doc` is True, parameters listed in the wrapped function that are not
  supported by the decorated function will be removed from the docstring. For
  this reason, it is important that parameter names match those in the original
  numpy function.

  Args:
    fun: The function being wrapped
    update_doc: whether to transform the numpy docstring to remove references of
      parameters that are supported by the numpy version but not the JAX version.
      If False, include the numpy docstring verbatim.
    lax_description: a string description that will be added to the beginning of
      the docstring.
    sections: a list of sections to include in the docstring. The default is
      ["Parameters", "Returns", "References"]
    skip_params: a list of strings containing names of parameters accepted by the
      function that should be skipped in the parameter list.
    extra_params: an optional string containing additional parameter descriptions.
      When ``update_doc=True``, these will be added to the list of parameter
      descriptions in the updated doc.
  """
  def wrap(op):
    docstr = getattr(fun, "__doc__", None)
    try:
      name = f"{fun.__module__}.{fun.__name__}"
    except AttributeError:
      name = getattr(fun, "__name__", getattr(op, "__name__", str(op)))
    if docstr:
      try:
        parsed = _parse_numpydoc(docstr)

        if update_doc and 'Parameters' in parsed.sections:
          code = getattr(getattr(op, "__wrapped__", op), "__code__", None)
          # Remove unrecognized parameter descriptions.
          parameters = _parse_parameters(parsed.sections['Parameters'])
          if extra_params:
            parameters.update(_parse_extra_params(extra_params))
          parsed.sections['Parameters'] = (
            "Parameters\n"
            "----------\n" +
            "\n".join(_versionadded.split(desc)[0].rstrip() for p, desc in parameters.items()
                      if (code is None or p in code.co_varnames) and p not in skip_params)
          )

        docstr = parsed.summary.strip() + "\n" if parsed.summary else ""
        docstr += f"\nLAX-backend implementation of :func:`{name}`.\n"
        if lax_description:
          docstr += "\n" + lax_description.strip() + "\n"
        docstr += "\n*Original docstring below.*\n"

        # We remove signatures from the docstrings, because they are redundant at best and
        # misleading at worst: e.g. JAX wrappers don't implement all ufunc keyword arguments.
        # if parsed.signature:
        #   docstr += "\n" + parsed.signature.strip() + "\n"

        if parsed.front_matter:
          docstr += "\n" + parsed.front_matter.strip() + "\n"
        kept_sections = (content.strip() for section, content in parsed.sections.items()
                         if section in sections)
        if kept_sections:
          docstr += "\n" + "\n\n".join(kept_sections) + "\n"
      except:
        if config.jax_enable_checks:
          raise
        docstr = fun.__doc__

    op.__doc__ = docstr
    op.__np_wrapped__ = fun
    for attr in ['__name__', '__qualname__']:
      try:
        value = getattr(fun, attr)
      except AttributeError:
        pass
      else:
        setattr(op, attr, value)
    return op
  return wrap


_dtype = partial(dtypes.dtype, canonicalize=True)


def _asarray(arr):
  """
  Pared-down utility to convert object to a DeviceArray.
  Note this will not correctly handle lists or tuples.
  """
  _check_arraylike("_asarray", arr)
  dtype, weak_type = dtypes._lattice_result_type(arr)
  return lax_internal._convert_element_type(arr, dtype, weak_type)


def _promote_shapes(fun_name, *args):
  """Apply NumPy-style broadcasting, making args shape-compatible for lax.py."""
  if len(args) < 2:
    return args
  else:
    shapes = [np.shape(arg) for arg in args]
    if all(len(shapes[0]) == len(s) for s in shapes[1:]):
      return args  # no need for rank promotion, so rely on lax promotion
    nonscalar_ranks = {len(shp) for shp in shapes if shp}
    if len(nonscalar_ranks) < 2:
      return args
    else:
      if config.jax_numpy_rank_promotion != "allow":
        _rank_promotion_warning_or_error(fun_name, shapes)
      if config.jax_dynamic_shapes:
        # With dynamic shapes we don't support singleton-dimension broadcasting;
        # we instead broadcast out to the full shape as a temporary workaround.
        res_shape = lax.broadcast_shapes(*shapes)
        return [_broadcast_to(arg, res_shape) for arg, shp in zip(args, shapes)]
      else:
        result_rank = len(lax.broadcast_shapes(*shapes))
        return [_broadcast_to(arg, (1,) * (result_rank - len(shp)) + shp)
                for arg, shp in zip(args, shapes)]


def _rank_promotion_warning_or_error(fun_name, shapes):
  if config.jax_numpy_rank_promotion == "warn":
    msg = ("Following NumPy automatic rank promotion for {} on shapes {}. "
           "Set the jax_numpy_rank_promotion config option to 'allow' to "
           "disable this warning; for more information, see "
           "https://jax.readthedocs.io/en/latest/rank_promotion_warning.html.")
    warnings.warn(msg.format(fun_name, ' '.join(map(str, shapes))))
  elif config.jax_numpy_rank_promotion == "raise":
    msg = ("Operands could not be broadcast together for {} on shapes {} "
           "and with the config option jax_numpy_rank_promotion='raise'. "
           "For more information, see "
           "https://jax.readthedocs.io/en/latest/rank_promotion_warning.html.")
    raise ValueError(msg.format(fun_name, ' '.join(map(str, shapes))))


def _promote_dtypes(*args):
  """Convenience function to apply Numpy argument dtype promotion."""
  # TODO(dougalm,mattjj): This is a performance bottleneck. Consider memoizing.
  if len(args) < 2:
    return args
  else:
    to_dtype, weak_type = dtypes._lattice_result_type(*args)
    to_dtype = dtypes.canonicalize_dtype(to_dtype)
    return [lax_internal._convert_element_type(x, to_dtype, weak_type)
            for x in args]


def _promote_dtypes_inexact(*args):
  """Convenience function to apply Numpy argument dtype promotion.

  Promotes arguments to an inexact type."""
  to_dtype, weak_type = dtypes._lattice_result_type(*args)
  to_dtype = dtypes.canonicalize_dtype(to_dtype)
  to_dtype_inexact = _to_inexact_dtype(to_dtype)
  weak_type = (weak_type and to_dtype == to_dtype_inexact)
  return [lax_internal._convert_element_type(x, to_dtype_inexact, weak_type)
          for x in args]


def _to_inexact_dtype(dtype):
  """Promotes a dtype into an inexact dtype, if it is not already one."""
  return dtype if dtypes.issubdtype(dtype, np.inexact) else dtypes.promote_types(dtype, dtypes.float_)


def _complex_elem_type(dtype):
  """Returns the float type of the real/imaginary parts of a complex dtype."""
  return np.abs(np.zeros((), dtype)).dtype


def _arraylike(x):
  return (isinstance(x, np.ndarray) or isinstance(x, ndarray) or
          hasattr(x, '__jax_array__') or np.isscalar(x))


def _stackable(*args):
  return all(type(arg) in stackables for arg in args)
stackables: Set[Type] = set()
_register_stackable: Callable[[Type], None] = stackables.add


def _check_arraylike(fun_name, *args):
  """Check if all args fit JAX's definition of arraylike."""
  assert isinstance(fun_name, str), f"fun_name must be a string. Got {fun_name}"
  if any(not _arraylike(arg) for arg in args):
    pos, arg = next((i, arg) for i, arg in enumerate(args)
                    if not _arraylike(arg))
    msg = "{} requires ndarray or scalar arguments, got {} at position {}."
    raise TypeError(msg.format(fun_name, type(arg), pos))


def _check_no_float0s(fun_name, *args):
  """Check if none of the args have dtype float0."""
  if any(dtypes.dtype(arg) is dtypes.float0 for arg in args):
    raise TypeError(
        f"Called {fun_name} with a float0 array. "
        "float0s do not support any operations by design because they "
        "are not compatible with non-trivial vector spaces. No implicit dtype "
        "conversion is done. You can use np.zeros_like(arr, dtype=np.float) "
        "to cast a float0 array to a regular zeros array. \n"
        "If you didn't expect to get a float0 you might have accidentally "
        "taken a gradient with respect to an integer argument.")


def _promote_args(fun_name, *args):
  """Convenience function to apply Numpy argument shape and dtype promotion."""
  _check_arraylike(fun_name, *args)
  _check_no_float0s(fun_name, *args)
  return _promote_shapes(fun_name, *_promote_dtypes(*args))


def _promote_args_inexact(fun_name, *args):
  """Convenience function to apply Numpy argument shape and dtype promotion.
  Promotes non-inexact types to an inexact type."""
  _check_arraylike(fun_name, *args)
  _check_no_float0s(fun_name, *args)
  return _promote_shapes(fun_name, *_promote_dtypes_inexact(*args))


@partial(api.jit, inline=True)
def _broadcast_arrays(*args):
  """Like Numpy's broadcast_arrays but doesn't return views."""
  shapes = [np.shape(arg) for arg in args]
  if not shapes or all(core.symbolic_equal_shape(shapes[0], s) for s in shapes):
    # TODO(mattjj): remove the array(arg) here
    return [arg if isinstance(arg, ndarray) or np.isscalar(arg) else _asarray(arg)
            for arg in args]
  result_shape = lax.broadcast_shapes(*shapes)
  return [_broadcast_to(arg, result_shape) for arg in args]


def _broadcast_to(arr, shape):
  if hasattr(arr, "broadcast_to"):
    return arr.broadcast_to(shape)
  _check_arraylike("broadcast_to", arr)
  arr = arr if isinstance(arr, ndarray) else _asarray(arr)
  if not isinstance(shape, tuple) and np.ndim(shape) == 0:
    shape = (shape,)
  shape = core.canonicalize_shape(shape)  # check that shape is concrete
  arr_shape = np.shape(arr)
  if core.symbolic_equal_shape(arr_shape, shape):
    return arr
  else:
    nlead = len(shape) - len(arr_shape)
    shape_tail = shape[nlead:]
    compatible = all(core.symbolic_equal_one_of_dim(arr_d, [1, shape_d])
                     for arr_d, shape_d in safe_zip(arr_shape, shape_tail))
    if nlead < 0 or not compatible:
      msg = "Incompatible shapes for broadcasting: {} and requested shape {}"
      raise ValueError(msg.format(arr_shape, shape))
    diff, = np.where(tuple(not core.symbolic_equal_dim(arr_d, shape_d)
                           for arr_d, shape_d in safe_zip(arr_shape, shape_tail)))
    new_dims = tuple(range(nlead)) + tuple(nlead + diff)
    kept_dims = tuple(np.delete(np.arange(len(shape)), new_dims))
    return lax.broadcast_in_dim(lax.squeeze(arr, tuple(diff)), shape, kept_dims)


# The `jit` on `where` exists to avoid materializing constants in cases like
# `np.where(np.zeros(1000), 7, 4)`. In op-by-op mode, we don't want to
# materialize the broadcast forms of scalar arguments.
@api.jit
def _where(condition, x=None, y=None):
  if x is None or y is None:
    raise ValueError("Either both or neither of the x and y arguments should "
                     "be provided to jax.numpy.where, got {} and {}."
                     .format(x, y))
  if not np.issubdtype(_dtype(condition), np.bool_):
    condition = lax.ne(condition, lax_internal._zero(condition))
  x, y = _promote_dtypes(x, y)
  condition, x, y = _broadcast_arrays(condition, x, y)
  try:
    is_always_empty = core.is_empty_shape(np.shape(x))
  except:
    is_always_empty = False  # can fail with dynamic shapes
  return lax.select(condition, x, y) if not is_always_empty else x
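
# ---------------------------------------------------------------------------
# Illustrative-only demo (not part of the original module): a quick check of
# how ``_parse_numpydoc`` splits a numpy-style docstring into a signature,
# summary, front matter, and sections. The sample docstring is hypothetical.
if __name__ == "__main__":
  _sample = """add(x, y)

  Add two arrays elementwise.

  Some front matter.

  Parameters
  ----------
  x : array_like
      first operand
  y : array_like
      second operand

  Returns
  -------
  out : ndarray
  """
  _parsed = _parse_numpydoc(_sample)
  print(_parsed.signature)         # add(x, y)
  print(_parsed.summary)           # Add two arrays elementwise.
  print(sorted(_parsed.sections))  # ['Parameters', 'Returns']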
py
1a4a9c98ad983317bf387bbb0b2b9fe81f57721c
import pickle
import os

if os.path.exists('db.pkl'):
    print('yes')
else:
    print('no')

# with open('db.pkl', 'rb') as f:
#     pass
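
# A minimal sketch (not in the original snippet) of what the commented-out
# block above might become, assuming 'db.pkl' holds one pickled object
# (hypothetical schema):
def load_db(path='db.pkl'):
    if not os.path.exists(path):
        return None
    with open(path, 'rb') as f:
        return pickle.load(f)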
py
1a4a9cc3001e79a6862f86f3210b9a7be5e017c5
__author__ = 'Sergei'

from model.contact import Contact


def test_contact_new(app):
    old_contact = app.contact.get_contact_list()
    contacts = Contact(first_n="first", mid_n="middle", last_n="last", nick_n="kuk",
                       company="adda", address="575 oiweojdckjgsd,russia",
                       home_ph="12134519827", cell_ph="120092340980", email="[email protected]")
    app.contact.create_c(contacts)
    assert len(old_contact) + 1 == app.contact.count_first()
    new_contact = app.contact.get_contact_list()
    old_contact.append(contacts)
    assert sorted(old_contact, key=Contact.id_or_max) == sorted(new_contact, key=Contact.id_or_max)
py
1a4a9db64e1ef93a4d6cf66a0f92bfb7a5a4ad56
#     Copyright 2021, Kay Hayen, mailto:[email protected]
#
#     Python tests originally created or extracted from other peoples work. The
#     parts were too small to be protected.
#
#     Licensed under the Apache License, Version 2.0 (the "License");
#     you may not use this file except in compliance with the License.
#     You may obtain a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#     Unless required by applicable law or agreed to in writing, software
#     distributed under the License is distributed on an "AS IS" BASIS,
#     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#     See the License for the specific language governing permissions and
#     limitations under the License.
#
from __future__ import print_function

a = 3
b = 7
c = [7, 8]
d = 15

print("+", a + b)
print("-", a - b)
print("*", a * b)
print("/", a / b)
print("//", a // b)
print("%", b % a)
print("& (2)", a & b)
print("| (2)", a | b)
print("& (3)", a & b & d)
print("| (3)", a | b | d)
print("^ (2)", a ^ b)
print("^ (3)", a ^ b ^ d)
print("**", a ** b)
print("<<", a << b)
print(">>", b >> a)
print("in", b in c)
print("not in", b not in c)
print("<", a < b)
print(">", a > b)
print("==", a == b)
print("<=", a <= b)
print(">=", a >= b)
print("!=", a != b)
print("is", a is b)
print("is not", a is not b)
print("~", ~b)
print("-", -b)
print("+", +b)

l = {("a", "c"): "a,c", "b": 2, "c": 3, "d": 4}
l["l",] = "6"

print("Extended slicing:")
print("Should be a,c:", l["a", "c"])

print("Short form of extended slicing:")

d = {}
# d[1] = 1
d[1,] = 2
d[1, 2] = 3
d[1, 2, 3] = 4
L = list(d)
L.sort()
print(L)

s = "Some information"
ss = s[-1]

print("Constant subscript of string", ss)

print("Slicing on a list:")

l = [1, 3, 5, 7, 11, 13, 17]
print(l[None:None])

n = None
print(l[n:n])
print(l[3:n])
print(l[n:3])

value = None
try:
    x = value[1]
except Exception as e:
    print("Indexing None gives", repr(e))
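
# Quick sanity check (not in the original test): under Python 3, with a = 3
# and b = 7 the first six prints yield 10, -4, 21, 0.42857142857142855, 0,
# and 1 respectively ("/" differs under Python 2's integer division).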
py
1a4a9e49fc571c25cd03966f7bd478144a1df017
"""
===============================================
vidgear library source-code is deployed under the Apache 2.0 License:

Copyright (c) 2019-2020 Abhishek Thakur(@abhiTronix) <[email protected]>

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===============================================
"""
# import the necessary packages

import os
import sys
import pytest
import platform
import logging as log
import tempfile

from vidgear.gears import VideoGear
from vidgear.gears.helper import logger_handler

# define test logger
logger = log.getLogger("Test_videogear")
logger.propagate = False
logger.addHandler(logger_handler())
logger.setLevel(log.DEBUG)

# define machine os
_windows = True if os.name == "nt" else False


def return_testvideo_path():
    """
    returns Test video path
    """
    path = "{}/Downloads/Test_videos/BigBuckBunny_4sec.mp4".format(
        tempfile.gettempdir()
    )
    return os.path.abspath(path)


@pytest.mark.skipif((platform.system() != "Linux"), reason="Not Implemented")
def test_PiGear_import():
    """
    Testing VideoGear Import -> expected to fail when the PiGear class is imported
    """
    # cleanup environment
    try:
        del sys.modules["picamera"]
        del sys.modules["picamera.array"]
    except KeyError:
        pass
    try:
        stream = VideoGear(enablePiCamera=True, logging=True).start()
        stream.stop()
    except Exception as e:
        if isinstance(e, ImportError):
            pytest.xfail(str(e))
        else:
            pytest.fail(str(e))


# Video credit: http://www.liushuaicheng.org/CVPR2014/index.html
test_data = [
    (
        "https://raw.githubusercontent.com/abhiTronix/Imbakup/master/Images/example4_train_input.mp4",
        {
            "SMOOTHING_RADIUS": 5,
            "BORDER_SIZE": 10,
            "BORDER_TYPE": "replicate",
            "CROP_N_ZOOM": True,
        },
    ),
    (
        "https://raw.githubusercontent.com/abhiTronix/Imbakup/master/Images/example_empty_train_input.mp4",
        {
            "SMOOTHING_RADIUS": 5,
            "BORDER_SIZE": 15,
            "BORDER_TYPE": "reflect",
        },
    ),
    (
        "https://raw.githubusercontent.com/abhiTronix/Imbakup/master/Images/example4_train_input.mp4",
        {
            "SMOOTHING_RADIUS": "5",
            "BORDER_SIZE": "15",
            "BORDER_TYPE": ["reflect"],
            "CROP_N_ZOOM": "yes",
        },
    ),
    (return_testvideo_path(), {"BORDER_TYPE": "im_wrong"}),
]


@pytest.mark.parametrize("source, options", test_data)
def test_video_stablization(source, options):
    """
    Testing VideoGear's video stabilization playback capabilities
    """
    try:
        # open stream
        stab_stream = VideoGear(
            source=source, stabilize=True, logging=True, **options
        ).start()
        framerate = stab_stream.framerate
        # playback
        while True:
            frame = stab_stream.read()  # read stabilized frames
            if frame is None:
                break
        # clean resources
        stab_stream.stop()
        logger.debug("Input Framerate: {}".format(framerate))
        assert framerate > 0
    except Exception as e:
        pytest.fail(str(e))
py
1a4a9f20279a2992543c1232c390271408c15cb8
from django.apps import AppConfig


class SuburbConfig(AppConfig):
    name = 'suburb'
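
# Note (not in the original file): for Django to pick up this configuration,
# the project settings would typically reference it, e.g.:
#
#   INSTALLED_APPS = [
#       # ...
#       'suburb.apps.SuburbConfig',  # or simply 'suburb'
#   ]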
py
1a4a9f7490de4bf2b64264de12651a7a16447a2d
# Write a program that reads an integer and computes the sum of all of its
# divisors except the number itself. E.g. the sum of the divisors of 66 is
# 1+2+3+6+11+22+33 = 78.
divisores = []  # divisors found so far
texto = []      # text fragments used to display the sum
n = int(input("Enter a number: "))
texto.append('1')
divisores.append(1)
for c in range(2, n):
    if n % c == 0:
        divisores.append(c)
        texto.append(f'+ {c}')
print(f"The sum of the divisors is {sum(divisores)} = {' '.join(texto)}")
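
# Example run (from the problem statement above): entering 66 prints
# "The sum of the divisors is 78 = 1 + 2 + 3 + 6 + 11 + 22 + 33".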
py
1a4aa09fd1b01b9a3965bd75f6845894ec618c30
import unittest
from pathlib import Path

from taskcat import Config
from taskcat.testing._unit_test import UnitTest
from taskcat.testing.base_test import BaseTest


class TestUnitTest(unittest.TestCase):

    BaseTest.__abstractmethods__ = set()

    @classmethod
    def setUpClass(cls):
        input_file = ".taskcat.yml"
        project_root_path = Path(__file__).parent / "../data/nested-fail"
        input_file_path = project_root_path / input_file
        cls.base_config = Config.create(
            project_root=project_root_path,
            project_config_path=input_file_path,
        )

    def test_methods(self):
        test = UnitTest(self.base_config)

        with self.assertRaises(NotImplementedError):
            test.run()

        with self.assertRaises(NotImplementedError):
            test.clean_up()

    def test_inheritance(self):
        test = UnitTest(self.base_config)
        self.assertIsInstance(test, BaseTest)
py
1a4aa15f87280ea733fff3e4206941aefa90db1b
"""
This file offers the methods to automatically retrieve the graph Eubacterium sp. AB3007.

The graph is automatically retrieved from the STRING repository.

References
---------------------
Please cite the following if you use the data:

```bib
@article{szklarczyk2019string,
    title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
    author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
    journal={Nucleic acids research},
    volume={47},
    number={D1},
    pages={D607--D613},
    year={2019},
    publisher={Oxford University Press}
}
```
"""
from typing import Dict

from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen import Graph  # pylint: disable=import-error


def EubacteriumSpAb3007(
    directed: bool = False,
    preprocess: bool = True,
    load_nodes: bool = True,
    verbose: int = 2,
    cache: bool = True,
    cache_path: str = "graphs/string",
    version: str = "links.v11.5",
    **additional_graph_kwargs: Dict
) -> Graph:
    """Return new instance of the Eubacterium sp. AB3007 graph.

    The graph is automatically retrieved from the STRING repository.

    Parameters
    -------------------
    directed: bool = False
        Whether to load the graph as directed or undirected.
        By default false.
    preprocess: bool = True
        Whether to preprocess the graph to be loaded in
        optimal time and memory.
    load_nodes: bool = True,
        Whether to load the nodes vocabulary or treat the nodes
        simply as a numeric range.
    verbose: int = 2,
        Whether to show loading bars during the retrieval and building
        of the graph.
    cache: bool = True
        Whether to use cache, i.e. download files only once
        and preprocess them only once.
    cache_path: str = "graphs"
        Where to store the downloaded graphs.
    version: str = "links.v11.5"
        The version of the graph to retrieve.
        The available versions are:
            - homology.v11.0
            - homology.v11.5
            - physical.links.v11.0
            - physical.links.v11.5
            - links.v11.0
            - links.v11.5
    additional_graph_kwargs: Dict
        Additional graph kwargs.

    Returns
    -----------------------
    Instance of Eubacterium sp. AB3007 graph.

    References
    ---------------------
    Please cite the following if you use the data:

    ```bib
    @article{szklarczyk2019string,
        title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
        author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
        journal={Nucleic acids research},
        volume={47},
        number={D1},
        pages={D607--D613},
        year={2019},
        publisher={Oxford University Press}
    }
    ```
    """
    return AutomaticallyRetrievedGraph(
        graph_name="EubacteriumSpAb3007",
        repository="string",
        version=version,
        directed=directed,
        preprocess=preprocess,
        load_nodes=load_nodes,
        verbose=verbose,
        cache=cache,
        cache_path=cache_path,
        additional_graph_kwargs=additional_graph_kwargs
    )()
py
1a4aa2d12cb60b63b4d4b26c4d93d2bc2a582cd5
#! /usr/bin/env python
try:
    import builtins
except ImportError:  # Python 2
    import __builtin__ as builtins
from io import StringIO
import glob
import os
import shutil
import sys
sys.path.append(os.path.dirname(sys.path[0]))
import AUTOutil
import bifDiag
import parseB
import parseBandS
import parseC
import parseH
import parseS

SIMPLE = 0
EXPERT = 1

import AUTOExceptions

#############################################
#  commands
#############################################
def command(f,*args,**kw):
    # This is a class factory that produces a class that can be used
    # to make macros of commands.
    class cmd(object):
        if len(args) == 2:
            type = args[0]
            shortName = args[1]
        alias = kw.get("alias",[])
        fun = staticmethod(f)
        __doc__ = f.__doc__
        def __init__(self,*args,**kw):
            self.args = args
            self.kw = kw
        # The call function must return something that you
        # can call the "print" function on
        def __call__(self):
            return self.fun(*self.args,**self.kw)
        def undo(self):
            raise Exception("Undo undefined for this command")
    return cmd

##############################################
#  Generic Commands
##############################################

def macro(command_list):
    for command in command_list:
        command()
commandMacro = command(macro)

# info messages: override this function or sys.stdout to redirect
def info(s):
    sys.stdout.write(s)

# interact with a .exe file
def interact(command,*args):
    if not os.path.exists(command):
        command = command + '.exe'
    fullcmd = " ".join([command]+list(args))
    if os.spawnv(os.P_WAIT,command, (os.path.basename(command),) + args) != 0:
        raise AUTOExceptions.AUTORuntimeError("Error running %s" % fullcmd)
    info("Finished running: " + fullcmd + "\n")

##############################################
#  Script based commands from $AUTO_DIR/97/cmds
##############################################
def clean():
    """Clean the current directory.

    Type FUNC() to clean the current directory. This command will
    delete all files of the form fort.*, *.*~, *.o, and *.exe.
    """
    toclean = (glob.glob("fort.*") + glob.glob("*.o") + glob.glob("*.exe") +
               glob.glob("*.*~"))
    # remove duplicates
    files = []
    for f in toclean:
        if f not in files:
            files.append(f)
    for f in files:
        os.remove(f)
    info("Deleting fort.* *.o *.exe *.*~ ... done\n")
commandClean = command(clean,alias=['cl'])

def copydemo(name):
    """Copy a demo into the current directory.

    Type FUNC('xxx') to copy all files from auto/07p/demos/xxx to the
    current user directory. Here 'xxx' denotes a demo name; e.g., 'abc'.
    To avoid the overwriting of existing files, always run demos in a
    clean work directory.
    """
    demodir = os.path.join(os.environ["AUTO_DIR"],"demos",name)
    for f in glob.glob(os.path.join(demodir,"*")):
        if os.path.isdir(f):
            subdir = f[len(demodir)+len(os.sep):]
            try:
                os.remove(subdir)
            except OSError:
                pass
            try:
                os.mkdir(subdir)
            except OSError:
                pass
            for f2 in glob.glob(os.path.join(f,"*")):
                try:
                    shutil.copy(f2, subdir)
                except IOError:
                    pass
        try:
            shutil.copy(f, ".")
        except IOError:
            pass
    if (os.path.exists(os.path.join(demodir,"c.%s.1"%name)) and
            not os.path.exists(os.path.join(demodir,"c.%s"%name))):
        shutil.copy("c.%s.1"%name,"c.%s"%name)
    info("Copying demo %s ... done\n"%name)
commandCopyDemo = command(copydemo,SIMPLE,"demo")

def demo(name,runner=None):
    """Copy a demo into the current directory and load it.

    Type FUNC('xxx') to copy all files from auto/07p/demos/xxx to the
    current user directory. Here 'xxx' denotes a demo name; e.g., 'abc'.
    To avoid the overwriting of existing files, always run demos in a
    clean work directory.
NOTE: This command automatically performs the load command as well. """ runner = withrunner(runner) lst = [commandCopyDemo(name)] slash = name.rfind("/") if slash != -1: name = name[slash+1:] lst.append(commandRunnerLoadName(name,runner)) return macro(lst) commandCopyAndLoadDemo = command(demo,alias=['dm']) def df(): """Clear the current directory of fort files. Type FUNC() to clean the current directory. This command will delete all files of the form fort.*. """ toclean = glob.glob("fort.*") for f in toclean: os.remove(f) info("Deleting fort.* ... done\n") commandDeleteFortFiles = command(df,alias=['deletefort']) def us(name,templates=None): """Convert user-supplied data files. Type FUNC('xxx') to convert a user-supplied data file 'xxx.dat' to AUTO format. The converted file is called 's.dat'. The original file is left unchanged. AUTO automatically sets the period in PAR(11). Other parameter values must be set in 'STPNT'. (When necessary, PAR(11) may also be redefined there.) The constants-file file 'c.xxx' must be present, as the AUTO-constants 'NTST' and 'NCOL' are used to define the new mesh. For examples of using the 'userData' command see demos 'lor' and 'pen' (where it has the old name 'fc'). Note: this technique has been obsoleted by the 'dat' AUTO constant. """ info("Starting conversion of %s.dat : \n"%name) if glob.glob("%s.f90"%name) == []: if glob.glob("%s.f"%name) == []: equation_file="%s.c"%name else: equation_file="%s.f"%name else: equation_file="%s.f90"%name cfile = applyTemplate(name,"constants",templates) datfile = "%s.dat"%name info("(Required files : %s, %s, %s)\n"%(equation_file,cfile, datfile)) from . import runAUTO fconrun = runAUTO.runAUTO(makefile="$AUTO_DIR/cmds/cmds.make fcon") fconrun.config(e=name) fconrun.runMakefile(name) if os.path.exists(cfile): shutil.copy(cfile,"fort.2") if os.path.exists(datfile): shutil.copy(datfile,"fort.3") interact("./fcon") sfile = applyTemplate("dat","solution",templates) if os.path.exists("fort.8"): if os.path.exists(sfile): os.remove(sfile) os.rename("fort.8",sfile) info("Conversion done : converted file saved as %s\n"%sfile) files = glob.glob("fcon*") + ["fort.2", "fort.3"] for f in files: os.remove(f) commandUserData = command(us,alias=['userdata']) ############################################## # Commands which use the filename templates ############################################## def applyTemplate(text,template,templates=None): if templates is None: templates = {} templates["equation"] = "EQUATION_NAME=%s" templates["constants"] = "c.%s" templates["bifurcationDiagram"] = "b.%s" templates["solution"] = "s.%s" templates["diagnostics"] = "d.%s" templates["homcont"] = "h.%s" if text is None: return None elif type(text) in [type(""), type(1), type(1.0)]: rval = templates[template]%text tmp = glob.glob(rval) if len(tmp) > 0: rval = "" for x in tmp: rval = rval + x + " " rval = rval.strip() return rval else: return text def filenameTemplate(name=None,templates=None): name1={} name1["constants"] = applyTemplate(name,"constants",templates) name1["bifurcationDiagram"] = applyTemplate(name,"bifurcationDiagram",templates) name1["solution"] = applyTemplate(name,"solution",templates) name1["diagnostics"] = applyTemplate(name,"diagnostics",templates) return name1 def relabel(name1=None,name2=None,templates=None): """Relabel data files. Type y=FUNC(x) to return the python object x, with the solution labels sequentially relabelled starting at 1, as a new object y. Type FUNC('xxx') to relabel s.xxx and b.xxx. 
Backups of the original files are saved. Type FUNC('xxx','yyy') to relabel the existing data-files s.xxx and b.xxx, and save them to s.yyy and b.yyy; d.xxx is copied to d.yyy. """ typen = type(name1) if type(name1) == type(""): name1 = filenameTemplate(name1,templates) name2 = filenameTemplate(name2,templates) if typen != type("") and typen != type(None): data = name1.relabel() info("Relabeling done\n") return data n1b = name1["bifurcationDiagram"] n1s = name1["solution"] n1d = name1["diagnostics"] if n1b is None and n1s is None and n1d is None: n1b, n1s, n1d = "fort.7", "fort.8", "fort.9" if name2["bifurcationDiagram"] is None: n2b = n1b+'~~' n2s = n1s+'~~' n2d = n1d+'~~' else: n2b = name2["bifurcationDiagram"] n2s = name2["solution"] n2d = name2["diagnostics"] from . import relabel relabel.relabel(n1b,n1s,n2b,n2s) if os.access(n2b,os.F_OK): if name2["bifurcationDiagram"] is None: # Save backups if os.access(n1b+'~',os.F_OK): os.remove(n1b+'~') os.rename(n1b,n1b+'~') os.rename(n2b,n1b) if os.access(n1s+'~',os.F_OK): os.remove(n1s+'~') os.rename(n1s,n1s+'~') os.rename(n2s,n1s) elif os.path.exists(n1d): shutil.copy(n1d, n2d) info("Relabeling succeeded\n") info("Relabeling done\n") commandRelabel = command(relabel,SIMPLE,"relabel",alias=['rl']) def merge(name1=None,name2=None,templates=None): """Merge branches in data files. Type y=FUNC(x) to return the python object x, with its branches merged into continuous curves, as a new object y. Type FUNC('xxx') to merge branches in s.xxx, b.xxx, and d.xxx. Backups of the original files are saved. Type FUNC('xxx','yyy') to merge branches in the existing data-files s.xxx, b.xxx, and d.xxx and save them to s.yyy, b.yyy, and d.yyy. """ ntype = type(name1) if type(name1) == type(""): name1 = filenameTemplate(name1,templates) name2 = filenameTemplate(name2,templates) if ntype != type("") and ntype != type(None): data = name1.merge() info("Merge done\n") return data n1b = name1["bifurcationDiagram"] n1s = name1["solution"] n1d = name1["diagnostics"] if n1b is None and n1s is None and n1d is None: n1b, n1s, n1d = "fort.7", "fort.8", "fort.9" bd = bifDiag.bifDiag(n1b,n1s,n1d) bd = bd.merge() if name2["bifurcationDiagram"] is None: n2b = n1b+'~~' n2s = n1s+'~~' n2d = n1d+'~~' else: n2b = name2["bifurcationDiagram"] n2s = name2["solution"] n2d = name2["diagnostics"] bd.writeFilename(n2b,n2s,n2d) if os.access(n2b,os.F_OK): if name2["bifurcationDiagram"] is None: # Save backups for [n1,n2] in [[n1b,n2b],[n1s,n2s],[n1d,n2d]]: if os.access(n1+'~',os.F_OK): os.remove(n1+'~') os.rename(n1,n1+'~') os.rename(n2,n1) info("Merging succeeded\n") info("Merging done\n") commandMergeBranches = command(merge,SIMPLE,"merge",alias=['mb']) def subtract(name1,name2,col,branch=1,point=1,templates=None): """Subtract branches in data files. Type z=FUNC(x,y,ref) to return the python object x, where, using interpolation, the first branch in y is subtracted from all branches in x, as a new object z. Use 'ref' (e.g., 'PAR(1)') as the reference column in y (only the first monotonically increasing or decreasing part is used). Type FUNC('xxx','yyy','ref') to subtract, using interpolation, the first branch in b.yyy from all branches in b.xxx, and save the result in b.xxx. A Backup of the original file is saved. Use optional arguments branch=m, and point=n, to denote the branch and first point on that branch within y or 'b.yyy', where m,n are in {1,2,3,...}. 
""" ntype = type(name1) if type(name1) == type(""): name1 = filenameTemplate(name1,templates) name2 = filenameTemplate(name2,templates) if ntype != type(""): sub = name1.subtract(name2[branch-1],col,point) info("Subtracting done\n") return sub else: n1b = name1["bifurcationDiagram"] bd1 = bifDiag.bifDiag(n1b) n2b = name2["bifurcationDiagram"] if n1b == n2b: bd2 = bd1 else: bd2 = bifDiag.bifDiag(n2b) sub = bd1.subtract(bd2[branch-1],col,point) shutil.copy(n1b,n1b+'~') sub.writeFilename(n1b,'') info("Subtracting done\n") commandSubtractBranches = command(subtract,SIMPLE,"subtract",alias=['sb']) def append(name1,name2=None,templates=None): """Append data files. Type FUNC(x,'xxx') to append bifurcation diagram x to the data-files b.xxx, s.xxx, and d.xxx. This is equivalent to the command save(x+load('xxx'),'xxx') Type FUNC('xxx',x) to append existing data-files s.xxx, b.xxx, and d.xxx to bifurcation diagram x. This is equivalent to the command x=load('xxx')+x Type FUNC('xxx') to append the output-files fort.7, fort.8, fort.9, to existing data-files s.xxx, b.xxx, and d.xxx. Type FUNC('xxx','yyy') to append existing data-files s.xxx, b.xxx, and d.xxx to data-files s.yyy, b.yyy, and d.yyy. """ parsed1=None parsed2=None if isinstance(name1, bifDiag.bifDiag): parsed1=name1 name1=name2 name2=None if isinstance(name1, bifDiag.bifDiag): parsed2=name1 else: name1 = filenameTemplate(name1,templates) name2 = filenameTemplate(name2,templates) if parsed1 or parsed2: n = None if not parsed1 or not parsed2: nb = name1["bifurcationDiagram"] ns = name1["solution"] nd = name1["diagnostics"] if parsed2: #append to parsed2 if not parsed1: parsed1 = bifDiag.bifDiag(nb,ns,nd) info("Appending from %s, %s and %s ... done\n"%(nb,ns,nd)) parsed2.extend(parsed1) return if parsed1: #append from parsed1 to file parsed1.writeFilename(nb,ns,nd,append=True) info("Appending to %s, %s and %s ... done\n"%(nb,ns,nd)) return i = 7 for s in ["bifurcationDiagram","solution","diagnostics"]: n1 = name1[s] n2 = name2[s] if n2 is None: n2 = n1 n1 = "fort."+str(i) i = i+1 try: f1 = open(n1,"rb") f2 = open(n2,"ab") while 1: buf = f1.read(1024*1024) if len(buf) == 0: break f2.write(buf) f1.close() f2.close() info("Appending %s to %s ... done\n"%(n1,n2)) except IOError: info("Appending %s to %s: %s\n"%(n1,n2,sys.exc_info()[1])) commandAppend = command(append,SIMPLE,"append",alias=['ap']) def dirfilenames(name1,name2,name3,name4): """Convert arguments to directories and names for copy() and move()""" dir1 = "" dir2 = "" if os.path.isdir(name1): dir1 = name1 name1 = name2 if name4 is not None: dir2 = name3 name2 = name4 elif name3 is not None: name2 = name3 elif os.path.isdir(name2): dir2 = name2 if name3 is not None: name2 = name3 else: name2 = name1 return dir1,name1,dir2,name2 def copy(name1,name2,name3=None,name4=None,templates=None): """Copy data files. Type FUNC(name1,name2) or FUNC(name1,name2,name3) or FUNC(name1,name2,name3,name4). Copy the data-files dir1/c.xxx, dir1/b.xxx, dir1/s.xxx, and dir1/d.xxx to dir2/c.yyy, dir2/b.yyy, dir2/s.yyy, and dir2/d.yyy. 
The values of dir1/?.xxx and dir2/?.yyy are as follows, depending on whether name1 is a directory or name2 is a directory: FUNC(name1,name2) no directory names: ./?.name1 and ./?.name2 name1 is a directory: name1/?.name2 and ./?.name2 name2 is a directory: ./?.name1 and name2/?.name1 FUNC(name1,name2,name3) name1 is a directory: name1/?.name2 and ./?.name3 name2 is a directory: ./?.name1 and name2/?.name3 FUNC(name1,name2,name3,name4) name1/?.name2 and name3/?.name4 """ dir1, name1, dir2, name2 = dirfilenames(name1,name2,name3,name4) names1 = filenameTemplate(name1,templates) names2 = filenameTemplate(name2,templates) done = False for s in ["bifurcationDiagram","solution","diagnostics","constants"]: n1 = os.path.join(dir1,names1[s]) n2 = os.path.join(dir2,names2[s]) if os.path.exists(n1): shutil.copy(n1,n2) info("Copying %s to %s ... done\n"%(n1,n2)) done = True if not done: raise AUTOExceptions.AUTORuntimeError( "Copying: no files found for %s and %s"%( os.path.join(dir1,"[bsdc]."+name1), os.path.join(dir2,"[bsdc]."+name2))) commandCopyDataFiles = command(copy,alias=['cp']) def save(name1,name2=None,templates=None): """Save data files. Type FUNC(x,'xxx') to save bifurcation diagram x to the files b.xxx, s.xxx, d.xxx. Existing files with these names will be overwritten. If x is a solution, a list of solutions, or does not contain any bifurcation diagram or diagnostics data, then only the file s.xxx is saved to. Type FUNC('xxx') to save the output-files fort.7, fort.8, fort.9, to b.xxx, s.xxx, d.xxx. Existing files with these names will be overwritten. """ parsed = None if not name2 is None: parsed = name1 name1 = name2 name1 = filenameTemplate(name1,templates) for s in ["bifurcationDiagram","solution","diagnostics"]: n1 = name1[s] if os.path.exists(n1): shutil.copy(n1,n1+'~') if parsed: n1b = name1["bifurcationDiagram"] n1s = name1["solution"] n1d = name1["diagnostics"] if (type(parsed) == type([]) and isinstance(parsed[0], parseB.AUTOBranch)): parsed = bifDiag.bifDiag(parsed) if (isinstance(parsed,bifDiag.bifDiag) and len(parsed) > 0 and len(parsed[0]) > 0): parsed.writeFilename(n1b,n1s,n1d) msg = "Saving to %s and %s ... done\n"%(n1b,n1s) for d in parsed: if hasattr(d,"diagnostics"): msg = "Saving to %s, %s, and %s ... done\n"%(n1b,n1s,n1d) break else: if (type(parsed) == type([]) and isinstance(parsed[0], parseS.AUTOSolution)): parsed = parseS.parseS(parsed) parsed.writeFilename(n1s) msg = "Saving to %s ... done\n"%(n1s) info(msg) return i = 7 for s in ["bifurcationDiagram","solution","diagnostics"]: n1 = name1[s] forti = "fort." + str(i) i = i + 1 if os.path.exists(forti): shutil.copy(forti,n1) info("Saving %s as %s ... done\n"%(forti,n1)) commandCopyFortFiles = command(save,SIMPLE,"save",alias=['sv']) def delete(name,templates=None): """Delete data files. Type FUNC('xxx') to delete the data-files d.xxx, b.xxx, and s.xxx. """ name = filenameTemplate(name,templates) n1b = name["bifurcationDiagram"] n1s = name["solution"] n1d = name["diagnostics"] if os.path.exists(n1b): os.remove(n1b) info("Deleting %s ... done\n"%n1b) if os.path.exists(n1s): os.remove(n1s) info("Deleting %s ... done\n"%n1s) if os.path.exists(n1d): os.remove(n1d) info("Deleting %s ... 
done\n"%n1d) commandDeleteDataFiles = command(delete,alias=['dl']) def deleteLabel(codes=None,name1=None,name2=None,templates=None, keepTY=0,keep=0): if hasattr(codes,'deleteLabel'): origlen=len(codes()) new = codes.deleteLabel(name1,keepTY=keepTY,keep=keep,copy=1) newlen=len(new()) info("Deleted %d labels, and kept %d.\n"%(origlen-newlen, newlen)) return new name1 = filenameTemplate(name1,templates) if name1["solution"] is None: changedb='fort.7' changeds='fort.8' else: changedb=name1["bifurcationDiagram"] changeds=name1["solution"] bs=bifDiag.bifDiag(changedb,changeds) origlen=len(bs()) bs.deleteLabel(codes,keepTY=keepTY,keep=keep) newlen=len(bs()) if name2 is None: origb=changedb+'~' origs=changeds+'~' try: os.remove(origb) except: pass try: os.remove(origs) except: pass os.rename(changedb,origb) os.rename(changeds,origs) bs.writeFilename(changedb,changeds) else: name2 = filenameTemplate(name2,templates) bs.writeFilename(name2["bifurcationDiagram"],name2["solution"]) info("Deleted %d labels, and kept %d.\n"%(origlen-newlen, newlen)) def dsp(typenames=None,name1=None,name2=None,templates=None): """Delete special points. Type FUNC(x,list) to delete the special points in list from the Python object x, which must be a solution list or a bifurcation diagram. Type FUNC(list,'xxx') to delete from the data-files b.xxx, and s.xxx. Type FUNC(list,'xxx','yyy') to save to b.yyy and s.yyy instead of ?.xxx. Type FUNC(list) to delete from fort.7 and fort.8. list is a label number or type name code, or a list of those, such as 1, or [2,3], or 'UZ' or ['BP','LP'], or it can be None or omitted to mean the special points ['BP','LP','HB','PD','TR','EP','MX'] Alternatively a boolean user-defined function f that takes a solution can be specified for list, such as def f(s): return s["PAR(9)"]<0 where all solutions are deleted that satisfy the given condition, or def f(s1,s2): return abs(s1["L2-NORM"] - s2["L2-NORM"]) < 1e-4 where all solutions are compared with each other and s2 is deleted if the given condition is satisfied, which causes pruning of solutions that are close to each other. Type information is NOT kept in the bifurcation diagram. """ return deleteLabel(typenames,name1,name2,templates) commandDeleteSpecialPoints = command(dsp) def ksp(typenames=None,name1=None,name2=None,templates=None): """Keep special points. Type FUNC(x,list) to only keep the special points in list in the Python object x, which must be a solution list or a bifurcation diagram. Type FUNC(list,'xxx') to keep them in the data-files b.xxx and s.xxx. Type FUNC(list,'xxx','yyy') to save to b.yyy and s.yyy instead of ?.xxx. Type FUNC(list) to keep them in fort.7 and fort.8. list is a label number or type name code, or a list of those, such as 1, or [2,3], or 'UZ' or ['BP','LP'], or it can be None or omitted to mean ['BP','LP','HB','PD','TR','EP','MX'], deleting 'UZ' and regular points. Alternatively a boolean user-defined function f that takes a solution can be specified for list, such as def f(s): return s["PAR(9)"]<0 where only solutions are kept that satisfy the given condition. Type information is NOT kept in the bifurcation diagram. """ return deleteLabel(typenames,name1,name2,templates,keep=1) commandKeepSpecialPoints = command(ksp) def dlb(typenames=None,name1=None,name2=None,templates=None): """Delete special labels. Type FUNC(x,list) to delete the special points in list from the Python object x, which must be a solution list or a bifurcation diagram. Type FUNC(list,'xxx') to delete from the data-files b.xxx and s.xxx. 
Type FUNC(list,'xxx','yyy') to save to b.yyy and s.yyy instead of ?.xxx. Type FUNC(list) to delete from fort.7 and fort.8. list is a label number or type name code, or a list of those, such as 1, or [2,3], or 'UZ' or ['BP','LP'], or it can be None or omitted to mean the special points ['BP','LP','HB','PD','TR','EP','MX'] Alternatively a boolean user-defined function f that takes a solution can be specified for list, such as def f(s): return s["PAR(9)"] < 0 where all solutions are deleted that satisfy the given condition, or def f(s1,s2): return abs(s1["L2-NORM"] - s2["L2-NORM"]) < 1e-4 where all solutions are compared with each other and s2 is deleted if the given condition is satisfied, which causes pruning of solutions that are close to each other. Type information is kept in the bifurcation diagram for plotting. """ return deleteLabel(typenames,name1,name2,templates,keepTY=1) commandDeleteLabels = command(dlb) def klb(typenames=None,name1=None,name2=None,templates=None): """Keep special labels. Type FUNC(x,list) to only keep the special points in list in the Python object x, which must be a solution list or a bifurcation diagram. Type FUNC(list,'xxx') to keep them in the data-files b.xxx and s.xxx. Type FUNC(list,'xxx','yyy') to save to b.yyy and s.yyy instead of ?.xxx. Type FUNC(list) to keep them in fort.7 and fort.8. list is a label number or type name code, or a list of those, such as 1, or [2,3], or 'UZ' or ['BP','LP'], or it can be None or omitted to mean ['BP','LP','HB','PD','TR','EP','MX'], deleting 'UZ' and regular points. Alternatively a boolean user-defined function f that takes a solution can be specified for list, such as def f(s): return s["PAR(9)"]<0 where only solutions are kept that satisfy the given condition. Type information is kept in the bifurcation diagram for plotting. """ return deleteLabel(typenames,name1,name2,templates,keepTY=1,keep=1) commandKeepLabels = command(klb) def expandData(cmd,name=None,templates=None): name = filenameTemplate(name,templates) n1b = name["bifurcationDiagram"] n1s = name["solution"] if n1s is None: n1s = "fort.8" n1b = "fort.7" if os.path.exists(n1b): shutil.copy(n1b,n1b+'~') if os.path.exists(n1s): shutil.copy(n1s,"fort.28") if os.path.exists(n1s+'~'): os.remove(n1s+'~') os.rename(n1s,n1s+'~') interact(os.path.expandvars("$AUTO_DIR/bin/%s"%cmd)) os.rename("fort.38",n1s) if os.path.exists("fort.28"): os.remove("fort.28") if cmd == "double": info("Solution doubling done.\n") else: info("Solution tripling done.\n") def double(name=None,templates=None): """Double a solution. Type FUNC() to double the solution in 'fort.7' and 'fort.8'. Type FUNC('xxx') to double the solution in b.xxx and s.xxx. """ expandData("double",name,templates) commandDouble = command(double,alias=['db']) def move(name1,name2,name3=None,name4=None,templates=None): """Move data-files to a new name. Type FUNC(name1,name2) or FUNC(name1,name2,name3) or FUNC(name1,name2,name3,name4) Move the data-files dir1/b.xxx, dir1/s.xxx, and dir1/d.xxx, to dir2/b.yyy, dir2/s.yyy, and dir2/d.yyy, and copy the constants file dir1/c.xxx to dir2/c.yyy. 
The values of dir1/?.xxx and dir2/?.yyy are as follows, depending on whether name1 is a directory or name2 is a directory: FUNC(name1,name2) no directory names: ./?.name1 and ./?.name2 name1 is a directory: name1/?.name2 and ./?.name2 name2 is a directory: ./?.name1 and name2/?.name1 FUNC(name1,name2,name3) name1 is a directory: name1/?.name2 and ./?.name3 name2 is a directory: ./?.name1 and name2/?.name3 FUNC(name1,name2,name3,name4) name1/?.name2 and name3/?.name4 """ dir1, name1, dir2, name2 = dirfilenames(name1,name2,name3,name4) names1 = filenameTemplate(name1,templates) names2 = filenameTemplate(name2,templates) done = False for s in ["bifurcationDiagram","solution","diagnostics","constants"]: n1 = os.path.join(dir1,names1[s]) n2 = os.path.join(dir2,names2[s]) if s == "constants": try: shutil.copy(n1,n2) info("Copying %s to %s ... done\n"%(n1,n2)) done = True except IOError: pass continue if os.path.exists(n1): if os.path.exists(n2): os.remove(n2) os.rename(n1,n2) info("Renaming %s as %s ... done\n"%(n1,n2)) done = True if not done: raise AUTOExceptions.AUTORuntimeError( "Renaming: no files found for %s and %s"%( os.path.join(dir1,"[bsdc]."+name1), os.path.join(dir2,"[bsdc]."+name2))) commandMoveFiles = command(move,alias=['mv']) def cn(name,templates=None): """Get the current continuation constants. Type FUNC('xxx') to get a parsed version of the constants file c.xxx. This is equivalent to the command loadbd('xxx').c """ name = filenameTemplate(name,templates) data = parseC.parseC(name["constants"]) info("Parsed file: %s\n"%name["constants"]) return data commandParseConstantsFile = command(cn,alias=['constantsget']) def hcn(name,templates=None): """Get the current HomCont continuation constants. Type FUNC('xxx') to get a parsed version of the HomCont file h.xxx. """ name = filenameTemplate(name,templates) data = parseH.parseH(name["homcont"]) info("Parsed file: %s\n"%name["homcont"]) return data commandParseHomcontFile = command(hcn) def sl(name=None,templates=None): """Parse solution file: Type FUNC('xxx') to get a parsed version of the solution file s.xxx. This is equivalent to the command loadbd('xxx')() """ name = filenameTemplate(name,templates) n1s = name["solution"] or "fort.8" try: data = parseS.parseS(n1s) except IOError: raise AUTOExceptions.AUTORuntimeError(sys.exc_info()[1]) if isinstance(n1s, str): info("Parsed file: %s\n"%n1s) return data commandParseSolutionFile = command(sl,alias=['solutionget']) def dg(name=None,templates=None): """Parse a bifurcation diagram. Type FUNC('xxx') to get a parsed version of the diagram file b.xxx. This is equivalent to the command loadbd('xxx') but without the solutions in s.xxx and without the diagnostics in d.xxx. """ name = filenameTemplate(name,templates) n1b = name["bifurcationDiagram"] if n1b is None: n1b = "fort.7" try: data = parseB.parseB(n1b) except IOError: raise AUTOExceptions.AUTORuntimeError(sys.exc_info()[1]) info("Parsed file: %s\n"%n1b) return data commandParseDiagramFile = command(dg,alias=['diagramget']) def bt(name=None,templates=None): """Parse both bifurcation diagram and solution. Type FUNC('xxx') to get a parsed version of the diagram file b.xxx and solution file s.xxx. This is equivalent to the command loadbd('xxx') but without the diagnostics in d.xxx. 
""" name = filenameTemplate(name,templates) n1b = name["bifurcationDiagram"] n1s = name["solution"] if n1b is None: n1b = "fort.7" n1s = "fort.8" data = parseBandS.parseBandS(n1b,n1s) output_names = n1b + " and " + n1s info("Parsed files: %s\n"%output_names) return data commandParseDiagramAndSolutionFile = command(bt,alias=['diagramandsolutionget']) def queryDiagnostic(diagnostic,name=None,templates=None): name = filenameTemplate(name,templates) n1d = name["diagnostics"] if n1d is None: n1d = "fort.9" try: f = open(n1d) except TypeError: for branch in n1d: if hasattr(branch,"diagnostics"): for s in str(branch.diagnostics).splitlines(): if diagnostic in s: info(s+"\n") info("\n") return for s in f: if diagnostic in s: info(s) f.close() info("\n") commandQueryDiagnostic = command(queryDiagnostic,alias=None) def branchpoint(name=None,templates=None): """Print the ``branch-point function''. Type FUNC(x) to list the value of the ``branch-point function'' in the diagnostics of the bifurcation diagram object x. This function vanishes at a branch point. Type FUNC() to list the value of the ``branch-point function'' in the output-file fort.9. Type FUNC('xxx') to list the value of the ``branch-point function'' in the info file 'd.xxx'. """ queryDiagnostic("BP",name,templates) commandQueryBranchPoint = command(branchpoint,alias=['bp','br']) def eigenvalue(name=None,templates=None): """Print eigenvalues of Jacobian (algebraic case). Type FUNC(x) to list the eigenvalues of the Jacobian in the diagnostics of the bifurcation diagram object x. (Algebraic problems.) Type FUNC() to list the eigenvalues of the Jacobian in fort.9. Type FUNC('xxx') to list the eigenvalues of the Jacobian in the info file 'd.xxx'. """ queryDiagnostic("Eigenvalue",name,templates) commandQueryEigenvalue = command(eigenvalue,alias=['ev','eg']) def floquet(name=None,templates=None): """Print the Floquet multipliers. Type FUNC(x) to list the Floquet multipliers in the diagnostics of the bifurcation diagram object x. (Differential equations.) Type FUNC() to list the Floquet multipliers in the output-file fort.9. Type FUNC('xxx') to list the Floquet multipliers in the info file 'd.xxx'. """ queryDiagnostic("Mult",name,templates) commandQueryFloquet = command(floquet,alias=['fl']) def hopf(name=None,templates=None): """Print the value of the ``Hopf function''. Type FUNC(x) to list the value of the ``Hopf function'' in the diagnostics of the bifurcation diagram object x. This function vanishes at a Hopf bifurcation point. Type FUNC() to list the value of the ``Hopf function'' in the output-file fort.9. Type FUNC('xxx') to list the value of the ``Hopf function'' in the info file 'd.xxx'. """ queryDiagnostic("Hopf",name,templates) commandQueryHopf = command(hopf,alias=['hp','hb']) def iterations(name=None,templates=None): """Print the number of Newton interations. Type FUNC(x) to list the number of Newton iterations per continuation step in the diagnostics of the bifurcation diagram object x. Type FUNC() to list the number of Newton iterations per continuation step in fort.9. Type FUNC('xxx') to list the number of Newton iterations per continuation step in the info file 'd.xxx'. """ queryDiagnostic("Iterations",name,templates) commandQueryIterations = command(iterations,alias=['it']) def limitpoint(name=None,templates=None): """Print the value of the ``limit point function''. Type FUNC(x) to list the value of the ``limit point function'' in the diagnostics of the bifurcation diagram object x. 
This function vanishes at a limit point (fold). Type FUNC() to list the value of the ``limit point function'' in the output-file fort.9. Type FUNC('xxx') to list the value of the ``limit point function'' in the info file 'd.xxx'. """ queryDiagnostic("Fold",name,templates) commandQueryLimitpoint = command(limitpoint,alias=['lm','lp']) def note(name=None,templates=None): """Print notes in info file. Type FUNC(x) to show any notes in the diagnostics of the bifurcation diagram object x. Type FUNC() to show any notes in the output-file fort.9. Type FUNC('xxx') to show any notes in the info file 'd.xxx'. """ queryDiagnostic("NOTE",name,templates) commandQueryNote = command(note,alias=['nt']) def secondaryperiod(name=None,templates=None): """Print value of ``secondary-periodic bif. fcn''. Type FUNC(x) to list the value of the ``secondary-periodic bifurcation function'' in the diagnostics of the bifurcation diagram object x. This function vanishes at period-doubling and torus bifurcations. Type FUNC() to list the value of the ``secondary-periodic bifurcation function'' in the output-file 'fort.9. Type FUNC('xxx') to list the value of the ``secondary-periodic bifurcation function'' in the info file 'd.xxx'. """ queryDiagnostic("SPB",name,templates) commandQuerySecondaryPeriod = command(secondaryperiod,alias=['sp','sc']) def stepsize(name=None,templates=None): """Print continuation step sizes. Type FUNC(x) to list the continuation step size for each continuation step in the diagnostics of the bifurcation diagram object x. Type FUNC() to list the continuation step size for each continuation step in 'fort.9'. Type FUNC('xxx') to list the continuation step size for each continuation step in the info file 'd.xxx'. """ queryDiagnostic("Step",name,templates) commandQueryStepsize = command(stepsize,alias=['ss','st']) def triple(name=None,templates=None): """Triple a solution. Type FUNC() to triple the solution in 'fort.8'. Type FUNC('xxx') to triple the solution in s.xxx. """ return expandData("triple",name,templates) commandTriple = command(triple,alias=['tr']) ############################################ # System Commands ############################################ def ls(dir=None): """List the current directory. Type 'FUNC' to run the system 'ls' command in the current directory. This command will accept whatever arguments are accepted by the Unix command 'ls'. """ cmd = "ls" if os.name in ["nt", "dos"]: path = os.environ["PATH"].split(os.pathsep) cmd = "dir" for s in path: if os.path.exists(os.path.join(s,"ls.exe")): cmd = "ls" break if dir is not None: cmd = "%s %s"%(cmd,dir) if sys.stdout is sys.__stdout__: sys.stdout.flush() os.system(cmd) else: info(AUTOutil.getstatusoutput(cmd, shell=True)[1]+'\n') commandLs = command(ls) def quit(): """Quit the AUTO CLUI.""" if isinstance(builtins.quit,str): sys.exit() else: builtins.quit() commandQuit = command(quit,alias=['q']) def shell(cmd): """Run a shell command. Type FUNC('xxx') to run the command 'xxx' in the Unix shell and display the results in the AUTO command line user interface. """ sys.stdout.flush() os.system(cmd) commandShell = command(shell) def wait(): """Wait for the user to enter a key. Type 'FUNC()' to have the AUTO interface wait until the user hits any key (mainly used in scripts). """ print("Hit <return> to continue") input() commandWait = command(wait) def cat(f=None): """Print the contents of a file Type 'FUNC xxx' to list the contents of the file 'xxx'. 
""" if f is not None: f = open(f,"r") for line in f: info(line) f.close() else: line = sys.stdin.readline() while line != "": info(line) line = sys.stdin.readline() commandCat = command(cat) ############################################ # Commands which use runAUTO ############################################ # This function is overridden in AUTOclui.py, so the AUTOSimpleFunctions # instance's runner can be used. def withrunner(runner=None): return runner def cd(dir=None,runner=None): """Change directories. Type 'FUNC xxx' to change to the directory 'xxx'. This command understands both shell variables and home directory expansion. """ runner = withrunner(runner) if dir is None or dir == '': dir = os.path.expanduser("~") try: dir = os.path.expanduser(dir) dir = os.path.expandvars(dir) os.chdir(dir) except: print(("Directory '%s' not found"%(dir,))) runner.config(dir=os.getcwd()) commandCd = command(cd) def configure(runner=None,templates=None,data=None,**kw): """Load files into the AUTO runner or return modified solution data. Type result=FUNC([options]) to modify the AUTO runner. The type of the result is a solution object. There are many possible options: \\begin{verbatim} Long name Short name Description ------------------------------------------- equation e The equations file constants c The AUTO constants file homcont h The Homcont parameter file solution s The restart solution file NDIM,IPS,etc AUTO constants. BR,PT,TY,LAB Solution constants. \\end{verbatim} Options which are not explicitly set retain their previous value. For example one may type: s=FUNC(e='ab',c='ab.1') to use 'ab.c' as the equations file and c.ab.1 as the constants file. You can also specify AUTO Constants, e.g., DS=0.05, or IRS=2. Special values for DS are '+' (forwards) and '-' (backwards). Example: s = FUNC(s,DS='-') changes s.c['DS'] to -s.c['DS']. 
""" def applyRunnerConfigResolveAbbreviation(**kw): abbrev = {} for key in ["equation", "constants", "solution", "homcont"]: abbrev[key[0]] = key abbrev[key] = key for key in list(kw): # remove long duplicates if (key in abbrev and key != abbrev[key] and abbrev[key] in kw): del kw[abbrev[key]] for key,value in list(kw.items()): if key in abbrev: # change the abbreviation to the long version del kw[key] if AUTOutil.isiterable(value): kw[abbrev[key]] = value else: if key[0] == 'e': kw['e'] = value kw[abbrev[key]] = applyTemplate(value,abbrev[key],templates) return kw def applyRunnerConfigResolveFilenames(**kw): exception = None objectdict = {"constants": parseC.parseC, "homcont": parseH.parseH, "solution": parseS.parseS} for key in ["constants", "homcont", "solution"]: if key in kw: value = kw[key] elif data is not None: value = applyTemplate(data,key,templates) else: value = None if value is not None and not AUTOutil.isiterable(value): try: kw[key] = objectdict[key](value) except IOError: if key in kw: # for solution only raise exception later if IRS!=0 exception = sys.exc_info()[1] if key != "solution": raise AUTOExceptions.AUTORuntimeError(exception) # ignore error, but erase runner data for load("xxx") kw[key] = None if data is not None and "e" not in kw and not AUTOutil.isiterable(data): kw["e"] = data kw["equation"] = applyTemplate(data,"equation",templates) if "e" in kw: eq = kw["e"] for ext in [".f90",".f",".c"]: if os.path.exists(eq+ext): return kw, exception raise AUTOExceptions.AUTORuntimeError( "No equations file found for: '%s'"%eq) return kw, exception runner = withrunner(runner) if "info" in kw: info = kw["info"] del kw["info"] else: info = globals()["info"] kw = applyRunnerConfigResolveAbbreviation(**kw) kw, exception = applyRunnerConfigResolveFilenames(**kw) if data is not None and AUTOutil.isiterable(data): if hasattr(data,"load"): # for load(object,...) if "equation" in kw: del kw["equation"] solution = data.load(**kw) c = solution.c kw = {"equation": applyTemplate(c.get("e", ""), "equation", templates), "solution": solution, "constants": c, "homcont": c.get("homcont")} else: # for load(array,...) kw["solution"] = data solution = runner.load(**kw) if exception is not None and runner.options["constants"]["IRS"]: raise AUTOExceptions.AUTORuntimeError(exception) info("Runner configured\n") return solution commandRunnerConfig = command(configure,alias=None) def load(data=None,runner=None,templates=None,**kw): """Load files into the AUTO runner or return modified solution data. Type result=FUNC([options]) to modify the AUTO runner. Type result=FUNC(data,[options]) to return possibly modified solution data. The type of the result is a solution object. FUNC(data,[options]) returns a solution in the following way for different types of data: * A solution: load returns the solution data, with AUTO constants modified by options. * A bifurcation diagram or a solution list: returns the solution specified by the AUTO constant IRS, or if IRS is not specified, the last solution in s. * A string: AUTO uses the solution in the file 's.s' together with the constants in the files 'c.s', and 'h.s'. Not all of these files need to be present. * A Python list array or a numpy array representing a solution, returns a solution with the given contents. Such an array must be given column-wise, as [[t0, ..., tn], [x0, ..., xn], [y0, ..., yn], ...], or for a point solution as [x, y, z, ...]. 
    There are many possible options:
\\begin{verbatim}
    Long name   Short name    Description
    -------------------------------------------
    equation    e             The equations file
    constants   c             The AUTO constants file
    homcont     h             The Homcont parameter file
    solution    s             The restart solution file
    NDIM,IPS,etc              AUTO constants.
    BR,PT,TY,LAB              Solution constants.
\\end{verbatim}
    If data is not specified or data is a string then options which
    are not explicitly set retain their previous value.
    For example one may type: s=FUNC(e='ab',c='ab.1') to use 'ab.c' as
    the equations file and c.ab.1 as the constants file.

    Type s=FUNC('name') to load all files with base 'name'.
    This does the same thing as running
    s=FUNC(e='name',c='name',h='name',s='name').

    You can also specify AUTO Constants, e.g., DS=0.05, or IRS=2.
    Special values for DS are '+' (forwards) and '-' (backwards).
    Example: s = FUNC(s,DS='-') changes s.c['DS'] to -s.c['DS'].
    """
    runner = withrunner(runner)
    return configure(runner,templates,data,**kw)
commandRunnerLoadName = command(load,SIMPLE,"loadname",alias=['ld'])

def loadbd(name=None,templates=None,**kw):
    """Load bifurcation diagram files.

    Type b=FUNC([options]) to load output files or output data.
    There are three possible options:
\\begin{verbatim}
    Long name            Short name    Description
    -------------------------------------------
    bifurcationdiagram   b             The bifurcation diagram file
    solution             s             The solution file or list of solutions
    diagnostics          d             The diagnostics file
\\end{verbatim}
    Type FUNC('name') to load all files with base 'name'.
    This does the same thing as running
    FUNC(b='name',s='name',d='name').

    plot(b) will then plot the 'b' and 's' components.

    Returns a bifurcation diagram object representing the files in b.
    """
    def __applyBsdConfigResolveAbbreviation(**kw):
        abbrev = {}
        for key in ["bifurcationDiagram", "solution", "diagnostics"]:
            abbrev[key[0]] = key
            abbrev[key] = key
        for key in list(kw):
            # remove long duplicates
            if (key in abbrev and key != abbrev[key] and
                abbrev[key] in kw):
                del kw[abbrev[key]]
        for key,value in list(kw.items()):
            if key in abbrev:
                # change the abbreviation to the long version
                del kw[key]
                if type(value) in [type(""),type(1),type(1.0)]:
                    kw[abbrev[key]] = applyTemplate(value,abbrev[key],templates)
                else:
                    kw[abbrev[key]] = value
        return kw

    if name is not None:
        if AUTOutil.isiterable(name):
            lst = ["bifurcationDiagram"]
        else:
            lst = ["bifurcationDiagram", "solution", "diagnostics"]
        for key in lst:
            if key not in kw:
                kw[key] = name
    if name is None and kw == {}:
        bname, sname, dname = "fort.7", "fort.8", "fort.9"
    else:
        dict = __applyBsdConfigResolveAbbreviation(**kw)
        bname = dict.get("bifurcationDiagram")
        sname = dict.get("solution")
        dname = dict.get("diagnostics")
    data = bifDiag.bifDiag(bname,sname,dname)
    info("Parsed output data\n")
    return data
commandParseOutputFiles = command(loadbd,SIMPLE,"loadbd",alias=['bd'])

def pr(parameter=None,runner=None):
    """Print continuation parameters.

    Type FUNC() to print all the parameters.
    Type FUNC('xxx') to return the parameter 'xxx'.
    These commands are equivalent to the commands
        print s.c
        print s.c['xxx']
    where s is a solution.
    """
    runner = withrunner(runner)
    if parameter is None:
        info(str(runner.options["constants"]))
    else:
        return runner.options["constants"][parameter]
commandRunnerPrintFort2 = command(pr,alias=['printconstant','pc'])

def hpr(parameter=None,runner=None):
    """Print HomCont continuation parameters.

    Type FUNC() to print all the HomCont parameters.
    Type FUNC('xxx') to return the HomCont parameter 'xxx'.
These commands are equivalent to the commands print s.c print s.c['xxx'] where s is a solution. """ runner = withrunner(runner) if parameter is None: info(str(runner.options["homcont"])) else: return runner.options["homcont"][parameter] commandRunnerPrintFort12 = command(hpr) def ch(entry=None,value=None,runner=None,**kw): """Modify continuation constants. Type FUNC('xxx',yyy) to change the constant 'xxx' to have value yyy. This is equivalent to the command s=load(s,xxx=yyy) where s is a solution. """ runner = withrunner(runner) if entry is not None: runner.options["constants"][entry] = value info("%s changed to %s\n"%(entry,value)) else: configure(runner,None,info=lambda s:None,**kw) info(str(kw)+'\n') commandRunnerConfigFort2 = command(ch,SIMPLE,"changeConstants", alias=['changeconstant','cc']) def hch(entry=None,value=None,runner=None,**kw): """Modify HomCont continuation constants. Type FUNC('xxx',yyy) to change the HomCont constant 'xxx' to have value yyy. This is equivalent to the command s=load(s,xxx=yyy) where s is a solution. """ runner = withrunner(runner) if entry is not None: runner.options["homcont"][entry] = value info("%s changed to %s\n"%(entry,value)) else: configure(runner,None,info=lambda s:None,**kw) info(str(kw)+'\n') commandRunnerConfigFort12 = command(hch,SIMPLE,"changeConstantsHomCont") def run(data=None,sv=None,ap=None,runner=None,templates=None,**kw): """Run AUTO. Type r=FUNC([data],[options]) to run AUTO from solution data with the given AUTO constants or file keyword options. The results are stored in the bifurcation diagram r which you can later print with ``print r'', obtain branches from via r[0], r[1], ..., and obtain solutions from via r(3), r(5), r('LP2'), where 3 and 5 are label numbers, and 'LP2' refers to the second LP label. FUNC(data) runs AUTO in the following way for different types of data: * A solution: AUTO starts from solution data, with AUTO constants data.c. * A bifurcation diagram: AUTO start from the solution specified by the AUTO constant IRS, or if IRS is not specified, the last solution in data, data()[-1], with AUTO constants data()[-1].c. * A string: AUTO uses the solution in the file 's.data' together with the constants in the files 'c.data', and 'h.data'. Not all of these files need to be present. If no solution data is specified, then the global values from the 'load' command are used instead, where options which are not explicitly set retain their previous value. Keyword argument options can be AUTO constants, such as DS=0.05, or ISW=-1, or specify a constant or solution file. These override the constants in s.c, where applicable. See ``load'': FUNC(s,options) is equivalent to FUNC(load(s,options)) Example: given a bifurcation diagram bd, with a branch point solution, switch branches and stop at the first Hopf bifurcation: hb = FUNC(bd('BP1'),ISW=-1,STOP='HB1') Special keyword arguments are 'sv' and 'ap'; 'sv' is also an AUTO constant: FUNC(bd('BP1'),ISW=-1,STOP='HB1',sv='hb',ap='all') saves to the files b.hb, s.hb and d.hb, and appends to b.all, s.all, and d.all. """ runner = withrunner(runner) if sv is not None: kw['sv'] = sv load(data,runner,templates,info=lambda msg:None,**kw) res = runner.run() sv = runner.options["constants"].get("sv") runner.options["constants"]['sv'] = None if sv is not None and sv != '': name = filenameTemplate(sv,templates) bname = name["bifurcationDiagram"] sname = name["solution"] dname = name["diagnostics"] info("Saving to %s, %s, and %s ... 
done\n"%(bname,sname,dname)) if ap is not None: append(sv,ap) elif ap is not None: append(ap) return res commandRun = command(run,SIMPLE,"run",alias=['r','rn']) def rundemo(demo,equation="all",runner=None): runner = withrunner(runner) runner.config(equation=equation) runner.runDemo(demo) commandRunDemo = command(rundemo,alias=None) def runMakefileWithSetup(equation=None,fort2=None,fort3=None,runner=None): runner = withrunner(runner) if fort2 is not None: runner.config(fort2=fort2) if fort3 is not None: runner.config(fort3=fort3) # Before this is called runner needs to have the fort2 and fort3 # options set. Otherwise this will raise an exception. runner.runMakefileWithSetup(equation) commandRunMakefileWithSetup = command(runMakefileWithSetup,alias=None) def runMakefile(equation=None,runner=None): runner = withrunner(runner) runner.runMakefile(equation) commandRunMakefile = command(runMakefile,alias=None) def runExecutableWithSetup(executable=None,fort2=None,fort3=None,runner=None): runner = withrunner(runner) if fort2 is not None: runner.config(fort2=fort2) if fort3 is not None: runner.config(fort3=fort3) # Before this is called runner needs to have the fort2 and fort3 # options set. Otherwise this will raise an exception. runner.runExecutableWithSetup(executable) commandRunExecutableWithSetup = command(runExecutableWithSetup,alias=None) def runExecutable(executable=None,fort2=None,fort3=None,runner=None): runner = withrunner(runner) runner.runExecutable(executable) commandRunExecutable = command(runExecutable,alias=None) def runCommandWithSetup(command=None,fort2=None,fort3=None,runner=None): runner = withrunner(runner) if fort2 is not None: runner.config(fort2=fort2) if fort3 is not None: runner.config(fort3=fort3) # Before this is called runner needs to have the fort2 and fort3 # options set. Otherwise this will raise an exception. runner.runCommandWithSetup(command) commandRunCommandWithSetup = command(runCommandWithSetup,alias=None) def runCommand(command=None,runner=None): runner = withRunner(runner) runner.runCommand(command) commandRunCommand = command(runCommand,alias=None) def plot3(name=None,r3b=False): """3D plotting of data. Type FUNC(x) to run the graphics program PLAUT04 for the graphical inspection of bifurcation diagram or solution data in x. Type FUNC('xxx') to run the graphics program PLAUT04 for the graphical inspection of the data-files b.xxx and s.xxx. Type FUNC() to run the graphics program PLAUT04 for the graphical inspection of the output-files 'fort.7' and 'fort.8'. Type FUNC(...,r3b=True) to run PLAUT04 in restricted three body problem mode. 
""" cmd = os.path.join(os.path.expandvars("$AUTO_DIR"),"bin") cmd = os.path.join(cmd, "plaut04") arg = [] if r3b: arg = ["-r3b"] if name is not None: if type(name) == type(""): arg.append(name) else: d = name for f in ["fort.7","fort.8","fort.9"]: if os.path.exists(f): os.remove(f) if isinstance(d,bifDiag.bifDiag): d.writeFilename("fort.7","fort.8","fort.9") elif isinstance(d,parseBandS.parseBandS): d.writeFilename("fort.7","fort.8") elif isinstance(d,parseB.parseB): d.writeFilename("fort.7") elif isinstance(d,parseS.parseS): d.writeFilename("fort.8") elif isinstance(d,parseB.AUTOBranch): d.writeFilename("fort.7") elif isinstance(d,parseS.AUTOSolution): d.writeFilename("fort.8") sys.stdout.flush() if not os.path.exists(cmd): cmd = cmd + '.exe' if sys.stdout is sys.__stdout__: os.spawnv(os.P_NOWAIT,cmd,[os.path.basename(cmd)] + arg) else: # when testing, change directories so plaut04 does not keep # cwd open on Windows and it can be deleted cwd = os.getcwd() os.chdir(os.path.dirname(cmd)) os.spawnv(os.P_NOWAIT,cmd,[os.path.basename(cmd), cwd] + arg) # and wait a little bit os.chdir(cwd) import time time.sleep(2) commandPlotter3D = command(plot3,alias=['p3']) try: try: from tkinter import Tk except ImportError: from tkinter import Tk # Python 3 plotterimported = False try: import readline import atexit except: pass import select # this polling loop is here so that Cygwin Python does not "hang" the # plot window while Python waits for a user input def handleevents(): while select.select([sys.stdin],[],[],0.02) == ([], [], []): _root.dooneevent() ##################################################### # Plotting commands ##################################################### def plot(name=None,templates=None,**kw): """Plotting of data. Type FUNC(x) to run the graphics program PyPLAUT for the graphical inspection of bifurcation diagram or solution data in x. Type FUNC('xxx') to run the graphics program PyPLAUT for the graphical inspection of the data-files b.xxx and s.xxx. Type FUNC() to run the graphics program for the graphical inspection of the output-files 'fort.7' and 'fort.8'. Values also present in the file autorc, such as color_list="black green red blue orange" can be provided as keyword arguments, as well as hide=True which hides the on-screen plot. The return value, for instance, p for p=plot(x) will be the handle for the graphics window. It has p.config() and p.savefig() methods that allow you to configure and save the plot. When plotting, see help(p.config) and help(p.savefig) for details. """ options = kw if type(name) == type("") or name is None: name = filenameTemplate(name,templates) parsed = None else: parsed = name # delay importing plotting modules until we actually plot... global plotterimported, windowPlotter if not plotterimported: #from ..graphics import windowPlotter from sys import path from os.path import dirname as dir path.append(dir(path[0])) from graphics import windowPlotter plotterimported = True # root has to be here since I am passing options in # a dictionary. Otherwise the default agruements # get messed up # NOTE: options set here go to the MegaToplevel!, while # the return value of this function is the underlying # grapher. 
        # So we add 'grapher_' to all options that don't
        # already have that prefix
        for k in list(options):
            if k[:8] != 'grapher_':
                v = options[k]
                del options[k]
                options['grapher_'+k] = v

        # Get rid of the initial window
        if options.get('grapher_hide'):
            root=None
        else:
            root=Tk()
            root.withdraw()
        if sys.platform == "cygwin":
            try:
                readline.set_pre_input_hook(handleevents)
                global _root
                _root=root
            except:
                pass
        if parsed:
            nb, ns = None, None
            if isinstance(parsed,bifDiag.bifDiag):
                nb = parsed
                ns = parsed()
            elif isinstance(parsed,parseBandS.parseBandS):
                nb = parsed.diagram.branches
                ns = parsed.solution
            elif isinstance(parsed,parseB.parseB):
                nb = parsed.branches
            elif isinstance(parsed,parseS.parseS):
                ns = parsed
            elif isinstance(parsed,parseB.AUTOBranch):
                nb = parseB.parseBR([parsed])
            elif isinstance(parsed,parseS.AUTOSolution):
                ns = parseS.parseS([parsed])
            if nb:
                options["grapher_bifurcation_diagram"] = nb
            if ns:
                options["grapher_solution"] = ns
        else:
            n1b = name["bifurcationDiagram"]
            n1s = name["solution"]
            if n1b is None:
                n1b = "fort.7"
                n1s = "fort.8"
            try:
                n1b = parseB.parseBR(n1b)
                n1b = bifDiag.bifDiag(n1b,n1s,constants=n1b[0].c)
            except IOError:
                n1b = bifDiag.bifDiag(n1b,n1s)
            options["grapher_bifurcation_diagram"] = n1b
            options["grapher_solution"] = n1b()
        handle = windowPlotter.WindowPlotter2D(root,**options)
        if (not options.get('grapher_hide') or
                'graphics.grapher_mpl' not in sys.modules):
            handle.update()
        try:
            def plotterquit():
                try:
                    handle.destroy()
                except KeyError:
                    pass
            atexit.register(plotterquit)
        except:
            pass
        info("Created plot\n")
        return handle

except:
    print("\n-------------------------------------------------------------")
    print("Could not import plotting modules, plotting will be disabled.")
    print("This is probably because Tkinter is not enabled in your Python installation.")
    print("-------------------------------------------------------------\n")

    def plot(name=None,templates=None,**kw):
        """2D plotting of data.

        Plotting of data has been disabled in the AUTO-07P CLUI.
        This is probably because the Python interpreter cannot
        load the Tkinter module.
        """
        info("2D plotting has been disabled\n")
commandPlotter = command(plot,SIMPLE,"plot",alias=['pl','p2'])

##################################################
# CLUI commands
##################################################

def autohelp(command_string=""):
    outputString = ""
    # Read in the aliases.
    _aliases = {}
    parser = AUTOutil.getAUTORC()
    if parser.has_section("AUTO_command_aliases"):
        for option in parser.options("AUTO_command_aliases"):
            cmd = parser.get("AUTO_command_aliases",option)
            if cmd not in _aliases:
                _aliases[cmd] = []
            _aliases[cmd].append(option)

    from . import AUTOCommands
    if _aliases == {}:
        # Now we copy the commands from the module
        for key in AUTOCommands.__dict__:
            cmd = getattr(AUTOCommands,key)
            # Check to see if it is a command
            if hasattr(cmd,"fun") and cmd.alias is not None:
                _aliases[key] = [cmd.fun.__name__] + cmd.alias

    command_list = []

    # Here we get a list of the names of all of the commands in AUTOCommands
    for key in AUTOCommands.__dict__:
        if key in _aliases:
            command_list.append(key)

    return_value = {}
    if not isinstance(command_string, str):
        try:
            outputString += command_string.__doc__+'\n'
        except TypeError:
            pass
        info(outputString)
        return return_value

    if len(command_string) == 0:
        # If we were created with the empty string return a formatted
        # quick reference of all commands as the string and a
        # dictionary of all commands as the data.
        # The dictionary has an entry for each command which is a dictionary
        # with two entries:
        #   "aliases"      a list of the aliases of the command
        #   "description"  a one line description of the command
        command_list.sort()
        outputString += " ALIASES DESCRIPTION\n"
        for cmd in command_list:
            return_value[cmd] = {}
            return_value[cmd]["aliases"] = []
            aliases = ""
            for key in _aliases[cmd]:
                aliases = aliases + key + " "
                return_value[cmd]["aliases"].append(key)
            doc = getattr(AUTOCommands,cmd).__doc__
            if doc is not None:
                outputString += " %-25s"%aliases
                doc = doc.splitlines()
                return_value[cmd]["description"] = doc[0]
                outputString += doc[0]
                outputString += "\n"

        from . import interactiveBindings
        execlist = [{'name' : 'auto', 'alias' : 'ex',
                     'fn' : interactiveBindings.AUTOInteractiveConsole.ex},
                    {'name' : 'demofile', 'alias' : 'dmf',
                     'fn' : interactiveBindings.AUTOInteractiveConsole.dmf}]
        for cmdprop in execlist:
            cmd = cmdprop['name']
            return_value[cmd] = {}
            return_value[cmd]["aliases"] = [cmd,cmdprop['alias']]
            aliases = cmd + " " + cmdprop['alias']
            doc = cmdprop["fn"].__doc__
            outputString += " %-25s"%aliases
            doc = doc.splitlines()
            return_value[cmd]["description"] = doc[0]
            outputString += doc[0]
            outputString += "\n"
        outputString += "\n"
    else:
        # If we were created with the nonempty string return a formatted
        # reference for the given command as the string and a
        # dictionary containing information about the command as the data.
        # The dictionary has 3 entries:
        #   "name"         the full name of the command
        #   "aliases"      a list of all of the aliases of the command
        #   "description"  a long description of the command
        try:
            doc = getattr(AUTOCommands,command_string).__doc__
            return_value["name"] = command_string
        except:
            doc = getattr(AUTOCommands,_aliases[command_string]).__doc__
            return_value["name"] = _aliases[command_string]
        doc = doc.replace("FUNC",command_string)
        return_value["short description"] = doc.splitlines()[0]
        return_value["long description"] = "\n".join(doc.split("\n")[1:])
        # Get rid of the LaTeX stuff from the string that gets returned, but
        # NOT from the data portion
        doc = doc.replace("\\begin{verbatim}","")
        doc = doc.replace("\\end{verbatim}","")
        doc = doc + "\n"

        if not command_string in command_list:
            # This means help was asked for an alias
            for cmd in _aliases:
                if command_string in _aliases[cmd]:
                    command_string = cmd
                    break
            doc = doc + "Command name: "+command_string+"\n"
        return_value["aliases"] = []
        doc = doc + "Aliases: "
        if command_string in _aliases:
            for key in _aliases[command_string]:
                doc = doc + key + " "
                return_value["aliases"].append(key)
        outputString += doc+"\n"
    info(outputString)
    return return_value
commandHelp = command(autohelp)

# This is just a little wrapper around commandHelp which discards the
# data portion of the return. This is because, for the
# interactive command line we don't want it to print out.
def man(command_string=""):
    """Get help on the AUTO commands.

    Type 'FUNC' to list all commands with an online help.
    Type 'FUNC xxx' to get help for command 'xxx'.
    """
    autohelp(command_string)
commandInteractiveHelp = command(man)

##################################################
# GUI commands
##################################################

def printFunc(printFnc,text):
    printFnc(text)
    info(text)
commandPrintFunc = command(printFunc)

# FIXME: This is not done!!
def gui(type="simple"):
    """Show AUTO's graphical user interface.

    Type FUNC() to start AUTO's graphical user interface.

    NOTE: This command is not implemented yet.
""" try: from tkinter import Tk except ImportError: from tkinter import Tk # Python 3 from .graphics import AUTOgui # Get rid of the initial window root = Tk() root.withdraw() guic = AUTOgui.AUTOgui(type) info("GUI created\n") return guic commandCreateGUI = command(gui) # Not ready yet ## def commandRunGeneralGUI(runner): ## tkSimple ## first = commandSetupGeneralRun(eq_name,saved_data,parameter_name) ## second = commandRunnerConfig(runner,makefile="$AUTO_DIR/cmds/cmds.make") ## third = commandRunMakefile(runner,"EQUATION_NAME=%s"%(eq_name)) ## return commandMacro((first,second,third)) ## commandRunGeneralGUI = command(generalGUI) ############################################ # High level functions ############################################ def splabs(s,typename,templates=None): """Return special labels Type FUNC('xxx',typename) to get a list of labels with the specified typename, where typename can be one of 'EP', 'MX', 'BP', 'LP', 'UZ', 'HB', 'PD', 'TR', or 'RG'. This is equivalent to the command load('xxx')(typename) which gives a list of the solutions themselves; load('xxx')(typename).getLabels() returns the list of labels. Or use FUNC(s,typename) where s is a parsed solution from sl(). This is equivalent to the command s(typename).getLabels() """ labels = [] for solution in sl(s,templates=templates): if solution['Type name'] == typename: labels.append(solution['Label']) return labels commandSpecialPointLabels = command(splabs) ############################################ # Testing stuff ############################################ def test(): from . import runAUTO import sys def printfunc(text): stdout.write(text+"\n") stdout = sys.stdout f = StringIO() def getinfo(s): f.write(s) def noinfo(s): pass global info runner = runAUTO.runAUTO( makefile="", demos_dir=os.path.join(os.environ["AUTO_DIR"],"python")) clean = commandRunDemo("wav","clean",runner) first = commandRunDemo("wav","first",runner) second = commandRunDemo("wav","second",runner) tmacro = commandMacro((clean,first,first)) printer = commandPrintFunc(printfunc,"Hello World") quiet = commandRunnerConfig(runner,log=f) verbose = commandRunnerConfig(runner,log=None) changedir = commandCd("wav",runner) constants = commandParseConstantsFile("wav") changeup = commandCd("..",runner) verbose() clean() first() tmacro() quiet() second() stdout.write(f.getvalue()+"\n") printer() verbose() clean() changedir() constants() changeup() if __name__ == "__main__": test()
py
1a4aa35fee0aee9020cfc0ff5c0cfe0c20093dc1
''' Created on 5/9/2014 @author: victor ''' import unittest from pyproct.data.handler.dataHandler import DataHandler from pyproct.data.handler.test.TestDataLoader import FakeFileLoader class DataHandlerMock(DataHandler): def get_loader(self, data_type): return FakeFileLoader class FakeSourceGenerator(): def __init__(self, source_list): self.source_list = source_list class TestDataHandler(unittest.TestCase): def test_data_handler(self): dh = DataHandlerMock({ "type":"any", # As our loader is hardcoded it must not check the data type # availability "files":[(1,5), (6,9)] }, source_generator_class = FakeSourceGenerator) # We check we have all the elements self.assertEqual([0, 1, 2, 3, 4, 5, 6, 7, 8], list(dh.get_all_elements())) # Then we check we can get their sources self.assertTupleEqual( dh.get_source_of_element(3), (1, 5)) # Element 3 is datum(3) == 4 self.assertTupleEqual( dh.get_source_of_element(4), (1, 5)) # Element 4 is datum(4) == 5 self.assertTupleEqual( dh.get_source_of_element(5), (6, 9)) # Element 5 is datum(5) == 6 self.assertTupleEqual( dh.get_source_of_element(7), (6, 9)) # Element 7 is datum(7) == 8 if __name__ == "__main__": #import sys;sys.argv = ['', 'Test.testName'] unittest.main()
py
1a4aa47f49066d0ae726e49e0135bdff270894ca
class Pass(object): def __init__(self, name): self.name = name def __repr__(self): return "<pass name=%s>" % self.name PassListId = 0 class PassList(object): def __init__(self, transforms): global PassListId self.pass_id = PassListId PassListId += 1 self.transforms = transforms def __repr__(self): return "<passlist id=%s values=%s>" % (self.pass_id, self.transforms) def __iter__(self): return self.transforms.__iter__() def addPass(self, p): if isinstance(p, list): p = PassList(p) self.transforms.append(p) def generate(self): stack = list(reversed(self.transforms)) result = [] while stack: transform = stack.pop() if isinstance(transform, Pass): result.append(transform.name) continue for child in reversed(list(transform)): stack.append(child) return result class PassPipeline(object): def __init__(self, identifier, action): self.identifier = identifier self.action = action self.pass_list = PassList([]) def addPass(self, p): self.pass_list.addPass(p) def __repr__(self): return "<passpipeline values=%s>" % self.pass_list def generate(self): x = [self.identifier, self.action['name'], self.action.get('count', 0)] x.extend(self.pass_list.generate()) return x
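
# A minimal usage sketch (not part of the original module); the pass and
# action names below are made-up placeholders. It shows how nested PassList
# instances are flattened depth-first by generate().
if __name__ == "__main__":
    inner = PassList([Pass("simplify-cfg"), Pass("dce")])
    pipeline = PassPipeline("pipeline-0", {"name": "run-to-fixed-point", "count": 2})
    pipeline.addPass(Pass("mem2reg"))
    # A plain list is wrapped into a PassList by addPass.
    pipeline.addPass([Pass("inline"), inner])
    # Expected output:
    # ['pipeline-0', 'run-to-fixed-point', 2,
    #  'mem2reg', 'inline', 'simplify-cfg', 'dce']
    print(pipeline.generate())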
py
1a4aa49740cfe22d9681aac82d8762ef057798c9
import string
from time import sleep
from loguru import logger

from tacticalrmm.celery import app
from django.conf import settings
from agents.models import Agent

from .models import ChocoSoftware, ChocoLog, InstalledSoftware

logger.configure(**settings.LOG_CONFIG)


@app.task()
def install_chocolatey(pk, wait=False):
    if wait:
        sleep(15)
    agent = Agent.objects.get(pk=pk)
    r = agent.salt_api_cmd(timeout=120, func="chocolatey.bootstrap", arg="force=True")
    if r == "timeout" or r == "error":
        logger.error(f"failed to install choco on {agent.salt_id}")
        return

    try:
        output = r.lower()
    except Exception as e:
        logger.error(f"failed to install choco on {agent.salt_id}: {e}")
        return

    success = ["chocolatey", "is", "now", "ready"]

    if all(x in output for x in success):
        agent.choco_installed = True
        agent.save(update_fields=["choco_installed"])
        logger.info(f"Installed chocolatey on {agent.salt_id}")
        return "ok"
    else:
        logger.error(f"failed to install choco on {agent.salt_id}")
        return


@app.task
def update_chocos():
    agents = Agent.objects.only("pk")
    online = [x for x in agents if x.status == "online" and x.choco_installed]

    while 1:
        for agent in online:
            r = agent.salt_api_cmd(timeout=10, func="test.ping")
            if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
                continue

            if isinstance(r, bool) and r:
                ret = agent.salt_api_cmd(timeout=200, func="chocolatey.list")
                if ret == "timeout" or ret == "error":
                    continue

                try:
                    chocos = [{"name": k, "version": v[0]} for k, v in ret.items()]
                except AttributeError:
                    continue
                else:
                    # sometimes chocolatey api is down or buggy and doesn't return the full list of software
                    if len(chocos) < 4000:
                        continue
                    else:
                        logger.info(f"Chocos were updated using {agent.salt_id}")
                        ChocoSoftware(chocos=chocos).save()
                        break

        break

    return "ok"


@app.task
def get_installed_software(pk):
    agent = Agent.objects.get(pk=pk)
    r = agent.salt_api_cmd(timeout=30, func="pkg.list_pkgs")
    if r == "timeout" or r == "error":
        logger.error(f"Timed out trying to get installed software on {agent.salt_id}")
        return

    printable = set(string.printable)
    try:
        software = [
            {
                "name": "".join(filter(lambda x: x in printable, k)),
                "version": "".join(filter(lambda x: x in printable, v)),
            }
            for k, v in r.items()
        ]
    except Exception as e:
        logger.error(f"Unable to get installed software on {agent.salt_id}: {e}")
        return

    if not InstalledSoftware.objects.filter(agent=agent).exists():
        InstalledSoftware(agent=agent, software=software).save()
    else:
        s = agent.installedsoftware_set.get()
        s.software = software
        s.save(update_fields=["software"])

    return "ok"


@app.task
def install_program(pk, name, version):
    agent = Agent.objects.get(pk=pk)
    r = agent.salt_api_cmd(
        timeout=900,
        func="chocolatey.install",
        arg=[name, f"version={version}"],
    )
    if r == "timeout" or r == "error":
        logger.error(f"Failed to install {name} {version} on {agent.salt_id}: timeout")
        return

    try:
        output = r.lower()
    except Exception as e:
        logger.error(f"Failed to install {name} {version} on {agent.salt_id}: {e}")
        return

    success = [
        "install",
        "of",
        name.lower(),
        "was",
        "successful",
        "installed",
    ]
    duplicate = [name.lower(), "already", "installed", "--force", "reinstall"]

    installed = False

    if all(x in output for x in success):
        installed = True
        logger.info(f"Successfully installed {name} {version} on {agent.salt_id}")
    elif all(x in output for x in duplicate):
        logger.warning(f"Already installed: {name} {version} on {agent.salt_id}")
    else:
        logger.error(f"Something went wrong - {name} {version} on {agent.salt_id}")

    ChocoLog(
        agent=agent, name=name, version=version, message=output,
installed=installed ).save() get_installed_software.delay(agent.pk) return "ok"
py
1a4aa5f437e2a7e5bdfbdf2fb5b05bfd83468944
nome = str(input('What is your full name? '))
print('Your name contains Silva! {} '.format('silva' in nome.lower()))
py
1a4aa6b247f301249d235ea2f7c38519b579743b
from mythic_c2_container.C2ProfileBase import * import sys # request is a dictionary: {"action": func_name, "message": "the input", "task_id": task id num} # must return an RPCResponse() object and set .status to an instance of RPCStatus and response to str of message async def test(request): response = RPCResponse() response.status = RPCStatus.Success response.response = "hello" #resp = await MythicCallbackRPC.MythicCallbackRPC().add_event_message(message="got a POST message") return response # The opsec function is called when a payload is created as a check to see if the parameters supplied are good # The input for "request" is a dictionary of: # { # "action": "opsec", # "parameters": { # "param_name": "param_value", # "param_name2: "param_value2", # } # } # This function should return one of two things: # For success: {"status": "success", "message": "your success message here" } # For error: {"status": "error", "error": "your error message here" } async def opsec(request): return {"status": "success", "message": "No OPSEC Check Performed"}
py
1a4aa7c04dcd94ecd88293e77a80fabb7ef296a9
import re _R = re.compile(r"^([0-9]*)-([0-9]*) ([a-zA-Z]): (.*)$") def _parse_in(line): r = _R.search(line) grp = r.groups() return int(grp[0]), int(grp[1]), grp[2], grp[3] def transform_input_list(inp): r = [] for line in inp: r.append(_parse_in(line)) return r def stage_1(inp): nb_ok_pass = 0 for min, max, let, pss in inp: nb_oc = len(re.findall(let, pss)) if not (nb_oc < min or nb_oc > max): nb_ok_pass = nb_ok_pass + 1 return nb_ok_pass def stage_2(inp): nb_ok_pass = 0 for p1, p2, let, pss in inp: if (pss[p1 - 1] == let and pss[p2 - 1] != let) or ( pss[p1 - 1] != let and pss[p2 - 1] == let ): nb_ok_pass = nb_ok_pass + 1 return nb_ok_pass def solve(inp): inp = transform_input_list(inp) return stage_1(inp), stage_2(inp)
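
# Quick self-check (not part of the original solution). The three sample
# lines are the well-known Advent of Code 2020 day 2 example, for which
# stage 1 counts 2 valid passwords and stage 2 counts 1.
if __name__ == "__main__":
    sample = [
        "1-3 a: abcde",
        "1-3 b: cdefg",
        "2-9 c: ccccccccc",
    ]
    assert solve(sample) == (2, 1)
    print(solve(sample))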
py
1a4aa98494f7fbe1c652f8d5a291ad3055e13548
#!/usr/bin/python # -*- coding: utf-8 -*- from django_cron import cronScheduler, Job from runapp import deleteOrphanJobs class deleteOrphans(Job): run_every = 4000 def job(self): deleteOrphanJobs() cronScheduler.register(deleteOrphans)
py
1a4aa9dcf136ea802150c9104b0570d976168017
# Generated by Django 3.0.7 on 2020-06-07 18:16 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('projects', '0009_auto_20200607_1515'), ] operations = [ migrations.AddField( model_name='project', name='average_score', field=models.FloatField(default=0), ), ]
py
1a4aaa9d53b42c6c4d324c36309897da49fbe185
import os.path import sys import tqdm import pathlib import cv2 as cv import numpy as np import torch import torch.nn as nn import torch.nn.functional as F import torchvision.models as models import xml.etree.ElementTree as ET from torchvision import ops from torchvision import transforms from torch.utils.data import Dataset, DataLoader from PIL import Image POS_ANCHOR_LABEL = 1 NEG_ANCHOR_LABEL = 0 INVALID_ANCHOR_LABEL = -1 class RegionProposalNetwork(nn.Module): def __init__(self, n_anchors, n_inter_channels=256): super().__init__() pretrained_model = models.mobilenet_v2(pretrained=True) for param in pretrained_model.parameters(): param.requires_grad = False self.backbone = nn.Sequential(*pretrained_model.features[0:7]) self.conv_inter = nn.Conv2d(32, n_inter_channels, 3, padding=1) self.relu = nn.ReLU(inplace=True) self.conv_reg = nn.Conv2d(n_inter_channels, 4 * n_anchors, 1) self.conv_cls = nn.Conv2d(n_inter_channels, 2 * n_anchors, 1) def forward(self, x): x = self.backbone(x) x = self.conv_inter(x) x = self.relu(x) pred_reg = self.conv_reg(x) pred_cls = self.conv_cls(x) batch_size = x.shape[0] pred_reg = pred_reg.permute(0, 2, 3, 1).reshape(batch_size, -1, 4) pred_cls = pred_cls.permute(0, 2, 3, 1).reshape(batch_size, -1, 2) return pred_reg, pred_cls def _get_rand_sub_selector(selector, indices, n_vals): rand_perm = torch.randperm(len(indices))[:n_vals] subset_indices = indices[rand_perm] subset_selector = torch.full_like(selector, False) subset_selector[subset_indices] = True return subset_selector def _rpn_cls_loss(pred_cls, cls_labels, pos_selector, neg_selector): pred_cls_pos = pred_cls[pos_selector] pred_cls_neg = pred_cls[neg_selector] cls_loss_pos = F.cross_entropy( pred_cls_pos, cls_labels[pos_selector], reduction='sum') cls_loss_neg = F.cross_entropy( pred_cls_neg, cls_labels[neg_selector], reduction='sum') n_valid = pred_cls_pos.numel() + pred_cls_neg.numel() cls_loss = (cls_loss_pos + cls_loss_neg) / n_valid return cls_loss def _rpn_reg_loss(pred_reg, reg_targets, pos_selector): pred_reg_valid = pred_reg[pos_selector] if len(pred_reg_valid) == 0: return 0 reg_targets_valid = reg_targets[pos_selector] reg_loss = F.smooth_l1_loss(pred_reg_valid, reg_targets_valid) return reg_loss class RPNClsAndRegLoss(nn.Module): def __init__(self, n_pos_samples=32, n_neg_samples=32, reg_balance=0.5): super().__init__() self.n_pos_samples = n_pos_samples self.n_neg_samples = n_neg_samples assert 0 < reg_balance < 1 self.reg_balance = reg_balance def forward(self, pred_reg, pred_cls, reg_targets, cls_labels): pos_selector = (cls_labels == POS_ANCHOR_LABEL) neg_selector = (cls_labels == NEG_ANCHOR_LABEL) pos_indices = torch.where(pos_selector)[0] neg_indices = torch.where(neg_selector)[0] n_pos_found = len(pos_indices) n_neg_found = len(neg_indices) n_pos_subset = min(self.n_pos_samples, n_pos_found) n_pos_missing = max(self.n_pos_samples - n_pos_found, 0) n_neg_subset = min(self.n_neg_samples + n_pos_missing, n_neg_found) pos_subset_selector = _get_rand_sub_selector( pos_selector, pos_indices, n_pos_subset) neg_subset_selector = _get_rand_sub_selector( neg_selector, neg_indices, n_neg_subset) cls_loss = _rpn_cls_loss( pred_cls, cls_labels, pos_subset_selector, neg_subset_selector) reg_loss = _rpn_reg_loss(pred_reg, reg_targets, pos_subset_selector) loss = (self.reg_balance * reg_loss + (1 - self.reg_balance) * cls_loss) return loss def generate_anchors(scales, ratios, response_size, total_stride): n_response_cells = response_size ** 2 n_anchors = len(scales) * len(ratios) feature_cell_size = 
total_stride * total_stride widths = torch.sqrt(feature_cell_size * ratios) heights = widths / ratios widths_scaled = torch.outer(scales, widths).flatten() heights_scaled = torch.outer(scales, heights).flatten() widths_scaled = torch.tile(widths_scaled, (n_response_cells,)) heights_scaled = torch.tile(heights_scaled, (n_response_cells,)) coords = torch.arange(response_size) * total_stride centers = coords + (total_stride / 2) xs, ys = torch.meshgrid(centers, centers) xs = torch.tile(xs.flatten(), (n_anchors,)) ys = torch.tile(ys.flatten(), (n_anchors,)) anchors = torch.vstack((xs, ys, widths_scaled, heights_scaled)).squeeze().T return anchors class RPNTargetBuilder: def __init__( self, anchors, img_width, img_height, pos_thresh=0.5, neg_thresh=0.2): self.pos_thresh = pos_thresh self.neg_thresh = neg_thresh self.anchors = anchors self.anchors_xyxy = ops.box_convert(anchors, 'cxcywh', 'xyxy') # [A, 4] self.valid_anchors_selector = ( (self.anchors_xyxy[:, 0] >= 0) & (self.anchors_xyxy[:, 1] >= 0) & (self.anchors_xyxy[:, 2] < img_width) & (self.anchors_xyxy[:, 3] < img_height)) def build_reg_and_cls_targets(self, boxes): boxes_xyxy = ops.box_convert(boxes, 'cxcywh', 'xyxy') # [B, 4] iou_dist = ops.box_iou(self.anchors_xyxy, boxes_xyxy) # [A, B] closest_box_indices = torch.argmax(iou_dist, dim=1) # [A, 1] target_boxes = boxes[closest_box_indices] # [A, 4] # Both [A, 2] xy_targets = ( (target_boxes[..., :2] - self.anchors[..., :2]) / self.anchors[..., 2:]) wh_targets = torch.log(target_boxes[..., 2:] / self.anchors[..., 2:]) reg_target = torch.hstack((xy_targets, wh_targets)) # [A, 4] pos_selector = torch.any(iou_dist > self.pos_thresh, dim=1) # [A,] neg_selector = torch.all(iou_dist < self.neg_thresh, dim=1) # [A,] valid_pos_selector = pos_selector & self.valid_anchors_selector # [A,] valid_neg_selector = neg_selector & self.valid_anchors_selector # [A,] cls_target = torch.full( (len(self.anchors),), INVALID_ANCHOR_LABEL, device=boxes.device) # [A,] cls_target[valid_pos_selector] = POS_ANCHOR_LABEL # [A,] cls_target[valid_neg_selector] = NEG_ANCHOR_LABEL # [A,] return reg_target, cls_target def read_img_with_boxes(imgs_dir, anno_file): tree = ET.parse(str(anno_file)) root = tree.getroot() img_file_name = root.find("filename").text img = Image.open(str(imgs_dir / img_file_name)) if img.mode in ('L', 'RGBA'): img = img.convert('RGB') box_elems = ("xmin", "ymin", "xmax", "ymax") boxes = [] for box in root.findall(".//bndbox"): box = [int(box.find(elem).text) for elem in box_elems] boxes.append(box) boxes = ops.box_convert(torch.tensor(boxes), 'xyxy', 'cxcywh') return img, boxes class ImgAndBoxesSyncResize: def __init__(self, target_width, target_height, resample=Image.BICUBIC): self.target_width = target_width self.target_height = target_height self.resample = resample def __call__(self, img, boxes): img_resized = img.resize( (self.target_width, self.target_height), self.resample) width_scale = self.target_width / img.size[0] height_scale = self.target_height / img.size[1] scale = torch.tensor( ((width_scale, height_scale, width_scale, height_scale),)) boxes_resized = boxes * scale return img_resized, boxes_resized class RoadSignDetectionDataset(Dataset): def __init__( self, root_dir_path, img_transforms=None, img_boxes_transforms=None): self.img_transforms = img_transforms self.img_boxes_transforms = img_boxes_transforms self.imgs = [] self.boxes = [] self._read_dataset(root_dir_path) def __getitem__(self, item): img = self.imgs[item] boxes = self.boxes[item] if self.img_boxes_transforms: img, 
boxes = self.img_boxes_transforms(img, boxes) if self.img_transforms: img = self.img_transforms(img) return img, boxes def __len__(self): return len(self.imgs) def _read_dataset(self, root_dir_path): root_dir = pathlib.Path(root_dir_path) annos_dir = root_dir / "annotations" imgs_dir = root_dir / "images" for anno_file in annos_dir.rglob("road*.xml"): img, boxes = read_img_with_boxes(imgs_dir, anno_file) self.imgs.append(img) self.boxes.append(boxes) if __name__ == '__main__': device = torch.device('cuda') pin_memory = True root_dir_path = "../../../datasets/road_sign_detection" response_size = 28 total_stride = 8 img_size = 224 scales = torch.tensor((4.0, 8.0, 16.0)) ratios = torch.tensor((0.5, 1.0, 2.0)) n_anchors = len(scales) * len(ratios) n_epochs = 100 anchors = generate_anchors( scales, ratios, response_size, total_stride).to(device) rpn = RegionProposalNetwork(n_anchors).to(device) criterion = RPNClsAndRegLoss().to(device) target_builder = RPNTargetBuilder(anchors, img_size, img_size) optimizer = torch.optim.Adam(rpn.parameters(), lr=1.e-3) img = torch.zeros((1, 3, 224, 224)).to(device) rpn(img) normalize = transforms.Normalize( mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225)) img_transforms = transforms.Compose(( transforms.ToTensor(), normalize,)) img_boxes_transforms = ImgAndBoxesSyncResize(img_size, img_size) dataset = RoadSignDetectionDataset( root_dir_path, img_transforms, img_boxes_transforms) n_workers = 1 train_loader = DataLoader( dataset, batch_size=1, shuffle=True, num_workers=n_workers, pin_memory=pin_memory) def run_epoch(epoch, device, backward=True): rpn.train(backward) losses_sum = 0.0 n_batches = len(dataset) mode_text = "train" if backward else "valid" epoch_text = f"[{mode_text}] epoch: {epoch:3d}/{n_epochs}" tqdm_bar = tqdm.tqdm(total=n_batches, file=sys.stdout) with torch.set_grad_enabled(backward), tqdm_bar as pbar: for batch, (img, boxes) in enumerate(train_loader, start=1): img = img.to(device) boxes = boxes.to(device).squeeze(dim=0) # Remove batch dim. pred_reg, pred_cls = rpn(img) pred_reg = pred_reg.squeeze() pred_cls = pred_cls.squeeze() reg_target, cls_target =\ target_builder.build_reg_and_cls_targets(boxes) loss = criterion(pred_reg, pred_cls, reg_target, cls_target) if backward: optimizer.zero_grad() loss.backward() optimizer.step() curr_loss = loss.item() losses_sum += curr_loss curr_batch_loss = losses_sum / batch loss_text = f"loss: {curr_loss:.5f} ({curr_batch_loss:.5f})" pbar.set_description(f"{epoch_text} | {loss_text}") pbar.update() batch_loss = losses_sum / n_batches return batch_loss for epoch in range(1, n_epochs + 1): run_epoch(epoch, device) checkpoint_file_path = "../rpn_checkpoint.pth" checkpoint = rpn.state_dict() torch.save(checkpoint, checkpoint_file_path)
py
1a4aab09639855b044c8656c90642c5668e9ed03
# # Copyright (C) 2008 The Android Open Source Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import print_function import copy import re import sys from command import InteractiveCommand from editor import Editor from error import HookError, UploadError from git_command import GitCommand from project import RepoHook from pyversion import is_python3 # pylint:disable=W0622 if not is_python3(): input = raw_input else: unicode = str # pylint:enable=W0622 UNUSUAL_COMMIT_THRESHOLD = 1000 def _ConfirmManyUploads(multiple_branches=False): if multiple_branches: print('ATTENTION: One or more branches has an unusually high number ' 'of commits.') else: print('ATTENTION: You are uploading an unusually high number of commits.') print('YOU PROBABLY DO NOT MEAN TO DO THIS. (Did you rebase across ' 'branches?)') answer = input("If you are sure you intend to do this, type 'yes': ").strip() return answer == "yes" def _die(fmt, *args): msg = fmt % args print('error: %s' % msg, file=sys.stderr) sys.exit(1) def _SplitEmails(values): result = [] for value in values: result.extend([s.strip() for s in value.split(',')]) return result class Upload(InteractiveCommand): common = True helpSummary = "Upload changes for code review" helpUsage = """ %prog [--re --cc] [<project>]... """ helpDescription = """ The '%prog' command is used to send changes to the Gerrit Code Review system. It searches for topic branches in local projects that have not yet been published for review. If multiple topic branches are found, '%prog' opens an editor to allow the user to select which branches to upload. '%prog' searches for uploadable changes in all projects listed at the command line. Projects can be specified either by name, or by a relative or absolute path to the project's local directory. If no projects are specified, '%prog' will search for uploadable changes in all projects listed in the manifest. If the --reviewers or --cc options are passed, those emails are added to the respective list of users, and emails are sent to any new users. Users passed as --reviewers must already be registered with the code review system, or the upload will fail. Configuration ------------- review.URL.autoupload: To disable the "Upload ... (y/N)?" prompt, you can set a per-project or global Git configuration option. If review.URL.autoupload is set to "true" then repo will assume you always answer "y" at the prompt, and will not prompt you further. If it is set to "false" then repo will assume you always answer "n", and will abort. review.URL.autoreviewer: To automatically append a user or mailing list to reviews, you can set a per-project or global Git option to do so. review.URL.autocopy: To automatically copy a user or mailing list to all uploaded reviews, you can set a per-project or global Git option to do so. Specifically, review.URL.autocopy can be set to a comma separated list of reviewers who you always want copied on all uploads with a non-empty --re argument. 
review.URL.username: Override the username used to connect to Gerrit Code Review. By default the local part of the email address is used. The URL must match the review URL listed in the manifest XML file, or in the .git/config within the project. For example: [remote "origin"] url = git://git.example.com/project.git review = http://review.example.com/ [review "http://review.example.com/"] autoupload = true autocopy = [email protected],[email protected] review.URL.uploadtopic: To add a topic branch whenever uploading a commit, you can set a per-project or global Git option to do so. If review.URL.uploadtopic is set to "true" then repo will assume you always want the equivalent of the -t option to the repo command. If unset or set to "false" then repo will make use of only the command line option. References ---------- Gerrit Code Review: http://code.google.com/p/gerrit/ """ def _Options(self, p): p.add_option('-t', dest='auto_topic', action='store_true', help='Send local branch name to Gerrit Code Review') p.add_option('--re', '--reviewers', type='string', action='append', dest='reviewers', help='Request reviews from these people.') p.add_option('--cc', type='string', action='append', dest='cc', help='Also send email to these email addresses.') p.add_option('--br', type='string', action='store', dest='branch', help='Branch to upload.') p.add_option('--cbr', '--current-branch', dest='current_branch', action='store_true', help='Upload current git branch.') p.add_option('-d', '--draft', action='store_true', dest='draft', default=False, help='If specified, upload as a draft.') p.add_option('-D', '--destination', '--dest', type='string', action='store', dest='dest_branch', metavar='BRANCH', help='Submit for review on this target branch.') # Options relating to upload hook. Note that verify and no-verify are NOT # opposites of each other, which is why they store to different locations. # We are using them to match 'git commit' syntax. # # Combinations: # - no-verify=False, verify=False (DEFAULT): # If stdout is a tty, can prompt about running upload hooks if needed. # If user denies running hooks, the upload is cancelled. If stdout is # not a tty and we would need to prompt about upload hooks, upload is # cancelled. # - no-verify=False, verify=True: # Always run upload hooks with no prompt. # - no-verify=True, verify=False: # Never run upload hooks, but upload anyway (AKA bypass hooks). # - no-verify=True, verify=True: # Invalid p.add_option('--no-verify', dest='bypass_hooks', action='store_true', help='Do not run the upload hook.') p.add_option('--verify', dest='allow_all_hooks', action='store_true', help='Run the upload hook without prompting.') def _SingleBranch(self, opt, branch, people): project = branch.project name = branch.name remote = project.GetBranch(name).remote key = 'review.%s.autoupload' % remote.review answer = project.config.GetBoolean(key) if answer is False: _die("upload blocked by %s = false" % key) if answer is None: date = branch.date commit_list = branch.commits destination = opt.dest_branch or project.dest_branch or project.revisionExpr print('Upload project %s/ to remote branch %s:' % (project.relpath, destination)) print(' branch %s (%2d commit%s, %s):' % ( name, len(commit_list), len(commit_list) != 1 and 's' or '', date)) for commit in commit_list: print(' %s' % commit) sys.stdout.write('to %s (y/N)? 
' % remote.review) answer = sys.stdin.readline().strip().lower() answer = answer in ('y', 'yes', '1', 'true', 't') if answer: if len(branch.commits) > UNUSUAL_COMMIT_THRESHOLD: answer = _ConfirmManyUploads() if answer: self._UploadAndReport(opt, [branch], people) else: _die("upload aborted by user") def _MultipleBranches(self, opt, pending, people): projects = {} branches = {} script = [] script.append('# Uncomment the branches to upload:') for project, avail in pending: script.append('#') script.append('# project %s/:' % project.relpath) b = {} for branch in avail: if branch is None: continue name = branch.name date = branch.date commit_list = branch.commits if b: script.append('#') destination = opt.dest_branch or project.dest_branch or project.revisionExpr script.append('# branch %s (%2d commit%s, %s) to remote branch %s:' % ( name, len(commit_list), len(commit_list) != 1 and 's' or '', date, destination)) for commit in commit_list: script.append('# %s' % commit) b[name] = branch projects[project.relpath] = project branches[project.name] = b script.append('') script = [ x.encode('utf-8') if issubclass(type(x), unicode) else x for x in script ] script = Editor.EditString("\n".join(script)).split("\n") project_re = re.compile(r'^#?\s*project\s*([^\s]+)/:$') branch_re = re.compile(r'^\s*branch\s*([^\s(]+)\s*\(.*') project = None todo = [] for line in script: m = project_re.match(line) if m: name = m.group(1) project = projects.get(name) if not project: _die('project %s not available for upload', name) continue m = branch_re.match(line) if m: name = m.group(1) if not project: _die('project for branch %s not in script', name) branch = branches[project.name].get(name) if not branch: _die('branch %s not in %s', name, project.relpath) todo.append(branch) if not todo: _die("nothing uncommented for upload") many_commits = False for branch in todo: if len(branch.commits) > UNUSUAL_COMMIT_THRESHOLD: many_commits = True break if many_commits: if not _ConfirmManyUploads(multiple_branches=True): _die("upload aborted by user") self._UploadAndReport(opt, todo, people) def _AppendAutoList(self, branch, people): """ Appends the list of reviewers in the git project's config. Appends the list of users in the CC list in the git project's config if a non-empty reviewer list was found. 
""" name = branch.name project = branch.project key = 'review.%s.autoreviewer' % project.GetBranch(name).remote.review raw_list = project.config.GetString(key) if not raw_list is None: people[0].extend([entry.strip() for entry in raw_list.split(',')]) key = 'review.%s.autocopy' % project.GetBranch(name).remote.review raw_list = project.config.GetString(key) if not raw_list is None and len(people[0]) > 0: people[1].extend([entry.strip() for entry in raw_list.split(',')]) def _FindGerritChange(self, branch): last_pub = branch.project.WasPublished(branch.name) if last_pub is None: return "" refs = branch.GetPublishedRefs() try: # refs/changes/XYZ/N --> XYZ return refs.get(last_pub).split('/')[-2] except (AttributeError, IndexError): return "" def _UploadAndReport(self, opt, todo, original_people): have_errors = False for branch in todo: try: people = copy.deepcopy(original_people) self._AppendAutoList(branch, people) # Check if there are local changes that may have been forgotten changes = branch.project.UncommitedFiles() if changes: key = 'review.%s.autoupload' % branch.project.remote.review answer = branch.project.config.GetBoolean(key) # if they want to auto upload, let's not ask because it could be automated if answer is None: sys.stdout.write('Uncommitted changes in ' + branch.project.name) sys.stdout.write(' (did you forget to amend?):\n') sys.stdout.write('\n'.join(changes) + '\n') sys.stdout.write('Continue uploading? (y/N) ') a = sys.stdin.readline().strip().lower() if a not in ('y', 'yes', 't', 'true', 'on'): print("skipping upload", file=sys.stderr) branch.uploaded = False branch.error = 'User aborted' continue # Check if topic branches should be sent to the server during upload if opt.auto_topic is not True: key = 'review.%s.uploadtopic' % branch.project.remote.review opt.auto_topic = branch.project.config.GetBoolean(key) destination = opt.dest_branch or branch.project.dest_branch # Make sure our local branch is not setup to track a different remote branch merge_branch = self._GetMergeBranch(branch.project) if destination: full_dest = 'refs/heads/%s' % destination if not opt.dest_branch and merge_branch and merge_branch != full_dest: print('merge branch %s does not match destination branch %s' % (merge_branch, full_dest)) print('skipping upload.') print('Please use `--destination %s` if this is intentional' % destination) branch.uploaded = False continue branch.UploadForReview(people, auto_topic=opt.auto_topic, draft=opt.draft, dest_branch=destination) branch.uploaded = True except UploadError as e: branch.error = e branch.uploaded = False have_errors = True print(file=sys.stderr) print('----------------------------------------------------------------------', file=sys.stderr) if have_errors: for branch in todo: if not branch.uploaded: if len(str(branch.error)) <= 30: fmt = ' (%s)' else: fmt = '\n (%s)' print(('[FAILED] %-15s %-15s' + fmt) % ( branch.project.relpath + '/', \ branch.name, \ str(branch.error)), file=sys.stderr) print() for branch in todo: if branch.uploaded: print('[OK ] %-15s %s' % ( branch.project.relpath + '/', branch.name), file=sys.stderr) if have_errors: sys.exit(1) def _GetMergeBranch(self, project): p = GitCommand(project, ['rev-parse', '--abbrev-ref', 'HEAD'], capture_stdout = True, capture_stderr = True) p.Wait() local_branch = p.stdout.strip() p = GitCommand(project, ['config', '--get', 'branch.%s.merge' % local_branch], capture_stdout = True, capture_stderr = True) p.Wait() merge_branch = p.stdout.strip() return merge_branch def Execute(self, opt, args): 
project_list = self.GetProjects(args) pending = [] reviewers = [] cc = [] branch = None if opt.branch: branch = opt.branch for project in project_list: if opt.current_branch: cbr = project.CurrentBranch up_branch = project.GetUploadableBranch(cbr) if up_branch: avail = [up_branch] else: avail = None print('ERROR: Current branch (%s) not uploadable. ' 'You may be able to type ' '"git branch --set-upstream-to m/master" to fix ' 'your branch.' % str(cbr), file=sys.stderr) else: avail = project.GetUploadableBranches(branch) if avail: pending.append((project, avail)) if pending and (not opt.bypass_hooks): hook = RepoHook('pre-upload', self.manifest.repo_hooks_project, self.manifest.topdir, abort_if_user_denies=True) pending_proj_names = [project.name for (project, avail) in pending] pending_worktrees = [project.worktree for (project, avail) in pending] try: hook.Run(opt.allow_all_hooks, project_list=pending_proj_names, worktree_list=pending_worktrees) except HookError as e: print("ERROR: %s" % str(e), file=sys.stderr) return if opt.reviewers: reviewers = _SplitEmails(opt.reviewers) if opt.cc: cc = _SplitEmails(opt.cc) people = (reviewers, cc) if not pending: print("no branches ready for upload", file=sys.stderr) elif len(pending) == 1 and len(pending[0][1]) == 1: self._SingleBranch(opt, pending[0][1][0], people) else: self._MultipleBranches(opt, pending, people)
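# Illustrative sketch (not part of the upload command itself): Gerrit publishes
# changes under refs of the form refs/changes/<shard>/<change>/<patchset>, which
# is why _FindGerritChange() takes the second-to-last path component. The ref
# value below is hypothetical.
def _example_change_number(ref='refs/changes/67/34567/2'):
  # 'refs/changes/67/34567/2' -> '34567'
  return ref.split('/')[-2]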
py
1a4aab148971b8e54c1c7c03f906bbef2e27b71f
# Copyright 2022 Maximilien Le Clei.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import cv2
import numpy as np
import torch


def compute_padding(d_input):

    padding = ()

    for d in d_input[-1:0:-1]:

        if d == 1:
            padding += (1, 1)
        elif d == 2:
            padding += (0, 1)
        else:
            padding += (0, 0)

    return padding


def neg(tup):
    return tuple(-x for x in tup)


def avg_pool(x, d):

    _, _, h, w = x.shape

    x = x.numpy()
    x = x[0]
    x = np.transpose(x, (1, 2, 0))
    # cv2.resize takes dsize as (width, height), so pass (w//d, h//d) to
    # downsample both spatial axes by d, including on non-square inputs.
    x = cv2.resize(x, (w // d, h // d), interpolation=cv2.INTER_AREA)

    if x.ndim == 2:
        x = x[:, :, None]

    x = np.transpose(x, (2, 0, 1))
    x = x[None, :, :, :]
    x = torch.Tensor(x)

    return x


def torch_cat(x, i):
    # Convert each tensor to a numpy array so np.concatenate sees plain arrays.
    x = [x_i.numpy() for x_i in x]
    return torch.Tensor(np.concatenate(x, i))
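# A quick usage sketch (illustrative only): avg_pool() downsamples the spatial
# dimensions of an NCHW tensor by an integer factor, and compute_padding()
# builds a tuple suitable for torch.nn.functional.pad.
if __name__ == '__main__':
    t = torch.rand(1, 3, 32, 32)
    print(avg_pool(t, 2).shape)              # torch.Size([1, 3, 16, 16])
    print(compute_padding((1, 3, 32, 32)))   # (0, 0, 0, 0, 0, 0)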
py
1a4aaba69747838f02a431e5486e88311e14443d
"""Tests for the templatetags of the markdown_utils app.""" from django.test import TestCase from ..templatetags import markdown_utils_tags as tags class RenderMarkdownTestCase(TestCase): """Tests for the ``render_markdown`` assignment tag.""" longMessage = True def test_tag(self): result = tags.render_markdown('# Foobar') self.assertEqual(result, '<h1>Foobar</h1>', msg=( 'Should render the given input correctly.')) result = tags.render_markdown('Foobar\nBarfoo') self.assertEqual(result, '<p>Foobar<br />\nBarfoo</p>', msg=( 'Should render the given input correctly.'))
py
1a4aac3c9e754235a58496dbad5c8bbb44f99827
"""Measurements collection.""" from datetime import datetime, timedelta from typing import Optional, cast import pymongo from pymongo.database import Database from model.metric import Metric from model.queries import get_attribute_type, get_measured_attribute from server_utilities.functions import iso_timestamp, percentage from server_utilities.type import MeasurementId, MetricId, Scale, Status, TargetType def latest_measurement(database: Database, metric_uuid: MetricId): """Return the latest measurement.""" return database.measurements.find_one(filter={"metric_uuid": metric_uuid}, sort=[("start", pymongo.DESCENDING)]) def latest_successful_measurement(database: Database, metric_uuid: MetricId): """Return the latest successful measurement.""" return database.measurements.find_one( filter={"metric_uuid": metric_uuid, "sources.value": {"$ne": None}}, sort=[("start", pymongo.DESCENDING)] ) def recent_measurements_by_metric_uuid(database: Database, max_iso_timestamp: str = "", days=7): """Return all recent measurements.""" max_iso_timestamp = max_iso_timestamp or iso_timestamp() min_iso_timestamp = (datetime.fromisoformat(max_iso_timestamp) - timedelta(days=days)).isoformat() recent_measurements = database.measurements.find( filter={"end": {"$gte": min_iso_timestamp}, "start": {"$lte": max_iso_timestamp}}, sort=[("start", pymongo.ASCENDING)], projection={"_id": False, "sources.entities": False}, ) measurements_by_metric_uuid: dict[MetricId, list] = {} for measurement in recent_measurements: measurements_by_metric_uuid.setdefault(measurement["metric_uuid"], []).append(measurement) return measurements_by_metric_uuid def measurements_by_metric( database: Database, *metric_uuids: MetricId, min_iso_timestamp: str = "", max_iso_timestamp: str = "", ): """Return all measurements for one metric, without the entities, except for the most recent one.""" measurement_filter: dict = {"metric_uuid": {"$in": metric_uuids}} if min_iso_timestamp: measurement_filter["end"] = {"$gt": min_iso_timestamp} if max_iso_timestamp: measurement_filter["start"] = {"$lt": max_iso_timestamp} latest_with_entities = database.measurements.find_one( measurement_filter, sort=[("start", pymongo.DESCENDING)], projection={"_id": False} ) if not latest_with_entities: return [] all_measurements_without_entities = database.measurements.find( measurement_filter, projection={"_id": False, "sources.entities": False} ) return list(all_measurements_without_entities)[:-1] + [latest_with_entities] def count_measurements(database: Database) -> int: """Return the number of measurements.""" return int(database.measurements.count_documents(filter={})) def update_measurement_end(database: Database, measurement_id: MeasurementId): """Set the end date and time of the measurement to the current date and time.""" return database.measurements.update_one(filter={"_id": measurement_id}, update={"$set": {"end": iso_timestamp()}}) def insert_new_measurement( database: Database, data_model, metric_data: dict, measurement: dict, previous_measurement: dict ) -> dict: """Insert a new measurement.""" if "_id" in measurement: del measurement["_id"] metric = Metric(data_model, metric_data) metric_type = data_model["metrics"][metric.type()] measurement["start"] = measurement["end"] = now = iso_timestamp() for scale in metric_type["scales"]: value = calculate_measurement_value(data_model, metric, measurement["sources"], scale) status = metric.status(value) measurement[scale] = dict(value=value, status=status, direction=metric.direction()) # We can't cover 
determine_status_start() returning False in the feature tests because all new measurements have # a status start timestamp, hence the pragma: no cover-behave: if status_start := determine_status_start(status, previous_measurement, scale, now): # pragma: no cover-behave measurement[scale]["status_start"] = status_start for target in ("target", "near_target", "debt_target"): target_type = cast(TargetType, target) measurement[scale][target] = determine_target_value(metric, measurement, scale, target_type) database.measurements.insert_one(measurement) del measurement["_id"] return measurement def calculate_measurement_value(data_model, metric: Metric, sources, scale: Scale) -> Optional[str]: """Calculate the measurement value from the source measurements.""" if not sources or any(source["parse_error"] or source["connection_error"] for source in sources): return None values = [int(source["value"]) - value_of_entities_to_ignore(data_model, metric, source) for source in sources] add = metric.addition() if scale == "percentage": direction = metric.direction() totals = [int(source["total"]) for source in sources] if add is sum: values, totals = [sum(values)], [sum(totals)] values = [percentage(value, total, direction) for value, total in zip(values, totals)] return str(add(values)) def value_of_entities_to_ignore(data_model, metric: Metric, source) -> int: """Return the value of ignored entities, i.e. entities marked as fixed, false positive or won't fix. If the entities have a measured attribute, return the sum of the measured attributes of the ignored entities, otherwise return the number of ignored attributes. For example, if the metric is the amount of ready user story points, the source entities are user stories and the measured attribute is the amount of story points of each user story. 
""" entities = source.get("entity_user_data", {}).items() ignored_entities = [ entity[0] for entity in entities if entity[1].get("status") in ("fixed", "false_positive", "wont_fix") ] source_type = metric.sources()[source["source_uuid"]]["type"] if attribute := get_measured_attribute(data_model, metric.type(), source_type): entity = data_model["sources"][source_type]["entities"].get(metric.type(), {}) attribute_type = get_attribute_type(entity, attribute) convert = dict(float=float, integer=int, minutes=int)[attribute_type] value = sum(convert(entity[attribute]) for entity in source["entities"] if entity["key"] in ignored_entities) else: value = len(ignored_entities) return int(value) def determine_status_start( current_status: Optional[Status], previous_measurement: dict, scale: Scale, now: str ) -> Optional[str]: """Determine the date time since when the metric has the current status.""" if previous_measurement: previous_status = previous_measurement.get(scale, {}).get("status") if current_status == previous_status: return str(previous_measurement.get(scale, {}).get("status_start", "")) or None return now def determine_target_value(metric: Metric, measurement: dict, scale: Scale, target: TargetType): """Determine the target, near target or debt target value.""" target_value = metric.get_target(target) if scale == metric.scale() else measurement.get(scale, {}).get(target) return None if target == "debt_target" and metric.accept_debt_expired() else target_value def changelog(database: Database, nr_changes: int, **uuids): """Return the changelog for the measurements belonging to the items with the specific uuids.""" return database.measurements.find( filter={"delta.uuids": {"$in": list(uuids.values())}}, sort=[("start", pymongo.DESCENDING)], limit=nr_changes, projection=["delta", "start"], )
py
1a4aada8d88a43fbf57fa53cc0b261f0a91ad320
# -*- coding: utf-8 -*- from selenium import webdriver from selenium.webdriver.common.by import By from selenium.webdriver.common.keys import Keys from selenium.webdriver.support.ui import Select from selenium.common.exceptions import NoSuchElementException from selenium.common.exceptions import NoAlertPresentException import unittest, time, re class UntitledTestCase(unittest.TestCase): def setUp(self): self.driver = webdriver.Firefox() self.driver.implicitly_wait(30) self.base_url = "https://www.katalon.com/" self.verificationErrors = [] self.accept_next_alert = True def test_untitled_test_case(self): driver = self.driver driver.get("http://localhost/addressbook/") driver.find_element_by_name("user").clear() driver.find_element_by_name("user").send_keys("admin") driver.find_element_by_name("pass").click() driver.find_element_by_name("pass").clear() driver.find_element_by_name("pass").send_keys("secret") driver.find_element_by_xpath( "(.//*[normalize-space(text()) and normalize-space(.)='Password:'])[1]/following::input[2]").click() driver.find_element_by_link_text("groups").click() driver.find_element_by_name("new").click() driver.find_element_by_name("group_name").click() driver.find_element_by_name("group_name").clear() driver.find_element_by_name("group_name").send_keys("test") driver.find_element_by_name("group_header").click() driver.find_element_by_name("group_header").clear() driver.find_element_by_name("group_header").send_keys("test1") driver.find_element_by_name("group_footer").click() driver.find_element_by_name("group_footer").clear() driver.find_element_by_name("group_footer").send_keys("test2") driver.find_element_by_name("submit").click() driver.find_element_by_link_text("group page").click() driver.find_element_by_xpath( "(.//*[normalize-space(text()) and normalize-space(.)='Groups'])[1]/following::span[1]").click() driver.find_element_by_link_text("Logout").click() def is_element_present(self, how, what): try: self.driver.find_element(by=how, value=what) except NoSuchElementException as e: return False return True def is_alert_present(self): try: self.driver.switch_to_alert() except NoAlertPresentException as e: return False return True def close_alert_and_get_its_text(self): try: alert = self.driver.switch_to_alert() alert_text = alert.text if self.accept_next_alert: alert.accept() else: alert.dismiss() return alert_text finally: self.accept_next_alert = True def tearDown(self): self.driver.quit() self.assertEqual([], self.verificationErrors) if __name__ == "__main__": unittest.main()
py
1a4aaed3e5419c127d5558c1465d74127d706059
import os
import asyncio

from jina import __default_host__
from jina.importer import ImportExtensions
from jina.serve.runtimes.gateway import GatewayRuntime
from jina.serve.runtimes.gateway.http.app import get_fastapi_app

__all__ = ['HTTPGatewayRuntime']


class HTTPGatewayRuntime(GatewayRuntime):
    """Runtime for HTTP interface."""

    async def async_setup(self):
        """
        The async method to set up the runtime.

        Sets up the uvicorn server.
        """
        with ImportExtensions(required=True):
            from uvicorn import Config, Server

        class UviServer(Server):
            """The uvicorn server."""

            async def setup(self, sockets=None):
                """
                Set up the uvicorn server.

                :param sockets: sockets of server.
                """
                config = self.config
                if not config.loaded:
                    config.load()
                self.lifespan = config.lifespan_class(config)
                self.install_signal_handlers()
                await self.startup(sockets=sockets)
                if self.should_exit:
                    return

            async def serve(self, **kwargs):
                """
                Start the server.

                :param kwargs: keyword arguments
                """
                await self.main_loop()

        from jina.helper import extend_rest_interface

        uvicorn_kwargs = self.args.uvicorn_kwargs or {}
        self._set_topology_graph()
        self._set_connection_pool()
        self._server = UviServer(
            config=Config(
                app=extend_rest_interface(
                    get_fastapi_app(
                        self.args,
                        topology_graph=self._topology_graph,
                        connection_pool=self._connection_pool,
                        logger=self.logger,
                    )
                ),
                host=__default_host__,
                port=self.args.port_expose,
                log_level=os.getenv('JINA_LOG_LEVEL', 'error').lower(),
                **uvicorn_kwargs
            )
        )
        await self._server.setup()

    async def async_run_forever(self):
        """Running method of the server."""
        self._connection_pool.start()
        await self._server.serve()

    async def _wait_for_cancel(self):
        """Do NOT override this method when inheriting from :class:`GatewayPod`"""
        # handle terminate signals
        while not self.is_cancel.is_set() and not self._server.should_exit:
            await asyncio.sleep(0.1)

        await self.async_cancel()

    async def async_teardown(self):
        """Shut down the server."""
        await self._server.shutdown()
        await self._connection_pool.close()

    async def async_cancel(self):
        """Stop the server."""
        self._server.should_exit = True
py
1a4ab0124058ed7d8bd7f3ef7ce568eb1bdd6c9f
from .celery import app as celery_app # this ensures that the celery app is loaded # every time Django starts __all__ = [ "celery_app", ]
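# For reference, the celery.py module imported above conventionally looks like
# the following sketch (the project name and settings path are assumptions):
#
#   import os
#   from celery import Celery
#
#   os.environ.setdefault("DJANGO_SETTINGS_MODULE", "myproject.settings")
#
#   app = Celery("myproject")
#   app.config_from_object("django.conf:settings", namespace="CELERY")
#   app.autodiscover_tasks()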
py
1a4ab027e08f8e945d5484a619a4da2d6a73504e
from multiprocessing.connection import wait from bot import dp from aiogram import types from aiogram.dispatcher.storage import FSMContext from filters import Main, IsOwner from functions.client import cidSelect, getpasswordState, sidSelect, pidSelect, cnSelect, sftSelect, scidSelect, getloginState, schoolInfo from states import addAccount from utils.db import db from callbacks import cb_account from functions.sgo import ns_sessions from netschoolapi import NetSchoolAPI from utils.db.data import Account @dp.callback_query_handler(Main(), cb_account.filter(action='login'), state='*') async def select_login_handler(call: types.CallbackQuery, callback_data: dict, state=FSMContext): await call.answer() await getloginState(call.message, state) @dp.callback_query_handler(Main(), cb_account.filter(action='select_scid'), state=addAccount.scid) async def select_sft_handler(call: types.CallbackQuery, callback_data: dict, state=FSMContext): await call.answer() account = await Account.get_registerAccount(call.from_user.id) ns = ns_sessions[account['id']] ns._prelogin_data['scid'] = callback_data.get('value') await Account.update(account['id'], **ns._prelogin_data) await schoolInfo(call.message, account['id']) @dp.callback_query_handler(Main(), cb_account.filter(action='select_sft'), state=[addAccount.sft, '*']) async def select_sft_handler(call: types.CallbackQuery, callback_data: dict, state=FSMContext): account = await Account.get_registerAccount(call.from_user.id) ns = ns_sessions[account['id']] ns._prelogin_data['sft'] = callback_data.get('value') await Account.update(account['id'], **ns._prelogin_data) await call.answer() await scidSelect(call.message, account['id']) @dp.callback_query_handler(Main(), cb_account.filter(action='select_cn'), state=addAccount.cn) async def select_cn_handler(call: types.CallbackQuery, callback_data: dict, state=FSMContext): account = await Account.get_registerAccount(call.from_user.id) ns = ns_sessions[account['id']] ns._prelogin_data['cn'] = callback_data.get('value') await Account.update(account['id'], **ns._prelogin_data) await call.answer() await sftSelect(call.message, account['id']) @dp.callback_query_handler(Main(), cb_account.filter(action='select_pid'), state=addAccount.pid) async def select_pid_handler(call: types.CallbackQuery, callback_data: dict, state=FSMContext): account = await Account.get_registerAccount(call.from_user.id) ns = ns_sessions[account['id']] ns._prelogin_data['pid'] = callback_data.get('value') await Account.update(account['id'], **ns._prelogin_data) await call.answer() await cnSelect(call.message, account['id']) @dp.callback_query_handler(Main(), cb_account.filter(action='select_sid'), state=addAccount.sid) async def select_sid_handler(call: types.CallbackQuery, callback_data: dict, state=FSMContext): account = await Account.get_registerAccount(call.from_user.id) ns = ns_sessions[account['id']] ns._prelogin_data['sid'] = callback_data.get('value') await Account.update(account['id'], **ns._prelogin_data) await call.answer() await pidSelect(call.message, account['id']) @dp.callback_query_handler(Main(), cb_account.filter(action='select_cid'), state=addAccount.cid) async def select_cid_handler(call: types.CallbackQuery, callback_data: dict, state=FSMContext): account = await Account.get_registerAccount(call.from_user.id, 'id') ns = ns_sessions[account['id']] ns._prelogin_data['cid'] = callback_data.get('value') await Account.update(account['id'], **ns._prelogin_data) await call.answer() await sidSelect(call.message, account['id']) 
@dp.callback_query_handler(Main(), cb_account.filter(action='add', value=''), state=[addAccount.url, addAccount.wait_url, '*']) async def account_add(call: types.CallbackQuery, state=FSMContext): # register_account = await Account.get_registerAccount(call.from_user.id) # if register_account: # for account in register_account: # print(account) # if not account: # ... await call.answer() await addAccount.url.set() regions = await db.executeall("SELECT * FROM regions ORDER BY users_count DESC NULLS LAST LIMIT 3") if regions: await nsSelect(call.message) else: async with state.proxy() as data: data['message'] = call.message await call.message.edit_text("📎 Введите ссылку на ваш СГО") await addAccount.wait_url.set() @dp.callback_query_handler(Main(), cb_account.filter(action='region_select'), state=['*']) async def regionSelect(call: types.CallbackQuery, callback_data: dict): region = await db.execute("SELECT url FROM regions WHERE id = %s", [callback_data['value']]) account = await Account.add(call.from_user.id, region[0]) await addAccount.cid.set() ns_sessions[account['id']] = NetSchoolAPI(region[0]) await cidSelect(account['id'], call.message) async def nsSelect(message: types.Message): regions = await db.executeall("SELECT * FROM regions ORDER BY users_count DESC NULLS LAST LIMIT 3") markup = types.InlineKeyboardMarkup() button_loc = types.InlineKeyboardButton( "📍 Определить регион", callback_data=cb_account.new(action='geo', value='')) button_custom = types.InlineKeyboardButton( "✏️ Ввести свою ссылку", callback_data=cb_account.new(action='url', value='')) markup.row(button_loc, button_custom) for x in regions: markup.add(types.InlineKeyboardButton( x[1], callback_data=cb_account.new(action='region_select', value=str(x[0])))) text = "🏙 Выбрите город или другой метод добавления Сетевого Города. Образование" if message.text != text: await message.edit_text(text, reply_markup=markup) @dp.callback_query_handler(Main(), cb_account.filter(action='geo', value=''), state=addAccount.url) async def requestGeo(call: types.CallbackQuery, state=FSMContext): await call.answer() markup = types.ReplyKeyboardMarkup(resize_keyboard=True) markup.add(types.KeyboardButton( "📍 Оптравить местоположение", request_location=True)) markup.add(types.KeyboardButton( "❌ Отмена")) georequest_msg = await call.message.answer("📍 Воспользуйтесь специальной кнопкой для отправки своего местоположения", reply_markup=markup) await call.message.delete() async with state.proxy() as data: data["message"] = georequest_msg await addAccount.wait_geo.set() @dp.callback_query_handler(Main(), cb_account.filter(action='url', value=''), state=addAccount.url) async def waitUrl(call: types.CallbackQuery, state: FSMContext): await call.answer() markup = types.InlineKeyboardMarkup() markup.add(types.InlineKeyboardButton( "◀️ Вернуться к другим методам", callback_data=cb_account.new(action='add', value=''))) async with state.proxy() as data: data["message"] = call.message await addAccount.wait_url.set() await call.message.edit_text("💬 Отправьте ссылку на свою систему Сетевой Город. 
Образование, скопировав её из адресной строки вашего браузера", reply_markup=markup) @dp.callback_query_handler(Main(), cb_account.filter(action='continue'), state=['*']) async def account_continueAdd(call: types.CallbackQuery, callback_data: dict, state: FSMContext): account = await Account.get_registerAccount(call.from_user.id) if not account['url']: await account_add(call, state) else: ns = NetSchoolAPI(account['url']) ns_sessions[account['id']] = ns regions = await db.execute("SELECT * FROM regions") for key in account.items(): if key[1]: ns._prelogin_data.update({key[0]: key[1]}) else: if key[0] == 'cid': await cidSelect(account['id'], call.message) break elif key[0] == 'sid': await sidSelect(call.message, account['id']) break elif key[0] == 'pid': await pidSelect(call.message, account['id']) break elif key[0] == 'cn': await cnSelect(call.message, account['id']) break elif key[0] == 'sft': await sftSelect(call.message, account['id']) break elif key[0] == 'scid': await scidSelect(call.message, account['id']) break elif key[0] == 'username': await schoolInfo(call.message, account['id']) break elif key[0] == 'password': await schoolInfo(call.message, account['id']) break else: await account_add(call, state) break
py
1a4ab151820c409a45ef9f39edb95fba26e58552
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import abc class TransformationPool(abc.ABC): @abc.abstractmethod def get_transformation(self, exclude_trans=None): raise NotImplementedError @abc.abstractmethod def transformations_sim(self, t1, t2): raise NotImplementedError
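# A minimal concrete pool, sketched here only to illustrate the interface; the
# transformation and the similarity metric are placeholders, not part of the
# original code.
class IdentityTransformationPool(TransformationPool):
    def get_transformation(self, exclude_trans=None):
        # A real pool would sample from a collection of transformations,
        # skipping any listed in `exclude_trans`; here we always return identity.
        return lambda x: x

    def transformations_sim(self, t1, t2):
        # Placeholder similarity: identical callables are maximally similar.
        return 1.0 if t1 is t2 else 0.0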
py
1a4ab36429050f360f92807a2a5f2db48a78a4af
# Title: 개미 (Ants)
# Link: https://www.acmicpc.net/problem/4307

import sys

sys.setrecursionlimit(10 ** 6)

read_single_int = lambda: int(sys.stdin.readline().strip())
read_list_int = lambda: list(map(int, sys.stdin.readline().strip().split(' ')))


def solution(l: int, n: int, ants: list):
    fast, slow = 0, 0
    for ant in ants:
        fast = max(fast, min(l-ant, ant))
        slow = max(slow, max(ant, l-ant))
    return '{} {}'.format(fast, slow)


def main():
    t = read_single_int()
    for _ in range(t):
        l, n = read_list_int()
        ants = []
        for _ in range(n):
            ants.append(read_single_int())
        print(solution(l, n, ants))


if __name__ == '__main__':
    main()
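# Worked example: for a pole of length 10 with ants at positions 2, 6 and 7,
# each ant leaves soonest via its nearer end and latest via its farther end,
# so the earliest time all are gone is max(min(2, 8), min(6, 4), min(7, 3)) = 4
# and the latest is max(max(2, 8), max(6, 4), max(7, 3)) = 8:
#
#   solution(10, 3, [2, 6, 7])  # -> '4 8'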
py
1a4ab42e447805f2a28558075a1a829a796cd8cf
from datetime import datetime import pytz as pytz import scrapy from fosdem_event_scraper.settings import INPUT_FILE class FosdemEventSpider(scrapy.Spider): name = "fosdem-event" def start_requests(self): with open(INPUT_FILE, "r") as fhandle: for url in map(str.rstrip, fhandle.readlines()): yield scrapy.Request(url=url) def parse(self, response, **kwargs): ret = dict() for info_element in response.css("ul.side-box > li"): infoid = info_element.css("strong::text").extract_first().lower() ret[infoid] = info_element.css("a::text").extract_first() ret["title"] = response.css("#pagetitles h1::text").extract_first() ret["url"] = response.url ret["time"] = datetime.now(pytz.UTC) yield ret
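# Usage sketch (file name is an assumption): INPUT_FILE, defined in
# fosdem_event_scraper/settings.py, e.g. INPUT_FILE = "urls.txt", should hold
# one FOSDEM event URL per line; the spider can then be run with
#
#   scrapy crawl fosdem-event -O events.json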
py
1a4ab4c632fd693c569c9acbec3f5d23066870c2
from __future__ import print_function import os import warnings warnings.filterwarnings('ignore') import time import torch import shutil import argparse from m2det import build_net import torch.utils.data as data import torch.backends.cudnn as cudnn from torch.nn.utils.clip_grad import clip_grad_norm_ from layers.functions import Detect,PriorBox from data import detection_collate from configs.CC import Config from utils.core import * from tensorboardX import SummaryWriter parser = argparse.ArgumentParser(description='M2Det Training') parser.add_argument('-c', '--config', default='configs/m2det320_resnet101.py') parser.add_argument('-d', '--dataset', default='COCO', help='VOC or COCO dataset') parser.add_argument('--ngpu', default=1, type=int, help='gpus') parser.add_argument('--resume_net', default=None, help='resume net for retraining') parser.add_argument('--resume_epoch', default=0, type=int, help='resume iter for retraining') parser.add_argument('-t', '--tensorboard', type=bool, default=False, help='Use tensorborad to show the Loss Graph') args = parser.parse_args() print_info('----------------------------------------------------------------------\n' '| M2Det Training Program |\n' '----------------------------------------------------------------------',['yellow','bold']) #Enable Tensorboard logger logger = set_logger(args.tensorboard) writer = SummaryWriter() #Use configs from specified global cfg cfg = Config.fromfile(args.config) net = build_net('train', size = cfg.model.input_size, # Only 320, 512, 704 and 800 are supported config = cfg.model.m2det_config) init_net(net, cfg, args.resume_net) # init the network with pretrained weights or resumed weights if args.ngpu>1: net = torch.nn.DataParallel(net) if cfg.train_cfg.cuda: net.cuda() cudnn.benchmark = True optimizer = set_optimizer(net, cfg) criterion = set_criterion(cfg) anchor_config = anchors(cfg) priorbox = PriorBox(anchor_config) #Detector detector = Detect(cfg.model.m2det_config.num_classes, cfg.loss.bkg_label, anchor_config) with torch.no_grad(): priors = priorbox.forward() if cfg.train_cfg.cuda: priors = priors.cuda() if __name__ == '__main__': net.train() epoch = args.resume_epoch print_info('===> Loading Dataset...',['yellow','bold']) dataset = get_dataloader(cfg, args.dataset, 'train_sets') epoch_size = len(dataset) // (cfg.train_cfg.per_batch_size * args.ngpu) max_iter = getattr(cfg.train_cfg.step_lr,args.dataset)[-1] * epoch_size stepvalues = [_*epoch_size for _ in getattr(cfg.train_cfg.step_lr, args.dataset)[:-1]] print_info('===> Training M2Det on ' + args.dataset, ['yellow','bold']) step_index = 0 if args.resume_epoch > 0: start_iter = args.resume_epoch * epoch_size else: start_iter = 0 for iteration in range(start_iter, max_iter): if iteration % epoch_size == 0: batch_iterator = iter(data.DataLoader(dataset, cfg.train_cfg.per_batch_size * args.ngpu, shuffle=True, num_workers=cfg.train_cfg.num_workers, collate_fn=detection_collate)) if epoch % cfg.model.save_eposhs == 0: save_checkpoint(net, cfg, final=False, datasetname = args.dataset, epoch=epoch) epoch += 1 load_t0 = time.time() if iteration in stepvalues: step_index += 1 lr = adjust_learning_rate(optimizer, cfg.train_cfg.gamma, epoch, step_index, iteration, epoch_size, cfg) images, targets = next(batch_iterator) if cfg.train_cfg.cuda: images = images.cuda() targets = [anno.cuda() for anno in targets] out = net(images) optimizer.zero_grad() loss_l, loss_c = criterion(out, priors, targets) loss = loss_l + loss_c write_logger({'loc_loss':loss_l.item(), 
'conf_loss':loss_c.item(), 'loss':loss.item()},logger,iteration,status=args.tensorboard) loss.backward() #clip_grad_norm_(net.parameters(), 5) # Clip gradients at 5, because exploding gradients occurred once optimizer.step() load_t1 = time.time() print_train_log(iteration, cfg.train_cfg.print_epochs, [time.ctime(),epoch,iteration%epoch_size,epoch_size,iteration,loss_l.item(),loss_c.item(),load_t1-load_t0,lr]) save_checkpoint(net, cfg, final=True, datasetname=args.dataset,epoch=-1)
py
1a4ab4fc3318619eefa20871c3eea03e7ee607a8
from typing import *


class Solution:
    def fourSum(self, nums: List[int], target: int) -> List[List[int]]:
        result = set()
        nums_len = len(nums)
        nums.sort()
        for i in range(nums_len):
            for j in range(i + 1, nums_len):
                k = j + 1
                l = nums_len - 1
                while k < l:
                    temp_sum = nums[i] + nums[j] + nums[k] + nums[l]
                    if temp_sum == target:
                        result.add((nums[i], nums[j], nums[k], nums[l]))
                        k += 1
                    elif temp_sum < target:
                        k += 1
                    else:
                        l -= 1
        # Convert each unique quadruplet back to a list.
        return [list(quad) for quad in result]


s = Solution()
print(s.fourSum([-3, -2, -1, 0, 0, 1, 2, 3], 0))
py
1a4ab518e1b8ff67a53bf69f606e8e516df54dbc
import pandas as pd import numpy as np import torch import torch.nn as nn import torch.nn.functional as F import torch.optim as optim import torch import torchvision import torchvision.transforms as transforms from sklearn.model_selection import train_test_split from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler from sklearn.metrics import classification_report from sklearn.metrics import accuracy_score import pickle import torch.optim as optim def run_full_code_gn(device, model_file, test_data_file, output_file, n): def unpickle(file): with open(file, 'rb') as fo: dict = pickle.load(fo, encoding='bytes') return dict def normalise(X): return (X - X.mean(axis = 0))/(X.std(axis = 0) + (np.ones((1,X.shape[1]))*(1e-06))) dict6 = unpickle(test_data_file) Xtest = np.array(dict6[b'data']) # Ytest = np.array(dict6[b'labels']) Xtest = normalise(Xtest) Xtest = Xtest.reshape(10000, 3, 32, 32) Xtest = torch.from_numpy(Xtest) # Ytest = Ytest.astype(int) # Ytest = torch.from_numpy(Ytest) Xtest = Xtest.to(torch.float32) # Ytest = Ytest.to(torch.int64) class LambdaLayer(nn.Module): def __init__(self, lambd): super(LambdaLayer, self).__init__() self.lambd = lambd def forward(self, x): return self.lambd(x) class Group_Normalisation(nn.Module): def __init__(self, numlayer, G): super().__init__() self.gamma = nn.Parameter(torch.ones((1, numlayer, 1, 1)), requires_grad = True) self.beta = nn.Parameter(torch.zeros((1, numlayer, 1, 1)), requires_grad = True) self.eps = 1e-6 self.G = G def forward(self, x): x = x.reshape((x.shape[0], self.G, x.shape[1]//self.G, x.shape[2], x.shape[3])) mean = x.mean(dim = (2, 3, 4), keepdim=True) var = x.var(dim = (2, 3, 4), keepdim=True) x = (x - mean) / torch.sqrt(var + self.eps) x = x.reshape((x.shape[0], x.shape[2]*self.G, x.shape[3], x.shape[4])) x = self.gamma * x + self.beta return x class ResNetBlock(nn.Module): def __init__(self, numlayer, n, G): super(ResNetBlock, self).__init__() self.conv1 = nn.Conv2d(numlayer, numlayer, 3, padding = 1) self.group_norm1 = Group_Normalisation(numlayer, G) self.conv2 = nn.Conv2d(numlayer, numlayer, 3, padding = 1) self.group_norm2 = Group_Normalisation(numlayer, G) def forward(self, x): y = x x = self.conv1(x) x = self.group_norm1(x) x = F.relu(x) x = self.conv2(x) x = self.group_norm2(x) x = x + y x = F.relu(x); return x class ResNet_Layer(nn.Module): def __init__(self, numlayer, n, G): super(ResNet_Layer, self).__init__() self.conv_blocs = nn.Sequential(*[ResNetBlock(numlayer, n, G) for i in range(0, n)]) def forward(self, x): x = self.conv_blocs(x); return x class ResNet_Downsample(nn.Module): def __init__(self, numlayerin, numlayerout, n, G): super(ResNet_Downsample, self).__init__() self.conv1 = nn.Conv2d(numlayerin, numlayerout, 3, stride = 2, padding = 1) self.layer_norm1 = Group_Normalisation(numlayerout, G) self.conv2 = nn.Conv2d(numlayerout, numlayerout, 3, padding = 1) self.layer_norm2 = Group_Normalisation(numlayerout, G) self.s1A = LambdaLayer(lambda x: F.pad(x[:, :, ::2, ::2], (0, 0, 0, 0, int(numlayerin/2), int(numlayerin/2)), "constant", 0)) def forward(self, x): y = x x = self.conv1(x) x = self.layer_norm1(x) x = F.relu(x) x = self.conv2(x) x = self.layer_norm2(x) x = x + self.s1A(y) x = F.relu(x) return x class ResNet(nn.Module): def __init__(self, n1, r1): super(ResNet, self).__init__() self.n = n1 self.r = r1 self.conv_3_16 = nn.Conv2d(3, 16, 3, padding = 1) self.group_norm1 = Group_Normalisation(16, 4) self.resnet_layer1 = ResNet_Layer(16, n1, 4) self.resnet_block1 = 
ResNet_Downsample(16, 32, n1, 8) self.resnet_layer2 = ResNet_Layer(32, n1-1, 8) self.resnet_block2 = ResNet_Downsample(32, 64, n1, 8) self.resnet_layer3 = ResNet_Layer(64, n1-1, 8) self.globalAvg = nn.AdaptiveAvgPool2d((1, 1)) self.fc1 = nn.Linear(64, self.r) def forward(self, x): x = self.conv_3_16(x) x = self.group_norm1(x) x = F.relu(x) x = self.resnet_layer1(x) x = self.resnet_block1(x) x = self.resnet_layer2(x) x = self.resnet_block2(x) x = self.resnet_layer3(x) #Global average pooling x = self.globalAvg(x) y = x.view(-1, 64) x = self.fc1(y) return x, y model = ResNet(n, 10) model.load_state_dict(torch.load(model_file)) model = model.to(device) len_Xtest = Xtest.shape[0] final_preds = np.array([]).reshape((0, 10)) batch_size = 128 for i in range(0, (len_Xtest//batch_size)): x = torch.FloatTensor(Xtest[i*batch_size:(i+1)*batch_size, :]).to(device) with torch.no_grad(): preds, _ = model(x) final_preds = np.vstack((final_preds, preds.detach().cpu().numpy())) if(len_Xtest - ((len_Xtest//batch_size)*batch_size) > 0): x = torch.FloatTensor(Xtest[((len_Xtest//batch_size)*batch_size):len_Xtest, :]).to(device) with torch.no_grad(): preds, _ = model(x) final_preds = np.vstack((final_preds, preds.detach().cpu().numpy())) print(final_preds.shape) final_preds = final_preds.argmax(axis = 1) final_preds = final_preds.reshape(final_preds.shape[0]) # # get predictions for val data # with torch.no_grad(): # preds, _ = model(Xtest.to(device)) # preds = preds.detach().cpu().numpy() # # prediction # prediction = preds.argmax(axis = 1) s = "" for x in final_preds: s += str(x) + "\n" with open(output_file, "w") as f: f.write(s)
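# Example invocation (file names are hypothetical; the test file must be a
# CIFAR-10-style pickle exposing b'data', and the checkpoint must come from a
# matching ResNet(n, 10) trained with this group-norm architecture):
#
#   device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
#   run_full_code_gn(device, 'model_gn.pth', 'test_batch', 'predictions.txt', n=2)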
py
1a4ab5b7f6fe1508a67f545183e2c384f5e665d3
#from .exchanges import Exchange, FTXSpot import asyncio class Order: def __init__(self, order_id: str, base: str, quote: str, side: str, volume: float): self.id = order_id self.base = base self.quote = quote self.side = side.upper() self.volume = volume self.remaining_volume = volume self.open = True self.completed = False self.filled_volume = 0 #Total order volume (including fees) self.total_fees = {} #Fees paid, format {currency: fee} self.fills = {} self.fill_event = asyncio.Event() self.close_event = asyncio.Event() self.price = None self.reported_fill = None self.modifyed = False def update(self, update_type, data): balance_changes = {self.quote: 0, self.base: 0} if update_type == 'FILL': if data['trade_id'] in self.fills: return balance_changes volume_modifyer = 1 if self.side == 'BUY' else -1 self.remaining_volume -= data['volume'] print('Order', self.id, self.base, self.quote, ' fill, remaining volume: ', self.remaining_volume) balance_changes[self.base] += volume_modifyer * data['volume'] balance_changes[self.quote] -= volume_modifyer * data['volume'] * data['price'] for currency, fee in data['fees'].items(): if currency not in self.total_fees: self.total_fees[currency] = 0 if currency not in balance_changes: balance_changes[currency] = 0 self.total_fees[currency] += fee balance_changes[currency] -= fee self.fills[data['trade_id']] = dict(balance_changes) if self.remaining_volume < 10**-5 or (self.reported_fill is not None and self.reported_fill - 10**-5 <= self.volume - self.remaining_volume): self.open = False self.completed = True self.fill_event.set() if update_type == 'UPDATE': if data['status'] == 'CLOSED' and data['id'] == self.id and not self.modifyed: self.open = False self.close_event.set() self.reported_fill = data['filled_size'] if self.reported_fill - 10**-5 <= self.volume - self.remaining_volume: self.fill_event.set() if self.reported_fill == 0.0 and self.price is None: print('Order canceled by exchange, no reason given') return balance_changes class LimitOrder(Order): pass class MarketOrder(Order): pass class Account: '''Account class to manage orders and store basic data''' def __init__(self, api, secret, exchange): self.api_key = api self.secret_key = secret self.exchange = exchange self.balance = None self.order_update_queue = exchange.user_update_queue self.parse_order_update_task = asyncio.create_task(self.parse_order_updates()) self.orders = {} self.unhandled_order_updates = {} self.fill_queues = {} async def get_balance(self): self.balance = await self.exchange.get_account_balance(self.api_key, self.secret_key) def __str__(self): r = '' for coin, balance in self.balance.items(): if balance > 0: r += coin + '\t| ' + '{0:.4f}'.format(balance) r += '\n' return r def remove_closed_orders(self): to_delete = [] for order_id, order in self.orders.items(): if not order.open: to_delete.append(order_id) for order_id in to_delete: del self.orders[order_id] async def get_open_orders(self): pass async def parse_order_updates(self): try: while True and self.exchange.connection_manager.open: if self.balance is None: await self.get_balance() order_update = await self.order_update_queue.get() if order_update['type'] == 'FILL': volume_modifyer = 1 if order_update['side'] == 'BUY' else -1 base, quote = order_update['market'] if base not in self.balance: self.balance[base] = 0.0 if quote not in self.balance: self.balance[quote] = 0.0 self.balance[base] += volume_modifyer * order_update['volume'] self.balance[quote] -= volume_modifyer * order_update['volume'] * 
order_update['price'] for fee_currency, fee in order_update['fees'].items(): if fee_currency not in self.balance: self.balance[fee_currency] = 0.0 self.balance[fee_currency] -= fee print(order_update['id'], self.fill_queues) if order_update['id'] in self.fill_queues: await self.fill_queues[order_update['id']].put(order_update) if order_update['id'] not in self.orders: if order_update['id'] not in self.unhandled_order_updates: self.unhandled_order_updates[order_update['id']] = [] self.unhandled_order_updates[order_update['id']].append(order_update) else: self.orders[order_update['id']].update(order_update['type'], order_update) self.order_update_queue.task_done() except Exception as e: print('Error in Account.parse_order_updates():', e) def add_order(self, order): if order.id in self.unhandled_order_updates: for update in self.unhandled_order_updates[order.id]: order.update(update['type'], update) self.orders[order.id] = order async def refresh_fills(self, start_time): fills = await self.exchange.get_order_fills(start_time, self.api_key, self.secret_key) for fill in fills: if fill['id'] not in self.orders: print('Error in account class, orders out of sync!') #need to update orders elif fill['trade_id'] not in self.orders[fill['id']]: self.orders[fill['id']].update('FILL', fill) async def market_order(self, base, quote, side, **kwargs): if 'quote_volume' not in kwargs and 'volume' not in kwargs: print('ERROR: missing required argument') #TODO: proper exception return if 'volume' in kwargs: response = await self.exchange.market_order(base, quote, side, kwargs['volume'], self.api_key, self.secret_key) else: response = await self.exchange.market_order_quote_volume(base, quote, side, kwargs['quote_volume'], self.api_key, self.secret_key) async def limit_order(self, base, quote, side, price, volume, fill_queue = None): order = await self.exchange.limit_order(base, quote, side, price, volume, self.api_key, self.secret_key) self.fill_queues[order.id] = fill_queue return order async def change_order(self, order, **kwargs): print(kwargs) order.modifyed = True if order.remaining_volume < 10**-6: return if 'exchange' in kwargs: exchange = kwargs['exchange'] if 'price' in kwargs and float(self.exchange.price_renderers[(order.base, order.quote)].render(kwargs['price'])) == order.price: del kwargs['price'] if 'price' in kwargs and 'size' in kwargs: new_order_id, new_price, new_remaining = await self.exchange.change_order(order.id, order.base, order.quote, self.api_key, self.secret_key, self.subaccount, price=kwargs['price'], size=kwargs['size']) elif 'price' in kwargs: new_order_id, new_price, new_remaining = await self.exchange.change_order(order.id, order.base, order.quote, self.api_key, self.secret_key, self.subaccount, price=kwargs['price']) elif 'size' in kwargs: new_order_id, new_price, new_remaining = await self.exchange.change_order(order.id, order.base, order.quote, self.api_key, self.secret_key, self.subaccount, size=kwargs['size']) else: print('no change to order') order.modifyed = False return order.price = new_price if order.id in self.fill_queues: self.fill_queues[new_order_id] = self.fill_queues[order.id] order.id = new_order_id order.modifyed = False self.orders[new_order_id] = order class BinanceAccount(Account): async def get_dividend_record(self, limit = 20): return await self.exchange.get_asset_dividend(limit, self.api_key, self.secret_key) async def get_account_websocket_key(self): response = await self.exchange.connection_manager.signed_get() class FuturesAccount(Account): pass class 
FTXAccount(Account): def __init__(self, api, secret, exchange, subaccount = None, connection_manager = None): self.subaccount = subaccount super().__init__(api, secret, exchange) if connection_manager is not None: self.connection_manager = connection_manager async def market_order(self, base, quote, side, **kwargs): if 'exchange' in kwargs: exchange = kwargs['exchange'] else: exchange = self.exchange if 'quote_volume' not in kwargs and 'volume' not in kwargs: print('ERROR: missing required argument') #TODO: proper exception return if 'volume' in kwargs: order = await exchange.market_order(base, quote, side, kwargs['volume'], self.api_key, self.secret_key, self.subaccount) else: order = await exchange.market_order_quote_volume(base, quote, side, kwargs['quote_volume'], self.api_key, self.secret_key, self.subaccount) if order is None: #failed to place order... return self.add_order(order) return order async def limit_order(self, base, quote, side, price, volume, **kwargs): if 'exchange' in kwargs: exchange = kwargs['exchange'] else: exchange = self.exchange response = await exchange.limit_order(base, quote, side, price, volume, self.api_key, self.secret_key, self.subaccount) self.add_order(response) if 'fill_queue' in kwargs: self.fill_queues[response.id] = kwargs['fill_queue'] else: print(kwargs) return response async def cancel_order(self, order_id, **kwargs): response = await self.exchange.cancel_order(order_id.id, self.api_key, self.secret_key, self.subaccount) async def get_balance(self): self.balance = await self.exchange.get_account_balance(self.api_key, self.secret_key, self.subaccount) async def subscribe_to_user_data(self): await self.get_balance() await self.exchange.subscribe_to_user_data(self.api_key, self.secret_key, self.subaccount) async def cancel_all_orders(self): await self.exchange.cancel_all_orders(self.api_key, self.secret_key, self.subaccount)
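# Usage sketch (illustrative; the exchange object comes from the .exchanges
# module referenced in the commented import at the top of this file, and the
# API keys below are placeholders):
#
#   async def demo(exchange):
#       account = FTXAccount('API_KEY', 'API_SECRET', exchange)
#       await account.subscribe_to_user_data()   # load balances, start stream
#       order = await account.limit_order('BTC', 'USD', 'BUY', 20000.0, 0.001)
#       await account.cancel_order(order)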
py
1a4ab6236c40371385cc2131b157f3291ddc0543
__all__ = ["Monitor", "get_monitor_files", "load_results"] import csv import json import os import time from glob import glob from typing import List, Optional, Tuple, Union import gym import numpy as np import pandas from stable_baselines3.common.type_aliases import GymObs, GymStepReturn class Monitor(gym.Wrapper): """ A monitor wrapper for Gym environments, it is used to know the episode reward, length, time and other data. :param env: The environment :param filename: the location to save a log file, can be None for no log :param allow_early_resets: allows the reset of the environment before it is done :param reset_keywords: extra keywords for the reset call, if extra parameters are needed at reset :param info_keywords: extra information to log, from the information return of env.step() """ EXT = "monitor.csv" def __init__( self, env: gym.Env, filename: Optional[str] = None, allow_early_resets: bool = True, reset_keywords: Tuple[str, ...] = (), info_keywords: Tuple[str, ...] = (), ): super(Monitor, self).__init__(env=env) self.t_start = time.time() if filename is None: self.file_handler = None self.logger = None else: if not filename.endswith(Monitor.EXT): if os.path.isdir(filename): filename = os.path.join(filename, Monitor.EXT) else: filename = filename + "." + Monitor.EXT self.file_handler = open(filename, "wt") self.file_handler.write("#%s\n" % json.dumps( {"t_start": self.t_start, "env_id": env.spec and env.spec.id})) self.logger = csv.DictWriter(self.file_handler, fieldnames=( "r", "l", "s", "t") + reset_keywords + info_keywords) self.logger.writeheader() self.file_handler.flush() self.reset_keywords = reset_keywords self.info_keywords = info_keywords self.allow_early_resets = allow_early_resets self.rewards = None self.needs_reset = True self.episode_rewards = [] self.episode_lengths = [] self.episode_times = [] self.total_steps = 0 self.success = [] # extra info about the current episode, that was passed in during reset() self.current_reset_info = {} def reset(self, **kwargs) -> GymObs: """ Calls the Gym environment reset. Can only be called if the environment is over, or if allow_early_resets is True :param kwargs: Extra keywords saved for the next episode. only if defined by reset_keywords :return: the first observation of the environment """ if not self.allow_early_resets and not self.needs_reset: raise RuntimeError( "Tried to reset an environment before done. 
If you want to allow early resets, " "wrap your env with Monitor(env, path, allow_early_resets=True)" ) self.rewards = [] self.success = [] self.needs_reset = False for key in self.reset_keywords: value = kwargs.get(key) if value is None: raise ValueError( "Expected you to pass kwarg {} into reset".format(key)) self.current_reset_info[key] = value return self.env.reset(**kwargs) def step(self, action: Union[np.ndarray, int]) -> GymStepReturn: """ Step the environment with the given action :param action: the action :return: observation, reward, done, information """ if self.needs_reset: raise RuntimeError("Tried to step environment that needs reset") observation, reward, done, info = self.env.step(action) self.rewards.append(reward) if done: self.needs_reset = True ep_rew = sum(self.rewards) ep_len = len(self.rewards) ep_success = sum(self.success) ep_info = {"r": round(ep_rew, 6), "l": ep_len, "t": round( time.time() - self.t_start, 6)} for key in self.info_keywords: ep_info[key] = info[key] self.episode_rewards.append(ep_rew) self.episode_lengths.append(ep_len) self.episode_times.append(time.time() - self.t_start) ep_info.update(self.current_reset_info) if self.logger: self.logger.writerow(ep_info) self.file_handler.flush() info["episode"] = ep_info self.total_steps += 1 return observation, reward, done, info def close(self) -> None: """ Closes the environment """ super(Monitor, self).close() if self.file_handler is not None: self.file_handler.close() def get_total_steps(self) -> int: """ Returns the total number of timesteps :return: """ return self.total_steps def get_episode_rewards(self) -> List[float]: """ Returns the rewards of all the episodes :return: """ return self.episode_rewards def get_episode_lengths(self) -> List[int]: """ Returns the number of timesteps of all the episodes :return: """ return self.episode_lengths def get_episode_times(self) -> List[float]: """ Returns the runtime in seconds of all the episodes :return: """ return self.episode_times class LoadMonitorResultsError(Exception): """ Raised when loading the monitor log fails. """ pass def get_monitor_files(path: str) -> List[str]: """ get all the monitor files in the given path :param path: the logging folder :return: the log files """ return glob(os.path.join(path, "*" + Monitor.EXT)) def load_results(path: str) -> pandas.DataFrame: """ Load all Monitor logs from a given directory path matching ``*monitor.csv`` :param path: the directory path containing the log file(s) :return: the logged data """ monitor_files = get_monitor_files(path) if len(monitor_files) == 0: raise LoadMonitorResultsError( f"No monitor files of the form *{Monitor.EXT} found in {path}") data_frames, headers = [], [] for file_name in monitor_files: with open(file_name, "rt") as file_handler: first_line = file_handler.readline() assert first_line[0] == "#" header = json.loads(first_line[1:]) data_frame = pandas.read_csv(file_handler, index_col=None) headers.append(header) data_frame["t"] += header["t_start"] data_frames.append(data_frame) data_frame = pandas.concat(data_frames) data_frame.sort_values("t", inplace=True) data_frame.reset_index(inplace=True) data_frame["t"] -= min(header["t_start"] for header in headers) return data_frame
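# A short usage sketch (CartPole is just an example environment; the log
# directory is hypothetical):
#
#   import gym
#   env = Monitor(gym.make("CartPole-v1"), filename="./logs/run1")
#   obs = env.reset()
#   done = False
#   while not done:
#       obs, reward, done, info = env.step(env.action_space.sample())
#   print(info["episode"])         # {'r': ..., 'l': ..., 't': ...}
#   print(load_results("./logs"))  # aggregates every *.monitor.csv in ./logs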
py
1a4ab6457021295a55b89fe1599879697a34169c
from app import reset_products_file  # import from another file

reset_products_file()
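# For context, a minimal app.py satisfying this import might look like the
# sketch below (the CSV path and columns are assumptions):
#
#   import csv
#
#   def reset_products_file():
#       with open("data/products.csv", "w", newline="") as f:
#           csv.writer(f).writerow(["id", "name", "price"])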
py
1a4ab6b3155254cbce9b35cba8a73208ff44d187
# Copyright 2021 The NetKet Authors - All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import numbers from typing import Union, Tuple, List, Optional from numpy.typing import DTypeLike, ArrayLike import numpy as np from numba import jit import jax import jax.numpy as jnp from netket.hilbert import AbstractHilbert, Fock from ._abstract_operator import AbstractOperator from ._lazy import Transpose, Adjoint, Squared @jit(nopython=True) def _number_to_state(number, hilbert_size_per_site, local_states_per_site, out): out[:] = local_states_per_site[:, 0] size = out.shape[0] ip = number k = size - 1 while ip > 0: local_size = hilbert_size_per_site[k] out[k] = local_states_per_site[k, ip % local_size] ip = ip // local_size k -= 1 return out def is_hermitian(a: np.ndarray, rtol=1e-05, atol=1e-08) -> bool: return np.allclose(a, a.T.conj(), rtol=rtol, atol=atol) def _dtype(obj: Union[numbers.Number, ArrayLike, "LocalOperator"]) -> DTypeLike: if isinstance(obj, numbers.Number): return type(obj) elif isinstance(obj, AbstractOperator): return obj.dtype elif isinstance(obj, np.ndarray): return obj.dtype else: raise TypeError(f"cannot deduce dtype of object type {type(obj)}: {obj}") def resize( arr: ArrayLike, shape: List[int], dtype: Optional[DTypeLike] = None, init: Optional[numbers.Number] = None, ) -> ArrayLike: """ resizes the input array to the new shape that must be larger than the old. The resulting array is initialized with the old array in the corresponding indices, and with init in the rest. Args: arr: The array to be resized shape: The new shape dtype: optional dtype of the new array. If unspecified the old array dtype is used init: optional initialization value for the new entries Returns: a numpy array with the chosen shape and dtype. """ if dtype is None: dtype = arr.dtype if isinstance(shape, int): shape = (shape,) if arr.shape == shape: return arr arr_shape = arr.shape if len(shape) != arr.ndim: raise ValueError("the number of dimensions should not change.") for (i, ip) in zip(arr_shape, shape): if ip < i: raise ValueError( f"The new dimensions ({shape}) should all be larger than the old ({arr_shape})." ) new_arr = np.empty(shape=shape, dtype=arr.dtype) if init is not None: new_arr[...] = init if arr.ndim == 0: raise ValueError("Cannot resize a 0-dimensional scalar") elif arr.ndim == 1: new_arr[: arr_shape[0]] = arr elif arr.ndim == 2: new_arr[: arr_shape[0], : arr_shape[1]] = arr elif arr.ndim == 3: new_arr[: arr_shape[0], : arr_shape[1], : arr_shape[2]] = arr elif arr.ndim == 4: new_arr[: arr_shape[0], : arr_shape[1], : arr_shape[2], : arr_shape[3]] = arr else: raise ValueError(f"unsupported number of dimensions: {arr.ndim}") return new_arr def _reorder_matrix(hi, mat, acting_on): acting_on_sorted = np.sort(acting_on) if np.all(acting_on_sorted == acting_on): return mat, acting_on acting_on_sorted_ids = np.argsort(acting_on) # could write custom binary <-> int logic instead of using Fock... 
# Since i need to work with bit-strings (where instead of bits i # have integers, in order to support arbitrary size spaces) this # is exactly what hilbert.to_number() and viceversa do. # target ordering binary representation hi_subspace = Fock(hi.shape[acting_on_sorted[0]] - 1) for site in acting_on_sorted[1:]: hi_subspace = Fock(hi.shape[site] - 1) * hi_subspace # find how to map target ordering back to unordered acting_on_unsorted_ids = np.zeros(len(acting_on), dtype=np.intp) for (i, site) in enumerate(acting_on): acting_on_unsorted_ids[i] = np.argmax(site == acting_on_sorted) # now it is valid that # acting_on_sorted == acting_on[acting_on_unsorted_ids] # generate n-bit strings in the target ordering v = hi_subspace.all_states() # convert them to origin (unordered) ordering v_unsorted = v[:, acting_on_unsorted_ids] # convert the unordered bit-strings to numbers in the target space. n_unsorted = hi_subspace.states_to_numbers(v_unsorted) # reorder the matrix mat_sorted = mat[n_unsorted, :][:, n_unsorted] return mat_sorted, acting_on_sorted class LocalOperator(AbstractOperator): """A custom local operator. This is a sum of an arbitrary number of operators acting locally on a limited set of k quantum numbers (i.e. k-local, in the quantum information sense). """ def __init__( self, hilbert: AbstractHilbert, operators: Union[List[ArrayLike], ArrayLike] = [], acting_on: Union[List[int], List[List[int]]] = [], constant: numbers.Number = 0, dtype: Optional[DTypeLike] = None, ): r""" Constructs a new ``LocalOperator`` given a hilbert space and (if specified) a constant level shift. Args: hilbert (netket.AbstractHilbert): Hilbert space the operator acts on. operators (list(numpy.array) or numpy.array): A list of operators, in matrix form. acting_on (list(numpy.array) or numpy.array): A list of sites, which the corresponding operators act on. constant (float): Level shift for operator. Default is 0.0. Examples: Constructs a ``LocalOperator`` without any operators. >>> from netket.hilbert import CustomHilbert >>> from netket.operator import LocalOperator >>> hi = CustomHilbert(local_states=[1, -1])**20 >>> empty_hat = LocalOperator(hi) >>> print(len(empty_hat.acting_on)) 0 """ super().__init__(hilbert) self._constant = constant # check if passing a single operator or a list of operators if isinstance(acting_on, numbers.Number): acting_on = [acting_on] is_nested = any(hasattr(i, "__len__") for i in acting_on) if not is_nested: operators = [operators] acting_on = [acting_on] operators = [np.asarray(operator) for operator in operators] # If we asked for a specific dtype, enforce it. if dtype is None: dtype = np.promote_types(operators[0].dtype, np.float32) for op in operators[1:]: np.promote_types(dtype, op.dtype) self._dtype = dtype self._init_zero() self.mel_cutoff = 1.0e-6 for op, act in zip(operators, acting_on): if len(act) > 0: self._add_operator(op, act) @property def operators(self) -> List[np.ndarray]: """List of the matrices of the operators encoded in this Local Operator. Returns a copy. """ return self._operators_list() @property def acting_on(self) -> List[List[int]]: """List containing the list of the sites on which every operator acts. 
Every operator `self.operators[i]` acts on the sites `self.acting_on[i]` """ actions = [action[action >= 0].tolist() for action in self._acting_on] return actions @property def dtype(self) -> DTypeLike: return self._dtype @property def size(self) -> int: return self._size @property def is_hermitian(self) -> bool: """Returns true if this operator is hermitian.""" return self._is_hermitian @property def mel_cutoff(self) -> float: r"""float: The cutoff for matrix elements. Only matrix elements such that abs(O(i,i))>mel_cutoff are considered""" return self._mel_cutoff @mel_cutoff.setter def mel_cutoff(self, mel_cutoff): self._mel_cutoff = mel_cutoff assert self.mel_cutoff >= 0 @property def constant(self) -> numbers.Number: return self._constant @property def n_operators(self) -> int: return self._n_operators def __add__(self, other: Union["LocalOperator", numbers.Number]): op = self.copy(dtype=np.promote_types(self.dtype, _dtype(other))) op = op.__iadd__(other) return op def __radd__(self, other): return self.__add__(other) def __iadd__(self, other): if isinstance(other, LocalOperator): if self.hilbert != other.hilbert: return NotImplemented if not np.can_cast(other.dtype, self.dtype, casting="same_kind"): raise ValueError( f"Cannot add inplace operator with dtype {other.dtype} to operator with dtype {self.dtype}" ) assert other.mel_cutoff == self.mel_cutoff for i in range(other._n_operators): acting_on = other._acting_on[i, : other._acting_size[i]] operator = other._operators[i] self._add_operator(operator, acting_on) self._constant += other.constant return self if isinstance(other, numbers.Number): if not np.can_cast(type(other), self.dtype, casting="same_kind"): raise ValueError( f"Cannot add inplace operator with dtype {type(other)} to operator with dtype {self.dtype}" ) self._constant += other return self return NotImplemented def __sub__(self, other): return self + (-other) def __rsub__(self, other): return other + (-self) def __isub__(self, other): return self.__iadd__(-other) def __neg__(self): return -1 * self def __mul__(self, other): if isinstance(other, AbstractOperator): op = self.copy(dtype=np.promote_types(self.dtype, _dtype(other))) return op.__imatmul__(other) elif not isinstance(other, numbers.Number): return NotImplemented op = self.copy(dtype=np.promote_types(self.dtype, _dtype(other))) op._diag_mels *= other op._mels *= other op._constant *= other for _op in op._operators: _op *= other return op def __imul__(self, other): if isinstance(other, AbstractOperator): return self.__imatmul__(other) elif not isinstance(other, numbers.Number): return NotImplemented if not np.can_cast(type(other), self.dtype, casting="same_kind"): raise ValueError( f"Cannot add inplace operator with dtype {type(other)} to operator with dtype {self.dtype}" ) self._diag_mels *= other self._mels *= other self._constant *= other for _op in self._operators: _op *= other return self def __imatmul__(self, other): if not isinstance(other, LocalOperator): return NotImplemented if not np.can_cast(other.dtype, self.dtype, casting="same_kind"): raise ValueError( f"Cannot add inplace operator with dtype {type(other)} to operator with dtype {self.dtype}" ) return self._concrete_imatmul_(other) def _op__matmul__(self, other): return self._concrete_matmul_(other) def _concrete_matmul_(self, other: "LocalOperator") -> "LocalOperator": if not isinstance(other, LocalOperator): return NotImplemented op = self.copy(dtype=np.promote_types(self.dtype, _dtype(other))) op @= other return op def _concrete_imatmul_(self, 
other: "LocalOperator") -> "LocalOperator": if not isinstance(other, LocalOperator): return NotImplemented tot_operators = [] tot_act = [] for i in range(other._n_operators): act_i = other._acting_on[i, : other._acting_size[i]].tolist() ops, act = self._multiply_operator(other._operators[i], act_i) tot_operators += ops tot_act += act prod = LocalOperator(self.hilbert, tot_operators, tot_act, dtype=self.dtype) self_constant = self._constant if np.abs(other._constant) > self.mel_cutoff: self *= other._constant self += prod self._constant = 0.0 else: self = prod if np.abs(self_constant) > self.mel_cutoff: self += other * self_constant return self def __truediv__(self, other): if not isinstance(other, numbers.Number): raise TypeError("Only divison by a scalar number is supported.") if other == 0: raise ValueError("Dividing by 0") return self.__mul__(1.0 / other) def __rmul__(self, other): return self.__mul__(other) def _init_zero(self): self._operators = [] self._n_operators = 0 self._max_op_size = 0 self._max_acting_size = 0 self._max_local_hilbert_size = 0 self._size = 0 self._acting_on = np.zeros((0, 0), dtype=np.intp) self._acting_size = np.zeros(0, dtype=np.intp) self._diag_mels = np.zeros((0, 0), dtype=self.dtype) self._mels = np.empty((0, 0, 0), dtype=self.dtype) self._x_prime = np.empty((0, 0, 0, 0)) self._n_conns = np.empty((0, 0), dtype=np.intp) self._local_states = np.zeros((0, 0, 0), dtype=np.float64) self._basis = np.zeros((0, 0), dtype=np.int64) self._is_hermitian = True def _acting_on_list(self): acting_on = [] for i in range(self.n_operators): acting_on.append(np.copy(self._acting_on[i, : self._acting_size[i]])) return acting_on def _operators_list(self): "A deep copy of the operators" operators = [np.copy(op) for op in self._operators] return operators def _add_operator(self, operator: ArrayLike, acting_on: List[int]): if not np.can_cast(operator, self.dtype, casting="same_kind"): raise ValueError(f"Cannot cast type {operator.dtype} to {self.dtype}") acting_on = np.asarray(acting_on, dtype=np.intp) operator = np.asarray(operator, dtype=self.dtype) if np.unique(acting_on).size != acting_on.size: raise ValueError("acting_on contains repeated entries.") if any(acting_on >= self.hilbert.size): raise ValueError("acting_on points to a site not in the hilbert space.") if operator.ndim != 2: raise ValueError("The operator should be a matrix") if np.all(np.abs(operator) < self.mel_cutoff): return # re-sort the operator operator, acting_on = _reorder_matrix(self.hilbert, operator, acting_on) # find overlapping support support_i = None for (i, support) in enumerate(self._acting_on_list()): if np.all(acting_on == support): support_i = i break # If overlapping support, add the local operators themselves if support_i is not None: dim = min(operator.shape[0], self._operators[support_i].shape[0]) _opv = self._operators[support_i][:dim, :dim] _opv += operator[:dim, :dim] n_local_states_per_site = np.asarray( [self.hilbert.size_at_index(i) for i in acting_on] ) self._append_matrix( self._operators[support_i], self._diag_mels[support_i], self._mels[support_i], self._x_prime[support_i], self._n_conns[support_i], self._acting_size[support_i], self._local_states[support_i], self.mel_cutoff, n_local_states_per_site, ) isherm = True for op in self._operators: isherm = isherm and is_hermitian(op) self._is_hermitian = isherm else: self.__add_new_operator__(operator, acting_on) def __add_new_operator__(self, operator: np.ndarray, acting_on: np.ndarray): # Else, we must add a completely new operator 
        self._n_operators += 1
        self._operators.append(operator)

        # Add a new row and eventually resize the acting_on
        self._acting_size = np.resize(self._acting_size, (self.n_operators,))
        self._acting_size[-1] = acting_on.size
        acting_size = acting_on.size

        self._max_op_size = max((operator.shape[0], self._max_op_size))

        n_local_states_per_site = np.asarray(
            [self.hilbert.size_at_index(i) for i in acting_on]
        )

        if operator.shape[0] != np.prod(n_local_states_per_site):
            raise RuntimeError(
                r"""The given operator matrix has shape={} and acts on the
                sites={}, which have local hilbert space sizes={}, so the
                expected operator size is {}.""".format(
                    operator.shape,
                    acting_on,
                    n_local_states_per_site,
                    np.prod(n_local_states_per_site),
                )
            )

        self._max_acting_size = max(self._max_acting_size, acting_on.size)
        self._max_local_hilbert_size = max(
            self._max_local_hilbert_size, np.max(n_local_states_per_site)
        )

        self._acting_on = resize(
            self._acting_on, shape=(self.n_operators, self._max_acting_size), init=-1
        )
        self._acting_on[-1, :acting_size] = acting_on
        if (
            self._acting_on[-1, :acting_size].max() > self.hilbert.size
            or self._acting_on[-1, :acting_size].min() < 0
        ):
            raise InvalidInputError("Operator acts on an invalid set of sites")

        self._local_states = resize(
            self._local_states,
            shape=(
                self.n_operators,
                self._max_acting_size,
                self._max_local_hilbert_size,
            ),
            init=np.nan,
        )
        ## store the local states of every site this operator acts on
        for site in range(acting_size):
            self._local_states[-1, site, : n_local_states_per_site[site]] = np.asarray(
                self.hilbert.states_at_index(acting_on[site])
            )

        ## store the mixed-radix basis used to map local states to numbers
        self._basis = resize(
            self._basis, shape=(self.n_operators, self._max_acting_size), init=1e10
        )

        ba = 1
        for s in range(acting_on.size):
            self._basis[-1, s] = ba
            ba *= n_local_states_per_site[acting_on.size - s - 1]

        ##
        self._diag_mels = resize(
            self._diag_mels, shape=(self.n_operators, self._max_op_size), init=np.nan
        )
        self._mels = resize(
            self._mels,
            shape=(self.n_operators, self._max_op_size, self._max_op_size - 1),
            init=np.nan,
        )
        self._x_prime = resize(
            self._x_prime,
            shape=(
                self.n_operators,
                self._max_op_size,
                self._max_op_size - 1,
                self._max_acting_size,
            ),
            init=-1,
        )
        self._n_conns = resize(
            self._n_conns, shape=(self.n_operators, self._max_op_size), init=-1
        )
        if acting_on.max() + 1 >= self._size:
            self._size = acting_on.max() + 1

        self._append_matrix(
            operator,
            self._diag_mels[-1],
            self._mels[-1],
            self._x_prime[-1],
            self._n_conns[-1],
            self._acting_size[-1],
            self._local_states[-1],
            self.mel_cutoff,
            n_local_states_per_site,
        )

        isherm = True
        for op in self._operators:
            isherm = isherm and is_hermitian(op)

        self._is_hermitian = isherm

    @staticmethod
    @jit(nopython=True)
    def _append_matrix(
        operator,
        diag_mels,
        mels,
        x_prime,
        n_conns,
        acting_size,
        local_states_per_site,
        epsilon,
        hilb_size_per_site,
    ):
        op_size = operator.shape[0]
        assert op_size == operator.shape[1]
        for i in range(op_size):
            diag_mels[i] = operator[i, i]
            n_conns[i] = 0
            for j in range(op_size):
                if i != j and np.abs(operator[i, j]) > epsilon:
                    k_conn = n_conns[i]
                    mels[i, k_conn] = operator[i, j]
                    _number_to_state(
                        j,
                        hilb_size_per_site,
                        local_states_per_site[:acting_size, :],
                        x_prime[i, k_conn, :acting_size],
                    )
                    n_conns[i] += 1

    def _multiply_operator(self, op, act):
        operators = []
        acting_on = []
        act = np.asarray(act)

        for i in range(self.n_operators):
            act_i = self._acting_on[i, : self._acting_size[i]]

            inters = np.intersect1d(act_i, act, return_indices=False)

            if act.size == act_i.size and np.array_equal(act, act_i):
                # identical support: a plain matrix product is enough
                operators.append(np.copy(np.matmul(self._operators[i], op)))
                acting_on.append(act_i.tolist())
            elif inters.size == 0:
                # disjoint supports
                operators.append(np.copy(np.kron(self._operators[i], op)))
                acting_on.append(act_i.tolist() + act.tolist())
            else:
                _act = list(act)
                _act_i = list(act_i)

                _op = op.copy()
                _op_i = self._operators[i].copy()

                # expand _act to match _act_i
                actmin = min(act)
                for site in act_i:
                    if site not in act:
                        I = np.eye(self.hilbert.shape[site], dtype=self.dtype)
                        if site < actmin:
                            _act = [site] + _act
                            _op = np.kron(I, _op)
                        else:  # site > max(_act)
                            _act = _act + [site]
                            _op = np.kron(_op, I)

                act_i_min = min(act_i)
                for site in act:
                    if site not in act_i:
                        I = np.eye(self.hilbert.shape[site], dtype=self.dtype)
                        if site < act_i_min:
                            _act_i = [site] + _act_i
                            _op_i = np.kron(I, _op_i)
                        else:  # site > max(_act_i)
                            _act_i = _act_i + [site]
                            _op_i = np.kron(_op_i, I)

                # reorder
                _op, _act = _reorder_matrix(self.hilbert, _op, _act)
                _op_i, _act_i = _reorder_matrix(self.hilbert, _op_i, _act_i)

                if len(_act) == len(_act_i) and np.array_equal(_act, _act_i):
                    # after padding, both operators act on the same support
                    operators.append(np.matmul(_op_i, _op))
                    acting_on.append(_act_i)
                else:
                    raise ValueError(
                        "Failed to align the supports of the two operators."
                    )

        return operators, acting_on

    def copy(self, *, dtype: Optional[DTypeLike] = None):
        """Returns a copy of the operator, while optionally changing the dtype
        of the operator.

        Args:
            dtype: optional dtype
        """

        if dtype is None:
            dtype = self.dtype

        if not np.can_cast(self.dtype, dtype, casting="same_kind"):
            raise ValueError(f"Cannot cast {self.dtype} to {dtype}")

        return LocalOperator(
            hilbert=self.hilbert,
            operators=[np.copy(op) for op in self._operators],
            acting_on=self._acting_on_list(),
            constant=self._constant,
            dtype=dtype,
        )

    def transpose(self, *, concrete=False):
        r"""LocalOperator: Returns the transpose of this operator."""
        if concrete:
            new_ops = [np.copy(ops.transpose()) for ops in self._operators]

            return LocalOperator(
                hilbert=self.hilbert,
                operators=new_ops,
                acting_on=self._acting_on_list(),
                constant=self._constant,
            )
        else:
            return Transpose(self)

    def conjugate(self, *, concrete=False):
        r"""LocalOperator: Returns the complex conjugate of this operator."""
        new_ops = [np.copy(ops).conjugate() for ops in self._operators]

        return LocalOperator(
            hilbert=self.hilbert,
            operators=new_ops,
            acting_on=self._acting_on_list(),
            constant=np.conjugate(self._constant),
        )

    def get_conn_flattened(self, x, sections, pad=False):
        r"""Finds the connected elements of the Operator. Starting
        from a given quantum number x, it finds all other quantum numbers x' such
        that the matrix element :math:`O(x,x')` is different from zero. In general there
        will be several different connected states x' satisfying this
        condition, and they are denoted here :math:`x'(k)`, for :math:`k=0,1...N_{\mathrm{connected}}`.

        This is a batched version, where x is a matrix of shape (batch_size,hilbert.size).

        Args:
            x (matrix): A matrix of shape (batch_size,hilbert.size) containing
                the batch of quantum numbers x.
            sections (array): An array of size (batch_size) useful to unflatten
                the output of this function.
                See numpy.split for the meaning of sections.
            pad (bool): Whether to use zero-valued matrix elements in order to return all equal sections.

        Returns:
            matrix: The connected states x', flattened together in a single matrix.
            array: An array containing the matrix elements :math:`O(x,x')` associated to each x'.
""" return self._get_conn_flattened_kernel( x, sections, self._local_states, self._basis, self._constant, self._diag_mels, self._n_conns, self._mels, self._x_prime, self._acting_on, self._acting_size, pad, ) def _get_conn_flattened_closure(self): _local_states = self._local_states _basis = self._basis _constant = self._constant _diag_mels = self._diag_mels _n_conns = self._n_conns _mels = self._mels _x_prime = self._x_prime _acting_on = self._acting_on _acting_size = self._acting_size fun = self._get_conn_flattened_kernel def gccf_fun(x, sections): return fun( x, sections, _local_states, _basis, _constant, _diag_mels, _n_conns, _mels, _x_prime, _acting_on, _acting_size, ) return jit(nopython=True)(gccf_fun) @staticmethod @jit(nopython=True) def _get_conn_flattened_kernel( x, sections, local_states, basis, constant, diag_mels, n_conns, all_mels, all_x_prime, acting_on, acting_size, pad=False, ): batch_size = x.shape[0] n_sites = x.shape[1] dtype = all_mels.dtype assert sections.shape[0] == batch_size n_operators = n_conns.shape[0] xs_n = np.empty((batch_size, n_operators), dtype=np.intp) tot_conn = 0 max_conn = 0 for b in range(batch_size): # diagonal element conn_b = 1 # counting the off-diagonal elements for i in range(n_operators): acting_size_i = acting_size[i] xs_n[b, i] = 0 x_b = x[b] x_i = x_b[acting_on[i, :acting_size_i]] for k in range(acting_size_i): xs_n[b, i] += ( np.searchsorted( local_states[i, acting_size_i - k - 1], x_i[acting_size_i - k - 1], ) * basis[i, k] ) conn_b += n_conns[i, xs_n[b, i]] tot_conn += conn_b sections[b] = tot_conn if pad: max_conn = max(conn_b, max_conn) if pad: tot_conn = batch_size * max_conn x_prime = np.empty((tot_conn, n_sites), dtype=x.dtype) mels = np.empty(tot_conn, dtype=dtype) c = 0 for b in range(batch_size): c_diag = c mels[c_diag] = constant x_batch = x[b] x_prime[c_diag] = np.copy(x_batch) c += 1 for i in range(n_operators): # Diagonal part mels[c_diag] += diag_mels[i, xs_n[b, i]] n_conn_i = n_conns[i, xs_n[b, i]] if n_conn_i > 0: sites = acting_on[i] acting_size_i = acting_size[i] for cc in range(n_conn_i): mels[c + cc] = all_mels[i, xs_n[b, i], cc] x_prime[c + cc] = np.copy(x_batch) for k in range(acting_size_i): x_prime[c + cc, sites[k]] = all_x_prime[ i, xs_n[b, i], cc, k ] c += n_conn_i if pad: delta_conn = max_conn - (c - c_diag) mels[c : c + delta_conn].fill(0) x_prime[c : c + delta_conn, :] = np.copy(x_batch) c += delta_conn sections[b] = c return x_prime, mels def get_conn_filtered(self, x, sections, filters): r"""Finds the connected elements of the Operator using only a subset of operators. Starting from a given quantum number x, it finds all other quantum numbers x' such that the matrix element :math:`O(x,x')` is different from zero. In general there will be several different connected states x' satisfying this condition, and they are denoted here :math:`x'(k)`, for :math:`k=0,1...N_{\mathrm{connected}}`. This is a batched version, where x is a matrix of shape (batch_size,hilbert.size). Args: x (matrix): A matrix of shape (batch_size,hilbert.size) containing the batch of quantum numbers x. sections (array): An array of size (batch_size) useful to unflatten the output of this function. See numpy.split for the meaning of sections. filters (array): Only operators op(filters[i]) are used to find the connected elements of x[i]. Returns: matrix: The connected states x', flattened together in a single matrix. array: An array containing the matrix elements :math:`O(x,x')` associated to each x'. 
""" return self._get_conn_filtered_kernel( x, sections, self._local_states, self._basis, self._constant, self._diag_mels, self._n_conns, self._mels, self._x_prime, self._acting_on, self._acting_size, filters, ) @staticmethod @jit(nopython=True) def _get_conn_filtered_kernel( x, sections, local_states, basis, constant, diag_mels, n_conns, all_mels, all_x_prime, acting_on, acting_size, filters, ): batch_size = x.shape[0] n_sites = x.shape[1] dtype = all_mels.dtype assert filters.shape[0] == batch_size and sections.shape[0] == batch_size n_operators = n_conns.shape[0] xs_n = np.empty((batch_size, n_operators), dtype=np.intp) tot_conn = 0 for b in range(batch_size): # diagonal element tot_conn += 1 # counting the off-diagonal elements i = filters[b] assert i < n_operators and i >= 0 acting_size_i = acting_size[i] xs_n[b, i] = 0 x_b = x[b] x_i = x_b[acting_on[i, :acting_size_i]] for k in range(acting_size_i): xs_n[b, i] += ( np.searchsorted( local_states[i, acting_size_i - k - 1], x_i[acting_size_i - k - 1], ) * basis[i, k] ) tot_conn += n_conns[i, xs_n[b, i]] sections[b] = tot_conn x_prime = np.empty((tot_conn, n_sites)) mels = np.empty(tot_conn, dtype=dtype) c = 0 for b in range(batch_size): c_diag = c mels[c_diag] = constant x_batch = x[b] x_prime[c_diag] = np.copy(x_batch) c += 1 i = filters[b] # Diagonal part mels[c_diag] += diag_mels[i, xs_n[b, i]] n_conn_i = n_conns[i, xs_n[b, i]] if n_conn_i > 0: sites = acting_on[i] acting_size_i = acting_size[i] for cc in range(n_conn_i): mels[c + cc] = all_mels[i, xs_n[b, i], cc] x_prime[c + cc] = np.copy(x_batch) for k in range(acting_size_i): x_prime[c + cc, sites[k]] = all_x_prime[i, xs_n[b, i], cc, k] c += n_conn_i return x_prime, mels def __repr__(self): ao = self.acting_on acting_str = f"acting_on={ao}" if len(acting_str) > 55: acting_str = f"#acting_on={len(ao)} locations" return f"{type(self).__name__}(dim={self.hilbert.size}, {acting_str}, constant={self.constant}, dtype={self.dtype})"
py
1a4ab6e66b90415bb5af54ad515227a3c333d277
from collections import Counter from django.core.management.base import BaseCommand from django_orghierarchy.models import Organization class Command(BaseCommand): help = 'Generate distinct names for organizations' def add_arguments(self, parser): parser.add_argument( '--all', help='Generate distinct names for all orgs (not duplicates)', action='store_true', ) def handle(self, *args, **options): # First we make sure the abbreviations are unique orgs = Organization.objects.filter(dissolution_date=None).select_related('parent') for org in orgs: if org.abbreviation: dupe_descendants = org.get_descendants().filter(abbreviation=org.abbreviation) for dupe in dupe_descendants: print('%s (abbreviation %s -> None)' % (dupe, dupe.abbreviation)) dupe.abbreviation = None dupe.save(update_fields=['abbreviation']) # Refresh from db orgs = Organization.objects.filter(dissolution_date=None).select_related('parent') orgs_by_name = {} for org in orgs: orgs_by_name.setdefault(org.name, []).append(org) for org_name, duplicate_orgs in orgs_by_name.items(): if not options['all'] and len(duplicate_orgs) == 1: continue if len(duplicate_orgs) > 1: print("%d duplicates for %s" % (len(duplicate_orgs), org_name)) if options['all']: levels = 4 else: levels = 1 # Keep on adding parent names until the names are distinct while levels < 5: distinct_names = [org.generate_distinct_name(levels) for org in duplicate_orgs] c = Counter(distinct_names) if max(c.values()) == 1: break levels += 1 for org in duplicate_orgs: distinct_name = org.generate_distinct_name(levels) print("\t%s" % distinct_name) if org.distinct_name != distinct_name: org.distinct_name = distinct_name org.save(update_fields=['distinct_name'])
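
# Invocation sketch (the command name is hypothetical; Django derives it from
# this module's file name under management/commands/):
#
#   python manage.py generate_distinct_names          # duplicates only
#   python manage.py generate_distinct_names --all    # every organization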
py
1a4ab856f6c750a7acd22255af8324f7f90d638b
# -*- coding: utf-8 -*- # Generated by Django 1.9.7 on 2016-06-23 08:59 from __future__ import unicode_literals import django.contrib.postgres.fields.jsonb from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('helpdesk', '0012_queue_default_owner'), ] operations = [ migrations.AddField( model_name='followup', name='options', field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, editable=False, null=True, verbose_name='Optional actions to perform for recipient'), ), ]
py
1a4ab898373248015532c8ba853004272bdf0c19
from inspect import signature
from typing import Any, Type, TypeVar

from httpx import Response

from .errors import TelePayError

T = TypeVar("T")


def validate_response(response: Response) -> None:
    """Raise a TelePayError if the HTTP response is not a 2xx success."""
    if response.status_code < 200 or response.status_code >= 300:
        error_data = response.json()
        error = error_data.pop("error", None)
        message = error_data.pop("message", None)
        raise TelePayError(
            status_code=response.status_code,
            error=error,
            message=message,
        )


def parse_json(cls: Type[T], **json: Any) -> T:
    """Build an instance of ``cls`` from JSON keyword arguments.

    Keys matching the constructor's parameters are passed to ``cls(...)``;
    any remaining keys are attached to the instance as plain attributes.
    """
    cls_fields = {field for field in signature(cls).parameters}
    native_args, new_args = {}, {}

    for name, val in json.items():
        if name in cls_fields:
            native_args[name] = val
        else:
            new_args[name] = val

    ret = cls(**native_args)

    for new_name, new_val in new_args.items():
        setattr(ret, new_name, new_val)

    return ret
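
# Usage sketch for parse_json (the Invoice dataclass below is a hypothetical
# stand-in for the client's model classes): declared fields go through
# __init__, any extra JSON keys are attached afterwards as attributes.
#
#   from dataclasses import dataclass
#
#   @dataclass
#   class Invoice:
#       number: str
#       asset: str
#
#   inv = parse_json(Invoice, number="INV-1", asset="TON", status="pending")
#   assert inv.asset == "TON" and inv.status == "pending"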
py
1a4ab8ffcfeb48f84537f08dbb9499ae6a0633c2
# Copyright (C) 2019 The Raphielscape Company LLC.
#
# Licensed under the Raphielscape Public License, Version 1.d (the "License");
# you may not use this file except in compliance with the License.
#
# You can find misc modules, which don't fit in anything xD

""" scape module for other small commands. """

from random import randint
from time import sleep
from os import execl
from platform import uname
import sys
import os
import io
import re
import urllib.parse
import urllib.request

import requests
from bs4 import BeautifulSoup
from PIL import Image

from scape import BOTLOG, BOTLOG_CHATID, CMD_HELP, bot, GIT_REPO_NAME, ALIVE_NAME
from scape.events import register
from scape.utils import time_formatter

# ================= CONSTANT =================
DEFAULTUSER = str(ALIVE_NAME) if ALIVE_NAME else uname().node
# ============================================

opener = urllib.request.build_opener()
useragent = 'Mozilla/5.0 (Linux; Android 9; SM-G960F Build/PPR1.180610.011; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/78.0.3904.70 Mobile Safari/537.36'
opener.addheaders = [('User-agent', useragent)]


@register(outgoing=True, pattern="^.random")
async def randomise(items):
    """ For .random command, get a random item from the list of items. """
    itemo = (items.text[8:]).split()
    if len(itemo) < 2:
        await items.edit(
            "`2 or more items are required! Check .help random for more info.`"
        )
        return
    index = randint(1, len(itemo) - 1)
    await items.edit("**Query: **\n`" + items.text[8:] + "`\n**Output: **\n`" +
                     itemo[index] + "`")


@register(outgoing=True, pattern="^.sleep ([0-9]+)$")
async def sleepybot(time):
    """ For .sleep command, let the scape snooze for a few seconds. """
    counter = int(time.pattern_match.group(1))
    await time.edit("`I am sulking and snoozing...`")
    if BOTLOG:
        str_counter = time_formatter(counter)
        await time.client.send_message(
            BOTLOG_CHATID,
            f"You put the bot to sleep for {str_counter}.",
        )
    sleep(counter)
    await time.edit("`OK, I'm awake now.`")


@register(outgoing=True, pattern="^.shutdown$")
async def killdabot(event):
    """ For .shutdown command, shut the bot down."""
    await event.edit("`Goodbye *Windows XP shutdown sound*....`")
    if BOTLOG:
        await event.client.send_message(BOTLOG_CHATID, "#SHUTDOWN \n"
                                        "Bot shut down")
    await bot.disconnect()


@register(outgoing=True, pattern="^.restart$")
async def restartbot(event):
    """ For .restart command, restart the bot in a new process. """
    await event.edit("`*i would be back in a moment*`")
    if BOTLOG:
        await event.client.send_message(BOTLOG_CHATID, "#RESTART \n"
                                        "Bot Restarted")
    await bot.disconnect()
    # Spin a new instance of bot
    execl(sys.executable, sys.executable, *sys.argv)
    # Shut the existing one down
    exit()


@register(outgoing=True, pattern="^.community$")
async def bot_community(community):
    """ For .community command, just returns OG Paperplane's group link. """
    await community.edit(
        "Join RaphielGang's awesome scape community: @scape_support"
        "\nDo note that Paperplane Extended is an unofficial fork of their "
        "Paperplane project and it may get limited or no support for bugs.")


@register(outgoing=True, pattern="^.support$")
async def bot_support(wannahelp):
    """ For .support command, just returns the group link.
""" await wannahelp.edit( "Join the scape-remix Channel: @scape-remix \ \nJoin the scape-remix Chat: @remixsupport") @register(outgoing=True, pattern="^.creator$") async def creator(e): await e.edit("[Heyworld](https://github.com/sahyam2019)") @register(outgoing=True, pattern="^.readme$") async def reedme(e): await e.edit( "Here's something for you to read:\n" "\n[Openscape's README.md file](https://github.com/mkaraniya/Openscape/blob/sql-extended/README.md)" "\n[Setup Guide - Basic](https://telegra.ph/How-to-host-a-Telegram-scape-11-02)" "\n[Setup Guide - Google Drive](https://telegra.ph/How-To-Setup-GDrive-11-02)" "\n[Setup Guide - LastFM Module](https://telegra.ph/How-to-set-up-LastFM-module-for-Paperplane-scape-11-02)" "\n[Video Tutorial - 576p](https://mega.nz/#!ErwCESbJ!1ZvYAKdTEfb6y1FnqqiLhHH9vZg4UB2QZNYL9fbQ9vs)" "\n[Video Tutorial - 1080p](https://mega.nz/#!x3JVhYwR!u7Uj0nvD8_CyyARrdKrFqlZEBFTnSVEiqts36HBMr-o)" "\n[Special - Note](https://telegra.ph/Special-Note-11-02)") # Copyright (c) Gegham Zakaryan | 2019 @register(outgoing=True, pattern="^.repeat (.*)") async def repeat(rep): cnt, txt = rep.pattern_match.group(1).split(' ', 1) replyCount = int(cnt) toBeRepeated = txt replyText = toBeRepeated + "\n" for i in range(0, replyCount - 1): replyText += toBeRepeated + "\n" await rep.edit(replyText) @register(outgoing=True, pattern="^.repo$") async def repo_is_here(wannasee): """ For .repo command, just returns the repo URL. """ await wannasee.edit( "Click [here](https://github.com/Calliope-K/Scape-Remix) to open scape-remix's GitHub page." ) @register(outgoing=True, pattern="^.myrepo$") async def myrepo_is_here(wannaseeme): """ For .myrepo command, just returns the repo URL. """ await wannaseeme.edit( f'Click [here](https://github.com/{GIT_REPO_NAME}/tree/sql-extended/) to open {DEFAULTUSER}`s GitHub page' ) @register(outgoing=True, pattern="^.raw$") async def raw(event): the_real_message = None reply_to_id = None if event.reply_to_msg_id: previous_message = await event.get_reply_message() the_real_message = previous_message.stringify() reply_to_id = event.reply_to_msg_id else: the_real_message = event.stringify() reply_to_id = event.message.id with io.BytesIO(str.encode(the_real_message)) as out_file: out_file.name = "raw_message_data.txt" await event.edit( "`Check the scape log for the decoded message data !!`") await event.client.send_file( BOTLOG_CHATID, out_file, force_document=True, allow_cache=False, reply_to=reply_to_id, caption="`Here's the decoded message data !!`") @register(outgoing=True, pattern=r"^.reverse(?: |$)(\d*)") async def okgoogle(img): """ For .reverse command, Google search images and stickers. 
""" if os.path.isfile("okgoogle.png"): os.remove("okgoogle.png") message = await img.get_reply_message() if message and message.media: photo = io.BytesIO() await bot.download_media(message, photo) else: await img.edit("`Reply to photo or sticker nigger.`") return if photo: await img.edit("`Processing...`") try: image = Image.open(photo) except OSError: await img.edit('`Unsupported sexuality, most likely.`') return name = "okgoogle.png" image.save(name, "PNG") image.close() # https://stackoverflow.com/questions/23270175/google-reverse-image-search-using-post-request#28792943 searchUrl = 'https://www.google.com/searchbyimage/upload' multipart = { 'encoded_image': (name, open(name, 'rb')), 'image_content': '' } response = requests.post(searchUrl, files=multipart, allow_redirects=False) fetchUrl = response.headers['Location'] if response != 400: await img.edit("`Image successfully uploaded to Google. Maybe.`" "\n`Parsing source now. Maybe.`") else: await img.edit("`Google told me to fuck off.`") return os.remove(name) match = await ParseSauce(fetchUrl + "&preferences?hl=en&fg=1#languages") guess = match['best_guess'] imgspage = match['similar_images'] if guess and imgspage: await img.edit(f"[{guess}]({fetchUrl})\n\n`Looking for images...`") else: await img.edit("`Couldn't find anything for your uglyass.`") return if img.pattern_match.group(1): lim = img.pattern_match.group(1) else: lim = 3 images = await scam(match, lim) yeet = [] for i in images: k = requests.get(i) yeet.append(k.content) try: await img.client.send_file(entity=await img.client.get_input_entity(img.chat_id ), file=yeet, reply_to=img) except TypeError: pass await img.edit( f"[{guess}]({fetchUrl})\n\n[Visually similar images]({imgspage})") async def ParseSauce(googleurl): """Parse/Scrape the HTML code for the info we want.""" source = opener.open(googleurl).read() soup = BeautifulSoup(source, 'html.parser') results = {'similar_images': '', 'best_guess': ''} try: for similar_image in soup.findAll('input', {'class': 'gLFyf'}): url = 'https://www.google.com/search?tbm=isch&q=' + \ urllib.parse.quote_plus(similar_image.get('value')) results['similar_images'] = url except BaseException: pass for best_guess in soup.findAll('div', attrs={'class': 'r5a77d'}): results['best_guess'] = best_guess.get_text() return results async def scam(results, lim): single = opener.open(results['similar_images']).read() decoded = single.decode('utf-8') imglinks = [] counter = 0 pattern = r'^,\[\"(.*[.png|.jpg|.jpeg])\",[0-9]+,[0-9]+\]$' oboi = re.findall(pattern, decoded, re.I | re.M) for imglink in oboi: counter += 1 if not counter >= int(lim): imglinks.append(imglink) else: break return imglinks CMD_HELP.update({ "misc": "`.random` <item1> <item2> ... <itemN>\ \nUsage: Get a random item from the list of items.\ \n\n`.sleep` <seconds>\ \nusage:scapes get tired too. Let yours snooze for a few seconds.\ \n\n`.shutdown`\ \nUsage: Shutdowns the bot temporarily\ \n\n`.support`\ \nUsage: if you need help, use this command\ \n\n`.community`\ \nUsage: Join the awesome Openscape community !!\ \n\n`.repo`\ \nUsage: If you are curious what makes the scape work, this is what you need !!\ \n\n`.myrepo`\ \nUsage: If you are curious which is your personal repo, this is what you have.\ \n\n`.readme`\ \nUsage: Provide links to setup the scape and it's modules.\ \n\n`.creator`\ \nUsage: Know who created this awesome scape !!\ \n\n`.repeat` <no.> <text>\ \nUsage: Repeats the text for a number of times. 
Don't confuse this with spam tho.\
\n\n`.restart`\
\nUsage: Restarts the bot !!\
\n\n`.raw`\
\nUsage: Get detailed JSON-like formatted data about the replied message.\
\n\n`.reverse`\
\nUsage: Reply to a pic/sticker to reverse-search it on Google Images.\
\n\n`.poll`\
\nUsage: If you don't give any input it sends a default poll. If you'd like to customize it, use this syntax:\
\n `.poll question ; option 1; option 2;`\
\n ';' separates the question and each option."
})
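
# Pipeline sketch for `.reverse` (summarising the implementation above):
#   1. download the replied photo/sticker and save it as okgoogle.png
#   2. POST it to https://www.google.com/searchbyimage/upload (no redirects)
#   3. follow the Location header and scrape the best guess plus the
#      "similar images" URL with BeautifulSoup (ParseSauce)
#   4. extract up to <lim> image links from that page (scam) and send them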
py
1a4ab97884e06c6fec5d9d2ae0ccb0eb3d305716
from django.conf.urls import include, url from django.contrib import admin from django.contrib.staticfiles.urls import staticfiles_urlpatterns from django.views.generic import RedirectView try: from django.urls import reverse_lazy except ImportError: from django.core.urlresolvers import reverse_lazy redirect_view = RedirectView.as_view(url=reverse_lazy('admin:index')) urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'', include('openwisp_controller.urls')), {% if openwisp2_network_topology %} url(r'^', include('openwisp_network_topology.urls')), {% endif %} {% for extra_url in openwisp2_extra_urls %} {{ extra_url }}, {% endfor %} url(r'^$', redirect_view, name='index') ] urlpatterns += staticfiles_urlpatterns()
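
# Rendered sketch (hypothetical variable values: openwisp2_network_topology
# truthy, openwisp2_extra_urls empty); the template above would expand to:
#
#   urlpatterns = [
#       url(r'^admin/', admin.site.urls),
#       url(r'', include('openwisp_controller.urls')),
#       url(r'^', include('openwisp_network_topology.urls')),
#       url(r'^$', redirect_view, name='index')
#   ]
#   urlpatterns += staticfiles_urlpatterns()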