# ============================================================================
# File: build/LawnMowerRobot/hrp/am_loopmap/catkin_generated/pkg.installspace.context.pc.py
# Repo: rh-chen/catkin_ws (no license)
# ============================================================================
# generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/agneev/catkin_ws/install/include".split(';') if "/home/agneev/catkin_ws/install/include" != "" else []
PROJECT_CATKIN_DEPENDS = "nav_msgs;roscpp;std_msgs;tf;am_driver".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "am_loopmap"
PROJECT_SPACE_DIR = "/home/agneev/catkin_ws/install"
PROJECT_VERSION = "0.0.0"
# ============================================================================
# File: firmware/python_modules/sha2017/dashboard/installer.py
# Repo: zakx/ESP32-platform-firmware (Apache-2.0)
# ============================================================================
import orientation, dashboard.resources.woezel_repo as woezel_repo, term, easydraw, system, time, gc, ugfx, wifi, uos, json, sys, woezel, display
repo = woezel_repo
orientation.default()
def showMessage(msg, error=False, icon_wifi=False, icon_ok=False):
term.header(True, "Installer")
print(msg)
if error:
easydraw.messageCentered("ERROR\n\n"+msg, True, "/media/alert.png")
elif icon_wifi:
easydraw.messageCentered("PLEASE WAIT\n\n"+msg, True, "/media/wifi.png")
elif icon_ok:
easydraw.messageCentered(msg, True, "/media/ok.png")
else:
easydraw.messageCentered("PLEASE WAIT\n\n"+msg, True, "/media/busy.png")
# Generic actions
def btn_unhandled(pressed):
display.flush(display.FLAG_LUT_FASTEST)
def btn_exit(pressed):
if pressed:
system.launcher()
def btn_update(pressed):
if pressed:
repo.update()
system.start("installer", True)
# Categories list
categories_list = ugfx.List(0,0,ugfx.width(),ugfx.height()-48)
def show_categories(pressed=True):
if not pressed:
return
ugfx.clear(ugfx.WHITE)
#Hide category list
category_list.visible(False)
category_list.enabled(False)
#Show categories list
categories_list.visible(True)
categories_list.enabled(True)
#Input handling
ugfx.input_attach(ugfx.BTN_START, btn_exit)
ugfx.input_attach(ugfx.BTN_SELECT, btn_update)
ugfx.input_attach(ugfx.BTN_A, show_category)
ugfx.input_attach(ugfx.BTN_B, btn_unhandled)
ugfx.input_attach(ugfx.JOY_UP, btn_unhandled)
ugfx.input_attach(ugfx.JOY_DOWN, btn_unhandled)
ugfx.input_attach(ugfx.JOY_LEFT, btn_unhandled)
ugfx.input_attach(ugfx.JOY_RIGHT, btn_unhandled)
#Hint
easydraw.disp_string_right_bottom(0, "START: Exit app")
easydraw.disp_string_right_bottom(1, "A: Open category")
easydraw.disp_string_right_bottom(2, "SELECT: Update repo")
#Flush screen
display.flush(display.FLAG_LUT_NORMAL)
# Category browsing
category_list = ugfx.List(0,0,ugfx.width(),ugfx.height()-48)
def show_category(pressed=True):
if not pressed:
return
ugfx.clear(ugfx.WHITE)
global category
categories_list.visible(False)
categories_list.enabled(False)
slug = repo.categories[categories_list.selected_index()]["slug"]
showMessage("Loading "+slug+"...")
display.drawFill()
#Clean up list
while category_list.count() > 0:
category_list.remove_item(0)
try:
try:
category = repo.getCategory(slug)
        except BaseException as e:
            print("CAT OPEN ERR", e)
            showMessage("Failed to open category "+slug+"!", True)
            display.drawFill()
            time.sleep(1)
            show_categories()
            return  # 'category' was never assigned; do not fall through
gc.collect()
for package in category:
category_list.add_item("%s rev. %s" % (package["name"], package["revision"]))
category_list.selected_index(0)
category_list.visible(True)
category_list.enabled(True)
#Input handling
ugfx.input_attach(ugfx.BTN_START, btn_exit)
ugfx.input_attach(ugfx.BTN_SELECT, btn_unhandled)
ugfx.input_attach(ugfx.BTN_A, install_app)
ugfx.input_attach(ugfx.BTN_B, show_categories)
ugfx.input_attach(ugfx.JOY_UP, btn_unhandled)
ugfx.input_attach(ugfx.JOY_DOWN, btn_unhandled)
ugfx.input_attach(ugfx.JOY_LEFT, btn_unhandled)
ugfx.input_attach(ugfx.JOY_RIGHT, btn_unhandled)
#Hint
easydraw.disp_string_right_bottom(0, "START: Exit")
easydraw.disp_string_right_bottom(1, "A: Install app")
easydraw.disp_string_right_bottom(2, "B: Back")
#Flush screen
display.flush(display.FLAG_LUT_NORMAL)
except BaseException as e:
sys.print_exception(e)
print("ERROR", e)
showMessage("Internal error", True)
display.drawFill()
time.sleep(1)
show_categories()
# Install application
def install_app(pressed=True):
global category
if pressed:
slug = category[category_list.selected_index()]["slug"]
category = []
gc.collect()
category_list.visible(False)
category_list.enabled(False)
category_list.clear()
#Input handling
ugfx.input_attach(ugfx.BTN_START, btn_unhandled)
ugfx.input_attach(ugfx.BTN_SELECT, btn_unhandled)
ugfx.input_attach(ugfx.BTN_A, btn_unhandled)
ugfx.input_attach(ugfx.BTN_B, btn_unhandled)
ugfx.input_attach(ugfx.JOY_UP, btn_unhandled)
ugfx.input_attach(ugfx.JOY_DOWN, btn_unhandled)
ugfx.input_attach(ugfx.JOY_LEFT, btn_unhandled)
ugfx.input_attach(ugfx.JOY_RIGHT, btn_unhandled)
if not wifi.status():
wifi.connect()
wifi.wait()
        if not wifi.status():
            showMessage("Unable to connect to WiFi.")
            display.drawFill()
            time.sleep(2)
            show_category()
            return  # abort the install when there is no connection
showMessage("Installing "+slug+"...")
display.drawFill()
try:
woezel.install(slug)
showMessage("OK\n\n"+slug+" has been installed!", False, False, True)
display.drawFill()
time.sleep(2)
show_category()
except woezel.LatestInstalledError:
showMessage("NOTICE\n\nLatest version is already installed.", False, False, True)
display.drawFill()
time.sleep(2)
show_category()
except BaseException as e:
print("WOEZEL ERROR", e)
showMessage("Failed to install "+slug+"!", True)
display.drawFill()
time.sleep(2)
show_category()
#Main application
showMessage("Loading categories...")
display.drawFill()
if not repo.load():
if not repo.update():
if repo.lastUpdate==0:
showMessage("Failed to load repository. Returning to launcher...")
display.drawFill()
system.launcher()
for category in repo.categories:
categories_list.add_item("%s (%d) >" % (category["name"], category["eggs"]))
show_categories()
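# Flow recap (comment added for clarity): on boot the module loads or updates
# the woezel repository, fills categories_list above, and then navigates
# show_categories -> show_category -> install_app through the ugfx button
# callbacks that each screen registers.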
# ============================================================================
# File: backend/theadvrou_19084/settings.py
# Repo: crowdbotics-apps/theadvrou-19084 (no license)
# ============================================================================
"""
Django settings for theadvrou_19084 project.
Generated by 'django-admin startproject' using Django 2.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import environ
env = environ.Env()
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool("DEBUG", default=False)
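# Illustration of how these env.* lookups resolve (hypothetical shell setup,
# not part of this project):
#   export DEBUG=True
#   export SECRET_KEY=change-me
#   export DATABASE_URL=postgres://user:pass@localhost:5432/app
# django-environ then parses the raw strings into typed values via
# env.bool(...), env.str(...) and env.db() below.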
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env.str("SECRET_KEY")
ALLOWED_HOSTS = env.list("HOST", default=["*"])
SITE_ID = 1
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SECURE_SSL_REDIRECT = env.bool("SECURE_REDIRECT", default=False)
# Application definition
INSTALLED_APPS = [
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
"django.contrib.sites",
"delivery_order",
"driver",
"menu",
"delivery_user_profile",
]
LOCAL_APPS = [
"home",
"users.apps.UsersConfig",
]
THIRD_PARTY_APPS = [
"rest_framework",
"rest_framework.authtoken",
"rest_auth",
"rest_auth.registration",
"bootstrap4",
"allauth",
"allauth.account",
"allauth.socialaccount",
"allauth.socialaccount.providers.google",
"django_extensions",
"drf_yasg",
# start fcm_django push notifications
"fcm_django",
# end fcm_django push notifications
]
INSTALLED_APPS += LOCAL_APPS + THIRD_PARTY_APPS
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
]
ROOT_URLCONF = "theadvrou_19084.urls"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
},
]
WSGI_APPLICATION = "theadvrou_19084.wsgi.application"
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": os.path.join(BASE_DIR, "db.sqlite3"),
}
}
if env.str("DATABASE_URL", default=None):
DATABASES = {"default": env.db()}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
},
{"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",},
{"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",},
{"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = "/static/"
MIDDLEWARE += ["whitenoise.middleware.WhiteNoiseMiddleware"]
AUTHENTICATION_BACKENDS = (
"django.contrib.auth.backends.ModelBackend",
"allauth.account.auth_backends.AuthenticationBackend",
)
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATICFILES_DIRS = [os.path.join(BASE_DIR, "static")]
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
# allauth / users
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = "email"
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = "mandatory"
ACCOUNT_CONFIRM_EMAIL_ON_GET = True
ACCOUNT_LOGIN_ON_EMAIL_CONFIRMATION = True
ACCOUNT_UNIQUE_EMAIL = True
LOGIN_REDIRECT_URL = "users:redirect"
ACCOUNT_ADAPTER = "users.adapters.AccountAdapter"
SOCIALACCOUNT_ADAPTER = "users.adapters.SocialAccountAdapter"
ACCOUNT_ALLOW_REGISTRATION = env.bool("ACCOUNT_ALLOW_REGISTRATION", True)
SOCIALACCOUNT_ALLOW_REGISTRATION = env.bool("SOCIALACCOUNT_ALLOW_REGISTRATION", True)
REST_AUTH_SERIALIZERS = {
# Replace password reset serializer to fix 500 error
"PASSWORD_RESET_SERIALIZER": "home.api.v1.serializers.PasswordSerializer",
}
REST_AUTH_REGISTER_SERIALIZERS = {
# Use custom serializer that has no username and matches web signup
"REGISTER_SERIALIZER": "home.api.v1.serializers.SignupSerializer",
}
# Custom user model
AUTH_USER_MODEL = "users.User"
EMAIL_HOST = env.str("EMAIL_HOST", "smtp.sendgrid.net")
EMAIL_HOST_USER = env.str("SENDGRID_USERNAME", "")
EMAIL_HOST_PASSWORD = env.str("SENDGRID_PASSWORD", "")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
# start fcm_django push notifications
FCM_DJANGO_SETTINGS = {"FCM_SERVER_KEY": env.str("FCM_SERVER_KEY", "")}
# end fcm_django push notifications
if DEBUG:
# output email to console instead of sending
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
# ============================================================================
# File: app/http/controllers/WelcomeController.py
# Repo: afdolriski/masonite-starter (no license)
# ============================================================================
''' A Module Description '''
from masonite.view import view
from config import application
class WelcomeController(object):
''' Controller for welcoming the user '''
def __init__(self):
pass
def show(self, request):
''' Show Welcome Template '''
return view('welcome', {'app': application})
# ============================================================================
# File: allennlp/scripts/train_fixtures.py
# Repo: rahular/joint-coref-srl (Apache-2.0, MIT)
# ============================================================================
#!/usr/bin/env python
import glob
import logging
import os
import re
import shutil
import sys
import tempfile
sys.path.insert(0, os.path.dirname(os.path.abspath(os.path.join(__file__, os.pardir))))
from allennlp.commands.test_install import _get_module_root
from allennlp.commands.train import train_model_from_file, train_model
from allennlp.common import Params
from allennlp.common.util import pushd
from allennlp.training.metrics import EvalbBracketingScorer
logger = logging.getLogger(__name__)
def train_fixture(config_prefix: str) -> None:
config_file = config_prefix + "experiment.json"
serialization_dir = config_prefix + "serialization"
# Train model doesn't like it if we have incomplete serialization
# directories, so remove them if they exist.
if os.path.exists(serialization_dir):
shutil.rmtree(serialization_dir)
# train the model
train_model_from_file(config_file, serialization_dir)
# remove unnecessary files
shutil.rmtree(os.path.join(serialization_dir, "log"))
for filename in glob.glob(os.path.join(serialization_dir, "*")):
if (
filename.endswith(".log")
or filename.endswith(".json")
or re.search(r"epoch_[0-9]+\.th$", filename)
):
os.remove(filename)
def train_fixture_gpu(config_prefix: str) -> None:
config_file = config_prefix + "experiment.json"
serialization_dir = config_prefix + "serialization"
params = Params.from_file(config_file)
params["trainer"]["cuda_device"] = 0
# train this one to a tempdir
    tempdir = tempfile.mkdtemp()  # a fresh directory; gettempdir() would return the shared root
train_model(params, tempdir)
    # now copy back the weights and archived model
shutil.copy(
os.path.join(tempdir, "best.th"), os.path.join(serialization_dir, "best_gpu.th")
)
shutil.copy(
os.path.join(tempdir, "model.tar.gz"),
os.path.join(serialization_dir, "model_gpu.tar.gz"),
)
if __name__ == "__main__":
module_root = _get_module_root().parent
with pushd(module_root, verbose=True):
if len(sys.argv) >= 2 and sys.argv[1].lower() == "gpu":
train_fixture_gpu("allennlp/tests/fixtures/srl/")
else:
models = [
"biaffine_dependency_parser",
"constituency_parser",
"coref",
"decomposable_attention",
"encoder_decoder/composed_seq2seq",
"encoder_decoder/simple_seq2seq",
"encoder_decoder/copynet_seq2seq",
"simple_tagger_with_span_f1",
"srl",
]
for model in models:
if model == "constituency_parser":
EvalbBracketingScorer.compile_evalb()
train_fixture(f"allennlp/tests/fixtures/{model}/")
# ============================================================================
# File: src/rubricsampling/short_answer_test.py
# Repo: willcrichton/generative-grading (no license)
# ============================================================================
#!/usr/bin/env python
import pickle
import os.path
import sys
import generatorUtils as utils
from pprint import pprint
from tqdm import tqdm
from engine import Engine
from engineTempered import EngineTempered
from engineGuidedInference import EngineGuided
from src.datasets.citizenship_labels import CitizenshipLabels
import torch
from torch.utils.data import DataLoader
import matplotlib.pyplot as plt
import getpass
USER = getpass.getuser()
GRAMMAR_DIR = 'src/rubricsampling/grammars/citizenship13'
EXP_DIR = f'/home/{USER}/generative-grading/experiments/citizenship13_100k/2019-04-12--01_26_27'
class RubricSampler(object):
def create_data_loader(self, dataset, n=None):
if n is None:
n = len(dataset)
out = []
raw_progs = []
for i in range(n):
prog = dataset.raw_inputs[i]
if prog == 'religious freedom':
out.append(dataset[i])
raw_progs.append(prog)
break
return out, raw_progs
# Function: Run
# -------------
# This function compiles and renders samples
# from the Rubric Sample
def run(self):
inf_e = EngineGuided(GRAMMAR_DIR, EXP_DIR)
dataset = CitizenshipLabels(13, split='valid', vocab=inf_e.agent.train_dataset.vocab)
# dataset = CitizenshipLabels(13, split='valid')
N = 50
data, raw_prgs = self.create_data_loader(dataset)
data_loader = DataLoader(data, batch_size=1, shuffle=False)
tqdm_batch = tqdm(data_loader, total=N)
# inf_e = EngineTempered(GRAMMAR_DIR)
time_data = []
uniq_progs = set()
failed = []
num_all = 0
num_correct = 0
for i, data_list in enumerate(tqdm_batch):
program_args = (data_list[0], data_list[2])
label = data_list[3]
program = raw_prgs[i]
            # import pdb; pdb.set_trace()  # leftover debug trap; disabled so the loop can run unattended
corr = self.infer_matches(inf_e, program, program_args, label)
if corr:
num_correct += 1
num_all += 1
# pprint(failed)
print(f'Accuracy = {num_correct/num_all}')
def infer_matches(self, inf_e, program, program_args, label, n_lim=4):
all_progs = []
for i in range(n_lim):
new_prog, new_choices = self.guided_sample(inf_e, program_args)
all_progs.append(new_prog)
# import pdb; pdb.set_trace()
# print(program)
# print(new_prog)
# print(label)
# pprint(new_choices)
# input()
# print()
return int(new_choices['correctStrategy']) == label.item()
#####################
# Private Helpers
#####################
def guided_sample(self, inf_e, program_args):
# something that will crash if accessed without setting
initAssignments = 1000000 * torch.ones(1, inf_e.model.num_nodes)
program, labels, decisions, rvOrder, rvAssignments_pred = inf_e.renderProgram(program_args, initAssignments)
# program, labels, decisions, rvOrder, rvAssignments_pred = inf_e.renderProgram()
# print(rvAssignments[0][rvOrders[0][:rvOrders_lengths[0]]])
# print(rvAssignments_pred[0][rvOrders[0][:rvOrders_lengths[0]]])
# input()
return program, decisions
def sample(self, e):
program, labels, decisions, _, _ = e.renderProgram()
return program, decisions
if __name__ == '__main__':
RubricSampler().run()
# ============================================================================
# File: 5180_constrained_subset_sum.py
# Repo: tabletenniser/leetcode (no license)
# ============================================================================
'''
Given an integer array nums and an integer k, return the maximum sum of a non-empty subset of that array such that for every two consecutive integers in the subset, nums[i] and nums[j], where i < j, the condition j - i <= k is satisfied.
A subset of an array is obtained by deleting some number of elements (can be zero) from the array, leaving the remaining elements in their original order.
Example 1:
Input: nums = [10,2,-10,5,20], k = 2
Output: 37
Explanation: The subset is [10, 2, 5, 20].
Example 2:
Input: nums = [-1,-2,-3], k = 1
Output: -1
Explanation: The subset must be non-empty, so we choose the largest number.
Example 3:
Input: nums = [10,-2,-10,-5,20], k = 2
Output: 23
Explanation: The subset is [10, -2, -5, 20].
Constraints:
1 <= k <= nums.length <= 10^5
-10^4 <= nums[i] <= 10^4
'''
import sys
sys.setrecursionlimit(100000)
class Solution:
def max_sum(self, nums, l, h, k):
        cur_max = float('-inf')  # sentinel below any achievable subset sum
for k_i in range(k):
cur_sum = 0
for n in range(l+k_i, h+1, k):
cur_sum += nums[n]
cur_max = max(cur_sum, cur_max)
return cur_max
def constrainedSubsetSum(self, nums, k) -> int:
if max(nums) < 0:
return max(nums)
res = 0
n_start = None
for i in range(len(nums)):
num = nums[i]
if num >= 0:
if n_start:
if n_start != 0:
n_range_sum = self.max_sum(nums, n_start, i - 1, k)
res = max(0, res + n_range_sum)
n_start = None
res += num
else:
if not n_start:
n_start = i
return res
def rec(self, cur_ind, cur_k):
if (cur_ind, cur_k) in self.ht:
return self.ht[(cur_ind, cur_k)]
if cur_ind == 0:
return max(0, self.nums[cur_ind]) if cur_k < self.k else self.nums[cur_ind]
choose = self.rec(cur_ind - 1, 1) + self.nums[cur_ind]
res = choose
if cur_k < self.k and self.nums[cur_ind] < 0:
not_choose = self.rec(cur_ind - 1, cur_k + 1)
res = max(res, not_choose)
# print(cur_ind, cur_k, res)
self.ht[(cur_ind, cur_k)] = res
return res
def constrainedSubsetSum2(self, nums, k) -> int:
if max(nums) < 0:
return max(nums)
self.nums = nums
self.k = k
self.ht = dict()
return self.rec(len(nums)-1, 1)
s = Solution()
# nums = [10,-2,-10,-5,20]
# k = 2
# nums = [-1,-2,-3]
# k = 1
# nums =[-5266,4019,7336,-3681,-5767]
# k = 2
nums = [-8269,3217,-4023,-4138,-683,6455,-3621,9242,4015,-3790]
k = 1
# nums = [(-1)**i * i for i in range(10000)]
# k = 5000
res = s.constrainedSubsetSum(nums, k)
print(res)
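# For reference, a sketch of the standard O(n) monotonic-deque solution to the
# same problem (added for illustration; not part of the original submission).
# dp[i] is the best sum of a valid subset ending at index i, and the deque
# holds indices of the last k dp values in decreasing order of dp.
from collections import deque

def constrained_subset_sum_deque(nums, k):
    dp = list(nums)                        # base case: each element alone
    dq = deque()
    for i in range(len(nums)):
        while dq and dq[0] < i - k:        # drop indices outside the window
            dq.popleft()
        if dq and dp[dq[0]] > 0:           # extend the best previous subset
            dp[i] += dp[dq[0]]
        while dq and dp[dq[-1]] <= dp[i]:  # keep dp values decreasing
            dq.pop()
        dq.append(i)
    return max(dp)

print(constrained_subset_sum_deque(nums, k))  # should match the result above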
# ============================================================================
# File: backend/apps/league/migrations/0001_initial.py
# Repo: alexmon1989/afliga (no license)
# ============================================================================
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-10-26 09:05
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Event',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('event_time', models.IntegerField(validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(100)], verbose_name='Минута')),
],
),
migrations.CreateModel(
name='EventType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255, verbose_name='Название')),
],
),
migrations.CreateModel(
name='Group',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255, verbose_name='Название')),
('table', models.TextField(blank=True, null=True, verbose_name='Таблица результатов')),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Создано')),
('updated_at', models.DateTimeField(auto_now=True, verbose_name='Обновлено')),
],
),
migrations.CreateModel(
name='Match',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('match_date', models.DateTimeField(blank=True, null=True, verbose_name='Время начала матча')),
('protocol', models.TextField(blank=True, verbose_name='Протокол')),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Создано')),
('updated_at', models.DateTimeField(auto_now=True, verbose_name='Обновлено')),
],
),
migrations.CreateModel(
name='Player',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255, verbose_name='ФИО')),
('birth_date', models.DateField(blank=True, null=True, verbose_name='Дата рождения')),
('biography', models.TextField(blank=True, verbose_name='Биография')),
],
),
migrations.CreateModel(
name='Position',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255, verbose_name='Название')),
],
),
migrations.CreateModel(
name='Team',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255, verbose_name='Название')),
('description', models.TextField(blank=True, null=True, verbose_name='Описание')),
('logo', models.ImageField(blank=True, null=True, upload_to='teams', verbose_name='Логотип')),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Создано')),
('updated_at', models.DateTimeField(auto_now=True, verbose_name='Обновлено')),
],
),
migrations.CreateModel(
name='Tournament',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255, verbose_name='Название')),
('description', models.TextField(blank=True, null=True, verbose_name='Описание')),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Создано')),
('updated_at', models.DateTimeField(auto_now=True, verbose_name='Обновлено')),
],
),
migrations.CreateModel(
name='TournamentPlayer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('player', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='league.Player', verbose_name='Игрок')),
('team', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='league.Team', verbose_name='Команда')),
('tournament', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='league.Tournament', verbose_name='Турнир')),
],
),
migrations.AddField(
model_name='tournament',
name='players',
field=models.ManyToManyField(blank=True, through='league.TournamentPlayer', to='league.Player'),
),
migrations.AddField(
model_name='tournament',
name='teams',
field=models.ManyToManyField(blank=True, to='league.Team'),
),
migrations.AddField(
model_name='player',
name='position',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='league.Position', verbose_name='Амплуа'),
),
migrations.AddField(
model_name='match',
name='team_1',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='team_1', to='league.Team', verbose_name='Команда 1'),
),
migrations.AddField(
model_name='match',
name='team_2',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='team_2', to='league.Team', verbose_name='Команда 2'),
),
migrations.AddField(
model_name='group',
name='tournament',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='league.Tournament', verbose_name='Турнир'),
),
migrations.AddField(
model_name='event',
name='event_type',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='league.EventType', verbose_name='Тип события'),
),
migrations.AddField(
model_name='event',
name='match',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='league.Match', verbose_name='Матч'),
),
migrations.AddField(
model_name='event',
name='player',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='league.Player', verbose_name='Игрок'),
),
migrations.AddField(
model_name='event',
name='team',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='league.Team', verbose_name='Команда'),
),
]
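# (Comment added for clarity) `python manage.py migrate league` applies these
# operations in order: the CreateModel operations build the tables first, and
# the AddField operations then wire up the remaining ForeignKey / ManyToMany
# columns declared above.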
# ============================================================================
# File: alphamind/data/engines/sqlengine.py
# Repo: GehirnW/alpha-mind (MIT)
# ============================================================================
# -*- coding: utf-8 -*-
"""
Created on 2017-7-7
@author: cheng.li
"""
from typing import Iterable
from typing import List
from typing import Dict
from typing import Tuple
from typing import Union
import numpy as np
import pandas as pd
import sqlalchemy as sa
import sqlalchemy.orm as orm
from sqlalchemy import select, and_, outerjoin, join, delete, insert
from sqlalchemy.sql import func
from sqlalchemy.sql.expression import bindparam
from alphamind.data.engines.universe import Universe
from alphamind.data.dbmodel.models import FactorMaster
from alphamind.data.dbmodel.models import FactorLog
from alphamind.data.dbmodel.models import Strategy
from alphamind.data.dbmodel.models import IndexComponent
from alphamind.data.dbmodel.models import Industry
from alphamind.data.dbmodel.models import Experimental
from alphamind.data.dbmodel.models import RiskMaster
from alphamind.data.dbmodel.models import RiskCovDay
from alphamind.data.dbmodel.models import RiskCovShort
from alphamind.data.dbmodel.models import RiskCovLong
from alphamind.data.dbmodel.models import FullFactor
from alphamind.data.dbmodel.models import Models
from alphamind.data.dbmodel.models import Market
from alphamind.data.dbmodel.models import IndexMarket
from alphamind.data.dbmodel.models import Universe as UniverseTable
from alphamind.data.dbmodel.models import Formulas
from alphamind.data.dbmodel.models import DailyPortfoliosSchedule
from alphamind.data.dbmodel.models import Performance
from alphamind.data.dbmodel.models import Positions
from alphamind.data.transformer import Transformer
from alphamind.model.loader import load_model
from alphamind.formula.utilities import encode_formula
from alphamind.formula.utilities import decode_formula
from PyFin.api import advanceDateByCalendar
risk_styles = ['BETA',
'MOMENTUM',
'SIZE',
'EARNYILD',
'RESVOL',
'GROWTH',
'BTOP',
'LEVERAGE',
'LIQUIDTY',
'SIZENL']
industry_styles = [
'Bank',
'RealEstate',
'Health',
'Transportation',
'Mining',
'NonFerMetal',
'HouseApp',
'LeiService',
'MachiEquip',
'BuildDeco',
'CommeTrade',
'CONMAT',
'Auto',
'Textile',
'FoodBever',
'Electronics',
'Computer',
'LightIndus',
'Utilities',
'Telecom',
'AgriForest',
'CHEM',
'Media',
'IronSteel',
'NonBankFinan',
'ELECEQP',
'AERODEF',
'Conglomerates'
]
macro_styles = ['COUNTRY']
total_risk_factors = risk_styles + industry_styles + macro_styles
factor_tables = [FullFactor, Experimental]
DEFAULT_URL = 'postgresql+psycopg2://postgres:[email protected]/alpha'
DAILY_RETURN_OFFSET = 0
def _map_risk_model_table(risk_model: str) -> tuple:
if risk_model == 'day':
return RiskCovDay, FullFactor.d_srisk
elif risk_model == 'short':
return RiskCovShort, FullFactor.s_srisk
elif risk_model == 'long':
return RiskCovLong, FullFactor.l_srisk
else:
raise ValueError("risk model name {0} is not recognized".format(risk_model))
def _map_factors(factors: Iterable[str], used_factor_tables) -> Dict:
factor_cols = {}
excluded = {'trade_date', 'code', 'isOpen'}
for f in factors:
for t in used_factor_tables:
if f not in excluded and f in t.__table__.columns:
factor_cols[t.__table__.columns[f]] = t
break
return factor_cols
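# For illustration: given dependency ['EPS', 'ROE'] (hypothetical factor
# names), factor_cols maps each matching SQLAlchemy Column object, e.g.
# FullFactor.__table__.columns['EPS'], to the model class that owns it, so the
# fetch_* methods below can join only the factor tables they actually need.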
def _map_industry_category(category: str) -> str:
if category == 'sw':
return '申万行业分类'
else:
raise ValueError("No other industry is supported at the current time")
class SqlEngine(object):
def __init__(self,
db_url: str = None):
if db_url:
self.engine = sa.create_engine(db_url)
else:
self.engine = sa.create_engine(DEFAULT_URL)
self.session = None
self.create_session()
if self.engine.name == 'mssql':
self.ln_func = func.log
else:
self.ln_func = func.ln
def create_session(self):
db_session = orm.sessionmaker(bind=self.engine)
self.session = db_session()
def fetch_factors_meta(self) -> pd.DataFrame:
query = self.session.query(FactorMaster)
return pd.read_sql(query.statement, query.session.bind)
def fetch_factor_coverage(self) -> pd.DataFrame:
query = self.session.query(FactorLog)
return pd.read_sql(query.statement, query.session.bind)
def fetch_risk_meta(self) -> pd.DataFrame:
query = self.session.query(RiskMaster)
return pd.read_sql(query.statement, query.session.bind)
    def fetch_strategy(self, ref_date: str, strategy: str) -> pd.DataFrame:
query = select([Strategy.strategyName, Strategy.factor, Strategy.weight]).where(
and_(
Strategy.trade_date == ref_date,
Strategy.strategyName == strategy
)
)
return pd.read_sql(query, self.session.bind)
def fetch_strategy_names(self):
query = select([Strategy.strategyName]).distinct()
cursor = self.engine.execute(query)
strategy_names = {s[0] for s in cursor.fetchall()}
return strategy_names
def fetch_codes(self, ref_date: str, universe: Universe) -> List[int]:
cond = universe.query(ref_date)
query = select([UniverseTable.trade_date, UniverseTable.code]).distinct().where(cond)
cursor = self.engine.execute(query)
codes_set = {c[1] for c in cursor.fetchall()}
return sorted(codes_set)
def fetch_codes_range(self,
universe: Universe,
start_date: str = None,
end_date: str = None,
dates: Iterable[str] = None) -> pd.DataFrame:
cond = universe.query_range(start_date, end_date, dates)
query = select([UniverseTable.trade_date, UniverseTable.code]).distinct().where(cond)
return pd.read_sql(query, self.engine)
def fetch_dx_return(self,
ref_date: str,
codes: Iterable[int],
expiry_date: str = None,
horizon: int = 0,
offset: int = 0) -> pd.DataFrame:
start_date = ref_date
if not expiry_date:
end_date = advanceDateByCalendar('china.sse', ref_date,
str(1 + horizon + offset + DAILY_RETURN_OFFSET) + 'b').strftime('%Y%m%d')
else:
end_date = expiry_date
stats = func.sum(self.ln_func(1. + Market.chgPct)).over(
partition_by=Market.code,
order_by=Market.trade_date,
rows=(1 + DAILY_RETURN_OFFSET + offset, 1 + horizon + DAILY_RETURN_OFFSET + offset)).label('dx')
query = select([Market.trade_date, Market.code, stats]).where(
and_(
Market.trade_date.between(start_date, end_date),
Market.code.in_(codes)
)
)
df = pd.read_sql(query, self.session.bind).dropna()
df = df[df.trade_date == ref_date]
return df[['code', 'dx']]
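    # Note on the window frame above (explanatory comment, added): summing
    # ln(1 + chgPct) over the forward rows (offset .. offset + horizon) gives a
    # cumulative log return. For two days at +1% each the sum is
    # ln(1.01) + ln(1.01), and exponentiating it minus 1 recovers the
    # compounded 2.01%; 'dx' itself is left in log space.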
def fetch_dx_return_range(self,
universe,
start_date: str = None,
end_date: str = None,
dates: Iterable[str] = None,
horizon: int = 0,
offset: int = 0) -> pd.DataFrame:
if dates:
start_date = dates[0]
end_date = dates[-1]
end_date = advanceDateByCalendar('china.sse', end_date,
str(1 + horizon + offset + DAILY_RETURN_OFFSET) + 'b').strftime('%Y-%m-%d')
cond = universe.query_range(start_date, end_date)
big_table = join(Market, UniverseTable,
and_(Market.trade_date == UniverseTable.trade_date,
Market.code == UniverseTable.code,
cond))
stats = func.sum(self.ln_func(1. + Market.chgPct)).over(
partition_by=Market.code,
order_by=Market.trade_date,
rows=(1 + offset + DAILY_RETURN_OFFSET, 1 + horizon + offset + DAILY_RETURN_OFFSET)).label('dx')
query = select([Market.trade_date, Market.code, stats]) \
.select_from(big_table)
df = pd.read_sql(query, self.session.bind).dropna()
if dates:
df = df[df.trade_date.isin(dates)]
return df
def fetch_dx_return_index(self,
ref_date: str,
index_code: int,
expiry_date: str = None,
horizon: int = 0,
offset: int = 0) -> pd.DataFrame:
start_date = ref_date
if not expiry_date:
end_date = advanceDateByCalendar('china.sse', ref_date,
str(1 + horizon + offset + DAILY_RETURN_OFFSET) + 'b').strftime('%Y%m%d')
else:
end_date = expiry_date
stats = func.sum(self.ln_func(1. + IndexMarket.chgPct)).over(
partition_by=IndexMarket.indexCode,
order_by=IndexMarket.trade_date,
rows=(1 + DAILY_RETURN_OFFSET + offset, 1 + horizon + DAILY_RETURN_OFFSET + offset)).label('dx')
query = select([IndexMarket.trade_date, IndexMarket.indexCode.label('code'), stats]).where(
and_(
IndexMarket.trade_date.between(start_date, end_date),
IndexMarket.indexCode == index_code
)
)
df = pd.read_sql(query, self.session.bind).dropna()
df = df[df.trade_date == ref_date]
return df[['code', 'dx']]
def fetch_dx_return_index_range(self,
index_code,
start_date: str = None,
end_date: str = None,
dates: Iterable[str] = None,
horizon: int = 0,
offset: int = 0) -> pd.DataFrame:
if dates:
start_date = dates[0]
end_date = dates[-1]
end_date = advanceDateByCalendar('china.sse', end_date,
str(1 + horizon + offset + DAILY_RETURN_OFFSET) + 'b').strftime('%Y-%m-%d')
stats = func.sum(self.ln_func(1. + IndexMarket.chgPct)).over(
partition_by=IndexMarket.indexCode,
order_by=IndexMarket.trade_date,
rows=(1 + offset + DAILY_RETURN_OFFSET, 1 + horizon + offset + DAILY_RETURN_OFFSET)).label('dx')
query = select([IndexMarket.trade_date, IndexMarket.indexCode.label('code'), stats]) \
.where(
and_(
IndexMarket.trade_date.between(start_date, end_date),
IndexMarket.indexCode == index_code
)
)
df = pd.read_sql(query, self.session.bind).dropna()
if dates:
df = df[df.trade_date.isin(dates)]
return df
def fetch_factor(self,
ref_date: str,
factors: Iterable[object],
codes: Iterable[int],
warm_start: int = 0,
used_factor_tables=None) -> pd.DataFrame:
if isinstance(factors, Transformer):
transformer = factors
else:
transformer = Transformer(factors)
dependency = transformer.dependency
if used_factor_tables:
factor_cols = _map_factors(dependency, used_factor_tables)
else:
factor_cols = _map_factors(dependency, factor_tables)
start_date = advanceDateByCalendar('china.sse', ref_date, str(-warm_start) + 'b').strftime('%Y-%m-%d')
end_date = ref_date
big_table = FullFactor
for t in set(factor_cols.values()):
if t.__table__.name != FullFactor.__table__.name:
big_table = outerjoin(big_table, t, and_(FullFactor.trade_date == t.trade_date,
FullFactor.code == t.code))
query = select(
[FullFactor.trade_date, FullFactor.code, FullFactor.isOpen] + list(factor_cols.keys())) \
.select_from(big_table).where(and_(FullFactor.trade_date.between(start_date, end_date),
FullFactor.code.in_(codes)))
df = pd.read_sql(query, self.engine).sort_values(['trade_date', 'code']).set_index('trade_date')
res = transformer.transform('code', df)
for col in res.columns:
if col not in set(['code', 'isOpen']) and col not in df.columns:
df[col] = res[col].values
df['isOpen'] = df.isOpen.astype(bool)
df = df.loc[ref_date]
df.index = list(range(len(df)))
return df
def fetch_factor_range(self,
universe: Universe,
factors: Union[Transformer, Iterable[object]],
start_date: str = None,
end_date: str = None,
dates: Iterable[str] = None,
external_data: pd.DataFrame = None,
used_factor_tables=None) -> pd.DataFrame:
if isinstance(factors, Transformer):
transformer = factors
else:
transformer = Transformer(factors)
dependency = transformer.dependency
if used_factor_tables:
factor_cols = _map_factors(dependency, used_factor_tables)
else:
factor_cols = _map_factors(dependency, factor_tables)
cond = universe.query_range(start_date, end_date, dates)
big_table = FullFactor
for t in set(factor_cols.values()):
if t.__table__.name != FullFactor.__table__.name:
if dates is not None:
big_table = outerjoin(big_table, t, and_(FullFactor.trade_date == t.trade_date,
FullFactor.code == t.code,
FullFactor.trade_date.in_(dates)))
else:
big_table = outerjoin(big_table, t, and_(FullFactor.trade_date == t.trade_date,
FullFactor.code == t.code,
FullFactor.trade_date.between(start_date, end_date)))
big_table = join(big_table, UniverseTable,
and_(FullFactor.trade_date == UniverseTable.trade_date,
FullFactor.code == UniverseTable.code,
cond))
query = select(
[FullFactor.trade_date, FullFactor.code, FullFactor.isOpen] + list(factor_cols.keys())) \
.select_from(big_table).distinct()
df = pd.read_sql(query, self.engine).sort_values(['trade_date', 'code'])
if external_data is not None:
df = pd.merge(df, external_data, on=['trade_date', 'code']).dropna()
df.set_index('trade_date', inplace=True)
res = transformer.transform('code', df)
for col in res.columns:
if col not in set(['code', 'isOpen']) and col not in df.columns:
df[col] = res[col].values
df['isOpen'] = df.isOpen.astype(bool)
return df.reset_index()
def fetch_benchmark(self,
ref_date: str,
benchmark: int) -> pd.DataFrame:
query = select([IndexComponent.code, (IndexComponent.weight / 100.).label('weight')]).where(
and_(
IndexComponent.trade_date == ref_date,
IndexComponent.indexCode == benchmark
)
)
return pd.read_sql(query, self.engine)
def fetch_benchmark_range(self,
benchmark: int,
start_date: str = None,
end_date: str = None,
dates: Iterable[str] = None) -> pd.DataFrame:
cond = IndexComponent.trade_date.in_(dates) if dates else IndexComponent.trade_date.between(start_date,
end_date)
query = select(
[IndexComponent.trade_date, IndexComponent.code, (IndexComponent.weight / 100.).label('weight')]).where(
and_(
cond,
IndexComponent.indexCode == benchmark
)
)
return pd.read_sql(query, self.engine)
def fetch_risk_model(self,
ref_date: str,
codes: Iterable[int],
risk_model: str = 'short',
excluded: Iterable[str] = None) -> Tuple[pd.DataFrame, pd.DataFrame]:
risk_cov_table, special_risk_col = _map_risk_model_table(risk_model)
cov_risk_cols = [risk_cov_table.__table__.columns[f] for f in total_risk_factors]
query = select([risk_cov_table.FactorID,
risk_cov_table.Factor]
+ cov_risk_cols).where(
risk_cov_table.trade_date == ref_date
)
risk_cov = pd.read_sql(query, self.engine).sort_values('FactorID')
if excluded:
risk_exposure_cols = [FullFactor.__table__.columns[f] for f in total_risk_factors if f not in set(excluded)]
else:
risk_exposure_cols = [FullFactor.__table__.columns[f] for f in total_risk_factors]
query = select([FullFactor.code, special_risk_col] + risk_exposure_cols) \
.where(and_(FullFactor.trade_date == ref_date, FullFactor.code.in_(codes))).distinct()
risk_exp = pd.read_sql(query, self.engine)
return risk_cov, risk_exp
def fetch_risk_model_range(self,
universe: Universe,
start_date: str = None,
end_date: str = None,
dates: Iterable[str] = None,
risk_model: str = 'short',
excluded: Iterable[str] = None) -> Tuple[pd.DataFrame, pd.DataFrame]:
risk_cov_table, special_risk_col = _map_risk_model_table(risk_model)
cov_risk_cols = [risk_cov_table.__table__.columns[f] for f in total_risk_factors]
cond = risk_cov_table.trade_date.in_(dates) if dates else risk_cov_table.trade_date.between(start_date,
end_date)
query = select([risk_cov_table.trade_date,
risk_cov_table.FactorID,
risk_cov_table.Factor]
+ cov_risk_cols).where(
cond
)
risk_cov = pd.read_sql(query, self.engine).sort_values(['trade_date', 'FactorID'])
if not excluded:
excluded = []
risk_exposure_cols = [FullFactor.__table__.columns[f] for f in total_risk_factors if f not in set(excluded)]
cond = universe.query_range(start_date, end_date, dates)
big_table = join(FullFactor, UniverseTable,
and_(FullFactor.trade_date == UniverseTable.trade_date,
FullFactor.code == UniverseTable.code,
cond))
query = select(
[FullFactor.trade_date, FullFactor.code, special_risk_col] + risk_exposure_cols) \
.select_from(big_table).distinct()
risk_exp = pd.read_sql(query, self.engine)
return risk_cov, risk_exp
def fetch_industry(self,
ref_date: str,
codes: Iterable[int],
category: str = 'sw'):
industry_category_name = _map_industry_category(category)
query = select([Industry.code,
Industry.industryID1.label('industry_code'),
Industry.industryName1.label('industry')]).where(
and_(
Industry.trade_date == ref_date,
Industry.code.in_(codes),
Industry.industry == industry_category_name
)
).distinct()
return pd.read_sql(query, self.engine)
def fetch_industry_range(self,
universe: Universe,
start_date: str = None,
end_date: str = None,
dates: Iterable[str] = None,
category: str = 'sw'):
industry_category_name = _map_industry_category(category)
cond = universe.query_range(start_date, end_date, dates)
if dates:
big_table = join(Industry, UniverseTable,
and_(Industry.trade_date == UniverseTable.trade_date,
Industry.code == UniverseTable.code,
Industry.industry == industry_category_name,
Industry.trade_date.in_(dates),
cond))
else:
big_table = join(Industry, UniverseTable,
and_(Industry.trade_date == UniverseTable.trade_date,
Industry.code == UniverseTable.code,
Industry.industry == industry_category_name,
Industry.trade_date.between(start_date, end_date),
cond))
query = select([Industry.trade_date,
Industry.code,
Industry.industryID1.label('industry_code'),
Industry.industryName1.label('industry')]).select_from(big_table).distinct()
return pd.read_sql(query, self.engine)
def fetch_data(self, ref_date: str,
factors: Iterable[str],
codes: Iterable[int],
benchmark: int = None,
risk_model: str = 'short',
industry: str = 'sw') -> Dict[str, pd.DataFrame]:
total_data = {}
transformer = Transformer(factors)
factor_data = self.fetch_factor(ref_date,
transformer,
codes,
used_factor_tables=[FullFactor, Experimental])
if benchmark:
benchmark_data = self.fetch_benchmark(ref_date, benchmark)
total_data['benchmark'] = benchmark_data
factor_data = pd.merge(factor_data, benchmark_data, how='left', on=['code'])
factor_data['weight'] = factor_data['weight'].fillna(0.)
if risk_model:
excluded = list(set(total_risk_factors).intersection(transformer.dependency))
risk_cov, risk_exp = self.fetch_risk_model(ref_date, codes, risk_model, excluded)
factor_data = pd.merge(factor_data, risk_exp, how='left', on=['code'])
total_data['risk_cov'] = risk_cov
industry_info = self.fetch_industry(ref_date=ref_date,
codes=codes,
category=industry)
factor_data = pd.merge(factor_data, industry_info, on=['code'])
total_data['factor'] = factor_data
return total_data
def fetch_data_experimental(self, ref_date: str,
factors: Iterable[str],
codes: Iterable[int],
benchmark: int = None,
risk_model: str = 'short',
industry: str = 'sw') -> Dict[str, pd.DataFrame]:
total_data = {}
transformer = Transformer(factors)
factor_data = self.fetch_factor(ref_date, transformer, codes, used_factor_tables=[Experimental])
if benchmark:
benchmark_data = self.fetch_benchmark(ref_date, benchmark)
total_data['benchmark'] = benchmark_data
factor_data = pd.merge(factor_data, benchmark_data, how='left', on=['code'])
factor_data['weight'] = factor_data['weight'].fillna(0.)
if risk_model:
excluded = list(set(total_risk_factors).intersection(transformer.dependency))
risk_cov, risk_exp = self.fetch_risk_model(ref_date, codes, risk_model, excluded)
factor_data = pd.merge(factor_data, risk_exp, how='left', on=['code'])
total_data['risk_cov'] = risk_cov
industry_info = self.fetch_industry(ref_date=ref_date,
codes=codes,
category=industry)
factor_data = pd.merge(factor_data, industry_info, on=['code'])
total_data['factor'] = factor_data
return total_data
def fetch_data_range(self,
universe: Universe,
factors: Iterable[str],
start_date: str = None,
end_date: str = None,
dates: Iterable[str] = None,
benchmark: int = None,
risk_model: str = 'short',
industry: str = 'sw',
external_data: pd.DataFrame = None) -> Dict[str, pd.DataFrame]:
total_data = {}
transformer = Transformer(factors)
factor_data = self.fetch_factor_range(universe,
transformer,
start_date,
end_date,
dates,
external_data=external_data)
if benchmark:
benchmark_data = self.fetch_benchmark_range(benchmark, start_date, end_date, dates)
total_data['benchmark'] = benchmark_data
factor_data = pd.merge(factor_data, benchmark_data, how='left', on=['trade_date', 'code'])
factor_data['weight'] = factor_data['weight'].fillna(0.)
if risk_model:
excluded = list(set(total_risk_factors).intersection(transformer.dependency))
risk_cov, risk_exp = self.fetch_risk_model_range(universe, start_date, end_date, dates, risk_model,
excluded)
factor_data = pd.merge(factor_data, risk_exp, how='left', on=['trade_date', 'code'])
total_data['risk_cov'] = risk_cov
industry_info = self.fetch_industry_range(universe,
start_date=start_date,
end_date=end_date,
dates=dates,
category=industry)
factor_data = pd.merge(factor_data, industry_info, on=['trade_date', 'code'])
total_data['factor'] = factor_data
return total_data
def fetch_model(self,
ref_date=None,
model_type=None,
model_version=None,
is_primary=True,
model_id=None) -> pd.DataFrame:
conditions = []
if ref_date:
conditions.append(Models.trade_date == ref_date)
if model_id:
conditions.append(Models.model_id == model_id)
if model_type:
conditions.append(Models.model_type == model_type)
if model_version:
conditions.append(Models.model_version == model_version)
conditions.append(Models.is_primary == is_primary)
query = select([Models]).where(and_(*conditions))
model_df = pd.read_sql(query, self.engine)
for i, model_desc in enumerate(model_df.model_desc):
model_df.loc[i, 'model'] = load_model(model_desc)
del model_df['model_desc']
return model_df
def insert_formula(self, formula_name, formula_obj, comment=None):
dict_repr = encode_formula(formula=formula_obj)
query = delete(Formulas).where(
Formulas.formula == formula_name
)
self.engine.execute(query)
query = insert(Formulas, values=dict(formula=formula_name,
formula_desc=dict_repr,
comment=comment))
self.engine.execute(query)
def load_formula(self, formula_name):
query = select([Formulas]).where(
Formulas.formula == formula_name
)
df = pd.read_sql(query, self.engine)
if not df.empty:
return decode_formula(df.loc[0, 'formula_desc']['desc'])
def load_all_formulas(self):
query = select([Formulas])
df = pd.read_sql(query, self.engine, index_col='formula')
if not df.empty:
return pd.Series({name: decode_formula(df.loc[name, 'formula_desc']['desc']) for name in df.index})
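    # Round-trip sketch for the formula store (illustrative; 'alpha_1' and
    # some_formula are hypothetical placeholders):
    #   engine.insert_formula('alpha_1', some_formula, comment='demo')
    #   f = engine.load_formula('alpha_1')  # decoded back into a formula object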
def insert_portfolio_schedule(self, df):
query = insert(DailyPortfoliosSchedule).values(
{
DailyPortfoliosSchedule.portfolio_name: bindparam('portfolio_name'),
DailyPortfoliosSchedule.trade_date: bindparam('trade_date')
}
)
self.engine.execute(query, df.to_dict('record'))
def upsert_performance(self, ref_date, df):
build_types = df['type'].unique().tolist()
universes = df['universe'].unique().tolist()
benchmarks = df['benchmark'].unique().tolist()
portfolios = df['portfolio'].unique().tolist()
sources = df['source'].unique().tolist()
query = delete(Performance).where(
and_(
Performance.trade_date == ref_date,
Performance.type.in_(build_types),
Performance.universe.in_(universes),
Performance.benchmark.in_(benchmarks),
Performance.source.in_(sources),
Performance.portfolio.in_(portfolios)
)
)
self.engine.execute(query)
df.to_sql(Performance.__table__.name, self.engine, if_exists='append', index=False)
def upsert_positions(self, ref_date, df):
universes = df.universe.unique().tolist()
benchmarks = df.benchmark.unique().tolist()
build_types = df.type.unique().tolist()
sources = df.source.unique().tolist()
portfolios = df.portfolio.unique().tolist()
query = delete(Positions).where(
and_(
Positions.trade_date == ref_date,
Positions.type.in_(build_types),
Positions.universe.in_(universes),
Positions.benchmark.in_(benchmarks),
Positions.source.in_(sources),
Positions.portfolio.in_(portfolios)
)
)
self.engine.execute(query)
df.to_sql(Positions.__table__.name,
self.engine,
if_exists='append',
index=False,
dtype={'weight': sa.types.JSON})
if __name__ == '__main__':
engine = SqlEngine()
df = pd.DataFrame(dict(trade_date=['2017-11-24'],
portfolio_name=['test']))
    engine.insert_portfolio_schedule(df)

# ============================================================================
# File: src/rdfextras/sparql2sql/bison/QName.py
# Repo: agarrido/ro-manager (no license)
# ============================================================================
from rdflib.term import Identifier
class QName(Identifier):
__slots__ = ("localname", "prefix")
def __new__(cls,value):
try:
inst = unicode.__new__(cls, value)
except UnicodeDecodeError:
inst = unicode.__new__(cls, value,'utf-8')
inst.prefix,inst.localname = value.split(':')
return inst
class QNamePrefix(Identifier):
def __init__(self,prefix):
super(QNamePrefix,self).__init__(prefix)
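# Usage sketch (added for illustration; this module targets Python 2, hence
# `unicode`):
#   >>> q = QName(u'foaf:name')
#   >>> q.prefix, q.localname
#   (u'foaf', u'name')
# Note that value.split(':') unpacks into exactly two parts, so a value with
# more or fewer than one colon would raise a ValueError here.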
# Convenience
# from rdfextras.sparql2sql.bison.QName import QName
# from rdfextras.sparql2sql.bison.QName import QNamePrefix

# ============================================================================
# File: main.py
# Repo: wudangqibujie/3.-24 (no license)
# ============================================================================
import master
import redis_or
import slave
from lxml import etree
# a=master.Master_Spider("shenzhen")
# html = a.get_html("https://www.xin.com/beijing/benchi/i3/")
# urls=a.get_detail_url(html)
q = redis_or.Redis_Data()
# for url in urls:
# q.set_into_data("test_car_urls",url)
for i in range(1,11):
url = q.pop_data("test_car_urls")
# print(url)
html = master.Master_Spider("shenzhen").get_html("https://"+url)
print(type(html))
a = slave.Slave_Spisder()
data = a.parse_detail_data(html)
print(data)
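# Pattern note (comment added for clarity): this follows a simple master/slave
# crawl split -- the master spider extracts detail-page URLs into a Redis queue
# ("test_car_urls"), and this script pops URLs off that queue and parses each
# detail page with the slave spider.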
# ============================================================================
# File: PyTorch/built-in/nlp/MT5_ID4146_for_PyTorch/transformers/src/transformers/models/hubert/__init__.py
# Repo: Ascend/ModelZoo-PyTorch (Apache-2.0, BSD-3-Clause, BSD-2-Clause, MIT,
#       GPL-1.0-or-later, and other detected licenses)
# ============================================================================
# flake8: noqa
# There's no way to ignore "F401 '...' imported but unused" warnings in this
# module, but to preserve other warnings. So, don't check this module at all.
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING
from ...file_utils import _LazyModule, is_tf_available, is_torch_available
_import_structure = {
".wav2vec2.feature_extraction_wav2vec2": ["Wav2Vec2FeatureExtractor"],
"configuration_hubert": ["HUBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "HubertConfig"],
}
if is_torch_available():
_import_structure["modeling_hubert"] = [
"HUBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
"HubertForCTC",
"HubertForSequenceClassification",
"HubertModel",
"HubertPreTrainedModel",
]
if is_tf_available():
_import_structure["modeling_tf_hubert"] = [
"TF_HUBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
"TFHubertForCTC",
"TFHubertModel",
"TFHubertPreTrainedModel",
]
if TYPE_CHECKING:
from ..wav2vec2.feature_extraction_wav2vec2 import Wav2Vec2FeatureExtractor
from .configuration_hubert import HUBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, HubertConfig
if is_torch_available():
from .modeling_hubert import (
HUBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
HubertForCTC,
HubertForSequenceClassification,
HubertModel,
HubertPreTrainedModel,
)
if is_tf_available():
from .modeling_tf_hubert import (
TF_HUBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFHubertForCTC,
TFHubertModel,
TFHubertPreTrainedModel,
)
else:
import sys
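    # _LazyModule defers the actual (and heavy) torch/tensorflow submodule
    # imports until an attribute such as HubertModel is first accessed.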
sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
| [
"[email protected]"
] | |
a9b1421809818a57810ce42c5b458c0686040d19 | 2760effda15d884af413ca2a35809d03fabea377 | /lc-94.py | d3a1d65ec14db010713ce89de3a52e712e346568 | [] | no_license | UtsavRaychaudhuri/leetcode | 31943b98ad89d96d72ee4b6b1d1c8d70429d1e1f | 77a13580fd6231830558b1cf8c84f8b3b62b99d0 | refs/heads/master | 2020-11-27T18:02:23.712639 | 2020-09-29T19:39:49 | 2020-09-29T19:39:49 | 229,552,583 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 546 | py | # Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def __init__(self):
self.my_list=[]
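        # NOTE: my_list is instance state, so reusing the same Solution
        # object on a second tree would append to the earlier traversal.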
def inorderTraversal(self, root):
"""
:type root: TreeNode
:rtype: List[int]
"""
if root:
self.inorderTraversal(root.left)
self.my_list.append(root.val)
self.inorderTraversal(root.right)
return self.my_list
| [
"[email protected]"
] | |
577c9dd804a8656c8b35188d217681bc4516d0ac | c981cc8c8dfe05fc5a57915e9b4d8479a9a6bbf2 | /py/lvmspec/bootcalib.py | 91003626b0f86de4e277dcc8432285166c3fbcfa | [
"BSD-3-Clause"
] | permissive | sdss/lvmspec | af25dae6ae87ea1355aa8f9075e2e174e4599eb7 | befd6991537c4947fdf63ca262937f2bb845148f | refs/heads/master | 2021-06-25T02:13:27.038449 | 2019-10-28T20:35:38 | 2019-10-28T20:35:38 | 102,514,477 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 65,452 | py | """
lvmspec.bootcalib
==================
Utility functions to perform a quick calibration of DESI data
TODO:
1. Expand to r, i cameras
2. QA plots
3. Test with CR data
"""
from __future__ import print_function, absolute_import, division
import numpy as np
import copy
import pdb
import imp
import yaml
import glob
import math
import time
import os
import sys
import argparse
import locale
from pkg_resources import resource_exists, resource_filename
from astropy.modeling import models, fitting
from astropy.stats import sigma_clip
from astropy.table import Table, Column, vstack
from astropy.io import fits
from lvmspec.util import set_backend
set_backend()
from matplotlib import pyplot as plt
import matplotlib
import matplotlib.gridspec as gridspec
import matplotlib.cm as cm
from matplotlib.backends.backend_pdf import PdfPages
from lvmutil.log import get_logger
from lvmutil import funcfits as dufits
from numpy.polynomial.legendre import legval
glbl_figsz = (16,9)
########################################################
# High level wrapper
# TODO: This was ported from the original bin/desi_bootcalib so that it could
# be called independently by quicklook, but it needs to be coordinated with
# lvmspec.scripts.bootcalib.main()
########################################################
def bootcalib(deg,flatimage,arcimage):
"""
Args:
deg: Legendre polynomial degree to use to fit
flatimage: lvmspec.image.Image object of flatfield
arcimage: lvmspec.image.Image object of arc
Mostly inherited from lvmspec/bin/desi_bootcalib directly as needed
    Returns:
        xfit : ndarray of fitted trace positions
        fdicts : list of trace fit dicts (one per fiber)
        gauss : ndarray of Gaussian sigmas (one per fiber)
        all_wv_soln : list of wavelength-solution dicts (one per fiber)
    """
camera=flatimage.camera
flat=flatimage.pix
ny=flat.shape[0]
xpk,ypos,cut=find_fiber_peaks(flat)
xset,xerr=trace_crude_init(flat,xpk,ypos)
xfit,fdicts=fit_traces(xset,xerr)
gauss=fiber_gauss(flat,xfit,xerr)
#- Also need wavelength solution not just trace
arc=arcimage.pix
arc_ivar=arcimage.ivar
all_spec=extract_sngfibers_gaussianpsf(arc,arc_ivar,xfit,gauss)
llist=load_arcline_list(camera)
### dlamb,wmark,gd_lines,line_guess=load_gdarc_lines(camera)
dlamb, gd_lines = load_gdarc_lines(camera, llist)
#- Solve for wavelengths
all_wv_soln=[]
all_dlamb=[]
for ii in range(all_spec.shape[1]):
spec=all_spec[:,ii]
pixpk=find_arc_lines(spec)
id_dict=id_arc_lines(pixpk,gd_lines,dlamb,wmark,line_guess=line_guess)
id_dict['fiber']=ii
#- Find the other good ones
if camera == 'z':
inpoly = 3 # The solution in the z-camera has greater curvature
else:
inpoly = 2
add_gdarc_lines(id_dict, pixpk, gd_lines, inpoly=inpoly)
#- Now the rest
id_remainder(id_dict, pixpk, llist)
#- Final fit wave vs. pix too
final_fit, mask = dufits.iter_fit(np.array(id_dict['id_wave']), np.array(id_dict['id_pix']), 'polynomial', 3, xmin=0., xmax=1.)
rms = np.sqrt(np.mean((dufits.func_val(np.array(id_dict['id_wave'])[mask==0],final_fit)-np.array(id_dict['id_pix'])[mask==0])**2))
final_fit_pix,mask2 = dufits.iter_fit(np.array(id_dict['id_pix']), np.array(id_dict['id_wave']),'legendre',deg, niter=5)
id_dict['final_fit'] = final_fit
id_dict['rms'] = rms
id_dict['final_fit_pix'] = final_fit_pix
id_dict['wave_min'] = dufits.func_val(0,final_fit_pix)
id_dict['wave_max'] = dufits.func_val(ny-1,final_fit_pix)
id_dict['mask'] = mask
all_wv_soln.append(id_dict)
return xfit, fdicts, gauss,all_wv_soln
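# A minimal usage sketch (illustrative only; assumes `flatimg` and `arcimg`
# are lvmspec.image.Image objects from preprocessed exposures, and that
# 'psfboot.fits' is just an example output path):
#
#   xfit, fdicts, gauss, all_wv_soln = bootcalib(5, flatimg, arcimg)
#   write_psf('psfboot.fits', xfit, fdicts, gauss, all_wv_soln)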
########################################################
# Arc/Wavelength Routines (Linelists come next)
########################################################
def find_arc_lines(spec,rms_thresh=7.,nwidth=5):
"""Find and centroid arc lines in an input spectrum
Parameters
----------
spec : ndarray
Arc line spectrum
rms_thresh : float
RMS threshold scale
nwidth : int
Line width to test over
"""
# Threshold criterion
npix = spec.size
spec_mask = sigma_clip(spec, sigma=4., iters=5)
rms = np.std(spec_mask)
thresh = rms*rms_thresh
#print("thresh = {:g}".format(thresh))
gdp = spec > thresh
# Avoid edges
gdp = gdp & (np.arange(npix) > 2.*nwidth) & (np.arange(npix) < (npix-2.*nwidth))
# Roll to find peaks (simple algorithm)
# nwidth = 5
nstep = max(1,nwidth // 2)
for kk in range(-nstep,nstep):
if kk < 0:
test = np.roll(spec,kk) < np.roll(spec,kk+1)
else:
test = np.roll(spec,kk) > np.roll(spec,kk+1)
# Compare
gdp = gdp & test
# Center
gdpix = np.where(gdp)[0]
ngd = gdpix.size
xpk = np.zeros(ngd)
flux = np.zeros(ngd)
for jj,igdpix in enumerate(gdpix):
# Simple flux-weight
pix = np.arange(igdpix-nstep,igdpix+nstep+1,dtype=int)
flux[jj] = np.sum(spec[pix])
xpk[jj] = np.sum(pix*spec[pix]) / flux[jj]
# Finish
return xpk , flux
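# Illustrative example: for a 1D arc spectrum `spec` (any ndarray of counts),
#   xpk, flux = find_arc_lines(spec)
# returns sub-pixel flux-weighted centroids `xpk` and summed fluxes `flux`
# for every peak rising rms_thresh*rms above the clipped background.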
def remove_duplicates_w_id(wy,w,y_id,w_id) :
# might be several identical w_id
y_id=np.array(y_id).astype(int)
w_id=np.array(w_id).astype(int)
y_id2=[]
w_id2=[]
for j in np.unique(w_id) :
w_id2.append(j)
ii=y_id[w_id==j]
if ii.size==1 :
y_id2.append(ii[0])
else :
i=np.argmin(np.abs(wy[ii]-w[j]))
y_id2.append(ii[i])
y_id2=np.array(y_id2).astype(int)
w_id2=np.array(w_id2).astype(int)
tmp=np.argsort(w[w_id2])
y_id2=y_id2[tmp]
w_id2=w_id2[tmp]
return y_id2,w_id2
def remove_duplicates_y_id(yw,y,y_id,w_id) :
# might be several identical y_id
w_id=np.array(w_id).astype(int)
y_id=np.array(y_id).astype(int)
w_id2=[]
y_id2=[]
for j in np.unique(y_id) :
y_id2.append(j)
ii=w_id[y_id==j]
if ii.size==1 :
w_id2.append(ii[0])
else :
i=np.argmin(np.abs(yw[ii]-y[j]))
w_id2.append(ii[i])
w_id2=np.array(w_id2).astype(int)
y_id2=np.array(y_id2).astype(int)
tmp=np.argsort(y[y_id2])
w_id2=w_id2[tmp]
y_id2=y_id2[tmp]
return y_id2,w_id2
def refine_solution(y,w,y_id,w_id,deg=3,tolerance=5.) :
log = get_logger()
# remove duplicates
transfo=np.poly1d(np.polyfit(y[y_id],w[w_id],deg=deg))
wy=transfo(y)
y_id,w_id=remove_duplicates_w_id(wy,w,y_id,w_id)
transfo=np.poly1d(np.polyfit(w[w_id],y[y_id],deg=deg))
yw=transfo(w)
y_id,w_id=remove_duplicates_y_id(yw,y,y_id,w_id)
if len(y_id) != len(np.unique(y_id)) :
log.error("duplicate AT INIT y_id={:s}".format(str(y_id)))
if len(w_id) != len(np.unique(w_id)) :
log.error("duplicate AT INIT w_id={:s}".format(str(w_id)))
nmatch=len(y_id)
#log.info("init nmatch=%d rms=%f wave=%s"%(nmatch,np.std(wy[y_id]-w[w_id]),w[w_id]))
#log.info("init nmatch=%d rms=%f"%(nmatch,np.std(wy[y_id]-w[w_id])))
if nmatch<deg+1 :
log.error("error : init nmatch too small")
return y_id,w_id,1000.,0
rms=0.
# loop on fit of transfo, pairing, cleaning
for loop in range(200) :
# compute transfo
transfo=np.poly1d(np.polyfit(y[y_id],w[w_id],deg=deg))
# apply transfo to measurements
wy=transfo(y)
previous_rms = rms+0.
rms=np.std(wy[y_id]-w[w_id])
# match lines
mdiff0=min(tolerance,max(2.,rms*2.)) # this is a difficult parameter to tune, either loose lever arm, or have false matches !!
mdiff1=tolerance # this is a difficult parameter to tune, either loose lever arm, or have false matches !!
unmatched_indices=np.setdiff1d(np.arange(y.size),y_id)
for i,wi in zip(unmatched_indices,wy[unmatched_indices]) :
dist=np.abs(wi-w)
jj=np.argsort(dist)
for j,o in enumerate(jj) :
if j in w_id :
continue
if dist[j]<mdiff0 or ( o<jj.size-1 and dist[j]<mdiff1 and dist[j]<0.3*dist[jj[o+1]]) :
y_id=np.append(y_id,i)
w_id=np.append(w_id,j)
break
previous_nmatch = nmatch+0
nmatch=len(y_id)
#log.info("iter #%d nmatch=%d rms=%f"%(loop,nmatch,rms))
if nmatch < deg+1 :
log.error("error init nmatch too small")
y_id=[]
w_id=[]
rms=100000.
return y_id,w_id,rms,loop
if nmatch==previous_nmatch and abs(rms-previous_rms)<0.01 and loop>=1 :
break
if nmatch>=min(w.size,y.size) :
#print("break because %d>=min(%d,%d)"%(nmatch,w.size,y.size))
break
return y_id,w_id,rms,loop
def id_remainder(id_dict, llist, deg=4, tolerance=1., verbose=False) :
log = get_logger()
y_id=np.array(id_dict['id_idx']).astype(int)
all_y=np.array(id_dict['pixpk'])
all_known_waves = np.sort(np.array(llist["wave"]))
identified_waves = np.array(id_dict["id_wave"]) # lines identified at previous steps
w_id=[]
for w in identified_waves :
i=np.argmin(np.abs(all_known_waves-w))
diff=np.abs(all_known_waves[i]-w)
if diff>0.1 :
log.warning("discrepant wavelength".format(w,all_known_waves[i]))
w_id.append(i)
w_id = np.array(w_id).astype(int)
y_id,w_id,rms,niter=refine_solution(all_y,all_known_waves,y_id,w_id,deg=deg,tolerance=tolerance)
id_dict['id_idx'] = np.sort(y_id)
id_dict['id_pix'] = np.sort(all_y[y_id])
id_dict['id_wave'] = np.sort(all_known_waves[w_id])
id_dict['rms'] = rms
log.info("{:d} matched for {:d} detected and {:d} known, rms = {:g}".format(len(y_id),len(all_y),len(all_known_waves),rms))
def compute_triplets(wave) :
triplets=[]
wave=np.sort(wave)
for i1,w1 in enumerate(wave[:-1]) :
for i2,w2 in enumerate(wave[i1+1:]) :
for i3,w3 in enumerate(wave[i1+i2+2:]) :
triplet=[w1,w2,w3,i1,i1+1+i2,i1+i2+2+i3,w2-w1,w3-w1,w2**2-w1**2,w3**2-w1**2]
#print(triplet)
#print(wave[i1],wave[i1+1+i2],wave[i1+i2+2+i3])
triplets.append(triplet)
return np.array(triplets)
def id_arc_lines_using_triplets(id_dict,w,dwdy_prior,d2wdy2_prior=1.5e-5,toler=0.2,ntrack=50,nmax=40):
"""Match (as best possible), a set of the input list of expected arc lines to the detected list
Parameters
----------
    id_dict : dictionary with pixel locations of detected arc lines in "pixpk" and fluxes in "flux"
    w : ndarray
        array of expected arc lines to be detected and identified
    dwdy_prior : float
        Average dispersion in the spectrum
d2wdy2_prior : float
Prior on second derivative
toler : float, optional
Tolerance for matching (20%)
ntrack : max. number of solutions to be tracked
    Returns
    -------
    None; id_dict is updated in place with the identified lines
    ("status", "id_idx", "id_pix", "id_wave", "rms", "fit")
"""
log=get_logger()
#log.info("y=%s"%str(y))
#log.info("w=%s"%str(w))
y = id_dict["pixpk"]
log.info("ny=%d nw=%d"%(len(y),len(w)))
if nmax<10 :
nmax=10
log.warning("force nmax=10 (arg was too small: {:d})".format(nmax))
if len(y)>nmax :
# log.info("down-selecting the number of detected lines from {:d} to {:d}".format(len(y),nmax))
# keep at least the edges
margin=3
new_y=np.append(y[:margin],y[-margin:])
# now look at the flux to select the other ones
flux=id_dict["flux"][margin:-margin]
ii=np.argsort(flux)
new_y=np.append(new_y,y[margin:-margin][ii[-(nmax-2*margin):]])
y = np.sort(new_y)
# compute triplets of waves of y positions
y_triplets = compute_triplets(y)
w_triplets = compute_triplets(w)
# each pair of triplet defines a 2nd order polynomial (chosen centered on y=2000)
# w = a*(y-2000)**2+b*(y-2000)+c
# w = a*y**2-4000*a*y+b*y+cst
# w = a*(y**2-4000*y)+b*y+cst
# dw_12 = a*(dy2_12-4000*dy_12)+b*dy_12
# dw_13 = a*(dy2_13-4000*dy_13)+b*dy_12
# dw_12 = a*cdy2_12+b*dy_12
# dw_13 = a*cdy2_13+b*dy_13
# with cdy2_12=dy2_12-4000*dy_12
# and cdy2_13=dy2_13-4000*dy_13
# idet = 1./(dy_13*cdy2_12-dy_12*cdy2_13)
# a = idet*(dy_13*dw_12-dy_12*dw_13)
# b = idet*(-cdy2_13*dw_12+cdy2_12*dw_13)
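    # In matrix form, the pair of equations above reads
    #   [dw_12]   [cdy2_12  dy_12] [a]
    #   [dw_13] = [cdy2_13  dy_13] [b]
    # so idet below is 1/determinant and a, b follow from Cramer's rule.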
#triplet=[w1,w2,w3,i1,i1+1+i2,i1+i2+2+i3,w2-w1,w3-w1,w2**2-w1**2,w3**2-w1**2]
dy_12=y_triplets[:,6]
dy_13=y_triplets[:,7]
#dy2_12=y_triplets[:,8]
#dy2_13=y_triplets[:,9]
# centered version
cdy2_12=y_triplets[:,8]-4000.*y_triplets[:,6]
cdy2_13=y_triplets[:,9]-4000.*y_triplets[:,7]
idet=1./(dy_13*cdy2_12-dy_12*cdy2_13)
# fill histogram with polynomial coefs and first index of each triplet in the pair for all pairs of triplets(y,w)
# create the 4D histogram
ndwdy = 41
nd2wdy2 = 21
dwdy_min = dwdy_prior*(1-toler)
dwdy_max = dwdy_prior*(1+toler)
dwdy_step = (dwdy_max-dwdy_min)/ndwdy
d2wdy2_min = -d2wdy2_prior
d2wdy2_max = +d2wdy2_prior
d2wdy2_step = (d2wdy2_max-d2wdy2_min)/nd2wdy2
histogram = np.zeros((ndwdy,nd2wdy2,len(y),len(w))) # definition of the histogram
# fill the histogram
for w_triplet in w_triplets :
#d2wdy2 = idet*(dy_13*w_triplet[6]-dy_12*w_triplet[7])
#dwdy = idet*(-cdy2_13*w_triplet[6]+cdy2_12*w_triplet[7])
# bins in the histogram
dwdy_bin = ((idet*(-cdy2_13*w_triplet[6]+cdy2_12*w_triplet[7])-dwdy_min)/dwdy_step).astype(int)
d2wdy2_bin = ((idet*(dy_13*w_triplet[6]-dy_12*w_triplet[7])-d2wdy2_min)/d2wdy2_step).astype(int)
pairs_in_histo=np.where((dwdy_bin>=0)&(dwdy_bin<ndwdy)&(d2wdy2_bin>=0)&(d2wdy2_bin<nd2wdy2))[0]
# fill histo
iw=int(w_triplet[3])
for a,b,c in zip(dwdy_bin[pairs_in_histo],d2wdy2_bin[pairs_in_histo],y_triplets[pairs_in_histo,3].astype(int)) :
histogram[a,b,c,iw] += 1
# find max bins in the histo
histogram_ravel = histogram.ravel()
best_histo_bins = histogram_ravel.argsort()[::-1]
#log.info("nmatch in first bins=%s"%histogram.ravel()[best_histo_bins[:3]])
best_y_id=[]
best_w_id=[]
best_rms=1000.
# loop on best matches ( = most populated bins)
count=0
for histo_bin in best_histo_bins[:ntrack] :
if histogram_ravel[histo_bin]<4 and count>3 :
log.warning("stopping here")
break
count += 1
dwdy_best_bin,d2wdy2_best_bin,iy_best_bin,iw_best_bin = np.unravel_index(histo_bin, histogram.shape) # bin coord
#print("bins=",dwdy_best_bin,d2wdy2_best_bin,iy_best_bin,iw_best_bin)
# pairs of triplets in this histo bin
w_id=np.array([])
y_id=np.array([])
wok=np.where(w_triplets[:,3]==iw_best_bin)[0]
yok=np.where(y_triplets[:,3]==iy_best_bin)[0]
for w_triplet in w_triplets[wok] :
#d2wdy2 = idet[yok]*(dy_13[yok]*w_triplet[6]-dy_12[yok]*w_triplet[7])
#dwdy = idet[yok]*(-cdy2_13[yok]*w_triplet[6]+cdy2_12[yok]*w_triplet[7])
# bins in the histogram
dwdy_bin = ((idet[yok]*(-cdy2_13[yok]*w_triplet[6]+cdy2_12[yok]*w_triplet[7])-dwdy_min)/dwdy_step).astype(int)
d2wdy2_bin = ((idet[yok]*(dy_13[yok]*w_triplet[6]-dy_12[yok]*w_triplet[7])-d2wdy2_min)/d2wdy2_step).astype(int)
wyok=yok[np.where((dwdy_bin==dwdy_best_bin)&(d2wdy2_bin==d2wdy2_best_bin))[0]]
for y_triplet in y_triplets[wyok] :
y_id=np.append(y_id,y_triplet[3:6])
w_id=np.append(w_id,w_triplet[3:6])
# now need to rm duplicates
nw=len(w)
ny=len(y)
unique_common_id=np.unique(y_id.astype(int)*nw+w_id.astype(int))
    y_id=(unique_common_id//nw).astype(int)
w_id=(unique_common_id%nw).astype(int)
ordering=np.argsort(y[y_id])
y_id=y_id[ordering]
w_id=w_id[ordering]
# refine
y_id,w_id,rms,niter=refine_solution(y,w,y_id,w_id)
#log.info("get solution with %d match and rms=%f (niter=%d)"%(len(y_id),rms,niter))
if (len(y_id)>len(best_y_id) and rms<max(1,best_rms)) or (len(y_id)==len(best_y_id) and rms<best_rms) or (best_rms>1 and rms<1 and len(y_id)>=8) :
#log.info("new best solution #%d with %d match and rms=%f (niter=%d)"%(count,len(y_id),rms,niter))
#log.info("previous had %d match and rms=%f"%(len(best_y_id),best_rms))
best_y_id = y_id
best_w_id = w_id
best_rms = rms
# stop at some moment
if best_rms<0.2 and len(y_id)>=min(15,min(len(y),len(w))) :
#log.info("stop here because we have a correct solution")
break
if len(y) != len(id_dict["pixpk"]) :
#log.info("re-indexing the result")
tmp_y_id = []
for i in best_y_id :
tmp_y_id.append(np.argmin(np.abs(id_dict["pixpk"]-y[i])))
best_y_id = np.array(tmp_y_id).astype(int)
y = id_dict["pixpk"]
if len(best_w_id) == 0 :
log.error("failed, no match")
id_dict["status"]="failed"
id_dict["id_idx"]=[]
id_dict["id_pix"]=[]
id_dict["id_wave"]=[]
id_dict["rms"]=999.
id_dict["fit"]=None
return
id_dict["status"]="ok"
id_dict["id_idx"]=best_y_id
id_dict["id_pix"]=y[best_y_id]
id_dict["id_wave"]=w[best_w_id]
id_dict["rms"]=best_rms
deg=max(1,min(3,best_y_id.size-2))
id_dict["fit"]= dufits.func_fit(w[best_w_id],y[best_y_id],'polynomial',deg,xmin=0.,xmax=1.)
log.info("{:d} matched for {:d} detected and {:d} known as good, rms = {:g}".format(len(best_y_id),len(y),len(w),best_rms))
########################################################
# Linelist routines
########################################################
def parse_nist(ion, vacuum=True):
"""Parse a NIST ASCII table.
Note that the long ---- should have
been commented out and also the few lines at the start.
Taken from PYPIT
Parameters
----------
ion : str
Name of ion
    vacuum : bool, optional
Use vacuum wavelengths
"""
log=get_logger()
# Find file
medium = 'vacuum'
if not vacuum:
log.info("Using air wavelengths")
medium = 'air'
srch_file = "data/arc_lines/{0}_{1}.ascii".format(ion, medium)
if not resource_exists('lvmspec', srch_file):
log.error("Cannot find NIST file {:s}".format(srch_file))
raise Exception("Cannot find NIST file {:s}".format(srch_file))
# Read, while working around non-ASCII characters in NIST line lists
nist_file = resource_filename('lvmspec', srch_file)
log.info("reading NIST file {:s}".format(nist_file))
default_locale = locale.getlocale(locale.LC_CTYPE)
locale.setlocale(locale.LC_CTYPE, 'en_US.UTF-8')
nist_tbl = Table.read(nist_file, format='ascii.fixed_width')
locale.setlocale(locale.LC_CTYPE, default_locale)
gdrow = nist_tbl['Observed'] > 0. # Eliminate dummy lines
nist_tbl = nist_tbl[gdrow]
# Now unique values only (no duplicates)
uniq, indices = np.unique(nist_tbl['Observed'],return_index=True)
nist_tbl = nist_tbl[indices]
# Deal with Rel
agdrel = []
for row in nist_tbl:
try:
gdrel = int(row['Rel.'])
except:
try:
gdrel = int(row['Rel.'][:-1])
except:
gdrel = 0
agdrel.append(gdrel)
agdrel = np.array(agdrel)
# Remove and add
nist_tbl.remove_column('Rel.')
nist_tbl.remove_column('Ritz')
nist_tbl.add_column(Column(agdrel,name='RelInt'))
nist_tbl.add_column(Column([ion]*len(nist_tbl), name='Ion', dtype=(str, 5)))
nist_tbl.rename_column('Observed','wave')
# Return
return nist_tbl
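# Illustrative example (assumes the packaged NIST tables ship with lvmspec):
#   neon = parse_nist('NeI')
#   print(neon[['Ion', 'wave', 'RelInt']][:5])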
def load_arcline_list(camera, vacuum=True,lamps=None):
"""Loads arc line list from NIST files
Parses and rejects
Taken from PYPIT
Parameters
----------
    camera : str
        Camera name; the first character ('b', 'r' or 'z') selects the default lamp set
vacuum : bool, optional
Use vacuum wavelengths
lamps : optional numpy array of ions, ex np.array(["HgI","CdI","ArI","NeI"])
Returns
-------
alist : Table
Table of arc lines
"""
log=get_logger()
wvmnx = None
if lamps is None :
if camera[0] == 'b':
lamps = ['CdI','ArI','HgI','NeI','KrI']
elif camera[0] == 'r':
lamps = ['CdI','ArI','HgI','NeI','KrI']
elif camera[0] == 'z':
lamps = ['CdI','ArI','HgI','NeI','KrI']
elif camera == 'all': # Used for specex
lamps = ['CdI','ArI','HgI','NeI','KrI']
else:
log.error("Not ready for this camera")
# Get the parse dict
parse_dict = load_parse_dict()
# Read rejection file
medium = 'vacuum'
if not vacuum:
log.info("Using air wavelengths")
medium = 'air'
rej_file = resource_filename('lvmspec', "data/arc_lines/rejected_lines_{0}.yaml".format(medium))
with open(rej_file, 'r') as infile:
rej_dict = yaml.load(infile)
# Loop through the NIST Tables
tbls = []
for iline in lamps:
# Load
tbl = parse_nist(iline, vacuum=vacuum)
# Parse
if iline in parse_dict:
tbl = parse_nist_tbl(tbl,parse_dict[iline])
# Reject
if iline in rej_dict:
log.info("Rejecting select {:s} lines".format(iline))
tbl = reject_lines(tbl,rej_dict[iline])
#print("DEBUG",iline)
#print("DEBUG",tbl[['Ion','wave','RelInt']])
tbls.append(tbl[['Ion','wave','RelInt']])
# Stack
alist = vstack(tbls)
# wvmnx?
if wvmnx is not None:
print('Cutting down line list by wvmnx: {:g},{:g}'.format(wvmnx[0],wvmnx[1]))
gdwv = (alist['wave'] >= wvmnx[0]) & (alist['wave'] <= wvmnx[1])
alist = alist[gdwv]
# Return
return alist
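# Illustrative example: build the default line list for a blue camera and
# keep only the stronger lines (the camera name 'b1' is just an example):
#   llist = load_arcline_list('b1')
#   strong = llist[llist['RelInt'] > 1000.]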
def reject_lines(tbl,rej_dict, rej_tol=0.1):
"""Rejects lines from a NIST table
Taken from PYPIT
Parameters
----------
tbl : Table
Read previously from NIST ASCII file
rej_dict : dict
Dict of rejected lines
rej_tol : float, optional
Tolerance for matching a line to reject to linelist (Angstroms)
Returns
-------
tbl : Table
Rows not rejected
"""
msk = tbl['wave'] == tbl['wave']
# Loop on rejected lines
for wave in rej_dict:
close = np.where(np.abs(wave-tbl['wave']) < rej_tol)[0]
if rej_dict[wave] == 'all':
msk[close] = False
else:
raise ValueError('Not ready for this')
# Return
return tbl[msk]
def parse_nist_tbl(tbl,parse_dict):
"""Parses a NIST table using various criteria
Parameters
----------
tbl : Table
Read previously from NIST ASCII file
parse_dict : dict
Dict of parsing criteria. Read from load_parse_dict
Returns
-------
tbl : Table
Rows meeting the criteria
"""
# Parse
gdI = tbl['RelInt'] >= parse_dict['min_intensity']
gdA = tbl['Aki'] >= parse_dict['min_Aki']
gdw = tbl['wave'] >= parse_dict['min_wave']
# Combine
allgd = gdI & gdA & gdw
# Return
return tbl[allgd]
def load_parse_dict():
"""Dicts for parsing Arc line lists from NIST
Rejected lines are in the rejected_lines.yaml file
"""
dict_parse = dict(min_intensity=0., min_Aki=0., min_wave=0.)
arcline_parse = {}
# ArI
arcline_parse['ArI'] = copy.deepcopy(dict_parse)
arcline_parse['ArI']['min_intensity'] = 1000. # NOT PICKING UP REDDEST LINES
# HgI
arcline_parse['HgI'] = copy.deepcopy(dict_parse)
arcline_parse['HgI']['min_intensity'] = 800.
# HeI
arcline_parse['HeI'] = copy.deepcopy(dict_parse)
arcline_parse['HeI']['min_intensity'] = 20.
# NeI
arcline_parse['NeI'] = copy.deepcopy(dict_parse)
arcline_parse['NeI']['min_intensity'] = 999.
#arcline_parse['NeI']['min_Aki'] = 1. # NOT GOOD FOR DEIMOS, DESI
#arcline_parse['NeI']['min_wave'] = 5700.
arcline_parse['NeI']['min_wave'] = 5850. # NOT GOOD FOR DEIMOS?
# ZnI
arcline_parse['ZnI'] = copy.deepcopy(dict_parse)
arcline_parse['ZnI']['min_intensity'] = 50.
# KrI
arcline_parse['KrI'] = copy.deepcopy(dict_parse)
arcline_parse['KrI']['min_intensity'] = 50.
return arcline_parse
def load_gdarc_lines(camera, llist, vacuum=True,lamps=None,good_lines_filename=None):
"""Loads a select set of arc lines for initial calibrating
Parameters
----------
camera : str
Camera ('b', 'g', 'r')
llist : table of lines to use, with columns Ion, wave
vacuum : bool, optional
Use vacuum wavelengths
lamps : optional numpy array of ions, ex np.array(["HgI","CdI","ArI","NeI"])
Returns
-------
dlamb : float
Dispersion for input camera
    gd_lines : ndarray
        Array of lines expected to be recorded and good for ID
"""
log=get_logger()
if lamps is None :
lamps=np.array(["HgI","CdI","ArI","NeI"])
lines={}
dlamb=0.6
if camera[0] == 'b':
dlamb = 0.589
elif camera[0] == 'r':
dlamb = 0.527
elif camera[0] == 'z':
#dlamb = 0.599 # Ang
dlamb = 0.608 # Ang (from teststand, ranges (fiber & wave) from 0.54 to 0.66)
# read good lines
if good_lines_filename is not None :
filename = good_lines_filename
else :
if vacuum :
filename = resource_filename('lvmspec', "data/arc_lines/goodlines_vacuum.ascii")
else :
filename = resource_filename('lvmspec', "data/arc_lines/goodlines_air.ascii")
log.info("Reading good lines in {:s}".format(filename))
lines={}
ifile=open(filename)
for line in ifile.readlines() :
if line[0]=="#" :
continue
vals=line.strip().split()
if len(vals)<3 :
log.warning("ignoring line '{:s}' in {:s}".format(line.strip(),filename))
continue
cameras=vals[2]
if cameras.find(camera[0].upper()) < 0 :
continue
ion=vals[1]
wave=float(vals[0])
if ion in lines:
lines[ion].append(wave)
else :
lines[ion]=[wave,]
ifile.close()
log.info("Good lines = {:s}".format(str(lines)))
log.info("Checking consistency with full line list")
nbad=0
for ion in lines:
ii=np.where(llist["Ion"]==ion)[0]
if ii.size == 0 :
continue
all_waves=np.array(llist["wave"][ii])
for j,w in enumerate(lines[ion]) :
i=np.argmin(np.abs(w-all_waves))
if np.abs(w-all_waves[i])>0.2 :
log.error("cannot find good line {:f} of {:s} in full line list. nearest is {:f}".format(w,ion,all_waves[i]))
nbad += 1
elif np.abs(w-all_waves[i])>0.001 :
log.warning("adjusting hardcoded {:s} line {:f} -> {:f} (the NIST line list is the truth)".format(w,ion,all_waves[i]))
lines[ion][j]=all_waves[i]
if nbad>0 :
log.error("{:d} inconsistent hardcoded lines, exiting".format(nbad))
sys.exit(12)
gd_lines=np.array([])
for lamp in lamps :
if lamp in lines:
gd_lines=np.append(gd_lines,lines[lamp])
# Sort and return
gd_lines.sort()
return dlamb, gd_lines
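# Illustrative example: dispersion prior and good-line list for a z camera
# (the camera name 'z1' is just an example):
#   llist = load_arcline_list('z1')
#   dlamb, gd_lines = load_gdarc_lines('z1', llist)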
########################################################
# Fiber routines
########################################################
def fiber_gauss(flat, xtrc, xerr, box_radius=2, max_iter=5, debug=False, verbose=False) :
return fiber_gauss_new(flat, xtrc, xerr, box_radius, max_iter)
def fiber_gauss_new(flat, xtrc, xerr, box_radius=2, max_iter=5, debug=False, verbose=False):
"""Find the PSF sigma for each fiber
This serves as an initial guess to what follows
Parameters
----------
flat : ndarray of fiber flat image
xtrc: ndarray of fiber traces
xerr: ndarray of error in fiber traces
    box_radius: int, optional
Radius of boxcar extraction in pixels
max_iter : int, optional
Maximum number of iterations for rejection
Returns
-------
gauss
list of Gaussian sigma
"""
log=get_logger()
npix_y = flat.shape[0]
npix_x = flat.shape[1]
ny = xtrc.shape[0] # number of ccd rows in trace
assert(ny==npix_y)
nfiber = xtrc.shape[1]
minflux=1. # minimal flux in a row to include in the fit
# Loop on fibers
gauss = []
start = 0
for ii in range(nfiber):
if (ii % 25 == 0): # & verbose:
stop=time.time()
if start==0 :
log.info("Working on fiber {:d} of {:d}".format(ii,nfiber))
else :
log.info("Working on fiber %d of %d (25 done in %3.2f sec)"%(ii,nfiber,stop-start))
start=stop
# collect data
central_xpix=np.floor(xtrc[:,ii]+0.5)
begin_xpix=(central_xpix-box_radius).astype(int)
end_xpix=(central_xpix+box_radius+1).astype(int)
dx=[]
flux=[]
for y in range(ny) :
yflux=flat[y,begin_xpix[y]:end_xpix[y]]
syflux=np.sum(yflux)
if syflux<minflux :
continue
dx.append(np.arange(begin_xpix[y],end_xpix[y])-(xtrc[y,ii]))
flux.append(yflux/syflux)
dx=np.array(dx)
flux=np.array(flux)
# compute profile
# one way to get something robust is to compute median in bins
# it's a bit biasing but the PSF is not a Gaussian anyway
bins=np.linspace(-box_radius,box_radius,100)
bstep=bins[1]-bins[0]
bdx=[]
bflux=[]
for b in bins :
ok=(dx>=b)&(dx<(b+bstep))
if np.sum(ok)>0 :
bdx.append(np.mean(dx[ok]))
bflux.append(np.median(flux[ok]))
if len(bdx)<10 :
log.error("sigma fit failed for fiber #%02d"%ii)
log.error("this should only occur for the fiber near the center of the detector (if at all)")
log.error("using the sigma value from the previous fiber")
gauss.append(gauss[-1])
continue
# this is the profile :
bdx=np.array(bdx)
bflux=np.array(bflux)
# fast iterative gaussian fit
sigma = 1.0
sq2 = math.sqrt(2.)
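        # Fixed-point iteration: weighting the profile by a Gaussian of width
        # `sigma` gives an effective second moment
        # sigma_eff^2 = sigma_true^2*sigma^2/(sigma_true^2+sigma^2), and the
        # update sigma <- sqrt(2)*sigma_eff has sigma_true as its fixed point.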
for i in range(10) :
nsigma = sq2*np.sqrt(np.mean(bdx**2*bflux*np.exp(-bdx**2/2/sigma**2))/np.mean(bflux*np.exp(-bdx**2/2/sigma**2)))
if abs(nsigma-sigma) < 0.001 :
break
sigma = nsigma
gauss.append(sigma)
return np.array(gauss)
def fiber_gauss_old(flat, xtrc, xerr, box_radius=2, max_iter=5, debug=False, verbose=False):
"""Find the PSF sigma for each fiber
This serves as an initial guess to what follows
Parameters
----------
flat : ndarray of fiber flat image
xtrc: ndarray of fiber traces
xerr: ndarray of error in fiber traces
    box_radius: int, optional
Radius of boxcar extraction in pixels
max_iter : int, optional
Maximum number of iterations for rejection
Returns
-------
gauss
list of Gaussian sigma
"""
log=get_logger()
log.warning("fiber_gauss uses astropy.modeling. Consider an alternative")
# Init
nfiber = xtrc.shape[1]
ny = xtrc.shape[0]
iy = np.arange(ny).astype(int)
# Mask
mask = np.zeros_like(flat,dtype=int)
# Sub images
xpix_img = np.outer(np.ones(flat.shape[0]),np.arange(flat.shape[1]))
# Gaussian fit
g_init = models.Gaussian1D(amplitude=1., mean=0., stddev=1.)
g_init.amplitude.fixed = True
g_init.mean.fixed = True
fitter = fitting.LevMarLSQFitter()
# Loop on fibers
gauss = []
start = 0
for ii in range(nfiber):
if (ii % 25 == 0): # & verbose:
stop=time.time()
if start==0 :
log.info("Working on fiber {:d} of {:d}".format(ii,nfiber))
else :
log.info("Working on fiber %d of %d (done 25 in %3.2f sec)"%(ii,nfiber,stop-start))
start=stop
mask[:] = 0
ixt = np.round(xtrc[:,ii]).astype(int)
for jj,ibox in enumerate(range(-box_radius,box_radius+1)):
ix = ixt + ibox
mask[iy,ix] = 1
dx_img = xpix_img - np.outer(xtrc[:,ii],np.ones(flat.shape[1]))
# Sum
flux = np.sum(mask*flat,axis=1)
flux = np.maximum(flux,1.)
# Normalize
nrm_img = flat / np.outer(flux,np.ones(flat.shape[1]))
# Gaussian
cpix = np.where(np.abs(dx_img)<0.10)
if len(cpix[0]) < 50:
cpix = np.where(np.abs(dx_img)<0.40)
amp = np.median(nrm_img[cpix])
g_init.amplitude.value = amp # Fixed
fdimg = dx_img[mask==1].flatten()
fnimg = nrm_img[mask==1].flatten()
# Guess at sigma
gdfn = (fnimg < amp) & (fnimg > 0.)
all_sig = np.abs(fdimg[gdfn]) / np.sqrt( np.log(amp)-np.log(fnimg[gdfn]) )
g_init.stddev.value = np.median(all_sig[np.where((np.abs(fdimg[gdfn])>1) & (np.abs(fdimg[gdfn])<1.5) & (np.isfinite(all_sig)))])
# Initial fit (need to mask!)
parm = fitter(g_init, fdimg, fnimg)
# Iterate
iterate = True
nrej = 0
niter = 0
while iterate & (niter < max_iter):
# Clip
resid = parm(fdimg) - fnimg
resid_mask = sigma_clip(resid, sigma=4., iters=5)
# Fit
gdp = ~resid_mask.mask
parm = fitter(g_init, fdimg[gdp], fnimg[gdp])
# Again?
if np.sum(resid_mask.mask) <= nrej:
iterate = False
else:
nrej = np.sum(resid_mask.mask)
niter += 1
if verbose:
log.info("Rejected {:d} in {:d} iterations".format(nrej,niter))
#debug = False
if debug:
plt.clf()
plt.scatter(fdimg[gdp], fnimg[gdp])
x= np.linspace(-box_radius, box_radius, 200)
plt.plot(x, parm(x), 'r-')
plt.show()
plt.close()
pdb.set_trace()
# Save
gauss.append(parm.stddev.value)
#
return np.array(gauss)
def find_fiber_peaks(flat, ypos=None, nwidth=5, debug=False) :
"""Find the peaks of the fiber flat spectra
    Performs book-keeping error checking
Args:
flat : ndarray of fiber flat image
ypos : int [optional] Row for finding peaks
Default is half-way up the image
nwidth : int [optional] Width of peak (end-to-end)
debug: bool, optional
Returns:
xpk, ypos, cut
list of xpk (nearest pixel) at ypos
ndarray of cut through the image
"""
log=get_logger()
log.info("starting")
# Init
Nbundle = 20
Nfiber = 25 # Fibers per bundle
# Set ypos for peak finding
if ypos is None:
ypos = flat.shape[0]//2
# Cut image
cutimg = flat[ypos-50:ypos+50, :]
# Smash
cut = np.median(cutimg, axis=0)
# Set flux threshold
#srt = np.sort(cutimg.flatten()) # this does not work for sparse fibers
#thresh = srt[int(cutimg.size*0.95)] / 2. # this does not work for sparse fibers
thresh = np.max(cut)/20.
pixels_below_threshold=np.where(cut<thresh)[0]
if pixels_below_threshold.size>2 :
values_below_threshold = sigma_clip(cut[pixels_below_threshold],sigma=3,iters=200)
if values_below_threshold.size>2 :
rms=np.std(values_below_threshold)
nsig=7
new_thresh=max(thresh,nsig*rms)
log.info("Threshold: {:f} -> {:f} ({:d}*rms: {:f})".format(thresh,new_thresh,nsig,nsig*rms))
thresh=new_thresh
#gdp = cut > thresh
# Roll to find peaks (simple algorithm)
#nstep = nwidth // 2
#for kk in range(-nstep,nstep):
# if kk < 0:
# test = np.roll(cut,kk) < np.roll(cut,kk+1)
# else:
# test = np.roll(cut,kk) > np.roll(cut,kk+1)
# # Compare
# gdp = gdp & test
#xpk = np.where(gdp)[0]
# Find clusters of adjacent points
clusters=[]
gdp=np.where(cut > thresh)[0]
cluster=[gdp[0]]
for i in gdp[1:] :
if i==cluster[-1]+1 :
cluster.append(i)
else :
clusters.append(cluster)
cluster=[i]
clusters.append(cluster)
log.info("Number of clusters found: {:d}".format(len(clusters)))
# Record max of each cluster
xpk=np.zeros((len(clusters)), dtype=np.int64)
for i in range(len(clusters)) :
t=np.argmax(cut[clusters[i]])
xpk[i]=clusters[i][t]
if debug:
#pdb.xplot(cut, xtwo=xpk, ytwo=cut[xpk],mtwo='o')
pdb.set_trace()
# Book-keeping and some error checking
if len(xpk) != Nbundle*Nfiber:
log.warning('Found the wrong number of total fibers: {:d}'.format(len(xpk)))
else:
log.info('Found {:d} fibers'.format(len(xpk)))
# Find bundles
xsep = np.roll(xpk,-1) - xpk
medsep = np.median(xsep)
bundle_ends = np.where(np.abs(xsep-medsep) > 0.5*medsep)[0]
if len(bundle_ends) != Nbundle:
log.warning('Found the wrong number of bundles: {:d}'.format(len(bundle_ends)))
else:
log.info('Found {:d} bundles'.format(len(bundle_ends)))
# Confirm correct number of fibers per bundle
bad = ((bundle_ends+1) % Nfiber) != 0
if np.sum(bad) > 0:
log.warning('Wrong number of fibers in a bundle')
#raise ValueError('Wrong number of fibers in a bundle')
# Return
return xpk, ypos, cut
def fit_traces(xset, xerr, func='legendre', order=6, sigrej=20.,
RMS_TOLER=0.03, verbose=False):
"""Fit the traces
Default is 6th order Legendre polynomials
Parameters
----------
xset : ndarray
traces
xerr : ndarray
Error in the trace values (999.=Bad)
RMS_TOLER : float, optional [0.02]
Tolerance on size of RMS in fit
Returns
-------
xnew, fits
xnew : ndarray
New fit values (without error)
fits : list
List of the fit dicts
"""
log=get_logger()
ny = xset.shape[0]
ntrace = xset.shape[1]
xnew = np.zeros_like(xset)
fits = []
yval = np.arange(ny)
for ii in range(ntrace):
mask = xerr[:,ii] > 900.
nmask = np.sum(mask)
# Fit with rejection
dfit, mask = dufits.iter_fit(yval, xset[:,ii], func, order, sig_rej=sigrej,
weights=1./xerr[:,ii], initialmask=mask, maxone=True)#, sigma=xerr[:,ii])
# Stats on residuals
nmask_new = np.sum(mask)-nmask
if nmask_new > 200:
log.error("Rejected many points ({:d}) in fiber {:d}".format(nmask_new, ii))
# Save
xnew[:,ii] = dufits.func_val(yval,dfit)
fits.append(dfit)
        # Residuals
gdval = mask==0
resid = xnew[:,ii][gdval] - xset[:,ii][gdval]
rms = np.std(resid)
if verbose:
print('RMS of FIT= {:g}'.format(rms))
if rms > RMS_TOLER:
#from xastropy.xutils import xdebug as xdb
#xdb.xplot(yval, xnew[:,ii], xtwo=yval[gdval],ytwo=xset[:,ii][gdval], mtwo='o')
log.error("RMS {:g} exceeded tolerance for fiber {:d}".format(rms, ii))
# Return
return xnew, fits
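# Illustrative example: fit smooth Legendre traces to the crude solution
# (xset and xerr as returned by trace_crude_init below):
#   xnew, fdicts = fit_traces(xset, xerr, func='legendre', order=6)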
def extract_sngfibers_gaussianpsf(img, img_ivar, xtrc, sigma, box_radius=2, verbose=True):
"""Extract spectrum for fibers one-by-one using a Gaussian PSF
Parameters
----------
img : ndarray
Image
img_ivar : ndarray
Image inverse variance
xtrc : ndarray
fiber trace
sigma : float
Gaussian sigma for PSF
box_radius : int, optional
Radius for extraction (+/-)
Returns
-------
spec : ndarray
Extracted spectrum
"""
# Init
xpix_img = np.outer(np.ones(img.shape[0]),np.arange(img.shape[1]))
mask = np.zeros_like(img,dtype=int)
iy = np.arange(img.shape[0],dtype=int)
log = get_logger()
#
all_spec = np.zeros_like(xtrc)
cst = 1./np.sqrt(2*np.pi)
start=0
for qq in range(xtrc.shape[1]):
if verbose & (qq % 25 == 0):
stop=time.time()
if start>0 :
log.info("Working on fiber %d of %d (done 25 in %3.2f sec)"%(qq,xtrc.shape[1],stop-start))
else :
log.info("Working on fiber %d of %d"%(qq,xtrc.shape[1]))
start=stop
# Mask
mask[:,:] = 0
ixt = np.round(xtrc[:,qq]).astype(int)
for jj,ibox in enumerate(range(-box_radius,box_radius+1)):
ix = ixt + ibox
mask[iy,ix] = 1
# Sub-image (for speed, not convenience)
gdp = np.where(mask == 1)
minx = np.min(gdp[1])
maxx = np.max(gdp[1])
nx = (maxx-minx)+1
# Generate PSF
dx_img = xpix_img[:,minx:maxx+1] - np.outer(xtrc[:,qq], np.ones(nx))
psf = cst*np.exp(-0.5 * (dx_img/sigma[qq])**2)/sigma[qq]
#dx_img = xpix_img[:,minx:maxx+1] - np.outer(xtrc[:,qq],np.ones(img.shape[1]))
#g_init = models.Gaussian1D(amplitude=1., mean=0., stddev=sigma[qq])
#psf = mask * g_init(dx_img)
# Extract
#all_spec[:,qq] = np.sum(psf*img,axis=1) / np.sum(psf,axis=1)
#all_spec[:,qq] = np.sum(psf*img[:,minx:maxx+1],axis=1) / np.sum(psf,axis=1)
a=np.sum(img_ivar[:,minx:maxx+1]*psf**2,axis=1)
b=np.sum(img_ivar[:,minx:maxx+1]*psf*img[:,minx:maxx+1],axis=1)
ok=(a>1.e-6)
all_spec[ok,qq] = b[ok] / a[ok]
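        # b/a is the inverse-variance-weighted least-squares amplitude of the
        # unit-normalized Gaussian profile in each CCD row (optimal extraction
        # for a single isolated profile); 1/a would be its variance.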
#import astropy.io.fits as pyfits
#h=pyfits.HDUList([pyfits.PrimaryHDU(),
# pyfits.ImageHDU(img[:,minx:maxx+1],name="FLUX"),
# pyfits.ImageHDU(img_ivar[:,minx:maxx+1],name="IVAR"),
# pyfits.ImageHDU(psf,name="PSF"),
# pyfits.ImageHDU(a,name="A"),
# pyfits.ImageHDU(b,name="B")])
#h.writeto("test.fits")
#sys.exit(12)
# Return
return all_spec
def trace_crude_init(image, xinit0, ypass, invvar=None, radius=2.,
maxshift0=0.5, maxshift=0.15, maxerr=0.2):
# xset, xerr, maxerr, maxshift, maxshift0
"""Python port of trace_crude_idl.pro from IDLUTILS
Modified for initial guess
Parameters
----------
image : 2D ndarray
Image for tracing
    xinit0 : ndarray
Initial guesses for trace peak at ypass
ypass : int
Row for initial guesses
Returns
-------
xset : Trace for each fiber
xerr : Estimated error in that trace
"""
# Init
xinit = xinit0.astype(float)
#xinit = xinit[0:3]
ntrace = xinit.size
ny = image.shape[0]
xset = np.zeros((ny,ntrace))
xerr = np.zeros((ny,ntrace))
if invvar is None:
invvar = np.zeros_like(image) + 1.
#
# Recenter INITIAL Row for all traces simultaneously
#
iy = ypass * np.ones(ntrace,dtype=int)
xfit,xfiterr = trace_fweight(image, xinit, iy, invvar=invvar, radius=radius)
# Shift
xshift = np.clip(xfit-xinit, -1*maxshift0, maxshift0) * (xfiterr < maxerr)
xset[ypass,:] = xinit + xshift
xerr[ypass,:] = xfiterr * (xfiterr < maxerr) + 999.0 * (xfiterr >= maxerr)
# /* LOOP FROM INITIAL (COL,ROW) NUMBER TO LARGER ROW NUMBERS */
for iy in range(ypass+1, ny):
xinit = xset[iy-1, :]
ycen = iy * np.ones(ntrace,dtype=int)
xfit,xfiterr = trace_fweight(image, xinit, ycen, invvar=invvar, radius=radius)
# Shift
xshift = np.clip(xfit-xinit, -1*maxshift, maxshift) * (xfiterr < maxerr)
# Save
xset[iy,:] = xinit + xshift
xerr[iy,:] = xfiterr * (xfiterr < maxerr) + 999.0 * (xfiterr >= maxerr)
# /* LOOP FROM INITIAL (COL,ROW) NUMBER TO SMALLER ROW NUMBERS */
for iy in range(ypass-1, -1,-1):
xinit = xset[iy+1, :]
ycen = iy * np.ones(ntrace,dtype=int)
xfit,xfiterr = trace_fweight(image, xinit, ycen, invvar=invvar, radius=radius)
# Shift
xshift = np.clip(xfit-xinit, -1*maxshift, maxshift) * (xfiterr < maxerr)
# Save
xset[iy,:] = xinit + xshift
xerr[iy,:] = xfiterr * (xfiterr < maxerr) + 999.0 * (xfiterr >= maxerr)
return xset, xerr
def trace_fweight(fimage, xinit, ycen=None, invvar=None, radius=2., debug=False):
'''Python port of trace_fweight.pro from IDLUTILS
Parameters
----------
fimage: 2D ndarray
Image for tracing
xinit: ndarray
Initial guesses for x-trace
invvar: ndarray, optional
Inverse variance array for the image
radius: float, optional
        Radius for centroiding; default 2.0
'''
# Definitions for Cython
#cdef int nx,ny,ncen
# Init
nx = fimage.shape[1]
ny = fimage.shape[0]
ncen = len(xinit)
# Create xnew, xerr
xnew = xinit.astype(float)
xerr = np.zeros(ncen) + 999.
# ycen
if ycen is None:
if ncen != ny:
raise ValueError('Bad input')
ycen = np.arange(ny, dtype=int)
else:
if len(ycen) != ncen:
raise ValueError('Bad ycen input. Wrong length')
x1 = xinit - radius + 0.5
x2 = xinit + radius + 0.5
ix1 = np.floor(x1).astype(int)
ix2 = np.floor(x2).astype(int)
fullpix = int(np.maximum(np.min(ix2-ix1)-1,0))
sumw = np.zeros(ncen)
sumxw = np.zeros(ncen)
sumwt = np.zeros(ncen)
sumsx1 = np.zeros(ncen)
sumsx2 = np.zeros(ncen)
qbad = np.array([False]*ncen)
if invvar is None:
invvar = np.zeros_like(fimage) + 1.
# Compute
for ii in range(0,fullpix+3):
spot = ix1 - 1 + ii
ih = np.clip(spot,0,nx-1)
xdiff = spot - xinit
#
wt = np.clip(radius - np.abs(xdiff) + 0.5,0,1) * ((spot >= 0) & (spot < nx))
sumw = sumw + fimage[ycen,ih] * wt
sumwt = sumwt + wt
sumxw = sumxw + fimage[ycen,ih] * xdiff * wt
var_term = wt**2 / (invvar[ycen,ih] + (invvar[ycen,ih] == 0))
sumsx2 = sumsx2 + var_term
sumsx1 = sumsx1 + xdiff**2 * var_term
#qbad = qbad or (invvar[ycen,ih] <= 0)
qbad = np.any([qbad, invvar[ycen,ih] <= 0], axis=0)
if debug:
pdb.set_trace()
# Fill up
good = (sumw > 0) & (~qbad)
if np.sum(good) > 0:
delta_x = sumxw[good]/sumw[good]
xnew[good] = delta_x + xinit[good]
xerr[good] = np.sqrt(sumsx1[good] + sumsx2[good]*delta_x**2)/sumw[good]
bad = np.any([np.abs(xnew-xinit) > radius + 0.5,xinit < radius - 0.5,xinit > nx - 0.5 - radius],axis=0)
if np.sum(bad) > 0:
xnew[bad] = xinit[bad]
xerr[bad] = 999.0
# Return
return xnew, xerr
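# Illustrative example: recentroid all fibers on one CCD row, starting from
# a float ndarray `xinit` of guessed column positions:
#   ycen = 2000 * np.ones(xinit.size, dtype=int)
#   xnew, xerr = trace_fweight(image, xinit, ycen=ycen, radius=2.)
# Entries with xerr == 999. failed the centroiding and should be masked.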
def fix_ycoeff_outliers(xcoeff, ycoeff, deg=5, tolerance=2):
'''
Fix outliers in coefficients for wavelength solution, assuming a continuous function of CCD coordinates
Args:
xcoeff[nfiber, ncoeff] : 2D array of Legendre coefficients for X(wavelength)
ycoeff[nfiber, ncoeff] : 2D array of Legendre coefficients for Y(wavelength)
Options:
deg : integer degree of polynomial to fit
tolerance : replace fibers with difference of wavelength solution larger than this number of pixels after interpolation
Returns:
new_ycoeff[nfiber, ncoeff] with outliers replaced by interpolations
For each coefficient, fit a polynomial vs. fiber number with one
    pass of sigma clipping. Remaining outliers are then replaced with
the interpolated fit value.
'''
log = get_logger()
nfibers=ycoeff.shape[0]
if nfibers < 3 :
log.warning("only {:d} fibers, cannot interpolate coefs".format(nfibers))
return ycoeff
deg=min(deg,nfibers-1)
nwave=ycoeff.shape[1]+1
wave_nodes = np.linspace(-1,1,nwave)
# get traces using fit coefs
x=np.zeros((nfibers,nwave))
y=np.zeros((nfibers,nwave))
for i in range(nfibers) :
x[i] = legval(wave_nodes,xcoeff[i])
y[i] = legval(wave_nodes,ycoeff[i])
new_ycoeff=ycoeff.copy()
bad_fibers=None
while True : # loop to discard one fiber at a time
# polynomial fit as a function of x for each wave
yf=np.zeros((nfibers,nwave))
xx=2*(x - np.min(x)) / (np.max(x) - np.min(x)) - 1
for i in range(nwave) :
c=np.polyfit(xx[:,i], y[:,i], deg)
yf[:,i]=np.polyval(c, xx[:,i])
diff=np.max(np.abs(y-yf),axis=1)
for f in range(nfibers) :
log.info("fiber {:d} maxdiff= {:f}".format(f,diff[f]))
worst = np.argmax(diff)
if diff[worst] > tolerance :
log.warning("replace fiber {:d} trace by interpolation".format(worst))
leg_fit = dufits.func_fit(wave_nodes, yf[worst], 'legendre', ycoeff.shape[1]-1, xmin=-1, xmax=1)
new_ycoeff[worst] = leg_fit['coeff']
y[worst] = legval(wave_nodes,new_ycoeff[worst])
if bad_fibers is None :
bad_fibers = np.array([worst])
else :
bad_fibers=np.append(bad_fibers, worst)
bad_fibers=np.unique(bad_fibers)
continue
break
return new_ycoeff
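# Illustrative example: clean the Y(wavelength) Legendre coefficients after
# fitting every fiber (XCOEFF and YCOEFF as built in write_psf below):
#   YCOEFF = fix_ycoeff_outliers(XCOEFF, YCOEFF, tolerance=2)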
#####################################################################
#####################################################################
# Output
#####################################################################
def write_psf(outfile, xfit, fdicts, gauss, wv_solns, legendre_deg=5, without_arc=False,
XCOEFF=None, fiberflat_header=None, arc_header=None):
""" Write the output to a Base PSF format
Parameters
----------
outfile : str
Output file
xfit : ndarray
Traces
gauss : list
List of gaussian sigmas
fdicts : list
List of trace fits
wv_solns : list
List of wavelength calibrations
    legendre_deg : int
        Degree of the Legendre fits (ncoeff = legendre_deg + 1)
"""
#
# check legendre degree makes sense based on number of lines
if not without_arc:
nlines=10000
for ii,id_dict in enumerate(wv_solns):
if len(id_dict['id_pix']) > 0 :
nlines_in_fiber=(np.array(id_dict['id_pix'])[id_dict['mask']==0]).size
#print("fiber #%d nlines=%d"%(ii,nlines_in_fiber))
nlines=min(nlines,nlines_in_fiber)
if nlines < legendre_deg+2 :
legendre_deg=nlines-2
print("reducing legendre degree to %d because the min. number of emission lines found is %d"%(legendre_deg,nlines))
ny = xfit.shape[0]
nfiber = xfit.shape[1]
ncoeff=legendre_deg+1
if XCOEFF is None:
XCOEFF = np.zeros((nfiber, ncoeff))
YCOEFF = np.zeros((nfiber, ncoeff))
# Find WAVEMIN, WAVEMAX
if without_arc:
WAVEMIN = 0.
WAVEMAX = ny-1.
wv_solns = [None]*nfiber
else:
WAVEMIN = 10000000.
WAVEMAX = 0.
for id_dict in wv_solns :
if 'wave_min' in id_dict :
WAVEMIN = min(WAVEMIN,id_dict['wave_min'])
if 'wave_max' in id_dict :
WAVEMAX = max(WAVEMAX,id_dict['wave_max'])
WAVEMIN -= 1.
WAVEMAX += 1.
wv_array = np.linspace(WAVEMIN, WAVEMAX, num=ny)
# Fit Legendre to y vs. wave
for ii,id_dict in enumerate(wv_solns):
# Fit y vs. wave
if without_arc:
yleg_fit, mask = dufits.iter_fit(wv_array, np.arange(ny), 'legendre', ncoeff-1, xmin=WAVEMIN, xmax=WAVEMAX, niter=1)
else:
if len(id_dict['id_wave']) > 0 :
yleg_fit, mask = dufits.iter_fit(np.array(id_dict['id_wave'])[id_dict['mask']==0], np.array(id_dict['id_pix'])[id_dict['mask']==0], 'legendre', ncoeff-1, xmin=WAVEMIN, xmax=WAVEMAX, sig_rej=100000.)
else :
yleg_fit = None
mask = None
if yleg_fit is None :
continue
YCOEFF[ii, :] = yleg_fit['coeff']
# Fit x vs. wave
yval = dufits.func_val(wv_array, yleg_fit)
if fdicts is None:
if XCOEFF is None:
raise IOError("Need to set either fdicts or XCOEFF!")
else:
xtrc = dufits.func_val(yval, fdicts[ii])
xleg_fit,mask = dufits.iter_fit(wv_array, xtrc, 'legendre', ncoeff-1, xmin=WAVEMIN, xmax=WAVEMAX, niter=5, sig_rej=100000.)
XCOEFF[ii, :] = xleg_fit['coeff']
# Fix outliers assuming that coefficients vary smoothly vs. CCD coordinates
YCOEFF = fix_ycoeff_outliers(XCOEFF,YCOEFF,tolerance=2)
# Write the FITS file
prihdu = fits.PrimaryHDU(XCOEFF)
prihdu.header['WAVEMIN'] = WAVEMIN
prihdu.header['WAVEMAX'] = WAVEMAX
prihdu.header['EXTNAME'] = 'XTRACE'
prihdu.header['PSFTYPE'] = 'bootcalib'
from lvmutil.depend import add_dependencies
add_dependencies(prihdu.header)
# Add informations for headers
if arc_header is not None :
if "NIGHT" in arc_header:
prihdu.header["ARCNIGHT"] = arc_header["NIGHT"]
if "EXPID" in arc_header:
prihdu.header["ARCEXPID"] = arc_header["EXPID"]
if "CAMERA" in arc_header:
prihdu.header["CAMERA"] = arc_header["CAMERA"]
prihdu.header['NPIX_X'] = arc_header['NAXIS1']
prihdu.header['NPIX_Y'] = arc_header['NAXIS2']
if fiberflat_header is not None :
if 'NPIX_X' not in prihdu.header:
prihdu.header['NPIX_X'] = fiberflat_header['NAXIS1']
prihdu.header['NPIX_Y'] = fiberflat_header['NAXIS2']
if "NIGHT" in fiberflat_header:
prihdu.header["FLANIGHT"] = fiberflat_header["NIGHT"]
if "EXPID" in fiberflat_header:
prihdu.header["FLAEXPID"] = fiberflat_header["EXPID"]
yhdu = fits.ImageHDU(YCOEFF, name='YTRACE')
# also save wavemin wavemax in yhdu
yhdu.header['WAVEMIN'] = WAVEMIN
yhdu.header['WAVEMAX'] = WAVEMAX
gausshdu = fits.ImageHDU(np.array(gauss), name='XSIGMA')
hdulist = fits.HDUList([prihdu, yhdu, gausshdu])
    hdulist.writeto(outfile, overwrite=True)
def write_line_list(filename,all_wv_soln,llist) :
wave = np.array([])
for id_dict in all_wv_soln :
wave=np.append(wave,id_dict["id_wave"])
wave=np.unique(wave)
ofile=open(filename,"w")
ofile.write("# from bootcalib\n")
ofile.write("Ion wave score RelInt\n")
for w in wave :
ii=np.argmin(np.abs(llist["wave"]-w))
print(w,llist["wave"][ii],llist["Ion"][ii])
ofile.write("{:s} {:f} 1 1\n".format(llist["Ion"][ii],w))
ofile.close()
#####################################################################
#####################################################################
# Utilities
#####################################################################
def script_bootcalib(arc_idx, flat_idx, cameras=None, channels=None, nproc=10):
""" Runs desi_bootcalib on a series of pix files
    Example:
script_bootcalib([0,1,2,3,4,5,6,7,8,9], [10,11,12,13,14])
"""
from subprocess import Popen
#
if cameras is None:
cameras = ['0','1','2','3','4','5','6','7','8','9']
if channels is None:
channels = ['b','r','z']
#channels = ['b']#,'r','z']
nchannels = len(channels)
ncameras = len(cameras)
#
narc = len(arc_idx)
nflat = len(flat_idx)
ntrial = narc*nflat*ncameras*nchannels
# Loop on the systems
nrun = -1
#nrun = 123
while(nrun < ntrial):
proc = []
ofiles = []
for ss in range(nproc):
nrun += 1
iarc = nrun % narc
jflat = (nrun//narc) % nflat
kcamera = (nrun//(narc*nflat)) % ncameras
lchannel = nrun // (narc*nflat*ncameras)
#pdb.set_trace()
if nrun == ntrial:
break
# Names
afile = str('pix-{:s}{:s}-{:08d}.fits'.format(channels[lchannel], cameras[kcamera], arc_idx[iarc]))
ffile = str('pix-{:s}{:s}-{:08d}.fits'.format(channels[lchannel], cameras[kcamera], flat_idx[jflat]))
ofile = str('boot_psf-{:s}{:s}-{:d}{:d}.fits'.format(channels[lchannel], cameras[kcamera],
arc_idx[iarc], flat_idx[jflat]))
qfile = str('qa_boot-{:s}{:s}-{:d}{:d}.pdf'.format(channels[lchannel], cameras[kcamera],
arc_idx[iarc], flat_idx[jflat]))
lfile = str('boot-{:s}{:s}-{:d}{:d}.log'.format(channels[lchannel], cameras[kcamera],
arc_idx[iarc], flat_idx[jflat]))
## Run
script = [str('desi_bootcalib.py'), str('--fiberflat={:s}'.format(ffile)),
str('--arcfile={:s}'.format(afile)),
str('--outfile={:s}'.format(ofile)),
str('--qafile={:s}'.format(qfile))]#,
#str('>'),
#str('{:s}'.format(lfile))]
f = open(lfile, "w")
proc.append(Popen(script, stdout=f))
ofiles.append(f)
exit_codes = [p.wait() for p in proc]
for ofile in ofiles:
ofile.close()
#####################################################################
#####################################################################
#####################################################################
# QA
#####################################################################
def qa_fiber_peaks(xpk, cut, pp=None, figsz=None, nper=100):
""" Generate a QA plot for the fiber peaks
Args:
xpk: x positions on the CCD of the fiber peaks at a ypos
cut: Spatial cut through the detector
pp: PDF file pointer
figsz: figure size, optional
nper: number of fibers per row in the plot, optional
"""
# Init
if figsz is None:
figsz = glbl_figsz
nfiber = xpk.size
nrow = (nfiber // nper) + ((nfiber % nper) > 0)
xplt = np.arange(cut.size)
# Plots
gs = gridspec.GridSpec(nrow, 1)
plt.figure(figsize=figsz)
# Loop
for ii in range(nrow):
ax = plt.subplot(gs[ii])
i0 = ii*nper
i1 = i0 + nper
ax.plot(xplt,cut, 'k-')
ax.plot(xpk, cut[xpk],'go')
xmin = np.min(xpk[i0:i1])-10.
xmax = np.max(xpk[i0:i1])+10.
ax.set_xlim(xmin,xmax)
# Save and close
if pp is not None:
pp.savefig(bbox_inches='tight')
else:
plt.show()
plt.close()
def qa_fiber_Dx(xfit, fdicts, pp=None, figsz=None):
""" Show the spread in the trace per fiber
Used to diagnose the traces
Args:
xfit: traces
fdicts: dict of the traces
pp: PDF file pointer
figsz: figure size, optional
"""
#
if figsz is None:
figsz = glbl_figsz
# Calculate Dx
nfiber = xfit.shape[1]
Dx = []
for ii in range(nfiber):
Dx.append(np.max(xfit[:, ii])-np.min(xfit[:, ii]))
# Plot
plt.figure(figsize=figsz)
plt.scatter(np.arange(nfiber), np.array(Dx))
# Label
plt.xlabel('Fiber', fontsize=17.)
plt.ylabel(r'$\Delta x$ (pixels)', fontsize=17.)
# Save and close
if pp is None:
plt.show()
else:
pp.savefig(bbox_inches='tight')
plt.close()
def qa_fiber_gauss(gauss, pp=None, figsz=None):
""" Show the Gaussian (sigma) fits to each fiber
Args:
gauss: Gaussian of each fiber
pp: PDF file pointer
figsz: figure size, optional
"""
#
if figsz is None:
figsz = glbl_figsz
# Calculate Dx
nfiber = gauss.size
# Plot
plt.figure(figsize=figsz)
plt.scatter(np.arange(nfiber), gauss)
# Label
plt.xlabel('Fiber', fontsize=17.)
plt.ylabel('Gaussian sigma (pixels)', fontsize=17.)
# Save and close
if pp is None:
plt.show()
else:
pp.savefig(bbox_inches='tight')
plt.close()
def qa_arc_spec(all_spec, all_soln, pp, figsz=None):
""" Generate QA plots of the arc spectra with IDs
Args:
all_spec: Arc 1D fiber spectra
all_soln: Wavelength solutions
pp: PDF file pointer
figsz: figure size, optional
"""
# Init
if figsz is None:
figsz = glbl_figsz
nfiber = len(all_soln)
npix = all_spec.shape[0]
#
nrow = 2
ncol = 3
# Plots
gs = gridspec.GridSpec(nrow, ncol)
plt.figure(figsize=figsz)
# Loop
for ii in range(nrow*ncol):
ax = plt.subplot(gs[ii])
idx = ii * (nfiber//(nrow*ncol))
yspec = np.log10(np.maximum(all_spec[:,idx],1))
ax.plot(np.arange(npix), yspec, 'k-')
ax.set_xlabel('Pixel')
ax.set_ylabel('log Flux')
# ID
id_dict = all_soln[idx]
for jj,xpixpk in enumerate(id_dict['id_pix']):
ax.text(xpixpk, yspec[int(np.round(xpixpk))], '{:g}'.format(id_dict['id_wave'][jj]), ha='center',color='red', rotation=90.)
# Save and close
pp.savefig(bbox_inches='tight')
plt.close()
def qa_fiber_arcrms(all_soln, pp, figsz=None):
""" Show the RMS of the wavelength solutions vs. fiber
Args:
all_soln: Wavelength solutions
pp: PDF file pointer
figsz: figure size, optional
"""
#
if figsz is None:
figsz = glbl_figsz
# Calculate Dx
nfiber = len(all_soln)
rms = [id_dict['rms'] for id_dict in all_soln]
# Plot
plt.figure(figsize=figsz)
plt.scatter(np.arange(nfiber), np.array(rms))
# Label
plt.xlabel('Fiber', fontsize=17.)
plt.ylabel('RMS (pixels)', fontsize=17.)
# Save and close
pp.savefig(bbox_inches='tight')
plt.close()
def qa_fiber_dlamb(all_spec, all_soln, pp, figsz=None):
""" Show the Dlamb of the wavelength solutions vs. fiber
Args:
all_soln: Wavelength solutions
pp: PDF file pointer
figsz: figure size, optional
"""
#
if figsz is None:
figsz = glbl_figsz
# Calculate Dx
nfiber = len(all_soln)
npix = all_spec.shape[0]
xval = np.arange(npix)
dlamb = []
for ii in range(nfiber):
idict = all_soln[ii]
wave = dufits.func_val(xval,idict['final_fit_pix'])
dlamb.append(np.median(np.abs(wave-np.roll(wave,1))))
# Plot
plt.figure(figsize=figsz)
plt.scatter(np.arange(nfiber), np.array(dlamb))
# Label
plt.xlabel('Fiber', fontsize=17.)
plt.ylabel(r'$\Delta \lambda$ (Ang)', fontsize=17.)
# Save and close
pp.savefig(bbox_inches='tight')
plt.close()
def qa_fiber_trace(flat, xtrc, outfil=None, Nfiber=25, isclmin=0.5):
''' Generate a QA plot for the fiber traces
Parameters
----------
flat: ndarray
image
xtrc: ndarray
Trace array
isclmin: float, optional [0.5]
Fraction of 90 percentile flux to scale image by
outfil: str, optional
Output file
'''
ticks_font = matplotlib.font_manager.FontProperties(family='times new roman',
style='normal', size=16, weight='normal', stretch='normal')
plt.rcParams['font.family']= 'times new roman'
cmm = cm.Greys_r
# Outfil
if outfil is None:
outfil = 'fiber_trace_qa.pdf'
ntrc = xtrc.shape[1]
ycen = np.arange(flat.shape[0])
# Plot
pp = PdfPages(outfil)
plt.clf()
fig = plt.figure(figsize=(8, 5.0),dpi=1200)
#fig.set_size_inches(10.0,6.5)
Nbundle = ntrc // Nfiber + (ntrc%Nfiber > 0)
for qq in range(Nbundle):
ax = plt.gca()
for label in ax.get_yticklabels() :
label.set_fontproperties(ticks_font)
for label in ax.get_xticklabels() :
label.set_fontproperties(ticks_font)
# Cut image
i0 = qq*Nfiber
i1 = np.minimum((qq+1)*Nfiber,ntrc)
x0 = np.maximum(int(np.min(xtrc[:,i0:i1]))-3,0)
x1 = np.minimum(int(np.max(xtrc[:,i0:i1]))+3,flat.shape[1])
sub_flat = flat[:,x0:x1].T
# Scale
srt = np.sort(sub_flat.flatten())
sclmax = srt[int(sub_flat.size*0.9)]
sclmin = isclmin * sclmax
# Plot
mplt = plt.imshow(sub_flat,origin='lower', cmap=cmm,
extent=(0., sub_flat.shape[1]-1, x0,x1-1), aspect='auto')
#extent=(0., sub_flat.shape[1]-1, x0,x1))
#mplt.set_clim(vmin=sclmin, vmax=sclmax)
# Axes
#plt.xlim(0., sub_flat.shape[1]-1)
plt.xlim(0., sub_flat.shape[1]-1)
plt.ylim(x0,x1)
# Traces
for ii in range(i0,i1):
# Left
plt.plot(ycen, xtrc[:,ii], 'r-',alpha=0.7, linewidth=0.5)
# Label
#iy = int(frame.shape[0]/2.)
#plt.text(ltrace[iy,ii], ycen[iy], '{:d}'.format(ii+1), color='red', ha='center')
#plt.text(rtrace[iy,ii], ycen[iy], '{:d}'.format(ii+1), color='green', ha='center')
pp.savefig(bbox_inches='tight')
plt.close()
# Finish
print('Writing {:s} QA for fiber trace'.format(outfil))
pp.close()
| [
"[email protected]"
] | |
e5f75c282a91efceab67452309899a392be1f731 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/303/usersdata/304/68337/submittedfiles/testes.py | ef1b112bb914f1155a9ca8d3cbd194b2db577544 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,807 | py | # -*- coding: utf-8 -*-
#START HERE BELOW
########CLASS OF 06/09/2017########
"""
idade = int(input('Enter your age: '))
print('The age of the individual is', idade, '!')
if idade < 18:
    print('You are under 18 years old')
elif idade == 18:
    print('You are 18 years old')
else:
    print('You are over 18 years old')
altura = float(input('Enter your height: '))
if altura <= 1.50:
    print('You are a smurf!')
elif 1.50 < altura < 1.70:
    print('You are normal')
else:
    print('You are an avatar!')
razao = idade/altura
print('Age/height ratio = %.2f' % (razao))
"""
#QUESTION 01:
"""
print(50*'-')
print('QUESTION 01')
print(50*'-')
n1 = float(input('Enter the grade for the first bimester: '))
n2 = float(input('Enter the grade for the second bimester: '))
n3 = float(input('Enter the grade for the third bimester: '))
n4 = float(input('Enter the grade for the fourth bimester: '))
media = (n1+n2+n3+n4)/4
print('The student\'s average was %.2f' % (media))
"""
#QUESTION 02:
"""
print(50*'-')
print('QUESTION 02')
print(50*'-')
a = float(input('Enter meters for the conversion: '))
b = a*100
print(a, 'meters converted to centimeters = %.0f' % (b))
"""
#QUESTION 03:
"""
print(50*'-')
print('QUESTION 03')
print(50*'-')
altura = float(input('Enter your height: '))
peso = float(input('Enter your weight: '))
pi = (72.7*altura) - 58
print('Your current weight is %.2f kilos and the ideal weight for your height is %.2f kilos' % (peso, pi))
"""
#QUESTION 04:
"""
print(50*'-')
print('QUESTION 04')
print(50*'-')
raio = float(input('Enter the radius of the circle in centimeters: '))
area = 3.14159*raio**2
print('The area of the circle is %.2f square centimeters' % (area))
"""
x = 1
x + 2
x = x + 1
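# The bare expression `x + 2` above is evaluated and discarded, so x is 2 here.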
print(x) | [
"[email protected]"
] | |
021870ba7e83cd49d113645f14d918b18cc9157b | a9063fd669162d4ce0e1d6cd2e35974274851547 | /test/test_inline_response20072_plan_base.py | a7ee91720c79393327a18f8d2b9bb2083c8bfc00 | [] | no_license | rootalley/py-zoom-api | 9d29a8c750e110f7bd9b65ff7301af27e8518a3d | bfebf3aa7b714dcac78be7c0affb9050bbce8641 | refs/heads/master | 2022-11-07T14:09:59.134600 | 2020-06-20T18:13:50 | 2020-06-20T18:13:50 | 273,760,906 | 1 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,508 | py | # coding: utf-8
"""
Zoom API
The Zoom API allows developers to safely and securely access information from Zoom. You can use this API to build private services or public applications on the [Zoom App Marketplace](http://marketplace.zoom.us). To learn how to get your credentials and create private/public applications, read our [Authorization Guide](https://marketplace.zoom.us/docs/guides/authorization/credentials). All endpoints are available via `https` and are located at `api.zoom.us/v2/`. For instance you can list all users on an account via `https://api.zoom.us/v2/users/`. # noqa: E501
OpenAPI spec version: 2.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.inline_response20072_plan_base import InlineResponse20072PlanBase  # noqa: E501
from swagger_client.rest import ApiException
class TestInlineResponse20072PlanBase(unittest.TestCase):
"""InlineResponse20072PlanBase unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testInlineResponse20072PlanBase(self):
"""Test InlineResponse20072PlanBase"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.inline_response20072_plan_base.InlineResponse20072PlanBase() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
85c0f14b868a22e23e5e3fd51fe3cc1e84759e72 | 5d3a6f5d1731d32479c3cd65748c58eefa614b07 | /tests/test_models.py | 8bef9f492bb207d37d8955f5b49e2f5693b413af | [] | permissive | timgates42/django-rest-localflavor | fde435f0f07775ccf48187f68e7f29ad9d2a793f | 844f86dbed5be126706b8d65678ed7e1fc9cfed0 | refs/heads/master | 2023-03-17T08:04:59.020065 | 2020-12-30T12:25:36 | 2020-12-30T12:25:36 | 246,013,020 | 0 | 0 | BSD-3-Clause | 2020-03-09T11:04:03 | 2020-03-09T11:04:02 | null | UTF-8 | Python | false | false | 377 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_django-rest-localflavor
----------------------------
Tests for `django-rest-localflavor` models module.
"""
from django.test import TestCase
from rest_localflavor import models
class TestRest_localflavor(TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass
| [
"[email protected]"
] | |
1644cc8eed28893ea0314b6a735bb23190e45faa | 8a38bb4e40a78afc69eed06c3d88d45e5995a336 | /jax/experimental/gda_serialization/serialization.py | 438f108293d7b1dc0bc9a3f97dc7ce9aad711ed9 | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | MichaelMarien/jax | 979a808ab6ea1698d2f03b1e9aeb826f59955543 | bf3c658114703e955f0b06642c53c6b64c5b2df3 | refs/heads/master | 2023-02-13T00:45:04.056207 | 2022-02-15T19:25:26 | 2022-02-15T19:25:26 | 216,413,853 | 0 | 0 | Apache-2.0 | 2023-02-06T07:02:48 | 2019-10-20T19:13:38 | Python | UTF-8 | Python | false | false | 4,185 | py | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GlobalDeviceArray serialization and deserialization."""
import asyncio
import re
from typing import Callable
import jax
from jax.experimental import global_device_array as gda
from jax.experimental.maps import Mesh
import jax.numpy as jnp
import numpy as np
import tensorstore as ts
async def create_async_gda_from_callback(
global_shape: gda.Shape,
global_mesh: Mesh,
mesh_axes: gda.MeshAxes,
data_callback: Callable[[gda.Index], asyncio.Future],
):
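  """Builds a GlobalDeviceArray by resolving one shard future per local device:
  `data_callback` maps the index a device owns to an awaitable yielding that
  shard's data; shards are gathered concurrently and device_put to devices.
  """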
global_idx_rid = gda.get_shard_indices_replica_ids(
global_shape, global_mesh, mesh_axes)
local_devices = global_mesh.local_devices
future_arrays = [data_callback(global_idx_rid[d][0])
for d in local_devices]
  # Pause here and come back to `create_async_gda_from_callback()` when
  # future_arrays are ready; device_put cannot happen on unresolved futures.
local_arrays = await asyncio.gather(*future_arrays)
dbs = [jax.device_put(array, device)
for array, device in zip(local_arrays, local_devices)]
return gda.GlobalDeviceArray(global_shape, global_mesh, mesh_axes, dbs,
gda._GdaFastPathArgs(global_idx_rid, local_devices))
def _get_metadata(gda):
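  """Derives the zarr array metadata (compressor, chunks, dtype) from a GDA."""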
if gda.dtype == jnp.bfloat16:
# Tensorstore uses 'bfloat16', not '<V2'.
dtype = 'bfloat16'
else:
dtype = np.dtype(gda.dtype).str
return {
'compressor': {
'id': 'gzip'
},
'shape': gda.shape,
'chunks': np.array(np.maximum(1, gda.local_data(0).shape)),
'dtype': dtype,
}
def get_tensorstore_spec(ckpt_path: str):
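  """Builds a zarr TensorStore spec backed by GCS (gs:// paths) or local files."""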
spec = {'driver': 'zarr', 'kvstore': {}}
if ckpt_path.startswith('gs://'):
m = re.fullmatch('^gs://([^/]*)/(.*)$', ckpt_path, re.DOTALL)
if m is None:
raise ValueError('The ckpt_path should contain the bucket name and the '
f'file path inside the bucket. Got: {ckpt_path}')
gcs_bucket = m.group(1)
path_without_bucket = m.group(2)
spec['kvstore'] = {'driver': 'gcs', 'bucket': gcs_bucket,
'path': path_without_bucket}
else:
spec['kvstore'] = {'driver': 'file', 'path': ckpt_path}
return spec
async def async_serialize(gda_inp: gda.GlobalDeviceArray, tensorstore_spec):
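  """Writes the GDA's local shards (replica 0 only, to avoid duplicate writes) to TensorStore."""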
if not tensorstore_spec.get('metadata'):
tensorstore_spec['metadata'] = _get_metadata(gda_inp)
t = await ts.open(
ts.Spec(tensorstore_spec),
create=True,
open=True,
context=ts.Context({'file_io_concurrency': {
'limit': 128
}}))
async def _write_array(shard):
if shard.replica_id == 0:
await t[shard.index].write(shard.data)
future_write_state = jax.tree_util.tree_map(_write_array,
tuple(gda_inp.local_shards))
return await asyncio.gather(*future_write_state)
def run_serialization(gdas, tensorstore_specs):
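  """Blocking entry point: writes a pytree of GDAs to matching TensorStore specs."""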
async def _run_serializer():
future_writer = jax.tree_map(async_serialize, gdas, tensorstore_specs)
return await asyncio.gather(*future_writer)
asyncio.run(_run_serializer())
async def async_deserialize(mesh, mesh_axes, tensorstore_spec):
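  """Reads a TensorStore checkpoint back into a GDA laid out on `mesh`."""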
t = ts.open(ts.Spec(tensorstore_spec), open=True).result()
async def cb(index):
return await t[index].read()
return await create_async_gda_from_callback(t.shape, mesh, mesh_axes, cb)
def run_deserialization(global_meshes, mesh_axes, tensorstore_specs):
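  """Blocking entry point: reads a pytree of GDAs from matching TensorStore specs."""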
async def _run_deserializer():
future_gdas = jax.tree_map(async_deserialize, global_meshes, mesh_axes,
tensorstore_specs)
return await asyncio.gather(*future_gdas)
return asyncio.run(_run_deserializer())
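# Usage sketch (illustrative paths; `gda1`, `global_mesh` and `mesh_axes` are
# assumed to exist):
#   specs = [get_tensorstore_spec('/tmp/ckpt/arr_0')]
#   run_serialization([gda1], specs)
#   restored, = run_deserialization([global_mesh], [mesh_axes], specs)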
| [
"[email protected]"
] | |
1fccb8fb07eda6f838b5b01854800df046dde04d | 01548099ec20976d31cca7a720102c11c56fc9be | /scripts/handle_mysql.py | e9b295986da2e42b703e109bceae62db0fa7fd71 | [] | no_license | OMEN001/Lemon_Api_Test | 6a27a9a9ccf28623006b465a107d53b17ad30404 | 373c9f1a1f1f3160bbe8edcc4b5740f9779947ae | refs/heads/master | 2023-02-25T22:31:39.824908 | 2021-01-24T14:22:52 | 2021-01-24T14:22:52 | 329,324,658 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,651 | py | # -*- coding: utf-8 -*-
# @Time : BaLiang
# @Author : 86187
import pymysql
import random
from scripts.handle_yaml import do_yaml
class HandleMysql:
def __init__(self):
        # Create the connection object
        self.conn = pymysql.connect(host=do_yaml.read('mysql', 'host'),  # MySQL server IP or domain name
                                    user=do_yaml.read('mysql', 'user'),  # username
                                    password=do_yaml.read('mysql', 'password'),
                                    db=do_yaml.read('mysql', 'db'),  # name of the database to connect to
                                    port=do_yaml.read('mysql', 'port'),  # database port, 3306 by default (int)
                                    charset='utf8',  # database encoding is utf8 (must not be written 'utf-8')
                                    # By default results come back as a tuple or a list of nested tuples.
                                    # Setting cursorclass to DictCursor returns a dict or a list of nested dicts instead.
                                    cursorclass=pymysql.cursors.DictCursor)
        # Create the cursor object
        self.cursor = self.conn.cursor()
    # Execute a SQL statement
def run(self,sql,args=None,is_more=False):
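        """Execute a SQL statement.
        :param sql: SQL string, optionally with %s placeholders
        :param args: parameters bound to the placeholders
        :param is_more: True returns fetchall(), False returns fetchone()
        """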
        # Execute the SQL through the cursor object
        self.cursor.execute(sql, args)
        # Commit through the connection object
        self.conn.commit()
if is_more:
return self.cursor.fetchall()
else:
return self.cursor.fetchone()
@staticmethod
def create_mobile():
        # def create_mobile(self): made static because `self` is not used here
        # (instance methods of the class can still call static methods)
        """Generate an 11-digit mobile number ('183' plus 8 random distinct digits)."""
        # return "183" + "".join(random.randint(10000000,99999999))  # broken: join() needs an iterable of strings
        return "183" + "".join(random.sample("0123456789", 8))
def is_existed_mobile(self, mobile):
"""判断收集好是否被注册"""
sql = do_yaml.read("mysql","sql")
if self.run(sql, args=[mobile]):
return True
else:
return False
    def create_not_existed_mobile(self):
        """Generate a mobile number that is not yet in the database."""
        while True:
            one_mobile = self.create_mobile()
            if not self.is_existed_mobile(one_mobile):
                break
        return one_mobile
def close(self):
        # Close the cursor object
        self.cursor.close()
        # Close the connection object
        self.conn.close()
if __name__ == '__main__':
sql = "select max(id) from member;"
do_mysql = HandleMysql()
max_id = do_mysql.run(sql)
print(max_id["max(id)"] + 1)
do_mysql.close()
| [
"[email protected]"
] | |
5d8433435bfdcf5f87e885ef711187b442bf55db | ecd9cbfa2c30e1bc39cf442e3302c4cb3cf1ea03 | /bin/calculate_phylocsf.py | 767ee44799121f64903399942d84fbb0d1288a98 | [] | no_license | lixin856/proteogenomics-analysis-workflow | 087c84971ab8386ef06d031244da37fd895e82f5 | 0b0a02dfb93cc5806ef9c951d1674d82f454ec29 | refs/heads/master | 2023-05-01T17:13:44.293272 | 2021-05-11T07:23:14 | 2021-05-11T07:23:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,579 | py | #!/usr/bin/env python3
'''
the script is modified from Mikael Hussius @ SciLifeLab, https://github.com/hussius/gff-phylocsf-human
download the following bigwig files first
# wget https://data.broadinstitute.org/compbio1/PhyloCSFtracks/hg19/latest/PhyloCSF+0.bw
# wget https://data.broadinstitute.org/compbio1/PhyloCSFtracks/hg19/latest/PhyloCSF+1.bw
# wget https://data.broadinstitute.org/compbio1/PhyloCSFtracks/hg19/latest/PhyloCSF+2.bw
# wget https://data.broadinstitute.org/compbio1/PhyloCSFtracks/hg19/latest/PhyloCSF-0.bw
# wget https://data.broadinstitute.org/compbio1/PhyloCSFtracks/hg19/latest/PhyloCSF-1.bw
# wget https://data.broadinstitute.org/compbio1/PhyloCSFtracks/hg19/latest/PhyloCSF-2.bw
'''
import sys
import os
import pyBigWig as pw
import numpy as np
def predict_coding(vec):
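    """Label a peptide 'CODING' if any reading-frame score is positive, else 'OTHER'."""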
coding = "OTHER"
for v in vec:
if not v: continue
if v > 0: coding = "CODING"
    return coding
if len(sys.argv)<4:
sys.exit("USAGE: python " + sys.argv[0] + "<GFF file> <BigWig file path> <output file>")
infile = sys.argv[1]
bw_file_path = sys.argv[2]
outfile = sys.argv[3]
regs = []
chrom={}
starts={}
ends={}
peptide={}
for line in open(infile):
if not line.startswith("chr"):
continue
fields = line.strip().split()
(chr, start, end, pept) = (fields[0], fields[3], fields[4], fields[8])
if not pept.startswith("Parent="): continue
name = chr+":"+start+"-"+end
chrom[name]=chr
starts[name]=int(start)
ends[name]=int(end)
peptide[name]=pept.split("=")[1]
regs.append(name)
scores = {}
rpathbase = os.path.join(bw_file_path,"PhyloCSF")
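# Score every region against each reading-frame track that is present on
# disk; frames without a bigwig file are skipped with a warning.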
for rf in ["+0","+1","+2","+3","-0","-1","-2","-3"]:
rpath = rpathbase + rf + ".bw"
if os.path.isfile(rpath):
sys.stderr.write("Searching PhyloCSF reading frame " + rf + "\n")
bw = pw.open(rpath)
frame_score = {}
count = 0
for r in regs:
count += 1
            if count % 50 == 0:
                sys.stderr.write('\tProcessed ' + str(count) + " peptides out of " + str(len(regs)) + "\n")
            sys.stderr.flush()
            score = None  # default when bw.stats fails for this region
            try:
                score = bw.stats(chrom[r], starts[r], ends[r])[0]
            except RuntimeError:
                pass
            frame_score[r] = score
scores[rf] = frame_score
bw.close()
else:
sys.stderr.write("%s doesn't exist \n" % rpath)
output = open(outfile,"w")
output.write("\t".join(["Bare peptide","PhyloCSF+0.score","PhyloCSF+1.score","PhyloCSF+2.score","PhyloCSF-0.score","PhyloCSF-1.score","PhyloCSF-2.score","PhyloCSF_prediction"])+"\n")
pep_scores={}
for r in regs:
scoreList = [scores["+0"][r], scores["+1"][r], scores["+2"][r], scores["-0"][r], scores["-1"][r], scores["-2"][r]]
seq = peptide[r]
if seq not in pep_scores:
pep_scores[seq]=scoreList
else: # this is to consider splice junction peptides which have two regions separated in gff file, we take mean phylocsf score of two regions
for i in range(0,len(scoreList)):
value = scoreList[i]
if value is None and pep_scores[seq][i] is None:
continue
elif None in [value, pep_scores[seq][i]]:
pep_scores[seq][i] = value if value else pep_scores[seq][i]
else:
pep_scores[seq][i] = (pep_scores[seq][i] + value)/2
for seq in pep_scores:
scoreList = pep_scores[seq]
row = [seq]+['NA' if x is None else str(x) for x in scoreList] + [predict_coding(scoreList)]
output.write('\t'.join(row) + '\n')
| [
"[email protected]"
] | |
490c3ff06f2d1f0ec763b0b33cbe461f3ce4c015 | c5c95aee3c04ab89f1aa3505f45768d15994be53 | /myScript.spec | bac2683370cf6c59b42d84e6187d2d5590501df2 | [] | no_license | snagavamsi123/Banking-Bot | 16f55a9cad523ca63fb1bf47a1a802236adcd164 | 9745e51a2c3c8a2cf614d3052a3577fce40a74c0 | refs/heads/master | 2023-03-27T18:54:35.585541 | 2021-04-03T11:31:49 | 2021-04-03T11:31:49 | 354,273,629 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 920 | spec | # -*- mode: python ; coding: utf-8 -*-
block_cipher = None
a = Analysis(['myScript.py'],
pathex=['C:\\Projects @tp\\Banking Bot\\BankingBot (1)\\BankingBot'],
binaries=[],
datas=[],
hiddenimports=['pandas._libs.tslibs.timedeltas'],
hookspath=[],
runtime_hooks=[],
excludes=[],
win_no_prefer_redirects=False,
win_private_assemblies=False,
cipher=block_cipher,
noarchive=False)
pyz = PYZ(a.pure, a.zipped_data,
cipher=block_cipher)
exe = EXE(pyz,
a.scripts,
a.binaries,
a.zipfiles,
a.datas,
[],
name='myScript',
debug=False,
bootloader_ignore_signals=False,
strip=False,
upx=True,
upx_exclude=[],
runtime_tmpdir=None,
console=True )
| [
"[email protected]"
] | |
f9610abf23be1a086309f79e6e949e757fd77148 | 487ce91881032c1de16e35ed8bc187d6034205f7 | /codes/CodeJamCrawler/16_0_2/Remi05/RevengeOfPancakes.py | fd169dd8bcbd41b26d7a608b5f9048070362f40d | [] | no_license | DaHuO/Supergraph | 9cd26d8c5a081803015d93cf5f2674009e92ef7e | c88059dc66297af577ad2b8afa4e0ac0ad622915 | refs/heads/master | 2021-06-14T16:07:52.405091 | 2016-08-21T13:39:13 | 2016-08-21T13:39:13 | 49,829,508 | 2 | 0 | null | 2021-03-19T21:55:46 | 2016-01-17T18:23:00 | Python | UTF-8 | Python | false | false | 824 | py | import sys
import itertools
def formatOutput(n, result):
return 'Case #' + str(n) + ': ' + result + '\n'
file = open(sys.argv[1])
output = open('output.txt', 'w')
nTests = int(file.readline())
HAPPY = '+'
SAD = '-'
testNb = 1
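# Greedy prefix-flip strategy: while any pancake is sad-side up, flip the
# leading uniform run (turn a happy prefix sad so it merges with the sad run
# below, then turn the sad prefix happy), counting one flip per operation.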
for line in itertools.islice(file, 0, nTests+1):
stack = line.replace('\n', '')
nFlips = 0
while SAD in stack:
sPos = stack.index(SAD)
if sPos != 0:
stack = (sPos)*SAD + stack[sPos+1:]
nFlips += 1
if HAPPY in stack:
hPos = stack.index(HAPPY)
stack = (hPos)*HAPPY + stack[hPos+1:]
nFlips += 1
else:
stack = stack.replace(SAD, HAPPY)
nFlips += 1
output.write(formatOutput(testNb, str(nFlips)))
testNb += 1
| [
"[[email protected]]"
] | |
494970626e046ba086e9f9147e5adae928f73dd6 | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_2464487_1/Python/Andrew510/prob1.py | c9fc74984a7c57248747efda818c0b13b2a96b5c | [] | no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 455 | py | #!/usr/bin/python
import math
import sys
infile = sys.stdin
T = int(infile.readline())
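# Each case: find the largest integer sol with 2*sol^2 + (2*r - 1)*sol <= t.
# Start from the quadratic's positive root, then step down to undo any
# floating-point overshoot.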
for n in range(T):
r, t = map(int, infile.readline().split())
b = 2*r - 1
xx = int(math.sqrt(b*b + 8*t))
sol = (-b + xx) / 4
#XXX
need = 2*sol*sol + sol*b
#print need, t
while need > t:
sol -= 1
need = 2*sol*sol + sol*b
if sol < 0:
sol = 0
print 'Case #%d: %d' % (n+1, sol)
| [
"[email protected]"
] | |
d103f044e4b88a589318d2a3495451a5c29d7e4e | b19f1a0ed3b26f0b5cbc2a0b7db2141edc955901 | /auto_test/Case_rbm/ftp_check_user/message.py | b6bd2eb9ef1d8977d9c1f2fee0b164c6ac7656c4 | [] | no_license | wangqian0818/auto_test_platform | 5a1f04cbf6007e6ff3dbb74e838981de53491526 | 64e32099ac2d79fb70d3727b085465aac0e49d3f | refs/heads/master | 2023-04-26T23:39:04.232001 | 2021-05-22T12:23:02 | 2021-05-22T12:23:02 | 368,116,381 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,098 | py | import time
from common import baseinfo
from ftp_check_user import index
datatime = time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))
proxy_ip = baseinfo.gwServerIp
ftp_ip = baseinfo.ftp_ip
username = index.username
case2_allow_user = index.case2_allow_user
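# Control-plane messages for the FTP user-check cases: register/remove the
# ftp proxy agent and push the allowed-user check rules.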
addftp = {
'AddAgent':{
"MethodName":"AddAgent",
"MessageTime":datatime,
"Sender":"Centre0",
"Content":[{
"InProtocol":"ftp",
"Type":2,
"InPort":8887,
"domain":"all",
"SyncId":87,
"OutAddr":[{"OutPort":21,"OutIp":ftp_ip}],
"InIp":proxy_ip
}]
}}
delftp = {
'DelAgent':{
"MethodName":"DelAgent",
"MessageTime":datatime,
"Sender":"Centre0",
"Content":[{
"InProtocol":"ftp",
"Type":2,
"InPort":8887,
"domain":"all",
"SyncId":87,
"OutAddr":[{"OutPort":21,"OutIp":ftp_ip}],
"InIp":proxy_ip
}]}
}
ftpcheck1 = {'SetFtpCheck':{
"MethodName":"SetFtpCheck",
"MessageTime":datatime,
"Sender":"Centre0",
"Content":[{
"Type":"user","DataCheck":username}
]}
}
ftpcheck2 = {'SetFtpCheck':{
"MethodName":"SetFtpCheck",
"MessageTime":datatime,
"Sender":"Centre0",
"Content":[{
"Type":"user","DataCheck":f'{username};{case2_allow_user}'}
]}
}
| [
"[email protected]"
] | |
1a8534c747ce64a7d53d310af6a6610ca5a802de | b7dd07413c05a13207988535b755b7d28dbc5663 | /Chapter_11/name_function.py | ff49f2a5f8b4c33b8d0e85a0b57ace81dc09e227 | [] | no_license | GrnTeaLatte/AlienInvasion | b671a87cd730c3d4b31a8e8d760d2d02d576cfb3 | d60e8e65adb79e54a1e1c579825827355a7e85ea | refs/heads/main | 2023-02-26T03:55:26.799446 | 2020-11-03T00:42:06 | 2020-11-03T00:42:06 | 336,111,408 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 222 | py | def get_formatted_name(first, last, middle=''):
"""Generate a neatly formatted full name."""
if middle:
full_name = first + ' ' + middle + ' ' + last
else:
full_name = first + ' ' + last
return full_name.title()
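# Example:
#   get_formatted_name('jimi', 'hendrix')          -> 'Jimi Hendrix'
#   get_formatted_name('john', 'hooker', 'lee')    -> 'John Lee Hooker'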
| [
"[email protected]"
] | |
ba139b91da3b1c9b90d16903405e283f9fc3ed5e | 63b364ad79288be56c55c83b7d78785b60be3a2e | /ssm/core.py | 67c2527f8918067c2a1d811cbbe165edf9664ffd | [
"MIT"
] | permissive | quliuwuyihmy/ssm | 65b69ce0e62ac65a3b99f1d20fc92a99b6769492 | 7d4e8830cfc7f30f8e1429149e239b823478cc14 | refs/heads/master | 2020-04-01T22:42:07.580301 | 2018-10-17T19:55:51 | 2018-10-17T19:55:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 24,937 | py | import copy
import warnings
from functools import partial
import tqdm
import autograd.numpy as np
import autograd.numpy.random as npr
from autograd.scipy.misc import logsumexp
from autograd.misc.optimizers import sgd, adam
from autograd.tracer import getval
from autograd import grad
from ssm.primitives import hmm_normalizer, hmm_expected_states, hmm_filter, viterbi
from ssm.util import ensure_args_are_lists, ensure_args_not_none, \
ensure_slds_args_not_none, ensure_elbo_args_are_lists, adam_with_convergence_check
class _HMM(object):
"""
Base class for hidden Markov models.
Notation:
K: number of discrete latent states
D: dimensionality of observations
M: dimensionality of inputs
In the code we will sometimes refer to the discrete
latent state sequence as z and the data as x.
"""
def __init__(self, K, D, M, init_state_distn, transitions, observations):
self.K, self.D, self.M = K, D, M
self.init_state_distn = init_state_distn
self.transitions = transitions
self.observations = observations
self._fitting_methods = \
dict(sgd=partial(self._fit_sgd, "sgd"),
adam=partial(self._fit_sgd, "adam"),
em=self._fit_em,
stochastic_em=partial(self._fit_stochastic_em, "adam"),
stochastic_em_sgd=partial(self._fit_stochastic_em, "sgd"),
)
@property
def params(self):
return self.init_state_distn.params, \
self.transitions.params, \
self.observations.params
@params.setter
def params(self, value):
self.init_state_distn.params = value[0]
self.transitions.params = value[1]
self.observations.params = value[2]
@ensure_args_are_lists
def initialize(self, datas, inputs=None, masks=None, tags=None):
"""
Initialize parameters given data.
"""
self.init_state_distn.initialize(datas, inputs=inputs, masks=masks, tags=tags)
self.transitions.initialize(datas, inputs=inputs, masks=masks, tags=tags)
self.observations.initialize(datas, inputs=inputs, masks=masks, tags=tags)
def permute(self, perm):
"""
Permute the discrete latent states.
"""
assert np.all(np.sort(perm) == np.arange(self.K))
self.init_state_distn.permute(perm)
self.transitions.permute(perm)
self.observations.permute(perm)
def sample(self, T, prefix=None, input=None, tag=None, with_noise=True):
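        """Sample a length-T discrete state sequence and matching observations.
        If `prefix=(zhist, xhist)` is given, sampling continues from that
        history and only the T newly generated steps are returned.
        """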
K, D = self.K, self.D
# If prefix is given, pad the output with it
if prefix is None:
pad = 1
z = np.zeros(T+1, dtype=int)
data = np.zeros((T+1, D))
input = np.zeros((T+1, self.M)) if input is None else input
mask = np.ones((T+1, D), dtype=bool)
# Sample the first state from the initial distribution
pi0 = np.exp(self.init_state_distn.log_initial_state_distn(data, input, mask, tag))
z[0] = npr.choice(self.K, p=pi0)
data[0] = self.observations.sample_x(z[0], data[:0], with_noise=with_noise)
else:
zhist, xhist = prefix
pad = len(zhist)
assert zhist.dtype == int and zhist.min() >= 0 and zhist.max() < K
assert xhist.shape == (pad, D)
z = np.concatenate((zhist, np.zeros(T, dtype=int)))
data = np.concatenate((xhist, np.zeros((T, D))))
input = np.zeros((T+pad, self.M)) if input is None else input
mask = np.ones((T+pad, D), dtype=bool)
# Fill in the rest of the data
for t in range(pad, pad+T):
Pt = np.exp(self.transitions.log_transition_matrices(data[t-1:t+1], input[t-1:t+1], mask=mask[t-1:t+1], tag=tag))[0]
z[t] = npr.choice(self.K, p=Pt[z[t-1]])
data[t] = self.observations.sample_x(z[t], data[:t], input=input[t], tag=tag, with_noise=with_noise)
return z[pad:], data[pad:]
@ensure_args_not_none
def expected_states(self, data, input=None, mask=None, tag=None):
log_pi0 = self.init_state_distn.log_initial_state_distn(data, input, mask, tag)
log_Ps = self.transitions.log_transition_matrices(data, input, mask, tag)
log_likes = self.observations.log_likelihoods(data, input, mask, tag)
return hmm_expected_states(log_pi0, log_Ps, log_likes)
@ensure_args_not_none
def most_likely_states(self, data, input=None, mask=None, tag=None):
log_pi0 = self.init_state_distn.log_initial_state_distn(data, input, mask, tag)
log_Ps = self.transitions.log_transition_matrices(data, input, mask, tag)
log_likes = self.observations.log_likelihoods(data, input, mask, tag)
return viterbi(log_pi0, log_Ps, log_likes)
@ensure_args_not_none
def filter(self, data, input=None, mask=None, tag=None):
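        """Compute the filtered state probabilities p(z_t | x_{1:t})."""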
log_pi0 = self.init_state_distn.log_initial_state_distn(data, input, mask, tag)
log_Ps = self.transitions.log_transition_matrices(data, input, mask, tag)
log_likes = self.observations.log_likelihoods(data, input, mask, tag)
return hmm_filter(log_pi0, log_Ps, log_likes)
@ensure_args_not_none
def smooth(self, data, input=None, mask=None, tag=None):
"""
Compute the mean observation under the posterior distribution
of latent discrete states.
"""
Ez, _, _ = self.expected_states(data, input, mask)
return self.observations.smooth(Ez, data, input, tag)
def log_prior(self):
"""
Compute the log prior probability of the model parameters
"""
return self.init_state_distn.log_prior() + \
self.transitions.log_prior() + \
self.observations.log_prior()
@ensure_args_are_lists
def log_likelihood(self, datas, inputs=None, masks=None, tags=None):
"""
Compute the log probability of the data under the current
model parameters.
:param datas: single array or list of arrays of data.
:return total log probability of the data.
"""
ll = 0
for data, input, mask, tag in zip(datas, inputs, masks, tags):
log_pi0 = self.init_state_distn.log_initial_state_distn(data, input, mask, tag)
log_Ps = self.transitions.log_transition_matrices(data, input, mask, tag)
log_likes = self.observations.log_likelihoods(data, input, mask, tag)
ll += hmm_normalizer(log_pi0, log_Ps, log_likes)
assert np.isfinite(ll)
return ll
@ensure_args_are_lists
def log_probability(self, datas, inputs=None, masks=None, tags=None):
return self.log_likelihood(datas, inputs, masks, tags) + self.log_prior()
def expected_log_probability(self, expectations, datas, inputs=None, masks=None, tags=None):
"""
        Compute the expected log joint probability of the data and the
        given latent-state expectations under the current model parameters.
        :param expectations: list of (Ez, Ezzp1, normalizer) tuples from the E-step.
        :param datas: single array or list of arrays of data.
        :return total expected log probability of the data.
"""
elp = self.log_prior()
for (Ez, Ezzp1, _), data, input, mask, tag in zip(expectations, datas, inputs, masks, tags):
log_pi0 = self.init_state_distn.log_initial_state_distn(data, input, mask, tag)
log_Ps = self.transitions.log_transition_matrices(data, input, mask, tag)
log_likes = self.observations.log_likelihoods(data, input, mask, tag)
# Compute the expected log probability
elp += np.sum(Ez[0] * log_pi0)
elp += np.sum(Ezzp1 * log_Ps)
elp += np.sum(Ez * log_likes)
assert np.isfinite(elp)
return elp
# Model fitting
def _fit_sgd(self, optimizer, datas, inputs, masks, tags, print_intvl=10, **kwargs):
"""
Fit the model with maximum marginal likelihood.
"""
T = sum([data.shape[0] for data in datas])
def _objective(params, itr):
self.params = params
obj = self.log_probability(datas, inputs, masks, tags)
return -obj / T
lls = []
def _print_progress(params, itr, g):
lls.append(self.log_probability(datas, inputs, masks, tags)._value)
if itr % print_intvl == 0:
print("Iteration {}. LL: {}".format(itr, lls[-1]))
optimizers = dict(sgd=sgd, adam=adam, adam_with_convergence_check=adam_with_convergence_check)
self.params = \
optimizers[optimizer](grad(_objective), self.params, callback=_print_progress, **kwargs)
return lls
def _fit_stochastic_em(self, optimizer, datas, inputs, masks, tags, num_epochs=100, **kwargs):
"""
Replace the M-step of EM with a stochastic gradient update using the ELBO computed
on a minibatch of data.
"""
M = len(datas)
T = sum([data.shape[0] for data in datas])
perm = [np.random.permutation(M) for _ in range(num_epochs)]
def _get_minibatch(itr):
epoch = itr // M
m = itr % M
i = perm[epoch][m]
return datas[i], inputs[i], masks[i], tags[i]
def _objective(params, itr):
# Grab a minibatch of data
data, input, mask, tag = _get_minibatch(itr)
Ti = data.shape[0]
# E step: compute expected latent states with current parameters
Ez, Ezzp1, _ = self.expected_states(data, input, mask, tag)
# M step: set the parameter and compute the (normalized) objective function
self.params = params
log_pi0 = self.init_state_distn.log_initial_state_distn(data, input, mask, tag)
log_Ps = self.transitions.log_transition_matrices(data, input, mask, tag)
log_likes = self.observations.log_likelihoods(data, input, mask, tag)
# Compute the expected log probability
# (Scale by number of length of this minibatch.)
obj = self.log_prior()
obj += np.sum(Ez[0] * log_pi0) * M
obj += np.sum(Ezzp1 * log_Ps) * (T - M) / (Ti - 1)
obj += np.sum(Ez * log_likes) * T / Ti
assert np.isfinite(obj)
return -obj / T
lls = []
pbar = tqdm.trange(num_epochs * M)
def _print_progress(params, itr, g):
epoch = itr // M
m = itr % M
lls.append(-T * _objective(params, itr))
pbar.set_description("Epoch {} Itr {} LP: {:.1f}".format(epoch, m, lls[-1]))
pbar.update(1)
# Run the optimizer
optimizers = dict(sgd=sgd, adam=adam)
self.params = \
optimizers[optimizer](grad(_objective), self.params, callback=_print_progress, num_iters=num_epochs * M)
return lls
def _fit_em(self, datas, inputs, masks, tags, num_em_iters=100, **kwargs):
"""
Fit the parameters with expectation maximization.
E step: compute E[z_t] and E[z_t, z_{t+1}] with message passing;
M-step: analytical maximization of E_{p(z | x)} [log p(x, z; theta)].
"""
pbar = tqdm.trange(num_em_iters)
lls = [self.log_probability(datas, inputs, masks, tags)]
pbar.set_description("LP: {:.1f}".format(lls[-1]))
for itr in pbar:
# E step: compute expected latent states with current parameters
expectations = [self.expected_states(data, input, mask, tag)
for data, input, mask, tag in zip(datas, inputs, masks, tags)]
# M step: maximize expected log joint wrt parameters
self.init_state_distn.m_step(expectations, datas, inputs, masks, tags, **kwargs)
self.transitions.m_step(expectations, datas, inputs, masks, tags, **kwargs)
self.observations.m_step(expectations, datas, inputs, masks, tags, **kwargs)
# Store progress
lls.append(self.log_prior() + sum([ll for (_, _, ll) in expectations]))
pbar.set_description("LP: {:.1f}".format(lls[-1]))
return lls
@ensure_args_are_lists
def fit(self, datas, inputs=None, masks=None, tags=None, method="sgd", initialize=True, **kwargs):
if method not in self._fitting_methods:
raise Exception("Invalid method: {}. Options are {}".\
format(method, self._fitting_methods.keys()))
if initialize:
self.initialize(datas, inputs=inputs, masks=masks, tags=tags)
return self._fitting_methods[method](datas, inputs=inputs, masks=masks, tags=tags, **kwargs)
class _SwitchingLDS(object):
"""
Switching linear dynamical system fit with
stochastic variational inference on the marginal model,
integrating out the discrete states.
"""
def __init__(self, N, K, D, M, init_state_distn, transitions, dynamics, emissions):
self.N, self.K, self.D, self.M = N, K, D, M
self.init_state_distn = init_state_distn
self.transitions = transitions
self.dynamics = dynamics
self.emissions = emissions
# Only allow fitting by SVI
self._fitting_methods = dict(svi=self._fit_svi)
@property
def params(self):
return self.init_state_distn.params, \
self.transitions.params, \
self.dynamics.params, \
self.emissions.params
@params.setter
def params(self, value):
self.init_state_distn.params = value[0]
self.transitions.params = value[1]
self.dynamics.params = value[2]
self.emissions.params = value[3]
@ensure_args_are_lists
def initialize(self, datas, inputs=None, masks=None, tags=None, num_em_iters=25, verbose=False):
# First initialize the observation model
self.emissions.initialize(datas, inputs, masks, tags)
# Get the initialized variational mean for the data
xs = [self.emissions.initialize_variational_params(data, input, mask, tag)[0]
for data, input, mask, tag in zip(datas, inputs, masks, tags)]
xmasks = [np.ones_like(x, dtype=bool) for x in xs]
# Now run a few iterations of EM on a ARHMM with the variational mean
print("Initializing with an ARHMM using {} steps of EM.".format(num_em_iters))
arhmm = _HMM(self.K, self.D, self.M,
copy.deepcopy(self.init_state_distn),
copy.deepcopy(self.transitions),
copy.deepcopy(self.dynamics))
arhmm.fit(xs, inputs=inputs, masks=xmasks, tags=tags,
method="em", num_em_iters=num_em_iters, num_iters=10, verbose=verbose)
self.init_state_distn = copy.deepcopy(arhmm.init_state_distn)
self.transitions = copy.deepcopy(arhmm.transitions)
self.dynamics = copy.deepcopy(arhmm.observations)
print("Done")
def permute(self, perm):
"""
Permute the discrete latent states.
"""
assert np.all(np.sort(perm) == np.arange(self.K))
self.init_state_distn.permute(perm)
self.transitions.permute(perm)
self.dynamics.permute(perm)
self.emissions.permute(perm)
def log_prior(self):
"""
Compute the log prior probability of the model parameters
"""
return self.init_state_distn.log_prior() + \
self.transitions.log_prior() + \
self.dynamics.log_prior() + \
self.emissions.log_prior()
def sample(self, T, input=None, tag=None):
K, D = self.K, self.D
input = np.zeros((T, self.M)) if input is None else input
mask = np.ones((T, D), dtype=bool)
# Initialize outputs
z = np.zeros(T, dtype=int)
x = np.zeros((T, D))
# Sample discrete and continuous latent states
pi0 = np.exp(self.init_state_distn.log_initial_state_distn(x, input, mask, tag))
z[0] = npr.choice(self.K, p=pi0)
x[0] = self.dynamics.sample_x(z[0], x[:0], tag=tag)
for t in range(1, T):
Pt = np.exp(self.transitions.log_transition_matrices(x[t-1:t+1], input[t-1:t+1], mask=mask[t-1:t+1], tag=tag))[0]
z[t] = npr.choice(self.K, p=Pt[z[t-1]])
x[t] = self.dynamics.sample_x(z[t], x[:t], input=input[t], tag=tag)
# Sample observations given latent states
y = self.emissions.sample_y(z, x, input=input, tag=tag)
return z, x, y
@ensure_slds_args_not_none
def expected_states(self, variational_mean, data, input=None, mask=None, tag=None):
log_pi0 = self.init_state_distn.log_initial_state_distn(variational_mean, input, mask, tag)
log_Ps = self.transitions.log_transition_matrices(variational_mean, input, mask, tag)
log_likes = self.dynamics.log_likelihoods(variational_mean, input, np.ones_like(variational_mean, dtype=bool), tag)
log_likes += self.emissions.log_likelihoods(data, input, mask, tag, variational_mean)
return hmm_expected_states(log_pi0, log_Ps, log_likes)
@ensure_slds_args_not_none
def most_likely_states(self, variational_mean, data, input=None, mask=None, tag=None):
Ez, _ = self.expected_states(variational_mean, data, input, mask, tag)
return np.argmax(Ez, axis=1)
@ensure_slds_args_not_none
def smooth(self, variational_mean, data, input=None, mask=None, tag=None):
"""
Compute the mean observation under the posterior distribution
of latent discrete states.
"""
Ez, _ = self.expected_states(variational_mean, data, input, mask, tag)
return self.emissions.smooth(Ez, variational_mean, data, input, tag)
@ensure_args_are_lists
def log_probability(self, datas, inputs=None, masks=None, tags=None):
warnings.warn("Cannot compute exact marginal log probability for the SLDS. "
"the ELBO instead.")
return np.nan
@ensure_elbo_args_are_lists
def elbo(self, variational_params, datas, inputs=None, masks=None, tags=None, n_samples=1):
"""
Lower bound on the marginal likelihood p(y | theta)
using variational posterior q(x; phi) where phi = variational_params
"""
elbo = 0
for data, input, mask, tag, (q_mu, q_sigma_inv) in \
zip(datas, inputs, masks, tags, variational_params):
q_sigma = np.exp(q_sigma_inv)
for sample in range(n_samples):
# log p(theta)
elbo += self.log_prior()
# Sample x from the variational posterior
x = q_mu + np.sqrt(q_sigma) * npr.randn(data.shape[0], self.D)
# Compute log p(x | theta) = log \sum_z p(x, z | theta)
# The "mask" for x is all ones
x_mask = np.ones_like(x, dtype=bool)
log_pi0 = self.init_state_distn.log_initial_state_distn(x, input, x_mask, tag)
log_Ps = self.transitions.log_transition_matrices(x, input, x_mask, tag)
log_likes = self.dynamics.log_likelihoods(x, input, x_mask, tag)
log_likes += self.emissions.log_likelihoods(data, input, mask, tag, x)
elbo += hmm_normalizer(log_pi0, log_Ps, log_likes)
# -log q(x)
elbo -= np.sum(-0.5 * np.log(2 * np.pi * q_sigma))
elbo -= np.sum(-0.5 * (x - q_mu)**2 / q_sigma)
assert np.isfinite(elbo)
return elbo / n_samples
def _fit_svi(self, datas, inputs, masks, tags, learning=True, optimizer="adam", print_intvl=1, **kwargs):
"""
Fit with stochastic variational inference using a
mean field Gaussian approximation for the latent states x_{1:T}.
"""
T = sum([data.shape[0] for data in datas])
# Initialize the variational posterior parameters
variational_params = [self.emissions.initialize_variational_params(data, input, mask, tag)
for data, input, mask, tag in zip(datas, inputs, masks, tags)]
def _objective(params, itr):
if learning:
self.params, variational_params = params
else:
variational_params = params
obj = self.elbo(variational_params, datas, inputs, masks, tags)
return -obj / T
elbos = []
def _print_progress(params, itr, g):
elbos.append(-_objective(params, itr) * T)
if itr % print_intvl == 0:
print("Iteration {}. ELBO: {:.1f}".format(itr, elbos[-1]))
optimizers = dict(sgd=sgd, adam=adam, adam_with_convergence_check=adam_with_convergence_check)
initial_params = (self.params, variational_params) if learning else variational_params
results = \
optimizers[optimizer](grad(_objective),
initial_params,
callback=_print_progress,
**kwargs)
if learning:
self.params, variational_params = results
else:
variational_params = results
# unpack outputs as necessary
variational_params = variational_params[0] if len(variational_params) == 1 else variational_params
return elbos, variational_params
@ensure_args_are_lists
def fit(self, datas, inputs=None, masks=None, tags=None, method="svi", initialize=True, **kwargs):
if method not in self._fitting_methods:
raise Exception("Invalid method: {}. Options are {}".\
format(method, self._fitting_methods.keys()))
if initialize:
self.initialize(datas, inputs, masks, tags)
return self._fitting_methods[method](datas, inputs, masks, tags, learning=True, **kwargs)
@ensure_args_are_lists
def approximate_posterior(self, datas, inputs=None, masks=None, tags=None, method="svi", **kwargs):
if method not in self._fitting_methods:
raise Exception("Invalid method: {}. Options are {}".\
format(method, self._fitting_methods.keys()))
return self._fitting_methods[method](datas, inputs, masks, tags, learning=False, **kwargs)
class _LDS(_SwitchingLDS):
"""
    Linear dynamical system: implemented as a switching LDS with a single
    discrete state, fit with stochastic variational inference on the
    marginal model.
"""
def __init__(self, N, D, M, dynamics, emissions):
from ssm.init_state_distns import InitialStateDistribution
from ssm.transitions import StationaryTransitions
init_state_distn = InitialStateDistribution(1, D, M)
transitions = StationaryTransitions(1, D, M)
super(_LDS, self).__init__(N, 1, D, M, init_state_distn, transitions, dynamics, emissions)
@ensure_slds_args_not_none
def expected_states(self, variational_mean, data, input=None, mask=None, tag=None):
return np.ones((variational_mean.shape[0], 1)), \
np.ones((variational_mean.shape[0], 1, 1)),
@ensure_slds_args_not_none
def most_likely_states(self, variational_mean, data, input=None, mask=None, tag=None):
raise NotImplementedError
def log_prior(self):
return self.dynamics.log_prior() + self.emissions.log_prior()
@ensure_args_are_lists
def log_probability(self, datas, inputs=None, masks=None, tags=None):
warnings.warn("Log probability of LDS is not yet implemented.")
return np.nan
@ensure_elbo_args_are_lists
def elbo(self, variational_params, datas, inputs=None, masks=None, tags=None, n_samples=1):
"""
Lower bound on the marginal likelihood p(y | theta)
using variational posterior q(x; phi) where phi = variational_params
"""
elbo = 0
for data, input, mask, tag, (q_mu, q_sigma_inv) in \
zip(datas, inputs, masks, tags, variational_params):
q_sigma = np.exp(q_sigma_inv)
for sample in range(n_samples):
# log p(theta)
elbo += self.log_prior()
# Sample x from the variational posterior
x = q_mu + np.sqrt(q_sigma) * npr.randn(data.shape[0], self.D)
x_mask = np.ones_like(x, dtype=bool)
# Compute log p(y, x | theta)
elbo += np.sum(self.dynamics.log_likelihoods(x, input, x_mask, tag))
elbo += np.sum(self.emissions.log_likelihoods(data, input, mask, tag, x))
# -log q(x)
elbo -= np.sum(-0.5 * np.log(2 * np.pi * q_sigma))
elbo -= np.sum(-0.5 * (x - q_mu)**2 / q_sigma)
assert np.isfinite(elbo)
return elbo / n_samples
| [
"[email protected]"
] | |
8a3240dd4912d714f1b093941c011621f9e39570 | 1ee3dc4fa096d12e409af3a298ba01f5558c62b5 | /ixnetwork_restpy/testplatform/sessions/ixnetwork/traffic/statistics/statistics.py | dc0d25842e6752902a80cfea75ca7c0ec4747a1a | [
"MIT"
] | permissive | parthpower/ixnetwork_restpy | 321e64a87be0a4d990276d26f43aca9cf4d43cc9 | 73fa29796a5178c707ee4e21d90ff4dad31cc1ed | refs/heads/master | 2020-07-04T13:34:42.162458 | 2019-08-13T20:33:17 | 2019-08-13T20:33:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,933 | py | # MIT LICENSE
#
# Copyright 1997 - 2019 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class Statistics(Base):
"""The Statistics class encapsulates a required statistics node in the ixnetwork hierarchy.
An instance of the class can be obtained by accessing the Statistics property from a parent instance.
The internal properties list will contain one and only one set of properties which is populated when the property is accessed.
"""
_SDM_NAME = 'statistics'
def __init__(self, parent):
super(Statistics, self).__init__(parent)
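    # Usage sketch (names illustrative): an instance is reached from the parent
    # Traffic node, e.g.  ixnetwork.Traffic.Statistics.Latency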
@property
def AdvancedSequenceChecking(self):
"""An instance of the AdvancedSequenceChecking class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.advancedsequencechecking.advancedsequencechecking.AdvancedSequenceChecking)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.advancedsequencechecking.advancedsequencechecking import AdvancedSequenceChecking
return AdvancedSequenceChecking(self)._select()
@property
def CpdpConvergence(self):
"""An instance of the CpdpConvergence class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.cpdpconvergence.cpdpconvergence.CpdpConvergence)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.cpdpconvergence.cpdpconvergence import CpdpConvergence
return CpdpConvergence(self)._select()
@property
def DataIntegrity(self):
"""An instance of the DataIntegrity class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.dataintegrity.dataintegrity.DataIntegrity)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.dataintegrity.dataintegrity import DataIntegrity
return DataIntegrity(self)._select()
@property
def DelayVariation(self):
"""An instance of the DelayVariation class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.delayvariation.delayvariation.DelayVariation)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.delayvariation.delayvariation import DelayVariation
return DelayVariation(self)._select()
@property
def ErrorStats(self):
"""An instance of the ErrorStats class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.errorstats.errorstats.ErrorStats)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.errorstats.errorstats import ErrorStats
return ErrorStats(self)._select()
@property
def InterArrivalTimeRate(self):
"""An instance of the InterArrivalTimeRate class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.interarrivaltimerate.interarrivaltimerate.InterArrivalTimeRate)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.interarrivaltimerate.interarrivaltimerate import InterArrivalTimeRate
return InterArrivalTimeRate(self)._select()
@property
def Iptv(self):
"""An instance of the Iptv class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.iptv.iptv.Iptv)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.iptv.iptv import Iptv
return Iptv(self)._select()
@property
def L1Rates(self):
"""An instance of the L1Rates class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.l1rates.l1rates.L1Rates)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.l1rates.l1rates import L1Rates
return L1Rates(self)._select()
@property
def Latency(self):
"""An instance of the Latency class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.latency.latency.Latency)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.latency.latency import Latency
return Latency(self)._select()
@property
def MisdirectedPerFlow(self):
"""An instance of the MisdirectedPerFlow class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.misdirectedperflow.misdirectedperflow.MisdirectedPerFlow)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.misdirectedperflow.misdirectedperflow import MisdirectedPerFlow
return MisdirectedPerFlow(self)._select()
@property
def MultipleJoinLeaveLatency(self):
"""An instance of the MultipleJoinLeaveLatency class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.multiplejoinleavelatency.multiplejoinleavelatency.MultipleJoinLeaveLatency)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.multiplejoinleavelatency.multiplejoinleavelatency import MultipleJoinLeaveLatency
return MultipleJoinLeaveLatency(self)._select()
@property
def OneTimeJoinLeaveLatency(self):
"""An instance of the OneTimeJoinLeaveLatency class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.onetimejoinleavelatency.onetimejoinleavelatency.OneTimeJoinLeaveLatency)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.onetimejoinleavelatency.onetimejoinleavelatency import OneTimeJoinLeaveLatency
return OneTimeJoinLeaveLatency(self)._select()
@property
def PacketLossDuration(self):
"""An instance of the PacketLossDuration class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.packetlossduration.packetlossduration.PacketLossDuration)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.packetlossduration.packetlossduration import PacketLossDuration
return PacketLossDuration(self)._select()
@property
def Prbs(self):
"""An instance of the Prbs class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.prbs.prbs.Prbs)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.prbs.prbs import Prbs
return Prbs(self)._select()
@property
def SequenceChecking(self):
"""An instance of the SequenceChecking class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.sequencechecking.sequencechecking.SequenceChecking)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.statistics.sequencechecking.sequencechecking import SequenceChecking
return SequenceChecking(self)._select()
| [
"[email protected]"
] | |
7cd000559400fe32194070d58464cca0aa9ce297 | bdc0b8809d52933c10f8eb77442bd0b4453f28f9 | /build/nav_msgs/rosidl_generator_py/nav_msgs/msg/_map_meta_data.py | df3dc46c9436798500483901a5ad8cffd380adde | [] | no_license | ClaytonCalabrese/BuiltRos2Eloquent | 967f688bbca746097016dbd34563716bd98379e3 | 76bca564bfd73ef73485e5c7c48274889032e408 | refs/heads/master | 2021-03-27T22:42:12.976367 | 2020-03-17T14:24:07 | 2020-03-17T14:24:07 | 247,810,969 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,517 | py | # generated from rosidl_generator_py/resource/_idl.py.em
# with input from nav_msgs:msg/MapMetaData.idl
# generated code does not contain a copyright notice
# Import statements for member types
import rosidl_parser.definition # noqa: E402, I100
class Metaclass_MapMetaData(type):
"""Metaclass of message 'MapMetaData'."""
_CREATE_ROS_MESSAGE = None
_CONVERT_FROM_PY = None
_CONVERT_TO_PY = None
_DESTROY_ROS_MESSAGE = None
_TYPE_SUPPORT = None
__constants = {
}
@classmethod
def __import_type_support__(cls):
try:
from rosidl_generator_py import import_type_support
module = import_type_support('nav_msgs')
except ImportError:
import logging
import traceback
logger = logging.getLogger(
'nav_msgs.msg.MapMetaData')
logger.debug(
'Failed to import needed modules for type support:\n' +
traceback.format_exc())
else:
cls._CREATE_ROS_MESSAGE = module.create_ros_message_msg__msg__map_meta_data
cls._CONVERT_FROM_PY = module.convert_from_py_msg__msg__map_meta_data
cls._CONVERT_TO_PY = module.convert_to_py_msg__msg__map_meta_data
cls._TYPE_SUPPORT = module.type_support_msg__msg__map_meta_data
cls._DESTROY_ROS_MESSAGE = module.destroy_ros_message_msg__msg__map_meta_data
from builtin_interfaces.msg import Time
if Time.__class__._TYPE_SUPPORT is None:
Time.__class__.__import_type_support__()
from geometry_msgs.msg import Pose
if Pose.__class__._TYPE_SUPPORT is None:
Pose.__class__.__import_type_support__()
@classmethod
def __prepare__(cls, name, bases, **kwargs):
# list constant names here so that they appear in the help text of
# the message class under "Data and other attributes defined here:"
# as well as populate each message instance
return {
}
class MapMetaData(metaclass=Metaclass_MapMetaData):
"""Message class 'MapMetaData'."""
__slots__ = [
'_map_load_time',
'_resolution',
'_width',
'_height',
'_origin',
]
_fields_and_field_types = {
'map_load_time': 'builtin_interfaces/Time',
'resolution': 'float',
'width': 'uint32',
'height': 'uint32',
'origin': 'geometry_msgs/Pose',
}
SLOT_TYPES = (
rosidl_parser.definition.NamespacedType(['builtin_interfaces', 'msg'], 'Time'), # noqa: E501
rosidl_parser.definition.BasicType('float'), # noqa: E501
rosidl_parser.definition.BasicType('uint32'), # noqa: E501
rosidl_parser.definition.BasicType('uint32'), # noqa: E501
rosidl_parser.definition.NamespacedType(['geometry_msgs', 'msg'], 'Pose'), # noqa: E501
)
def __init__(self, **kwargs):
assert all('_' + key in self.__slots__ for key in kwargs.keys()), \
'Invalid arguments passed to constructor: %s' % \
', '.join(sorted(k for k in kwargs.keys() if '_' + k not in self.__slots__))
from builtin_interfaces.msg import Time
self.map_load_time = kwargs.get('map_load_time', Time())
self.resolution = kwargs.get('resolution', float())
self.width = kwargs.get('width', int())
self.height = kwargs.get('height', int())
from geometry_msgs.msg import Pose
self.origin = kwargs.get('origin', Pose())
def __repr__(self):
typename = self.__class__.__module__.split('.')
typename.pop()
typename.append(self.__class__.__name__)
args = []
for s, t in zip(self.__slots__, self.SLOT_TYPES):
field = getattr(self, s)
fieldstr = repr(field)
# We use Python array type for fields that can be directly stored
# in them, and "normal" sequences for everything else. If it is
# a type that we store in an array, strip off the 'array' portion.
if (
isinstance(t, rosidl_parser.definition.AbstractSequence) and
isinstance(t.value_type, rosidl_parser.definition.BasicType) and
t.value_type.typename in ['float', 'double', 'int8', 'uint8', 'int16', 'uint16', 'int32', 'uint32', 'int64', 'uint64']
):
if len(field) == 0:
fieldstr = '[]'
else:
assert fieldstr.startswith('array(')
prefix = "array('X', "
suffix = ')'
fieldstr = fieldstr[len(prefix):-len(suffix)]
args.append(s[1:] + '=' + fieldstr)
return '%s(%s)' % ('.'.join(typename), ', '.join(args))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
if self.map_load_time != other.map_load_time:
return False
if self.resolution != other.resolution:
return False
if self.width != other.width:
return False
if self.height != other.height:
return False
if self.origin != other.origin:
return False
return True
@classmethod
def get_fields_and_field_types(cls):
from copy import copy
return copy(cls._fields_and_field_types)
@property
def map_load_time(self):
"""Message field 'map_load_time'."""
return self._map_load_time
@map_load_time.setter
def map_load_time(self, value):
if __debug__:
from builtin_interfaces.msg import Time
assert \
isinstance(value, Time), \
"The 'map_load_time' field must be a sub message of type 'Time'"
self._map_load_time = value
@property
def resolution(self):
"""Message field 'resolution'."""
return self._resolution
@resolution.setter
def resolution(self, value):
if __debug__:
assert \
isinstance(value, float), \
"The 'resolution' field must be of type 'float'"
self._resolution = value
@property
def width(self):
"""Message field 'width'."""
return self._width
@width.setter
def width(self, value):
if __debug__:
assert \
isinstance(value, int), \
"The 'width' field must be of type 'int'"
assert value >= 0 and value < 4294967296, \
"The 'width' field must be an unsigned integer in [0, 4294967295]"
self._width = value
@property
def height(self):
"""Message field 'height'."""
return self._height
@height.setter
def height(self, value):
if __debug__:
assert \
isinstance(value, int), \
"The 'height' field must be of type 'int'"
assert value >= 0 and value < 4294967296, \
"The 'height' field must be an unsigned integer in [0, 4294967295]"
self._height = value
@property
def origin(self):
"""Message field 'origin'."""
return self._origin
@origin.setter
def origin(self, value):
if __debug__:
from geometry_msgs.msg import Pose
assert \
isinstance(value, Pose), \
"The 'origin' field must be a sub message of type 'Pose'"
self._origin = value
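
# ---------------------------------------------------------------------------
# Usage sketch (not part of the rosidl-generated code): assuming this class is
# nav_msgs.msg.MapMetaData -- the field set above matches that message -- it
# can be built and inspected like this:
#
#     from nav_msgs.msg import MapMetaData
#     meta = MapMetaData(resolution=0.05, width=384, height=384)
#     meta.height = 512        # setter asserts the uint32 range [0, 4294967295]
#     print(meta)              # repr assembled field-by-field from __slots__
#
# Unknown keyword arguments are rejected by the constructor's assert.
# ---------------------------------------------------------------------------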
| [
"[email protected]"
] | |
8cf6fd89ee600098c8692b913af978be39e9889e | da9a5b53d41474dfc588e7b8a14e0192e81bdb47 | /GUI Programming with Python and Kivy BOOK/slugrace107/accident.py | 5dc107fe1f66fade5a6c85d846a0a1bddf0a81bc | [] | no_license | prospero-apps/python | 1c0bdded512c8e73360511118f7ba643772af774 | 67719d8c25f6f13f21ba0c260aadf43beb8de99d | refs/heads/master | 2021-11-28T16:22:55.108106 | 2021-11-17T22:13:24 | 2021-11-17T22:13:24 | 173,511,053 | 5 | 8 | null | null | null | null | UTF-8 | Python | false | false | 10,139 | py | # File name: accident.py
from abc import ABC, abstractmethod
from kivy.core.audio import SoundLoader
class Accident(ABC):
intro = 'BREAKING NEWS: '
def __init__(self, name,
headlines,
sound,
position = 0,
slug = None,
image = None):
self.name = name
self.headlines = headlines
self.sound = SoundLoader.load(sound)
self.position = position
self.slug = slug
self.image = image
@abstractmethod
def happen(self):
pass
@abstractmethod
def reset(self):
pass
### BROKEN LEG ###
class BrokenLegAccident(Accident):
name = 'Broken Leg'
headlines = [
"just broke his leg and is grounded!",
"broke his leg, which is practically all he consists of!",
"suffered from an open fracture. All he can do now is watch the others win!",
"broke his only leg and now looks pretty helpless!",
"tripped over a root and broke his leg!"]
sound = 'assets/sounds/Accidents/Broken Leg.mp3'
def __init__(self, **kwargs):
super().__init__(name=self.name, headlines=self.headlines,
sound=self.sound, **kwargs)
def happen(self):
pass
def reset(self):
pass
### OVERHEAT ###
class OverheatAccident(Accident):
name = 'Overheat'
headlines = [
"has been running faster than he should have. He burned of overheat!",
"burned by friction. Needs to cool down a bit before the next race!",
"roasted on the track from overheat. He's been running way too fast!",
"looks like he has been running faster than his body cooling system can handle!",
"shouldn't have been speeding like that. Overheating can be dangerous!"]
sound = 'assets/sounds/Accidents/Overheat.mp3'
def __init__(self, **kwargs):
super().__init__(name=self.name, headlines=self.headlines,
sound=self.sound, **kwargs)
def happen(self):
pass
def reset(self):
pass
### HEART ATTACK ###
class HeartAttackAccident(Accident):
name = 'Heart Attack'
headlines = [
"had a heart attack. Definitely needs a rest!",
"has a poor heart condition. Hadn't he stopped now, it could have killed him!",
"beaten by cardiac infarction. He'd better go to hospital asap!",
"almost killed by heart attack. He had a really narrow escape!",
"beaten by his weak heart. He'd better get some rest!"]
sound = 'assets/sounds/Accidents/Heart Attack.mp3'
image = 'atlas://assets/accidents/accidents/heart attack'
def __init__(self, **kwargs):
super().__init__(name=self.name, headlines=self.headlines,
sound=self.sound, image=self.image, **kwargs)
def happen(self):
pass
def reset(self):
pass
### GRASS ###
class GrassAccident(Accident):
name = 'Grass'
headlines = [
"just found magic grass. It's famous for powering slugs up!",
"just about to speed up after eating magic grass!",
"powered up by magic grass found unexpectedly on the track!",
"seems to be full of beans after having eaten the magic grass on his way!",
"heading perhaps even for victory after his magic grass meal!"]
sound = 'assets/sounds/Accidents/Grass.mp3'
image = 'atlas://assets/accidents/accidents/grass'
def __init__(self, **kwargs):
super().__init__(name=self.name, headlines=self.headlines,
sound=self.sound, image=self.image, **kwargs)
def happen(self):
pass
def reset(self):
pass
### ASLEEP ###
class AsleepAccident(Accident):
name = 'Asleep'
headlines = [
"just fell asleep for a while after the long and wearisome running!",
"having a nap. He again has chosen just the perfect time for that!",
"sleeping instead of running. It's getting one of his bad habits!",
"always takes a short nap at this time of the day, no matter what he's doing!",
"knows how important sleep is. Even if it's not the best time for that!"]
sound = 'assets/sounds/Accidents/Asleep.mp3'
def __init__(self, **kwargs):
super().__init__(name=self.name, headlines=self.headlines,
sound=self.sound, **kwargs)
def happen(self):
pass
def reset(self):
pass
### BLIND ###
class BlindAccident(Accident):
name = 'Blind'
headlines = [
"gone blind. Now staggering to find his way!",
"shouldn't have been reading in dark. Now it's hard to find the way!",
"temporarily lost his eyesight. Now it's difficult for him to follow the track!",
"trying hard to find his way after going blind on track!",
"staggering to finish the race after going blind because of an infection!"]
sound = 'assets/sounds/Accidents/Blind.mp3'
def __init__(self, **kwargs):
super().__init__(name=self.name, headlines=self.headlines,
sound=self.sound, **kwargs)
def happen(self):
pass
def reset(self):
pass
### PUDDLE ###
class PuddleAccident(Accident):
name = 'Puddle'
headlines = [
"drowning in a puddle of water!",
"beaten by yesterday's heavy rainfalls. Just drowning in a puddle!",
"shouldn't have skipped his swimming lessons. Drowning in a puddle now!",
"has always neglected his swimming lessons. How wrong he's been!",
"disappearing in a puddle of water formed afted heavy rainfall!"]
sound = 'assets/sounds/Accidents/Drown.mp3'
image = 'atlas://assets/accidents/accidents/puddle'
def __init__(self, **kwargs):
super().__init__(name=self.name, headlines=self.headlines,
sound=self.sound, image=self.image, **kwargs)
def happen(self):
pass
def reset(self):
pass
### ELECTROSHOCK ###
class ElectroshockAccident(Accident):
name = 'Electroshock'
headlines = [
"speeding up after being struck by lightning!",
"powered up by lightning. Now running really fast!",
"hit by electric discharge. Seems to have been powered up by it!",
"accelerated by a series of electric discharges!",
"now running much faster after being struck by lightning!"]
sound = 'assets/sounds/Accidents/Electroshock.mp3'
image = 'atlas://assets/accidents/accidents/electroshock'
def __init__(self, **kwargs):
super().__init__(name=self.name, headlines=self.headlines,
sound=self.sound, image=self.image, **kwargs)
def happen(self):
pass
def reset(self):
pass
### TURNING BACK ###
class TurningBackAccident(Accident):
name = 'Turning Back'
headlines = [
"has forgotten to turn off the gas. Must hurry home before it's too late!",
"just received a phone call. His house is on fire. No time to lose!",
"seems to have more interesting stuff to do than racing.",
"seems to have lost orientation. Well, how these little brains work!",
"has left his snack in the kitchen. He won't race when he's hungry!"]
sound = 'assets/sounds/Accidents/Turning Back.mp3'
def __init__(self, **kwargs):
super().__init__(name=self.name, headlines=self.headlines,
sound=self.sound, **kwargs)
def happen(self):
pass
def reset(self):
pass
### SHOOTING EYES ###
class ShootingEyesAccident(Accident):
name = 'Shooting Eyes'
headlines = [
"shooting his eyes. Is he ever going to stop cheating?",
"just shot his eyes. It seems he would do anything to win!",
"sacrificing his eyes for victory's sake!",
"shooting his eyes for victory and hoping for quick regeneration!",
"too slow to win? Maybe him, but who knows, possibly not his eyes!"]
sound = 'assets/sounds/Accidents/Shooting Eyes.mp3'
def __init__(self, **kwargs):
super().__init__(name=self.name, headlines=self.headlines,
sound=self.sound, **kwargs)
def happen(self):
pass
def reset(self):
pass
### RUBBERIZED ###
class RubberizedAccident(Accident):
name = 'Rubberized'
headlines = [
"stretching like rubber. This can help!",
"stretching for victory. Seems to be approaching finish line faster!",
"has never forgotten he was an eraser as a kid.",
"cheating again. This time pretending to be a piece of rubber!",
"just discovered his ability to stretch like rubber. Why not use it right now?"]
sound = 'assets/sounds/Accidents/Rubberizer.mp3'
def __init__(self, **kwargs):
super().__init__(name=self.name, headlines=self.headlines,
sound=self.sound, **kwargs)
def happen(self):
pass
def reset(self):
pass
### DEVOURED ###
class DevouredAccident(Accident):
name = 'Devoured'
headlines = [
"devoured by the infamous slug monster. Bad luck!",
"just swallowed by the terrible slug monster!",
"next on the long list of the slug monster's victims!",
"has never suspected he's gonna end up as a snack!",
"devoured by the legendary slug monster from the nearby swamps!"]
sound = 'assets/sounds/Accidents/Devoured.mp3'
image = 'atlas://assets/accidents/accidents/slug monster'
def __init__(self, **kwargs):
super().__init__(name=self.name, headlines=self.headlines,
sound=self.sound, image=self.image, **kwargs)
def happen(self):
pass
def reset(self):
pass
| [
"[email protected]"
] | |
075ab43e3a0a41cba1a3ddc5516d4f2be053423a | bb2120075f05463160e7ad9a3986f18848733563 | /tests/test_vec2d.py | aabcdfc7d592e0bd89fc791e9d20daec1d79e802 | [
"MIT"
] | permissive | pablodiegoss/pytaon | b443d6a86773c691383aa607b7b1497bcfa7bc3b | 0c5def531249331871197377338471521cf8de2c | refs/heads/master | 2022-12-31T10:51:13.762691 | 2020-10-07T18:09:24 | 2020-10-07T18:09:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,504 | py | """
Test module for the Vec2d class.
Uses the vectors u = <3,4>, v = <1,1>, ii = <1,0> and jj = <0,1> defined in conftest.
"""
import pytest
import random
from math import pi, sqrt
from pytaon import Vec2d
def similar(x, y, tol=1e-6):
return abs(x - y) <= tol
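
# Hedged note: similar() compares plain numbers; the vector-vs-vector checks
# further down (e.g. test_interpolate_to) additionally assume that abs() of a
# Vec2d difference is meaningful -- presumably its length -- which is an
# assumption about the Vec2d API, not something this file defines.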
class TestVec2d:
    def test_angle(self, u, v):
        assert u.angle > v.angle
        # v = <1, 1> lies on the diagonal, so its angle is pi/4 (45 degrees);
        # u = <3, 4> has angle atan2(4, 3), which is not pi/4
        assert similar(v.angle, pi / 4)
        assert similar(v.angle_degrees, 45)
def test_angle_setter(self, u, v):
u.angle = 0.0
assert similar(u.x, 5.0)
assert similar(u.y, 0.0)
u.angle_degrees = 45.0
assert similar(u.x, 2.5 * sqrt(2))
assert similar(u.y, 2.5 * sqrt(2))
u.angle = pi / 4
assert similar(u.x, 2.5 * sqrt(2))
assert similar(u.y, 2.5 * sqrt(2))
def test_length(self, u, v):
assert u.length > v.length
assert u.length == 5
assert u.length_sqrd == 25
def test_length_setter(self, u):
x, y = u
u.length *= 2
assert similar(u.x, 2 * x)
assert similar(u.y, 2 * y)
def test_algebraic_operations(self, u, v):
assert u + v == Vec2d(u.x + v.x, u.y + v.y)
assert u - v == Vec2d(u.x - v.x, u.y - v.y)
assert u * 2 == Vec2d(2 * u.x, 2 * u.y)
assert 2 * u == Vec2d(2 * u.x, 2 * u.y)
assert u / 2 == Vec2d(u.x / 2, u.y / 2)
def test_algebraic_operations_with_tuples(self, u, v):
U, V = map(tuple, (u, v))
assert u + v == u + V == U + v
assert u - v == u - V == U - v
def test_neg_and_pos(self, u):
assert (-u) == u * (-1)
assert (+u) == u * (+1)
assert +u is not u
def test_inplace(self, u):
u_orig = u
u += (1, 1)
assert u == Vec2d(4, 5)
assert u is u_orig
u -= (1, 1)
assert u == Vec2d(3, 4)
assert u is u_orig
u *= 2
assert u == Vec2d(6, 8)
assert u is u_orig
u /= 2
assert u == Vec2d(3, 4)
assert u is u_orig
def test_item_getter(self, u, v):
for u in [u, v]:
assert u[0] == u.x
assert u[1] == u.y
def test_item_setter(self, u):
u[0] = n = random.random()
u[1] = m = random.random()
assert u.x == n
assert u.y == m
def test_item_raises_index_error(self, u):
with pytest.raises(IndexError):
u[2]
with pytest.raises(IndexError):
u[2] = 0.0
def test_cross_product(self, u, v):
V = tuple(v)
assert similar(u.cross(v), -v.cross(u))
assert similar(u.cross(v), -1)
assert u.cross(v) == u.cross(V)
def test_dot_product(self, u, v):
V = tuple(v)
assert u.dot(v) == 7.0
assert u.dot(v) == u.dot(V)
def test_get_angle_between(self, ii, v):
II = tuple(ii)
assert v.get_angle_between(v) == 0.0
assert similar(v.get_angle_between((-1) * v), pi)
assert v.get_angle_degrees_between(v) == 0.0
assert similar(v.get_angle_degrees_between((-1) * v), 180)
assert v.get_angle_between(ii) == v.get_angle_between(II)
assert similar(v.get_angle_between(ii), pi / 4)
assert similar(v.get_angle_degrees_between(ii), 45)
def test_get_distance(self, u, v):
assert similar(u.get_distance(v), sqrt(u.get_dist_sqrd(v)))
assert similar(u.get_distance(v), sqrt(13))
assert similar(u.get_dist_sqrd(v), 13)
def test_get_distance_accepts_tuples(self, u, v):
U, V = map(tuple, (u, v))
assert similar(u.get_distance(v), u.get_distance(V))
assert similar(u.get_dist_sqrd(v), u.get_dist_sqrd(V))
def test_normalized(self, u):
assert similar(u.normalized().length, 1)
assert similar(u.normalized().angle, u.angle)
def test_normalized_return_length(self, u):
angle, length = u.angle, u.length
assert similar(u.normalize_return_length(), length)
assert similar(u.angle, angle)
def test_interpolate_to(self, u, v):
assert similar(u.interpolate_to(v, 0), u)
assert similar(u.interpolate_to(v, 1), v)
assert similar(u.interpolate_to(v, 0.5), (u + v) / 2)
def test_interpolate_to_accept_tuples(self, u, v):
V = tuple(v)
assert similar(u.interpolate_to(v, 0.5), u.interpolate_to(V, 0.5))
def test_perpendicular(self, u):
v = u.perpendicular()
assert similar(u.length, v.length)
assert similar(u.dot(v), 0)
assert similar(u.angle_between(v), pi / 2)
def test_perpendicular_normal(self, u, v):
        # the unit-length perpendicular comes from perpendicular_normal();
        # perpendicular() preserves the original length (see test_perpendicular)
        v = u.perpendicular_normal()
assert similar(v.length, 1)
assert similar(u.dot(v), 0)
assert similar(u.angle_between(v), pi / 2)
def test_projection(self, u, v):
proj = u.projection(v)
assert similar(proj.angle, v.angle)
assert proj.length <= u.length
assert similar(v.length * proj.length, u.dot(v))
assert similar(u.length * v.projection(u).length, u.dot(v))
def test_rotate(self, u):
angle, length = u.angle, u.length
rotation = pi * random.random()
assert u.rotate(rotation) is None
assert similar(u.angle, angle + rotation)
assert similar(u.length, length)
def test_rotated(self, u):
rotation = pi * random.random()
u_ = u.rotated(rotation)
assert similar(u_.angle, u.angle + rotation)
assert similar(u_.length, u.length)
| [
"[email protected]"
] | |
0e12c4c1d236ff6bcd3ab8cc64c14f44a5fe1dda | 32eee26d7747ab272a51e9918a5d2dc1bc10f98b | /src/classic_models/training/data_loader.py | a0b8aa0fbf9a94b169ecf4539bbf07dd73df33e9 | [
"Apache-2.0"
] | permissive | David082/daguan_competition_2021_codes | 5d61ec1c88c0be401212490ad2fc61cb4e5132b3 | b488dbab35d8914bc8f19a086b200ca2418d66f8 | refs/heads/master | 2023-07-26T23:36:53.290387 | 2021-09-13T11:36:16 | 2021-09-13T11:36:16 | 402,744,920 | 0 | 0 | Apache-2.0 | 2021-09-13T11:36:17 | 2021-09-03T11:21:40 | Python | UTF-8 | Python | false | false | 8,865 | py | import os
import copy
import json
import logging
import torch
from torch.utils.data import TensorDataset
from src.classic_models.training.utils import get_labels
logger = logging.getLogger(__name__)
class InputExample(object):
"""
A single training/test example for simple sequence classification.
Args:
guid: Unique id for the example.
words: list. The words of the sequence.
label: (Optional) string. The label of the example.
"""
def __init__(self, guid, words,
label_level_1=None,
label_level_2=None):
self.guid = guid
self.words = words
self.label_level_1 = label_level_1
self.label_level_2 = label_level_2
def __repr__(self):
return str(self.to_json_string())
def to_dict(self):
"""Serializes this instance to a Python dictionary."""
output = copy.deepcopy(self.__dict__)
return output
def to_json_string(self):
"""Serializes this instance to a JSON string."""
return json.dumps(self.to_dict(), indent=2, sort_keys=True) + "\n"
class InputFeatures(object):
"""A single set of features of data."""
def __init__(self, input_ids,
attention_mask,
label_id_level_1,
label_id_level_2):
self.input_ids = input_ids
self.attention_mask = attention_mask
self.label_id_level_1 = label_id_level_1
self.label_id_level_2 = label_id_level_2
def __repr__(self):
return str(self.to_json_string())
def to_dict(self):
"""Serializes this instance to a Python dictionary."""
output = copy.deepcopy(self.__dict__)
return output
def to_json_string(self):
"""Serializes this instance to a JSON string."""
return json.dumps(self.to_dict(), indent=2, sort_keys=True) + "\n"
class DaguanDataProcessor(object):
"""Processor for the BERT data set """
def __init__(self, args):
self.args = args
self.labels_level_1 = get_labels(args.label_file_level_1)
self.labels_level_2 = get_labels(args.label_file_level_2)
@classmethod
def _read_file(cls, input_file, skip_first_line=False):
"""Reads a tab separated value file."""
with open(input_file, "r", encoding="utf-8") as f:
lines = []
for i, line in enumerate(f):
if skip_first_line:
if i == 0:
continue
lines.append(line.strip())
return lines
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for i, line in enumerate(lines):
line = line.strip()
if not line:
continue
line = line.split(",")
# id
id_ = line[0]
guid = "%s-%s" % (set_type, id_)
# 1. input_text
words = line[1].split()
words = [w.strip() for w in words if len(w.strip()) > 0]
            # 2. labels: level 1 is the prefix before '-', level 2 the full name
if set_type == "test":
label_level_1 = 0
label_level_2 = 0
else:
label_name = line[2]
label_name_level_1 = label_name.split("-")[0]
label_name_level_2 = label_name
label_level_1 = self.labels_level_1.index(label_name_level_1)
label_level_2 = self.labels_level_2.index(label_name_level_2)
examples.append(
InputExample(
guid=guid,
words=words,
label_level_1=label_level_1,
label_level_2=label_level_2,
)
)
return examples
def get_examples(self, mode):
"""
Args:
mode: train, dev, test
"""
data_path = os.path.join(self.args.data_dir, "{}.txt".format(mode))
logger.info("LOOKING AT {}".format(data_path))
return self._create_examples(lines=self._read_file(data_path),
set_type=mode)
processors = {
"daguan": DaguanDataProcessor,
}
def convert_examples_to_features(examples,
max_seq_len,
pad_token_id=0,
unk_token_id=1,
mask_padding_with_zero=True,
vocab_list=None
):
features = []
for (ex_index, example) in enumerate(examples):
if ex_index % 5000 == 0:
logger.info("Writing example %d of %d" % (ex_index, len(examples)))
# Tokenize word by word (for NER)
tokens = example.words
# Account for [CLS] and [SEP]
special_tokens_count = 0
if len(tokens) > max_seq_len - special_tokens_count:
tokens = tokens[:(max_seq_len - special_tokens_count)]
input_ids = [vocab_list.index(w) if w in vocab_list else unk_token_id for w in tokens]
# The mask has 1 for real tokens and 0 for padding tokens. Only real
# tokens are attended to.
attention_mask = [1 if mask_padding_with_zero else 0] * len(input_ids)
# Zero-pad up to the sequence length.
padding_length = max_seq_len - len(input_ids)
input_ids = input_ids + ([pad_token_id] * padding_length)
attention_mask = attention_mask + ([0 if mask_padding_with_zero else 1] * padding_length)
assert len(input_ids) == max_seq_len, "Error with input length {} vs {}".format(len(input_ids), max_seq_len)
assert len(attention_mask) == max_seq_len, "Error with attention mask length {} vs {}".format(len(attention_mask), max_seq_len)
label_id_level_1 = int(example.label_level_1)
label_id_level_2 = int(example.label_level_2)
if ex_index < 5:
logger.info("*** Example ***")
logger.info("guid: %s" % example.guid)
logger.info("tokens: %s" % " ".join([str(x) for x in tokens]))
logger.info("input_ids: %s" % " ".join([str(x) for x in input_ids]))
logger.info("attention_mask: %s" % " ".join([str(x) for x in attention_mask]))
logger.info("label_level_1: %s (id = %d)" % (example.label_level_1, label_id_level_1))
logger.info("label_level_2: %s (id = %d)" % (example.label_level_2, label_id_level_2))
features.append(
InputFeatures(input_ids=input_ids,
attention_mask=attention_mask,
label_id_level_1=label_id_level_1,
label_id_level_2=label_id_level_2
))
return features
def load_and_cache_examples(args, mode, vocab_list=None):
processor = processors[args.task](args)
# Load data features from cache or dataset file
cached_features_file = os.path.join(
args.data_dir,
'cached_{}_{}_{}'.format(
mode,
args.task,
args.max_seq_len
)
)
if os.path.exists(cached_features_file):
logger.info("Loading features from cached file %s", cached_features_file)
features = torch.load(cached_features_file)
else:
# Load data features from dataset file
logger.info("Creating features from dataset file at %s", args.data_dir)
if mode == "train":
examples = processor.get_examples("train")
elif mode == "dev":
examples = processor.get_examples("dev")
elif mode == "test":
examples = processor.get_examples("test")
else:
raise Exception("For mode, Only train, dev, test is available")
# Use cross entropy ignore index as padding label id so that only real label ids contribute to the loss later
features = convert_examples_to_features(examples, args.max_seq_len,
vocab_list=vocab_list)
logger.info("Saving features into cached file %s", cached_features_file)
torch.save(features, cached_features_file)
# Convert to Tensors and build dataset
all_input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long)
all_attention_mask = torch.tensor([f.attention_mask for f in features], dtype=torch.long)
all_label_id_level_1s = torch.tensor([f.label_id_level_1 for f in features], dtype=torch.long)
all_label_id_level_2s = torch.tensor([f.label_id_level_2 for f in features], dtype=torch.long)
dataset = TensorDataset(all_input_ids,
all_attention_mask,
all_label_id_level_1s,
all_label_id_level_2s,
)
return dataset
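
# Usage sketch: a minimal illustration of the call pattern above. The attribute
# names on `args` mirror exactly what this module reads (data_dir, task,
# max_seq_len, label_file_level_1/2); the concrete paths and the toy vocabulary
# below are hypothetical placeholders, not part of the original project.
if __name__ == "__main__":
    from argparse import Namespace

    from torch.utils.data import DataLoader

    args = Namespace(
        data_dir="./data",       # expected to contain train.txt / dev.txt / test.txt
        task="daguan",           # key into the processors registry above
        max_seq_len=128,
        label_file_level_1="./data/labels_level_1.txt",   # hypothetical path
        label_file_level_2="./data/labels_level_2.txt",   # hypothetical path
    )
    # index 0 is the pad id, index 1 the unk id (matches the defaults above)
    vocab_list = ["[PAD]", "[UNK]"] + [str(i) for i in range(2, 1000)]
    train_dataset = load_and_cache_examples(args, "train", vocab_list=vocab_list)
    train_loader = DataLoader(train_dataset, batch_size=32, shuffle=True)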
| [
"[email protected]"
] | |
d4da526b1b6a2167f6447cb28620af606f1feff5 | 58f38f1d69d4bfc650ad18e0045c36ae29c9d84a | /Django基础部分代码/chapter03/template_if_demo/template_if_demo/settings.py | 6480e6ae4c0c6464a93e7c33669bcc9dbf62e816 | [] | no_license | zjf201811/DjangoWebProject | 0670c61b89387901089bf67cf2423d9341f69913 | fab15784fb326ba4517951e180418ea54de03afe | refs/heads/master | 2020-04-18T12:03:08.798484 | 2019-05-06T03:59:46 | 2019-05-06T03:59:46 | 167,522,193 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,162 | py | """
Django settings for template_if_demo project.
Generated by 'django-admin startproject' using Django 2.0.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'sz^wv74%ke1n4rtxs)$bxlfu!)q6ra2i63ak$ms-#h8+piw6fz'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'template_if_demo.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'template_if_demo.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
| [
"[email protected]"
] | |
5e03884088e7112a4fe74cab90f05bd21fd61391 | be0f3dfbaa2fa3d8bbe59229aef3212d032e7dd1 | /DaVinciDev_v38r1p1/Phys/StrippingArchive/python/StrippingArchive/Stripping17/StrippingLambdac2PKPiForXSec.py | d220bbc13f2a976b45ccebde6e2a9d69ff4e73dd | [] | no_license | Sally27/backup_cmtuser_full | 34782102ed23c6335c48650a6eaa901137355d00 | 8924bebb935b96d438ce85b384cfc132d9af90f6 | refs/heads/master | 2020-05-21T09:27:04.370765 | 2018-12-12T14:41:07 | 2018-12-12T14:41:07 | 185,989,173 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,182 | py | '''
Lambdac cross-section lines
Adapted to current stripping framework by P. Spradlin.
'''
__author__ = ['Francesca Dordei', 'Francesco Dettori', 'Patrick Spradlin']
__date__ = '2010/07/15'
__version__ = '$Revision: 1.3 $'
__all__ = ( 'StrippingLambdac2PKPiForXSecConf',
'makeLambdac2PKPi',
'default_config' )
from Gaudi.Configuration import *
from StrippingConf.StrippingLine import StrippingLine
from GaudiKernel.SystemOfUnits import MeV, mm, ns
from LHCbKernel.Configuration import *
#from Configurables import FilterDesktop, CombineParticles
from GaudiConfUtils.ConfigurableGenerators import FilterDesktop, CombineParticles
from PhysSelPython.Wrappers import Selection
from StrippingUtils.Utils import LineBuilder
from StandardParticles import StdNoPIDsPions, StdNoPIDsKaons, StdNoPIDsProtons
class StrippingLambdac2PKPiForXSecConf(LineBuilder): # {
__configuration_keys__ = ( 'Daug_All_PT_MIN'
, 'Daug_1of3_PT_MIN'
, 'Daug_P_MIN'
, 'Daug_TRCHI2DOF_MAX'
, 'Daug_BPVIPCHI2_MIN'
, 'Proton_PIDp_MIN'
, 'Pi_PIDK_MAX'
, 'K_PIDK_MIN'
, 'Comb_ADAMASS_WIN'
, 'Comb_ADOCAMAX_MAX'
, 'Lambdac_PT_MIN'
, 'Lambdac_VCHI2VDOF_MAX'
, 'Lambdac_BPVVDCHI2_MIN'
, 'Lambdac_BPVDIRA_MIN'
, 'Lambdac_BPVLTIME_MAX'
, 'Lambdac_BPVLTIME_MIN'
, 'HltFilter'
, 'PrescaleLambdac2PKPi'
, 'PostscaleLambdac2PKPi'
)
def __init__(self, name, config) : # {
LineBuilder.__init__(self, name, config)
lambdac_name = name + 'Lambdac2PKPi'
self.inPions = StdNoPIDsPions
self.inKaons = StdNoPIDsKaons
self.inProtons = StdNoPIDsProtons
self.selLambdac2PKPi = makeLambdac2PKPi( name = lambdac_name
, inputSel = [ self.inPions, self.inKaons, self.inProtons ]
, Daug_All_PT_MIN = config['Daug_All_PT_MIN']
, Daug_1of3_PT_MIN = config['Daug_1of3_PT_MIN']
, Daug_P_MIN = config['Daug_P_MIN']
, Daug_TRCHI2DOF_MAX = config['Daug_TRCHI2DOF_MAX']
, Daug_BPVIPCHI2_MIN = config['Daug_BPVIPCHI2_MIN']
, Proton_PIDp_MIN = config['Proton_PIDp_MIN']
, Pi_PIDK_MAX = config['Pi_PIDK_MAX']
, K_PIDK_MIN = config['K_PIDK_MIN']
, Comb_ADAMASS_WIN = config['Comb_ADAMASS_WIN']
, Comb_ADOCAMAX_MAX = config['Comb_ADOCAMAX_MAX']
, Lambdac_PT_MIN = config['Lambdac_PT_MIN']
, Lambdac_VCHI2VDOF_MAX = config['Lambdac_VCHI2VDOF_MAX']
, Lambdac_BPVVDCHI2_MIN = config['Lambdac_BPVVDCHI2_MIN']
, Lambdac_BPVDIRA_MIN = config['Lambdac_BPVDIRA_MIN']
, Lambdac_BPVLTIME_MAX = config['Lambdac_BPVLTIME_MAX']
, Lambdac_BPVLTIME_MIN = config['Lambdac_BPVLTIME_MIN']
)
self.line_Lambdac2PKPi = StrippingLine( lambdac_name + 'Line',
HLT = config['HltFilter'],
prescale = config['PrescaleLambdac2PKPi'],
postscale = config['PostscaleLambdac2PKPi'],
algos = [ self.selLambdac2PKPi ]
)
self.registerLine(self.line_Lambdac2PKPi)
# }
# }
def makeLambdac2PKPi( name
, inputSel
, Daug_All_PT_MIN
, Daug_1of3_PT_MIN
, Daug_P_MIN
, Daug_TRCHI2DOF_MAX
, Daug_BPVIPCHI2_MIN
, Proton_PIDp_MIN
, Pi_PIDK_MAX
, K_PIDK_MIN
, Comb_ADAMASS_WIN
, Comb_ADOCAMAX_MAX
, Lambdac_PT_MIN
, Lambdac_VCHI2VDOF_MAX
, Lambdac_BPVVDCHI2_MIN
, Lambdac_BPVDIRA_MIN
, Lambdac_BPVLTIME_MAX
, Lambdac_BPVLTIME_MIN
, decDescriptors = [ "[Lambda_c+ -> p+ K- pi+]cc" ]
) : # {
daugCuts = "(PT > %(Daug_All_PT_MIN)s)" \
"& (P > %(Daug_P_MIN)s)" \
"& (TRCHI2DOF < %(Daug_TRCHI2DOF_MAX)s)" \
"& (BPVIPCHI2() > %(Daug_BPVIPCHI2_MIN)s)" % locals()
pCuts = "((PIDp-PIDpi) > %(Proton_PIDp_MIN)s)" % locals()
piCuts = "((PIDK-PIDpi) < %(Pi_PIDK_MAX)s)" % locals()
kCuts = "((PIDK-PIDpi) > %(K_PIDK_MIN)s)" % locals()
combCuts = "(ADAMASS('Lambda_c+') < %(Comb_ADAMASS_WIN)s)" \
"& (AMAXCHILD(PT) > %(Daug_1of3_PT_MIN)s)" \
"& (ADOCAMAX('') < %(Comb_ADOCAMAX_MAX)s)" % locals()
lambdacCuts = "(PT > %(Lambdac_PT_MIN)s)" \
"& (VFASPF(VCHI2/VDOF) < %(Lambdac_VCHI2VDOF_MAX)s)" \
"& (BPVVDCHI2 > %(Lambdac_BPVVDCHI2_MIN)s)" \
"& (BPVDIRA > %(Lambdac_BPVDIRA_MIN)s)" \
"& (BPVLTIME('PropertimeFitter/properTime:PUBLIC') > %(Lambdac_BPVLTIME_MIN)s)" \
"& (BPVLTIME('PropertimeFitter/properTime:PUBLIC') < %(Lambdac_BPVLTIME_MAX)s)" % locals()
_Lambdac = CombineParticles(
DecayDescriptors = decDescriptors
, DaughtersCuts = { "pi+" : daugCuts + '&' + piCuts,
"K+" : daugCuts + '&' + kCuts,
"p+" : daugCuts + '&' + pCuts }
, CombinationCut = combCuts
, MotherCut = lambdacCuts
)
return Selection( name,
Algorithm = _Lambdac,
RequiredSelections = inputSel
)
# }
default_config = { 'Daug_All_PT_MIN' : 400.0 * MeV
, 'Daug_1of3_PT_MIN' : 1200.0 * MeV
, 'Daug_P_MIN' : 3200.0 * MeV
, 'Daug_TRCHI2DOF_MAX' : 10.0
, 'Daug_BPVIPCHI2_MIN' : 0.5
, 'Proton_PIDp_MIN' : 10.0
, 'Pi_PIDK_MAX' : 0.0
, 'K_PIDK_MIN' : 10.0
, 'Comb_ADAMASS_WIN' : 90.0 * MeV
, 'Comb_ADOCAMAX_MAX' : 0.1 * mm
, 'Lambdac_PT_MIN' : 0.0 * MeV
, 'Lambdac_VCHI2VDOF_MAX' : 20.0
, 'Lambdac_BPVVDCHI2_MIN' : 8.0
, 'Lambdac_BPVDIRA_MIN' : 0.9999
, 'Lambdac_BPVLTIME_MAX' : 0.0012 * ns
, 'Lambdac_BPVLTIME_MIN' : 0.0 * ns
, 'HltFilter' : "HLT_PASS_RE('Hlt1MB.*')"
, 'PrescaleLambdac2PKPi' : 1.0
, 'PostscaleLambdac2PKPi' : 1.0
}
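
# Usage sketch (assumes the standard LineBuilder API from StrippingUtils): the
# builder is instantiated from a name plus the configuration dict, and the
# registered StrippingLine objects are retrieved with lines().
#
#   lb = StrippingLambdac2PKPiForXSecConf('Lambdac2PKPiForXSec', default_config)
#   stripping_lines = lb.lines()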
| [
"[email protected]"
] | |
fe464ac5b360f982dd7e65eb0bbded553f4417b3 | 674a48ab817fb841417d72de7197a84b2d2b71b7 | /wav_mixed/wavelet_composite_no_overlap_parallax_dominant.py | 935c279e67758e93f07bf6484059e2f43710e447 | [] | no_license | cornkle/proj_CEH | 0d1556bbb5992e663257841ada333f32e6da3e22 | 790ad1aa7e7a8c6593a21ee53b2c946b2f7a356b | refs/heads/master | 2023-09-01T01:26:58.669089 | 2023-08-23T14:22:47 | 2023-08-23T14:22:47 | 55,054,763 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 10,560 | py | # -*- coding: utf-8 -*-
"""
Created on Thu Aug 4 10:15:40 2016
@author: cornkle
"""
import numpy as np
import xarray as xr
from wavelet import util
from utils import u_arrays as ua
from scipy import ndimage
import matplotlib.pyplot as plt
import matplotlib
from eod import tm_utils
import matplotlib as mpl
import multiprocessing
import pdb
from collections import OrderedDict
import matplotlib.cm as cm
import pickle as pkl
from utils import u_met, u_parallelise, u_gis, u_arrays, constants
matplotlib.rc('xtick', labelsize=10)
matplotlib.rc('ytick', labelsize=10)
def composite():
pool = multiprocessing.Pool(processes=7)
files = ua.locate(".nc", '/users/global/cornkle/MCSfiles/WA15_big_-40_15W-20E_size_zR/') # /WA30/
out = '/users/global/cornkle/papers/wavelet/saves/pandas/'
#files = files[0:1500]
print('Nb files', len(files))
tt = 'WA15'
comp_collect = {}
precip = {}
res = pool.map(file_loop, files)
pool.close()
res = [x for x in res if x is not None]
nb_sys = len(res)
print('Number systems: ', nb_sys)
res = [item for sublist in res for item in sublist] # flatten list of lists
for v in res:
comp_collect[v[2]]={'p': [], 't' : [], 'scale':[], 'hour':[], 'id' : []}
precip[v[2]]=[]
# ret.append((kernel, kernelt, sc, id, dic['time.hour'].values.tolist(),
# clat, clon, lat_min, lat_max, lon_min, lon_max, area,
# bulk_pmax, bulk_pmean, bulk_tmean, bulk_tmean_p, bulk_tmin_p, bulk_g30,
# circle_Tcenter, circle_p, circle_t, circle_valid, circle_sum,
# circle_nz, circle_g30, circle_max, circle_p99, circle_p95, circle_p90))
dic = OrderedDict([('scale', []), ('id' , []), ('hour' , []),
('clat',[]), ('clon',[]),('lat_min',[]), ('lat_max' , []), ('lon_min' , []), ('lon_max' , []), ('area' , []),
('bulk_pmax' , []), ('bulk_pmean' ,[]), ('bulk_tmean',[]), ('bulk_tmean_p',[]), ('bulk_tmin_p',[]), ('bulk_g30',[]),
('circle_pix' , []), ('circle_Tcentre', []), ('circle_p' , []), ('circle_t' , []), ('circle_val' , []), ('circle_sum' , []),
('circle_nz' , []), ('circle_g30' , []), ('circle_max' , []), ('circle_p99' , []), ('circle_p95' , []), ('circle_p90' , []), ('circle_val_all', []), ('circle_pc', [])])
keys = comp_collect.keys()
print(keys)
for v in res:
print(v[2])
comp_collect[v[2]]['p'].append(v[0])
comp_collect[v[2]]['t'].append(v[1])
comp_collect[v[2]]['hour'].append(v[4])
comp_collect[v[2]]['id'].append(v[3])
for cnt, kk in enumerate(dic.keys()):
dic[kk].append(v[cnt+2]) # omit kernel and kernelt
precip[v[2]].extend(v[20])
pkl.dump(dic, open(out+'3dmax_gt15000_lax_nonan_dominant_fulldomain.p','wb'))
#pkl.dump(precip, open(out+'precip_3dmax_gt15000_lax_nonan_dominant.p','wb'))
#pkl.dump(comp_collect, open(out + 'comp_collect_composite_lax_nonan_dominant.p', 'wb'))
def file_loop(fi):
ret = []
print('Doing file: ' + fi)
dic = xr.open_dataset(fi)
id = fi.split('/')[-1]
outt = dic['tc_lag0'].values
outp = dic['p'].values
outpc = dic['pconv'].values
lon = dic['lon'].values
lat = dic['lat'].values
outt[np.isnan(outt)] = 150
outt[outt >= -40] = 150
grad = np.gradient(outt)
outt[outt == 150] = np.nan
outp[np.isnan(outt)] = np.nan
outpc[np.isnan(outt)] = np.nan
area = np.nansum(outt <= -40)
try:
bulk_pmax = np.max(outp[(np.isfinite(outp)) & (np.isfinite(outt))])
except ValueError:
return ret
try:
bulk_pmin = np.min(outp[(np.isfinite(outp)) & (np.isfinite(outt))])
except ValueError:
return ret
if (area * 25 < 15000) or (area * 25 > 800000) or (bulk_pmax > 200) or (bulk_pmin < 0):
print(area * 25)
print('throw out')
return
perc = np.percentile(outt[np.isfinite(outt)], 60) # 60
#perc = -60
clat = np.min(dic.lat.values) + ((np.max(dic.lat.values) - np.min(dic.lat.values)) * 0.5)
clon = np.min(dic.lon.values) + ((np.max(dic.lon.values) - np.min(dic.lon.values)) * 0.5)
# if (clon > 10) or (clon < -10) or (clat < 10):
# return
if (clon > 28) or (clon < -17.2) or (clat < 4.1):
return
lat_min = np.min(dic.lat.values)
lat_max = np.max(dic.lat.values)
lon_min = np.min(dic.lon.values)
lon_max = np.max(dic.lon.values)
bulk_tmean = np.nanmean(outt)
bulk_tmin_p = np.min(outt[(np.isfinite(outp)) & (np.isfinite(outt))])
bulk_tmean_p = np.mean(outt[(np.isfinite(outp)) & (np.isfinite(outt))])
bulk_pmean = np.max(outp[(np.isfinite(outp)) & (np.isfinite(outt))])
bulk_g30 = np.sum(outp[(np.isfinite(outp)) & (np.isfinite(outt))] >= 30)
o2 = outt.copy()
o2[np.isnan(o2)] = perc
nok = np.where(abs(grad[0]) > 80)
d = 2
i = nok[0]
j = nok[1]
for ii, jj in zip(i, j):
kern = o2[ii - d:ii + d + 1, jj - d:jj + d + 1]
o2[ii - d:ii + d + 1, jj - d:jj + d + 1] = ndimage.gaussian_filter(kern, 3, mode='nearest')
wav = util.waveletT(o2, 5)
arr = np.array(wav['scales'], dtype=str)
arrf = np.array(wav['scales'], dtype=float)
scale_ind = range(arr.size)
figure = np.zeros_like(outt)
wll = wav['t']
maxoutt = (
wll == ndimage.maximum_filter(wll, (5, 4, 4), mode='reflect',
cval=np.amax(wll) + 1)) # (np.round(orig / 5)) #(5,4,4)
#maxs = np.zeros_like(wll)
yyy = []
xxx = []
scal = []
for nb in scale_ind[::-1]:
orig = float(arr[nb])
print(np.round(orig))
wl = wll[nb, :, :]
maxout = maxoutt[nb, :, :]
try:
yy, xx = np.where((maxout == 1) & (outt <= -50) & (wl > orig ** .5))
except IndexError:
continue
for y, x in zip(yy, xx):
ss = orig
iscale = (np.ceil(ss / 2. / 5.)).astype(int)
if ss <= 20:
iscale = iscale + 1
ycirc, xcirc = ua.draw_cut_circle(x, y, iscale, outt) # 15km radius in every direction for all scales
figure[ycirc, xcirc] = np.round(orig)
xxx.append(x)
yyy.append(y)
scal.append(orig)
figure[np.isnan(outt)] = 0
figure[np.isnan(outp)] = 0
circle_val_all = np.sum(figure > 0)
xx = []
yy = []
cnt = 0
for y, x, sc in zip(yyy[::-1], xxx[::-1], scal[::-1]):
if figure[y, x] == 0:
continue
if cnt > 0:
bulk_g30 = bulk_g30*0
        # if sc < 150:
# continue
xx.append(x)
yy.append(y)
int_sc = np.round(sc)
radius = sc
iscale = (np.ceil(radius / 2. / 5.)).astype(int)
if int_sc <= 20:
iscale = iscale + 1
ycircf, xcircf = ua.draw_cut_circle(x, y, iscale, outt) # 20km radius
pos = np.where((figure[ycircf, xcircf] == int_sc))
if len(pos[0]) <= 3:
continue
circle_Tcenter = outt[y, x]
t_para = np.nanmean(outt[ycircf[pos], xcircf[pos]])
# if sc < 90:
# km, coords = u_gis.call_parallax_era(int(dic['time.month']), t_para, lon[y,x], lat[y,x], 0, 0)
# lx, ly = km
# lx = int(np.round(lx/5.))
# ly = int(np.round(ly/5.)) # km into pixels
# else:
lx = 0
ly = 0
# print(lx,ly)
# plt.imshow(outt)
# f = plt.figure()
# plt.imshow(figure)
# f = plt.figure()
# plt.imshow(outp)
# outt[ycircf[pos], xcircf[pos]] = 150
# outt[ycircf[pos]-ly, xcircf[pos]-lx] = 300
# f = plt.figure()
# plt.imshow(outt)
r = 20
kernel = tm_utils.cut_kernel(outp, x-lx, y-ly, r)
kernelt = tm_utils.cut_kernel(outt, x, y, r)
if kernel.shape != (r * 2 + 1, r * 2 + 1):
kernel = np.zeros((41, 41)) + np.nan
if kernelt.shape != (r * 2 + 1, r * 2 + 1):
kernelt = np.zeros((41, 41)) + np.nan
circle_p = outp[ycircf[pos]-ly, xcircf[pos]-lx]
#outp[ycircf[pos] - ly, xcircf[pos] - lx] = np.nan
circle_pc = outpc[ycircf[pos]-ly, xcircf[pos]-lx]
#outpc[ycircf[pos] - ly, xcircf[pos] - lx] = np.nan
circle_t = outt[ycircf[pos], xcircf[pos]]
circle_valid = np.sum(np.isfinite(circle_p))
#
#
if (int_sc >=90):
# outp[ycircf[pos] - ly, xcircf[pos] - lx] = 1000
# outt[ycircf[pos], xcircf[pos]] = 1000
ppos = np.where(outp>=30)
outt[np.isnan(outt)] = -40
# f = plt.figure()
# plt.imshow(outp, cmap='jet', origin='lower')
# f = plt.figure()
# plt.pcolormesh(outt, cmap='jet')
f = plt.figure()
plt.imshow(outt, cmap='jet', origin='lower')
plt.contour(outp, cmap='viridis', vmin=20)
figure[figure < 15] = np.nan
            plt.contourf(figure, cmap='Reds', vmin=9)
            plt.title('dominant')  # 'title' is not a valid contourf kwarg
plt.plot(ppos[1], ppos[0], 'ro')
f = plt.figure()
plt.imshow(figure, cmap='jet', origin='lower')
if ((circle_valid) < 2):
continue
circle_sum = np.nansum(circle_p)
circle_nz = np.nansum(circle_p > 0.1)
circle_g30 = np.nansum(circle_p >= 30)
try:
circle_max = np.nanmax(circle_p)
except ValueError:
circle_max = np.nan
try:
circle_p99 = np.percentile(circle_p[circle_p >= 0.1], 99)
except IndexError:
circle_p99 = np.nan
try:
circle_p95 = np.percentile(circle_p[circle_p >= 0.1], 95)
except IndexError:
circle_p95 = np.nan
try:
circle_p90 = np.percentile(circle_p[circle_p >= 0.1], 90)
except IndexError:
circle_p90 = np.nan
#maxs[posi, y, x] = 1
cnt = cnt+1
ret.append((kernel, kernelt, int_sc, id, dic['time.hour'].values.tolist(),
clat, clon, lat_min, lat_max, lon_min, lon_max, area,
bulk_pmax, bulk_pmean, bulk_tmean, bulk_tmean_p, bulk_tmin_p, bulk_g30,
len(ycircf), circle_Tcenter, circle_p, circle_t, circle_valid, circle_sum,
circle_nz, circle_g30, circle_max, circle_p99, circle_p95, circle_p90, circle_val_all, circle_pc))
return ret
if __name__ == "__main__":
composite()
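
# Note on the control flow above: composite() maps file_loop() over the MCS
# NetCDF files with a 7-process pool, flattens the per-scale tuples returned
# by the workers, and pickles the bulk/circle statistics as a pandas-style
# dict. Run as a plain script:
#   python wavelet_composite_no_overlap_parallax_dominant.py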
| [
"[email protected]"
] | |
b6b66a4cef930e66539a9b62c88a7eb8347904bc | 55bcc8b6d5eb2878405f71ad5559cc3c80f5edc9 | /nlpAnylise/nlpctrTwo.py | 30735dfca102be6fd889b08ca03eb4fb2bde0a4d | [
"Apache-2.0"
] | permissive | chenwangwww/ppython | 2989026d5ef6d8a733e8b62ef6d4d7bcd2783b38 | 13a2f1193714133701743bfdf1a8add61a29dd4c | refs/heads/master | 2023-05-03T01:15:01.828534 | 2021-05-13T06:13:55 | 2021-05-13T06:13:55 | 343,254,494 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,587 | py | #主语
# 谓语
# 宾语
# 主语的定语数组(定语的状语+介宾关系数组)[attStru, ...]
# 宾语的定语数组(定语的状语+介宾关系数组)
# 谓语的状语数组(状语+介宾关系数组) [{'adv': 谓语的状语, 'pob': 介宾关系},...]
# 并列谓语数组(并列谓语的宾语、宾语的定语数组、宾语的定语的状语+介宾关系数组)
# [[并列谓语, 宾语, [attStru, ...]], ...]
# 并列主语数组(并列主语的定语数组、定语的状语+介宾关系数组)
# [[并列主语, [attStru, ...]], ...]
# 并列宾语数组(并列宾语的定语数组、定语的状语+介宾关系数组)
# ---------------------------------------------------------------
# attStru = {'attM': 定语, 'adv': 定语的状语, 'pob': 介宾关系, 'att': 定语的定语}
# masterStru = {'master': 主语或宾语, 'rel': [attStru, ...]}
# advStru = {'adv': 状语, 'pob': 介宾关系}
# predStru = {'pred':谓语, 'objs': [masterStru, ...], 'advs': [advStru, ...]}
# 并列主语数组
# [masterStru, ...]
# 并列谓语数组
# [predStru, ...]
# ---------------------------------------------------------------
from ltp import LTP
ltp = LTP()
class NlpCtr(object):
def __init__(self):
self.seg = None
def trans_result(self, depArr):
tempdepArr = depArr[0]
tempArr = []
for item in tempdepArr:
dic = {
'dep': item[0],
'gov': item[1],
'type': item[2],
}
tempArr.append(dic)
return tempArr
def getHED(self, words):
root = None
for word in words:
if word['gov'] == 0 and word['type'] == 'HED':
root = word['dep']
return root
def getWord(self, words, GOV, wType):
res = None
for word in words:
if word['type'] == wType and word['gov'] == GOV:
res = word['dep']
return res
def getWords(self, words, GOV, wType):
res = []
for word in words:
if word['type'] == wType and word['gov'] == GOV:
res.append(word['dep'])
res = res if len(res) > 0 else None
return res
def getSubject(self, words, HED, ADVS):
subject = self.getWord(words, HED, 'SBV')
if subject is None and ADVS is not None:
for adv in ADVS:
if self.indexToWord(adv) == '被':
subject = self.getWord(words, adv, 'POB')
return subject
def getObject(self, words, HED):
vob = self.getWord(words, HED, 'VOB')
fob = self.getWord(words, HED, 'FOB')
return self.get_not_none([vob, fob])
    # Get the attribute (ATT) dependents of a head, each paired with its adverbial and prep-object relation
def getATTInfo(self, words, GOV):
atts = self.getWords(words, GOV, 'ATT')
res = []
if atts is not None:
for a in atts:
adv = self.getWord(words, a, 'ADV')
pob = self.getWord(words, adv, 'POB')
res.append((a, (adv, pob)))
res = res if len(res) > 0 else None
return res
    # Get the coordinated (COO) subjects or objects, each with its own attribute info
def getCOOInfo(self, words, GOV):
res = []
coos = self.getWords(words, GOV, 'COO')
if coos is not None:
for coo in coos:
atts = self.getATTInfo(words, coo)
res.append((coo, atts))
res = res if len(res) > 0 else None
return res
    # Build the array of (adverbial, prep-object relation) pairs
def getADVPOBS(self, words, ADVS):
res = []
if ADVS is not None:
for adv in ADVS:
pob = self.getWord(words, adv, 'POB')
res.append((adv, pob))
res = res if len(res) > 0 else None
return res
def get_not_none(self, alist):
for a in alist:
if a is not None:
return a
return None
def recuTran(self, source, target):
t = type(source)
if t == list or t == tuple:
for a in source:
subt = type(a)
if subt == list or subt == tuple:
target.append([])
self.recuTran(a, target[-1])
else:
target.append(self.indexToWord(a))
def indexToWord(self, index):
res = None
if index and index <= len(self.seg[0]):
res = self.seg[0][index - 1]
return res
def showWords(self, dic):
items = dic.items()
target = {}
for item in items:
t = type(item[1])
if t == list or t == tuple:
sub = []
self.recuTran(item[1], sub)
target.update({item[0]: sub})
elif item[1] is not None:
sub = self.indexToWord(item[1])
target.update({item[0]: sub})
print(dic)
print(target)
def abstractSentence(self, sentence):
dic = None
self.seg, hidden = ltp.seg([sentence])
dep = ltp.dep(hidden)
pos = ltp.pos(hidden)
words = self.trans_result(dep)
if len(words) > 0:
hed = self.getHED(words)
if hed is not None:
                coos = self.getWords(words, hed, 'COO')      # coordinated predicates
                advs = self.getWords(words, hed, 'ADV')      # adverbials of the predicate
                aps = self.getADVPOBS(words, advs)           # predicate adverbials + prep-object relations
                subject = self.getSubject(words, hed, advs)  # subject
                object = self.getObject(words, hed)          # object
                attsS = self.getATTInfo(words, subject)      # attributes of the subject
                attsO = self.getATTInfo(words, object)       # attributes of the object
                coosS = self.getCOOInfo(words, subject)      # coordinated subjects
                coosO = self.getCOOInfo(words, object)       # coordinated objects
dic = {
'subject': subject,
'object': object,
'pred': hed,
'coos': coos,
'advs': advs,
'aps': aps,
'attsS': attsS,
'attsO': attsO,
'coosS': coosS,
'coosO': coosO
}
self.showWords(dic)
return dic
nlpCtr = NlpCtr()
# nlpCtr.abstractSentence('他因为酒驾被交警拘留了。')
# nlpCtr.abstractSentence('学术委员会的每个成员都是博士并且是教授。')
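# Each abstractSentence() call below prints two dicts via showWords(): the raw
# structure of LTP word indices, then the same structure with every index
# resolved to its surface word through seg[0].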
nlpCtr.abstractSentence('小明、小霞,和小刘是三兄弟。')
| [
"[email protected]"
] | |
3718b80b3f210bd3e22c1c5a08f559e56bad00df | ac5e52a3fc52dde58d208746cddabef2e378119e | /exps-sblp/sblp_ut=3.5_rd=0.5_rw=0.06_rn=4_u=0.075-0.325_p=harmonic-2/sched=RUN_trial=65/sched.py | c840f1d8aaa51cee48681dfe3e4420cbfb56d305 | [] | no_license | ricardobtxr/experiment-scripts | 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | 7bcebff7ac2f2822423f211f1162cd017a18babb | refs/heads/master | 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 401 | py | -S 0 -X RUN -Q 0 -L 4 86 250
-S 0 -X RUN -Q 0 -L 4 83 400
-S 0 -X RUN -Q 0 -L 4 72 400
-S 1 -X RUN -Q 1 -L 3 60 300
-S 1 -X RUN -Q 1 -L 3 58 175
-S 2 -X RUN -Q 2 -L 2 55 200
-S 2 -X RUN -Q 2 -L 2 48 200
-S 3 -X RUN -Q 3 -L 1 37 125
-S 3 -X RUN -Q 3 -L 1 31 125
-S 4 30 125
-S 4 26 175
-S 5 25 250
-S 4 24 200
-S 5 24 250
-S 4 23 100
-S 4 12 100
-S 4 12 100
| [
"[email protected]"
] | |
49ddd274a9c8562c1cb9e1fedb9abbbc9ea96f2f | 649bd422025e421d86025743eac324c9b882a2e8 | /exam/1_three-dimensional_atomic_system/dump/phasetrans/temp190_1000.py | 7fa715a9823977e00b8a404e111fa2054431a5e0 | [] | no_license | scheuclu/atom_class | 36ddee1f6a5995872e858add151c5942c109847c | 0c9a8c63d9b38898c1869fe8983126cef17662cd | refs/heads/master | 2021-01-21T10:52:28.448221 | 2017-03-07T23:04:41 | 2017-03-07T23:04:41 | 83,489,471 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 68,714 | py | ITEM: TIMESTEP
1000
ITEM: NUMBER OF ATOMS
2048
ITEM: BOX BOUNDS pp pp pp
-1.6496218340970648e+02 2.1216218340975081e+02
-1.6496218340970648e+02 2.1216218340975081e+02
-1.6496218340970648e+02 2.1216218340975081e+02
ITEM: ATOMS id type xs ys zs
1671 1 0.804029 0.0718461 0.000845276
868 1 0.414143 0.0913991 0.00812121
1314 1 0.672389 0.154284 0.00912022
116 1 0.834906 0.241537 0.0180341
555 1 0.603326 0.258952 0.0154642
976 1 0.207274 0.343015 0.0146075
1800 1 0.64538 0.373703 0.0162016
1388 1 0.178024 0.479279 0.0161467
1896 1 0.0337624 0.495173 0.00994893
1433 1 0.369529 0.496983 0.000144061
1589 1 0.597483 0.494957 0.0026028
1788 1 0.474907 0.00681992 0.0241883
1347 1 0.796792 0.013096 0.0302368
476 1 0.894166 0.0182732 0.019627
756 1 0.416734 0.094628 0.0196988
581 1 0.429506 0.117272 0.0358522
180 1 0.478193 0.122779 0.0358237
1025 1 0.766023 0.146788 0.0360193
488 1 0.215102 0.169437 0.0218118
1469 1 0.394311 0.165841 0.0304703
301 1 0.297955 0.193399 0.0221655
1693 1 0.558997 0.203332 0.0221559
368 1 0.0728511 0.251628 0.0263535
63 1 0.621598 0.299788 0.0301145
194 1 0.237146 0.357353 0.0372387
420 1 0.808712 0.413128 0.019348
602 1 0.923203 0.427134 0.0246589
1625 1 0.331891 0.471118 0.0382476
353 1 0.602032 0.00894157 0.0496826
1319 1 0.00931362 0.0586303 0.0470602
345 1 0.661747 0.0644285 0.056106
1925 1 0.706808 0.0628472 0.0558509
306 1 0.216145 0.096759 0.0448836
506 1 0.347588 0.111329 0.0531847
55 1 0.620537 0.148392 0.0555542
433 1 0.663724 0.153245 0.0416431
1509 1 0.144519 0.1736 0.0399161
1623 1 0.219833 0.19094 0.0427601
289 1 0.891585 0.180252 0.0571782
1882 1 0.536507 0.217334 0.0558339
501 1 0.672389 0.214844 0.0417885
1249 1 0.990236 0.277668 0.0503066
371 1 0.805404 0.346132 0.0539383
681 1 0.31276 0.376955 0.0493869
1408 1 0.655642 0.368943 0.0504174
1554 1 0.207497 0.403694 0.0551626
1155 1 0.237705 0.398109 0.0574975
254 1 0.822732 0.401829 0.0493045
1901 1 0.320656 0.479858 0.0388699
342 1 0.152422 0.487351 0.0430879
1624 1 0.953837 0.0620117 0.0671058
1737 1 0.00136352 0.0916694 0.065143
775 1 0.514337 0.0834875 0.0762591
1644 1 0.61687 0.101179 0.0664493
1418 1 0.0605815 0.129515 0.067428
661 1 0.47286 0.138684 0.0617358
308 1 0.319167 0.183112 0.0628926
1095 1 0.702871 0.211561 0.0594946
550 1 0.843865 0.248177 0.0754526
918 1 0.856299 0.23295 0.0613675
1798 1 0.413709 0.258842 0.069341
1987 1 0.833073 0.278338 0.0649368
325 1 0.746236 0.291929 0.0725846
108 1 0.426731 0.346777 0.0732178
2026 1 0.689114 0.351584 0.0586268
1500 1 0.653811 0.395923 0.0602897
1421 1 0.999104 0.415863 0.0598078
1464 1 0.320508 0.447587 0.0717133
179 1 0.293263 0.480415 0.0699567
625 1 0.303038 0.0317137 0.0876646
1445 1 0.585512 0.0360542 0.0920485
503 1 0.456168 0.0560239 0.0875236
1079 1 0.0260499 0.124333 0.0842391
1459 1 0.848358 0.13359 0.0835318
2025 1 0.00578166 0.162822 0.0801624
819 1 0.0341227 0.155663 0.0929248
1575 1 0.90616 0.155368 0.0865113
366 1 0.290909 0.186589 0.0772939
1778 1 0.462365 0.209427 0.0887383
979 1 0.881848 0.19847 0.094725
1273 1 0.0415696 0.215175 0.0947488
1614 1 0.374927 0.239492 0.0844021
619 1 0.483865 0.252719 0.082512
1638 1 0.143091 0.285118 0.0830587
1884 1 0.427274 0.284433 0.0843827
375 1 0.355388 0.302473 0.0911092
1977 1 0.538087 0.354986 0.0800995
815 1 0.529835 0.356324 0.095602
384 1 0.782834 0.350757 0.0825531
450 1 0.927045 0.361082 0.085974
1586 1 0.444064 0.380312 0.0794632
582 1 0.73099 0.382742 0.0777546
689 1 0.181078 0.495027 0.0868729
1238 1 0.603113 0.0268362 0.110658
793 1 0.749611 0.0359153 0.115019
1752 1 0.224348 0.0519941 0.113934
1338 1 0.28556 0.100374 0.0967109
817 1 0.0843854 0.129167 0.112526
1640 1 0.441379 0.128599 0.102825
649 1 0.660898 0.158732 0.106754
1452 1 0.743523 0.180741 0.105148
814 1 0.0878331 0.23928 0.103302
573 1 0.675581 0.340355 0.113009
453 1 0.686675 0.344481 0.105168
1296 1 0.131982 0.351882 0.107073
332 1 0.86214 0.35383 0.113522
512 1 0.530599 0.402596 0.105793
1208 1 0.620241 0.417851 0.103842
1617 1 0.686918 0.427153 0.110576
1199 1 0.324488 0.45449 0.102172
454 1 0.570642 0.453772 0.108973
1163 1 0.37181 0.462954 0.103973
838 1 0.512079 0.473589 0.0983676
917 1 0.0306067 0.0501905 0.127366
722 1 0.195858 0.0803343 0.130023
455 1 0.46331 0.0801071 0.122417
723 1 0.0279089 0.191494 0.120796
1804 1 0.10333 0.188887 0.129241
895 1 0.225926 0.324659 0.127271
347 1 0.0974918 0.362543 0.119289
1330 1 0.692166 0.450302 0.127368
1824 1 0.0940844 0.463058 0.11661
25 1 0.539411 0.488804 0.123087
1398 1 0.203825 0.00743334 0.146529
1042 1 0.0196976 0.0521705 0.14829
341 1 0.926628 0.0468186 0.138645
1864 1 0.13015 0.0642194 0.149848
894 1 0.756625 0.0612473 0.149797
580 1 0.0749025 0.0974133 0.151051
1805 1 0.560954 0.112647 0.14099
1688 1 0.14761 0.128947 0.152224
1587 1 0.0774307 0.137871 0.149136
1507 1 0.988228 0.152503 0.152995
1663 1 0.0215158 0.162804 0.151076
547 1 0.423987 0.185691 0.142486
1818 1 0.842301 0.176379 0.140994
1576 1 0.182485 0.243408 0.138895
707 1 0.329685 0.231465 0.134905
586 1 0.4354 0.302258 0.140573
714 1 0.519681 0.303087 0.15287
1058 1 0.50405 0.317136 0.153035
767 1 0.460288 0.327712 0.152389
1758 1 0.577945 0.353023 0.146253
2043 1 0.456284 0.42987 0.135595
536 1 0.967799 0.453861 0.150399
1833 1 0.0902472 0.46476 0.14395
1202 1 0.683716 0.489728 0.150293
534 1 0.53462 0.0126528 0.167619
694 1 0.637766 0.00450911 0.163038
732 1 0.957019 0.00367004 0.155054
1562 1 0.711148 0.0471842 0.163848
1170 1 0.315707 0.076048 0.156274
1045 1 0.956855 0.106158 0.16685
773 1 0.819175 0.134539 0.163521
1590 1 0.660986 0.164534 0.164112
1553 1 0.421763 0.209975 0.162403
1440 1 0.0120613 0.225675 0.170521
1890 1 0.0294416 0.239469 0.170134
1403 1 0.142946 0.247121 0.158513
545 1 0.78413 0.249482 0.155009
1670 1 0.500298 0.260728 0.170143
2029 1 0.430058 0.269932 0.164427
46 1 0.118384 0.320131 0.156019
443 1 0.253668 0.384797 0.171337
83 1 0.917046 0.439772 0.169376
1673 1 0.944999 0.437076 0.163723
10 1 0.230155 0.443107 0.170993
239 1 0.286525 0.460894 0.169932
1490 1 0.897952 0.443682 0.170141
100 1 0.656436 0.496492 0.168905
771 1 0.0166725 0.0233854 0.186174
185 1 0.61175 0.059448 0.180652
360 1 0.452695 0.100407 0.175194
1228 1 0.0416855 0.127965 0.179898
20 1 0.829449 0.12042 0.181148
350 1 0.622964 0.234995 0.181854
379 1 0.77612 0.265825 0.188649
1094 1 0.0662626 0.296374 0.18892
1774 1 0.69177 0.30458 0.177841
637 1 0.83608 0.340282 0.186494
1354 1 0.366137 0.363658 0.190289
665 1 0.954305 0.351999 0.1731
1527 1 0.207112 0.384787 0.175222
1779 1 0.467023 0.450299 0.190464
1591 1 0.546183 0.0781162 0.196871
926 1 0.67329 0.0928283 0.210424
418 1 0.0125737 0.119002 0.195877
1088 1 0.428136 0.117469 0.198451
396 1 0.936126 0.129496 0.20031
909 1 0.156845 0.1517 0.203966
578 1 0.82002 0.165617 0.206867
1789 1 0.90725 0.167428 0.210529
1087 1 0.126427 0.187564 0.192504
2044 1 0.279816 0.173211 0.198075
1434 1 0.738868 0.185257 0.204864
1110 1 0.854475 0.212206 0.194002
1912 1 0.244431 0.241413 0.197625
127 1 0.33583 0.29445 0.201284
1026 1 0.763922 0.302463 0.204261
339 1 0.334062 0.312849 0.201554
583 1 0.0920881 0.332246 0.197697
1986 1 0.370076 0.345979 0.194301
1320 1 0.33327 0.369977 0.201792
171 1 0.584854 0.379007 0.201489
1981 1 0.648267 0.470849 0.195833
215 1 0.867159 0.000246854 0.225472
1979 1 0.717816 0.0277186 0.218078
462 1 0.00180441 0.0645926 0.21764
148 1 0.0813206 0.0843945 0.229736
1468 1 0.60394 0.162513 0.212363
640 1 0.521827 0.178934 0.21853
449 1 0.536955 0.209357 0.223164
1736 1 0.583936 0.221709 0.222278
518 1 0.363491 0.235753 0.218311
576 1 0.468414 0.268081 0.223537
1853 1 0.709638 0.277091 0.228214
351 1 0.105427 0.290287 0.215719
1182 1 0.444591 0.301502 0.226179
1534 1 0.701766 0.29015 0.212847
202 1 0.165823 0.322395 0.223621
1422 1 0.276696 0.329799 0.214976
1608 1 0.413691 0.455862 0.212106
1213 1 0.650837 0.451454 0.223416
1964 1 0.675054 0.455439 0.227495
989 1 0.941687 0.467569 0.223624
696 1 0.594629 0.488277 0.211848
170 1 0.952631 0.482467 0.216182
407 1 0.972684 0.480898 0.214615
727 1 0.114823 0.014569 0.24667
798 1 0.249679 0.0100162 0.24912
122 1 0.509857 0.00624149 0.241143
1216 1 0.727016 0.0135462 0.241642
1814 1 0.462523 0.0322586 0.240265
559 1 0.136253 0.0513418 0.237389
729 1 0.194223 0.0849925 0.245841
319 1 0.254631 0.108784 0.240599
934 1 0.583881 0.101283 0.235006
1859 1 0.951897 0.123782 0.240532
344 1 0.45748 0.134915 0.24184
818 1 0.874266 0.252169 0.247826
1573 1 0.0568778 0.277899 0.234104
872 1 0.157443 0.289873 0.239003
1038 1 0.797864 0.302107 0.238667
752 1 0.699858 0.35366 0.23488
942 1 0.459341 0.389199 0.242879
1888 1 0.151113 0.489738 0.248973
321 1 0.187517 0.00341351 0.258789
1871 1 0.062083 0.111816 0.268324
1067 1 0.503128 0.0970379 0.252111
1242 1 0.753727 0.132545 0.251292
1946 1 0.232825 0.228367 0.266829
847 1 0.268223 0.255591 0.262454
1970 1 0.515879 0.286522 0.253811
386 1 0.740257 0.350424 0.252989
39 1 0.203854 0.389455 0.257252
1944 1 0.658788 0.395139 0.262211
1784 1 0.493106 0.449771 0.264704
886 1 0.0373391 0.474308 0.253534
1687 1 0.322584 0.481125 0.264595
1297 1 0.343799 0.486823 0.261627
1374 1 0.585403 0.498968 0.256041
1725 1 0.633869 0.497554 0.263899
184 1 0.280502 0.011225 0.285334
1816 1 0.887459 0.0344887 0.284764
676 1 0.0965683 0.0565536 0.270886
666 1 0.51116 0.0602625 0.280764
1030 1 0.862244 0.0912005 0.274743
385 1 0.0966444 0.152564 0.273566
1291 1 0.254212 0.143785 0.278671
1135 1 0.374239 0.149315 0.275788
1356 1 0.0875628 0.198535 0.276051
1201 1 0.899717 0.207249 0.277858
1435 1 0.77462 0.303892 0.274156
370 1 0.794977 0.304883 0.279694
1905 1 0.0640877 0.380002 0.276492
169 1 0.625269 0.398495 0.273464
82 1 0.311618 0.414358 0.269566
824 1 0.134363 0.429226 0.281622
168 1 0.44665 0.423338 0.26927
1385 1 0.732835 0.426818 0.285888
1545 1 0.860422 0.445491 0.278464
1294 1 0.816237 0.475655 0.286575
616 1 0.544238 0.498535 0.283028
1068 1 0.222966 0.00305304 0.294218
1341 1 0.322362 0.0278706 0.300313
1690 1 0.627023 0.0360629 0.305498
285 1 0.982893 0.0557448 0.303842
1016 1 0.269321 0.0859393 0.297147
425 1 0.467158 0.0785116 0.298154
451 1 0.991381 0.106206 0.30197
1753 1 0.71782 0.146877 0.296727
1329 1 0.674391 0.192797 0.299491
827 1 0.313792 0.229129 0.306572
897 1 0.721816 0.21997 0.307628
1173 1 0.632028 0.252629 0.294958
1180 1 0.653462 0.25226 0.30123
1934 1 0.450352 0.290251 0.289693
1471 1 0.21755 0.343623 0.301593
1009 1 0.951593 0.330936 0.301612
1692 1 0.0746315 0.356858 0.297411
1062 1 0.797477 0.352769 0.297502
932 1 0.359957 0.406687 0.294671
1292 1 0.45864 0.483717 0.297911
992 1 0.525549 0.0176919 0.317628
1552 1 0.634034 0.0983403 0.322698
2014 1 0.617276 0.115715 0.326057
508 1 0.815391 0.122336 0.318818
878 1 0.246895 0.210176 0.324836
588 1 0.417898 0.198426 0.311593
1289 1 0.317931 0.268235 0.325239
1920 1 0.53858 0.264096 0.317648
87 1 0.671652 0.26359 0.319902
796 1 0.0255931 0.280774 0.312637
1676 1 0.284889 0.300821 0.310515
1313 1 0.400724 0.337637 0.316705
263 1 0.929984 0.341386 0.32447
1803 1 0.932972 0.345406 0.31022
411 1 0.116647 0.388859 0.320784
1352 1 0.0856578 0.404563 0.320713
904 1 0.185091 0.419679 0.322899
34 1 0.113244 0.451354 0.323036
540 1 0.556469 0.0335359 0.340699
997 1 0.246658 0.0412295 0.338831
1484 1 0.352386 0.106234 0.344154
207 1 0.459656 0.146214 0.333097
787 1 0.451907 0.180943 0.331305
734 1 0.935631 0.187036 0.333487
1011 1 0.145403 0.219559 0.337559
1031 1 0.793831 0.218077 0.342973
846 1 0.940389 0.237919 0.331526
839 1 0.144077 0.261458 0.343394
1358 1 0.256814 0.283572 0.329324
484 1 0.747243 0.294017 0.331989
402 1 0.952848 0.297352 0.343199
1127 1 0.328142 0.321288 0.338843
1165 1 0.849506 0.322491 0.340105
2024 1 0.272992 0.35555 0.34197
193 1 0.289723 0.361819 0.329371
954 1 0.36652 0.376462 0.328419
1846 1 0.730236 0.368222 0.337306
110 1 0.504631 0.448556 0.343238
1023 1 0.760232 0.460213 0.344661
876 1 0.880076 0.466819 0.332882
502 1 0.927507 0.0364766 0.358712
1373 1 0.223546 0.0639239 0.348855
1636 1 0.229585 0.0609999 0.364185
1492 1 0.246853 0.0624698 0.361046
290 1 0.2867 0.12986 0.364311
157 1 0.76796 0.147287 0.351338
1965 1 0.303757 0.164523 0.359293
526 1 0.866552 0.188853 0.348657
1710 1 0.488765 0.216137 0.348727
1390 1 0.779458 0.233946 0.361866
13 1 0.167551 0.267995 0.353267
251 1 0.321589 0.26081 0.352452
123 1 0.334771 0.257057 0.348493
829 1 0.405786 0.26914 0.352153
1396 1 0.323428 0.270829 0.348231
1499 1 0.759206 0.277531 0.360324
783 1 0.465364 0.319835 0.347085
1012 1 0.012923 0.352599 0.35693
836 1 0.644086 0.383776 0.364051
448 1 0.881896 0.445539 0.348371
1898 1 0.5424 0.480304 0.356098
685 1 0.416416 0.493799 0.35385
391 1 0.468251 0.485418 0.364549
956 1 0.0337593 0.000809982 0.370705
1271 1 0.678057 0.0352421 0.383455
1060 1 0.284218 0.0602747 0.380238
865 1 0.211037 0.0834487 0.373797
21 1 0.61144 0.0871188 0.380746
1634 1 0.281347 0.210919 0.38402
1255 1 0.682145 0.220582 0.366881
1969 1 0.112811 0.244918 0.375777
1470 1 0.659876 0.25082 0.38218
1252 1 0.805043 0.315712 0.367302
623 1 0.0363852 0.340313 0.376068
1962 1 0.781815 0.375402 0.370042
1461 1 0.828232 0.398569 0.370703
1161 1 0.159946 0.459348 0.365843
422 1 0.672494 0.453316 0.367609
223 1 0.721349 0.451721 0.376748
958 1 0.170215 0.464347 0.368887
1577 1 0.269455 0.496892 0.372136
232 1 0.600335 0.49879 0.365639
167 1 0.404018 0.0159627 0.395083
1504 1 0.172767 0.0496486 0.386188
590 1 0.829488 0.095191 0.385801
1592 1 0.898757 0.0977343 0.391953
1662 1 0.255267 0.172661 0.397597
142 1 0.928039 0.172002 0.403832
2038 1 0.414849 0.177097 0.390772
175 1 0.225215 0.196942 0.396694
1224 1 0.103342 0.267977 0.385062
1048 1 0.805895 0.301014 0.395769
1607 1 0.0258551 0.322278 0.400982
440 1 0.121959 0.380192 0.39185
1724 1 0.503398 0.399194 0.397936
1226 1 0.817047 0.391403 0.402252
577 1 0.983607 0.386826 0.38858
731 1 0.344356 0.433973 0.401134
428 1 0.611228 0.446715 0.398696
1360 1 0.785545 0.456358 0.388747
647 1 0.575916 0.469211 0.39627
984 1 0.265881 0.494627 0.389655
1364 1 0.614695 0.483333 0.388775
1893 1 0.811941 0.485525 0.394949
1787 1 0.938541 0.482231 0.389823
77 1 0.827218 0.0877816 0.416981
1206 1 0.574198 0.123529 0.415176
270 1 0.510452 0.167747 0.422889
60 1 0.205825 0.17317 0.417196
61 1 0.594483 0.203241 0.417987
1423 1 0.739795 0.1949 0.405835
1 1 0.0196597 0.212383 0.405558
1389 1 0.843904 0.21874 0.409132
1867 1 0.774987 0.268629 0.409125
8 1 0.806222 0.276259 0.412189
999 1 0.193681 0.338762 0.407792
322 1 0.905774 0.344927 0.42049
1914 1 0.455059 0.349657 0.406052
2002 1 0.572246 0.357645 0.406294
1621 1 0.787569 0.400518 0.406254
1770 1 0.351977 0.439283 0.414093
630 1 0.48727 0.205889 0.435091
149 1 0.644183 0.208652 0.425934
2021 1 0.133206 0.256951 0.425688
1766 1 0.935215 0.279146 0.42367
471 1 0.377935 0.322151 0.439331
1427 1 0.0391758 0.348115 0.439028
1721 1 0.475402 0.391901 0.429733
1796 1 0.940631 0.414758 0.429408
1503 1 0.867083 0.45239 0.428126
1428 1 0.16659 0.0525806 0.459705
658 1 0.855196 0.0955 0.448147
206 1 0.16338 0.111 0.443838
479 1 0.0620495 0.131088 0.445362
937 1 0.220637 0.125951 0.454756
1287 1 0.229688 0.173412 0.448103
1958 1 0.486326 0.210938 0.444693
29 1 0.51934 0.20897 0.443896
124 1 0.762186 0.200532 0.451619
1103 1 0.681318 0.224389 0.445162
1217 1 0.832045 0.244731 0.44543
244 1 0.515234 0.26002 0.460998
1655 1 0.264712 0.26956 0.4531
1187 1 0.0666166 0.300444 0.460969
1230 1 0.89389 0.34492 0.452658
799 1 0.426821 0.355316 0.448068
760 1 0.506149 0.398504 0.442487
1227 1 0.307473 0.410258 0.449315
1620 1 0.677669 0.450349 0.442809
1205 1 0.567166 0.0663736 0.479767
1416 1 0.409057 0.140866 0.463989
472 1 0.574657 0.144382 0.468561
957 1 0.171189 0.169193 0.465958
1956 1 0.473214 0.157487 0.477856
492 1 0.914453 0.161734 0.462015
750 1 0.0757083 0.207989 0.475973
1801 1 0.457244 0.208195 0.466966
1874 1 0.76474 0.198311 0.471813
240 1 0.768462 0.29842 0.468049
1911 1 0.79889 0.310503 0.471726
1467 1 0.144521 0.337019 0.47561
1700 1 0.371507 0.349057 0.478613
281 1 0.690454 0.359836 0.468896
1376 1 0.348925 0.38411 0.467845
1856 1 0.617138 0.38511 0.475668
211 1 0.8606 0.0508733 0.480936
150 1 0.799176 0.103897 0.494548
1769 1 0.11276 0.186374 0.483606
925 1 0.166224 0.182602 0.485552
730 1 0.429453 0.178605 0.490763
927 1 0.594823 0.197424 0.48104
687 1 0.7609 0.206888 0.49322
390 1 0.66112 0.224012 0.485289
1628 1 0.454324 0.245052 0.48744
2010 1 0.334167 0.253954 0.482093
444 1 0.0629633 0.284505 0.486221
869 1 0.283686 0.371371 0.499129
1192 1 0.653962 0.3814 0.497091
1714 1 0.404396 0.391186 0.492758
432 1 0.470553 0.418586 0.491679
128 1 0.653403 0.457401 0.49253
176 1 0.891794 0.471044 0.497894
1602 1 0.349012 0.00959395 0.507546
1921 1 0.908415 0.00281578 0.513297
768 1 0.927618 0.0551937 0.505412
1251 1 0.745072 0.0888681 0.513116
1072 1 0.637994 0.097523 0.509995
1994 1 0.793851 0.107052 0.503304
1136 1 0.488002 0.164318 0.514431
387 1 0.519243 0.174193 0.515159
1780 1 0.620227 0.178632 0.501221
305 1 0.341779 0.199336 0.517462
893 1 0.434507 0.229185 0.518115
329 1 0.612404 0.229106 0.512254
78 1 0.592151 0.24047 0.513065
620 1 0.386616 0.250537 0.506577
634 1 0.509689 0.267354 0.510044
774 1 0.666829 0.319379 0.513679
880 1 0.298296 0.338533 0.503313
1715 1 0.913216 0.376301 0.505183
604 1 0.616907 0.404992 0.514353
409 1 0.0453871 0.454845 0.50975
1424 1 0.284117 0.454166 0.51611
1248 1 0.75198 0.454064 0.516358
1186 1 0.820752 0.458396 0.504679
631 1 0.106349 0.49729 0.515688
313 1 0.351488 0.0532684 0.531332
114 1 0.728937 0.0414212 0.537566
1047 1 0.221675 0.115877 0.535018
247 1 0.891004 0.147687 0.535575
1904 1 0.118486 0.162904 0.526301
1918 1 0.275208 0.16091 0.529714
1179 1 0.599182 0.159821 0.53406
996 1 0.888489 0.227117 0.521505
546 1 0.833884 0.239045 0.533854
249 1 0.252349 0.260899 0.536694
388 1 0.801327 0.282954 0.525603
1879 1 0.51835 0.310672 0.533727
1622 1 0.568831 0.313531 0.530689
853 1 0.830717 0.317178 0.528945
464 1 0.335978 0.338374 0.531424
403 1 0.894016 0.334904 0.52768
1567 1 0.998329 0.332483 0.53131
1631 1 0.155167 0.377257 0.537937
1457 1 0.223797 0.379471 0.521161
1414 1 0.438364 0.422552 0.53154
495 1 0.214535 0.479345 0.523032
712 1 0.674877 0.464896 0.529493
1570 1 0.61115 0.0270705 0.544511
947 1 0.185482 0.0763668 0.554033
1394 1 0.303551 0.0671021 0.548325
2047 1 0.837075 0.101849 0.545862
1034 1 0.980481 0.140156 0.554247
268 1 0.321629 0.173917 0.548092
27 1 0.508161 0.219183 0.555783
84 1 0.67976 0.216334 0.557434
513 1 0.880211 0.242568 0.541829
414 1 0.989623 0.237463 0.540887
1483 1 0.398922 0.261518 0.554692
365 1 0.615322 0.253768 0.553215
1476 1 0.662972 0.254249 0.5492
1115 1 0.497286 0.281297 0.541452
605 1 0.782887 0.299432 0.540081
881 1 0.47672 0.337834 0.550079
833 1 0.811494 0.334145 0.54354
1363 1 0.397982 0.354898 0.557106
1510 1 0.47703 0.373154 0.54204
1847 1 0.184192 0.420941 0.551867
915 1 0.0617974 0.492042 0.547455
186 1 0.456735 0.489669 0.552704
790 1 0.721785 0.48079 0.552825
158 1 0.402272 0.0542683 0.56577
906 1 0.176135 0.070887 0.55831
1980 1 0.116937 0.094563 0.576102
931 1 0.109516 0.118012 0.574832
400 1 0.332755 0.131008 0.571369
551 1 0.712309 0.199281 0.568274
95 1 0.233613 0.22493 0.561999
1927 1 0.293309 0.229022 0.572326
1945 1 0.659139 0.219984 0.575582
748 1 0.906487 0.230616 0.561639
333 1 0.273882 0.24761 0.564936
515 1 0.64903 0.242182 0.568912
1968 1 0.381343 0.275737 0.559964
1838 1 0.426581 0.27786 0.568971
424 1 0.684822 0.294054 0.573118
516 1 0.787736 0.3054 0.573201
574 1 0.718127 0.353616 0.572908
395 1 0.599118 0.403185 0.557966
832 1 0.756289 0.402025 0.575787
1835 1 0.762797 0.39979 0.565812
671 1 0.920396 0.388709 0.563848
1460 1 0.138946 0.407056 0.574835
264 1 0.991341 0.416028 0.558303
1413 1 0.447877 0.0488482 0.591078
1172 1 0.641107 0.0615815 0.578901
1277 1 0.635347 0.0896682 0.580613
1181 1 0.636632 0.0996611 0.579425
410 1 0.37212 0.144804 0.593723
1603 1 0.756637 0.164708 0.585722
1446 1 0.961933 0.180501 0.590201
1353 1 0.136208 0.205616 0.593521
1564 1 0.738802 0.221411 0.583835
1569 1 0.620255 0.27686 0.592428
1096 1 0.164832 0.301456 0.58441
982 1 0.822923 0.346577 0.585795
1404 1 0.0213084 0.397646 0.581171
1339 1 0.374441 0.417164 0.58973
1278 1 0.400028 0.476949 0.585447
465 1 0.628364 0.47762 0.591378
645 1 0.508938 0.0317539 0.600453
626 1 0.176789 0.152835 0.602702
1738 1 0.67495 0.169571 0.60873
1794 1 0.927149 0.183537 0.600797
1915 1 0.879033 0.225564 0.604062
37 1 0.374118 0.247725 0.605669
1311 1 0.996511 0.232653 0.606589
1174 1 0.784357 0.26831 0.614682
1055 1 0.862773 0.283322 0.601416
1708 1 0.404 0.330881 0.606413
2018 1 0.884157 0.336307 0.614254
120 1 0.873176 0.348613 0.59658
136 1 0.114163 0.485669 0.607528
1916 1 0.66918 0.0102343 0.61612
766 1 0.0182052 0.0906365 0.618564
1004 1 0.582467 0.0964486 0.628847
651 1 0.580056 0.105483 0.622102
1497 1 0.358366 0.145574 0.629969
1439 1 0.61281 0.136096 0.625648
1600 1 0.453173 0.171122 0.616397
1415 1 0.336097 0.190408 0.627673
1917 1 0.523875 0.185213 0.633287
42 1 0.514343 0.173954 0.629993
389 1 0.942345 0.199193 0.630385
1857 1 0.874326 0.217378 0.617349
1139 1 0.925278 0.224452 0.626864
1017 1 0.423135 0.247661 0.62808
1539 1 0.725364 0.23923 0.622706
1572 1 0.251745 0.263347 0.627242
1892 1 0.521513 0.263766 0.62607
1695 1 0.791654 0.260372 0.626642
859 1 0.0508968 0.277301 0.619989
54 1 0.438157 0.272662 0.621616
1876 1 0.505665 0.286215 0.631149
822 1 0.156439 0.321003 0.618414
1310 1 0.225254 0.314062 0.628021
1696 1 0.249358 0.321574 0.619436
521 1 0.223882 0.328908 0.628468
1852 1 0.0256741 0.354386 0.621294
994 1 0.183464 0.359409 0.618501
1475 1 0.605793 0.359055 0.629688
1099 1 0.618947 0.383013 0.61879
861 1 0.894332 0.397929 0.617285
562 1 0.871663 0.438276 0.617646
296 1 0.20528 0.473691 0.623887
1735 1 0.684386 0.117572 0.639178
1961 1 0.890417 0.189687 0.635084
1044 1 0.755751 0.211646 0.643158
1519 1 0.985776 0.244134 0.64799
1604 1 0.517866 0.292092 0.636931
358 1 0.845075 0.296382 0.647777
1020 1 0.868059 0.307745 0.636584
801 1 0.519432 0.336344 0.649097
1176 1 0.911955 0.39788 0.645184
1728 1 0.642673 0.00331501 0.671673
1077 1 0.0673011 0.0684548 0.660242
1057 1 0.463164 0.0827418 0.673028
737 1 0.606176 0.131067 0.655174
1250 1 0.0175915 0.149958 0.671414
212 1 0.806946 0.151905 0.655425
1426 1 0.651145 0.160089 0.664514
1043 1 0.103775 0.196931 0.667597
437 1 0.994037 0.211153 0.655969
1976 1 0.508546 0.227311 0.654262
1877 1 0.782068 0.221188 0.663641
356 1 0.39736 0.266901 0.663631
276 1 0.546127 0.335593 0.660541
703 1 0.362844 0.355962 0.668649
1093 1 0.0153092 0.384486 0.664655
898 1 0.861161 0.405587 0.666656
1299 1 0.984993 0.496823 0.656185
191 1 0.970509 0.0479743 0.684649
837 1 0.0936794 0.0757936 0.678408
1678 1 0.0754078 0.130657 0.688477
1380 1 0.425635 0.121666 0.678332
1243 1 0.864419 0.185837 0.685793
1775 1 0.293559 0.227396 0.688157
1828 1 0.516676 0.222926 0.674791
615 1 0.19592 0.254289 0.675545
1978 1 0.46172 0.268283 0.675597
1517 1 0.736881 0.286936 0.675372
961 1 0.603504 0.309031 0.677855
16 1 0.714227 0.325411 0.685136
2035 1 0.756241 0.309591 0.674212
1270 1 0.803527 0.318185 0.674381
2030 1 0.659931 0.335452 0.691922
1533 1 0.060171 0.352817 0.682775
2015 1 0.891361 0.358266 0.685972
228 1 0.407305 0.387334 0.680511
1641 1 0.246117 0.426446 0.686159
1407 1 0.478649 0.448513 0.680595
1167 1 0.277557 0.478088 0.677394
33 1 0.367998 0.481124 0.684993
1810 1 0.816323 0.00540208 0.702992
1091 1 0.610851 0.0245681 0.694227
964 1 0.789582 0.0361528 0.701698
1928 1 0.4859 0.0501052 0.70591
36 1 0.622547 0.0431553 0.698256
1131 1 0.66645 0.107264 0.693585
233 1 0.735119 0.126284 0.709054
490 1 0.0662715 0.13643 0.692312
1100 1 0.430063 0.137896 0.701541
1049 1 0.74193 0.156825 0.694107
1559 1 0.0486205 0.190664 0.698853
246 1 0.787483 0.233014 0.710819
188 1 0.873757 0.232364 0.710284
1598 1 0.106227 0.256775 0.709103
300 1 0.624145 0.266847 0.693073
1881 1 0.583206 0.277075 0.711387
617 1 0.808453 0.285526 0.711314
971 1 0.617141 0.315357 0.70814
2008 1 0.692241 0.308203 0.702989
811 1 0.0700364 0.36711 0.706407
196 1 0.14442 0.397816 0.709007
969 1 0.754844 0.400362 0.708408
1485 1 0.343698 0.436935 0.710222
820 1 0.480192 0.427637 0.699626
1878 1 0.0720407 0.0839786 0.716496
1078 1 0.455715 0.0855769 0.719399
1984 1 0.842809 0.0796064 0.72876
1627 1 0.404142 0.104491 0.722093
1748 1 0.0313998 0.14374 0.728397
216 1 0.281567 0.144638 0.712355
1909 1 0.791629 0.152817 0.720111
1018 1 0.55828 0.160452 0.716647
1754 1 0.613882 0.154356 0.712401
843 1 0.842656 0.206025 0.719526
1169 1 0.967262 0.21064 0.726321
214 1 0.776188 0.229941 0.715404
558 1 0.777683 0.219199 0.718088
477 1 0.602449 0.240696 0.718615
1184 1 0.0154439 0.265109 0.720625
527 1 0.389258 0.338084 0.713809
200 1 0.651203 0.329785 0.729777
2007 1 0.678306 0.357835 0.724622
103 1 0.762588 0.354778 0.724569
2028 1 0.835678 0.365167 0.729128
1975 1 0.770397 0.46256 0.723824
234 1 0.145066 0.481664 0.73046
795 1 0.266573 0.481651 0.720527
1931 1 0.362693 0.486021 0.713189
713 1 0.762654 0.492768 0.726947
2016 1 0.969872 0.489261 0.726468
1684 1 0.325405 0.00905198 0.742329
1351 1 0.452946 0.000222709 0.743848
1743 1 0.734629 0.0142163 0.734395
1323 1 0.899364 0.00638142 0.749799
923 1 0.416191 0.0226296 0.732553
1474 1 0.401037 0.0428702 0.745049
311 1 0.455556 0.0499623 0.749223
987 1 0.332273 0.0753971 0.741548
1073 1 0.475935 0.0663705 0.749745
1548 1 0.320674 0.0944292 0.73664
776 1 0.278288 0.129588 0.743159
1792 1 0.949293 0.14871 0.742956
1840 1 0.376478 0.205975 0.736554
852 1 0.407488 0.257349 0.747772
1506 1 0.400971 0.273982 0.738703
641 1 0.505336 0.285143 0.732326
181 1 0.478484 0.30461 0.738588
610 1 0.553959 0.303652 0.733358
1903 1 0.0313677 0.354865 0.742397
94 1 0.556086 0.394724 0.740568
1116 1 0.303876 0.420932 0.731731
709 1 0.519499 0.419913 0.737306
256 1 0.614293 0.409968 0.740382
1891 1 0.528833 0.474189 0.739725
1159 1 0.0412345 0.0139684 0.755418
1785 1 0.68738 0.0182085 0.759868
1303 1 0.286314 0.0333807 0.753024
595 1 0.402994 0.0230417 0.761811
757 1 0.595882 0.0396428 0.762193
1370 1 0.524194 0.0967602 0.752945
405 1 0.694193 0.0995657 0.767494
467 1 0.324309 0.117527 0.751763
2042 1 0.114959 0.164364 0.755339
500 1 0.141895 0.15742 0.757225
960 1 0.586891 0.173438 0.750907
563 1 0.484515 0.196583 0.760446
1966 1 0.957726 0.193566 0.751028
1134 1 0.477981 0.217378 0.750073
1547 1 0.964725 0.211847 0.7589
544 1 0.259763 0.247163 0.768355
1544 1 0.191621 0.271318 0.766245
56 1 0.313715 0.272509 0.763354
1560 1 0.466268 0.272116 0.764309
1935 1 0.0265653 0.31869 0.753241
208 1 0.873096 0.417288 0.756219
1247 1 0.986991 0.418681 0.765466
326 1 0.581411 0.437251 0.759784
567 1 0.184974 0.489209 0.752234
663 1 0.291423 0.0189814 0.787776
921 1 0.970269 0.00435907 0.769958
1786 1 0.465688 0.0199608 0.771718
404 1 0.624218 0.0274317 0.771826
2032 1 0.990593 0.0267498 0.771201
973 1 0.74247 0.047056 0.77943
417 1 0.455811 0.150301 0.783106
1699 1 0.472709 0.209024 0.779074
537 1 0.63408 0.211162 0.786528
1000 1 0.755168 0.215297 0.777443
132 1 0.859598 0.229614 0.778662
670 1 0.997901 0.211877 0.782908
441 1 0.306491 0.238448 0.77967
307 1 0.473654 0.241619 0.781302
99 1 0.523434 0.245853 0.778632
229 1 0.383757 0.259789 0.778708
1209 1 0.0996692 0.287124 0.770072
1844 1 0.78828 0.280507 0.782055
1383 1 0.364342 0.30009 0.775252
89 1 0.621738 0.361293 0.774113
682 1 0.57095 0.410458 0.770856
1866 1 0.943412 0.421844 0.787032
1720 1 0.479157 0.46288 0.770112
1092 1 0.461862 0.488118 0.780399
812 1 0.455532 0.0556944 0.803838
1951 1 0.537761 0.0609774 0.798119
622 1 0.606752 0.0606173 0.796356
1123 1 0.00463982 0.0883055 0.803924
1222 1 0.256628 0.131731 0.790484
834 1 0.919544 0.116159 0.791574
237 1 0.301455 0.142742 0.795429
978 1 0.363923 0.189042 0.789478
1887 1 0.844054 0.202055 0.792838
88 1 0.889135 0.213014 0.796583
533 1 0.323672 0.261298 0.79247
1551 1 0.390096 0.288168 0.795343
491 1 0.670063 0.302434 0.795487
672 1 0.464851 0.310583 0.801888
1653 1 0.827315 0.309602 0.79076
864 1 0.177649 0.369161 0.788515
58 1 0.396633 0.375245 0.796518
1750 1 0.800795 0.367121 0.794823
1200 1 0.518058 0.393268 0.799693
1489 1 0.534754 0.418514 0.799711
1842 1 0.874831 0.40603 0.79825
498 1 0.985479 0.411914 0.800286
704 1 0.468895 0.434975 0.80176
568 1 0.89303 0.460246 0.788868
1568 1 0.712854 0.486551 0.79252
1054 1 0.480148 0.00383201 0.814811
119 1 0.068517 0.0813215 0.818636
1630 1 0.305685 0.113369 0.81424
1705 1 0.131364 0.121692 0.810366
314 1 0.424602 0.121318 0.813604
1253 1 0.513065 0.116138 0.821312
45 1 0.999263 0.145226 0.808126
1108 1 0.548846 0.228968 0.819704
1211 1 0.928739 0.226967 0.813339
486 1 0.05013 0.231709 0.808317
1771 1 0.899053 0.258298 0.82305
1409 1 0.104417 0.272263 0.816369
1717 1 0.890787 0.337136 0.808849
1359 1 0.07731 0.363264 0.81324
1021 1 0.346585 0.393837 0.812524
1740 1 0.451557 0.406806 0.812065
1482 1 0.719494 0.451611 0.807697
1188 1 0.372585 0.473546 0.817188
1665 1 0.413413 0.469204 0.819264
2017 1 0.487712 0.495389 0.812962
705 1 0.500007 0.498052 0.819707
667 1 0.612316 0.0363571 0.844778
1148 1 0.176556 0.0530089 0.84568
144 1 0.489726 0.11031 0.84331
334 1 0.563787 0.208151 0.843539
1008 1 0.851066 0.209165 0.838868
807 1 0.939864 0.204755 0.841769
1593 1 0.0694188 0.223347 0.834676
1325 1 0.86398 0.242433 0.838333
1588 1 0.0589817 0.261767 0.835291
1119 1 0.87963 0.285835 0.827281
1348 1 0.0226948 0.307082 0.845498
764 1 0.662185 0.304207 0.83206
1178 1 0.303088 0.308273 0.845957
882 1 0.816359 0.314127 0.838386
1902 1 0.908892 0.307766 0.843963
487 1 0.638783 0.373791 0.844993
1681 1 0.896756 0.397679 0.833171
266 1 0.660327 0.405955 0.831657
525 1 0.668264 0.418094 0.84276
73 1 0.0488773 0.434149 0.833171
1648 1 0.455217 0.439979 0.83002
146 1 0.861588 0.440253 0.830153
1982 1 0.848921 0.444222 0.83349
118 1 0.176576 0.478626 0.839803
210 1 0.0165695 0.0227568 0.856533
1372 1 0.590827 0.0330958 0.862934
1584 1 0.726172 0.0352927 0.84885
162 1 0.814205 0.0758556 0.852563
2037 1 0.160057 0.0945488 0.853481
293 1 0.500155 0.108819 0.857039
1837 1 0.564947 0.128934 0.850849
137 1 0.468905 0.287681 0.846428
1819 1 0.185278 0.394949 0.852068
480 1 0.349134 0.385638 0.861741
199 1 0.404142 0.400242 0.852396
156 1 0.453961 0.41122 0.857136
1680 1 0.898648 0.421402 0.862488
1312 1 0.268019 0.470196 0.861933
260 1 0.903834 0.476467 0.862872
195 1 0.5197 0.487694 0.849477
445 1 0.680702 0.486086 0.857118
1906 1 0.768352 0.497266 0.857782
205 1 0.290279 0.0178938 0.878645
879 1 0.82261 0.0127084 0.867893
1829 1 0.0931753 0.0325153 0.883557
1128 1 0.669566 0.0687995 0.876524
1953 1 0.332491 0.0827162 0.881453
1834 1 0.171779 0.109575 0.878414
621 1 0.762357 0.121875 0.879034
1256 1 0.874427 0.149071 0.880126
601 1 0.411691 0.172677 0.881495
32 1 0.459851 0.207168 0.869691
1832 1 0.497581 0.192415 0.876229
639 1 0.225687 0.249531 0.865948
892 1 0.116966 0.260119 0.865661
1899 1 0.94332 0.268867 0.880802
680 1 0.0201376 0.270557 0.881588
1417 1 0.21552 0.334475 0.8738
763 1 0.778688 0.34073 0.876501
3 1 0.93887 0.33434 0.867753
998 1 0.428018 0.349867 0.870487
660 1 0.661628 0.401062 0.870774
644 1 0.195303 0.416655 0.874636
1645 1 0.59974 0.412115 0.874665
147 1 0.686058 0.422415 0.865584
303 1 0.498933 0.456457 0.881863
1972 1 0.170346 0.477169 0.873185
688 1 0.449443 0.494452 0.877191
373 1 0.641792 0.485258 0.868138
657 1 0.0743185 0.0310868 0.885211
1081 1 0.165236 0.0558401 0.899928
1349 1 0.248223 0.0431702 0.898458
1472 1 0.524127 0.0875995 0.889657
1050 1 0.950994 0.126692 0.899203
1959 1 0.151639 0.204617 0.892895
71 1 0.708337 0.272218 0.890981
1713 1 0.259224 0.294404 0.896778
107 1 0.78096 0.289722 0.900498
485 1 0.359966 0.360229 0.897581
1132 1 0.448243 0.356273 0.90239
914 1 0.552786 0.380755 0.892923
952 1 0.816546 0.417455 0.902897
1941 1 0.99807 0.446057 0.892937
1733 1 0.231774 0.0307876 0.920131
1550 1 0.235769 0.0405418 0.916755
977 1 0.586361 0.0695353 0.920995
749 1 0.674707 0.073926 0.920541
1137 1 0.341916 0.0927874 0.904737
1456 1 0.443591 0.153485 0.909733
429 1 0.685995 0.160477 0.920746
1308 1 0.780399 0.15806 0.915085
340 1 0.15295 0.188776 0.913363
674 1 0.522263 0.279202 0.919648
2027 1 0.575676 0.292148 0.914269
499 1 0.97864 0.331385 0.912065
1523 1 0.931925 0.349132 0.919589
1515 1 0.223615 0.379714 0.917737
761 1 0.0524698 0.409795 0.909445
1027 1 0.11546 0.428137 0.920867
1332 1 0.625469 0.429815 0.917546
30 1 0.0198906 0.459868 0.915898
762 1 0.116394 0.448259 0.904885
143 1 0.216812 0.486124 0.912744
539 1 0.48555 0.0174361 0.931336
164 1 0.480195 0.00777124 0.942255
1511 1 0.738449 0.00896427 0.928983
2033 1 0.786011 0.134027 0.932098
1677 1 0.237404 0.143636 0.935416
541 1 0.81775 0.161329 0.933043
69 1 0.134018 0.174867 0.928396
1694 1 0.0260126 0.200681 0.924413
1220 1 0.173006 0.208464 0.928532
772 1 0.243551 0.227179 0.92529
1661 1 0.715813 0.223676 0.930461
361 1 0.428225 0.24103 0.939775
271 1 0.269409 0.33944 0.930881
575 1 0.59153 0.333691 0.938425
1650 1 0.882885 0.332546 0.929566
1317 1 0.34863 0.368602 0.931693
245 1 0.407669 0.402657 0.934756
1158 1 0.638708 0.399962 0.938255
1549 1 0.321168 0.41657 0.939908
336 1 0.712638 0.406676 0.92375
717 1 0.949047 0.415971 0.938649
112 1 0.168087 0.441851 0.934897
699 1 0.217223 0.438225 0.940938
11 1 0.136688 0.449242 0.930543
278 1 0.0885187 0.475435 0.932069
1229 1 0.3428 0.46506 0.933227
1382 1 0.508422 0.47828 0.933225
1059 1 0.818618 0.0494701 0.950931
770 1 0.379467 0.0602394 0.95069
1304 1 0.542577 0.0714268 0.957462
12 1 0.723936 0.0994358 0.943798
866 1 0.219596 0.131877 0.946179
1157 1 0.551457 0.122968 0.957132
1036 1 0.938574 0.144867 0.943679
1827 1 0.469929 0.181258 0.958089
794 1 0.712136 0.19349 0.958121
474 1 0.354797 0.312208 0.953727
177 1 0.417341 0.316675 0.952911
920 1 0.460408 0.311393 0.957393
166 1 0.601516 0.316737 0.954036
494 1 0.603604 0.335069 0.950231
335 1 0.838766 0.332499 0.946859
1260 1 0.365684 0.378736 0.949807
1010 1 0.197071 0.417017 0.959068
1543 1 0.0814806 0.438297 0.945152
1450 1 0.0899414 0.45936 0.957145
1309 1 0.368739 0.475259 0.943675
43 1 0.0676338 0.00864445 0.976296
1732 1 0.237801 0.0349235 0.972696
662 1 0.915379 0.0273013 0.98037
1302 1 0.578723 0.0695346 0.967053
1581 1 0.541758 0.0970717 0.969749
1406 1 0.659636 0.154473 0.968319
1988 1 0.739371 0.166984 0.976459
1657 1 0.0525221 0.19636 0.968578
636 1 0.169245 0.204867 0.980042
1245 1 0.962183 0.246007 0.963367
1683 1 0.865357 0.266269 0.966094
203 1 0.978395 0.258081 0.970933
1198 1 0.362225 0.279266 0.978123
1601 1 0.557321 0.312611 0.969001
1950 1 0.628779 0.309687 0.979315
803 1 0.423019 0.33035 0.970125
1239 1 0.553037 0.339878 0.963611
1923 1 0.659272 0.330659 0.970059
1930 1 0.863752 0.376967 0.975294
884 1 0.799919 0.391824 0.971125
706 1 0.128285 0.421887 0.979966
1343 1 0.9162 0.422805 0.978174
398 1 0.810206 0.451412 0.969435
2020 1 0.708649 0.0349631 0.997274
1080 1 0.806499 0.042043 0.982417
248 1 0.646001 0.133199 0.989659
1234 1 0.675993 0.13662 0.992425
1746 1 0.973402 0.203291 0.998311
1391 1 0.462801 0.214737 0.986478
192 1 0.833101 0.231648 0.981592
1387 1 0.769155 0.291626 0.991159
280 1 0.683972 0.311597 0.981982
1231 1 0.684947 0.316383 0.992478
469 1 0.996178 0.346854 0.993928
507 1 0.763491 0.382013 0.987855
277 1 0.207168 0.385505 0.992291
1430 1 0.0965697 0.412432 0.999798
1449 1 0.127335 0.419824 0.995519
553 1 0.712754 0.408471 0.994985
401 1 0.691768 0.435368 0.985116
1781 1 0.81228 0.447852 0.996428
1089 1 0.935873 0.598657 0.0160258
468 1 0.0837488 0.688819 0.0169443
972 1 0.422292 0.730007 0.0170982
690 1 0.848457 0.807332 0.0188231
1505 1 0.10518 0.815307 0.0162954
1326 1 0.39578 0.840065 0.0114913
1610 1 0.408791 0.831317 0.011626
1007 1 0.887237 0.856142 0.00804493
1983 1 0.144337 0.874066 0.000201401
1802 1 0.613181 0.889058 0.00273268
2 1 0.871198 0.93919 0.0051368
851 1 0.609393 0.954686 0.00763904
374 1 0.954395 0.954853 0.0183893
1085 1 0.177548 0.973229 0.00184156
1679 1 0.223772 0.99417 0.0134235
272 1 0.263764 0.987465 0.0141805
970 1 0.652718 0.534067 0.0279837
524 1 0.548988 0.541534 0.026312
1605 1 0.610285 0.545151 0.0224978
584 1 0.792253 0.610622 0.0341222
1697 1 0.496256 0.644523 0.0365881
1618 1 0.927859 0.66514 0.0285142
255 1 0.988675 0.678122 0.0326163
1350 1 0.225457 0.694796 0.0230266
2040 1 0.22462 0.725278 0.0284379
1773 1 0.693247 0.738506 0.0383635
1466 1 0.509989 0.78612 0.0303603
716 1 0.650927 0.797567 0.01957
1246 1 0.728086 0.836615 0.0260517
377 1 0.187367 0.86123 0.0351683
579 1 0.0396049 0.897973 0.0376249
940 1 0.686897 0.90156 0.0304841
265 1 0.17864 0.976656 0.0349316
549 1 0.0252978 0.538817 0.0469278
1368 1 0.617149 0.572525 0.0555688
1241 1 0.869973 0.611803 0.0495295
1836 1 0.416515 0.654029 0.0408266
596 1 0.193907 0.680305 0.0501722
806 1 0.54875 0.727691 0.0386505
1580 1 0.645344 0.792932 0.0519412
1082 1 0.540254 0.860467 0.0573636
9 1 0.287765 0.876244 0.0393299
236 1 0.0959918 0.91662 0.044582
1262 1 0.399356 0.911748 0.047231
1739 1 0.197446 0.980678 0.0425479
1477 1 0.383434 0.976334 0.0497094
1566 1 0.638597 0.973636 0.051774
1269 1 0.685143 0.514782 0.0734939
1218 1 0.644244 0.541573 0.0734975
1947 1 0.95978 0.557486 0.0741116
1438 1 0.816964 0.707717 0.0597586
1723 1 0.141215 0.749384 0.0580688
482 1 0.458516 0.821413 0.059978
35 1 0.913444 0.820694 0.0736905
1237 1 0.929752 0.818355 0.0659086
48 1 0.690828 0.828252 0.0614402
953 1 0.0121061 0.847762 0.0613701
297 1 0.586689 0.848656 0.0616868
269 1 0.27814 0.895539 0.0621301
416 1 0.395088 0.895201 0.0601898
1508 1 0.45813 0.90208 0.0582822
1362 1 0.0147787 0.948483 0.0668465
1894 1 0.0319177 0.975662 0.0730663
57 1 0.300889 0.968397 0.0755
1537 1 0.339672 0.525997 0.0909423
800 1 0.488781 0.554985 0.0858392
856 1 0.4337 0.609661 0.084524
1397 1 0.845954 0.608727 0.0852626
788 1 0.738848 0.618272 0.0790475
1171 1 0.777416 0.668655 0.0823697
1203 1 0.313502 0.711165 0.0893326
419 1 0.919361 0.717057 0.0772102
1973 1 0.245106 0.768729 0.0788472
1232 1 0.409581 0.755042 0.0796337
1006 1 0.896783 0.755837 0.0865319
1378 1 0.902534 0.771297 0.0913038
2000 1 0.761429 0.818831 0.0923875
1809 1 0.142135 0.838682 0.077575
1487 1 0.123738 0.989073 0.0837973
514 1 0.251831 0.993454 0.0937691
1455 1 0.316437 0.518046 0.109912
435 1 0.878408 0.500386 0.104359
145 1 0.62476 0.538335 0.106832
1097 1 0.0542095 0.546818 0.112423
273 1 0.211938 0.649464 0.1012
1757 1 0.234051 0.65021 0.111863
830 1 0.460297 0.65333 0.10176
531 1 0.625117 0.667498 0.112216
1528 1 0.849224 0.67143 0.0976259
903 1 0.23792 0.686717 0.101053
990 1 0.895908 0.674259 0.0997807
1153 1 0.288408 0.694837 0.109306
318 1 0.569453 0.739398 0.111184
327 1 0.70052 0.792467 0.110431
1795 1 0.526236 0.863747 0.101388
511 1 0.996549 0.905156 0.113354
1594 1 0.231091 0.991048 0.114381
76 1 0.558637 0.50983 0.116537
415 1 0.846943 0.512723 0.126185
349 1 0.903728 0.535501 0.13421
1524 1 0.0461122 0.551963 0.13287
1989 1 0.357587 0.563844 0.121427
1996 1 0.374183 0.573173 0.12571
529 1 0.108296 0.591677 0.120846
1579 1 0.851671 0.578145 0.129035
130 1 0.382023 0.676267 0.130011
504 1 0.376255 0.704106 0.129385
1900 1 0.342901 0.73523 0.118719
1626 1 0.85855 0.751291 0.118356
726 1 0.769211 0.782206 0.120318
711 1 0.842458 0.778106 0.131974
1938 1 0.918444 0.800564 0.126504
1453 1 0.928162 0.797351 0.131586
1051 1 0.524349 0.812444 0.125929
862 1 0.971086 0.822015 0.122553
1557 1 0.348152 0.828144 0.123655
295 1 0.0809845 0.856147 0.134464
1193 1 0.689897 0.882838 0.132702
1331 1 0.379299 0.8858 0.123517
669 1 0.964026 0.893549 0.117209
1967 1 0.995522 0.898078 0.128833
1922 1 0.489905 0.955071 0.122661
592 1 0.980558 0.95125 0.131867
1848 1 0.530306 0.97444 0.115976
172 1 0.777434 0.514538 0.149649
739 1 0.177897 0.58345 0.149783
1400 1 0.459901 0.584224 0.134716
875 1 0.719691 0.593441 0.153482
1074 1 0.950587 0.584312 0.140091
304 1 0.633868 0.610627 0.151543
1196 1 0.146395 0.650612 0.145531
535 1 0.0264034 0.696051 0.136324
858 1 0.09063 0.698787 0.139948
1741 1 0.345114 0.702143 0.149502
1863 1 0.458126 0.693502 0.138776
299 1 0.380358 0.7248 0.15352
1122 1 0.531369 0.767197 0.144853
779 1 0.282088 0.781284 0.146991
597 1 0.524882 0.771699 0.151603
1002 1 0.883788 0.774239 0.136589
683 1 0.586823 0.793924 0.144355
438 1 0.658962 0.800242 0.14424
458 1 0.923499 0.830971 0.149472
901 1 0.972382 0.846916 0.138407
1014 1 0.161741 0.868693 0.136206
1377 1 0.85167 0.890536 0.150567
1698 1 0.0780042 0.975859 0.13711
446 1 0.439672 0.988535 0.139601
1113 1 0.0222725 0.513448 0.17156
1488 1 0.410585 0.508693 0.169705
1815 1 0.237578 0.529249 0.158743
408 1 0.332144 0.554247 0.165753
1284 1 0.820763 0.548203 0.166971
140 1 0.416882 0.620181 0.167388
1235 1 0.602954 0.632243 0.161616
967 1 0.451088 0.641601 0.155776
2041 1 0.56575 0.655589 0.169508
522 1 0.524622 0.693285 0.167107
951 1 0.0018453 0.720452 0.169905
519 1 0.974044 0.748738 0.168762
1660 1 0.0280803 0.752035 0.156454
493 1 0.943366 0.756282 0.167988
966 1 0.100739 0.787395 0.168451
2003 1 0.30992 0.790521 0.156607
1858 1 0.174539 0.884221 0.154062
413 1 0.34084 0.873482 0.163683
1288 1 0.872025 0.977197 0.157071
1971 1 0.0947652 0.997938 0.164667
746 1 0.694439 0.993208 0.156552
101 1 0.294307 0.528735 0.173724
695 1 0.424136 0.549654 0.190685
153 1 0.639914 0.556879 0.184304
1651 1 0.0914601 0.5585 0.183685
1367 1 0.507619 0.611132 0.184905
654 1 0.577533 0.610785 0.192055
1764 1 0.544076 0.695293 0.187651
1215 1 0.699272 0.697 0.179289
1751 1 0.748551 0.713766 0.175632
470 1 0.636857 0.804077 0.1913
1910 1 0.956848 0.795992 0.179935
1582 1 0.67167 0.814289 0.17668
988 1 0.24981 0.839135 0.191993
1064 1 0.0751613 0.856059 0.183506
286 1 0.144284 0.866064 0.174078
1106 1 0.925215 0.872043 0.190628
554 1 0.780569 0.888205 0.177066
74 1 0.303413 0.913757 0.18789
1672 1 0.991688 0.908895 0.173752
44 1 0.427648 0.927407 0.1923
204 1 0.975496 0.950843 0.176301
638 1 0.991839 0.946322 0.178203
659 1 0.356423 0.980536 0.1911
258 1 0.415762 0.513278 0.20567
163 1 0.283602 0.5242 0.206214
243 1 0.419286 0.535583 0.204538
131 1 0.861016 0.59856 0.20445
1204 1 0.770525 0.633958 0.202049
1454 1 0.912989 0.627605 0.207264
64 1 0.531886 0.666799 0.204338
1258 1 0.940513 0.665109 0.194952
566 1 0.264153 0.688813 0.206414
1084 1 0.675268 0.691447 0.204384
1561 1 0.292431 0.694753 0.200922
310 1 0.567013 0.705915 0.198838
1479 1 0.572725 0.703924 0.209407
28 1 0.647901 0.717702 0.208054
1486 1 0.220519 0.824542 0.206961
226 1 0.915571 0.825858 0.196787
17 1 0.901513 0.881863 0.201003
786 1 0.201866 0.945394 0.195974
1843 1 0.465189 0.948387 0.204194
945 1 0.577903 0.951997 0.193548
1480 1 0.852055 0.942416 0.196827
1162 1 0.567739 0.535763 0.220015
1117 1 0.222001 0.578072 0.226282
1613 1 0.229208 0.589109 0.216875
1301 1 0.99782 0.591407 0.213305
1992 1 0.597693 0.600051 0.227254
478 1 0.810248 0.631828 0.213113
1704 1 0.430892 0.658025 0.226667
941 1 0.815294 0.655164 0.228314
831 1 0.363372 0.701903 0.217869
1629 1 0.603152 0.735265 0.216594
1143 1 0.50476 0.771274 0.21558
201 1 0.164332 0.792448 0.221973
841 1 0.520805 0.814856 0.225991
1478 1 0.488186 0.839225 0.220032
1822 1 0.570587 0.829382 0.227072
1146 1 0.390906 0.873354 0.214778
461 1 0.558576 0.866081 0.215346
1761 1 0.121583 0.924414 0.212923
1149 1 0.95915 0.926859 0.213491
463 1 0.901194 0.972818 0.221563
758 1 0.287779 0.995405 0.216417
2004 1 0.419066 0.994216 0.224321
708 1 0.507439 0.985756 0.224449
165 1 0.114069 0.53251 0.247876
1328 1 0.673408 0.524502 0.231666
41 1 0.211284 0.620024 0.236469
2001 1 0.617312 0.623649 0.243879
1005 1 0.0262219 0.677828 0.23451
1190 1 0.117945 0.680012 0.233624
1144 1 0.215498 0.724749 0.242291
1596 1 0.990672 0.724598 0.249128
1039 1 0.835637 0.763712 0.24893
1393 1 0.222635 0.775695 0.242047
618 1 0.467466 0.774048 0.249898
369 1 0.655826 0.806891 0.243452
1606 1 0.223701 0.862699 0.249154
1053 1 0.340821 0.950361 0.236092
848 1 0.963188 0.94489 0.236092
1854 1 0.912381 0.987338 0.231193
1371 1 0.242855 0.506901 0.266895
1939 1 0.0854892 0.533181 0.269047
1726 1 0.572294 0.531272 0.266707
928 1 0.696303 0.538329 0.253489
1642 1 0.0418973 0.583345 0.267544
1111 1 0.609444 0.608917 0.25447
1883 1 0.682606 0.612422 0.268879
421 1 0.829109 0.631645 0.253317
2013 1 0.841694 0.673066 0.266026
106 1 0.829057 0.685156 0.251399
821 1 0.831109 0.689475 0.261021
991 1 0.535987 0.694765 0.259457
1635 1 0.94787 0.711472 0.25505
62 1 0.970569 0.697987 0.264528
190 1 0.51717 0.732472 0.256113
1494 1 0.799748 0.744238 0.260782
995 1 0.156777 0.751067 0.263319
1412 1 0.544295 0.826588 0.266231
1861 1 0.0771175 0.829556 0.262171
292 1 0.541571 0.85809 0.260888
1257 1 0.213326 0.896521 0.26891
481 1 0.497044 0.89219 0.260996
139 1 0.810136 0.894917 0.264411
1936 1 0.514368 0.931743 0.263954
1293 1 0.358004 0.955604 0.256368
968 1 0.0218955 0.980258 0.250027
919 1 0.623775 0.509079 0.279491
1041 1 0.411539 0.527188 0.281786
908 1 0.546421 0.575914 0.271555
70 1 0.99019 0.599207 0.285213
1150 1 0.193002 0.663209 0.286201
1808 1 0.333259 0.671628 0.28842
1266 1 0.79813 0.664131 0.26973
789 1 0.358138 0.689701 0.286821
1052 1 0.735432 0.700467 0.287739
348 1 0.853244 0.716899 0.274314
279 1 0.946518 0.718785 0.275434
1295 1 0.78975 0.750042 0.284297
357 1 0.245287 0.782434 0.271231
719 1 0.837471 0.78461 0.271294
781 1 0.551948 0.803847 0.280272
238 1 0.462744 0.814799 0.278716
585 1 0.569725 0.813172 0.278641
394 1 0.0531151 0.851625 0.273659
560 1 0.969616 0.868788 0.285076
1974 1 0.449661 0.893461 0.270915
1940 1 0.624727 0.890696 0.281241
1731 1 0.501583 0.925773 0.284388
452 1 0.0403613 0.943564 0.280718
2022 1 0.95189 0.991716 0.284007
1872 1 0.366648 0.506549 0.298921
152 1 0.54628 0.530425 0.301548
253 1 0.258352 0.613863 0.29206
693 1 0.501717 0.605494 0.307511
538 1 0.856733 0.625229 0.307353
1022 1 0.989221 0.624916 0.30718
1357 1 0.487622 0.684527 0.294922
542 1 0.0553888 0.697844 0.288862
860 1 0.0176678 0.727135 0.302497
1839 1 0.0779432 0.750258 0.302356
1261 1 0.117873 0.803146 0.306796
367 1 0.370096 0.866521 0.301286
1401 1 0.347244 0.899032 0.297039
209 1 0.578524 0.908939 0.294799
887 1 0.924183 0.923892 0.295903
552 1 0.155581 0.950155 0.293126
1300 1 0.648171 0.954503 0.298893
1221 1 0.93591 0.948151 0.29884
569 1 0.784974 0.977847 0.300844
1514 1 0.290087 0.527593 0.315692
252 1 0.00307265 0.549628 0.323519
489 1 0.641958 0.588828 0.323928
840 1 0.977159 0.583001 0.312829
447 1 0.0425672 0.603244 0.312846
1334 1 0.343981 0.681129 0.316119
1425 1 0.871297 0.803358 0.31409
980 1 0.153553 0.862154 0.315552
117 1 0.190502 0.8556 0.322789
1647 1 0.213432 0.88892 0.315009
287 1 0.616291 0.906798 0.323673
1701 1 0.0805022 0.928908 0.326311
965 1 0.2789 0.973148 0.326361
1147 1 0.964863 0.994723 0.3232
993 1 0.075737 0.508559 0.340432
849 1 0.607452 0.536096 0.333685
121 1 0.491604 0.573449 0.335883
1369 1 0.514542 0.591914 0.329598
66 1 0.56867 0.61082 0.330946
1777 1 0.654879 0.618871 0.342775
309 1 0.289976 0.638048 0.34231
1772 1 0.379954 0.647255 0.340045
611 1 0.516162 0.643413 0.333262
1565 1 0.0110298 0.663652 0.34485
1652 1 0.443895 0.659196 0.339729
1442 1 0.109335 0.755139 0.329668
1316 1 0.440761 0.757303 0.334035
1024 1 0.379159 0.841424 0.335558
1436 1 0.0860348 0.854568 0.327288
1125 1 0.787605 0.849999 0.345787
439 1 0.884173 0.872246 0.338582
1495 1 0.758925 0.933736 0.333347
936 1 0.77385 0.929147 0.335861
1765 1 0.328615 0.959209 0.345931
53 1 0.233672 0.976959 0.332307
337 1 0.586562 0.984464 0.326964
896 1 0.714876 0.987943 0.329529
1616 1 0.79449 0.996303 0.328234
133 1 0.293702 0.622305 0.347297
261 1 0.749767 0.616525 0.350868
1632 1 0.906187 0.669137 0.349221
1706 1 0.0690753 0.676347 0.355295
900 1 0.393668 0.727341 0.348567
1071 1 0.382309 0.798018 0.358314
225 1 0.0382273 0.809409 0.351227
769 1 0.661766 0.823614 0.351205
1015 1 0.511035 0.833008 0.35595
1028 1 0.392789 0.904711 0.355203
556 1 0.446406 0.941211 0.347273
1762 1 0.921245 0.952174 0.35835
684 1 0.50852 0.996113 0.360475
728 1 0.119212 0.517776 0.368255
1142 1 0.237101 0.505628 0.370761
845 1 0.508418 0.505014 0.368403
1813 1 0.350149 0.521807 0.373024
520 1 0.95488 0.554418 0.371521
1745 1 0.0615042 0.577574 0.374266
1151 1 0.218489 0.598268 0.366239
943 1 0.706547 0.606884 0.366607
1943 1 0.629304 0.670253 0.375458
456 1 0.197326 0.710589 0.382005
564 1 0.113475 0.748803 0.367535
944 1 0.712656 0.759571 0.380353
1821 1 0.339384 0.826139 0.370797
828 1 0.0138624 0.827683 0.376219
1366 1 0.363652 0.827616 0.377324
863 1 0.899874 0.838301 0.371974
608 1 0.508336 0.875617 0.378943
1066 1 0.426393 0.899967 0.379027
1346 1 0.337428 0.949407 0.370473
1823 1 0.940911 0.952305 0.377808
1612 1 0.995968 0.502308 0.399952
691 1 0.258633 0.54231 0.400916
1540 1 0.169407 0.615576 0.395789
159 1 0.55323 0.644824 0.403174
885 1 0.0919464 0.655179 0.393178
113 1 0.158253 0.816193 0.401371
591 1 0.200992 0.813966 0.395642
959 1 0.308003 0.821175 0.391323
283 1 0.918737 0.841565 0.400357
1611 1 0.217009 0.862778 0.393699
816 1 0.752857 0.883478 0.394216
1035 1 0.547988 0.899204 0.394906
1734 1 0.888833 0.884749 0.401731
784 1 0.0522865 0.915672 0.385742
949 1 0.10198 0.958111 0.403217
2048 1 0.58226 0.956837 0.396982
1929 1 0.0536102 0.962947 0.401598
785 1 0.385018 0.513842 0.40953
1518 1 0.752605 0.513502 0.418046
328 1 0.135641 0.532349 0.413969
1826 1 0.46407 0.524944 0.40668
105 1 0.235387 0.54809 0.405931
1667 1 0.215254 0.687165 0.422952
383 1 0.944296 0.679474 0.417484
755 1 0.580877 0.717593 0.416057
224 1 0.813191 0.762587 0.405182
1868 1 0.841651 0.774061 0.408409
1437 1 0.237654 0.79894 0.405007
1643 1 0.540514 0.804737 0.42302
1830 1 0.0357215 0.989843 0.416385
1462 1 0.0506754 0.988436 0.41152
702 1 0.853405 0.993164 0.404833
262 1 0.873307 0.988181 0.41707
275 1 0.921734 0.994423 0.416554
392 1 0.283144 0.549496 0.436817
1240 1 0.9196 0.550097 0.441515
738 1 0.772518 0.55813 0.441684
930 1 0.157977 0.676519 0.424906
557 1 0.759605 0.705324 0.437944
430 1 0.26823 0.783141 0.434741
646 1 0.861274 0.770706 0.430394
741 1 0.343441 0.804434 0.425841
675 1 0.652015 0.810761 0.437717
1129 1 0.25893 0.856186 0.424413
1307 1 0.682677 0.921427 0.428008
791 1 0.167288 0.930673 0.439778
899 1 0.427223 0.937876 0.436067
1656 1 0.988498 0.938788 0.436402
1003 1 0.608613 0.976001 0.438473
1324 1 0.821985 0.991587 0.434432
7 1 0.860488 0.986676 0.426339
1825 1 0.890765 0.528595 0.456162
338 1 0.273558 0.590629 0.444296
643 1 0.679877 0.594988 0.455953
1069 1 0.392545 0.637563 0.450795
877 1 0.505579 0.644913 0.446699
1855 1 0.688163 0.648788 0.449853
1880 1 0.545001 0.681192 0.456939
1889 1 0.245201 0.714252 0.456443
874 1 0.484675 0.742308 0.442903
929 1 0.516986 0.860266 0.454665
1086 1 0.038911 0.882337 0.455501
777 1 0.819647 0.881794 0.457792
1585 1 0.287423 0.91872 0.445847
380 1 0.443757 0.944881 0.447644
710 1 0.848248 0.945022 0.44942
1531 1 0.964062 0.957438 0.458355
1001 1 0.207288 0.979928 0.446788
1473 1 0.778843 0.979807 0.460225
1759 1 0.211372 0.543402 0.464782
346 1 0.656928 0.548382 0.477024
235 1 0.694033 0.610047 0.468251
642 1 0.290082 0.629782 0.468353
363 1 0.780017 0.617211 0.470955
1807 1 0.363473 0.66276 0.477276
436 1 0.445923 0.697632 0.469021
1013 1 0.202488 0.717345 0.467147
955 1 0.57233 0.723243 0.462087
1465 1 0.603313 0.716042 0.468376
1954 1 0.33639 0.736123 0.480218
1776 1 0.529625 0.805574 0.463003
935 1 0.864551 0.791966 0.465078
1090 1 0.37995 0.833906 0.480117
1926 1 0.48731 0.859642 0.480497
1529 1 0.881211 0.926504 0.464313
1379 1 0.0198088 0.959278 0.478314
1669 1 0.0353375 0.983125 0.465115
221 1 0.719085 0.989318 0.465458
1337 1 0.332065 0.511259 0.495277
528 1 0.153531 0.549611 0.49285
891 1 0.450081 0.575107 0.48614
673 1 0.0579752 0.608749 0.490807
442 1 0.0682087 0.667378 0.49176
1806 1 0.120995 0.698368 0.483296
79 1 0.361356 0.724304 0.487083
1875 1 0.993081 0.77086 0.493631
399 1 0.984358 0.781001 0.489486
1793 1 0.261171 0.79574 0.489095
1536 1 0.622736 0.810675 0.48857
1138 1 0.467696 0.845632 0.49648
1056 1 0.594516 0.845119 0.486252
15 1 0.819397 0.829623 0.499716
1869 1 0.115216 0.854641 0.480884
517 1 0.352805 0.881908 0.494875
67 1 0.802125 0.906321 0.490056
1102 1 0.52467 0.936552 0.498842
650 1 0.725994 0.974726 0.490338
1101 1 0.991547 0.980166 0.490461
725 1 0.310462 0.99323 0.491998
1265 1 0.85781 0.995644 0.483639
1386 1 0.577436 0.537481 0.511429
1493 1 0.610629 0.535052 0.50011
1755 1 0.475043 0.584304 0.507618
1502 1 0.452095 0.642142 0.504223
183 1 0.514956 0.639592 0.509918
141 1 0.541175 0.669615 0.516862
1574 1 0.882629 0.672939 0.504237
257 1 0.083009 0.685271 0.510212
80 1 0.251574 0.687567 0.501121
825 1 0.569634 0.690002 0.513773
1597 1 0.283136 0.711018 0.511527
317 1 0.0766939 0.726716 0.513435
724 1 0.188258 0.735366 0.512413
1709 1 0.250816 0.734239 0.513378
475 1 0.335437 0.736525 0.504883
22 1 0.962423 0.734092 0.512919
648 1 0.216387 0.75657 0.515686
355 1 0.588159 0.765151 0.504351
1345 1 0.59428 0.770612 0.513101
72 1 0.700848 0.837367 0.504604
354 1 0.0140328 0.84661 0.514682
68 1 0.230325 0.863416 0.502056
1109 1 0.274299 0.999004 0.507788
1275 1 0.888942 0.987314 0.51896
160 1 0.427439 0.508929 0.530599
65 1 0.598907 0.566045 0.536998
1955 1 0.395157 0.617849 0.538368
692 1 0.577396 0.621032 0.525502
1432 1 0.160244 0.666743 0.529864
910 1 0.595136 0.656764 0.528978
4 1 0.716784 0.679117 0.526181
1175 1 0.774605 0.682886 0.530754
1124 1 0.14141 0.718857 0.537607
1443 1 0.166569 0.718515 0.523629
18 1 0.824962 0.780908 0.537795
2023 1 0.908906 0.8594 0.538211
1264 1 0.174168 0.879161 0.534468
1268 1 0.620155 0.866089 0.529476
532 1 0.644763 0.873345 0.535263
1191 1 0.0933439 0.885326 0.523549
1244 1 0.684992 0.894452 0.528782
782 1 0.972488 0.914946 0.535558
1327 1 0.155636 0.933698 0.524772
5 1 0.426955 0.931185 0.521058
1405 1 0.787258 0.923399 0.526275
1177 1 0.850665 0.931591 0.520327
1160 1 0.504707 0.953475 0.523289
1932 1 0.817314 0.946571 0.526631
459 1 0.229657 0.972676 0.52662
1546 1 0.502652 0.963834 0.52264
948 1 0.266165 0.52811 0.551456
47 1 0.968179 0.52594 0.550896
1225 1 0.652247 0.561013 0.545854
1112 1 0.487289 0.610388 0.54289
241 1 0.429775 0.651632 0.542975
1999 1 0.332658 0.697932 0.55464
1281 1 0.364926 0.698621 0.553446
870 1 0.674535 0.746775 0.548574
1392 1 0.812295 0.748738 0.552245
938 1 0.639101 0.760817 0.538983
2009 1 0.0226208 0.782308 0.555241
1675 1 0.0135355 0.79582 0.545066
1990 1 0.47192 0.942742 0.553081
701 1 0.586253 0.505989 0.566939
1791 1 0.337663 0.525584 0.559287
187 1 0.624238 0.522036 0.571971
1065 1 0.233961 0.591586 0.565523
1194 1 0.870916 0.656769 0.574345
797 1 0.246093 0.685081 0.558458
1130 1 0.738727 0.689188 0.571177
1168 1 0.944292 0.685263 0.562473
1263 1 0.000135897 0.694739 0.563213
457 1 0.282003 0.693444 0.570744
939 1 0.661882 0.698104 0.559643
1040 1 0.704793 0.709572 0.573612
1441 1 0.880049 0.702606 0.572791
565 1 0.681534 0.783834 0.56848
1849 1 0.262556 0.852957 0.570434
1512 1 0.26467 0.931772 0.559259
274 1 0.646731 0.924455 0.56369
2019 1 0.999777 0.95791 0.558333
1283 1 0.711027 0.513713 0.584156
1451 1 0.0592867 0.531078 0.58043
983 1 0.296487 0.56112 0.593998
765 1 0.660723 0.570115 0.589592
809 1 0.49423 0.586927 0.582535
284 1 0.353543 0.605982 0.58958
1712 1 0.399304 0.607828 0.58621
426 1 0.418789 0.605511 0.587926
572 1 0.980541 0.612833 0.595889
1913 1 0.745548 0.635304 0.582891
1799 1 0.119876 0.678763 0.580801
320 1 0.0555326 0.71978 0.58951
26 1 0.445225 0.727476 0.595885
81 1 0.0391775 0.7531 0.592099
364 1 0.217583 0.786665 0.577993
134 1 0.690223 0.785381 0.585423
298 1 0.794392 0.777139 0.578903
198 1 0.800216 0.830407 0.593908
780 1 0.617887 0.863092 0.578543
1609 1 0.248841 0.899572 0.589283
1702 1 0.231426 0.91931 0.592147
883 1 0.785466 0.978862 0.577397
1321 1 0.951227 0.975382 0.578737
220 1 0.924375 0.991475 0.595295
1841 1 0.168378 0.502596 0.612674
530 1 0.623135 0.520981 0.608061
1140 1 0.50166 0.616132 0.601988
406 1 0.930772 0.627961 0.606314
744 1 0.930644 0.63858 0.609253
561 1 0.389062 0.662463 0.598306
330 1 0.484539 0.678958 0.611184
700 1 0.77722 0.709356 0.607787
138 1 0.127105 0.740857 0.603114
59 1 0.432455 0.788948 0.610471
1141 1 0.912908 0.801595 0.603904
1384 1 0.804911 0.812696 0.613242
1274 1 0.672713 0.841134 0.606279
1402 1 0.327834 0.864423 0.604456
963 1 0.512183 0.915905 0.607573
155 1 0.789741 0.956225 0.614205
1654 1 0.588433 0.977518 0.613112
1599 1 0.0143148 0.990304 0.596932
302 1 0.15609 0.99385 0.599858
1037 1 0.533841 0.508194 0.622012
628 1 0.321856 0.537396 0.633493
427 1 0.388056 0.530318 0.625343
102 1 0.75277 0.532095 0.622222
718 1 0.675121 0.569413 0.634557
1322 1 0.34869 0.577981 0.634342
632 1 0.559301 0.594548 0.630482
250 1 0.806116 0.644097 0.622006
548 1 0.254191 0.677073 0.61613
613 1 0.0186006 0.704486 0.631195
1521 1 0.781491 0.717191 0.615864
913 1 0.209556 0.813164 0.634608
1280 1 0.418655 0.895611 0.627017
1817 1 0.651188 0.905054 0.626889
24 1 0.706144 0.906502 0.629017
1121 1 0.236781 0.996066 0.623721
154 1 0.46711 0.506522 0.644423
1491 1 0.854555 0.518686 0.642736
1107 1 0.840732 0.554401 0.642556
743 1 0.501465 0.575664 0.645676
1633 1 0.377797 0.577135 0.651664
1845 1 0.0478432 0.61628 0.64484
1185 1 0.05933 0.616204 0.638252
288 1 0.1127 0.638811 0.646496
1133 1 0.474862 0.638209 0.647965
912 1 0.990191 0.652563 0.645518
571 1 0.716492 0.656532 0.641084
664 1 0.511307 0.684378 0.641212
1272 1 0.683151 0.679735 0.653622
93 1 0.950909 0.67449 0.652032
1749 1 0.231957 0.742767 0.651546
2046 1 0.940936 0.740509 0.63499
1744 1 0.14747 0.782653 0.642961
397 1 0.402804 0.815456 0.638603
174 1 0.500781 0.856474 0.647054
745 1 0.671451 0.878771 0.652546
38 1 0.904006 0.866654 0.648002
1995 1 0.257875 0.887402 0.645185
1722 1 0.420181 0.900363 0.63781
1862 1 0.552935 0.895421 0.641579
197 1 0.571832 0.898221 0.636885
946 1 0.0591157 0.911102 0.648157
1419 1 0.0647412 0.932764 0.637109
1342 1 0.637927 0.929399 0.646115
679 1 0.20665 0.990922 0.65017
50 1 0.973865 0.981664 0.641071
736 1 0.076207 0.581252 0.658486
510 1 0.777553 0.580671 0.6597
677 1 0.438068 0.610346 0.672457
362 1 0.808242 0.62939 0.654732
1420 1 0.33526 0.64094 0.667866
86 1 0.471647 0.63798 0.672352
1381 1 0.767626 0.641521 0.669968
1075 1 0.181147 0.664544 0.660928
1513 1 0.975744 0.688082 0.658327
606 1 0.308204 0.711433 0.658015
890 1 0.0112286 0.722272 0.669051
1639 1 0.0582913 0.736786 0.658885
1411 1 0.0896132 0.738301 0.672905
1666 1 0.746537 0.747881 0.668126
1619 1 0.794833 0.751896 0.669629
1756 1 0.43914 0.821595 0.655262
1895 1 0.73535 0.967276 0.665211
6 1 0.618452 0.547244 0.673583
189 1 0.332254 0.608709 0.674931
813 1 0.764673 0.616526 0.674433
1649 1 0.0816023 0.652521 0.676021
655 1 0.976399 0.653131 0.685175
1960 1 0.923446 0.671727 0.676831
178 1 0.329775 0.675586 0.67482
1556 1 0.997556 0.693306 0.682111
698 1 0.701009 0.717734 0.675816
91 1 0.801474 0.7555 0.680518
1448 1 0.832431 0.756473 0.691954
294 1 0.0985018 0.769609 0.688696
1933 1 0.587808 0.826291 0.687399
1098 1 0.576318 0.842806 0.690871
754 1 0.0965061 0.879082 0.681892
1282 1 0.139316 0.884773 0.686764
635 1 0.219526 0.929836 0.686426
2012 1 0.258927 0.933102 0.689925
1120 1 0.144975 0.978821 0.684699
652 1 0.0960549 0.514036 0.692796
1267 1 0.888055 0.512676 0.705213
1730 1 0.104438 0.520299 0.701623
312 1 0.77371 0.529341 0.704434
1207 1 0.158256 0.550978 0.707732
1963 1 0.90625 0.546056 0.710553
668 1 0.127868 0.621563 0.706752
1333 1 0.987342 0.647927 0.710983
1276 1 0.910145 0.667852 0.703725
599 1 0.0425952 0.687877 0.701065
950 1 0.368969 0.694387 0.705585
600 1 0.744391 0.699026 0.696839
359 1 0.871246 0.710091 0.696586
1019 1 0.239932 0.767455 0.6967
589 1 0.945666 0.754893 0.69479
805 1 0.459772 0.769561 0.705924
1685 1 0.261587 0.812611 0.693466
888 1 0.575382 0.959654 0.69269
1811 1 0.0702895 0.992713 0.694451
1707 1 0.842993 0.984015 0.693922
1355 1 0.46357 0.548738 0.720372
1716 1 0.454921 0.610182 0.721729
1335 1 0.493266 0.629173 0.725022
792 1 0.246273 0.662154 0.717732
1318 1 0.351377 0.675253 0.730639
975 1 0.346293 0.692499 0.713833
1541 1 0.0834726 0.713326 0.717134
697 1 0.363445 0.730298 0.717193
1212 1 0.556606 0.725052 0.726421
40 1 0.901108 0.729067 0.722517
1076 1 0.287341 0.78144 0.730709
721 1 0.305356 0.79234 0.726596
1501 1 0.166266 0.822213 0.7124
1831 1 0.0193904 0.829223 0.729047
720 1 0.592736 0.849483 0.715752
543 1 0.639003 0.85128 0.720146
1993 1 0.664384 0.884591 0.728995
2005 1 0.567969 0.890091 0.72029
1532 1 0.760124 0.896495 0.718617
2006 1 0.104194 0.925821 0.712366
1555 1 0.510259 0.939857 0.729489
911 1 0.635921 0.951675 0.723264
1236 1 0.69633 0.969632 0.728551
1286 1 0.787768 0.963472 0.728674
1937 1 0.486659 0.999628 0.724334
1691 1 0.352954 0.525582 0.74543
496 1 0.612185 0.556638 0.748835
1768 1 0.089988 0.58031 0.741003
1729 1 0.278604 0.607852 0.748289
51 1 0.437277 0.629552 0.746486
1530 1 0.675451 0.678959 0.739078
1535 1 0.486925 0.733114 0.742845
1104 1 0.322496 0.787674 0.737394
331 1 0.196669 0.788484 0.748323
393 1 0.647907 0.805639 0.744589
75 1 0.15739 0.840423 0.742348
96 1 0.111617 0.861344 0.744304
97 1 0.785302 0.846629 0.739847
31 1 0.596659 0.911406 0.740173
844 1 0.925266 0.933891 0.739848
1154 1 0.0812531 0.950298 0.742804
1897 1 0.967655 0.952155 0.741443
1070 1 0.130884 0.515261 0.757625
678 1 0.341307 0.510792 0.756025
962 1 0.207995 0.59252 0.757155
1870 1 0.0489119 0.626643 0.762767
161 1 0.898058 0.629928 0.762098
1907 1 0.350887 0.665346 0.761017
231 1 0.333202 0.71958 0.761155
111 1 0.522871 0.725341 0.764892
1166 1 0.713395 0.738735 0.767053
1583 1 0.0865586 0.772556 0.756972
873 1 0.511769 0.784975 0.763863
808 1 0.705518 0.788421 0.754623
854 1 0.387632 0.826063 0.76776
85 1 0.634665 0.845866 0.768088
753 1 0.938677 0.921867 0.759314
509 1 0.634683 0.946103 0.766984
1949 1 0.231819 0.510695 0.781538
376 1 0.0671808 0.534481 0.769455
907 1 0.631236 0.576275 0.77918
19 1 0.204492 0.580714 0.775993
1998 1 0.271143 0.582415 0.781353
222 1 0.885522 0.591882 0.783322
733 1 0.56106 0.664648 0.783258
1637 1 0.923825 0.676483 0.780233
1571 1 0.259776 0.708682 0.776183
922 1 0.282188 0.774105 0.77662
259 1 0.44838 0.797991 0.78612
1233 1 0.622938 0.803419 0.774019
90 1 0.233305 0.811169 0.774083
1118 1 0.00335834 0.840587 0.777567
629 1 0.580205 0.840152 0.771782
1365 1 0.0404866 0.863663 0.775353
1458 1 0.156655 0.884163 0.772462
804 1 0.197788 0.869172 0.773975
603 1 0.538503 0.890464 0.778128
372 1 0.186138 0.911702 0.78119
1664 1 0.405612 0.93705 0.786699
924 1 0.976258 0.950468 0.773246
867 1 0.776735 0.543649 0.802285
1152 1 0.904013 0.63429 0.806375
1279 1 0.651065 0.635211 0.789311
826 1 0.164656 0.686393 0.801117
52 1 0.735051 0.689807 0.797772
1463 1 0.0282288 0.741694 0.790457
1686 1 0.808102 0.752731 0.800851
213 1 0.807743 0.798313 0.801993
1526 1 0.384435 0.821612 0.792904
291 1 0.244896 0.841 0.796948
1516 1 0.957993 0.850614 0.79383
1156 1 0.592561 0.896169 0.791361
1886 1 0.899556 0.890601 0.803911
1727 1 0.513107 0.911711 0.799563
1763 1 0.324627 0.929051 0.795192
1711 1 0.228233 0.990326 0.790082
612 1 0.527396 0.503743 0.821657
1126 1 0.546834 0.568024 0.816446
92 1 0.36005 0.589856 0.824888
656 1 0.499987 0.58167 0.822454
1865 1 0.829866 0.578819 0.818141
1395 1 0.797821 0.596799 0.818881
1689 1 0.291526 0.668892 0.825692
1985 1 0.0286264 0.688071 0.824743
466 1 0.334303 0.679694 0.816713
1783 1 0.199529 0.756643 0.820277
871 1 0.718946 0.809835 0.823707
686 1 0.234463 0.844906 0.811724
1942 1 0.436711 0.908451 0.820975
151 1 0.639617 0.941816 0.823572
1315 1 0.975589 0.944298 0.808418
316 1 0.22384 0.971968 0.815158
1782 1 0.050387 0.547767 0.833782
607 1 0.599468 0.556419 0.833719
1305 1 0.689685 0.576666 0.827596
217 1 0.966116 0.571073 0.845991
1952 1 0.12293 0.602572 0.841624
505 1 0.248077 0.613495 0.829199
624 1 0.616762 0.670077 0.843578
1668 1 0.792728 0.664597 0.840051
523 1 0.334787 0.72255 0.830482
483 1 0.0817073 0.750761 0.845759
460 1 0.285428 0.751001 0.84568
1997 1 0.674855 0.801332 0.827758
1481 1 0.534222 0.826904 0.835684
242 1 0.134327 0.831795 0.833131
1873 1 0.805016 0.846448 0.834449
129 1 0.917706 0.86362 0.841539
2039 1 0.435216 0.902158 0.830721
1340 1 0.347289 0.962883 0.834942
1682 1 0.845602 0.97536 0.834621
2011 1 0.167947 0.983129 0.828631
902 1 0.549179 0.505173 0.864356
218 1 0.888074 0.500835 0.851407
173 1 0.0246704 0.576886 0.857109
1525 1 0.32457 0.570525 0.861742
905 1 0.740032 0.57991 0.85552
497 1 0.57253 0.609164 0.857876
598 1 0.333403 0.640089 0.850605
104 1 0.675649 0.724591 0.855975
653 1 0.937777 0.826761 0.850331
323 1 0.99312 0.836266 0.862459
1957 1 0.118318 0.880456 0.85201
1767 1 0.425651 0.909146 0.848079
715 1 0.489227 0.919906 0.853216
1083 1 0.220568 0.959639 0.858568
933 1 0.808654 0.951391 0.859481
1219 1 0.26092 0.963421 0.863995
1851 1 0.114704 0.518064 0.873241
227 1 0.606378 0.649307 0.869508
1658 1 0.969316 0.636891 0.880289
1760 1 0.201314 0.76875 0.878216
1860 1 0.596082 0.757105 0.882853
1344 1 0.76029 0.763273 0.874565
594 1 0.639624 0.775256 0.876485
282 1 0.132946 0.801924 0.870652
1290 1 0.378441 0.794437 0.878678
759 1 0.38401 0.793286 0.867028
1522 1 0.929437 0.96117 0.869668
1336 1 0.582343 0.98365 0.878418
1214 1 0.348922 0.515855 0.897749
1189 1 0.234692 0.658147 0.89996
1429 1 0.345971 0.666946 0.88793
735 1 0.017502 0.699762 0.891791
614 1 0.170055 0.719835 0.898574
378 1 0.56615 0.712467 0.900535
1674 1 0.53085 0.785518 0.891098
1538 1 0.309981 0.821955 0.898117
431 1 0.320841 0.809854 0.895969
324 1 0.875123 0.880499 0.889532
1742 1 0.297947 0.899247 0.903487
315 1 0.43378 0.922063 0.888274
747 1 0.772378 0.924273 0.884819
1924 1 0.950606 0.509353 0.904226
857 1 0.816187 0.551086 0.918798
1719 1 0.881347 0.551533 0.921311
1646 1 0.110421 0.561284 0.917544
1197 1 0.438676 0.56489 0.909422
1029 1 0.550711 0.567626 0.912114
985 1 0.634198 0.55883 0.903894
2034 1 0.243659 0.600495 0.919045
1747 1 0.0393689 0.680108 0.912573
1991 1 0.33915 0.674458 0.911607
742 1 0.349477 0.724794 0.912405
587 1 0.638022 0.759787 0.910084
1885 1 0.660234 0.771065 0.907391
1183 1 0.282343 0.799105 0.91707
382 1 0.594145 0.86659 0.910421
1908 1 0.705525 0.900612 0.912212
1061 1 0.0488176 0.937271 0.917792
1210 1 0.00123038 0.970274 0.915162
381 1 0.614946 0.515715 0.932352
1703 1 0.669869 0.503148 0.932054
1195 1 0.753177 0.505727 0.93684
570 1 0.690837 0.530661 0.936082
1820 1 0.901639 0.540614 0.937351
1431 1 0.913676 0.579395 0.928063
740 1 0.356305 0.645024 0.923293
1558 1 0.545964 0.678753 0.926406
126 1 0.312108 0.728765 0.931051
352 1 0.514699 0.740441 0.934136
778 1 0.778836 0.740224 0.933205
125 1 0.796602 0.771217 0.931644
343 1 0.805905 0.788345 0.929472
49 1 0.10601 0.788896 0.929451
1063 1 0.40053 0.806301 0.923352
14 1 0.848423 0.816484 0.940086
2036 1 0.113913 0.864135 0.932778
219 1 0.811821 0.857287 0.938197
1298 1 0.327038 0.866256 0.931612
633 1 0.396302 0.889735 0.933278
267 1 0.535972 0.89444 0.935764
1595 1 0.396323 0.922667 0.930545
802 1 0.856022 0.915615 0.941293
1615 1 0.33521 0.941368 0.924491
627 1 0.301765 0.567577 0.947172
1306 1 0.183026 0.608 0.944395
916 1 0.322531 0.606869 0.942851
135 1 0.193748 0.633352 0.957142
1790 1 0.666612 0.642755 0.957028
1033 1 0.635926 0.658961 0.951799
842 1 0.332359 0.674624 0.957508
109 1 0.161086 0.716609 0.955495
473 1 0.388194 0.717469 0.95107
1285 1 0.741609 0.74936 0.956697
230 1 0.514856 0.79219 0.948826
1718 1 0.206499 0.842753 0.943389
1542 1 0.332526 0.889784 0.952255
751 1 0.340785 0.956053 0.950453
434 1 0.675976 0.951446 0.947776
810 1 0.321134 0.985008 0.956844
1032 1 0.809584 0.512693 0.974322
1498 1 0.0891929 0.534879 0.98017
2031 1 0.473005 0.549497 0.979806
1563 1 0.508772 0.549177 0.962122
423 1 0.730115 0.55575 0.977047
1164 1 0.580464 0.581848 0.964744
850 1 0.197357 0.627114 0.964423
974 1 0.764376 0.630333 0.977527
609 1 0.535318 0.672344 0.965263
1361 1 0.72218 0.680815 0.965545
1114 1 0.409585 0.693577 0.969881
1410 1 0.737067 0.705262 0.970814
855 1 0.879813 0.725224 0.962345
1850 1 0.970873 0.732405 0.97722
1812 1 0.542342 0.786127 0.963064
115 1 0.111217 0.816883 0.971208
98 1 0.242513 0.811501 0.975086
182 1 0.323188 0.825662 0.963208
1046 1 0.498763 0.807739 0.969822
1520 1 0.929838 0.861284 0.962889
1659 1 0.734023 0.87149 0.976961
1259 1 0.846369 0.892195 0.969713
1223 1 0.135357 0.927563 0.979515
823 1 0.156006 0.95774 0.977588
1578 1 0.9868 0.96672 0.964286
2045 1 0.254156 0.501156 0.997581
1919 1 0.200078 0.540598 0.981186
1447 1 0.826769 0.58083 0.994607
986 1 0.630698 0.60336 0.988521
889 1 0.759572 0.609657 0.99047
1496 1 0.299786 0.619595 0.998513
1948 1 0.665458 0.647537 0.997501
593 1 0.771195 0.68111 0.992236
23 1 0.211944 0.693588 0.982205
1145 1 0.50918 0.733496 0.995189
412 1 0.478642 0.765578 0.98226
1254 1 0.668081 0.767375 0.990512
1375 1 0.901307 0.75399 0.989548
835 1 0.108948 0.825986 0.984663
981 1 0.48803 0.811595 0.990615
1105 1 0.390871 0.889706 0.989447
1399 1 0.70794 0.903501 0.98143
1444 1 0.820112 0.947248 0.99117
1797 1 0.65955 0.965872 0.99423
| [
"[email protected]"
] | |
9075dd1014f657925ce2124e438848edeafc5986 | a4ea525e226d6c401fdb87a6e9adfdc5d07e6020 | /src/azure-cli/azure/cli/command_modules/network/aaz/latest/network/lb/inbound_nat_pool/_create.py | d0545878bf25bb35ef2c3afc23da82bba846e82e | [
"MIT",
"BSD-3-Clause",
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MPL-2.0",
"LGPL-2.1-only",
"Apache-2.0",
"LGPL-2.1-or-later",
"BSD-2-Clause"
] | permissive | Azure/azure-cli | 13340eeca2e288e66e84d393fa1c8a93d46c8686 | a40fd14ad0b6e89720a2e58d4d9be3a6ce1535ca | refs/heads/dev | 2023-08-17T06:25:37.431463 | 2023-08-17T06:00:10 | 2023-08-17T06:00:10 | 51,040,886 | 4,018 | 3,310 | MIT | 2023-09-14T11:11:05 | 2016-02-04T00:21:51 | Python | UTF-8 | Python | false | false | 121,667 | py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command(
"network lb inbound-nat-pool create",
)
class Create(AAZCommand):
"""Create an inbound NAT address pool.
:example: Create an inbound NAT address pool.
az network lb inbound-nat-pool create -g MyResourceGroup --lb-name MyLb -n MyNatPool --protocol Tcp --frontend-port-range-start 80 --frontend-port-range-end 89 --backend-port 80 --frontend-ip MyFrontendIp
"""
_aaz_info = {
"version": "2022-05-01",
"resources": [
["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.network/loadbalancers/{}", "2022-05-01", "properties.inboundNatPools[]"],
]
}
AZ_SUPPORT_NO_WAIT = True
def _handler(self, command_args):
super()._handler(command_args)
self.SubresourceSelector(ctx=self.ctx, name="subresource")
return self.build_lro_poller(self._execute_operations, self._output)
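    # The "subresource" selector registered in _handler scopes the in-memory
    # create (InstanceCreateByJson) and the command output to the single
    # inbound NAT pool identified by --name.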
_args_schema = None
@classmethod
def _build_arguments_schema(cls, *args, **kwargs):
if cls._args_schema is not None:
return cls._args_schema
cls._args_schema = super()._build_arguments_schema(*args, **kwargs)
# define Arg Group ""
_args_schema = cls._args_schema
_args_schema.lb_name = AAZStrArg(
options=["--lb-name"],
help="The load balancer name.",
required=True,
)
_args_schema.resource_group = AAZResourceGroupNameArg(
required=True,
)
_args_schema.name = AAZStrArg(
options=["-n", "--name"],
help="The name of the resource that is unique within the set of inbound NAT pools used by the load balancer. This name can be used to access the resource.",
required=True,
)
# define Arg Group "Parameters.properties.inboundNatPools[]"
# define Arg Group "Properties"
_args_schema = cls._args_schema
_args_schema.backend_port = AAZIntArg(
options=["--backend-port"],
arg_group="Properties",
help="The port used for internal connections on the endpoint. Acceptable values are between 1 and 65535.",
required=True,
)
_args_schema.enable_floating_ip = AAZBoolArg(
options=["--floating-ip", "--enable-floating-ip"],
arg_group="Properties",
help="Configures a virtual machine's endpoint for the floating IP capability required to configure a SQL AlwaysOn Availability Group. This setting is required when using the SQL AlwaysOn Availability Groups in SQL server. This setting can't be changed after you create the endpoint.",
)
_args_schema.enable_tcp_reset = AAZBoolArg(
options=["--enable-tcp-reset"],
arg_group="Properties",
help="Receive bidirectional TCP Reset on TCP flow idle timeout or unexpected connection termination. This element is only used when the protocol is set to TCP.",
)
_args_schema.frontend_ip_name = AAZStrArg(
options=["--frontend-ip", "--frontend-ip-name"],
arg_group="Properties",
help="The name or ID of the frontend IP configuration.",
)
_args_schema.frontend_port_range_end = AAZIntArg(
options=["--frontend-port-range-end"],
arg_group="Properties",
help="The last port number in the range of external ports that will be used to provide Inbound Nat to NICs associated with a load balancer. Acceptable values range between 1 and 65535.",
required=True,
)
_args_schema.frontend_port_range_start = AAZIntArg(
options=["--frontend-port-range-start"],
arg_group="Properties",
help="The first port number in the range of external ports that will be used to provide Inbound Nat to NICs associated with a load balancer. Acceptable values range between 1 and 65534.",
required=True,
)
_args_schema.idle_timeout_in_minutes = AAZIntArg(
options=["--idle-timeout", "--idle-timeout-in-minutes"],
arg_group="Properties",
help="The timeout for the TCP idle connection. The value can be set between 4 and 30 minutes. The default value is 4 minutes. This element is only used when the protocol is set to TCP.",
)
_args_schema.protocol = AAZStrArg(
options=["--protocol"],
arg_group="Properties",
help="The reference to the transport protocol used by the inbound NAT pool.",
required=True,
enum={"All": "All", "Tcp": "Tcp", "Udp": "Udp"},
)
return cls._args_schema
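    # Illustrative only -- the optional flags above map onto an invocation such as
    #   az network lb inbound-nat-pool create -g MyResourceGroup --lb-name MyLb \
    #       -n MyNatPool --protocol Tcp --frontend-port-range-start 80 \
    #       --frontend-port-range-end 89 --backend-port 80 \
    #       --floating-ip false --enable-tcp-reset true --idle-timeout 10
    # (values are hypothetical; only the name, group, protocol and port
    # arguments are required by the schema).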
def _execute_operations(self):
self.pre_operations()
self.LoadBalancersGet(ctx=self.ctx)()
self.pre_instance_create()
self.InstanceCreateByJson(ctx=self.ctx)()
self.post_instance_create(self.ctx.selectors.subresource.required())
yield self.LoadBalancersCreateOrUpdate(ctx=self.ctx)()
self.post_operations()
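    # _execute_operations follows a read-modify-write pattern: fetch the full
    # load balancer (LoadBalancersGet), build the new inbound NAT pool locally
    # and place it via the "subresource" selector (InstanceCreateByJson), then
    # PUT the whole resource back (LoadBalancersCreateOrUpdate) as a
    # long-running operation.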
@register_callback
def pre_operations(self):
pass
@register_callback
def post_operations(self):
pass
@register_callback
def pre_instance_create(self):
pass
@register_callback
def post_instance_create(self, instance):
pass
def _output(self, *args, **kwargs):
result = self.deserialize_output(self.ctx.selectors.subresource.required(), client_flatten=True)
return result
class SubresourceSelector(AAZJsonSelector):
def _get(self):
result = self.ctx.vars.instance
result = result.properties.inboundNatPools
filters = enumerate(result)
filters = filter(
lambda e: e[1].name == self.ctx.args.name,
filters
)
idx = next(filters)[0]
return result[idx]
def _set(self, value):
result = self.ctx.vars.instance
result = result.properties.inboundNatPools
filters = enumerate(result)
filters = filter(
lambda e: e[1].name == self.ctx.args.name,
filters
)
idx = next(filters, [len(result)])[0]
result[idx] = value
return
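    # A minimal plain-Python sketch of the selector semantics above (the names
    # here are hypothetical stand-ins for the AAZ context objects): _get raises
    # StopIteration when no pool matches, while _set appends in that case.
    #
    #   def locate(pools, name):
    #       for idx, pool in enumerate(pools):
    #           if pool.name == name:
    #               return idx
    #       return len(pools)  # _set writes here, i.e. appends a new pool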
class LoadBalancersGet(AAZHttpOperation):
CLIENT_TYPE = "MgmtClient"
def __call__(self, *args, **kwargs):
request = self.make_request()
session = self.client.send_request(request=request, stream=False, **kwargs)
if session.http_response.status_code in [200]:
return self.on_200(session)
return self.on_error(session.http_response)
@property
def url(self):
return self.client.format_url(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}",
**self.url_parameters
)
@property
def method(self):
return "GET"
@property
def error_format(self):
return "ODataV4Format"
@property
def url_parameters(self):
parameters = {
**self.serialize_url_param(
"loadBalancerName", self.ctx.args.lb_name,
required=True,
),
**self.serialize_url_param(
"resourceGroupName", self.ctx.args.resource_group,
required=True,
),
**self.serialize_url_param(
"subscriptionId", self.ctx.subscription_id,
required=True,
),
}
return parameters
@property
def query_parameters(self):
parameters = {
**self.serialize_query_param(
"api-version", "2022-05-01",
required=True,
),
}
return parameters
@property
def header_parameters(self):
parameters = {
**self.serialize_header_param(
"Accept", "application/json",
),
}
return parameters
def on_200(self, session):
data = self.deserialize_http_content(session)
self.ctx.set_var(
"instance",
data,
schema_builder=self._build_schema_on_200
)
_schema_on_200 = None
@classmethod
def _build_schema_on_200(cls):
if cls._schema_on_200 is not None:
return cls._schema_on_200
cls._schema_on_200 = AAZObjectType()
_CreateHelper._build_schema_load_balancer_read(cls._schema_on_200)
return cls._schema_on_200
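    # The GET response is deserialized with the shared load-balancer schema and
    # stored in ctx.vars.instance so that later operations can mutate it in place.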
class LoadBalancersCreateOrUpdate(AAZHttpOperation):
CLIENT_TYPE = "MgmtClient"
def __call__(self, *args, **kwargs):
request = self.make_request()
session = self.client.send_request(request=request, stream=False, **kwargs)
if session.http_response.status_code in [202]:
return self.client.build_lro_polling(
self.ctx.args.no_wait,
session,
self.on_200_201,
self.on_error,
lro_options={"final-state-via": "azure-async-operation"},
path_format_arguments=self.url_parameters,
)
if session.http_response.status_code in [200, 201]:
return self.client.build_lro_polling(
self.ctx.args.no_wait,
session,
self.on_200_201,
self.on_error,
lro_options={"final-state-via": "azure-async-operation"},
path_format_arguments=self.url_parameters,
)
return self.on_error(session.http_response)
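        # Both the 202 and the 200/201 branches wrap the response in an LRO
        # poller that tracks the Azure-AsyncOperation final state until
        # provisioning settles (or returns immediately with --no-wait).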
@property
def url(self):
return self.client.format_url(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}",
**self.url_parameters
)
@property
def method(self):
return "PUT"
@property
def error_format(self):
return "ODataV4Format"
@property
def url_parameters(self):
parameters = {
**self.serialize_url_param(
"loadBalancerName", self.ctx.args.lb_name,
required=True,
),
**self.serialize_url_param(
"resourceGroupName", self.ctx.args.resource_group,
required=True,
),
**self.serialize_url_param(
"subscriptionId", self.ctx.subscription_id,
required=True,
),
}
return parameters
@property
def query_parameters(self):
parameters = {
**self.serialize_query_param(
"api-version", "2022-05-01",
required=True,
),
}
return parameters
@property
def header_parameters(self):
parameters = {
**self.serialize_header_param(
"Content-Type", "application/json",
),
**self.serialize_header_param(
"Accept", "application/json",
),
}
return parameters
@property
def content(self):
_content_value, _builder = self.new_content_builder(
self.ctx.args,
value=self.ctx.vars.instance,
)
return self.serialize_content(_content_value)
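        # Note: the PUT body is the entire mutated load balancer
        # (ctx.vars.instance), so the new inbound NAT pool is sent together
        # with all of the resource's existing configuration.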
def on_200_201(self, session):
data = self.deserialize_http_content(session)
self.ctx.set_var(
"instance",
data,
schema_builder=self._build_schema_on_200_201
)
_schema_on_200_201 = None
@classmethod
def _build_schema_on_200_201(cls):
if cls._schema_on_200_201 is not None:
return cls._schema_on_200_201
cls._schema_on_200_201 = AAZObjectType()
_CreateHelper._build_schema_load_balancer_read(cls._schema_on_200_201)
return cls._schema_on_200_201
class InstanceCreateByJson(AAZJsonInstanceCreateOperation):
def __call__(self, *args, **kwargs):
self.ctx.selectors.subresource.set(self._create_instance())
def _create_instance(self):
_instance_value, _builder = self.new_content_builder(
self.ctx.args,
typ=AAZObjectType
)
_builder.set_prop("name", AAZStrType, ".name")
_builder.set_prop("properties", AAZObjectType, ".", typ_kwargs={"flags": {"required": True, "client_flatten": True}})
properties = _builder.get(".properties")
if properties is not None:
properties.set_prop("backendPort", AAZIntType, ".backend_port", typ_kwargs={"flags": {"required": True}})
properties.set_prop("enableFloatingIP", AAZBoolType, ".enable_floating_ip")
properties.set_prop("enableTcpReset", AAZBoolType, ".enable_tcp_reset")
properties.set_prop("frontendIPConfiguration", AAZObjectType)
properties.set_prop("frontendPortRangeEnd", AAZIntType, ".frontend_port_range_end", typ_kwargs={"flags": {"required": True}})
properties.set_prop("frontendPortRangeStart", AAZIntType, ".frontend_port_range_start", typ_kwargs={"flags": {"required": True}})
properties.set_prop("idleTimeoutInMinutes", AAZIntType, ".idle_timeout_in_minutes")
properties.set_prop("protocol", AAZStrType, ".protocol", typ_kwargs={"flags": {"required": True}})
frontend_ip_configuration = _builder.get(".properties.frontendIPConfiguration")
if frontend_ip_configuration is not None:
frontend_ip_configuration.set_prop("id", AAZStrType, ".frontend_ip_name")
return _instance_value
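        # For reference, the builder above serializes to a payload shaped
        # roughly like this (values are illustrative, echoing the command's
        # example; the frontend IP reference is set from --frontend-ip):
        #
        #   {
        #     "name": "MyNatPool",
        #     "properties": {
        #       "protocol": "Tcp",
        #       "frontendPortRangeStart": 80,
        #       "frontendPortRangeEnd": 89,
        #       "backendPort": 80,
        #       "frontendIPConfiguration": {"id": "<frontend-ip-id>"}
        #     }
        #   }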
class _CreateHelper:
"""Helper class for Create"""
_schema_application_security_group_read = None
@classmethod
def _build_schema_application_security_group_read(cls, _schema):
if cls._schema_application_security_group_read is not None:
_schema.etag = cls._schema_application_security_group_read.etag
_schema.id = cls._schema_application_security_group_read.id
_schema.location = cls._schema_application_security_group_read.location
_schema.name = cls._schema_application_security_group_read.name
_schema.properties = cls._schema_application_security_group_read.properties
_schema.tags = cls._schema_application_security_group_read.tags
_schema.type = cls._schema_application_security_group_read.type
return
cls._schema_application_security_group_read = _schema_application_security_group_read = AAZObjectType()
application_security_group_read = _schema_application_security_group_read
application_security_group_read.etag = AAZStrType(
flags={"read_only": True},
)
application_security_group_read.id = AAZStrType()
application_security_group_read.location = AAZStrType()
application_security_group_read.name = AAZStrType(
flags={"read_only": True},
)
application_security_group_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
application_security_group_read.tags = AAZDictType()
application_security_group_read.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_application_security_group_read.properties
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.resource_guid = AAZStrType(
serialized_name="resourceGuid",
flags={"read_only": True},
)
tags = _schema_application_security_group_read.tags
tags.Element = AAZStrType()
_schema.etag = cls._schema_application_security_group_read.etag
_schema.id = cls._schema_application_security_group_read.id
_schema.location = cls._schema_application_security_group_read.location
_schema.name = cls._schema_application_security_group_read.name
_schema.properties = cls._schema_application_security_group_read.properties
_schema.tags = cls._schema_application_security_group_read.tags
_schema.type = cls._schema_application_security_group_read.type
_schema_backend_address_pool_read = None
@classmethod
def _build_schema_backend_address_pool_read(cls, _schema):
if cls._schema_backend_address_pool_read is not None:
_schema.etag = cls._schema_backend_address_pool_read.etag
_schema.id = cls._schema_backend_address_pool_read.id
_schema.name = cls._schema_backend_address_pool_read.name
_schema.properties = cls._schema_backend_address_pool_read.properties
_schema.type = cls._schema_backend_address_pool_read.type
return
cls._schema_backend_address_pool_read = _schema_backend_address_pool_read = AAZObjectType()
backend_address_pool_read = _schema_backend_address_pool_read
backend_address_pool_read.etag = AAZStrType(
flags={"read_only": True},
)
backend_address_pool_read.id = AAZStrType()
backend_address_pool_read.name = AAZStrType()
backend_address_pool_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
backend_address_pool_read.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_backend_address_pool_read.properties
properties.backend_ip_configurations = AAZListType(
serialized_name="backendIPConfigurations",
flags={"read_only": True},
)
properties.drain_period_in_seconds = AAZIntType(
serialized_name="drainPeriodInSeconds",
)
properties.inbound_nat_rules = AAZListType(
serialized_name="inboundNatRules",
flags={"read_only": True},
)
properties.load_balancer_backend_addresses = AAZListType(
serialized_name="loadBalancerBackendAddresses",
)
properties.load_balancing_rules = AAZListType(
serialized_name="loadBalancingRules",
flags={"read_only": True},
)
properties.location = AAZStrType()
properties.outbound_rule = AAZObjectType(
serialized_name="outboundRule",
)
cls._build_schema_sub_resource_read(properties.outbound_rule)
properties.outbound_rules = AAZListType(
serialized_name="outboundRules",
flags={"read_only": True},
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.tunnel_interfaces = AAZListType(
serialized_name="tunnelInterfaces",
)
backend_ip_configurations = _schema_backend_address_pool_read.properties.backend_ip_configurations
backend_ip_configurations.Element = AAZObjectType()
cls._build_schema_network_interface_ip_configuration_read(backend_ip_configurations.Element)
inbound_nat_rules = _schema_backend_address_pool_read.properties.inbound_nat_rules
inbound_nat_rules.Element = AAZObjectType()
cls._build_schema_sub_resource_read(inbound_nat_rules.Element)
load_balancer_backend_addresses = _schema_backend_address_pool_read.properties.load_balancer_backend_addresses
load_balancer_backend_addresses.Element = AAZObjectType()
_element = _schema_backend_address_pool_read.properties.load_balancer_backend_addresses.Element
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
properties = _schema_backend_address_pool_read.properties.load_balancer_backend_addresses.Element.properties
properties.admin_state = AAZStrType(
serialized_name="adminState",
)
properties.inbound_nat_rules_port_mapping = AAZListType(
serialized_name="inboundNatRulesPortMapping",
flags={"read_only": True},
)
properties.ip_address = AAZStrType(
serialized_name="ipAddress",
)
properties.load_balancer_frontend_ip_configuration = AAZObjectType(
serialized_name="loadBalancerFrontendIPConfiguration",
)
cls._build_schema_sub_resource_read(properties.load_balancer_frontend_ip_configuration)
properties.network_interface_ip_configuration = AAZObjectType(
serialized_name="networkInterfaceIPConfiguration",
)
cls._build_schema_sub_resource_read(properties.network_interface_ip_configuration)
properties.subnet = AAZObjectType()
cls._build_schema_sub_resource_read(properties.subnet)
properties.virtual_network = AAZObjectType(
serialized_name="virtualNetwork",
)
cls._build_schema_sub_resource_read(properties.virtual_network)
inbound_nat_rules_port_mapping = _schema_backend_address_pool_read.properties.load_balancer_backend_addresses.Element.properties.inbound_nat_rules_port_mapping
inbound_nat_rules_port_mapping.Element = AAZObjectType()
_element = _schema_backend_address_pool_read.properties.load_balancer_backend_addresses.Element.properties.inbound_nat_rules_port_mapping.Element
_element.backend_port = AAZIntType(
serialized_name="backendPort",
)
_element.frontend_port = AAZIntType(
serialized_name="frontendPort",
)
_element.inbound_nat_rule_name = AAZStrType(
serialized_name="inboundNatRuleName",
)
load_balancing_rules = _schema_backend_address_pool_read.properties.load_balancing_rules
load_balancing_rules.Element = AAZObjectType()
cls._build_schema_sub_resource_read(load_balancing_rules.Element)
outbound_rules = _schema_backend_address_pool_read.properties.outbound_rules
outbound_rules.Element = AAZObjectType()
cls._build_schema_sub_resource_read(outbound_rules.Element)
tunnel_interfaces = _schema_backend_address_pool_read.properties.tunnel_interfaces
tunnel_interfaces.Element = AAZObjectType()
_element = _schema_backend_address_pool_read.properties.tunnel_interfaces.Element
_element.identifier = AAZIntType()
_element.port = AAZIntType()
_element.protocol = AAZStrType()
_element.type = AAZStrType()
_schema.etag = cls._schema_backend_address_pool_read.etag
_schema.id = cls._schema_backend_address_pool_read.id
_schema.name = cls._schema_backend_address_pool_read.name
_schema.properties = cls._schema_backend_address_pool_read.properties
_schema.type = cls._schema_backend_address_pool_read.type
_schema_extended_location_read = None
@classmethod
def _build_schema_extended_location_read(cls, _schema):
if cls._schema_extended_location_read is not None:
_schema.name = cls._schema_extended_location_read.name
_schema.type = cls._schema_extended_location_read.type
return
cls._schema_extended_location_read = _schema_extended_location_read = AAZObjectType()
extended_location_read = _schema_extended_location_read
extended_location_read.name = AAZStrType()
extended_location_read.type = AAZStrType()
_schema.name = cls._schema_extended_location_read.name
_schema.type = cls._schema_extended_location_read.type
_schema_frontend_ip_configuration_read = None
@classmethod
def _build_schema_frontend_ip_configuration_read(cls, _schema):
if cls._schema_frontend_ip_configuration_read is not None:
_schema.etag = cls._schema_frontend_ip_configuration_read.etag
_schema.id = cls._schema_frontend_ip_configuration_read.id
_schema.name = cls._schema_frontend_ip_configuration_read.name
_schema.properties = cls._schema_frontend_ip_configuration_read.properties
_schema.type = cls._schema_frontend_ip_configuration_read.type
_schema.zones = cls._schema_frontend_ip_configuration_read.zones
return
cls._schema_frontend_ip_configuration_read = _schema_frontend_ip_configuration_read = AAZObjectType()
frontend_ip_configuration_read = _schema_frontend_ip_configuration_read
frontend_ip_configuration_read.etag = AAZStrType(
flags={"read_only": True},
)
frontend_ip_configuration_read.id = AAZStrType()
frontend_ip_configuration_read.name = AAZStrType()
frontend_ip_configuration_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
frontend_ip_configuration_read.type = AAZStrType(
flags={"read_only": True},
)
frontend_ip_configuration_read.zones = AAZListType()
properties = _schema_frontend_ip_configuration_read.properties
properties.gateway_load_balancer = AAZObjectType(
serialized_name="gatewayLoadBalancer",
)
cls._build_schema_sub_resource_read(properties.gateway_load_balancer)
properties.inbound_nat_pools = AAZListType(
serialized_name="inboundNatPools",
flags={"read_only": True},
)
properties.inbound_nat_rules = AAZListType(
serialized_name="inboundNatRules",
flags={"read_only": True},
)
properties.load_balancing_rules = AAZListType(
serialized_name="loadBalancingRules",
flags={"read_only": True},
)
properties.outbound_rules = AAZListType(
serialized_name="outboundRules",
flags={"read_only": True},
)
properties.private_ip_address = AAZStrType(
serialized_name="privateIPAddress",
)
properties.private_ip_address_version = AAZStrType(
serialized_name="privateIPAddressVersion",
)
properties.private_ip_allocation_method = AAZStrType(
serialized_name="privateIPAllocationMethod",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.public_ip_address = AAZObjectType(
serialized_name="publicIPAddress",
)
cls._build_schema_public_ip_address_read(properties.public_ip_address)
properties.public_ip_prefix = AAZObjectType(
serialized_name="publicIPPrefix",
)
cls._build_schema_sub_resource_read(properties.public_ip_prefix)
properties.subnet = AAZObjectType()
cls._build_schema_subnet_read(properties.subnet)
inbound_nat_pools = _schema_frontend_ip_configuration_read.properties.inbound_nat_pools
inbound_nat_pools.Element = AAZObjectType()
cls._build_schema_sub_resource_read(inbound_nat_pools.Element)
inbound_nat_rules = _schema_frontend_ip_configuration_read.properties.inbound_nat_rules
inbound_nat_rules.Element = AAZObjectType()
cls._build_schema_sub_resource_read(inbound_nat_rules.Element)
load_balancing_rules = _schema_frontend_ip_configuration_read.properties.load_balancing_rules
load_balancing_rules.Element = AAZObjectType()
cls._build_schema_sub_resource_read(load_balancing_rules.Element)
outbound_rules = _schema_frontend_ip_configuration_read.properties.outbound_rules
outbound_rules.Element = AAZObjectType()
cls._build_schema_sub_resource_read(outbound_rules.Element)
zones = _schema_frontend_ip_configuration_read.zones
zones.Element = AAZStrType()
_schema.etag = cls._schema_frontend_ip_configuration_read.etag
_schema.id = cls._schema_frontend_ip_configuration_read.id
_schema.name = cls._schema_frontend_ip_configuration_read.name
_schema.properties = cls._schema_frontend_ip_configuration_read.properties
_schema.type = cls._schema_frontend_ip_configuration_read.type
_schema.zones = cls._schema_frontend_ip_configuration_read.zones
_schema_ip_configuration_read = None
@classmethod
def _build_schema_ip_configuration_read(cls, _schema):
if cls._schema_ip_configuration_read is not None:
_schema.etag = cls._schema_ip_configuration_read.etag
_schema.id = cls._schema_ip_configuration_read.id
_schema.name = cls._schema_ip_configuration_read.name
_schema.properties = cls._schema_ip_configuration_read.properties
return
cls._schema_ip_configuration_read = _schema_ip_configuration_read = AAZObjectType()
ip_configuration_read = _schema_ip_configuration_read
ip_configuration_read.etag = AAZStrType(
flags={"read_only": True},
)
ip_configuration_read.id = AAZStrType()
ip_configuration_read.name = AAZStrType()
ip_configuration_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
properties = _schema_ip_configuration_read.properties
properties.private_ip_address = AAZStrType(
serialized_name="privateIPAddress",
)
properties.private_ip_allocation_method = AAZStrType(
serialized_name="privateIPAllocationMethod",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.public_ip_address = AAZObjectType(
serialized_name="publicIPAddress",
)
cls._build_schema_public_ip_address_read(properties.public_ip_address)
properties.subnet = AAZObjectType()
cls._build_schema_subnet_read(properties.subnet)
_schema.etag = cls._schema_ip_configuration_read.etag
_schema.id = cls._schema_ip_configuration_read.id
_schema.name = cls._schema_ip_configuration_read.name
_schema.properties = cls._schema_ip_configuration_read.properties
_schema_inbound_nat_rule_read = None
@classmethod
def _build_schema_inbound_nat_rule_read(cls, _schema):
if cls._schema_inbound_nat_rule_read is not None:
_schema.etag = cls._schema_inbound_nat_rule_read.etag
_schema.id = cls._schema_inbound_nat_rule_read.id
_schema.name = cls._schema_inbound_nat_rule_read.name
_schema.properties = cls._schema_inbound_nat_rule_read.properties
_schema.type = cls._schema_inbound_nat_rule_read.type
return
cls._schema_inbound_nat_rule_read = _schema_inbound_nat_rule_read = AAZObjectType()
inbound_nat_rule_read = _schema_inbound_nat_rule_read
inbound_nat_rule_read.etag = AAZStrType(
flags={"read_only": True},
)
inbound_nat_rule_read.id = AAZStrType()
inbound_nat_rule_read.name = AAZStrType()
inbound_nat_rule_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
inbound_nat_rule_read.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_inbound_nat_rule_read.properties
properties.backend_address_pool = AAZObjectType(
serialized_name="backendAddressPool",
)
cls._build_schema_sub_resource_read(properties.backend_address_pool)
properties.backend_ip_configuration = AAZObjectType(
serialized_name="backendIPConfiguration",
)
cls._build_schema_network_interface_ip_configuration_read(properties.backend_ip_configuration)
properties.backend_port = AAZIntType(
serialized_name="backendPort",
)
properties.enable_floating_ip = AAZBoolType(
serialized_name="enableFloatingIP",
)
properties.enable_tcp_reset = AAZBoolType(
serialized_name="enableTcpReset",
)
properties.frontend_ip_configuration = AAZObjectType(
serialized_name="frontendIPConfiguration",
)
cls._build_schema_sub_resource_read(properties.frontend_ip_configuration)
properties.frontend_port = AAZIntType(
serialized_name="frontendPort",
)
properties.frontend_port_range_end = AAZIntType(
serialized_name="frontendPortRangeEnd",
)
properties.frontend_port_range_start = AAZIntType(
serialized_name="frontendPortRangeStart",
)
properties.idle_timeout_in_minutes = AAZIntType(
serialized_name="idleTimeoutInMinutes",
)
properties.protocol = AAZStrType()
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
_schema.etag = cls._schema_inbound_nat_rule_read.etag
_schema.id = cls._schema_inbound_nat_rule_read.id
_schema.name = cls._schema_inbound_nat_rule_read.name
_schema.properties = cls._schema_inbound_nat_rule_read.properties
_schema.type = cls._schema_inbound_nat_rule_read.type
_schema_load_balancer_read = None
@classmethod
def _build_schema_load_balancer_read(cls, _schema):
if cls._schema_load_balancer_read is not None:
_schema.etag = cls._schema_load_balancer_read.etag
_schema.extended_location = cls._schema_load_balancer_read.extended_location
_schema.id = cls._schema_load_balancer_read.id
_schema.location = cls._schema_load_balancer_read.location
_schema.name = cls._schema_load_balancer_read.name
_schema.properties = cls._schema_load_balancer_read.properties
_schema.sku = cls._schema_load_balancer_read.sku
_schema.tags = cls._schema_load_balancer_read.tags
_schema.type = cls._schema_load_balancer_read.type
return
cls._schema_load_balancer_read = _schema_load_balancer_read = AAZObjectType()
load_balancer_read = _schema_load_balancer_read
load_balancer_read.etag = AAZStrType(
flags={"read_only": True},
)
load_balancer_read.extended_location = AAZObjectType(
serialized_name="extendedLocation",
)
cls._build_schema_extended_location_read(load_balancer_read.extended_location)
load_balancer_read.id = AAZStrType()
load_balancer_read.location = AAZStrType()
load_balancer_read.name = AAZStrType(
flags={"read_only": True},
)
load_balancer_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
load_balancer_read.sku = AAZObjectType()
load_balancer_read.tags = AAZDictType()
load_balancer_read.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_load_balancer_read.properties
properties.backend_address_pools = AAZListType(
serialized_name="backendAddressPools",
)
properties.frontend_ip_configurations = AAZListType(
serialized_name="frontendIPConfigurations",
)
properties.inbound_nat_pools = AAZListType(
serialized_name="inboundNatPools",
)
properties.inbound_nat_rules = AAZListType(
serialized_name="inboundNatRules",
)
properties.load_balancing_rules = AAZListType(
serialized_name="loadBalancingRules",
)
properties.outbound_rules = AAZListType(
serialized_name="outboundRules",
)
properties.probes = AAZListType()
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.resource_guid = AAZStrType(
serialized_name="resourceGuid",
flags={"read_only": True},
)
backend_address_pools = _schema_load_balancer_read.properties.backend_address_pools
backend_address_pools.Element = AAZObjectType()
cls._build_schema_backend_address_pool_read(backend_address_pools.Element)
frontend_ip_configurations = _schema_load_balancer_read.properties.frontend_ip_configurations
frontend_ip_configurations.Element = AAZObjectType()
cls._build_schema_frontend_ip_configuration_read(frontend_ip_configurations.Element)
inbound_nat_pools = _schema_load_balancer_read.properties.inbound_nat_pools
inbound_nat_pools.Element = AAZObjectType()
_element = _schema_load_balancer_read.properties.inbound_nat_pools.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType()
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"required": True, "client_flatten": True},
)
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_load_balancer_read.properties.inbound_nat_pools.Element.properties
properties.backend_port = AAZIntType(
serialized_name="backendPort",
flags={"required": True},
)
properties.enable_floating_ip = AAZBoolType(
serialized_name="enableFloatingIP",
)
properties.enable_tcp_reset = AAZBoolType(
serialized_name="enableTcpReset",
)
properties.frontend_ip_configuration = AAZObjectType(
serialized_name="frontendIPConfiguration",
)
cls._build_schema_sub_resource_read(properties.frontend_ip_configuration)
properties.frontend_port_range_end = AAZIntType(
serialized_name="frontendPortRangeEnd",
flags={"required": True},
)
properties.frontend_port_range_start = AAZIntType(
serialized_name="frontendPortRangeStart",
flags={"required": True},
)
properties.idle_timeout_in_minutes = AAZIntType(
serialized_name="idleTimeoutInMinutes",
)
properties.protocol = AAZStrType(
flags={"required": True},
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
inbound_nat_rules = _schema_load_balancer_read.properties.inbound_nat_rules
inbound_nat_rules.Element = AAZObjectType()
cls._build_schema_inbound_nat_rule_read(inbound_nat_rules.Element)
load_balancing_rules = _schema_load_balancer_read.properties.load_balancing_rules
load_balancing_rules.Element = AAZObjectType()
_element = _schema_load_balancer_read.properties.load_balancing_rules.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType()
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_load_balancer_read.properties.load_balancing_rules.Element.properties
properties.backend_address_pool = AAZObjectType(
serialized_name="backendAddressPool",
)
cls._build_schema_sub_resource_read(properties.backend_address_pool)
properties.backend_address_pools = AAZListType(
serialized_name="backendAddressPools",
)
properties.backend_port = AAZIntType(
serialized_name="backendPort",
)
properties.disable_outbound_snat = AAZBoolType(
serialized_name="disableOutboundSnat",
)
properties.enable_floating_ip = AAZBoolType(
serialized_name="enableFloatingIP",
)
properties.enable_tcp_reset = AAZBoolType(
serialized_name="enableTcpReset",
)
properties.frontend_ip_configuration = AAZObjectType(
serialized_name="frontendIPConfiguration",
)
cls._build_schema_sub_resource_read(properties.frontend_ip_configuration)
properties.frontend_port = AAZIntType(
serialized_name="frontendPort",
flags={"required": True},
)
properties.idle_timeout_in_minutes = AAZIntType(
serialized_name="idleTimeoutInMinutes",
)
properties.load_distribution = AAZStrType(
serialized_name="loadDistribution",
)
properties.probe = AAZObjectType()
cls._build_schema_sub_resource_read(properties.probe)
properties.protocol = AAZStrType(
flags={"required": True},
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
backend_address_pools = _schema_load_balancer_read.properties.load_balancing_rules.Element.properties.backend_address_pools
backend_address_pools.Element = AAZObjectType()
cls._build_schema_sub_resource_read(backend_address_pools.Element)
outbound_rules = _schema_load_balancer_read.properties.outbound_rules
outbound_rules.Element = AAZObjectType()
_element = _schema_load_balancer_read.properties.outbound_rules.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType()
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_load_balancer_read.properties.outbound_rules.Element.properties
properties.allocated_outbound_ports = AAZIntType(
serialized_name="allocatedOutboundPorts",
)
properties.backend_address_pool = AAZObjectType(
serialized_name="backendAddressPool",
flags={"required": True},
)
cls._build_schema_sub_resource_read(properties.backend_address_pool)
properties.enable_tcp_reset = AAZBoolType(
serialized_name="enableTcpReset",
)
properties.frontend_ip_configurations = AAZListType(
serialized_name="frontendIPConfigurations",
flags={"required": True},
)
properties.idle_timeout_in_minutes = AAZIntType(
serialized_name="idleTimeoutInMinutes",
)
properties.protocol = AAZStrType(
flags={"required": True},
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
frontend_ip_configurations = _schema_load_balancer_read.properties.outbound_rules.Element.properties.frontend_ip_configurations
frontend_ip_configurations.Element = AAZObjectType()
cls._build_schema_sub_resource_read(frontend_ip_configurations.Element)
probes = _schema_load_balancer_read.properties.probes
probes.Element = AAZObjectType()
_element = _schema_load_balancer_read.properties.probes.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType()
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_load_balancer_read.properties.probes.Element.properties
properties.interval_in_seconds = AAZIntType(
serialized_name="intervalInSeconds",
)
properties.load_balancing_rules = AAZListType(
serialized_name="loadBalancingRules",
flags={"read_only": True},
)
properties.number_of_probes = AAZIntType(
serialized_name="numberOfProbes",
)
properties.port = AAZIntType(
flags={"required": True},
)
properties.probe_threshold = AAZIntType(
serialized_name="probeThreshold",
)
properties.protocol = AAZStrType(
flags={"required": True},
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.request_path = AAZStrType(
serialized_name="requestPath",
)
load_balancing_rules = _schema_load_balancer_read.properties.probes.Element.properties.load_balancing_rules
load_balancing_rules.Element = AAZObjectType()
cls._build_schema_sub_resource_read(load_balancing_rules.Element)
sku = _schema_load_balancer_read.sku
sku.name = AAZStrType()
sku.tier = AAZStrType()
tags = _schema_load_balancer_read.tags
tags.Element = AAZStrType()
_schema.etag = cls._schema_load_balancer_read.etag
_schema.extended_location = cls._schema_load_balancer_read.extended_location
_schema.id = cls._schema_load_balancer_read.id
_schema.location = cls._schema_load_balancer_read.location
_schema.name = cls._schema_load_balancer_read.name
_schema.properties = cls._schema_load_balancer_read.properties
_schema.sku = cls._schema_load_balancer_read.sku
_schema.tags = cls._schema_load_balancer_read.tags
_schema.type = cls._schema_load_balancer_read.type
_schema_network_interface_ip_configuration_read = None
@classmethod
def _build_schema_network_interface_ip_configuration_read(cls, _schema):
if cls._schema_network_interface_ip_configuration_read is not None:
_schema.etag = cls._schema_network_interface_ip_configuration_read.etag
_schema.id = cls._schema_network_interface_ip_configuration_read.id
_schema.name = cls._schema_network_interface_ip_configuration_read.name
_schema.properties = cls._schema_network_interface_ip_configuration_read.properties
_schema.type = cls._schema_network_interface_ip_configuration_read.type
return
cls._schema_network_interface_ip_configuration_read = _schema_network_interface_ip_configuration_read = AAZObjectType()
network_interface_ip_configuration_read = _schema_network_interface_ip_configuration_read
network_interface_ip_configuration_read.etag = AAZStrType(
flags={"read_only": True},
)
network_interface_ip_configuration_read.id = AAZStrType()
network_interface_ip_configuration_read.name = AAZStrType()
network_interface_ip_configuration_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
network_interface_ip_configuration_read.type = AAZStrType()
properties = _schema_network_interface_ip_configuration_read.properties
properties.application_gateway_backend_address_pools = AAZListType(
serialized_name="applicationGatewayBackendAddressPools",
)
properties.application_security_groups = AAZListType(
serialized_name="applicationSecurityGroups",
)
properties.gateway_load_balancer = AAZObjectType(
serialized_name="gatewayLoadBalancer",
)
cls._build_schema_sub_resource_read(properties.gateway_load_balancer)
properties.load_balancer_backend_address_pools = AAZListType(
serialized_name="loadBalancerBackendAddressPools",
)
properties.load_balancer_inbound_nat_rules = AAZListType(
serialized_name="loadBalancerInboundNatRules",
)
properties.primary = AAZBoolType()
properties.private_ip_address = AAZStrType(
serialized_name="privateIPAddress",
)
properties.private_ip_address_version = AAZStrType(
serialized_name="privateIPAddressVersion",
)
properties.private_ip_allocation_method = AAZStrType(
serialized_name="privateIPAllocationMethod",
)
properties.private_link_connection_properties = AAZObjectType(
serialized_name="privateLinkConnectionProperties",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.public_ip_address = AAZObjectType(
serialized_name="publicIPAddress",
)
cls._build_schema_public_ip_address_read(properties.public_ip_address)
properties.subnet = AAZObjectType()
cls._build_schema_subnet_read(properties.subnet)
properties.virtual_network_taps = AAZListType(
serialized_name="virtualNetworkTaps",
)
application_gateway_backend_address_pools = _schema_network_interface_ip_configuration_read.properties.application_gateway_backend_address_pools
application_gateway_backend_address_pools.Element = AAZObjectType()
_element = _schema_network_interface_ip_configuration_read.properties.application_gateway_backend_address_pools.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType()
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_network_interface_ip_configuration_read.properties.application_gateway_backend_address_pools.Element.properties
properties.backend_addresses = AAZListType(
serialized_name="backendAddresses",
)
properties.backend_ip_configurations = AAZListType(
serialized_name="backendIPConfigurations",
flags={"read_only": True},
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
backend_addresses = _schema_network_interface_ip_configuration_read.properties.application_gateway_backend_address_pools.Element.properties.backend_addresses
backend_addresses.Element = AAZObjectType()
_element = _schema_network_interface_ip_configuration_read.properties.application_gateway_backend_address_pools.Element.properties.backend_addresses.Element
_element.fqdn = AAZStrType()
_element.ip_address = AAZStrType(
serialized_name="ipAddress",
)
backend_ip_configurations = _schema_network_interface_ip_configuration_read.properties.application_gateway_backend_address_pools.Element.properties.backend_ip_configurations
backend_ip_configurations.Element = AAZObjectType()
cls._build_schema_network_interface_ip_configuration_read(backend_ip_configurations.Element)
application_security_groups = _schema_network_interface_ip_configuration_read.properties.application_security_groups
application_security_groups.Element = AAZObjectType()
cls._build_schema_application_security_group_read(application_security_groups.Element)
load_balancer_backend_address_pools = _schema_network_interface_ip_configuration_read.properties.load_balancer_backend_address_pools
load_balancer_backend_address_pools.Element = AAZObjectType()
cls._build_schema_backend_address_pool_read(load_balancer_backend_address_pools.Element)
load_balancer_inbound_nat_rules = _schema_network_interface_ip_configuration_read.properties.load_balancer_inbound_nat_rules
load_balancer_inbound_nat_rules.Element = AAZObjectType()
cls._build_schema_inbound_nat_rule_read(load_balancer_inbound_nat_rules.Element)
private_link_connection_properties = _schema_network_interface_ip_configuration_read.properties.private_link_connection_properties
private_link_connection_properties.fqdns = AAZListType(
flags={"read_only": True},
)
private_link_connection_properties.group_id = AAZStrType(
serialized_name="groupId",
flags={"read_only": True},
)
private_link_connection_properties.required_member_name = AAZStrType(
serialized_name="requiredMemberName",
flags={"read_only": True},
)
fqdns = _schema_network_interface_ip_configuration_read.properties.private_link_connection_properties.fqdns
fqdns.Element = AAZStrType()
virtual_network_taps = _schema_network_interface_ip_configuration_read.properties.virtual_network_taps
virtual_network_taps.Element = AAZObjectType()
cls._build_schema_virtual_network_tap_read(virtual_network_taps.Element)
_schema.etag = cls._schema_network_interface_ip_configuration_read.etag
_schema.id = cls._schema_network_interface_ip_configuration_read.id
_schema.name = cls._schema_network_interface_ip_configuration_read.name
_schema.properties = cls._schema_network_interface_ip_configuration_read.properties
_schema.type = cls._schema_network_interface_ip_configuration_read.type
_schema_network_interface_tap_configuration_read = None
@classmethod
def _build_schema_network_interface_tap_configuration_read(cls, _schema):
if cls._schema_network_interface_tap_configuration_read is not None:
_schema.etag = cls._schema_network_interface_tap_configuration_read.etag
_schema.id = cls._schema_network_interface_tap_configuration_read.id
_schema.name = cls._schema_network_interface_tap_configuration_read.name
_schema.properties = cls._schema_network_interface_tap_configuration_read.properties
_schema.type = cls._schema_network_interface_tap_configuration_read.type
return
cls._schema_network_interface_tap_configuration_read = _schema_network_interface_tap_configuration_read = AAZObjectType()
network_interface_tap_configuration_read = _schema_network_interface_tap_configuration_read
network_interface_tap_configuration_read.etag = AAZStrType(
flags={"read_only": True},
)
network_interface_tap_configuration_read.id = AAZStrType()
network_interface_tap_configuration_read.name = AAZStrType()
network_interface_tap_configuration_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
network_interface_tap_configuration_read.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_network_interface_tap_configuration_read.properties
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.virtual_network_tap = AAZObjectType(
serialized_name="virtualNetworkTap",
)
cls._build_schema_virtual_network_tap_read(properties.virtual_network_tap)
_schema.etag = cls._schema_network_interface_tap_configuration_read.etag
_schema.id = cls._schema_network_interface_tap_configuration_read.id
_schema.name = cls._schema_network_interface_tap_configuration_read.name
_schema.properties = cls._schema_network_interface_tap_configuration_read.properties
_schema.type = cls._schema_network_interface_tap_configuration_read.type
_schema_network_interface_read = None
@classmethod
def _build_schema_network_interface_read(cls, _schema):
if cls._schema_network_interface_read is not None:
_schema.etag = cls._schema_network_interface_read.etag
_schema.extended_location = cls._schema_network_interface_read.extended_location
_schema.id = cls._schema_network_interface_read.id
_schema.location = cls._schema_network_interface_read.location
_schema.name = cls._schema_network_interface_read.name
_schema.properties = cls._schema_network_interface_read.properties
_schema.tags = cls._schema_network_interface_read.tags
_schema.type = cls._schema_network_interface_read.type
return
cls._schema_network_interface_read = _schema_network_interface_read = AAZObjectType()
network_interface_read = _schema_network_interface_read
network_interface_read.etag = AAZStrType(
flags={"read_only": True},
)
network_interface_read.extended_location = AAZObjectType(
serialized_name="extendedLocation",
)
cls._build_schema_extended_location_read(network_interface_read.extended_location)
network_interface_read.id = AAZStrType()
network_interface_read.location = AAZStrType()
network_interface_read.name = AAZStrType(
flags={"read_only": True},
)
network_interface_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
network_interface_read.tags = AAZDictType()
network_interface_read.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_network_interface_read.properties
properties.auxiliary_mode = AAZStrType(
serialized_name="auxiliaryMode",
)
properties.disable_tcp_state_tracking = AAZBoolType(
serialized_name="disableTcpStateTracking",
)
properties.dns_settings = AAZObjectType(
serialized_name="dnsSettings",
)
properties.dscp_configuration = AAZObjectType(
serialized_name="dscpConfiguration",
)
cls._build_schema_sub_resource_read(properties.dscp_configuration)
properties.enable_accelerated_networking = AAZBoolType(
serialized_name="enableAcceleratedNetworking",
)
properties.enable_ip_forwarding = AAZBoolType(
serialized_name="enableIPForwarding",
)
properties.hosted_workloads = AAZListType(
serialized_name="hostedWorkloads",
flags={"read_only": True},
)
properties.ip_configurations = AAZListType(
serialized_name="ipConfigurations",
)
properties.mac_address = AAZStrType(
serialized_name="macAddress",
flags={"read_only": True},
)
properties.migration_phase = AAZStrType(
serialized_name="migrationPhase",
)
properties.network_security_group = AAZObjectType(
serialized_name="networkSecurityGroup",
)
cls._build_schema_network_security_group_read(properties.network_security_group)
properties.nic_type = AAZStrType(
serialized_name="nicType",
)
properties.primary = AAZBoolType(
flags={"read_only": True},
)
properties.private_endpoint = AAZObjectType(
serialized_name="privateEndpoint",
)
cls._build_schema_private_endpoint_read(properties.private_endpoint)
properties.private_link_service = AAZObjectType(
serialized_name="privateLinkService",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.resource_guid = AAZStrType(
serialized_name="resourceGuid",
flags={"read_only": True},
)
properties.tap_configurations = AAZListType(
serialized_name="tapConfigurations",
flags={"read_only": True},
)
properties.virtual_machine = AAZObjectType(
serialized_name="virtualMachine",
)
cls._build_schema_sub_resource_read(properties.virtual_machine)
properties.vnet_encryption_supported = AAZBoolType(
serialized_name="vnetEncryptionSupported",
flags={"read_only": True},
)
properties.workload_type = AAZStrType(
serialized_name="workloadType",
)
dns_settings = _schema_network_interface_read.properties.dns_settings
dns_settings.applied_dns_servers = AAZListType(
serialized_name="appliedDnsServers",
flags={"read_only": True},
)
dns_settings.dns_servers = AAZListType(
serialized_name="dnsServers",
)
dns_settings.internal_dns_name_label = AAZStrType(
serialized_name="internalDnsNameLabel",
)
dns_settings.internal_domain_name_suffix = AAZStrType(
serialized_name="internalDomainNameSuffix",
flags={"read_only": True},
)
dns_settings.internal_fqdn = AAZStrType(
serialized_name="internalFqdn",
flags={"read_only": True},
)
applied_dns_servers = _schema_network_interface_read.properties.dns_settings.applied_dns_servers
applied_dns_servers.Element = AAZStrType()
dns_servers = _schema_network_interface_read.properties.dns_settings.dns_servers
dns_servers.Element = AAZStrType()
hosted_workloads = _schema_network_interface_read.properties.hosted_workloads
hosted_workloads.Element = AAZStrType()
ip_configurations = _schema_network_interface_read.properties.ip_configurations
ip_configurations.Element = AAZObjectType()
cls._build_schema_network_interface_ip_configuration_read(ip_configurations.Element)
private_link_service = _schema_network_interface_read.properties.private_link_service
private_link_service.etag = AAZStrType(
flags={"read_only": True},
)
private_link_service.extended_location = AAZObjectType(
serialized_name="extendedLocation",
)
cls._build_schema_extended_location_read(private_link_service.extended_location)
private_link_service.id = AAZStrType()
private_link_service.location = AAZStrType()
private_link_service.name = AAZStrType(
flags={"read_only": True},
)
private_link_service.properties = AAZObjectType(
flags={"client_flatten": True},
)
private_link_service.tags = AAZDictType()
private_link_service.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_network_interface_read.properties.private_link_service.properties
properties.alias = AAZStrType(
flags={"read_only": True},
)
properties.auto_approval = AAZObjectType(
serialized_name="autoApproval",
)
properties.enable_proxy_protocol = AAZBoolType(
serialized_name="enableProxyProtocol",
)
properties.fqdns = AAZListType()
properties.ip_configurations = AAZListType(
serialized_name="ipConfigurations",
)
properties.load_balancer_frontend_ip_configurations = AAZListType(
serialized_name="loadBalancerFrontendIpConfigurations",
)
properties.network_interfaces = AAZListType(
serialized_name="networkInterfaces",
flags={"read_only": True},
)
properties.private_endpoint_connections = AAZListType(
serialized_name="privateEndpointConnections",
flags={"read_only": True},
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.visibility = AAZObjectType()
auto_approval = _schema_network_interface_read.properties.private_link_service.properties.auto_approval
auto_approval.subscriptions = AAZListType()
subscriptions = _schema_network_interface_read.properties.private_link_service.properties.auto_approval.subscriptions
subscriptions.Element = AAZStrType()
fqdns = _schema_network_interface_read.properties.private_link_service.properties.fqdns
fqdns.Element = AAZStrType()
ip_configurations = _schema_network_interface_read.properties.private_link_service.properties.ip_configurations
ip_configurations.Element = AAZObjectType()
_element = _schema_network_interface_read.properties.private_link_service.properties.ip_configurations.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType()
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_network_interface_read.properties.private_link_service.properties.ip_configurations.Element.properties
properties.primary = AAZBoolType()
properties.private_ip_address = AAZStrType(
serialized_name="privateIPAddress",
)
properties.private_ip_address_version = AAZStrType(
serialized_name="privateIPAddressVersion",
)
properties.private_ip_allocation_method = AAZStrType(
serialized_name="privateIPAllocationMethod",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.subnet = AAZObjectType()
cls._build_schema_subnet_read(properties.subnet)
load_balancer_frontend_ip_configurations = _schema_network_interface_read.properties.private_link_service.properties.load_balancer_frontend_ip_configurations
load_balancer_frontend_ip_configurations.Element = AAZObjectType()
cls._build_schema_frontend_ip_configuration_read(load_balancer_frontend_ip_configurations.Element)
network_interfaces = _schema_network_interface_read.properties.private_link_service.properties.network_interfaces
network_interfaces.Element = AAZObjectType()
cls._build_schema_network_interface_read(network_interfaces.Element)
private_endpoint_connections = _schema_network_interface_read.properties.private_link_service.properties.private_endpoint_connections
private_endpoint_connections.Element = AAZObjectType()
_element = _schema_network_interface_read.properties.private_link_service.properties.private_endpoint_connections.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType()
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_network_interface_read.properties.private_link_service.properties.private_endpoint_connections.Element.properties
properties.link_identifier = AAZStrType(
serialized_name="linkIdentifier",
flags={"read_only": True},
)
properties.private_endpoint = AAZObjectType(
serialized_name="privateEndpoint",
)
cls._build_schema_private_endpoint_read(properties.private_endpoint)
properties.private_link_service_connection_state = AAZObjectType(
serialized_name="privateLinkServiceConnectionState",
)
cls._build_schema_private_link_service_connection_state_read(properties.private_link_service_connection_state)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
visibility = _schema_network_interface_read.properties.private_link_service.properties.visibility
visibility.subscriptions = AAZListType()
subscriptions = _schema_network_interface_read.properties.private_link_service.properties.visibility.subscriptions
subscriptions.Element = AAZStrType()
tags = _schema_network_interface_read.properties.private_link_service.tags
tags.Element = AAZStrType()
tap_configurations = _schema_network_interface_read.properties.tap_configurations
tap_configurations.Element = AAZObjectType()
cls._build_schema_network_interface_tap_configuration_read(tap_configurations.Element)
tags = _schema_network_interface_read.tags
tags.Element = AAZStrType()
_schema.etag = cls._schema_network_interface_read.etag
_schema.extended_location = cls._schema_network_interface_read.extended_location
_schema.id = cls._schema_network_interface_read.id
_schema.location = cls._schema_network_interface_read.location
_schema.name = cls._schema_network_interface_read.name
_schema.properties = cls._schema_network_interface_read.properties
_schema.tags = cls._schema_network_interface_read.tags
_schema.type = cls._schema_network_interface_read.type
_schema_network_security_group_read = None
@classmethod
def _build_schema_network_security_group_read(cls, _schema):
if cls._schema_network_security_group_read is not None:
_schema.etag = cls._schema_network_security_group_read.etag
_schema.id = cls._schema_network_security_group_read.id
_schema.location = cls._schema_network_security_group_read.location
_schema.name = cls._schema_network_security_group_read.name
_schema.properties = cls._schema_network_security_group_read.properties
_schema.tags = cls._schema_network_security_group_read.tags
_schema.type = cls._schema_network_security_group_read.type
return
cls._schema_network_security_group_read = _schema_network_security_group_read = AAZObjectType()
network_security_group_read = _schema_network_security_group_read
network_security_group_read.etag = AAZStrType(
flags={"read_only": True},
)
network_security_group_read.id = AAZStrType()
network_security_group_read.location = AAZStrType()
network_security_group_read.name = AAZStrType(
flags={"read_only": True},
)
network_security_group_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
network_security_group_read.tags = AAZDictType()
network_security_group_read.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_network_security_group_read.properties
properties.default_security_rules = AAZListType(
serialized_name="defaultSecurityRules",
flags={"read_only": True},
)
properties.flow_logs = AAZListType(
serialized_name="flowLogs",
flags={"read_only": True},
)
properties.flush_connection = AAZBoolType(
serialized_name="flushConnection",
)
properties.network_interfaces = AAZListType(
serialized_name="networkInterfaces",
flags={"read_only": True},
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.resource_guid = AAZStrType(
serialized_name="resourceGuid",
flags={"read_only": True},
)
properties.security_rules = AAZListType(
serialized_name="securityRules",
)
properties.subnets = AAZListType(
flags={"read_only": True},
)
default_security_rules = _schema_network_security_group_read.properties.default_security_rules
default_security_rules.Element = AAZObjectType()
cls._build_schema_security_rule_read(default_security_rules.Element)
flow_logs = _schema_network_security_group_read.properties.flow_logs
flow_logs.Element = AAZObjectType()
_element = _schema_network_security_group_read.properties.flow_logs.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType()
_element.location = AAZStrType()
_element.name = AAZStrType(
flags={"read_only": True},
)
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.tags = AAZDictType()
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_network_security_group_read.properties.flow_logs.Element.properties
properties.enabled = AAZBoolType()
properties.flow_analytics_configuration = AAZObjectType(
serialized_name="flowAnalyticsConfiguration",
)
properties.format = AAZObjectType()
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.retention_policy = AAZObjectType(
serialized_name="retentionPolicy",
)
properties.storage_id = AAZStrType(
serialized_name="storageId",
flags={"required": True},
)
properties.target_resource_guid = AAZStrType(
serialized_name="targetResourceGuid",
flags={"read_only": True},
)
properties.target_resource_id = AAZStrType(
serialized_name="targetResourceId",
flags={"required": True},
)
flow_analytics_configuration = _schema_network_security_group_read.properties.flow_logs.Element.properties.flow_analytics_configuration
flow_analytics_configuration.network_watcher_flow_analytics_configuration = AAZObjectType(
serialized_name="networkWatcherFlowAnalyticsConfiguration",
)
network_watcher_flow_analytics_configuration = _schema_network_security_group_read.properties.flow_logs.Element.properties.flow_analytics_configuration.network_watcher_flow_analytics_configuration
network_watcher_flow_analytics_configuration.enabled = AAZBoolType()
network_watcher_flow_analytics_configuration.traffic_analytics_interval = AAZIntType(
serialized_name="trafficAnalyticsInterval",
)
network_watcher_flow_analytics_configuration.workspace_id = AAZStrType(
serialized_name="workspaceId",
)
network_watcher_flow_analytics_configuration.workspace_region = AAZStrType(
serialized_name="workspaceRegion",
)
network_watcher_flow_analytics_configuration.workspace_resource_id = AAZStrType(
serialized_name="workspaceResourceId",
)
format = _schema_network_security_group_read.properties.flow_logs.Element.properties.format
format.type = AAZStrType()
format.version = AAZIntType()
retention_policy = _schema_network_security_group_read.properties.flow_logs.Element.properties.retention_policy
retention_policy.days = AAZIntType()
retention_policy.enabled = AAZBoolType()
tags = _schema_network_security_group_read.properties.flow_logs.Element.tags
tags.Element = AAZStrType()
network_interfaces = _schema_network_security_group_read.properties.network_interfaces
network_interfaces.Element = AAZObjectType()
cls._build_schema_network_interface_read(network_interfaces.Element)
security_rules = _schema_network_security_group_read.properties.security_rules
security_rules.Element = AAZObjectType()
cls._build_schema_security_rule_read(security_rules.Element)
subnets = _schema_network_security_group_read.properties.subnets
subnets.Element = AAZObjectType()
cls._build_schema_subnet_read(subnets.Element)
tags = _schema_network_security_group_read.tags
tags.Element = AAZStrType()
_schema.etag = cls._schema_network_security_group_read.etag
_schema.id = cls._schema_network_security_group_read.id
_schema.location = cls._schema_network_security_group_read.location
_schema.name = cls._schema_network_security_group_read.name
_schema.properties = cls._schema_network_security_group_read.properties
_schema.tags = cls._schema_network_security_group_read.tags
_schema.type = cls._schema_network_security_group_read.type
_schema_private_endpoint_read = None
@classmethod
def _build_schema_private_endpoint_read(cls, _schema):
if cls._schema_private_endpoint_read is not None:
_schema.etag = cls._schema_private_endpoint_read.etag
_schema.extended_location = cls._schema_private_endpoint_read.extended_location
_schema.id = cls._schema_private_endpoint_read.id
_schema.location = cls._schema_private_endpoint_read.location
_schema.name = cls._schema_private_endpoint_read.name
_schema.properties = cls._schema_private_endpoint_read.properties
_schema.tags = cls._schema_private_endpoint_read.tags
_schema.type = cls._schema_private_endpoint_read.type
return
cls._schema_private_endpoint_read = _schema_private_endpoint_read = AAZObjectType()
private_endpoint_read = _schema_private_endpoint_read
private_endpoint_read.etag = AAZStrType(
flags={"read_only": True},
)
private_endpoint_read.extended_location = AAZObjectType(
serialized_name="extendedLocation",
)
cls._build_schema_extended_location_read(private_endpoint_read.extended_location)
private_endpoint_read.id = AAZStrType()
private_endpoint_read.location = AAZStrType()
private_endpoint_read.name = AAZStrType(
flags={"read_only": True},
)
private_endpoint_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
private_endpoint_read.tags = AAZDictType()
private_endpoint_read.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_private_endpoint_read.properties
properties.application_security_groups = AAZListType(
serialized_name="applicationSecurityGroups",
)
properties.custom_dns_configs = AAZListType(
serialized_name="customDnsConfigs",
)
properties.custom_network_interface_name = AAZStrType(
serialized_name="customNetworkInterfaceName",
)
properties.ip_configurations = AAZListType(
serialized_name="ipConfigurations",
)
properties.manual_private_link_service_connections = AAZListType(
serialized_name="manualPrivateLinkServiceConnections",
)
properties.network_interfaces = AAZListType(
serialized_name="networkInterfaces",
flags={"read_only": True},
)
properties.private_link_service_connections = AAZListType(
serialized_name="privateLinkServiceConnections",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.subnet = AAZObjectType()
cls._build_schema_subnet_read(properties.subnet)
application_security_groups = _schema_private_endpoint_read.properties.application_security_groups
application_security_groups.Element = AAZObjectType()
cls._build_schema_application_security_group_read(application_security_groups.Element)
custom_dns_configs = _schema_private_endpoint_read.properties.custom_dns_configs
custom_dns_configs.Element = AAZObjectType()
_element = _schema_private_endpoint_read.properties.custom_dns_configs.Element
_element.fqdn = AAZStrType()
_element.ip_addresses = AAZListType(
serialized_name="ipAddresses",
)
ip_addresses = _schema_private_endpoint_read.properties.custom_dns_configs.Element.ip_addresses
ip_addresses.Element = AAZStrType()
ip_configurations = _schema_private_endpoint_read.properties.ip_configurations
ip_configurations.Element = AAZObjectType()
_element = _schema_private_endpoint_read.properties.ip_configurations.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_private_endpoint_read.properties.ip_configurations.Element.properties
properties.group_id = AAZStrType(
serialized_name="groupId",
)
properties.member_name = AAZStrType(
serialized_name="memberName",
)
properties.private_ip_address = AAZStrType(
serialized_name="privateIPAddress",
)
manual_private_link_service_connections = _schema_private_endpoint_read.properties.manual_private_link_service_connections
manual_private_link_service_connections.Element = AAZObjectType()
cls._build_schema_private_link_service_connection_read(manual_private_link_service_connections.Element)
network_interfaces = _schema_private_endpoint_read.properties.network_interfaces
network_interfaces.Element = AAZObjectType()
cls._build_schema_network_interface_read(network_interfaces.Element)
private_link_service_connections = _schema_private_endpoint_read.properties.private_link_service_connections
private_link_service_connections.Element = AAZObjectType()
cls._build_schema_private_link_service_connection_read(private_link_service_connections.Element)
tags = _schema_private_endpoint_read.tags
tags.Element = AAZStrType()
_schema.etag = cls._schema_private_endpoint_read.etag
_schema.extended_location = cls._schema_private_endpoint_read.extended_location
_schema.id = cls._schema_private_endpoint_read.id
_schema.location = cls._schema_private_endpoint_read.location
_schema.name = cls._schema_private_endpoint_read.name
_schema.properties = cls._schema_private_endpoint_read.properties
_schema.tags = cls._schema_private_endpoint_read.tags
_schema.type = cls._schema_private_endpoint_read.type
_schema_private_link_service_connection_state_read = None
@classmethod
def _build_schema_private_link_service_connection_state_read(cls, _schema):
if cls._schema_private_link_service_connection_state_read is not None:
_schema.actions_required = cls._schema_private_link_service_connection_state_read.actions_required
_schema.description = cls._schema_private_link_service_connection_state_read.description
_schema.status = cls._schema_private_link_service_connection_state_read.status
return
cls._schema_private_link_service_connection_state_read = _schema_private_link_service_connection_state_read = AAZObjectType()
private_link_service_connection_state_read = _schema_private_link_service_connection_state_read
private_link_service_connection_state_read.actions_required = AAZStrType(
serialized_name="actionsRequired",
)
private_link_service_connection_state_read.description = AAZStrType()
private_link_service_connection_state_read.status = AAZStrType()
_schema.actions_required = cls._schema_private_link_service_connection_state_read.actions_required
_schema.description = cls._schema_private_link_service_connection_state_read.description
_schema.status = cls._schema_private_link_service_connection_state_read.status
_schema_private_link_service_connection_read = None
@classmethod
def _build_schema_private_link_service_connection_read(cls, _schema):
if cls._schema_private_link_service_connection_read is not None:
_schema.etag = cls._schema_private_link_service_connection_read.etag
_schema.id = cls._schema_private_link_service_connection_read.id
_schema.name = cls._schema_private_link_service_connection_read.name
_schema.properties = cls._schema_private_link_service_connection_read.properties
_schema.type = cls._schema_private_link_service_connection_read.type
return
cls._schema_private_link_service_connection_read = _schema_private_link_service_connection_read = AAZObjectType()
private_link_service_connection_read = _schema_private_link_service_connection_read
private_link_service_connection_read.etag = AAZStrType(
flags={"read_only": True},
)
private_link_service_connection_read.id = AAZStrType()
private_link_service_connection_read.name = AAZStrType()
private_link_service_connection_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
private_link_service_connection_read.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_private_link_service_connection_read.properties
properties.group_ids = AAZListType(
serialized_name="groupIds",
)
properties.private_link_service_connection_state = AAZObjectType(
serialized_name="privateLinkServiceConnectionState",
)
cls._build_schema_private_link_service_connection_state_read(properties.private_link_service_connection_state)
properties.private_link_service_id = AAZStrType(
serialized_name="privateLinkServiceId",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.request_message = AAZStrType(
serialized_name="requestMessage",
)
group_ids = _schema_private_link_service_connection_read.properties.group_ids
group_ids.Element = AAZStrType()
_schema.etag = cls._schema_private_link_service_connection_read.etag
_schema.id = cls._schema_private_link_service_connection_read.id
_schema.name = cls._schema_private_link_service_connection_read.name
_schema.properties = cls._schema_private_link_service_connection_read.properties
_schema.type = cls._schema_private_link_service_connection_read.type
_schema_public_ip_address_read = None
@classmethod
def _build_schema_public_ip_address_read(cls, _schema):
if cls._schema_public_ip_address_read is not None:
_schema.etag = cls._schema_public_ip_address_read.etag
_schema.extended_location = cls._schema_public_ip_address_read.extended_location
_schema.id = cls._schema_public_ip_address_read.id
_schema.location = cls._schema_public_ip_address_read.location
_schema.name = cls._schema_public_ip_address_read.name
_schema.properties = cls._schema_public_ip_address_read.properties
_schema.sku = cls._schema_public_ip_address_read.sku
_schema.tags = cls._schema_public_ip_address_read.tags
_schema.type = cls._schema_public_ip_address_read.type
_schema.zones = cls._schema_public_ip_address_read.zones
return
cls._schema_public_ip_address_read = _schema_public_ip_address_read = AAZObjectType()
public_ip_address_read = _schema_public_ip_address_read
public_ip_address_read.etag = AAZStrType(
flags={"read_only": True},
)
public_ip_address_read.extended_location = AAZObjectType(
serialized_name="extendedLocation",
)
cls._build_schema_extended_location_read(public_ip_address_read.extended_location)
public_ip_address_read.id = AAZStrType()
public_ip_address_read.location = AAZStrType()
public_ip_address_read.name = AAZStrType(
flags={"read_only": True},
)
public_ip_address_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
public_ip_address_read.sku = AAZObjectType()
public_ip_address_read.tags = AAZDictType()
public_ip_address_read.type = AAZStrType(
flags={"read_only": True},
)
public_ip_address_read.zones = AAZListType()
properties = _schema_public_ip_address_read.properties
properties.ddos_settings = AAZObjectType(
serialized_name="ddosSettings",
)
properties.delete_option = AAZStrType(
serialized_name="deleteOption",
)
properties.dns_settings = AAZObjectType(
serialized_name="dnsSettings",
)
properties.idle_timeout_in_minutes = AAZIntType(
serialized_name="idleTimeoutInMinutes",
)
properties.ip_address = AAZStrType(
serialized_name="ipAddress",
)
properties.ip_configuration = AAZObjectType(
serialized_name="ipConfiguration",
)
cls._build_schema_ip_configuration_read(properties.ip_configuration)
properties.ip_tags = AAZListType(
serialized_name="ipTags",
)
properties.linked_public_ip_address = AAZObjectType(
serialized_name="linkedPublicIPAddress",
)
cls._build_schema_public_ip_address_read(properties.linked_public_ip_address)
properties.migration_phase = AAZStrType(
serialized_name="migrationPhase",
)
properties.nat_gateway = AAZObjectType(
serialized_name="natGateway",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.public_ip_address_version = AAZStrType(
serialized_name="publicIPAddressVersion",
)
properties.public_ip_allocation_method = AAZStrType(
serialized_name="publicIPAllocationMethod",
)
properties.public_ip_prefix = AAZObjectType(
serialized_name="publicIPPrefix",
)
cls._build_schema_sub_resource_read(properties.public_ip_prefix)
properties.resource_guid = AAZStrType(
serialized_name="resourceGuid",
flags={"read_only": True},
)
properties.service_public_ip_address = AAZObjectType(
serialized_name="servicePublicIPAddress",
)
cls._build_schema_public_ip_address_read(properties.service_public_ip_address)
ddos_settings = _schema_public_ip_address_read.properties.ddos_settings
ddos_settings.ddos_protection_plan = AAZObjectType(
serialized_name="ddosProtectionPlan",
)
cls._build_schema_sub_resource_read(ddos_settings.ddos_protection_plan)
ddos_settings.protection_mode = AAZStrType(
serialized_name="protectionMode",
)
dns_settings = _schema_public_ip_address_read.properties.dns_settings
dns_settings.domain_name_label = AAZStrType(
serialized_name="domainNameLabel",
)
dns_settings.fqdn = AAZStrType()
dns_settings.reverse_fqdn = AAZStrType(
serialized_name="reverseFqdn",
)
ip_tags = _schema_public_ip_address_read.properties.ip_tags
ip_tags.Element = AAZObjectType()
_element = _schema_public_ip_address_read.properties.ip_tags.Element
_element.ip_tag_type = AAZStrType(
serialized_name="ipTagType",
)
_element.tag = AAZStrType()
nat_gateway = _schema_public_ip_address_read.properties.nat_gateway
nat_gateway.etag = AAZStrType(
flags={"read_only": True},
)
nat_gateway.id = AAZStrType()
nat_gateway.location = AAZStrType()
nat_gateway.name = AAZStrType(
flags={"read_only": True},
)
nat_gateway.properties = AAZObjectType(
flags={"client_flatten": True},
)
nat_gateway.sku = AAZObjectType()
nat_gateway.tags = AAZDictType()
nat_gateway.type = AAZStrType(
flags={"read_only": True},
)
nat_gateway.zones = AAZListType()
properties = _schema_public_ip_address_read.properties.nat_gateway.properties
properties.idle_timeout_in_minutes = AAZIntType(
serialized_name="idleTimeoutInMinutes",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.public_ip_addresses = AAZListType(
serialized_name="publicIpAddresses",
)
properties.public_ip_prefixes = AAZListType(
serialized_name="publicIpPrefixes",
)
properties.resource_guid = AAZStrType(
serialized_name="resourceGuid",
flags={"read_only": True},
)
properties.subnets = AAZListType(
flags={"read_only": True},
)
public_ip_addresses = _schema_public_ip_address_read.properties.nat_gateway.properties.public_ip_addresses
public_ip_addresses.Element = AAZObjectType()
cls._build_schema_sub_resource_read(public_ip_addresses.Element)
public_ip_prefixes = _schema_public_ip_address_read.properties.nat_gateway.properties.public_ip_prefixes
public_ip_prefixes.Element = AAZObjectType()
cls._build_schema_sub_resource_read(public_ip_prefixes.Element)
subnets = _schema_public_ip_address_read.properties.nat_gateway.properties.subnets
subnets.Element = AAZObjectType()
cls._build_schema_sub_resource_read(subnets.Element)
sku = _schema_public_ip_address_read.properties.nat_gateway.sku
sku.name = AAZStrType()
tags = _schema_public_ip_address_read.properties.nat_gateway.tags
tags.Element = AAZStrType()
zones = _schema_public_ip_address_read.properties.nat_gateway.zones
zones.Element = AAZStrType()
sku = _schema_public_ip_address_read.sku
sku.name = AAZStrType()
sku.tier = AAZStrType()
tags = _schema_public_ip_address_read.tags
tags.Element = AAZStrType()
zones = _schema_public_ip_address_read.zones
zones.Element = AAZStrType()
_schema.etag = cls._schema_public_ip_address_read.etag
_schema.extended_location = cls._schema_public_ip_address_read.extended_location
_schema.id = cls._schema_public_ip_address_read.id
_schema.location = cls._schema_public_ip_address_read.location
_schema.name = cls._schema_public_ip_address_read.name
_schema.properties = cls._schema_public_ip_address_read.properties
_schema.sku = cls._schema_public_ip_address_read.sku
_schema.tags = cls._schema_public_ip_address_read.tags
_schema.type = cls._schema_public_ip_address_read.type
_schema.zones = cls._schema_public_ip_address_read.zones
_schema_security_rule_read = None
@classmethod
def _build_schema_security_rule_read(cls, _schema):
if cls._schema_security_rule_read is not None:
_schema.etag = cls._schema_security_rule_read.etag
_schema.id = cls._schema_security_rule_read.id
_schema.name = cls._schema_security_rule_read.name
_schema.properties = cls._schema_security_rule_read.properties
_schema.type = cls._schema_security_rule_read.type
return
cls._schema_security_rule_read = _schema_security_rule_read = AAZObjectType()
security_rule_read = _schema_security_rule_read
security_rule_read.etag = AAZStrType(
flags={"read_only": True},
)
security_rule_read.id = AAZStrType()
security_rule_read.name = AAZStrType()
security_rule_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
security_rule_read.type = AAZStrType()
properties = _schema_security_rule_read.properties
properties.access = AAZStrType(
flags={"required": True},
)
properties.description = AAZStrType()
properties.destination_address_prefix = AAZStrType(
serialized_name="destinationAddressPrefix",
)
properties.destination_address_prefixes = AAZListType(
serialized_name="destinationAddressPrefixes",
)
properties.destination_application_security_groups = AAZListType(
serialized_name="destinationApplicationSecurityGroups",
)
properties.destination_port_range = AAZStrType(
serialized_name="destinationPortRange",
)
properties.destination_port_ranges = AAZListType(
serialized_name="destinationPortRanges",
)
properties.direction = AAZStrType(
flags={"required": True},
)
properties.priority = AAZIntType()
properties.protocol = AAZStrType(
flags={"required": True},
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.source_address_prefix = AAZStrType(
serialized_name="sourceAddressPrefix",
)
properties.source_address_prefixes = AAZListType(
serialized_name="sourceAddressPrefixes",
)
properties.source_application_security_groups = AAZListType(
serialized_name="sourceApplicationSecurityGroups",
)
properties.source_port_range = AAZStrType(
serialized_name="sourcePortRange",
)
properties.source_port_ranges = AAZListType(
serialized_name="sourcePortRanges",
)
destination_address_prefixes = _schema_security_rule_read.properties.destination_address_prefixes
destination_address_prefixes.Element = AAZStrType()
destination_application_security_groups = _schema_security_rule_read.properties.destination_application_security_groups
destination_application_security_groups.Element = AAZObjectType()
cls._build_schema_application_security_group_read(destination_application_security_groups.Element)
destination_port_ranges = _schema_security_rule_read.properties.destination_port_ranges
destination_port_ranges.Element = AAZStrType()
source_address_prefixes = _schema_security_rule_read.properties.source_address_prefixes
source_address_prefixes.Element = AAZStrType()
source_application_security_groups = _schema_security_rule_read.properties.source_application_security_groups
source_application_security_groups.Element = AAZObjectType()
cls._build_schema_application_security_group_read(source_application_security_groups.Element)
source_port_ranges = _schema_security_rule_read.properties.source_port_ranges
source_port_ranges.Element = AAZStrType()
_schema.etag = cls._schema_security_rule_read.etag
_schema.id = cls._schema_security_rule_read.id
_schema.name = cls._schema_security_rule_read.name
_schema.properties = cls._schema_security_rule_read.properties
_schema.type = cls._schema_security_rule_read.type
_schema_sub_resource_read = None
@classmethod
def _build_schema_sub_resource_read(cls, _schema):
if cls._schema_sub_resource_read is not None:
_schema.id = cls._schema_sub_resource_read.id
return
cls._schema_sub_resource_read = _schema_sub_resource_read = AAZObjectType()
sub_resource_read = _schema_sub_resource_read
sub_resource_read.id = AAZStrType()
_schema.id = cls._schema_sub_resource_read.id
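    # Pattern note (applies to every _build_schema_*_read helper in this
    # generated module): each helper caches its schema on the class and
    # assigns the cache *before* building the fields, so mutually recursive
    # types (subnets, network security groups, and network interfaces all
    # reference one another) still terminate: re-entrant calls take the early
    # return above and merely copy field references onto the caller's _schema.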
_schema_subnet_read = None
@classmethod
def _build_schema_subnet_read(cls, _schema):
if cls._schema_subnet_read is not None:
_schema.etag = cls._schema_subnet_read.etag
_schema.id = cls._schema_subnet_read.id
_schema.name = cls._schema_subnet_read.name
_schema.properties = cls._schema_subnet_read.properties
_schema.type = cls._schema_subnet_read.type
return
cls._schema_subnet_read = _schema_subnet_read = AAZObjectType()
subnet_read = _schema_subnet_read
subnet_read.etag = AAZStrType(
flags={"read_only": True},
)
subnet_read.id = AAZStrType()
subnet_read.name = AAZStrType()
subnet_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
subnet_read.type = AAZStrType()
properties = _schema_subnet_read.properties
properties.address_prefix = AAZStrType(
serialized_name="addressPrefix",
)
properties.address_prefixes = AAZListType(
serialized_name="addressPrefixes",
)
properties.application_gateway_ip_configurations = AAZListType(
serialized_name="applicationGatewayIpConfigurations",
)
properties.delegations = AAZListType()
properties.ip_allocations = AAZListType(
serialized_name="ipAllocations",
)
properties.ip_configuration_profiles = AAZListType(
serialized_name="ipConfigurationProfiles",
flags={"read_only": True},
)
properties.ip_configurations = AAZListType(
serialized_name="ipConfigurations",
flags={"read_only": True},
)
properties.nat_gateway = AAZObjectType(
serialized_name="natGateway",
)
cls._build_schema_sub_resource_read(properties.nat_gateway)
properties.network_security_group = AAZObjectType(
serialized_name="networkSecurityGroup",
)
cls._build_schema_network_security_group_read(properties.network_security_group)
properties.private_endpoint_network_policies = AAZStrType(
serialized_name="privateEndpointNetworkPolicies",
)
properties.private_endpoints = AAZListType(
serialized_name="privateEndpoints",
flags={"read_only": True},
)
properties.private_link_service_network_policies = AAZStrType(
serialized_name="privateLinkServiceNetworkPolicies",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.purpose = AAZStrType(
flags={"read_only": True},
)
properties.resource_navigation_links = AAZListType(
serialized_name="resourceNavigationLinks",
flags={"read_only": True},
)
properties.route_table = AAZObjectType(
serialized_name="routeTable",
)
properties.service_association_links = AAZListType(
serialized_name="serviceAssociationLinks",
flags={"read_only": True},
)
properties.service_endpoint_policies = AAZListType(
serialized_name="serviceEndpointPolicies",
)
properties.service_endpoints = AAZListType(
serialized_name="serviceEndpoints",
)
address_prefixes = _schema_subnet_read.properties.address_prefixes
address_prefixes.Element = AAZStrType()
application_gateway_ip_configurations = _schema_subnet_read.properties.application_gateway_ip_configurations
application_gateway_ip_configurations.Element = AAZObjectType()
_element = _schema_subnet_read.properties.application_gateway_ip_configurations.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType()
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_subnet_read.properties.application_gateway_ip_configurations.Element.properties
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.subnet = AAZObjectType()
cls._build_schema_sub_resource_read(properties.subnet)
delegations = _schema_subnet_read.properties.delegations
delegations.Element = AAZObjectType()
_element = _schema_subnet_read.properties.delegations.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType()
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.type = AAZStrType()
properties = _schema_subnet_read.properties.delegations.Element.properties
properties.actions = AAZListType(
flags={"read_only": True},
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.service_name = AAZStrType(
serialized_name="serviceName",
)
actions = _schema_subnet_read.properties.delegations.Element.properties.actions
actions.Element = AAZStrType()
ip_allocations = _schema_subnet_read.properties.ip_allocations
ip_allocations.Element = AAZObjectType()
cls._build_schema_sub_resource_read(ip_allocations.Element)
ip_configuration_profiles = _schema_subnet_read.properties.ip_configuration_profiles
ip_configuration_profiles.Element = AAZObjectType()
_element = _schema_subnet_read.properties.ip_configuration_profiles.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType()
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_subnet_read.properties.ip_configuration_profiles.Element.properties
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.subnet = AAZObjectType()
cls._build_schema_subnet_read(properties.subnet)
ip_configurations = _schema_subnet_read.properties.ip_configurations
ip_configurations.Element = AAZObjectType()
cls._build_schema_ip_configuration_read(ip_configurations.Element)
private_endpoints = _schema_subnet_read.properties.private_endpoints
private_endpoints.Element = AAZObjectType()
cls._build_schema_private_endpoint_read(private_endpoints.Element)
resource_navigation_links = _schema_subnet_read.properties.resource_navigation_links
resource_navigation_links.Element = AAZObjectType()
_element = _schema_subnet_read.properties.resource_navigation_links.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType(
flags={"read_only": True},
)
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_subnet_read.properties.resource_navigation_links.Element.properties
properties.link = AAZStrType()
properties.linked_resource_type = AAZStrType(
serialized_name="linkedResourceType",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
route_table = _schema_subnet_read.properties.route_table
route_table.etag = AAZStrType(
flags={"read_only": True},
)
route_table.id = AAZStrType()
route_table.location = AAZStrType()
route_table.name = AAZStrType(
flags={"read_only": True},
)
route_table.properties = AAZObjectType(
flags={"client_flatten": True},
)
route_table.tags = AAZDictType()
route_table.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_subnet_read.properties.route_table.properties
properties.disable_bgp_route_propagation = AAZBoolType(
serialized_name="disableBgpRoutePropagation",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.resource_guid = AAZStrType(
serialized_name="resourceGuid",
flags={"read_only": True},
)
properties.routes = AAZListType()
properties.subnets = AAZListType(
flags={"read_only": True},
)
routes = _schema_subnet_read.properties.route_table.properties.routes
routes.Element = AAZObjectType()
_element = _schema_subnet_read.properties.route_table.properties.routes.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType()
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.type = AAZStrType()
properties = _schema_subnet_read.properties.route_table.properties.routes.Element.properties
properties.address_prefix = AAZStrType(
serialized_name="addressPrefix",
)
properties.has_bgp_override = AAZBoolType(
serialized_name="hasBgpOverride",
)
properties.next_hop_ip_address = AAZStrType(
serialized_name="nextHopIpAddress",
)
properties.next_hop_type = AAZStrType(
serialized_name="nextHopType",
flags={"required": True},
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
subnets = _schema_subnet_read.properties.route_table.properties.subnets
subnets.Element = AAZObjectType()
cls._build_schema_subnet_read(subnets.Element)
tags = _schema_subnet_read.properties.route_table.tags
tags.Element = AAZStrType()
service_association_links = _schema_subnet_read.properties.service_association_links
service_association_links.Element = AAZObjectType()
_element = _schema_subnet_read.properties.service_association_links.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType()
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_subnet_read.properties.service_association_links.Element.properties
properties.allow_delete = AAZBoolType(
serialized_name="allowDelete",
)
properties.link = AAZStrType()
properties.linked_resource_type = AAZStrType(
serialized_name="linkedResourceType",
)
properties.locations = AAZListType()
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
locations = _schema_subnet_read.properties.service_association_links.Element.properties.locations
locations.Element = AAZStrType()
service_endpoint_policies = _schema_subnet_read.properties.service_endpoint_policies
service_endpoint_policies.Element = AAZObjectType()
_element = _schema_subnet_read.properties.service_endpoint_policies.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType()
_element.kind = AAZStrType(
flags={"read_only": True},
)
_element.location = AAZStrType()
_element.name = AAZStrType(
flags={"read_only": True},
)
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.tags = AAZDictType()
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_subnet_read.properties.service_endpoint_policies.Element.properties
properties.contextual_service_endpoint_policies = AAZListType(
serialized_name="contextualServiceEndpointPolicies",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.resource_guid = AAZStrType(
serialized_name="resourceGuid",
flags={"read_only": True},
)
properties.service_alias = AAZStrType(
serialized_name="serviceAlias",
)
properties.service_endpoint_policy_definitions = AAZListType(
serialized_name="serviceEndpointPolicyDefinitions",
)
properties.subnets = AAZListType(
flags={"read_only": True},
)
contextual_service_endpoint_policies = _schema_subnet_read.properties.service_endpoint_policies.Element.properties.contextual_service_endpoint_policies
contextual_service_endpoint_policies.Element = AAZStrType()
service_endpoint_policy_definitions = _schema_subnet_read.properties.service_endpoint_policies.Element.properties.service_endpoint_policy_definitions
service_endpoint_policy_definitions.Element = AAZObjectType()
_element = _schema_subnet_read.properties.service_endpoint_policies.Element.properties.service_endpoint_policy_definitions.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType()
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.type = AAZStrType()
properties = _schema_subnet_read.properties.service_endpoint_policies.Element.properties.service_endpoint_policy_definitions.Element.properties
properties.description = AAZStrType()
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.service = AAZStrType()
properties.service_resources = AAZListType(
serialized_name="serviceResources",
)
service_resources = _schema_subnet_read.properties.service_endpoint_policies.Element.properties.service_endpoint_policy_definitions.Element.properties.service_resources
service_resources.Element = AAZStrType()
subnets = _schema_subnet_read.properties.service_endpoint_policies.Element.properties.subnets
subnets.Element = AAZObjectType()
cls._build_schema_subnet_read(subnets.Element)
tags = _schema_subnet_read.properties.service_endpoint_policies.Element.tags
tags.Element = AAZStrType()
service_endpoints = _schema_subnet_read.properties.service_endpoints
service_endpoints.Element = AAZObjectType()
_element = _schema_subnet_read.properties.service_endpoints.Element
_element.locations = AAZListType()
_element.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
_element.service = AAZStrType()
locations = _schema_subnet_read.properties.service_endpoints.Element.locations
locations.Element = AAZStrType()
_schema.etag = cls._schema_subnet_read.etag
_schema.id = cls._schema_subnet_read.id
_schema.name = cls._schema_subnet_read.name
_schema.properties = cls._schema_subnet_read.properties
_schema.type = cls._schema_subnet_read.type
_schema_virtual_network_tap_read = None
@classmethod
def _build_schema_virtual_network_tap_read(cls, _schema):
if cls._schema_virtual_network_tap_read is not None:
_schema.etag = cls._schema_virtual_network_tap_read.etag
_schema.id = cls._schema_virtual_network_tap_read.id
_schema.location = cls._schema_virtual_network_tap_read.location
_schema.name = cls._schema_virtual_network_tap_read.name
_schema.properties = cls._schema_virtual_network_tap_read.properties
_schema.tags = cls._schema_virtual_network_tap_read.tags
_schema.type = cls._schema_virtual_network_tap_read.type
return
cls._schema_virtual_network_tap_read = _schema_virtual_network_tap_read = AAZObjectType()
virtual_network_tap_read = _schema_virtual_network_tap_read
virtual_network_tap_read.etag = AAZStrType(
flags={"read_only": True},
)
virtual_network_tap_read.id = AAZStrType()
virtual_network_tap_read.location = AAZStrType()
virtual_network_tap_read.name = AAZStrType(
flags={"read_only": True},
)
virtual_network_tap_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
virtual_network_tap_read.tags = AAZDictType()
virtual_network_tap_read.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_virtual_network_tap_read.properties
properties.destination_load_balancer_front_end_ip_configuration = AAZObjectType(
serialized_name="destinationLoadBalancerFrontEndIPConfiguration",
)
cls._build_schema_frontend_ip_configuration_read(properties.destination_load_balancer_front_end_ip_configuration)
properties.destination_network_interface_ip_configuration = AAZObjectType(
serialized_name="destinationNetworkInterfaceIPConfiguration",
)
cls._build_schema_network_interface_ip_configuration_read(properties.destination_network_interface_ip_configuration)
properties.destination_port = AAZIntType(
serialized_name="destinationPort",
)
properties.network_interface_tap_configurations = AAZListType(
serialized_name="networkInterfaceTapConfigurations",
flags={"read_only": True},
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.resource_guid = AAZStrType(
serialized_name="resourceGuid",
flags={"read_only": True},
)
network_interface_tap_configurations = _schema_virtual_network_tap_read.properties.network_interface_tap_configurations
network_interface_tap_configurations.Element = AAZObjectType()
cls._build_schema_network_interface_tap_configuration_read(network_interface_tap_configurations.Element)
tags = _schema_virtual_network_tap_read.tags
tags.Element = AAZStrType()
_schema.etag = cls._schema_virtual_network_tap_read.etag
_schema.id = cls._schema_virtual_network_tap_read.id
_schema.location = cls._schema_virtual_network_tap_read.location
_schema.name = cls._schema_virtual_network_tap_read.name
_schema.properties = cls._schema_virtual_network_tap_read.properties
_schema.tags = cls._schema_virtual_network_tap_read.tags
_schema.type = cls._schema_virtual_network_tap_read.type
__all__ = ["Create"]
| [
"[email protected]"
] | |
79daec963e10fb204d7c5820ecf5dfa742767c35 | 208bc8b87cb20fc6e57c8c8846cbe947b2eec1f3 | /pyocd/core/memory_interface.py | 0bd655b77628999f3cd190bde3c656e1c7f96830 | [
"Apache-2.0",
"CC-BY-4.0"
] | permissive | canerbulduk/pyOCD | 28c545f25ef9b2949a1cd49c00faeeda986a26fe | a61e8b8dc2050309510d9fe7ca63680aafe06749 | refs/heads/main | 2023-08-24T21:10:52.427697 | 2021-11-09T15:13:48 | 2021-11-09T15:13:48 | 426,275,463 | 0 | 0 | Apache-2.0 | 2021-11-09T15:08:22 | 2021-11-09T15:08:21 | null | UTF-8 | Python | false | false | 4,837 | py | # pyOCD debugger
# Copyright (c) 2018-2020 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ..utility import conversion
class MemoryInterface(object):
"""! @brief Interface for memory access."""
def write_memory(self, addr, data, transfer_size=32):
"""! @brief Write a single memory location.
By default the transfer size is a word."""
raise NotImplementedError()
def read_memory(self, addr, transfer_size=32, now=True):
"""! @brief Read a memory location.
By default, a word will be read."""
raise NotImplementedError()
def write_memory_block32(self, addr, data):
"""! @brief Write an aligned block of 32-bit words."""
raise NotImplementedError()
def read_memory_block32(self, addr, size):
"""! @brief Read an aligned block of 32-bit words."""
raise NotImplementedError()
def write64(self, addr, value):
"""! @brief Shorthand to write a 64-bit word."""
self.write_memory(addr, value, 64)
def write32(self, addr, value):
"""! @brief Shorthand to write a 32-bit word."""
self.write_memory(addr, value, 32)
def write16(self, addr, value):
"""! @brief Shorthand to write a 16-bit halfword."""
self.write_memory(addr, value, 16)
def write8(self, addr, value):
"""! @brief Shorthand to write a byte."""
self.write_memory(addr, value, 8)
def read64(self, addr, now=True):
"""! @brief Shorthand to read a 64-bit word."""
return self.read_memory(addr, 64, now)
def read32(self, addr, now=True):
"""! @brief Shorthand to read a 32-bit word."""
return self.read_memory(addr, 32, now)
def read16(self, addr, now=True):
"""! @brief Shorthand to read a 16-bit halfword."""
return self.read_memory(addr, 16, now)
def read8(self, addr, now=True):
"""! @brief Shorthand to read a byte."""
return self.read_memory(addr, 8, now)
def read_memory_block8(self, addr, size):
"""! @brief Read a block of unaligned bytes in memory.
@return an array of byte values
"""
res = []
# try to read 8bits data
if (size > 0) and (addr & 0x01):
mem = self.read8(addr)
res.append(mem)
size -= 1
addr += 1
# try to read 16bits data
if (size > 1) and (addr & 0x02):
mem = self.read16(addr)
res.append(mem & 0xff)
res.append((mem >> 8) & 0xff)
size -= 2
addr += 2
# try to read aligned block of 32bits
if (size >= 4):
mem = self.read_memory_block32(addr, size // 4)
res += conversion.u32le_list_to_byte_list(mem)
size -= 4*len(mem)
addr += 4*len(mem)
        # try to read remaining 16bits data
        if (size > 1):
mem = self.read16(addr)
res.append(mem & 0xff)
res.append((mem >> 8) & 0xff)
size -= 2
addr += 2
        # try to read remaining 8bits data
        if (size > 0):
mem = self.read8(addr)
res.append(mem)
return res
def write_memory_block8(self, addr, data):
"""! @brief Write a block of unaligned bytes in memory."""
size = len(data)
idx = 0
#try to write 8 bits data
if (size > 0) and (addr & 0x01):
self.write8(addr, data[idx])
size -= 1
addr += 1
idx += 1
# try to write 16 bits data
if (size > 1) and (addr & 0x02):
self.write16(addr, data[idx] | (data[idx+1] << 8))
size -= 2
addr += 2
idx += 2
# write aligned block of 32 bits
if (size >= 4):
data32 = conversion.byte_list_to_u32le_list(data[idx:idx + (size & ~0x03)])
self.write_memory_block32(addr, data32)
addr += size & ~0x03
idx += size & ~0x03
size -= size & ~0x03
# try to write 16 bits data
if (size > 1):
self.write16(addr, data[idx] | (data[idx+1] << 8))
size -= 2
addr += 2
idx += 2
#try to write 8 bits data
if (size > 0):
self.write8(addr, data[idx])
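# Illustrative decomposition (comments only; the address is hypothetical):
# read_memory_block8(0x20000001, 9) issues read8(0x20000001),
# read16(0x20000002), read_memory_block32(0x20000004, 1), and finally
# read16(0x20000008); the unaligned head and tail accesses bracket a single
# aligned 32-bit block transfer.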
| [
"[email protected]"
] | |
75738732f1f444b4a8f55db79e8378fbee80e1dc | 46577285b990bb2711cc718b99a24f78c53a7da7 | /파이썬 알고리즘_4기/자물쇠와 열쇠 사본/solution.py | 2f4518155d216a95b0238d57febf454450496e5c | [] | no_license | suwonraison900206/TIL | 890f02ff768c3a7d6ed647a27ba8da96b0cc06b4 | 2153623238bcc6965ec6983df9e7b207cc5aa361 | refs/heads/master | 2022-06-08T09:51:12.289608 | 2022-05-18T12:04:56 | 2022-05-18T12:04:56 | 235,004,266 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,396 | py | def solution(key, lock):
    # Pad the lock into a 3L x 3L board, slide each of the four rotations of
    # the key across it, and accept a placement where every lock cell sums to
    # exactly 1 (every hole filled, no bump collision).
    def turn_key(key):
        # Rotate the key 90 degrees clockwise.
key_lst = []
for i in range(len(key)):
a = []
for j in range(len(key)-1, -1, -1):
a.append(key[j][i])
key_lst.append(a)
return key_lst
    # Place the lock in the center of a zero-padded 3L x 3L board so the key
    # may hang off every edge of the lock.
    lst = [[0] * (len(lock) * 3) for __ in range(len(lock) * 3)]
for i in range(len(lock), (len(lock) * 2)):
for j in range(len(lock), (len(lock) * 2)):
lst[i][j] = lock[i-len(lock)][j - len(lock)]
L = len(lst)
K = len(key)
    # Try all four rotations of the key.
    for cnt in range(4):
key = turn_key(key)
for i in range(0, (L-K+1)):
for j in range(0, (L-K+1)):
                flag = True
                # Overlay the key at offset (i, j).
                for q in range(K):
for w in range(K):
lst[i+q][j+w] = lst[i+q][j+w] + key[q][w]
                # Every cell of the lock region must be exactly 1:
                # 0 is an unfilled hole, 2 is a bump collision.
                for x in range(len(lock), (len(lock) * 2)):
for y in range(len(lock), (len(lock) * 2)):
if lst[x][y] == 0 or lst[x][y] == 2:
flag = False
                if flag:
return True
                # Undo the overlay before trying the next offset.
                for q in range(K):
for w in range(K):
lst[i+q][j+w] = lst[i+q][j+w] - key[q][w]
return False
# Sanity check with the standard example (expected output: True).
key = [[0, 0, 0], [1, 0, 0], [0, 1, 1]]
lock = [[1, 1, 1], [1, 1, 0], [1, 0, 1]]
print(solution(key, lock))
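# A second, hypothetical check: a 1x1 key with a single bump fills a 1x1 lock
# with a single hole, so this should also print True.
print(solution([[1]], [[0]]))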
"[email protected]"
] | |
bfd99852e69fe7ffaa50887a79dd2443937aae29 | 26f6313772161851b3b28b32a4f8d255499b3974 | /Books/CrackingtheCodingInterview/1708_CircusTowerLCCI.py | e0a049aefcf9028bc196ec5e00e061eeb6f66683 | [] | no_license | here0009/LeetCode | 693e634a3096d929e5c842c5c5b989fa388e0fcd | f96a2273c6831a8035e1adacfa452f73c599ae16 | refs/heads/master | 2023-06-30T19:07:23.645941 | 2021-07-31T03:38:51 | 2021-07-31T03:38:51 | 266,287,834 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,429 | py | """
A circus is designing a human-tower act in which one person stands on another
person's shoulders. For practical and aesthetic reasons, each person must be
both shorter and lighter than the person below. Given the height and weight
of every circus member, write code to compute the maximum number of people
that can be stacked.
Example:
Input: height = [65,70,56,75,60,68] weight = [100,150,90,190,95,110]
Output: 6
Explanation: counting from the top down, at most 6 people can be stacked:
(56,90), (60,95), (65,100), (68,110), (70,150), (75,190)
Constraints:
height.length == weight.length <= 10000
Source: LeetCode (LCCI)
Link: https://leetcode-cn.com/problems/circus-tower-lcci
Copyright belongs to LeetCode. For commercial reuse please contact them for
authorization; for non-commercial reuse please cite the source.
"""
from typing import List
from functools import lru_cache
class Solution:
def bestSeqAtIndex(self, height: List[int], weight: List[int]) -> int:
"""
TLE
"""
@lru_cache(None)
def calc(idx, h, w):
if idx == length:
return 0
res = calc(idx + 1, h, w)
for j in range(idx, length):
h2, w2 = sorted_h_w[j]
if h2 > h and w2 > w:
res = max(res, 1 + calc(j, h2, w2))
return res
length = len(height)
sorted_h_w = sorted(zip(height, weight))
# print(sorted_h_w)
return calc(0, 0, 0)
from typing import List
from bisect import bisect_left, bisect_right
class Solution:
def bestSeqAtIndex(self, height: List[int], weight: List[int]) -> int:
length = len(height)
sorted_h_w = sorted(zip(height, weight), key=lambda x: (x[0], -x[1]))
stack = []
# print(sorted_h_w)
for i in range(length):
_, w = sorted_h_w[i]
            # bisect_left keeps the subsequence strictly increasing: an equal
            # weight replaces an element instead of extending the sequence.
            idx = bisect_left(stack, w)
            if idx == len(stack):
                # w exceeds every tail value seen so far: extend the LIS.
                stack.append(w)
            else:
                # Replace the first element >= w to keep tail values minimal.
                stack[idx] = w
# print(stack)
return len(stack)
S = Solution()
height = [65,70,56,75,60,68]
weight = [100,150,90,190,95,110]
print(S.bestSeqAtIndex(height, weight))  # expected 6
height = [1,2,3,4]
weight = [4,3,2,1]
print(S.bestSeqAtIndex(height, weight))  # expected 1
# Quick exploration of bisect_left vs bisect_right on duplicate values.
nums = [1,3,3,3,4,5,7,10]
for k in range(10):
print(k, bisect_left(nums, k), bisect_right(nums, k))
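# Illustrative trace (comments only): for the second test above, sorting by
# (height asc, weight desc) yields weights [4, 3, 2, 1]; each value replaces
# the lone tail element, so `stack` evolves [4] -> [3] -> [2] -> [1] and the
# answer is len(stack) == 1.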
| [
"[email protected]"
] | |
89673a5966ae7356ef9eff43e2a87a6420672288 | ded10c2f2f5f91c44ec950237a59225e8486abd8 | /.history/3/ising2d_microstates_run_20200506001321.py | 7e09e413f76c69e16147063ce74e71d6947cc7bc | [] | no_license | jearistiz/Statistical-Physics-Projects | 276a86407b32ded4e06b32efb2fadbd8eff8daed | d9c5b16a50856e148dc8604d92b6de3ea21fc552 | refs/heads/master | 2022-11-05T03:41:23.623050 | 2020-06-28T06:36:05 | 2020-06-28T06:36:05 | 254,909,897 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,809 | py | from ising2d_microstates import *
################################################################################################
# CONTROL PANEL
################################################################################################
# Decide whether to run the exhaustive microstate enumeration
run_microstates_algorithm = True
# Decide whether to run the internal energy calculation
run_avg_energy = True
# Decide whether to run the optimization of dx and beta_ini
run_optimization = False
################################################################################################
# GENERAL PARAMETERS FOR THE FIGURES
################################################################################################
# Use LaTeX in figure text and enlarge the font size
plt.rc('text', usetex=True)
plt.rcParams.update({'font.size':15,'text.latex.unicode':True})
# Get the path of this script's directory so output files are saved next to it
script_dir = os.path.dirname(os.path.abspath(__file__))
# or just a list of the list of key value pairs
# list_key_value = [ [k,v] for k, v in dict.items() ]
if run_microstates_algorithm:
L = 4
microstates = ising_microstates(L)
print('All microstates, each in a single row:')
print(pd.DataFrame(microstates),'\n')
neighbours = ising_neighbours(L)
energies = ising_energy(microstates, neighbours, save_data=True)
ising_energy_plot(energies, L, save_plot=True)
microstate_rand_index = 2 ** (L*L) - np.random.randint(1, 2 ** (L*L))
microstate_rand = microstates[microstate_rand_index,:]
print('One random microstate as a 2D grid:')
print(pd.DataFrame(microstate_rand.reshape((L,L))), '\n')
ising_microstate_plot(microstate_rand, save_plot=True)
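    # Note: exhaustive enumeration scales as 2**(L*L) microstates, so L = 4
    # (65536 states) is close to the practical limit of this brute-force run;
    # L = 5 would already require 2**25 ~ 3.4e7 spin configurations.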
| [
"[email protected]"
] | |
65ac4195a5a3a9edf018eb3a67a7ce8033813f0e | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp/Wellfleet-TI-RUI-MIB.py | 4cb13fe3eac166af78ef58402f7f017088e83a71 | [
"Apache-2.0"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 3,080 | py | #
# PySNMP MIB module Wellfleet-TI-RUI-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/Wellfleet-TI-RUI-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 21:35:28 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ConstraintsIntersection, ValueRangeConstraint, ValueSizeConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsIntersection", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsUnion")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
ObjectIdentity, ModuleIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, Counter32, Bits, Gauge32, Integer32, Unsigned32, Counter64, IpAddress, NotificationType, TimeTicks, iso = mibBuilder.importSymbols("SNMPv2-SMI", "ObjectIdentity", "ModuleIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "Counter32", "Bits", "Gauge32", "Integer32", "Unsigned32", "Counter64", "IpAddress", "NotificationType", "TimeTicks", "iso")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
wfServices, = mibBuilder.importSymbols("Wellfleet-COMMON-MIB", "wfServices")
wfTiRui = MibIdentifier((1, 3, 6, 1, 4, 1, 18, 3, 3, 2, 2))
wfTiRuiState = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 3, 2, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("idle", 1), ("busy", 2))).clone('idle')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfTiRuiState.setStatus('mandatory')
wfTiRuiAction = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 3, 2, 2, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wfTiRuiAction.setStatus('mandatory')
wfTiRuiResult = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 3, 2, 2, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfTiRuiResult.setStatus('mandatory')
wfTiRuiInReqs = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 3, 2, 2, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfTiRuiInReqs.setStatus('mandatory')
wfTiRuiOutResults = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 3, 2, 2, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfTiRuiOutResults.setStatus('mandatory')
wfTiRuiOutResultsErr = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 3, 2, 2, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wfTiRuiOutResultsErr.setStatus('mandatory')
mibBuilder.exportSymbols("Wellfleet-TI-RUI-MIB", wfTiRuiInReqs=wfTiRuiInReqs, wfTiRuiResult=wfTiRuiResult, wfTiRuiState=wfTiRuiState, wfTiRuiOutResultsErr=wfTiRuiOutResultsErr, wfTiRui=wfTiRui, wfTiRuiOutResults=wfTiRuiOutResults, wfTiRuiAction=wfTiRuiAction)
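# Minimal usage sketch (an assumption-laden illustration: it presumes pysnmp
# is installed and this module is discoverable by the MIB builder, e.g. via
# builder.DirMibSource; the calls below follow the standard pysnmp API):
# >>> from pysnmp.smi import builder
# >>> mib = builder.MibBuilder()
# >>> mib.loadModules('Wellfleet-TI-RUI-MIB')
# >>> wfTiRuiState, = mib.importSymbols('Wellfleet-TI-RUI-MIB', 'wfTiRuiState')
# >>> wfTiRuiState.getName()   # -> (1, 3, 6, 1, 4, 1, 18, 3, 3, 2, 2, 1)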
| [
"[email protected]"
] | |
103da40cbb391b6592cac3e559ff96bfbd0bcadf | dbe1f4110921a08cb13e22ea325d503bd5627195 | /chuhuo_2.7_clickhouse/bluedon/bdwafd/newscantools/plugins/ChckDeadLinksScript.py | 624b61e9bc42c1fa2dd55d579de07834790dac83 | [] | no_license | Hehouhua/waf_branches | 92dc1b1cbecba20f24ef6c7372dde7caa43f9158 | ca76f3a1ed8150b423474c9e37aee37841a5ee35 | refs/heads/main | 2023-01-07T11:33:31.667688 | 2020-11-03T06:58:33 | 2020-11-03T06:58:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,386 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from lib.common import *
import httplib2
import urlparse
import re
def run_url(http,ob,item):
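    # Check for dead links: request the URL with its params and record any non-200 response.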
try:
result=[]
url=item['url']
detail=u''
detail=detail.encode('utf8')
parse=urlparse.urlparse(url)
path=parse.path
if path=="" or path=="/":
return result
if item['params'] == "":
return result
#end if
if item['method'] == 'get':
response,content=requestUrl(http,item['url']+"?"+item['params'],ob['task_id'],ob['domain_id'])
if response['status']!='200':
request=getRequest(item['url']+"?"+item['params'],'GET')
response=getResponse(response,"")
result.append(getRecord(ob,item['url']+"?"+item['params'],ob['level'],detail,request,response))
except Exception,e:
logging.getLogger().error("File:LocalFileIncludeScript.py, run_url function :" + str(e) + ",task id:" + ob['task_id'] + ",domain id:" + ob['domain_id'])
write_scan_log(ob['task_id'],ob['domain_id'],"File:LocalFileIncludeScript.py, run_url function :" + str(e)+", url:"+item['url']+"?"+item['params'])
#end try
return result
46cff41cf2f4186df4da0d2a9bc67633ba43561b | 2436422e8d584f1dd9fc27a403e02a5571b033b2 | /server/rest/views.py | 2635af1999df24bf3511af493d4d3a08bd53aa07 | [] | no_license | yhs3434/Study-Room | 20657a7d068edbc24cb27ddef49c6e18890a0fcb | 8b6804f02ba47517aa8e725f5531b7e0e5ac7315 | refs/heads/master | 2020-04-04T17:19:28.954492 | 2020-02-05T17:09:43 | 2020-02-05T17:09:43 | 156,116,178 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,511 | py | from django.shortcuts import render
from django.http import HttpResponse, JsonResponse
from rest_framework.decorators import api_view
from rest_framework.renderers import JSONRenderer
from rest_framework.parsers import JSONParser
from .models import User, Group, Subject, Tendency
from .models import User_Group, User_Subject, User_Tendency
from .models import Wait
from .serializers import UserSerializer, UserSubjectSerializer, UserTendencySerializer
from .serializers import WaitSerializer, GroupSerializer, UserGroupSerializer
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import APIView
from django.http import Http404
# Create your views here.
def index(request):
return render(request, 'rest/index.html', {})
@api_view(['GET', 'POST'])
def user_list(request):
if request.method == 'GET':
users = User.objects.all()
serializer = UserSerializer(users, many=True)
return JsonResponse(serializer.data, safe=False)
elif request.method == 'POST':
data = JSONParser().parse(request)
serializer = UserSerializer(data = data)
if serializer.is_valid():
serializer.save()
return JsonResponse(serializer.data, status=201)
return JsonResponse(serializer.errors, status=400)
@api_view(['GET', 'PUT', 'DELETE'])
def user_detail(request, pk):
try:
user = User.objects.get(pk=pk)
except(User.DoesNotExist):
return (HttpResponse(status=404))
if (request.method == 'GET'):
serializer = UserSerializer(user)
return (JsonResponse(serializer.data))
elif (request.method == 'PUT'):
data = JSONParser().parse(request)
serializer = UserSerializer(user, data=data)
if serializer.is_valid():
serializer.save()
return (JsonResponse(serializer.data))
return (JsonResponse(serializer.errors, status=404))
elif request.method == 'DELETE':
user.delete()
return (HttpResponse(status=204))
@api_view(['POST'])
def user_login(request):
if (request.method == 'POST'):
data = JSONParser().parse(request)
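        # Note (added): auth_pw is compared as plaintext below; hashing the
        # credential via Django's auth utilities would be safer.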
try:
user = User.objects.filter(auth_id = data['auth_id'])
if (user[0].auth_pw == data['auth_pw']):
serializer = UserSerializer(user[0])
return (Response(data=serializer.data, status=status.HTTP_200_OK))
else:
return (Response(status=status.HTTP_404_NOT_FOUND))
        except (User.DoesNotExist, IndexError):  # filter() yields an empty queryset, so user[0] raises IndexError
return (Response(status=status.HTTP_404_NOT_FOUND))
else:
return (Response(status = status.HTTP_400_BAD_REQUEST))
@api_view(['GET', 'POST'])
def choice_subject(request):
if (request.method == 'POST'):
data = JSONParser().parse(request)
user_id = data['id']
list = []
for key in data:
if(key == 'id'):
continue
try:
subject_id = (Subject.objects.get(name=key)).id
if(data[key] == 1):
insert = User_Subject.objects.create(user_id=User.objects.get(pk=user_id), subject_id=Subject.objects.get(pk=subject_id))
list.append(insert)
else:
queryset = User_Subject.objects.all()
queryset = queryset.filter(user_id=User.objects.get(pk=user_id), subject_id=Subject.objects.get(pk=subject_id))
queryset.delete()
except:
continue
return Response(status=status.HTTP_200_OK)
elif (request.method == 'GET'):
user_subject = User_Subject.objects.all()
serializer = UserSubjectSerializer(user_subject, many=True)
return Response(data=serializer.data, status=status.HTTP_200_OK)
else:
return Response(status=status.HTTP_404_NOT_FOUND)
@api_view(['POST', 'GET'])
def choice_tendency(request):
if (request.method=='POST'):
data = JSONParser().parse(request)
user_id = data['id']
user = User.objects.get(pk=user_id)
try:
queryset = User_Tendency.objects.filter(user_id=user)
queryset.delete()
except:
print('user(',user_id,') choose tendencies.')
try:
insert = User_Tendency.objects.create(user_id=user, rule=data['규칙'], learning=data['학습량'], \
numberPeople=data['인원'], friendship=data['친목'], environment=data['환경'], style=data['스타일'])
except:
return Response(status=status.HTTP_406_NOT_ACCEPTABLE)
return Response(status=status.HTTP_200_OK)
elif (request.method=='GET'):
user_tendency = User_Tendency.objects.all()
serializer = UserTendencySerializer(user_tendency, many=True)
return Response(data=serializer.data, status=status.HTTP_200_OK)
else:
return Response(status=status.HTTP_404_NOT_FOUND)
# View for the matching feature (whether it will actually be used is undecided)
class FindGroup(APIView):
def get(self, request):
waiter = Wait.objects.all()
serializer = WaitSerializer(waiter, many=True)
return Response(serializer.data)
def post(self, request):
data = JSONParser().parse(request)
try:
user = User.objects.get(pk=data['id'])
        except User.DoesNotExist:
            raise Http404
Wait.objects.filter(user=user).delete()
Wait.objects.create(user=user)
return Response(status=status.HTTP_201_CREATED)
# View for the matching feature (whether it will actually be used is undecided)
class FindGroupDetail(APIView):
def get_object(self, pk):
try:
return Wait.objects.get(pk=pk)
        except Wait.DoesNotExist:
            raise Http404
def get(self, request, pk, format=None):
waiter = self.get_object(pk)
serializer = WaitSerializer(waiter)
return Response(serializer.data)
def delete(self, request, pk, format=None):
waiter = self.get_object(pk)
waiter.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
# Room listing and creation class
class group_list(APIView):
def get(self, request):
groups = Group.objects.filter(public=True).order_by("-created_date")
serializer = GroupSerializer(groups, many=True)
return Response(serializer.data)
def post(self, request):
data = JSONParser().parse(request)
serializer = GroupSerializer(data=data)
if (serializer.is_valid()):
serializer.save()
return Response(data=serializer.data, status=status.HTTP_201_CREATED)
else:
            return Response(status=status.HTTP_406_NOT_ACCEPTABLE)
# Room join/delete class
class group_detail(APIView):
def get_object(self, pk):
try:
return Group.objects.get(pk=pk)
        except Group.DoesNotExist:
            raise Http404
def get(self, request, pk):
group = self.get_object(pk)
serializer = GroupSerializer(group)
return Response(data = serializer.data, status = status.HTTP_200_OK)
def delete(self, request, pk):
obj = self.get_object(pk)
obj.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
# Study group join function.
@api_view(['POST'])
def join_group(request):
if (request.method != 'POST'):
return Response(status=status.HTTP_400_BAD_REQUEST)
data = JSONParser().parse(request)
user_id = data['user_id']
group_id = data['group_id']
try:
user = User.objects.get(pk=user_id)
group = Group.objects.get(pk=group_id)
except:
return Response(status=status.HTTP_404_NOT_FOUND)
try:
num_of_people = group.num_people
max_of_people = group.max_num_people
if(num_of_people<max_of_people):
obj, created = User_Group.objects.update_or_create(user=user, group=group)
if(created):
group.num_people += 1
group.save()
except:
return Response(status=status.HTTP_500_INTERNAL_SERVER_ERROR)
return Response(status=status.HTTP_201_CREATED)
class UserGroupList(APIView):
def get_object(self, user_pk, group_pk):
try:
user = User.objects.get(pk= user_pk)
group = Group.objects.get(pk= group_pk)
        except (User.DoesNotExist, Group.DoesNotExist):
            raise Http404
return user, group
def get(self, request, user_pk, group_pk):
user, group = self.get_object(user_pk, group_pk)
try:
user_group = User_Group.objects.filter(user = user).get(group = group)
return Response(data=200, status=status.HTTP_200_OK)
except User_Group.DoesNotExist:
return Response(data=404, status=status.HTTP_404_NOT_FOUND)
def delete(self, request, user_pk, group_pk):
user, group = self.get_object(user_pk, group_pk)
try:
user_group = User_Group.objects.filter(user = user).get(group = group)
except User_Group.DoesNotExist:
return Response(status=status.HTTP_404_NOT_FOUND)
try:
group = user_group.group
user_group.delete()
group.num_people -= 1
group.save()
return Response(status=status.HTTP_204_NO_CONTENT)
except:
return Response(status=status.HTTP_400_BAD_REQUEST)
class UserGroupListUser(APIView):
def get_object(self, pk):
try:
user = User.objects.get(pk = pk)
        except User.DoesNotExist:
            raise Http404
return user
def get(self, request, pk):
user = self.get_object(pk)
try:
user_group = User_Group.objects.filter(user = user)
list_id = []
for obj in user_group:
list_id.append(obj.group.id)
group = Group.objects.filter(pk__in = list_id).order_by("-created_date")
serializer = GroupSerializer(group, many=True)
return Response(data=serializer.data, status=status.HTTP_200_OK)
except User_Group.DoesNotExist:
            return Response(status=status.HTTP_404_NOT_FOUND)
"[email protected]"
] | |
5dbca4f77abbcf27decc3fc2b15a7295c0496a74 | 7fae23d734356d2016f13f231673ee9ea1b6efd5 | /catkin_ws/devel/lib/python2.7/dist-packages/gazebo_msgs/srv/_GetLinkState.py | f4f9e09cc9915ae8a183933b57272e86be88a698 | [] | no_license | Tidu09/Eyantra | 57c4fd01a39305ff01758ed021643d894e61e60f | 2bca1cdce1742e79f1550b832cda086dc22e946f | refs/heads/main | 2023-01-12T07:57:54.356909 | 2020-11-22T05:41:40 | 2020-11-22T05:41:40 | 306,226,769 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 108 | py | /home/tidu/catkin_ws/devel/.private/gazebo_msgs/lib/python2.7/dist-packages/gazebo_msgs/srv/_GetLinkState.py | [
"[email protected]"
] | |
df6ed0a656f9ec810d286b827144bf78ca8db30a | 2da6133f3cd5c5fc19355292d60253b8c0dbcd49 | /.history/antz/views_20200404012439.py | 3dfd28a5bafc973ee616d892836f17b28524ad88 | [] | no_license | mirfarzam/python-advance-jadi-maktabkhooneh | b24f5c03ab88e3b12c166a439b925af92f50de49 | d9bcecae73fd992f1290c6fd76761683bb512825 | refs/heads/master | 2021-05-25T21:33:37.782734 | 2020-04-07T22:39:28 | 2020-04-07T22:39:28 | 253,927,960 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 74 | py | from django.shortcuts import render
def index(request):
    # Editor-autosave stub; return a rendered page so the view is valid.
    # (The template name below is an assumption added for illustration.)
    return render(request, 'antz/index.html', {})
2561a47c4d7afb0c4bf31d06192529c574366797 | 2fa016eeb6d4d4cc61fb0d43aa9f0fd1ad4ef2e3 | /python/函数/quadratic.py | 236d535788afe5e6a4ac3ede0dd3b40f57f946f1 | [] | no_license | juechen-zzz/learngit | 521e0d2c13d97248f6f8b1f2096f718dc497351b | 513d3e57f4e0fce72ca4ecd1f30be2d261ee9260 | refs/heads/master | 2021-07-04T17:20:58.456812 | 2020-08-27T02:08:05 | 2020-08-27T02:08:05 | 163,482,583 | 8 | 0 | null | null | null | null | UTF-8 | Python | false | false | 218 | py | # -*- coding: utf-8 -*-
## Solve a quadratic equation: a*x**2 + b*x + c = 0
import math
def quadratic(a, b, c):
    m = b*b - 4*a*c  # discriminant; math.sqrt raises ValueError if m < 0
    # Fixed sign error: the quadratic formula divides by 2*a, not -2*a.
    x1 = (-b + math.sqrt(m)) / (2*a)
    x2 = (-b - math.sqrt(m)) / (2*a)
    return x1, x2
r = quadratic(1, 6, 9)
print(r)  # (-3.0, -3.0): x**2 + 6x + 9 has the double root -3
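# Note (added): for a negative discriminant, cmath.sqrt could be used in place
# of math.sqrt to obtain the complex roots (assumption: complex output is acceptable).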
"[email protected]"
] | |
f16744f50e18e59f1326b83f02cb864a6192f983 | d554b1aa8b70fddf81da8988b4aaa43788fede88 | /5 - Notebooks e Data/1 - Análises numéricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/226/users/4137/codes/1836_2604.py | 2d39fb44f590d1ffec7a63f648064058d2d0a792 | [] | no_license | JosephLevinthal/Research-projects | a3bc3ca3b09faad16f5cce5949a2279cf14742ba | 60d5fd6eb864a5181f4321e7a992812f3c2139f9 | refs/heads/master | 2022-07-31T06:43:02.686109 | 2020-05-23T00:24:26 | 2020-05-23T00:24:26 | 266,199,309 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 132 | py | from numpy import*
from numpy.linalg import*
n = array(eval(input("Pagamentos:")))
for i in range(shape(n)[0]):
print(max(n[i]))
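# Note (added): the loop prints the largest payment in each row of the matrix.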
b0cf9febae60c6a97766768835e4a2dd26a5c061 | e24db52fb64c06e859e36122008fe27d7f2b7a81 | /model/fixed_length_pqueue.py | c1d5e99db2b86d1534e97d1221d61c221a000d05 | [] | no_license | zhuowangsylu/sklearn-sknnsuite | 1cd7f46d4f0f54190db8262a35e987cf103e62d4 | 22fe386be643e309d6491e9a408711e3472e396d | refs/heads/master | 2020-12-08T09:20:49.940874 | 2017-06-10T20:24:56 | 2017-06-10T20:24:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,097 | py | import itertools
import heapq
class FixedLengthPQueue(object):
def __init__(self, length):
"""
        :param length: max length of queue. Should be greater than 0
"""
self.pq = [] # list of entries arranged in a heap
self.entry_finder = {} # mapping of tasks to entries
self.counter = itertools.count() # unique sequence count
self.length = length
    def add_task(self, task, priority=0):
        """Add a task, evicting the highest-priority entry first if the queue is full.
        Note: re-adding an existing task leaves its old entry in the heap."""
        if len(self.pq) == self.length:
            self.pop_task()
        count = next(self.counter)
        entry = [-priority, count, task]
        self.entry_finder[task] = entry
        heapq.heappush(self.pq, entry)
    def pop_task(self):
        """Remove and return the highest-priority task. Raise KeyError if empty."""
        while self.pq:
            priority, count, task = heapq.heappop(self.pq)
            del self.entry_finder[task]
            return task
        raise KeyError('pop from an empty priority queue')
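
# Usage sketch (added for illustration):
#   q = FixedLengthPQueue(2)
#   q.add_task('a', priority=1)
#   q.add_task('b', priority=5)
#   q.add_task('c', priority=3)   # queue full: evicts 'b', the highest priority
#   q.pop_task()                  # -> 'c', the highest priority remaining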
"[email protected]"
] | |
55d758c86721b7858d56921aa5add18d6ae86097 | 6be845bf70a8efaf390da28c811c52b35bf9e475 | /windows/Resources/Python/Core/Lib/email/_parseaddr.py | 853096ece7f0c40a593e0fe0820a7588fca4ed1f | [] | no_license | kyeremalprime/ms | 228194910bf2ed314d0492bc423cc687144bb459 | 47eea098ec735b2173ff0d4e5c493cb8f04e705d | refs/heads/master | 2020-12-30T15:54:17.843982 | 2017-05-14T07:32:01 | 2017-05-14T07:32:01 | 91,180,709 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 14,230 | py | # uncompyle6 version 2.9.10
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.6.0b2 (default, Oct 11 2016, 05:27:10)
# [GCC 6.2.0 20161005]
# Embedded file name: _parseaddr.py
"""Email address parsing code.
Lifted directly from rfc822.py. This should eventually be rewritten.
"""
__all__ = [
'mktime_tz',
'parsedate',
'parsedate_tz',
'quote']
import time
SPACE = ' '
EMPTYSTRING = ''
COMMASPACE = ', '
_monthnames = [
'jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul',
'aug', 'sep', 'oct', 'nov', 'dec',
'january', 'february', 'march', 'april', 'may', 'june', 'july',
'august', 'september', 'october', 'november', 'december']
_daynames = [
'mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']
_timezones = {'UT': 0,'UTC': 0,'GMT': 0,'Z': 0,'AST': -400,
'ADT': -300,'EST': -500,
'EDT': -400,'CST': -600,
'CDT': -500,'MST': -700,
'MDT': -600,'PST': -800,
'PDT': -700}
def parsedate_tz(data):
"""Convert a date string to a time tuple.
Accounts for military timezones.
"""
data = data.split()
if data[0].endswith(',') or data[0].lower() in _daynames:
del data[0]
else:
i = data[0].rfind(',')
if i >= 0:
data[0] = data[0][i + 1:]
if len(data) == 3:
stuff = data[0].split('-')
if len(stuff) == 3:
data = stuff + data[1:]
if len(data) == 4:
s = data[3]
i = s.find('+')
if i > 0:
data[3:] = [
s[:i], s[i + 1:]]
else:
data.append('')
if len(data) < 5:
return
else:
data = data[:5]
dd, mm, yy, tm, tz = data
mm = mm.lower()
if mm not in _monthnames:
dd, mm = mm, dd.lower()
if mm not in _monthnames:
return
mm = _monthnames.index(mm) + 1
if mm > 12:
mm -= 12
if dd[-1] == ',':
dd = dd[:-1]
i = yy.find(':')
if i > 0:
yy, tm = tm, yy
if yy[-1] == ',':
yy = yy[:-1]
if not yy[0].isdigit():
yy, tz = tz, yy
if tm[-1] == ',':
tm = tm[:-1]
tm = tm.split(':')
if len(tm) == 2:
thh, tmm = tm
tss = '0'
elif len(tm) == 3:
thh, tmm, tss = tm
else:
return
try:
yy = int(yy)
dd = int(dd)
thh = int(thh)
tmm = int(tmm)
tss = int(tss)
except ValueError:
return
if yy < 100:
if yy > 68:
yy += 1900
else:
yy += 2000
tzoffset = None
tz = tz.upper()
if tz in _timezones:
tzoffset = _timezones[tz]
else:
try:
tzoffset = int(tz)
except ValueError:
pass
if tzoffset:
if tzoffset < 0:
tzsign = -1
tzoffset = -tzoffset
else:
tzsign = 1
tzoffset = tzsign * (tzoffset // 100 * 3600 + tzoffset % 100 * 60)
return (
yy, mm, dd, thh, tmm, tss, 0, 1, -1, tzoffset)
def parsedate(data):
"""Convert a time string to a time tuple."""
t = parsedate_tz(data)
if isinstance(t, tuple):
return t[:9]
else:
return t
def mktime_tz(data):
"""Turn a 10-tuple as returned by parsedate_tz() into a UTC timestamp."""
if data[9] is None:
return time.mktime(data[:8] + (-1, ))
else:
t = time.mktime(data[:8] + (0, ))
        return t - data[9] - time.timezone
def quote(str):
"""Prepare string to be used in a quoted string.
Turns backslash and double quote characters into quoted pairs. These
are the only characters that need to be quoted inside a quoted string.
Does not add the surrounding double quotes.
"""
return str.replace('\\', '\\\\').replace('"', '\\"')
class AddrlistClass():
"""Address parser class by Ben Escoto.
To understand what this class does, it helps to have a copy of RFC 2822 in
front of you.
Note: this class interface is deprecated and may be removed in the future.
Use rfc822.AddressList instead.
"""
def __init__(self, field):
"""Initialize a new instance.
`field' is an unparsed address header field, containing
one or more addresses.
"""
self.specials = '()<>@,:;."[]'
self.pos = 0
self.LWS = ' \t'
self.CR = '\r\n'
self.FWS = self.LWS + self.CR
self.atomends = self.specials + self.LWS + self.CR
self.phraseends = self.atomends.replace('.', '')
self.field = field
self.commentlist = []
def gotonext(self):
"""Parse up to the start of the next address."""
while self.pos < len(self.field):
if self.field[self.pos] in self.LWS + '\n\r':
self.pos += 1
elif self.field[self.pos] == '(':
self.commentlist.append(self.getcomment())
else:
break
def getaddrlist(self):
"""Parse all addresses.
Returns a list containing all of the addresses.
"""
result = []
while self.pos < len(self.field):
ad = self.getaddress()
if ad:
result += ad
else:
result.append(('', ''))
return result
def getaddress(self):
"""Parse the next address."""
self.commentlist = []
self.gotonext()
oldpos = self.pos
oldcl = self.commentlist
plist = self.getphraselist()
self.gotonext()
returnlist = []
if self.pos >= len(self.field):
if plist:
returnlist = [
(
SPACE.join(self.commentlist), plist[0])]
elif self.field[self.pos] in '.@':
self.pos = oldpos
self.commentlist = oldcl
addrspec = self.getaddrspec()
returnlist = [(SPACE.join(self.commentlist), addrspec)]
elif self.field[self.pos] == ':':
returnlist = []
fieldlen = len(self.field)
self.pos += 1
while self.pos < len(self.field):
self.gotonext()
if self.pos < fieldlen and self.field[self.pos] == ';':
self.pos += 1
break
returnlist = returnlist + self.getaddress()
elif self.field[self.pos] == '<':
routeaddr = self.getrouteaddr()
if self.commentlist:
returnlist = [(SPACE.join(plist) + ' (' + ' '.join(self.commentlist) + ')', routeaddr)]
else:
returnlist = [
(
SPACE.join(plist), routeaddr)]
elif plist:
returnlist = [
(
SPACE.join(self.commentlist), plist[0])]
elif self.field[self.pos] in self.specials:
self.pos += 1
self.gotonext()
if self.pos < len(self.field) and self.field[self.pos] == ',':
self.pos += 1
return returnlist
def getrouteaddr(self):
"""Parse a route address (Return-path value).
This method just skips all the route stuff and returns the addrspec.
"""
if self.field[self.pos] != '<':
return
expectroute = False
self.pos += 1
self.gotonext()
adlist = ''
while self.pos < len(self.field):
if expectroute:
self.getdomain()
expectroute = False
elif self.field[self.pos] == '>':
self.pos += 1
break
elif self.field[self.pos] == '@':
self.pos += 1
expectroute = True
elif self.field[self.pos] == ':':
self.pos += 1
else:
adlist = self.getaddrspec()
self.pos += 1
break
self.gotonext()
return adlist
def getaddrspec(self):
"""Parse an RFC 2822 addr-spec."""
aslist = []
self.gotonext()
while self.pos < len(self.field):
if self.field[self.pos] == '.':
aslist.append('.')
self.pos += 1
elif self.field[self.pos] == '"':
aslist.append('"%s"' % quote(self.getquote()))
elif self.field[self.pos] in self.atomends:
break
else:
aslist.append(self.getatom())
self.gotonext()
if self.pos >= len(self.field) or self.field[self.pos] != '@':
return EMPTYSTRING.join(aslist)
aslist.append('@')
self.pos += 1
self.gotonext()
return EMPTYSTRING.join(aslist) + self.getdomain()
def getdomain(self):
"""Get the complete domain name from an address."""
sdlist = []
while self.pos < len(self.field):
if self.field[self.pos] in self.LWS:
self.pos += 1
elif self.field[self.pos] == '(':
self.commentlist.append(self.getcomment())
elif self.field[self.pos] == '[':
sdlist.append(self.getdomainliteral())
elif self.field[self.pos] == '.':
self.pos += 1
sdlist.append('.')
elif self.field[self.pos] in self.atomends:
break
else:
sdlist.append(self.getatom())
return EMPTYSTRING.join(sdlist)
def getdelimited(self, beginchar, endchars, allowcomments=True):
"""Parse a header fragment delimited by special characters.
`beginchar' is the start character for the fragment.
If self is not looking at an instance of `beginchar' then
getdelimited returns the empty string.
`endchars' is a sequence of allowable end-delimiting characters.
Parsing stops when one of these is encountered.
If `allowcomments' is non-zero, embedded RFC 2822 comments are allowed
within the parsed fragment.
"""
if self.field[self.pos] != beginchar:
return ''
slist = ['']
quote = False
self.pos += 1
while self.pos < len(self.field):
if quote:
slist.append(self.field[self.pos])
quote = False
elif self.field[self.pos] in endchars:
self.pos += 1
break
elif allowcomments and self.field[self.pos] == '(':
slist.append(self.getcomment())
continue
elif self.field[self.pos] == '\\':
quote = True
else:
slist.append(self.field[self.pos])
self.pos += 1
return EMPTYSTRING.join(slist)
def getquote(self):
"""Get a quote-delimited fragment from self's field."""
return self.getdelimited('"', '"\r', False)
def getcomment(self):
"""Get a parenthesis-delimited fragment from self's field."""
return self.getdelimited('(', ')\r', True)
def getdomainliteral(self):
"""Parse an RFC 2822 domain-literal."""
return '[%s]' % self.getdelimited('[', ']\r', False)
def getatom(self, atomends=None):
"""Parse an RFC 2822 atom.
Optional atomends specifies a different set of end token delimiters
(the default is to use self.atomends). This is used e.g. in
getphraselist() since phrase endings must not include the `.' (which
is legal in phrases)."""
atomlist = [
'']
if atomends is None:
atomends = self.atomends
while self.pos < len(self.field):
if self.field[self.pos] in atomends:
break
else:
atomlist.append(self.field[self.pos])
self.pos += 1
return EMPTYSTRING.join(atomlist)
def getphraselist(self):
"""Parse a sequence of RFC 2822 phrases.
A phrase is a sequence of words, which are in turn either RFC 2822
atoms or quoted-strings. Phrases are canonicalized by squeezing all
runs of continuous whitespace into one space.
"""
plist = []
while self.pos < len(self.field):
if self.field[self.pos] in self.FWS:
self.pos += 1
elif self.field[self.pos] == '"':
plist.append(self.getquote())
elif self.field[self.pos] == '(':
self.commentlist.append(self.getcomment())
elif self.field[self.pos] in self.phraseends:
break
else:
plist.append(self.getatom(self.phraseends))
return plist
class AddressList(AddrlistClass):
"""An AddressList encapsulates a list of parsed RFC 2822 addresses."""
def __init__(self, field):
AddrlistClass.__init__(self, field)
if field:
self.addresslist = self.getaddrlist()
else:
self.addresslist = []
def __len__(self):
return len(self.addresslist)
def __add__(self, other):
newaddr = AddressList(None)
newaddr.addresslist = self.addresslist[:]
for x in other.addresslist:
if x not in self.addresslist:
newaddr.addresslist.append(x)
return newaddr
def __iadd__(self, other):
for x in other.addresslist:
if x not in self.addresslist:
self.addresslist.append(x)
return self
def __sub__(self, other):
newaddr = AddressList(None)
for x in self.addresslist:
if x not in other.addresslist:
newaddr.addresslist.append(x)
return newaddr
def __isub__(self, other):
for x in other.addresslist:
if x in self.addresslist:
self.addresslist.remove(x)
return self
def __getitem__(self, index):
        return self.addresslist[index]
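
# Usage sketch (added; mirrors the classic rfc822/email.utils behaviour):
#   >>> parsedate_tz('Mon, 20 Nov 1995 19:12:08 -0500')
#   (1995, 11, 20, 19, 12, 8, 0, 1, -1, -18000)
#   >>> AddressList('Jane Doe <[email protected]>').addresslist
#   [('Jane Doe', '[email protected]')]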
"[email protected]"
] | |
5cb01c05221174a919960232eb3fdbabe308c355 | fb94faa56d5763607be6566925132f4957d751cf | /pyroms_toolbox/pyroms_toolbox/zview.py | 508a6b3ba23f1910f4321953d47cf1b4eeea0055 | [
"BSD-3-Clause"
] | permissive | csherwood-usgs/pyroms | 44e684ec0b20e242cf3743d128332be330209289 | be5e40a1720561bb18698f08a2c74b1906c73bab | refs/heads/master | 2021-01-24T23:51:00.789744 | 2013-12-30T20:46:15 | 2013-12-30T20:46:15 | 16,179,889 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,825 | py | import numpy as np
import matplotlib.pyplot as plt
from matplotlib import cm, colors
from mpl_toolkits.basemap import Basemap
import pyroms
import pyroms_toolbox
def zview(var, tindex, depth, grid, filename=None, \
cmin=None, cmax=None, clev=None, clb_format='%.2f', \
fill=False, contour=False, d=4, range=None, fts=None, \
title=None, clb=True, pal=None, proj='merc', \
fill_land=False, outfile=None):
"""
map = zview(var, tindex, depth, grid, {optional switch})
optional switch:
- filename if defined, load the variable from file
- cmin set color minimum limit
- cmax set color maximum limit
- clev set the number of color step
- fill use contourf instead of pcolor
- contour overlay contour (request fill=True)
- range set axis limit
- fts set font size (default: 12)
- title add title to the plot
    - clb         add colorbar (default: True)
- pal set color map (default: cm.jet)
- proj set projection type (default: merc)
    - fill_land   fill land masked area with gray (default: False)
- outfile if defined, write figure to file
    plot a constant-z slice of variable var. If filename is provided,
    var must be a string and the variable will be loaded from the file.
    grid can be a grid object or a gridid. In the latter case, the grid
    object corresponding to the provided gridid will be loaded.
If proj is not None, return a Basemap object to be used with quiver
for example.
"""
# get grid
if type(grid).__name__ == 'ROMS_Grid':
grd = grid
else:
grd = pyroms.grid.get_ROMS_grid(grid)
# get variable
if filename == None:
var = var
else:
data = pyroms.io.Dataset(filename)
var = data.variables[var]
Np, Mp, Lp = grd.vgrid.z_r[0,:].shape
    if tindex != -1:
assert len(var.shape) == 4, 'var must be 4D (time plus space).'
K, N, M, L = var.shape
else:
assert len(var.shape) == 3, 'var must be 3D (no time dependency).'
N, M, L = var.shape
# determine where on the C-grid these variable lies
if N == Np and M == Mp and L == Lp:
Cpos='rho'
mask = grd.hgrid.mask_rho
if N == Np and M == Mp and L == Lp-1:
Cpos='u'
mask = grd.hgrid.mask_u
if N == Np and M == Mp-1 and L == Lp:
Cpos='v'
mask = grd.hgrid.mask_v
    # get constant-z slice
if tindex == -1:
var = var[:,:,:]
else:
var = var[tindex,:,:,:]
depth = -abs(depth)
if fill == True:
zslice, lon, lat = pyroms.tools.zslice(var, depth, grd, \
Cpos=Cpos)
else:
zslice, lon, lat = pyroms.tools.zslice(var, depth, grd, \
Cpos=Cpos, vert=True)
# plot
if cmin is None:
cmin = zslice.min()
else:
cmin = float(cmin)
if cmax is None:
cmax = zslice.max()
else:
cmax = float(cmax)
if clev is None:
clev = 100.
else:
clev = float(clev)
dc = (cmax - cmin)/clev ; vc = np.arange(cmin,cmax+dc,dc)
if pal is None:
pal = cm.jet
else:
pal = pal
if fts is None:
fts = 12
else:
fts = fts
#pal.set_over('w', 1.0)
#pal.set_under('w', 1.0)
#pal.set_bad('w', 1.0)
pal_norm = colors.BoundaryNorm(vc,ncolors=256, clip = False)
if range is None:
lon_min = lon.min()
lon_max = lon.max()
lon_0 = (lon_min + lon_max) / 2.
lat_min = lat.min()
lat_max = lat.max()
lat_0 = (lat_min + lat_max) / 2.
else:
lon_min = range[0]
lon_max = range[1]
lon_0 = (lon_min + lon_max) / 2.
lat_min = range[2]
lat_max = range[3]
lat_0 = (lat_min + lat_max) / 2.
# clear figure
#plt.clf()
if proj is not None:
map = Basemap(projection=proj, llcrnrlon=lon_min, llcrnrlat=lat_min, \
urcrnrlon=lon_max, urcrnrlat=lat_max, lat_0=lat_0, lon_0=lon_0, \
resolution='h', area_thresh=5.)
#map = pyroms.utility.get_grid_proj(grd, type=proj)
x, y = map(lon,lat)
if fill_land is True and proj is not None:
# fill land and draw coastlines
map.drawcoastlines()
map.fillcontinents(color='grey')
else:
if proj is not None:
Basemap.pcolor(map, x, y, mask, vmin=-2, cmap=cm.gray)
pyroms_toolbox.plot_coast_line(grd, map)
else:
plt.pcolor(lon, lat, mask, vmin=-2, cmap=cm.gray)
pyroms_toolbox.plot_coast_line(grd)
if fill is True:
if proj is not None:
cf = Basemap.contourf(map, x, y, zslice, vc, cmap = pal, \
norm = pal_norm)
else:
cf = plt.contourf(lon, lat, zslice, vc, cmap = pal, \
norm = pal_norm)
else:
if proj is not None:
cf = Basemap.pcolor(map, x, y, zslice, cmap = pal, norm = pal_norm)
else:
cf = plt.pcolor(lon, lat, zslice, cmap = pal, norm = pal_norm)
if clb is True:
clb = plt.colorbar(cf, fraction=0.075,format=clb_format)
for t in clb.ax.get_yticklabels():
t.set_fontsize(fts)
if contour is True:
if fill is not True:
raise Warning, 'Please run again with fill=True to overlay contour.'
else:
if proj is not None:
Basemap.contour(map, x, y, zslice, vc[::d], colors='k', linewidths=0.5, linestyles='solid')
else:
plt.contour(lon, lat, zslice, vc[::d], colors='k', linewidths=0.5, linestyles='solid')
if proj is None and range is not None:
plt.axis(range)
if title is not None:
plt.title(title, fontsize=fts+4)
if proj is not None:
map.drawmeridians(np.arange(lon_min,lon_max, (lon_max-lon_min)/5.001), \
labels=[0,0,0,1], fmt='%.1f')
map.drawparallels(np.arange(lat_min,lat_max, (lat_max-lat_min)/5.001), \
labels=[1,0,0,0], fmt='%.1f')
if outfile is not None:
if outfile.find('.png') != -1 or outfile.find('.svg') != -1 or \
outfile.find('.eps') != -1:
print 'Write figure to file', outfile
plt.savefig(outfile, dpi=200, facecolor='w', edgecolor='w', \
orientation='portrait')
else:
print 'Unrecognized file extension. Please use .png, .svg or .eps file extension.'
if proj is None:
return
else:
return map
| [
"[email protected]"
] | |
cf67d8266fd1c7b49d932b7a0d593bdefd4d6ab8 | a67a987ed078da0a1de2908c8c0e08070dee65b1 | /genice/lattices/sTprime.py | 16592d7d2a4fb71ae63cafd4bb23059714c8213e | [] | no_license | Python3pkg/GenIce | ef1ce7ee2997c10e08dde75ac36050a653cd4fc5 | 1e9458b7bf8e0fd2ad5d0c4f8987cea0ae7ca0b0 | refs/heads/master | 2021-01-21T17:31:51.595858 | 2017-05-21T14:09:32 | 2017-05-21T14:09:32 | 91,962,047 | 0 | 0 | null | 2017-05-21T14:09:28 | 2017-05-21T14:09:28 | null | UTF-8 | Python | false | false | 1,457 | py | """
Data source: Smirnov, G. S. & Stegailov, V. V. Toward Determination of the New Hydrogen Hydrate Clathrate Structures. J Phys Chem Lett 4, 3560-3564 (2013).
"""
density = 1.2 #default density
bondlen = 1.2 #bond threshold
celltype = "rect"
cell="""
4.04345643838 3.18400949048 3.18412710497
"""
#estimated by unitcell-designer2.py
coord="relative"
waters="""
0.324999313604 0.241626046227 0.741607546689
0.457688412412 0.491604796867 0.811167153486
0.324997440466 0.741611811736 0.741618888106
0.457662988572 0.991605010596 0.672075052384
0.692311583922 0.991727905597 0.672166183923
0.825002557263 0.741721003892 0.741714422407
0.692337007775 0.491727692099 0.811258277449
0.825000683446 0.241706769677 0.741725809343
0.192314228314 0.311166810957 0.491600600285
0.192335356629 0.67208565913 0.491600986442
0.957685764524 0.672166320486 0.491732720568
0.95766464453 0.311247477429 0.491732335146
0.32499754824 0.241612330348 0.241618912338
0.457662990343 0.491605708657 0.172075024014
0.324999242327 0.741626562519 0.241607525056
0.457688416499 0.991605358409 0.31116718725
0.69233701333 0.991728253805 0.311258313389
0.825000760623 0.741707288042 0.2417257845
0.692311587147 0.491728604283 0.172166142582
0.825002454031 0.241721519946 0.241714444623
0.192335354927 0.172085857455 0.991600896391
0.192314236022 0.811167014397 0.991600717379
0.957664643845 0.811247674634 0.991732448252
0.957685771209 0.172166522813 0.991732627999
"""
| [
"[email protected]"
] | |
e02234ba0c5b8cd9dfb2f91ac9dc4b789beb104e | a0e5418d4f79587dd8ea6f9425a84ded3351d139 | /src/actions/conquer_action.py | 917e56d82ffd6f263150995370328040c06b611b | [] | no_license | thydungeonsean/Shinar_Genesis | db99f17c8d6afbf69a4950b46223b586e55c83cf | ac21f324c11db7c1a722f029a8186c5dc45c9097 | refs/heads/master | 2020-03-15T22:07:48.281124 | 2018-06-07T21:09:18 | 2018-06-07T21:09:18 | 116,487,934 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,425 | py | from military_action import MilitaryAction
from src.enum.actions import CONQUER_ACTION
from map_tools import get_army_movement_options, get_conquered_points, in_player_domain
from src.enum.object_codes import *
class ConquerAction(MilitaryAction):
def __init__(self, state, player):
MilitaryAction.__init__(self, state, player, CONQUER_ACTION)
def place_text(self):
return 'Raise Army'
def select_text(self):
return 'Conquer the land'
# moving action
def compute_valid_move_points(self):
# points in range of selected army
return get_army_movement_options(self.state, self.selected_army, conquer=True)
def activate_effect(self, point):
# if enemy building - attack it
# else
# get conquered points
conquered = get_conquered_points(self.state, point)
# spread dominion to those points
map(self.extend_rule, conquered)
# end action
def extend_rule(self, point):
self.state.map.dominion_map.add_dominion(self.player.player_id, point)
# battle triggering helper methods
def get_win_effect(self, point, defender):
def win_effect():
self.activate_effect(point)
print 'attacker wins'
# if defender is garrison, apply correct building conquer interaction
if defender.is_garrison() and not defender.sallying:
defender.rout()
self.conquer_building(defender)
else:
defender.rout()
# end point
self.complete_action()
return win_effect
def conquer_building(self, garrison):
building = garrison.building
point = building.coord.int_position
if building.obj_code in {TOWER, PALACE}:
building.raze()
elif building.obj_code == GRANARY:
if in_player_domain(self.state, point):
building.capture(self.player)
self.selected_army.form_garrison(building)
else:
building.raze()
elif building.obj_code == ZIGGURAT:
if in_player_domain(self.state, point) and not building.under_construction and\
self.player.can_add_ziggurat():
building.capture(self.player)
self.selected_army.form_garrison(building)
else:
building.raze()
33f386fa4ae0cf685ab62da52fc7c8b759b4cd0d | 48c65330f577d11cedb29fd970aee35788ab72c6 | /model_flfact_tpv__eg_cupones.py | 6f28705bd6ef39f7f6e6d8c6c9eafc90d2a08dcc | [] | no_license | yeboyebo/elganso_sync | 309ecbaba3127493abe001cd1704cc7098234baa | 66f033a0e27a05c1fc6704ec6ba2bd474d204b7e | refs/heads/master | 2023-07-22T00:17:48.201252 | 2023-07-19T07:48:40 | 2023-07-19T07:48:40 | 173,096,155 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 842 | py | # @class_declaration interna_eg_cupones #
import importlib
from YBUTILS.viewREST import helpers
from models.flfact_tpv import models as modelos
class interna_eg_cupones(modelos.mtd_eg_cupones, helpers.MixinConAcciones):
pass
class Meta:
proxy = True
# @class_declaration elganso_sync_eg_cupones #
class elganso_sync_eg_cupones(interna_eg_cupones, helpers.MixinConAcciones):
pass
class Meta:
proxy = True
# @class_declaration eg_cupones #
class eg_cupones(elganso_sync_eg_cupones, helpers.MixinConAcciones):
pass
class Meta:
proxy = True
def getIface(self=None):
return form.iface
definitions = importlib.import_module("models.flfact_tpv.eg_cupones_def")
form = definitions.FormInternalObj()
form._class_init()
form.iface.ctx = form.iface
form.iface.iface = form.iface
cfb35bf471134268b39e963d38b5d44009cbd811 | f92dff3781ce21b5a1fd18f30ab52a3976d254f5 | /backend/wallet/api/v1/urls.py | 4befa8658a0e2aa3f590ee382d837b3ab9016fa4 | [] | no_license | crowdbotics-apps/test-24800 | 41270507a49b02bc43fc89822b541cd99cb84005 | 6e7d5bb77af12c5d2c6e3f9f46b7d4d39d185267 | refs/heads/master | 2023-03-11T22:27:56.763867 | 2021-03-02T07:29:45 | 2021-03-02T07:29:45 | 343,671,693 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 635 | py | from django.urls import path, include
from rest_framework.routers import DefaultRouter
from .viewsets import (
PaymentTransactionViewSet,
TaskerPaymentAccountViewSet,
TaskerWalletViewSet,
PaymentMethodViewSet,
CustomerWalletViewSet,
)
router = DefaultRouter()
router.register("taskerpaymentaccount", TaskerPaymentAccountViewSet)
router.register("customerwallet", CustomerWalletViewSet)
router.register("paymentmethod", PaymentMethodViewSet)
router.register("paymenttransaction", PaymentTransactionViewSet)
router.register("taskerwallet", TaskerWalletViewSet)
urlpatterns = [
path("", include(router.urls)),
]
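
# Note (added): DefaultRouter generates list and detail routes for each
# registration above, e.g. "/taskerwallet/" and "/taskerwallet/{pk}/".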
322a37cf0131836f7a7093c2ddcb5b15b9851a03 | a50e73d880fcea987cd2ddd4cc059a67cd7e22e0 | /day06/求区间的整数和函数版.py | 400f1072c91905899a2f52dd43cd1e51e98234c1 | [] | no_license | Icecarry/learn | 31bed60d5b61201d30bfbaaf520e4e0146e10863 | 2af301b92c9143def9b4c278024d6d2d6e21f0b9 | refs/heads/master | 2021-04-06T07:45:11.938995 | 2018-03-13T06:40:54 | 2018-03-13T06:40:54 | 124,759,777 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 433 | py | # 编写一段代码,求1-100之间所有整数的和
# def sum1(m, n):
# return summer
# sum1(1, 50)
# User input
num1 = int(input("Enter the start number:"))
num2 = int(input("Enter the end number:"))
# print("The sum of all integers from %d to %d is:" % (num1, num2), sum1(num1, num2))
# Accumulate with a loop
summer = 0
for x in range(num1, num2 + 1):  # iterate over the requested range only
    summer += x
print(summer)
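# Note (added): the built-in sum gives the same result:
# print(sum(range(num1, num2 + 1)))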
419c2a5f336d46136d581b7847fb23c29400c2e3 | fe62d139012bdde8431b1b9c2b36b2a1491c6ad6 | /temps1series0213plot.py | 011f2b595599cef85f2ad0777247c5330ad45954 | [] | no_license | Gattocrucco/locs12 | 923467db0f49b3bf9e45c6a45c548751c972b130 | fa99528fadc2d8e745486e427319ec67527cf866 | refs/heads/master | 2023-03-28T21:40:22.385179 | 2021-04-08T21:52:11 | 2021-04-08T21:52:11 | 356,057,491 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,782 | py | import numpy as np
from matplotlib import pyplot as plt
from scipy import interpolate
import textbox
inphotons = 2  # index into the nphotons axis (available values: [3, 7, 10, 15, 20, 30, 40])
idcr = 1  # index into the DCR axis (available values: [25, 250] cps/pdm)
rate = 100 # cps
table = np.load('temps1series0213.npy')
figkw = dict(clear=True, sharex=True, figsize=[9, 7])
figs = []
axs = []
for wname in ['efficiency', 'fakerate', 'effvsrate', 'effvssigma']:
figkw['sharey'] = 'col' if wname == 'effvsrate' else True
fig, ax = plt.subplots(2, 2, num=f'temps1series0213plot-{wname}', **figkw)
figs.append(fig)
axs.append(ax)
axs = np.array(axs)
for ax in axs[0].reshape(-1):
if ax.is_first_col():
ax.set_ylabel('S1 detection efficiency')
if ax.is_last_row():
ax.set_xlabel('Threshold on filter output')
for ax in axs[1].reshape(-1):
if ax.is_first_col():
ax.set_ylabel('Fake rate [cps]')
if ax.is_last_row():
ax.set_xlabel('Threshold on filter output')
for ax in axs[2].reshape(-1):
if ax.is_first_col():
ax.set_ylabel('S1 detection efficiency')
if ax.is_last_row():
ax.set_xlabel('Fake rate [cps]')
for ax in axs[3].reshape(-1):
if ax.is_first_col():
ax.set_ylabel(f'Efficiency at fake rate {rate} cps')
if ax.is_last_row():
ax.set_xlabel('Template $\\sigma$ [ns]')
# the shape of table is over (DCR, VL, nphotons, sigma)
for ivl in range(table.shape[1]):
entries = table[idcr, ivl]
if np.count_nonzero(entries['done']) == 0:
continue
for ifilter, fname in enumerate(['ER', 'NR']):
qax = axs[:, ifilter, ivl]
for ifig, ax in enumerate(qax):
for inph, entry in enumerate(entries):
if not np.any(entry['done']) or ifig != 3:
continue
entry = entry[entry['done']]
nph = entry[0]['parameters']['nphotons']
plotkw = dict(
alpha=(inph + 1) / len(entries),
color='#600',
label=f'{nph}',
linestyle=['-', '--', '-.', ':'][inph % 4],
)
x = entry['parameters']['sigma']
interpkw = dict(assume_sorted=True, copy=False)
y = [
interpolate.interp1d(subent['rate'], subent[fname]['efficiency'], **interpkw)(rate)
for subent in entry
]
ax.plot(x, y, **plotkw)
for isigma, entry in enumerate(entries[inphotons]):
if not entry['done'] or ifig == 3:
continue
sigma = entry['parameters']['sigma']
plotkw = dict(
alpha=(isigma + 1) / len(entries[inphotons]),
color='#600',
label=f'{sigma:.3g}',
linestyle=['-', '--', '-.', ':'][isigma % 4],
)
if ifig == 0:
x = entry['threshold']
y = entry[fname]['effthr']
elif ifig == 1:
x = entry['threshold']
y = entry[fname]['ratethr']
elif ifig == 2:
x = entry['rate']
y = entry[fname]['efficiency']
changepoint = np.flatnonzero(np.diff(y))
start = max(0 , changepoint[ 0] - 1)
end = min(len(y), changepoint[-1] + 3)
sel = slice(start, end)
x = x[sel]
y = y[sel]
ax.plot(x, y, **plotkw)
for ax in qax:
s1type = 'ER' if entries[0, 0]['parameters']['VL'] < 1 else 'NR'
ax.set_title(f'{s1type} S1, {fname} filter')
ax.minorticks_on()
ax.grid(True, which='major', linestyle='--')
ax.grid(True, which='minor', linestyle=':')
for ifig, fax in enumerate(axs):
if ifig == 3:
legendtitle = 'Nphotons'
else:
legendtitle = 'Template $\\sigma$ [ns]'
fax[0, 0].legend(loc='best', fontsize='small', ncol=2, title=legendtitle)
params = table[idcr, 0, inphotons, 0]['parameters']
info = f"""\
DCR = {params['DCR'] * 1e9:.3g} cps/pdm
tres = 10 ns
nevents = 1000"""
if ifig != 3:
info = f"nphotons = {params['nphotons']}\n" + info
infoheight = 'lower' if ifig in [2, 3] else 'upper'
textbox.textbox(fax[0, 1], info, loc=f'{infoheight} right', fontsize='small')
if ifig == 1:
fax[0, 0].set_yscale('log')
if ifig == 2:
fax[0, 0].set_xscale('log')
for fig in figs:
fig.tight_layout()
fig.show()
| [
"[email protected]"
] | |
4ac44ea7572c0f7f3ccbbb100cda5acbab08db23 | 70e75a0b0ca56fd8318606cc093e13fe3d700227 | /Programmers/2020_카카오_상반기_공채/src/3.자물쇠와열쇠.py | ac042fd66fc62a187dd6c8ce877832ab61ec41db | [] | no_license | hanameee/Algorithm | ba4632797ff8ea999f37d2578f32c3c00da02b0f | bf1203544e1b44d2bbf929fd729e263278260313 | refs/heads/master | 2023-02-04T19:22:44.845863 | 2023-01-27T17:05:26 | 2023-01-27T17:05:26 | 220,486,014 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,486 | py | from copy import deepcopy
def is_solved(graph, m, n):
for i in range(m-1, m-1+n):
for j in range(m-1, m-1+n):
if graph[i][j] != 1:
return False
return True
def rotate(key):
new_key = [[0]*len(key[0]) for _ in range(len(key[0]))]
for i in range(len(key)):
for j in range(len(key)):
new_key[j][len(key)-i-1] = key[i][j]
return new_key
def process(key, graph, m, n):
g_len = len(graph)
for i in range(m-1+n):
for j in range(m-1+n):
g = deepcopy(graph)
# 키를 적용시켜본다
for key_i in range(len(key)):
if i+key_i > n+m-1:
break
for key_j in range(len(key)):
g[i+key_i][j+key_j] += key[key_i][key_j]
if is_solved(g, m, n):
return True
return False
def solution(key, lock):
m = len(key)
n = len(lock)
mp = [[0]*(n+2*(m-1)) for _ in range(n+2*(m-1))]
for i in range(m-1, m-1+n):
for j in range(m-1, m-1+n):
mp[i][j] = lock[i-m+1][j-m+1]
rotated_keys = [key]
for i in range(3):
new_key = rotate(key)
rotated_keys.append((new_key))
key = new_key
for key in rotated_keys:
result = process(key, mp, m, n)
if result:
return True
return False
print(solution([[0, 0, 0], [1, 1, 0], [1, 1, 1]],
[[1, 1, 1], [1, 1, 0], [1, 0, 1]]))
| [
"[email protected]"
] | |
8af58098a16f7c4b58e3049f703029d17be1afe1 | c475cd8531a94ffae69cc92371d41531dbbddb6c | /Projects/bullet3-2.89/examples/pybullet/gym/pybullet_data/laikago/laikago.py | 149c67714cd0441b6c31e290b2f2dca76dd6b712 | [
"Apache-2.0",
"LicenseRef-scancode-free-unknown",
"Zlib"
] | permissive | WolfireGames/overgrowth | 72d3dd29cbd7254337265c29f8de3e5c32400114 | 594a2a4f9da0855304ee8cd5335d042f8e954ce1 | refs/heads/main | 2023-08-15T19:36:56.156578 | 2023-05-17T08:17:53 | 2023-05-17T08:20:36 | 467,448,492 | 2,264 | 245 | Apache-2.0 | 2023-05-09T07:29:58 | 2022-03-08T09:38:54 | C++ | UTF-8 | Python | false | false | 3,056 | py | import pybullet as p
import time
p.connect(p.GUI)
plane = p.loadURDF("plane.urdf")
p.setGravity(0,0,-9.8)
p.setTimeStep(1./500)
#p.setDefaultContactERP(0)
#urdfFlags = p.URDF_USE_SELF_COLLISION+p.URDF_USE_SELF_COLLISION_EXCLUDE_ALL_PARENTS
urdfFlags = p.URDF_USE_SELF_COLLISION
quadruped = p.loadURDF("laikago_toes.urdf",[0,0,.5],[0,0.5,0.5,0], flags = urdfFlags,useFixedBase=False)
#enable collision between lower legs
for j in range (p.getNumJoints(quadruped)):
print(p.getJointInfo(quadruped,j))
#2,5,8 and 11 are the lower legs
lower_legs = [2,5,8,11]
for l0 in lower_legs:
for l1 in lower_legs:
if (l1>l0):
enableCollision = 1
print("collision for pair",l0,l1, p.getJointInfo(quadruped,l0)[12],p.getJointInfo(quadruped,l1)[12], "enabled=",enableCollision)
            p.setCollisionFilterPair(quadruped, quadruped, l0, l1, enableCollision)  # fixed: use the pair being iterated, not the literals 2,5
jointIds=[]
paramIds=[]
jointOffsets=[]
jointDirections=[-1,1,1,1,1,1,-1,1,1,1,1,1]
jointAngles=[0,0,0,0,0,0,0,0,0,0,0,0]
for i in range (4):
jointOffsets.append(0)
jointOffsets.append(-0.7)
jointOffsets.append(0.7)
maxForceId = p.addUserDebugParameter("maxForce",0,100,20)
for j in range (p.getNumJoints(quadruped)):
p.changeDynamics(quadruped,j,linearDamping=0, angularDamping=0)
info = p.getJointInfo(quadruped,j)
#print(info)
jointName = info[1]
jointType = info[2]
if (jointType==p.JOINT_PRISMATIC or jointType==p.JOINT_REVOLUTE):
jointIds.append(j)
p.getCameraImage(480,320)
p.setRealTimeSimulation(0)
joints=[]
with open("data1.txt","r") as filestream:
for line in filestream:
maxForce = p.readUserDebugParameter(maxForceId)
currentline = line.split(",")
frame = currentline[0]
t = currentline[1]
joints=currentline[2:14]
for j in range (12):
targetPos = float(joints[j])
p.setJointMotorControl2(quadruped,jointIds[j],p.POSITION_CONTROL,jointDirections[j]*targetPos+jointOffsets[j], force=maxForce)
p.stepSimulation()
for lower_leg in lower_legs:
#print("points for ", quadruped, " link: ", lower_leg)
pts = p.getContactPoints(quadruped,-1, lower_leg)
#print("num points=",len(pts))
#for pt in pts:
# print(pt[9])
time.sleep(1./500.)
index = 0
for j in range (p.getNumJoints(quadruped)):
p.changeDynamics(quadruped,j,linearDamping=0, angularDamping=0)
info = p.getJointInfo(quadruped,j)
js = p.getJointState(quadruped,j)
#print(info)
jointName = info[1]
jointType = info[2]
if (jointType==p.JOINT_PRISMATIC or jointType==p.JOINT_REVOLUTE):
paramIds.append(p.addUserDebugParameter(jointName.decode("utf-8"),-4,4,(js[0]-jointOffsets[index])/jointDirections[index]))
index=index+1
p.setRealTimeSimulation(1)
while (1):
for i in range(len(paramIds)):
c = paramIds[i]
targetPos = p.readUserDebugParameter(c)
maxForce = p.readUserDebugParameter(maxForceId)
p.setJointMotorControl2(quadruped,jointIds[i],p.POSITION_CONTROL,jointDirections[i]*targetPos+jointOffsets[i], force=maxForce)
| [
"[email protected]"
] | |
2bb89f735b7789b3fe3c0e3995cc2bbf484329da | 986a8c617725cb707dd21c5bd1487dd9d46adaa0 | /mutation/example_2.py | a9d60729aa1d8151f26eee7697bd93132e37abce | [] | no_license | tohfaakib/python_playground | 6ee497cc7011be798e68f74ce331c24bd7a4edab | 13f2b2e179e2c4f4198ac99965c53e1ddcf436e0 | refs/heads/master | 2020-09-16T09:46:24.231034 | 2019-12-24T05:06:24 | 2019-12-24T05:06:24 | 223,732,363 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 392 | py | def add_to(num, target=[]):
target.append(num)
return target
print(add_to(1))
# output: [1]
print(add_to(2))
# output: [1, 2]
print(add_to(3))
# output: [1, 2, 3]
def add_to_2(num, target=None):
if target is None:
target = []
target.append(num)
return target
print(add_to_2(1))
# output: [1]
print(add_to_2(2))
# output: [2]
print(add_to_2(3))
# output: [3]
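
# Why (added note): the default list in add_to is created once, at function
# definition time, so every call without `target` mutates that same list.
# add_to_2 avoids this with the `target=None` sentinel idiom shown above.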
1aa3293f0f6dade194eb63b0104f6ef1b168dc27 | 654f400751dfb180a937e0f18f6b722119a5b4f1 | /tests/unitario/zend_django/parametros/test_parametro_models.py | 8756743f0a08311b89c4a2e539506ed56101ebbb | [] | no_license | imagilex/tereapps | fae8bcb18ad4276f09a6ef6887d0c685c7a5522a | 51d4da8dab1d184cb7dcfe144ac8d2405a179028 | refs/heads/main | 2023-07-04T12:19:32.943411 | 2021-08-10T19:41:00 | 2021-08-10T19:41:00 | 343,847,920 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,310 | py | import pytest
from django.contrib.auth.models import User
from django.test import TestCase
from zend_django.models import ParametroSistema
from zend_django.models import ParametroUsuario
from zend_django.models import ParametroUsuarioValor
from zend_django.parametros_models import PARAM_TYPES
from zend_django.parametros_models import PARAM_TYPES_Tuples
from zend_django.parametros_models import get_param_type_to_show
pytestmark = pytest.mark.django_db
class TestParametrosModelsFunctions(TestCase):
def test_get_param_type_to_show(self):
for tipo in PARAM_TYPES_Tuples:
self.assertEqual(get_param_type_to_show(tipo[0]), tipo[1])
class TestParametroSistemaModel(TestCase):
def setUp(self):
self.objs = [
ParametroSistema.objects.get_or_create(
seccion='seccion',
nombre='nombre',
nombre_para_mostrar='mostrar como',
tipo=PARAM_TYPES['CADENA'],
)[0],
ParametroSistema.objects.get_or_create(
seccion='seccion',
nombre='nombre_2',
nombre_para_mostrar='mostrar como',
valor='valor_default',
tipo=PARAM_TYPES['IMAGEN'],
)[0]
]
def test_to_string(self):
self.assertEqual(
f"{self.objs[0]}",
self.objs[0].nombre_para_mostrar)
self.assertEqual(
f"{self.objs[1]}",
f"{self.objs[1].nombre_para_mostrar}: {self.objs[1].valor}")
def test_tipo_txt(self):
for obj in self.objs:
self.assertEqual(obj.tipo_txt, get_param_type_to_show(obj.tipo))
def test_get(self):
for obj in self.objs:
self.assertEqual(
obj.valor, ParametroSistema.get(obj.seccion, obj.nombre))
self.assertIn("no encontrado", ParametroSistema.get(
self.objs[0].seccion, "inexistente"))
self.assertIn("no encontrado", ParametroSistema.get(
"inexistente", self.objs[0].nombre))
self.assertIn("no encontrado", ParametroSistema.get(
"inexistente", "inexistente"))
class TestParametroUsuario(TestCase):
def setUp(self):
self.objs = [
ParametroUsuario.objects.get_or_create(
seccion='seccion',
nombre='nombre',
tipo=PARAM_TYPES['CADENA'],
)[0],
ParametroUsuario.objects.get_or_create(
seccion='seccion',
nombre='nombre_2',
valor_default='valor_default',
tipo=PARAM_TYPES['IMAGEN'],
)[0]
]
self.usrs = [
User.objects.get_or_create(username="testuser")[0],
User.objects.get_or_create(username="testuser")[1],
]
self.values = [
ParametroUsuarioValor.objects.get_or_create(
user=self.usrs[0],
parametro=self.objs[0],
valor="Valor"
)[0],
]
def test_to_string(self):
self.assertEqual(
f"{self.objs[0]}",
self.objs[0].nombre)
self.assertEqual(
f"{self.objs[1]}",
f"{self.objs[1].nombre}: {self.objs[1].valor_default}")
def test_tipo_txt(self):
for obj in self.objs:
self.assertEqual(obj.tipo_txt, get_param_type_to_show(obj.tipo))
def test_get_default(self):
for obj in self.objs:
self.assertEqual(
obj.valor_default,
ParametroUsuario.get_default(obj.seccion, obj.nombre))
self.assertRaises(
ParametroUsuario.DoesNotExist, ParametroUsuario.get_default,
self.objs[0].seccion, "inexistente")
self.assertRaises(
ParametroUsuario.DoesNotExist, ParametroUsuario.get_default,
"inexistente", self.objs[0].nombre)
self.assertRaises(
ParametroUsuario.DoesNotExist, ParametroUsuario.get_default,
"inexistente", "inexistente")
def test_get_value(self):
self.assertEqual(ParametroUsuario.get_valor(
self.usrs[0], "seccion", "nombre"), "Valor")
self.assertEqual(ParametroUsuario.get_valor(
self.usrs[0], "seccion", "nombre_2"), "valor_default")
self.assertEqual("", ParametroUsuario.get_valor(
self.usrs[0], "inexistente", "nombre"))
self.assertEqual("", ParametroUsuario.get_valor(
self.usrs[0], "seccion", "inexistente"))
self.assertEqual("", ParametroUsuario.get_valor(
self.usrs[0], "inexistente", "inexistente"))
def test_set_valor(self):
cnt1 = len(ParametroUsuarioValor.objects.all())
self.assertTrue(ParametroUsuario.set_valor(
self.usrs[0], "seccion", "nombre", "Valor"))
self.assertTrue(ParametroUsuario.set_valor(
self.usrs[0], "seccion", "nombre_2", "Valor"))
self.assertFalse(ParametroUsuario.set_valor(
self.usrs[0], "inexistente", "nombre", "Valor"))
self.assertFalse(ParametroUsuario.set_valor(
self.usrs[0], "seccion", "inexistente", "Valor"))
self.assertFalse(ParametroUsuario.set_valor(
self.usrs[0], "inexistente", "inexistente", "Valor"))
cnt2 = len(ParametroUsuarioValor.objects.all())
        self.assertGreaterEqual(cnt2, cnt1)  # fixed: compare against the earlier count, not itself
class TestParametroUsuarioValor(TestCase):
def setUp(self):
self.usrs = [
User.objects.get_or_create(username="testuser")[0],
User.objects.get_or_create(username="testuser2")[0]
]
self.param = ParametroUsuario.objects.get_or_create(
seccion='seccion',
nombre='nombre',
tipo=PARAM_TYPES['CADENA'],
)[0]
self.objs = [
ParametroUsuarioValor.objects.get_or_create(
user=self.usrs[0],
parametro=self.param,
valor="Valor"
)[0],
ParametroUsuarioValor.objects.get_or_create(
user=self.usrs[1],
parametro=self.param,
)[0],
]
def test_to_string(self):
self.assertEqual("Valor", f'{self.objs[0]}')
self.assertEqual("", f'{self.objs[1]}')
| [
"[email protected]"
] | |
931b28badc8e29c492e27998419342b19c1db54c | 0e1e643e864bcb96cf06f14f4cb559b034e114d0 | /Exps_7_v3/doc3d/I_w_M_to_W_focus_Zok_div/ch032/wiColorJ/Sob_k05_s001_EroM/pyr_Tcrop255_p60_j15/pyr_1s/L5/step09_1side_L5.py | 7219559e641a143505af65d8a2b9b3627b574ddb | [] | no_license | KongBOy/kong_model2 | 33a94a9d2be5b0f28f9d479b3744e1d0e0ebd307 | 1af20b168ffccf0d5293a393a40a9fa9519410b2 | refs/heads/master | 2022-10-14T03:09:22.543998 | 2022-10-06T11:33:42 | 2022-10-06T11:33:42 | 242,080,692 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,182 | py | #############################################################################################################################################################################################################
#############################################################################################################################################################################################################
### 把 kong_model2 加入 sys.path
import os
from tkinter import S
code_exe_path = os.path.realpath(__file__) ### 目前執行 step10_b.py 的 path
code_exe_path_element = code_exe_path.split("\\") ### 把 path 切分 等等 要找出 kong_model 在第幾層
kong_layer = code_exe_path_element.index("kong_model2") ### 找出 kong_model2 在第幾層
kong_model2_dir = "\\".join(code_exe_path_element[:kong_layer + 1]) ### 定位出 kong_model2 的 dir
import sys ### 把 kong_model2 加入 sys.path
sys.path.append(kong_model2_dir)
# print(__file__.split("\\")[-1])
# print(" code_exe_path:", code_exe_path)
# print(" code_exe_path_element:", code_exe_path_element)
# print(" kong_layer:", kong_layer)
# print(" kong_model2_dir:", kong_model2_dir)
#############################################################################################################################################################################################################
from step08_b_use_G_generate_I_w_M_to_Wx_Wy_Wz_combine import I_w_M_to_W
from step08_b_use_G_generate_0_util import Tight_crop, Color_jit
from step09_c_train_step import Train_step_I_w_M_to_W
from step09_d_KModel_builder_combine_step789 import KModel_builder, MODEL_NAME
color_jit = Color_jit(do_ratio=0.6)
use_what_gen_op = I_w_M_to_W( separate_out=True, focus=True, tight_crop=Tight_crop(pad_size=60, resize=(255, 255), jit_scale= 0) )
use_what_train_step = Train_step_I_w_M_to_W( separate_out=True, focus=True, tight_crop=Tight_crop(pad_size=60, resize=(255, 255), jit_scale= 15), color_jit=color_jit )
use_hid_ch = 32
import time
start_time = time.time()
###############################################################################################################################################################################################
###############################################################################################################################################################################################
########################################################### Block1
### Block1
#########################################################################################
pyramid_1side_1 = [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]
pyramid_1side_2 = [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
pyramid_1side_3 = [1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1]
pyramid_1side_4 = [1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1]
pyramid_1side_5 = [1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1]
pyramid_1side_6 = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
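# Editor's note (added for clarity, not in the original): each pyramid_1side_N list
# assigns a conv-block count to each of the 11 U-Net levels, filling in symmetrically
# from both ends. The same tables could be generated programmatically, e.g.:
#     pyramid_1side_n = [1 if i < n or i > 10 - n else 0 for i in range(11)]  # hypothetical helper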
#########################################################################################
ch032_pyramid_1side_1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch=use_hid_ch, depth_level=5, out_ch=1, d_amount=3, bottle_divide=True, unet_acti="sigmoid", conv_block_num=pyramid_1side_1, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch=use_hid_ch, depth_level=5, out_ch=1, d_amount=3, bottle_divide=True, unet_acti="sigmoid", conv_block_num=pyramid_1side_2, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch=use_hid_ch, depth_level=5, out_ch=1, d_amount=3, bottle_divide=True, unet_acti="sigmoid", conv_block_num=pyramid_1side_3, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch=use_hid_ch, depth_level=5, out_ch=1, d_amount=3, bottle_divide=True, unet_acti="sigmoid", conv_block_num=pyramid_1side_4, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch=use_hid_ch, depth_level=5, out_ch=1, d_amount=3, bottle_divide=True, unet_acti="sigmoid", conv_block_num=pyramid_1side_5, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch=use_hid_ch, depth_level=5, out_ch=1, d_amount=3, bottle_divide=True, unet_acti="sigmoid", conv_block_num=pyramid_1side_6, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
#########################################################################################
###############################################################################################################################################################################################
if(__name__ == "__main__"):
import numpy as np
print("build_model cost time:", time.time() - start_time)
data = np.zeros(shape=(1, 512, 512, 1))
use_model = ch032_pyramid_1side_4
use_model = use_model.build()
result = use_model.generator(data)
print(result.shape)
from kong_util.tf_model_util import Show_model_weights
Show_model_weights(use_model.generator)
use_model.generator.summary()
print(use_model.model_describe)
f8ea905f492854fd8ecc472ff5ac65bb0b66c53f | 2aa5d0ae8f74ebb0026c715f8c871388bed4427c | /nurse/tests.py | 71daa82205d698b13836a4c9030dcb85e07285e1 | [] | no_license | surajit003/valentis | 1614c46b77c5827ea187b47284f49d46584d7291 | 53fd97bd17f36ce14b2be28bb08d5b65abde8d82 | refs/heads/master | 2022-11-26T23:18:41.982907 | 2020-08-09T10:16:39 | 2020-08-09T10:16:39 | 285,679,287 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 3,319 | py | import unittest
from django.urls import reverse
from django.test import Client
from .models import Nurse
from django.contrib.auth.models import User
from django.contrib.auth.models import Group
from django.contrib.contenttypes.models import ContentType
def create_django_contrib_auth_models_user(**kwargs):
defaults = {}
defaults["username"] = "username"
defaults["email"] = "[email protected]"
defaults.update(**kwargs)
return User.objects.create(**defaults)
def create_django_contrib_auth_models_group(**kwargs):
defaults = {}
defaults["name"] = "group"
defaults.update(**kwargs)
return Group.objects.create(**defaults)
def create_django_contrib_contenttypes_models_contenttype(**kwargs):
defaults = {}
defaults.update(**kwargs)
return ContentType.objects.create(**defaults)
def create_models(**kwargs):
defaults = {}
defaults["systolic"] = "systolic"
defaults["diastolic"] = "diastolic"
defaults["temperature"] = "temperature"
defaults["oxygen_saturation"] = "oxygen_saturation"
defaults["urinalysis"] = "urinalysis"
defaults["heart_rate"] = "heart_rate"
defaults["others"] = "others"
defaults["attending_nurse"] = "attending_nurse"
defaults["patient_no"] = "patient_no"
defaults["first_name"] = "first_name"
defaults["last_name"] = "last_name"
defaults["middle_name"] = "middle_name"
defaults.update(**kwargs)
return Nurse.objects.create(**defaults)
class modelsViewTest(unittest.TestCase):
'''
Tests for Nurse
'''
def setUp(self):
self.client = Client()
def test_list_models(self):
url = reverse('nurse_models_list')
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_create_models(self):
url = reverse('nurse_models_create')
data = {
"systolic": "systolic",
"diastolic": "diastolic",
"temperature": "temperature",
"oxygen_saturation": "oxygen_saturation",
"urinalysis": "urinalysis",
"heart_rate": "heart_rate",
"others": "others",
"attending_nurse": "attending_nurse",
"patient_no": "patient_no",
}
response = self.client.post(url, data=data)
self.assertEqual(response.status_code, 302)
def test_detail_models(self):
models = create_models()
url = reverse('nurse_models_detail', args=[models.slug,])
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_update_models(self):
models = create_models()
data = {
"systolic": "systolic",
"diastolic": "diastolic",
"temperature": "temperature",
"oxygen_saturation": "oxygen_saturation",
"urinalysis": "urinalysis",
"heart_rate": "heart_rate",
"others": "others",
"attending_nurse": "attending_nurse",
"patient_no": "patient_no",
}
url = reverse('nurse_models_update', args=[models.slug,])
response = self.client.post(url, data)
self.assertEqual(response.status_code, 302)
b113374c074f9ff48b5efb71f0783554bc365763 | 3dd23bd87205ccb425709f3ec9dea6341425526d | /examples/texture_font.py | 9b5c4485c97860269b20fc0031f5afa8e653cd47 | [
"BSD-3-Clause"
] | permissive | titusz/freetype-py | ff05faef474534c24f20eaf0f53c9ce091c8eef0 | 72ac4c442251bc2b0d5617c5703eb48d3348e804 | refs/heads/master | 2021-01-18T08:26:04.215075 | 2012-05-21T14:59:29 | 2012-05-21T14:59:29 | 2,360,409 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,752 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
#
# FreeType high-level python API - Copyright 2011 Nicolas P. Rougier
# Distributed under the terms of the new BSD license.
#
# -----------------------------------------------------------------------------
'''
Texture font class
'''
import sys
import math
import numpy as np
import OpenGL.GL as gl
from freetype import *
class TextureAtlas:
'''
Group multiple small data regions into a larger texture.
The algorithm is based on the article by Jukka Jylänki : "A Thousand Ways
to Pack the Bin - A Practical Approach to Two-Dimensional Rectangle Bin
Packing", February 27, 2010. More precisely, this is an implementation of
the Skyline Bottom-Left algorithm based on C++ sources provided by Jukka
Jylänki at: http://clb.demon.fi/files/RectangleBinPack/
Example usage:
--------------
atlas = TextureAtlas(512,512,3)
region = atlas.get_region(20,20)
...
atlas.set_region(region, data)
'''
def __init__(self, width=1024, height=1024, depth=1):
'''
Initialize a new atlas of given size.
Parameters
----------
width : int
Width of the underlying texture
height : int
Height of the underlying texture
depth : 1 or 3
Depth of the underlying texture
'''
self.width = int(math.pow(2, int(math.log(width, 2) + 0.5)))
self.height = int(math.pow(2, int(math.log(height, 2) + 0.5)))
self.depth = depth
self.nodes = [ (0,0,self.width), ]
self.data = np.zeros((self.height, self.width, self.depth),
dtype=np.ubyte)
self.texid = 0
self.used = 0
def upload(self):
'''
Upload atlas data into video memory.
'''
if not self.texid:
self.texid = gl.glGenTextures(1)
gl.glBindTexture( gl.GL_TEXTURE_2D, self.texid )
gl.glTexParameteri( gl.GL_TEXTURE_2D,
gl.GL_TEXTURE_WRAP_S, gl.GL_CLAMP )
gl.glTexParameteri( gl.GL_TEXTURE_2D,
gl.GL_TEXTURE_WRAP_T, gl.GL_CLAMP )
gl.glTexParameteri( gl.GL_TEXTURE_2D,
gl.GL_TEXTURE_MAG_FILTER, gl.GL_LINEAR )
gl.glTexParameteri( gl.GL_TEXTURE_2D,
gl.GL_TEXTURE_MIN_FILTER, gl.GL_LINEAR )
if self.depth == 1:
gl.glTexImage2D( gl.GL_TEXTURE_2D, 0, gl.GL_ALPHA,
self.width, self.height, 0,
gl.GL_ALPHA, gl.GL_UNSIGNED_BYTE, self.data )
else:
gl.glTexImage2D( gl.GL_TEXTURE_2D, 0, gl.GL_RGB,
self.width, self.height, 0,
gl.GL_RGB, gl.GL_UNSIGNED_BYTE, self.data )
def set_region(self, region, data):
'''
Set a given region width provided data.
Parameters
----------
region : (int,int,int,int)
an allocated region (x,y,width,height)
data : numpy array
data to be copied into given region
'''
x, y, width, height = region
self.data[y:y+height,x:x+width, :] = data
def get_region(self, width, height):
'''
Get a free region of given size and allocate it
Parameters
----------
width : int
Width of region to allocate
height : int
Height of region to allocate
Return
------
A newly allocated region as (x,y,width,height) or (-1,-1,0,0)
'''
best_height = sys.maxint
best_index = -1
best_width = sys.maxint
region = 0, 0, width, height
for i in range(len(self.nodes)):
y = self.fit(i, width, height)
if y >= 0:
node = self.nodes[i]
if (y+height < best_height or
(y+height == best_height and node[2] < best_width)):
best_height = y+height
best_index = i
best_width = node[2]
region = node[0], y, width, height
if best_index == -1:
return -1,-1,0,0
node = region[0], region[1]+height, width
self.nodes.insert(best_index, node)
i = best_index+1
while i < len(self.nodes):
node = self.nodes[i]
prev_node = self.nodes[i-1]
if node[0] < prev_node[0]+prev_node[2]:
shrink = prev_node[0]+prev_node[2] - node[0]
x,y,w = self.nodes[i]
self.nodes[i] = x+shrink, y, w-shrink
if self.nodes[i][2] <= 0:
del self.nodes[i]
i -= 1
else:
break
else:
break
i += 1
self.merge()
self.used += width*height
return region
def fit(self, index, width, height):
'''
        Test if a region of size (width, height) fits into self.nodes[index]
        Parameters
        ----------
        index : int
            Index of the internal node to be tested
        width : int
            Width of the region to be tested
        height : int
            Height of the region to be tested
'''
node = self.nodes[index]
x,y = node[0], node[1]
width_left = width
if x+width > self.width:
return -1
i = index
while width_left > 0:
node = self.nodes[i]
y = max(y, node[1])
if y+height > self.height:
return -1
width_left -= node[2]
i += 1
return y
def merge(self):
'''
Merge nodes
'''
i = 0
while i < len(self.nodes)-1:
node = self.nodes[i]
next_node = self.nodes[i+1]
if node[1] == next_node[1]:
self.nodes[i] = node[0], node[1], node[2]+next_node[2]
del self.nodes[i+1]
else:
i += 1
class TextureFont:
'''
    A texture font gathers a set of glyphs rendered from a given font filename
    and size.
'''
def __init__(self, atlas, filename, size):
'''
Initialize font
Parameters:
-----------
atlas: TextureAtlas
Texture atlas where glyph texture will be stored
filename: str
Font filename
size : float
Font size
'''
self.atlas = atlas
self.filename = filename
self.size = size
self.glyphs = {}
face = Face( self.filename )
face.set_char_size( int(self.size*64))
self._dirty = False
metrics = face.size
self.ascender = metrics.ascender/64.0
self.descender = metrics.descender/64.0
self.height = metrics.height/64.0
self.linegap = self.height - self.ascender + self.descender
self.depth = atlas.depth
set_lcd_filter(FT_LCD_FILTER_LIGHT)
def __getitem__(self, charcode):
'''
x.__getitem__(y) <==> x[y]
'''
if charcode not in self.glyphs.keys():
self.load('%c' % charcode)
return self.glyphs[charcode]
def get_texid(self):
'''
        Get the underlying texture identity.
'''
if self._dirty:
self.atlas.upload()
self._dirty = False
return self.atlas.texid
texid = property(get_texid,
doc='''Underlying texture identity.''')
def load(self, charcodes = ''):
'''
Build glyphs corresponding to individual characters in charcodes.
Parameters:
-----------
charcodes: [str | unicode]
Set of characters to be represented
'''
face = Face( self.filename )
pen = Vector(0,0)
hres = 16*72
hscale = 1.0/16
face.set_char_size( int(self.size * 64), 0, hres, 72 )
matrix = Matrix( int((hscale) * 0x10000L), int((0.0) * 0x10000L),
int((0.0) * 0x10000L), int((1.0) * 0x10000L) )
for charcode in charcodes:
face.set_transform( matrix, pen )
if charcode in self.glyphs.keys():
continue
            self._dirty = True
flags = FT_LOAD_RENDER | FT_LOAD_FORCE_AUTOHINT
flags |= FT_LOAD_TARGET_LCD
face.load_char( charcode, flags )
bitmap = face.glyph.bitmap
left = face.glyph.bitmap_left
top = face.glyph.bitmap_top
width = face.glyph.bitmap.width
rows = face.glyph.bitmap.rows
pitch = face.glyph.bitmap.pitch
x,y,w,h = self.atlas.get_region(width/self.depth+2, rows+2)
if x < 0:
print 'Missed !'
continue
x,y = x+1, y+1
w,h = w-2, h-2
data = []
for i in range(rows):
data.extend(bitmap.buffer[i*pitch:i*pitch+width])
data = np.array(data,dtype=np.ubyte).reshape(h,w,3)
gamma = 1.5
Z = ((data/255.0)**(gamma))
data = (Z*255).astype(np.ubyte)
self.atlas.set_region((x,y,w,h), data)
# Build glyph
size = w,h
offset = left, top
advance= face.glyph.advance.x, face.glyph.advance.y
u0 = (x + 0.0)/float(self.atlas.width)
v0 = (y + 0.0)/float(self.atlas.height)
u1 = (x + w - 0.0)/float(self.atlas.width)
v1 = (y + h - 0.0)/float(self.atlas.height)
texcoords = (u0,v0,u1,v1)
glyph = TextureGlyph(charcode, size, offset, advance, texcoords)
self.glyphs[charcode] = glyph
# Generate kerning
for g in self.glyphs.values():
kerning = face.get_kerning(g.charcode, charcode, mode=FT_KERNING_UNSCALED)
if kerning.x != 0:
glyph.kerning[g.charcode] = kerning.x
kerning = face.get_kerning(charcode, g.charcode, mode=FT_KERNING_UNSCALED)
if kerning.x != 0:
g.kerning[charcode] = kerning.x
class TextureGlyph:
'''
A texture glyph gathers information relative to the size/offset/advance and
texture coordinates of a single character. It is generally built
automatically by a TextureFont.
'''
def __init__(self, charcode, size, offset, advance, texcoords):
'''
Build a new texture glyph
Parameter:
----------
charcode : char
Represented character
size: tuple of 2 ints
Glyph size in pixels
offset: tuple of 2 floats
Glyph offset relatively to anchor point
advance: tuple of 2 floats
Glyph advance
texcoords: tuple of 4 floats
Texture coordinates of bottom-left and top-right corner
'''
self.charcode = charcode
self.size = size
self.offset = offset
self.advance = advance
self.texcoords = texcoords
self.kerning = {}
def get_kerning(self, charcode):
''' Get kerning information
Parameters:
-----------
charcode: char
Character preceding this glyph
'''
if charcode in self.kerning.keys():
return self.kerning[charcode]
else:
return 0
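# --- Editor's usage sketch (not part of the original module; assumes an OpenGL
# context already exists and that './Vera.ttf' is available) ---
# atlas = TextureAtlas(512, 512, depth=3)
# font = TextureFont(atlas, './Vera.ttf', 24)
# font.load('Hello, World !')
# atlas.upload()      # push the packed glyph bitmaps to the GPU
# glyph = font['H']   # TextureGlyph with size/offset/advance/texcoords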
fc593f18e687cb291d60be67c2b8038adda0ff0a | c3c7398ec14865ea34c7f03aa5e012ddb19f0d5b | /app/models.py | e35cb5a4cf60d86793de1766edc457b72c9f70fa | [] | no_license | mzm5466/blog | 0e022f0ce85a0079cb72ffd9f472c7684f94d9fb | 13625fe7028a0df11a30d7de32751e34d681de00 | refs/heads/master | 2021-01-23T16:51:58.296591 | 2018-11-17T06:05:50 | 2018-11-17T06:05:50 | 102,748,039 | 0 | 0 | null | 2018-11-12T23:28:57 | 2017-09-07T14:36:32 | JavaScript | UTF-8 | Python | false | false | 1,318 | py | #!/usr/bin/python
#-*- coding:utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from DjangoUeditor.models import UEditorField
from django.core.urlresolvers import reverse
KIND_CHOICES=(
('python','python'),
('c++','c++'),
('java','java'),
('javascript','javascript'),
('html','html'),
('css','css'),
('linux','linux'),
)
# Create your models here.
class Moment(models.Model):
title=models.CharField(u'标题',max_length=30,default="请在此输入标题")
shortcontent=models.TextField(u'短梗概',max_length=100,default="请在此输入梗概")
content = UEditorField(u'内容', height=300, width=1000,
default=u'', blank=True, imagePath="",
toolbars='full', filePath='')#models.TextField(u'内容',max_length=3000,default="请在此处输入")
user_name=models.CharField(u'作者',max_length=20,default='匿名')
kind=models.CharField(u'文章类型',max_length=20,choices=KIND_CHOICES,default=KIND_CHOICES[0])
createtime=models.DateTimeField(auto_now=True)
def __unicode__(self): # 在Python3中用 __str__ 代替 __unicode__
return self.title
class Meta:
verbose_name = '博客文章'
verbose_name_plural = '博客文章'
ordering = ['-createtime']
cc47cf7d57fa133086e9fd19950f7804b7feb362 | 2f2d2ceb7eb79e1d441ed278a92ea484dee8501e | /analysis/python/scattering/gb_scatt_fit.py | a4edccd2010cf300912f2ac406c7cc461027e8bc | [] | no_license | rtreharne/SZO | b20943b1afaa10f0e4fc032a1f2955eda4fd54b5 | 2fb71f85cd3c19f46782c528d7357a1ae5dc49e5 | refs/heads/master | 2021-01-10T04:28:13.662690 | 2015-12-17T10:16:28 | 2015-12-17T10:16:28 | 48,166,656 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,927 | py | #! /usr/bin/env python
from numpy import *
from pylab import *
from math import *
from pylab import *
from matplotlib.widgets import Slider
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import matplotlib.mlab as mlab
import tkFileDialog
import numpy as np
import nvsmob
import nelmin
from matplotlib.font_manager import FontProperties
#matplotlib.rc('xtick', labelsize = 15)
#matplotlib.rc('ytick', labelsize = 15, )
class Page:
fig = figure(figsize = (6,5))
#subplots_adjust(bottom=0.35)
global e, m0, e0, hbar, k, T, kT, m
e = 1.602e-19
e0 = 8.85e-12
m0 = 9.11e-31
hbar = 1.055e-34
k = 1.38e-23
T = 300
kT = k*T
m = 0.4*m0
def __init__(self):
self.x = arange(1e19, 1e21, 1e18)
self.ax = subplot(111)
self.xdata, self.ydata, self.greenx, self.greeny= nvsmob.mob()
p = ([50, 1.8, 9.25, 0.54])
p = self.fit(p)
mu_total, mu1, mu2 = self.crunch(p)
self.update(self.x, mu_total, mu1, mu2)
print p
def crunch(self, p):
self.index = 0
mumax = 50
x = self.x
L = p[0]*1e-7
Nt = p[1]*1e14
A = p[2]/e
B = p[3]
mu0 = e*L/sqrt(2*pi*m*kT)
Eb = e**2*Nt**2/(8*8.4*e0*x)
Ef = hbar**2*(3*pi**2*x*1e6)**(2.0/3)/(2*m)
mu1 = mu0*exp(-Eb/kT)
mu2 = (mumax - mu0)/(1+exp(-A*(Ef - (B*e) - Eb)))
mu_total = mu1+mu2
return mu_total, mu1, mu2
def update(self, x, mu_total, mu1, mu2):
sigma = 1e-20
for i in range (0,len(x)):
x[i] = sigma*x[i]
for i in range (0,len(self.xdata)):
self.xdata[i] = sigma*self.xdata[i]
self.greenx[i] = sigma*self.greenx[i]
self.ax.clear()
line3, = self.ax.plot(x, mu_total, linewidth = 3, alpha = 0.75)
data1, = self.ax.plot(self.xdata, self.ydata, 'o', color = 'red', alpha = 0.7, markersize = 8)
data2, = self.ax.plot(self.greenx, self.greeny, '^', color = 'green', alpha = 0.7, markersize = 8)
#self.ax.plot(self.greenx, self.greeny, 'o', color = 'green')
line1, = self.ax.plot(x, mu1, '--', linewidth = 2, color = 'orange')
line2, = self.ax.plot(x, mu2, '-', linewidth = 2, color = 'purple')
line2.set_dashes([8, 4, 2, 4, 2, 4])
#self.ax.set_xscale('log')
self.ax.set_ylim(0, 18)
self.ax.set_ylabel(r'$\mu_e$ (cm$^2$V$^{-1}$s$^{-1}$)', fontsize = 15)
self.ax.set_xlim(0.5, 5)
self.ax.set_xlabel(r'$n_e$ ($\times10^{20}$ cm$^{-3}$)', fontsize = 15)
#self.ax.set_xscale('log')
fontP = FontProperties()
fontP.set_size('large')
leg1 = self.ax.legend((data1,data2,line1, line2,line3), (r'data ($< 0.65\%$ wt. SiO$_{2}$)',r'data ($>0.65\%$ wt. SiO$_{2}$)',r'$\mu_{gb}=\mu_0\exp(-\frac{\phi}{k_BT})$', r'$\mu_t=\frac{\mu_{ii}-\mu_{gb}}{1+\exp[-\alpha(E_f-\beta\phi)]}$',r'$\mu_{eff}=\mu_{gb} + \mu_t$'), 'upper left', prop = fontP,fancybox=False)
leg1.get_frame().set_alpha(0.0)
def func(self, p):
sum = 0
x = self.xdata
y = self.ydata
mumax = 50
L = p[0]*1e-7
Nt = p[1]*1e14
A = p[2]/e
B = p[3]
mu0 = e*L/sqrt(2*pi*m*kT)
Eb, Ef, mu1, mu2, mu_total = [],[],[],[],[]
for i in range (0, len(x)):
Eb.append(e**2*Nt**2/(8*8.4*e0*x[i]))
Ef.append(hbar**2*(3*pi**2*x[i]*1e6)**(2.0/3)/(2*m))
mu1.append(mu0*exp(-Eb[i]/kT))
mu2.append((mumax - mu0)/(1+exp(-A*(Ef[i] - (B*e) - Eb[i]))))
mu_total.append(mu1[i] + mu2[i])
sum += sqrt((mu_total[i]-y[i])**2)
sum = sum/len(x)
return sum
def fit(self, p):
for i in range (0, 5):
result, fx, conv_flag, nfe, res = nelmin.minimize(self.func, p)
p = result
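            # Editor's note: restarting nelmin.minimize from the previous optimum
            # (five passes) is a cheap guard against Nelder-Mead stalling in a
            # shallow local minimum.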
return result
graph = Page()
show()
21f24ea6ddca2a3fb1ffdbb15429ef55979c5e7d | 439f3bbc4b9a84b27052b2d1d5ea166bca2e1498 | /setup.py | dff003eddf7047204a0ae39f94ca37da91cabe76 | [
"Apache-2.0"
] | permissive | zeroyou/AndroidViewClient | dc52c821b11e96decf6066b670165c154101bc81 | 06cefca324d3ab255e4367990a5d1389b0a39d3d | refs/heads/master | 2022-12-25T00:57:29.432351 | 2020-09-20T00:41:47 | 2020-09-20T00:41:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,135 | py | #!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='androidviewclient',
version='20.0.0b5',
description='''AndroidViewClient is a 100% pure python library and tools
that simplifies test script creation providing higher level
operations and the ability of obtaining the tree of Views present at
any given moment on the device or emulator screen.
''',
license='Apache',
keywords='android uiautomator viewclient monkeyrunner test automation',
author='Diego Torres Milano',
author_email='[email protected]',
url='https://github.com/dtmilano/AndroidViewClient/',
packages=find_packages('src'),
package_dir={'':'src'},
package_data={'':['*.png']},
include_package_data=True,
scripts=['tools/culebra', 'tools/dump'],
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License'],
install_requires=['setuptools', 'requests', 'numpy', 'matplotlib', 'culebratester-client >= 2.0.6'],
)
a55eb2498c338eae709672405c5607d332a40235 | c81d7dfef424b088bf2509a1baf406a80384ea5a | /venv/Lib/site-packages/whitenoise/middleware.py | f359321a7b53c64e2d2620204bde7381490358d6 | [] | no_license | Goutham2591/OMK_PART2 | 111210d78fc4845481ed55c852b8f2f938918f4a | cb54fb21ebf472bffc6ee4f634bf1e68303e113d | refs/heads/master | 2022-12-10T01:43:08.213010 | 2018-04-05T02:09:41 | 2018-04-05T02:09:41 | 124,828,094 | 0 | 1 | null | 2022-12-07T23:43:03 | 2018-03-12T03:20:14 | Python | UTF-8 | Python | false | false | 1,545 | py | from __future__ import absolute_import
from django.http import FileResponse
from whitenoise.django import DjangoWhiteNoise
class WhiteNoiseMiddleware(DjangoWhiteNoise):
"""
Wrap DjangoWhiteNoise to allow it to function as Django middleware, rather
than WSGI middleware
This functions as both old- and new-style middleware, so can be included in
either MIDDLEWARE or MIDDLEWARE_CLASSES.
"""
def __init__(self, get_response=None):
self.get_response = get_response
# We pass None for `application`
super(WhiteNoiseMiddleware, self).__init__(None)
def __call__(self, request):
response = self.process_request(request)
if response is None:
response = self.get_response(request)
return response
def process_request(self, request):
if self.autorefresh:
static_file = self.find_file(request.path_info)
else:
static_file = self.files.get(request.path_info)
if static_file is not None:
return self.serve(static_file, request)
def serve(self, static_file, request):
response = static_file.get_response(request.method, request.META)
status = int(response.status)
http_response = FileResponse(response.file or (), status=status)
# Remove default content-type
del http_response['content-type']
for key, value in response.headers:
http_response[key] = value
return http_response
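# Editor's usage sketch (not part of the distribution): enable the middleware in
# Django settings, directly below SecurityMiddleware:
# MIDDLEWARE = [
#     'django.middleware.security.SecurityMiddleware',
#     'whitenoise.middleware.WhiteNoiseMiddleware',
#     # ...
# ]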
34ae4058e8b7b076fbec0cd6c034ebe978798f7f | 1dacbf90eeb384455ab84a8cf63d16e2c9680a90 | /pkgs/tornado-4.4.1-py27_0/lib/python2.7/site-packages/tornado/test/tcpserver_test.py | c01c04ddfb2baf903a76fbf5dfa182c3c2d21172 | [
"Apache-2.0",
"BSD-3-Clause",
"LicenseRef-scancode-unknown"
] | permissive | wangyum/Anaconda | ac7229b21815dd92b0bd1c8b7ec4e85c013b8994 | 2c9002f16bb5c265e0d14f4a2314c86eeaa35cb6 | refs/heads/master | 2022-10-21T15:14:23.464126 | 2022-10-05T12:10:31 | 2022-10-05T12:10:31 | 76,526,728 | 11 | 10 | Apache-2.0 | 2022-10-05T12:10:32 | 2016-12-15T05:26:12 | Python | UTF-8 | Python | false | false | 1,361 | py | from __future__ import absolute_import, division, print_function, with_statement
import socket
from tornado import gen
from tornado.iostream import IOStream
from tornado.log import app_log
from tornado.stack_context import NullContext
from tornado.tcpserver import TCPServer
from tornado.testing import AsyncTestCase, ExpectLog, bind_unused_port, gen_test
class TCPServerTest(AsyncTestCase):
@gen_test
def test_handle_stream_coroutine_logging(self):
# handle_stream may be a coroutine and any exception in its
# Future will be logged.
class TestServer(TCPServer):
@gen.coroutine
def handle_stream(self, stream, address):
yield gen.moment
stream.close()
1 / 0
server = client = None
try:
sock, port = bind_unused_port()
with NullContext():
server = TestServer()
server.add_socket(sock)
client = IOStream(socket.socket())
with ExpectLog(app_log, "Exception in callback"):
yield client.connect(('localhost', port))
yield client.read_until_close()
yield gen.moment
finally:
if server is not None:
server.stop()
if client is not None:
client.close()
048da83cda17a4a360c09c39781995cf2d3af27f | 3691259d4be62b60d8d52f38b36d6a24e5fd4536 | /docs/examples/compute/cloudsigma/create_vlan_subscription.py | a29e2394267225a7fe1141d0ed6edefb84091bdc | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | chenjiang1985/libcloud | f385fac278777c2bbfedaf440d353c9ad9eb5c69 | 587212da626dfe0e2936737108bcc49d666cf4b4 | refs/heads/master | 2021-07-16T14:29:21.821490 | 2019-11-27T02:20:43 | 2019-11-27T02:20:43 | 222,844,781 | 1 | 2 | Apache-2.0 | 2020-10-27T22:06:36 | 2019-11-20T03:41:31 | Python | UTF-8 | Python | false | false | 376 | py | from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
cls = get_driver(Provider.CLOUDSIGMA)
driver = cls('username', 'password', region='zrh', api_version='2.0')
subscription = driver.ex_create_subscription(amount=1, period='30 days',
resource='vlan', auto_renew=True)
print(subscription)
58b1d1422474f6027aa3f69edddb42e44fbb2a52 | ce76b3ef70b885d7c354b6ddb8447d111548e0f1 | /life_and_point/public_number/go_person_with_week/time/small_fact/tell_high_number.py | 8f2dcc2a9607cc6373040ba43be5bb60c8ac08a7 | [] | no_license | JingkaiTang/github-play | 9bdca4115eee94a7b5e4ae9d3d6052514729ff21 | 51b550425a91a97480714fe9bc63cb5112f6f729 | refs/heads/master | 2021-01-20T20:18:21.249162 | 2016-08-19T07:20:12 | 2016-08-19T07:20:12 | 60,834,519 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 191 | py |
#! /usr/bin/env python
def thing(str_arg):
way(str_arg)
print('seem_great_time')
def way(str_arg):
print(str_arg)
if __name__ == '__main__':
thing('be_work_to_next_time')
f83fa49d5475aa107fb54496baa054affa656ec8 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/adverbs/_truest.py | f2ef775949b9144ce37ca8397bf6342c1da23e39 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 230 | py |
from xai.brain.wordbase.adverbs._true import _TRUE
#calss header
class _TRUEST(_TRUE, ):
def __init__(self,):
_TRUE.__init__(self)
self.name = "TRUEST"
self.specie = 'adverbs'
self.basic = "true"
self.jsondata = {}
f958909a7a6b280c944a1b5dcfc27981d588a125 | 4f01328f202107399b5676c82be9d8fc246a7cf9 | /torch2trt_dynamic/converters/new_ones.py | b45d45eeac35741c2040f6444c7c52a79a7cbd40 | [
"MIT"
] | permissive | JasonDu1993/torch2trt_dynamic | cccc6570ba68399e902346c06f362f80d38b6239 | 24bbad2ea4977bb88ce2ade433058becc6980c82 | refs/heads/master | 2022-12-29T17:10:03.714226 | 2020-10-11T07:24:37 | 2020-10-11T07:24:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,706 | py | from torch2trt_dynamic.torch2trt_dynamic import *
@tensorrt_converter('torch.Tensor.new_ones')
def convert_new_ones(ctx):
input = ctx.method_args[0]
size = get_arg(ctx, 'size', pos=1, default=None)
dtype = get_arg(ctx, 'dtype', pos=2, default=input.dtype)
output = ctx.method_return
if isinstance(size, int):
size = (size, )
# check const
is_const = True
for s in size:
if hasattr(s,'_trt'):
is_const = False
break
if is_const:
# create const value
output_trt = trt_(ctx.network, output)
else:
# create fill
trt_size = []
for s in size:
if hasattr(s, '_trt'):
trt_size.append(s._trt)
else:
trt_size.append(trt_(ctx.network, s))
trt_size = ctx.network.add_concatenation(trt_size).get_output(0)
layer = ctx.network.add_fill(size, trt.FillOperation.RANDOM_UNIFORM)
layer.set_input(0, trt_size)
layer.set_input(1, trt_(ctx.network, input.new_tensor(1)))
layer.set_input(2, trt_(ctx.network, input.new_tensor(1)))
output_trt = layer.get_output(0)
data_type = None
if dtype==torch.float32:
data_type = trt.DataType.FLOAT
elif dtype==torch.int32 or dtype==torch.long:
data_type = trt.DataType.INT32
elif dtype==torch.bool:
data_type = trt.DataType.BOOL
else:
print("unsupported convert type:{}".format(dtype))
if data_type is not None:
layer = ctx.network.add_identity(output_trt)
layer.set_output_type(0, data_type)
output_trt = layer.get_output(0)
output._trt = output_trt
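# Editor's note: with both alpha and beta inputs set to 1, the RANDOM_UNIFORM fill
# degenerates to a constant fill of ones, which is exactly what new_ones() needs for
# dynamic shapes. A module that would exercise this converter (hypothetical):
# class NewOnes(torch.nn.Module):
#     def forward(self, x):
#         return x.new_ones((x.shape[0], 4), dtype=torch.float32)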
df5fdd8502bafee22cd6422aadd62f41b6e93175 | ade047677ca695a8d27ff50645f1afe8cd1463df | /Entity.py | 74b0d2ab5a75cdc154d488c7aee0e35e161aff20 | [] | no_license | xuzhuo77/WorkSpace-FrameWork | 677f02eead801fb48e60d0411ea4dc5aa516af57 | 109c691c270020ef20f36f625c31166c49386351 | refs/heads/master | 2023-03-06T13:02:28.528231 | 2021-02-21T23:01:08 | 2021-02-21T23:01:08 | 337,577,954 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 947 | py | from sqlalchemy import Column, Integer, String,DateTime
from sqlalchemy.ext.declarative import declarative_base
import datetime
from Utils.UniqueIdUtil import gen_guid
base = declarative_base()
class Entity(base):
__abstract__ = True
# __tablename__ = "Entity"
id = Column(Integer, primary_key=True,autoincrement=True)
version = Column(Integer, nullable=False)
update_time=Column(DateTime, default=datetime.datetime.now, comment='更新时间')
# guid =Column(String(64), default=gen_guid(), primary_key=True)
# delete_flag=Column(Integer)
# creator=Column(String(64))
create_date=Column(DateTime, default=datetime.datetime.now, comment='创建时间')
# def __new__(cls, *args, **kwargs):
# print(kwargs)
# def __init__(self,*args,**kwargs):
# for i,k in kwargs:
# def __dict__(self):
# return str({c.name: getattr(self, c.name, None) for c in self.__table__.columns})
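# Editor's sketch (illustrative, not in the original): concrete models inherit the
# shared id/version/timestamp columns by subclassing Entity, e.g.:
# class User(Entity):
#     __tablename__ = 'user'
#     name = Column(String(64))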
b4328f61f077d87793f58c3c84eb3f34d5b7bf26 | 607dc8df19fc5248f6289cdda97857b5d58ca16f | /smac/model/gaussian_process/kernels/rbf_kernel.py | 5bf20765886b3b5320be8d5b80fa8810a4ef6c68 | [
"BSD-3-Clause",
"LicenseRef-scancode-philippe-de-muyter",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | automl/SMAC3 | 7dce243a33023c52d6819deff966f7b502e90ed0 | 541ee7e0383b491b86d1a23dcff669f2efad616d | refs/heads/main | 2023-08-31T17:36:06.067579 | 2023-08-01T13:02:51 | 2023-08-01T13:02:51 | 65,900,469 | 943 | 259 | NOASSERTION | 2023-09-11T02:36:57 | 2016-08-17T10:58:05 | Python | UTF-8 | Python | false | false | 2,738 | py | from __future__ import annotations
import numpy as np
import scipy.optimize
import scipy.spatial.distance
import scipy.special
import sklearn.gaussian_process.kernels as kernels
from smac.model.gaussian_process.kernels.base_kernels import AbstractKernel
from smac.model.gaussian_process.priors.abstract_prior import AbstractPrior
__copyright__ = "Copyright 2022, automl.org"
__license__ = "3-clause BSD"
class RBFKernel(AbstractKernel, kernels.RBF):
"""RBF kernel implementation."""
def __init__(
self,
length_scale: float | tuple[float, ...] | np.ndarray = 1.0,
length_scale_bounds: tuple[float, float] | list[tuple[float, float]] | np.ndarray = (1e-5, 1e5),
operate_on: np.ndarray | None = None,
has_conditions: bool = False,
prior: AbstractPrior | None = None,
) -> None:
super().__init__(
operate_on=operate_on,
has_conditions=has_conditions,
prior=prior,
length_scale=length_scale,
length_scale_bounds=length_scale_bounds,
)
def _call(
self,
X: np.ndarray,
Y: np.ndarray | None = None,
eval_gradient: bool = False,
active: np.ndarray | None = None,
) -> np.ndarray | tuple[np.ndarray, np.ndarray]:
X = np.atleast_2d(X)
length_scale = kernels._check_length_scale(X, self.length_scale)
if Y is None:
dists = scipy.spatial.distance.pdist(X / length_scale, metric="sqeuclidean")
K = np.exp(-0.5 * dists)
# convert from upper-triangular matrix to square matrix
K = scipy.spatial.distance.squareform(K)
np.fill_diagonal(K, 1)
else:
if eval_gradient:
raise ValueError("Gradient can only be evaluated when Y is None.")
dists = scipy.spatial.distance.cdist(X / length_scale, Y / length_scale, metric="sqeuclidean")
K = np.exp(-0.5 * dists)
if active is not None:
K = K * active
if eval_gradient:
if self.hyperparameter_length_scale.fixed:
# Hyperparameter l kept fixed
return K, np.empty((X.shape[0], X.shape[0], 0))
elif not self.anisotropic or length_scale.shape[0] == 1:
K_gradient = (K * scipy.spatial.distance.squareform(dists))[:, :, np.newaxis]
return K, K_gradient
elif self.anisotropic:
# We need to recompute the pairwise dimension-wise distances
K_gradient = (X[:, np.newaxis, :] - X[np.newaxis, :, :]) ** 2 / (length_scale**2)
K_gradient *= K[..., np.newaxis]
return K, K_gradient
return K
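# Editor's usage sketch (values are hypothetical; the call signature follows the
# sklearn kernel API inherited through kernels.RBF / AbstractKernel):
# kernel = RBFKernel(length_scale=1.0)
# X = np.random.rand(5, 3)
# K = kernel(X)                              # 5x5 Gram matrix
# K, K_grad = kernel(X, eval_gradient=True)  # plus gradient w.r.t. the length-scale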
089cc627b0ee56e98b1c995bece77eec11b6c657 | 76e931912629c37beedf7c9b112b53e7de5babd7 | /1-mouth01/day14/exe01.py | e46e565f07c85e20b97d5613bbc0a84c048de75f | [
"Apache-2.0"
] | permissive | gary-gggggg/gary | c59ac21d8e065f296ff986d11a0e4cbf186a1bc4 | d8ba30ea4bc2b662a2d6a87d247f813e5680d63e | refs/heads/main | 2023-02-23T06:54:34.500683 | 2021-02-01T10:17:02 | 2021-02-01T10:17:02 | 334,905,744 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 151 | py | from module_exe import data
import module_exe
from module_exe import MyClass
c1 = MyClass()
c1.func02()
c1.func03()
print(data)
module_exe.func01()
87ab68ad4f53266913c451ae4e3913018abb2b9c | 88c1f9ccb62e91d6b0574bcde1043921bdeb0126 | /lib_common/src/d1_common/type_conversions.py | 04d9b2723e4710e9fdf897485b15fcc9c73a449b | [
"Apache-2.0"
] | permissive | jevans97utk/d1_python | 83b8de8780287c655779844f367b9189413da074 | 3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d | refs/heads/master | 2020-05-21T01:16:50.677816 | 2019-04-22T16:09:44 | 2019-04-22T16:09:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,001 | py | #!/usr/bin/env python
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2019 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for handling the DataONE types.
- Handle conversions between XML representations used in the D1 Python stack.
- Handle conversions between v1 and v2 DataONE XML types.
The DataONE Python stack uses the following representations for the DataONE API XML
docs:
- As native Unicode ``str``, typically "pretty printed" with indentations, when
formatted for display.
- As UTF-8 encoded ``bytes`` when sending or receiving over the network, or
loading or saving as files.
- Schema validation and manipulation in Python code as PyXB binding objects.
- General processing as ElementTrees.
In order to allow conversions between all representations without having to implement
separate conversions for each combination of input and output representation, a "hub and
spokes" model is used. Native Unicode str was selected as the "hub" representation due
to:
- PyXB provides translation to/from string and DOM.
- ElementTree provides translation to/from string.
"""
import re
import xml.etree
import xml.etree.ElementTree
import pyxb
import pyxb.namespace.utility
import d1_common.types.dataoneTypes_v1
import d1_common.types.dataoneTypes_v1_1
import d1_common.types.dataoneTypes_v1_2
import d1_common.types.dataoneTypes_v2_0
# Map common namespace prefixes to namespaces
NS_DICT = {
# TODO: 'v1' should map to v1_2.Namespace
'v1': str(d1_common.types.dataoneTypes_v1.Namespace),
'v1_1': str(d1_common.types.dataoneTypes_v1_1.Namespace),
'v1_2': str(d1_common.types.dataoneTypes_v1_2.Namespace),
'v2': str(d1_common.types.dataoneTypes_v2_0.Namespace),
'rdfs': 'http://www.w3.org/2000/01/rdf-schema#',
'ore': 'http://www.openarchives.org/ore/terms/',
'rdf': 'http://www.w3.org/1999/02/22-rdf-syntax-ns#',
'dcterms': 'http://purl.org/dc/terms/',
'cito': 'http://purl.org/spar/cito/',
}
# Map common namespaces to prefixes
NS_REVERSE_DICT = {v: k for k, v in NS_DICT.items()}
BINDING_TO_VERSION_TAG_DICT = {
d1_common.types.dataoneTypes_v1: 'v1',
d1_common.types.dataoneTypes_v1_1: 'v1',
d1_common.types.dataoneTypes_v1_2: 'v1',
d1_common.types.dataoneTypes_v2_0: 'v2',
}
VERSION_TO_BINDING_DICT = {
'v1': d1_common.types.dataoneTypes_v1_2,
'v2': d1_common.types.dataoneTypes_v2_0,
(1, 0): d1_common.types.dataoneTypes_v1,
(1, 1): d1_common.types.dataoneTypes_v1_1,
(1, 2): d1_common.types.dataoneTypes_v1_2,
(2, 0): d1_common.types.dataoneTypes_v2_0,
}
# Register global namespace prefixes for use by ElementTree when serializing.
for prefix_str, uri_str in list(NS_DICT.items()):
xml.etree.ElementTree.register_namespace(prefix_str, uri_str)
def get_version_tag_by_pyxb_binding(pyxb_binding):
"""Map PyXB binding to DataONE API version.
Given a PyXB binding, return the API major version number.
Args:
pyxb_binding: PyXB binding object
Returns:
DataONE API major version number, currently, ``v1``, ``1``, ``v2`` or ``2``.
"""
try:
return BINDING_TO_VERSION_TAG_DICT[pyxb_binding]
except KeyError:
raise ValueError(
'Unknown PyXB binding. pyxb_binding="{}"'.format(repr(pyxb_binding))
)
def get_pyxb_binding_by_api_version(api_major, api_minor=0):
"""Map DataONE API version tag to PyXB binding.
Given a DataONE API major version number, return PyXB binding that can
serialize and deserialize DataONE XML docs of that version.
Args:
api_major, api_minor: str or int
DataONE API major and minor version numbers.
- If ``api_major`` is an integer, it is combined with ``api_minor`` to form an
exact version.
- If ``api_major`` is a string of ``v1`` or ``v2``, ``api_minor`` is ignored
and the latest PyXB bindingavailable for the ``api_major`` version is
returned.
Returns:
PyXB binding: E.g., ``d1_common.types.dataoneTypes_v1_1``.
"""
try:
return VERSION_TO_BINDING_DICT[api_major, api_minor]
except KeyError:
raise ValueError(
'Unknown DataONE API version: {}.{}'.format(api_major, api_minor)
)
def get_version_tag(api_major):
"""Args:
api_major: int DataONE API major version. Valid versions are currently 1 or 2.
Returns: str: DataONE API version tag. Valid version tags are currently ``v1`` or
``v2``.
"""
return 'v{}'.format(api_major)
def extract_version_tag_from_url(url):
"""Extract a DataONE API version tag from a MN or CN service endpoint URL.
Args:
url : str
Service endpoint URL. E.g.: ``https://mn.example.org/path/v2/object/pid``.
Returns:
str : Valid version tags are currently ``v1`` or ``v2``.
"""
    m = re.search(r'(/|^)(v\d)(/|$)', url)
if not m:
return None
return m.group(2)
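    # Editor's examples (assuming the re.search call above):
    #   extract_version_tag_from_url('https://mn.example.org/path/v2/object/pid') -> 'v2'
    #   extract_version_tag_from_url('https://mn.example.org/path/object/pid')    -> None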
def get_pyxb_namespaces():
"""Returns:
list of str: XML namespaces currently known to PyXB
"""
return pyxb.namespace.utility.AvailableNamespaces()
#
# Convert types to v1
#
def str_to_v1_str(xml_str):
"""Convert a API v2 XML doc to v1 XML doc.
Removes elements that are only valid for v2 and changes namespace to v1.
If doc is already v1, it is returned unchanged.
Args:
xml_str : str
API v2 XML doc. E.g.: ``SystemMetadata v2``.
Returns:
str : API v1 XML doc. E.g.: ``SystemMetadata v1``.
"""
if str_is_v1(xml_str):
return xml_str
etree_obj = str_to_etree(xml_str)
strip_v2_elements(etree_obj)
etree_replace_namespace(etree_obj, d1_common.types.dataoneTypes_v1.Namespace)
return etree_to_str(etree_obj)
def pyxb_to_v1_str(pyxb_obj):
"""Convert a API v2 PyXB object to v1 XML doc.
Removes elements that are only valid for v2 and changes namespace to v1.
Args:
pyxb_obj: PyXB object
API v2 PyXB object. E.g.: ``SystemMetadata v2_0``.
Returns:
str : API v1 XML doc. E.g.: ``SystemMetadata v1``.
"""
return str_to_v1_str(pyxb_to_str(pyxb_obj))
def str_to_v1_pyxb(xml_str):
"""Convert a API v2 XML doc to v1 PyXB object.
Removes elements that are only valid for v2 and changes namespace to v1.
Args:
xml_str : str
API v2 XML doc. E.g.: ``SystemMetadata v2``.
Returns:
PyXB object: API v1 PyXB object. E.g.: ``SystemMetadata v1_2``.
"""
    return str_to_pyxb(str_to_v1_str(xml_str))
#
# Convert types to v2
#
def str_to_v2_str(xml_str):
"""Convert a API v1 XML doc to v2 XML doc.
All v1 elements are valid for v2, so only changes namespace.
Args:
xml_str : str
API v1 XML doc. E.g.: ``SystemMetadata v1``.
Returns:
str : API v2 XML doc. E.g.: ``SystemMetadata v2``.
"""
if str_is_v2(xml_str):
return xml_str
etree_obj = str_to_etree(xml_str)
etree_replace_namespace(etree_obj, d1_common.types.dataoneTypes_v2_0.Namespace)
return etree_to_str(etree_obj)
def pyxb_to_v2_str(pyxb_obj):
"""Convert a API v1 PyXB object to v2 XML doc.
All v1 elements are valid for v2, so only changes namespace.
Args:
pyxb_obj: PyXB object
API v1 PyXB object. E.g.: ``SystemMetadata v1_0``.
Returns:
str : API v2 XML doc. E.g.: ``SystemMetadata v2``.
"""
return str_to_v2_str(pyxb_to_str(pyxb_obj))
def str_to_v2_pyxb(xml_str):
"""Convert a API v1 XML doc to v2 PyXB object.
All v1 elements are valid for v2, so only changes namespace.
Args:
xml_str : str
API v1 XML doc. E.g.: ``SystemMetadata v1``.
Returns:
PyXB object: API v2 PyXB object. E.g.: ``SystemMetadata v2_0``.
"""
    return str_to_pyxb(str_to_v2_str(xml_str))
# Type checks
def is_pyxb(pyxb_obj):
"""Returns:
bool: **True** if ``pyxb_obj`` is a PyXB object.
"""
return isinstance(pyxb_obj, pyxb.cscRoot)
def is_pyxb_d1_type(pyxb_obj):
"""Returns:
bool: **True** if ``pyxb_obj`` is a PyXB object holding a DataONE API type.
"""
try:
return pyxb_is_v1(pyxb_obj) or pyxb_is_v2(pyxb_obj)
except AttributeError:
return False
def is_pyxb_d1_type_name(pyxb_obj, expected_pyxb_type_name):
"""
Args:
pyxb_obj : object
May be a PyXB object and may hold a DataONE API type.
expected_pyxb_type_name : str
Case sensitive name of a DataONE type.
E.g.: ``SystemMetadata``, ``LogEntry``, ``ObjectInfo``.
Returns:
bool: **True** if object is a PyXB object holding a value of the specified type.
"""
try:
return pyxb_get_type_name(pyxb_obj) == expected_pyxb_type_name
except AttributeError:
return False
def pyxb_get_type_name(obj_pyxb):
"""Args: obj_pyxb: PyXB object.
Returns:
str: Name of the type the PyXB object is holding.
E.g.: ``SystemMetadata``, ``LogEntry``, ``ObjectInfo``.
"""
return pyxb_get_namespace_name(obj_pyxb).split('}')[-1]
# noinspection PyProtectedMember
def pyxb_get_namespace_name(obj_pyxb):
"""Args: obj_pyxb: PyXB object.
Returns:
str: Namespace and Name of the type the PyXB object is holding.
E.g.: ``{http://ns.dataone.org/service/types/v2.0}SystemMetadata``
"""
return str(obj_pyxb._ExpandedName)
def str_is_v1(xml_str):
"""
Args:
xml_str : str
DataONE API XML doc.
Returns:
bool: **True** if XML doc is a DataONE API v1 type.
"""
return pyxb_is_v1(str_to_pyxb(xml_str))
def str_is_v2(xml_str):
"""
Args:
xml_str : str
DataONE API XML doc.
Returns:
bool: **True** if XML doc is a DataONE API v2 type.
"""
return pyxb_is_v2(str_to_pyxb(xml_str))
def str_is_error(xml_str):
"""
Args:
xml_str : str
DataONE API XML doc.
Returns:
bool: **True** if XML doc is a DataONE Exception type.
"""
return str_to_etree(xml_str).tag == 'error'
def str_is_identifier(xml_str):
"""
Args:
xml_str : str
DataONE API XML doc.
Returns:
bool: **True** if XML doc is a DataONE Identifier type.
"""
return (
str_to_etree(xml_str).tag
== '{http://ns.dataone.org/service/types/v1}identifier'
)
def str_is_objectList(xml_str):
"""
Args:
xml_str : str
DataONE API XML doc.
Returns:
bool: **True** if XML doc is a DataONE ObjectList type.
"""
return (
str_to_etree(xml_str).tag
== '{http://ns.dataone.org/service/types/v1}objectList'
)
def str_is_well_formed(xml_str):
"""
Args:
xml_str : str
DataONE API XML doc.
Returns:
bool: **True** if XML doc is well formed.
"""
try:
str_to_etree(xml_str)
except xml.etree.ElementTree.ParseError:
return False
else:
return True
# noinspection PyProtectedMember
def pyxb_is_v1(pyxb_obj):
"""
Args:
pyxb_obj : PyXB object
PyXB object holding an unknown type.
Returns:
bool: **True** if ``pyxb_obj`` holds an API v1 type.
"""
# TODO: Will not detect v1.2 as v1.
return (
pyxb_obj._element().name().namespace()
== d1_common.types.dataoneTypes_v1.Namespace
)
# noinspection PyProtectedMember
def pyxb_is_v2(pyxb_obj):
"""
Args:
pyxb_obj : PyXB object
PyXB object holding an unknown type.
Returns:
bool: **True** if ``pyxb_obj`` holds an API v2 type.
"""
return (
pyxb_obj._element().name().namespace()
== d1_common.types.dataoneTypes_v2_0.Namespace
)
# Conversions between XML representations
def str_to_pyxb(xml_str):
"""Deserialize API XML doc to PyXB object.
Args:
xml_str: str
DataONE API XML doc
Returns:
PyXB object: Matching the API version of the XML doc.
"""
# PyXB shares information about all known types between all imported pyxb_binding, so
# a v1 binding will work for deserializing a v2 type.
return d1_common.types.dataoneTypes_v1.CreateFromDocument(xml_str)
def str_to_etree(xml_str, encoding='utf-8'):
"""Deserialize API XML doc to an ElementTree.
Args:
xml_str: bytes
DataONE API XML doc
encoding: str
Decoder to use when converting the XML doc ``bytes`` to a Unicode str.
Returns:
ElementTree: Matching the API version of the XML doc.
"""
parser = xml.etree.ElementTree.XMLParser(encoding=encoding)
return xml.etree.ElementTree.fromstring(xml_str, parser=parser)
def pyxb_to_str(pyxb_obj, encoding='utf-8'):
"""Serialize PyXB object to XML doc.
Args:
pyxb_obj: PyXB object
encoding: str
Encoder to use when converting the Unicode strings in the PyXB object to XML doc
``bytes``.
Returns:
str: API XML doc, matching the API version of ``pyxb_obj``.
"""
return pyxb_obj.toxml(encoding)
def etree_to_str(etree_obj, encoding='utf-8'):
"""Serialize ElementTree to XML doc.
Args:
etree_obj: ElementTree
encoding: str
Encoder to use when converting the Unicode strings in the ElementTree to XML doc
``bytes``.
Returns:
str: API XML doc matching the API version of ``etree_obj``.
"""
return xml.etree.ElementTree.tostring(etree_obj, encoding)
def pyxb_to_etree(pyxb_obj):
"""Convert PyXB object to ElementTree.
Args:
pyxb_obj: PyXB object
Returns:
ElementTree: Matching the API version of the PyXB object.
"""
return str_to_etree(pyxb_to_str(pyxb_obj))
def etree_to_pyxb(etree_obj):
"""Convert ElementTree to PyXB object.
Args:
etree_obj: ElementTree
Returns:
PyXB object: Matching the API version of the ElementTree object.
"""
    return str_to_pyxb(etree_to_str(etree_obj))
# ElementTree
def replace_namespace_with_prefix(tag_str, ns_reverse_dict=None):
"""Convert XML tag names with namespace on the form ``{namespace}tag`` to form
``prefix:tag``.
Args:
tag_str: str
Tag name with namespace. E.g.:
``{http://www.openarchives.org/ore/terms/}ResourceMap``.
ns_reverse_dict : dict
A dictionary of namespace to prefix to use for the conversion. If not supplied, a
default dict with the namespaces used in DataONE XML types is used.
Returns:
str: Tag name with prefix. E.g.: ``ore:ResourceMap``.
"""
ns_reverse_dict = ns_reverse_dict or NS_REVERSE_DICT
for namespace_str, prefix_str in ns_reverse_dict.items():
tag_str = tag_str.replace(
'{{{}}}'.format(namespace_str), '{}:'.format(prefix_str)
)
return tag_str
def etree_replace_namespace(etree_obj, ns_str):
"""In-place change the namespace of elements in an ElementTree.
Args:
etree_obj: ElementTree
ns_str : str
The namespace to set. E.g.: ``http://ns.dataone.org/service/types/v1``.
"""
def _replace_recursive(el, n):
el.tag = re.sub(r'{.*\}', '{{{}}}'.format(n), el.tag)
el.text = el.text.strip() if el.text else None
el.tail = el.tail.strip() if el.tail else None
for child_el in el:
_replace_recursive(child_el, n)
_replace_recursive(etree_obj, ns_str)
def strip_v2_elements(etree_obj):
"""In-place remove elements and attributes that are only valid in v2 types.
    Args:
        etree_obj: ElementTree
            ElementTree holding one of the DataONE API types that changed between
            v1 and v2.
"""
if etree_obj.tag == v2_0_tag('logEntry'):
strip_logEntry(etree_obj)
elif etree_obj.tag == v2_0_tag('log'):
strip_log(etree_obj)
elif etree_obj.tag == v2_0_tag('node'):
strip_node(etree_obj)
elif etree_obj.tag == v2_0_tag('nodeList'):
strip_node_list(etree_obj)
elif etree_obj.tag == v2_0_tag('systemMetadata'):
strip_system_metadata(etree_obj)
else:
raise ValueError('Unknown root element. tag="{}"'.format(etree_obj.tag))
def strip_system_metadata(etree_obj):
"""In-place remove elements and attributes that are only valid in v2 types from v1
System Metadata.
    Args:
        etree_obj: ElementTree
            ElementTree holding a v1 SystemMetadata.
"""
for series_id_el in etree_obj.findall('seriesId'):
etree_obj.remove(series_id_el)
for media_type_el in etree_obj.findall('mediaType'):
etree_obj.remove(media_type_el)
for file_name_el in etree_obj.findall('fileName'):
etree_obj.remove(file_name_el)
def strip_log(etree_obj):
"""In-place remove elements and attributes that are only valid in v2 types from v1
Log.
    Args:
        etree_obj: ElementTree
            ElementTree holding a v1 Log.
"""
for log_entry_el in etree_obj.findall('logEntry'):
strip_logEntry(log_entry_el)
def strip_logEntry(etree_obj):
"""In-place remove elements and attributes that are only valid in v2 types from v1
LogEntry.
    Args:
        etree_obj: ElementTree
            ElementTree holding a v1 LogEntry.
"""
for event_el in etree_obj.findall('event'):
if event_el.text not in (
'create',
'read',
'update',
'delete',
'replicate',
'synchronization_failed',
'replication_failed',
):
event_el.text = 'create'
def strip_node(etree_obj):
"""In-place remove elements and attributes that are only valid in v2 types from v1
Node.
    Args:
        etree_obj: ElementTree
            ElementTree holding a v1 Node.
"""
for property_el in etree_obj.findall('property'):
etree_obj.remove(property_el)
def strip_node_list(etree_obj):
"""In-place remove elements and attributes that are only valid in v2 types from v1
NodeList.
    Args:
        etree_obj: ElementTree
            ElementTree holding a v1 NodeList.
"""
for node_el in etree_obj.findall('node'):
strip_node(node_el)
def v2_0_tag(element_name):
"""Add a v2 namespace to a tag name.
Args:
element_name: str
The name of a DataONE v2 type. E.g.: ``NodeList``.
Returns:
str: The tag name with DataONE API v2 namespace. E.g.:
``{http://ns.dataone.org/service/types/v2.0}NodeList``
"""
return '{{{}}}{}'.format(NS_DICT['v2'], element_name)
81026946ea3022346dd16b919458822b14a2eb72 | ac2c3e8c278d0aac250d31fd023c645fa3984a1b | /saleor/saleor/graphql/core/enums.py | 8046ea0ca5905d638c87ce50b2abe62ae9f3c1c4 | [
"BSD-3-Clause",
"CC-BY-4.0"
] | permissive | jonndoe/saleor-test-shop | 152bc8bef615382a45ca5f4f86f3527398bd1ef9 | 1e83176684f418a96260c276f6a0d72adf7dcbe6 | refs/heads/master | 2023-01-21T16:54:36.372313 | 2020-12-02T10:19:13 | 2020-12-02T10:19:13 | 316,514,489 | 1 | 1 | BSD-3-Clause | 2020-11-27T23:29:20 | 2020-11-27T13:52:33 | TypeScript | UTF-8 | Python | false | false | 5,088 | py | import graphene
from ...account import error_codes as account_error_codes
from ...app import error_codes as app_error_codes
from ...checkout import error_codes as checkout_error_codes
from ...core import JobStatus, error_codes as core_error_codes
from ...core.permissions import get_permissions_enum_list
from ...core.weight import WeightUnits
from ...csv import error_codes as csv_error_codes
from ...discount import error_codes as discount_error_codes
from ...giftcard import error_codes as giftcard_error_codes
from ...invoice import error_codes as invoice_error_codes
from ...menu import error_codes as menu_error_codes
from ...order import error_codes as order_error_codes
from ...page import error_codes as page_error_codes
from ...payment import error_codes as payment_error_codes
from ...plugins import error_codes as plugin_error_codes
from ...plugins.vatlayer import TaxRateType as CoreTaxRateType
from ...product import error_codes as product_error_codes
from ...shipping import error_codes as shipping_error_codes
from ...warehouse import error_codes as warehouse_error_codes
from ...webhook import error_codes as webhook_error_codes
from ...wishlist import error_codes as wishlist_error_codes
from .utils import str_to_enum
# FIXME: CoreTaxRateType should be removed once we drop the old API fields dedicated
# to taxes
class OrderDirection(graphene.Enum):
ASC = ""
DESC = "-"
@property
def description(self):
# Disable all the no-member violations in this function
# pylint: disable=no-member
if self == OrderDirection.ASC:
return "Specifies an ascending sort order."
if self == OrderDirection.DESC:
return "Specifies a descending sort order."
raise ValueError("Unsupported enum value: %s" % self.value)
class ReportingPeriod(graphene.Enum):
TODAY = "TODAY"
THIS_MONTH = "THIS_MONTH"
def to_enum(enum_cls, *, type_name=None, **options) -> graphene.Enum:
"""Create a Graphene enum from a class containing a set of options.
:param enum_cls:
The class to build the enum from.
:param type_name:
The name of the type. Default is the class name + 'Enum'.
:param options:
- description:
Contains the type description (default is the class's docstring)
- deprecation_reason:
Contains the deprecation reason.
The default is enum_cls.__deprecation_reason__ or None.
    :return: A ``graphene.Enum`` type built from ``enum_cls.CHOICES``.
"""
# note this won't work until
# https://github.com/graphql-python/graphene/issues/956 is fixed
deprecation_reason = getattr(enum_cls, "__deprecation_reason__", None)
if deprecation_reason:
options.setdefault("deprecation_reason", deprecation_reason)
type_name = type_name or (enum_cls.__name__ + "Enum")
enum_data = [(str_to_enum(code.upper()), code) for code, name in enum_cls.CHOICES]
return graphene.Enum(type_name, enum_data, **options)
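# Editor's sketch: converting a plain choices-style class (class and values are
# hypothetical; compare the real conversions below):
# class Color:
#     CHOICES = [("red", "Red"), ("blue", "Blue")]
# ColorEnum = to_enum(Color)  # -> graphene Enum named "ColorEnum" with RED/BLUE members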
TaxRateType = graphene.Enum(
"TaxRateType", [(str_to_enum(rate[0]), rate[0]) for rate in CoreTaxRateType.CHOICES]
)
JobStatusEnum = to_enum(JobStatus)
PermissionEnum = graphene.Enum("PermissionEnum", get_permissions_enum_list())
WeightUnitsEnum = graphene.Enum(
"WeightUnitsEnum", [(str_to_enum(unit[0]), unit[0]) for unit in WeightUnits.CHOICES]
)
AccountErrorCode = graphene.Enum.from_enum(account_error_codes.AccountErrorCode)
AppErrorCode = graphene.Enum.from_enum(app_error_codes.AppErrorCode)
CheckoutErrorCode = graphene.Enum.from_enum(checkout_error_codes.CheckoutErrorCode)
ExportErrorCode = graphene.Enum.from_enum(csv_error_codes.ExportErrorCode)
DiscountErrorCode = graphene.Enum.from_enum(discount_error_codes.DiscountErrorCode)
PluginErrorCode = graphene.Enum.from_enum(plugin_error_codes.PluginErrorCode)
GiftCardErrorCode = graphene.Enum.from_enum(giftcard_error_codes.GiftCardErrorCode)
MenuErrorCode = graphene.Enum.from_enum(menu_error_codes.MenuErrorCode)
MetadataErrorCode = graphene.Enum.from_enum(core_error_codes.MetadataErrorCode)
OrderErrorCode = graphene.Enum.from_enum(order_error_codes.OrderErrorCode)
InvoiceErrorCode = graphene.Enum.from_enum(invoice_error_codes.InvoiceErrorCode)
PageErrorCode = graphene.Enum.from_enum(page_error_codes.PageErrorCode)
PaymentErrorCode = graphene.Enum.from_enum(payment_error_codes.PaymentErrorCode)
PermissionGroupErrorCode = graphene.Enum.from_enum(
account_error_codes.PermissionGroupErrorCode
)
ProductErrorCode = graphene.Enum.from_enum(product_error_codes.ProductErrorCode)
ShopErrorCode = graphene.Enum.from_enum(core_error_codes.ShopErrorCode)
ShippingErrorCode = graphene.Enum.from_enum(shipping_error_codes.ShippingErrorCode)
StockErrorCode = graphene.Enum.from_enum(warehouse_error_codes.StockErrorCode)
WarehouseErrorCode = graphene.Enum.from_enum(warehouse_error_codes.WarehouseErrorCode)
WebhookErrorCode = graphene.Enum.from_enum(webhook_error_codes.WebhookErrorCode)
WishlistErrorCode = graphene.Enum.from_enum(wishlist_error_codes.WishlistErrorCode)
TranslationErrorCode = graphene.Enum.from_enum(core_error_codes.TranslationErrorCode)
| [
"[email protected]"
] | |
9e3b1da5a3fe55cbd4b708c74114095a43a6ea6a | f1748434c3a06e6005618afc1c1b259ce2c5b115 | /Learn/DataStructure/Graph/MinimumSpanningTree/kruskal.py | 04faf853259776ac7c8a87b5d0e1da488a7ebcec | [] | no_license | yue-yue-haha/Algorithm | f5175ae0e1339dba98c1bbd1c6b238634ced969c | 1f7a120d262b80f2b4fc452a33f698ccdd9e1fad | refs/heads/master | 2023-03-12T22:53:32.873956 | 2021-02-19T02:29:14 | 2021-02-19T02:29:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 823 | py | from functools import cmp_to_key
from Learn.DataStructure.UnionFindSet.template import UnionFindSet
def kruskal(graph: dict):
    """Build a minimum spanning tree of ``graph`` with Kruskal's algorithm.
    ``graph`` maps "nodes" to a list of 1-based node ids and "edges" to a list
    of (start, end, weight) tuples; the chosen MST edges are returned as a list.
    """
    tree, n = [], len(graph.get("nodes"))
    ufs = UnionFindSet(n + 1)  # n + 1 slots because node ids start at 1
    # Greedily scan the edges in nondecreasing weight order.
    graph.get("edges").sort(key=cmp_to_key(lambda a, b: a[2] - b[2]))
    for start, end, weight in graph.get("edges"):
        if len(tree) == n - 1:  # a spanning tree of n nodes has n - 1 edges
            break
        if not ufs.same(start, end):  # keep only edges that join two components
            ufs.union(start, end)
            tree.append((start, end, weight))
    return tree
if __name__ == '__main__':
graphData = {
"nodes": [1, 2, 3, 4, 5],
"edges": [(1, 2, 1),
(1, 3, 3),
(1, 4, 5),
(2, 3, 2),
(2, 4, 4),
(2, 5, 6)]
}
res = kruskal(graphData)
print(f"Kruskal = {res}")
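    # Expected result, assuming the usual same/union semantics of UnionFindSet:
    # Kruskal = [(1, 2, 1), (2, 3, 2), (2, 4, 4), (2, 5, 6)]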
| [
"[email protected]"
] | |
066e5664df0a50e956687b5f7e4c722d5ee4ec28 | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/module_test1_20200420155724.py | ac696c9fa98c42a760ab8d2fbdbab7e551b5fc27 | [] | no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 52 | py | list_a = [1,2,7,4,5,6,3]
list_a.sort()
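# After the in-place sort, this prints [1, 2, 3, 4, 5, 6, 7].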
print(list_a) | [
"[email protected]"
] | |
2c99815d97b01dd33b9f52fda6a4d4f27a943fe2 | d3af72e4c623dffeda95e662d495a95c8f2e317a | /scripts/gene_checker/annotations/glimmer_annotation.py | 53a169406e4c8d9a608689a2828ffe6aab7b0b88 | [] | no_license | bioinf/bi2014-mycoplasma-genitalium | 0e2fbf095a461339064ea38f1be4586897f7c2ac | bd8eb82bb8d883faeb0492d74deb7a396577b782 | refs/heads/master | 2016-09-05T11:34:00.325602 | 2014-12-06T12:37:12 | 2014-12-06T12:37:12 | 24,504,082 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 649 | py | __author__ = 'nikita_kartashov'
from generic_annotation import GenericAnnotation
class GlimmerAnnotation(GenericAnnotation):
def __init__(self, line):
super(GlimmerAnnotation, self).__init__()
self.__parse_annotation(line.split())
self._length = self.end() - self.start()
def __parse_annotation(self, annotation_list):
self._id = annotation_list[0]
self._start = int(annotation_list[1])
self._end = int(annotation_list[2])
self._forward_chain = annotation_list[3][0] == '+'
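    # Hedged example of the input this parser assumes (a Glimmer3-style
    # predict line: id, start, end, signed frame, score), e.g.
    #     "orf00001  107  352  +1  6.54"
    # gives _id='orf00001', _start=107, _end=352, _forward_chain=True.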
def check_annotation(self, code):
return super(GlimmerAnnotation, self).check_annotation(code) | [
"[email protected]"
] | |
795a88f021e67b571a81ee946bc1108a9a4f7243 | 336f11ee8934581f05ab620c5324c601ba864b05 | /python_unit_testing/Introduction/The possible test results/possible_test_results.py | 2f19b271168fe43ac4d72e877771ed0bb40c8b2c | [] | no_license | ancient-clever/sandbox | 01adeee2638a23533965cf57ca873a30e7dfad3d | 87dec3bf8860a67a36154ee5d7c826d919d3111b | refs/heads/master | 2022-05-17T04:49:54.703068 | 2020-01-19T17:44:27 | 2020-01-19T17:44:27 | 206,946,679 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 223 | py | import unittest
class TestPossibleResults(unittest.TestCase):
def test_success(self):
pass
def test_failure(self):
self.assertEqual(True, False)
def test_error(self):
raise Exception
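# Hedged addition so the file can be run directly (standard unittest entry point):
if __name__ == '__main__':
    unittest.main()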
| [
"[email protected]"
] | |
83dde2b826ec281476013ca779abb5abbedd1de5 | 1617a9a9c92146bcdac89b5efb1ef0d18408160b | /contlab7/31/solution.py | f68d9662c990a36c62a4e6b0cab797991dbca222 | [] | no_license | LitRidl/checker-content | 1b1329b4462b87731e0755ab33480ff063a94a00 | b5d0456c8d4d28db6e6022e272a95a385f253797 | refs/heads/master | 2023-08-17T18:08:07.377680 | 2018-02-04T11:16:34 | 2018-02-04T11:16:34 | 120,077,784 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 759 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
u'''
Encode a word over the Latin alphabet with a Caesar cipher whose shift equals 13
(see the rot13 cipher).
The input word is a sequence of lowercase Latin letters a-z.
Output: the sequence of lowercase Latin letters obtained from the input by
applying the rot13 cipher.
Sample input: icanhazcheeseburger
'''
from __future__ import print_function
try:
a = raw_input().strip()
except EOFError:
a = ''
result = a.encode('rot_13')
print('{0}'.format(result))
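# Hedged hand-rolled equivalent of the rot_13 codec, assuming ASCII a-z input:
# result = ''.join(chr((ord(c) - ord('a') + 13) % 26 + ord('a')) for c in a)
# rot13 is its own inverse: shifting by 13 twice is a full 26-letter cycle.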
| [
"[email protected]"
] | |
2efbeedfb1feb7576a382150e2e2754a57d664e2 | 003372621424577306aff35de88f7366fcc4baa0 | /sa_tools_core/libs/qcloud/qcloudsdkbmeip/EipBmBindVpcIpRequest.py | 65b528a54cc4dd083a6ef835f2228e02f6de0ceb | [] | no_license | stoensin/sa-tools-core | ab28ca5f7a8d5703952db9e6554b104682507964 | c0faeef4de8ba677817384d88cb107ad2308c03e | refs/heads/master | 2020-07-11T06:59:33.224305 | 2019-08-22T07:36:46 | 2019-08-22T07:36:46 | 204,472,284 | 1 | 0 | null | 2019-08-26T12:36:18 | 2019-08-26T12:36:18 | null | UTF-8 | Python | false | false | 852 | py | # -*- coding: utf-8 -*-
from ..qcloudsdkcore.request import Request
class EipBmBindVpcIpRequest(Request):
def __init__(self):
super(EipBmBindVpcIpRequest, self).__init__(
'bmeip', 'qcloudcliV1', 'EipBmBindVpcIp', 'bmeip.api.qcloud.com')
def get_eipId(self):
return self.get_params().get('eipId')
def set_eipId(self, eipId):
self.add_param('eipId', eipId)
def get_unVpcId(self):
return self.get_params().get('unVpcId')
def set_unVpcId(self, unVpcId):
self.add_param('unVpcId', unVpcId)
def get_vpcId(self):
return self.get_params().get('vpcId')
def set_vpcId(self, vpcId):
self.add_param('vpcId', vpcId)
def get_vpcIp(self):
return self.get_params().get('vpcIp')
def set_vpcIp(self, vpcIp):
self.add_param('vpcIp', vpcIp)
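# Hedged usage sketch (values are made up; only the setters defined above are used):
#     req = EipBmBindVpcIpRequest()
#     req.set_eipId('eip-xxxxxxxx')
#     req.set_vpcIp('10.0.0.8')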
| [
"[email protected]"
] | |
d70c87e261956e3d381b2fac3f1dc1fe02bde460 | 5d304c6ec0f01edee73e3b612f84307060c0da54 | /add_two_numbers.py | d9649cba9d5f094e5a6a7c4bdc848c78665f5ec0 | [] | no_license | xartisan/leetcode-solutions-in-python | cfa06b9e02f7ec0446cf6b71df4ea46caa359adc | 7e3929a4b5bd0344f93373979c9d1acc4ae192a7 | refs/heads/master | 2020-03-14T17:10:07.957089 | 2018-07-29T10:11:01 | 2018-07-29T10:11:01 | 131,713,447 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,340 | py | # Definition for singly-linked list.
class ListNode:
def __init__(self, x, next_node=None):
self.val = x
self.next = next_node
def __repr__(self):
rv = str(self.val)
if self.next is not None:
rv += ' -> ' + repr(self.next)
return rv
class Solution:
def addTwoNumbers(self, l1, l2):
"""
:type l1: ListNode
:type l2: ListNode
:rtype: ListNode
"""
# dummy node
dummy_node = ListNode(0)
cur_node = dummy_node
acc = 0
while l1 is not None or l2 is not None:
s = acc + (l1 or dummy_node).val + (l2 or dummy_node).val
acc = 0
if s >= 10:
s, acc = s - 10, 1
new_node = ListNode(s)
cur_node.next = new_node
cur_node = new_node
if l1 is not None:
l1 = l1.next
if l2 is not None:
l2 = l2.next
if acc == 1:
cur_node.next = ListNode(1)
return dummy_node.next
if __name__ == '__main__':
l1 = ListNode(2, ListNode(4, ListNode(3)))
print(l1)
l2 = ListNode(5, ListNode(6, ListNode(4)))
print(l2)
s = Solution()
rv = s.addTwoNumbers(l1, l2)
print(rv)
assert repr(rv) == '7 -> 0 -> 8', 'Wrong answer!'
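    # Digits are stored least-significant first, so this computes 342 + 465 = 807,
    # printed as 7 -> 0 -> 8; the loop runs in O(max(len(l1), len(l2))) time.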
| [
"[email protected]"
] | |
1d7d09411e8e1745eeb4db330972104eba8afa79 | 0fccee4c738449f5e0a8f52ea5acabf51db0e910 | /genfragments/ThirteenTeV/WR5000/WR5000_Zp1000_HN100_mumu_cfg.py | 59e93042f1fff94bb505ee06a633c5fd7a303381 | [] | no_license | cms-sw/genproductions | f308ffaf3586c19b29853db40e6d662e937940ff | dd3d3a3826343d4f75ec36b4662b6e9ff1f270f4 | refs/heads/master | 2023-08-30T17:26:02.581596 | 2023-08-29T14:53:43 | 2023-08-29T14:53:43 | 11,424,867 | 69 | 987 | null | 2023-09-14T12:41:28 | 2013-07-15T14:18:33 | Python | UTF-8 | Python | false | false | 6,270 | py | # Auto generated configuration file
# using:
# Revision: 1.19
# Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v
# with command line options: Configuration/GenProduction/python/ThirteenTeV/WRToNuMuToMuMuJJ_MW-3400_MNu-1700_TuneCUETP8M1_13TeV-pythia8_cfg.py --fileout step1.root --mc --pileup_input pileup.root --eventcontent RAWSIM --pileup 2016_25ns_SpringMC_PUScenarioV1_PoissonOOTPU --era Run2_25ns --datatier GEN-SIM-RAW --conditions 80X_mcRun2_asymptotic_2016_v3 --step GEN,SIM,DIGI,L1,DIGI2RAW,HLT:@frozen25ns --no_exec -n 1000 --python_filename test_GEN_to_HLT_cfg.py --fileout file:step1.root --python_filename step1.py
import FWCore.ParameterSet.Config as cms
from Configuration.StandardSequences.Eras import eras
process = cms.Process('HLT',eras.Run2_25ns)
# import of standard configurations
process.load('Configuration.StandardSequences.Services_cff')
process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi')
process.load('FWCore.MessageService.MessageLogger_cfi')
process.load('Configuration.EventContent.EventContent_cff')
process.load('SimGeneral.MixingModule.mix_2016_25ns_SpringMC_PUScenarioV1_PoissonOOTPU_cfi')
process.load('Configuration.StandardSequences.GeometryRecoDB_cff')
process.load('Configuration.Geometry.GeometrySimDB_cff')
process.load('Configuration.StandardSequences.MagneticField_cff')
process.load('Configuration.StandardSequences.Generator_cff')
process.load('IOMC.EventVertexGenerators.VtxSmearedRealistic50ns13TeVCollision_cfi')
process.load('GeneratorInterface.Core.genFilterSummary_cff')
process.load('Configuration.StandardSequences.SimIdeal_cff')
process.load('Configuration.StandardSequences.Digi_cff')
process.load('Configuration.StandardSequences.SimL1Emulator_cff')
process.load('Configuration.StandardSequences.DigiToRaw_cff')
process.load('HLTrigger.Configuration.HLT_GRun_cff')
process.load('Configuration.StandardSequences.EndOfProcess_cff')
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(1000)
)
# Input source
process.source = cms.Source("EmptySource")
process.options = cms.untracked.PSet(
)
# Production Info
process.configurationMetadata = cms.untracked.PSet(
annotation = cms.untracked.string('Configuration/GenProduction/python/ThirteenTeV/WRToNuMuToMuMuJJ_MW-3400_MNu-1700_TuneCUETP8M1_13TeV-pythia8_cfg.py nevts:1000'),
name = cms.untracked.string('Applications'),
version = cms.untracked.string('$Revision: 1.19 $')
)
# Output definition
process.RAWSIMoutput = cms.OutputModule("PoolOutputModule",
SelectEvents = cms.untracked.PSet(
SelectEvents = cms.vstring('generation_step')
),
dataset = cms.untracked.PSet(
dataTier = cms.untracked.string('GEN-SIM-RAW'),
filterName = cms.untracked.string('')
),
eventAutoFlushCompressedSize = cms.untracked.int32(5242880),
fileName = cms.untracked.string('step1.root'),
outputCommands = process.RAWSIMEventContent.outputCommands,
splitLevel = cms.untracked.int32(0)
)
# Additional output definition
# Other statements
process.mix.input.fileNames = cms.untracked.vstring(['pileup.root'])
process.genstepfilter.triggerConditions=cms.vstring("generation_step")
from Configuration.AlCa.GlobalTag import GlobalTag
process.GlobalTag = GlobalTag(process.GlobalTag, '80X_mcRun2_asymptotic_2016_v3', '')
process.generator = cms.EDFilter("Pythia8GeneratorFilter",
PythiaParameters = cms.PSet(
parameterSets = cms.vstring('pythia8CommonSettings',
'pythia8CUEP8M1Settings',
'processParameters'),
processParameters = cms.vstring('LeftRightSymmmetry:ffbar2ZR = on',
'9900024:m0 = 5000',
'9900023:m0 = 1000',
'9900014:m0 = 100',
'9900024:onMode = off',
'9900023:onMode = off',
'9900023:onIfAny = 9900014, 9900014',
'9900024:onIfAny = 13,9900014'),
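        # Hedged note: in Pythia8's LeftRightSymmetry model 9900024 is the heavy
        # W_R (m0 = 5000 GeV here), 9900023 the Z_R (1000 GeV) and 9900014 the
        # heavy Majorana neutrino N_mu (100 GeV); the onIfAny lines restrict
        # Z_R -> N_mu N_mu and W_R -> mu N_mu.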
pythia8CUEP8M1Settings = cms.vstring('Tune:pp 14',
'Tune:ee 7',
'MultipartonInteractions:pT0Ref=2.4024',
'MultipartonInteractions:ecmPow=0.25208',
'MultipartonInteractions:expPow=1.6'),
pythia8CommonSettings = cms.vstring('Tune:preferLHAPDF = 2',
'Main:timesAllowErrors = 10000',
'Check:epTolErr = 0.01',
'Beams:setProductionScalesFromLHEF = off',
'SLHA:keepSM = on',
'SLHA:minMassSM = 1000.',
'ParticleDecays:limitTau0 = on',
'ParticleDecays:tau0Max = 10',
'ParticleDecays:allowPhotonRadiation = on')
),
comEnergy = cms.double(13000.0),
filterEfficiency = cms.untracked.double(1.0),
maxEventsToPrint = cms.untracked.int32(1),
pythiaHepMCVerbosity = cms.untracked.bool(False),
pythiaPylistVerbosity = cms.untracked.int32(1)
)
# Path and EndPath definitions
process.generation_step = cms.Path(process.pgen)
process.simulation_step = cms.Path(process.psim)
process.digitisation_step = cms.Path(process.pdigi)
process.L1simulation_step = cms.Path(process.SimL1Emulator)
process.digi2raw_step = cms.Path(process.DigiToRaw)
process.genfiltersummary_step = cms.EndPath(process.genFilterSummary)
process.endjob_step = cms.EndPath(process.endOfProcess)
process.RAWSIMoutput_step = cms.EndPath(process.RAWSIMoutput)
# Schedule definition
process.schedule = cms.Schedule(process.generation_step,process.genfiltersummary_step,process.simulation_step,process.digitisation_step,process.L1simulation_step,process.digi2raw_step)
process.schedule.extend(process.HLTSchedule)
process.schedule.extend([process.endjob_step,process.RAWSIMoutput_step])
# filter all path with the production filter sequence
for path in process.paths:
getattr(process,path)._seq = process.generator * getattr(process,path)._seq
# customisation of the process.
# Automatic addition of the customisation function from HLTrigger.Configuration.customizeHLTforMC
from HLTrigger.Configuration.customizeHLTforMC import customizeHLTforFullSim
#call to customisation function customizeHLTforFullSim imported from HLTrigger.Configuration.customizeHLTforMC
process = customizeHLTforFullSim(process)
# End of customisation functions
| [
"[email protected]"
] | |
55b7536a054de0a55848639515962924284c30e3 | 4dec0f934760ca69e40b62fa56b37a1aa3918b24 | /test/test_web_deface_utils.py | 418c9c67aa4f0448c4eb77b11bd8eeb6df758bdc | [
"MIT"
] | permissive | rejahrehim/SecureTea-Project | 28ebc89f27ed59e3845b8c82f9316108cda40a24 | 43dec187e5848b9ced8a6b4957b6e9028d4d43cd | refs/heads/master | 2020-03-27T12:36:21.779426 | 2019-09-02T16:01:55 | 2019-09-02T16:01:55 | 146,556,097 | 1 | 0 | MIT | 2018-08-29T06:35:54 | 2018-08-29T06:35:53 | null | UTF-8 | Python | false | false | 1,228 | py | # -*- coding: utf-8 -*-
import unittest
from securetea.lib.web_deface import utils
try:
# if python 3.x.x
from unittest.mock import patch
except ImportError: # python 2.x.x
from mock import patch
class TestUtils(unittest.TestCase):
"""
Test class for SecureTea Web Deface Utils.
"""
@patch("securetea.lib.web_deface.utils.get_system_name")
def test_categorize_os(self, mock_system):
"""
Test categorize_os.
"""
mock_system.return_value = "debian"
self.assertEqual(utils.categorize_os(), "debian")
@patch("securetea.lib.web_deface.utils.platform")
def test_get_system_name(self, mock_platform):
"""
Test get_system_name.
"""
mock_platform.dist.return_value = ["debian"]
res = utils.get_system_name()
self.assertEqual(res, "debian")
@patch("securetea.lib.web_deface.utils.os")
def test_check_root(self, mock_os):
"""
Test check_root.
"""
# Running as root
mock_os.getuid.return_value = 0
self.assertTrue(utils.check_root())
# Not running as root
mock_os.getuid.return_value = 1
self.assertFalse(utils.check_root())
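    # The patches above replace the names get_system_name/platform/os inside
    # securetea.lib.web_deface.utils, so no real system calls run during tests.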
| [
"[email protected]"
] | |
b53fb9567516569729de2c7e54c259ae8eb494da | c53480e8d0fa431a3ac90e713010fae44e8db1a8 | /maskara/gallery/migrations/0032_auto__add_spaceimage__add_spacevideo.py | 1aac354e39aa52798ab2f7e8a4931575710530ca | [] | no_license | batpad/gallerym | 6b5fd2e89ebade106e4c9f5c917c11791b156d08 | 3e1c17dc30b5b23ab40772b6b88c6d1f05393cf2 | refs/heads/master | 2016-09-06T11:36:02.461954 | 2014-10-10T07:48:49 | 2014-10-10T07:48:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 34,066 | py | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'SpaceImage'
db.create_table('gallery_spaceimage', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('changed', self.gf('django.db.models.fields.DateTimeField')(null=True)),
('created', self.gf('django.db.models.fields.DateTimeField')(null=True)),
('image', self.gf('filebrowser.fields.FileBrowseField')(max_length=1024)),
('caption', self.gf('django.db.models.fields.CharField')(max_length=1024, blank=True)),
('displayed', self.gf('django.db.models.fields.BooleanField')(default=False)),
('order', self.gf('django.db.models.fields.PositiveIntegerField')()),
))
db.send_create_signal('gallery', ['SpaceImage'])
# Adding model 'SpaceVideo'
db.create_table('gallery_spacevideo', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('changed', self.gf('django.db.models.fields.DateTimeField')(null=True)),
('created', self.gf('django.db.models.fields.DateTimeField')(null=True)),
('video_file', self.gf('filebrowser.fields.FileBrowseField')(max_length=1024, null=True, blank=True)),
('vimeo_id', self.gf('django.db.models.fields.CharField')(max_length=128, blank=True)),
('caption', self.gf('django.db.models.fields.CharField')(max_length=1024, blank=True)),
('displayed', self.gf('django.db.models.fields.BooleanField')(default=False)),
('order', self.gf('django.db.models.fields.PositiveIntegerField')()),
))
db.send_create_signal('gallery', ['SpaceVideo'])
def backwards(self, orm):
# Deleting model 'SpaceImage'
db.delete_table('gallery_spaceimage')
# Deleting model 'SpaceVideo'
db.delete_table('gallery_spacevideo')
models = {
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'gallery.artist': {
'Meta': {'ordering': "['name']", 'object_name': 'Artist'},
'bio': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'bio_pdf': ('filebrowser.fields.FileBrowseField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'birth_location': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'changed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'dob': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('filebrowser.fields.FileBrowseField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'is_represented': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'old_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'pdf': ('filebrowser.fields.FileBrowseField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'gallery.artistaward': {
'Meta': {'object_name': 'ArtistAward'},
'artist': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gallery.Artist']"}),
'changed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {}),
'year': ('django.db.models.fields.IntegerField', [], {'max_length': '4'})
},
'gallery.artistcollection': {
'Meta': {'object_name': 'ArtistCollection'},
'artist': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gallery.Artist']"}),
'changed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {}),
'year': ('django.db.models.fields.IntegerField', [], {'max_length': '4'})
},
'gallery.artisteducation': {
'Meta': {'object_name': 'ArtistEducation'},
'artist': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gallery.Artist']"}),
'changed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {}),
'year': ('django.db.models.fields.IntegerField', [], {'max_length': '4'})
},
'gallery.artistgroupexhib': {
'Meta': {'object_name': 'ArtistGroupExhib'},
'artist': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gallery.Artist']"}),
'changed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {}),
'year': ('django.db.models.fields.IntegerField', [], {'max_length': '4'})
},
'gallery.artistnews': {
'Meta': {'object_name': 'ArtistNews'},
'artist': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gallery.Artist']"}),
'changed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('filebrowser.fields.FileBrowseField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {})
},
'gallery.artistpress': {
'Meta': {'object_name': 'ArtistPress'},
'artist': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gallery.Artist']"}),
'changed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {}),
'year': ('django.db.models.fields.IntegerField', [], {'max_length': '4'})
},
'gallery.artistpressrelease': {
'Meta': {'ordering': "['order', 'id']", 'object_name': 'ArtistPressRelease'},
'artist': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gallery.Artist']"}),
'author': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'changed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('filebrowser.fields.FileBrowseField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'old_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {}),
'pdf': ('filebrowser.fields.FileBrowseField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'publisher': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'gallery.artistreview': {
'Meta': {'ordering': "['order', 'id']", 'object_name': 'ArtistReview'},
'artist': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gallery.Artist']"}),
'author': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'changed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'old_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {}),
'pdf': ('filebrowser.fields.FileBrowseField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'translated_by': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'gallery.artistsoloexhib': {
'Meta': {'object_name': 'ArtistSoloExhib'},
'artist': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gallery.Artist']"}),
'changed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {}),
'year': ('django.db.models.fields.IntegerField', [], {'max_length': '4'})
},
'gallery.artistwork': {
'Meta': {'ordering': "['artist', 'order']", 'object_name': 'ArtistWork'},
'artist': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gallery.Artist']"}),
'attribution': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'category': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'changed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'code': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('filebrowser.fields.FileBrowseField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'is_available': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_selected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'material': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'old_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {}),
'price': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'size': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'size_text': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'theme': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'year': ('django.db.models.fields.IntegerField', [], {'max_length': '4', 'null': 'True', 'blank': 'True'})
},
'gallery.artistworkimage': {
'Meta': {'ordering': "['order']", 'object_name': 'ArtistWorkImage'},
'caption': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'changed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('filebrowser.fields.FileBrowseField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'is_hires': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {}),
'work': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gallery.ArtistWork']"})
},
'gallery.event': {
'Meta': {'object_name': 'Event'},
'changed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'date': ('django.db.models.fields.DateField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'featured_artists': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['gallery.Artist']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('filebrowser.fields.FileBrowseField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'default': "'Gallery Maskara'", 'max_length': '512', 'blank': 'True'}),
'old_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'pdf': ('filebrowser.fields.FileBrowseField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'press_release': ('filebrowser.fields.FileBrowseField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'time_from': ('django.db.models.fields.TimeField', [], {}),
'time_to': ('django.db.models.fields.TimeField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '1024'})
},
'gallery.eventpressrelease': {
'Meta': {'object_name': 'EventPressRelease'},
'author': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'changed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gallery.Event']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('filebrowser.fields.FileBrowseField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'old_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {}),
'pdf': ('filebrowser.fields.FileBrowseField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'publisher': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'gallery.eventreview': {
'Meta': {'ordering': "['order', 'id']", 'object_name': 'EventReview'},
'author': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'changed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gallery.Event']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'old_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {}),
'pdf': ('filebrowser.fields.FileBrowseField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'translated_by': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'gallery.eventwork': {
'Meta': {'object_name': 'EventWork'},
'changed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gallery.Event']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.IntegerField', [], {}),
'work': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gallery.ArtistWork']"})
},
'gallery.exhibition': {
'Meta': {'object_name': 'Exhibition'},
'autopublish_date': ('django.db.models.fields.DateField', [], {}),
'changed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'curated_by': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'end_date': ('django.db.models.fields.DateField', [], {}),
'exhibition_works': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['gallery.ArtistWork']", 'null': 'True', 'through': "orm['gallery.ExhibitionWork']", 'blank': 'True'}),
'featured_artists': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['gallery.Artist']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('filebrowser.fields.FileBrowseField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'default': "'Gallery Maskara'", 'max_length': '255'}),
'old_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'one_liner': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'pdf': ('filebrowser.fields.FileBrowseField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'press_release': ('filebrowser.fields.FileBrowseField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'preview_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'preview_end_time': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'}),
'preview_start_time': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'start_date': ('django.db.models.fields.DateField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '1024'})
},
'gallery.exhibitionpressrelease': {
'Meta': {'object_name': 'ExhibitionPressRelease'},
'author': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'changed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'exhibition': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gallery.Exhibition']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('filebrowser.fields.FileBrowseField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'old_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {}),
'pdf': ('filebrowser.fields.FileBrowseField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'publisher': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'gallery.exhibitionreview': {
'Meta': {'ordering': "['order', 'id']", 'object_name': 'ExhibitionReview'},
'author': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'changed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'exhibition': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gallery.Exhibition']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'old_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {}),
'pdf': ('filebrowser.fields.FileBrowseField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'translated_by': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'gallery.exhibitionwork': {
'Meta': {'object_name': 'ExhibitionWork'},
'changed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'exhibition': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gallery.Exhibition']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.IntegerField', [], {}),
'work': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gallery.ArtistWork']"})
},
'gallery.frontpageitem': {
'Meta': {'ordering': "('position',)", 'object_name': 'FrontPageItem'},
'blurb': ('django.db.models.fields.CharField', [], {'default': "'Current Exhibition'", 'max_length': '512', 'blank': 'True'}),
'changed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gallery.Event']", 'null': 'True', 'blank': 'True'}),
'exhibition': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gallery.Exhibition']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'position': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'gallery.galleryperson': {
'Meta': {'object_name': 'GalleryPerson'},
'changed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('filebrowser.fields.FileBrowseField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'text': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'gallery.publication': {
'Meta': {'object_name': 'Publication'},
'artist': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gallery.Artist']", 'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'available': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'changed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'editor': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gallery.Event']", 'null': 'True', 'blank': 'True'}),
'exhibition': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gallery.Exhibition']", 'null': 'True', 'blank': 'True'}),
'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('filebrowser.fields.FileBrowseField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'isbn': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'old_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'pdf': ('filebrowser.fields.FileBrowseField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'publisher': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '1024'})
},
'gallery.spaceimage': {
'Meta': {'object_name': 'SpaceImage'},
'caption': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'changed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'displayed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('filebrowser.fields.FileBrowseField', [], {'max_length': '1024'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'gallery.spacevideo': {
'Meta': {'object_name': 'SpaceVideo'},
'caption': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'changed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'displayed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {}),
'video_file': ('filebrowser.fields.FileBrowseField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'vimeo_id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'})
},
'gallery.video': {
'Meta': {'ordering': "['order', 'id']", 'object_name': 'Video'},
'changed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'duration': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'order': ('django.db.models.fields.IntegerField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'video_file': ('filebrowser.fields.FileBrowseField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'vimeo_id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'})
}
}
complete_apps = ['gallery'] | [
"[email protected]"
] | |
3170717deab4907adb3968420a93b4be31911af6 | 303a4d41da8f2cd2000630ff30424d2875490e67 | /hotglycol/genangletarg.py | e3e3568faeabd27bb319e698874ecd591da9fe3c | [] | no_license | noobermin/sharks | beb1d3d6a593e8d62f3d7416697d4de1fe9558b1 | af87113781eb67af45a9c2f79b73b1512ae0a1fa | refs/heads/master | 2022-05-10T11:55:17.200591 | 2021-09-30T14:27:22 | 2021-09-30T14:27:22 | 19,997,024 | 0 | 2 | null | 2016-05-20T19:27:49 | 2014-05-20T20:49:16 | Common Lisp | UTF-8 | Python | false | false | 1,559 | py | #!/usr/bin/env python2
import numpy as np;
def mk45(dim=[-5e-4,5e-4,-5e-4,5e-4,],
N0=1.08e22,
width=0.5e-4,
dropcorners=False,):
    """Return a vectorized density f(x, y): N0 inside a 45-degree slab of full
    width `width` along the line y = x, clipped to the box dim = [xmin, xmax,
    ymin, ymax]; 0.0 outside. dropcorners=True cuts the slab ends flat,
    dropcorners='round' caps them with semicircular ends."""
    xlim = dim[:2];
    ylim = dim[2:];
def _corner(x,y,good):
ret = y < width/np.sqrt(2) + ylim[0] -(x-xlim[0])
ret|= y > -width/np.sqrt(2) + ylim[1] -(x-xlim[1])
ret = np.logical_not(ret);
return np.logical_and(ret,good);
def roundcorner(x,y,good):
ret = y < ylim[0] + width - (x-xlim[0])
ret|= y > ylim[1] - width - (x-xlim[1])
ret = good and not ret;
ret|= (x-xlim[0]-width/2)**2 + (y-ylim[0]-width/2)**2 <= width**2/4.0;
ret|= (x-xlim[1]+width/2)**2 + (y-ylim[1]+width/2)**2 <= width**2/4.0;
return ret;
if dropcorners == True:
corner = _corner;
elif dropcorners == 'round':
corner = roundcorner;
else:
corner = lambda x,y,g: g;
@np.vectorize
def f(x,y):
        good = xlim[0] <= x <= xlim[1];
        good&= ylim[0] <= y <= ylim[1];
        # |y - x|*sqrt(2) < width <=> perpendicular distance to y = x < width/2
        good&=np.abs(y - x)*np.sqrt(2) < width;
        good =corner(x,y,good);
if good:
return N0;
else:
return 0.0;
return f;
if __name__ == "__main__":
dx = 0.01
mn,mx = -10.5,10.5,
lmn, lmx = -11.5,11.5
width = 2;
F=mk45(dim=[mn,mx,mn,mx],width=width,dropcorners='round');
X,Y=np.mgrid[
lmn:lmx + dx:dx,
lmn:lmx + dx:dx];
import matplotlib.pyplot as plt;
plt.pcolormesh(X,Y,F(X,Y));
plt.axis('equal');
plt.show();
| [
"[email protected]"
] | |
3bd3248d2c6bde2df7607b256dd029658457051f | aef69557d8960205a780e61b7c2dfbb1d7733449 | /Code/SarahBeth/tests.py | 7bbcce152ea94f86ce923360be692fa4a260987c | [] | no_license | sbtries/class_pandaaaa | 579d6be89a511bdc36b0ce8c95545b9b704a734a | bbf9c419a00879118a55c2c19e5b46b08af806bc | refs/heads/master | 2023-07-18T14:18:25.881333 | 2021-09-02T22:48:29 | 2021-09-02T22:48:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 154 | py | # card1= input('what's your first card?')
# card2 = input("what's your second card?")
# card3 = input("what's your third card?")
cards2 = ['q', '1', '2']
| [
"[email protected]"
] | |
f91f6579324bac309cb6e4d1a8a4bdeb440f516a | 24fe1f54fee3a3df952ca26cce839cc18124357a | /servicegraph/lib/python2.7/site-packages/acimodel-4.0_3d-py2.7.egg/cobra/modelimpl/l2/egrbytespart1h.py | 69cf9a1ee5ebf125e11163cd4ef07ba94c3b17cf | [] | no_license | aperiyed/servicegraph-cloudcenter | 4b8dc9e776f6814cf07fe966fbd4a3481d0f45ff | 9eb7975f2f6835e1c0528563a771526896306392 | refs/heads/master | 2023-05-10T17:27:18.022381 | 2020-01-20T09:18:28 | 2020-01-20T09:18:28 | 235,065,676 | 0 | 0 | null | 2023-05-01T21:19:14 | 2020-01-20T09:36:37 | Python | UTF-8 | Python | false | false | 22,891 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2019 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class EgrBytesPart1h(Mo):
"""
Mo doc not defined in techpub!!!
"""
meta = StatsClassMeta("cobra.model.l2.EgrBytesPart1h", "egress bytes")
counter = CounterMeta("multicast", CounterCategory.COUNTER, "bytes", "egress multicast bytes")
counter._propRefs[PropCategory.IMPLICIT_LASTREADING] = "multicastLast"
counter._propRefs[PropCategory.IMPLICIT_CUMULATIVE] = "multicastCum"
counter._propRefs[PropCategory.IMPLICIT_PERIODIC] = "multicastPer"
counter._propRefs[PropCategory.IMPLICIT_MIN] = "multicastMin"
counter._propRefs[PropCategory.IMPLICIT_MAX] = "multicastMax"
counter._propRefs[PropCategory.IMPLICIT_AVG] = "multicastAvg"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "multicastSpct"
counter._propRefs[PropCategory.IMPLICIT_BASELINE] = "multicastBase"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "multicastThr"
counter._propRefs[PropCategory.IMPLICIT_TREND_BASE] = "multicastTrBase"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "multicastTr"
counter._propRefs[PropCategory.IMPLICIT_RATE] = "multicastRate"
meta._counters.append(counter)
counter = CounterMeta("unicast", CounterCategory.COUNTER, "bytes", "egress unicast bytes")
counter._propRefs[PropCategory.IMPLICIT_LASTREADING] = "unicastLast"
counter._propRefs[PropCategory.IMPLICIT_CUMULATIVE] = "unicastCum"
counter._propRefs[PropCategory.IMPLICIT_PERIODIC] = "unicastPer"
counter._propRefs[PropCategory.IMPLICIT_MIN] = "unicastMin"
counter._propRefs[PropCategory.IMPLICIT_MAX] = "unicastMax"
counter._propRefs[PropCategory.IMPLICIT_AVG] = "unicastAvg"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "unicastSpct"
counter._propRefs[PropCategory.IMPLICIT_BASELINE] = "unicastBase"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "unicastThr"
counter._propRefs[PropCategory.IMPLICIT_TREND_BASE] = "unicastTrBase"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "unicastTr"
counter._propRefs[PropCategory.IMPLICIT_RATE] = "unicastRate"
meta._counters.append(counter)
meta.moClassName = "l2EgrBytesPart1h"
meta.rnFormat = "CDl2EgrBytesPart1h-%(nodeId)s"
meta.category = MoCategory.STATS_CURRENT
meta.label = "current portion of the egress bytes stats in 1 hour"
meta.writeAccessMask = 0x1
meta.readAccessMask = 0x1
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = True
meta.parentClasses.add("cobra.model.vns.EPpInfo")
meta.parentClasses.add("cobra.model.vns.SDEPpInfo")
meta.parentClasses.add("cobra.model.fv.InBEpP")
meta.parentClasses.add("cobra.model.fv.RtdEpP")
meta.parentClasses.add("cobra.model.dhcp.PRelPg")
meta.parentClasses.add("cobra.model.fv.TnlEpP")
meta.parentClasses.add("cobra.model.fv.BrEpP")
meta.parentClasses.add("cobra.model.fv.AEPg")
meta.parentClasses.add("cobra.model.l2ext.InstP")
meta.parentClasses.add("cobra.model.vns.SHEPpInfo")
meta.parentClasses.add("cobra.model.l3ext.InstP")
meta.parentClasses.add("cobra.model.infra.PEPg")
meta.parentClasses.add("cobra.model.fv.InstPEpP")
meta.parentClasses.add("cobra.model.fv.OoBEpP")
meta.parentClasses.add("cobra.model.infra.CEPg")
meta.parentClasses.add("cobra.model.vns.REPpInfo")
meta.parentClasses.add("cobra.model.fv.SvcBD")
meta.parentClasses.add("cobra.model.mgmt.InB")
meta.parentClasses.add("cobra.model.fv.TnlEPg")
meta.parentClasses.add("cobra.model.fv.SvcEpP")
meta.parentClasses.add("cobra.model.fv.EpP")
meta.parentClasses.add("cobra.model.fv.BD")
meta.parentClasses.add("cobra.model.fv.Ctx")
meta.parentClasses.add("cobra.model.dhcp.CRelPg")
meta.parentClasses.add("cobra.model.l3ext.InstPDef")
meta.superClasses.add("cobra.model.stats.CurrAgPart")
meta.superClasses.add("cobra.model.stats.Curr")
meta.superClasses.add("cobra.model.stats.Item")
meta.superClasses.add("cobra.model.l2.EgrBytesPart")
meta.rnPrefixes = [
('CDl2EgrBytesPart1h-', True),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "cnt", "cnt", 16212, PropCategory.REGULAR)
prop.label = "Number of Collections During this Interval"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("cnt", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "lastCollOffset", "lastCollOffset", 111, PropCategory.REGULAR)
prop.label = "Collection Length"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("lastCollOffset", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "multicastAvg", "multicastAvg", 21696, PropCategory.IMPLICIT_AVG)
prop.label = "egress multicast bytes average value"
prop.isOper = True
prop.isStats = True
meta.props.add("multicastAvg", prop)
prop = PropMeta("str", "multicastBase", "multicastBase", 21691, PropCategory.IMPLICIT_BASELINE)
prop.label = "egress multicast bytes baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("multicastBase", prop)
prop = PropMeta("str", "multicastCum", "multicastCum", 21692, PropCategory.IMPLICIT_CUMULATIVE)
prop.label = "egress multicast bytes cumulative"
prop.isOper = True
prop.isStats = True
meta.props.add("multicastCum", prop)
prop = PropMeta("str", "multicastLast", "multicastLast", 21690, PropCategory.IMPLICIT_LASTREADING)
prop.label = "egress multicast bytes current value"
prop.isOper = True
prop.isStats = True
meta.props.add("multicastLast", prop)
prop = PropMeta("str", "multicastMax", "multicastMax", 21695, PropCategory.IMPLICIT_MAX)
prop.label = "egress multicast bytes maximum value"
prop.isOper = True
prop.isStats = True
meta.props.add("multicastMax", prop)
prop = PropMeta("str", "multicastMin", "multicastMin", 21694, PropCategory.IMPLICIT_MIN)
prop.label = "egress multicast bytes minimum value"
prop.isOper = True
prop.isStats = True
meta.props.add("multicastMin", prop)
prop = PropMeta("str", "multicastPer", "multicastPer", 21693, PropCategory.IMPLICIT_PERIODIC)
prop.label = "egress multicast bytes periodic"
prop.isOper = True
prop.isStats = True
meta.props.add("multicastPer", prop)
prop = PropMeta("str", "multicastRate", "multicastRate", 21701, PropCategory.IMPLICIT_RATE)
prop.label = "egress multicast bytes rate"
prop.isOper = True
prop.isStats = True
meta.props.add("multicastRate", prop)
prop = PropMeta("str", "multicastSpct", "multicastSpct", 21697, PropCategory.IMPLICIT_SUSPECT)
prop.label = "egress multicast bytes suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("multicastSpct", prop)
prop = PropMeta("str", "multicastThr", "multicastThr", 21698, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "egress multicast bytes thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("multicastThr", prop)
prop = PropMeta("str", "multicastTr", "multicastTr", 21700, PropCategory.IMPLICIT_TREND)
prop.label = "egress multicast bytes trend"
prop.isOper = True
prop.isStats = True
meta.props.add("multicastTr", prop)
prop = PropMeta("str", "multicastTrBase", "multicastTrBase", 21699, PropCategory.IMPLICIT_TREND_BASE)
prop.label = "egress multicast bytes trend baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("multicastTrBase", prop)
prop = PropMeta("str", "nodeId", "nodeId", 21607, PropCategory.REGULAR)
prop.label = "Node Id"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.isNaming = True
meta.props.add("nodeId", prop)
prop = PropMeta("str", "repIntvEnd", "repIntvEnd", 110, PropCategory.REGULAR)
prop.label = "Reporting End Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvEnd", prop)
prop = PropMeta("str", "repIntvStart", "repIntvStart", 109, PropCategory.REGULAR)
prop.label = "Reporting Start Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvStart", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
prop = PropMeta("str", "unicastAvg", "unicastAvg", 21751, PropCategory.IMPLICIT_AVG)
prop.label = "egress unicast bytes average value"
prop.isOper = True
prop.isStats = True
meta.props.add("unicastAvg", prop)
prop = PropMeta("str", "unicastBase", "unicastBase", 21746, PropCategory.IMPLICIT_BASELINE)
prop.label = "egress unicast bytes baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("unicastBase", prop)
prop = PropMeta("str", "unicastCum", "unicastCum", 21747, PropCategory.IMPLICIT_CUMULATIVE)
prop.label = "egress unicast bytes cumulative"
prop.isOper = True
prop.isStats = True
meta.props.add("unicastCum", prop)
prop = PropMeta("str", "unicastLast", "unicastLast", 21745, PropCategory.IMPLICIT_LASTREADING)
prop.label = "egress unicast bytes current value"
prop.isOper = True
prop.isStats = True
meta.props.add("unicastLast", prop)
prop = PropMeta("str", "unicastMax", "unicastMax", 21750, PropCategory.IMPLICIT_MAX)
prop.label = "egress unicast bytes maximum value"
prop.isOper = True
prop.isStats = True
meta.props.add("unicastMax", prop)
prop = PropMeta("str", "unicastMin", "unicastMin", 21749, PropCategory.IMPLICIT_MIN)
prop.label = "egress unicast bytes minimum value"
prop.isOper = True
prop.isStats = True
meta.props.add("unicastMin", prop)
prop = PropMeta("str", "unicastPer", "unicastPer", 21748, PropCategory.IMPLICIT_PERIODIC)
prop.label = "egress unicast bytes periodic"
prop.isOper = True
prop.isStats = True
meta.props.add("unicastPer", prop)
prop = PropMeta("str", "unicastRate", "unicastRate", 21756, PropCategory.IMPLICIT_RATE)
prop.label = "egress unicast bytes rate"
prop.isOper = True
prop.isStats = True
meta.props.add("unicastRate", prop)
prop = PropMeta("str", "unicastSpct", "unicastSpct", 21752, PropCategory.IMPLICIT_SUSPECT)
prop.label = "egress unicast bytes suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("unicastSpct", prop)
prop = PropMeta("str", "unicastThr", "unicastThr", 21753, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "egress unicast bytes thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("unicastThr", prop)
prop = PropMeta("str", "unicastTr", "unicastTr", 21755, PropCategory.IMPLICIT_TREND)
prop.label = "egress unicast bytes trend"
prop.isOper = True
prop.isStats = True
meta.props.add("unicastTr", prop)
prop = PropMeta("str", "unicastTrBase", "unicastTrBase", 21754, PropCategory.IMPLICIT_TREND_BASE)
prop.label = "egress unicast bytes trend baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("unicastTrBase", prop)
meta.namingProps.append(getattr(meta.props, "nodeId"))
# Deployment Meta
meta.deploymentQuery = True
meta.deploymentType = "Ancestor"
meta.deploymentQueryPaths.append(DeploymentPathMeta("ATgToGraphInst", "Graph Instances", "cobra.model.vns.GraphInst"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("AEPgToVirtualMachines", "Virtual Machines", "cobra.model.comp.Vm"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("InBToNode", "Node", "cobra.model.fv.Locale"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("EPgToNwIf", "Interface", "cobra.model.nw.If"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("CtxToNwIf", "Private Network to Interface", "cobra.model.nw.If"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("BDToNwIf", "Bridge Domain to Interface", "cobra.model.nw.If"))
def __init__(self, parentMoOrDn, nodeId, markDirty=True, **creationProps):
namingVals = [nodeId]
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
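# Editorial sketch (the enclosing class name is an assumption based on the
# rnPrefix registered above): instances are named by nodeId, so e.g.
#   CDl2EgrBytesPart1h(parentMoOrDn, nodeId='101')
# yields the relative name 'CDl2EgrBytesPart1h-101'.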
# End of package file
# ##################################################
| [
"[email protected]"
] | |
35e94a777c8f8bc8a326cf67aab84070f19105e0 | e4dcd63ed6b66b2cc164e4a9667e163e9c40601c | /virtual/bin/confusable_homoglyphs | 8f58c52f317751308d6728b3aa7fd3e3d40eeb4d | [] | no_license | kepha-okari/tabler | 8e95803843f5fd9a8726e4ee85a57b48a77d2a2e | f682a77d581834151f723cdd2de2a37353369047 | refs/heads/master | 2022-12-22T21:52:45.879263 | 2018-07-18T15:45:52 | 2018-07-18T15:45:52 | 136,938,470 | 0 | 0 | null | 2022-12-08T00:59:12 | 2018-06-11T14:32:53 | Python | UTF-8 | Python | false | false | 269 | #!/home/rkepha/Documents/hir/timetabler/virtual/bin/python3.6
# -*- coding: utf-8 -*-
import re
import sys
from confusable_homoglyphs.cli import cli
if __name__ == '__main__':
    # pip's standard console-script shim: strip the Windows wrapper suffix
    # ('-script.py', '-script.pyw' or '.exe') from argv[0] so the CLI reports a
    # clean program name, e.g. 'confusable_homoglyphs-script.py' -> 'confusable_homoglyphs'.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(cli())
| [
"[email protected]"
] | ||
15e3e7fd280ed96ea086aab5efc0b2fc21e4b360 | a904c2fd006d6652d28af8eb8634d29d66d0024f | /net/PRESUBMIT.py | e82166c7b55e59fde047d70a83ff671f5f61f6a2 | [
"BSD-3-Clause"
] | permissive | esprehn/mojo | 1cba014abe08168509ebb202dd4b032f61f06713 | e50a99d5c5b046aa24a5415744f6661cb12a66c3 | refs/heads/master | 2020-12-01T13:05:42.886923 | 2014-10-30T22:23:04 | 2014-10-30T22:23:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,007 | py | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Chromium presubmit script for src/net.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details on the presubmit API built into gcl.
"""
def GetPreferredTryMasters(project, change):
masters = {
'tryserver.chromium.linux': {
'linux_chromium_rel': set(['defaulttests']),
},
'tryserver.chromium.mac': {
'mac_chromium_rel': set(['defaulttests']),
},
'tryserver.chromium.win': {
'win_chromium_rel': set(['defaulttests']),
}
}
  # Changes that touch NSS files will likely need a corresponding OpenSSL edit.
  # Conveniently, this substring check also matches _openssl.* changes, since
  # 'openssl' contains 'nss'.
if any('nss' in f.LocalPath() for f in change.AffectedFiles()):
masters['tryserver.chromium.linux'].setdefault(
'linux_redux', set()).add('defaulttests')
return masters
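# Illustrative result (hypothetical path): for a change touching
# net/third_party/nss/ssl.cc, the linux entry becomes
#   {'linux_chromium_rel': {'defaulttests'}, 'linux_redux': {'defaulttests'}}
# while the mac and win entries are returned unchanged.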
| [
"[email protected]"
] | |
0872b8ae6fd54a0177c8e8bc0a1dcb97b506dd72 | 946a9dcf4e644f0d3f806f016a23ae8d96095082 | /LeetCode/DP/375_GuessNumberHigherOrLower2.py | 2ac71345a5c550d6ff8bd1d9657c2b24b4745021 | [] | no_license | HzCeee/Algorithms | 3dea898f071f4103ca3eb038f63b01ba4ba95383 | e05f29071d0badd081535e773f43ebc303aa12c4 | refs/heads/master | 2018-10-20T21:54:37.154631 | 2018-10-11T20:46:34 | 2018-10-11T20:46:34 | 116,142,856 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 667 | py | class Solution(object):
def getMoneyAmount(self, n):
"""
:type n: int
:rtype: int
"""
        # dp[low][high] is the minimum amount of money needed to guarantee a win
        # on the 0-indexed range [low, high]; guessing index g costs g + 1 (the
        # numbers are 1..n), so:
        #   dp[low][high] = min(g + 1 + max(dp[low][g - 1], dp[g + 1][high]))
        # over g in [low, high). Guessing `high` itself is never optimal (it is
        # dominated by guessing high - 1). E.g. getMoneyAmount(10) == 16.
        dp = [[0] * n for _ in range(n)]
        for low in reversed(range(n)):
            for high in range(low + 1, n):
                tmp = [guess + 1 + max(dp[low][guess - 1] if guess > low else 0,
                                       dp[guess + 1][high])
                       for guess in range(low, high)]
                dp[low][high] = min(tmp)
return dp[0][-1] | [
"[email protected]"
] | |
e18292f3203af3b07da2eabe2cbae4e3147fd60b | b6be7bef4c8ffd48c3a1c89fa2ad84bc8d042eb7 | /Inception.py | 57388fe850812ec59514d31ce4604505005ffc93 | [] | no_license | Wushaoyong/tensflowtest | 48e747c1a6cdd7374313013d43cb095a97f3bc34 | f393a945e733fcfe0c63f5dcfffc44c60d2a5862 | refs/heads/master | 2023-01-10T01:47:38.764265 | 2020-11-10T02:26:16 | 2020-11-10T02:26:16 | 305,975,864 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,758 | py | import tensorflow as tf
# Inception-style block: a 3x3 conv, three parallel max-pool branches over its
# output, then a 'reduce' conv mapping the concatenation back to input_2 channels.
# (`pool6`, `batch_norm`, `batch_norm2` and `is_training_mmodel` are assumed to
# be supplied by surrounding code; this snippet is not runnable on its own.)
with tf.variable_scope('conv7') as scope:
    input_image = pool6     # output of the previous layer
    input_1 = 110           # channels arriving from pool6
    input_2 = 120           # channels produced by the 3x3 conv below
    input_3 = input_2 * 3   # depth after concatenating the three pooled branches
weights = tf.get_variable('weights',shape=[3, 3, input_1, input_2],
dtype=tf.float32,
initializer=tf.truncated_normal_initializer(stddev=0.05, dtype=tf.float32))
biases = tf.get_variable('biases',shape=[input_2],dtype=tf.float32,
initializer=tf.constant_initializer(0.1))
    weights_deduce = tf.get_variable(
        'weights_deduce', shape=[3, 3, input_3, input_2], dtype=tf.float32,
        initializer=tf.truncated_normal_initializer(stddev=0.05, dtype=tf.float32))
    biases_deduce = tf.get_variable(
        'biases_deduce', shape=[input_2], dtype=tf.float32,
        initializer=tf.constant_initializer(0.1))
conv = tf.nn.conv2d(input_image, weights, strides=[1, 1, 1, 1], padding='SAME')
pre_activation = tf.nn.bias_add(conv, biases)
pre_activation = batch_norm(pre_activation, is_training_mmodel)
conv_relu = tf.nn.relu(pre_activation, name=scope.name)
pool_2_2 = tf.nn.max_pool(conv_relu, ksize=[1, 2, 2, 1], strides=[1, 1, 1, 1], padding='SAME', name='pooling2_2')
pool_3_3 = tf.nn.max_pool(conv_relu, ksize=[1, 3, 3, 1], strides=[1, 1, 1, 1], padding='SAME', name='pooling3_3')
pool_4_4 = tf.nn.max_pool(conv_relu, ksize=[1, 4, 4, 1], strides=[1, 1, 1, 1], padding='SAME', name='pooling4_4')
    # stride-1 SAME pooling keeps spatial dims, so branches concat on channel axis 3
    pool_pre = tf.concat([pool_2_2, pool_3_3, pool_4_4], 3)
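    # Shape sketch, assuming pool6 is [batch, H, W, 110] (input_1 channels):
    # conv_relu is [batch, H, W, 120]; each stride-1 SAME pool keeps that
    # shape, so pool_pre is [batch, H, W, 360], matching weights_deduce.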
with tf.variable_scope('reduce_layer') as deduce0:
conv = tf.nn.conv2d(pool_pre, weights_deduce, strides=[1, 1, 1, 1], padding='SAME')
pre_activation = tf.nn.bias_add(conv, biases_deduce)
pre_activation = batch_norm2(pre_activation, is_training_mmodel)
pool7 = tf.nn.relu(pre_activation, name=deduce0.name) | [
"[email protected]"
] | |
b0a7dbf1e9f1a38e8c3c7c3f7498e5b970e34a7f | d92fc7ba34412e8a1633b87bea5761c0bdbf196e | /utils/OfflineDataLoader.py | 4a6d8b8f8004d9e9b3de5a094691bbe7da61e89e | [
"MIT"
] | permissive | yigitozgumus/PolimiRecSys2018 | d37876c8d678aecc52d8c4558f2b6d40d1a03d83 | 1e63f46b83bd52399dc600b1abbf254bf47db094 | refs/heads/master | 2023-02-17T16:24:39.853937 | 2022-08-13T08:24:13 | 2022-08-13T08:24:13 | 152,449,643 | 0 | 0 | MIT | 2023-02-10T22:42:06 | 2018-10-10T15:49:36 | Python | UTF-8 | Python | false | false | 2,954 | py | from utils.util import working_directory
import os
import re
class OfflineDataLoader(object):
def __init__(self,model_folder="saved_models",parameter_folder="saved_parameters"):
super(OfflineDataLoader, self).__init__()
self.repository = "tuned_parameters"
self.model_folder = model_folder
self.parameter_folder = parameter_folder
self.training = self.model_folder + "/" + "training"
self.submission = self.model_folder + "/" + "submission"
self.training_models = self.get_models(self.training)
self.submission_models = self.get_models(self.submission)
self.parameter_files = self.get_models(self.parameter_folder)
self.repository_files = self.build_repository(self.repository)
    def get_model(self, model_name, training=True):
        # Locate a saved model file whose path matches `model_name` in either
        # the training or the submission folder, and split the first match
        # into (folder_path, file_name).
        source = self.training_models if training else self.submission_models
        result = [i for i in source if re.compile(model_name).search(i)]
        folder_path = "/".join(result[0].split("/")[:-1]) + "/"
        file_name = result[0].split("/")[-1]
        return folder_path, file_name
def get_parameter(self,model_name):
result = [i for i in self.parameter_files if re.compile(model_name).search(i)]
folder_path = str("/".join(result[0].split("/")[:-1])+"/")
file_name = result[0].split("/")[-1]
return folder_path,file_name
    def get_models(self, folder_name):
        # List folder entries, skipping hidden files and .txt files
        # (`name_filter` avoids shadowing the built-in `filter`).
        file_list = os.listdir(folder_name)
        name_filter = re.compile(r'\..+|.+\.txt$')
        return [folder_name + "/" + i for i in file_list if not name_filter.search(i)]
    def build_repository(self, repo_folder):
        # Collect every artifact path under the repository's experiment
        # folders, then categorize the error-free ones below.
        name_filter = re.compile(r'\..+|.+\.txt$')
        list_of_folders = os.listdir(repo_folder)
        filtered_dir_paths = [repo_folder + "/" + i for i in list_of_folders if not name_filter.search(i)]
        files = []
        for folder in filtered_dir_paths:
            with working_directory(folder):
                file_paths = [folder + "/" + i for i in os.listdir(".")]
                files.extend(file_paths)
        # Categorize, using only the error-free artifacts.
        error_filter = re.compile(r'Error.+')
        error_filtered_files = [i for i in files if not error_filter.search(i)]
        self.best_models = [i for i in error_filtered_files if re.compile(r'best_model$').search(i)]
        self.best_parameters = [i for i in error_filtered_files if re.compile(r'best_parameters$').search(i)]
        self.best_results = [i for i in error_filtered_files if re.compile(r'best_result_test$').search(i)]
        # __init__ stores this return value as self.repository_files.
        return error_filtered_files
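# Minimal usage sketch (the model-name pattern "ItemKNN" is hypothetical;
# folder names are the defaults declared in __init__):
#
#   loader = OfflineDataLoader()
#   folder, fname = loader.get_model("ItemKNN", training=True)
#   pfolder, pfile = loader.get_parameter("ItemKNN")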
| [
"[email protected]"
] | |
dde8ce33f41abb767cc6c00643052aeb98027f76 | 2c74bb301f1ed83b79254944183ac5a18a639fdf | /tests/components/modern_forms/test_init.py | fd6ff49547078e61218c25b6e6b05643814c2186 | [
"Apache-2.0"
] | permissive | Adminiuga/home-assistant | 5bec93007ddac1a268cc359bf7e48530c5f73b38 | dcf68d768e4f628d038f1fdd6e40bad713fbc222 | refs/heads/dev | 2023-02-22T22:03:31.013931 | 2022-11-09T00:27:20 | 2022-11-09T00:27:20 | 123,929,062 | 5 | 4 | Apache-2.0 | 2023-02-22T06:14:31 | 2018-03-05T14:11:09 | Python | UTF-8 | Python | false | false | 1,766 | py | """Tests for the Modern Forms integration."""
from unittest.mock import MagicMock, patch
from aiomodernforms import ModernFormsConnectionError
from homeassistant.components.modern_forms.const import DOMAIN
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from . import init_integration, modern_forms_no_light_call_mock
from tests.test_util.aiohttp import AiohttpClientMocker
@patch(
"homeassistant.components.modern_forms.ModernFormsDevice.update",
side_effect=ModernFormsConnectionError,
)
async def test_config_entry_not_ready(
mock_update: MagicMock, hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test the Modern Forms configuration entry not ready."""
entry = await init_integration(hass, aioclient_mock)
assert entry.state is ConfigEntryState.SETUP_RETRY
async def test_unload_config_entry(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test the Modern Forms configuration entry unloading."""
entry = await init_integration(hass, aioclient_mock)
assert hass.data[DOMAIN]
await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
assert not hass.data.get(DOMAIN)
async def test_fan_only_device(hass, aioclient_mock):
"""Test we set unique ID if not set yet."""
await init_integration(
hass, aioclient_mock, mock_type=modern_forms_no_light_call_mock
)
entity_registry = er.async_get(hass)
fan_entry = entity_registry.async_get("fan.modernformsfan_fan")
assert fan_entry
light_entry = entity_registry.async_get("light.modernformsfan_light")
assert light_entry is None
| [
"[email protected]"
] | |
3a801ecc0c5fdc7891d75192545846fbb79e28f5 | 58ff923a903cf2393c87988559e55ab6d0fd5be2 | /venv/lib/python3.9/site-packages/evalml/tests/component_tests/test_estimators.py | a34b16f71e38c2eb5f4463b1ed57a3b3d6748c16 | [] | no_license | gurmeet1109/AVJat0621 | 9930d08b9ae719918ee1e53b673e541c7900f940 | e845a4a5b48f5a63fd2833fbd18b91133d5ca736 | refs/heads/master | 2023-06-11T21:01:21.686729 | 2021-07-03T14:21:51 | 2021-07-03T14:21:51 | 382,629,300 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,867 | py | import string
import numpy as np
import pandas as pd
import pytest
import woodwork as ww
from evalml.exceptions import ComponentNotYetFittedError
from evalml.model_family import ModelFamily
from evalml.pipelines.components import Estimator
from evalml.pipelines.components.utils import (
_all_estimators_used_in_search,
get_estimators
)
from evalml.problem_types import (
ProblemTypes,
handle_problem_types,
is_binary,
is_multiclass,
is_regression
)
from evalml.utils import get_random_state
def test_estimators_feature_name_with_random_ascii(X_y_binary, X_y_multi, X_y_regression, ts_data, helper_functions):
for estimator_class in _all_estimators_used_in_search():
if estimator_class.__name__ == 'ARIMARegressor':
continue
supported_problem_types = [handle_problem_types(pt) for pt in estimator_class.supported_problem_types]
for problem_type in supported_problem_types:
clf = helper_functions.safe_init_component_with_njobs_1(estimator_class)
if is_binary(problem_type):
X, y = X_y_binary
elif is_multiclass(problem_type):
X, y = X_y_multi
elif is_regression(problem_type):
X, y = X_y_regression
X = get_random_state(clf.random_seed).random((X.shape[0], len(string.printable)))
col_names = ['column_{}'.format(ascii_char) for ascii_char in string.printable]
X = pd.DataFrame(X, columns=col_names)
assert clf.input_feature_names is None
clf.fit(X, y)
assert len(clf.feature_importance) == len(X.columns)
assert not np.isnan(clf.feature_importance).all().all()
predictions = clf.predict(X)
assert len(predictions) == len(y)
assert not np.isnan(predictions).all()
assert (clf.input_feature_names == col_names)
def test_binary_classification_estimators_predict_proba_col_order(helper_functions):
X = pd.DataFrame({'input': np.concatenate([np.array([-1] * 100), np.array([1] * 100)])})
data = np.concatenate([np.zeros(100), np.ones(100)])
y = pd.Series(data)
for estimator_class in _all_estimators_used_in_search():
supported_problem_types = [handle_problem_types(pt) for pt in estimator_class.supported_problem_types]
if ProblemTypes.BINARY in supported_problem_types:
estimator = helper_functions.safe_init_component_with_njobs_1(estimator_class)
estimator.fit(X, y)
predicted_proba = estimator.predict_proba(X)
expected = np.concatenate([(1 - data).reshape(-1, 1), data.reshape(-1, 1)], axis=1)
np.testing.assert_allclose(expected, np.round(predicted_proba).values)
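# Note: `expected` stacks (1 - y, y) column-wise, so the assertion above pins
# evalml's predict_proba column order -- column 0 is P(y == 0) and column 1 is
# P(y == 1) -- and on this cleanly separable set rounding recovers y exactly.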
def test_estimator_equality_different_supported_problem_types():
class MockEstimator(Estimator):
name = "Mock Estimator"
model_family = ModelFamily.LINEAR_MODEL
supported_problem_types = ['binary']
mock_estimator = MockEstimator()
mock_estimator.supported_problem_types = ['binary', 'multiclass']
assert mock_estimator != MockEstimator()
assert 'Mock Estimator' != mock_estimator
@pytest.mark.parametrize("data_type", ['li', 'np', 'pd', 'ww'])
def test_all_estimators_check_fit_input_type(data_type, X_y_binary, make_data_type, helper_functions):
X, y = X_y_binary
X = make_data_type(data_type, X)
y = make_data_type(data_type, y)
estimators_to_check = [estimator for estimator in get_estimators('binary')]
for component_class in estimators_to_check:
component = helper_functions.safe_init_component_with_njobs_1(component_class)
component.fit(X, y)
component.predict(X)
component.predict_proba(X)
@pytest.mark.parametrize("data_type", ['li', 'np', 'pd', 'ww'])
def test_all_estimators_check_fit_input_type_regression(data_type, X_y_regression, make_data_type, helper_functions):
X, y = X_y_regression
X = make_data_type(data_type, X)
y = make_data_type(data_type, y)
estimators_to_check = [estimator for estimator in get_estimators('regression')]
for component_class in estimators_to_check:
component = helper_functions.safe_init_component_with_njobs_1(component_class)
component.fit(X, y)
component.predict(X)
def test_estimator_predict_output_type(X_y_binary, ts_data, helper_functions):
X_np, y_np = X_y_binary
assert isinstance(X_np, np.ndarray)
assert isinstance(y_np, np.ndarray)
y_list = list(y_np)
X_df_no_col_names = pd.DataFrame(X_np)
range_index = pd.RangeIndex(start=0, stop=X_np.shape[1], step=1)
X_df_with_col_names = pd.DataFrame(X_np, columns=['x' + str(i) for i in range(X_np.shape[1])])
y_series_no_name = pd.Series(y_np)
y_series_with_name = pd.Series(y_np, name='target')
X_df_no_col_names_ts = pd.DataFrame(data=X_df_no_col_names.values, columns=X_df_no_col_names.columns,
index=pd.date_range(start='1/1/2018', periods=X_df_no_col_names.shape[0]))
X_df_with_col_names_ts = pd.DataFrame(data=X_df_with_col_names.values,
columns=['x' + str(i) for i in range(X_np.shape[1])],
index=pd.date_range(start='1/1/2018', periods=X_df_with_col_names.shape[0]))
datatype_combos = [(X_np, y_np, range_index, np.unique(y_np), False),
(X_np, y_list, range_index, np.unique(y_np), False),
(X_df_no_col_names, y_series_no_name, range_index, y_series_no_name.unique(), False),
(X_df_with_col_names, y_series_with_name, X_df_with_col_names.columns, y_series_with_name.unique(), False),
(X_df_no_col_names_ts, y_series_no_name, range_index, y_series_no_name.unique(), True),
(X_df_with_col_names_ts, y_series_with_name, X_df_with_col_names_ts.columns, y_series_with_name.unique(), True)]
for component_class in _all_estimators_used_in_search():
for X, y, X_cols_expected, y_cols_expected, time_series in datatype_combos:
if component_class.name == 'ARIMA Regressor' and not time_series:
continue
elif component_class.name != 'ARIMA Regressor' and time_series:
continue
print('Checking output of predict for estimator "{}" on X type {} cols {}, y type {} name {}'
.format(component_class.name, type(X),
X.columns if isinstance(X, pd.DataFrame) else None, type(y),
y.name if isinstance(y, pd.Series) else None))
component = helper_functions.safe_init_component_with_njobs_1(component_class)
component.fit(X, y=y)
predict_output = component.predict(X)
assert isinstance(predict_output, pd.Series)
assert len(predict_output) == len(y)
if component_class.name == 'ARIMA Regressor':
assert predict_output.name == 'predicted_mean'
else:
assert predict_output.name is None
if not ((ProblemTypes.BINARY in component_class.supported_problem_types) or
(ProblemTypes.MULTICLASS in component_class.supported_problem_types)):
continue
print('Checking output of predict_proba for estimator "{}" on X type {} cols {}, y type {} name {}'
.format(component_class.name, type(X),
X.columns if isinstance(X, pd.DataFrame) else None, type(y),
y.name if isinstance(y, pd.Series) else None))
predict_proba_output = component.predict_proba(X)
assert isinstance(predict_proba_output, pd.DataFrame)
assert predict_proba_output.shape == (len(y), len(np.unique(y)))
assert (list(predict_proba_output.columns) == y_cols_expected).all()
def test_estimator_check_for_fit_with_overrides(X_y_binary):
class MockEstimatorWithOverrides(Estimator):
name = "Mock Estimator"
model_family = ModelFamily.LINEAR_MODEL
supported_problem_types = ['binary']
def fit(self, X, y):
pass
def predict(self, X):
pass
def predict_proba(self, X):
pass
class MockEstimatorWithOverridesSubclass(Estimator):
name = "Mock Estimator Subclass"
model_family = ModelFamily.LINEAR_MODEL
supported_problem_types = ['binary']
def fit(self, X, y):
pass
def predict(self, X):
pass
def predict_proba(self, X):
pass
X, y = X_y_binary
est = MockEstimatorWithOverrides()
est_subclass = MockEstimatorWithOverridesSubclass()
with pytest.raises(ComponentNotYetFittedError, match='You must fit'):
est.predict(X)
with pytest.raises(ComponentNotYetFittedError, match='You must fit'):
est_subclass.predict(X)
est.fit(X, y)
est.predict(X)
est.predict_proba(X)
est_subclass.fit(X, y)
est_subclass.predict(X)
est_subclass.predict_proba(X)
def test_estimator_manage_woodwork(X_y_binary):
X_df = pd.DataFrame({"foo": [1, 2, 3], "bar": [4, 5, 6], "baz": [7, 8, 9]})
X_df.ww.init()
y_series = pd.Series([1, 2, 3])
y_series = ww.init_series(y_series)
class MockEstimator(Estimator):
name = "Mock Estimator Subclass"
model_family = ModelFamily.LINEAR_MODEL
supported_problem_types = ['binary']
# Test y is None case
est = MockEstimator()
X, y = est._manage_woodwork(X_df, y=None)
assert isinstance(X, pd.DataFrame)
assert y is None
# Test y is not None case
X, y = est._manage_woodwork(X_df, y_series)
assert isinstance(X, pd.DataFrame)
assert isinstance(y, pd.Series)
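    # The assertions above pin _manage_woodwork's contract: whether or not the
    # inputs were Woodwork-initialized, plain pandas containers come back.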
| [
"[email protected]"
] | |
1608fa8e1c84cd41b61e54ffed640e62909cec44 | c1f15f5834062b0d5a6d6857a89124f3f114b2bd | /quick/features.py | fea39d60247ca6a9a0113700e9d9fa359c349cb5 | [] | no_license | msoedov/quick.py | 4bd3294daf8136989bbb0d2316224360657330c3 | 5c89c11c7f6bc1fabbee757a8bb217dea08e359c | refs/heads/master | 2023-01-09T20:20:49.811969 | 2020-04-13T12:46:52 | 2020-04-13T12:46:52 | 44,098,220 | 15 | 4 | null | 2022-12-26T20:21:03 | 2015-10-12T09:41:22 | Python | UTF-8 | Python | false | false | 4,243 | py | import functools
import sys
import unittest
from collections import namedtuple
from copy import deepcopy
from typing import Any, Callable, List
from .common import *
from .core import Schema, flatten, generate
from .shrink import shrink
config = {"max_count": 100, "max_scale": sys.maxsize}
experiment = namedtuple("experiment", "name fn config")
default = object()
debug = print
def verify(prop: experiment, simplification: bool = False) -> Any:
test_case, schema = generate(prop.fn)
kwargs = flatten(schema)
ok = test_case(**kwargs)
if ok:
return True, kwargs, None, None
if simplification:
shrunked, simplified_to = shrink(test_case, schema)
else:
shrunked = False
simplified_to = kwargs
return False, kwargs, shrunked, simplified_to
def code_gen(
experiment: experiment, x: int, skip_group: Callable, simplification: bool = False
) -> Callable:
@skip_group
def test_experiment(t):
ok, kwargs, shrunked, simplified_to = verify(experiment, simplification)
if not ok:
description = "`{}` Input: #{}".format(experiment.name, kwargs)
if shrunked:
description = "{}\nSimplified to: {}".format(description, simplified_to)
else:
description = "{}\n Failed to simplify".format(description)
t.assertTrue(ok, description)
test_experiment.__doc__ = experiment.name
return test_experiment
class QuickCheck(object):
def __init__(self, **settings) -> None:
super(QuickCheck, self).__init__()
self.settings = settings or config
self.experiments = {}
def __call__(self, experiment_name: str, **defaults) -> Callable:
def decorator(fn):
config = default
if defaults:
config = deepcopy(self.settings)
config.update(defaults)
debug("Register {} to {}".format(experiment_name, fn))
self.experiments[experiment_name] = experiment(experiment_name, fn, config)
return fn
return decorator
forall = __call__
def as_testcase(
self, prototype=unittest.TestCase, skip_on_failure=True, simplification=True
):
"""
:param prototype: class of test case
:param skip_on_failure: boolean flag to skip all test group on first failure
:return: test case class
"""
debug("_" * 50)
class TestProperties(prototype):
"""
Automatically generated tests case based on quick check properties
"""
@classmethod
def should_fail(cls):
cls.__unittest_expecting_failure__ = True
return cls
def skip_if():
skip = False
def wrap(fn):
@functools.wraps(fn)
def inner(*args, **kwargs):
nonlocal skip
if skip and skip_on_failure:
raise unittest.SkipTest("Failed experiment")
try:
return fn(*args, **kwargs)
except Exception as e:
skip = True
raise e
return inner
return wrap
settings = self.settings
properties = []
for experiment in self.experiments.values():
if experiment.config is not default:
settings = experiment.config
max_count = settings["max_count"]
skip_group = skip_if()
debug("Generating {} tests for [{}]".format(max_count, experiment.name))
for x in range(max_count):
test_experiment = code_gen(experiment, x, skip_group, simplification)
setattr(
TestProperties, "{}#{}".format(experiment.name, x), test_experiment
)
properties.append(test_experiment)
TestProperties.properties = properties
return TestProperties
def verify(self) -> List[NoneType]:
test_cls = self.as_testcase()
test = test_cls()
return [prop(test) for prop in test.properties]
forall = QuickCheck()
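# Illustrative registration sketch (not part of this module; how `generate`
# derives inputs for a property is quick.py-specific and omitted here):
#
#   @forall("reversing twice is identity", max_count=50)
#   def prop_double_reverse(xs):
#       return list(reversed(list(reversed(xs)))) == xs
#
#   forall.verify()                    # run every registered property once
#   TestProps = forall.as_testcase()   # or generate a unittest.TestCase class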
| [
"[email protected]"
] |